
Upload files to 'app/main/CTO20220622/Drivers/CMSIS/Core/Include'

rita committed 2 years ago
parent
commit
f016b5d7f4
21 changed files with 29258 additions and 0 deletions
  1. + 865  - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_armcc.h
  2. + 1869 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_armclang.h
  3. + 266  - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_compiler.h
  4. + 2085 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_gcc.h
  5. + 935  - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_iccarm.h
  6. + 39   - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_version.h
  7. + 1918 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_armv8mbl.h
  8. + 2927 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_armv8mml.h
  9. + 949  - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm0.h
 10. + 1083 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm0plus.h
 11. + 976  - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm1.h
 12. + 1993 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm23.h
 13. + 1941 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm3.h
 14. + 3002 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm33.h
 15. + 2129 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm4.h
 16. + 2671 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm7.h
 17. + 1022 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_sc000.h
 18. + 1915 - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_sc300.h
 19. + 270  - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/mpu_armv7.h
 20. + 333  - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/mpu_armv8.h
 21. + 70   - 0   app/main/CTO20220622/Drivers/CMSIS/Core/Include/tz_context.h
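
All 21 files are the stock CMSIS-Core (Cortex-M) include set: cmsis_compiler.h dispatches to the matching toolchain header (cmsis_armcc.h, cmsis_armclang.h, cmsis_gcc.h or cmsis_iccarm.h), the core_*.h files carry the per-core register and intrinsic definitions, mpu_armv7.h and mpu_armv8.h add MPU helpers, and tz_context.h declares the TrustZone context API. A minimal sketch of the usual include path follows; device.h and DEVICE_IRQn are hypothetical placeholders, not part of this commit:

/* Illustrative only: a vendor device header selects the matching core header,
 * which in turn includes cmsis_compiler.h to pick one of the toolchain files above. */
#include "device.h"                /* hypothetical vendor header; includes e.g. core_cm4.h */

int main(void)
{
  NVIC_EnableIRQ(DEVICE_IRQn);     /* core_cm*.h API (DEVICE_IRQn is a placeholder)        */
  for (;;) { __WFI(); }            /* __WFI resolves via the selected cmsis_*.h header     */
}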

+ 865 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_armcc.h

@@ -0,0 +1,865 @@
+/**************************************************************************//**
+ * @file     cmsis_armcc.h
+ * @brief    CMSIS compiler ARMCC (Arm Compiler 5) header file
+ * @version  V5.0.4
+ * @date     10. January 2018
+ ******************************************************************************/
+/*
+ * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __CMSIS_ARMCC_H
+#define __CMSIS_ARMCC_H
+
+
+#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 400677)
+  #error "Please use Arm Compiler Toolchain V4.0.677 or later!"
+#endif
+
+/* CMSIS compiler control architecture macros */
+#if ((defined (__TARGET_ARCH_6_M  ) && (__TARGET_ARCH_6_M   == 1)) || \
+     (defined (__TARGET_ARCH_6S_M ) && (__TARGET_ARCH_6S_M  == 1))   )
+  #define __ARM_ARCH_6M__           1
+#endif
+
+#if (defined (__TARGET_ARCH_7_M ) && (__TARGET_ARCH_7_M  == 1))
+  #define __ARM_ARCH_7M__           1
+#endif
+
+#if (defined (__TARGET_ARCH_7E_M) && (__TARGET_ARCH_7E_M == 1))
+  #define __ARM_ARCH_7EM__          1
+#endif
+
+  /* __ARM_ARCH_8M_BASE__  not applicable */
+  /* __ARM_ARCH_8M_MAIN__  not applicable */
+
+
+/* CMSIS compiler specific defines */
+#ifndef   __ASM
+  #define __ASM                                  __asm
+#endif
+#ifndef   __INLINE
+  #define __INLINE                               __inline
+#endif
+#ifndef   __STATIC_INLINE
+  #define __STATIC_INLINE                        static __inline
+#endif
+#ifndef   __STATIC_FORCEINLINE                 
+  #define __STATIC_FORCEINLINE                   static __forceinline
+#endif           
+#ifndef   __NO_RETURN
+  #define __NO_RETURN                            __declspec(noreturn)
+#endif
+#ifndef   __USED
+  #define __USED                                 __attribute__((used))
+#endif
+#ifndef   __WEAK
+  #define __WEAK                                 __attribute__((weak))
+#endif
+#ifndef   __PACKED
+  #define __PACKED                               __attribute__((packed))
+#endif
+#ifndef   __PACKED_STRUCT
+  #define __PACKED_STRUCT                        __packed struct
+#endif
+#ifndef   __PACKED_UNION
+  #define __PACKED_UNION                         __packed union
+#endif
+#ifndef   __UNALIGNED_UINT32        /* deprecated */
+  #define __UNALIGNED_UINT32(x)                  (*((__packed uint32_t *)(x)))
+#endif
+#ifndef   __UNALIGNED_UINT16_WRITE
+  #define __UNALIGNED_UINT16_WRITE(addr, val)    ((*((__packed uint16_t *)(addr))) = (val))
+#endif
+#ifndef   __UNALIGNED_UINT16_READ
+  #define __UNALIGNED_UINT16_READ(addr)          (*((const __packed uint16_t *)(addr)))
+#endif
+#ifndef   __UNALIGNED_UINT32_WRITE
+  #define __UNALIGNED_UINT32_WRITE(addr, val)    ((*((__packed uint32_t *)(addr))) = (val))
+#endif
+#ifndef   __UNALIGNED_UINT32_READ
+  #define __UNALIGNED_UINT32_READ(addr)          (*((const __packed uint32_t *)(addr)))
+#endif
+#ifndef   __ALIGNED
+  #define __ALIGNED(x)                           __attribute__((aligned(x)))
+#endif
+#ifndef   __RESTRICT
+  #define __RESTRICT                             __restrict
+#endif
+
+/* ###########################  Core Function Access  ########################### */
+/** \ingroup  CMSIS_Core_FunctionInterface
+    \defgroup CMSIS_Core_RegAccFunctions CMSIS Core Register Access Functions
+  @{
+ */
+
+/**
+  \brief   Enable IRQ Interrupts
+  \details Enables IRQ interrupts by clearing the I-bit in the CPSR.
+           Can only be executed in Privileged modes.
+ */
+/* intrinsic void __enable_irq();     */
+
+
+/**
+  \brief   Disable IRQ Interrupts
+  \details Disables IRQ interrupts by setting the I-bit in the CPSR.
+           Can only be executed in Privileged modes.
+ */
+/* intrinsic void __disable_irq();    */
+
+/**
+  \brief   Get Control Register
+  \details Returns the content of the Control Register.
+  \return               Control Register value
+ */
+__STATIC_INLINE uint32_t __get_CONTROL(void)
+{
+  register uint32_t __regControl         __ASM("control");
+  return(__regControl);
+}
+
+
+/**
+  \brief   Set Control Register
+  \details Writes the given value to the Control Register.
+  \param [in]    control  Control Register value to set
+ */
+__STATIC_INLINE void __set_CONTROL(uint32_t control)
+{
+  register uint32_t __regControl         __ASM("control");
+  __regControl = control;
+}
+
+
+/**
+  \brief   Get IPSR Register
+  \details Returns the content of the IPSR Register.
+  \return               IPSR Register value
+ */
+__STATIC_INLINE uint32_t __get_IPSR(void)
+{
+  register uint32_t __regIPSR          __ASM("ipsr");
+  return(__regIPSR);
+}
+
+
+/**
+  \brief   Get APSR Register
+  \details Returns the content of the APSR Register.
+  \return               APSR Register value
+ */
+__STATIC_INLINE uint32_t __get_APSR(void)
+{
+  register uint32_t __regAPSR          __ASM("apsr");
+  return(__regAPSR);
+}
+
+
+/**
+  \brief   Get xPSR Register
+  \details Returns the content of the xPSR Register.
+  \return               xPSR Register value
+ */
+__STATIC_INLINE uint32_t __get_xPSR(void)
+{
+  register uint32_t __regXPSR          __ASM("xpsr");
+  return(__regXPSR);
+}
+
+
+/**
+  \brief   Get Process Stack Pointer
+  \details Returns the current value of the Process Stack Pointer (PSP).
+  \return               PSP Register value
+ */
+__STATIC_INLINE uint32_t __get_PSP(void)
+{
+  register uint32_t __regProcessStackPointer  __ASM("psp");
+  return(__regProcessStackPointer);
+}
+
+
+/**
+  \brief   Set Process Stack Pointer
+  \details Assigns the given value to the Process Stack Pointer (PSP).
+  \param [in]    topOfProcStack  Process Stack Pointer value to set
+ */
+__STATIC_INLINE void __set_PSP(uint32_t topOfProcStack)
+{
+  register uint32_t __regProcessStackPointer  __ASM("psp");
+  __regProcessStackPointer = topOfProcStack;
+}
+
+
+/**
+  \brief   Get Main Stack Pointer
+  \details Returns the current value of the Main Stack Pointer (MSP).
+  \return               MSP Register value
+ */
+__STATIC_INLINE uint32_t __get_MSP(void)
+{
+  register uint32_t __regMainStackPointer     __ASM("msp");
+  return(__regMainStackPointer);
+}
+
+
+/**
+  \brief   Set Main Stack Pointer
+  \details Assigns the given value to the Main Stack Pointer (MSP).
+  \param [in]    topOfMainStack  Main Stack Pointer value to set
+ */
+__STATIC_INLINE void __set_MSP(uint32_t topOfMainStack)
+{
+  register uint32_t __regMainStackPointer     __ASM("msp");
+  __regMainStackPointer = topOfMainStack;
+}
+
+
+/**
+  \brief   Get Priority Mask
+  \details Returns the current state of the priority mask bit from the Priority Mask Register.
+  \return               Priority Mask value
+ */
+__STATIC_INLINE uint32_t __get_PRIMASK(void)
+{
+  register uint32_t __regPriMask         __ASM("primask");
+  return(__regPriMask);
+}
+
+
+/**
+  \brief   Set Priority Mask
+  \details Assigns the given value to the Priority Mask Register.
+  \param [in]    priMask  Priority Mask
+ */
+__STATIC_INLINE void __set_PRIMASK(uint32_t priMask)
+{
+  register uint32_t __regPriMask         __ASM("primask");
+  __regPriMask = (priMask);
+}
+
+
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
+     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))     )
+
+/**
+  \brief   Enable FIQ
+  \details Enables FIQ interrupts by clearing the F-bit in the CPSR.
+           Can only be executed in Privileged modes.
+ */
+#define __enable_fault_irq                __enable_fiq
+
+
+/**
+  \brief   Disable FIQ
+  \details Disables FIQ interrupts by setting the F-bit in the CPSR.
+           Can only be executed in Privileged modes.
+ */
+#define __disable_fault_irq               __disable_fiq
+
+
+/**
+  \brief   Get Base Priority
+  \details Returns the current value of the Base Priority register.
+  \return               Base Priority register value
+ */
+__STATIC_INLINE uint32_t  __get_BASEPRI(void)
+{
+  register uint32_t __regBasePri         __ASM("basepri");
+  return(__regBasePri);
+}
+
+
+/**
+  \brief   Set Base Priority
+  \details Assigns the given value to the Base Priority register.
+  \param [in]    basePri  Base Priority value to set
+ */
+__STATIC_INLINE void __set_BASEPRI(uint32_t basePri)
+{
+  register uint32_t __regBasePri         __ASM("basepri");
+  __regBasePri = (basePri & 0xFFU);
+}
+
+
+/**
+  \brief   Set Base Priority with condition
+  \details Assigns the given value to the Base Priority register only if BASEPRI masking is disabled,
+           or the new value increases the BASEPRI priority level.
+  \param [in]    basePri  Base Priority value to set
+ */
+__STATIC_INLINE void __set_BASEPRI_MAX(uint32_t basePri)
+{
+  register uint32_t __regBasePriMax      __ASM("basepri_max");
+  __regBasePriMax = (basePri & 0xFFU);
+}
+
+
+/**
+  \brief   Get Fault Mask
+  \details Returns the current value of the Fault Mask register.
+  \return               Fault Mask register value
+ */
+__STATIC_INLINE uint32_t __get_FAULTMASK(void)
+{
+  register uint32_t __regFaultMask       __ASM("faultmask");
+  return(__regFaultMask);
+}
+
+
+/**
+  \brief   Set Fault Mask
+  \details Assigns the given value to the Fault Mask register.
+  \param [in]    faultMask  Fault Mask value to set
+ */
+__STATIC_INLINE void __set_FAULTMASK(uint32_t faultMask)
+{
+  register uint32_t __regFaultMask       __ASM("faultmask");
+  __regFaultMask = (faultMask & (uint32_t)1U);
+}
+
+#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
+           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))     ) */
+
+
+/**
+  \brief   Get FPSCR
+  \details Returns the current value of the Floating Point Status/Control register.
+  \return               Floating Point Status/Control register value
+ */
+__STATIC_INLINE uint32_t __get_FPSCR(void)
+{
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
+  register uint32_t __regfpscr         __ASM("fpscr");
+  return(__regfpscr);
+#else
+   return(0U);
+#endif
+}
+
+
+/**
+  \brief   Set FPSCR
+  \details Assigns the given value to the Floating Point Status/Control register.
+  \param [in]    fpscr  Floating Point Status/Control value to set
+ */
+__STATIC_INLINE void __set_FPSCR(uint32_t fpscr)
+{
+#if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+     (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
+  register uint32_t __regfpscr         __ASM("fpscr");
+  __regfpscr = (fpscr);
+#else
+  (void)fpscr;
+#endif
+}
+
+
+/*@} end of CMSIS_Core_RegAccFunctions */
+
+
+/* ##########################  Core Instruction Access  ######################### */
+/** \defgroup CMSIS_Core_InstructionInterface CMSIS Core Instruction Interface
+  Access to dedicated instructions
+  @{
+*/
+
+/**
+  \brief   No Operation
+  \details No Operation does nothing. This instruction can be used for code alignment purposes.
+ */
+#define __NOP                             __nop
+
+
+/**
+  \brief   Wait For Interrupt
+  \details Wait For Interrupt is a hint instruction that suspends execution until one of a number of events occurs.
+ */
+#define __WFI                             __wfi
+
+
+/**
+  \brief   Wait For Event
+  \details Wait For Event is a hint instruction that permits the processor to enter
+           a low-power state until one of a number of events occurs.
+ */
+#define __WFE                             __wfe
+
+
+/**
+  \brief   Send Event
+  \details Send Event is a hint instruction. It causes an event to be signaled to the CPU.
+ */
+#define __SEV                             __sev
+
+
+/**
+  \brief   Instruction Synchronization Barrier
+  \details Instruction Synchronization Barrier flushes the pipeline in the processor,
+           so that all instructions following the ISB are fetched from cache or memory,
+           after the instruction has been completed.
+ */
+#define __ISB() do {\
+                   __schedule_barrier();\
+                   __isb(0xF);\
+                   __schedule_barrier();\
+                } while (0U)
+
+/**
+  \brief   Data Synchronization Barrier
+  \details Acts as a special kind of Data Memory Barrier.
+           It completes when all explicit memory accesses before this instruction complete.
+ */
+#define __DSB() do {\
+                   __schedule_barrier();\
+                   __dsb(0xF);\
+                   __schedule_barrier();\
+                } while (0U)
+
+/**
+  \brief   Data Memory Barrier
+  \details Ensures the apparent order of the explicit memory operations before
+           and after the instruction, without ensuring their completion.
+ */
+#define __DMB() do {\
+                   __schedule_barrier();\
+                   __dmb(0xF);\
+                   __schedule_barrier();\
+                } while (0U)
+
+                  
+/**
+  \brief   Reverse byte order (32 bit)
+  \details Reverses the byte order in unsigned integer value. For example, 0x12345678 becomes 0x78563412.
+  \param [in]    value  Value to reverse
+  \return               Reversed value
+ */
+#define __REV                             __rev
+
+
+/**
+  \brief   Reverse byte order (16 bit)
+  \details Reverses the byte order within each halfword of a word. For example, 0x12345678 becomes 0x34127856.
+  \param [in]    value  Value to reverse
+  \return               Reversed value
+ */
+#ifndef __NO_EMBEDDED_ASM
+__attribute__((section(".rev16_text"))) __STATIC_INLINE __ASM uint32_t __REV16(uint32_t value)
+{
+  rev16 r0, r0
+  bx lr
+}
+#endif
+
+
+/**
+  \brief   Reverse byte order (16 bit)
+  \details Reverses the byte order in a 16-bit value and returns the signed 16-bit result. For example, 0x0080 becomes 0x8000.
+  \param [in]    value  Value to reverse
+  \return               Reversed value
+ */
+#ifndef __NO_EMBEDDED_ASM
+__attribute__((section(".revsh_text"))) __STATIC_INLINE __ASM int16_t __REVSH(int16_t value)
+{
+  revsh r0, r0
+  bx lr
+}
+#endif
+
+
+/**
+  \brief   Rotate Right in unsigned value (32 bit)
+  \details Rotate Right (immediate) provides the value of the contents of a register rotated by a variable number of bits.
+  \param [in]    op1  Value to rotate
+  \param [in]    op2  Number of Bits to rotate
+  \return               Rotated value
+ */
+#define __ROR                             __ror
+
+
+/**
+  \brief   Breakpoint
+  \details Causes the processor to enter Debug state.
+           Debug tools can use this to investigate system state when the instruction at a particular address is reached.
+  \param [in]    value  is ignored by the processor.
+                 If required, a debugger can use it to store additional information about the breakpoint.
+ */
+#define __BKPT(value)                       __breakpoint(value)
+
+
+/**
+  \brief   Reverse bit order of value
+  \details Reverses the bit order of the given value.
+  \param [in]    value  Value to reverse
+  \return               Reversed value
+ */
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
+     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))     )
+  #define __RBIT                          __rbit
+#else
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __RBIT(uint32_t value)
+{
+  uint32_t result;
+  uint32_t s = (4U /*sizeof(v)*/ * 8U) - 1U; /* extra shift needed at end */
+
+  result = value;                      /* r will be reversed bits of v; first get LSB of v */
+  for (value >>= 1U; value != 0U; value >>= 1U)
+  {
+    result <<= 1U;
+    result |= value & 1U;
+    s--;
+  }
+  result <<= s;                        /* shift when v's highest bits are zero */
+  return result;
+}
+#endif
+
+
+/**
+  \brief   Count leading zeros
+  \details Counts the number of leading zeros of a data value.
+  \param [in]  value  Value to count the leading zeros
+  \return             number of leading zeros in value
+ */
+#define __CLZ                             __clz
+
+
+#if ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
+     (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))     )
+
+/**
+  \brief   LDR Exclusive (8 bit)
+  \details Executes a exclusive LDR instruction for 8 bit value.
+  \param [in]    ptr  Pointer to data
+  \return             value of type uint8_t at (*ptr)
+ */
+#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
+  #define __LDREXB(ptr)                                                        ((uint8_t ) __ldrex(ptr))
+#else
+  #define __LDREXB(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint8_t ) __ldrex(ptr))  _Pragma("pop")
+#endif
+
+
+/**
+  \brief   LDR Exclusive (16 bit)
+  \details Executes a exclusive LDR instruction for 16 bit values.
+  \param [in]    ptr  Pointer to data
+  \return        value of type uint16_t at (*ptr)
+ */
+#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
+  #define __LDREXH(ptr)                                                        ((uint16_t) __ldrex(ptr))
+#else
+  #define __LDREXH(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint16_t) __ldrex(ptr))  _Pragma("pop")
+#endif
+
+
+/**
+  \brief   LDR Exclusive (32 bit)
+  \details Executes a exclusive LDR instruction for 32 bit values.
+  \param [in]    ptr  Pointer to data
+  \return        value of type uint32_t at (*ptr)
+ */
+#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
+  #define __LDREXW(ptr)                                                        ((uint32_t ) __ldrex(ptr))
+#else
+  #define __LDREXW(ptr)          _Pragma("push") _Pragma("diag_suppress 3731") ((uint32_t ) __ldrex(ptr))  _Pragma("pop")
+#endif
+
+
+/**
+  \brief   STR Exclusive (8 bit)
+  \details Executes a exclusive STR instruction for 8 bit values.
+  \param [in]  value  Value to store
+  \param [in]    ptr  Pointer to location
+  \return          0  Function succeeded
+  \return          1  Function failed
+ */
+#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
+  #define __STREXB(value, ptr)                                                 __strex(value, ptr)
+#else
+  #define __STREXB(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
+#endif
+
+
+/**
+  \brief   STR Exclusive (16 bit)
+  \details Executes a exclusive STR instruction for 16 bit values.
+  \param [in]  value  Value to store
+  \param [in]    ptr  Pointer to location
+  \return          0  Function succeeded
+  \return          1  Function failed
+ */
+#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
+  #define __STREXH(value, ptr)                                                 __strex(value, ptr)
+#else
+  #define __STREXH(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
+#endif
+
+
+/**
+  \brief   STR Exclusive (32 bit)
+  \details Executes a exclusive STR instruction for 32 bit values.
+  \param [in]  value  Value to store
+  \param [in]    ptr  Pointer to location
+  \return          0  Function succeeded
+  \return          1  Function failed
+ */
+#if defined(__ARMCC_VERSION) && (__ARMCC_VERSION < 5060020)
+  #define __STREXW(value, ptr)                                                 __strex(value, ptr)
+#else
+  #define __STREXW(value, ptr)   _Pragma("push") _Pragma("diag_suppress 3731") __strex(value, ptr)        _Pragma("pop")
+#endif
+
+
+/**
+  \brief   Remove the exclusive lock
+  \details Removes the exclusive lock which is created by LDREX.
+ */
+#define __CLREX                           __clrex
+
+
+/**
+  \brief   Signed Saturate
+  \details Saturates a signed value.
+  \param [in]  value  Value to be saturated
+  \param [in]    sat  Bit position to saturate to (1..32)
+  \return             Saturated value
+ */
+#define __SSAT                            __ssat
+
+
+/**
+  \brief   Unsigned Saturate
+  \details Saturates an unsigned value.
+  \param [in]  value  Value to be saturated
+  \param [in]    sat  Bit position to saturate to (0..31)
+  \return             Saturated value
+ */
+#define __USAT                            __usat
+
+
+/**
+  \brief   Rotate Right with Extend (32 bit)
+  \details Moves each bit of a bitstring right by one bit.
+           The carry input is shifted in at the left end of the bitstring.
+  \param [in]    value  Value to rotate
+  \return               Rotated value
+ */
+#ifndef __NO_EMBEDDED_ASM
+__attribute__((section(".rrx_text"))) __STATIC_INLINE __ASM uint32_t __RRX(uint32_t value)
+{
+  rrx r0, r0
+  bx lr
+}
+#endif
+
+
+/**
+  \brief   LDRT Unprivileged (8 bit)
+  \details Executes a Unprivileged LDRT instruction for 8 bit value.
+  \param [in]    ptr  Pointer to data
+  \return             value of type uint8_t at (*ptr)
+ */
+#define __LDRBT(ptr)                      ((uint8_t )  __ldrt(ptr))
+
+
+/**
+  \brief   LDRT Unprivileged (16 bit)
+  \details Executes a Unprivileged LDRT instruction for 16 bit values.
+  \param [in]    ptr  Pointer to data
+  \return        value of type uint16_t at (*ptr)
+ */
+#define __LDRHT(ptr)                      ((uint16_t)  __ldrt(ptr))
+
+
+/**
+  \brief   LDRT Unprivileged (32 bit)
+  \details Executes a Unprivileged LDRT instruction for 32 bit values.
+  \param [in]    ptr  Pointer to data
+  \return        value of type uint32_t at (*ptr)
+ */
+#define __LDRT(ptr)                       ((uint32_t ) __ldrt(ptr))
+
+
+/**
+  \brief   STRT Unprivileged (8 bit)
+  \details Executes a Unprivileged STRT instruction for 8 bit values.
+  \param [in]  value  Value to store
+  \param [in]    ptr  Pointer to location
+ */
+#define __STRBT(value, ptr)               __strt(value, ptr)
+
+
+/**
+  \brief   STRT Unprivileged (16 bit)
+  \details Executes a Unprivileged STRT instruction for 16 bit values.
+  \param [in]  value  Value to store
+  \param [in]    ptr  Pointer to location
+ */
+#define __STRHT(value, ptr)               __strt(value, ptr)
+
+
+/**
+  \brief   STRT Unprivileged (32 bit)
+  \details Executes a Unprivileged STRT instruction for 32 bit values.
+  \param [in]  value  Value to store
+  \param [in]    ptr  Pointer to location
+ */
+#define __STRT(value, ptr)                __strt(value, ptr)
+
+#else  /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
+           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))     ) */
+
+/**
+  \brief   Signed Saturate
+  \details Saturates a signed value.
+  \param [in]  value  Value to be saturated
+  \param [in]    sat  Bit position to saturate to (1..32)
+  \return             Saturated value
+ */
+__attribute__((always_inline)) __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
+{
+  if ((sat >= 1U) && (sat <= 32U))
+  {
+    const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
+    const int32_t min = -1 - max ;
+    if (val > max)
+    {
+      return max;
+    }
+    else if (val < min)
+    {
+      return min;
+    }
+  }
+  return val;
+}
+
+/**
+  \brief   Unsigned Saturate
+  \details Saturates an unsigned value.
+  \param [in]  value  Value to be saturated
+  \param [in]    sat  Bit position to saturate to (0..31)
+  \return             Saturated value
+ */
+__attribute__((always_inline)) __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
+{
+  if (sat <= 31U)
+  {
+    const uint32_t max = ((1U << sat) - 1U);
+    if (val > (int32_t)max)
+    {
+      return max;
+    }
+    else if (val < 0)
+    {
+      return 0U;
+    }
+  }
+  return (uint32_t)val;
+}
+
+#endif /* ((defined (__ARM_ARCH_7M__ ) && (__ARM_ARCH_7M__  == 1)) || \
+           (defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))     ) */
+
+/*@}*/ /* end of group CMSIS_Core_InstructionInterface */
+
+
+/* ###################  Compiler specific Intrinsics  ########################### */
+/** \defgroup CMSIS_SIMD_intrinsics CMSIS SIMD Intrinsics
+  Access to dedicated SIMD instructions
+  @{
+*/
+
+#if ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))     )
+
+#define __SADD8                           __sadd8
+#define __QADD8                           __qadd8
+#define __SHADD8                          __shadd8
+#define __UADD8                           __uadd8
+#define __UQADD8                          __uqadd8
+#define __UHADD8                          __uhadd8
+#define __SSUB8                           __ssub8
+#define __QSUB8                           __qsub8
+#define __SHSUB8                          __shsub8
+#define __USUB8                           __usub8
+#define __UQSUB8                          __uqsub8
+#define __UHSUB8                          __uhsub8
+#define __SADD16                          __sadd16
+#define __QADD16                          __qadd16
+#define __SHADD16                         __shadd16
+#define __UADD16                          __uadd16
+#define __UQADD16                         __uqadd16
+#define __UHADD16                         __uhadd16
+#define __SSUB16                          __ssub16
+#define __QSUB16                          __qsub16
+#define __SHSUB16                         __shsub16
+#define __USUB16                          __usub16
+#define __UQSUB16                         __uqsub16
+#define __UHSUB16                         __uhsub16
+#define __SASX                            __sasx
+#define __QASX                            __qasx
+#define __SHASX                           __shasx
+#define __UASX                            __uasx
+#define __UQASX                           __uqasx
+#define __UHASX                           __uhasx
+#define __SSAX                            __ssax
+#define __QSAX                            __qsax
+#define __SHSAX                           __shsax
+#define __USAX                            __usax
+#define __UQSAX                           __uqsax
+#define __UHSAX                           __uhsax
+#define __USAD8                           __usad8
+#define __USADA8                          __usada8
+#define __SSAT16                          __ssat16
+#define __USAT16                          __usat16
+#define __UXTB16                          __uxtb16
+#define __UXTAB16                         __uxtab16
+#define __SXTB16                          __sxtb16
+#define __SXTAB16                         __sxtab16
+#define __SMUAD                           __smuad
+#define __SMUADX                          __smuadx
+#define __SMLAD                           __smlad
+#define __SMLADX                          __smladx
+#define __SMLALD                          __smlald
+#define __SMLALDX                         __smlaldx
+#define __SMUSD                           __smusd
+#define __SMUSDX                          __smusdx
+#define __SMLSD                           __smlsd
+#define __SMLSDX                          __smlsdx
+#define __SMLSLD                          __smlsld
+#define __SMLSLDX                         __smlsldx
+#define __SEL                             __sel
+#define __QADD                            __qadd
+#define __QSUB                            __qsub
+
+#define __PKHBT(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0x0000FFFFUL) |  \
+                                           ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL)  )
+
+#define __PKHTB(ARG1,ARG2,ARG3)          ( ((((uint32_t)(ARG1))          ) & 0xFFFF0000UL) |  \
+                                           ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL)  )
+
+#define __SMMLA(ARG1,ARG2,ARG3)          ( (int32_t)((((int64_t)(ARG1) * (ARG2)) + \
+                                                      ((int64_t)(ARG3) << 32U)     ) >> 32U))
+
+#endif /* ((defined (__ARM_ARCH_7EM__) && (__ARM_ARCH_7EM__ == 1))     ) */
+/*@} end of group CMSIS_SIMD_intrinsics */
+
+
+#endif /* __CMSIS_ARMCC_H */
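
A hedged usage sketch (not part of the committed file) of how the register-access functions and intrinsics defined above are typically called from application code; shared_counter and to_big_endian are illustrative names:

#include <stdint.h>
#include "cmsis_compiler.h"             /* resolves to cmsis_armcc.h under Arm Compiler 5 */

static volatile uint32_t shared_counter;

/* PRIMASK-based critical section built from __get_PRIMASK/__set_PRIMASK/__disable_irq. */
static void counter_increment(void)
{
  uint32_t primask = __get_PRIMASK();   /* remember the current interrupt mask state */
  __disable_irq();                      /* set the I-bit: mask IRQs                   */
  shared_counter++;
  __set_PRIMASK(primask);               /* restore the previous state                 */
}

/* __REV reverses the byte order of a word: 0x12345678 -> 0x78563412. */
static uint32_t to_big_endian(uint32_t v)
{
  return __REV(v);
}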

File diffs are limited because there are too many changes
+ 1869 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_armclang.h


+ 266 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_compiler.h

@@ -0,0 +1,266 @@
+/**************************************************************************//**
+ * @file     cmsis_compiler.h
+ * @brief    CMSIS compiler generic header file
+ * @version  V5.0.4
+ * @date     10. January 2018
+ ******************************************************************************/
+/*
+ * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef __CMSIS_COMPILER_H
+#define __CMSIS_COMPILER_H
+
+#include <stdint.h>
+
+/*
+ * Arm Compiler 4/5
+ */
+#if   defined ( __CC_ARM )
+  #include "cmsis_armcc.h"
+
+
+/*
+ * Arm Compiler 6 (armclang)
+ */
+#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)
+  #include "cmsis_armclang.h"
+
+
+/*
+ * GNU Compiler
+ */
+#elif defined ( __GNUC__ )
+  #include "cmsis_gcc.h"
+
+
+/*
+ * IAR Compiler
+ */
+#elif defined ( __ICCARM__ )
+  #include <cmsis_iccarm.h>
+
+
+/*
+ * TI Arm Compiler
+ */
+#elif defined ( __TI_ARM__ )
+  #include <cmsis_ccs.h>
+
+  #ifndef   __ASM
+    #define __ASM                                  __asm
+  #endif
+  #ifndef   __INLINE
+    #define __INLINE                               inline
+  #endif
+  #ifndef   __STATIC_INLINE
+    #define __STATIC_INLINE                        static inline
+  #endif
+  #ifndef   __STATIC_FORCEINLINE
+    #define __STATIC_FORCEINLINE                   __STATIC_INLINE
+  #endif
+  #ifndef   __NO_RETURN
+    #define __NO_RETURN                            __attribute__((noreturn))
+  #endif
+  #ifndef   __USED
+    #define __USED                                 __attribute__((used))
+  #endif
+  #ifndef   __WEAK
+    #define __WEAK                                 __attribute__((weak))
+  #endif
+  #ifndef   __PACKED
+    #define __PACKED                               __attribute__((packed))
+  #endif
+  #ifndef   __PACKED_STRUCT
+    #define __PACKED_STRUCT                        struct __attribute__((packed))
+  #endif
+  #ifndef   __PACKED_UNION
+    #define __PACKED_UNION                         union __attribute__((packed))
+  #endif
+  #ifndef   __UNALIGNED_UINT32        /* deprecated */
+    struct __attribute__((packed)) T_UINT32 { uint32_t v; };
+    #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
+  #endif
+  #ifndef   __UNALIGNED_UINT16_WRITE
+    __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
+    #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void*)(addr))->v) = (val))
+  #endif
+  #ifndef   __UNALIGNED_UINT16_READ
+    __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
+    #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
+  #endif
+  #ifndef   __UNALIGNED_UINT32_WRITE
+    __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
+    #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
+  #endif
+  #ifndef   __UNALIGNED_UINT32_READ
+    __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
+    #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
+  #endif
+  #ifndef   __ALIGNED
+    #define __ALIGNED(x)                           __attribute__((aligned(x)))
+  #endif
+  #ifndef   __RESTRICT
+    #warning No compiler specific solution for __RESTRICT. __RESTRICT is ignored.
+    #define __RESTRICT
+  #endif
+
+
+/*
+ * TASKING Compiler
+ */
+#elif defined ( __TASKING__ )
+  /*
+   * The CMSIS functions have been implemented as intrinsics in the compiler.
+   * Please use "carm -?i" to get an up to date list of all intrinsics,
+   * Including the CMSIS ones.
+   */
+
+  #ifndef   __ASM
+    #define __ASM                                  __asm
+  #endif
+  #ifndef   __INLINE
+    #define __INLINE                               inline
+  #endif
+  #ifndef   __STATIC_INLINE
+    #define __STATIC_INLINE                        static inline
+  #endif
+  #ifndef   __STATIC_FORCEINLINE
+    #define __STATIC_FORCEINLINE                   __STATIC_INLINE
+  #endif
+  #ifndef   __NO_RETURN
+    #define __NO_RETURN                            __attribute__((noreturn))
+  #endif
+  #ifndef   __USED
+    #define __USED                                 __attribute__((used))
+  #endif
+  #ifndef   __WEAK
+    #define __WEAK                                 __attribute__((weak))
+  #endif
+  #ifndef   __PACKED
+    #define __PACKED                               __packed__
+  #endif
+  #ifndef   __PACKED_STRUCT
+    #define __PACKED_STRUCT                        struct __packed__
+  #endif
+  #ifndef   __PACKED_UNION
+    #define __PACKED_UNION                         union __packed__
+  #endif
+  #ifndef   __UNALIGNED_UINT32        /* deprecated */
+    struct __packed__ T_UINT32 { uint32_t v; };
+    #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
+  #endif
+  #ifndef   __UNALIGNED_UINT16_WRITE
+    __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
+    #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
+  #endif
+  #ifndef   __UNALIGNED_UINT16_READ
+    __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
+    #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
+  #endif
+  #ifndef   __UNALIGNED_UINT32_WRITE
+    __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
+    #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
+  #endif
+  #ifndef   __UNALIGNED_UINT32_READ
+    __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
+    #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
+  #endif
+  #ifndef   __ALIGNED
+    #define __ALIGNED(x)              __align(x)
+  #endif
+  #ifndef   __RESTRICT
+    #warning No compiler specific solution for __RESTRICT. __RESTRICT is ignored.
+    #define __RESTRICT
+  #endif
+
+
+/*
+ * COSMIC Compiler
+ */
+#elif defined ( __CSMC__ )
+   #include <cmsis_csm.h>
+
+ #ifndef   __ASM
+    #define __ASM                                  _asm
+  #endif
+  #ifndef   __INLINE
+    #define __INLINE                               inline
+  #endif
+  #ifndef   __STATIC_INLINE
+    #define __STATIC_INLINE                        static inline
+  #endif
+  #ifndef   __STATIC_FORCEINLINE
+    #define __STATIC_FORCEINLINE                   __STATIC_INLINE
+  #endif
+  #ifndef   __NO_RETURN
+    // NO RETURN is automatically detected hence no warning here
+    #define __NO_RETURN
+  #endif
+  #ifndef   __USED
+    #warning No compiler specific solution for __USED. __USED is ignored.
+    #define __USED
+  #endif
+  #ifndef   __WEAK
+    #define __WEAK                                 __weak
+  #endif
+  #ifndef   __PACKED
+    #define __PACKED                               @packed
+  #endif
+  #ifndef   __PACKED_STRUCT
+    #define __PACKED_STRUCT                        @packed struct
+  #endif
+  #ifndef   __PACKED_UNION
+    #define __PACKED_UNION                         @packed union
+  #endif
+  #ifndef   __UNALIGNED_UINT32        /* deprecated */
+    @packed struct T_UINT32 { uint32_t v; };
+    #define __UNALIGNED_UINT32(x)                  (((struct T_UINT32 *)(x))->v)
+  #endif
+  #ifndef   __UNALIGNED_UINT16_WRITE
+    __PACKED_STRUCT T_UINT16_WRITE { uint16_t v; };
+    #define __UNALIGNED_UINT16_WRITE(addr, val)    (void)((((struct T_UINT16_WRITE *)(void *)(addr))->v) = (val))
+  #endif
+  #ifndef   __UNALIGNED_UINT16_READ
+    __PACKED_STRUCT T_UINT16_READ { uint16_t v; };
+    #define __UNALIGNED_UINT16_READ(addr)          (((const struct T_UINT16_READ *)(const void *)(addr))->v)
+  #endif
+  #ifndef   __UNALIGNED_UINT32_WRITE
+    __PACKED_STRUCT T_UINT32_WRITE { uint32_t v; };
+    #define __UNALIGNED_UINT32_WRITE(addr, val)    (void)((((struct T_UINT32_WRITE *)(void *)(addr))->v) = (val))
+  #endif
+  #ifndef   __UNALIGNED_UINT32_READ
+    __PACKED_STRUCT T_UINT32_READ { uint32_t v; };
+    #define __UNALIGNED_UINT32_READ(addr)          (((const struct T_UINT32_READ *)(const void *)(addr))->v)
+  #endif
+  #ifndef   __ALIGNED
+    #warning No compiler specific solution for __ALIGNED. __ALIGNED is ignored.
+    #define __ALIGNED(x)
+  #endif
+  #ifndef   __RESTRICT
+    #warning No compiler specific solution for __RESTRICT. __RESTRICT is ignored.
+    #define __RESTRICT
+  #endif
+
+
+#else
+  #error Unknown compiler.
+#endif
+
+
+#endif /* __CMSIS_COMPILER_H */
+
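
For orientation (again outside the commit itself), a small sketch of how the portability macros guaranteed by cmsis_compiler.h are used in application code so the same source builds under Arm Compiler, GCC, IAR and the other supported toolchains; frame_t, frame_buffer and read_word_at are illustrative names:

#include <stdint.h>
#include "cmsis_compiler.h"

/* Packed layout: an identical, padding-free struct under every supported compiler. */
typedef __PACKED_STRUCT
{
  uint8_t  type;
  uint32_t payload;                /* sits at an unaligned offset */
} frame_t;

/* 4-byte aligned buffer, weak so an application can provide its own definition. */
__WEAK __ALIGNED(4) uint8_t frame_buffer[64];

/* Force-inlined helper using the unaligned-access macro to read a word
 * from an arbitrary byte offset. */
__STATIC_FORCEINLINE uint32_t read_word_at(const uint8_t *p)
{
  return __UNALIGNED_UINT32_READ(p);
}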

File diffs are limited because there are too many changes
+ 2085 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_gcc.h


+ 935 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_iccarm.h

@@ -0,0 +1,935 @@
+/**************************************************************************//**
+ * @file     cmsis_iccarm.h
+ * @brief    CMSIS compiler ICCARM (IAR Compiler for Arm) header file
+ * @version  V5.0.7
+ * @date     19. June 2018
+ ******************************************************************************/
+
+//------------------------------------------------------------------------------
+//
+// Copyright (c) 2017-2018 IAR Systems
+//
+// Licensed under the Apache License, Version 2.0 (the "License")
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+//------------------------------------------------------------------------------
+
+
+#ifndef __CMSIS_ICCARM_H__
+#define __CMSIS_ICCARM_H__
+
+#ifndef __ICCARM__
+  #error This file should only be compiled by ICCARM
+#endif
+
+#pragma system_include
+
+#define __IAR_FT _Pragma("inline=forced") __intrinsic
+
+#if (__VER__ >= 8000000)
+  #define __ICCARM_V8 1
+#else
+  #define __ICCARM_V8 0
+#endif
+
+#ifndef __ALIGNED
+  #if __ICCARM_V8
+    #define __ALIGNED(x) __attribute__((aligned(x)))
+  #elif (__VER__ >= 7080000)
+    /* Needs IAR language extensions */
+    #define __ALIGNED(x) __attribute__((aligned(x)))
+  #else
+    #warning No compiler specific solution for __ALIGNED.__ALIGNED is ignored.
+    #define __ALIGNED(x)
+  #endif
+#endif
+
+
+/* Define compiler macros for CPU architecture, used in CMSIS 5.
+ */
+#if __ARM_ARCH_6M__ || __ARM_ARCH_7M__ || __ARM_ARCH_7EM__ || __ARM_ARCH_8M_BASE__ || __ARM_ARCH_8M_MAIN__
+/* Macros already defined */
+#else
+  #if defined(__ARM8M_MAINLINE__) || defined(__ARM8EM_MAINLINE__)
+    #define __ARM_ARCH_8M_MAIN__ 1
+  #elif defined(__ARM8M_BASELINE__)
+    #define __ARM_ARCH_8M_BASE__ 1
+  #elif defined(__ARM_ARCH_PROFILE) && __ARM_ARCH_PROFILE == 'M'
+    #if __ARM_ARCH == 6
+      #define __ARM_ARCH_6M__ 1
+    #elif __ARM_ARCH == 7
+      #if __ARM_FEATURE_DSP
+        #define __ARM_ARCH_7EM__ 1
+      #else
+        #define __ARM_ARCH_7M__ 1
+      #endif
+    #endif /* __ARM_ARCH */
+  #endif /* __ARM_ARCH_PROFILE == 'M' */
+#endif
+
+/* Alternativ core deduction for older ICCARM's */
+#if !defined(__ARM_ARCH_6M__) && !defined(__ARM_ARCH_7M__) && !defined(__ARM_ARCH_7EM__) && \
+    !defined(__ARM_ARCH_8M_BASE__) && !defined(__ARM_ARCH_8M_MAIN__)
+  #if defined(__ARM6M__) && (__CORE__ == __ARM6M__)
+    #define __ARM_ARCH_6M__ 1
+  #elif defined(__ARM7M__) && (__CORE__ == __ARM7M__)
+    #define __ARM_ARCH_7M__ 1
+  #elif defined(__ARM7EM__) && (__CORE__ == __ARM7EM__)
+    #define __ARM_ARCH_7EM__  1
+  #elif defined(__ARM8M_BASELINE__) && (__CORE == __ARM8M_BASELINE__)
+    #define __ARM_ARCH_8M_BASE__ 1
+  #elif defined(__ARM8M_MAINLINE__) && (__CORE == __ARM8M_MAINLINE__)
+    #define __ARM_ARCH_8M_MAIN__ 1
+  #elif defined(__ARM8EM_MAINLINE__) && (__CORE == __ARM8EM_MAINLINE__)
+    #define __ARM_ARCH_8M_MAIN__ 1
+  #else
+    #error "Unknown target."
+  #endif
+#endif
+
+
+
+#if defined(__ARM_ARCH_6M__) && __ARM_ARCH_6M__==1
+  #define __IAR_M0_FAMILY  1
+#elif defined(__ARM_ARCH_8M_BASE__) && __ARM_ARCH_8M_BASE__==1
+  #define __IAR_M0_FAMILY  1
+#else
+  #define __IAR_M0_FAMILY  0
+#endif
+
+
+#ifndef __ASM
+  #define __ASM __asm
+#endif
+
+#ifndef __INLINE
+  #define __INLINE inline
+#endif
+
+#ifndef   __NO_RETURN
+  #if __ICCARM_V8
+    #define __NO_RETURN __attribute__((__noreturn__))
+  #else
+    #define __NO_RETURN _Pragma("object_attribute=__noreturn")
+  #endif
+#endif
+
+#ifndef   __PACKED
+  #if __ICCARM_V8
+    #define __PACKED __attribute__((packed, aligned(1)))
+  #else
+    /* Needs IAR language extensions */
+    #define __PACKED __packed
+  #endif
+#endif
+
+#ifndef   __PACKED_STRUCT
+  #if __ICCARM_V8
+    #define __PACKED_STRUCT struct __attribute__((packed, aligned(1)))
+  #else
+    /* Needs IAR language extensions */
+    #define __PACKED_STRUCT __packed struct
+  #endif
+#endif
+
+#ifndef   __PACKED_UNION
+  #if __ICCARM_V8
+    #define __PACKED_UNION union __attribute__((packed, aligned(1)))
+  #else
+    /* Needs IAR language extensions */
+    #define __PACKED_UNION __packed union
+  #endif
+#endif
+
+#ifndef   __RESTRICT
+  #define __RESTRICT            __restrict
+#endif
+
+#ifndef   __STATIC_INLINE
+  #define __STATIC_INLINE       static inline
+#endif
+
+#ifndef   __FORCEINLINE
+  #define __FORCEINLINE         _Pragma("inline=forced")
+#endif
+
+#ifndef   __STATIC_FORCEINLINE
+  #define __STATIC_FORCEINLINE  __FORCEINLINE __STATIC_INLINE
+#endif
+
+#ifndef __UNALIGNED_UINT16_READ
+#pragma language=save
+#pragma language=extended
+__IAR_FT uint16_t __iar_uint16_read(void const *ptr)
+{
+  return *(__packed uint16_t*)(ptr);
+}
+#pragma language=restore
+#define __UNALIGNED_UINT16_READ(PTR) __iar_uint16_read(PTR)
+#endif
+
+
+#ifndef __UNALIGNED_UINT16_WRITE
+#pragma language=save
+#pragma language=extended
+__IAR_FT void __iar_uint16_write(void const *ptr, uint16_t val)
+{
+  *(__packed uint16_t*)(ptr) = val;;
+}
+#pragma language=restore
+#define __UNALIGNED_UINT16_WRITE(PTR,VAL) __iar_uint16_write(PTR,VAL)
+#endif
+
+#ifndef __UNALIGNED_UINT32_READ
+#pragma language=save
+#pragma language=extended
+__IAR_FT uint32_t __iar_uint32_read(void const *ptr)
+{
+  return *(__packed uint32_t*)(ptr);
+}
+#pragma language=restore
+#define __UNALIGNED_UINT32_READ(PTR) __iar_uint32_read(PTR)
+#endif
+
+#ifndef __UNALIGNED_UINT32_WRITE
+#pragma language=save
+#pragma language=extended
+__IAR_FT void __iar_uint32_write(void const *ptr, uint32_t val)
+{
+  *(__packed uint32_t*)(ptr) = val;;
+}
+#pragma language=restore
+#define __UNALIGNED_UINT32_WRITE(PTR,VAL) __iar_uint32_write(PTR,VAL)
+#endif
+
+#ifndef __UNALIGNED_UINT32   /* deprecated */
+#pragma language=save
+#pragma language=extended
+__packed struct  __iar_u32 { uint32_t v; };
+#pragma language=restore
+#define __UNALIGNED_UINT32(PTR) (((struct __iar_u32 *)(PTR))->v)
+#endif
+
+#ifndef   __USED
+  #if __ICCARM_V8
+    #define __USED __attribute__((used))
+  #else
+    #define __USED _Pragma("__root")
+  #endif
+#endif
+
+#ifndef   __WEAK
+  #if __ICCARM_V8
+    #define __WEAK __attribute__((weak))
+  #else
+    #define __WEAK _Pragma("__weak")
+  #endif
+#endif
+
+
+#ifndef __ICCARM_INTRINSICS_VERSION__
+  #define __ICCARM_INTRINSICS_VERSION__  0
+#endif
+
+#if __ICCARM_INTRINSICS_VERSION__ == 2
+
+  #if defined(__CLZ)
+    #undef __CLZ
+  #endif
+  #if defined(__REVSH)
+    #undef __REVSH
+  #endif
+  #if defined(__RBIT)
+    #undef __RBIT
+  #endif
+  #if defined(__SSAT)
+    #undef __SSAT
+  #endif
+  #if defined(__USAT)
+    #undef __USAT
+  #endif
+
+  #include "iccarm_builtin.h"
+
+  #define __disable_fault_irq __iar_builtin_disable_fiq
+  #define __disable_irq       __iar_builtin_disable_interrupt
+  #define __enable_fault_irq  __iar_builtin_enable_fiq
+  #define __enable_irq        __iar_builtin_enable_interrupt
+  #define __arm_rsr           __iar_builtin_rsr
+  #define __arm_wsr           __iar_builtin_wsr
+
+
+  #define __get_APSR()                (__arm_rsr("APSR"))
+  #define __get_BASEPRI()             (__arm_rsr("BASEPRI"))
+  #define __get_CONTROL()             (__arm_rsr("CONTROL"))
+  #define __get_FAULTMASK()           (__arm_rsr("FAULTMASK"))
+
+  #if ((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+       (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     )
+    #define __get_FPSCR()             (__arm_rsr("FPSCR"))
+    #define __set_FPSCR(VALUE)        (__arm_wsr("FPSCR", (VALUE)))
+  #else
+    #define __get_FPSCR()             ( 0 )
+    #define __set_FPSCR(VALUE)        ((void)VALUE)
+  #endif
+
+  #define __get_IPSR()                (__arm_rsr("IPSR"))
+  #define __get_MSP()                 (__arm_rsr("MSP"))
+  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
+    // without main extensions, the non-secure MSPLIM is RAZ/WI
+    #define __get_MSPLIM()            (0U)
+  #else
+    #define __get_MSPLIM()            (__arm_rsr("MSPLIM"))
+  #endif
+  #define __get_PRIMASK()             (__arm_rsr("PRIMASK"))
+  #define __get_PSP()                 (__arm_rsr("PSP"))
+
+  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
+    // without main extensions, the non-secure PSPLIM is RAZ/WI
+    #define __get_PSPLIM()            (0U)
+  #else
+    #define __get_PSPLIM()            (__arm_rsr("PSPLIM"))
+  #endif
+
+  #define __get_xPSR()                (__arm_rsr("xPSR"))
+
+  #define __set_BASEPRI(VALUE)        (__arm_wsr("BASEPRI", (VALUE)))
+  #define __set_BASEPRI_MAX(VALUE)    (__arm_wsr("BASEPRI_MAX", (VALUE)))
+  #define __set_CONTROL(VALUE)        (__arm_wsr("CONTROL", (VALUE)))
+  #define __set_FAULTMASK(VALUE)      (__arm_wsr("FAULTMASK", (VALUE)))
+  #define __set_MSP(VALUE)            (__arm_wsr("MSP", (VALUE)))
+
+  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
+    // without main extensions, the non-secure MSPLIM is RAZ/WI
+    #define __set_MSPLIM(VALUE)       ((void)(VALUE))
+  #else
+    #define __set_MSPLIM(VALUE)       (__arm_wsr("MSPLIM", (VALUE)))
+  #endif
+  #define __set_PRIMASK(VALUE)        (__arm_wsr("PRIMASK", (VALUE)))
+  #define __set_PSP(VALUE)            (__arm_wsr("PSP", (VALUE)))
+  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
+    // without main extensions, the non-secure PSPLIM is RAZ/WI
+    #define __set_PSPLIM(VALUE)       ((void)(VALUE))
+  #else
+    #define __set_PSPLIM(VALUE)       (__arm_wsr("PSPLIM", (VALUE)))
+  #endif
+
+  #define __TZ_get_CONTROL_NS()       (__arm_rsr("CONTROL_NS"))
+  #define __TZ_set_CONTROL_NS(VALUE)  (__arm_wsr("CONTROL_NS", (VALUE)))
+  #define __TZ_get_PSP_NS()           (__arm_rsr("PSP_NS"))
+  #define __TZ_set_PSP_NS(VALUE)      (__arm_wsr("PSP_NS", (VALUE)))
+  #define __TZ_get_MSP_NS()           (__arm_rsr("MSP_NS"))
+  #define __TZ_set_MSP_NS(VALUE)      (__arm_wsr("MSP_NS", (VALUE)))
+  #define __TZ_get_SP_NS()            (__arm_rsr("SP_NS"))
+  #define __TZ_set_SP_NS(VALUE)       (__arm_wsr("SP_NS", (VALUE)))
+  #define __TZ_get_PRIMASK_NS()       (__arm_rsr("PRIMASK_NS"))
+  #define __TZ_set_PRIMASK_NS(VALUE)  (__arm_wsr("PRIMASK_NS", (VALUE)))
+  #define __TZ_get_BASEPRI_NS()       (__arm_rsr("BASEPRI_NS"))
+  #define __TZ_set_BASEPRI_NS(VALUE)  (__arm_wsr("BASEPRI_NS", (VALUE)))
+  #define __TZ_get_FAULTMASK_NS()     (__arm_rsr("FAULTMASK_NS"))
+  #define __TZ_set_FAULTMASK_NS(VALUE)(__arm_wsr("FAULTMASK_NS", (VALUE)))
+
+  #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+       (!defined (__ARM_FEATURE_CMSE) || (__ARM_FEATURE_CMSE < 3)))
+    // without main extensions, the non-secure PSPLIM is RAZ/WI
+    #define __TZ_get_PSPLIM_NS()      (0U)
+    #define __TZ_set_PSPLIM_NS(VALUE) ((void)(VALUE))
+  #else
+    #define __TZ_get_PSPLIM_NS()      (__arm_rsr("PSPLIM_NS"))
+    #define __TZ_set_PSPLIM_NS(VALUE) (__arm_wsr("PSPLIM_NS", (VALUE)))
+  #endif
+
+  #define __TZ_get_MSPLIM_NS()        (__arm_rsr("MSPLIM_NS"))
+  #define __TZ_set_MSPLIM_NS(VALUE)   (__arm_wsr("MSPLIM_NS", (VALUE)))
+
+  #define __NOP     __iar_builtin_no_operation
+
+  #define __CLZ     __iar_builtin_CLZ
+  #define __CLREX   __iar_builtin_CLREX
+
+  #define __DMB     __iar_builtin_DMB
+  #define __DSB     __iar_builtin_DSB
+  #define __ISB     __iar_builtin_ISB
+
+  #define __LDREXB  __iar_builtin_LDREXB
+  #define __LDREXH  __iar_builtin_LDREXH
+  #define __LDREXW  __iar_builtin_LDREX
+
+  #define __RBIT    __iar_builtin_RBIT
+  #define __REV     __iar_builtin_REV
+  #define __REV16   __iar_builtin_REV16
+
+  __IAR_FT int16_t __REVSH(int16_t val)
+  {
+    return (int16_t) __iar_builtin_REVSH(val);
+  }
+
+  #define __ROR     __iar_builtin_ROR
+  #define __RRX     __iar_builtin_RRX
+
+  #define __SEV     __iar_builtin_SEV
+
+  #if !__IAR_M0_FAMILY
+    #define __SSAT    __iar_builtin_SSAT
+  #endif
+
+  #define __STREXB  __iar_builtin_STREXB
+  #define __STREXH  __iar_builtin_STREXH
+  #define __STREXW  __iar_builtin_STREX
+
+  #if !__IAR_M0_FAMILY
+    #define __USAT    __iar_builtin_USAT
+  #endif
+
+  #define __WFE     __iar_builtin_WFE
+  #define __WFI     __iar_builtin_WFI
+
+  #if __ARM_MEDIA__
+    #define __SADD8   __iar_builtin_SADD8
+    #define __QADD8   __iar_builtin_QADD8
+    #define __SHADD8  __iar_builtin_SHADD8
+    #define __UADD8   __iar_builtin_UADD8
+    #define __UQADD8  __iar_builtin_UQADD8
+    #define __UHADD8  __iar_builtin_UHADD8
+    #define __SSUB8   __iar_builtin_SSUB8
+    #define __QSUB8   __iar_builtin_QSUB8
+    #define __SHSUB8  __iar_builtin_SHSUB8
+    #define __USUB8   __iar_builtin_USUB8
+    #define __UQSUB8  __iar_builtin_UQSUB8
+    #define __UHSUB8  __iar_builtin_UHSUB8
+    #define __SADD16  __iar_builtin_SADD16
+    #define __QADD16  __iar_builtin_QADD16
+    #define __SHADD16 __iar_builtin_SHADD16
+    #define __UADD16  __iar_builtin_UADD16
+    #define __UQADD16 __iar_builtin_UQADD16
+    #define __UHADD16 __iar_builtin_UHADD16
+    #define __SSUB16  __iar_builtin_SSUB16
+    #define __QSUB16  __iar_builtin_QSUB16
+    #define __SHSUB16 __iar_builtin_SHSUB16
+    #define __USUB16  __iar_builtin_USUB16
+    #define __UQSUB16 __iar_builtin_UQSUB16
+    #define __UHSUB16 __iar_builtin_UHSUB16
+    #define __SASX    __iar_builtin_SASX
+    #define __QASX    __iar_builtin_QASX
+    #define __SHASX   __iar_builtin_SHASX
+    #define __UASX    __iar_builtin_UASX
+    #define __UQASX   __iar_builtin_UQASX
+    #define __UHASX   __iar_builtin_UHASX
+    #define __SSAX    __iar_builtin_SSAX
+    #define __QSAX    __iar_builtin_QSAX
+    #define __SHSAX   __iar_builtin_SHSAX
+    #define __USAX    __iar_builtin_USAX
+    #define __UQSAX   __iar_builtin_UQSAX
+    #define __UHSAX   __iar_builtin_UHSAX
+    #define __USAD8   __iar_builtin_USAD8
+    #define __USADA8  __iar_builtin_USADA8
+    #define __SSAT16  __iar_builtin_SSAT16
+    #define __USAT16  __iar_builtin_USAT16
+    #define __UXTB16  __iar_builtin_UXTB16
+    #define __UXTAB16 __iar_builtin_UXTAB16
+    #define __SXTB16  __iar_builtin_SXTB16
+    #define __SXTAB16 __iar_builtin_SXTAB16
+    #define __SMUAD   __iar_builtin_SMUAD
+    #define __SMUADX  __iar_builtin_SMUADX
+    #define __SMMLA   __iar_builtin_SMMLA
+    #define __SMLAD   __iar_builtin_SMLAD
+    #define __SMLADX  __iar_builtin_SMLADX
+    #define __SMLALD  __iar_builtin_SMLALD
+    #define __SMLALDX __iar_builtin_SMLALDX
+    #define __SMUSD   __iar_builtin_SMUSD
+    #define __SMUSDX  __iar_builtin_SMUSDX
+    #define __SMLSD   __iar_builtin_SMLSD
+    #define __SMLSDX  __iar_builtin_SMLSDX
+    #define __SMLSLD  __iar_builtin_SMLSLD
+    #define __SMLSLDX __iar_builtin_SMLSLDX
+    #define __SEL     __iar_builtin_SEL
+    #define __QADD    __iar_builtin_QADD
+    #define __QSUB    __iar_builtin_QSUB
+    #define __PKHBT   __iar_builtin_PKHBT
+    #define __PKHTB   __iar_builtin_PKHTB
+  #endif
+
+#else /* __ICCARM_INTRINSICS_VERSION__ == 2 */
+
+  #if __IAR_M0_FAMILY
+   /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
+    #define __CLZ  __cmsis_iar_clz_not_active
+    #define __SSAT __cmsis_iar_ssat_not_active
+    #define __USAT __cmsis_iar_usat_not_active
+    #define __RBIT __cmsis_iar_rbit_not_active
+    #define __get_APSR  __cmsis_iar_get_APSR_not_active
+  #endif
+
+
+  #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+         (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     ))
+    #define __get_FPSCR __cmsis_iar_get_FPSR_not_active
+    #define __set_FPSCR __cmsis_iar_set_FPSR_not_active
+  #endif
+
+  #ifdef __INTRINSICS_INCLUDED
+  #error intrinsics.h is already included previously!
+  #endif
+
+  #include <intrinsics.h>
+
+  #if __IAR_M0_FAMILY
+   /* Avoid clash between intrinsics.h and arm_math.h when compiling for Cortex-M0. */
+    #undef __CLZ
+    #undef __SSAT
+    #undef __USAT
+    #undef __RBIT
+    #undef __get_APSR
+
+    __STATIC_INLINE uint8_t __CLZ(uint32_t data)
+    {
+      if (data == 0U) { return 32U; }
+
+      uint32_t count = 0U;
+      uint32_t mask = 0x80000000U;
+
+      while ((data & mask) == 0U)
+      {
+        count += 1U;
+        mask = mask >> 1U;
+      }
+      return count;
+    }
+
+    __STATIC_INLINE uint32_t __RBIT(uint32_t v)
+    {
+      uint8_t sc = 31U;
+      uint32_t r = v;
+      for (v >>= 1U; v; v >>= 1U)
+      {
+        r <<= 1U;
+        r |= v & 1U;
+        sc--;
+      }
+      return (r << sc);
+    }
+
+    __STATIC_INLINE  uint32_t __get_APSR(void)
+    {
+      uint32_t res;
+      __asm("MRS      %0,APSR" : "=r" (res));
+      return res;
+    }
+
+  #endif
+
+  #if (!((defined (__FPU_PRESENT) && (__FPU_PRESENT == 1U)) && \
+         (defined (__FPU_USED   ) && (__FPU_USED    == 1U))     ))
+    #undef __get_FPSCR
+    #undef __set_FPSCR
+    #define __get_FPSCR()       (0)
+    #define __set_FPSCR(VALUE)  ((void)VALUE)
+  #endif
+
+  #pragma diag_suppress=Pe940
+  #pragma diag_suppress=Pe177
+
+  #define __enable_irq    __enable_interrupt
+  #define __disable_irq   __disable_interrupt
+  #define __NOP           __no_operation
+
+  #define __get_xPSR      __get_PSR
+
+  #if (!defined(__ARM_ARCH_6M__) || __ARM_ARCH_6M__==0)
+
+    __IAR_FT uint32_t __LDREXW(uint32_t volatile *ptr)
+    {
+      return __LDREX((unsigned long *)ptr);
+    }
+
+    __IAR_FT uint32_t __STREXW(uint32_t value, uint32_t volatile *ptr)
+    {
+      return __STREX(value, (unsigned long *)ptr);
+    }
+  #endif
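The __LDREXW/__STREXW wrappers above map IAR's __LDREX/__STREX intrinsics onto the CMSIS names. A minimal usage sketch follows (illustrative only, not part of the committed header; 'counter' and 'counter_increment' are placeholders):

/* Sketch: lock-free increment built on the exclusive-access wrappers above. */
static volatile uint32_t counter;

static void counter_increment(void)
{
  uint32_t value;
  do
  {
    value = __LDREXW(&counter);                     /* load-exclusive the current value       */
  } while (__STREXW(value + 1U, &counter) != 0U);   /* retry until the store-exclusive succeeds */
}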
+
+
+  /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */
+  #if (__CORTEX_M >= 0x03)
+
+    __IAR_FT uint32_t __RRX(uint32_t value)
+    {
+      uint32_t result;
+      __ASM("RRX      %0, %1" : "=r"(result) : "r" (value) : "cc");
+      return(result);
+    }
+
+    __IAR_FT void __set_BASEPRI_MAX(uint32_t value)
+    {
+      __asm volatile("MSR      BASEPRI_MAX,%0"::"r" (value));
+    }
+
+
+    #define __enable_fault_irq  __enable_fiq
+    #define __disable_fault_irq __disable_fiq
+
+
+  #endif /* (__CORTEX_M >= 0x03) */
+
+  __IAR_FT uint32_t __ROR(uint32_t op1, uint32_t op2)
+  {
+    return (op1 >> op2) | (op1 << ((sizeof(op1)*8)-op2));
+  }
+
+  #if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
+       (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
+
+   __IAR_FT uint32_t __get_MSPLIM(void)
+    {
+      uint32_t res;
+    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
+      // without main extensions, the non-secure MSPLIM is RAZ/WI
+      res = 0U;
+    #else
+      __asm volatile("MRS      %0,MSPLIM" : "=r" (res));
+    #endif
+      return res;
+    }
+
+    __IAR_FT void   __set_MSPLIM(uint32_t value)
+    {
+    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
+      // without main extensions, the non-secure MSPLIM is RAZ/WI
+      (void)value;
+    #else
+      __asm volatile("MSR      MSPLIM,%0" :: "r" (value));
+    #endif
+    }
+
+    __IAR_FT uint32_t __get_PSPLIM(void)
+    {
+      uint32_t res;
+    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
+      // without main extensions, the non-secure PSPLIM is RAZ/WI
+      res = 0U;
+    #else
+      __asm volatile("MRS      %0,PSPLIM" : "=r" (res));
+    #endif
+      return res;
+    }
+
+    __IAR_FT void   __set_PSPLIM(uint32_t value)
+    {
+    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
+      // without main extensions, the non-secure PSPLIM is RAZ/WI
+      (void)value;
+    #else
+      __asm volatile("MSR      PSPLIM,%0" :: "r" (value));
+    #endif
+    }
+
+    __IAR_FT uint32_t __TZ_get_CONTROL_NS(void)
+    {
+      uint32_t res;
+      __asm volatile("MRS      %0,CONTROL_NS" : "=r" (res));
+      return res;
+    }
+
+    __IAR_FT void   __TZ_set_CONTROL_NS(uint32_t value)
+    {
+      __asm volatile("MSR      CONTROL_NS,%0" :: "r" (value));
+    }
+
+    __IAR_FT uint32_t   __TZ_get_PSP_NS(void)
+    {
+      uint32_t res;
+      __asm volatile("MRS      %0,PSP_NS" : "=r" (res));
+      return res;
+    }
+
+    __IAR_FT void   __TZ_set_PSP_NS(uint32_t value)
+    {
+      __asm volatile("MSR      PSP_NS,%0" :: "r" (value));
+    }
+
+    __IAR_FT uint32_t   __TZ_get_MSP_NS(void)
+    {
+      uint32_t res;
+      __asm volatile("MRS      %0,MSP_NS" : "=r" (res));
+      return res;
+    }
+
+    __IAR_FT void   __TZ_set_MSP_NS(uint32_t value)
+    {
+      __asm volatile("MSR      MSP_NS,%0" :: "r" (value));
+    }
+
+    __IAR_FT uint32_t   __TZ_get_SP_NS(void)
+    {
+      uint32_t res;
+      __asm volatile("MRS      %0,SP_NS" : "=r" (res));
+      return res;
+    }
+    __IAR_FT void   __TZ_set_SP_NS(uint32_t value)
+    {
+      __asm volatile("MSR      SP_NS,%0" :: "r" (value));
+    }
+
+    __IAR_FT uint32_t   __TZ_get_PRIMASK_NS(void)
+    {
+      uint32_t res;
+      __asm volatile("MRS      %0,PRIMASK_NS" : "=r" (res));
+      return res;
+    }
+
+    __IAR_FT void   __TZ_set_PRIMASK_NS(uint32_t value)
+    {
+      __asm volatile("MSR      PRIMASK_NS,%0" :: "r" (value));
+    }
+
+    __IAR_FT uint32_t   __TZ_get_BASEPRI_NS(void)
+    {
+      uint32_t res;
+      __asm volatile("MRS      %0,BASEPRI_NS" : "=r" (res));
+      return res;
+    }
+
+    __IAR_FT void   __TZ_set_BASEPRI_NS(uint32_t value)
+    {
+      __asm volatile("MSR      BASEPRI_NS,%0" :: "r" (value));
+    }
+
+    __IAR_FT uint32_t   __TZ_get_FAULTMASK_NS(void)
+    {
+      uint32_t res;
+      __asm volatile("MRS      %0,FAULTMASK_NS" : "=r" (res));
+      return res;
+    }
+
+    __IAR_FT void   __TZ_set_FAULTMASK_NS(uint32_t value)
+    {
+      __asm volatile("MSR      FAULTMASK_NS,%0" :: "r" (value));
+    }
+
+    __IAR_FT uint32_t   __TZ_get_PSPLIM_NS(void)
+    {
+      uint32_t res;
+    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
+      // without main extensions, the non-secure PSPLIM is RAZ/WI
+      res = 0U;
+    #else
+      __asm volatile("MRS      %0,PSPLIM_NS" : "=r" (res));
+    #endif
+      return res;
+    }
+
+    __IAR_FT void   __TZ_set_PSPLIM_NS(uint32_t value)
+    {
+    #if (!(defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) && \
+         (!defined (__ARM_FEATURE_CMSE  ) || (__ARM_FEATURE_CMSE   < 3)))
+      // without main extensions, the non-secure PSPLIM is RAZ/WI
+      (void)value;
+    #else
+      __asm volatile("MSR      PSPLIM_NS,%0" :: "r" (value));
+    #endif
+    }
+
+    __IAR_FT uint32_t   __TZ_get_MSPLIM_NS(void)
+    {
+      uint32_t res;
+      __asm volatile("MRS      %0,MSPLIM_NS" : "=r" (res));
+      return res;
+    }
+
+    __IAR_FT void   __TZ_set_MSPLIM_NS(uint32_t value)
+    {
+      __asm volatile("MSR      MSPLIM_NS,%0" :: "r" (value));
+    }
+
+  #endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */
+
+#endif   /* __ICCARM_INTRINSICS_VERSION__ == 2 */
+
+#define __BKPT(value)    __asm volatile ("BKPT     %0" : : "i"(value))
+
+#if __IAR_M0_FAMILY
+  __STATIC_INLINE int32_t __SSAT(int32_t val, uint32_t sat)
+  {
+    if ((sat >= 1U) && (sat <= 32U))
+    {
+      const int32_t max = (int32_t)((1U << (sat - 1U)) - 1U);
+      const int32_t min = -1 - max ;
+      if (val > max)
+      {
+        return max;
+      }
+      else if (val < min)
+      {
+        return min;
+      }
+    }
+    return val;
+  }
+
+  __STATIC_INLINE uint32_t __USAT(int32_t val, uint32_t sat)
+  {
+    if (sat <= 31U)
+    {
+      const uint32_t max = ((1U << sat) - 1U);
+      if (val > (int32_t)max)
+      {
+        return max;
+      }
+      else if (val < 0)
+      {
+        return 0U;
+      }
+    }
+    return (uint32_t)val;
+  }
+#endif
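On Cortex-M0 class cores the saturation helpers above fall back to the plain C implementations shown. A worked example of the clamping behaviour (illustrative only, not part of the header):

int32_t  a = __SSAT( 70000, 16);   /* clamps to the signed 16-bit maximum:  32767 */
int32_t  b = __SSAT(-70000, 16);   /* clamps to the signed 16-bit minimum: -32768 */
uint32_t c = __USAT(   300,  8);   /* clamps to the unsigned 8-bit maximum:   255 */
uint32_t d = __USAT(  -300,  8);   /* negative input clamps to:                 0 */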
+
+#if (__CORTEX_M >= 0x03)   /* __CORTEX_M is defined in core_cm0.h, core_cm3.h and core_cm4.h. */
+
+  __IAR_FT uint8_t __LDRBT(volatile uint8_t *addr)
+  {
+    uint32_t res;
+    __ASM("LDRBT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
+    return ((uint8_t)res);
+  }
+
+  __IAR_FT uint16_t __LDRHT(volatile uint16_t *addr)
+  {
+    uint32_t res;
+    __ASM("LDRHT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
+    return ((uint16_t)res);
+  }
+
+  __IAR_FT uint32_t __LDRT(volatile uint32_t *addr)
+  {
+    uint32_t res;
+    __ASM("LDRT %0, [%1]" : "=r" (res) : "r" (addr) : "memory");
+    return res;
+  }
+
+  __IAR_FT void __STRBT(uint8_t value, volatile uint8_t *addr)
+  {
+    __ASM("STRBT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
+  }
+
+  __IAR_FT void __STRHT(uint16_t value, volatile uint16_t *addr)
+  {
+    __ASM("STRHT %1, [%0]" : : "r" (addr), "r" ((uint32_t)value) : "memory");
+  }
+
+  __IAR_FT void __STRT(uint32_t value, volatile uint32_t *addr)
+  {
+    __ASM("STRT %1, [%0]" : : "r" (addr), "r" (value) : "memory");
+  }
+
+#endif /* (__CORTEX_M >= 0x03) */
+
+#if ((defined (__ARM_ARCH_8M_MAIN__ ) && (__ARM_ARCH_8M_MAIN__ == 1)) || \
+     (defined (__ARM_ARCH_8M_BASE__ ) && (__ARM_ARCH_8M_BASE__ == 1))    )
+
+
+  __IAR_FT uint8_t __LDAB(volatile uint8_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("LDAB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
+    return ((uint8_t)res);
+  }
+
+  __IAR_FT uint16_t __LDAH(volatile uint16_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("LDAH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
+    return ((uint16_t)res);
+  }
+
+  __IAR_FT uint32_t __LDA(volatile uint32_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("LDA %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
+    return res;
+  }
+
+  __IAR_FT void __STLB(uint8_t value, volatile uint8_t *ptr)
+  {
+    __ASM volatile ("STLB %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
+  }
+
+  __IAR_FT void __STLH(uint16_t value, volatile uint16_t *ptr)
+  {
+    __ASM volatile ("STLH %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
+  }
+
+  __IAR_FT void __STL(uint32_t value, volatile uint32_t *ptr)
+  {
+    __ASM volatile ("STL %1, [%0]" :: "r" (ptr), "r" (value) : "memory");
+  }
+
+  __IAR_FT uint8_t __LDAEXB(volatile uint8_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("LDAEXB %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
+    return ((uint8_t)res);
+  }
+
+  __IAR_FT uint16_t __LDAEXH(volatile uint16_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("LDAEXH %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
+    return ((uint16_t)res);
+  }
+
+  __IAR_FT uint32_t __LDAEX(volatile uint32_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("LDAEX %0, [%1]" : "=r" (res) : "r" (ptr) : "memory");
+    return res;
+  }
+
+  __IAR_FT uint32_t __STLEXB(uint8_t value, volatile uint8_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("STLEXB %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
+    return res;
+  }
+
+  __IAR_FT uint32_t __STLEXH(uint16_t value, volatile uint16_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("STLEXH %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
+    return res;
+  }
+
+  __IAR_FT uint32_t __STLEX(uint32_t value, volatile uint32_t *ptr)
+  {
+    uint32_t res;
+    __ASM volatile ("STLEX %0, %2, [%1]" : "=r" (res) : "r" (ptr), "r" (value) : "memory");
+    return res;
+  }
+
+#endif /* __ARM_ARCH_8M_MAIN__ or __ARM_ARCH_8M_BASE__ */
+
+#undef __IAR_FT
+#undef __IAR_M0_FAMILY
+#undef __ICCARM_V8
+
+#pragma diag_default=Pe940
+#pragma diag_default=Pe177
+
+#endif /* __CMSIS_ICCARM_H__ */

+ 39 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/cmsis_version.h

@@ -0,0 +1,39 @@
+/**************************************************************************//**
+ * @file     cmsis_version.h
+ * @brief    CMSIS Core(M) Version definitions
+ * @version  V5.0.2
+ * @date     19. April 2017
+ ******************************************************************************/
+/*
+ * Copyright (c) 2009-2017 ARM Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if   defined ( __ICCARM__ )
+  #pragma system_include         /* treat file as system include file for MISRA check */
+#elif defined (__clang__)
+  #pragma clang system_header   /* treat file as system include file */
+#endif
+
+#ifndef __CMSIS_VERSION_H
+#define __CMSIS_VERSION_H
+
+/*  CMSIS Version definitions */
+#define __CM_CMSIS_VERSION_MAIN  ( 5U)                                      /*!< [31:16] CMSIS Core(M) main version */
+#define __CM_CMSIS_VERSION_SUB   ( 1U)                                      /*!< [15:0]  CMSIS Core(M) sub version */
+#define __CM_CMSIS_VERSION       ((__CM_CMSIS_VERSION_MAIN << 16U) | \
+                                   __CM_CMSIS_VERSION_SUB           )       /*!< CMSIS Core(M) version number */
+#endif
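The packed version word defined above can be split back into its fields with plain shifts and masks; a small sketch (illustrative only):

uint32_t version = __CM_CMSIS_VERSION;            /* (5U << 16) | 1U for this release */
uint32_t main_v  = (version >> 16U) & 0xFFFFU;    /* -> 5, bits [31:16]               */
uint32_t sub_v   =  version         & 0xFFFFU;    /* -> 1, bits [15:0]                */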

File diffs are limited because there are too many
+ 1918 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_armv8mbl.h


File diffs are limited because there are too many
+ 2927 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_armv8mml.h


+ 949 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm0.h

@@ -0,0 +1,949 @@
+/**************************************************************************//**
+ * @file     core_cm0.h
+ * @brief    CMSIS Cortex-M0 Core Peripheral Access Layer Header File
+ * @version  V5.0.5
+ * @date     28. May 2018
+ ******************************************************************************/
+/*
+ * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if   defined ( __ICCARM__ )
+  #pragma system_include         /* treat file as system include file for MISRA check */
+#elif defined (__clang__)
+  #pragma clang system_header   /* treat file as system include file */
+#endif
+
+#ifndef __CORE_CM0_H_GENERIC
+#define __CORE_CM0_H_GENERIC
+
+#include <stdint.h>
+
+#ifdef __cplusplus
+ extern "C" {
+#endif
+
+/**
+  \page CMSIS_MISRA_Exceptions  MISRA-C:2004 Compliance Exceptions
+  CMSIS violates the following MISRA-C:2004 rules:
+
+   \li Required Rule 8.5, object/function definition in header file.<br>
+     Function definitions in header files are used to allow 'inlining'.
+
+   \li Required Rule 18.4, declaration of union type or object of union type: '{...}'.<br>
+     Unions are used for effective representation of core registers.
+
+   \li Advisory Rule 19.7, Function-like macro defined.<br>
+     Function-like macros are used to allow more efficient code.
+ */
+
+
+/*******************************************************************************
+ *                 CMSIS definitions
+ ******************************************************************************/
+/**
+  \ingroup Cortex_M0
+  @{
+ */
+
+#include "cmsis_version.h"
+ 
+/*  CMSIS CM0 definitions */
+#define __CM0_CMSIS_VERSION_MAIN  (__CM_CMSIS_VERSION_MAIN)              /*!< \deprecated [31:16] CMSIS HAL main version */
+#define __CM0_CMSIS_VERSION_SUB   (__CM_CMSIS_VERSION_SUB)               /*!< \deprecated [15:0]  CMSIS HAL sub version */
+#define __CM0_CMSIS_VERSION       ((__CM0_CMSIS_VERSION_MAIN << 16U) | \
+                                    __CM0_CMSIS_VERSION_SUB           )  /*!< \deprecated CMSIS HAL version number */
+
+#define __CORTEX_M                (0U)                                   /*!< Cortex-M Core */
+
+/** __FPU_USED indicates whether an FPU is used or not.
+    This core does not support an FPU at all
+*/
+#define __FPU_USED       0U
+
+#if defined ( __CC_ARM )
+  #if defined __TARGET_FPU_VFP
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)
+  #if defined __ARM_PCS_VFP
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __GNUC__ )
+  #if defined (__VFP_FP__) && !defined(__SOFTFP__)
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __ICCARM__ )
+  #if defined __ARMVFP__
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __TI_ARM__ )
+  #if defined __TI_VFP_SUPPORT__
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __TASKING__ )
+  #if defined __FPU_VFP__
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __CSMC__ )
+  #if ( __CSMC__ & 0x400U)
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#endif
+
+#include "cmsis_compiler.h"               /* CMSIS compiler specific defines */
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __CORE_CM0_H_GENERIC */
+
+#ifndef __CMSIS_GENERIC
+
+#ifndef __CORE_CM0_H_DEPENDANT
+#define __CORE_CM0_H_DEPENDANT
+
+#ifdef __cplusplus
+ extern "C" {
+#endif
+
+/* check device defines and use defaults */
+#if defined __CHECK_DEVICE_DEFINES
+  #ifndef __CM0_REV
+    #define __CM0_REV               0x0000U
+    #warning "__CM0_REV not defined in device header file; using default!"
+  #endif
+
+  #ifndef __NVIC_PRIO_BITS
+    #define __NVIC_PRIO_BITS          2U
+    #warning "__NVIC_PRIO_BITS not defined in device header file; using default!"
+  #endif
+
+  #ifndef __Vendor_SysTickConfig
+    #define __Vendor_SysTickConfig    0U
+    #warning "__Vendor_SysTickConfig not defined in device header file; using default!"
+  #endif
+#endif
+
+/* IO definitions (access restrictions to peripheral registers) */
+/**
+    \defgroup CMSIS_glob_defs CMSIS Global Defines
+
+    <strong>IO Type Qualifiers</strong> are used
+    \li to specify the access to peripheral variables.
+    \li for automatic generation of peripheral register debug information.
+*/
+#ifdef __cplusplus
+  #define   __I     volatile             /*!< Defines 'read only' permissions */
+#else
+  #define   __I     volatile const       /*!< Defines 'read only' permissions */
+#endif
+#define     __O     volatile             /*!< Defines 'write only' permissions */
+#define     __IO    volatile             /*!< Defines 'read / write' permissions */
+
+/* following defines should be used for structure members */
+#define     __IM     volatile const      /*! Defines 'read only' structure member permissions */
+#define     __OM     volatile            /*! Defines 'write only' structure member permissions */
+#define     __IOM    volatile            /*! Defines 'read / write' structure member permissions */
+
+/*@} end of group Cortex_M0 */
+
+
+
+/*******************************************************************************
+ *                 Register Abstraction
+  Core Register contain:
+  - Core Register
+  - Core NVIC Register
+  - Core SCB Register
+  - Core SysTick Register
+ ******************************************************************************/
+/**
+  \defgroup CMSIS_core_register Defines and Type Definitions
+  \brief Type definitions and defines for Cortex-M processor based devices.
+*/
+
+/**
+  \ingroup    CMSIS_core_register
+  \defgroup   CMSIS_CORE  Status and Control Registers
+  \brief      Core Register type definitions.
+  @{
+ */
+
+/**
+  \brief  Union type to access the Application Program Status Register (APSR).
+ */
+typedef union
+{
+  struct
+  {
+    uint32_t _reserved0:28;              /*!< bit:  0..27  Reserved */
+    uint32_t V:1;                        /*!< bit:     28  Overflow condition code flag */
+    uint32_t C:1;                        /*!< bit:     29  Carry condition code flag */
+    uint32_t Z:1;                        /*!< bit:     30  Zero condition code flag */
+    uint32_t N:1;                        /*!< bit:     31  Negative condition code flag */
+  } b;                                   /*!< Structure used for bit  access */
+  uint32_t w;                            /*!< Type      used for word access */
+} APSR_Type;
+
+/* APSR Register Definitions */
+#define APSR_N_Pos                         31U                                            /*!< APSR: N Position */
+#define APSR_N_Msk                         (1UL << APSR_N_Pos)                            /*!< APSR: N Mask */
+
+#define APSR_Z_Pos                         30U                                            /*!< APSR: Z Position */
+#define APSR_Z_Msk                         (1UL << APSR_Z_Pos)                            /*!< APSR: Z Mask */
+
+#define APSR_C_Pos                         29U                                            /*!< APSR: C Position */
+#define APSR_C_Msk                         (1UL << APSR_C_Pos)                            /*!< APSR: C Mask */
+
+#define APSR_V_Pos                         28U                                            /*!< APSR: V Position */
+#define APSR_V_Msk                         (1UL << APSR_V_Pos)                            /*!< APSR: V Mask */
+
+
+/**
+  \brief  Union type to access the Interrupt Program Status Register (IPSR).
+ */
+typedef union
+{
+  struct
+  {
+    uint32_t ISR:9;                      /*!< bit:  0.. 8  Exception number */
+    uint32_t _reserved0:23;              /*!< bit:  9..31  Reserved */
+  } b;                                   /*!< Structure used for bit  access */
+  uint32_t w;                            /*!< Type      used for word access */
+} IPSR_Type;
+
+/* IPSR Register Definitions */
+#define IPSR_ISR_Pos                        0U                                            /*!< IPSR: ISR Position */
+#define IPSR_ISR_Msk                       (0x1FFUL /*<< IPSR_ISR_Pos*/)                  /*!< IPSR: ISR Mask */
+
+
+/**
+  \brief  Union type to access the Special-Purpose Program Status Registers (xPSR).
+ */
+typedef union
+{
+  struct
+  {
+    uint32_t ISR:9;                      /*!< bit:  0.. 8  Exception number */
+    uint32_t _reserved0:15;              /*!< bit:  9..23  Reserved */
+    uint32_t T:1;                        /*!< bit:     24  Thumb bit        (read 0) */
+    uint32_t _reserved1:3;               /*!< bit: 25..27  Reserved */
+    uint32_t V:1;                        /*!< bit:     28  Overflow condition code flag */
+    uint32_t C:1;                        /*!< bit:     29  Carry condition code flag */
+    uint32_t Z:1;                        /*!< bit:     30  Zero condition code flag */
+    uint32_t N:1;                        /*!< bit:     31  Negative condition code flag */
+  } b;                                   /*!< Structure used for bit  access */
+  uint32_t w;                            /*!< Type      used for word access */
+} xPSR_Type;
+
+/* xPSR Register Definitions */
+#define xPSR_N_Pos                         31U                                            /*!< xPSR: N Position */
+#define xPSR_N_Msk                         (1UL << xPSR_N_Pos)                            /*!< xPSR: N Mask */
+
+#define xPSR_Z_Pos                         30U                                            /*!< xPSR: Z Position */
+#define xPSR_Z_Msk                         (1UL << xPSR_Z_Pos)                            /*!< xPSR: Z Mask */
+
+#define xPSR_C_Pos                         29U                                            /*!< xPSR: C Position */
+#define xPSR_C_Msk                         (1UL << xPSR_C_Pos)                            /*!< xPSR: C Mask */
+
+#define xPSR_V_Pos                         28U                                            /*!< xPSR: V Position */
+#define xPSR_V_Msk                         (1UL << xPSR_V_Pos)                            /*!< xPSR: V Mask */
+
+#define xPSR_T_Pos                         24U                                            /*!< xPSR: T Position */
+#define xPSR_T_Msk                         (1UL << xPSR_T_Pos)                            /*!< xPSR: T Mask */
+
+#define xPSR_ISR_Pos                        0U                                            /*!< xPSR: ISR Position */
+#define xPSR_ISR_Msk                       (0x1FFUL /*<< xPSR_ISR_Pos*/)                  /*!< xPSR: ISR Mask */
+
+
+/**
+  \brief  Union type to access the Control Registers (CONTROL).
+ */
+typedef union
+{
+  struct
+  {
+    uint32_t _reserved0:1;               /*!< bit:      0  Reserved */
+    uint32_t SPSEL:1;                    /*!< bit:      1  Stack to be used */
+    uint32_t _reserved1:30;              /*!< bit:  2..31  Reserved */
+  } b;                                   /*!< Structure used for bit  access */
+  uint32_t w;                            /*!< Type      used for word access */
+} CONTROL_Type;
+
+/* CONTROL Register Definitions */
+#define CONTROL_SPSEL_Pos                   1U                                            /*!< CONTROL: SPSEL Position */
+#define CONTROL_SPSEL_Msk                  (1UL << CONTROL_SPSEL_Pos)                     /*!< CONTROL: SPSEL Mask */
+
+/*@} end of group CMSIS_CORE */
+
+
+/**
+  \ingroup    CMSIS_core_register
+  \defgroup   CMSIS_NVIC  Nested Vectored Interrupt Controller (NVIC)
+  \brief      Type definitions for the NVIC Registers
+  @{
+ */
+
+/**
+  \brief  Structure type to access the Nested Vectored Interrupt Controller (NVIC).
+ */
+typedef struct
+{
+  __IOM uint32_t ISER[1U];               /*!< Offset: 0x000 (R/W)  Interrupt Set Enable Register */
+        uint32_t RESERVED0[31U];
+  __IOM uint32_t ICER[1U];               /*!< Offset: 0x080 (R/W)  Interrupt Clear Enable Register */
+        uint32_t RESERVED1[31U];
+  __IOM uint32_t ISPR[1U];               /*!< Offset: 0x100 (R/W)  Interrupt Set Pending Register */
+        uint32_t RESERVED2[31U];
+  __IOM uint32_t ICPR[1U];               /*!< Offset: 0x180 (R/W)  Interrupt Clear Pending Register */
+        uint32_t RESERVED3[31U];
+        uint32_t RESERVED4[64U];
+  __IOM uint32_t IP[8U];                 /*!< Offset: 0x300 (R/W)  Interrupt Priority Register */
+}  NVIC_Type;
+
+/*@} end of group CMSIS_NVIC */
+
+
+/**
+  \ingroup  CMSIS_core_register
+  \defgroup CMSIS_SCB     System Control Block (SCB)
+  \brief    Type definitions for the System Control Block Registers
+  @{
+ */
+
+/**
+  \brief  Structure type to access the System Control Block (SCB).
+ */
+typedef struct
+{
+  __IM  uint32_t CPUID;                  /*!< Offset: 0x000 (R/ )  CPUID Base Register */
+  __IOM uint32_t ICSR;                   /*!< Offset: 0x004 (R/W)  Interrupt Control and State Register */
+        uint32_t RESERVED0;
+  __IOM uint32_t AIRCR;                  /*!< Offset: 0x00C (R/W)  Application Interrupt and Reset Control Register */
+  __IOM uint32_t SCR;                    /*!< Offset: 0x010 (R/W)  System Control Register */
+  __IOM uint32_t CCR;                    /*!< Offset: 0x014 (R/W)  Configuration Control Register */
+        uint32_t RESERVED1;
+  __IOM uint32_t SHP[2U];                /*!< Offset: 0x01C (R/W)  System Handlers Priority Registers. [0] is RESERVED */
+  __IOM uint32_t SHCSR;                  /*!< Offset: 0x024 (R/W)  System Handler Control and State Register */
+} SCB_Type;
+
+/* SCB CPUID Register Definitions */
+#define SCB_CPUID_IMPLEMENTER_Pos          24U                                            /*!< SCB CPUID: IMPLEMENTER Position */
+#define SCB_CPUID_IMPLEMENTER_Msk          (0xFFUL << SCB_CPUID_IMPLEMENTER_Pos)          /*!< SCB CPUID: IMPLEMENTER Mask */
+
+#define SCB_CPUID_VARIANT_Pos              20U                                            /*!< SCB CPUID: VARIANT Position */
+#define SCB_CPUID_VARIANT_Msk              (0xFUL << SCB_CPUID_VARIANT_Pos)               /*!< SCB CPUID: VARIANT Mask */
+
+#define SCB_CPUID_ARCHITECTURE_Pos         16U                                            /*!< SCB CPUID: ARCHITECTURE Position */
+#define SCB_CPUID_ARCHITECTURE_Msk         (0xFUL << SCB_CPUID_ARCHITECTURE_Pos)          /*!< SCB CPUID: ARCHITECTURE Mask */
+
+#define SCB_CPUID_PARTNO_Pos                4U                                            /*!< SCB CPUID: PARTNO Position */
+#define SCB_CPUID_PARTNO_Msk               (0xFFFUL << SCB_CPUID_PARTNO_Pos)              /*!< SCB CPUID: PARTNO Mask */
+
+#define SCB_CPUID_REVISION_Pos              0U                                            /*!< SCB CPUID: REVISION Position */
+#define SCB_CPUID_REVISION_Msk             (0xFUL /*<< SCB_CPUID_REVISION_Pos*/)          /*!< SCB CPUID: REVISION Mask */
+
+/* SCB Interrupt Control State Register Definitions */
+#define SCB_ICSR_NMIPENDSET_Pos            31U                                            /*!< SCB ICSR: NMIPENDSET Position */
+#define SCB_ICSR_NMIPENDSET_Msk            (1UL << SCB_ICSR_NMIPENDSET_Pos)               /*!< SCB ICSR: NMIPENDSET Mask */
+
+#define SCB_ICSR_PENDSVSET_Pos             28U                                            /*!< SCB ICSR: PENDSVSET Position */
+#define SCB_ICSR_PENDSVSET_Msk             (1UL << SCB_ICSR_PENDSVSET_Pos)                /*!< SCB ICSR: PENDSVSET Mask */
+
+#define SCB_ICSR_PENDSVCLR_Pos             27U                                            /*!< SCB ICSR: PENDSVCLR Position */
+#define SCB_ICSR_PENDSVCLR_Msk             (1UL << SCB_ICSR_PENDSVCLR_Pos)                /*!< SCB ICSR: PENDSVCLR Mask */
+
+#define SCB_ICSR_PENDSTSET_Pos             26U                                            /*!< SCB ICSR: PENDSTSET Position */
+#define SCB_ICSR_PENDSTSET_Msk             (1UL << SCB_ICSR_PENDSTSET_Pos)                /*!< SCB ICSR: PENDSTSET Mask */
+
+#define SCB_ICSR_PENDSTCLR_Pos             25U                                            /*!< SCB ICSR: PENDSTCLR Position */
+#define SCB_ICSR_PENDSTCLR_Msk             (1UL << SCB_ICSR_PENDSTCLR_Pos)                /*!< SCB ICSR: PENDSTCLR Mask */
+
+#define SCB_ICSR_ISRPREEMPT_Pos            23U                                            /*!< SCB ICSR: ISRPREEMPT Position */
+#define SCB_ICSR_ISRPREEMPT_Msk            (1UL << SCB_ICSR_ISRPREEMPT_Pos)               /*!< SCB ICSR: ISRPREEMPT Mask */
+
+#define SCB_ICSR_ISRPENDING_Pos            22U                                            /*!< SCB ICSR: ISRPENDING Position */
+#define SCB_ICSR_ISRPENDING_Msk            (1UL << SCB_ICSR_ISRPENDING_Pos)               /*!< SCB ICSR: ISRPENDING Mask */
+
+#define SCB_ICSR_VECTPENDING_Pos           12U                                            /*!< SCB ICSR: VECTPENDING Position */
+#define SCB_ICSR_VECTPENDING_Msk           (0x1FFUL << SCB_ICSR_VECTPENDING_Pos)          /*!< SCB ICSR: VECTPENDING Mask */
+
+#define SCB_ICSR_VECTACTIVE_Pos             0U                                            /*!< SCB ICSR: VECTACTIVE Position */
+#define SCB_ICSR_VECTACTIVE_Msk            (0x1FFUL /*<< SCB_ICSR_VECTACTIVE_Pos*/)       /*!< SCB ICSR: VECTACTIVE Mask */
+
+/* SCB Application Interrupt and Reset Control Register Definitions */
+#define SCB_AIRCR_VECTKEY_Pos              16U                                            /*!< SCB AIRCR: VECTKEY Position */
+#define SCB_AIRCR_VECTKEY_Msk              (0xFFFFUL << SCB_AIRCR_VECTKEY_Pos)            /*!< SCB AIRCR: VECTKEY Mask */
+
+#define SCB_AIRCR_VECTKEYSTAT_Pos          16U                                            /*!< SCB AIRCR: VECTKEYSTAT Position */
+#define SCB_AIRCR_VECTKEYSTAT_Msk          (0xFFFFUL << SCB_AIRCR_VECTKEYSTAT_Pos)        /*!< SCB AIRCR: VECTKEYSTAT Mask */
+
+#define SCB_AIRCR_ENDIANESS_Pos            15U                                            /*!< SCB AIRCR: ENDIANESS Position */
+#define SCB_AIRCR_ENDIANESS_Msk            (1UL << SCB_AIRCR_ENDIANESS_Pos)               /*!< SCB AIRCR: ENDIANESS Mask */
+
+#define SCB_AIRCR_SYSRESETREQ_Pos           2U                                            /*!< SCB AIRCR: SYSRESETREQ Position */
+#define SCB_AIRCR_SYSRESETREQ_Msk          (1UL << SCB_AIRCR_SYSRESETREQ_Pos)             /*!< SCB AIRCR: SYSRESETREQ Mask */
+
+#define SCB_AIRCR_VECTCLRACTIVE_Pos         1U                                            /*!< SCB AIRCR: VECTCLRACTIVE Position */
+#define SCB_AIRCR_VECTCLRACTIVE_Msk        (1UL << SCB_AIRCR_VECTCLRACTIVE_Pos)           /*!< SCB AIRCR: VECTCLRACTIVE Mask */
+
+/* SCB System Control Register Definitions */
+#define SCB_SCR_SEVONPEND_Pos               4U                                            /*!< SCB SCR: SEVONPEND Position */
+#define SCB_SCR_SEVONPEND_Msk              (1UL << SCB_SCR_SEVONPEND_Pos)                 /*!< SCB SCR: SEVONPEND Mask */
+
+#define SCB_SCR_SLEEPDEEP_Pos               2U                                            /*!< SCB SCR: SLEEPDEEP Position */
+#define SCB_SCR_SLEEPDEEP_Msk              (1UL << SCB_SCR_SLEEPDEEP_Pos)                 /*!< SCB SCR: SLEEPDEEP Mask */
+
+#define SCB_SCR_SLEEPONEXIT_Pos             1U                                            /*!< SCB SCR: SLEEPONEXIT Position */
+#define SCB_SCR_SLEEPONEXIT_Msk            (1UL << SCB_SCR_SLEEPONEXIT_Pos)               /*!< SCB SCR: SLEEPONEXIT Mask */
+
+/* SCB Configuration Control Register Definitions */
+#define SCB_CCR_STKALIGN_Pos                9U                                            /*!< SCB CCR: STKALIGN Position */
+#define SCB_CCR_STKALIGN_Msk               (1UL << SCB_CCR_STKALIGN_Pos)                  /*!< SCB CCR: STKALIGN Mask */
+
+#define SCB_CCR_UNALIGN_TRP_Pos             3U                                            /*!< SCB CCR: UNALIGN_TRP Position */
+#define SCB_CCR_UNALIGN_TRP_Msk            (1UL << SCB_CCR_UNALIGN_TRP_Pos)               /*!< SCB CCR: UNALIGN_TRP Mask */
+
+/* SCB System Handler Control and State Register Definitions */
+#define SCB_SHCSR_SVCALLPENDED_Pos         15U                                            /*!< SCB SHCSR: SVCALLPENDED Position */
+#define SCB_SHCSR_SVCALLPENDED_Msk         (1UL << SCB_SHCSR_SVCALLPENDED_Pos)            /*!< SCB SHCSR: SVCALLPENDED Mask */
+
+/*@} end of group CMSIS_SCB */
+
+
+/**
+  \ingroup  CMSIS_core_register
+  \defgroup CMSIS_SysTick     System Tick Timer (SysTick)
+  \brief    Type definitions for the System Timer Registers.
+  @{
+ */
+
+/**
+  \brief  Structure type to access the System Timer (SysTick).
+ */
+typedef struct
+{
+  __IOM uint32_t CTRL;                   /*!< Offset: 0x000 (R/W)  SysTick Control and Status Register */
+  __IOM uint32_t LOAD;                   /*!< Offset: 0x004 (R/W)  SysTick Reload Value Register */
+  __IOM uint32_t VAL;                    /*!< Offset: 0x008 (R/W)  SysTick Current Value Register */
+  __IM  uint32_t CALIB;                  /*!< Offset: 0x00C (R/ )  SysTick Calibration Register */
+} SysTick_Type;
+
+/* SysTick Control / Status Register Definitions */
+#define SysTick_CTRL_COUNTFLAG_Pos         16U                                            /*!< SysTick CTRL: COUNTFLAG Position */
+#define SysTick_CTRL_COUNTFLAG_Msk         (1UL << SysTick_CTRL_COUNTFLAG_Pos)            /*!< SysTick CTRL: COUNTFLAG Mask */
+
+#define SysTick_CTRL_CLKSOURCE_Pos          2U                                            /*!< SysTick CTRL: CLKSOURCE Position */
+#define SysTick_CTRL_CLKSOURCE_Msk         (1UL << SysTick_CTRL_CLKSOURCE_Pos)            /*!< SysTick CTRL: CLKSOURCE Mask */
+
+#define SysTick_CTRL_TICKINT_Pos            1U                                            /*!< SysTick CTRL: TICKINT Position */
+#define SysTick_CTRL_TICKINT_Msk           (1UL << SysTick_CTRL_TICKINT_Pos)              /*!< SysTick CTRL: TICKINT Mask */
+
+#define SysTick_CTRL_ENABLE_Pos             0U                                            /*!< SysTick CTRL: ENABLE Position */
+#define SysTick_CTRL_ENABLE_Msk            (1UL /*<< SysTick_CTRL_ENABLE_Pos*/)           /*!< SysTick CTRL: ENABLE Mask */
+
+/* SysTick Reload Register Definitions */
+#define SysTick_LOAD_RELOAD_Pos             0U                                            /*!< SysTick LOAD: RELOAD Position */
+#define SysTick_LOAD_RELOAD_Msk            (0xFFFFFFUL /*<< SysTick_LOAD_RELOAD_Pos*/)    /*!< SysTick LOAD: RELOAD Mask */
+
+/* SysTick Current Register Definitions */
+#define SysTick_VAL_CURRENT_Pos             0U                                            /*!< SysTick VAL: CURRENT Position */
+#define SysTick_VAL_CURRENT_Msk            (0xFFFFFFUL /*<< SysTick_VAL_CURRENT_Pos*/)    /*!< SysTick VAL: CURRENT Mask */
+
+/* SysTick Calibration Register Definitions */
+#define SysTick_CALIB_NOREF_Pos            31U                                            /*!< SysTick CALIB: NOREF Position */
+#define SysTick_CALIB_NOREF_Msk            (1UL << SysTick_CALIB_NOREF_Pos)               /*!< SysTick CALIB: NOREF Mask */
+
+#define SysTick_CALIB_SKEW_Pos             30U                                            /*!< SysTick CALIB: SKEW Position */
+#define SysTick_CALIB_SKEW_Msk             (1UL << SysTick_CALIB_SKEW_Pos)                /*!< SysTick CALIB: SKEW Mask */
+
+#define SysTick_CALIB_TENMS_Pos             0U                                            /*!< SysTick CALIB: TENMS Position */
+#define SysTick_CALIB_TENMS_Msk            (0xFFFFFFUL /*<< SysTick_CALIB_TENMS_Pos*/)    /*!< SysTick CALIB: TENMS Mask */
+
+/*@} end of group CMSIS_SysTick */
+
+
+/**
+  \ingroup  CMSIS_core_register
+  \defgroup CMSIS_CoreDebug       Core Debug Registers (CoreDebug)
+  \brief    Cortex-M0 Core Debug Registers (DCB registers, SHCSR, and DFSR) are only accessible over DAP and not via processor.
+            Therefore they are not covered by the Cortex-M0 header file.
+  @{
+ */
+/*@} end of group CMSIS_CoreDebug */
+
+
+/**
+  \ingroup    CMSIS_core_register
+  \defgroup   CMSIS_core_bitfield     Core register bit field macros
+  \brief      Macros for use with bit field definitions (xxx_Pos, xxx_Msk).
+  @{
+ */
+
+/**
+  \brief   Mask and shift a bit field value for use in a register bit range.
+  \param[in] field  Name of the register bit field.
+  \param[in] value  Value of the bit field. This parameter is interpreted as an uint32_t type.
+  \return           Masked and shifted value.
+*/
+#define _VAL2FLD(field, value)    (((uint32_t)(value) << field ## _Pos) & field ## _Msk)
+
+/**
+  \brief     Mask and shift a register value to extract a bit field value.
+  \param[in] field  Name of the register bit field.
+  \param[in] value  Value of register. This parameter is interpreted as an uint32_t type.
+  \return           Masked and shifted bit field value.
+*/
+#define _FLD2VAL(field, value)    (((uint32_t)(value) & field ## _Msk) >> field ## _Pos)
+
+/*@} end of group CMSIS_core_bitfield */
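A short sketch of the two helpers using the AIRCR fields defined earlier in this file (illustrative only, not part of the header):

uint32_t reg = _VAL2FLD(SCB_AIRCR_VECTKEY, 0x05FAUL)   /* key shifted into bits [31:16] */
             | SCB_AIRCR_SYSRESETREQ_Msk;              /* OR in a single-bit field      */
uint32_t key = _FLD2VAL(SCB_AIRCR_VECTKEYSTAT, reg);   /* extracts 0x05FA again         */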
+
+
+/**
+  \ingroup    CMSIS_core_register
+  \defgroup   CMSIS_core_base     Core Definitions
+  \brief      Definitions for base addresses, unions, and structures.
+  @{
+ */
+
+/* Memory mapping of Core Hardware */
+#define SCS_BASE            (0xE000E000UL)                            /*!< System Control Space Base Address */
+#define SysTick_BASE        (SCS_BASE +  0x0010UL)                    /*!< SysTick Base Address */
+#define NVIC_BASE           (SCS_BASE +  0x0100UL)                    /*!< NVIC Base Address */
+#define SCB_BASE            (SCS_BASE +  0x0D00UL)                    /*!< System Control Block Base Address */
+
+#define SCB                 ((SCB_Type       *)     SCB_BASE      )   /*!< SCB configuration struct */
+#define SysTick             ((SysTick_Type   *)     SysTick_BASE  )   /*!< SysTick configuration struct */
+#define NVIC                ((NVIC_Type      *)     NVIC_BASE     )   /*!< NVIC configuration struct */
+
+
+/*@} */
+
+
+
+/*******************************************************************************
+ *                Hardware Abstraction Layer
+  Core Function Interface contains:
+  - Core NVIC Functions
+  - Core SysTick Functions
+  - Core Register Access Functions
+ ******************************************************************************/
+/**
+  \defgroup CMSIS_Core_FunctionInterface Functions and Instructions Reference
+*/
+
+
+
+/* ##########################   NVIC functions  #################################### */
+/**
+  \ingroup  CMSIS_Core_FunctionInterface
+  \defgroup CMSIS_Core_NVICFunctions NVIC Functions
+  \brief    Functions that manage interrupts and exceptions via the NVIC.
+  @{
+ */
+
+#ifdef CMSIS_NVIC_VIRTUAL
+  #ifndef CMSIS_NVIC_VIRTUAL_HEADER_FILE
+    #define CMSIS_NVIC_VIRTUAL_HEADER_FILE "cmsis_nvic_virtual.h"
+  #endif
+  #include CMSIS_NVIC_VIRTUAL_HEADER_FILE
+#else
+  #define NVIC_SetPriorityGrouping    __NVIC_SetPriorityGrouping
+  #define NVIC_GetPriorityGrouping    __NVIC_GetPriorityGrouping
+  #define NVIC_EnableIRQ              __NVIC_EnableIRQ
+  #define NVIC_GetEnableIRQ           __NVIC_GetEnableIRQ
+  #define NVIC_DisableIRQ             __NVIC_DisableIRQ
+  #define NVIC_GetPendingIRQ          __NVIC_GetPendingIRQ
+  #define NVIC_SetPendingIRQ          __NVIC_SetPendingIRQ
+  #define NVIC_ClearPendingIRQ        __NVIC_ClearPendingIRQ
+/*#define NVIC_GetActive              __NVIC_GetActive             not available for Cortex-M0 */
+  #define NVIC_SetPriority            __NVIC_SetPriority
+  #define NVIC_GetPriority            __NVIC_GetPriority
+  #define NVIC_SystemReset            __NVIC_SystemReset
+#endif /* CMSIS_NVIC_VIRTUAL */
+
+#ifdef CMSIS_VECTAB_VIRTUAL
+  #ifndef CMSIS_VECTAB_VIRTUAL_HEADER_FILE
+    #define CMSIS_VECTAB_VIRTUAL_HEADER_FILE "cmsis_vectab_virtual.h"
+  #endif
+  #include CMSIS_VECTAB_VIRTUAL_HEADER_FILE
+#else
+  #define NVIC_SetVector              __NVIC_SetVector
+  #define NVIC_GetVector              __NVIC_GetVector
+#endif  /* (CMSIS_VECTAB_VIRTUAL) */
+
+#define NVIC_USER_IRQ_OFFSET          16
+
+
+/* The following EXC_RETURN values are saved to the LR on exception entry */
+#define EXC_RETURN_HANDLER         (0xFFFFFFF1UL)     /* return to Handler mode, uses MSP after return                               */
+#define EXC_RETURN_THREAD_MSP      (0xFFFFFFF9UL)     /* return to Thread mode, uses MSP after return                                */
+#define EXC_RETURN_THREAD_PSP      (0xFFFFFFFDUL)     /* return to Thread mode, uses PSP after return                                */
+
+
+/* Interrupt Priorities are WORD accessible only under Armv6-M                  */
+/* The following MACROS handle generation of the register offset and byte masks */
+#define _BIT_SHIFT(IRQn)         (  ((((uint32_t)(int32_t)(IRQn))         )      &  0x03UL) * 8UL)
+#define _SHP_IDX(IRQn)           ( (((((uint32_t)(int32_t)(IRQn)) & 0x0FUL)-8UL) >>    2UL)      )
+#define _IP_IDX(IRQn)            (   (((uint32_t)(int32_t)(IRQn))                >>    2UL)      )
+
+#define __NVIC_SetPriorityGrouping(X) (void)(X)
+#define __NVIC_GetPriorityGrouping()  (0U)
+
+/**
+  \brief   Enable Interrupt
+  \details Enables a device specific interrupt in the NVIC interrupt controller.
+  \param [in]      IRQn  Device specific interrupt number.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE void __NVIC_EnableIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->ISER[0U] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+  }
+}
+
+
+/**
+  \brief   Get Interrupt Enable status
+  \details Returns a device specific interrupt enable status from the NVIC interrupt controller.
+  \param [in]      IRQn  Device specific interrupt number.
+  \return             0  Interrupt is not enabled.
+  \return             1  Interrupt is enabled.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE uint32_t __NVIC_GetEnableIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    return((uint32_t)(((NVIC->ISER[0U] & (1UL << (((uint32_t)IRQn) & 0x1FUL))) != 0UL) ? 1UL : 0UL));
+  }
+  else
+  {
+    return(0U);
+  }
+}
+
+
+/**
+  \brief   Disable Interrupt
+  \details Disables a device specific interrupt in the NVIC interrupt controller.
+  \param [in]      IRQn  Device specific interrupt number.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE void __NVIC_DisableIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->ICER[0U] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+    __DSB();
+    __ISB();
+  }
+}
+
+
+/**
+  \brief   Get Pending Interrupt
+  \details Reads the NVIC pending register and returns the pending bit for the specified device specific interrupt.
+  \param [in]      IRQn  Device specific interrupt number.
+  \return             0  Interrupt status is not pending.
+  \return             1  Interrupt status is pending.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE uint32_t __NVIC_GetPendingIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    return((uint32_t)(((NVIC->ISPR[0U] & (1UL << (((uint32_t)IRQn) & 0x1FUL))) != 0UL) ? 1UL : 0UL));
+  }
+  else
+  {
+    return(0U);
+  }
+}
+
+
+/**
+  \brief   Set Pending Interrupt
+  \details Sets the pending bit of a device specific interrupt in the NVIC pending register.
+  \param [in]      IRQn  Device specific interrupt number.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE void __NVIC_SetPendingIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->ISPR[0U] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+  }
+}
+
+
+/**
+  \brief   Clear Pending Interrupt
+  \details Clears the pending bit of a device specific interrupt in the NVIC pending register.
+  \param [in]      IRQn  Device specific interrupt number.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE void __NVIC_ClearPendingIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->ICPR[0U] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+  }
+}
+
+
+/**
+  \brief   Set Interrupt Priority
+  \details Sets the priority of a device specific interrupt or a processor exception.
+           The interrupt number can be positive to specify a device specific interrupt,
+           or negative to specify a processor exception.
+  \param [in]      IRQn  Interrupt number.
+  \param [in]  priority  Priority to set.
+  \note    The priority cannot be set for every processor exception.
+ */
+__STATIC_INLINE void __NVIC_SetPriority(IRQn_Type IRQn, uint32_t priority)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->IP[_IP_IDX(IRQn)]  = ((uint32_t)(NVIC->IP[_IP_IDX(IRQn)]  & ~(0xFFUL << _BIT_SHIFT(IRQn))) |
+       (((priority << (8U - __NVIC_PRIO_BITS)) & (uint32_t)0xFFUL) << _BIT_SHIFT(IRQn)));
+  }
+  else
+  {
+    SCB->SHP[_SHP_IDX(IRQn)] = ((uint32_t)(SCB->SHP[_SHP_IDX(IRQn)] & ~(0xFFUL << _BIT_SHIFT(IRQn))) |
+       (((priority << (8U - __NVIC_PRIO_BITS)) & (uint32_t)0xFFUL) << _BIT_SHIFT(IRQn)));
+  }
+}
+
+
+/**
+  \brief   Get Interrupt Priority
+  \details Reads the priority of a device specific interrupt or a processor exception.
+           The interrupt number can be positive to specify a device specific interrupt,
+           or negative to specify a processor exception.
+  \param [in]   IRQn  Interrupt number.
+  \return             Interrupt Priority.
+                      Value is aligned automatically to the implemented priority bits of the microcontroller.
+ */
+__STATIC_INLINE uint32_t __NVIC_GetPriority(IRQn_Type IRQn)
+{
+
+  if ((int32_t)(IRQn) >= 0)
+  {
+    return((uint32_t)(((NVIC->IP[ _IP_IDX(IRQn)] >> _BIT_SHIFT(IRQn) ) & (uint32_t)0xFFUL) >> (8U - __NVIC_PRIO_BITS)));
+  }
+  else
+  {
+    return((uint32_t)(((SCB->SHP[_SHP_IDX(IRQn)] >> _BIT_SHIFT(IRQn) ) & (uint32_t)0xFFUL) >> (8U - __NVIC_PRIO_BITS)));
+  }
+}
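Typical use of the NVIC functions above, sketched with a placeholder IRQ number (a real IRQn_Type value such as TIMx_IRQn would come from the device header, not from this file):

NVIC_SetPriority(TIMx_IRQn, 1U);       /* 0..3 are valid with the default __NVIC_PRIO_BITS == 2 */
NVIC_EnableIRQ(TIMx_IRQn);

if (NVIC_GetPendingIRQ(TIMx_IRQn) != 0U)
{
  NVIC_ClearPendingIRQ(TIMx_IRQn);     /* drop a request that is pending but not yet taken */
}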
+
+
+/**
+  \brief   Encode Priority
+  \details Encodes the priority for an interrupt with the given priority group,
+           preemptive priority value, and subpriority value.
+           In case of a conflict between priority grouping and available
+           priority bits (__NVIC_PRIO_BITS), the smallest possible priority group is set.
+  \param [in]     PriorityGroup  Used priority group.
+  \param [in]   PreemptPriority  Preemptive priority value (starting from 0).
+  \param [in]       SubPriority  Subpriority value (starting from 0).
+  \return                        Encoded priority. Value can be used in the function \ref NVIC_SetPriority().
+ */
+__STATIC_INLINE uint32_t NVIC_EncodePriority (uint32_t PriorityGroup, uint32_t PreemptPriority, uint32_t SubPriority)
+{
+  uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL);   /* only values 0..7 are used          */
+  uint32_t PreemptPriorityBits;
+  uint32_t SubPriorityBits;
+
+  PreemptPriorityBits = ((7UL - PriorityGroupTmp) > (uint32_t)(__NVIC_PRIO_BITS)) ? (uint32_t)(__NVIC_PRIO_BITS) : (uint32_t)(7UL - PriorityGroupTmp);
+  SubPriorityBits     = ((PriorityGroupTmp + (uint32_t)(__NVIC_PRIO_BITS)) < (uint32_t)7UL) ? (uint32_t)0UL : (uint32_t)((PriorityGroupTmp - 7UL) + (uint32_t)(__NVIC_PRIO_BITS));
+
+  return (
+           ((PreemptPriority & (uint32_t)((1UL << (PreemptPriorityBits)) - 1UL)) << SubPriorityBits) |
+           ((SubPriority     & (uint32_t)((1UL << (SubPriorityBits    )) - 1UL)))
+         );
+}
+
+
+/**
+  \brief   Decode Priority
+  \details Decodes an interrupt priority value with a given priority group to
+           preemptive priority value and subpriority value.
+           In case of a conflict between priority grouping and available
+           priority bits (__NVIC_PRIO_BITS) the smallest possible priority group is set.
+  \param [in]         Priority   Priority value, which can be retrieved with the function \ref NVIC_GetPriority().
+  \param [in]     PriorityGroup  Used priority group.
+  \param [out] pPreemptPriority  Preemptive priority value (starting from 0).
+  \param [out]     pSubPriority  Subpriority value (starting from 0).
+ */
+__STATIC_INLINE void NVIC_DecodePriority (uint32_t Priority, uint32_t PriorityGroup, uint32_t* const pPreemptPriority, uint32_t* const pSubPriority)
+{
+  uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL);   /* only values 0..7 are used          */
+  uint32_t PreemptPriorityBits;
+  uint32_t SubPriorityBits;
+
+  PreemptPriorityBits = ((7UL - PriorityGroupTmp) > (uint32_t)(__NVIC_PRIO_BITS)) ? (uint32_t)(__NVIC_PRIO_BITS) : (uint32_t)(7UL - PriorityGroupTmp);
+  SubPriorityBits     = ((PriorityGroupTmp + (uint32_t)(__NVIC_PRIO_BITS)) < (uint32_t)7UL) ? (uint32_t)0UL : (uint32_t)((PriorityGroupTmp - 7UL) + (uint32_t)(__NVIC_PRIO_BITS));
+
+  *pPreemptPriority = (Priority >> SubPriorityBits) & (uint32_t)((1UL << (PreemptPriorityBits)) - 1UL);
+  *pSubPriority     = (Priority                   ) & (uint32_t)((1UL << (SubPriorityBits    )) - 1UL);
+}
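A worked example of the encode/decode pair above, assuming the default __NVIC_PRIO_BITS == 2 (illustrative only):

uint32_t enc = NVIC_EncodePriority(6U, 1U, 1U);   /* group 6: 1 preempt bit, 1 sub bit -> 3 */
uint32_t pre, sub;
NVIC_DecodePriority(enc, 6U, &pre, &sub);         /* recovers pre == 1, sub == 1            */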
+
+
+
+/**
+  \brief   Set Interrupt Vector
+  \details Sets an interrupt vector in SRAM based interrupt vector table.
+           The interrupt number can be positive to specify a device specific interrupt,
+           or negative to specify a processor exception.
+           Address 0 must be mapped to SRAM.
+  \param [in]   IRQn      Interrupt number
+  \param [in]   vector    Address of interrupt handler function
+ */
+__STATIC_INLINE void __NVIC_SetVector(IRQn_Type IRQn, uint32_t vector)
+{
+  uint32_t *vectors = (uint32_t *)0x0U;
+  vectors[(int32_t)IRQn + NVIC_USER_IRQ_OFFSET] = vector;
+}
+
+
+/**
+  \brief   Get Interrupt Vector
+  \details Reads an interrupt vector from interrupt vector table.
+           The interrupt number can be positive to specify a device specific interrupt,
+           or negative to specify a processor exception.
+  \param [in]   IRQn      Interrupt number.
+  \return                 Address of interrupt handler function
+ */
+__STATIC_INLINE uint32_t __NVIC_GetVector(IRQn_Type IRQn)
+{
+  uint32_t *vectors = (uint32_t *)0x0U;
+  return vectors[(int32_t)IRQn + NVIC_USER_IRQ_OFFSET];
+}
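A sketch of a runtime handler swap using the functions above; it assumes a device where address 0 is remapped to SRAM (see the note in the function description), and My_TIMx_IRQHandler / TIMx_IRQn are placeholders for device-specific symbols:

NVIC_SetVector(TIMx_IRQn, (uint32_t)My_TIMx_IRQHandler);   /* install the new handler */
uint32_t entry = NVIC_GetVector(TIMx_IRQn);                 /* read the entry back     */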
+
+
+/**
+  \brief   System Reset
+  \details Initiates a system reset request to reset the MCU.
+ */
+__NO_RETURN __STATIC_INLINE void __NVIC_SystemReset(void)
+{
+  __DSB();                                                          /* Ensure all outstanding memory accesses, including
+                                                                       buffered writes, are completed before reset */
+  SCB->AIRCR  = ((0x5FAUL << SCB_AIRCR_VECTKEY_Pos) |
+                 SCB_AIRCR_SYSRESETREQ_Msk);
+  __DSB();                                                          /* Ensure completion of memory access */
+
+  for(;;)                                                           /* wait until reset */
+  {
+    __NOP();
+  }
+}
+
+/*@} end of CMSIS_Core_NVICFunctions */
+
+
+/* ##########################  FPU functions  #################################### */
+/**
+  \ingroup  CMSIS_Core_FunctionInterface
+  \defgroup CMSIS_Core_FpuFunctions FPU Functions
+  \brief    Function that provides FPU type.
+  @{
+ */
+
+/**
+  \brief   get FPU type
+  \details returns the FPU type
+  \returns
+   - \b  0: No FPU
+   - \b  1: Single precision FPU
+   - \b  2: Double + Single precision FPU
+ */
+__STATIC_INLINE uint32_t SCB_GetFPUType(void)
+{
+    return 0U;           /* No FPU */
+}
+
+
+/*@} end of CMSIS_Core_FpuFunctions */
+
+
+
+/* ##################################    SysTick function  ############################################ */
+/**
+  \ingroup  CMSIS_Core_FunctionInterface
+  \defgroup CMSIS_Core_SysTickFunctions SysTick Functions
+  \brief    Functions that configure the System.
+  @{
+ */
+
+#if defined (__Vendor_SysTickConfig) && (__Vendor_SysTickConfig == 0U)
+
+/**
+  \brief   System Tick Configuration
+  \details Initializes the System Timer and its interrupt, and starts the System Tick Timer.
+           Counter is in free running mode to generate periodic interrupts.
+  \param [in]  ticks  Number of ticks between two interrupts.
+  \return          0  Function succeeded.
+  \return          1  Function failed.
+  \note    When the variable <b>__Vendor_SysTickConfig</b> is set to 1, then the
+           function <b>SysTick_Config</b> is not included. In this case, the file <b><i>device</i>.h</b>
+           must contain a vendor-specific implementation of this function.
+ */
+__STATIC_INLINE uint32_t SysTick_Config(uint32_t ticks)
+{
+  if ((ticks - 1UL) > SysTick_LOAD_RELOAD_Msk)
+  {
+    return (1UL);                                                   /* Reload value impossible */
+  }
+
+  SysTick->LOAD  = (uint32_t)(ticks - 1UL);                         /* set reload register */
+  NVIC_SetPriority (SysTick_IRQn, (1UL << __NVIC_PRIO_BITS) - 1UL); /* set Priority for Systick Interrupt */
+  SysTick->VAL   = 0UL;                                             /* Load the SysTick Counter Value */
+  SysTick->CTRL  = SysTick_CTRL_CLKSOURCE_Msk |
+                   SysTick_CTRL_TICKINT_Msk   |
+                   SysTick_CTRL_ENABLE_Msk;                         /* Enable SysTick IRQ and SysTick Timer */
+  return (0UL);                                                     /* Function successful */
+}
+
+#endif
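Typical call of SysTick_Config for a 1 ms tick; SystemCoreClock is assumed to be the usual CMSIS core-clock variable provided by the device's system_<device>.c, not by this header (illustrative only):

if (SysTick_Config(SystemCoreClock / 1000U) != 0U)
{
  while (1) { __NOP(); }   /* reload value did not fit in 24 bits: stop here */
}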
+
+/*@} end of CMSIS_Core_SysTickFunctions */
+
+
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __CORE_CM0_H_DEPENDANT */
+
+#endif /* __CMSIS_GENERIC */

File diffs are limited because there are too many
+ 1083 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm0plus.h


+ 976 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm1.h

@@ -0,0 +1,976 @@
+/**************************************************************************//**
+ * @file     core_cm1.h
+ * @brief    CMSIS Cortex-M1 Core Peripheral Access Layer Header File
+ * @version  V1.0.0
+ * @date     23. July 2018
+ ******************************************************************************/
+/*
+ * Copyright (c) 2009-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if   defined ( __ICCARM__ )
+  #pragma system_include         /* treat file as system include file for MISRA check */
+#elif defined (__clang__)
+  #pragma clang system_header   /* treat file as system include file */
+#endif
+
+#ifndef __CORE_CM1_H_GENERIC
+#define __CORE_CM1_H_GENERIC
+
+#include <stdint.h>
+
+#ifdef __cplusplus
+ extern "C" {
+#endif
+
+/**
+  \page CMSIS_MISRA_Exceptions  MISRA-C:2004 Compliance Exceptions
+  CMSIS violates the following MISRA-C:2004 rules:
+
+   \li Required Rule 8.5, object/function definition in header file.<br>
+     Function definitions in header files are used to allow 'inlining'.
+
+   \li Required Rule 18.4, declaration of union type or object of union type: '{...}'.<br>
+     Unions are used for effective representation of core registers.
+
+   \li Advisory Rule 19.7, Function-like macro defined.<br>
+     Function-like macros are used to allow more efficient code.
+ */
+
+
+/*******************************************************************************
+ *                 CMSIS definitions
+ ******************************************************************************/
+/**
+  \ingroup Cortex_M1
+  @{
+ */
+
+#include "cmsis_version.h"
+ 
+/*  CMSIS CM1 definitions */
+#define __CM1_CMSIS_VERSION_MAIN  (__CM_CMSIS_VERSION_MAIN)              /*!< \deprecated [31:16] CMSIS HAL main version */
+#define __CM1_CMSIS_VERSION_SUB   (__CM_CMSIS_VERSION_SUB)               /*!< \deprecated [15:0]  CMSIS HAL sub version */
+#define __CM1_CMSIS_VERSION       ((__CM1_CMSIS_VERSION_MAIN << 16U) | \
+                                    __CM1_CMSIS_VERSION_SUB           )  /*!< \deprecated CMSIS HAL version number */
+
+#define __CORTEX_M                (1U)                                   /*!< Cortex-M Core */
+
+/** __FPU_USED indicates whether an FPU is used or not.
+    This core does not support an FPU at all
+*/
+#define __FPU_USED       0U
+
+#if defined ( __CC_ARM )
+  #if defined __TARGET_FPU_VFP
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined (__ARMCC_VERSION) && (__ARMCC_VERSION >= 6010050)
+  #if defined __ARM_PCS_VFP
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __GNUC__ )
+  #if defined (__VFP_FP__) && !defined(__SOFTFP__)
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __ICCARM__ )
+  #if defined __ARMVFP__
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __TI_ARM__ )
+  #if defined __TI_VFP_SUPPORT__
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __TASKING__ )
+  #if defined __FPU_VFP__
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#elif defined ( __CSMC__ )
+  #if ( __CSMC__ & 0x400U)
+    #error "Compiler generates FPU instructions for a device without an FPU (check __FPU_PRESENT)"
+  #endif
+
+#endif
+
+#include "cmsis_compiler.h"               /* CMSIS compiler specific defines */
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __CORE_CM1_H_GENERIC */
+
+#ifndef __CMSIS_GENERIC
+
+#ifndef __CORE_CM1_H_DEPENDANT
+#define __CORE_CM1_H_DEPENDANT
+
+#ifdef __cplusplus
+ extern "C" {
+#endif
+
+/* check device defines and use defaults */
+#if defined __CHECK_DEVICE_DEFINES
+  #ifndef __CM1_REV
+    #define __CM1_REV               0x0100U
+    #warning "__CM1_REV not defined in device header file; using default!"
+  #endif
+
+  #ifndef __NVIC_PRIO_BITS
+    #define __NVIC_PRIO_BITS          2U
+    #warning "__NVIC_PRIO_BITS not defined in device header file; using default!"
+  #endif
+
+  #ifndef __Vendor_SysTickConfig
+    #define __Vendor_SysTickConfig    0U
+    #warning "__Vendor_SysTickConfig not defined in device header file; using default!"
+  #endif
+#endif
+
+/* IO definitions (access restrictions to peripheral registers) */
+/**
+    \defgroup CMSIS_glob_defs CMSIS Global Defines
+
+    <strong>IO Type Qualifiers</strong> are used
+    \li to specify the access to peripheral variables.
+    \li for automatic generation of peripheral register debug information.
+*/
+#ifdef __cplusplus
+  #define   __I     volatile             /*!< Defines 'read only' permissions */
+#else
+  #define   __I     volatile const       /*!< Defines 'read only' permissions */
+#endif
+#define     __O     volatile             /*!< Defines 'write only' permissions */
+#define     __IO    volatile             /*!< Defines 'read / write' permissions */
+
+/* following defines should be used for structure members */
+#define     __IM     volatile const      /*! Defines 'read only' structure member permissions */
+#define     __OM     volatile            /*! Defines 'write only' structure member permissions */
+#define     __IOM    volatile            /*! Defines 'read / write' structure member permissions */
+
+/*@} end of group Cortex_M1 */
+
+
+
+/*******************************************************************************
+ *                 Register Abstraction
+  Core Register contain:
+  - Core Register
+  - Core NVIC Register
+  - Core SCB Register
+  - Core SysTick Register
+ ******************************************************************************/
+/**
+  \defgroup CMSIS_core_register Defines and Type Definitions
+  \brief Type definitions and defines for Cortex-M processor based devices.
+*/
+
+/**
+  \ingroup    CMSIS_core_register
+  \defgroup   CMSIS_CORE  Status and Control Registers
+  \brief      Core Register type definitions.
+  @{
+ */
+
+/**
+  \brief  Union type to access the Application Program Status Register (APSR).
+ */
+typedef union
+{
+  struct
+  {
+    uint32_t _reserved0:28;              /*!< bit:  0..27  Reserved */
+    uint32_t V:1;                        /*!< bit:     28  Overflow condition code flag */
+    uint32_t C:1;                        /*!< bit:     29  Carry condition code flag */
+    uint32_t Z:1;                        /*!< bit:     30  Zero condition code flag */
+    uint32_t N:1;                        /*!< bit:     31  Negative condition code flag */
+  } b;                                   /*!< Structure used for bit  access */
+  uint32_t w;                            /*!< Type      used for word access */
+} APSR_Type;
+
+/* APSR Register Definitions */
+#define APSR_N_Pos                         31U                                            /*!< APSR: N Position */
+#define APSR_N_Msk                         (1UL << APSR_N_Pos)                            /*!< APSR: N Mask */
+
+#define APSR_Z_Pos                         30U                                            /*!< APSR: Z Position */
+#define APSR_Z_Msk                         (1UL << APSR_Z_Pos)                            /*!< APSR: Z Mask */
+
+#define APSR_C_Pos                         29U                                            /*!< APSR: C Position */
+#define APSR_C_Msk                         (1UL << APSR_C_Pos)                            /*!< APSR: C Mask */
+
+#define APSR_V_Pos                         28U                                            /*!< APSR: V Position */
+#define APSR_V_Msk                         (1UL << APSR_V_Pos)                            /*!< APSR: V Mask */
+
+
+/**
+  \brief  Union type to access the Interrupt Program Status Register (IPSR).
+ */
+typedef union
+{
+  struct
+  {
+    uint32_t ISR:9;                      /*!< bit:  0.. 8  Exception number */
+    uint32_t _reserved0:23;              /*!< bit:  9..31  Reserved */
+  } b;                                   /*!< Structure used for bit  access */
+  uint32_t w;                            /*!< Type      used for word access */
+} IPSR_Type;
+
+/* IPSR Register Definitions */
+#define IPSR_ISR_Pos                        0U                                            /*!< IPSR: ISR Position */
+#define IPSR_ISR_Msk                       (0x1FFUL /*<< IPSR_ISR_Pos*/)                  /*!< IPSR: ISR Mask */
+
+
+/**
+  \brief  Union type to access the Special-Purpose Program Status Registers (xPSR).
+ */
+typedef union
+{
+  struct
+  {
+    uint32_t ISR:9;                      /*!< bit:  0.. 8  Exception number */
+    uint32_t _reserved0:15;              /*!< bit:  9..23  Reserved */
+    uint32_t T:1;                        /*!< bit:     24  Thumb bit        (read 0) */
+    uint32_t _reserved1:3;               /*!< bit: 25..27  Reserved */
+    uint32_t V:1;                        /*!< bit:     28  Overflow condition code flag */
+    uint32_t C:1;                        /*!< bit:     29  Carry condition code flag */
+    uint32_t Z:1;                        /*!< bit:     30  Zero condition code flag */
+    uint32_t N:1;                        /*!< bit:     31  Negative condition code flag */
+  } b;                                   /*!< Structure used for bit  access */
+  uint32_t w;                            /*!< Type      used for word access */
+} xPSR_Type;
+
+/* xPSR Register Definitions */
+#define xPSR_N_Pos                         31U                                            /*!< xPSR: N Position */
+#define xPSR_N_Msk                         (1UL << xPSR_N_Pos)                            /*!< xPSR: N Mask */
+
+#define xPSR_Z_Pos                         30U                                            /*!< xPSR: Z Position */
+#define xPSR_Z_Msk                         (1UL << xPSR_Z_Pos)                            /*!< xPSR: Z Mask */
+
+#define xPSR_C_Pos                         29U                                            /*!< xPSR: C Position */
+#define xPSR_C_Msk                         (1UL << xPSR_C_Pos)                            /*!< xPSR: C Mask */
+
+#define xPSR_V_Pos                         28U                                            /*!< xPSR: V Position */
+#define xPSR_V_Msk                         (1UL << xPSR_V_Pos)                            /*!< xPSR: V Mask */
+
+#define xPSR_T_Pos                         24U                                            /*!< xPSR: T Position */
+#define xPSR_T_Msk                         (1UL << xPSR_T_Pos)                            /*!< xPSR: T Mask */
+
+#define xPSR_ISR_Pos                        0U                                            /*!< xPSR: ISR Position */
+#define xPSR_ISR_Msk                       (0x1FFUL /*<< xPSR_ISR_Pos*/)                  /*!< xPSR: ISR Mask */
+
+
+/**
+  \brief  Union type to access the Control Registers (CONTROL).
+ */
+typedef union
+{
+  struct
+  {
+    uint32_t _reserved0:1;               /*!< bit:      0  Reserved */
+    uint32_t SPSEL:1;                    /*!< bit:      1  Stack to be used */
+    uint32_t _reserved1:30;              /*!< bit:  2..31  Reserved */
+  } b;                                   /*!< Structure used for bit  access */
+  uint32_t w;                            /*!< Type      used for word access */
+} CONTROL_Type;
+
+/* CONTROL Register Definitions */
+#define CONTROL_SPSEL_Pos                   1U                                            /*!< CONTROL: SPSEL Position */
+#define CONTROL_SPSEL_Msk                  (1UL << CONTROL_SPSEL_Pos)                     /*!< CONTROL: SPSEL Mask */
+
+/*@} end of group CMSIS_CORE */
+
+
+/**
+  \ingroup    CMSIS_core_register
+  \defgroup   CMSIS_NVIC  Nested Vectored Interrupt Controller (NVIC)
+  \brief      Type definitions for the NVIC Registers
+  @{
+ */
+
+/**
+  \brief  Structure type to access the Nested Vectored Interrupt Controller (NVIC).
+ */
+typedef struct
+{
+  __IOM uint32_t ISER[1U];               /*!< Offset: 0x000 (R/W)  Interrupt Set Enable Register */
+        uint32_t RESERVED0[31U];
+  __IOM uint32_t ICER[1U];               /*!< Offset: 0x080 (R/W)  Interrupt Clear Enable Register */
+        uint32_t RSERVED1[31U];
+  __IOM uint32_t ISPR[1U];               /*!< Offset: 0x100 (R/W)  Interrupt Set Pending Register */
+        uint32_t RESERVED2[31U];
+  __IOM uint32_t ICPR[1U];               /*!< Offset: 0x180 (R/W)  Interrupt Clear Pending Register */
+        uint32_t RESERVED3[31U];
+        uint32_t RESERVED4[64U];
+  __IOM uint32_t IP[8U];                 /*!< Offset: 0x300 (R/W)  Interrupt Priority Register */
+}  NVIC_Type;
+
+/*@} end of group CMSIS_NVIC */
+
+
+/**
+  \ingroup  CMSIS_core_register
+  \defgroup CMSIS_SCB     System Control Block (SCB)
+  \brief    Type definitions for the System Control Block Registers
+  @{
+ */
+
+/**
+  \brief  Structure type to access the System Control Block (SCB).
+ */
+typedef struct
+{
+  __IM  uint32_t CPUID;                  /*!< Offset: 0x000 (R/ )  CPUID Base Register */
+  __IOM uint32_t ICSR;                   /*!< Offset: 0x004 (R/W)  Interrupt Control and State Register */
+        uint32_t RESERVED0;
+  __IOM uint32_t AIRCR;                  /*!< Offset: 0x00C (R/W)  Application Interrupt and Reset Control Register */
+  __IOM uint32_t SCR;                    /*!< Offset: 0x010 (R/W)  System Control Register */
+  __IOM uint32_t CCR;                    /*!< Offset: 0x014 (R/W)  Configuration Control Register */
+        uint32_t RESERVED1;
+  __IOM uint32_t SHP[2U];                /*!< Offset: 0x01C (R/W)  System Handlers Priority Registers. [0] is RESERVED */
+  __IOM uint32_t SHCSR;                  /*!< Offset: 0x024 (R/W)  System Handler Control and State Register */
+} SCB_Type;
+
+/* SCB CPUID Register Definitions */
+#define SCB_CPUID_IMPLEMENTER_Pos          24U                                            /*!< SCB CPUID: IMPLEMENTER Position */
+#define SCB_CPUID_IMPLEMENTER_Msk          (0xFFUL << SCB_CPUID_IMPLEMENTER_Pos)          /*!< SCB CPUID: IMPLEMENTER Mask */
+
+#define SCB_CPUID_VARIANT_Pos              20U                                            /*!< SCB CPUID: VARIANT Position */
+#define SCB_CPUID_VARIANT_Msk              (0xFUL << SCB_CPUID_VARIANT_Pos)               /*!< SCB CPUID: VARIANT Mask */
+
+#define SCB_CPUID_ARCHITECTURE_Pos         16U                                            /*!< SCB CPUID: ARCHITECTURE Position */
+#define SCB_CPUID_ARCHITECTURE_Msk         (0xFUL << SCB_CPUID_ARCHITECTURE_Pos)          /*!< SCB CPUID: ARCHITECTURE Mask */
+
+#define SCB_CPUID_PARTNO_Pos                4U                                            /*!< SCB CPUID: PARTNO Position */
+#define SCB_CPUID_PARTNO_Msk               (0xFFFUL << SCB_CPUID_PARTNO_Pos)              /*!< SCB CPUID: PARTNO Mask */
+
+#define SCB_CPUID_REVISION_Pos              0U                                            /*!< SCB CPUID: REVISION Position */
+#define SCB_CPUID_REVISION_Msk             (0xFUL /*<< SCB_CPUID_REVISION_Pos*/)          /*!< SCB CPUID: REVISION Mask */
+
+/* SCB Interrupt Control State Register Definitions */
+#define SCB_ICSR_NMIPENDSET_Pos            31U                                            /*!< SCB ICSR: NMIPENDSET Position */
+#define SCB_ICSR_NMIPENDSET_Msk            (1UL << SCB_ICSR_NMIPENDSET_Pos)               /*!< SCB ICSR: NMIPENDSET Mask */
+
+#define SCB_ICSR_PENDSVSET_Pos             28U                                            /*!< SCB ICSR: PENDSVSET Position */
+#define SCB_ICSR_PENDSVSET_Msk             (1UL << SCB_ICSR_PENDSVSET_Pos)                /*!< SCB ICSR: PENDSVSET Mask */
+
+#define SCB_ICSR_PENDSVCLR_Pos             27U                                            /*!< SCB ICSR: PENDSVCLR Position */
+#define SCB_ICSR_PENDSVCLR_Msk             (1UL << SCB_ICSR_PENDSVCLR_Pos)                /*!< SCB ICSR: PENDSVCLR Mask */
+
+#define SCB_ICSR_PENDSTSET_Pos             26U                                            /*!< SCB ICSR: PENDSTSET Position */
+#define SCB_ICSR_PENDSTSET_Msk             (1UL << SCB_ICSR_PENDSTSET_Pos)                /*!< SCB ICSR: PENDSTSET Mask */
+
+#define SCB_ICSR_PENDSTCLR_Pos             25U                                            /*!< SCB ICSR: PENDSTCLR Position */
+#define SCB_ICSR_PENDSTCLR_Msk             (1UL << SCB_ICSR_PENDSTCLR_Pos)                /*!< SCB ICSR: PENDSTCLR Mask */
+
+#define SCB_ICSR_ISRPREEMPT_Pos            23U                                            /*!< SCB ICSR: ISRPREEMPT Position */
+#define SCB_ICSR_ISRPREEMPT_Msk            (1UL << SCB_ICSR_ISRPREEMPT_Pos)               /*!< SCB ICSR: ISRPREEMPT Mask */
+
+#define SCB_ICSR_ISRPENDING_Pos            22U                                            /*!< SCB ICSR: ISRPENDING Position */
+#define SCB_ICSR_ISRPENDING_Msk            (1UL << SCB_ICSR_ISRPENDING_Pos)               /*!< SCB ICSR: ISRPENDING Mask */
+
+#define SCB_ICSR_VECTPENDING_Pos           12U                                            /*!< SCB ICSR: VECTPENDING Position */
+#define SCB_ICSR_VECTPENDING_Msk           (0x1FFUL << SCB_ICSR_VECTPENDING_Pos)          /*!< SCB ICSR: VECTPENDING Mask */
+
+#define SCB_ICSR_VECTACTIVE_Pos             0U                                            /*!< SCB ICSR: VECTACTIVE Position */
+#define SCB_ICSR_VECTACTIVE_Msk            (0x1FFUL /*<< SCB_ICSR_VECTACTIVE_Pos*/)       /*!< SCB ICSR: VECTACTIVE Mask */
+
+/* SCB Application Interrupt and Reset Control Register Definitions */
+#define SCB_AIRCR_VECTKEY_Pos              16U                                            /*!< SCB AIRCR: VECTKEY Position */
+#define SCB_AIRCR_VECTKEY_Msk              (0xFFFFUL << SCB_AIRCR_VECTKEY_Pos)            /*!< SCB AIRCR: VECTKEY Mask */
+
+#define SCB_AIRCR_VECTKEYSTAT_Pos          16U                                            /*!< SCB AIRCR: VECTKEYSTAT Position */
+#define SCB_AIRCR_VECTKEYSTAT_Msk          (0xFFFFUL << SCB_AIRCR_VECTKEYSTAT_Pos)        /*!< SCB AIRCR: VECTKEYSTAT Mask */
+
+#define SCB_AIRCR_ENDIANESS_Pos            15U                                            /*!< SCB AIRCR: ENDIANESS Position */
+#define SCB_AIRCR_ENDIANESS_Msk            (1UL << SCB_AIRCR_ENDIANESS_Pos)               /*!< SCB AIRCR: ENDIANESS Mask */
+
+#define SCB_AIRCR_SYSRESETREQ_Pos           2U                                            /*!< SCB AIRCR: SYSRESETREQ Position */
+#define SCB_AIRCR_SYSRESETREQ_Msk          (1UL << SCB_AIRCR_SYSRESETREQ_Pos)             /*!< SCB AIRCR: SYSRESETREQ Mask */
+
+#define SCB_AIRCR_VECTCLRACTIVE_Pos         1U                                            /*!< SCB AIRCR: VECTCLRACTIVE Position */
+#define SCB_AIRCR_VECTCLRACTIVE_Msk        (1UL << SCB_AIRCR_VECTCLRACTIVE_Pos)           /*!< SCB AIRCR: VECTCLRACTIVE Mask */
+
+/* SCB System Control Register Definitions */
+#define SCB_SCR_SEVONPEND_Pos               4U                                            /*!< SCB SCR: SEVONPEND Position */
+#define SCB_SCR_SEVONPEND_Msk              (1UL << SCB_SCR_SEVONPEND_Pos)                 /*!< SCB SCR: SEVONPEND Mask */
+
+#define SCB_SCR_SLEEPDEEP_Pos               2U                                            /*!< SCB SCR: SLEEPDEEP Position */
+#define SCB_SCR_SLEEPDEEP_Msk              (1UL << SCB_SCR_SLEEPDEEP_Pos)                 /*!< SCB SCR: SLEEPDEEP Mask */
+
+#define SCB_SCR_SLEEPONEXIT_Pos             1U                                            /*!< SCB SCR: SLEEPONEXIT Position */
+#define SCB_SCR_SLEEPONEXIT_Msk            (1UL << SCB_SCR_SLEEPONEXIT_Pos)               /*!< SCB SCR: SLEEPONEXIT Mask */
+
+/* SCB Configuration Control Register Definitions */
+#define SCB_CCR_STKALIGN_Pos                9U                                            /*!< SCB CCR: STKALIGN Position */
+#define SCB_CCR_STKALIGN_Msk               (1UL << SCB_CCR_STKALIGN_Pos)                  /*!< SCB CCR: STKALIGN Mask */
+
+#define SCB_CCR_UNALIGN_TRP_Pos             3U                                            /*!< SCB CCR: UNALIGN_TRP Position */
+#define SCB_CCR_UNALIGN_TRP_Msk            (1UL << SCB_CCR_UNALIGN_TRP_Pos)               /*!< SCB CCR: UNALIGN_TRP Mask */
+
+/* SCB System Handler Control and State Register Definitions */
+#define SCB_SHCSR_SVCALLPENDED_Pos         15U                                            /*!< SCB SHCSR: SVCALLPENDED Position */
+#define SCB_SHCSR_SVCALLPENDED_Msk         (1UL << SCB_SHCSR_SVCALLPENDED_Pos)            /*!< SCB SHCSR: SVCALLPENDED Mask */
+
+/*@} end of group CMSIS_SCB */
+
+
+/**
+  \ingroup  CMSIS_core_register
+  \defgroup CMSIS_SCnSCB System Controls not in SCB (SCnSCB)
+  \brief    Type definitions for the System Control and ID Register not in the SCB
+  @{
+ */
+
+/**
+  \brief  Structure type to access the System Control and ID Register not in the SCB.
+ */
+typedef struct
+{
+        uint32_t RESERVED0[2U];
+  __IOM uint32_t ACTLR;                  /*!< Offset: 0x008 (R/W)  Auxiliary Control Register */
+} SCnSCB_Type;
+
+/* Auxiliary Control Register Definitions */
+#define SCnSCB_ACTLR_ITCMUAEN_Pos            4U                                        /*!< ACTLR: Instruction TCM Upper Alias Enable Position */
+#define SCnSCB_ACTLR_ITCMUAEN_Msk           (1UL << SCnSCB_ACTLR_ITCMUAEN_Pos)         /*!< ACTLR: Instruction TCM Upper Alias Enable Mask */
+
+#define SCnSCB_ACTLR_ITCMLAEN_Pos            3U                                        /*!< ACTLR: Instruction TCM Lower Alias Enable Position */
+#define SCnSCB_ACTLR_ITCMLAEN_Msk           (1UL << SCnSCB_ACTLR_ITCMLAEN_Pos)         /*!< ACTLR: Instruction TCM Lower Alias Enable Mask */
+
+/*@} end of group CMSIS_SCnotSCB */
+
+
+/**
+  \ingroup  CMSIS_core_register
+  \defgroup CMSIS_SysTick     System Tick Timer (SysTick)
+  \brief    Type definitions for the System Timer Registers.
+  @{
+ */
+
+/**
+  \brief  Structure type to access the System Timer (SysTick).
+ */
+typedef struct
+{
+  __IOM uint32_t CTRL;                   /*!< Offset: 0x000 (R/W)  SysTick Control and Status Register */
+  __IOM uint32_t LOAD;                   /*!< Offset: 0x004 (R/W)  SysTick Reload Value Register */
+  __IOM uint32_t VAL;                    /*!< Offset: 0x008 (R/W)  SysTick Current Value Register */
+  __IM  uint32_t CALIB;                  /*!< Offset: 0x00C (R/ )  SysTick Calibration Register */
+} SysTick_Type;
+
+/* SysTick Control / Status Register Definitions */
+#define SysTick_CTRL_COUNTFLAG_Pos         16U                                            /*!< SysTick CTRL: COUNTFLAG Position */
+#define SysTick_CTRL_COUNTFLAG_Msk         (1UL << SysTick_CTRL_COUNTFLAG_Pos)            /*!< SysTick CTRL: COUNTFLAG Mask */
+
+#define SysTick_CTRL_CLKSOURCE_Pos          2U                                            /*!< SysTick CTRL: CLKSOURCE Position */
+#define SysTick_CTRL_CLKSOURCE_Msk         (1UL << SysTick_CTRL_CLKSOURCE_Pos)            /*!< SysTick CTRL: CLKSOURCE Mask */
+
+#define SysTick_CTRL_TICKINT_Pos            1U                                            /*!< SysTick CTRL: TICKINT Position */
+#define SysTick_CTRL_TICKINT_Msk           (1UL << SysTick_CTRL_TICKINT_Pos)              /*!< SysTick CTRL: TICKINT Mask */
+
+#define SysTick_CTRL_ENABLE_Pos             0U                                            /*!< SysTick CTRL: ENABLE Position */
+#define SysTick_CTRL_ENABLE_Msk            (1UL /*<< SysTick_CTRL_ENABLE_Pos*/)           /*!< SysTick CTRL: ENABLE Mask */
+
+/* SysTick Reload Register Definitions */
+#define SysTick_LOAD_RELOAD_Pos             0U                                            /*!< SysTick LOAD: RELOAD Position */
+#define SysTick_LOAD_RELOAD_Msk            (0xFFFFFFUL /*<< SysTick_LOAD_RELOAD_Pos*/)    /*!< SysTick LOAD: RELOAD Mask */
+
+/* SysTick Current Register Definitions */
+#define SysTick_VAL_CURRENT_Pos             0U                                            /*!< SysTick VAL: CURRENT Position */
+#define SysTick_VAL_CURRENT_Msk            (0xFFFFFFUL /*<< SysTick_VAL_CURRENT_Pos*/)    /*!< SysTick VAL: CURRENT Mask */
+
+/* SysTick Calibration Register Definitions */
+#define SysTick_CALIB_NOREF_Pos            31U                                            /*!< SysTick CALIB: NOREF Position */
+#define SysTick_CALIB_NOREF_Msk            (1UL << SysTick_CALIB_NOREF_Pos)               /*!< SysTick CALIB: NOREF Mask */
+
+#define SysTick_CALIB_SKEW_Pos             30U                                            /*!< SysTick CALIB: SKEW Position */
+#define SysTick_CALIB_SKEW_Msk             (1UL << SysTick_CALIB_SKEW_Pos)                /*!< SysTick CALIB: SKEW Mask */
+
+#define SysTick_CALIB_TENMS_Pos             0U                                            /*!< SysTick CALIB: TENMS Position */
+#define SysTick_CALIB_TENMS_Msk            (0xFFFFFFUL /*<< SysTick_CALIB_TENMS_Pos*/)    /*!< SysTick CALIB: TENMS Mask */
+
+/*@} end of group CMSIS_SysTick */
+
+
+/**
+  \ingroup  CMSIS_core_register
+  \defgroup CMSIS_CoreDebug       Core Debug Registers (CoreDebug)
+  \brief    Cortex-M1 Core Debug Registers (DCB registers, SHCSR, and DFSR) are only accessible over DAP and not via processor.
+            Therefore they are not covered by the Cortex-M1 header file.
+  @{
+ */
+/*@} end of group CMSIS_CoreDebug */
+
+
+/**
+  \ingroup    CMSIS_core_register
+  \defgroup   CMSIS_core_bitfield     Core register bit field macros
+  \brief      Macros for use with bit field definitions (xxx_Pos, xxx_Msk).
+  @{
+ */
+
+/**
+  \brief   Mask and shift a bit field value for use in a register bit range.
+  \param[in] field  Name of the register bit field.
+  \param[in] value  Value of the bit field. This parameter is interpreted as an uint32_t type.
+  \return           Masked and shifted value.
+*/
+#define _VAL2FLD(field, value)    (((uint32_t)(value) << field ## _Pos) & field ## _Msk)
+
+/**
+  \brief     Mask and shift a register value to extract a bit field value.
+  \param[in] field  Name of the register bit field.
+  \param[in] value  Value of register. This parameter is interpreted as an uint32_t type.
+  \return           Masked and shifted bit field value.
+*/
+#define _FLD2VAL(field, value)    (((uint32_t)(value) & field ## _Msk) >> field ## _Pos)
+
+/*@} end of group CMSIS_core_bitfield */
+
+
+/**
+  \ingroup    CMSIS_core_register
+  \defgroup   CMSIS_core_base     Core Definitions
+  \brief      Definitions for base addresses, unions, and structures.
+  @{
+ */
+
+/* Memory mapping of Core Hardware */
+#define SCS_BASE            (0xE000E000UL)                            /*!< System Control Space Base Address */
+#define SysTick_BASE        (SCS_BASE +  0x0010UL)                    /*!< SysTick Base Address */
+#define NVIC_BASE           (SCS_BASE +  0x0100UL)                    /*!< NVIC Base Address */
+#define SCB_BASE            (SCS_BASE +  0x0D00UL)                    /*!< System Control Block Base Address */
+
+#define SCnSCB              ((SCnSCB_Type    *)     SCS_BASE      )   /*!< System control Register not in SCB */
+#define SCB                 ((SCB_Type       *)     SCB_BASE      )   /*!< SCB configuration struct */
+#define SysTick             ((SysTick_Type   *)     SysTick_BASE  )   /*!< SysTick configuration struct */
+#define NVIC                ((NVIC_Type      *)     NVIC_BASE     )   /*!< NVIC configuration struct */
+
+
+/*@} */
+
+
+
+/*******************************************************************************
+ *                Hardware Abstraction Layer
+  Core Function Interface contains:
+  - Core NVIC Functions
+  - Core SysTick Functions
+  - Core Register Access Functions
+ ******************************************************************************/
+/**
+  \defgroup CMSIS_Core_FunctionInterface Functions and Instructions Reference
+*/
+
+
+
+/* ##########################   NVIC functions  #################################### */
+/**
+  \ingroup  CMSIS_Core_FunctionInterface
+  \defgroup CMSIS_Core_NVICFunctions NVIC Functions
+  \brief    Functions that manage interrupts and exceptions via the NVIC.
+  @{
+ */
+
+#ifdef CMSIS_NVIC_VIRTUAL
+  #ifndef CMSIS_NVIC_VIRTUAL_HEADER_FILE
+    #define CMSIS_NVIC_VIRTUAL_HEADER_FILE "cmsis_nvic_virtual.h"
+  #endif
+  #include CMSIS_NVIC_VIRTUAL_HEADER_FILE
+#else
+  #define NVIC_SetPriorityGrouping    __NVIC_SetPriorityGrouping
+  #define NVIC_GetPriorityGrouping    __NVIC_GetPriorityGrouping
+  #define NVIC_EnableIRQ              __NVIC_EnableIRQ
+  #define NVIC_GetEnableIRQ           __NVIC_GetEnableIRQ
+  #define NVIC_DisableIRQ             __NVIC_DisableIRQ
+  #define NVIC_GetPendingIRQ          __NVIC_GetPendingIRQ
+  #define NVIC_SetPendingIRQ          __NVIC_SetPendingIRQ
+  #define NVIC_ClearPendingIRQ        __NVIC_ClearPendingIRQ
+/*#define NVIC_GetActive              __NVIC_GetActive             not available for Cortex-M1 */
+  #define NVIC_SetPriority            __NVIC_SetPriority
+  #define NVIC_GetPriority            __NVIC_GetPriority
+  #define NVIC_SystemReset            __NVIC_SystemReset
+#endif /* CMSIS_NVIC_VIRTUAL */
+
+#ifdef CMSIS_VECTAB_VIRTUAL
+  #ifndef CMSIS_VECTAB_VIRTUAL_HEADER_FILE
+    #define CMSIS_VECTAB_VIRTUAL_HEADER_FILE "cmsis_vectab_virtual.h"
+  #endif
+  #include CMSIS_VECTAB_VIRTUAL_HEADER_FILE
+#else
+  #define NVIC_SetVector              __NVIC_SetVector
+  #define NVIC_GetVector              __NVIC_GetVector
+#endif  /* (CMSIS_VECTAB_VIRTUAL) */
+
+#define NVIC_USER_IRQ_OFFSET          16
+
+
+/* The following EXC_RETURN values are saved to the LR on exception entry */
+#define EXC_RETURN_HANDLER         (0xFFFFFFF1UL)     /* return to Handler mode, uses MSP after return                               */
+#define EXC_RETURN_THREAD_MSP      (0xFFFFFFF9UL)     /* return to Thread mode, uses MSP after return                                */
+#define EXC_RETURN_THREAD_PSP      (0xFFFFFFFDUL)     /* return to Thread mode, uses PSP after return                                */
+
+
+/* Interrupt Priorities are WORD accessible only under Armv6-M                  */
+/* The following MACROS handle generation of the register offset and byte masks */
+#define _BIT_SHIFT(IRQn)         (  ((((uint32_t)(int32_t)(IRQn))         )      &  0x03UL) * 8UL)
+#define _SHP_IDX(IRQn)           ( (((((uint32_t)(int32_t)(IRQn)) & 0x0FUL)-8UL) >>    2UL)      )
+#define _IP_IDX(IRQn)            (   (((uint32_t)(int32_t)(IRQn))                >>    2UL)      )
+
+#define __NVIC_SetPriorityGrouping(X) (void)(X)
+#define __NVIC_GetPriorityGrouping()  (0U)
+
+/**
+  \brief   Enable Interrupt
+  \details Enables a device specific interrupt in the NVIC interrupt controller.
+  \param [in]      IRQn  Device specific interrupt number.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE void __NVIC_EnableIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->ISER[0U] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+  }
+}
+
+
+/**
+  \brief   Get Interrupt Enable status
+  \details Returns a device specific interrupt enable status from the NVIC interrupt controller.
+  \param [in]      IRQn  Device specific interrupt number.
+  \return             0  Interrupt is not enabled.
+  \return             1  Interrupt is enabled.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE uint32_t __NVIC_GetEnableIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    return((uint32_t)(((NVIC->ISER[0U] & (1UL << (((uint32_t)IRQn) & 0x1FUL))) != 0UL) ? 1UL : 0UL));
+  }
+  else
+  {
+    return(0U);
+  }
+}
+
+
+/**
+  \brief   Disable Interrupt
+  \details Disables a device specific interrupt in the NVIC interrupt controller.
+  \param [in]      IRQn  Device specific interrupt number.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE void __NVIC_DisableIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->ICER[0U] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+    __DSB();
+    __ISB();
+  }
+}
+
+
+/**
+  \brief   Get Pending Interrupt
+  \details Reads the NVIC pending register and returns the pending bit for the specified device specific interrupt.
+  \param [in]      IRQn  Device specific interrupt number.
+  \return             0  Interrupt status is not pending.
+  \return             1  Interrupt status is pending.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE uint32_t __NVIC_GetPendingIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    return((uint32_t)(((NVIC->ISPR[0U] & (1UL << (((uint32_t)IRQn) & 0x1FUL))) != 0UL) ? 1UL : 0UL));
+  }
+  else
+  {
+    return(0U);
+  }
+}
+
+
+/**
+  \brief   Set Pending Interrupt
+  \details Sets the pending bit of a device specific interrupt in the NVIC pending register.
+  \param [in]      IRQn  Device specific interrupt number.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE void __NVIC_SetPendingIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->ISPR[0U] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+  }
+}
+
+
+/**
+  \brief   Clear Pending Interrupt
+  \details Clears the pending bit of a device specific interrupt in the NVIC pending register.
+  \param [in]      IRQn  Device specific interrupt number.
+  \note    IRQn must not be negative.
+ */
+__STATIC_INLINE void __NVIC_ClearPendingIRQ(IRQn_Type IRQn)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->ICPR[0U] = (uint32_t)(1UL << (((uint32_t)IRQn) & 0x1FUL));
+  }
+}
+
+
+/**
+  \brief   Set Interrupt Priority
+  \details Sets the priority of a device specific interrupt or a processor exception.
+           The interrupt number can be positive to specify a device specific interrupt,
+           or negative to specify a processor exception.
+  \param [in]      IRQn  Interrupt number.
+  \param [in]  priority  Priority to set.
+  \note    The priority cannot be set for every processor exception.
+ */
+__STATIC_INLINE void __NVIC_SetPriority(IRQn_Type IRQn, uint32_t priority)
+{
+  if ((int32_t)(IRQn) >= 0)
+  {
+    NVIC->IP[_IP_IDX(IRQn)]  = ((uint32_t)(NVIC->IP[_IP_IDX(IRQn)]  & ~(0xFFUL << _BIT_SHIFT(IRQn))) |
+       (((priority << (8U - __NVIC_PRIO_BITS)) & (uint32_t)0xFFUL) << _BIT_SHIFT(IRQn)));
+  }
+  else
+  {
+    SCB->SHP[_SHP_IDX(IRQn)] = ((uint32_t)(SCB->SHP[_SHP_IDX(IRQn)] & ~(0xFFUL << _BIT_SHIFT(IRQn))) |
+       (((priority << (8U - __NVIC_PRIO_BITS)) & (uint32_t)0xFFUL) << _BIT_SHIFT(IRQn)));
+  }
+}
+
+
+/**
+  \brief   Get Interrupt Priority
+  \details Reads the priority of a device specific interrupt or a processor exception.
+           The interrupt number can be positive to specify a device specific interrupt,
+           or negative to specify a processor exception.
+  \param [in]   IRQn  Interrupt number.
+  \return             Interrupt Priority.
+                      Value is aligned automatically to the implemented priority bits of the microcontroller.
+ */
+__STATIC_INLINE uint32_t __NVIC_GetPriority(IRQn_Type IRQn)
+{
+
+  if ((int32_t)(IRQn) >= 0)
+  {
+    return((uint32_t)(((NVIC->IP[ _IP_IDX(IRQn)] >> _BIT_SHIFT(IRQn) ) & (uint32_t)0xFFUL) >> (8U - __NVIC_PRIO_BITS)));
+  }
+  else
+  {
+    return((uint32_t)(((SCB->SHP[_SHP_IDX(IRQn)] >> _BIT_SHIFT(IRQn) ) & (uint32_t)0xFFUL) >> (8U - __NVIC_PRIO_BITS)));
+  }
+}
+
+
+/**
+  \brief   Encode Priority
+  \details Encodes the priority for an interrupt with the given priority group,
+           preemptive priority value, and subpriority value.
+           In case of a conflict between priority grouping and available
+           priority bits (__NVIC_PRIO_BITS), the smallest possible priority group is set.
+  \param [in]     PriorityGroup  Used priority group.
+  \param [in]   PreemptPriority  Preemptive priority value (starting from 0).
+  \param [in]       SubPriority  Subpriority value (starting from 0).
+  \return                        Encoded priority. Value can be used in the function \ref NVIC_SetPriority().
+ */
+__STATIC_INLINE uint32_t NVIC_EncodePriority (uint32_t PriorityGroup, uint32_t PreemptPriority, uint32_t SubPriority)
+{
+  uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL);   /* only values 0..7 are used          */
+  uint32_t PreemptPriorityBits;
+  uint32_t SubPriorityBits;
+
+  PreemptPriorityBits = ((7UL - PriorityGroupTmp) > (uint32_t)(__NVIC_PRIO_BITS)) ? (uint32_t)(__NVIC_PRIO_BITS) : (uint32_t)(7UL - PriorityGroupTmp);
+  SubPriorityBits     = ((PriorityGroupTmp + (uint32_t)(__NVIC_PRIO_BITS)) < (uint32_t)7UL) ? (uint32_t)0UL : (uint32_t)((PriorityGroupTmp - 7UL) + (uint32_t)(__NVIC_PRIO_BITS));
+
+  return (
+           ((PreemptPriority & (uint32_t)((1UL << (PreemptPriorityBits)) - 1UL)) << SubPriorityBits) |
+           ((SubPriority     & (uint32_t)((1UL << (SubPriorityBits    )) - 1UL)))
+         );
+}
+
+
+/**
+  \brief   Decode Priority
+  \details Decodes an interrupt priority value with a given priority group to
+           preemptive priority value and subpriority value.
+           In case of a conflict between priority grouping and available
+           priority bits (__NVIC_PRIO_BITS) the smallest possible priority group is set.
+  \param [in]         Priority   Priority value, which can be retrieved with the function \ref NVIC_GetPriority().
+  \param [in]     PriorityGroup  Used priority group.
+  \param [out] pPreemptPriority  Preemptive priority value (starting from 0).
+  \param [out]     pSubPriority  Subpriority value (starting from 0).
+ */
+__STATIC_INLINE void NVIC_DecodePriority (uint32_t Priority, uint32_t PriorityGroup, uint32_t* const pPreemptPriority, uint32_t* const pSubPriority)
+{
+  uint32_t PriorityGroupTmp = (PriorityGroup & (uint32_t)0x07UL);   /* only values 0..7 are used          */
+  uint32_t PreemptPriorityBits;
+  uint32_t SubPriorityBits;
+
+  PreemptPriorityBits = ((7UL - PriorityGroupTmp) > (uint32_t)(__NVIC_PRIO_BITS)) ? (uint32_t)(__NVIC_PRIO_BITS) : (uint32_t)(7UL - PriorityGroupTmp);
+  SubPriorityBits     = ((PriorityGroupTmp + (uint32_t)(__NVIC_PRIO_BITS)) < (uint32_t)7UL) ? (uint32_t)0UL : (uint32_t)((PriorityGroupTmp - 7UL) + (uint32_t)(__NVIC_PRIO_BITS));
+
+  *pPreemptPriority = (Priority >> SubPriorityBits) & (uint32_t)((1UL << (PreemptPriorityBits)) - 1UL);
+  *pSubPriority     = (Priority                   ) & (uint32_t)((1UL << (SubPriorityBits    )) - 1UL);
+}
+
+
+
+/**
+  \brief   Set Interrupt Vector
+  \details Sets an interrupt vector in SRAM based interrupt vector table.
+           The interrupt number can be positive to specify a device specific interrupt,
+           or negative to specify a processor exception.
+           Address 0 must be mapped to SRAM.
+  \param [in]   IRQn      Interrupt number
+  \param [in]   vector    Address of interrupt handler function
+ */
+__STATIC_INLINE void __NVIC_SetVector(IRQn_Type IRQn, uint32_t vector)
+{
+  uint32_t *vectors = (uint32_t *)0x0U;
+  vectors[(int32_t)IRQn + NVIC_USER_IRQ_OFFSET] = vector;
+}
+
+
+/**
+  \brief   Get Interrupt Vector
+  \details Reads an interrupt vector from interrupt vector table.
+           The interrupt number can be positive to specify a device specific interrupt,
+           or negative to specify a processor exception.
+  \param [in]   IRQn      Interrupt number.
+  \return                 Address of interrupt handler function
+ */
+__STATIC_INLINE uint32_t __NVIC_GetVector(IRQn_Type IRQn)
+{
+  uint32_t *vectors = (uint32_t *)0x0U;
+  return vectors[(int32_t)IRQn + NVIC_USER_IRQ_OFFSET];
+}
+
+
+/**
+  \brief   System Reset
+  \details Initiates a system reset request to reset the MCU.
+ */
+__NO_RETURN __STATIC_INLINE void __NVIC_SystemReset(void)
+{
+  __DSB();                                                          /* Ensure all outstanding memory accesses including
+                                                                       buffered writes are completed before reset */
+  SCB->AIRCR  = ((0x5FAUL << SCB_AIRCR_VECTKEY_Pos) |
+                 SCB_AIRCR_SYSRESETREQ_Msk);
+  __DSB();                                                          /* Ensure completion of memory access */
+
+  for(;;)                                                           /* wait until reset */
+  {
+    __NOP();
+  }
+}
+
+/*@} end of CMSIS_Core_NVICFunctions */
+
+
+/* ##########################  FPU functions  #################################### */
+/**
+  \ingroup  CMSIS_Core_FunctionInterface
+  \defgroup CMSIS_Core_FpuFunctions FPU Functions
+  \brief    Function that provides FPU type.
+  @{
+ */
+
+/**
+  \brief   get FPU type
+  \details returns the FPU type
+  \returns
+   - \b  0: No FPU
+   - \b  1: Single precision FPU
+   - \b  2: Double + Single precision FPU
+ */
+__STATIC_INLINE uint32_t SCB_GetFPUType(void)
+{
+    return 0U;           /* No FPU */
+}
+
+
+/*@} end of CMSIS_Core_FpuFunctions */
+
+
+
+/* ##################################    SysTick function  ############################################ */
+/**
+  \ingroup  CMSIS_Core_FunctionInterface
+  \defgroup CMSIS_Core_SysTickFunctions SysTick Functions
+  \brief    Functions that configure the System.
+  @{
+ */
+
+#if defined (__Vendor_SysTickConfig) && (__Vendor_SysTickConfig == 0U)
+
+/**
+  \brief   System Tick Configuration
+  \details Initializes the System Timer and its interrupt, and starts the System Tick Timer.
+           Counter is in free running mode to generate periodic interrupts.
+  \param [in]  ticks  Number of ticks between two interrupts.
+  \return          0  Function succeeded.
+  \return          1  Function failed.
+  \note    When the variable <b>__Vendor_SysTickConfig</b> is set to 1, then the
+           function <b>SysTick_Config</b> is not included. In this case, the file <b><i>device</i>.h</b>
+           must contain a vendor-specific implementation of this function.
+ */
+__STATIC_INLINE uint32_t SysTick_Config(uint32_t ticks)
+{
+  if ((ticks - 1UL) > SysTick_LOAD_RELOAD_Msk)
+  {
+    return (1UL);                                                   /* Reload value impossible */
+  }
+
+  SysTick->LOAD  = (uint32_t)(ticks - 1UL);                         /* set reload register */
+  NVIC_SetPriority (SysTick_IRQn, (1UL << __NVIC_PRIO_BITS) - 1UL); /* set Priority for Systick Interrupt */
+  SysTick->VAL   = 0UL;                                             /* Load the SysTick Counter Value */
+  SysTick->CTRL  = SysTick_CTRL_CLKSOURCE_Msk |
+                   SysTick_CTRL_TICKINT_Msk   |
+                   SysTick_CTRL_ENABLE_Msk;                         /* Enable SysTick IRQ and SysTick Timer */
+  return (0UL);                                                     /* Function successful */
+}
+
+#endif
+
+/*@} end of CMSIS_Core_SysTickFunctions */
+
+
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* __CORE_CM1_H_DEPENDANT */
+
+#endif /* __CMSIS_GENERIC */
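A short sketch of the NVIC helpers from core_cm1.h above; UART0_IRQn stands in for a device-specific interrupt number and is not defined by this header.

  /* UART0_IRQn is a placeholder; real IRQ numbers come from the device header. */
  NVIC_SetPriority(UART0_IRQn, 1U);         /* 0..3 with the default 2-bit __NVIC_PRIO_BITS */
  NVIC_EnableIRQ(UART0_IRQn);

  if (NVIC_GetPendingIRQ(UART0_IRQn) != 0U)
  {
    NVIC_ClearPendingIRQ(UART0_IRQn);       /* drop a request that is pending but not yet taken */
  }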

File diffs are limited because there are too many
+ 1993 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm23.h


File diffs are limited because there are too many
+ 1941 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm3.h


File diffs are limited because there are too many
+ 3002 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm33.h


File diffs are limited because there are too many
+ 2129 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm4.h


File diffs are limited because there are too many
+ 2671 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_cm7.h


File diffs are limited because there are too many
+ 1022 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_sc000.h


File diffs are limited because there are too many
+ 1915 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/core_sc300.h


+ 270 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/mpu_armv7.h

@@ -0,0 +1,270 @@
+/******************************************************************************
+ * @file     mpu_armv7.h
+ * @brief    CMSIS MPU API for Armv7-M MPU
+ * @version  V5.0.4
+ * @date     10. January 2018
+ ******************************************************************************/
+/*
+ * Copyright (c) 2017-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ 
+#if   defined ( __ICCARM__ )
+  #pragma system_include         /* treat file as system include file for MISRA check */
+#elif defined (__clang__)
+  #pragma clang system_header    /* treat file as system include file */
+#endif
+ 
+#ifndef ARM_MPU_ARMV7_H
+#define ARM_MPU_ARMV7_H
+
+#define ARM_MPU_REGION_SIZE_32B      ((uint8_t)0x04U) ///!< MPU Region Size 32 Bytes
+#define ARM_MPU_REGION_SIZE_64B      ((uint8_t)0x05U) ///!< MPU Region Size 64 Bytes
+#define ARM_MPU_REGION_SIZE_128B     ((uint8_t)0x06U) ///!< MPU Region Size 128 Bytes
+#define ARM_MPU_REGION_SIZE_256B     ((uint8_t)0x07U) ///!< MPU Region Size 256 Bytes
+#define ARM_MPU_REGION_SIZE_512B     ((uint8_t)0x08U) ///!< MPU Region Size 512 Bytes
+#define ARM_MPU_REGION_SIZE_1KB      ((uint8_t)0x09U) ///!< MPU Region Size 1 KByte
+#define ARM_MPU_REGION_SIZE_2KB      ((uint8_t)0x0AU) ///!< MPU Region Size 2 KBytes
+#define ARM_MPU_REGION_SIZE_4KB      ((uint8_t)0x0BU) ///!< MPU Region Size 4 KBytes
+#define ARM_MPU_REGION_SIZE_8KB      ((uint8_t)0x0CU) ///!< MPU Region Size 8 KBytes
+#define ARM_MPU_REGION_SIZE_16KB     ((uint8_t)0x0DU) ///!< MPU Region Size 16 KBytes
+#define ARM_MPU_REGION_SIZE_32KB     ((uint8_t)0x0EU) ///!< MPU Region Size 32 KBytes
+#define ARM_MPU_REGION_SIZE_64KB     ((uint8_t)0x0FU) ///!< MPU Region Size 64 KBytes
+#define ARM_MPU_REGION_SIZE_128KB    ((uint8_t)0x10U) ///!< MPU Region Size 128 KBytes
+#define ARM_MPU_REGION_SIZE_256KB    ((uint8_t)0x11U) ///!< MPU Region Size 256 KBytes
+#define ARM_MPU_REGION_SIZE_512KB    ((uint8_t)0x12U) ///!< MPU Region Size 512 KBytes
+#define ARM_MPU_REGION_SIZE_1MB      ((uint8_t)0x13U) ///!< MPU Region Size 1 MByte
+#define ARM_MPU_REGION_SIZE_2MB      ((uint8_t)0x14U) ///!< MPU Region Size 2 MBytes
+#define ARM_MPU_REGION_SIZE_4MB      ((uint8_t)0x15U) ///!< MPU Region Size 4 MBytes
+#define ARM_MPU_REGION_SIZE_8MB      ((uint8_t)0x16U) ///!< MPU Region Size 8 MBytes
+#define ARM_MPU_REGION_SIZE_16MB     ((uint8_t)0x17U) ///!< MPU Region Size 16 MBytes
+#define ARM_MPU_REGION_SIZE_32MB     ((uint8_t)0x18U) ///!< MPU Region Size 32 MBytes
+#define ARM_MPU_REGION_SIZE_64MB     ((uint8_t)0x19U) ///!< MPU Region Size 64 MBytes
+#define ARM_MPU_REGION_SIZE_128MB    ((uint8_t)0x1AU) ///!< MPU Region Size 128 MBytes
+#define ARM_MPU_REGION_SIZE_256MB    ((uint8_t)0x1BU) ///!< MPU Region Size 256 MBytes
+#define ARM_MPU_REGION_SIZE_512MB    ((uint8_t)0x1CU) ///!< MPU Region Size 512 MBytes
+#define ARM_MPU_REGION_SIZE_1GB      ((uint8_t)0x1DU) ///!< MPU Region Size 1 GByte
+#define ARM_MPU_REGION_SIZE_2GB      ((uint8_t)0x1EU) ///!< MPU Region Size 2 GBytes
+#define ARM_MPU_REGION_SIZE_4GB      ((uint8_t)0x1FU) ///!< MPU Region Size 4 GBytes
+
+#define ARM_MPU_AP_NONE 0U ///!< MPU Access Permission no access
+#define ARM_MPU_AP_PRIV 1U ///!< MPU Access Permission privileged access only
+#define ARM_MPU_AP_URO  2U ///!< MPU Access Permission unprivileged access read-only
+#define ARM_MPU_AP_FULL 3U ///!< MPU Access Permission full access
+#define ARM_MPU_AP_PRO  5U ///!< MPU Access Permission privileged access read-only
+#define ARM_MPU_AP_RO   6U ///!< MPU Access Permission read-only access
+
+/** MPU Region Base Address Register Value
+*
+* \param Region The region to be configured, number 0 to 15.
+* \param BaseAddress The base address for the region.
+*/
+#define ARM_MPU_RBAR(Region, BaseAddress) \
+  (((BaseAddress) & MPU_RBAR_ADDR_Msk) |  \
+   ((Region) & MPU_RBAR_REGION_Msk)    |  \
+   (MPU_RBAR_VALID_Msk))
+
+/**
+* MPU Memory Access Attributes
+* 
+* \param TypeExtField      Type extension field, allows you to configure memory access type, for example strongly ordered, peripheral.
+* \param IsShareable       Region is shareable between multiple bus masters.
+* \param IsCacheable       Region is cacheable, i.e. its value may be kept in cache.
+* \param IsBufferable      Region is bufferable, i.e. using write-back caching. Cacheable but non-bufferable regions use write-through policy.
+*/  
+#define ARM_MPU_ACCESS_(TypeExtField, IsShareable, IsCacheable, IsBufferable)   \
+  ((((TypeExtField ) << MPU_RASR_TEX_Pos) & MPU_RASR_TEX_Msk)                 | \
+   (((IsShareable ) << MPU_RASR_S_Pos) & MPU_RASR_S_Msk)                      | \
+   (((IsCacheable ) << MPU_RASR_C_Pos) & MPU_RASR_C_Msk)                      | \
+   (((IsBufferable ) << MPU_RASR_B_Pos) & MPU_RASR_B_Msk))
+
+/**
+* MPU Region Attribute and Size Register Value
+* 
+* \param DisableExec       Instruction access disable bit, 1= disable instruction fetches.
+* \param AccessPermission  Data access permissions, allows you to configure read/write access for User and Privileged mode.
+* \param AccessAttributes  Memory access attribution, see \ref ARM_MPU_ACCESS_.
+* \param SubRegionDisable  Sub-region disable field.
+* \param Size              Region size of the region to be configured, for example 4K, 8K.
+*/
+#define ARM_MPU_RASR_EX(DisableExec, AccessPermission, AccessAttributes, SubRegionDisable, Size)      \
+  ((((DisableExec ) << MPU_RASR_XN_Pos) & MPU_RASR_XN_Msk)                                          | \
+   (((AccessPermission) << MPU_RASR_AP_Pos) & MPU_RASR_AP_Msk)                                      | \
+   (((AccessAttributes) ) & (MPU_RASR_TEX_Msk | MPU_RASR_S_Msk | MPU_RASR_C_Msk | MPU_RASR_B_Msk))  | \
+   (((SubRegionDisable) << MPU_RASR_SRD_Pos) & MPU_RASR_SRD_Msk)                                    | \
+   (((Size) << MPU_RASR_SIZE_Pos) & MPU_RASR_SIZE_Msk)                                              | \
+   (MPU_RASR_ENABLE_Msk))
+  
+/**
+* MPU Region Attribute and Size Register Value
+* 
+* \param DisableExec       Instruction access disable bit, 1= disable instruction fetches.
+* \param AccessPermission  Data access permissions, allows you to configure read/write access for User and Privileged mode.
+* \param TypeExtField      Type extension field, allows you to configure memory access type, for example strongly ordered, peripheral.
+* \param IsShareable       Region is shareable between multiple bus masters.
+* \param IsCacheable       Region is cacheable, i.e. its value may be kept in cache.
+* \param IsBufferable      Region is bufferable, i.e. using write-back caching. Cacheable but non-bufferable regions use write-through policy.
+* \param SubRegionDisable  Sub-region disable field.
+* \param Size              Region size of the region to be configured, for example 4K, 8K.
+*/                         
+#define ARM_MPU_RASR(DisableExec, AccessPermission, TypeExtField, IsShareable, IsCacheable, IsBufferable, SubRegionDisable, Size) \
+  ARM_MPU_RASR_EX(DisableExec, AccessPermission, ARM_MPU_ACCESS_(TypeExtField, IsShareable, IsCacheable, IsBufferable), SubRegionDisable, Size)
+
+/**
+* MPU Memory Access Attribute for strongly ordered memory.
+*  - TEX: 000b
+*  - Shareable
+*  - Non-cacheable
+*  - Non-bufferable
+*/ 
+#define ARM_MPU_ACCESS_ORDERED ARM_MPU_ACCESS_(0U, 1U, 0U, 0U)
+
+/**
+* MPU Memory Access Attribute for device memory.
+*  - TEX: 000b (if non-shareable) or 010b (if shareable)
+*  - Shareable or non-shareable
+*  - Non-cacheable
+*  - Bufferable (if shareable) or non-bufferable (if non-shareable)
+*
+* \param IsShareable Configures the device memory as shareable or non-shareable.
+*/ 
+#define ARM_MPU_ACCESS_DEVICE(IsShareable) ((IsShareable) ? ARM_MPU_ACCESS_(0U, 1U, 0U, 1U) : ARM_MPU_ACCESS_(2U, 0U, 0U, 0U))
+
+/**
+* MPU Memory Access Attribute for normal memory.
+*  - TEX: 1BBb (reflecting outer cacheability rules)
+*  - Shareable or non-shareable
+*  - Cacheable or non-cacheable (reflecting inner cacheability rules)
+*  - Bufferable or non-bufferable (reflecting inner cacheability rules)
+*
+* \param OuterCp Configures the outer cache policy.
+* \param InnerCp Configures the inner cache policy.
+* \param IsShareable Configures the memory as shareable or non-shareable.
+*/ 
+#define ARM_MPU_ACCESS_NORMAL(OuterCp, InnerCp, IsShareable) ARM_MPU_ACCESS_((4U | (OuterCp)), IsShareable, ((InnerCp) & 2U), ((InnerCp) & 1U))
+
+/**
+* MPU Memory Access Attribute non-cacheable policy.
+*/
+#define ARM_MPU_CACHEP_NOCACHE 0U
+
+/**
+* MPU Memory Access Attribute write-back, write and read allocate policy.
+*/
+#define ARM_MPU_CACHEP_WB_WRA 1U
+
+/**
+* MPU Memory Access Attribute write-through, no write allocate policy.
+*/
+#define ARM_MPU_CACHEP_WT_NWA 2U
+
+/**
+* MPU Memory Access Attribute write-back, no write allocate policy.
+*/
+#define ARM_MPU_CACHEP_WB_NWA 3U
+
+
+/**
+* Struct for a single MPU Region
+*/
+typedef struct {
+  uint32_t RBAR; //!< The region base address register value (RBAR)
+  uint32_t RASR; //!< The region attribute and size register value (RASR) \ref MPU_RASR
+} ARM_MPU_Region_t;
+    
+/** Enable the MPU.
+* \param MPU_Control Default access permissions for unconfigured regions.
+*/
+__STATIC_INLINE void ARM_MPU_Enable(uint32_t MPU_Control)
+{
+  __DSB();
+  __ISB();
+  MPU->CTRL = MPU_Control | MPU_CTRL_ENABLE_Msk;
+#ifdef SCB_SHCSR_MEMFAULTENA_Msk
+  SCB->SHCSR |= SCB_SHCSR_MEMFAULTENA_Msk;
+#endif
+}
+
+/** Disable the MPU.
+*/
+__STATIC_INLINE void ARM_MPU_Disable(void)
+{
+  __DSB();
+  __ISB();
+#ifdef SCB_SHCSR_MEMFAULTENA_Msk
+  SCB->SHCSR &= ~SCB_SHCSR_MEMFAULTENA_Msk;
+#endif
+  MPU->CTRL  &= ~MPU_CTRL_ENABLE_Msk;
+}
+
+/** Clear and disable the given MPU region.
+* \param rnr Region number to be cleared.
+*/
+__STATIC_INLINE void ARM_MPU_ClrRegion(uint32_t rnr)
+{
+  MPU->RNR = rnr;
+  MPU->RASR = 0U;
+}
+
+/** Configure an MPU region.
+* \param rbar Value for RBAR register.
+* \param rasr Value for RASR register.
+*/   
+__STATIC_INLINE void ARM_MPU_SetRegion(uint32_t rbar, uint32_t rasr)
+{
+  MPU->RBAR = rbar;
+  MPU->RASR = rasr;
+}
+
+/** Configure the given MPU region.
+* \param rnr Region number to be configured.
+* \param rbar Value for RBAR register.
+* \param rasr Value for RASR register.
+*/   
+__STATIC_INLINE void ARM_MPU_SetRegionEx(uint32_t rnr, uint32_t rbar, uint32_t rasr)
+{
+  MPU->RNR = rnr;
+  MPU->RBAR = rbar;
+  MPU->RASR = rasr;
+}
+
+/** Memcopy with strictly ordered memory access, e.g. for register targets.
+* \param dst Destination data is copied to.
+* \param src Source data is copied from.
+* \param len Amount of data words to be copied.
+*/
+__STATIC_INLINE void orderedCpy(volatile uint32_t* dst, const uint32_t* __RESTRICT src, uint32_t len)
+{
+  uint32_t i;
+  for (i = 0U; i < len; ++i) 
+  {
+    dst[i] = src[i];
+  }
+}
+
+/** Load the given number of MPU regions from a table.
+* \param table Pointer to the MPU configuration table.
+* \param cnt Amount of regions to be configured.
+*/
+__STATIC_INLINE void ARM_MPU_Load(ARM_MPU_Region_t const* table, uint32_t cnt) 
+{
+  const uint32_t rowWordSize = sizeof(ARM_MPU_Region_t)/4U;
+  while (cnt > MPU_TYPE_RALIASES) {
+    orderedCpy(&(MPU->RBAR), &(table->RBAR), MPU_TYPE_RALIASES*rowWordSize);
+    table += MPU_TYPE_RALIASES;
+    cnt -= MPU_TYPE_RALIASES;
+  }
+  orderedCpy(&(MPU->RBAR), &(table->RBAR), cnt*rowWordSize);
+}
+
+#endif
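A sketch of programming one region with the Armv7-M MPU macros above, assuming a core header that provides the MPU register block (a Cortex-M3/M4/M7 class device); the region number, base address, and size are example values only.

  ARM_MPU_Disable();
  ARM_MPU_SetRegionEx(0U,                                    /* region number 0 */
      ARM_MPU_RBAR(0U, 0x20000000U),                         /* example SRAM base address */
      ARM_MPU_RASR(0U,                                       /* instruction fetches allowed */
                   ARM_MPU_AP_FULL,                          /* read/write at all privilege levels */
                   0U, 0U, 1U, 1U,                           /* TEX=0, non-shareable, cacheable, bufferable */
                   0x00U,                                    /* no sub-regions disabled */
                   ARM_MPU_REGION_SIZE_64KB));
  ARM_MPU_Enable(MPU_CTRL_PRIVDEFENA_Msk);                   /* keep the default map for privileged accesses */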

+ 333 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/mpu_armv8.h

@@ -0,0 +1,333 @@
+/******************************************************************************
+ * @file     mpu_armv8.h
+ * @brief    CMSIS MPU API for Armv8-M MPU
+ * @version  V5.0.4
+ * @date     10. January 2018
+ ******************************************************************************/
+/*
+ * Copyright (c) 2017-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if   defined ( __ICCARM__ )
+  #pragma system_include         /* treat file as system include file for MISRA check */
+#elif defined (__clang__)
+  #pragma clang system_header    /* treat file as system include file */
+#endif
+
+#ifndef ARM_MPU_ARMV8_H
+#define ARM_MPU_ARMV8_H
+
+/** \brief Attribute for device memory (outer only) */
+#define ARM_MPU_ATTR_DEVICE                           ( 0U )
+
+/** \brief Attribute for non-cacheable, normal memory */
+#define ARM_MPU_ATTR_NON_CACHEABLE                    ( 4U )
+
+/** \brief Attribute for normal memory (outer and inner)
+* \param NT Non-Transient: Set to 1 for non-transient data.
+* \param WB Write-Back: Set to 1 to use write-back update policy.
+* \param RA Read Allocation: Set to 1 to use cache allocation on read miss.
+* \param WA Write Allocation: Set to 1 to use cache allocation on write miss.
+*/
+#define ARM_MPU_ATTR_MEMORY_(NT, WB, RA, WA) \
+  (((NT & 1U) << 3U) | ((WB & 1U) << 2U) | ((RA & 1U) << 1U) | (WA & 1U))
+
+/** \brief Device memory type non Gathering, non Re-ordering, non Early Write Acknowledgement */
+#define ARM_MPU_ATTR_DEVICE_nGnRnE (0U)
+
+/** \brief Device memory type non Gathering, non Re-ordering, Early Write Acknowledgement */
+#define ARM_MPU_ATTR_DEVICE_nGnRE  (1U)
+
+/** \brief Device memory type non Gathering, Re-ordering, Early Write Acknowledgement */
+#define ARM_MPU_ATTR_DEVICE_nGRE   (2U)
+
+/** \brief Device memory type Gathering, Re-ordering, Early Write Acknowledgement */
+#define ARM_MPU_ATTR_DEVICE_GRE    (3U)
+
+/** \brief Memory Attribute
+* \param O Outer memory attributes
+* \param I O == ARM_MPU_ATTR_DEVICE: Device memory attributes, else: Inner memory attributes
+*/
+#define ARM_MPU_ATTR(O, I) (((O & 0xFU) << 4U) | (((O & 0xFU) != 0U) ? (I & 0xFU) : ((I & 0x3U) << 2U)))
+
+/** \brief Normal memory non-shareable  */
+#define ARM_MPU_SH_NON   (0U)
+
+/** \brief Normal memory outer shareable  */
+#define ARM_MPU_SH_OUTER (2U)
+
+/** \brief Normal memory inner shareable  */
+#define ARM_MPU_SH_INNER (3U)
+
+/** \brief Memory access permissions
+* \param RO Read-Only: Set to 1 for read-only memory.
+* \param NP Non-Privileged: Set to 1 for non-privileged memory.
+*/
+#define ARM_MPU_AP_(RO, NP) (((RO & 1U) << 1U) | (NP & 1U))
+
+/** \brief Region Base Address Register value
+* \param BASE The base address bits [31:5] of a memory region. The value is zero extended. Effective address gets 32 byte aligned.
+* \param SH Defines the Shareability domain for this memory region.
+* \param RO Read-Only: Set to 1 for a read-only memory region.
+* \param NP Non-Privileged: Set to 1 for a non-privileged memory region.
+* \param XN eXecute Never: Set to 1 for a non-executable memory region.
+*/
+#define ARM_MPU_RBAR(BASE, SH, RO, NP, XN) \
+  ((BASE & MPU_RBAR_BASE_Msk) | \
+  ((SH << MPU_RBAR_SH_Pos) & MPU_RBAR_SH_Msk) | \
+  ((ARM_MPU_AP_(RO, NP) << MPU_RBAR_AP_Pos) & MPU_RBAR_AP_Msk) | \
+  ((XN << MPU_RBAR_XN_Pos) & MPU_RBAR_XN_Msk))
+
+/** \brief Region Limit Address Register value
+* \param LIMIT The limit address bits [31:5] for this memory region. The value is one extended.
+* \param IDX The attribute index to be associated with this memory region.
+*/
+#define ARM_MPU_RLAR(LIMIT, IDX) \
+  ((LIMIT & MPU_RLAR_LIMIT_Msk) | \
+  ((IDX << MPU_RLAR_AttrIndx_Pos) & MPU_RLAR_AttrIndx_Msk) | \
+  (MPU_RLAR_EN_Msk))
+
+/**
+* Struct for a single MPU Region
+*/
+typedef struct {
+  uint32_t RBAR;                   /*!< Region Base Address Register value */
+  uint32_t RLAR;                   /*!< Region Limit Address Register value */
+} ARM_MPU_Region_t;
+    
+/** Enable the MPU.
+* \param MPU_Control Default access permissions for unconfigured regions.
+*/
+__STATIC_INLINE void ARM_MPU_Enable(uint32_t MPU_Control)
+{
+  __DSB();
+  __ISB();
+  MPU->CTRL = MPU_Control | MPU_CTRL_ENABLE_Msk;
+#ifdef SCB_SHCSR_MEMFAULTENA_Msk
+  SCB->SHCSR |= SCB_SHCSR_MEMFAULTENA_Msk;
+#endif
+}
+
+/** Disable the MPU.
+*/
+__STATIC_INLINE void ARM_MPU_Disable(void)
+{
+  __DSB();
+  __ISB();
+#ifdef SCB_SHCSR_MEMFAULTENA_Msk
+  SCB->SHCSR &= ~SCB_SHCSR_MEMFAULTENA_Msk;
+#endif
+  MPU->CTRL  &= ~MPU_CTRL_ENABLE_Msk;
+}
+
+#ifdef MPU_NS
+/** Enable the Non-secure MPU.
+* \param MPU_Control Default access permissions for unconfigured regions.
+*/
+__STATIC_INLINE void ARM_MPU_Enable_NS(uint32_t MPU_Control)
+{
+  __DSB();
+  __ISB();
+  MPU_NS->CTRL = MPU_Control | MPU_CTRL_ENABLE_Msk;
+#ifdef SCB_SHCSR_MEMFAULTENA_Msk
+  SCB_NS->SHCSR |= SCB_SHCSR_MEMFAULTENA_Msk;
+#endif
+}
+
+/** Disable the Non-secure MPU.
+*/
+__STATIC_INLINE void ARM_MPU_Disable_NS(void)
+{
+  __DSB();
+  __ISB();
+#ifdef SCB_SHCSR_MEMFAULTENA_Msk
+  SCB_NS->SHCSR &= ~SCB_SHCSR_MEMFAULTENA_Msk;
+#endif
+  MPU_NS->CTRL  &= ~MPU_CTRL_ENABLE_Msk;
+}
+#endif
+
+/** Set the memory attribute encoding to the given MPU.
+* \param mpu Pointer to the MPU to be configured.
+* \param idx The attribute index to be set [0-7]
+* \param attr The attribute value to be set.
+*/
+__STATIC_INLINE void ARM_MPU_SetMemAttrEx(MPU_Type* mpu, uint8_t idx, uint8_t attr)
+{
+  const uint8_t reg = idx / 4U;
+  const uint32_t pos = ((idx % 4U) * 8U);
+  const uint32_t mask = 0xFFU << pos;
+  
+  if (reg >= (sizeof(mpu->MAIR) / sizeof(mpu->MAIR[0]))) {
+    return; // invalid index
+  }
+  
+  mpu->MAIR[reg] = ((mpu->MAIR[reg] & ~mask) | ((attr << pos) & mask));
+}
+
+/** Set the memory attribute encoding.
+* \param idx The attribute index to be set [0-7]
+* \param attr The attribute value to be set.
+*/
+__STATIC_INLINE void ARM_MPU_SetMemAttr(uint8_t idx, uint8_t attr)
+{
+  ARM_MPU_SetMemAttrEx(MPU, idx, attr);
+}
+
+#ifdef MPU_NS
+/** Set the memory attribute encoding to the Non-secure MPU.
+* \param idx The attribute index to be set [0-7]
+* \param attr The attribute value to be set.
+*/
+__STATIC_INLINE void ARM_MPU_SetMemAttr_NS(uint8_t idx, uint8_t attr)
+{
+  ARM_MPU_SetMemAttrEx(MPU_NS, idx, attr);
+}
+#endif
+
+/** Clear and disable the given MPU region of the given MPU.
+* \param mpu Pointer to MPU to be used.
+* \param rnr Region number to be cleared.
+*/
+__STATIC_INLINE void ARM_MPU_ClrRegionEx(MPU_Type* mpu, uint32_t rnr)
+{
+  mpu->RNR = rnr;
+  mpu->RLAR = 0U;
+}
+
+/** Clear and disable the given MPU region.
+* \param rnr Region number to be cleared.
+*/
+__STATIC_INLINE void ARM_MPU_ClrRegion(uint32_t rnr)
+{
+  ARM_MPU_ClrRegionEx(MPU, rnr);
+}
+
+#ifdef MPU_NS
+/** Clear and disable the given Non-secure MPU region.
+* \param rnr Region number to be cleared.
+*/
+__STATIC_INLINE void ARM_MPU_ClrRegion_NS(uint32_t rnr)
+{  
+  ARM_MPU_ClrRegionEx(MPU_NS, rnr);
+}
+#endif
+
+/** Configure the given MPU region of the given MPU.
+* \param mpu Pointer to MPU to be used.
+* \param rnr Region number to be configured.
+* \param rbar Value for RBAR register.
+* \param rlar Value for RLAR register.
+*/   
+__STATIC_INLINE void ARM_MPU_SetRegionEx(MPU_Type* mpu, uint32_t rnr, uint32_t rbar, uint32_t rlar)
+{
+  mpu->RNR = rnr;
+  mpu->RBAR = rbar;
+  mpu->RLAR = rlar;
+}
+
+/** Configure the given MPU region.
+* \param rnr Region number to be configured.
+* \param rbar Value for RBAR register.
+* \param rlar Value for RLAR register.
+*/   
+__STATIC_INLINE void ARM_MPU_SetRegion(uint32_t rnr, uint32_t rbar, uint32_t rlar)
+{
+  ARM_MPU_SetRegionEx(MPU, rnr, rbar, rlar);
+}
+
+#ifdef MPU_NS
+/** Configure the given Non-secure MPU region.
+* \param rnr Region number to be configured.
+* \param rbar Value for RBAR register.
+* \param rlar Value for RLAR register.
+*/   
+__STATIC_INLINE void ARM_MPU_SetRegion_NS(uint32_t rnr, uint32_t rbar, uint32_t rlar)
+{
+  ARM_MPU_SetRegionEx(MPU_NS, rnr, rbar, rlar);  
+}
+#endif
+
+/** Memcopy with strictly ordered memory access, e.g. for register targets.
+* \param dst Destination the data is copied to.
+* \param src Source the data is copied from.
+* \param len Number of data words to be copied.
+*/
+__STATIC_INLINE void orderedCpy(volatile uint32_t* dst, const uint32_t* __RESTRICT src, uint32_t len)
+{
+  uint32_t i;
+  for (i = 0U; i < len; ++i) 
+  {
+    dst[i] = src[i];
+  }
+}
+
+/** Load the given number of MPU regions from a table to the given MPU.
+* \param mpu Pointer to the MPU registers to be used.
+* \param rnr First region number to be configured.
+* \param table Pointer to the MPU configuration table.
+* \param cnt Number of regions to be configured.
+*/
+__STATIC_INLINE void ARM_MPU_LoadEx(MPU_Type* mpu, uint32_t rnr, ARM_MPU_Region_t const* table, uint32_t cnt) 
+{
+  const uint32_t rowWordSize = sizeof(ARM_MPU_Region_t)/4U;
+  if (cnt == 1U) {
+    mpu->RNR = rnr;
+    orderedCpy(&(mpu->RBAR), &(table->RBAR), rowWordSize);
+  } else {
+    uint32_t rnrBase   = rnr & ~(MPU_TYPE_RALIASES-1U);
+    uint32_t rnrOffset = rnr % MPU_TYPE_RALIASES;
+    
+    mpu->RNR = rnrBase;
+    while ((rnrOffset + cnt) > MPU_TYPE_RALIASES) {
+      uint32_t c = MPU_TYPE_RALIASES - rnrOffset;
+      orderedCpy(&(mpu->RBAR)+(rnrOffset*2U), &(table->RBAR), c*rowWordSize);
+      table += c;
+      cnt -= c;
+      rnrOffset = 0U;
+      rnrBase += MPU_TYPE_RALIASES;
+      mpu->RNR = rnrBase;
+    }
+    
+    orderedCpy(&(mpu->RBAR)+(rnrOffset*2U), &(table->RBAR), cnt*rowWordSize);
+  }
+}
+
+/** Load the given number of MPU regions from a table.
+* \param rnr First region number to be configured.
+* \param table Pointer to the MPU configuration table.
+* \param cnt Amount of regions to be configured.
+*/
+__STATIC_INLINE void ARM_MPU_Load(uint32_t rnr, ARM_MPU_Region_t const* table, uint32_t cnt) 
+{
+  ARM_MPU_LoadEx(MPU, rnr, table, cnt);
+}
+
+#ifdef MPU_NS
+/** Load the given number of MPU regions from a table to the Non-secure MPU.
+* \param rnr First region number to be configured.
+* \param table Pointer to the MPU configuration table.
+* \param cnt Number of regions to be configured.
+*/
+__STATIC_INLINE void ARM_MPU_Load_NS(uint32_t rnr, ARM_MPU_Region_t const* table, uint32_t cnt) 
+{
+  ARM_MPU_LoadEx(MPU_NS, rnr, table, cnt);
+}
+#endif
+
+#endif
+
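
For reference (not part of the committed header): a minimal Armv8-M MPU setup sketch using the attribute and region macros above. MPU_CTRL_PRIVDEFENA_Msk comes from the device's core header; the device header name and all addresses are placeholders.

#include "ARMCM33.h"   /* hypothetical device header that pulls in mpu_armv8.h */

void mpu_setup_v8(void)
{
  /* Attribute index 0: normal memory, outer/inner write-back, read/write allocate */
  ARM_MPU_SetMemAttr(0U, ARM_MPU_ATTR(ARM_MPU_ATTR_MEMORY_(1U, 1U, 1U, 1U),
                                      ARM_MPU_ATTR_MEMORY_(1U, 1U, 1U, 1U)));
  /* Attribute index 1: device memory, nGnRE */
  ARM_MPU_SetMemAttr(1U, ARM_MPU_ATTR(ARM_MPU_ATTR_DEVICE, ARM_MPU_ATTR_DEVICE_nGnRE));

  /* Region 0: 0x20000000..0x2000FFFF, non-shareable, read/write, privileged, executable, attribute 0 */
  ARM_MPU_SetRegion(0U, ARM_MPU_RBAR(0x20000000U, ARM_MPU_SH_NON, 0U, 0U, 0U),
                        ARM_MPU_RLAR(0x2000FFFFU, 0U));

  /* Region 1: 0x40000000..0x40000FFF, outer-shareable, read/write, non-privileged, execute-never, attribute 1 */
  ARM_MPU_SetRegion(1U, ARM_MPU_RBAR(0x40000000U, ARM_MPU_SH_OUTER, 0U, 1U, 1U),
                        ARM_MPU_RLAR(0x40000FFFU, 1U));

  ARM_MPU_Enable(MPU_CTRL_PRIVDEFENA_Msk);   /* unmapped addresses use the default map for privileged code */
}

Larger static configurations can instead be described as an ARM_MPU_Region_t table and applied with ARM_MPU_Load()/ARM_MPU_LoadEx(), which handle the register aliases and the starting region number as shown above.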

+ 70 - 0
app/main/CTO20220622/Drivers/CMSIS/Core/Include/tz_context.h

@@ -0,0 +1,70 @@
+/******************************************************************************
+ * @file     tz_context.h
+ * @brief    Context Management for Armv8-M TrustZone
+ * @version  V1.0.1
+ * @date     10. January 2018
+ ******************************************************************************/
+/*
+ * Copyright (c) 2017-2018 Arm Limited. All rights reserved.
+ *
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * Licensed under the Apache License, Version 2.0 (the License); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an AS IS BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#if   defined ( __ICCARM__ )
+  #pragma system_include         /* treat file as system include file for MISRA check */
+#elif defined (__clang__)
+  #pragma clang system_header   /* treat file as system include file */
+#endif
+
+#ifndef TZ_CONTEXT_H
+#define TZ_CONTEXT_H
+ 
+#include <stdint.h>
+ 
+#ifndef TZ_MODULEID_T
+#define TZ_MODULEID_T
+/// \details Data type that identifies secure software modules called by a process.
+typedef uint32_t TZ_ModuleId_t;
+#endif
+ 
+/// \details TZ Memory ID identifies an allocated memory slot.
+typedef uint32_t TZ_MemoryId_t;
+  
+/// Initialize secure context memory system
+/// \return execution status (1: success, 0: error)
+uint32_t TZ_InitContextSystem_S (void);
+ 
+/// Allocate context memory for calling secure software modules in TrustZone
+/// \param[in]  module   identifies software modules called from non-secure mode
+/// \return value != 0  TrustZone memory slot identifier
+/// \return value 0    no memory available or internal error
+TZ_MemoryId_t TZ_AllocModuleContext_S (TZ_ModuleId_t module);
+ 
+/// Free context memory that was previously allocated with \ref TZ_AllocModuleContext_S
+/// \param[in]  id  TrustZone memory slot identifier
+/// \return execution status (1: success, 0: error)
+uint32_t TZ_FreeModuleContext_S (TZ_MemoryId_t id);
+ 
+/// Load secure context (called on RTOS thread context switch)
+/// \param[in]  id  TrustZone memory slot identifier
+/// \return execution status (1: success, 0: error)
+uint32_t TZ_LoadContext_S (TZ_MemoryId_t id);
+ 
+/// Store secure context (called on RTOS thread context switch)
+/// \param[in]  id  TrustZone memory slot identifier
+/// \return execution status (1: success, 0: error)
+uint32_t TZ_StoreContext_S (TZ_MemoryId_t id);
+ 
+#endif  // TZ_CONTEXT_H
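
For reference (not part of the committed header): a sketch of how a non-secure RTOS kernel might drive this API around thread creation and context switching. The thread structure and the zero-means-no-secure-context convention are illustrative assumptions, not part of the CMSIS definition.

#include <stdint.h>
#include "tz_context.h"

typedef struct {
  TZ_MemoryId_t tz_memory;   /* secure context slot; 0U if the thread never calls secure code (assumption) */
  /* ... other thread control block fields ... */
} osThread_t;

void os_kernel_start(void)
{
  (void)TZ_InitContextSystem_S();                        /* once, before any secure context is allocated */
}

void os_thread_create(osThread_t *thread, TZ_ModuleId_t module)
{
  /* module == 0U marks a thread that makes no secure calls in this sketch */
  thread->tz_memory = (module != 0U) ? TZ_AllocModuleContext_S(module) : 0U;
}

void os_context_switch(osThread_t *prev, osThread_t *next)
{
  if (prev->tz_memory != 0U) { (void)TZ_StoreContext_S(prev->tz_memory); }   /* save outgoing secure context    */
  if (next->tz_memory != 0U) { (void)TZ_LoadContext_S(next->tz_memory);  }   /* restore incoming secure context */
}

void os_thread_delete(osThread_t *thread)
{
  if (thread->tz_memory != 0U) { (void)TZ_FreeModuleContext_S(thread->tz_memory); }
}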