cpu/cortexm: add cortex-m23 support
commit 6d3fda7260 (parent a78f357b26)
@@ -65,7 +65,8 @@ void cortexm_init(void)
     /* configure the vector table location to internal flash */
 #if defined(CPU_ARCH_CORTEX_M3) || defined(CPU_ARCH_CORTEX_M4) || \
     defined(CPU_ARCH_CORTEX_M4F) || defined(CPU_ARCH_CORTEX_M7) || \
-    (defined(CPU_ARCH_CORTEX_M0PLUS) && (__VTOR_PRESENT == 1))
+    (defined(CPU_ARCH_CORTEX_M0PLUS) || defined(CPU_ARCH_CORTEX_M23) \
+    && (__VTOR_PRESENT == 1))
     SCB->VTOR = (uint32_t)&_isr_vectors;
 #endif
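The Cortex-M23 is an ARMv8-M baseline core that can optionally implement the Vector Table Offset Register, so it joins the list of cores allowed to relocate the vector table, still conditional on __VTOR_PRESENT from the vendor's CMSIS header. A minimal sketch of the same idea, assuming a CMSIS core header (e.g. core_cm23.h) is included elsewhere and the linker script exports _isr_vectors; relocate_vector_table() is a hypothetical name, not RIOT API:

    #include <stdint.h>

    extern const void *_isr_vectors;   /* vector table placed in flash by the linker */

    static void relocate_vector_table(void)
    {
    #if defined(__VTOR_PRESENT) && (__VTOR_PRESENT == 1)
        /* point the core at the image's vector table; cores without a VTOR
         * (e.g. plain Cortex-M0) always fetch vectors from address 0 */
        SCB->VTOR = (uint32_t)&_isr_vectors;
        __DSB();                       /* make sure the write has taken effect */
    #endif
    }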
@@ -59,7 +59,8 @@ bool mpu_enabled(void) {
 }
 
 int mpu_configure(uint_fast8_t region, uintptr_t base, uint_fast32_t attr) {
-#if __MPU_PRESENT
+    /* Todo enable MPU support for Cortex-M23/M33 */
+#if __MPU_PRESENT && !defined(CPU_ARCH_CORTEX_M23)
     assert(region < MPU_NUM_REGIONS);
 
     MPU->RNR = region;
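The Cortex-M23 uses the ARMv8-M MPU programming model (RNR/RBAR/RLAR plus MAIR attribute registers) instead of the ARMv7-M RNR/RBAR/RASR interface this driver is written against, so the driver is compiled out for that core for now, as the Todo notes. A hedged sketch of the resulting shape of the function, assuming <stdint.h> and the target's CMSIS core header are available; this is not presented as the exact RIOT implementation:

    #include <stdint.h>

    int mpu_configure(uint_fast8_t region, uintptr_t base, uint_fast32_t attr)
    {
    #if __MPU_PRESENT && !defined(CPU_ARCH_CORTEX_M23)
        /* ARMv7-M style MPU: select the region, then program base and attributes */
        MPU->RNR  = region;
        MPU->RBAR = base & MPU_RBAR_ADDR_Msk;
        MPU->RASR = attr | MPU_RASR_ENABLE_Msk;
        return 0;
    #else
        /* no (supported) MPU on this core yet */
        (void)region;
        (void)base;
        (void)attr;
        return -1;
    #endif
    }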
@@ -187,7 +187,8 @@ char *thread_stack_init(thread_task_func_t task_func,
      * For the Cortex-M3 and Cortex-M4 we write them continuously onto the stack
      * as they can be read/written continuously by stack instructions. */
 
-#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS)
+#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS) \
+    || defined(CPU_ARCH_CORTEX_M23)
     /* start with r7 - r4 */
     for (int i = 7; i >= 4; i--) {
         stk--;
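On Cortex-M0, M0+ and M23 the 16-bit Thumb encodings of push/pop and ldm/stm only reach r0-r7, so the saved-register area of a new thread's stack is seeded in two groups rather than one contiguous r11-r4 block. A rough sketch of the two layouts, assuming stk is the descending stack pointer used by the surrounding code and the register number serves as a dummy seed value:

    #if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS) \
        || defined(CPU_ARCH_CORTEX_M23)
        /* r7 - r4 first: these can be restored with a plain pop {r4-r7} */
        for (int i = 7; i >= 4; i--) {
            *(--stk) = i;
        }
        /* r11 - r8 as a second group: they have to be staged through low
         * registers during restore anyway */
        for (int i = 11; i >= 8; i--) {
            *(--stk) = i;
        }
    #else
        /* r11 - r4 in one block: M3/M4/M7 stack instructions reach high registers */
        for (int i = 11; i >= 4; i--) {
            *(--stk) = i;
        }
    #endif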
@@ -286,7 +287,8 @@ void __attribute__((naked)) __attribute__((used)) isr_pendsv(void) {
     /* {r0-r3,r12,LR,PC,xPSR,s0-s15,FPSCR} are saved automatically on exception entry */
     ".thumb_func \n"
     "mrs r0, psp \n" /* get stack pointer from user mode */
-#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS)
+#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS) \
+    || defined(CPU_ARCH_CORTEX_M23)
     "mov r12, sp \n" /* remember the exception SP */
     "mov sp, r0 \n" /* set user mode SP as active SP */
     /* we can not push high registers directly, so we move R11-R8 into
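The reason for the separate branch is that the Thumb-1 style instruction set used by these cores cannot push or pop r8-r11 directly; they are first moved into low registers. A self-contained illustration of the staging pattern (not RIOT's isr_pendsv, which additionally switches stacks and records the result in the TCB):

    /* balanced save/restore round trip, so the function is safe to call */
    static inline void demo_stage_high_registers(void)
    {
        __asm__ volatile (
            "mov r1, r8       \n"   /* stage high registers into low ones */
            "mov r2, r9       \n"
            "mov r3, r10      \n"
            "push {r1-r3}     \n"   /* a Thumb-1 push can only name r0-r7 (and lr) */
            "pop {r1-r3}      \n"   /* restore path: pop into low registers ...    */
            "mov r8, r1       \n"   /* ... and move them back up                   */
            "mov r9, r2       \n"
            "mov r10, r3      \n"
            ::: "r1", "r2", "r3", "memory"
        );
    }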
@@ -326,7 +328,8 @@ void __attribute__((naked)) __attribute__((used)) isr_svc(void) {
     /* restore context and return from exception */
     ".thumb_func \n"
     "context_restore: \n"
-#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS)
+#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS) \
+    || defined(CPU_ARCH_CORTEX_M23)
     "mov lr, sp \n" /* save MSR stack pointer for later */
     "ldr r0, =sched_active_thread \n" /* load address of current TCB */
     "ldr r0, [r0] \n" /* dereference TCB */
@@ -188,7 +188,8 @@ __attribute__((naked)) void hard_fault_default(void)
     " use_psp: \n" /* else { */
     "mrs r0, psp \n" /* r0 = psp */
     " out: \n" /* } */
-#if (__CORTEX_M == 0)
+#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS) \
+    || defined(CPU_ARCH_CORTEX_M23)
     "push {r4-r7} \n" /* save r4..r7 to the stack */
     "mov r3, r8 \n" /* */
     "mov r4, r9 \n" /* */
@@ -208,9 +209,10 @@ __attribute__((naked)) void hard_fault_default(void)
     );
 }
 
-#if (__CORTEX_M == 0)
-/* Cortex-M0 and Cortex-M0+ lack the extended fault status registers found in
- * Cortex-M3 and above. */
+#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS) \
+    || defined(CPU_ARCH_CORTEX_M23)
+/* Cortex-M0, Cortex-M0+ and Cortex-M23 lack the extended fault status
+   registers found in Cortex-M3 and above. */
 #define CPU_HAS_EXTENDED_FAULT_REGISTERS 0
 #else
 #define CPU_HAS_EXTENDED_FAULT_REGISTERS 1
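The extended fault status registers (CFSR, HFSR, MMFAR, BFAR and friends) exist from ARMv7-M onward; M0, M0+ and M23 report every fault as a plain HardFault. A minimal sketch of how such a feature macro is typically used to gate a register dump, assuming the target's CMSIS core header is included; dump_fault_status() is a hypothetical helper, not RIOT's handler:

    #include <inttypes.h>
    #include <stdio.h>

    static void dump_fault_status(void)
    {
    #if CPU_HAS_EXTENDED_FAULT_REGISTERS
        printf("CFSR:  0x%08" PRIx32 "\n", SCB->CFSR);   /* configurable fault status */
        printf("HFSR:  0x%08" PRIx32 "\n", SCB->HFSR);   /* HardFault status          */
        printf("MMFAR: 0x%08" PRIx32 "\n", SCB->MMFAR);  /* MemManage fault address   */
        printf("BFAR:  0x%08" PRIx32 "\n", SCB->BFAR);   /* BusFault address          */
    #else
        puts("no extended fault status registers on this core");
    #endif
    }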
@@ -261,11 +263,12 @@ __attribute__((used)) void hard_fault_handler(uint32_t* sp, uint32_t corrupted,
 
     /* Reconstruct original stack pointer before fault occurred */
     orig_sp = sp + 8;
+#ifdef SCB_CCR_STKALIGN_Msk
     if (psr & SCB_CCR_STKALIGN_Msk) {
         /* Stack was not 8-byte aligned */
         orig_sp += 1;
     }
 
+#endif /* SCB_CCR_STKALIGN_Msk */
     puts("\nContext before hardfault:");
 
     /* TODO: printf in ISR context might be a bad idea */
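On exception entry the core pushes an eight-word frame {r0-r3, r12, LR, PC, xPSR}; on ARMv7-M it may insert one extra aligner word to keep the stack 8-byte aligned and records that in bit 9 of the stacked xPSR, the same bit position as SCB_CCR_STKALIGN_Msk, which is why the code above reuses that mask. The ARMv8-M baseline header used for the Cortex-M23 does not provide SCB_CCR_STKALIGN_Msk (the stack is always kept 8-byte aligned there), which is what the new #ifdef accounts for. A hedged sketch of the reconstruction, using a hypothetical helper name:

    #include <stdint.h>

    /* recover the stack pointer the code was using before the exception */
    static uint32_t *reconstruct_orig_sp(uint32_t *sp)
    {
        uint32_t stacked_psr = sp[7];       /* xPSR is the last word of the frame */
        uint32_t *orig_sp = sp + 8;         /* skip r0-r3, r12, lr, pc, xPSR      */

        if (stacked_psr & (1UL << 9)) {     /* an aligner word was inserted       */
            orig_sp += 1;
        }
        return orig_sp;
    }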
@@ -315,7 +318,8 @@ __attribute__((used)) void hard_fault_handler(uint32_t* sp, uint32_t corrupted,
     "mov lr, r1\n"
     "mov sp, %[orig_sp]\n"
     "mov r1, %[extra_stack]\n"
-#if (__CORTEX_M == 0)
+#if defined(CPU_ARCH_CORTEX_M0) || defined(CPU_ARCH_CORTEX_M0PLUS) \
+    || defined(CPU_ARCH_CORTEX_M23)
     "ldm r1!, {r4-r7}\n"
     "mov r8, r4\n"
     "mov r9, r5\n"
@@ -128,6 +128,8 @@ else ifeq ($(CPU_ARCH),cortex-m4f)
 export CFLAGS += -DARM_MATH_CM4
 else ifeq ($(CPU_ARCH),cortex-m7)
 export CFLAGS += -DARM_MATH_CM7
+else ifeq ($(CPU_ARCH),cortex-m23)
+export CFLAGS += -DARM_MATH_CM23
 endif
 endif