diff --git a/cpu/fe310/intr.S b/cpu/fe310/intr.S
deleted file mode 100644
index cb759759c7..0000000000
--- a/cpu/fe310/intr.S
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Copyright (C) 2017, 2019 JP Bonn, Ken Rabold
- *
- * This file is subject to the terms and conditions of the GNU Lesser
- * General Public License v2.1. See the file LICENSE in the top level
- * directory for more details.
- */
-
-#include "vendor/encoding.h"
-#include "context_frame.h"
-
-    .section .text.entry
-    .align 2
-    .global trap_entry
-
-trap_entry:
-    /* Save registers to stack */
-    addi sp, sp, -CONTEXT_FRAME_SIZE
-
-    sw s0, s0_OFFSET(sp)
-    sw s1, s1_OFFSET(sp)
-    sw s2, s2_OFFSET(sp)
-    sw s3, s3_OFFSET(sp)
-    sw s4, s4_OFFSET(sp)
-    sw s5, s5_OFFSET(sp)
-    sw s6, s6_OFFSET(sp)
-    sw s7, s7_OFFSET(sp)
-    sw s8, s8_OFFSET(sp)
-    sw s9, s9_OFFSET(sp)
-    sw s10, s10_OFFSET(sp)
-    sw s11, s11_OFFSET(sp)
-    sw ra, ra_OFFSET(sp)
-    sw t0, t0_OFFSET(sp)
-    sw t1, t1_OFFSET(sp)
-    sw t2, t2_OFFSET(sp)
-    sw t3, t3_OFFSET(sp)
-    sw t4, t4_OFFSET(sp)
-    sw t5, t5_OFFSET(sp)
-    sw t6, t6_OFFSET(sp)
-    sw a0, a0_OFFSET(sp)
-    sw a1, a1_OFFSET(sp)
-    sw a2, a2_OFFSET(sp)
-    sw a3, a3_OFFSET(sp)
-    sw a4, a4_OFFSET(sp)
-    sw a5, a5_OFFSET(sp)
-    sw a6, a6_OFFSET(sp)
-    sw a7, a7_OFFSET(sp)
-
-
-    /* Get the interrupt cause, PC, and address */
-    csrr a0, mcause
-    csrr a1, mepc
-    csrr a2, mtval
-
-    /* Save return PC in stack frame */
-    sw a1, pc_OFFSET(sp)
-
-    /* Get the active thread (could be NULL) */
-    lw tp, sched_active_thread
-    beqz tp, null_thread
-
-    /* Save stack pointer of current thread */
-    sw sp, SP_OFFSET_IN_THREAD(tp)
-
-null_thread:
-    /* Switch to ISR stack. Interrupts are not nested so use fixed
-     * starting address and just abandon stack when finished. */
-    la sp, _sp
-
-    /* Call handle_trap with MCAUSE and MEPC register value as args */
-    call handle_trap
-
-    /* Get the active thread (guaranteed to be non NULL) */
-    lw tp, sched_active_thread
-
-    /* Load the thread SP of scheduled thread */
-    lw sp, SP_OFFSET_IN_THREAD(tp)
-
-    /* Set return PC */
-    lw a1, pc_OFFSET(sp)
-    csrw mepc, a1
-
-    /* Restore registers from stack */
-    lw s0, s0_OFFSET(sp)
-    lw s1, s1_OFFSET(sp)
-    lw s2, s2_OFFSET(sp)
-    lw s3, s3_OFFSET(sp)
-    lw s4, s4_OFFSET(sp)
-    lw s5, s5_OFFSET(sp)
-    lw s6, s6_OFFSET(sp)
-    lw s7, s7_OFFSET(sp)
-    lw s8, s8_OFFSET(sp)
-    lw s9, s9_OFFSET(sp)
-    lw s10, s10_OFFSET(sp)
-    lw s11, s11_OFFSET(sp)
-    lw ra, ra_OFFSET(sp)
-    lw t0, t0_OFFSET(sp)
-    lw t1, t1_OFFSET(sp)
-    lw t2, t2_OFFSET(sp)
-    lw t3, t3_OFFSET(sp)
-    lw t4, t4_OFFSET(sp)
-    lw t5, t5_OFFSET(sp)
-    lw t6, t6_OFFSET(sp)
-    lw a0, a0_OFFSET(sp)
-    lw a1, a1_OFFSET(sp)
-    lw a2, a2_OFFSET(sp)
-    lw a3, a3_OFFSET(sp)
-    lw a4, a4_OFFSET(sp)
-    lw a5, a5_OFFSET(sp)
-    lw a6, a6_OFFSET(sp)
-    lw a7, a7_OFFSET(sp)
-
-    addi sp, sp, CONTEXT_FRAME_SIZE
-    mret
diff --git a/cpu/fe310/irq_arch.c b/cpu/fe310/irq_arch.c
index d449afcaac..53931a5eb3 100644
--- a/cpu/fe310/irq_arch.c
+++ b/cpu/fe310/irq_arch.c
@@ -21,7 +21,9 @@
 #include <stdio.h>
 #include <inttypes.h>
 
+#include "macros/xtstr.h"
 #include "cpu.h"
+#include "context_frame.h"
 #include "irq.h"
 #include "irq_arch.h"
 #include "panic.h"
@@ -42,7 +44,7 @@ static external_isr_ptr_t _ext_isrs[PLIC_NUM_INTERRUPTS];
 /**
  * @brief ISR trap vector
  */
-void trap_entry(void);
+static void trap_entry(void);
 /**
  * @brief Timer ISR
  */
@@ -95,12 +97,8 @@ void external_isr(void)
 /**
  * @brief Global trap and interrupt handler
  */
-void handle_trap(unsigned int mcause, unsigned int mepc, unsigned int mtval)
+void handle_trap(uint32_t mcause)
 {
-#ifndef DEVELHELP
-    (void) mepc;
-    (void) mtval;
-#endif
     /* Tell RIOT to set sched_context_switch_request instead of
      * calling thread_yield(). */
     fe310_in_isr = 1;
@@ -135,19 +133,153 @@ void handle_trap(unsigned int mcause, unsigned int mepc, unsigned int mtval)
     else {
 #ifdef DEVELHELP
         printf("Unhandled trap:\n");
-        printf("  mcause: 0x%08x\n", mcause);
-        printf("  mepc:   0x%08x\n", mepc);
-        printf("  mtval:  0x%08x\n", mtval);
+        printf("  mcause: 0x%"PRIx32"\n", mcause);
+        printf("  mepc:   0x%"PRIx32"\n", read_csr(mepc));
+        printf("  mtval:  0x%"PRIx32"\n", read_csr(mtval));
 #endif
         /* Unknown trap */
         core_panic(PANIC_GENERAL_ERROR, "Unhandled trap");
     }
-
-    /* Check if context change was requested */
-    if (sched_context_switch_request) {
-        sched_run();
-    }
-
     /* ISR done - no more changes to thread states */
     fe310_in_isr = 0;
 }
+
+/* Marking this as interrupt to ensure an mret at the end, provided by the
+ * compiler. Aligned to 4-byte boundary as per RISC-V spec */
+static void __attribute((aligned(4))) __attribute__((interrupt)) trap_entry(void) {
+    __asm__ volatile (
+    "addi sp, sp, -"XTSTR(CONTEXT_FRAME_SIZE)" \n"
+
+    /* Save caller-saved registers */
+    "sw ra, "XTSTR(ra_OFFSET)"(sp) \n"
+    "sw t0, "XTSTR(t0_OFFSET)"(sp) \n"
+    "sw t1, "XTSTR(t1_OFFSET)"(sp) \n"
+    "sw t2, "XTSTR(t2_OFFSET)"(sp) \n"
+    "sw t3, "XTSTR(t3_OFFSET)"(sp) \n"
+    "sw t4, "XTSTR(t4_OFFSET)"(sp) \n"
+    "sw t5, "XTSTR(t5_OFFSET)"(sp) \n"
+    "sw t6, "XTSTR(t6_OFFSET)"(sp) \n"
+    "sw a0, "XTSTR(a0_OFFSET)"(sp) \n"
+    "sw a1, "XTSTR(a1_OFFSET)"(sp) \n"
+    "sw a2, "XTSTR(a2_OFFSET)"(sp) \n"
+    "sw a3, "XTSTR(a3_OFFSET)"(sp) \n"
+    "sw a4, "XTSTR(a4_OFFSET)"(sp) \n"
+    "sw a5, "XTSTR(a5_OFFSET)"(sp) \n"
+    "sw a6, "XTSTR(a6_OFFSET)"(sp) \n"
+    "sw a7, "XTSTR(a7_OFFSET)"(sp) \n"
+
+    /* Save s0 and s1 extra for the active thread and the stack ptr */
+    "sw s0, "XTSTR(s0_OFFSET)"(sp) \n"
+    "sw s1, "XTSTR(s1_OFFSET)"(sp) \n"
+
+    /* Save the user stack ptr */
+    "mv s0, sp \n"
+    /* Load exception stack ptr */
+    "la sp, _sp \n"
+
+    /* Get the interrupt cause */
+    "csrr a0, mcause \n"
+
+    /* Call trap handler, a0 contains mcause before, and the return value after
+     * the call */
+    "call handle_trap \n"
+
+    /* Load the sched_context_switch_request */
+    "lw a0, sched_context_switch_request \n"
+
+    /* And skip the context switch if not requested */
+    "beqz a0, no_sched \n"
+
+    /* Get the previous active thread (could be NULL) */
+    "lw s1, sched_active_thread \n"
+
+    /* Run the scheduler */
+    "call sched_run \n"
+
+    "no_sched: \n"
+    /* Restore the thread stack pointer and check if a new thread must be
+     * scheduled */
+    "mv sp, s0 \n"
+
+    /* No context switch required, shortcut to restore. a0 contains the return
+     * value of sched_run, or the sched_context_switch_request if the sched_run
+     * was skipped */
+    "beqz a0, no_switch \n"
+
+    /* Skips the rest of the save if no active thread */
+    "beqz s1, null_thread \n"
+
+    /* Store s2-s11 */
+    "sw s2, "XTSTR(s2_OFFSET)"(sp) \n"
+    "sw s3, "XTSTR(s3_OFFSET)"(sp) \n"
+    "sw s4, "XTSTR(s4_OFFSET)"(sp) \n"
+    "sw s5, "XTSTR(s5_OFFSET)"(sp) \n"
+    "sw s6, "XTSTR(s6_OFFSET)"(sp) \n"
+    "sw s7, "XTSTR(s7_OFFSET)"(sp) \n"
+    "sw s8, "XTSTR(s8_OFFSET)"(sp) \n"
+    "sw s9, "XTSTR(s9_OFFSET)"(sp) \n"
+    "sw s10, "XTSTR(s10_OFFSET)"(sp) \n"
+    "sw s11, "XTSTR(s11_OFFSET)"(sp) \n"
+
+    /* Grab mepc to save it to the stack */
+    "csrr s2, mepc \n"
+
+    /* Save return PC in stack frame */
+    "sw s2, "XTSTR(pc_OFFSET)"(sp) \n"
+
+    /* Save stack pointer of current thread */
+    "sw sp, "XTSTR(SP_OFFSET_IN_THREAD)"(s1) \n"
+
+    /* Context saving done, from here on the new thread is scheduled */
+    "null_thread: \n"
+
+    /* Get the new active thread (guaranteed to be non NULL) */
+    "lw s1, sched_active_thread \n"
+
+    /* Load the thread SP of scheduled thread */
+    "lw sp, "XTSTR(SP_OFFSET_IN_THREAD)"(s1) \n"
+
+    /* Set return PC to mepc */
+    "lw a1, "XTSTR(pc_OFFSET)"(sp) \n"
+    "csrw mepc, a1 \n"
+
+    /* restore s2-s11 */
+    "lw s2, "XTSTR(s2_OFFSET)"(sp) \n"
+    "lw s3, "XTSTR(s3_OFFSET)"(sp) \n"
+    "lw s4, "XTSTR(s4_OFFSET)"(sp) \n"
+    "lw s5, "XTSTR(s5_OFFSET)"(sp) \n"
+    "lw s6, "XTSTR(s6_OFFSET)"(sp) \n"
+    "lw s7, "XTSTR(s7_OFFSET)"(sp) \n"
+    "lw s8, "XTSTR(s8_OFFSET)"(sp) \n"
+    "lw s9, "XTSTR(s9_OFFSET)"(sp) \n"
+    "lw s10, "XTSTR(s10_OFFSET)"(sp) \n"
+    "lw s11, "XTSTR(s11_OFFSET)"(sp) \n"
+
+    "no_switch: \n"
+
+    /* restore the caller-saved registers */
+    "lw ra, "XTSTR(ra_OFFSET)"(sp) \n"
+    "lw t0, "XTSTR(t0_OFFSET)"(sp) \n"
+    "lw t1, "XTSTR(t1_OFFSET)"(sp) \n"
+    "lw t2, "XTSTR(t2_OFFSET)"(sp) \n"
+    "lw t3, "XTSTR(t3_OFFSET)"(sp) \n"
+    "lw t4, "XTSTR(t4_OFFSET)"(sp) \n"
+    "lw t5, "XTSTR(t5_OFFSET)"(sp) \n"
+    "lw t6, "XTSTR(t6_OFFSET)"(sp) \n"
+    "lw a0, "XTSTR(a0_OFFSET)"(sp) \n"
+    "lw a1, "XTSTR(a1_OFFSET)"(sp) \n"
+    "lw a2, "XTSTR(a2_OFFSET)"(sp) \n"
+    "lw a3, "XTSTR(a3_OFFSET)"(sp) \n"
+    "lw a4, "XTSTR(a4_OFFSET)"(sp) \n"
+    "lw a5, "XTSTR(a5_OFFSET)"(sp) \n"
+    "lw a6, "XTSTR(a6_OFFSET)"(sp) \n"
+    "lw a7, "XTSTR(a7_OFFSET)"(sp) \n"
+    "lw s0, "XTSTR(s0_OFFSET)"(sp) \n"
+    "lw s1, "XTSTR(s1_OFFSET)"(sp) \n"
+
+    "addi sp, sp, "XTSTR(CONTEXT_FRAME_SIZE)" \n"
+    :
+    :
+    :
+    );
+}
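
Note on the XTSTR usage in the patch above: the stack-frame offsets (ra_OFFSET, pc_OFFSET, CONTEXT_FRAME_SIZE, ...) are integer macros from context_frame.h, and XTSTR from macros/xtstr.h turns them into string literals so the preprocessor can splice them into the __asm__ template. The following is a minimal, self-contained sketch of the expand-then-stringify idiom behind such a macro; the helper name TSTR and the offset value 48 are illustrative assumptions, not taken from the patch.

    #include <stdio.h>

    #define TSTR(x)  #x        /* stringify the token as-is */
    #define XTSTR(x) TSTR(x)   /* expand the macro argument first, then stringify */

    #define ra_OFFSET 48       /* assumed value; the real one comes from context_frame.h */

    int main(void)
    {
        /* Adjacent string literals are concatenated by the compiler, so this is
         * the same pattern used inside the __asm__ block of trap_entry: */
        const char *insn = "sw ra, "XTSTR(ra_OFFSET)"(sp)\n";
        fputs(insn, stdout);   /* prints: sw ra, 48(sp) */
        return 0;
    }

Because trap_entry carries the interrupt attribute, the compiler closes the function with an mret (as the patch's own comment notes), so the inline assembly only has to rebuild the context frame and restore sp before falling off the end of the function.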