More fixes for Cortex-M0/Cortex-M3/Cortex-M4 implementations.

Signed-off-by: NIIBE Yutaka <gniibe@fsij.org>
Author: NIIBE Yutaka
Date: 2021-02-08 12:22:16 +09:00
commit cfcdeebb78 (parent 2832104263)
4 changed files with 109 additions and 120 deletions


@@ -29,7 +29,6 @@
  */
 static struct chx_thread *running;
-static struct chx_thread *preempting;

 static struct chx_thread *
 chx_running (void)
@@ -77,14 +76,14 @@ struct chx_stack_regs {
  * ---------------------
  * Prio 0x40: thread temporarily inhibiting schedule for critical region
  * ...
- * Prio 0xb0: systick, external interrupt, pendsv
+ * Prio 0xb0: systick, external interrupt
  * =====================================
  *
  * Cortex-M0
  * =====================================
  * Prio 0x00: thread temporarily inhibiting schedule for critical region
  * ...
- * Prio 0x40: systick, external interrupt, pendsv
+ * Prio 0x40: systick, external interrupt
  * Prio 0x80: svc (not used)
  * =====================================
  */
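
The table above is what makes a critical region cheap: a thread only has to raise its own execution priority to the inhibit level. A minimal sketch of that idea, assuming a BASEPRI write on Cortex-M3/M4 (Cortex-M0 has no BASEPRI, so PRIMASK is the usual fallback); the function name is illustrative, not from this commit:

#include <stdint.h>

/* Illustrative sketch: enter the "inhibit schedule" level of the
   table above.  On Cortex-M3/M4, BASEPRI = 0x40 masks SysTick and
   external interrupts (prio 0xb0) while faults stay enabled; on
   Cortex-M0, PRIMASK masks all configurable interrupts.  */
static void
sched_inhibit_sketch (void)
{
#if defined(__ARM_ARCH_6M__)
  asm volatile ("cpsid	i" : : : "memory");
#else
  uint32_t prio = 0x40;	/* The inhibit level from the table.  */
  asm volatile ("msr	BASEPRI, %0" : : "r" (prio) : "memory");
#endif
}
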
@@ -96,7 +95,6 @@ struct chx_stack_regs {
 /* ... */
 #define CPU_EXCEPTION_PRIORITY_SYSTICK   0x40
 #define CPU_EXCEPTION_PRIORITY_INTERRUPT CPU_EXCEPTION_PRIORITY_SYSTICK
-#define CPU_EXCEPTION_PRIORITY_PENDSV    CPU_EXCEPTION_PRIORITY_SYSTICK
 #define CPU_EXCEPTION_PRIORITY_SVC       0x80 /* No use in this arch */
 #elif defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7EM__)
 #define CPU_EXCEPTION_PRIORITY_SVC       0x30
@@ -105,7 +103,6 @@ struct chx_stack_regs {
 /* ... */
 #define CPU_EXCEPTION_PRIORITY_SYSTICK   0xb0
 #define CPU_EXCEPTION_PRIORITY_INTERRUPT CPU_EXCEPTION_PRIORITY_SYSTICK
-#define CPU_EXCEPTION_PRIORITY_PENDSV    CPU_EXCEPTION_PRIORITY_SYSTICK
 #else
 #error "no support for this arch"
 #endif
@@ -229,8 +226,7 @@ chx_interrupt_controller_init (void)
 {
   *AIRCR = 0x05FA0000 | ( 5 << 8); /* PRIGROUP = 5, 2-bit:2-bit. */
   *SHPR2 = (CPU_EXCEPTION_PRIORITY_SVC << 24);
-  *SHPR3 = ((CPU_EXCEPTION_PRIORITY_SYSTICK << 24)
-	    | (CPU_EXCEPTION_PRIORITY_PENDSV << 16));
+  *SHPR3 = (CPU_EXCEPTION_PRIORITY_SYSTICK << 24);
 }
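
A worked reading of the AIRCR write above: PRIGROUP = 5 places the binary point so that the group priority is bits [7:6] and the subpriority is bits [5:0]; with the four implemented priority bits typical of these parts, that is the "2-bit:2-bit" split the comment names. The macros below are illustrative only:

/* Illustrative decode of a priority byte under PRIGROUP = 5.  */
#define PRIO_GROUP(p)	(((p) >> 6) & 0x3)	/* Preemption group.  */
#define PRIO_SUB(p)	(((p) >> 4) & 0x3)	/* Sub-priority (4 implemented bits).  */

/* PRIO_GROUP(0x30) == 0 (svc), PRIO_GROUP(0x40) == 1 (inhibit level),
   PRIO_GROUP(0xb0) == 2 (systick/interrupt): a lower group value
   preempts a higher one.  */
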
@@ -257,42 +253,106 @@ chx_cpu_sched_unlock (void)
 }

-static void
-chx_request_preemption_possibly (struct chx_thread *tp_next)
+static void __attribute__ ((naked, used))
+involuntary_context_switch (struct chx_thread *tp_next)
 {
-  if (!tp_next)
-    return;
+  register struct chx_thread *tp_current asm ("r1");

-  if (preempting)
-    chx_ready_push (tp_next);
+  asm (
+	"ldr	r2, =running\n\t"
+	"ldr	r1, [r2]"
+	: "=r" (tp_current)
+	: /* no input */
+	: "r2");
+
+  if (!tp_current)
+    /* It's idle thread.  No need to save registers.  */
+    ;
   else
     {
-      preempting = tp_next;
-      *ICSR = (1 << 28);
-      asm volatile ("" : : : "memory");
+      /* Save registers onto CHX_THREAD struct.  */
+      asm volatile (
+	"add	%0, #20\n\t"
+	"stm	%0!, {r4, r5, r6, r7}\n\t"
+	"mov	r2, r8\n\t"
+	"mov	r3, r9\n\t"
+	"mov	r4, r10\n\t"
+	"mov	r5, r11\n\t"
+	"mrs	r6, PSP\n\t" /* r13(=SP) in user space. */
+	"stm	%0!, {r2, r3, r4, r5, r6}"
+	: "=r" (tp_current)
+	: "0" (tp_current)
+	/*
+	 * Memory clobber constraint here is not accurate, but this
+	 * works.  R7 keeps its value, but having "r7" here prevents
+	 * use of R7 before this asm statement.
+	 */
+	: "r2", "r3", "r4", "r5", "r6", "r7", "memory");
+      tp_next = chx_running_preempted (tp_next);
     }
+
+  /* Registers on stack (PSP): r0, r1, r2, r3, r12, lr, pc, xpsr */
+  asm volatile (
+	/* Now, r0 points to the thread to be switched.  */
+	/* Put it to *running.  */
+	"ldr	r1, =running\n\t"
+	/* Update running.  */
+	"str	r0, [r1]\n\t"
+	/**/
+	"add	r0, #20\n\t"
+	"ldm	r0!, {r4, r5, r6, r7}\n\t"
+#if defined(__ARM_ARCH_6M__)
+	"ldm	r0!, {r1, r2, r3}\n\t"
+	"mov	r8, r1\n\t"
+	"mov	r9, r2\n\t"
+	"mov	r10, r3\n\t"
+	"ldm	r0!, {r1, r2}\n\t"
+	"mov	r11, r1\n\t"
+	"msr	PSP, r2\n\t"
+#else
+	"ldr	r8, [r0], #4\n\t"
+	"ldr	r9, [r0], #4\n\t"
+	"ldr	r10, [r0], #4\n\t"
+	"ldr	r11, [r0], #4\n\t"
+	"ldr	r1, [r0], #4\n\t"
+	"msr	PSP, r1\n\t"
+#endif
+	"mov	r0, #0\n\t"
+	"sub	r0, #3\n\t" /* EXC_RETURN to a thread with PSP */
+	"bx	r0"
+	: /* no output */ : "r" (tp_next) : "memory");
 }

-void
-chx_timer_handler (void)
-{
-  struct chx_thread *tp_next;
-
-  tp_next = chx_timer_expired ();
-  chx_request_preemption_possibly (tp_next);
-}
+static struct chx_thread *chx_timer_expired (void) __attribute__ ((noinline,used));
+static struct chx_thread *chx_recv_irq (uint32_t irq_num) __attribute__ ((noinline,used));

-void
+void __attribute__ ((naked))
 chx_handle_intr (void)
 {
-  register uint32_t irq_num;
-  struct chx_thread *tp_next;
+  register struct chx_thread *tp_next asm ("r0");

   asm volatile ("mrs	%0, IPSR\n\t"
-		"sub	%0, #16" /* Exception # - 16 = interrupt number. */
-		: "=r" (irq_num) : /* no input */ : "memory");
-
-  tp_next = chx_recv_irq (irq_num);
-  chx_request_preemption_possibly (tp_next);
+		"sub	%0, #16\n\t" /* Exception # - 16 = interrupt number. */
+		"bpl	0f\n\t"
+		"bl	chx_timer_expired\n\t"
+		"b	1f\n"
+	"0:\n\t"
+		"bl	chx_recv_irq\n"
+	"1:"
+		: "=r" (tp_next) : /* no input */ : "memory");
+
+  if (tp_next)
+    asm volatile (
+	"b	involuntary_context_switch"
+	: /* no output */ : /* no input */ : "memory");
+  else
+    asm volatile (
+	"mov	r0, #0\n\t"
+	"sub	r0, #3\n\t" /* EXC_RETURN to a thread with PSP */
+	"bx	r0"
+	: /* no output */ : /* no input */ : "memory");
 }

 static void
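
Two idioms in the handlers above deserve a note. "mov r0, #0; sub r0, #3" materializes 0xFFFFFFFD, the EXC_RETURN value meaning "return to Thread mode, resume on PSP". And because IPSR holds the exception number, with SysTick at exception 15 and external interrupt n at exception 16 + n, "sub %0, #16" goes negative exactly for SysTick, which is what "bpl 0f" branches on. A C restatement of both (names illustrative, not from this commit):

#include <stdint.h>

/* 0 - 3 wraps to 0xFFFFFFFD: EXC_RETURN for "Thread mode, use PSP".  */
#define EXC_RETURN_THREAD_PSP	((uint32_t)0 - 3)

/* IPSR - 16 is negative for SysTick (exception 15) and is the IRQ
   number for external interrupts (exception 16 + n), mirroring the
   "bpl 0f" dispatch in chx_handle_intr.  */
static inline int32_t
irq_num_from_ipsr (uint32_t ipsr)
{
  return (int32_t) ipsr - 16;	/* < 0: SysTick; >= 0: IRQ number.  */
}
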
@@ -476,87 +536,6 @@ chopstx_create_arch (uintptr_t stack_addr, size_t stack_size,
   return tp;
 }

-/*
- * Lower layer architecture specific exception handling entries.
- *
- */
-
-void __attribute__ ((naked))
-preempt (void)
-{
-  register struct chx_thread *tp_next asm ("r0");
-  register struct chx_thread *tp_current asm ("r1");
-
-  asm (
-	"mov	r1, #0\n\t"
-	"ldr	r2, =preempting\n\t"
-	"ldr	r0, [r2]\n\t"
-	"str	r1, [r2]\n\t"
-	"ldr	r2, =running\n\t"
-	"ldr	r1, [r2]"
-	: "=r" (tp_next), "=r" (tp_current)
-	: /* no input */
-	: "r2");
-
-  if (!tp_current)
-    /* It's idle thread.  No need to save registers.  */
-    ;
-  else
-    {
-      /* Save registers onto CHX_THREAD struct.  */
-      asm volatile (
-	"add	%0, #20\n\t"
-	"stm	%0!, {r4, r5, r6, r7}\n\t"
-	"mov	r2, r8\n\t"
-	"mov	r3, r9\n\t"
-	"mov	r4, r10\n\t"
-	"mov	r5, r11\n\t"
-	"mrs	r6, PSP\n\t" /* r13(=SP) in user space. */
-	"stm	%0!, {r2, r3, r4, r5, r6}"
-	: "=r" (tp_current)
-	: "0" (tp_current)
-	/*
-	 * Memory clobber constraint here is not accurate, but this
-	 * works.  R7 keeps its value, but having "r7" here prevents
-	 * use of R7 before this asm statement.
-	 */
-	: "r2", "r3", "r4", "r5", "r6", "r7", "memory");
-      tp_next = chx_running_preempted (tp_next);
-    }
-
-  /* Registers on stack (PSP): r0, r1, r2, r3, r12, lr, pc, xpsr */
-  asm volatile (
-	/* Now, r0 points to the thread to be switched.  */
-	/* Put it to *running.  */
-	"ldr	r1, =running\n\t"
-	/* Update running.  */
-	"str	r0, [r1]\n\t"
-	/**/
-	"add	r0, #20\n\t"
-	"ldm	r0!, {r4, r5, r6, r7}\n\t"
-#if defined(__ARM_ARCH_6M__)
-	"ldm	r0!, {r1, r2, r3}\n\t"
-	"mov	r8, r1\n\t"
-	"mov	r9, r2\n\t"
-	"mov	r10, r3\n\t"
-	"ldm	r0!, {r1, r2}\n\t"
-	"mov	r11, r1\n\t"
-	"msr	PSP, r2\n\t"
-#else
-	"ldr	r8, [r0], #4\n\t"
-	"ldr	r9, [r0], #4\n\t"
-	"ldr	r10, [r0], #4\n\t"
-	"ldr	r11, [r0], #4\n\t"
-	"ldr	r1, [r0], #4\n\t"
-	"msr	PSP, r1\n\t"
-#endif
-	"mov	r0, #0\n\t"
-	"sub	r0, #3\n\t" /* EXC_RETURN to a thread with PSP */
-	"bx	r0"
-	: /* no output */ : "r" (tp_next) : "memory");
-}

 #if defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7EM__)

 /*