arch: arm: Introduce SMP support for Cortex-A/R #61206

Merged
8 changes: 5 additions & 3 deletions arch/arm/core/Kconfig
@@ -34,11 +34,12 @@ config CPU_AARCH32_CORTEX_R
select HAS_CMSIS_CORE
select ARCH_HAS_NESTED_EXCEPTION_DETECTION
select HAS_FLASH_LOAD_OFFSET
select ARCH_HAS_USERSPACE if ARM_MPU
select ARCH_HAS_EXTRA_EXCEPTION_INFO
select ARCH_HAS_USERSPACE if ARM_MPU && !USE_SWITCH
select ARCH_HAS_EXTRA_EXCEPTION_INFO if !USE_SWITCH
select ARCH_HAS_CODE_DATA_RELOCATION
select ARCH_HAS_NOCACHE_MEMORY_SUPPORT if ARM_MPU && CPU_HAS_ARM_MPU && CPU_HAS_DCACHE
select ARCH_SUPPORTS_ROM_START
select USE_SWITCH_SUPPORTED
help
This option signifies the use of a CPU of the Cortex-R family.

@@ -54,8 +55,9 @@ config CPU_AARCH32_CORTEX_A
select CPU_HAS_MMU
select HAS_CMSIS_CORE
select HAS_FLASH_LOAD_OFFSET
select ARCH_HAS_EXTRA_EXCEPTION_INFO
select ARCH_HAS_EXTRA_EXCEPTION_INFO if !USE_SWITCH
select ARCH_HAS_NOCACHE_MEMORY_SUPPORT
select USE_SWITCH_SUPPORTED
help
This option signifies the use of a CPU of the Cortex-A family.

6 changes: 3 additions & 3 deletions arch/arm/core/cortex_a_r/CMakeLists.txt
@@ -4,24 +4,24 @@ zephyr_library()

zephyr_library_sources(
exc.S
exc_exit.S
fault.c
irq_init.c
reboot.c
reset.S
stacks.c
tcm.c
vector_table.S
swap.c
swap_helper.S
irq_manage.c
prep_c.c
thread.c
cpu_idle.S
smp.c
)

zephyr_library_sources_ifdef(CONFIG_GEN_SW_ISR_TABLE isr_wrapper.S)
zephyr_library_sources_ifdef(CONFIG_USERSPACE thread.c)
zephyr_library_sources_ifdef(CONFIG_SEMIHOST semihost.c)
zephyr_library_sources_ifdef(CONFIG_THREAD_LOCAL_STORAGE __aeabi_read_tp.S)
zephyr_library_sources_ifdef(CONFIG_ARCH_CACHE cache.c)
zephyr_library_sources_ifdef(CONFIG_USE_SWITCH switch.S)
zephyr_library_sources_ifndef(CONFIG_USE_SWITCH swap.c swap_helper.S exc_exit.S)
6 changes: 5 additions & 1 deletion arch/arm/core/cortex_a_r/Kconfig
@@ -99,7 +99,7 @@ config CPU_CORTEX_R52
select AARCH32_ARMV8_R
select CPU_HAS_ICACHE
select CPU_HAS_DCACHE
select VFP_SP_D16
select VFP_SP_D16 if !USE_SWITCH
help
This option signifies the use of a Cortex-R52 CPU

@@ -130,6 +130,7 @@ config ARMV7_R_FP
config AARCH32_ARMV8_R
bool
select ATOMIC_OPERATIONS_BUILTIN
select SCHED_IPI_SUPPORTED if SMP
help
This option signifies the use of an ARMv8-R AArch32 processor
implementation.
@@ -188,3 +189,6 @@ config ICACHE_LINE_SIZE
default 32

endif # CPU_AARCH32_CORTEX_R

config TEST_EXTRA_STACK_SIZE
default 1024 if SMP
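Selecting `SCHED_IPI_SUPPORTED` under SMP tells the scheduler it may interrupt other cores when a cross-CPU reschedule is needed; the arch side of that lives in the new `smp.c`. The sketch below is conceptual only: `raise_sgi_on_other_cores()` and `SGI_SCHED_IPI` are hypothetical placeholders for whatever GIC SGI helper and SGI number the port actually uses.

```c
/* Conceptual sketch only; helper name and SGI number are assumptions. */
#include <zephyr/kernel.h>

#define SGI_SCHED_IPI 0  /* assumed SGI reserved for rescheduling */

extern void raise_sgi_on_other_cores(unsigned int sgi_id); /* hypothetical */
extern void z_sched_ipi(void); /* kernel hook that runs the IPI reschedule check */

void arch_sched_ipi(void)
{
	/* Ask every other core to re-evaluate its run queue. */
	raise_sgi_on_other_cores(SGI_SCHED_IPI);
}

/* Registered on each core as the handler for SGI_SCHED_IPI. */
static void sched_ipi_handler(const void *unused)
{
	ARG_UNUSED(unused);
	z_sched_ipi();
}
```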
5 changes: 4 additions & 1 deletion arch/arm/core/cortex_a_r/__aeabi_read_tp.S
@@ -11,5 +11,8 @@ _ASM_FILE_PROLOGUE
GTEXT(__aeabi_read_tp)

SECTION_FUNC(text, __aeabi_read_tp)
mrc 15, 0, r0, c13, c0, 3
/*
 * TPIDRURW is used as the base pointer to the TLS area.
*/
mrc 15, 0, r0, c13, c0, 2
bx lr
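The helper above now reads TPIDRURW (CP15 c13, c0, 2) rather than TPIDRURO (c13, c0, 3) as the TLS base. For illustration only, the same read expressed as C with inline assembly:

```c
/* Illustrative C equivalent of __aeabi_read_tp: read TPIDRURW, which the
 * port now uses to hold the per-thread TLS base pointer.
 */
#include <stdint.h>

static inline void *read_tls_base(void)
{
	uint32_t tp;

	/* MRC p15, 0, <Rt>, c13, c0, 2 -- TPIDRURW */
	__asm__ volatile("mrc p15, 0, %0, c13, c0, 2" : "=r"(tp));

	return (void *)tp;
}
```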
30 changes: 30 additions & 0 deletions arch/arm/core/cortex_a_r/boot.h
@@ -0,0 +1,30 @@
/*
* Copyright (c) 2023 Arm Limited (or its affiliates). All rights reserved.
* SPDX-License-Identifier: Apache-2.0
*/

/**
* @file
* @brief Definitions for boot code
*/

#ifndef _BOOT_H_
#define _BOOT_H_

#ifndef _ASMLANGUAGE

extern void *_vector_table[];
extern void __start(void);

#endif /* _ASMLANGUAGE */

/* Offsets into the boot_params structure */
#define BOOT_PARAM_MPID_OFFSET 0
#define BOOT_PARAM_IRQ_SP_OFFSET 4
#define BOOT_PARAM_FIQ_SP_OFFSET 8
#define BOOT_PARAM_ABT_SP_OFFSET 12
#define BOOT_PARAM_UDF_SP_OFFSET 16
#define BOOT_PARAM_SVC_SP_OFFSET 20
#define BOOT_PARAM_SYS_SP_OFFSET 24

#endif /* _BOOT_H_ */
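The `BOOT_PARAM_*_OFFSET` values above describe a per-CPU boot parameter record that the boot assembly indexes by hand. A C layout consistent with those offsets might look like the following; the struct and field names are assumptions, only the offsets come from the header.

```c
/* Field names are illustrative; the offsets are what the boot assembly
 * relies on, so they are pinned with a build-time check.
 */
#include <stddef.h>
#include <stdint.h>
#include <zephyr/toolchain.h>

struct boot_params {
	uint32_t mpid;    /* BOOT_PARAM_MPID_OFFSET   = 0  */
	uint32_t irq_sp;  /* BOOT_PARAM_IRQ_SP_OFFSET = 4  */
	uint32_t fiq_sp;  /* BOOT_PARAM_FIQ_SP_OFFSET = 8  */
	uint32_t abt_sp;  /* BOOT_PARAM_ABT_SP_OFFSET = 12 */
	uint32_t udf_sp;  /* BOOT_PARAM_UDF_SP_OFFSET = 16 */
	uint32_t svc_sp;  /* BOOT_PARAM_SVC_SP_OFFSET = 20 */
	uint32_t sys_sp;  /* BOOT_PARAM_SYS_SP_OFFSET = 24 */
};

/* Keep the C and assembly views of the layout from drifting apart. */
BUILD_ASSERT(offsetof(struct boot_params, sys_sp) == 24);
```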
71 changes: 65 additions & 6 deletions arch/arm/core/cortex_a_r/exc.S
@@ -27,6 +27,7 @@
#include <zephyr/linker/sections.h>
#include <offsets_short.h>
#include <zephyr/arch/cpu.h>
#include "macro_priv.inc"

_ASM_FILE_PROLOGUE

@@ -41,6 +42,8 @@ GTEXT(z_arm_undef_instruction)
GTEXT(z_arm_prefetch_abort)
GTEXT(z_arm_data_abort)

#ifndef CONFIG_USE_SWITCH

.macro exception_entry mode
/*
* Store r0-r3, r12, lr, lr_und and spsr_und into the stack to
@@ -86,10 +89,10 @@ GTEXT(z_arm_data_abort)
#endif

/* Increment exception nesting count */
ldr r2, =_kernel
ldr r1, [r2, #_kernel_offset_to_nested]
get_cpu r2
ldr r1, [r2, #___cpu_t_nested_OFFSET]
add r1, r1, #1
str r1, [r2, #_kernel_offset_to_nested]
str r1, [r2, #___cpu_t_nested_OFFSET]
.endm

.macro exception_exit
@@ -128,10 +131,10 @@ SECTION_SUBSEC_FUNC(TEXT, __exc, z_arm_undef_instruction)
sub sp, #24

/* Increment exception nesting count */
ldr r2, =_kernel
ldr r1, [r2, #_kernel_offset_to_nested]
get_cpu r2
ldr r1, [r2, #___cpu_t_nested_OFFSET]
add r1, r1, #1
str r1, [r2, #_kernel_offset_to_nested]
str r1, [r2, #___cpu_t_nested_OFFSET]

#if defined(CONFIG_FPU_SHARING)
sub sp, #___fpu_t_SIZEOF
@@ -232,3 +235,59 @@ SECTION_SUBSEC_FUNC(TEXT, __exc, z_arm_data_abort)
streq r1, [sp, #24 + FPU_SF_SIZE]

b z_arm_exc_exit

#else
/**
* @brief Undefined instruction exception handler
*
* An undefined instruction (UNDEF) exception is generated when an undefined
* instruction, or a VFP instruction when the VFP is not enabled, is
* encountered.
*/
SECTION_SUBSEC_FUNC(TEXT, __exc, z_arm_undef_instruction)
/*
* The undefined instruction address is offset by 2 if the previous
* mode is Thumb; otherwise, it is offset by 4.
*/
push {r0}
mrs r0, spsr
tst r0, #T_BIT
subeq lr, #4 /* ARM (!T_BIT) */
subne lr, #2 /* Thumb (T_BIT) */
pop {r0}

z_arm_cortex_ar_enter_exc
bl z_arm_fault_undef_instruction
b z_arm_cortex_ar_exit_exc

/**
* @brief Prefetch abort exception handler
*
* A prefetch abort (PABT) exception is generated when the processor marks the
* prefetched instruction as invalid and the instruction is executed.
*/
SECTION_SUBSEC_FUNC(TEXT, __exc, z_arm_prefetch_abort)
/*
* The faulting instruction address is always offset by 4 for the
* prefetch abort exceptions.
*/
sub lr, #4
z_arm_cortex_ar_enter_exc
bl z_arm_fault_prefetch
b z_arm_cortex_ar_exit_exc

/**
* @brief Data abort exception handler
*
* A data abort (DABT) exception is generated when an error occurs on a data
* memory access. This exception can be either synchronous or asynchronous,
* depending on the type of fault that caused it.
*/
SECTION_SUBSEC_FUNC(TEXT, __exc, z_arm_data_abort)
sub lr, #8

z_arm_cortex_ar_enter_exc
bl z_arm_fault_data
b z_arm_cortex_ar_exit_exc

#endif
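In the `USE_SWITCH` handlers above, each fault entry first rewinds `lr` to the faulting instruction before running the common exception prologue: the adjustment is fixed for prefetch aborts (4) and data aborts (8), while the undefined-instruction offset depends on whether the interrupted code was Thumb. A small C sketch of that rule; the helper names and the `SPSR_T_BIT` constant are illustrative, not part of the PR.

```c
/* Sketch of the lr fix-up rule used by the handlers above.
 * SPSR_T_BIT is the Thumb state bit (bit 5) of the saved PSR.
 */
#include <stdint.h>

#define SPSR_T_BIT (1U << 5)

static inline uint32_t undef_fault_address(uint32_t lr_und, uint32_t spsr_und)
{
	/* Thumb: preferred return address is 2 bytes past the undefined
	 * instruction; ARM: 4 bytes past it.
	 */
	return lr_und - ((spsr_und & SPSR_T_BIT) ? 2U : 4U);
}

static inline uint32_t pabt_fault_address(uint32_t lr_abt)
{
	return lr_abt - 4U;  /* prefetch abort: always -4 */
}

static inline uint32_t dabt_fault_address(uint32_t lr_abt)
{
	return lr_abt - 8U;  /* data abort: always -8 */
}
```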
40 changes: 21 additions & 19 deletions arch/arm/core/cortex_a_r/exc_exit.S
@@ -18,6 +18,7 @@
#include <zephyr/linker/sections.h>
#include <offsets_short.h>
#include <zephyr/arch/cpu.h>
#include "macro_priv.inc"

_ASM_FILE_PROLOGUE

@@ -52,8 +53,8 @@ GDATA(_kernel)
bne system_thread_exit\@

/* Restore user stack pointer */
ldr r0, =_kernel
ldr r0, [r0, #_kernel_offset_to_current]
get_cpu r0
ldr r0, [r0, #___cpu_t_current_OFFSET]
cps #MODE_SYS
ldr sp, [r0, #_thread_offset_to_sp_usr] /* sp_usr */
cps #MODE_SVC
@@ -68,8 +69,8 @@ system_thread_exit\@:
* If the floating point context pointer is null, then a context was
* saved so restore the float context from the exception stack frame.
*/
ldr r2, =_kernel
ldr r1, [r2, #_kernel_offset_to_fp_ctx]
get_cpu r2
ldr r1, [r2, #___cpu_t_fp_ctx_OFFSET]
cmp r1, #0
beq vfp_restore\@

@@ -79,7 +80,7 @@ system_thread_exit\@:
*/
cmp r0, #0
moveq r1, #0
streq r1, [r2, #_kernel_offset_to_fp_ctx]
streq r1, [r2, #___cpu_t_fp_ctx_OFFSET]
b vfp_exit\@

vfp_restore\@:
@@ -140,23 +141,24 @@ SECTION_SUBSEC_FUNC(TEXT, _HandlerModeExit, z_arm_int_exit)

#ifdef CONFIG_PREEMPT_ENABLED
/* Do not context switch if exiting a nested interrupt */
ldr r3, =_kernel
ldr r0, [r3, #_kernel_offset_to_nested]
get_cpu r3
ldr r0, [r3, #___cpu_t_nested_OFFSET]
cmp r0, #1
bhi __EXIT_INT

ldr r1, [r3, #_kernel_offset_to_current]
ldr r0, [r3, #_kernel_offset_to_ready_q_cache]
ldr r1, [r3, #___cpu_t_current_OFFSET]
ldr r2, =_kernel
ldr r0, [r2, #_kernel_offset_to_ready_q_cache]
cmp r0, r1
blne z_arm_do_swap
__EXIT_INT:
#endif /* CONFIG_PREEMPT_ENABLED */

/* Decrement interrupt nesting count */
ldr r2, =_kernel
ldr r0, [r2, #_kernel_offset_to_nested]
get_cpu r2
ldr r0, [r2, #___cpu_t_nested_OFFSET]
sub r0, r0, #1
str r0, [r2, #_kernel_offset_to_nested]
str r0, [r2, #___cpu_t_nested_OFFSET]

/* Restore previous stack pointer */
pop {r2, r3}
@@ -207,8 +209,8 @@
*/
SECTION_SUBSEC_FUNC(TEXT, _HandlerModeExit, z_arm_exc_exit)
/* Do not context switch if exiting a nested exception */
ldr r3, =_kernel
ldr r1, [r3, #_kernel_offset_to_nested]
get_cpu r3
ldr r1, [r3, #___cpu_t_nested_OFFSET]
cmp r1, #1
bhi __EXIT_EXC

@@ -239,10 +241,10 @@ SECTION_SUBSEC_FUNC(TEXT, _HandlerModeExit, z_arm_exc_exit)
bl z_arm_do_swap

/* Decrement exception nesting count */
ldr r3, =_kernel
ldr r0, [r3, #_kernel_offset_to_nested]
get_cpu r3
ldr r0, [r3, #___cpu_t_nested_OFFSET]
sub r0, r0, #1
str r0, [r3, #_kernel_offset_to_nested]
str r0, [r3, #___cpu_t_nested_OFFSET]

/* Return to the switched thread */
cps #MODE_SYS
@@ -255,9 +257,9 @@ SECTION_SUBSEC_FUNC(TEXT, _HandlerModeExit, z_arm_exc_exit)

__EXIT_EXC:
/* Decrement exception nesting count */
ldr r0, [r3, #_kernel_offset_to_nested]
ldr r0, [r3, #___cpu_t_nested_OFFSET]
sub r0, r0, #1
str r0, [r3, #_kernel_offset_to_nested]
str r0, [r3, #___cpu_t_nested_OFFSET]

#if defined(CONFIG_FPU_SHARING)
add sp, sp, #___fpu_t_SIZEOF
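The recurring change in this file is that per-CPU state (`nested`, `current`, `fp_ctx`) is now reached through the `get_cpu` macro and `___cpu_t_*_OFFSET` offsets instead of hard-coded `_kernel` offsets, since under SMP each core owns its own copies. A conceptual C rendering of the preemption check in `z_arm_int_exit`; the real logic is the assembly above, and this sketch assumes access to the kernel's internal `_current_cpu` and `_kernel.ready_q.cache` fields.

```c
#include <zephyr/kernel.h>

/* Sketch of the "should we context switch on interrupt exit?" test. */
static inline bool int_exit_should_swap(void)
{
	/* Never switch while still inside a nested interrupt. */
	if (_current_cpu->nested > 1) {
		return false;
	}

	/* Swap only if the cached next-ready thread is not the thread
	 * this CPU is currently running.
	 */
	return _kernel.ready_q.cache != _current_cpu->current;
}
```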
10 changes: 5 additions & 5 deletions arch/arm/core/cortex_a_r/fault.c
@@ -147,20 +147,20 @@ bool z_arm_fault_undef_instruction_fp(void)

__set_FPEXC(FPEXC_EN);

if (_kernel.cpus[0].nested > 1) {
if (_current_cpu->nested > 1) {
/*
* If the nested count is greater than 1, the undefined
* instruction exception came from an irq/svc context. (The
* irq/svc handler would have the nested count at 1 and then
* the undef exception would increment it to 2).
*/
struct __fpu_sf *spill_esf =
(struct __fpu_sf *)_kernel.cpus[0].fp_ctx;
(struct __fpu_sf *)_current_cpu->fp_ctx;

if (spill_esf == NULL)
return false;

_kernel.cpus[0].fp_ctx = NULL;
_current_cpu->fp_ctx = NULL;

/*
* If the nested count is 2 and the current thread has used the
@@ -170,9 +170,9 @@ bool z_arm_fault_undef_instruction_fp(void)
* saved exception stack frame, then save the floating point
* context because it is about to be overwritten.
*/
if (((_kernel.cpus[0].nested == 2)
if (((_current_cpu->nested == 2)
&& (_current->base.user_options & K_FP_REGS))
|| ((_kernel.cpus[0].nested > 2)
|| ((_current_cpu->nested > 2)
&& (spill_esf->undefined & FPEXC_EN))) {
/*
* Spill VFP registers to specified exception stack
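The fault.c changes follow the same rule: the FP spill context and exception nesting count belong to whichever core took the UNDEF exception, so hard-coded `_kernel.cpus[0]` accesses become `_current_cpu`. The general pattern, sketched under the assumption of the same `CONFIG_FPU_SHARING` build that gates the `fp_ctx` field:

```c
#include <zephyr/kernel.h>

/* Pre-SMP code could get away with indexing CPU 0 directly:
 *     _kernel.cpus[0].fp_ctx = NULL;   (only correct on CPU 0)
 * With SMP enabled, the state of interest is always that of the
 * executing core.
 */
static inline void clear_fp_spill_context(void)
{
	_current_cpu->fp_ctx = NULL;
}
```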