@@ -4,12 +4,6 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-/*
- * Reset handler
- *
- * Reset handler that prepares the system for running C code.
- */
-
 #include <toolchain.h>
 #include <linker/sections.h>
 #include <arch/cpu.h>
@@ -19,128 +13,126 @@
 _ASM_FILE_PROLOGUE
 
 /*
- * Platform may do platform specific init at EL3.
- * The function implementation must preserve callee saved registers as per
- * AArch64 ABI PCS.
+ * Platform specific pre-C init code
+ *
+ * Note: - Stack is not yet available
+ *       - x23 must be preserved
  */
 
-WTEXT(z_arch_el3_plat_init)
-SECTION_FUNC(TEXT,z_arch_el3_plat_init)
-	ret
+WTEXT(z_arm64_el3_plat_prep_c)
+SECTION_FUNC(TEXT,z_arm64_el3_plat_prep_c)
+	ret
+
+WTEXT(z_arm64_el2_plat_prep_c)
+SECTION_FUNC(TEXT,z_arm64_el2_plat_prep_c)
+	ret
+
+WTEXT(z_arm64_el1_plat_prep_c)
+SECTION_FUNC(TEXT,z_arm64_el1_plat_prep_c)
+	ret
 
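WTEXT emits each of these hooks as a weak symbol, so a platform can ship a strong definition that runs before any C code. A minimal sketch of such an override, under the constraints stated in the comment above (no stack, x23 preserved); the early-UART register UART_EN_ADDR and its enable bit are hypothetical placeholders, not a real device map:

GTEXT(z_arm64_el1_plat_prep_c)
SECTION_FUNC(TEXT,z_arm64_el1_plat_prep_c)
	ldr	x0, =UART_EN_ADDR	/* hypothetical early-UART enable register */
	mov	w1, #1			/* hypothetical enable bit */
	str	w1, [x0]		/* clobbers only x0/w1; x23 left intact */
	ret				/* returns via x30; no stack touched */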
 /*
- * Reset vector
- *
- * Ran when the system comes out of reset. The processor is in thread mode with
- * privileged level. At this point, neither SP_EL0 nor SP_ELx point to a valid
- * area in SRAM.
- *
- * When these steps are completed, jump to z_arm64_prep_c(), which will finish
- * setting up the system for running C code.
+ * Set the minimum necessary to safely call C code
  */
 
-GTEXT(__reset)
-SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset)
+GTEXT(__reset_prep_c)
+SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset_prep_c)
+	/* return address: x23 */
+	mov	x23, x30
 
-GTEXT(__start)
-SECTION_SUBSEC_FUNC(TEXT,_reset_section,__start)
+	switch_el x0, 3f, 2f, 1f
+3:
+	/* Reinitialize SCTLR from scratch in EL3 */
+	ldr	w0, =(SCTLR_EL3_RES1 | SCTLR_I_BIT | SCTLR_SA_BIT)
+	msr	sctlr_el3, x0
 
-	/* Setup vector table */
-	adr	x19, _vector_table
+	/* Custom plat prep_c init */
+	bl	z_arm64_el3_plat_prep_c
 
-	switch_el x1, 3f, 2f, 1f
-3:
-	/*
-	 * Zephyr entry happened in EL3. Do EL3 specific init before
-	 * dropping to lower EL.
-	 */
+	b	out
+2:
+	/* Disable alignment fault checking */
+	mrs	x0, sctlr_el2
+	bic	x0, x0, SCTLR_A_BIT
+	msr	sctlr_el2, x0
+
+	/* Custom plat prep_c init */
+	bl	z_arm64_el2_plat_prep_c
 
-	/* Initialize VBAR */
-	msr	vbar_el3, x19
+	b	out
+1:
+	/* Disable alignment fault checking */
+	mrs	x0, sctlr_el1
+	bic	x0, x0, SCTLR_A_BIT
+	msr	sctlr_el1, x0
+
+	/* Custom plat prep_c init */
+	bl	z_arm64_el1_plat_prep_c
+
+out:
 	isb
 
-	/* Switch to SP_EL0 and setup the stack */
-	msr	spsel, #0
+	/* Select SP_EL0 */
+	msr	SPSel, #0
 
+	/* Initialize stack */
 	ldr	x0, =(z_interrupt_stacks)
 	add	x0, x0, #(CONFIG_ISR_STACK_SIZE)
 	mov	sp, x0
 
-	/* Initialize SCTLR_EL3 to reset value */
-	mov_imm	x1, SCTLR_EL3_RES1
-	mrs	x0, sctlr_el3
-	orr	x0, x0, x1
-	msr	sctlr_el3, x0
-	isb
+	ret	x23
 
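A note on the convention here: with no stack available, __reset_prep_c cannot save the link register in memory, so it parks the caller's return address in x23 before the nested bl to a plat hook clobbers x30, and returns with ret x23; that is also why the hooks above must leave x23 alone. The switch_el macro used to pick a branch per exception level is defined elsewhere in the arch headers; roughly, assuming the standard CurrentEL encoding (the EL number in bits [3:2]), it behaves like:

	mrs	x0, CurrentEL		/* first macro argument is the scratch register */
	lsr	x0, x0, #2		/* extract the EL number */
	cmp	x0, #3
	b.eq	3f			/* EL3 label */
	cmp	x0, #2
	b.eq	2f			/* EL2 label */
	b	1f			/* EL1 label */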
-	/*
-	 * Disable access traps to EL3 for CPACR, Trace, FP, ASIMD,
-	 * SVE from lower EL.
-	 */
-	mov_imm	x0, CPTR_EL3_RES0
-	mov_imm	x1, (CPTR_EL3_TTA_BIT | CPTR_EL3_TFP_BIT | CPTR_EL3_TCPAC_BIT)
-	bic	x0, x0, x1
-	orr	x0, x0, #(CPTR_EL3_EZ_BIT)
-	msr	cptr_el3, x0
-	isb
+/*
+ * Reset vector
+ *
+ * Run when the system comes out of reset. The processor is in thread mode with
+ * privileged level. At this point, neither SP_EL0 nor SP_ELx point to a valid
+ * area in SRAM.
+ */
 
-	/* Platform specific configurations needed in EL3 */
-	bl	z_arch_el3_plat_init
+GTEXT(__reset)
+SECTION_SUBSEC_FUNC(TEXT,_reset_section,__reset)
 
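As defined here, __reset has no body of its own and falls straight through into __start, so a cold reset and a warm jump to __start from a loader take the same path. The first thing __start does is mask all four PSTATE exception bits at once; in the DAIFSet immediate, each bit maps to one mask, so #0xf covers them all:

	msr	DAIFSet, #0xf	/* bit3=D(ebug), bit2=A(SError), bit1=I(RQ), bit0=F(IQ) */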
-	/* Enable access control configuration from lower EL */
-	mrs	x0, actlr_el3
-	orr	x0, x0, #(ACTLR_EL3_L2ACTLR_BIT | ACTLR_EL3_L2ECTLR_BIT \
-		  | ACTLR_EL3_L2CTLR_BIT)
-	orr	x0, x0, #(ACTLR_EL3_CPUACTLR_BIT | ACTLR_EL3_CPUECTLR_BIT)
-	msr	actlr_el3, x0
+GTEXT(__start)
+SECTION_SUBSEC_FUNC(TEXT,_reset_section,__start)
+	/* Mask all exceptions */
+	msr	DAIFSet, #0xf
 
-	/* Initialize SCTLR_EL1 to reset value */
-	mov_imm	x0, SCTLR_EL1_RES1
-	msr	sctlr_el1, x0
+	/* Prepare for calling C code */
+	bl	__reset_prep_c
 
-	/* Disable EA/IRQ/FIQ routing to EL3 and set EL1 to AArch64 */
-	mov	x0, xzr
-	orr	x0, x0, #(SCR_RW_BIT)
-	msr	scr_el3, x0
+	/* Platform hook for highest EL */
+	bl	z_arm64_el_highest_init
 
-	/* On eret return to secure EL1h with DAIF masked */
-	mov_imm	x0, (SPSR_DAIF_MASK | SPSR_MODE_EL1H)
-	msr	spsr_el3, x0
+switch_el:
+	switch_el x0, 3f, 2f, 1f
+3:
+	/* EL3 init */
+	bl	z_arm64_el3_init
 
-	adr	x0, 1f
-	msr	elr_el3, x0
+	/* Get next EL */
+	adr	x0, switch_el
+	bl	z_arm64_el3_get_next_el
 	eret
 
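z_arm64_el3_get_next_el is implemented outside this file and is not shown in the diff; from the call site, its contract is to take the address of the switch_el label in x0 and program ELR_EL3/SPSR_EL3 so that the following eret re-enters at switch_el in the next lower EL, where the dispatcher runs again. A plausible sketch of that contract (the EL2t target mode is illustrative only; the real code may pick a different mode depending on configuration):

	/* x0 = resume address (the switch_el label) */
	msr	elr_el3, x0				/* eret will land here */
	mov_imm	x1, (SPSR_DAIF_MASK | SPSR_MODE_EL2T)	/* illustrative target mode */
	msr	spsr_el3, x1
	ret						/* back to __start, which issues eret */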
 2:
-	/* Booting from EL2 is not supported */
-	b	.
-
-1:
-	/* Initialize VBAR */
-	msr	vbar_el1, x19
-	isb
+	/* EL2 init */
+	bl	z_arm64_el2_init
 
-	/* Switch to SP_EL0 and setup the stack */
-	msr	spsel, #0
-
-	ldr	x0, =(z_interrupt_stacks)
-	add	x0, x0, #(CONFIG_ISR_STACK_SIZE)
-	mov	sp, x0
+	/* Move to EL1 with all exceptions masked */
+	mov_imm	x0, (SPSR_DAIF_MASK | SPSR_MODE_EL1T)
+	msr	spsr_el2, x0
 
-	/* Disable access trapping in EL1 for NEON/FP */
-	mov_imm	x0, CPACR_EL1_FPEN_NOTRAP
-	msr	cpacr_el1, x0
+	adr	x0, 1f
+	msr	elr_el2, x0
+	eret
 
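The EL2 leg drops to EL1 the same way it is done from EL3: spsr_el2 is loaded with EL1t (SP_EL0 selected) plus all DAIF bits masked, elr_el2 points at the 1: label, and the exception return then performs, atomically and in one step, roughly:

	/* eret, conceptually: */
	/*   PSTATE <- SPSR_EL2    (EL1t, D/A/I/F masked) */
	/*   PC     <- ELR_EL2     (the 1: label below, now executing in EL1) */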
-	/* Enable the instruction cache and EL1 stack alignment check. */
-	mov_imm	x1, (SCTLR_I_BIT | SCTLR_SA_BIT)
-	mrs	x0, sctlr_el1
-	orr	x0, x0, x1
-	msr	sctlr_el1, x0
+1:
+	/* EL1 init */
+	bl	z_arm64_el1_init
 
-0:
+	/* Enable SError interrupts */
+	msr	DAIFClr, #(DAIFCLR_ABT_BIT)
 	isb
 
-	/* Enable the SError interrupt */
-	msr	daifclr, #(DAIFCLR_ABT_BIT)
-
-	bl	z_arm64_prep_c
+	b	z_arm64_prep_c
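The final branch is a plain b rather than bl: z_arm64_prep_c finishes system setup and hands control to the kernel, so it is not expected to return and no link register needs saving. In summary, the boot path after this patch looks like:

/*
 *   __reset / __start
 *     -> msr DAIFSet, #0xf          (mask everything)
 *     -> __reset_prep_c             (SCTLR sanity, plat hook, SP_EL0 stack)
 *     -> z_arm64_el_highest_init    (platform hook for the entry EL)
 *     -> switch_el dispatch:        EL3 init -> eret -> EL2 init -> eret -> EL1 init
 *     -> unmask SError, b z_arm64_prep_c
 */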