Skip to content

Commit e515982

Browse files
committed
Merge branches 'for-next/asm' and 'for-next/insn' into for-next/bti
Merge in dependencies for in-kernel Branch Target Identification support.

* for-next/asm:
  arm64: Disable old style assembly annotations
  arm64: kernel: Convert to modern annotations for assembly functions
  arm64: entry: Refactor and modernise annotation for ret_to_user
  x86/asm: Provide a Kconfig symbol for disabling old assembly annotations
  x86/32: Remove CONFIG_DOUBLEFAULT

* for-next/insn:
  arm64: insn: Report PAC and BTI instructions as skippable
  arm64: insn: Don't assume unrecognized HINTs are skippable
  arm64: insn: Provide a better name for aarch64_insn_is_nop()
  arm64: insn: Add constants for new HINT instruction decode
3 parents 80e4e56 + 50479d5 + 47d67e4 commit e515982

File tree

27 files changed

+144
-126
lines changed

27 files changed

+144
-126
lines changed

arch/arm64/Kconfig

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -66,6 +66,7 @@ config ARM64
6666
select ARCH_USE_GNU_PROPERTY
6767
select ARCH_USE_QUEUED_RWLOCKS
6868
select ARCH_USE_QUEUED_SPINLOCKS
69+
select ARCH_USE_SYM_ANNOTATIONS
6970
select ARCH_SUPPORTS_MEMORY_FAILURE
7071
select ARCH_SUPPORTS_ATOMIC_RMW
7172
select ARCH_SUPPORTS_INT128 if CC_HAS_INT128 && (GCC_VERSION >= 50000 || CC_IS_CLANG)

arch/arm64/include/asm/insn.h

Lines changed: 27 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -39,13 +39,37 @@ enum aarch64_insn_encoding_class {
3939
* system instructions */
4040
};
4141

42-
enum aarch64_insn_hint_op {
42+
enum aarch64_insn_hint_cr_op {
4343
AARCH64_INSN_HINT_NOP = 0x0 << 5,
4444
AARCH64_INSN_HINT_YIELD = 0x1 << 5,
4545
AARCH64_INSN_HINT_WFE = 0x2 << 5,
4646
AARCH64_INSN_HINT_WFI = 0x3 << 5,
4747
AARCH64_INSN_HINT_SEV = 0x4 << 5,
4848
AARCH64_INSN_HINT_SEVL = 0x5 << 5,
49+
50+
AARCH64_INSN_HINT_XPACLRI = 0x07 << 5,
51+
AARCH64_INSN_HINT_PACIA_1716 = 0x08 << 5,
52+
AARCH64_INSN_HINT_PACIB_1716 = 0x0A << 5,
53+
AARCH64_INSN_HINT_AUTIA_1716 = 0x0C << 5,
54+
AARCH64_INSN_HINT_AUTIB_1716 = 0x0E << 5,
55+
AARCH64_INSN_HINT_PACIAZ = 0x18 << 5,
56+
AARCH64_INSN_HINT_PACIASP = 0x19 << 5,
57+
AARCH64_INSN_HINT_PACIBZ = 0x1A << 5,
58+
AARCH64_INSN_HINT_PACIBSP = 0x1B << 5,
59+
AARCH64_INSN_HINT_AUTIAZ = 0x1C << 5,
60+
AARCH64_INSN_HINT_AUTIASP = 0x1D << 5,
61+
AARCH64_INSN_HINT_AUTIBZ = 0x1E << 5,
62+
AARCH64_INSN_HINT_AUTIBSP = 0x1F << 5,
63+
64+
AARCH64_INSN_HINT_ESB = 0x10 << 5,
65+
AARCH64_INSN_HINT_PSB = 0x11 << 5,
66+
AARCH64_INSN_HINT_TSB = 0x12 << 5,
67+
AARCH64_INSN_HINT_CSDB = 0x14 << 5,
68+
69+
AARCH64_INSN_HINT_BTI = 0x20 << 5,
70+
AARCH64_INSN_HINT_BTIC = 0x22 << 5,
71+
AARCH64_INSN_HINT_BTIJ = 0x24 << 5,
72+
AARCH64_INSN_HINT_BTIJC = 0x26 << 5,
4973
};
5074

5175
enum aarch64_insn_imm_type {
@@ -344,7 +368,7 @@ __AARCH64_INSN_FUNCS(msr_reg, 0xFFF00000, 0xD5100000)
344368

345369
#undef __AARCH64_INSN_FUNCS
346370

347-
bool aarch64_insn_is_nop(u32 insn);
371+
bool aarch64_insn_is_steppable_hint(u32 insn);
348372
bool aarch64_insn_is_branch_imm(u32 insn);
349373

350374
static inline bool aarch64_insn_is_adr_adrp(u32 insn)
@@ -370,7 +394,7 @@ u32 aarch64_insn_gen_comp_branch_imm(unsigned long pc, unsigned long addr,
370394
enum aarch64_insn_branch_type type);
371395
u32 aarch64_insn_gen_cond_branch_imm(unsigned long pc, unsigned long addr,
372396
enum aarch64_insn_condition cond);
373-
u32 aarch64_insn_gen_hint(enum aarch64_insn_hint_op op);
397+
u32 aarch64_insn_gen_hint(enum aarch64_insn_hint_cr_op op);
374398
u32 aarch64_insn_gen_nop(void);
375399
u32 aarch64_insn_gen_branch_reg(enum aarch64_insn_register reg,
376400
enum aarch64_insn_branch_type type);

arch/arm64/kernel/cpu-reset.S

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
* branch to what would be the reset vector. It must be executed with the
3030
* flat identity mapping.
3131
*/
32-
ENTRY(__cpu_soft_restart)
32+
SYM_CODE_START(__cpu_soft_restart)
3333
/* Clear sctlr_el1 flags. */
3434
mrs x12, sctlr_el1
3535
mov_q x13, SCTLR_ELx_FLAGS
@@ -47,6 +47,6 @@ ENTRY(__cpu_soft_restart)
4747
mov x1, x3 // arg1
4848
mov x2, x4 // arg2
4949
br x8
50-
ENDPROC(__cpu_soft_restart)
50+
SYM_CODE_END(__cpu_soft_restart)
5151

5252
.popsection

arch/arm64/kernel/efi-rt-wrapper.S

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55

66
#include <linux/linkage.h>
77

8-
ENTRY(__efi_rt_asm_wrapper)
8+
SYM_FUNC_START(__efi_rt_asm_wrapper)
99
stp x29, x30, [sp, #-32]!
1010
mov x29, sp
1111

@@ -35,4 +35,4 @@ ENTRY(__efi_rt_asm_wrapper)
3535
b.ne 0f
3636
ret
3737
0: b efi_handle_corrupted_x18 // tail call
38-
ENDPROC(__efi_rt_asm_wrapper)
38+
SYM_FUNC_END(__efi_rt_asm_wrapper)

arch/arm64/kernel/entry-fpsimd.S

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -16,34 +16,34 @@
1616
*
1717
* x0 - pointer to struct fpsimd_state
1818
*/
19-
ENTRY(fpsimd_save_state)
19+
SYM_FUNC_START(fpsimd_save_state)
2020
fpsimd_save x0, 8
2121
ret
22-
ENDPROC(fpsimd_save_state)
22+
SYM_FUNC_END(fpsimd_save_state)
2323

2424
/*
2525
* Load the FP registers.
2626
*
2727
* x0 - pointer to struct fpsimd_state
2828
*/
29-
ENTRY(fpsimd_load_state)
29+
SYM_FUNC_START(fpsimd_load_state)
3030
fpsimd_restore x0, 8
3131
ret
32-
ENDPROC(fpsimd_load_state)
32+
SYM_FUNC_END(fpsimd_load_state)
3333

3434
#ifdef CONFIG_ARM64_SVE
35-
ENTRY(sve_save_state)
35+
SYM_FUNC_START(sve_save_state)
3636
sve_save 0, x1, 2
3737
ret
38-
ENDPROC(sve_save_state)
38+
SYM_FUNC_END(sve_save_state)
3939

40-
ENTRY(sve_load_state)
40+
SYM_FUNC_START(sve_load_state)
4141
sve_load 0, x1, x2, 3, x4
4242
ret
43-
ENDPROC(sve_load_state)
43+
SYM_FUNC_END(sve_load_state)
4444

45-
ENTRY(sve_get_vl)
45+
SYM_FUNC_START(sve_get_vl)
4646
_sve_rdvl 0, 1
4747
ret
48-
ENDPROC(sve_get_vl)
48+
SYM_FUNC_END(sve_get_vl)
4949
#endif /* CONFIG_ARM64_SVE */

arch/arm64/kernel/entry.S

Lines changed: 14 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -727,21 +727,10 @@ el0_error_naked:
727727
b ret_to_user
728728
SYM_CODE_END(el0_error)
729729

730-
/*
731-
* Ok, we need to do extra processing, enter the slow path.
732-
*/
733-
work_pending:
734-
mov x0, sp // 'regs'
735-
bl do_notify_resume
736-
#ifdef CONFIG_TRACE_IRQFLAGS
737-
bl trace_hardirqs_on // enabled while in userspace
738-
#endif
739-
ldr x1, [tsk, #TSK_TI_FLAGS] // re-check for single-step
740-
b finish_ret_to_user
741730
/*
742731
* "slow" syscall return path.
743732
*/
744-
ret_to_user:
733+
SYM_CODE_START_LOCAL(ret_to_user)
745734
disable_daif
746735
gic_prio_kentry_setup tmp=x3
747736
ldr x1, [tsk, #TSK_TI_FLAGS]
@@ -753,7 +742,19 @@ finish_ret_to_user:
753742
bl stackleak_erase
754743
#endif
755744
kernel_exit 0
756-
ENDPROC(ret_to_user)
745+
746+
/*
747+
* Ok, we need to do extra processing, enter the slow path.
748+
*/
749+
work_pending:
750+
mov x0, sp // 'regs'
751+
bl do_notify_resume
752+
#ifdef CONFIG_TRACE_IRQFLAGS
753+
bl trace_hardirqs_on // enabled while in userspace
754+
#endif
755+
ldr x1, [tsk, #TSK_TI_FLAGS] // re-check for single-step
756+
b finish_ret_to_user
757+
SYM_CODE_END(ret_to_user)
757758

758759
.popsection // .entry.text
759760

arch/arm64/kernel/hibernate-asm.S

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -65,7 +65,7 @@
6565
* x5: physical address of a zero page that remains zero after resume
6666
*/
6767
.pushsection ".hibernate_exit.text", "ax"
68-
ENTRY(swsusp_arch_suspend_exit)
68+
SYM_CODE_START(swsusp_arch_suspend_exit)
6969
/*
7070
* We execute from ttbr0, change ttbr1 to our copied linear map tables
7171
* with a break-before-make via the zero page
@@ -110,7 +110,7 @@ ENTRY(swsusp_arch_suspend_exit)
110110
cbz x24, 3f /* Do we need to re-initialise EL2? */
111111
hvc #0
112112
3: ret
113-
ENDPROC(swsusp_arch_suspend_exit)
113+
SYM_CODE_END(swsusp_arch_suspend_exit)
114114

115115
/*
116116
* Restore the hyp stub.
@@ -119,15 +119,15 @@ ENDPROC(swsusp_arch_suspend_exit)
119119
*
120120
* x24: The physical address of __hyp_stub_vectors
121121
*/
122-
el1_sync:
122+
SYM_CODE_START_LOCAL(el1_sync)
123123
msr vbar_el2, x24
124124
eret
125-
ENDPROC(el1_sync)
125+
SYM_CODE_END(el1_sync)
126126

127127
.macro invalid_vector label
128-
\label:
128+
SYM_CODE_START_LOCAL(\label)
129129
b \label
130-
ENDPROC(\label)
130+
SYM_CODE_END(\label)
131131
.endm
132132

133133
invalid_vector el2_sync_invalid
@@ -141,7 +141,7 @@ ENDPROC(\label)
141141

142142
/* el2 vectors - switch el2 here while we restore the memory image. */
143143
.align 11
144-
ENTRY(hibernate_el2_vectors)
144+
SYM_CODE_START(hibernate_el2_vectors)
145145
ventry el2_sync_invalid // Synchronous EL2t
146146
ventry el2_irq_invalid // IRQ EL2t
147147
ventry el2_fiq_invalid // FIQ EL2t
@@ -161,6 +161,6 @@ ENTRY(hibernate_el2_vectors)
161161
ventry el1_irq_invalid // IRQ 32-bit EL1
162162
ventry el1_fiq_invalid // FIQ 32-bit EL1
163163
ventry el1_error_invalid // Error 32-bit EL1
164-
END(hibernate_el2_vectors)
164+
SYM_CODE_END(hibernate_el2_vectors)
165165

166166
.popsection

arch/arm64/kernel/hyp-stub.S

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121

2222
.align 11
2323

24-
ENTRY(__hyp_stub_vectors)
24+
SYM_CODE_START(__hyp_stub_vectors)
2525
ventry el2_sync_invalid // Synchronous EL2t
2626
ventry el2_irq_invalid // IRQ EL2t
2727
ventry el2_fiq_invalid // FIQ EL2t
@@ -41,11 +41,11 @@ ENTRY(__hyp_stub_vectors)
4141
ventry el1_irq_invalid // IRQ 32-bit EL1
4242
ventry el1_fiq_invalid // FIQ 32-bit EL1
4343
ventry el1_error_invalid // Error 32-bit EL1
44-
ENDPROC(__hyp_stub_vectors)
44+
SYM_CODE_END(__hyp_stub_vectors)
4545

4646
.align 11
4747

48-
el1_sync:
48+
SYM_CODE_START_LOCAL(el1_sync)
4949
cmp x0, #HVC_SET_VECTORS
5050
b.ne 2f
5151
msr vbar_el2, x1
@@ -68,12 +68,12 @@ el1_sync:
6868

6969
9: mov x0, xzr
7070
eret
71-
ENDPROC(el1_sync)
71+
SYM_CODE_END(el1_sync)
7272

7373
.macro invalid_vector label
74-
\label:
74+
SYM_CODE_START_LOCAL(\label)
7575
b \label
76-
ENDPROC(\label)
76+
SYM_CODE_END(\label)
7777
.endm
7878

7979
invalid_vector el2_sync_invalid
@@ -106,15 +106,15 @@ ENDPROC(\label)
106106
* initialisation entry point.
107107
*/
108108

109-
ENTRY(__hyp_set_vectors)
109+
SYM_FUNC_START(__hyp_set_vectors)
110110
mov x1, x0
111111
mov x0, #HVC_SET_VECTORS
112112
hvc #0
113113
ret
114-
ENDPROC(__hyp_set_vectors)
114+
SYM_FUNC_END(__hyp_set_vectors)
115115

116-
ENTRY(__hyp_reset_vectors)
116+
SYM_FUNC_START(__hyp_reset_vectors)
117117
mov x0, #HVC_RESET_VECTORS
118118
hvc #0
119119
ret
120-
ENDPROC(__hyp_reset_vectors)
120+
SYM_FUNC_END(__hyp_reset_vectors)

arch/arm64/kernel/insn.c

Lines changed: 22 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -51,21 +51,33 @@ enum aarch64_insn_encoding_class __kprobes aarch64_get_insn_class(u32 insn)
5151
return aarch64_insn_encoding_class[(insn >> 25) & 0xf];
5252
}
5353

54-
/* NOP is an alias of HINT */
55-
bool __kprobes aarch64_insn_is_nop(u32 insn)
54+
bool __kprobes aarch64_insn_is_steppable_hint(u32 insn)
5655
{
5756
if (!aarch64_insn_is_hint(insn))
5857
return false;
5958

6059
switch (insn & 0xFE0) {
61-
case AARCH64_INSN_HINT_YIELD:
62-
case AARCH64_INSN_HINT_WFE:
63-
case AARCH64_INSN_HINT_WFI:
64-
case AARCH64_INSN_HINT_SEV:
65-
case AARCH64_INSN_HINT_SEVL:
66-
return false;
67-
default:
60+
case AARCH64_INSN_HINT_XPACLRI:
61+
case AARCH64_INSN_HINT_PACIA_1716:
62+
case AARCH64_INSN_HINT_PACIB_1716:
63+
case AARCH64_INSN_HINT_AUTIA_1716:
64+
case AARCH64_INSN_HINT_AUTIB_1716:
65+
case AARCH64_INSN_HINT_PACIAZ:
66+
case AARCH64_INSN_HINT_PACIASP:
67+
case AARCH64_INSN_HINT_PACIBZ:
68+
case AARCH64_INSN_HINT_PACIBSP:
69+
case AARCH64_INSN_HINT_AUTIAZ:
70+
case AARCH64_INSN_HINT_AUTIASP:
71+
case AARCH64_INSN_HINT_AUTIBZ:
72+
case AARCH64_INSN_HINT_AUTIBSP:
73+
case AARCH64_INSN_HINT_BTI:
74+
case AARCH64_INSN_HINT_BTIC:
75+
case AARCH64_INSN_HINT_BTIJ:
76+
case AARCH64_INSN_HINT_BTIJC:
77+
case AARCH64_INSN_HINT_NOP:
6878
return true;
79+
default:
80+
return false;
6981
}
7082
}
7183

@@ -574,7 +586,7 @@ u32 aarch64_insn_gen_cond_branch_imm(unsigned long pc, unsigned long addr,
574586
offset >> 2);
575587
}
576588

577-
u32 __kprobes aarch64_insn_gen_hint(enum aarch64_insn_hint_op op)
589+
u32 __kprobes aarch64_insn_gen_hint(enum aarch64_insn_hint_cr_op op)
578590
{
579591
return aarch64_insn_get_hint_value() | op;
580592
}

arch/arm64/kernel/probes/decode-insn.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@ static bool __kprobes aarch64_insn_is_steppable(u32 insn)
4646
* except for the NOP case.
4747
*/
4848
if (aarch64_insn_is_hint(insn))
49-
return aarch64_insn_is_nop(insn);
49+
return aarch64_insn_is_steppable_hint(insn);
5050

5151
return true;
5252
}

arch/arm64/kernel/probes/kprobes_trampoline.S

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,7 @@
6161
ldp x28, x29, [sp, #S_X28]
6262
.endm
6363

64-
ENTRY(kretprobe_trampoline)
64+
SYM_CODE_START(kretprobe_trampoline)
6565
sub sp, sp, #S_FRAME_SIZE
6666

6767
save_all_base_regs
@@ -79,4 +79,4 @@ ENTRY(kretprobe_trampoline)
7979
add sp, sp, #S_FRAME_SIZE
8080
ret
8181

82-
ENDPROC(kretprobe_trampoline)
82+
SYM_CODE_END(kretprobe_trampoline)

0 commit comments

Comments (0)