 arch/x86/boot/compressed/head_32.S |  4 ++--
 arch/x86/boot/compressed/head_64.S | 18 +++++++++---------
 arch/x86/entry/entry_64.S          |  4 ++--
 arch/x86/lib/copy_user_64.S        | 14 +++++++-------
 arch/x86/lib/getuser.S             | 16 ++++++++--------
 arch/x86/lib/putuser.S             | 22 +++++++++++-----------
 6 files changed, 39 insertions(+), 39 deletions(-)
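
All of the renamed symbols are jump targets referenced only from within their own file, so the patch gives them the assembler-local .L prefix. GNU as never emits .L-prefixed labels into the object file's symbol table, which keeps these purely internal labels out of nm output, kallsyms and stack traces. A minimal sketch of the difference (the label names here are illustrative, not taken from the patch):

	.text
internal_helper:	/* plain label: emitted as a local symbol, visible to nm */
	ret
.Linternal_helper:	/* .L prefix: assembler-local, never written to the object file */
	ret

Call sites in the same file are updated mechanically to match, e.g. jmp copy_user_handle_tail becomes jmp .Lcopy_user_handle_tail.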
diff --git a/arch/x86/boot/compressed/head_32.S b/arch/x86/boot/compressed/head_32.S
index 37380c0d5999..5e30eaaf8576 100644
--- a/arch/x86/boot/compressed/head_32.S
+++ b/arch/x86/boot/compressed/head_32.S
@@ -140,7 +140,7 @@ ENTRY(startup_32)
/*
* Jump to the relocated address.
*/
- leal relocated(%ebx), %eax
+ leal .Lrelocated(%ebx), %eax
jmp *%eax
ENDPROC(startup_32)
@@ -209,7 +209,7 @@ ENDPROC(efi32_stub_entry)
#endif
.text
-relocated:
+.Lrelocated:
/*
* Clear BSS (stack is currently empty)
diff --git a/arch/x86/boot/compressed/head_64.S b/arch/x86/boot/compressed/head_64.S
index 6233ae35d0d9..d98cd483377e 100644
--- a/arch/x86/boot/compressed/head_64.S
+++ b/arch/x86/boot/compressed/head_64.S
@@ -87,7 +87,7 @@ ENTRY(startup_32)
call verify_cpu
testl %eax, %eax
- jnz no_longmode
+ jnz .Lno_longmode
/*
* Compute the delta between where we were compiled to run at
@@ -322,7 +322,7 @@ ENTRY(startup_64)
1: popq %rdi
subq $1b, %rdi
- call adjust_got
+ call .Ladjust_got
/*
* At this point we are in long mode with 4-level paging enabled,
@@ -421,7 +421,7 @@ trampoline_return:
/* The new adjustment is the relocation address */
movq %rbx, %rdi
- call adjust_got
+ call .Ladjust_got
/*
* Copy the compressed kernel to the end of our buffer
@@ -440,7 +440,7 @@ trampoline_return:
/*
* Jump to the relocated address.
*/
- leaq relocated(%rbx), %rax
+ leaq .Lrelocated(%rbx), %rax
jmp *%rax
#ifdef CONFIG_EFI_STUB
@@ -511,7 +511,7 @@ ENDPROC(efi64_stub_entry)
#endif
.text
-relocated:
+.Lrelocated:
/*
* Clear BSS (stack is currently empty)
@@ -548,7 +548,7 @@ relocated:
* first time we touch GOT).
* RDI is the new adjustment to apply.
*/
-adjust_got:
+.Ladjust_got:
/* Walk through the GOT adding the address to the entries */
leaq _got(%rip), %rdx
leaq _egot(%rip), %rcx
@@ -622,7 +622,7 @@ ENTRY(trampoline_32bit_src)
movl %eax, %cr4
/* Calculate address of paging_enabled() once we are executing in the trampoline */
- leal paging_enabled - trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_OFFSET(%ecx), %eax
+ leal .Lpaging_enabled - trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_OFFSET(%ecx), %eax
/* Prepare the stack for far return to Long Mode */
pushl $__KERNEL_CS
@@ -635,7 +635,7 @@ ENTRY(trampoline_32bit_src)
lret
.code64
-paging_enabled:
+.Lpaging_enabled:
/* Return from the trampoline */
jmp *%rdi
@@ -647,7 +647,7 @@ paging_enabled:
.org trampoline_32bit_src + TRAMPOLINE_32BIT_CODE_SIZE
.code32
-no_longmode:
+.Lno_longmode:
/* This isn't an x86-64 CPU, so hang intentionally, we cannot continue */
1:
hlt
diff --git a/arch/x86/entry/entry_64.S b/arch/x86/entry/entry_64.S
index be9ca198c581..cf273242691b 100644
--- a/arch/x86/entry/entry_64.S
+++ b/arch/x86/entry/entry_64.S
@@ -1058,10 +1058,10 @@ ENTRY(native_load_gs_index)
ENDPROC(native_load_gs_index)
EXPORT_SYMBOL(native_load_gs_index)
- _ASM_EXTABLE(.Lgs_change, bad_gs)
+ _ASM_EXTABLE(.Lgs_change, .Lbad_gs)
.section .fixup, "ax"
/* running with kernelgs */
-bad_gs:
+.Lbad_gs:
SWAPGS /* switch back to user gs */
.macro ZAP_GS
/* This can't be a string because the preprocessor needs to see it. */
diff --git a/arch/x86/lib/copy_user_64.S b/arch/x86/lib/copy_user_64.S
index 4fe1601dbc5d..86976b55ae74 100644
--- a/arch/x86/lib/copy_user_64.S
+++ b/arch/x86/lib/copy_user_64.S
@@ -33,7 +33,7 @@
102:
.section .fixup,"ax"
103: addl %ecx,%edx /* ecx is zerorest also */
- jmp copy_user_handle_tail
+ jmp .Lcopy_user_handle_tail
.previous
_ASM_EXTABLE_UA(100b, 103b)
@@ -113,7 +113,7 @@ ENTRY(copy_user_generic_unrolled)
40: leal (%rdx,%rcx,8),%edx
jmp 60f
50: movl %ecx,%edx
-60: jmp copy_user_handle_tail /* ecx is zerorest also */
+60: jmp .Lcopy_user_handle_tail /* ecx is zerorest also */
.previous
_ASM_EXTABLE_UA(1b, 30b)
@@ -177,7 +177,7 @@ ENTRY(copy_user_generic_string)
.section .fixup,"ax"
11: leal (%rdx,%rcx,8),%ecx
12: movl %ecx,%edx /* ecx is zerorest also */
- jmp copy_user_handle_tail
+ jmp .Lcopy_user_handle_tail
.previous
_ASM_EXTABLE_UA(1b, 11b)
@@ -210,7 +210,7 @@ ENTRY(copy_user_enhanced_fast_string)
.section .fixup,"ax"
12: movl %ecx,%edx /* ecx is zerorest also */
- jmp copy_user_handle_tail
+ jmp .Lcopy_user_handle_tail
.previous
_ASM_EXTABLE_UA(1b, 12b)
@@ -231,7 +231,7 @@ EXPORT_SYMBOL(copy_user_enhanced_fast_string)
* eax uncopied bytes or 0 if successful.
*/
ALIGN;
-copy_user_handle_tail:
+.Lcopy_user_handle_tail:
movl %edx,%ecx
1: rep movsb
2: mov %ecx,%eax
@@ -239,7 +239,7 @@ copy_user_handle_tail:
ret
_ASM_EXTABLE_UA(1b, 2b)
-END(copy_user_handle_tail)
+END(.Lcopy_user_handle_tail)
/*
* copy_user_nocache - Uncached memory copy with exception handling
@@ -364,7 +364,7 @@ ENTRY(__copy_user_nocache)
movl %ecx,%edx
.L_fixup_handle_tail:
sfence
- jmp copy_user_handle_tail
+ jmp .Lcopy_user_handle_tail
.previous
_ASM_EXTABLE_UA(1b, .L_fixup_4x8b_copy)
diff --git a/arch/x86/lib/getuser.S b/arch/x86/lib/getuser.S
index 304f958c27b2..9578eb88fc87 100644
--- a/arch/x86/lib/getuser.S
+++ b/arch/x86/lib/getuser.S
@@ -115,7 +115,7 @@ ENDPROC(__get_user_8)
EXPORT_SYMBOL(__get_user_8)
-bad_get_user_clac:
+.Lbad_get_user_clac:
ASM_CLAC
bad_get_user:
xor %edx,%edx
@@ -123,7 +123,7 @@ bad_get_user:
ret
#ifdef CONFIG_X86_32
-bad_get_user_8_clac:
+.Lbad_get_user_8_clac:
ASM_CLAC
bad_get_user_8:
xor %edx,%edx
@@ -132,12 +132,12 @@ bad_get_user_8:
ret
#endif
- _ASM_EXTABLE_UA(1b, bad_get_user_clac)
- _ASM_EXTABLE_UA(2b, bad_get_user_clac)
- _ASM_EXTABLE_UA(3b, bad_get_user_clac)
+ _ASM_EXTABLE_UA(1b, .Lbad_get_user_clac)
+ _ASM_EXTABLE_UA(2b, .Lbad_get_user_clac)
+ _ASM_EXTABLE_UA(3b, .Lbad_get_user_clac)
#ifdef CONFIG_X86_64
- _ASM_EXTABLE_UA(4b, bad_get_user_clac)
+ _ASM_EXTABLE_UA(4b, .Lbad_get_user_clac)
#else
- _ASM_EXTABLE_UA(4b, bad_get_user_8_clac)
- _ASM_EXTABLE_UA(5b, bad_get_user_8_clac)
+ _ASM_EXTABLE_UA(4b, .Lbad_get_user_8_clac)
+ _ASM_EXTABLE_UA(5b, .Lbad_get_user_8_clac)
#endif
diff --git a/arch/x86/lib/putuser.S b/arch/x86/lib/putuser.S
index 14bf78341d3c..126dd6a9ec9b 100644
--- a/arch/x86/lib/putuser.S
+++ b/arch/x86/lib/putuser.S
@@ -37,7 +37,7 @@
ENTRY(__put_user_1)
ENTER
cmp TASK_addr_limit(%_ASM_BX),%_ASM_CX
- jae bad_put_user
+ jae .Lbad_put_user
ASM_STAC
1: movb %al,(%_ASM_CX)
xor %eax,%eax
@@ -51,7 +51,7 @@ ENTRY(__put_user_2)
mov TASK_addr_limit(%_ASM_BX),%_ASM_BX
sub $1,%_ASM_BX
cmp %_ASM_BX,%_ASM_CX
- jae bad_put_user
+ jae .Lbad_put_user
ASM_STAC
2: movw %ax,(%_ASM_CX)
xor %eax,%eax
@@ -65,7 +65,7 @@ ENTRY(__put_user_4)
mov TASK_addr_limit(%_ASM_BX),%_ASM_BX
sub $3,%_ASM_BX
cmp %_ASM_BX,%_ASM_CX
- jae bad_put_user
+ jae .Lbad_put_user
ASM_STAC
3: movl %eax,(%_ASM_CX)
xor %eax,%eax
@@ -79,7 +79,7 @@ ENTRY(__put_user_8)
mov TASK_addr_limit(%_ASM_BX),%_ASM_BX
sub $7,%_ASM_BX
cmp %_ASM_BX,%_ASM_CX
- jae bad_put_user
+ jae .Lbad_put_user
ASM_STAC
4: mov %_ASM_AX,(%_ASM_CX)
#ifdef CONFIG_X86_32
@@ -91,16 +91,16 @@ ENTRY(__put_user_8)
ENDPROC(__put_user_8)
EXPORT_SYMBOL(__put_user_8)
-bad_put_user_clac:
+.Lbad_put_user_clac:
ASM_CLAC
-bad_put_user:
+.Lbad_put_user:
movl $-EFAULT,%eax
RET
- _ASM_EXTABLE_UA(1b, bad_put_user_clac)
- _ASM_EXTABLE_UA(2b, bad_put_user_clac)
- _ASM_EXTABLE_UA(3b, bad_put_user_clac)
- _ASM_EXTABLE_UA(4b, bad_put_user_clac)
+ _ASM_EXTABLE_UA(1b, .Lbad_put_user_clac)
+ _ASM_EXTABLE_UA(2b, .Lbad_put_user_clac)
+ _ASM_EXTABLE_UA(3b, .Lbad_put_user_clac)
+ _ASM_EXTABLE_UA(4b, .Lbad_put_user_clac)
#ifdef CONFIG_X86_32
- _ASM_EXTABLE_UA(5b, bad_put_user_clac)
+ _ASM_EXTABLE_UA(5b, .Lbad_put_user_clac)
#endif
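
One quick way to see the effect on any of these objects (a hypothetical session; the addresses are illustrative): before the patch, nm(1) reports the error-path helpers as local text symbols ('t'):

	$ nm arch/x86/lib/putuser.o | grep bad_put_user
	0000000000000084 t bad_put_user
	0000000000000089 t bad_put_user_clac

After the rename the same grep matches nothing, because .Lbad_put_user and .Lbad_put_user_clac are never emitted into the symbol table.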