Commit 64b028b2 authored by Ingo Molnar

x86: Clean up the loadsegment() macro

Make it readable in the source too, not just in the assembly output.
No change in functionality.

Cc: Brian Gerst <brgerst@gmail.com>
LKML-Reference: <1259176706-5908-1-git-send-email-brgerst@gmail.com>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
parent 79b0379c
@@ -155,19 +155,21 @@ extern void native_load_gs_index(unsigned);
  * Load a segment. Fall back on loading the zero
  * segment if something goes wrong..
  */
-#define loadsegment(seg, value)			\
-do {						\
-	unsigned short __val = value;		\
-	asm volatile("\n"			\
-		"1:\t"				\
-		"movl %k0,%%" #seg "\n"		\
-		".section .fixup,\"ax\"\n"	\
-		"2:\t"				\
-		"xorl %k0,%k0\n\t"		\
-		"jmp 1b\n"			\
-		".previous\n"			\
-		_ASM_EXTABLE(1b, 2b)		\
-		: "+r" (__val) : : "memory");	\
+#define loadsegment(seg, value)					\
+do {								\
+	unsigned short __val = (value);				\
+								\
+	asm volatile("					\n"	\
+		     "1:	movl %k0,%%" #seg "	\n"	\
+								\
+		     ".section .fixup,\"ax\"		\n"	\
+		     "2:	xorl %k0,%k0		\n"	\
+		     "		jmp 1b			\n"	\
+		     ".previous				\n"	\
+								\
+		     _ASM_EXTABLE(1b, 2b)		\
+								\
+		     : "+r" (__val) : : "memory");		\
 } while (0)
 
 /*
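
For reference, loadsegment() is invoked with the bare register name and a selector value; the _ASM_EXTABLE(1b, 2b) entry makes a faulting segment load resume at the fixup code, which zeroes the operand and jumps back so the null selector is loaded instead. Below is a minimal, hypothetical call-site sketch, not part of this commit; the function name and the selector source are made up for illustration:

	/* Hypothetical example: restore a saved selector into %fs. */
	static inline void example_restore_fs(unsigned short sel)
	{
		/*
		 * If 'sel' is no longer a valid selector, the movl inside
		 * loadsegment() faults; the exception-table fixup zeroes
		 * the value and retries, leaving %fs with the null selector.
		 */
		loadsegment(fs, sel);
	}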