Commit 82b3980b authored by Pavel Machek, committed by Christoph Hellwig

[PATCH] swsusp updates

This uses better constraints that do not go through the register
unnecessarily.
parent 24cc3559
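
With an "=m" output operand, %0 already expands to a complete memory reference (a symbol, or a stack slot such as 4(%esp)), so the instruction template should use it bare; the old "(%0)" spelling treated the operand as an address, which either went through a register or relied on a direct-address expansion to assemble at all. A minimal sketch of the new style, with a hypothetical stand-in variable rather than code from the patch:

/* hypothetical stand-in for a saved_context field */
static unsigned long scratch;

static inline void save_esp(void)
{
	/* "=m": the movl stores straight into scratch's memory slot,
	 * with no intermediate general-purpose register */
	asm volatile ("movl %%esp, %0" : "=m" (scratch));
}
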
@@ -54,10 +54,10 @@ static inline void save_processor_context (void)
/*
* descriptor tables
*/
asm volatile ("sgdt (%0)" : "=m" (saved_context.gdt_limit));
asm volatile ("sidt (%0)" : "=m" (saved_context.idt_limit));
asm volatile ("sldt (%0)" : "=m" (saved_context.ldt));
asm volatile ("str (%0)" : "=m" (saved_context.tr));
asm volatile ("sgdt %0" : "=m" (saved_context.gdt_limit));
asm volatile ("sidt %0" : "=m" (saved_context.idt_limit));
asm volatile ("sldt %0" : "=m" (saved_context.ldt));
asm volatile ("str %0" : "=m" (saved_context.tr));
/*
* save the general registers.
@@ -67,22 +67,22 @@ static inline void save_processor_context (void)
* It's really not necessary, and kinda fishy (check the assembly output),
* so it's avoided.
*/
asm volatile ("movl %%esp, (%0)" : "=m" (saved_context.esp));
asm volatile ("movl %%eax, (%0)" : "=m" (saved_context.eax));
asm volatile ("movl %%ebx, (%0)" : "=m" (saved_context.ebx));
asm volatile ("movl %%ecx, (%0)" : "=m" (saved_context.ecx));
asm volatile ("movl %%edx, (%0)" : "=m" (saved_context.edx));
asm volatile ("movl %%ebp, (%0)" : "=m" (saved_context.ebp));
asm volatile ("movl %%esi, (%0)" : "=m" (saved_context.esi));
asm volatile ("movl %%edi, (%0)" : "=m" (saved_context.edi));
asm volatile ("movl %%esp, %0" : "=m" (saved_context.esp));
asm volatile ("movl %%eax, %0" : "=m" (saved_context.eax));
asm volatile ("movl %%ebx, %0" : "=m" (saved_context.ebx));
asm volatile ("movl %%ecx, %0" : "=m" (saved_context.ecx));
asm volatile ("movl %%edx, %0" : "=m" (saved_context.edx));
asm volatile ("movl %%ebp, %0" : "=m" (saved_context.ebp));
asm volatile ("movl %%esi, %0" : "=m" (saved_context.esi));
asm volatile ("movl %%edi, %0" : "=m" (saved_context.edi));
/* FIXME: Need to save XMM0..XMM15? */
/*
* segment registers
*/
asm volatile ("movw %%es, %0" : "=r" (saved_context.es));
asm volatile ("movw %%fs, %0" : "=r" (saved_context.fs));
asm volatile ("movw %%gs, %0" : "=r" (saved_context.gs));
asm volatile ("movw %%ss, %0" : "=r" (saved_context.ss));
asm volatile ("movw %%es, %0" : "=m" (saved_context.es));
asm volatile ("movw %%fs, %0" : "=m" (saved_context.fs));
asm volatile ("movw %%gs, %0" : "=m" (saved_context.gs));
asm volatile ("movw %%ss, %0" : "=m" (saved_context.ss));
/*
* control registers
@@ -95,7 +95,7 @@ static inline void save_processor_context (void)
/*
* eflags
*/
asm volatile ("pushfl ; popl (%0)" : "=m" (saved_context.eflags));
asm volatile ("pushfl ; popl %0" : "=m" (saved_context.eflags));
}
static void
@@ -125,9 +125,7 @@ static inline void restore_processor_context (void)
/*
* first restore %ds, so we can access our data properly
*/
asm volatile (".align 4");
asm volatile ("movw %0, %%ds" :: "r" ((u16)__KERNEL_DS));
asm volatile ("movw %0, %%ds" :: "r" (__KERNEL_DS));
/*
* control registers
@@ -167,9 +165,9 @@ static inline void restore_processor_context (void)
* now restore the descriptor tables to their proper values
* ltr is done in fix_processor_context().
*/
asm volatile ("lgdt (%0)" :: "m" (saved_context.gdt_limit));
asm volatile ("lidt (%0)" :: "m" (saved_context.idt_limit));
asm volatile ("lldt (%0)" :: "m" (saved_context.ldt));
asm volatile ("lgdt %0" :: "m" (saved_context.gdt_limit));
asm volatile ("lidt %0" :: "m" (saved_context.idt_limit));
asm volatile ("lldt %0" :: "m" (saved_context.ldt));
fix_processor_context();
@@ -15,22 +15,22 @@ arch_prepare_suspend(void)
/* image of the saved processor state */
struct saved_context {
-	u32 eax, ebx, ecx, edx;
-	u32 esp, ebp, esi, edi;
+	unsigned long eax, ebx, ecx, edx;
+	unsigned long esp, ebp, esi, edi;
u16 es, fs, gs, ss;
-	u32 cr0, cr2, cr3, cr4;
+	unsigned long cr0, cr2, cr3, cr4;
u16 gdt_pad;
u16 gdt_limit;
-	u32 gdt_base;
+	unsigned long gdt_base;
u16 idt_pad;
u16 idt_limit;
-	u32 idt_base;
+	unsigned long idt_base;
u16 ldt;
u16 tss;
-	u32 tr;
-	u32 safety;
-	u32 return_address;
-	u32 eflags;
+	unsigned long tr;
+	unsigned long safety;
+	unsigned long return_address;
+	unsigned long eflags;
} __attribute__((packed));
#define loaddebug(thread,register) \
@@ -52,11 +52,11 @@ extern unsigned long saved_edi;
static inline void acpi_save_register_state(unsigned long return_point)
{
saved_eip = return_point;
asm volatile ("movl %%esp,(%0)" : "=m" (saved_esp));
asm volatile ("movl %%ebp,(%0)" : "=m" (saved_ebp));
asm volatile ("movl %%ebx,(%0)" : "=m" (saved_ebx));
asm volatile ("movl %%edi,(%0)" : "=m" (saved_edi));
asm volatile ("movl %%esi,(%0)" : "=m" (saved_esi));
asm volatile ("movl %%esp,%0" : "=m" (saved_esp));
asm volatile ("movl %%ebp,%0" : "=m" (saved_ebp));
asm volatile ("movl %%ebx,%0" : "=m" (saved_ebx));
asm volatile ("movl %%edi,%0" : "=m" (saved_edi));
asm volatile ("movl %%esi,%0" : "=m" (saved_esi));
}
#define acpi_restore_register_state() do {} while (0)
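
The segment-register hunk is where the commit message's point about not going through the register shows up most directly: with "=r" the movw lands in a compiler-chosen scratch register and the compiler then emits a second store to spill it into saved_context, while "=m" lets the single movw write the struct field itself. A minimal sketch of the two styles, with a hypothetical stand-in variable (compare the assembly output of each):

/* hypothetical stand-in for a saved_context segment field */
static unsigned short seg;

static inline void save_es_via_register(void)
{
	/* "=r": %es goes into a scratch register, and the compiler
	 * emits a second movw to spill that register into seg */
	asm volatile ("movw %%es, %0" : "=r" (seg));
}

static inline void save_es_direct(void)
{
	/* "=m": one movw, straight from %es into seg's memory slot */
	asm volatile ("movw %%es, %0" : "=m" (seg));
}

The u32 to unsigned long switch in struct saved_context, for its part, leaves the packed layout unchanged on i386, where both types are 32 bits wide.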