author | Ben Gamari <bgamari.foss@gmail.com> | 2015-08-03 08:42:00 +0200
committer | Ben Gamari <ben@smart-cactus.org> | 2015-08-03 08:42:15 +0200
commit | b38ee89c8c8724ba2feb98d4082795a5d4ae96f6 (patch)
tree | 47f867a778e1d5aafda176ccf93b2049db527e85 /rts/StgCRun.c
parent | 948e03e55a47a7c5a3566a7f8ba347f590fd8d1f (diff)
download | haskell-b38ee89c8c8724ba2feb98d4082795a5d4ae96f6.tar.gz
Fix incorrect stack pointer usage in StgRun() on x86_64
The STG_RETURN code in StgCRun.c is incorrect on x86_64 variants whose ABI
does not guarantee a red zone below the stack pointer, such as Windows or
Xen/HaLVM. The current implementation restores the stack pointer first,
which effectively marks the area holding the saved registers as reusable,
and only afterwards reloads the CPU registers from this "free" area.
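
To make the ordering concrete, here is a minimal sketch of the safe epilogue
shape the patch moves to. It is not taken from StgCRun.c; the symbol name
demo_epilogue and the 48-byte save area are assumptions for illustration
only. Callee-saved registers are reloaded while the save area still lies at
or above the stack pointer, and %rsp is adjusted only afterwards.

/* demo_epilogue.c -- a minimal sketch, not the GHC sources; a file-scope
 * asm block in the same style as StgCRun.c. */
__asm__ (
    ".text\n\t"
    ".globl demo_epilogue\n"
    "demo_epilogue:\n\t"
    "subq $48, %rsp\n\t"         /* prologue: reserve a save area           */
    "movq %rbx, 0(%rsp)\n\t"     /* save callee-saved registers             */
    "movq %rbp, 8(%rsp)\n\t"
    /* ... a body that clobbers %rbx and %rbp would go here ... */
    "movq 0(%rsp), %rbx\n\t"     /* reload while the area is still at or    */
    "movq 8(%rsp), %rbp\n\t"     /*   above the live stack pointer          */
    "addq $48, %rsp\n\t"         /* only then release the save area         */
    "retq\n\t"
);

With the broken ordering, the addq would come before the reloads, leaving
the saved values below %rsp in memory the operating system is free to reuse.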
This ordering happens to work by accident on operating systems that
strictly adhere to the System V ABI, because interrupt and signal delivery
is guaranteed to leave the 128 bytes below the stack pointer (the red zone)
untouched. On other systems it can corrupt the saved CPU registers if an
interrupt or signal arrives just after the stack pointer has been restored.
The red zone is normally only used by small leaf functions to avoid
adjusting the stack pointer, and exploiting it gives us no advantage in
this case.
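
For contrast, a minimal sketch of the one place the red zone is genuinely
useful: a small leaf function spilling below %rsp without moving it. The
symbol name red_zone_leaf is an assumption; the code only illustrates the
System V guarantee that signal delivery leaves the 128 bytes below the
stack pointer intact, which is exactly what Windows and Xen/HaLVM do not
promise.

/* red_zone_leaf.c -- a minimal sketch, not part of this patch. */
__asm__ (
    ".text\n\t"
    ".globl red_zone_leaf\n"
    "red_zone_leaf:\n\t"
    "movq %rdi, -8(%rsp)\n\t"    /* spill into the red zone, no subq needed */
    "movq -8(%rsp), %rax\n\t"    /* safe on SysV: signals keep the 128      */
    "retq\n\t"                   /*   bytes below %rsp untouched            */
);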
Reviewers: austin, rwbarton
Reviewed By: rwbarton
Subscribers: thomie, simonmar
Differential Revision: https://phabricator.haskell.org/D1120
GHC Trac Issues: #10155
Diffstat (limited to 'rts/StgCRun.c')
-rw-r--r-- | rts/StgCRun.c | 21
1 file changed, 10 insertions, 11 deletions
diff --git a/rts/StgCRun.c b/rts/StgCRun.c
index 29039117ad..644850988e 100644
--- a/rts/StgCRun.c
+++ b/rts/StgCRun.c
@@ -302,19 +302,18 @@ StgRunIsImplementedInAssembler(void)
          * restore callee-saves registers. (Don't stomp on %%rax!)
          */
         "addq %0, %%rsp\n\t"
-        "movq %%rsp, %%rdx\n\t"
-        "addq %1, %%rsp\n\t"
-        "movq 0(%%rdx),%%rbx\n\t"  /* restore the registers saved above */
-        "movq 8(%%rdx),%%rbp\n\t"
-        "movq 16(%%rdx),%%r12\n\t"
-        "movq 24(%%rdx),%%r13\n\t"
-        "movq 32(%%rdx),%%r14\n\t"
-        "movq 40(%%rdx),%%r15\n\t"
+        "movq 0(%%rsp),%%rbx\n\t"  /* restore the registers saved above */
+        "movq 8(%%rsp),%%rbp\n\t"
+        "movq 16(%%rsp),%%r12\n\t"
+        "movq 24(%%rsp),%%r13\n\t"
+        "movq 32(%%rsp),%%r14\n\t"
+        "movq 40(%%rsp),%%r15\n\t"
 #if defined(mingw32_HOST_OS)
-        "movq 48(%%rdx),%%rdi\n\t"
-        "movq 56(%%rdx),%%rsi\n\t"
-        "movq 64(%%rdx),%%xmm6\n\t"
+        "movq 48(%%rsp),%%rdi\n\t"
+        "movq 56(%%rsp),%%rsi\n\t"
+        "movq 64(%%rsp),%%xmm6\n\t"
 #endif
+        "addq %1, %%rsp\n\t"
         "retq"
 
         :