diff options
author | Erik de Castro Lopo <erik.decastrolopo@ambiata.com> | 2017-04-05 05:53:46 +1000 |
---|---|---|
committer | Ben Gamari <ben@smart-cactus.org> | 2017-04-28 22:35:04 -0400 |
commit | e5b3492f23c2296d0d8221e1787ee585331f726e (patch) | |
tree | e1d6797dadc265db7f97615ff71c65c3d83db9c0 /includes/stg | |
parent | 6d14c1485cb570cbd183bcdc0f858d9a6dc1eb31 (diff) | |
download | haskell-e5b3492f23c2296d0d8221e1787ee585331f726e.tar.gz |
Enable new warning for fragile/incorrect CPP #if usage
The C code in the RTS now gets built with `-Wundef` and the Haskell code
(stages 1 and 2 only) with `-Wcpp-undef`. We now get warnings wherever
`#if` is used on undefined identifiers.
Test Plan: Validate on Linux and Windows
Reviewers: austin, angerman, simonmar, bgamari, Phyx
Reviewed By: bgamari
Subscribers: thomie, snowleopard
Differential Revision: https://phabricator.haskell.org/D3278
Diffstat (limited to 'includes/stg')
-rw-r--r-- | includes/stg/HaskellMachRegs.h | 36 | ||||
-rw-r--r-- | includes/stg/MachRegs.h | 14 | ||||
-rw-r--r-- | includes/stg/RtsMachRegs.h | 36 | ||||
-rw-r--r-- | includes/stg/SMP.h | 37 |
4 files changed, 83 insertions, 40 deletions
diff --git a/includes/stg/HaskellMachRegs.h b/includes/stg/HaskellMachRegs.h index 37e687bc4e..89a2b95399 100644 --- a/includes/stg/HaskellMachRegs.h +++ b/includes/stg/HaskellMachRegs.h @@ -32,14 +32,34 @@ #define MACHREGS_NO_REGS 0 -#define MACHREGS_i386 i386_TARGET_ARCH -#define MACHREGS_x86_64 x86_64_TARGET_ARCH -#define MACHREGS_powerpc (powerpc_TARGET_ARCH || powerpc64_TARGET_ARCH \ - || powerpc64le_TARGET_ARCH || rs6000_TARGET_ARCH) -#define MACHREGS_sparc sparc_TARGET_ARCH -#define MACHREGS_arm arm_TARGET_ARCH -#define MACHREGS_aarch64 aarch64_TARGET_ARCH -#define MACHREGS_darwin darwin_TARGET_OS +#ifdef i386_TARGET_ARCH +#define MACHREGS_i386 1 +#endif + +#ifdef x86_64_TARGET_ARCH +#define MACHREGS_x86_64 1 +#endif + +#if defined(powerpc_TARGET_ARCH) || defined(powerpc64_TARGET_ARCH) \ + || defined(powerpc64le_TARGET_ARCH) || defined(rs6000_TARGET_ARCH) +#define MACHREGS_powerpc 1 +#endif + +#ifdef sparc_TARGET_ARCH +#define MACHREGS_sparc 1 +#endif + +#ifdef arm_TARGET_ARCH +#define MACHREGS_arm 1 +#endif + +#ifdef aarch64_TARGET_ARCH +#define MACHREGS_aarch64 1 +#endif + +#ifdef darwin_TARGET_OS +#define MACHREGS_darwin 1 +#endif #endif diff --git a/includes/stg/MachRegs.h b/includes/stg/MachRegs.h index a0cf595505..f58e49e626 100644 --- a/includes/stg/MachRegs.h +++ b/includes/stg/MachRegs.h @@ -81,7 +81,7 @@ Leaving SpLim out of the picture. -------------------------------------------------------------------------- */ -#if MACHREGS_i386 +#ifdef MACHREGS_i386 #define REG(x) __asm__("%" #x) @@ -155,7 +155,7 @@ --------------------------------------------------------------------------- */ -#elif MACHREGS_x86_64 +#elif defined(MACHREGS_x86_64) #define REG(x) __asm__("%" #x) @@ -302,7 +302,7 @@ the stack. See Note [Overlapping global registers] for implications. We can do the Whole Business with callee-save registers only! 
-------------------------------------------------------------------------- */ -#elif MACHREGS_powerpc +#elif defined(MACHREGS_powerpc) #define REG(x) __asm__(#x) @@ -315,7 +315,7 @@ the stack. See Note [Overlapping global registers] for implications. #define REG_R7 r20 #define REG_R8 r21 -#if MACHREGS_darwin +#ifdef MACHREGS_darwin #define REG_F1 f14 #define REG_F2 f15 @@ -441,7 +441,7 @@ the stack. See Note [Overlapping global registers] for implications. -------------------------------------------------------------------------- */ -#elif MACHREGS_sparc +#elif defined(MACHREGS_sparc) #define REG(x) __asm__("%" #x) @@ -520,7 +520,7 @@ the stack. See Note [Overlapping global registers] for implications. d16-d31/q8-q15 Argument / result/ scratch registers ----------------------------------------------------------------------------- */ -#elif MACHREGS_arm +#elif defined(MACHREGS_arm) #define REG(x) __asm__(#x) @@ -577,7 +577,7 @@ the stack. See Note [Overlapping global registers] for implications. 
----------------------------------------------------------------------------- */ -#elif MACHREGS_aarch64 +#elif defined(MACHREGS_aarch64) #define REG(x) __asm__(#x) diff --git a/includes/stg/RtsMachRegs.h b/includes/stg/RtsMachRegs.h index 5a0464b909..627bdfeebb 100644 --- a/includes/stg/RtsMachRegs.h +++ b/includes/stg/RtsMachRegs.h @@ -38,14 +38,34 @@ #define MACHREGS_NO_REGS 0 -#define MACHREGS_i386 i386_HOST_ARCH -#define MACHREGS_x86_64 x86_64_HOST_ARCH -#define MACHREGS_powerpc (powerpc_HOST_ARCH || powerpc64_HOST_ARCH \ - || powerpc64le_HOST_ARCH || rs6000_HOST_ARCH) -#define MACHREGS_sparc sparc_HOST_ARCH -#define MACHREGS_arm arm_HOST_ARCH -#define MACHREGS_aarch64 aarch64_HOST_ARCH -#define MACHREGS_darwin darwin_HOST_OS +#ifdef i386_HOST_ARCH +#define MACHREGS_i386 1 +#endif + +#ifdef x86_64_HOST_ARCH +#define MACHREGS_x86_64 1 +#endif + +#if defined(powerpc_HOST_ARCH) || defined(powerpc64_HOST_ARCH) \ + || defined(powerpc64le_HOST_ARCH) || defined(rs6000_HOST_ARCH) +#define MACHREGS_powerpc 1 +#endif + +#ifdef sparc_HOST_ARCH +#define MACHREGS_sparc 1 +#endif + +#ifdef arm_HOST_ARCH +#define MACHREGS_arm 1 +#endif + +#ifdef aarch64_HOST_ARCH +#define MACHREGS_aarch64 1 +#endif + +#ifdef darwin_HOST_OS +#define MACHREGS_darwin 1 +#endif #endif diff --git a/includes/stg/SMP.h b/includes/stg/SMP.h index e4ae6121fa..4020aef0d9 100644 --- a/includes/stg/SMP.h +++ b/includes/stg/SMP.h @@ -13,7 +13,7 @@ #pragma once -#if arm_HOST_ARCH && defined(arm_HOST_ARCH_PRE_ARMv6) +#if defined(arm_HOST_ARCH) && defined(arm_HOST_ARCH_PRE_ARMv6) void arm_atomic_spin_lock(void); void arm_atomic_spin_unlock(void); #endif @@ -186,14 +186,15 @@ EXTERN_INLINE void write_barrier(void) { #if defined(NOSMP) return; -#elif i386_HOST_ARCH || x86_64_HOST_ARCH +#elif defined(i386_HOST_ARCH) || defined(x86_64_HOST_ARCH) __asm__ __volatile__ ("" : : : "memory"); -#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH +#elif defined(powerpc_HOST_ARCH) || 
defined(powerpc64_HOST_ARCH) \ + || defined(powerpc64le_HOST_ARCH) __asm__ __volatile__ ("lwsync" : : : "memory"); -#elif sparc_HOST_ARCH +#elif defined(sparc_HOST_ARCH) /* Sparc in TSO mode does not require store/store barriers. */ __asm__ __volatile__ ("" : : : "memory"); -#elif (arm_HOST_ARCH) || aarch64_HOST_ARCH +#elif defined(arm_HOST_ARCH) || defined(aarch64_HOST_ARCH) __asm__ __volatile__ ("dmb st" : : : "memory"); #else #error memory barriers unimplemented on this architecture @@ -204,17 +205,18 @@ EXTERN_INLINE void store_load_barrier(void) { #if defined(NOSMP) return; -#elif i386_HOST_ARCH +#elif defined(i386_HOST_ARCH) __asm__ __volatile__ ("lock; addl $0,0(%%esp)" : : : "memory"); -#elif x86_64_HOST_ARCH +#elif defined(x86_64_HOST_ARCH) __asm__ __volatile__ ("lock; addq $0,0(%%rsp)" : : : "memory"); -#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH +#elif defined(powerpc_HOST_ARCH) || defined(powerpc64_HOST_ARCH) \ + || defined(powerpc64le_HOST_ARCH) __asm__ __volatile__ ("sync" : : : "memory"); -#elif sparc_HOST_ARCH +#elif defined(sparc_HOST_ARCH) __asm__ __volatile__ ("membar #StoreLoad" : : : "memory"); -#elif arm_HOST_ARCH +#elif defined(arm_HOST_ARCH) __asm__ __volatile__ ("dmb" : : : "memory"); -#elif aarch64_HOST_ARCH +#elif defined(aarch64_HOST_ARCH) __asm__ __volatile__ ("dmb sy" : : : "memory"); #else #error memory barriers unimplemented on this architecture @@ -225,18 +227,19 @@ EXTERN_INLINE void load_load_barrier(void) { #if defined(NOSMP) return; -#elif i386_HOST_ARCH +#elif defined(i386_HOST_ARCH) __asm__ __volatile__ ("" : : : "memory"); -#elif x86_64_HOST_ARCH +#elif defined(x86_64_HOST_ARCH) __asm__ __volatile__ ("" : : : "memory"); -#elif powerpc_HOST_ARCH || powerpc64_HOST_ARCH || powerpc64le_HOST_ARCH +#elif defined(powerpc_HOST_ARCH) || defined(powerpc64_HOST_ARCH) \ + || defined(powerpc64le_HOST_ARCH) __asm__ __volatile__ ("lwsync" : : : "memory"); -#elif sparc_HOST_ARCH +#elif defined(sparc_HOST_ARCH) /* 
Sparc in TSO mode does not require load/load barriers. */ __asm__ __volatile__ ("" : : : "memory"); -#elif arm_HOST_ARCH +#elif defined(arm_HOST_ARCH) __asm__ __volatile__ ("dmb" : : : "memory"); -#elif aarch64_HOST_ARCH +#elif defined(aarch64_HOST_ARCH) __asm__ __volatile__ ("dmb sy" : : : "memory"); #else #error memory barriers unimplemented on this architecture |