summaryrefslogtreecommitdiff
path: root/chromium/base/cpu_unittest.cc
diff options
context:
space:
mode:
authorAllan Sandfeld Jensen <allan.jensen@theqtcompany.com>2015-10-13 13:24:50 +0200
committerAllan Sandfeld Jensen <allan.jensen@theqtcompany.com>2015-10-14 10:57:25 +0000
commitaf3d4809763ef308f08ced947a73b624729ac7ea (patch)
tree4402b911e30383f6c6dace1e8cf3b8e85355db3a /chromium/base/cpu_unittest.cc
parent0e8ff63a407fe323e215bb1a2c423c09a4747c8a (diff)
downloadqtwebengine-chromium-af3d4809763ef308f08ced947a73b624729ac7ea.tar.gz
BASELINE: Update Chromium to 47.0.2526.14
Also adding in sources needed for spellchecking. Change-Id: Idd44170fa1616f26315188970a8d5ba7d472b18a Reviewed-by: Michael Brüning <michael.bruning@theqtcompany.com>
Diffstat (limited to 'chromium/base/cpu_unittest.cc')
-rw-r--r-- chromium/base/cpu_unittest.cc | 87
1 file changed, 55 insertions, 32 deletions
diff --git a/chromium/base/cpu_unittest.cc b/chromium/base/cpu_unittest.cc
index 18bf959a55e..931509738fe 100644
--- a/chromium/base/cpu_unittest.cc
+++ b/chromium/base/cpu_unittest.cc
@@ -7,6 +7,11 @@
#include "testing/gtest/include/gtest/gtest.h"
+#if _MSC_VER >= 1700
+// C4752: found Intel(R) Advanced Vector Extensions; consider using /arch:AVX.
+#pragma warning(disable: 4752)
+#endif
+
// Tests whether we can run extended instructions represented by the CPU
// information. This test actually executes some extended instructions (such as
// MMX, SSE, etc.) supported by the CPU and sees we can run them without
@@ -17,77 +22,95 @@ TEST(CPU, RunExtendedInstructions) {
// Retrieve the CPU information.
base::CPU cpu;
-// TODO(jschuh): crbug.com/168866 Find a way to enable this on Win64.
-#if defined(OS_WIN) && !defined(_M_X64)
ASSERT_TRUE(cpu.has_mmx());
+ ASSERT_TRUE(cpu.has_sse());
+ ASSERT_TRUE(cpu.has_sse2());
+// TODO(fbarchard): consider enabling for clangcl.
+#if defined(COMPILER_GCC)
// Execute an MMX instruction.
- __asm emms;
+ __asm__ __volatile__("emms\n" : : : "mm0");
- if (cpu.has_sse()) {
- // Execute an SSE instruction.
- __asm xorps xmm0, xmm0;
- }
+ // Execute an SSE instruction.
+ __asm__ __volatile__("xorps %%xmm0, %%xmm0\n" : : : "xmm0");
- if (cpu.has_sse2()) {
- // Execute an SSE 2 instruction.
- __asm psrldq xmm0, 0;
- }
+ // Execute an SSE 2 instruction.
+ __asm__ __volatile__("psrldq $0, %%xmm0\n" : : : "xmm0");
if (cpu.has_sse3()) {
// Execute an SSE 3 instruction.
- __asm addsubpd xmm0, xmm0;
+ __asm__ __volatile__("addsubpd %%xmm0, %%xmm0\n" : : : "xmm0");
}
if (cpu.has_ssse3()) {
// Execute a Supplimental SSE 3 instruction.
- __asm psignb xmm0, xmm0;
+ __asm__ __volatile__("psignb %%xmm0, %%xmm0\n" : : : "xmm0");
}
if (cpu.has_sse41()) {
// Execute an SSE 4.1 instruction.
- __asm pmuldq xmm0, xmm0;
+ __asm__ __volatile__("pmuldq %%xmm0, %%xmm0\n" : : : "xmm0");
}
if (cpu.has_sse42()) {
// Execute an SSE 4.2 instruction.
- __asm crc32 eax, eax;
+ __asm__ __volatile__("crc32 %%eax, %%eax\n" : : : "eax");
}
-#elif defined(OS_POSIX) && defined(__x86_64__)
- ASSERT_TRUE(cpu.has_mmx());
- // Execute an MMX instruction.
- __asm__ __volatile__("emms\n" : : : "mm0");
-
- if (cpu.has_sse()) {
- // Execute an SSE instruction.
- __asm__ __volatile__("xorps %%xmm0, %%xmm0\n" : : : "xmm0");
+ if (cpu.has_avx()) {
+ // Execute an AVX instruction.
+ __asm__ __volatile__("vzeroupper\n" : : : "xmm0");
}
- if (cpu.has_sse2()) {
- // Execute an SSE 2 instruction.
- __asm__ __volatile__("psrldq $0, %%xmm0\n" : : : "xmm0");
+ if (cpu.has_avx2()) {
+ // Execute an AVX 2 instruction.
+ __asm__ __volatile__("vpunpcklbw %%ymm0, %%ymm0, %%ymm0\n" : : : "xmm0");
}
+// TODO(jschuh): crbug.com/168866 Find a way to enable this on Win64.
+#elif defined(COMPILER_MSVC) && defined(ARCH_CPU_32_BITS)
+
+ // Execute an MMX instruction.
+ __asm emms;
+
+ // Execute an SSE instruction.
+ __asm xorps xmm0, xmm0;
+
+ // Execute an SSE 2 instruction.
+ __asm psrldq xmm0, 0;
+
if (cpu.has_sse3()) {
// Execute an SSE 3 instruction.
- __asm__ __volatile__("addsubpd %%xmm0, %%xmm0\n" : : : "xmm0");
+ __asm addsubpd xmm0, xmm0;
}
if (cpu.has_ssse3()) {
// Execute a Supplimental SSE 3 instruction.
- __asm__ __volatile__("psignb %%xmm0, %%xmm0\n" : : : "xmm0");
+ __asm psignb xmm0, xmm0;
}
if (cpu.has_sse41()) {
// Execute an SSE 4.1 instruction.
- __asm__ __volatile__("pmuldq %%xmm0, %%xmm0\n" : : : "xmm0");
+ __asm pmuldq xmm0, xmm0;
}
if (cpu.has_sse42()) {
// Execute an SSE 4.2 instruction.
- __asm__ __volatile__("crc32 %%eax, %%eax\n" : : : "eax");
+ __asm crc32 eax, eax;
}
-#endif
-#endif
+
+// Visual C 2012 required for AVX.
+#if _MSC_VER >= 1700
+ if (cpu.has_avx()) {
+ // Execute an AVX instruction.
+ __asm vzeroupper;
+ }
+
+ if (cpu.has_avx2()) {
+ // Execute an AVX 2 instruction.
+ __asm vpunpcklbw ymm0, ymm0, ymm0
+ }
+#endif // _MSC_VER >= 1700
+#endif // defined(COMPILER_GCC)
+#endif // defined(ARCH_CPU_X86_FAMILY)
}