diff options
author | John Koleszar <jkoleszar@google.com> | 2010-05-18 11:58:33 -0400 |
---|---|---|
committer | John Koleszar <jkoleszar@google.com> | 2010-05-18 11:58:33 -0400 |
commit | 0ea50ce9cb4b65eee6afa1d041fe8beb5abda667 (patch) | |
tree | 1f3b9019f28bc56fd3156f96e5a9653a983ee61b /vp8/encoder/x86/variance_x86.h | |
download | libvpx-0ea50ce9cb4b65eee6afa1d041fe8beb5abda667.tar.gz |
Initial WebM release (tag: v0.9.0)
Diffstat (limited to 'vp8/encoder/x86/variance_x86.h')
-rw-r--r-- | vp8/encoder/x86/variance_x86.h | 275 |
1 file changed, 275 insertions, 0 deletions
/*
 *  Copyright (c) 2010 The VP8 project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license and patent
 *  grant that can be found in the LICENSE file in the root of the source
 *  tree. All contributing project authors may be found in the AUTHORS
 *  file in the root of the source tree.
 */


#ifndef VARIANCE_X86_H
#define VARIANCE_X86_H


/* Note:
 *
 * This platform is commonly built for runtime CPU detection. If you modify
 * any of the function mappings present in this file, be sure to also update
 * them in the function pointer initialization code
 *
 * The prototype_* macros (prototype_sad, prototype_variance,
 * prototype_variance2, prototype_subpixvariance, prototype_getmbss,
 * prototype_sad_multi_same_address, prototype_sad_multi_dif_address)
 * are supplied by vp8/encoder/variance.h, which must be included before
 * this header.  When CONFIG_RUNTIME_CPU_DETECT is disabled, the
 * #undef/#define pairs below statically rebind the generic
 * vp8_variance_* names to the fastest implementation enabled at build
 * time; later sections (SSE2/SSE3/SSSE3) intentionally override the
 * bindings made by earlier ones.
 */

/* ---------------------------------------------------------------- MMX */
#if HAVE_MMX
extern prototype_sad(vp8_sad4x4_mmx);
extern prototype_sad(vp8_sad8x8_mmx);
extern prototype_sad(vp8_sad8x16_mmx);
extern prototype_sad(vp8_sad16x8_mmx);
extern prototype_sad(vp8_sad16x16_mmx);
extern prototype_variance(vp8_variance4x4_mmx);
extern prototype_variance(vp8_variance8x8_mmx);
extern prototype_variance(vp8_variance8x16_mmx);
extern prototype_variance(vp8_variance16x8_mmx);
extern prototype_variance(vp8_variance16x16_mmx);
extern prototype_subpixvariance(vp8_sub_pixel_variance4x4_mmx);
extern prototype_subpixvariance(vp8_sub_pixel_variance8x8_mmx);
extern prototype_subpixvariance(vp8_sub_pixel_variance8x16_mmx);
extern prototype_subpixvariance(vp8_sub_pixel_variance16x8_mmx);
extern prototype_subpixvariance(vp8_sub_pixel_variance16x16_mmx);
extern prototype_subpixvariance(vp8_sub_pixel_mse16x16_mmx);
extern prototype_getmbss(vp8_get_mb_ss_mmx);
extern prototype_variance(vp8_mse16x16_mmx);
extern prototype_sad(vp8_get16x16pred_error_mmx);
extern prototype_variance2(vp8_get8x8var_mmx);
extern prototype_variance2(vp8_get16x16var_mmx);
extern prototype_sad(vp8_get4x4sse_cs_mmx);

#if !CONFIG_RUNTIME_CPU_DETECT
#undef  vp8_variance_sad4x4
#define vp8_variance_sad4x4 vp8_sad4x4_mmx

#undef  vp8_variance_sad8x8
#define vp8_variance_sad8x8 vp8_sad8x8_mmx

#undef  vp8_variance_sad8x16
#define vp8_variance_sad8x16 vp8_sad8x16_mmx

#undef  vp8_variance_sad16x8
#define vp8_variance_sad16x8 vp8_sad16x8_mmx

#undef  vp8_variance_sad16x16
#define vp8_variance_sad16x16 vp8_sad16x16_mmx

#undef  vp8_variance_var4x4
#define vp8_variance_var4x4 vp8_variance4x4_mmx

#undef  vp8_variance_var8x8
#define vp8_variance_var8x8 vp8_variance8x8_mmx

#undef  vp8_variance_var8x16
#define vp8_variance_var8x16 vp8_variance8x16_mmx

#undef  vp8_variance_var16x8
#define vp8_variance_var16x8 vp8_variance16x8_mmx

#undef  vp8_variance_var16x16
#define vp8_variance_var16x16 vp8_variance16x16_mmx

#undef  vp8_variance_subpixvar4x4
#define vp8_variance_subpixvar4x4 vp8_sub_pixel_variance4x4_mmx

#undef  vp8_variance_subpixvar8x8
#define vp8_variance_subpixvar8x8 vp8_sub_pixel_variance8x8_mmx

#undef  vp8_variance_subpixvar8x16
#define vp8_variance_subpixvar8x16 vp8_sub_pixel_variance8x16_mmx

#undef  vp8_variance_subpixvar16x8
#define vp8_variance_subpixvar16x8 vp8_sub_pixel_variance16x8_mmx

#undef  vp8_variance_subpixvar16x16
#define vp8_variance_subpixvar16x16 vp8_sub_pixel_variance16x16_mmx

#undef  vp8_variance_subpixmse16x16
#define vp8_variance_subpixmse16x16 vp8_sub_pixel_mse16x16_mmx

#undef  vp8_variance_getmbss
#define vp8_variance_getmbss vp8_get_mb_ss_mmx

#undef  vp8_variance_mse16x16
#define vp8_variance_mse16x16 vp8_mse16x16_mmx

#undef  vp8_variance_get16x16prederror
#define vp8_variance_get16x16prederror vp8_get16x16pred_error_mmx

#undef  vp8_variance_get8x8var
#define vp8_variance_get8x8var vp8_get8x8var_mmx

#undef  vp8_variance_get16x16var
#define vp8_variance_get16x16var vp8_get16x16var_mmx

#undef  vp8_variance_get4x4sse_cs
#define vp8_variance_get4x4sse_cs vp8_get4x4sse_cs_mmx

#endif
#endif


/* --------------------------------------------------------------- SSE2 */
/* The _wmt suffix ("Willamette") denotes SSE2 implementations.         */
#if HAVE_SSE2
extern prototype_sad(vp8_sad4x4_wmt);
extern prototype_sad(vp8_sad8x8_wmt);
extern prototype_sad(vp8_sad8x16_wmt);
extern prototype_sad(vp8_sad16x8_wmt);
extern prototype_sad(vp8_sad16x16_wmt);
extern prototype_variance(vp8_variance4x4_wmt);
extern prototype_variance(vp8_variance8x8_wmt);
extern prototype_variance(vp8_variance8x16_wmt);
extern prototype_variance(vp8_variance16x8_wmt);
extern prototype_variance(vp8_variance16x16_wmt);
extern prototype_subpixvariance(vp8_sub_pixel_variance4x4_wmt);
extern prototype_subpixvariance(vp8_sub_pixel_variance8x8_wmt);
extern prototype_subpixvariance(vp8_sub_pixel_variance8x16_wmt);
extern prototype_subpixvariance(vp8_sub_pixel_variance16x8_wmt);
extern prototype_subpixvariance(vp8_sub_pixel_variance16x16_wmt);
extern prototype_subpixvariance(vp8_sub_pixel_mse16x16_wmt);
extern prototype_getmbss(vp8_get_mb_ss_sse2);
extern prototype_variance(vp8_mse16x16_wmt);
extern prototype_sad(vp8_get16x16pred_error_sse2);
extern prototype_variance2(vp8_get8x8var_sse2);
extern prototype_variance2(vp8_get16x16var_sse2);

#if !CONFIG_RUNTIME_CPU_DETECT
#undef  vp8_variance_sad4x4
#define vp8_variance_sad4x4 vp8_sad4x4_wmt

#undef  vp8_variance_sad8x8
#define vp8_variance_sad8x8 vp8_sad8x8_wmt

#undef  vp8_variance_sad8x16
#define vp8_variance_sad8x16 vp8_sad8x16_wmt

#undef  vp8_variance_sad16x8
#define vp8_variance_sad16x8 vp8_sad16x8_wmt

#undef  vp8_variance_sad16x16
#define vp8_variance_sad16x16 vp8_sad16x16_wmt

#undef  vp8_variance_var4x4
#define vp8_variance_var4x4 vp8_variance4x4_wmt

#undef  vp8_variance_var8x8
#define vp8_variance_var8x8 vp8_variance8x8_wmt

#undef  vp8_variance_var8x16
#define vp8_variance_var8x16 vp8_variance8x16_wmt

#undef  vp8_variance_var16x8
#define vp8_variance_var16x8 vp8_variance16x8_wmt

#undef  vp8_variance_var16x16
#define vp8_variance_var16x16 vp8_variance16x16_wmt

#undef  vp8_variance_subpixvar4x4
#define vp8_variance_subpixvar4x4 vp8_sub_pixel_variance4x4_wmt

#undef  vp8_variance_subpixvar8x8
#define vp8_variance_subpixvar8x8 vp8_sub_pixel_variance8x8_wmt

#undef  vp8_variance_subpixvar8x16
#define vp8_variance_subpixvar8x16 vp8_sub_pixel_variance8x16_wmt

#undef  vp8_variance_subpixvar16x8
#define vp8_variance_subpixvar16x8 vp8_sub_pixel_variance16x8_wmt

#undef  vp8_variance_subpixvar16x16
#define vp8_variance_subpixvar16x16 vp8_sub_pixel_variance16x16_wmt

#undef  vp8_variance_subpixmse16x16
#define vp8_variance_subpixmse16x16 vp8_sub_pixel_mse16x16_wmt

#undef  vp8_variance_getmbss
#define vp8_variance_getmbss vp8_get_mb_ss_sse2

#undef  vp8_variance_mse16x16
#define vp8_variance_mse16x16 vp8_mse16x16_wmt

#undef  vp8_variance_get16x16prederror
#define vp8_variance_get16x16prederror vp8_get16x16pred_error_sse2

#undef  vp8_variance_get8x8var
#define vp8_variance_get8x8var vp8_get8x8var_sse2

#undef  vp8_variance_get16x16var
#define vp8_variance_get16x16var vp8_get16x16var_sse2

#endif
#endif


/* --------------------------------------------------------------- SSE3 */
#if HAVE_SSE3
extern prototype_sad(vp8_sad16x16_sse3);
/* NOTE(review): vp8_sad16x8_sse3 is declared but never mapped below;
 * presumably reserved for future use -- confirm against the RTCD
 * initialization code before removing. */
extern prototype_sad(vp8_sad16x8_sse3);
extern prototype_sad_multi_same_address(vp8_sad16x16x3_sse3);
extern prototype_sad_multi_same_address(vp8_sad16x8x3_sse3);
extern prototype_sad_multi_same_address(vp8_sad8x16x3_sse3);
extern prototype_sad_multi_same_address(vp8_sad8x8x3_sse3);
extern prototype_sad_multi_same_address(vp8_sad4x4x3_sse3);

extern prototype_sad_multi_dif_address(vp8_sad16x16x4d_sse3);
extern prototype_sad_multi_dif_address(vp8_sad16x8x4d_sse3);
extern prototype_sad_multi_dif_address(vp8_sad8x16x4d_sse3);
extern prototype_sad_multi_dif_address(vp8_sad8x8x4d_sse3);
extern prototype_sad_multi_dif_address(vp8_sad4x4x4d_sse3);

#if !CONFIG_RUNTIME_CPU_DETECT

#undef  vp8_variance_sad16x16
#define vp8_variance_sad16x16 vp8_sad16x16_sse3

#undef  vp8_variance_sad16x16x3
#define vp8_variance_sad16x16x3 vp8_sad16x16x3_sse3

#undef  vp8_variance_sad16x8x3
#define vp8_variance_sad16x8x3 vp8_sad16x8x3_sse3

#undef  vp8_variance_sad8x16x3
#define vp8_variance_sad8x16x3 vp8_sad8x16x3_sse3

#undef  vp8_variance_sad8x8x3
#define vp8_variance_sad8x8x3 vp8_sad8x8x3_sse3

#undef  vp8_variance_sad4x4x3
#define vp8_variance_sad4x4x3 vp8_sad4x4x3_sse3

/* Bug fix: the #define previously misspelled the mapped name as
 * vp8_variance_sad16x16x4 (missing the trailing 'd'), so after the
 * #undef removed the generic binding, the SSE3 16x16x4d SAD was never
 * actually used in static (non-RTCD) builds. */
#undef  vp8_variance_sad16x16x4d
#define vp8_variance_sad16x16x4d vp8_sad16x16x4d_sse3

#undef  vp8_variance_sad16x8x4d
#define vp8_variance_sad16x8x4d vp8_sad16x8x4d_sse3

#undef  vp8_variance_sad8x16x4d
#define vp8_variance_sad8x16x4d vp8_sad8x16x4d_sse3

#undef  vp8_variance_sad8x8x4d
#define vp8_variance_sad8x8x4d vp8_sad8x8x4d_sse3

#undef  vp8_variance_sad4x4x4d
#define vp8_variance_sad4x4x4d vp8_sad4x4x4d_sse3

#endif
#endif


/* -------------------------------------------------------------- SSSE3 */
#if HAVE_SSSE3
extern prototype_sad_multi_same_address(vp8_sad16x16x3_ssse3);
extern prototype_sad_multi_same_address(vp8_sad16x8x3_ssse3);

#if !CONFIG_RUNTIME_CPU_DETECT
#undef  vp8_variance_sad16x16x3
#define vp8_variance_sad16x16x3 vp8_sad16x16x3_ssse3

#undef  vp8_variance_sad16x8x3
#define vp8_variance_sad16x8x3 vp8_sad16x8x3_ssse3

#endif
#endif

#endif