Diffstat (limited to 'arch/x86/kernel/tsc_sync.c')
-rw-r--r-- | arch/x86/kernel/tsc_sync.c | 37 ++++++++++++++++++++++++++++---------
1 file changed, 28 insertions(+), 9 deletions(-)
diff --git a/arch/x86/kernel/tsc_sync.c b/arch/x86/kernel/tsc_sync.c
index f713e84d1cb4..fdb3b7befc47 100644
--- a/arch/x86/kernel/tsc_sync.c
+++ b/arch/x86/kernel/tsc_sync.c
@@ -59,19 +59,20 @@ void tsc_verify_tsc_adjust(void)
 }
 
 #ifndef CONFIG_SMP
-void __init tsc_store_and_check_tsc_adjust(void)
+bool __init tsc_store_and_check_tsc_adjust(void)
 {
	struct tsc_adjust *ref, *cur = this_cpu_ptr(&tsc_adjust);
	s64 bootval;
 
	if (!boot_cpu_has(X86_FEATURE_TSC_ADJUST))
-		return;
+		return false;
 
	rdmsrl(MSR_IA32_TSC_ADJUST, bootval);
	cur->bootval = bootval;
	cur->adjusted = bootval;
	cur->nextcheck = jiffies + HZ;
	pr_info("TSC ADJUST: Boot CPU0: %lld\n", bootval);
+	return false;
 }
 
 #else /* !CONFIG_SMP */
@@ -79,14 +80,14 @@ void __init tsc_store_and_check_tsc_adjust(void)
 /*
  * Store and check the TSC ADJUST MSR if available
  */
-void tsc_store_and_check_tsc_adjust(void)
+bool tsc_store_and_check_tsc_adjust(void)
 {
	struct tsc_adjust *ref, *cur = this_cpu_ptr(&tsc_adjust);
	unsigned int refcpu, cpu = smp_processor_id();
	s64 bootval;
 
	if (!boot_cpu_has(X86_FEATURE_TSC_ADJUST))
-		return;
+		return false;
 
	rdmsrl(MSR_IA32_TSC_ADJUST, bootval);
	cur->bootval = bootval;
@@ -110,7 +111,7 @@ void tsc_store_and_check_tsc_adjust(void)
		 */
		cur->adjusted = bootval;
		pr_info_once("TSC ADJUST: Boot CPU%u: %lld\n", cpu, bootval);
-		return;
+		return false;
	}
 
	ref = per_cpu_ptr(&tsc_adjust, refcpu);
@@ -134,6 +135,11 @@ void tsc_store_and_check_tsc_adjust(void)
		cur->adjusted = ref->adjusted;
		wrmsrl(MSR_IA32_TSC_ADJUST, ref->adjusted);
	}
+	/*
+	 * We have the TSCs forced to be in sync on this package. Skip sync
+	 * test:
+	 */
+	return true;
 }
 
 /*
@@ -142,6 +148,7 @@ void tsc_store_and_check_tsc_adjust(void)
  */
 static atomic_t start_count;
 static atomic_t stop_count;
+static atomic_t skip_test;
 
 /*
  * We use a raw spinlock in this exceptional case, because
@@ -265,10 +272,16 @@ void check_tsc_sync_source(int cpu)
	atomic_set(&stop_count, 0);
 
	/*
-	 * Wait for the target to arrive:
+	 * Wait for the target to start or to skip the test:
	 */
-	while (atomic_read(&start_count) != cpus-1)
+	while (atomic_read(&start_count) != cpus - 1) {
+		if (atomic_read(&skip_test) > 0) {
+			atomic_set(&skip_test, 0);
+			return;
+		}
		cpu_relax();
+	}
+
	/*
	 * Trigger the target to continue into the measurement too:
	 */
@@ -318,8 +331,14 @@ void check_tsc_sync_target(void)
	if (unsynchronized_tsc() || tsc_clocksource_reliable)
		return;
 
-	/* Store and check the TSC ADJUST MSR */
-	tsc_store_and_check_tsc_adjust();
+	/*
+	 * Store, verify and sanitize the TSC adjust register. If
+	 * successful skip the test.
+	 */
+	if (tsc_store_and_check_tsc_adjust()) {
+		atomic_inc(&skip_test);
+		return;
+	}
 
	/*
	 * Register this CPU's participation and wait for the
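
For readers tracing the control flow rather than the hunks: the target CPU can now tell the source CPU to abandon the sync test by bumping skip_test instead of ever arriving at start_count, and the source clears skip_test and returns when it sees that. Below is a minimal userspace sketch of just that handshake, using C11 atomics and pthreads in place of the kernel's atomic_t, cpu_relax() and per-CPU machinery; the thread functions, the tsc_adjust_was_sanitized flag and main() are invented for illustration and are not part of tsc_sync.c.

	/* Illustrative sketch of the skip_test handshake only; not kernel code. */
	#include <stdatomic.h>
	#include <stdbool.h>
	#include <stdio.h>
	#include <pthread.h>
	#include <sched.h>

	static atomic_int start_count;	/* target increments when it joins the test */
	static atomic_int skip_test;	/* target increments to tell the source to bail out */

	/* Stand-in for tsc_store_and_check_tsc_adjust() returning true. */
	static bool tsc_adjust_was_sanitized = true;

	static void *target(void *arg)
	{
		if (tsc_adjust_was_sanitized) {
			/* TSC_ADJUST already forced in sync: skip the measurement. */
			atomic_fetch_add(&skip_test, 1);
			return NULL;
		}
		atomic_fetch_add(&start_count, 1);	/* normal path: join the test */
		return NULL;
	}

	static void *source(void *arg)
	{
		/* Wait for the target to start or to skip the test. */
		while (atomic_load(&start_count) != 1) {
			if (atomic_load(&skip_test) > 0) {
				atomic_store(&skip_test, 0);
				puts("source: target skipped the sync test");
				return NULL;
			}
			sched_yield();	/* stands in for cpu_relax() */
		}
		puts("source: running the sync test");
		return NULL;
	}

	int main(void)
	{
		pthread_t s, t;

		pthread_create(&s, NULL, source, NULL);
		pthread_create(&t, NULL, target, NULL);
		pthread_join(s, NULL);
		pthread_join(t, NULL);
		return 0;
	}

The design point the sketch mirrors is that the source's wait loop has two exits: either the target shows up in start_count and the measurement proceeds, or skip_test is raised because TSC_ADJUST sanitizing already guaranteed package-wide sync, in which case the source consumes the flag and skips the expensive loop entirely.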