@@ -926,6 +926,12 @@ const struct arch_vpmu_ops *__init core2_vpmu_init(void)
     }
     fixed_pmc_cnt = core2_get_fixed_pmc_count();
+#define PERF_FIXED_CTR_MAX (MSR_CORE_PERF_FIXED_CTRn - MSR_CORE_PERF_FIXED_CTR0 + 1)
+    if ( fixed_pmc_cnt > PERF_FIXED_CTR_MAX )
+    {
+        printk(XENLOG_INFO "VPMU: limiting fixed perf counters to %d\n", PERF_FIXED_CTR_MAX);
+        fixed_pmc_cnt = PERF_FIXED_CTR_MAX;
+    }
     if ( cpu_has_pdcm )
     {
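
As a cross-check of the clamp added above: PERF_FIXED_CTR_MAX works out to 3, the size of the 0x309..0x30b fixed-counter MSR block the rest of the series keys off. A minimal standalone sketch (not Xen code; the CPUID-style count of 4 is a made-up example for hardware that advertises a fourth fixed counter):

    #include <stdio.h>

    /* MSR values as defined in the msr-index.h hunk below. */
    #define MSR_CORE_PERF_FIXED_CTR0 0x00000309
    #define MSR_CORE_PERF_FIXED_CTRn 0x0000030b
    #define PERF_FIXED_CTR_MAX \
        (MSR_CORE_PERF_FIXED_CTRn - MSR_CORE_PERF_FIXED_CTR0 + 1)

    int main(void)
    {
        /* Hypothetical CPUID leaf 0xa count: hardware with 4 fixed counters. */
        unsigned int fixed_pmc_cnt = 4;

        if ( fixed_pmc_cnt > PERF_FIXED_CTR_MAX )
        {
            printf("VPMU: limiting fixed perf counters to %d\n",
                   PERF_FIXED_CTR_MAX);
            fixed_pmc_cnt = PERF_FIXED_CTR_MAX;
        }

        printf("fixed_pmc_cnt = %u\n", fixed_pmc_cnt); /* prints 3 */
        return 0;
    }
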
@@ -3362,7 +3362,7 @@ static int cf_check vmx_msr_read_intercept(
         /* FALLTHROUGH */
     case MSR_P6_PERFCTR(0)...MSR_P6_PERFCTR_LAST:
     case MSR_P6_EVNTSEL(0)...MSR_P6_EVNTSEL_LAST:
-    case MSR_CORE_PERF_FIXED_CTR0...MSR_CORE_PERF_FIXED_CTR2:
+    case MSR_CORE_PERF_FIXED_CTR0...MSR_CORE_PERF_FIXED_CTRn:
     case MSR_CORE_PERF_FIXED_CTR_CTRL...MSR_CORE_PERF_GLOBAL_OVF_CTRL:
     case MSR_IA32_PEBS_ENABLE:
     case MSR_IA32_DS_AREA:
@@ -3680,7 +3680,7 @@ static int cf_check vmx_msr_write_intercept(
     case MSR_P6_PERFCTR(0)...MSR_P6_PERFCTR_LAST:
     case MSR_P6_EVNTSEL(0)...MSR_P6_EVNTSEL_LAST:
-    case MSR_CORE_PERF_FIXED_CTR0...MSR_CORE_PERF_FIXED_CTR2:
+    case MSR_CORE_PERF_FIXED_CTR0...MSR_CORE_PERF_FIXED_CTRn:
     case MSR_CORE_PERF_FIXED_CTR_CTRL...MSR_CORE_PERF_GLOBAL_OVF_CTRL:
     case MSR_IA32_PEBS_ENABLE:
     case MSR_IA32_DS_AREA:
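
Both vmx.c hunks only rename the upper bound of the same 0x309..0x30b case range; any handler the range dispatches to still has to bound the counter index by the clamped fixed_pmc_cnt, since the range is matched regardless of how many counters the vPMU exposes. A sketch of that index check (assumed shape for illustration, not the actual Xen handler):

    #include <stdbool.h>
    #include <stdint.h>

    #define MSR_CORE_PERF_FIXED_CTR0 0x00000309
    #define MSR_CORE_PERF_FIXED_CTRn 0x0000030b

    /*
     * Map an intercepted MSR onto a fixed-counter index and reject
     * counters beyond what the vPMU exposes to the guest.
     */
    static bool fixed_ctr_accessible(uint32_t msr, unsigned int fixed_pmc_cnt)
    {
        if ( msr < MSR_CORE_PERF_FIXED_CTR0 || msr > MSR_CORE_PERF_FIXED_CTRn )
            return false;

        return msr - MSR_CORE_PERF_FIXED_CTR0 < fixed_pmc_cnt;
    }

    int main(void)
    {
        /* With fixed_pmc_cnt clamped to 3, 0x30b is the last accessible
         * fixed counter and 0x30c is rejected; exits 0 when both hold. */
        return !(fixed_ctr_accessible(0x30b, 3) &&
                 !fixed_ctr_accessible(0x30c, 3));
    }
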
@@ -674,8 +674,8 @@
 /* Intel Core-based CPU performance counters */
 #define MSR_CORE_PERF_FIXED_CTR0 0x00000309
-#define MSR_CORE_PERF_FIXED_CTR1 0x0000030a
-#define MSR_CORE_PERF_FIXED_CTR2 0x0000030b
+#define MSR_CORE_PERF_FIXED_CTRn 0x0000030b /* last handled fixed ctr */
+
 #define MSR_CORE_PERF_FIXED_CTR_CTRL 0x0000038d
 #define MSR_CORE_PERF_GLOBAL_STATUS 0x0000038e
 #define MSR_CORE_PERF_GLOBAL_CTRL 0x0000038f
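
With the per-counter CTR1/CTR2 definitions gone, the width of the range is implicit in the two remaining constants. A hypothetical compile-time check (not part of the patch) that CTR0..CTRn still spans the three counters the dropped definitions covered:

    #include <assert.h> /* static_assert (C11) */

    #define MSR_CORE_PERF_FIXED_CTR0 0x00000309
    #define MSR_CORE_PERF_FIXED_CTRn 0x0000030b

    /* The removed FIXED_CTR1 (0x30a) and FIXED_CTR2 (0x30b) both fall
     * inside the retained CTR0..CTRn range, which spans three MSRs. */
    static_assert(MSR_CORE_PERF_FIXED_CTRn - MSR_CORE_PERF_FIXED_CTR0 + 1 == 3,
                  "fixed counter MSR range must cover CTR0..CTR2");
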
@@ -967,7 +967,7 @@ static int cf_check read_msr(
     case MSR_P6_PERFCTR(0) ... MSR_P6_PERFCTR_LAST:
     case MSR_P6_EVNTSEL(0) ... MSR_P6_EVNTSEL_LAST:
-    case MSR_CORE_PERF_FIXED_CTR0 ... MSR_CORE_PERF_FIXED_CTR2:
+    case MSR_CORE_PERF_FIXED_CTR0 ... MSR_CORE_PERF_FIXED_CTRn:
     case MSR_CORE_PERF_FIXED_CTR_CTRL ... MSR_CORE_PERF_GLOBAL_OVF_CTRL:
         if ( boot_cpu_data.x86_vendor == X86_VENDOR_INTEL )
         {
@@ -1147,7 +1147,7 @@ static int cf_check write_msr(
     case MSR_P6_PERFCTR(0) ... MSR_P6_PERFCTR_LAST:
     case MSR_P6_EVNTSEL(0) ... MSR_P6_EVNTSEL_LAST:
-    case MSR_CORE_PERF_FIXED_CTR0 ... MSR_CORE_PERF_FIXED_CTR2:
+    case MSR_CORE_PERF_FIXED_CTR0 ... MSR_CORE_PERF_FIXED_CTRn:
     case MSR_CORE_PERF_FIXED_CTR_CTRL ... MSR_CORE_PERF_GLOBAL_OVF_CTRL:
         if ( boot_cpu_data.x86_vendor == X86_VENDOR_INTEL )
         {