// SPDX-License-Identifier: GPL-2.0
/* Nehalem-EX/Westmere-EX uncore support */
#include <asm/cpu_device_id.h>
#include <asm/msr.h>
#include "uncore.h"

/* NHM-EX event control */
#define NHMEX_PMON_CTL_EV_SEL_MASK	0x000000ff
#define NHMEX_PMON_CTL_UMASK_MASK	0x0000ff00
#define NHMEX_PMON_CTL_EN_BIT0		(1 << 0)
#define NHMEX_PMON_CTL_EDGE_DET		(1 << 18)
#define NHMEX_PMON_CTL_PMI_EN		(1 << 20)
#define NHMEX_PMON_CTL_EN_BIT22		(1 << 22)
#define NHMEX_PMON_CTL_INVERT		(1 << 23)
#define NHMEX_PMON_CTL_TRESH_MASK	0xff000000
#define NHMEX_PMON_RAW_EVENT_MASK	(NHMEX_PMON_CTL_EV_SEL_MASK | \
					 NHMEX_PMON_CTL_UMASK_MASK | \
					 NHMEX_PMON_CTL_EDGE_DET | \
					 NHMEX_PMON_CTL_INVERT | \
					 NHMEX_PMON_CTL_TRESH_MASK)

/* NHM-EX Ubox */
#define NHMEX_U_MSR_PMON_GLOBAL_CTL		0xc00
#define NHMEX_U_MSR_PMON_CTR			0xc11
#define NHMEX_U_MSR_PMON_EV_SEL			0xc10

#define NHMEX_U_PMON_GLOBAL_EN			(1 << 0)
#define NHMEX_U_PMON_GLOBAL_PMI_CORE_SEL	0x0000001e
#define NHMEX_U_PMON_GLOBAL_EN_ALL		(1 << 28)
#define NHMEX_U_PMON_GLOBAL_RST_ALL		(1 << 29)
#define NHMEX_U_PMON_GLOBAL_FRZ_ALL		(1 << 31)

#define NHMEX_U_PMON_RAW_EVENT_MASK		\
		(NHMEX_PMON_CTL_EV_SEL_MASK |	\
		 NHMEX_PMON_CTL_EDGE_DET)

/* NHM-EX Cbox */
#define NHMEX_C0_MSR_PMON_GLOBAL_CTL		0xd00
#define NHMEX_C0_MSR_PMON_CTR0			0xd11
#define NHMEX_C0_MSR_PMON_EV_SEL0		0xd10
#define NHMEX_C_MSR_OFFSET			0x20

/* NHM-EX Bbox */
#define NHMEX_B0_MSR_PMON_GLOBAL_CTL		0xc20
#define NHMEX_B0_MSR_PMON_CTR0			0xc31
#define NHMEX_B0_MSR_PMON_CTL0			0xc30
#define NHMEX_B_MSR_OFFSET			0x40
#define NHMEX_B0_MSR_MATCH			0xe45
#define NHMEX_B0_MSR_MASK			0xe46
#define NHMEX_B1_MSR_MATCH			0xe4d
#define NHMEX_B1_MSR_MASK			0xe4e

#define NHMEX_B_PMON_CTL_EN			(1 << 0)
#define NHMEX_B_PMON_CTL_EV_SEL_SHIFT		1
#define NHMEX_B_PMON_CTL_EV_SEL_MASK		\
		(0x1f << NHMEX_B_PMON_CTL_EV_SEL_SHIFT)
#define NHMEX_B_PMON_CTR_SHIFT		6
#define NHMEX_B_PMON_CTR_MASK		\
		(0x3 << NHMEX_B_PMON_CTR_SHIFT)
#define NHMEX_B_PMON_RAW_EVENT_MASK		\
		(NHMEX_B_PMON_CTL_EV_SEL_MASK | \
		 NHMEX_B_PMON_CTR_MASK)

/* NHM-EX Sbox */
#define NHMEX_S0_MSR_PMON_GLOBAL_CTL		0xc40
#define NHMEX_S0_MSR_PMON_CTR0			0xc51
#define NHMEX_S0_MSR_PMON_CTL0			0xc50
#define NHMEX_S_MSR_OFFSET			0x80
#define NHMEX_S0_MSR_MM_CFG			0xe48
#define NHMEX_S0_MSR_MATCH			0xe49
#define NHMEX_S0_MSR_MASK			0xe4a
#define NHMEX_S1_MSR_MM_CFG			0xe58
#define NHMEX_S1_MSR_MATCH			0xe59
#define NHMEX_S1_MSR_MASK			0xe5a

#define NHMEX_S_PMON_MM_CFG_EN			(0x1ULL << 63)
#define NHMEX_S_EVENT_TO_R_PROG_EV		0

/* NHM-EX Mbox */
#define NHMEX_M0_MSR_GLOBAL_CTL			0xca0
#define NHMEX_M0_MSR_PMU_DSP			0xca5
#define NHMEX_M0_MSR_PMU_ISS			0xca6
#define NHMEX_M0_MSR_PMU_MAP			0xca7
#define NHMEX_M0_MSR_PMU_MSC_THR		0xca8
#define NHMEX_M0_MSR_PMU_PGT			0xca9
#define NHMEX_M0_MSR_PMU_PLD			0xcaa
#define NHMEX_M0_MSR_PMU_ZDP_CTL_FVC		0xcab
#define NHMEX_M0_MSR_PMU_CTL0			0xcb0
#define NHMEX_M0_MSR_PMU_CNT0			0xcb1
#define NHMEX_M_MSR_OFFSET			0x40
#define NHMEX_M0_MSR_PMU_MM_CFG			0xe54
#define NHMEX_M1_MSR_PMU_MM_CFG			0xe5c

#define NHMEX_M_PMON_MM_CFG_EN			(1ULL << 63)
#define NHMEX_M_PMON_ADDR_MATCH_MASK		0x3ffffffffULL
#define NHMEX_M_PMON_ADDR_MASK_MASK		0x7ffffffULL
#define NHMEX_M_PMON_ADDR_MASK_SHIFT		34

#define NHMEX_M_PMON_CTL_EN			(1 << 0)
#define NHMEX_M_PMON_CTL_PMI_EN			(1 << 1)
#define NHMEX_M_PMON_CTL_COUNT_MODE_SHIFT	2
#define NHMEX_M_PMON_CTL_COUNT_MODE_MASK	\
	(0x3 << NHMEX_M_PMON_CTL_COUNT_MODE_SHIFT)
#define NHMEX_M_PMON_CTL_STORAGE_MODE_SHIFT	4
#define NHMEX_M_PMON_CTL_STORAGE_MODE_MASK	\
	(0x3 << NHMEX_M_PMON_CTL_STORAGE_MODE_SHIFT)
#define NHMEX_M_PMON_CTL_WRAP_MODE		(1 << 6)
#define NHMEX_M_PMON_CTL_FLAG_MODE		(1 << 7)
#define NHMEX_M_PMON_CTL_INC_SEL_SHIFT		9
#define NHMEX_M_PMON_CTL_INC_SEL_MASK		\
	(0x1f << NHMEX_M_PMON_CTL_INC_SEL_SHIFT)
#define NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT	19
#define NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK	\
	(0x7 << NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT)
#define NHMEX_M_PMON_RAW_EVENT_MASK			\
		(NHMEX_M_PMON_CTL_COUNT_MODE_MASK |	\
		 NHMEX_M_PMON_CTL_STORAGE_MODE_MASK |	\
		 NHMEX_M_PMON_CTL_WRAP_MODE |		\
		 NHMEX_M_PMON_CTL_FLAG_MODE |		\
		 NHMEX_M_PMON_CTL_INC_SEL_MASK |	\
		 NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK)

#define NHMEX_M_PMON_ZDP_CTL_FVC_MASK		(((1 << 11) - 1) | (1 << 23))
#define NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(n)	(0x7ULL << (11 + 3 * (n)))

#define WSMEX_M_PMON_ZDP_CTL_FVC_MASK		(((1 << 12) - 1) | (1 << 24))
#define WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(n)	(0x7ULL << (12 + 3 * (n)))

/*
 * Use bits 9~13 to select the event if bit 7 is not set;
 * otherwise use bits 19~21 to select the event.
 */
#define MBOX_INC_SEL(x) ((x) << NHMEX_M_PMON_CTL_INC_SEL_SHIFT)
#define MBOX_SET_FLAG_SEL(x) (((x) << NHMEX_M_PMON_CTL_SET_FLAG_SEL_SHIFT) | \
				NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_INC_SEL_MASK (NHMEX_M_PMON_CTL_INC_SEL_MASK | \
			   NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_SET_FLAG_SEL_MASK (NHMEX_M_PMON_CTL_SET_FLAG_SEL_MASK | \
				NHMEX_M_PMON_CTL_FLAG_MODE)
#define MBOX_INC_SEL_EXTAR_REG(c, r) \
		EVENT_EXTRA_REG(MBOX_INC_SEL(c), NHMEX_M0_MSR_PMU_##r, \
				MBOX_INC_SEL_MASK, (u64)-1, NHMEX_M_##r)
#define MBOX_SET_FLAG_SEL_EXTRA_REG(c, r) \
		EVENT_EXTRA_REG(MBOX_SET_FLAG_SEL(c), NHMEX_M0_MSR_PMU_##r, \
				MBOX_SET_FLAG_SEL_MASK, \
				(u64)-1, NHMEX_M_##r)

/* NHM-EX Rbox */
#define NHMEX_R_MSR_GLOBAL_CTL			0xe00
#define NHMEX_R_MSR_PMON_CTL0			0xe10
#define NHMEX_R_MSR_PMON_CNT0			0xe11
#define NHMEX_R_MSR_OFFSET			0x20

#define NHMEX_R_MSR_PORTN_QLX_CFG(n)		\
		((n) < 4 ? (0xe0c + (n)) : (0xe2c + (n) - 4))
#define NHMEX_R_MSR_PORTN_IPERF_CFG0(n)		(0xe04 + (n))
#define NHMEX_R_MSR_PORTN_IPERF_CFG1(n)		(0xe24 + (n))
#define NHMEX_R_MSR_PORTN_XBR_OFFSET(n)		\
		(((n) < 4 ? 0 : 0x10) + (n) * 4)
#define NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n)	\
		(0xe60 + NHMEX_R_MSR_PORTN_XBR_OFFSET(n))
#define NHMEX_R_MSR_PORTN_XBR_SET1_MATCH(n)	\
		(NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) + 1)
#define NHMEX_R_MSR_PORTN_XBR_SET1_MASK(n)	\
		(NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(n) + 2)
#define NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n)	\
		(0xe70 + NHMEX_R_MSR_PORTN_XBR_OFFSET(n))
#define NHMEX_R_MSR_PORTN_XBR_SET2_MATCH(n)	\
		(NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) + 1)
#define NHMEX_R_MSR_PORTN_XBR_SET2_MASK(n)	\
		(NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(n) + 2)

#define NHMEX_R_PMON_CTL_EN			(1 << 0)
#define NHMEX_R_PMON_CTL_EV_SEL_SHIFT		1
#define NHMEX_R_PMON_CTL_EV_SEL_MASK		\
		(0x1f << NHMEX_R_PMON_CTL_EV_SEL_SHIFT)
#define NHMEX_R_PMON_CTL_PMI_EN			(1 << 6)
#define NHMEX_R_PMON_RAW_EVENT_MASK		NHMEX_R_PMON_CTL_EV_SEL_MASK

/* NHM-EX Wbox */
#define NHMEX_W_MSR_GLOBAL_CTL			0xc80
#define NHMEX_W_MSR_PMON_CNT0			0xc90
#define NHMEX_W_MSR_PMON_EVT_SEL0		0xc91
#define NHMEX_W_MSR_PMON_FIXED_CTR		0x394
#define NHMEX_W_MSR_PMON_FIXED_CTL		0x395

#define NHMEX_W_PMON_GLOBAL_FIXED_EN		(1ULL << 31)

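/*
 * Helper to pull the i-th n-bit wide field out of a packed value; used
 * below to read the sub-fields packed into reg->idx, reg->reg and er->ref.
 */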
#define __BITS_VALUE(x, i, n)  ((typeof(x))(((x) >> ((i) * (n))) & \
				((1ULL << (n)) - 1)))

DEFINE_UNCORE_FORMAT_ATTR(event, event, "config:0-7");
DEFINE_UNCORE_FORMAT_ATTR(event5, event, "config:1-5");
DEFINE_UNCORE_FORMAT_ATTR(umask, umask, "config:8-15");
DEFINE_UNCORE_FORMAT_ATTR(edge, edge, "config:18");
DEFINE_UNCORE_FORMAT_ATTR(inv, inv, "config:23");
DEFINE_UNCORE_FORMAT_ATTR(thresh8, thresh, "config:24-31");
DEFINE_UNCORE_FORMAT_ATTR(counter, counter, "config:6-7");
DEFINE_UNCORE_FORMAT_ATTR(match, match, "config1:0-63");
DEFINE_UNCORE_FORMAT_ATTR(mask, mask, "config2:0-63");

static void nhmex_uncore_msr_init_box(struct intel_uncore_box *box)
{
	wrmsrq(NHMEX_U_MSR_PMON_GLOBAL_CTL, NHMEX_U_PMON_GLOBAL_EN_ALL);
}

static void nhmex_uncore_msr_exit_box(struct intel_uncore_box *box)
{
	wrmsrq(NHMEX_U_MSR_PMON_GLOBAL_CTL, 0);
}

static void nhmex_uncore_msr_disable_box(struct intel_uncore_box *box)
{
	unsigned msr = uncore_msr_box_ctl(box);
	u64 config;

	if (msr) {
		rdmsrq(msr, config);
		config &= ~((1ULL << uncore_num_counters(box)) - 1);
		/* WBox has a fixed counter */
		if (uncore_msr_fixed_ctl(box))
			config &= ~NHMEX_W_PMON_GLOBAL_FIXED_EN;
		wrmsrq(msr, config);
	}
}

static void nhmex_uncore_msr_enable_box(struct intel_uncore_box *box)
{
	unsigned msr = uncore_msr_box_ctl(box);
	u64 config;

	if (msr) {
		rdmsrq(msr, config);
		config |= (1ULL << uncore_num_counters(box)) - 1;
		/* WBox has a fixed counter */
		if (uncore_msr_fixed_ctl(box))
			config |= NHMEX_W_PMON_GLOBAL_FIXED_EN;
		wrmsrq(msr, config);
	}
}

static void nhmex_uncore_msr_disable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	wrmsrq(event->hw.config_base, 0);
}

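/*
 * The enable bit differs between boxes: if bit 0 is already part of the
 * event select (the type's event_mask covers it), bit 22 is used as the
 * enable bit; otherwise bit 0 is.
 */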
static void nhmex_uncore_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;

	if (hwc->idx == UNCORE_PMC_IDX_FIXED)
		wrmsrq(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0);
	else if (box->pmu->type->event_mask & NHMEX_PMON_CTL_EN_BIT0)
		wrmsrq(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22);
	else
		wrmsrq(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0);
}

#define NHMEX_UNCORE_OPS_COMMON_INIT()				\
	.init_box	= nhmex_uncore_msr_init_box,		\
	.exit_box	= nhmex_uncore_msr_exit_box,		\
	.disable_box	= nhmex_uncore_msr_disable_box,		\
	.enable_box	= nhmex_uncore_msr_enable_box,		\
	.disable_event	= nhmex_uncore_msr_disable_event,	\
	.read_counter	= uncore_msr_read_counter

static struct intel_uncore_ops nhmex_uncore_ops = {
	NHMEX_UNCORE_OPS_COMMON_INIT(),
	.enable_event	= nhmex_uncore_msr_enable_event,
};

static struct attribute *nhmex_uncore_ubox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_edge.attr,
	NULL,
};

static const struct attribute_group nhmex_uncore_ubox_format_group = {
	.name		= "format",
	.attrs		= nhmex_uncore_ubox_formats_attr,
};

static struct intel_uncore_type nhmex_uncore_ubox = {
	.name		= "ubox",
	.num_counters	= 1,
	.num_boxes	= 1,
	.perf_ctr_bits	= 48,
	.event_ctl	= NHMEX_U_MSR_PMON_EV_SEL,
	.perf_ctr	= NHMEX_U_MSR_PMON_CTR,
	.event_mask	= NHMEX_U_PMON_RAW_EVENT_MASK,
	.box_ctl	= NHMEX_U_MSR_PMON_GLOBAL_CTL,
	.ops		= &nhmex_uncore_ops,
	.format_group	= &nhmex_uncore_ubox_format_group
};

static struct attribute *nhmex_uncore_cbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	NULL,
};

static const struct attribute_group nhmex_uncore_cbox_format_group = {
	.name = "format",
	.attrs = nhmex_uncore_cbox_formats_attr,
};

/* msr offset for each instance of cbox */
static u64 nhmex_cbox_msr_offsets[] = {
	0x0, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x240, 0x2c0,
};

static struct intel_uncore_type nhmex_uncore_cbox = {
	.name			= "cbox",
	.num_counters		= 6,
	.num_boxes		= 10,
	.perf_ctr_bits		= 48,
	.event_ctl		= NHMEX_C0_MSR_PMON_EV_SEL0,
	.perf_ctr		= NHMEX_C0_MSR_PMON_CTR0,
	.event_mask		= NHMEX_PMON_RAW_EVENT_MASK,
	.box_ctl		= NHMEX_C0_MSR_PMON_GLOBAL_CTL,
	.msr_offsets		= nhmex_cbox_msr_offsets,
	.pair_ctr_ctl		= 1,
	.ops			= &nhmex_uncore_ops,
	.format_group		= &nhmex_uncore_cbox_format_group
};

static struct uncore_event_desc nhmex_uncore_wbox_events[] = {
	INTEL_UNCORE_EVENT_DESC(clockticks, "event=0xff,umask=0"),
	{ /* end: all zeroes */ },
};

static struct intel_uncore_type nhmex_uncore_wbox = {
	.name			= "wbox",
	.num_counters		= 4,
	.num_boxes		= 1,
	.perf_ctr_bits		= 48,
	.event_ctl		= NHMEX_W_MSR_PMON_CNT0,
	.perf_ctr		= NHMEX_W_MSR_PMON_EVT_SEL0,
	.fixed_ctr		= NHMEX_W_MSR_PMON_FIXED_CTR,
	.fixed_ctl		= NHMEX_W_MSR_PMON_FIXED_CTL,
	.event_mask		= NHMEX_PMON_RAW_EVENT_MASK,
	.box_ctl		= NHMEX_W_MSR_GLOBAL_CTL,
	.pair_ctr_ctl		= 1,
	.event_descs		= nhmex_uncore_wbox_events,
	.ops			= &nhmex_uncore_ops,
	.format_group		= &nhmex_uncore_cbox_format_group
};

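/*
 * Bbox events that use the match/mask registers pass the match value in
 * config1 and the mask in config2; all other events need no extra setup.
 */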
static int nhmex_bbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
	int ctr, ev_sel;

	ctr = (hwc->config & NHMEX_B_PMON_CTR_MASK) >>
		NHMEX_B_PMON_CTR_SHIFT;
	ev_sel = (hwc->config & NHMEX_B_PMON_CTL_EV_SEL_MASK) >>
		NHMEX_B_PMON_CTL_EV_SEL_SHIFT;

	/* events that do not use the match/mask registers */
	if ((ctr == 0 && ev_sel > 0x3) || (ctr == 1 && ev_sel > 0x6) ||
	    (ctr == 2 && ev_sel != 0x4) || ctr == 3)
		return 0;

	if (box->pmu->pmu_idx == 0)
		reg1->reg = NHMEX_B0_MSR_MATCH;
	else
		reg1->reg = NHMEX_B1_MSR_MATCH;
	reg1->idx = 0;
	reg1->config = event->attr.config1;
	reg2->config = event->attr.config2;
	return 0;
}

static void nhmex_bbox_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;

	if (reg1->idx != EXTRA_REG_NONE) {
		wrmsrq(reg1->reg, reg1->config);
		wrmsrq(reg1->reg + 1, reg2->config);
	}
	wrmsrq(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0 |
		(hwc->config & NHMEX_B_PMON_CTL_EV_SEL_MASK));
}

/*
 * The Bbox has 4 counters, but each counter monitors different events.
 * Use bits 6-7 in the event config to select counter.
 */
static struct event_constraint nhmex_uncore_bbox_constraints[] = {
	EVENT_CONSTRAINT(0 , 1, 0xc0),
	EVENT_CONSTRAINT(0x40, 2, 0xc0),
	EVENT_CONSTRAINT(0x80, 4, 0xc0),
	EVENT_CONSTRAINT(0xc0, 8, 0xc0),
	EVENT_CONSTRAINT_END,
};

static struct attribute *nhmex_uncore_bbox_formats_attr[] = {
	&format_attr_event5.attr,
	&format_attr_counter.attr,
	&format_attr_match.attr,
	&format_attr_mask.attr,
	NULL,
};

static const struct attribute_group nhmex_uncore_bbox_format_group = {
	.name = "format",
	.attrs = nhmex_uncore_bbox_formats_attr,
};

static struct intel_uncore_ops nhmex_uncore_bbox_ops = {
	NHMEX_UNCORE_OPS_COMMON_INIT(),
	.enable_event		= nhmex_bbox_msr_enable_event,
	.hw_config		= nhmex_bbox_hw_config,
	.get_constraint		= uncore_get_constraint,
	.put_constraint		= uncore_put_constraint,
};

static struct intel_uncore_type nhmex_uncore_bbox = {
	.name			= "bbox",
	.num_counters		= 4,
	.num_boxes		= 2,
	.perf_ctr_bits		= 48,
	.event_ctl		= NHMEX_B0_MSR_PMON_CTL0,
	.perf_ctr		= NHMEX_B0_MSR_PMON_CTR0,
	.event_mask		= NHMEX_B_PMON_RAW_EVENT_MASK,
	.box_ctl		= NHMEX_B0_MSR_PMON_GLOBAL_CTL,
	.msr_offset		= NHMEX_B_MSR_OFFSET,
	.pair_ctr_ctl		= 1,
	.num_shared_regs	= 1,
	.constraints		= nhmex_uncore_bbox_constraints,
	.ops			= &nhmex_uncore_bbox_ops,
	.format_group		= &nhmex_uncore_bbox_format_group
};

static int nhmex_sbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;

	/* only TO_R_PROG_EV event uses the match/mask register */
	if ((hwc->config & NHMEX_PMON_CTL_EV_SEL_MASK) !=
	    NHMEX_S_EVENT_TO_R_PROG_EV)
		return 0;

	if (box->pmu->pmu_idx == 0)
		reg1->reg = NHMEX_S0_MSR_MM_CFG;
	else
		reg1->reg = NHMEX_S1_MSR_MM_CFG;
	reg1->idx = 0;
	reg1->config = event->attr.config1;
	reg2->config = event->attr.config2;
	return 0;
}

static void nhmex_sbox_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;

	if (reg1->idx != EXTRA_REG_NONE) {
		wrmsrq(reg1->reg, 0);
		wrmsrq(reg1->reg + 1, reg1->config);
		wrmsrq(reg1->reg + 2, reg2->config);
		wrmsrq(reg1->reg, NHMEX_S_PMON_MM_CFG_EN);
	}
	wrmsrq(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22);
}

static struct attribute *nhmex_uncore_sbox_formats_attr[] = {
	&format_attr_event.attr,
	&format_attr_umask.attr,
	&format_attr_edge.attr,
	&format_attr_inv.attr,
	&format_attr_thresh8.attr,
	&format_attr_match.attr,
	&format_attr_mask.attr,
	NULL,
};

static const struct attribute_group nhmex_uncore_sbox_format_group = {
	.name			= "format",
	.attrs			= nhmex_uncore_sbox_formats_attr,
};

static struct intel_uncore_ops nhmex_uncore_sbox_ops = {
	NHMEX_UNCORE_OPS_COMMON_INIT(),
	.enable_event		= nhmex_sbox_msr_enable_event,
	.hw_config		= nhmex_sbox_hw_config,
	.get_constraint		= uncore_get_constraint,
	.put_constraint		= uncore_put_constraint,
};

static struct intel_uncore_type nhmex_uncore_sbox = {
	.name			= "sbox",
	.num_counters		= 4,
	.num_boxes		= 2,
	.perf_ctr_bits		= 48,
	.event_ctl		= NHMEX_S0_MSR_PMON_CTL0,
	.perf_ctr		= NHMEX_S0_MSR_PMON_CTR0,
	.event_mask		= NHMEX_PMON_RAW_EVENT_MASK,
	.box_ctl		= NHMEX_S0_MSR_PMON_GLOBAL_CTL,
	.msr_offset		= NHMEX_S_MSR_OFFSET,
	.pair_ctr_ctl		= 1,
	.num_shared_regs	= 1,
	.ops			= &nhmex_uncore_sbox_ops,
	.format_group		= &nhmex_uncore_sbox_format_group
};

enum {
	EXTRA_REG_NHMEX_M_FILTER,
	EXTRA_REG_NHMEX_M_DSP,
	EXTRA_REG_NHMEX_M_ISS,
	EXTRA_REG_NHMEX_M_MAP,
	EXTRA_REG_NHMEX_M_MSC_THR,
	EXTRA_REG_NHMEX_M_PGT,
	EXTRA_REG_NHMEX_M_PLD,
	EXTRA_REG_NHMEX_M_ZDP_CTL_FVC,
};

static struct extra_reg nhmex_uncore_mbox_extra_regs[] = {
	MBOX_INC_SEL_EXTAR_REG(0x0, DSP),
	MBOX_INC_SEL_EXTAR_REG(0x4, MSC_THR),
	MBOX_INC_SEL_EXTAR_REG(0x5, MSC_THR),
	MBOX_INC_SEL_EXTAR_REG(0x9, ISS),
	/* event 0xa uses two extra registers */
	MBOX_INC_SEL_EXTAR_REG(0xa, ISS),
	MBOX_INC_SEL_EXTAR_REG(0xa, PLD),
	MBOX_INC_SEL_EXTAR_REG(0xb, PLD),
	/* events 0xd ~ 0x10 use the same extra register */
	MBOX_INC_SEL_EXTAR_REG(0xd, ZDP_CTL_FVC),
	MBOX_INC_SEL_EXTAR_REG(0xe, ZDP_CTL_FVC),
	MBOX_INC_SEL_EXTAR_REG(0xf, ZDP_CTL_FVC),
	MBOX_INC_SEL_EXTAR_REG(0x10, ZDP_CTL_FVC),
	MBOX_INC_SEL_EXTAR_REG(0x16, PGT),
	MBOX_SET_FLAG_SEL_EXTRA_REG(0x0, DSP),
	MBOX_SET_FLAG_SEL_EXTRA_REG(0x1, ISS),
	MBOX_SET_FLAG_SEL_EXTRA_REG(0x5, PGT),
	MBOX_SET_FLAG_SEL_EXTRA_REG(0x6, MAP),
	EVENT_EXTRA_END
};

/* Nehalem-EX or Westmere-EX ? */
static bool uncore_nhmex;

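/*
 * Grab a reference on shared extra register @idx. For the ZDP_CTL_FVC
 * register the reference count is kept as four packed 8-bit counters in
 * er->ref, one per event field, and only the bits covered by the relevant
 * mask have to agree between the current and the new user.
 */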
static bool nhmex_mbox_get_shared_reg(struct intel_uncore_box *box, int idx, u64 config)
{
	struct intel_uncore_extra_reg *er;
	unsigned long flags;
	bool ret = false;
	u64 mask;

	if (idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) {
		er = &box->shared_regs[idx];
		raw_spin_lock_irqsave(&er->lock, flags);
		if (!atomic_read(&er->ref) || er->config == config) {
			atomic_inc(&er->ref);
			er->config = config;
			ret = true;
		}
		raw_spin_unlock_irqrestore(&er->lock, flags);

		return ret;
	}
	/*
	 * The ZDP_CTL_FVC MSR has 4 fields which are used to control
	 * events 0xd ~ 0x10. Besides these 4 fields, there are additional
	 * fields which are shared.
	 */
	idx -= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC;
	if (WARN_ON_ONCE(idx >= 4))
		return false;

	/* mask of the shared fields */
	if (uncore_nhmex)
		mask = NHMEX_M_PMON_ZDP_CTL_FVC_MASK;
	else
		mask = WSMEX_M_PMON_ZDP_CTL_FVC_MASK;
	er = &box->shared_regs[EXTRA_REG_NHMEX_M_ZDP_CTL_FVC];

	raw_spin_lock_irqsave(&er->lock, flags);
	/* add mask of the non-shared field if it's in use */
	if (__BITS_VALUE(atomic_read(&er->ref), idx, 8)) {
		if (uncore_nhmex)
			mask |= NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx);
		else
			mask |= WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx);
	}

	if (!atomic_read(&er->ref) || !((er->config ^ config) & mask)) {
		atomic_add(1 << (idx * 8), &er->ref);
		if (uncore_nhmex)
			mask = NHMEX_M_PMON_ZDP_CTL_FVC_MASK |
				NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx);
		else
			mask = WSMEX_M_PMON_ZDP_CTL_FVC_MASK |
				WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx);
		er->config &= ~mask;
		er->config |= (config & mask);
		ret = true;
	}
	raw_spin_unlock_irqrestore(&er->lock, flags);

	return ret;
}

static void nhmex_mbox_put_shared_reg(struct intel_uncore_box *box, int idx)
{
	struct intel_uncore_extra_reg *er;

	if (idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) {
		er = &box->shared_regs[idx];
		atomic_dec(&er->ref);
		return;
	}

	idx -= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC;
	er = &box->shared_regs[EXTRA_REG_NHMEX_M_ZDP_CTL_FVC];
	atomic_sub(1 << (idx * 8), &er->ref);
}

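/*
 * Move an event's ZDP_CTL_FVC payload from its original field to the field
 * selected by @new_idx; when @modify is set, also update the event selector
 * and the cached extra-register state to match.
 */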
static u64 nhmex_mbox_alter_er(struct perf_event *event, int new_idx, bool modify)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	u64 idx, orig_idx = __BITS_VALUE(reg1->idx, 0, 8);
	u64 config = reg1->config;

	/* get the non-shared control bits and shift them */
	idx = orig_idx - EXTRA_REG_NHMEX_M_ZDP_CTL_FVC;
	if (uncore_nhmex)
		config &= NHMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx);
	else
		config &= WSMEX_M_PMON_ZDP_CTL_FVC_EVENT_MASK(idx);
	if (new_idx > orig_idx) {
		idx = new_idx - orig_idx;
		config <<= 3 * idx;
	} else {
		idx = orig_idx - new_idx;
		config >>= 3 * idx;
	}

	/* add the shared control bits back */
	if (uncore_nhmex)
		config |= NHMEX_M_PMON_ZDP_CTL_FVC_MASK & reg1->config;
	else
		config |= WSMEX_M_PMON_ZDP_CTL_FVC_MASK & reg1->config;
	config |= NHMEX_M_PMON_ZDP_CTL_FVC_MASK & reg1->config;
	if (modify) {
		/* adjust the main event selector */
		if (new_idx > orig_idx)
			hwc->config += idx << NHMEX_M_PMON_CTL_INC_SEL_SHIFT;
		else
			hwc->config -= idx << NHMEX_M_PMON_CTL_INC_SEL_SHIFT;
		reg1->config = config;
		reg1->idx = ~0xff | new_idx;
	}
	return config;
}

static struct event_constraint *
nhmex_mbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct hw_perf_event_extra *reg2 = &event->hw.branch_reg;
	int i, idx[2], alloc = 0;
	u64 config1 = reg1->config;

	idx[0] = __BITS_VALUE(reg1->idx, 0, 8);
	idx[1] = __BITS_VALUE(reg1->idx, 1, 8);
again:
	for (i = 0; i < 2; i++) {
		if (!uncore_box_is_fake(box) && (reg1->alloc & (0x1 << i)))
			idx[i] = 0xff;

		if (idx[i] == 0xff)
			continue;

		if (!nhmex_mbox_get_shared_reg(box, idx[i],
				__BITS_VALUE(config1, i, 32)))
			goto fail;
		alloc |= (0x1 << i);
	}

	/* for the match/mask registers */
	if (reg2->idx != EXTRA_REG_NONE &&
	    (uncore_box_is_fake(box) || !reg2->alloc) &&
	    !nhmex_mbox_get_shared_reg(box, reg2->idx, reg2->config))
		goto fail;

	/*
	 * If it's a fake box -- as per validate_{group,event}() we
	 * shouldn't touch event state and we can avoid doing so
	 * since both will only call get_event_constraints() once
	 * on each event, this avoids the need for reg->alloc.
	 */
	if (!uncore_box_is_fake(box)) {
		if (idx[0] != 0xff && idx[0] != __BITS_VALUE(reg1->idx, 0, 8))
			nhmex_mbox_alter_er(event, idx[0], true);
		reg1->alloc |= alloc;
		if (reg2->idx != EXTRA_REG_NONE)
			reg2->alloc = 1;
	}
	return NULL;
fail:
	if (idx[0] != 0xff && !(alloc & 0x1) &&
	    idx[0] >= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC) {
		/*
		 * Events 0xd ~ 0x10 are functionally identical, but are
		 * controlled by different fields in the ZDP_CTL_FVC
		 * register. If we failed to take one field, try the
		 * remaining 3 choices.
		 */
		BUG_ON(__BITS_VALUE(reg1->idx, 1, 8) != 0xff);
		idx[0] -= EXTRA_REG_NHMEX_M_ZDP_CTL_FVC;
		idx[0] = (idx[0] + 1) % 4;
		idx[0] += EXTRA_REG_NHMEX_M_ZDP_CTL_FVC;
		if (idx[0] != __BITS_VALUE(reg1->idx, 0, 8)) {
			config1 = nhmex_mbox_alter_er(event, idx[0], false);
			goto again;
		}
	}

	if (alloc & 0x1)
		nhmex_mbox_put_shared_reg(box, idx[0]);
	if (alloc & 0x2)
		nhmex_mbox_put_shared_reg(box, idx[1]);
	return &uncore_constraint_empty;
}

static void nhmex_mbox_put_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct hw_perf_event_extra *reg2 = &event->hw.branch_reg;

	if (uncore_box_is_fake(box))
		return;

	if (reg1->alloc & 0x1)
		nhmex_mbox_put_shared_reg(box, __BITS_VALUE(reg1->idx, 0, 8));
	if (reg1->alloc & 0x2)
		nhmex_mbox_put_shared_reg(box, __BITS_VALUE(reg1->idx, 1, 8));
	reg1->alloc = 0;

	if (reg2->alloc) {
		nhmex_mbox_put_shared_reg(box, reg2->idx);
		reg2->alloc = 0;
	}
}

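/*
 * Map an extra_reg entry to its shared-register index: the four
 * ZDP_CTL_FVC users (events 0xd ~ 0x10) get consecutive indices starting
 * at EXTRA_REG_NHMEX_M_ZDP_CTL_FVC.
 */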
static int nhmex_mbox_extra_reg_idx(struct extra_reg *er)
{
	if (er->idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC)
		return er->idx;
	return er->idx + (er->event >> NHMEX_M_PMON_CTL_INC_SEL_SHIFT) - 0xd;
}

static int nhmex_mbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
{
	struct intel_uncore_type *type = box->pmu->type;
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct hw_perf_event_extra *reg2 = &event->hw.branch_reg;
	struct extra_reg *er;
	unsigned msr;
	int reg_idx = 0;
	/*
	 * The mbox events may require 2 extra MSRs at the most. But only
	 * the lower 32 bits in these MSRs are significant, so we can use
	 * config1 to pass two MSRs' config.
	 */
	for (er = nhmex_uncore_mbox_extra_regs; er->msr; er++) {
		if (er->event != (event->hw.config & er->config_mask))
			continue;
		if (event->attr.config1 & ~er->valid_mask)
			return -EINVAL;

		msr = er->msr + type->msr_offset * box->pmu->pmu_idx;
		if (WARN_ON_ONCE(msr >= 0xffff || er->idx >= 0xff))
			return -EINVAL;

		/* always use the 32~63 bits to pass the PLD config */
		if (er->idx == EXTRA_REG_NHMEX_M_PLD)
			reg_idx = 1;
		else if (WARN_ON_ONCE(reg_idx > 0))
			return -EINVAL;

		reg1->idx &= ~(0xff << (reg_idx * 8));
		reg1->reg &= ~(0xffff << (reg_idx * 16));
		reg1->idx |= nhmex_mbox_extra_reg_idx(er) << (reg_idx * 8);
		reg1->reg |= msr << (reg_idx * 16);
		reg1->config = event->attr.config1;
		reg_idx++;
	}
	/*
	 * The mbox only provides the ability to perform address matching
	 * for the PLD events.
	 */
	if (reg_idx == 2) {
		reg2->idx = EXTRA_REG_NHMEX_M_FILTER;
		if (event->attr.config2 & NHMEX_M_PMON_MM_CFG_EN)
			reg2->config = event->attr.config2;
		else
			reg2->config = ~0ULL;
		if (box->pmu->pmu_idx == 0)
			reg2->reg = NHMEX_M0_MSR_PMU_MM_CFG;
		else
			reg2->reg = NHMEX_M1_MSR_PMU_MM_CFG;
	}
	return 0;
}

static u64 nhmex_mbox_shared_reg_config(struct intel_uncore_box *box, int idx)
{
	struct intel_uncore_extra_reg *er;
	unsigned long flags;
	u64 config;

	if (idx < EXTRA_REG_NHMEX_M_ZDP_CTL_FVC)
		return box->shared_regs[idx].config;

	er = &box->shared_regs[EXTRA_REG_NHMEX_M_ZDP_CTL_FVC];
	raw_spin_lock_irqsave(&er->lock, flags);
	config = er->config;
	raw_spin_unlock_irqrestore(&er->lock, flags);
	return config;
}

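/*
 * Program the extra register(s) carried in config1, then the optional
 * address match/mask filter from config2, and finally the event control
 * register itself.
 */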
static void nhmex_mbox_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
	int idx;

	idx = __BITS_VALUE(reg1->idx, 0, 8);
	if (idx != 0xff)
		wrmsrq(__BITS_VALUE(reg1->reg, 0, 16),
			nhmex_mbox_shared_reg_config(box, idx));
	idx = __BITS_VALUE(reg1->idx, 1, 8);
	if (idx != 0xff)
		wrmsrq(__BITS_VALUE(reg1->reg, 1, 16),
			nhmex_mbox_shared_reg_config(box, idx));

	if (reg2->idx != EXTRA_REG_NONE) {
		wrmsrq(reg2->reg, 0);
		if (reg2->config != ~0ULL) {
			wrmsrq(reg2->reg + 1,
				reg2->config & NHMEX_M_PMON_ADDR_MATCH_MASK);
			wrmsrq(reg2->reg + 2, NHMEX_M_PMON_ADDR_MASK_MASK &
				(reg2->config >> NHMEX_M_PMON_ADDR_MASK_SHIFT));
			wrmsrq(reg2->reg, NHMEX_M_PMON_MM_CFG_EN);
		}
	}

	wrmsrq(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0);
}

DEFINE_UNCORE_FORMAT_ATTR(count_mode,		count_mode, "config:2-3");
DEFINE_UNCORE_FORMAT_ATTR(storage_mode,		storage_mode, "config:4-5");
DEFINE_UNCORE_FORMAT_ATTR(wrap_mode,		wrap_mode, "config:6");
DEFINE_UNCORE_FORMAT_ATTR(flag_mode,		flag_mode, "config:7");
DEFINE_UNCORE_FORMAT_ATTR(inc_sel,		inc_sel, "config:9-13");
DEFINE_UNCORE_FORMAT_ATTR(set_flag_sel,		set_flag_sel, "config:19-21");
DEFINE_UNCORE_FORMAT_ATTR(filter_cfg_en,	filter_cfg_en, "config2:63");
DEFINE_UNCORE_FORMAT_ATTR(filter_match,		filter_match, "config2:0-33");
DEFINE_UNCORE_FORMAT_ATTR(filter_mask,		filter_mask, "config2:34-61");
DEFINE_UNCORE_FORMAT_ATTR(dsp,			dsp, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(thr,			thr, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(fvc,			fvc, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(pgt,			pgt, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(map,			map, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(iss,			iss, "config1:0-31");
DEFINE_UNCORE_FORMAT_ATTR(pld,			pld, "config1:32-63");

static struct attribute *nhmex_uncore_mbox_formats_attr[] = {
	&format_attr_count_mode.attr,
	&format_attr_storage_mode.attr,
	&format_attr_wrap_mode.attr,
	&format_attr_flag_mode.attr,
	&format_attr_inc_sel.attr,
	&format_attr_set_flag_sel.attr,
	&format_attr_filter_cfg_en.attr,
	&format_attr_filter_match.attr,
	&format_attr_filter_mask.attr,
	&format_attr_dsp.attr,
	&format_attr_thr.attr,
	&format_attr_fvc.attr,
	&format_attr_pgt.attr,
	&format_attr_map.attr,
	&format_attr_iss.attr,
	&format_attr_pld.attr,
	NULL,
};

static const struct attribute_group nhmex_uncore_mbox_format_group = {
	.name		= "format",
	.attrs		= nhmex_uncore_mbox_formats_attr,
};

static struct uncore_event_desc nhmex_uncore_mbox_events[] = {
	INTEL_UNCORE_EVENT_DESC(bbox_cmds_read, "inc_sel=0xd,fvc=0x2800"),
	INTEL_UNCORE_EVENT_DESC(bbox_cmds_write, "inc_sel=0xd,fvc=0x2820"),
	{ /* end: all zeroes */ },
};

static struct uncore_event_desc wsmex_uncore_mbox_events[] = {
	INTEL_UNCORE_EVENT_DESC(bbox_cmds_read, "inc_sel=0xd,fvc=0x5000"),
	INTEL_UNCORE_EVENT_DESC(bbox_cmds_write, "inc_sel=0xd,fvc=0x5040"),
	{ /* end: all zeroes */ },
};

static struct intel_uncore_ops nhmex_uncore_mbox_ops = {
	NHMEX_UNCORE_OPS_COMMON_INIT(),
	.enable_event	= nhmex_mbox_msr_enable_event,
	.hw_config	= nhmex_mbox_hw_config,
	.get_constraint	= nhmex_mbox_get_constraint,
	.put_constraint	= nhmex_mbox_put_constraint,
};

static struct intel_uncore_type nhmex_uncore_mbox = {
	.name			= "mbox",
	.num_counters		= 6,
	.num_boxes		= 2,
	.perf_ctr_bits		= 48,
	.event_ctl		= NHMEX_M0_MSR_PMU_CTL0,
	.perf_ctr		= NHMEX_M0_MSR_PMU_CNT0,
	.event_mask		= NHMEX_M_PMON_RAW_EVENT_MASK,
	.box_ctl		= NHMEX_M0_MSR_GLOBAL_CTL,
	.msr_offset		= NHMEX_M_MSR_OFFSET,
	.pair_ctr_ctl		= 1,
	.num_shared_regs	= 8,
	.event_descs		= nhmex_uncore_mbox_events,
	.ops			= &nhmex_uncore_mbox_ops,
	.format_group		= &nhmex_uncore_mbox_format_group,
};

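/*
 * Switch an Rbox event to its paired slot: flip between the adjacent
 * even/odd extra register index, adjust the event selector accordingly,
 * and move the QLX config byte between the low and high byte positions
 * when needed.
 */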
static void nhmex_rbox_alter_er(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;

	/* adjust the main event selector and extra register index */
	if (reg1->idx % 2) {
		reg1->idx--;
		hwc->config -= 1 << NHMEX_R_PMON_CTL_EV_SEL_SHIFT;
	} else {
		reg1->idx++;
		hwc->config += 1 << NHMEX_R_PMON_CTL_EV_SEL_SHIFT;
	}

	/* adjust extra register config */
	switch (reg1->idx % 6) {
	case 2:
		/* shift the 8~15 bits to the 0~7 bits */
		reg1->config >>= 8;
		break;
	case 3:
		/* shift the 0~7 bits to the 8~15 bits */
		reg1->config <<= 8;
		break;
	}
}

/*
 * Each rbox has 4 event sets, which monitor PQI ports 0~3 or 4~7.
 * An event set consists of 6 events; the 3rd and 4th events in
 * an event set use the same extra register, so an event set uses
 * 5 extra registers.
 */
static struct event_constraint *
nhmex_rbox_get_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
	struct intel_uncore_extra_reg *er;
	unsigned long flags;
	int idx, er_idx;
	u64 config1;
	bool ok = false;

	if (!uncore_box_is_fake(box) && reg1->alloc)
		return NULL;

	idx = reg1->idx % 6;
	config1 = reg1->config;
again:
	er_idx = idx;
	/* the 3rd and 4th events use the same extra register */
	if (er_idx > 2)
		er_idx--;
	er_idx += (reg1->idx / 6) * 5;

	er = &box->shared_regs[er_idx];
	raw_spin_lock_irqsave(&er->lock, flags);
	if (idx < 2) {
		if (!atomic_read(&er->ref) || er->config == reg1->config) {
			atomic_inc(&er->ref);
			er->config = reg1->config;
			ok = true;
		}
	} else if (idx == 2 || idx == 3) {
		/*
		 * These two events use different fields in an extra register,
		 * the 0~7 bits and the 8~15 bits respectively.
		 */
		u64 mask = 0xff << ((idx - 2) * 8);
		if (!__BITS_VALUE(atomic_read(&er->ref), idx - 2, 8) ||
		    !((er->config ^ config1) & mask)) {
			atomic_add(1 << ((idx - 2) * 8), &er->ref);
			er->config &= ~mask;
			er->config |= config1 & mask;
			ok = true;
		}
	} else {
		if (!atomic_read(&er->ref) ||
		    (er->config == (hwc->config >> 32) &&
		     er->config1 == reg1->config &&
		     er->config2 == reg2->config)) {
			atomic_inc(&er->ref);
			er->config = (hwc->config >> 32);
			er->config1 = reg1->config;
			er->config2 = reg2->config;
			ok = true;
		}
	}
	raw_spin_unlock_irqrestore(&er->lock, flags);

	if (!ok) {
		/*
		 * The Rbox events are always in pairs. The paired
		 * events are functionally identical, but use different
		 * extra registers. If we failed to take an extra
		 * register, try the alternative.
		 */
		idx ^= 1;
		if (idx != reg1->idx % 6) {
			if (idx == 2)
				config1 >>= 8;
			else if (idx == 3)
				config1 <<= 8;
			goto again;
		}
	} else {
		if (!uncore_box_is_fake(box)) {
			if (idx != reg1->idx % 6)
				nhmex_rbox_alter_er(box, event);
			reg1->alloc = 1;
		}
		return NULL;
	}
	return &uncore_constraint_empty;
}

static void nhmex_rbox_put_constraint(struct intel_uncore_box *box, struct perf_event *event)
{
	struct intel_uncore_extra_reg *er;
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	int idx, er_idx;

	if (uncore_box_is_fake(box) || !reg1->alloc)
		return;

	idx = reg1->idx % 6;
	er_idx = idx;
	if (er_idx > 2)
		er_idx--;
	er_idx += (reg1->idx / 6) * 5;

	er = &box->shared_regs[er_idx];
	if (idx == 2 || idx == 3)
		atomic_sub(1 << ((idx - 2) * 8), &er->ref);
	else
		atomic_dec(&er->ref);

	reg1->alloc = 0;
}

static int nhmex_rbox_hw_config(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &event->hw.extra_reg;
	struct hw_perf_event_extra *reg2 = &event->hw.branch_reg;
	int idx;

	idx = (event->hw.config & NHMEX_R_PMON_CTL_EV_SEL_MASK) >>
		NHMEX_R_PMON_CTL_EV_SEL_SHIFT;
	if (idx >= 0x18)
		return -EINVAL;

	reg1->idx = idx;
	reg1->config = event->attr.config1;

	switch (idx % 6) {
	case 4:
	case 5:
		hwc->config |= event->attr.config & (~0ULL << 32);
		reg2->config = event->attr.config2;
		break;
	}
	return 0;
}

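/*
 * Each port owns one IPERF_CFG0, one IPERF_CFG1, one shared QLX_CFG and
 * two XBR (match/mask) register sets; idx % 6 selects which of them this
 * event programs before the control register is enabled.
 */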
static void nhmex_rbox_msr_enable_event(struct intel_uncore_box *box, struct perf_event *event)
{
	struct hw_perf_event *hwc = &event->hw;
	struct hw_perf_event_extra *reg1 = &hwc->extra_reg;
	struct hw_perf_event_extra *reg2 = &hwc->branch_reg;
	int idx, port;

	idx = reg1->idx;
	port = idx / 6 + box->pmu->pmu_idx * 4;

	switch (idx % 6) {
	case 0:
		wrmsrq(NHMEX_R_MSR_PORTN_IPERF_CFG0(port), reg1->config);
		break;
	case 1:
		wrmsrq(NHMEX_R_MSR_PORTN_IPERF_CFG1(port), reg1->config);
		break;
	case 2:
	case 3:
		wrmsrq(NHMEX_R_MSR_PORTN_QLX_CFG(port),
			uncore_shared_reg_config(box, 2 + (idx / 6) * 5));
		break;
	case 4:
		wrmsrq(NHMEX_R_MSR_PORTN_XBR_SET1_MM_CFG(port),
			hwc->config >> 32);
		wrmsrq(NHMEX_R_MSR_PORTN_XBR_SET1_MATCH(port), reg1->config);
		wrmsrq(NHMEX_R_MSR_PORTN_XBR_SET1_MASK(port), reg2->config);
		break;
	case 5:
		wrmsrq(NHMEX_R_MSR_PORTN_XBR_SET2_MM_CFG(port),
			hwc->config >> 32);
		wrmsrq(NHMEX_R_MSR_PORTN_XBR_SET2_MATCH(port), reg1->config);
		wrmsrq(NHMEX_R_MSR_PORTN_XBR_SET2_MASK(port), reg2->config);
		break;
	}

	wrmsrq(hwc->config_base, NHMEX_PMON_CTL_EN_BIT0 |
		(hwc->config & NHMEX_R_PMON_CTL_EV_SEL_MASK));
}

DEFINE_UNCORE_FORMAT_ATTR(xbr_mm_cfg, xbr_mm_cfg, "config:32-63");
DEFINE_UNCORE_FORMAT_ATTR(xbr_match, xbr_match, "config1:0-63");
DEFINE_UNCORE_FORMAT_ATTR(xbr_mask, xbr_mask, "config2:0-63");
DEFINE_UNCORE_FORMAT_ATTR(qlx_cfg, qlx_cfg, "config1:0-15");
DEFINE_UNCORE_FORMAT_ATTR(iperf_cfg, iperf_cfg, "config1:0-31");

static struct attribute *nhmex_uncore_rbox_formats_attr[] = {
	&format_attr_event5.attr,
	&format_attr_xbr_mm_cfg.attr,
	&format_attr_xbr_match.attr,
	&format_attr_xbr_mask.attr,
	&format_attr_qlx_cfg.attr,
	&format_attr_iperf_cfg.attr,
	NULL,
};

static const struct attribute_group nhmex_uncore_rbox_format_group = {
	.name = "format",
	.attrs = nhmex_uncore_rbox_formats_attr,
};

static struct uncore_event_desc nhmex_uncore_rbox_events[] = {
	INTEL_UNCORE_EVENT_DESC(qpi0_flit_send, "event=0x0,iperf_cfg=0x80000000"),
	INTEL_UNCORE_EVENT_DESC(qpi1_filt_send, "event=0x6,iperf_cfg=0x80000000"),
	INTEL_UNCORE_EVENT_DESC(qpi0_idle_filt, "event=0x0,iperf_cfg=0x40000000"),
	INTEL_UNCORE_EVENT_DESC(qpi1_idle_filt, "event=0x6,iperf_cfg=0x40000000"),
	INTEL_UNCORE_EVENT_DESC(qpi0_date_response, "event=0x0,iperf_cfg=0xc4"),
	INTEL_UNCORE_EVENT_DESC(qpi1_date_response, "event=0x6,iperf_cfg=0xc4"),
	{ /* end: all zeroes */ },
};

static struct intel_uncore_ops nhmex_uncore_rbox_ops = {
	NHMEX_UNCORE_OPS_COMMON_INIT(),
	.enable_event		= nhmex_rbox_msr_enable_event,
	.hw_config		= nhmex_rbox_hw_config,
	.get_constraint		= nhmex_rbox_get_constraint,
	.put_constraint		= nhmex_rbox_put_constraint,
};

static struct intel_uncore_type nhmex_uncore_rbox = {
	.name			= "rbox",
	.num_counters		= 8,
	.num_boxes		= 2,
	.perf_ctr_bits		= 48,
	.event_ctl		= NHMEX_R_MSR_PMON_CTL0,
	.perf_ctr		= NHMEX_R_MSR_PMON_CNT0,
	.event_mask		= NHMEX_R_PMON_RAW_EVENT_MASK,
	.box_ctl		= NHMEX_R_MSR_GLOBAL_CTL,
	.msr_offset		= NHMEX_R_MSR_OFFSET,
	.pair_ctr_ctl		= 1,
	.num_shared_regs	= 20,
	.event_descs		= nhmex_uncore_rbox_events,
	.ops			= &nhmex_uncore_rbox_ops,
	.format_group		= &nhmex_uncore_rbox_format_group
};

static struct intel_uncore_type *nhmex_msr_uncores[] = {
	&nhmex_uncore_ubox,
	&nhmex_uncore_cbox,
	&nhmex_uncore_bbox,
	&nhmex_uncore_sbox,
	&nhmex_uncore_mbox,
	&nhmex_uncore_rbox,
	&nhmex_uncore_wbox,
	NULL,
};

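/*
 * Westmere-EX moved the FVC event fields (see the WSMEX_* masks above), so
 * pick the matching mbox event descriptions, and clamp the number of cbox
 * PMUs to the cores actually present in the package.
 */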
void nhmex_uncore_cpu_init(void)
{
	if (boot_cpu_data.x86_vfm == INTEL_NEHALEM_EX)
		uncore_nhmex = true;
	else
		nhmex_uncore_mbox.event_descs = wsmex_uncore_mbox_events;
	if (nhmex_uncore_cbox.num_boxes > topology_num_cores_per_package())
		nhmex_uncore_cbox.num_boxes = topology_num_cores_per_package();
	uncore_msr_uncores = nhmex_msr_uncores;
}
/* end of Nehalem-EX uncore support */

|---|