1 /*
2 * Copyright (c) 2023-2024, Arm Limited. All rights reserved.
3 *
4 * SPDX-License-Identifier: BSD-3-Clause
5 */
6
7 #include <arch.h>
8 #include <arch_features.h>
9 #include <arch_helpers.h>
10 #include <lib/extensions/pmuv3.h>
11
/*
 * Program MDCR_EL2.HPMN with its hardware reset value (the total number
 * of implemented event counters, PMCR_EL0.N), so software that expects
 * the reset behaviour is not surprised.
 */
static u_register_t init_mdcr_el2_hpmn(u_register_t mdcr_el2)
{
	/* PMCR_EL0.N: number of event counters implemented. */
	u_register_t pmcr_n = (read_pmcr_el0() >> PMCR_EL0_N_SHIFT) &
			      PMCR_EL0_N_MASK;

	return (mdcr_el2 & ~MDCR_EL2_HPMN_MASK) | pmcr_n;
}
23
/*
 * Disable FEAT_MTPMU at EL3 when the feature is implemented.
 * Returns the updated MDCR_EL3 value; a no-op otherwise.
 */
static u_register_t mtpmu_disable_el3(u_register_t mdcr_el3)
{
	/* Nothing to do if FEAT_MTPMU is not implemented. */
	if (!is_feat_mtpmu_supported()) {
		return mdcr_el3;
	}

	/*
	 * MDCR_EL3.MTPME = 0: FEAT_MTPMU is disabled, so the Effective
	 * value of PMEVTYPER<n>_EL0.MT is zero.
	 */
	return mdcr_el3 & ~MDCR_MTPME_BIT;
}
37
/*
 * Configure the saved PMUv3-related control registers in 'ctx' so that,
 * when the context is restored, counting is prohibited in EL3/Secure
 * state and PMU accesses from lower ELs are not trapped to EL3.
 */
void pmuv3_enable(cpu_context_t *ctx)
{
#if (CTX_INCLUDE_EL2_REGS && IMAGE_BL31)
	/* Reset the saved MDCR_EL2.HPMN field to its hardware reset value. */
	u_register_t mdcr_el2_val;

	mdcr_el2_val = read_el2_ctx_common(get_el2_sysregs_ctx(ctx), mdcr_el2);
	mdcr_el2_val = init_mdcr_el2_hpmn(mdcr_el2_val);
	write_el2_ctx_common(get_el2_sysregs_ctx(ctx), mdcr_el2, mdcr_el2_val);
#endif /* (CTX_INCLUDE_EL2_REGS && IMAGE_BL31) */

	el3_state_t *state = get_el3state_ctx(ctx);
	u_register_t mdcr_el3_val = read_ctx_reg(state, CTX_MDCR_EL3);

	/* ---------------------------------------------------------------------
	 * MDCR_EL3.MPMX: Set to zero to not affect event counters (when
	 * SPME = 0).
	 *
	 * MDCR_EL3.MCCD: Set to one so that cycle counting by PMCCNTR_EL0 is
	 * prohibited in EL3. This bit is RES0 in versions of the
	 * architecture with FEAT_PMUv3p7 not implemented.
	 *
	 * MDCR_EL3.SCCD: Set to one so that cycle counting by PMCCNTR_EL0 is
	 * prohibited in Secure state. This bit is RES0 in versions of the
	 * architecture with FEAT_PMUv3p5 not implemented.
	 *
	 * MDCR_EL3.SPME: Set to zero so that event counting is prohibited in
	 * Secure state (and explicitly EL3 with later revisions). If ARMv8.2
	 * Debug is not implemented this bit does not have any effect on the
	 * counters unless there is support for the implementation defined
	 * authentication interface ExternalSecureNoninvasiveDebugEnabled().
	 *
	 * The SPME/MPMX combination is a little tricky. Below is a small
	 * summary if another combination is ever needed:
	 * SPME | MPMX | secure world | EL3
	 * -------------------------------------
	 * 0    | 0    | disabled     | disabled
	 * 1    | 0    | enabled      | enabled
	 * 0    | 1    | enabled      | disabled
	 * 1    | 1    | enabled      | disabled only for counters 0 to
	 * MDCR_EL2.HPMN - 1. Enabled for the rest
	 *
	 * MDCR_EL3.EnPM2: Set to one so that various PMUv3p9 related system
	 * register accesses do not trap to EL3.
	 *
	 * MDCR_EL3.TPM: Set to zero so that EL0, EL1, and EL2 System register
	 * accesses to all Performance Monitors registers do not trap to EL3.
	 *
	 * MDCR_EL3.PMEE: Set to 0b01 to delegate PMU IRQ and Profiling
	 * exception control to MDCR_EL2, allowing lower ELs to own this
	 * policy.
	 */
	mdcr_el3_val |= MDCR_SCCD_BIT | MDCR_MCCD_BIT | MDCR_EnPM2_BIT;
	mdcr_el3_val &= ~(MDCR_MPMX_BIT | MDCR_SPME_BIT | MDCR_TPM_BIT);
	mdcr_el3_val = mtpmu_disable_el3(mdcr_el3_val);

	/* PMEE is only written when FEAT_EBEP is implemented. */
	if (is_feat_ebep_supported()) {
		mdcr_el3_val |= MDCR_PMEE(MDCR_PMEE_CTRL_EL2);
	}

	write_ctx_reg(state, CTX_MDCR_EL3, mdcr_el3_val);
}
98
pmuv3_init_el3(void)99 void pmuv3_init_el3(void)
100 {
101 /* ---------------------------------------------------------------------
102 * Initialise PMCR_EL0 setting all fields rather than relying
103 * on hw. Some fields are architecturally UNKNOWN on reset.
104 *
105 * PMCR_EL0.DP: Set to one so that the cycle counter,
106 * PMCCNTR_EL0 does not count when event counting is prohibited.
107 * Necessary on PMUv3 <= p7 where MDCR_EL3.{SCCD,MCCD} are not
108 * available
109 *
110 * PMCR_EL0.X: Set to zero to disable export of events.
111 *
112 * PMCR_EL0.C: Set to one to reset PMCCNTR_EL0 to zero.
113 *
114 * PMCR_EL0.P: Set to one to reset each event counter PMEVCNTR<n>_EL0 to
115 * zero.
116 *
117 * PMCR_EL0.E: Set to zero to disable cycle and event counters.
118 * ---------------------------------------------------------------------
119 */
120 write_pmcr_el0((read_pmcr_el0() | PMCR_EL0_DP_BIT | PMCR_EL0_C_BIT |
121 PMCR_EL0_P_BIT) & ~(PMCR_EL0_X_BIT | PMCR_EL0_E_BIT));
122 }
123
/*
 * Disable FEAT_MTPMU at EL2 when the feature is implemented.
 * Returns the updated MDCR_EL2 value; a no-op otherwise.
 */
static u_register_t mtpmu_disable_el2(u_register_t mdcr_el2)
{
	/* Nothing to do if FEAT_MTPMU is not implemented. */
	if (!is_feat_mtpmu_supported()) {
		return mdcr_el2;
	}

	/*
	 * MDCR_EL2.MTPME = 0: FEAT_MTPMU is disabled, so the Effective
	 * value of PMEVTYPER<n>_EL0.MT is zero.
	 */
	return mdcr_el2 & ~MDCR_EL2_MTPME;
}
136
pmuv3_init_el2_unused(void)137 void pmuv3_init_el2_unused(void)
138 {
139 u_register_t mdcr_el2 = read_mdcr_el2();
140
141 /*
142 * Initialise MDCR_EL2, setting all fields rather than
143 * relying on hw. Some fields are architecturally
144 * UNKNOWN on reset.
145 *
146 * MDCR_EL2.HLP: Set to one so that event counter overflow, that is
147 * recorded in PMOVSCLR_EL0[0-30], occurs on the increment that changes
148 * PMEVCNTR<n>_EL0[63] from 1 to 0, when ARMv8.5-PMU is implemented.
149 * This bit is RES0 in versions of the architecture earlier than
150 * ARMv8.5, setting it to 1 doesn't have any effect on them.
151 *
152 * MDCR_EL2.HCCD: Set to one to prohibit cycle counting at EL2. This bit
153 * is RES0 in versions of the architecture with FEAT_PMUv3p5 not
154 * implemented.
155 *
156 * MDCR_EL2.HPMD: Set to one so that event counting is
157 * prohibited at EL2 for counter n < MDCR_EL2.HPMN. This bit is RES0
158 * in versions of the architecture with FEAT_PMUv3p1 not implemented.
159 *
160 * MDCR_EL2.HPME: Set to zero to disable event counters for counters
161 * n >= MDCR_EL2.HPMN.
162 *
163 * MDCR_EL2.TPM: Set to zero so that Non-secure EL0 and
164 * EL1 accesses to all Performance Monitors registers
165 * are not trapped to EL2.
166 *
167 * MDCR_EL2.TPMCR: Set to zero so that Non-secure EL0
168 * and EL1 accesses to the PMCR_EL0 or PMCR are not
169 * trapped to EL2.
170 */
171 mdcr_el2 = (mdcr_el2 | MDCR_EL2_HLP_BIT | MDCR_EL2_HPMD_BIT |
172 MDCR_EL2_HCCD_BIT) &
173 ~(MDCR_EL2_HPME_BIT | MDCR_EL2_TPM_BIT | MDCR_EL2_TPMCR_BIT);
174 mdcr_el2 = init_mdcr_el2_hpmn(mdcr_el2);
175 mdcr_el2 = mtpmu_disable_el2(mdcr_el2);
176 write_mdcr_el2(mdcr_el2);
177 }
178