/*
 * Copyright (c) 2019-2022, Arm Limited. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#ifndef ARCH_FEATURES_H
#define ARCH_FEATURES_H

#include <stdbool.h>

#include <arch_helpers.h>

static inline bool is_armv7_gentimer_present(void)
{
	/* The Generic Timer is always present in an ARMv8-A implementation */
	return true;
}

static inline bool is_armv8_1_pan_present(void)
{
	return ((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_PAN_SHIFT) &
		ID_AA64MMFR1_EL1_PAN_MASK) != 0U;
}

static inline bool is_armv8_1_vhe_present(void)
{
	return ((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_VHE_SHIFT) &
		ID_AA64MMFR1_EL1_VHE_MASK) != 0U;
}

static inline bool is_armv8_2_ttcnp_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_CNP_SHIFT) &
		ID_AA64MMFR2_EL1_CNP_MASK) != 0U;
}

static inline bool is_armv8_3_pauth_present(void)
{
	uint64_t mask = (ID_AA64ISAR1_GPI_MASK << ID_AA64ISAR1_GPI_SHIFT) |
			(ID_AA64ISAR1_GPA_MASK << ID_AA64ISAR1_GPA_SHIFT) |
			(ID_AA64ISAR1_API_MASK << ID_AA64ISAR1_API_SHIFT) |
			(ID_AA64ISAR1_APA_MASK << ID_AA64ISAR1_APA_SHIFT);

	/* If any of the fields is not zero, PAuth is present */
	return (read_id_aa64isar1_el1() & mask) != 0U;
}

static inline bool is_armv8_4_dit_present(void)
{
	return ((read_id_aa64pfr0_el1() >> ID_AA64PFR0_DIT_SHIFT) &
		ID_AA64PFR0_DIT_MASK) == 1U;
}

static inline bool is_armv8_4_ttst_present(void)
{
	return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_ST_SHIFT) &
		ID_AA64MMFR2_EL1_ST_MASK) == 1U;
}

static inline bool is_armv8_5_bti_present(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_BT_SHIFT) &
		ID_AA64PFR1_EL1_BT_MASK) == BTI_IMPLEMENTED;
}

static inline unsigned int get_armv8_5_mte_support(void)
{
	return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_MTE_SHIFT) &
		ID_AA64PFR1_EL1_MTE_MASK);
}
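
/*
 * Illustrative usage only (not part of this header's API): the function
 * above returns the raw ID_AA64PFR1_EL1.MTE field, so callers compare it
 * against the MTE support-level constants rather than treating it as a
 * boolean, e.g.
 *
 *	if (get_armv8_5_mte_support() >= MTE_IMPLEMENTED_ELX) {
 *		... MTE is usable at EL1/EL2, enable tag checking ...
 *	}
 *
 * MTE_IMPLEMENTED_ELX is assumed here for illustration; use whichever
 * level constants the surrounding code base defines.
 */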

static inline bool is_armv8_4_sel2_present(void)
{
	return ((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SEL2_SHIFT) &
		ID_AA64PFR0_SEL2_MASK) == 1ULL;
}

static inline bool is_armv8_6_twed_present(void)
{
	return (((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_TWED_SHIFT) &
		ID_AA64MMFR1_EL1_TWED_MASK) == ID_AA64MMFR1_EL1_TWED_SUPPORTED);
}

static inline bool is_armv8_6_fgt_present(void)
{
	return ((read_id_aa64mmfr0_el1() >> ID_AA64MMFR0_EL1_FGT_SHIFT) &
		ID_AA64MMFR0_EL1_FGT_MASK) == ID_AA64MMFR0_EL1_FGT_SUPPORTED;
}

static inline unsigned long int get_armv8_6_ecv_support(void)
{
	return ((read_id_aa64mmfr0_el1() >> ID_AA64MMFR0_EL1_ECV_SHIFT) &
		ID_AA64MMFR0_EL1_ECV_MASK);
}

static inline bool is_armv8_5_rng_present(void)
{
	return ((read_id_aa64isar0_el1() >> ID_AA64ISAR0_RNDR_SHIFT) &
		ID_AA64ISAR0_RNDR_MASK) != 0U;
}

static inline bool is_armv8_6_feat_amuv1p1_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT) &
		ID_AA64PFR0_AMU_MASK) >= ID_AA64PFR0_AMU_V1P1);
}

/*
 * Return MPAM version:
 *
 * 0x00: None	Armv8.0 or later
 * 0x01: v0.1	Armv8.4 or later
 * 0x10: v1.0	Armv8.2 or later
 * 0x11: v1.1	Armv8.4 or later
 */
static inline unsigned int get_mpam_version(void)
{
	return (unsigned int)((((read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
				((read_id_aa64pfr1_el1() >>
		ID_AA64PFR1_MPAM_FRAC_SHIFT) & ID_AA64PFR1_MPAM_FRAC_MASK));
}
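
/*
 * The returned value packs ID_AA64PFR0_EL1.MPAM into bits [7:4] and
 * ID_AA64PFR1_EL1.MPAM_frac into bits [3:0], so it can be compared
 * directly against the encodings listed above. A minimal caller sketch
 * (illustrative only, not part of this header):
 *
 *	unsigned int mpam = get_mpam_version();
 *
 *	if (mpam >= 0x10U) {
 *		... at least MPAM v1.0 is implemented ...
 *	}
 */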

static inline bool is_feat_hcx_present(void)
{
	return (((read_id_aa64mmfr1_el1() >> ID_AA64MMFR1_EL1_HCX_SHIFT) &
		ID_AA64MMFR1_EL1_HCX_MASK) == ID_AA64MMFR1_EL1_HCX_SUPPORTED);
}

static inline unsigned int get_armv9_2_feat_rme_support(void)
{
	/*
	 * Return the RME version, zero if not supported. The result can be
	 * used either as the RME version number or compared against zero to
	 * detect RME presence.
	 */
	return (unsigned int)(read_id_aa64pfr0_el1() >>
		ID_AA64PFR0_FEAT_RME_SHIFT) & ID_AA64PFR0_FEAT_RME_MASK;
}
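
/*
 * Illustrative usage only: because the function returns zero when RME is
 * absent, a caller can treat the result either as a presence check or as
 * a version number, e.g.
 *
 *	if (get_armv9_2_feat_rme_support() != 0U) {
 *		... RME is implemented; the value gives the RME version ...
 *	}
 */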

/*********************************************************************************
 * Function to identify the presence of FEAT_SB (Speculation Barrier Instruction)
 ********************************************************************************/
static inline bool is_armv8_0_feat_sb_present(void)
{
	return (((read_id_aa64isar1_el1() >> ID_AA64ISAR1_SB_SHIFT) &
		ID_AA64ISAR1_SB_MASK) == ID_AA64ISAR1_SB_SUPPORTED);
}

/*********************************************************************************
 * Function to identify the presence of FEAT_CSV2_2 (Cache Speculation Variant 2)
 ********************************************************************************/
static inline bool is_armv8_0_feat_csv2_2_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_CSV2_SHIFT) &
		ID_AA64PFR0_CSV2_MASK) == ID_AA64PFR0_CSV2_2_SUPPORTED);
}

/**********************************************************************************
 * Function to identify the presence of FEAT_SPE (Statistical Profiling Extension)
 *********************************************************************************/
static inline bool is_armv8_2_feat_spe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_PMS_SHIFT) &
		ID_AA64DFR0_PMS_MASK) != ID_AA64DFR0_SPE_NOT_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_SVE (Scalable Vector Extension)
 ******************************************************************************/
static inline bool is_armv8_2_feat_sve_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_SVE_SHIFT) &
		ID_AA64PFR0_SVE_MASK) == ID_AA64PFR0_SVE_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_RAS (Reliability, Availability,
 * and Serviceability Extension)
 ******************************************************************************/
static inline bool is_armv8_2_feat_ras_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_RAS_SHIFT) &
		ID_AA64PFR0_RAS_MASK) != ID_AA64PFR0_RAS_NOT_SUPPORTED);
}

/**************************************************************************
 * Function to identify the presence of FEAT_DIT (Data Independent Timing)
 *************************************************************************/
static inline bool is_armv8_4_feat_dit_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_DIT_SHIFT) &
		ID_AA64PFR0_DIT_MASK) == ID_AA64PFR0_DIT_SUPPORTED);
}

/*************************************************************************
 * Function to identify the presence of FEAT_TRF (Self-hosted Trace
 * Extension)
 ************************************************************************/
static inline bool is_arm8_4_feat_trf_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_TRACEFILT_SHIFT) &
		ID_AA64DFR0_TRACEFILT_MASK) == ID_AA64DFR0_TRACEFILT_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_AMUv1 (Activity Monitors
 * Extension v1)
 ******************************************************************************/
static inline bool is_armv8_4_feat_amuv1_present(void)
{
	return (((read_id_aa64pfr0_el1() >> ID_AA64PFR0_AMU_SHIFT) &
		ID_AA64PFR0_AMU_MASK) >= ID_AA64PFR0_AMU_V1);
}

/********************************************************************************
 * Function to identify the presence of FEAT_NV2 (Enhanced Nested Virtualization
 * Support)
 *******************************************************************************/
static inline unsigned int get_armv8_4_feat_nv_support(void)
{
	return (((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_NV_SHIFT) &
		ID_AA64MMFR2_EL1_NV_MASK));
}

/*******************************************************************************
 * Function to identify the presence of FEAT_BRBE (Branch Record Buffer
 * Extension)
 ******************************************************************************/
static inline bool is_feat_brbe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_BRBE_SHIFT) &
		ID_AA64DFR0_BRBE_MASK) == ID_AA64DFR0_BRBE_SUPPORTED);
}

/*******************************************************************************
 * Function to identify the presence of FEAT_TRBE (Trace Buffer Extension)
 ******************************************************************************/
static inline bool is_feat_trbe_present(void)
{
	return (((read_id_aa64dfr0_el1() >> ID_AA64DFR0_TRACEBUFFER_SHIFT) &
		ID_AA64DFR0_TRACEBUFFER_MASK) == ID_AA64DFR0_TRACEBUFFER_SUPPORTED);
}

#endif /* ARCH_FEATURES_H */