Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux
1
fork

Configure Feed

Select the types of activity you want to include in your feed.

KVM: arm64: Account for RES1 bits in DECLARE_FEAT_MAP() and co

None of the registers we manage in the feature dependency infrastructure
so far has any RES1 bit. This is about to change, as VTCR_EL2 has
bit 31 defined as RES1.

To avoid failing the consistency checks by leaving a bit undescribed,
add RES1 bits to the set of immutable bits. This requires some extra
surgery for the FGT handling, as we now need to track RES1 bits there
as well.

There are no RES1 FGT bits *yet*. Watch this space.

Reviewed-by: Fuad Tabba <tabba@google.com>
Tested-by: Sascha Bischoff <sascha.bischoff@arm.com>
Reviewed-by: Jonathan Cameron <jonathan.cameron@huawei.com>
Tested-by: Fuad Tabba <tabba@google.com>
Link: https://patch.msgid.link/20251210173024.561160-5-maz@kernel.org
Signed-off-by: Marc Zyngier <maz@kernel.org>

+44 -35
+1
arch/arm64/include/asm/kvm_host.h
··· 638 638 u64 mask; 639 639 u64 nmask; 640 640 u64 res0; 641 + u64 res1; 641 642 }; 642 643 643 644 extern struct fgt_masks hfgrtr_masks;
+13 -12
arch/arm64/kvm/config.c
··· 16 16 */ 17 17 struct reg_bits_to_feat_map { 18 18 union { 19 - u64 bits; 20 - u64 *res0p; 19 + u64 bits; 20 + struct fgt_masks *masks; 21 21 }; 22 22 23 23 #define NEVER_FGU BIT(0) /* Can trap, but never UNDEF */ 24 24 #define CALL_FUNC BIT(1) /* Needs to evaluate tons of crap */ 25 25 #define FIXED_VALUE BIT(2) /* RAZ/WI or RAO/WI in KVM */ 26 - #define RES0_POINTER BIT(3) /* Pointer to RES0 value instead of bits */ 26 + #define MASKS_POINTER BIT(3) /* Pointer to fgt_masks struct instead of bits */ 27 27 28 28 unsigned long flags; 29 29 ··· 92 92 #define NEEDS_FEAT_FIXED(m, ...) \ 93 93 __NEEDS_FEAT_FLAG(m, FIXED_VALUE, bits, __VA_ARGS__, 0) 94 94 95 - #define NEEDS_FEAT_RES0(p, ...) \ 96 - __NEEDS_FEAT_FLAG(p, RES0_POINTER, res0p, __VA_ARGS__) 95 + #define NEEDS_FEAT_MASKS(p, ...) \ 96 + __NEEDS_FEAT_FLAG(p, MASKS_POINTER, masks, __VA_ARGS__) 97 97 98 98 /* 99 99 * Declare the dependency between a set of bits and a set of features, ··· 109 109 #define DECLARE_FEAT_MAP(n, r, m, f) \ 110 110 struct reg_feat_map_desc n = { \ 111 111 .name = #r, \ 112 - .feat_map = NEEDS_FEAT(~r##_RES0, f), \ 112 + .feat_map = NEEDS_FEAT(~(r##_RES0 | \ 113 + r##_RES1), f), \ 113 114 .bit_feat_map = m, \ 114 115 .bit_feat_map_sz = ARRAY_SIZE(m), \ 115 116 } 116 117 117 118 /* 118 119 * Specialised version of the above for FGT registers that have their 119 - * RES0 masks described as struct fgt_masks. 120 + * RESx masks described as struct fgt_masks. 
120 121 */ 121 122 #define DECLARE_FEAT_MAP_FGT(n, msk, m, f) \ 122 123 struct reg_feat_map_desc n = { \ 123 124 .name = #msk, \ 124 - .feat_map = NEEDS_FEAT_RES0(&msk.res0, f),\ 125 + .feat_map = NEEDS_FEAT_MASKS(&msk, f), \ 125 126 .bit_feat_map = m, \ 126 127 .bit_feat_map_sz = ARRAY_SIZE(m), \ 127 128 } ··· 1169 1168 mdcr_el2_feat_map, FEAT_AA64EL2); 1170 1169 1171 1170 static void __init check_feat_map(const struct reg_bits_to_feat_map *map, 1172 - int map_size, u64 res0, const char *str) 1171 + int map_size, u64 resx, const char *str) 1173 1172 { 1174 1173 u64 mask = 0; 1175 1174 1176 1175 for (int i = 0; i < map_size; i++) 1177 1176 mask |= map[i].bits; 1178 1177 1179 - if (mask != ~res0) 1178 + if (mask != ~resx) 1180 1179 kvm_err("Undefined %s behaviour, bits %016llx\n", 1181 - str, mask ^ ~res0); 1180 + str, mask ^ ~resx); 1182 1181 } 1183 1182 1184 1183 static u64 reg_feat_map_bits(const struct reg_bits_to_feat_map *map) 1185 1184 { 1186 - return map->flags & RES0_POINTER ? ~(*map->res0p) : map->bits; 1185 + return map->flags & MASKS_POINTER ? (map->masks->mask | map->masks->nmask) : map->bits; 1187 1186 } 1188 1187 1189 1188 static void __init check_reg_desc(const struct reg_feat_map_desc *r)
+30 -23
arch/arm64/kvm/emulate-nested.c
··· 2105 2105 } 2106 2106 2107 2107 #define FGT_MASKS(__n, __m) \ 2108 - struct fgt_masks __n = { .str = #__m, .res0 = __m, } 2108 + struct fgt_masks __n = { .str = #__m, .res0 = __m ## _RES0, .res1 = __m ## _RES1 } 2109 2109 2110 - FGT_MASKS(hfgrtr_masks, HFGRTR_EL2_RES0); 2111 - FGT_MASKS(hfgwtr_masks, HFGWTR_EL2_RES0); 2112 - FGT_MASKS(hfgitr_masks, HFGITR_EL2_RES0); 2113 - FGT_MASKS(hdfgrtr_masks, HDFGRTR_EL2_RES0); 2114 - FGT_MASKS(hdfgwtr_masks, HDFGWTR_EL2_RES0); 2115 - FGT_MASKS(hafgrtr_masks, HAFGRTR_EL2_RES0); 2116 - FGT_MASKS(hfgrtr2_masks, HFGRTR2_EL2_RES0); 2117 - FGT_MASKS(hfgwtr2_masks, HFGWTR2_EL2_RES0); 2118 - FGT_MASKS(hfgitr2_masks, HFGITR2_EL2_RES0); 2119 - FGT_MASKS(hdfgrtr2_masks, HDFGRTR2_EL2_RES0); 2120 - FGT_MASKS(hdfgwtr2_masks, HDFGWTR2_EL2_RES0); 2110 + FGT_MASKS(hfgrtr_masks, HFGRTR_EL2); 2111 + FGT_MASKS(hfgwtr_masks, HFGWTR_EL2); 2112 + FGT_MASKS(hfgitr_masks, HFGITR_EL2); 2113 + FGT_MASKS(hdfgrtr_masks, HDFGRTR_EL2); 2114 + FGT_MASKS(hdfgwtr_masks, HDFGWTR_EL2); 2115 + FGT_MASKS(hafgrtr_masks, HAFGRTR_EL2); 2116 + FGT_MASKS(hfgrtr2_masks, HFGRTR2_EL2); 2117 + FGT_MASKS(hfgwtr2_masks, HFGWTR2_EL2); 2118 + FGT_MASKS(hfgitr2_masks, HFGITR2_EL2); 2119 + FGT_MASKS(hdfgrtr2_masks, HDFGRTR2_EL2); 2120 + FGT_MASKS(hdfgwtr2_masks, HDFGWTR2_EL2); 2121 2121 2122 2122 static __init bool aggregate_fgt(union trap_config tc) 2123 2123 { 2124 2124 struct fgt_masks *rmasks, *wmasks; 2125 + u64 rresx, wresx; 2125 2126 2126 2127 switch (tc.fgt) { 2127 2128 case HFGRTR_GROUP: ··· 2155 2154 break; 2156 2155 } 2157 2156 2157 + rresx = rmasks->res0 | rmasks->res1; 2158 + if (wmasks) 2159 + wresx = wmasks->res0 | wmasks->res1; 2160 + 2158 2161 /* 2159 2162 * A bit can be reserved in either the R or W register, but 2160 2163 * not both. 
2161 2164 */ 2162 - if ((BIT(tc.bit) & rmasks->res0) && 2163 - (!wmasks || (BIT(tc.bit) & wmasks->res0))) 2165 + if ((BIT(tc.bit) & rresx) && (!wmasks || (BIT(tc.bit) & wresx))) 2164 2166 return false; 2165 2167 2166 2168 if (tc.pol) 2167 - rmasks->mask |= BIT(tc.bit) & ~rmasks->res0; 2169 + rmasks->mask |= BIT(tc.bit) & ~rresx; 2168 2170 else 2169 - rmasks->nmask |= BIT(tc.bit) & ~rmasks->res0; 2171 + rmasks->nmask |= BIT(tc.bit) & ~rresx; 2170 2172 2171 2173 if (wmasks) { 2172 2174 if (tc.pol) 2173 - wmasks->mask |= BIT(tc.bit) & ~wmasks->res0; 2175 + wmasks->mask |= BIT(tc.bit) & ~wresx; 2174 2176 else 2175 - wmasks->nmask |= BIT(tc.bit) & ~wmasks->res0; 2177 + wmasks->nmask |= BIT(tc.bit) & ~wresx; 2176 2178 } 2177 2179 2178 2180 return true; ··· 2184 2180 static __init int check_fgt_masks(struct fgt_masks *masks) 2185 2181 { 2186 2182 unsigned long duplicate = masks->mask & masks->nmask; 2187 - u64 res0 = masks->res0; 2188 2183 int ret = 0; 2189 2184 2190 2185 if (duplicate) { ··· 2197 2194 ret = -EINVAL; 2198 2195 } 2199 2196 2200 - masks->res0 = ~(masks->mask | masks->nmask); 2201 - if (masks->res0 != res0) 2202 - kvm_info("Implicit %s = %016llx, expecting %016llx\n", 2203 - masks->str, masks->res0, res0); 2197 + if ((masks->res0 | masks->res1 | masks->mask | masks->nmask) != GENMASK(63, 0) || 2198 + (masks->res0 & masks->res1) || (masks->res0 & masks->mask) || 2199 + (masks->res0 & masks->nmask) || (masks->res1 & masks->mask) || 2200 + (masks->res1 & masks->nmask) || (masks->mask & masks->nmask)) { 2201 + kvm_info("Inconsistent masks for %s (%016llx, %016llx, %016llx, %016llx)\n", 2202 + masks->str, masks->res0, masks->res1, masks->mask, masks->nmask); 2203 + masks->res0 = ~(masks->res1 | masks->mask | masks->nmask); 2204 + } 2204 2205 2205 2206 return ret; 2206 2207 }