/*	$NetBSD: cacheinfo.h,v 1.31 2021/12/09 14:33:19 msaitoh Exp $	*/

#ifndef _X86_CACHEINFO_H_
#define _X86_CACHEINFO_H_

struct x86_cache_info {
	uint8_t		cai_index;
	uint8_t		cai_desc;
	uint8_t		cai_associativity;
	u_int		cai_totalsize;	/* #entries for TLB, bytes for cache */
	u_int		cai_linesize;	/*
					 * or page size for TLB,
					 * or prefetch size
					 */
#ifndef _KERNEL
	const char	*cai_string;
#endif
};

#define CAI_ITLB	0	/* Instruction TLB (4K pages) */
#define CAI_ITLB2	1	/* Instruction TLB (2/4M pages) */
#define CAI_DTLB	2	/* Data TLB (4K pages) */
#define CAI_DTLB2	3	/* Data TLB (2/4M pages) */
#define CAI_ICACHE	4	/* Instruction cache */
#define CAI_DCACHE	5	/* Data cache */
#define CAI_L2CACHE	6	/* Level 2 cache */
#define CAI_L3CACHE	7	/* Level 3 cache */
#define CAI_L1_1GBITLB	8	/* L1 1GB Page instruction TLB */
#define CAI_L1_1GBDTLB	9	/* L1 1GB Page data TLB */
#define CAI_L2_1GBITLB	10	/* L2 1GB Page instruction TLB */
#define CAI_L2_1GBDTLB	11	/* L2 1GB Page data TLB */
#define CAI_L2_ITLB	12	/* L2 Instruction TLB (4K pages) */
#define CAI_L2_ITLB2	13	/* L2 Instruction TLB (2/4M pages) */
#define CAI_L2_DTLB	14	/* L2 Data TLB (4K pages) */
#define CAI_L2_DTLB2	15	/* L2 Data TLB (2/4M pages) */
#define CAI_L2_STLB	16	/* Shared L2 TLB (4K pages) */
#define CAI_L2_STLB2	17	/* Shared L2 TLB (4K/2M pages) */
#define CAI_L2_STLB3	18	/* Shared L2 TLB (2M/4M pages) */
#define CAI_PREFETCH	19	/* Prefetch */
#define CAI_L1_LD_TLB	20	/* L1 Load Only TLB */
#define CAI_L1_ST_TLB	21	/* L1 Store Only TLB */
#define CAI_COUNT	22

/*
 * AMD Cache Info:
 *
 *	Barcelona, Phenom:
 *
 *		Function 8000.0005 L1 TLB/Cache Information
 *		EAX -- L1 TLB 2/4MB pages
 *		EBX -- L1 TLB 4K pages
 *		ECX -- L1 D-cache
 *		EDX -- L1 I-cache
 *
 *		Function 8000.0006 L2 TLB/Cache Information
 *		EAX -- L2 TLB 2/4MB pages
 *		EBX -- L2 TLB 4K pages
 *		ECX -- L2 Unified cache
 *		EDX -- L3 Unified Cache
 *
 *		Function 8000.0019 TLB 1GB Page Information
 *		EAX -- L1 1GB pages
 *		EBX -- L2 1GB pages
 *		ECX -- reserved
 *		EDX -- reserved
 *
 *	Athlon, Duron:
 *
 *		Function 8000.0005 L1 TLB/Cache Information
 *		EAX -- L1 TLB 2/4MB pages
 *		EBX -- L1 TLB 4K pages
 *		ECX -- L1 D-cache
 *		EDX -- L1 I-cache
 *
 *		Function 8000.0006 L2 TLB/Cache Information
 *		EAX -- L2 TLB 2/4MB pages
 *		EBX -- L2 TLB 4K pages
 *		ECX -- L2 Unified cache
 *		EDX -- reserved
 *
 *	K5, K6:
 *
 *		Function 8000.0005 L1 TLB/Cache Information
 *		EAX -- reserved
 *		EBX -- TLB 4K pages
 *		ECX -- L1 D-cache
 *		EDX -- L1 I-cache
 *
 *	K6-III:
 *
 *		Function 8000.0006 L2 Cache Information
 *		EAX -- reserved
 *		EBX -- reserved
 *		ECX -- L2 Unified cache
 *		EDX -- reserved
 */

/* L1 TLB 2/4MB pages */
#define AMD_L1_EAX_DTLB_ASSOC(x)	(((x) >> 24) & 0xff)
#define AMD_L1_EAX_DTLB_ENTRIES(x)	(((x) >> 16) & 0xff)
#define AMD_L1_EAX_ITLB_ASSOC(x)	(((x) >> 8) & 0xff)
#define AMD_L1_EAX_ITLB_ENTRIES(x)	( (x) & 0xff)

/* L1 TLB 4K pages */
#define AMD_L1_EBX_DTLB_ASSOC(x)	(((x) >> 24) & 0xff)
#define AMD_L1_EBX_DTLB_ENTRIES(x)	(((x) >> 16) & 0xff)
#define AMD_L1_EBX_ITLB_ASSOC(x)	(((x) >> 8) & 0xff)
#define AMD_L1_EBX_ITLB_ENTRIES(x)	( (x) & 0xff)

/* L1 Data Cache */
#define AMD_L1_ECX_DC_SIZE(x)		((((x) >> 24) & 0xff) * 1024)
#define AMD_L1_ECX_DC_ASSOC(x)		(((x) >> 16) & 0xff)
#define AMD_L1_ECX_DC_LPT(x)		(((x) >> 8) & 0xff)
#define AMD_L1_ECX_DC_LS(x)		( (x) & 0xff)

/* L1 Instruction Cache */
#define AMD_L1_EDX_IC_SIZE(x)		((((x) >> 24) & 0xff) * 1024)
#define AMD_L1_EDX_IC_ASSOC(x)		(((x) >> 16) & 0xff)
#define AMD_L1_EDX_IC_LPT(x)		(((x) >> 8) & 0xff)
#define AMD_L1_EDX_IC_LS(x)		( (x) & 0xff)
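
/*
 * Usage sketch (not part of the original header): decoding CPUID
 * function 8000.0005 with the macros above.  An x86_cpuid()-style
 * helper that fills descs[0..3] with EAX..EDX is assumed here (NetBSD
 * provides one in <machine/cpufunc.h>); the function name
 * amd_l1_cacheinfo() is purely illustrative.
 *
 *	static void
 *	amd_l1_cacheinfo(struct x86_cache_info *dtlb, struct x86_cache_info *dc)
 *	{
 *		uint32_t descs[4];
 *
 *		x86_cpuid(0x80000005, descs);
 *
 *		dtlb->cai_index = CAI_DTLB;
 *		dtlb->cai_totalsize = AMD_L1_EBX_DTLB_ENTRIES(descs[1]);
 *		dtlb->cai_associativity = AMD_L1_EBX_DTLB_ASSOC(descs[1]);
 *		dtlb->cai_linesize = 4 * 1024;
 *
 *		dc->cai_index = CAI_DCACHE;
 *		dc->cai_totalsize = AMD_L1_ECX_DC_SIZE(descs[2]);
 *		dc->cai_associativity = AMD_L1_ECX_DC_ASSOC(descs[2]);
 *		dc->cai_linesize = AMD_L1_ECX_DC_LS(descs[2]);
 *	}
 *
 * Per the field comments in struct x86_cache_info, cai_totalsize holds
 * an entry count for TLBs and a byte count for caches, and cai_linesize
 * holds the page size for TLBs.
 */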

/* Note for L2 TLB -- if the upper 16 bits are 0, it is a unified TLB */

/* L2 TLB 2/4MB pages */
#define AMD_L2_EAX_DTLB_ASSOC(x)	(((x) >> 28) & 0xf)
#define AMD_L2_EAX_DTLB_ENTRIES(x)	(((x) >> 16) & 0xfff)
#define AMD_L2_EAX_IUTLB_ASSOC(x)	(((x) >> 12) & 0xf)
#define AMD_L2_EAX_IUTLB_ENTRIES(x)	( (x) & 0xfff)

/* L2 TLB 4K pages */
#define AMD_L2_EBX_DTLB_ASSOC(x)	(((x) >> 28) & 0xf)
#define AMD_L2_EBX_DTLB_ENTRIES(x)	(((x) >> 16) & 0xfff)
#define AMD_L2_EBX_IUTLB_ASSOC(x)	(((x) >> 12) & 0xf)
#define AMD_L2_EBX_IUTLB_ENTRIES(x)	( (x) & 0xfff)

/* L2 Cache */
#define AMD_L2_ECX_C_SIZE(x)		((((x) >> 16) & 0xffff) * 1024)
#define AMD_L2_ECX_C_ASSOC(x)		(((x) >> 12) & 0xf)
#define AMD_L2_ECX_C_LPT(x)		(((x) >> 8) & 0xf)
#define AMD_L2_ECX_C_LS(x)		( (x) & 0xff)

/* L3 Cache */
#define AMD_L3_EDX_C_SIZE(x)		((((x) >> 18) & 0xffff) * 1024 * 512)
#define AMD_L3_EDX_C_ASSOC(x)		(((x) >> 12) & 0xf)
#define AMD_L3_EDX_C_LPT(x)		(((x) >> 8) & 0xf)
#define AMD_L3_EDX_C_LS(x)		( (x) & 0xff)

/* L1 TLB 1GB pages */
#define AMD_L1_1GB_EAX_DTLB_ASSOC(x)	(((x) >> 28) & 0xf)
#define AMD_L1_1GB_EAX_DTLB_ENTRIES(x)	(((x) >> 16) & 0xfff)
#define AMD_L1_1GB_EAX_IUTLB_ASSOC(x)	(((x) >> 12) & 0xf)
#define AMD_L1_1GB_EAX_IUTLB_ENTRIES(x)	( (x) & 0xfff)

/* L2 TLB 1GB pages */
#define AMD_L2_1GB_EBX_DUTLB_ASSOC(x)	(((x) >> 28) & 0xf)
#define AMD_L2_1GB_EBX_DUTLB_ENTRIES(x)	(((x) >> 16) & 0xfff)
#define AMD_L2_1GB_EBX_IUTLB_ASSOC(x)	(((x) >> 12) & 0xf)
#define AMD_L2_1GB_EBX_IUTLB_ENTRIES(x)	( (x) & 0xfff)
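
/*
 * A similar sketch for function 8000.0006 (illustrative, with the same
 * x86_cpuid() assumption).  The *_C_ASSOC() fields are 4-bit encoded
 * values rather than way counts; the AMD_L2L3CACHE_INFO table at the
 * end of this header maps those codes to associativities.  Note that
 * AMD_L3_EDX_C_SIZE() already scales the 512KB-unit field to bytes.
 *
 *	static void
 *	amd_l2l3_cacheinfo(struct x86_cache_info *l2, struct x86_cache_info *l3)
 *	{
 *		uint32_t descs[4];
 *
 *		x86_cpuid(0x80000006, descs);
 *
 *		l2->cai_index = CAI_L2CACHE;
 *		l2->cai_totalsize = AMD_L2_ECX_C_SIZE(descs[2]);
 *		l2->cai_associativity = AMD_L2_ECX_C_ASSOC(descs[2]);
 *		l2->cai_linesize = AMD_L2_ECX_C_LS(descs[2]);
 *
 *		l3->cai_index = CAI_L3CACHE;
 *		l3->cai_totalsize = AMD_L3_EDX_C_SIZE(descs[3]);
 *		l3->cai_associativity = AMD_L3_EDX_C_ASSOC(descs[3]);
 *		l3->cai_linesize = AMD_L3_EDX_C_LS(descs[3]);
 *	}
 */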

/*
 * VIA Cache Info:
 *
 *	Nehemiah (at least)
 *
 *		Function 8000.0005 L1 TLB/Cache Information
 *		EAX -- reserved
 *		EBX -- L1 TLB 4K pages
 *		ECX -- L1 D-cache
 *		EDX -- L1 I-cache
 *
 *		Function 8000.0006 L2 Cache Information
 *		EAX -- reserved
 *		EBX -- reserved
 *		ECX -- L2 Unified cache
 *		EDX -- reserved
 */

/* L1 TLB 4K pages */
#define VIA_L1_EBX_DTLB_ASSOC(x)	(((x) >> 24) & 0xff)
#define VIA_L1_EBX_DTLB_ENTRIES(x)	(((x) >> 16) & 0xff)
#define VIA_L1_EBX_ITLB_ASSOC(x)	(((x) >> 8) & 0xff)
#define VIA_L1_EBX_ITLB_ENTRIES(x)	( (x) & 0xff)

/* L1 Data Cache */
#define VIA_L1_ECX_DC_SIZE(x)		((((x) >> 24) & 0xff) * 1024)
#define VIA_L1_ECX_DC_ASSOC(x)		(((x) >> 16) & 0xff)
#define VIA_L1_ECX_DC_LPT(x)		(((x) >> 8) & 0xff)
#define VIA_L1_ECX_DC_LS(x)		( (x) & 0xff)

/* L1 Instruction Cache */
#define VIA_L1_EDX_IC_SIZE(x)		((((x) >> 24) & 0xff) * 1024)
#define VIA_L1_EDX_IC_ASSOC(x)		(((x) >> 16) & 0xff)
#define VIA_L1_EDX_IC_LPT(x)		(((x) >> 8) & 0xff)
#define VIA_L1_EDX_IC_LS(x)		( (x) & 0xff)

/* L2 Cache (pre-Nehemiah) */
#define VIA_L2_ECX_C_SIZE(x)		((((x) >> 24) & 0xff) * 1024)
#define VIA_L2_ECX_C_ASSOC(x)		(((x) >> 16) & 0xff)
#define VIA_L2_ECX_C_LPT(x)		(((x) >> 8) & 0xff)
#define VIA_L2_ECX_C_LS(x)		( (x) & 0xff)

/* L2 Cache (Nehemiah and newer) */
#define VIA_L2N_ECX_C_SIZE(x)		((((x) >> 16) & 0xffff) * 1024)
#define VIA_L2N_ECX_C_ASSOC(x)		(((x) >> 12) & 0xf)
#define VIA_L2N_ECX_C_LPT(x)		(((x) >> 8) & 0xf)
#define VIA_L2N_ECX_C_LS(x)		( (x) & 0xff)

#ifdef _KERNEL
#define __CI_TBL(a,b,c,d,e,f)	{ a, b, c, d, e }
#else
#define __CI_TBL(a,b,c,d,e,f)	{ a, b, c, d, e, f }
#endif
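
/*
 * For illustration, in the tables below an entry such as
 *	__CI_TBL(CAI_DCACHE, 0x2c, 8, 32 * 1024, 64, NULL)
 * expands to the initializer
 *	{ CAI_DCACHE, 0x2c, 8, 32 * 1024, 64 }
 * in the kernel, where struct x86_cache_info has no cai_string member,
 * and to
 *	{ CAI_DCACHE, 0x2c, 8, 32 * 1024, 64, NULL }
 * in userland, where the last argument initializes cai_string.
 */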

/*
 * XXX Currently organized mostly by cache type, but would be
 * XXX easier to maintain if it were in descriptor type order.
 */
#define INTEL_CACHE_INFO { \
	__CI_TBL(CAI_ITLB, 0x01, 4, 32, 4 * 1024, NULL), \
	__CI_TBL(CAI_ITLB2, 0x02, 0xff, 2, 4 * 1024 * 1024, NULL), \
	__CI_TBL(CAI_DTLB, 0x03, 4, 64, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB2, 0x04, 4, 8, 4 * 1024 * 1024, NULL), \
	__CI_TBL(CAI_DTLB2, 0x05, 4, 32, 4 * 1024 * 1024, NULL), \
	__CI_TBL(CAI_ITLB2, 0x0b, 4, 4, 4 * 1024 * 1024, NULL), \
	__CI_TBL(CAI_ITLB, 0x4f, 0xff, 32, 4 * 1024, NULL), \
	__CI_TBL(CAI_ITLB, 0x50, 0xff, 64, 4 * 1024, "64 4K/4M entries"), \
	__CI_TBL(CAI_ITLB, 0x51, 0xff, 64, 4 * 1024, "128 4K/4M entries"), \
	__CI_TBL(CAI_ITLB, 0x52, 0xff, 64, 4 * 1024, "256 4K/4M entries"), \
	__CI_TBL(CAI_ITLB2, 0x55, 0xff, 64, 4 * 1024, "7 2M/4M entries"), \
	__CI_TBL(CAI_DTLB2, 0x56, 4, 16, 4 * 1024 * 1024, NULL), \
	__CI_TBL(CAI_DTLB, 0x57, 4, 16, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB, 0x59, 0xff, 16, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB2, 0x5a, 0xff, 64, 4 * 1024, "32 2M/4M entries (L0)"), \
	__CI_TBL(CAI_DTLB, 0x5b, 0xff, 64, 4 * 1024, "64 4K/4M entries"), \
	__CI_TBL(CAI_DTLB, 0x5c, 0xff, 64, 4 * 1024, "128 4K/4M entries"), \
	__CI_TBL(CAI_DTLB, 0x5d, 0xff, 64, 4 * 1024, "256 4K/4M entries"), \
	__CI_TBL(CAI_ITLB, 0x61, 0xff, 48, 4 * 1024, NULL), \
	__CI_TBL(CAI_L1_1GBDTLB, 0x63, 4, 4, 1024 * 1024 * 1024, NULL), \
	__CI_TBL(CAI_DTLB, 0x64, 4, 512, 4 * 1024, NULL), \
	__CI_TBL(CAI_ITLB, 0x6a, 8, 64, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB, 0x6b, 8, 256, 4 * 1024, NULL), \
	__CI_TBL(CAI_L2_DTLB2, 0x6c, 8, 128, 0, "128 2M/4M entries"), \
	__CI_TBL(CAI_L1_1GBDTLB, 0x6d, 0xff, 16, 1024 * 1024 * 1024, NULL), \
	__CI_TBL(CAI_ITLB2, 0x76, 0xff, 8, 4 * 1024 * 1024, "8 2M/4M entries"), \
	__CI_TBL(CAI_DTLB, 0xa0, 0xff, 32, 4 * 1024, NULL), \
	__CI_TBL(CAI_ITLB, 0xb0, 4, 128, 4 * 1024, NULL), \
	__CI_TBL(CAI_ITLB2, 0xb1, 4, 64, 0, "8 2M & 4 4M entries"), \
	__CI_TBL(CAI_ITLB, 0xb2, 4, 64, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB, 0xb3, 4, 128, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB, 0xb4, 4, 256, 4 * 1024, NULL), \
	__CI_TBL(CAI_ITLB, 0xb5, 8, 64, 4 * 1024, NULL), \
	__CI_TBL(CAI_ITLB, 0xb6, 8, 128, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB, 0xba, 4, 64, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB2, 0xc0, 4, 8, 4 * 1024, "8 4K/4M entries"), \
	__CI_TBL(CAI_L2_STLB2, 0xc1, 8, 1024, 4 * 1024, "1024 4K/2M entries"), \
	__CI_TBL(CAI_DTLB2, 0xc2, 4, 16, 4 * 1024, "16 4K/2M entries"), \
	__CI_TBL(CAI_L2_STLB, 0xc3, 6, 1536, 4 * 1024, NULL), \
	__CI_TBL(CAI_DTLB2, 0xc4, 4, 32, 4 * 1024, "32 2M/4M entries"), \
	__CI_TBL(CAI_L2_STLB, 0xca, 4, 512, 4 * 1024, NULL), \
	__CI_TBL(CAI_ICACHE, 0x06, 4, 8 * 1024, 32, NULL), \
	__CI_TBL(CAI_ICACHE, 0x08, 4, 16 * 1024, 32, NULL), \
	__CI_TBL(CAI_ICACHE, 0x09, 4, 32 * 1024, 64, NULL), \
	__CI_TBL(CAI_DCACHE, 0x0a, 2, 8 * 1024, 32, NULL), \
	__CI_TBL(CAI_DCACHE, 0x0c, 4, 16 * 1024, 32, NULL), \
	__CI_TBL(CAI_DCACHE, 0x0d, 4, 16 * 1024, 64, NULL), \
	__CI_TBL(CAI_DCACHE, 0x0e, 6, 24 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x21, 8, 256 * 1024, 64, NULL), /* L2 (MLC) */ \
	__CI_TBL(CAI_L3CACHE, 0x22, 0xff, 512 * 1024, 64, "sectored, 4-way "), \
	__CI_TBL(CAI_L3CACHE, 0x23, 0xff, 1 * 1024 * 1024, 64, "sectored, 8-way "), \
	__CI_TBL(CAI_L2CACHE, 0x24, 16, 1 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0x25, 0xff, 2 * 1024 * 1024, 64, "sectored, 8-way "), \
	__CI_TBL(CAI_L3CACHE, 0x29, 0xff, 4 * 1024 * 1024, 64, "sectored, 8-way "), \
	__CI_TBL(CAI_DCACHE, 0x2c, 8, 32 * 1024, 64, NULL), \
	__CI_TBL(CAI_ICACHE, 0x30, 8, 32 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x39, 4, 128 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x3a, 6, 192 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x3b, 2, 128 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x3c, 4, 256 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x3d, 6, 384 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x3e, 4, 512 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x40, 0, 0, 0, "not present"), \
	__CI_TBL(CAI_L2CACHE, 0x41, 4, 128 * 1024, 32, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x42, 4, 256 * 1024, 32, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x43, 4, 512 * 1024, 32, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x44, 4, 1 * 1024 * 1024, 32, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x45, 4, 2 * 1024 * 1024, 32, NULL), \
	__CI_TBL(CAI_L3CACHE, 0x46, 4, 4 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0x47, 8, 8 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x48, 12, 3 * 1024 * 1024, 64, NULL), \
	\
	/* 0x49 is L2 on Xeon MP (Family 0f, Model 06), L3 otherwise */ \
	__CI_TBL(CAI_L2CACHE, 0x49, 16, 4 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0x49, 16, 4 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0x4a, 12, 6 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0x4b, 16, 8 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0x4c, 12, 12 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0x4d, 16, 16 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x4e, 24, 6 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_DCACHE, 0x60, 8, 16 * 1024, 64, NULL), \
	__CI_TBL(CAI_DCACHE, 0x66, 4, 8 * 1024, 64, NULL), \
	__CI_TBL(CAI_DCACHE, 0x67, 4, 16 * 1024, 64, NULL), \
	__CI_TBL(CAI_DCACHE, 0x68, 4, 32 * 1024, 64, NULL), \
	__CI_TBL(CAI_ICACHE, 0x70, 8, 12 * 1024, 64, "12K uOp cache"), \
	__CI_TBL(CAI_ICACHE, 0x71, 8, 16 * 1024, 64, "16K uOp cache"), \
	__CI_TBL(CAI_ICACHE, 0x72, 8, 32 * 1024, 64, "32K uOp cache"), \
	__CI_TBL(CAI_ICACHE, 0x73, 8, 64 * 1024, 64, "64K uOp cache"), \
	__CI_TBL(CAI_L2CACHE, 0x78, 4, 1 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x79, 8, 128 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x7a, 8, 256 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x7b, 8, 512 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x7c, 8, 1 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x7d, 8, 2 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x7f, 2, 512 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x80, 8, 512 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x82, 8, 256 * 1024, 32, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x83, 8, 512 * 1024, 32, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x84, 8, 1 * 1024 * 1024, 32, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x85, 8, 2 * 1024 * 1024, 32, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x86, 4, 512 * 1024, 64, NULL), \
	__CI_TBL(CAI_L2CACHE, 0x87, 8, 1 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xd0, 4, 512 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xd1, 4, 1 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xd2, 4, 2 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xd6, 8, 1 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xd7, 8, 2 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xd8, 8, 4 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xdc, 12, 3 * 512 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xdd, 12, 3 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xde, 12, 6 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xe2, 16, 2 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xe3, 16, 4 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xe4, 16, 8 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xea, 24, 12 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xeb, 24, 18 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_L3CACHE, 0xec, 24, 24 * 1024 * 1024, 64, NULL), \
	__CI_TBL(CAI_PREFETCH, 0xf0, 0, 0, 64, NULL), \
	__CI_TBL(CAI_PREFETCH, 0xf1, 0, 0, 128, NULL), \
	/* 0xfe means no TLB information in CPUID leaf 2 (and use leaf 0x18) */ \
	/* 0xff means no cache information in CPUID leaf 2 (and use leaf 4) */ \
	__CI_TBL(0, 0, 0, 0, 0, NULL) \
}
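
/*
 * Usage sketch (illustrative): a CPUID leaf 2 descriptor byte can be
 * looked up in a table built from INTEL_CACHE_INFO via
 * cpu_cacheinfo_lookup(), declared at the end of this header.  That the
 * function matches on cai_desc and returns NULL for an unknown
 * descriptor is assumed here; the table variable is local to the
 * example.
 *
 *	static const struct x86_cache_info intel_cache_info[] =
 *	    INTEL_CACHE_INFO;
 *
 *	const struct x86_cache_info *cai;
 *
 *	cai = cpu_cacheinfo_lookup(intel_cache_info, 0x2c);
 *	if (cai != NULL)
 *		printf("D-cache: %u bytes, %u-byte lines\n",
 *		    cai->cai_totalsize, cai->cai_linesize);
 */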

#define AMD_L2L3CACHE_INFO { \
	__CI_TBL(0, 0x01, 1, 0, 0, NULL), \
	__CI_TBL(0, 0x02, 2, 0, 0, NULL), \
	__CI_TBL(0, 0x03, 3, 0, 0, NULL), \
	__CI_TBL(0, 0x04, 4, 0, 0, NULL), \
	__CI_TBL(0, 0x05, 6, 0, 0, NULL), \
	__CI_TBL(0, 0x06, 8, 0, 0, NULL), \
	__CI_TBL(0, 0x08, 16, 0, 0, NULL), \
	/* 0x09: Use Fn8000_001D */ \
	__CI_TBL(0, 0x0a, 32, 0, 0, NULL), \
	__CI_TBL(0, 0x0b, 48, 0, 0, NULL), \
	__CI_TBL(0, 0x0c, 64, 0, 0, NULL), \
	__CI_TBL(0, 0x0d, 96, 0, 0, NULL), \
	__CI_TBL(0, 0x0e, 128, 0, 0, NULL), \
	__CI_TBL(0, 0x0f, 0xff, 0, 0, NULL), \
	__CI_TBL(0, 0x00, 0, 0, 0, NULL) \
}

const struct x86_cache_info *cpu_cacheinfo_lookup(
    const struct x86_cache_info *, uint8_t);

#endif /* _X86_CACHEINFO_H_ */