/**************************************************************************//**
 * @file cache_armv8a.c
 * @brief CMSIS AArch64 Cache Source file
 * @version V1.0.0
 * @date 21. January 2022
 ******************************************************************************/

/*
 * Copyright 2022 NXP
 *
 * SPDX-License-Identifier: Apache-2.0
 */

#include "core_ca53.h"
#include "cache_armv8a.h"

#ifndef BIT_MASK
#define BIT_MASK(n) ((1UL << (n)) - 1UL)
#endif

/* CLIDR_EL1 */
#define CLIDR_EL1_LOC_SHIFT 24
#define CLIDR_EL1_LOC_MASK BIT_MASK(3)
#define CLIDR_EL1_CTYPE_SHIFT(l) ((l) * 3)
#define CLIDR_EL1_CTYPE_MASK BIT_MASK(3)
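/*
 * Note: each CLIDR_EL1.Ctype field is 3 bits wide; values 2 (data only),
 * 3 (separate data and instruction) and 4 (unified) indicate that a data
 * or unified cache is present at that level, which is why dcache_all()
 * below skips levels whose Ctype value is below 2.
 */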

/* CCSIDR_EL1 */
#define CCSIDR_EL1_LN_SZ_SHIFT 0
#define CCSIDR_EL1_LN_SZ_MASK BIT_MASK(3)
#define CCSIDR_EL1_WAYS_SHIFT 3
#define CCSIDR_EL1_WAYS_MASK BIT_MASK(10)
#define CCSIDR_EL1_SETS_SHIFT 13
#define CCSIDR_EL1_SETS_MASK BIT_MASK(15)
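/*
 * Note: CCSIDR_EL1 stores LineSize as log2(words per line) - 2 (so the
 * line size in bytes is 1 << (LineSize + 4)), and stores Associativity
 * and NumSets minus one; dcache_all() below undoes these encodings when
 * it derives set_shift, num_ways and num_sets.
 */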

/* CSSELR_EL1 */
#define CSSELR_EL1_LEVEL_SHIFT 1
#define CSSELR_EL1_LEVEL_MASK BIT_MASK(3)

enum cache_ops {
    CACHE_OP_C,     /* Clean */
    CACHE_OP_I,     /* Invalidate */
    CACHE_OP_CI     /* Clean and Invalidate */
};

/*
 * Maintenance operation on all data/unified caches to the PoC
 * (iterates by set/way over every cache level up to the Level of Coherency).
 * op: CACHE_OP_C:  clean
 *     CACHE_OP_I:  invalidate
 *     CACHE_OP_CI: clean and invalidate
 */
int dcache_all(enum cache_ops op)
{
    uint32_t clidr_el1, csselr_el1, ccsidr_el1;
    uint32_t num_ways, num_sets, set, way, operand;
    uint8_t loc, cache_type, cache_level, set_shift, way_shift;

    __DSB();

    __MRS(CLIDR_EL1, &clidr_el1);

    loc = (clidr_el1 >> CLIDR_EL1_LOC_SHIFT) & CLIDR_EL1_LOC_MASK;
    if (!loc)
        return 0;

    for (cache_level = 0; cache_level < loc; cache_level++) {
        cache_type = (clidr_el1 >> CLIDR_EL1_CTYPE_SHIFT(cache_level)) &
                     CLIDR_EL1_CTYPE_MASK;
        /* No Data or Unified cache at this level */
        if (cache_type < 2)
            continue;

        /* Select cache level and Data/Unified cache */
        csselr_el1 = (cache_level & CSSELR_EL1_LEVEL_MASK) <<
                     CSSELR_EL1_LEVEL_SHIFT;
        __MSR(CSSELR_EL1, csselr_el1);
        __ISB();

        __MRS(CCSIDR_EL1, &ccsidr_el1);
        set_shift = ((ccsidr_el1 >> CCSIDR_EL1_LN_SZ_SHIFT) &
                     CCSIDR_EL1_LN_SZ_MASK) + 4;
        num_ways = ((ccsidr_el1 >> CCSIDR_EL1_WAYS_SHIFT) &
                    CCSIDR_EL1_WAYS_MASK) + 1;
        num_sets = ((ccsidr_el1 >> CCSIDR_EL1_SETS_SHIFT) &
                    CCSIDR_EL1_SETS_MASK) + 1;
        /* 32 - log2(ways): bit position of the way field in the DC operand */
        way_shift = __CLZ(num_ways - 1);

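        /*
         * Illustrative note (hypothetical geometry, not read from hardware):
         * for a 32 KiB, 4-way cache with 64-byte lines, set_shift = 6,
         * way_shift = 30 and num_sets = 128, so the DC operand for set 5,
         * way 2 at level 1 (L2) would be (1 << 1) | (5 << 6) | (2u << 30).
         */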
        for (set = 0; set < num_sets; set++) {
            for (way = 0; way < num_ways; way++) {
                /* cache level, aligned to pos in DC operand */
                operand = (cache_level << 1);
                /* set number, aligned to pos in DC operand */
                operand |= set << set_shift;
                /* way number, aligned to pos in DC operand */
                /* No way number field for direct-mapped cache */
                if (way_shift < 32)
                    operand |= way << way_shift;

                switch (op) {
                case CACHE_OP_C:
                    dcache_ops(csw, operand);
                    break;
                case CACHE_OP_I:
                    dcache_ops(isw, operand);
                    break;
                case CACHE_OP_CI:
                    dcache_ops(cisw, operand);
                    break;
                default:
                    return -1;
                }
            }
        }
    }

    __DSB();

    /* Restore CSSELR_EL1 to level 0 */
    __MSR(CSSELR_EL1, 0);
    __ISB();

    return 0;
}

void dcache_clean_all(void)
{
    dcache_all(CACHE_OP_C);
}

void dcache_invalidate_all(void)
{
    dcache_all(CACHE_OP_I);
}

void dcache_clean_invalidate_all(void)
{
    dcache_all(CACHE_OP_CI);
}

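/*
 * Usage sketch (illustrative only, not taken from this file): on a
 * bare-metal AArch64 port these helpers are typically called once the
 * MMU/cache configuration is settled, e.g.
 *
 *     dcache_clean_all();             // push dirty lines out before a
 *                                     // non-coherent DMA master reads memory
 *     dcache_clean_invalidate_all();  // before handing RAM to another agent
 *     dcache_invalidate_all();        // e.g. early boot, before enabling
 *                                     // the data cache
 */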