/*
 * tie-asm.h -- compile-time HAL assembler definitions dependent on CORE & TIE
 *
 * NOTE: This header file is not meant to be included directly.
 */

/* This header file contains assembly-language definitions (assembly
   macros, etc.) for this specific Xtensa processor's TIE extensions
   and options.  It is customized to this Xtensa processor configuration.

   Copyright (c) 1999-2022 Cadence Design Systems Inc.

   Permission is hereby granted, free of charge, to any person obtaining
   a copy of this software and associated documentation files (the
   "Software"), to deal in the Software without restriction, including
   without limitation the rights to use, copy, modify, merge, publish,
   distribute, sublicense, and/or sell copies of the Software, and to
   permit persons to whom the Software is furnished to do so, subject to
   the following conditions:

   The above copyright notice and this permission notice shall be included
   in all copies or substantial portions of the Software.

   THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
   EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
   MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
   IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
   CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
   TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
   SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.  */

#ifndef _XTENSA_CORE_TIE_ASM_H
#define _XTENSA_CORE_TIE_ASM_H

#include <xtensa/coreasm.h>

/*  Selection parameter values for save-area save/restore macros:  */
/*  Option vs. TIE:  */
#define XTHAL_SAS_TIE     0x0001  /* custom extension or coprocessor */
#define XTHAL_SAS_OPT     0x0002  /* optional (and not a coprocessor) */
#define XTHAL_SAS_ANYOT   0x0003  /* both of the above */
/*  Whether used automatically by compiler:  */
#define XTHAL_SAS_NOCC    0x0004  /* not used by compiler w/o special opts/code */
#define XTHAL_SAS_CC      0x0008  /* used by compiler without special opts/code */
#define XTHAL_SAS_ANYCC   0x000C  /* both of the above */
/*  ABI handling across function calls:  */
#define XTHAL_SAS_CALR    0x0010  /* caller-saved */
#define XTHAL_SAS_CALE    0x0020  /* callee-saved */
#define XTHAL_SAS_GLOB    0x0040  /* global across function calls (in thread) */
#define XTHAL_SAS_ANYABI  0x0070  /* all of the above three */
/*  Misc  */
#define XTHAL_SAS_ALL     0xFFFF  /* include all default NCP contents */
#define XTHAL_SAS3(optie,ccuse,abi)  ( ((optie) & XTHAL_SAS_ANYOT) \
                                     | ((ccuse) & XTHAL_SAS_ANYCC) \
                                     | ((abi)   & XTHAL_SAS_ANYABI) )
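
/*  Illustrative note (not part of the generated interface above): a <select>
 *  or <alloc> mask for the save/restore macros below can be composed with
 *  XTHAL_SAS3().  For example, selecting only optional, compiler-used,
 *  caller-saved state:
 *
 *      XTHAL_SAS3(XTHAL_SAS_OPT, XTHAL_SAS_CC, XTHAL_SAS_CALR)
 *
 *  which evaluates to 0x0002 | 0x0008 | 0x0010 = 0x001A.
 */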

/*
 * Macro to store all non-coprocessor (extra) custom TIE and optional state
 * (not including zero-overhead loop registers).
 * Required parameters:
 *     ptr        Save area pointer address register (clobbered)
 *                (register must contain a 4-byte-aligned address).
 *     at1..at4   Four temporary address registers (first XCHAL_NCP_NUM_ATMPS
 *                registers are clobbered, the remaining are unused).
 * Optional parameters:
 *     continue   If the macro is invoked as part of a larger store sequence,
 *                set to 1 if this is not the first in the sequence.
 *                Defaults to 0.
 *     ofs        Offset from start of larger sequence (from value of first
 *                ptr in sequence) at which to store.  Defaults to next
 *                available space (or 0 if <continue> is 0).
 *     select     Selects which categories of registers to store, as a
 *                bitmask (see XTHAL_SAS_xxx constants).  Defaults to all
 *                registers.
 *     alloc      Selects which categories of registers to allocate; if any
 *                category is selected here that is not in <select>, space
 *                for the corresponding registers is skipped without doing
 *                any store.
 */
        .macro xchal_ncp_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
        xchal_sa_start  \continue, \ofs
        // Optional global registers used by default by the compiler:
        .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\select)
        xchal_sa_align  \ptr, 0, 1016, 4, 4
        rur.threadptr   \at1            // threadptr option
        s32i    \at1, \ptr, .Lxchal_ofs_+0
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 4
        .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0
        xchal_sa_align  \ptr, 0, 1016, 4, 4
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 4
        .endif
        // Optional caller-saved registers used by default by the compiler:
        .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\select)
        xchal_sa_align  \ptr, 0, 1012, 4, 4
        rsr.acclo       \at1            // MAC16 option
        s32i    \at1, \ptr, .Lxchal_ofs_+0
        rsr.acchi       \at1            // MAC16 option
        s32i    \at1, \ptr, .Lxchal_ofs_+4
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 8
        .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
        xchal_sa_align  \ptr, 0, 1012, 4, 4
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 8
        .endif
        // Optional caller-saved registers not used by default by the compiler:
        .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\select)
        xchal_sa_align  \ptr, 0, 996, 4, 4
        rsr.br  \at1                    // boolean option
        s32i    \at1, \ptr, .Lxchal_ofs_+0
        rsr.scompare1   \at1            // conditional store option
        s32i    \at1, \ptr, .Lxchal_ofs_+4
        rsr.m0  \at1                    // MAC16 option
        s32i    \at1, \ptr, .Lxchal_ofs_+8
        rsr.m1  \at1                    // MAC16 option
        s32i    \at1, \ptr, .Lxchal_ofs_+12
        rsr.m2  \at1                    // MAC16 option
        s32i    \at1, \ptr, .Lxchal_ofs_+16
        rsr.m3  \at1                    // MAC16 option
        s32i    \at1, \ptr, .Lxchal_ofs_+20
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 24
        .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
        xchal_sa_align  \ptr, 0, 996, 4, 4
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 24
        .endif
        .endm   // xchal_ncp_store
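
/*  Usage sketch (illustrative only; the buffer symbol and register choices
 *  are assumptions, not part of this header): saving the non-coprocessor
 *  state to a 4-byte-aligned area whose address is placed in a3.  Only the
 *  first XCHAL_NCP_NUM_ATMPS of the temporaries are actually clobbered.
 *
 *      movi            a3, ncp_save_area       // assumed, 4-byte-aligned buffer
 *      xchal_ncp_store a3, a4, a5, a6, a7
 */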

/*
 * Macro to load all non-coprocessor (extra) custom TIE and optional state
 * (not including zero-overhead loop registers).
 * Required parameters:
 *     ptr        Save area pointer address register (clobbered)
 *                (register must contain a 4-byte-aligned address).
 *     at1..at4   Four temporary address registers (first XCHAL_NCP_NUM_ATMPS
 *                registers are clobbered, the remaining are unused).
 * Optional parameters:
 *     continue   If the macro is invoked as part of a larger load sequence,
 *                set to 1 if this is not the first in the sequence.
 *                Defaults to 0.
 *     ofs        Offset from start of larger sequence (from value of first
 *                ptr in sequence) at which to load.  Defaults to next
 *                available space (or 0 if <continue> is 0).
 *     select     Selects which categories of registers to load, as a
 *                bitmask (see XTHAL_SAS_xxx constants).  Defaults to all
 *                registers.
 *     alloc      Selects which categories of registers to allocate; if any
 *                category is selected here that is not in <select>, space
 *                for the corresponding registers is skipped without doing
 *                any load.
 */
        .macro xchal_ncp_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
        xchal_sa_start  \continue, \ofs
        // Optional global registers used by default by the compiler:
        .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\select)
        xchal_sa_align  \ptr, 0, 1016, 4, 4
        l32i    \at1, \ptr, .Lxchal_ofs_+0
        wur.threadptr   \at1            // threadptr option
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 4
        .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_GLOB) & ~(\alloc)) == 0
        xchal_sa_align  \ptr, 0, 1016, 4, 4
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 4
        .endif
        // Optional caller-saved registers used by default by the compiler:
        .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\select)
        xchal_sa_align  \ptr, 0, 1012, 4, 4
        l32i    \at1, \ptr, .Lxchal_ofs_+0
        wsr.acclo       \at1            // MAC16 option
        l32i    \at1, \ptr, .Lxchal_ofs_+4
        wsr.acchi       \at1            // MAC16 option
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 8
        .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_CC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
        xchal_sa_align  \ptr, 0, 1012, 4, 4
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 8
        .endif
        // Optional caller-saved registers not used by default by the compiler:
        .ifeq (XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\select)
        xchal_sa_align  \ptr, 0, 996, 4, 4
        l32i    \at1, \ptr, .Lxchal_ofs_+0
        wsr.br  \at1                    // boolean option
        l32i    \at1, \ptr, .Lxchal_ofs_+4
        wsr.scompare1   \at1            // conditional store option
        l32i    \at1, \ptr, .Lxchal_ofs_+8
        wsr.m0  \at1                    // MAC16 option
        l32i    \at1, \ptr, .Lxchal_ofs_+12
        wsr.m1  \at1                    // MAC16 option
        l32i    \at1, \ptr, .Lxchal_ofs_+16
        wsr.m2  \at1                    // MAC16 option
        l32i    \at1, \ptr, .Lxchal_ofs_+20
        wsr.m3  \at1                    // MAC16 option
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 24
        .elseif ((XTHAL_SAS_OPT | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
        xchal_sa_align  \ptr, 0, 996, 4, 4
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 24
        .endif
        .endm   // xchal_ncp_load


#define XCHAL_NCP_NUM_ATMPS     1
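
/*  Matching restore sketch (illustrative; ncp_save_area is the same assumed
 *  buffer as in the store sketch above).  Passing the same <select> and
 *  <alloc> values to the store and load invocations keeps the two save-area
 *  layouts consistent.
 *
 *      movi            a3, ncp_save_area       // assumed buffer, saved earlier
 *      xchal_ncp_load  a3, a4, a5, a6, a7
 */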

/*
 * Macro to store the state of TIE coprocessor AudioEngineLX.
 * Required parameters:
 *     ptr        Save area pointer address register (clobbered)
 *                (register must contain an 8-byte-aligned address).
 *     at1..at4   Four temporary address registers (first XCHAL_CP1_NUM_ATMPS
 *                registers are clobbered, the remaining are unused).
 * Optional parameters are the same as for xchal_ncp_store.
 */
#define xchal_cp_AudioEngineLX_store    xchal_cp1_store
        .macro xchal_cp1_store  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
        xchal_sa_start  \continue, \ofs
        // Custom caller-saved registers not used by default by the compiler:
        .ifeq (XTHAL_SAS_TIE | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\select)
        xchal_sa_align  \ptr, 0, 0, 8, 8
        ae_s64.i        aed0, \ptr, .Lxchal_ofs_+32
        ae_s64.i        aed1, \ptr, .Lxchal_ofs_+40
        ae_s64.i        aed2, \ptr, .Lxchal_ofs_+48
        ae_s64.i        aed3, \ptr, .Lxchal_ofs_+56
        addi    \ptr, \ptr, 64
        ae_s64.i        aed4, \ptr, .Lxchal_ofs_+0
        ae_s64.i        aed5, \ptr, .Lxchal_ofs_+8
        ae_s64.i        aed6, \ptr, .Lxchal_ofs_+16
        ae_s64.i        aed7, \ptr, .Lxchal_ofs_+24
        ae_s64.i        aed8, \ptr, .Lxchal_ofs_+32
        ae_s64.i        aed9, \ptr, .Lxchal_ofs_+40
        ae_s64.i        aed10, \ptr, .Lxchal_ofs_+48
        ae_s64.i        aed11, \ptr, .Lxchal_ofs_+56
        addi    \ptr, \ptr, 64
        ae_salign64.i   u0, \ptr, .Lxchal_ofs_+0
        ae_salign64.i   u1, \ptr, .Lxchal_ofs_+8
        ae_salign64.i   u2, \ptr, .Lxchal_ofs_+16
        ae_salign64.i   u3, \ptr, .Lxchal_ofs_+24
        addi    \ptr, \ptr, -128
        ae_movvfusionmisc       aed0    // ureg FUSIONMISC
        ae_s64.i        aed0, \ptr, .Lxchal_ofs_+0 + 0
        ae_movvcirc     aed0            // ureg CIRC
        ae_s64.i        aed0, \ptr, .Lxchal_ofs_+8 + 0
        ae_movvtablefirstsearchnext     aed0    // ureg TABLEFIRSTSEARCHNEXT
        ae_s64.i        aed0, \ptr, .Lxchal_ofs_+16 + 0
        ae_movvfcrfsr   aed0            // ureg FCR_FSR
        ae_s64.i        aed0, \ptr, .Lxchal_ofs_+24 + 0
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 160
        .elseif ((XTHAL_SAS_TIE | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
        xchal_sa_align  \ptr, 0, 0, 8, 8
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 160
        .endif
        .endm   // xchal_cp1_store
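
/*  Usage sketch (illustrative only; cp1_save_area is an assumed symbol):
 *  saving the AudioEngineLX state to an 8-byte-aligned area.  On
 *  configurations with the coprocessor-enable option, bit 1 of CPENABLE
 *  must already be set before these AE instructions execute; enabling it
 *  is not shown here.
 *
 *      movi            a3, cp1_save_area       // assumed 8-byte-aligned buffer
 *      xchal_cp1_store a3, a4, a5, a6, a7
 */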

/*
 * Macro to load the state of TIE coprocessor AudioEngineLX.
 * Required parameters:
 *     ptr        Save area pointer address register (clobbered)
 *                (register must contain an 8-byte-aligned address).
 *     at1..at4   Four temporary address registers (first XCHAL_CP1_NUM_ATMPS
 *                registers are clobbered, the remaining are unused).
 * Optional parameters are the same as for xchal_ncp_load.
 */
#define xchal_cp_AudioEngineLX_load     xchal_cp1_load
        .macro xchal_cp1_load  ptr at1 at2 at3 at4  continue=0 ofs=-1 select=XTHAL_SAS_ALL alloc=0
        xchal_sa_start  \continue, \ofs
        // Custom caller-saved registers not used by default by the compiler:
        .ifeq (XTHAL_SAS_TIE | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\select)
        xchal_sa_align  \ptr, 0, 0, 8, 8
        ae_l64.i        aed0, \ptr, .Lxchal_ofs_+0 + 0  // ureg FUSIONMISC
        ae_movfusionmiscv       aed0
        ae_l64.i        aed0, \ptr, .Lxchal_ofs_+8 + 0  // ureg CIRC
        ae_movcircv     aed0
        ae_l64.i        aed0, \ptr, .Lxchal_ofs_+16 + 0 // ureg TABLEFIRSTSEARCHNEXT
        ae_movtablefirstsearchnextv     aed0
        ae_l64.i        aed0, \ptr, .Lxchal_ofs_+24 + 0 // ureg FCR_FSR
        ae_movfcrfsrv   aed0
        ae_l64.i        aed0, \ptr, .Lxchal_ofs_+32
        ae_l64.i        aed1, \ptr, .Lxchal_ofs_+40
        ae_l64.i        aed2, \ptr, .Lxchal_ofs_+48
        ae_l64.i        aed3, \ptr, .Lxchal_ofs_+56
        addi    \ptr, \ptr, 64
        ae_l64.i        aed4, \ptr, .Lxchal_ofs_+0
        ae_l64.i        aed5, \ptr, .Lxchal_ofs_+8
        ae_l64.i        aed6, \ptr, .Lxchal_ofs_+16
        ae_l64.i        aed7, \ptr, .Lxchal_ofs_+24
        ae_l64.i        aed8, \ptr, .Lxchal_ofs_+32
        ae_l64.i        aed9, \ptr, .Lxchal_ofs_+40
        ae_l64.i        aed10, \ptr, .Lxchal_ofs_+48
        ae_l64.i        aed11, \ptr, .Lxchal_ofs_+56
        addi    \ptr, \ptr, 64
        ae_lalign64.i   u0, \ptr, .Lxchal_ofs_+0
        ae_lalign64.i   u1, \ptr, .Lxchal_ofs_+8
        ae_lalign64.i   u2, \ptr, .Lxchal_ofs_+16
        ae_lalign64.i   u3, \ptr, .Lxchal_ofs_+24
        .set    .Lxchal_pofs_, .Lxchal_pofs_ + 128
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 32
        .elseif ((XTHAL_SAS_TIE | XTHAL_SAS_NOCC | XTHAL_SAS_CALR) & ~(\alloc)) == 0
        xchal_sa_align  \ptr, 0, 0, 8, 8
        .set    .Lxchal_ofs_, .Lxchal_ofs_ + 160
        .endif
        .endm   // xchal_cp1_load

#define XCHAL_CP1_NUM_ATMPS     0
#define XCHAL_SA_NUM_ATMPS      1

        /* Empty macros for unconfigured coprocessors: */
        .macro xchal_cp0_store  p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp0_load   p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp2_store  p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp2_load   p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp3_store  p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp3_load   p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp4_store  p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp4_load   p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp5_store  p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp5_load   p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp6_store  p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp6_load   p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp7_store  p a b c d continue=0 ofs=-1 select=-1 ; .endm
        .macro xchal_cp7_load   p a b c d continue=0 ofs=-1 select=-1 ; .endm

#endif /*_XTENSA_CORE_TIE_ASM_H*/
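
/*  Illustrative only (assumed buffer symbol, not generated by the
 *  configurator): the save macros above can share one save area by chaining
 *  invocations with continue=1, so each subsequent macro continues at the
 *  next free offset tracked in .Lxchal_ofs_.  The matching loads chain the
 *  same way.
 *
 *      movi            a3, combined_save_area  // assumed, 8-byte-aligned
 *      xchal_ncp_store a3, a4, a5, a6, a7
 *      xchal_cp1_store a3, a4, a5, a6, a7, 1   // continue=1
 */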