Searched refs:r22 (Results 1 – 25 of 120) sorted by relevance
182 movi MMUIR_END, r22
186 bne r21, r22, tr1
191 movi MMUDR_END, r22
195 bne r21, r22, tr1
199 movi MMUIR_TEXT_L, r22 /* PTEL first */
200 add.l r22, r63, r22 /* Sign extend */
201 putcfg r21, 1, r22 /* Set MMUIR[0].PTEL */
202 movi MMUIR_TEXT_H, r22 /* PTEH last */
203 add.l r22, r63, r22 /* Sign extend */
204 putcfg r21, 0, r22 /* Set MMUIR[0].PTEH */
[all …]
66 movi ITLB_LAST_VAR_UNRESTRICTED+TLB_STEP, r22
69 bne r21, r22, tr1
74 movi DTLB_LAST_VAR_UNRESTRICTED+TLB_STEP, r22
77 bne r21, r22, tr1
81 movi MMUIR_TEXT_L, r22 /* PTEL first */
82 putcfg r21, 1, r22 /* Set MMUIR[0].PTEL */
83 movi MMUIR_TEXT_H, r22 /* PTEH last */
84 putcfg r21, 0, r22 /* Set MMUIR[0].PTEH */
88 movi MMUDR_CACHED_L, r22 /* PTEL first */
89 putcfg r21, 1, r22 /* Set MMUDR[0].PTEL */
[all …]
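The two SH-5 MMU hits above share an ordering rule spelled out in their comments: the low word (PTEL) is written before the high word (PTEH), because programming PTEH is what makes the TLB entry live. A pseudo-C sketch of that rule (illustrative only; putcfg is a configuration-register instruction, not a memory store, and `cfg` here just stands in for the slot addressed by r21):

#include <stdint.h>

/* Illustrative pseudo-C only: "cfg" stands in for the config-space
 * slot addressed by r21; putcfg is not actually a memory store. */
static inline void set_tlb_entry(volatile uint64_t cfg[2],
                                 uint64_t pteh, uint64_t ptel)
{
    cfg[1] = ptel;   /* putcfg r21, 1, r22 : PTEL first */
    cfg[0] = pteh;   /* putcfg r21, 0, r22 : PTEH last, entry goes live */
}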
163 sub r2, r7, r22
171 ldx.q r22, r6, r0
182 ! ldx.q r22, r36, r63 ! TAKum03020
183 alloco r22, 32
185 addi r22, 32, r22
186 ldx.q r22, r19, r23
187 sthi.q r22, -25, r0
188 ldx.q r22, r20, r24
189 ldx.q r22, r21, r25
190 stlo.q r22, -32, r0
[all …]
148 sub r2, r7, r22
156 ldx.q r22, r6, r0
167 ldx.q r22, r36, r63
168 alloco r22, 32
169 addi r22, 32, r22
170 ldx.q r22, r19, r23
171 sthi.q r22, -25, r0
172 ldx.q r22, r20, r24
173 ldx.q r22, r21, r25
174 stlo.q r22, -32, r0
[all …]
12 addz.l r5,r63,r22
13 nsb r22,r0
14 shlld r22,r0,r25
36 mulu.l r18,r22,r20
45 mulu.l r19,r22,r20
52 sub r25,r22,r25
54 mulu.l r19,r22,r20
7 nsb r4,r22
8 shlld r3,r22,r6
14 sub r63,r22,r20 // r63 == 64 % 64
25 addi r22,-31,r0
93 addi r22,32,r0
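This and the previous hit are the opening of SH-5 division helpers: nsb counts an operand's redundant sign bits and shlld shifts it left by that amount, normalizing the divisor before the multiply-based quotient steps. A rough C analogue for an unsigned divisor (my own sketch; __builtin_clzll counts leading zeros where nsb counts sign bits):

#include <stdint.h>

/* Normalize a nonzero 64-bit divisor so its top bit is set, returning
 * the shifted value; the shift count plays the role nsb's result does
 * for the shlld above. */
static inline uint64_t normalize_divisor(uint64_t d, unsigned *shift)
{
    *shift = (unsigned)__builtin_clzll(d);   /* rough analogue of nsb */
    return d << *shift;                      /* analogue of shlld     */
}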
62 addi r60, 0x80, r22
71 ldx.q r2, r22, r63 ! prefetch 4 lines hence
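This hit is the SH-5 software-prefetch idiom: a load into r63, the always-zero register, at an address 0x80 bytes ahead touches the cache line without consuming a result, hence the "prefetch 4 lines hence" comment (0x80 bytes = four 32-byte lines). A portable sketch of the same intent:

/* Touch the line 0x80 bytes (four 32-byte lines) ahead of the read
 * pointer, mirroring the ldx.q-into-r63 trick above. */
static inline void prefetch_four_lines_ahead(const void *p)
{
    __builtin_prefetch((const char *)p + 0x80, 0 /* read */, 3 /* keep */);
}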
33 sub r22=in1,r0,1 // last byte address
36 shr.u r22=r22,r20 // (last byte address) / (stride size)
39 sub r8=r22,r23 // number of strides - 1
84 add r22=in1,in0
87 sub r22=r22,r0,1 // last byte address
90 shr.u r22=r22,r20 // (last byte address) / (stride size)
93 sub r8=r22,r23 // number of strides - 1
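Both halves of this hit compute a cache-walk trip count the same way: stride index of the last byte minus stride index of the first, i.e. "number of strides - 1". A C sketch of that arithmetic, assuming the stride size is a power of two so the division is the shr.u shown (names are mine):

#include <stdint.h>

/* "Number of strides - 1": index of the stride holding the last byte
 * minus the index of the stride holding the first byte. */
static inline uint64_t flush_iterations(uint64_t start, uint64_t len,
                                        unsigned stride_shift)
{
    uint64_t last  = (start + len - 1) >> stride_shift;
    uint64_t first = start >> stride_shift;
    return last - first;
}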
48 ld4 r22=[in0],8
53 add r22=r22,r23
55 add r20=r20,r22
104 ld4 r22=[in0],4
112 add r17=r22,r23
44 #define dst_pre_l2 r22
298 shr.u r22=in2,4 // number of 16-byte iteration
318 shladd dst1=r22,3,dst0 // 2nd dest pointer
319 shladd src1=r22,3,src0 // 2nd src pointer
320 cmp.eq p8,p9=r22,r0 // do we really need to loop?
322 add cnt=-1,r22 // ctop iteration adjustment
508 EX(.ex_handler, (p6) ld8 r22=[src1]); /* common, prime for tail section */ \
515 shrp r21=r22,r38,shift; /* speculative work */ \
557 #define D r22
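The setup lines in this memcpy hit split the copy into two parallel streams: r22 holds the count of 16-byte iterations, and the second source/destination pointers start count * 8 bytes in, so each loop iteration can move one 8-byte half in each stream. A sketch of that pointer arithmetic (names mine):

#include <stddef.h>
#include <stdint.h>

/* Derive the second-stream pointers for the unrolled copy: stream 0
 * walks the first half of the buffer in 8-byte steps, stream 1 the
 * second half, so each iteration retires 16 bytes overall. */
static inline size_t setup_two_streams(uint8_t *dst0, const uint8_t *src0,
                                       size_t len,
                                       uint8_t **dst1, const uint8_t **src1)
{
    size_t iters = len >> 4;           /* shr.u  r22=in2,4       */
    *dst1 = dst0 + (iters << 3);       /* shladd dst1=r22,3,dst0 */
    *src1 = src0 + (iters << 3);       /* shladd src1=r22,3,src0 */
    return iters;                      /* 0 means skip the loop (cmp.eq) */
}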
80 LDREG ITLB_SID_COUNT(%r1), %r22
103 addib,COND(<=),n -1, %r22, fitdone /* Outer loop count decr */
114 addib,COND(>) -1, %r22, fitoneloop /* Outer loop count decr */
123 LDREG DTLB_SID_COUNT(%r1), %r22
146 addib,COND(<=),n -1, %r22,fdtdone /* Outer loop count decr */
157 addib,COND(>) -1, %r22, fdtoneloop /* Outer loop count decr */
201 rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/
245 mtsm %r22 /* restore I-bit */
261 rsm PSW_SM_I, %r22 /* No mmgt ops during loop*/
306 mtsm %r22 /* restore I-bit */
[all …]
126 depdi 0, 31, 32, %r22
179 STREG %r22, TASK_PT_GR22(%r1)
204 stw %r22, -52(%r30) /* 5th argument */
337 LDREG TASK_PT_GR22(%r1), %r22
342 stw %r22, -52(%r30) /* 5th argument */
755 10: ldw 0(%r25), %r22
847 sub,= %r29, %r22, %r0
126 shr.u r22=r21,3
134 (p8) shr r22=r22,r27
138 shr.u r18=r22,PGDIR_SHIFT // get bottom portion of pgd index bit
153 shr.u r28=r22,PUD_SHIFT // shift pud index into position
155 shr.u r18=r22,PMD_SHIFT // shift pmd index into position
164 shr.u r18=r22,PMD_SHIFT // shift pmd index into position
174 shr.u r19=r22,PAGE_SHIFT // shift pte index into position
183 MOV_FROM_IHA(r22) // get the VHPT address that caused the TLB miss
191 MOV_TO_IFA(r22, r24)
239 (p6) ptc.l r22,r27 // purge PTE page translation
[all …]
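This TLB-miss handler hit extracts one index per page-table level by shifting the faulting address right by that level's SHIFT constant. A C sketch of the same extraction (the shift widths below assume 16 KB pages with 2^11 entries per table, purely for illustration):

#include <stdint.h>

/* Shift widths are illustrative, not necessarily the kernel's config. */
#define EX_PAGE_SHIFT  14
#define EX_PMD_SHIFT   (EX_PAGE_SHIFT + 11)
#define EX_PUD_SHIFT   (EX_PMD_SHIFT + 11)
#define EX_PGDIR_SHIFT (EX_PUD_SHIFT + 11)
#define EX_INDEX_MASK  ((1UL << 11) - 1)

static inline unsigned long pgd_idx(uint64_t a) { return (a >> EX_PGDIR_SHIFT) & EX_INDEX_MASK; }
static inline unsigned long pud_idx(uint64_t a) { return (a >> EX_PUD_SHIFT)   & EX_INDEX_MASK; }
static inline unsigned long pmd_idx(uint64_t a) { return (a >> EX_PMD_SHIFT)   & EX_INDEX_MASK; }
static inline unsigned long pte_idx(uint64_t a) { return (a >> EX_PAGE_SHIFT)  & EX_INDEX_MASK; }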
73 (pUStk) addl r22=IA64_RBS_OFFSET,r1; /* compute base of RBS */ \
76 (pUStk) lfetch.fault.excl.nt1 [r22]; \
80 (pUStk) mov ar.bspstore=r22; /* switch to kernel RBS */ \
108 (pUStk) sub r18=r18,r22; /* r18=RSE.ndirty*8 */ \
178 .mem.offset 0,0; st8.spill [r2]=r22,16; \
217 (pUStk) sub r16=r18,r22; \
242 mov ar.bspstore=r22 \
201 add r22 = IA64_GTOD_WALL_TIME_OFFSET,r20 // wall_time
207 (p15) add r22 = IA64_GTOD_MONO_TIME_OFFSET,r20 // monotonic_time
239 ld8 r9 = [r22],IA64_TIME_SN_SPEC_SNSEC_OFFSET // sec
241 ld8 r8 = [r22],-IA64_TIME_SN_SPEC_SNSEC_OFFSET // snsec
478 addl r22=IA64_RBS_OFFSET,r2 // A compute base of RBS
482 lfetch.fault.excl.nt1 [r22] // M0|1 prefetch register backing-store
497 mov ar.bspstore=r22 // M2 (6 cyc) switch to kernel RBS
522 sub r22=r19,r18 // stime before leave kernel
527 add r20=r20,r22 // sum stime
538 SSM_PSR_I(p0, p6, r22) // M2 we're on kernel stacks now, reenable irqs
27 addl r22=IA64_RBS_OFFSET,r2
29 mov ar.bspstore=r22
178 adds r22=IA64_TASK_THREAD_KSP_OFFSET,r13
184 st8 [r22]=sp // save kernel stack pointer of old task
293 mov r22=b1
308 st8 [r14]=r22,SW(B4)-SW(B1) // save b1
381 ld8 r22=[r3],16 // restore b1
419 mov b1=r22
697 MOV_FROM_ITC(pUStk, p9, r22, r19) // fetch time at leave
741 mov r22=r0 // A clear r22
749 MOV_FROM_PSR(pKStk, r22, r21) // M2 read PSR now that interrupts are disabled
786 st8 [r14]=r22 // M save time at leave
[all …]
20 std r22, 96(%r1)
65 li r22, 0x2222
66 std r22, -232(%r1)
125 cmpwi r22, 0x2222
164 li r22, 0xad
224 ld r22, -232(%r1)
225 cmpwi r22, 0x2222
260 ld r22, 96(%r1)
82 #define K_LOAD_ARGS_5(r26,r25,r24,r23,r22) \
83 register unsigned long __r22 __asm__("r22") = (unsigned long)(r22); \
85 #define K_LOAD_ARGS_6(r26,r25,r24,r23,r22,r21) \
87 K_LOAD_ARGS_5(r26,r25,r24,r23,r22)
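These PA-RISC syscall macros pin each argument to its fixed register via a GCC register variable, chaining so the N-argument loader adds one binding and defers to the N-1 case. A sketch of the lower end of such a chain (pattern only, not the real header; it compiles only for PA-RISC targets where these register names exist):

/* Pattern sketch only: each loader binds one more argument to its
 * fixed register, then defers to the smaller case. */
#define K_LOAD_ARGS_0()
#define K_LOAD_ARGS_1(r26) \
        register unsigned long __r26 __asm__("r26") = (unsigned long)(r26); \
        K_LOAD_ARGS_0()
#define K_LOAD_ARGS_2(r26, r25) \
        register unsigned long __r25 __asm__("r25") = (unsigned long)(r25); \
        K_LOAD_ARGS_1(r26)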
40 arg4: .reg r22
68 r22: .reg %r22
111 5: lwi r22, r6, 0x0010 + offset; \
119 13: swi r22, r5, 0x0010 + offset; \
199 swi r22, r1, 24
222 lwi r22, r1, 24
242 lwi r22, r1, 24
25 std r22,(top_pos - 72)(%r1); \
45 ld r22,(top_pos - 72)(%r1); \
82 ld r22,64(r3)
444 std r22,-104(1)
452 exc; ld r22,640(4)
466 exc; std r22,648(3)
484 exc; ld r22,656(4)
503 exc; std r22,648(3)
534 ld r22,-104(1)
554 ld r22,-104(1)
39 unsigned long r22;
98 PTREGS_INFO(r22), \
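The final hit pairs a struct pt_regs field with a PTREGS_INFO() table entry, an X-macro pattern that keeps a register-description table in sync with the struct layout. A minimal sketch of how such a macro can be wired up (my own definition, not the kernel's):

#include <stddef.h>

struct pt_regs {
        unsigned long r22;
        /* ... remaining registers ... */
};

/* Record a register's name and offset within struct pt_regs so a
 * debugger or unwinder can look fields up by name. */
#define PTREGS_INFO(f) { #f, offsetof(struct pt_regs, f) }

static const struct reg_info {
        const char *name;
        size_t      offset;
} reg_table[] = {
        PTREGS_INFO(r22),
};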