Lines matching +full:0 +full:x60
12 ROT8: .octa 0x0e0d0c0f0a09080b0605040702010003
13 .octa 0x0e0d0c0f0a09080b0605040702010003
17 ROT16: .octa 0x0d0c0f0e09080b0a0504070601000302
18 .octa 0x0d0c0f0e09080b0a0504070601000302
22 CTRINC: .octa 0x00000003000000020000000100000000
23 .octa 0x00000007000000060000000500000004
27 CTR2BL: .octa 0x00000000000000000000000000000000
28 .octa 0x00000000000000000000000000000001
32 CTR4BL: .octa 0x00000000000000000000000000000002
33 .octa 0x00000000000000000000000000000003
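These constants match the lookup tables at the top of the Linux kernel's AVX2 ChaCha assembly (by all appearances arch/x86/crypto/chacha-avx2-x86_64.S; the notes below assume that context). ROT8 and ROT16 are vpshufb masks that rotate every 32-bit lane left by 8 and 16 bits as a pure byte shuffle, CTRINC supplies block-counter offsets 0..7 for the eight-block path, and CTR2BL/CTR4BL supply 64-bit counter offsets for the two- and four-block paths. A minimal C sketch (mine, not kernel code; it assumes a little-endian host) checks that the low 128-bit lane of ROT8 really encodes rotate-left-by-8:

```c
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Low 128-bit lane of ROT8 as vpshufb byte indices,
 * least significant byte of the .octa first. */
static const uint8_t rot8_mask[16] = {
	0x03, 0x00, 0x01, 0x02, 0x07, 0x04, 0x05, 0x06,
	0x0b, 0x08, 0x09, 0x0a, 0x0f, 0x0c, 0x0d, 0x0e,
};

static uint32_t rotl32(uint32_t x, int n)
{
	return (x << n) | (x >> (32 - n));
}

int main(void)
{
	uint32_t in[4] = { 0x01234567, 0x89abcdef, 0xcafef00d, 0x00000001 };
	uint8_t src[16], dst[16];
	uint32_t out[4];

	memcpy(src, in, sizeof(in));
	for (int i = 0; i < 16; i++)	/* what vpshufb does per 128-bit lane */
		dst[i] = src[rot8_mask[i]];
	memcpy(out, dst, sizeof(out));

	for (int i = 0; i < 4; i++)	/* both columns print the same value */
		printf("%08x %08x\n", out[i], rotl32(in[i], 8));
	return 0;
}
```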
51 # x0..3[0-1] = s0..3
52 vbroadcasti128 0x00(%rdi),%ymm0
53 vbroadcasti128 0x10(%rdi),%ymm1
54 vbroadcasti128 0x20(%rdi),%ymm2
55 vbroadcasti128 0x30(%rdi),%ymm3
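The two-block variant keeps the whole 4x4 state in four ymm registers: each vbroadcasti128 loads one 16-byte state row into both 128-bit lanes, one copy per block, and CTR2BL is then added so the two copies get distinct block counters. A sketch of that layout (type and helper names are illustrative, not kernel identifiers):

```c
#include <stdint.h>

/* One 256-bit register after vbroadcasti128:
 * the same state row for block 0 and block 1. */
typedef struct { uint32_t w[8]; } vrow2;	/* row(block0) | row(block1) */

static void load_row_2x(vrow2 *r, const uint32_t row[4])
{
	for (int b = 0; b < 2; b++)
		for (int j = 0; j < 4; j++)
			r->w[4 * b + j] = row[j];
}
```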
97 # x1 = shuffle32(x1, MASK(0, 3, 2, 1))
98 vpshufd $0x39,%ymm1,%ymm1
99 # x2 = shuffle32(x2, MASK(1, 0, 3, 2))
100 vpshufd $0x4e,%ymm2,%ymm2
101 # x3 = shuffle32(x3, MASK(2, 1, 0, 3))
102 vpshufd $0x93,%ymm3,%ymm3
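The $0x39, $0x4e and $0x93 immediates rotate the words of rows 1-3 left by one, two and three positions, lining the diagonals up in columns ahead of the diagonal round. A small C model of vpshufd per 128-bit lane (the function name is illustrative):

```c
#include <stdint.h>
#include <stdio.h>

/* vpshufd per 128-bit lane: destination word i takes
 * source word ((imm >> 2*i) & 3). */
static void pshufd(uint32_t dst[4], const uint32_t src[4], uint8_t imm)
{
	for (int i = 0; i < 4; i++)
		dst[i] = src[(imm >> (2 * i)) & 3];
}

int main(void)
{
	uint32_t x1[4] = { 10, 11, 12, 13 }, out[4];

	pshufd(out, x1, 0x39);	/* MASK(0,3,2,1): rotate left one word */
	printf("%u %u %u %u\n", out[0], out[1], out[2], out[3]);	/* 11 12 13 10 */
	return 0;
}
```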
130 # x1 = shuffle32(x1, MASK(2, 1, 0, 3))
131 vpshufd $0x93,%ymm1,%ymm1
132 # x2 = shuffle32(x2, MASK(1, 0, 3, 2))
133 vpshufd $0x4e,%ymm2,%ymm2
134 # x3 = shuffle32(x3, MASK(0, 3, 2, 1))
135 vpshufd $0x39,%ymm3,%ymm3
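These inverse shuffles restore row order after the diagonal round. In scalar form the whole pattern is the standard ChaCha double round; a self-contained C version for reference (this is the textbook algorithm, not a transcription of the assembly):

```c
#include <stdint.h>

static uint32_t rotl32(uint32_t v, int n)
{
	return (v << n) | (v >> (32 - n));
}

/* One ChaCha quarter-round; the 16/12/8/7 rotations are what the
 * assembly implements with ROT16, shifts, ROT8 and shifts. */
static void qr(uint32_t *a, uint32_t *b, uint32_t *c, uint32_t *d)
{
	*a += *b; *d = rotl32(*d ^ *a, 16);
	*c += *d; *b = rotl32(*b ^ *c, 12);
	*a += *b; *d = rotl32(*d ^ *a, 8);
	*c += *d; *b = rotl32(*b ^ *c, 7);
}

/* A double round: the diagonal round is the column round applied after
 * rotating rows 1..3 left by 1, 2 and 3 words, which is exactly what
 * the $0x39/$0x4e/$0x93 shuffles set up and undo. */
static void chacha_doubleround(uint32_t x[16])
{
	for (int i = 0; i < 4; i++)
		qr(&x[i], &x[4 + i], &x[8 + i], &x[12 + i]);
	for (int i = 0; i < 4; i++)
		qr(&x[i], &x[4 + ((i + 1) & 3)],
		   &x[8 + ((i + 2) & 3)], &x[12 + ((i + 3) & 3)]);
}
```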
142 cmp $0x10,%rax
144 vpxor 0x00(%rdx),%xmm7,%xmm6
145 vmovdqu %xmm6,0x00(%rsi)
149 cmp $0x20,%rax
151 vpxor 0x10(%rdx),%xmm7,%xmm6
152 vmovdqu %xmm6,0x10(%rsi)
156 cmp $0x30,%rax
158 vpxor 0x20(%rdx),%xmm7,%xmm6
159 vmovdqu %xmm6,0x20(%rsi)
163 cmp $0x40,%rax
165 vpxor 0x30(%rdx),%xmm7,%xmm6
166 vmovdqu %xmm6,0x30(%rsi)
171 cmp $0x50,%rax
173 vpxor 0x40(%rdx),%xmm7,%xmm6
174 vmovdqu %xmm6,0x40(%rsi)
177 cmp $0x60,%rax
179 vpxor 0x50(%rdx),%xmm7,%xmm6
180 vmovdqu %xmm6,0x50(%rsi)
183 cmp $0x70,%rax
185 vpxor 0x60(%rdx),%xmm7,%xmm6
186 vmovdqu %xmm6,0x60(%rsi)
189 cmp $0x80,%rax
191 vpxor 0x70(%rdx),%xmm7,%xmm6
192 vmovdqu %xmm6,0x70(%rsi)
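Each cmp/vpxor/vmovdqu triple above handles one 16-byte chunk: the requested length in %rax is tested before every chunk, and the conditional branches that exit the ladder early don't contain a matching token, so they are elided from this listing. A hedged C sketch of the same chunking, with an illustrative helper name:

```c
#include <stddef.h>
#include <stdint.h>

/* XOR keystream into the output 16 bytes at a time, stopping at the
 * requested length, mirroring the cmp/vpxor/vmovdqu ladder. */
static size_t xor_chunks16(uint8_t *out, const uint8_t *in,
			   const uint8_t *keystream, size_t len)
{
	size_t off;

	for (off = 0; off + 16 <= len; off += 16)
		for (int i = 0; i < 16; i++)
			out[off + i] = in[off + i] ^ keystream[off + i];
	return off;		/* bytes handled; a partial tail may remain */
}
```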
201 and $0x0f,%r9
203 and $~0x0f,%rax
208 sub $0x10,%rsp
216 vpxor 0x00(%rsp),%xmm7,%xmm7
217 vmovdqa %xmm7,0x00(%rsp)
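For a trailing fragment shorter than 16 bytes, the code masks off the tail length (and $0x0f,%r9), rounds the handled length down (and $~0x0f,%rax), carves a 16-byte stack slot (sub $0x10,%rsp), and XORs a full register against a bounce buffer so no load or store crosses the end of the caller's buffers. A sketch under the same assumptions (helper name is mine):

```c
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Last (len & 0x0f) bytes: bounce through a 16-byte buffer so the
 * full-width XOR never touches bytes outside the caller's buffers. */
static void xor_partial16(uint8_t *out, const uint8_t *in,
			  const uint8_t *keystream, size_t tail)
{
	uint8_t buf[16] = { 0 };

	memcpy(buf, in, tail);		/* safe partial load */
	for (int i = 0; i < 16; i++)
		buf[i] ^= keystream[i];	/* full-register XOR */
	memcpy(out, buf, tail);		/* safe partial store */
}
```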
245 # x0..3[0-3] = s0..3
246 vbroadcasti128 0x00(%rdi),%ymm0
247 vbroadcasti128 0x10(%rdi),%ymm1
248 vbroadcasti128 0x20(%rdi),%ymm2
249 vbroadcasti128 0x30(%rdi),%ymm3
320 # x1 = shuffle32(x1, MASK(0, 3, 2, 1))
321 vpshufd $0x39,%ymm1,%ymm1
322 vpshufd $0x39,%ymm5,%ymm5
323 # x2 = shuffle32(x2, MASK(1, 0, 3, 2))
324 vpshufd $0x4e,%ymm2,%ymm2
325 vpshufd $0x4e,%ymm6,%ymm6
326 # x3 = shuffle32(x3, MASK(2, 1, 0, 3))
327 vpshufd $0x93,%ymm3,%ymm3
328 vpshufd $0x93,%ymm7,%ymm7
378 # x1 = shuffle32(x1, MASK(2, 1, 0, 3))
379 vpshufd $0x93,%ymm1,%ymm1
380 vpshufd $0x93,%ymm5,%ymm5
381 # x2 = shuffle32(x2, MASK(1, 0, 3, 2))
382 vpshufd $0x4e,%ymm2,%ymm2
383 vpshufd $0x4e,%ymm6,%ymm6
384 # x3 = shuffle32(x3, MASK(0, 3, 2, 1))
385 vpshufd $0x39,%ymm3,%ymm3
386 vpshufd $0x39,%ymm7,%ymm7
393 cmp $0x10,%rax
395 vpxor 0x00(%rdx),%xmm10,%xmm9
396 vmovdqu %xmm9,0x00(%rsi)
400 cmp $0x20,%rax
402 vpxor 0x10(%rdx),%xmm10,%xmm9
403 vmovdqu %xmm9,0x10(%rsi)
407 cmp $0x30,%rax
409 vpxor 0x20(%rdx),%xmm10,%xmm9
410 vmovdqu %xmm9,0x20(%rsi)
414 cmp $0x40,%rax
416 vpxor 0x30(%rdx),%xmm10,%xmm9
417 vmovdqu %xmm9,0x30(%rsi)
422 cmp $0x50,%rax
424 vpxor 0x40(%rdx),%xmm10,%xmm9
425 vmovdqu %xmm9,0x40(%rsi)
428 cmp $0x60,%rax
430 vpxor 0x50(%rdx),%xmm10,%xmm9
431 vmovdqu %xmm9,0x50(%rsi)
434 cmp $0x70,%rax
436 vpxor 0x60(%rdx),%xmm10,%xmm9
437 vmovdqu %xmm9,0x60(%rsi)
440 cmp $0x80,%rax
442 vpxor 0x70(%rdx),%xmm10,%xmm9
443 vmovdqu %xmm9,0x70(%rsi)
447 cmp $0x90,%rax
449 vpxor 0x80(%rdx),%xmm10,%xmm9
450 vmovdqu %xmm9,0x80(%rsi)
454 cmp $0xa0,%rax
456 vpxor 0x90(%rdx),%xmm10,%xmm9
457 vmovdqu %xmm9,0x90(%rsi)
461 cmp $0xb0,%rax
463 vpxor 0xa0(%rdx),%xmm10,%xmm9
464 vmovdqu %xmm9,0xa0(%rsi)
468 cmp $0xc0,%rax
470 vpxor 0xb0(%rdx),%xmm10,%xmm9
471 vmovdqu %xmm9,0xb0(%rsi)
476 cmp $0xd0,%rax
478 vpxor 0xc0(%rdx),%xmm10,%xmm9
479 vmovdqu %xmm9,0xc0(%rsi)
482 cmp $0xe0,%rax
484 vpxor 0xd0(%rdx),%xmm10,%xmm9
485 vmovdqu %xmm9,0xd0(%rsi)
488 cmp $0xf0,%rax
490 vpxor 0xe0(%rdx),%xmm10,%xmm9
491 vmovdqu %xmm9,0xe0(%rsi)
494 cmp $0x100,%rax
496 vpxor 0xf0(%rdx),%xmm10,%xmm9
497 vmovdqu %xmm9,0xf0(%rsi)
506 and $0x0f,%r9
508 and $~0x0f,%rax
513 sub $0x10,%rsp
521 vpxor 0x00(%rsp),%xmm10,%xmm10
522 vmovdqa %xmm10,0x00(%rsp)
555 sub $0x80, %rsp
558 # x0..15[0-7] = s[0..15]
559 vpbroadcastd 0x00(%rdi),%ymm0
560 vpbroadcastd 0x04(%rdi),%ymm1
561 vpbroadcastd 0x08(%rdi),%ymm2
562 vpbroadcastd 0x0c(%rdi),%ymm3
563 vpbroadcastd 0x10(%rdi),%ymm4
564 vpbroadcastd 0x14(%rdi),%ymm5
565 vpbroadcastd 0x18(%rdi),%ymm6
566 vpbroadcastd 0x1c(%rdi),%ymm7
567 vpbroadcastd 0x20(%rdi),%ymm8
568 vpbroadcastd 0x24(%rdi),%ymm9
569 vpbroadcastd 0x28(%rdi),%ymm10
570 vpbroadcastd 0x2c(%rdi),%ymm11
571 vpbroadcastd 0x30(%rdi),%ymm12
572 vpbroadcastd 0x34(%rdi),%ymm13
573 vpbroadcastd 0x38(%rdi),%ymm14
574 vpbroadcastd 0x3c(%rdi),%ymm15
576 vmovdqa %ymm0,0x00(%rsp)
577 vmovdqa %ymm1,0x20(%rsp)
578 vmovdqa %ymm2,0x40(%rsp)
579 vmovdqa %ymm3,0x60(%rsp)
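The eight-block path switches to a word-sliced (structure-of-arrays) layout: vpbroadcastd replicates each of the 16 state words across the eight 32-bit lanes of its own ymm register, and rows x0..x3 are immediately spilled to stack slots 0x00-0x60 because 16 registers cannot hold 16 state words plus temporaries. A sketch of the layout and of the per-lane counters that CTRINC adds (type and helper names are mine):

```c
#include <stdint.h>

/* Word-sliced layout: register j holds state word j for all 8 blocks. */
typedef struct { uint32_t lane[8]; } v8u32;

static void load_state_8x(v8u32 x[16], const uint32_t s[16])
{
	for (int j = 0; j < 16; j++)		/* vpbroadcastd s[j] */
		for (int b = 0; b < 8; b++)
			x[j].lane[b] = s[j];
}

static void add_block_counters(v8u32 *x12)
{
	for (int b = 0; b < 8; b++)		/* vpaddd CTRINC,%ymm12 */
		x12->lane[b] += b;
}
```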
585 # x12 += counter values 0-7
590 vpaddd 0x00(%rsp),%ymm4,%ymm0
591 vmovdqa %ymm0,0x00(%rsp)
595 vpaddd 0x20(%rsp),%ymm5,%ymm0
596 vmovdqa %ymm0,0x20(%rsp)
600 vpaddd 0x40(%rsp),%ymm6,%ymm0
601 vmovdqa %ymm0,0x40(%rsp)
605 vpaddd 0x60(%rsp),%ymm7,%ymm0
606 vmovdqa %ymm0,0x60(%rsp)
636 vpaddd 0x00(%rsp),%ymm4,%ymm0
637 vmovdqa %ymm0,0x00(%rsp)
641 vpaddd 0x20(%rsp),%ymm5,%ymm0
642 vmovdqa %ymm0,0x20(%rsp)
646 vpaddd 0x40(%rsp),%ymm6,%ymm0
647 vmovdqa %ymm0,0x40(%rsp)
651 vpaddd 0x60(%rsp),%ymm7,%ymm0
652 vmovdqa %ymm0,0x60(%rsp)
682 vpaddd 0x00(%rsp),%ymm5,%ymm0
683 vmovdqa %ymm0,0x00(%rsp)
687 vpaddd 0x20(%rsp),%ymm6,%ymm0
688 vmovdqa %ymm0,0x20(%rsp)
692 vpaddd 0x40(%rsp),%ymm7,%ymm0
693 vmovdqa %ymm0,0x40(%rsp)
697 vpaddd 0x60(%rsp),%ymm4,%ymm0
698 vmovdqa %ymm0,0x60(%rsp)
728 vpaddd 0x00(%rsp),%ymm5,%ymm0
729 vmovdqa %ymm0,0x00(%rsp)
733 vpaddd 0x20(%rsp),%ymm6,%ymm0
734 vmovdqa %ymm0,0x20(%rsp)
738 vpaddd 0x40(%rsp),%ymm7,%ymm0
739 vmovdqa %ymm0,0x40(%rsp)
743 vpaddd 0x60(%rsp),%ymm4,%ymm0
744 vmovdqa %ymm0,0x60(%rsp)
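The vpaddd/vmovdqa pairs above are the "a += b" steps of the quarter-rounds operating on the stack-resident rows: the first groups pair x0..x3 with x4..x7 (column rounds), the later groups with x5,x6,x7,x4 (diagonal rounds). In sketch form, each step is a load-add-store on its slot:

```c
#include <stdint.h>

typedef struct { uint32_t lane[8]; } v8u32;

/* One 'a += b' step on a stack-resident row, i.e. the repeated
 * vpaddd mem,%ymmN,%ymm0 / vmovdqa %ymm0,mem pairs in the listing. */
static void qr_a_plus_b(v8u32 *a_slot, const v8u32 *b)
{
	for (int l = 0; l < 8; l++)
		a_slot->lane[l] += b->lane[l];
}
```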
776 # x0..15[0-7] += s[0..15]
777 vpbroadcastd 0x00(%rdi),%ymm0
778 vpaddd 0x00(%rsp),%ymm0,%ymm0
779 vmovdqa %ymm0,0x00(%rsp)
780 vpbroadcastd 0x04(%rdi),%ymm0
781 vpaddd 0x20(%rsp),%ymm0,%ymm0
782 vmovdqa %ymm0,0x20(%rsp)
783 vpbroadcastd 0x08(%rdi),%ymm0
784 vpaddd 0x40(%rsp),%ymm0,%ymm0
785 vmovdqa %ymm0,0x40(%rsp)
786 vpbroadcastd 0x0c(%rdi),%ymm0
787 vpaddd 0x60(%rsp),%ymm0,%ymm0
788 vmovdqa %ymm0,0x60(%rsp)
789 vpbroadcastd 0x10(%rdi),%ymm0
791 vpbroadcastd 0x14(%rdi),%ymm0
793 vpbroadcastd 0x18(%rdi),%ymm0
795 vpbroadcastd 0x1c(%rdi),%ymm0
797 vpbroadcastd 0x20(%rdi),%ymm0
799 vpbroadcastd 0x24(%rdi),%ymm0
801 vpbroadcastd 0x28(%rdi),%ymm0
803 vpbroadcastd 0x2c(%rdi),%ymm0
805 vpbroadcastd 0x30(%rdi),%ymm0
807 vpbroadcastd 0x34(%rdi),%ymm0
809 vpbroadcastd 0x38(%rdi),%ymm0
811 vpbroadcastd 0x3c(%rdi),%ymm0
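After the rounds comes the classic ChaCha feed-forward: every input word is re-broadcast and added back into all eight lanes. For the four stack-resident rows the vpaddd/vmovdqa pairs are visible above; the adds into the register-resident rows don't contain a matching token and are elided. In sketch form (names are mine):

```c
#include <stdint.h>

typedef struct { uint32_t lane[8]; } v8u32;

/* Feed-forward: x[j] += s[j] in every lane, each input word
 * re-broadcast from the state in memory. */
static void feed_forward(v8u32 x[16], const uint32_t s[16])
{
	for (int j = 0; j < 16; j++)
		for (int b = 0; b < 8; b++)
			x[j].lane[b] += s[j];
}
```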
814 # x12 += counter values 0-7
818 vmovdqa 0x00(%rsp),%ymm0
819 vmovdqa 0x20(%rsp),%ymm1
822 vmovdqa %ymm2,0x00(%rsp)
823 vmovdqa %ymm1,0x20(%rsp)
824 vmovdqa 0x40(%rsp),%ymm0
825 vmovdqa 0x60(%rsp),%ymm1
828 vmovdqa %ymm2,0x40(%rsp)
829 vmovdqa %ymm1,0x60(%rsp)
850 vmovdqa 0x00(%rsp),%ymm0
851 vmovdqa 0x40(%rsp),%ymm2
854 vmovdqa %ymm1,0x00(%rsp)
855 vmovdqa %ymm2,0x40(%rsp)
856 vmovdqa 0x20(%rsp),%ymm0
857 vmovdqa 0x60(%rsp),%ymm2
860 vmovdqa %ymm1,0x20(%rsp)
861 vmovdqa %ymm2,0x60(%rsp)
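These moves bracket the transpose: the vpunpck{l,h}{dq,qdq} interleaves between them don't match the search and are elided, but together with the vperm2i128 step below they turn the word-sliced registers back into per-block keystream order. The net effect, functionally:

```c
#include <stdint.h>

typedef struct { uint32_t lane[8]; } v8u32;

/* Net effect of the interleave sequence: block b, word j of the
 * keystream comes from lane b of register j. */
static void unslice(uint32_t out[8][16], const v8u32 x[16])
{
	for (int b = 0; b < 8; b++)
		for (int j = 0; j < 16; j++)
			out[b][j] = x[j].lane[b];
}
```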
883 vmovdqa 0x00(%rsp),%ymm1
884 vperm2i128 $0x20,%ymm4,%ymm1,%ymm0
885 cmp $0x0020,%rax
887 vpxor 0x0000(%rdx),%ymm0,%ymm0
888 vmovdqu %ymm0,0x0000(%rsi)
889 vperm2i128 $0x31,%ymm4,%ymm1,%ymm4
891 vperm2i128 $0x20,%ymm12,%ymm8,%ymm0
892 cmp $0x0040,%rax
894 vpxor 0x0020(%rdx),%ymm0,%ymm0
895 vmovdqu %ymm0,0x0020(%rsi)
896 vperm2i128 $0x31,%ymm12,%ymm8,%ymm12
898 vmovdqa 0x40(%rsp),%ymm1
899 vperm2i128 $0x20,%ymm6,%ymm1,%ymm0
900 cmp $0x0060,%rax
902 vpxor 0x0040(%rdx),%ymm0,%ymm0
903 vmovdqu %ymm0,0x0040(%rsi)
904 vperm2i128 $0x31,%ymm6,%ymm1,%ymm6
906 vperm2i128 $0x20,%ymm14,%ymm10,%ymm0
907 cmp $0x0080,%rax
909 vpxor 0x0060(%rdx),%ymm0,%ymm0
910 vmovdqu %ymm0,0x0060(%rsi)
911 vperm2i128 $0x31,%ymm14,%ymm10,%ymm14
913 vmovdqa 0x20(%rsp),%ymm1
914 vperm2i128 $0x20,%ymm5,%ymm1,%ymm0
915 cmp $0x00a0,%rax
917 vpxor 0x0080(%rdx),%ymm0,%ymm0
918 vmovdqu %ymm0,0x0080(%rsi)
919 vperm2i128 $0x31,%ymm5,%ymm1,%ymm5
921 vperm2i128 $0x20,%ymm13,%ymm9,%ymm0
922 cmp $0x00c0,%rax
924 vpxor 0x00a0(%rdx),%ymm0,%ymm0
925 vmovdqu %ymm0,0x00a0(%rsi)
926 vperm2i128 $0x31,%ymm13,%ymm9,%ymm13
928 vmovdqa 0x60(%rsp),%ymm1
929 vperm2i128 $0x20,%ymm7,%ymm1,%ymm0
930 cmp $0x00e0,%rax
932 vpxor 0x00c0(%rdx),%ymm0,%ymm0
933 vmovdqu %ymm0,0x00c0(%rsi)
934 vperm2i128 $0x31,%ymm7,%ymm1,%ymm7
936 vperm2i128 $0x20,%ymm15,%ymm11,%ymm0
937 cmp $0x0100,%rax
939 vpxor 0x00e0(%rdx),%ymm0,%ymm0
940 vmovdqu %ymm0,0x00e0(%rsi)
941 vperm2i128 $0x31,%ymm15,%ymm11,%ymm15
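vperm2i128 with $0x20 picks the low 128-bit lanes of both sources and $0x31 the high lanes (in AT&T order, vperm2i128 $imm,%src2,%src1,%dst), so each output register ends up holding 32 contiguous keystream bytes before the same compare/XOR/store chunking as the smaller variants, now in 32-byte steps. A C model of the selector:

```c
#include <stdint.h>

/* vperm2i128: each half of dst picks one 128-bit lane; 0/1 = low/high
 * of src1, 2/3 = low/high of src2 (the zeroing control bit of each
 * nibble is ignored here).  $0x20 -> {src1.lo, src2.lo},
 * $0x31 -> {src1.hi, src2.hi}. */
static void perm2i128(uint64_t dst[4], const uint64_t src1[4],
		      const uint64_t src2[4], unsigned imm)
{
	const uint64_t *lane[4] = { src1, src1 + 2, src2, src2 + 2 };
	const uint64_t *lo = lane[imm & 3];
	const uint64_t *hi = lane[(imm >> 4) & 3];

	dst[0] = lo[0]; dst[1] = lo[1];
	dst[2] = hi[0]; dst[3] = hi[1];
}
```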
945 cmp $0x0120,%rax
947 vpxor 0x0100(%rdx),%ymm0,%ymm0
948 vmovdqu %ymm0,0x0100(%rsi)
951 cmp $0x0140,%rax
953 vpxor 0x0120(%rdx),%ymm0,%ymm0
954 vmovdqu %ymm0,0x0120(%rsi)
957 cmp $0x0160,%rax
959 vpxor 0x0140(%rdx),%ymm0,%ymm0
960 vmovdqu %ymm0,0x0140(%rsi)
963 cmp $0x0180,%rax
965 vpxor 0x0160(%rdx),%ymm0,%ymm0
966 vmovdqu %ymm0,0x0160(%rsi)
969 cmp $0x01a0,%rax
971 vpxor 0x0180(%rdx),%ymm0,%ymm0
972 vmovdqu %ymm0,0x0180(%rsi)
975 cmp $0x01c0,%rax
977 vpxor 0x01a0(%rdx),%ymm0,%ymm0
978 vmovdqu %ymm0,0x01a0(%rsi)
981 cmp $0x01e0,%rax
983 vpxor 0x01c0(%rdx),%ymm0,%ymm0
984 vmovdqu %ymm0,0x01c0(%rsi)
987 cmp $0x0200,%rax
989 vpxor 0x01e0(%rdx),%ymm0,%ymm0
990 vmovdqu %ymm0,0x01e0(%rsi)
1000 and $0x1f,%r9
1002 and $~0x1f,%rax
1011 vpxor 0x00(%rsp),%ymm0,%ymm0
1012 vmovdqa %ymm0,0x00(%rsp)