Lines Matching "1" and "c" (query: +full:1 +full:c)
56 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
74 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
98 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
110 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
137 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
179 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
187 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
194 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
247 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
251 METRIC_EVENT_CONSTRAINT(INTEL_TD_METRIC_BAD_SPEC, 1),
292 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
296 METRIC_EVENT_CONSTRAINT(INTEL_TD_METRIC_BAD_SPEC, 1),
355 "event=0x3c,umask=0x0,any=1"); /* cpu_clk_unhalted.thread_any */
364 "event=0xd,umask=0x3,cmask=1", /* int_misc.recovery_cycles */
365 "event=0xd,umask=0x3,cmask=1,any=1"); /* int_misc.recovery_cycles_any */
398 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
424 FIXED_EVENT_CONSTRAINT(0x003c, 1), /* CPU_CLK_UNHALTED.CORE */
448 [ C(L1D ) ] = {
449 [ C(OP_READ) ] = {
450 [ C(RESULT_ACCESS) ] = 0x81d0,
451 [ C(RESULT_MISS) ] = 0xe124,
453 [ C(OP_WRITE) ] = {
454 [ C(RESULT_ACCESS) ] = 0x82d0,
457 [ C(L1I ) ] = {
458 [ C(OP_READ) ] = {
459 [ C(RESULT_MISS) ] = 0xe424,
461 [ C(OP_WRITE) ] = {
462 [ C(RESULT_ACCESS) ] = -1,
463 [ C(RESULT_MISS) ] = -1,
466 [ C(LL ) ] = {
467 [ C(OP_READ) ] = {
468 [ C(RESULT_ACCESS) ] = 0x12a,
469 [ C(RESULT_MISS) ] = 0x12a,
471 [ C(OP_WRITE) ] = {
472 [ C(RESULT_ACCESS) ] = 0x12a,
473 [ C(RESULT_MISS) ] = 0x12a,
476 [ C(DTLB) ] = {
477 [ C(OP_READ) ] = {
478 [ C(RESULT_ACCESS) ] = 0x81d0,
479 [ C(RESULT_MISS) ] = 0xe12,
481 [ C(OP_WRITE) ] = {
482 [ C(RESULT_ACCESS) ] = 0x82d0,
483 [ C(RESULT_MISS) ] = 0xe13,
486 [ C(ITLB) ] = {
487 [ C(OP_READ) ] = {
488 [ C(RESULT_ACCESS) ] = -1,
489 [ C(RESULT_MISS) ] = 0xe11,
491 [ C(OP_WRITE) ] = {
492 [ C(RESULT_ACCESS) ] = -1,
493 [ C(RESULT_MISS) ] = -1,
495 [ C(OP_PREFETCH) ] = {
496 [ C(RESULT_ACCESS) ] = -1,
497 [ C(RESULT_MISS) ] = -1,
500 [ C(BPU ) ] = {
501 [ C(OP_READ) ] = {
502 [ C(RESULT_ACCESS) ] = 0x4c4,
503 [ C(RESULT_MISS) ] = 0x4c5,
505 [ C(OP_WRITE) ] = {
506 [ C(RESULT_ACCESS) ] = -1,
507 [ C(RESULT_MISS) ] = -1,
509 [ C(OP_PREFETCH) ] = {
510 [ C(RESULT_ACCESS) ] = -1,
511 [ C(RESULT_MISS) ] = -1,
514 [ C(NODE) ] = {
515 [ C(OP_READ) ] = {
516 [ C(RESULT_ACCESS) ] = 0x12a,
517 [ C(RESULT_MISS) ] = 0x12a,
527 [ C(LL ) ] = {
528 [ C(OP_READ) ] = {
529 [ C(RESULT_ACCESS) ] = 0x10001,
530 [ C(RESULT_MISS) ] = 0x3fbfc00001,
532 [ C(OP_WRITE) ] = {
533 [ C(RESULT_ACCESS) ] = 0x3f3ffc0002,
534 [ C(RESULT_MISS) ] = 0x3f3fc00002,
537 [ C(NODE) ] = {
538 [ C(OP_READ) ] = {
539 [ C(RESULT_ACCESS) ] = 0x10c000001,
540 [ C(RESULT_MISS) ] = 0x3fb3000001,
555 #define SKL_DEMAND_RFO BIT_ULL(1)
594 [ C(L1D ) ] = {
595 [ C(OP_READ) ] = {
596 [ C(RESULT_ACCESS) ] = 0x81d0, /* MEM_INST_RETIRED.ALL_LOADS */
597 [ C(RESULT_MISS) ] = 0x151, /* L1D.REPLACEMENT */
599 [ C(OP_WRITE) ] = {
600 [ C(RESULT_ACCESS) ] = 0x82d0, /* MEM_INST_RETIRED.ALL_STORES */
601 [ C(RESULT_MISS) ] = 0x0,
603 [ C(OP_PREFETCH) ] = {
604 [ C(RESULT_ACCESS) ] = 0x0,
605 [ C(RESULT_MISS) ] = 0x0,
608 [ C(L1I ) ] = {
609 [ C(OP_READ) ] = {
610 [ C(RESULT_ACCESS) ] = 0x0,
611 [ C(RESULT_MISS) ] = 0x283, /* ICACHE_64B.MISS */
613 [ C(OP_WRITE) ] = {
614 [ C(RESULT_ACCESS) ] = -1,
615 [ C(RESULT_MISS) ] = -1,
617 [ C(OP_PREFETCH) ] = {
618 [ C(RESULT_ACCESS) ] = 0x0,
619 [ C(RESULT_MISS) ] = 0x0,
622 [ C(LL ) ] = {
623 [ C(OP_READ) ] = {
624 [ C(RESULT_ACCESS) ] = 0x1b7, /* OFFCORE_RESPONSE */
625 [ C(RESULT_MISS) ] = 0x1b7, /* OFFCORE_RESPONSE */
627 [ C(OP_WRITE) ] = {
628 [ C(RESULT_ACCESS) ] = 0x1b7, /* OFFCORE_RESPONSE */
629 [ C(RESULT_MISS) ] = 0x1b7, /* OFFCORE_RESPONSE */
631 [ C(OP_PREFETCH) ] = {
632 [ C(RESULT_ACCESS) ] = 0x0,
633 [ C(RESULT_MISS) ] = 0x0,
636 [ C(DTLB) ] = {
637 [ C(OP_READ) ] = {
638 [ C(RESULT_ACCESS) ] = 0x81d0, /* MEM_INST_RETIRED.ALL_LOADS */
639 [ C(RESULT_MISS) ] = 0xe08, /* DTLB_LOAD_MISSES.WALK_COMPLETED */
641 [ C(OP_WRITE) ] = {
642 [ C(RESULT_ACCESS) ] = 0x82d0, /* MEM_INST_RETIRED.ALL_STORES */
643 [ C(RESULT_MISS) ] = 0xe49, /* DTLB_STORE_MISSES.WALK_COMPLETED */
645 [ C(OP_PREFETCH) ] = {
646 [ C(RESULT_ACCESS) ] = 0x0,
647 [ C(RESULT_MISS) ] = 0x0,
650 [ C(ITLB) ] = {
651 [ C(OP_READ) ] = {
652 [ C(RESULT_ACCESS) ] = 0x2085, /* ITLB_MISSES.STLB_HIT */
653 [ C(RESULT_MISS) ] = 0xe85, /* ITLB_MISSES.WALK_COMPLETED */
655 [ C(OP_WRITE) ] = {
656 [ C(RESULT_ACCESS) ] = -1,
657 [ C(RESULT_MISS) ] = -1,
659 [ C(OP_PREFETCH) ] = {
660 [ C(RESULT_ACCESS) ] = -1,
661 [ C(RESULT_MISS) ] = -1,
664 [ C(BPU ) ] = {
665 [ C(OP_READ) ] = {
666 [ C(RESULT_ACCESS) ] = 0xc4, /* BR_INST_RETIRED.ALL_BRANCHES */
667 [ C(RESULT_MISS) ] = 0xc5, /* BR_MISP_RETIRED.ALL_BRANCHES */
669 [ C(OP_WRITE) ] = {
670 [ C(RESULT_ACCESS) ] = -1,
671 [ C(RESULT_MISS) ] = -1,
673 [ C(OP_PREFETCH) ] = {
674 [ C(RESULT_ACCESS) ] = -1,
675 [ C(RESULT_MISS) ] = -1,
678 [ C(NODE) ] = {
679 [ C(OP_READ) ] = {
680 [ C(RESULT_ACCESS) ] = 0x1b7, /* OFFCORE_RESPONSE */
681 [ C(RESULT_MISS) ] = 0x1b7, /* OFFCORE_RESPONSE */
683 [ C(OP_WRITE) ] = {
684 [ C(RESULT_ACCESS) ] = 0x1b7, /* OFFCORE_RESPONSE */
685 [ C(RESULT_MISS) ] = 0x1b7, /* OFFCORE_RESPONSE */
687 [ C(OP_PREFETCH) ] = {
688 [ C(RESULT_ACCESS) ] = 0x0,
689 [ C(RESULT_MISS) ] = 0x0,
699 [ C(LL ) ] = {
700 [ C(OP_READ) ] = {
701 [ C(RESULT_ACCESS) ] = SKL_DEMAND_READ|
703 [ C(RESULT_MISS) ] = SKL_DEMAND_READ|
707 [ C(OP_WRITE) ] = {
708 [ C(RESULT_ACCESS) ] = SKL_DEMAND_WRITE|
710 [ C(RESULT_MISS) ] = SKL_DEMAND_WRITE|
714 [ C(OP_PREFETCH) ] = {
715 [ C(RESULT_ACCESS) ] = 0x0,
716 [ C(RESULT_MISS) ] = 0x0,
719 [ C(NODE) ] = {
720 [ C(OP_READ) ] = {
721 [ C(RESULT_ACCESS) ] = SKL_DEMAND_READ|
723 [ C(RESULT_MISS) ] = SKL_DEMAND_READ|
726 [ C(OP_WRITE) ] = {
727 [ C(RESULT_ACCESS) ] = SKL_DEMAND_WRITE|
729 [ C(RESULT_MISS) ] = SKL_DEMAND_WRITE|
732 [ C(OP_PREFETCH) ] = {
733 [ C(RESULT_ACCESS) ] = 0x0,
734 [ C(RESULT_MISS) ] = 0x0,
739 #define SNB_DMND_DATA_RD (1ULL << 0)
740 #define SNB_DMND_RFO (1ULL << 1)
741 #define SNB_DMND_IFETCH (1ULL << 2)
742 #define SNB_DMND_WB (1ULL << 3)
743 #define SNB_PF_DATA_RD (1ULL << 4)
744 #define SNB_PF_RFO (1ULL << 5)
745 #define SNB_PF_IFETCH (1ULL << 6)
746 #define SNB_LLC_DATA_RD (1ULL << 7)
747 #define SNB_LLC_RFO (1ULL << 8)
748 #define SNB_LLC_IFETCH (1ULL << 9)
749 #define SNB_BUS_LOCKS (1ULL << 10)
750 #define SNB_STRM_ST (1ULL << 11)
751 #define SNB_OTHER (1ULL << 15)
752 #define SNB_RESP_ANY (1ULL << 16)
753 #define SNB_NO_SUPP (1ULL << 17)
754 #define SNB_LLC_HITM (1ULL << 18)
755 #define SNB_LLC_HITE (1ULL << 19)
756 #define SNB_LLC_HITS (1ULL << 20)
757 #define SNB_LLC_HITF (1ULL << 21)
758 #define SNB_LOCAL (1ULL << 22)
760 #define SNB_SNP_NONE (1ULL << 31)
761 #define SNB_SNP_NOT_NEEDED (1ULL << 32)
762 #define SNB_SNP_MISS (1ULL << 33)
763 #define SNB_NO_FWD (1ULL << 34)
764 #define SNB_SNP_FWD (1ULL << 35)
765 #define SNB_HITM (1ULL << 36)
766 #define SNB_NON_DRAM (1ULL << 37)
787 [ C(LL ) ] = {
788 [ C(OP_READ) ] = {
789 [ C(RESULT_ACCESS) ] = SNB_DMND_READ|SNB_L3_ACCESS,
790 [ C(RESULT_MISS) ] = SNB_DMND_READ|SNB_L3_MISS,
792 [ C(OP_WRITE) ] = {
793 [ C(RESULT_ACCESS) ] = SNB_DMND_WRITE|SNB_L3_ACCESS,
794 [ C(RESULT_MISS) ] = SNB_DMND_WRITE|SNB_L3_MISS,
796 [ C(OP_PREFETCH) ] = {
797 [ C(RESULT_ACCESS) ] = SNB_DMND_PREFETCH|SNB_L3_ACCESS,
798 [ C(RESULT_MISS) ] = SNB_DMND_PREFETCH|SNB_L3_MISS,
801 [ C(NODE) ] = {
802 [ C(OP_READ) ] = {
803 [ C(RESULT_ACCESS) ] = SNB_DMND_READ|SNB_DRAM_ANY,
804 [ C(RESULT_MISS) ] = SNB_DMND_READ|SNB_DRAM_REMOTE,
806 [ C(OP_WRITE) ] = {
807 [ C(RESULT_ACCESS) ] = SNB_DMND_WRITE|SNB_DRAM_ANY,
808 [ C(RESULT_MISS) ] = SNB_DMND_WRITE|SNB_DRAM_REMOTE,
810 [ C(OP_PREFETCH) ] = {
811 [ C(RESULT_ACCESS) ] = SNB_DMND_PREFETCH|SNB_DRAM_ANY,
812 [ C(RESULT_MISS) ] = SNB_DMND_PREFETCH|SNB_DRAM_REMOTE,
822 [ C(L1D) ] = {
823 [ C(OP_READ) ] = {
824 [ C(RESULT_ACCESS) ] = 0xf1d0, /* MEM_UOP_RETIRED.LOADS */
825 [ C(RESULT_MISS) ] = 0x0151, /* L1D.REPLACEMENT */
827 [ C(OP_WRITE) ] = {
828 [ C(RESULT_ACCESS) ] = 0xf2d0, /* MEM_UOP_RETIRED.STORES */
829 [ C(RESULT_MISS) ] = 0x0851, /* L1D.ALL_M_REPLACEMENT */
831 [ C(OP_PREFETCH) ] = {
832 [ C(RESULT_ACCESS) ] = 0x0,
833 [ C(RESULT_MISS) ] = 0x024e, /* HW_PRE_REQ.DL1_MISS */
836 [ C(L1I ) ] = {
837 [ C(OP_READ) ] = {
838 [ C(RESULT_ACCESS) ] = 0x0,
839 [ C(RESULT_MISS) ] = 0x0280, /* ICACHE.MISSES */
841 [ C(OP_WRITE) ] = {
842 [ C(RESULT_ACCESS) ] = -1,
843 [ C(RESULT_MISS) ] = -1,
845 [ C(OP_PREFETCH) ] = {
846 [ C(RESULT_ACCESS) ] = 0x0,
847 [ C(RESULT_MISS) ] = 0x0,
850 [ C(LL ) ] = {
851 [ C(OP_READ) ] = {
853 [ C(RESULT_ACCESS) ] = 0x01b7,
855 [ C(RESULT_MISS) ] = 0x01b7,
857 [ C(OP_WRITE) ] = {
859 [ C(RESULT_ACCESS) ] = 0x01b7,
861 [ C(RESULT_MISS) ] = 0x01b7,
863 [ C(OP_PREFETCH) ] = {
865 [ C(RESULT_ACCESS) ] = 0x01b7,
867 [ C(RESULT_MISS) ] = 0x01b7,
870 [ C(DTLB) ] = {
871 [ C(OP_READ) ] = {
872 [ C(RESULT_ACCESS) ] = 0x81d0, /* MEM_UOP_RETIRED.ALL_LOADS */
873 [ C(RESULT_MISS) ] = 0x0108, /* DTLB_LOAD_MISSES.CAUSES_A_WALK */
875 [ C(OP_WRITE) ] = {
876 [ C(RESULT_ACCESS) ] = 0x82d0, /* MEM_UOP_RETIRED.ALL_STORES */
877 [ C(RESULT_MISS) ] = 0x0149, /* DTLB_STORE_MISSES.MISS_CAUSES_A_WALK */
879 [ C(OP_PREFETCH) ] = {
880 [ C(RESULT_ACCESS) ] = 0x0,
881 [ C(RESULT_MISS) ] = 0x0,
884 [ C(ITLB) ] = {
885 [ C(OP_READ) ] = {
886 [ C(RESULT_ACCESS) ] = 0x1085, /* ITLB_MISSES.STLB_HIT */
887 [ C(RESULT_MISS) ] = 0x0185, /* ITLB_MISSES.CAUSES_A_WALK */
889 [ C(OP_WRITE) ] = {
890 [ C(RESULT_ACCESS) ] = -1,
891 [ C(RESULT_MISS) ] = -1,
893 [ C(OP_PREFETCH) ] = {
894 [ C(RESULT_ACCESS) ] = -1,
895 [ C(RESULT_MISS) ] = -1,
898 [ C(BPU ) ] = {
899 [ C(OP_READ) ] = {
900 [ C(RESULT_ACCESS) ] = 0x00c4, /* BR_INST_RETIRED.ALL_BRANCHES */
901 [ C(RESULT_MISS) ] = 0x00c5, /* BR_MISP_RETIRED.ALL_BRANCHES */
903 [ C(OP_WRITE) ] = {
904 [ C(RESULT_ACCESS) ] = -1,
905 [ C(RESULT_MISS) ] = -1,
907 [ C(OP_PREFETCH) ] = {
908 [ C(RESULT_ACCESS) ] = -1,
909 [ C(RESULT_MISS) ] = -1,
912 [ C(NODE) ] = {
913 [ C(OP_READ) ] = {
914 [ C(RESULT_ACCESS) ] = 0x01b7,
915 [ C(RESULT_MISS) ] = 0x01b7,
917 [ C(OP_WRITE) ] = {
918 [ C(RESULT_ACCESS) ] = 0x01b7,
919 [ C(RESULT_MISS) ] = 0x01b7,
921 [ C(OP_PREFETCH) ] = {
922 [ C(RESULT_ACCESS) ] = 0x01b7,
923 [ C(RESULT_MISS) ] = 0x01b7,
939 #define HSW_DEMAND_RFO BIT_ULL(1)
978 [ C(L1D ) ] = {
979 [ C(OP_READ) ] = {
980 [ C(RESULT_ACCESS) ] = 0x81d0, /* MEM_UOPS_RETIRED.ALL_LOADS */
981 [ C(RESULT_MISS) ] = 0x151, /* L1D.REPLACEMENT */
983 [ C(OP_WRITE) ] = {
984 [ C(RESULT_ACCESS) ] = 0x82d0, /* MEM_UOPS_RETIRED.ALL_STORES */
985 [ C(RESULT_MISS) ] = 0x0,
987 [ C(OP_PREFETCH) ] = {
988 [ C(RESULT_ACCESS) ] = 0x0,
989 [ C(RESULT_MISS) ] = 0x0,
992 [ C(L1I ) ] = {
993 [ C(OP_READ) ] = {
994 [ C(RESULT_ACCESS) ] = 0x0,
995 [ C(RESULT_MISS) ] = 0x280, /* ICACHE.MISSES */
997 [ C(OP_WRITE) ] = {
998 [ C(RESULT_ACCESS) ] = -1,
999 [ C(RESULT_MISS) ] = -1,
1001 [ C(OP_PREFETCH) ] = {
1002 [ C(RESULT_ACCESS) ] = 0x0,
1003 [ C(RESULT_MISS) ] = 0x0,
1006 [ C(LL ) ] = {
1007 [ C(OP_READ) ] = {
1008 [ C(RESULT_ACCESS) ] = 0x1b7, /* OFFCORE_RESPONSE */
1009 [ C(RESULT_MISS) ] = 0x1b7, /* OFFCORE_RESPONSE */
1011 [ C(OP_WRITE) ] = {
1012 [ C(RESULT_ACCESS) ] = 0x1b7, /* OFFCORE_RESPONSE */
1013 [ C(RESULT_MISS) ] = 0x1b7, /* OFFCORE_RESPONSE */
1015 [ C(OP_PREFETCH) ] = {
1016 [ C(RESULT_ACCESS) ] = 0x0,
1017 [ C(RESULT_MISS) ] = 0x0,
1020 [ C(DTLB) ] = {
1021 [ C(OP_READ) ] = {
1022 [ C(RESULT_ACCESS) ] = 0x81d0, /* MEM_UOPS_RETIRED.ALL_LOADS */
1023 [ C(RESULT_MISS) ] = 0x108, /* DTLB_LOAD_MISSES.MISS_CAUSES_A_WALK */
1025 [ C(OP_WRITE) ] = {
1026 [ C(RESULT_ACCESS) ] = 0x82d0, /* MEM_UOPS_RETIRED.ALL_STORES */
1027 [ C(RESULT_MISS) ] = 0x149, /* DTLB_STORE_MISSES.MISS_CAUSES_A_WALK */
1029 [ C(OP_PREFETCH) ] = {
1030 [ C(RESULT_ACCESS) ] = 0x0,
1031 [ C(RESULT_MISS) ] = 0x0,
1034 [ C(ITLB) ] = {
1035 [ C(OP_READ) ] = {
1036 [ C(RESULT_ACCESS) ] = 0x6085, /* ITLB_MISSES.STLB_HIT */
1037 [ C(RESULT_MISS) ] = 0x185, /* ITLB_MISSES.MISS_CAUSES_A_WALK */
1039 [ C(OP_WRITE) ] = {
1040 [ C(RESULT_ACCESS) ] = -1,
1041 [ C(RESULT_MISS) ] = -1,
1043 [ C(OP_PREFETCH) ] = {
1044 [ C(RESULT_ACCESS) ] = -1,
1045 [ C(RESULT_MISS) ] = -1,
1048 [ C(BPU ) ] = {
1049 [ C(OP_READ) ] = {
1050 [ C(RESULT_ACCESS) ] = 0xc4, /* BR_INST_RETIRED.ALL_BRANCHES */
1051 [ C(RESULT_MISS) ] = 0xc5, /* BR_MISP_RETIRED.ALL_BRANCHES */
1053 [ C(OP_WRITE) ] = {
1054 [ C(RESULT_ACCESS) ] = -1,
1055 [ C(RESULT_MISS) ] = -1,
1057 [ C(OP_PREFETCH) ] = {
1058 [ C(RESULT_ACCESS) ] = -1,
1059 [ C(RESULT_MISS) ] = -1,
1062 [ C(NODE) ] = {
1063 [ C(OP_READ) ] = {
1064 [ C(RESULT_ACCESS) ] = 0x1b7, /* OFFCORE_RESPONSE */
1065 [ C(RESULT_MISS) ] = 0x1b7, /* OFFCORE_RESPONSE */
1067 [ C(OP_WRITE) ] = {
1068 [ C(RESULT_ACCESS) ] = 0x1b7, /* OFFCORE_RESPONSE */
1069 [ C(RESULT_MISS) ] = 0x1b7, /* OFFCORE_RESPONSE */
1071 [ C(OP_PREFETCH) ] = {
1072 [ C(RESULT_ACCESS) ] = 0x0,
1073 [ C(RESULT_MISS) ] = 0x0,
1083 [ C(LL ) ] = {
1084 [ C(OP_READ) ] = {
1085 [ C(RESULT_ACCESS) ] = HSW_DEMAND_READ|
1087 [ C(RESULT_MISS) ] = HSW_DEMAND_READ|
1090 [ C(OP_WRITE) ] = {
1091 [ C(RESULT_ACCESS) ] = HSW_DEMAND_WRITE|
1093 [ C(RESULT_MISS) ] = HSW_DEMAND_WRITE|
1096 [ C(OP_PREFETCH) ] = {
1097 [ C(RESULT_ACCESS) ] = 0x0,
1098 [ C(RESULT_MISS) ] = 0x0,
1101 [ C(NODE) ] = {
1102 [ C(OP_READ) ] = {
1103 [ C(RESULT_ACCESS) ] = HSW_DEMAND_READ|
1106 [ C(RESULT_MISS) ] = HSW_DEMAND_READ|
1110 [ C(OP_WRITE) ] = {
1111 [ C(RESULT_ACCESS) ] = HSW_DEMAND_WRITE|
1114 [ C(RESULT_MISS) ] = HSW_DEMAND_WRITE|
1118 [ C(OP_PREFETCH) ] = {
1119 [ C(RESULT_ACCESS) ] = 0x0,
1120 [ C(RESULT_MISS) ] = 0x0,
1130 [ C(L1D) ] = {
1131 [ C(OP_READ) ] = {
1132 [ C(RESULT_ACCESS) ] = 0x010b, /* MEM_INST_RETIRED.LOADS */
1133 [ C(RESULT_MISS) ] = 0x0151, /* L1D.REPL */
1135 [ C(OP_WRITE) ] = {
1136 [ C(RESULT_ACCESS) ] = 0x020b, /* MEM_INST_RETIRED.STORES */
1137 [ C(RESULT_MISS) ] = 0x0251, /* L1D.M_REPL */
1139 [ C(OP_PREFETCH) ] = {
1140 [ C(RESULT_ACCESS) ] = 0x014e, /* L1D_PREFETCH.REQUESTS */
1141 [ C(RESULT_MISS) ] = 0x024e, /* L1D_PREFETCH.MISS */
1144 [ C(L1I ) ] = {
1145 [ C(OP_READ) ] = {
1146 [ C(RESULT_ACCESS) ] = 0x0380, /* L1I.READS */
1147 [ C(RESULT_MISS) ] = 0x0280, /* L1I.MISSES */
1149 [ C(OP_WRITE) ] = {
1150 [ C(RESULT_ACCESS) ] = -1,
1151 [ C(RESULT_MISS) ] = -1,
1153 [ C(OP_PREFETCH) ] = {
1154 [ C(RESULT_ACCESS) ] = 0x0,
1155 [ C(RESULT_MISS) ] = 0x0,
1158 [ C(LL ) ] = {
1159 [ C(OP_READ) ] = {
1161 [ C(RESULT_ACCESS) ] = 0x01b7,
1163 [ C(RESULT_MISS) ] = 0x01b7,
1169 [ C(OP_WRITE) ] = {
1171 [ C(RESULT_ACCESS) ] = 0x01b7,
1173 [ C(RESULT_MISS) ] = 0x01b7,
1175 [ C(OP_PREFETCH) ] = {
1177 [ C(RESULT_ACCESS) ] = 0x01b7,
1179 [ C(RESULT_MISS) ] = 0x01b7,
1182 [ C(DTLB) ] = {
1183 [ C(OP_READ) ] = {
1184 [ C(RESULT_ACCESS) ] = 0x010b, /* MEM_INST_RETIRED.LOADS */
1185 [ C(RESULT_MISS) ] = 0x0108, /* DTLB_LOAD_MISSES.ANY */
1187 [ C(OP_WRITE) ] = {
1188 [ C(RESULT_ACCESS) ] = 0x020b, /* MEM_INST_RETIRED.STORES */
1189 [ C(RESULT_MISS) ] = 0x010c, /* MEM_STORE_RETIRED.DTLB_MISS */
1191 [ C(OP_PREFETCH) ] = {
1192 [ C(RESULT_ACCESS) ] = 0x0,
1193 [ C(RESULT_MISS) ] = 0x0,
1196 [ C(ITLB) ] = {
1197 [ C(OP_READ) ] = {
1198 [ C(RESULT_ACCESS) ] = 0x01c0, /* INST_RETIRED.ANY_P */
1199 [ C(RESULT_MISS) ] = 0x0185, /* ITLB_MISSES.ANY */
1201 [ C(OP_WRITE) ] = {
1202 [ C(RESULT_ACCESS) ] = -1,
1203 [ C(RESULT_MISS) ] = -1,
1205 [ C(OP_PREFETCH) ] = {
1206 [ C(RESULT_ACCESS) ] = -1,
1207 [ C(RESULT_MISS) ] = -1,
1210 [ C(BPU ) ] = {
1211 [ C(OP_READ) ] = {
1212 [ C(RESULT_ACCESS) ] = 0x00c4, /* BR_INST_RETIRED.ALL_BRANCHES */
1213 [ C(RESULT_MISS) ] = 0x03e8, /* BPU_CLEARS.ANY */
1215 [ C(OP_WRITE) ] = {
1216 [ C(RESULT_ACCESS) ] = -1,
1217 [ C(RESULT_MISS) ] = -1,
1219 [ C(OP_PREFETCH) ] = {
1220 [ C(RESULT_ACCESS) ] = -1,
1221 [ C(RESULT_MISS) ] = -1,
1224 [ C(NODE) ] = {
1225 [ C(OP_READ) ] = {
1226 [ C(RESULT_ACCESS) ] = 0x01b7,
1227 [ C(RESULT_MISS) ] = 0x01b7,
1229 [ C(OP_WRITE) ] = {
1230 [ C(RESULT_ACCESS) ] = 0x01b7,
1231 [ C(RESULT_MISS) ] = 0x01b7,
1233 [ C(OP_PREFETCH) ] = {
1234 [ C(RESULT_ACCESS) ] = 0x01b7,
1235 [ C(RESULT_MISS) ] = 0x01b7,
1245 #define NHM_DMND_DATA_RD (1 << 0)
1246 #define NHM_DMND_RFO (1 << 1)
1247 #define NHM_DMND_IFETCH (1 << 2)
1248 #define NHM_DMND_WB (1 << 3)
1249 #define NHM_PF_DATA_RD (1 << 4)
1250 #define NHM_PF_DATA_RFO (1 << 5)
1251 #define NHM_PF_IFETCH (1 << 6)
1252 #define NHM_OFFCORE_OTHER (1 << 7)
1253 #define NHM_UNCORE_HIT (1 << 8)
1254 #define NHM_OTHER_CORE_HIT_SNP (1 << 9)
1255 #define NHM_OTHER_CORE_HITM (1 << 10)
1257 #define NHM_REMOTE_CACHE_FWD (1 << 12)
1258 #define NHM_REMOTE_DRAM (1 << 13)
1259 #define NHM_LOCAL_DRAM (1 << 14)
1260 #define NHM_NON_DRAM (1 << 15)
1278 [ C(LL ) ] = {
1279 [ C(OP_READ) ] = {
1280 [ C(RESULT_ACCESS) ] = NHM_DMND_READ|NHM_L3_ACCESS,
1281 [ C(RESULT_MISS) ] = NHM_DMND_READ|NHM_L3_MISS,
1283 [ C(OP_WRITE) ] = {
1284 [ C(RESULT_ACCESS) ] = NHM_DMND_WRITE|NHM_L3_ACCESS,
1285 [ C(RESULT_MISS) ] = NHM_DMND_WRITE|NHM_L3_MISS,
1287 [ C(OP_PREFETCH) ] = {
1288 [ C(RESULT_ACCESS) ] = NHM_DMND_PREFETCH|NHM_L3_ACCESS,
1289 [ C(RESULT_MISS) ] = NHM_DMND_PREFETCH|NHM_L3_MISS,
1292 [ C(NODE) ] = {
1293 [ C(OP_READ) ] = {
1294 [ C(RESULT_ACCESS) ] = NHM_DMND_READ|NHM_LOCAL|NHM_REMOTE,
1295 [ C(RESULT_MISS) ] = NHM_DMND_READ|NHM_REMOTE,
1297 [ C(OP_WRITE) ] = {
1298 [ C(RESULT_ACCESS) ] = NHM_DMND_WRITE|NHM_LOCAL|NHM_REMOTE,
1299 [ C(RESULT_MISS) ] = NHM_DMND_WRITE|NHM_REMOTE,
1301 [ C(OP_PREFETCH) ] = {
1302 [ C(RESULT_ACCESS) ] = NHM_DMND_PREFETCH|NHM_LOCAL|NHM_REMOTE,
1303 [ C(RESULT_MISS) ] = NHM_DMND_PREFETCH|NHM_REMOTE,
1313 [ C(L1D) ] = {
1314 [ C(OP_READ) ] = {
1315 [ C(RESULT_ACCESS) ] = 0x010b, /* MEM_INST_RETIRED.LOADS */
1316 [ C(RESULT_MISS) ] = 0x0151, /* L1D.REPL */
1318 [ C(OP_WRITE) ] = {
1319 [ C(RESULT_ACCESS) ] = 0x020b, /* MEM_INST_RETIRED.STORES */
1320 [ C(RESULT_MISS) ] = 0x0251, /* L1D.M_REPL */
1322 [ C(OP_PREFETCH) ] = {
1323 [ C(RESULT_ACCESS) ] = 0x014e, /* L1D_PREFETCH.REQUESTS */
1324 [ C(RESULT_MISS) ] = 0x024e, /* L1D_PREFETCH.MISS */
1327 [ C(L1I ) ] = {
1328 [ C(OP_READ) ] = {
1329 [ C(RESULT_ACCESS) ] = 0x0380, /* L1I.READS */
1330 [ C(RESULT_MISS) ] = 0x0280, /* L1I.MISSES */
1332 [ C(OP_WRITE) ] = {
1333 [ C(RESULT_ACCESS) ] = -1,
1334 [ C(RESULT_MISS) ] = -1,
1336 [ C(OP_PREFETCH) ] = {
1337 [ C(RESULT_ACCESS) ] = 0x0,
1338 [ C(RESULT_MISS) ] = 0x0,
1341 [ C(LL ) ] = {
1342 [ C(OP_READ) ] = {
1344 [ C(RESULT_ACCESS) ] = 0x01b7,
1346 [ C(RESULT_MISS) ] = 0x01b7,
1352 [ C(OP_WRITE) ] = {
1354 [ C(RESULT_ACCESS) ] = 0x01b7,
1356 [ C(RESULT_MISS) ] = 0x01b7,
1358 [ C(OP_PREFETCH) ] = {
1360 [ C(RESULT_ACCESS) ] = 0x01b7,
1362 [ C(RESULT_MISS) ] = 0x01b7,
1365 [ C(DTLB) ] = {
1366 [ C(OP_READ) ] = {
1367 [ C(RESULT_ACCESS) ] = 0x0f40, /* L1D_CACHE_LD.MESI (alias) */
1368 [ C(RESULT_MISS) ] = 0x0108, /* DTLB_LOAD_MISSES.ANY */
1370 [ C(OP_WRITE) ] = {
1371 [ C(RESULT_ACCESS) ] = 0x0f41, /* L1D_CACHE_ST.MESI (alias) */
1372 [ C(RESULT_MISS) ] = 0x010c, /* MEM_STORE_RETIRED.DTLB_MISS */
1374 [ C(OP_PREFETCH) ] = {
1375 [ C(RESULT_ACCESS) ] = 0x0,
1376 [ C(RESULT_MISS) ] = 0x0,
1379 [ C(ITLB) ] = {
1380 [ C(OP_READ) ] = {
1381 [ C(RESULT_ACCESS) ] = 0x01c0, /* INST_RETIRED.ANY_P */
1382 [ C(RESULT_MISS) ] = 0x20c8, /* ITLB_MISS_RETIRED */
1384 [ C(OP_WRITE) ] = {
1385 [ C(RESULT_ACCESS) ] = -1,
1386 [ C(RESULT_MISS) ] = -1,
1388 [ C(OP_PREFETCH) ] = {
1389 [ C(RESULT_ACCESS) ] = -1,
1390 [ C(RESULT_MISS) ] = -1,
1393 [ C(BPU ) ] = {
1394 [ C(OP_READ) ] = {
1395 [ C(RESULT_ACCESS) ] = 0x00c4, /* BR_INST_RETIRED.ALL_BRANCHES */
1396 [ C(RESULT_MISS) ] = 0x03e8, /* BPU_CLEARS.ANY */
1398 [ C(OP_WRITE) ] = {
1399 [ C(RESULT_ACCESS) ] = -1,
1400 [ C(RESULT_MISS) ] = -1,
1402 [ C(OP_PREFETCH) ] = {
1403 [ C(RESULT_ACCESS) ] = -1,
1404 [ C(RESULT_MISS) ] = -1,
1407 [ C(NODE) ] = {
1408 [ C(OP_READ) ] = {
1409 [ C(RESULT_ACCESS) ] = 0x01b7,
1410 [ C(RESULT_MISS) ] = 0x01b7,
1412 [ C(OP_WRITE) ] = {
1413 [ C(RESULT_ACCESS) ] = 0x01b7,
1414 [ C(RESULT_MISS) ] = 0x01b7,
1416 [ C(OP_PREFETCH) ] = {
1417 [ C(RESULT_ACCESS) ] = 0x01b7,
1418 [ C(RESULT_MISS) ] = 0x01b7,
1428 [ C(L1D) ] = {
1429 [ C(OP_READ) ] = {
1430 [ C(RESULT_ACCESS) ] = 0x0f40, /* L1D_CACHE_LD.MESI */
1431 [ C(RESULT_MISS) ] = 0x0140, /* L1D_CACHE_LD.I_STATE */
1433 [ C(OP_WRITE) ] = {
1434 [ C(RESULT_ACCESS) ] = 0x0f41, /* L1D_CACHE_ST.MESI */
1435 [ C(RESULT_MISS) ] = 0x0141, /* L1D_CACHE_ST.I_STATE */
1437 [ C(OP_PREFETCH) ] = {
1438 [ C(RESULT_ACCESS) ] = 0x104e, /* L1D_PREFETCH.REQUESTS */
1439 [ C(RESULT_MISS) ] = 0,
1442 [ C(L1I ) ] = {
1443 [ C(OP_READ) ] = {
1444 [ C(RESULT_ACCESS) ] = 0x0080, /* L1I.READS */
1445 [ C(RESULT_MISS) ] = 0x0081, /* L1I.MISSES */
1447 [ C(OP_WRITE) ] = {
1448 [ C(RESULT_ACCESS) ] = -1,
1449 [ C(RESULT_MISS) ] = -1,
1451 [ C(OP_PREFETCH) ] = {
1452 [ C(RESULT_ACCESS) ] = 0,
1453 [ C(RESULT_MISS) ] = 0,
1456 [ C(LL ) ] = {
1457 [ C(OP_READ) ] = {
1458 [ C(RESULT_ACCESS) ] = 0x4f29, /* L2_LD.MESI */
1459 [ C(RESULT_MISS) ] = 0x4129, /* L2_LD.ISTATE */
1461 [ C(OP_WRITE) ] = {
1462 [ C(RESULT_ACCESS) ] = 0x4f2A, /* L2_ST.MESI */
1463 [ C(RESULT_MISS) ] = 0x412A, /* L2_ST.ISTATE */
1465 [ C(OP_PREFETCH) ] = {
1466 [ C(RESULT_ACCESS) ] = 0,
1467 [ C(RESULT_MISS) ] = 0,
1470 [ C(DTLB) ] = {
1471 [ C(OP_READ) ] = {
1472 [ C(RESULT_ACCESS) ] = 0x0f40, /* L1D_CACHE_LD.MESI (alias) */
1473 [ C(RESULT_MISS) ] = 0x0208, /* DTLB_MISSES.MISS_LD */
1475 [ C(OP_WRITE) ] = {
1476 [ C(RESULT_ACCESS) ] = 0x0f41, /* L1D_CACHE_ST.MESI (alias) */
1477 [ C(RESULT_MISS) ] = 0x0808, /* DTLB_MISSES.MISS_ST */
1479 [ C(OP_PREFETCH) ] = {
1480 [ C(RESULT_ACCESS) ] = 0,
1481 [ C(RESULT_MISS) ] = 0,
1484 [ C(ITLB) ] = {
1485 [ C(OP_READ) ] = {
1486 [ C(RESULT_ACCESS) ] = 0x00c0, /* INST_RETIRED.ANY_P */
1487 [ C(RESULT_MISS) ] = 0x1282, /* ITLBMISSES */
1489 [ C(OP_WRITE) ] = {
1490 [ C(RESULT_ACCESS) ] = -1,
1491 [ C(RESULT_MISS) ] = -1,
1493 [ C(OP_PREFETCH) ] = {
1494 [ C(RESULT_ACCESS) ] = -1,
1495 [ C(RESULT_MISS) ] = -1,
1498 [ C(BPU ) ] = {
1499 [ C(OP_READ) ] = {
1500 [ C(RESULT_ACCESS) ] = 0x00c4, /* BR_INST_RETIRED.ANY */
1501 [ C(RESULT_MISS) ] = 0x00c5, /* BP_INST_RETIRED.MISPRED */
1503 [ C(OP_WRITE) ] = {
1504 [ C(RESULT_ACCESS) ] = -1,
1505 [ C(RESULT_MISS) ] = -1,
1507 [ C(OP_PREFETCH) ] = {
1508 [ C(RESULT_ACCESS) ] = -1,
1509 [ C(RESULT_MISS) ] = -1,
1519 [ C(L1D) ] = {
1520 [ C(OP_READ) ] = {
1521 [ C(RESULT_ACCESS) ] = 0x2140, /* L1D_CACHE.LD */
1522 [ C(RESULT_MISS) ] = 0,
1524 [ C(OP_WRITE) ] = {
1525 [ C(RESULT_ACCESS) ] = 0x2240, /* L1D_CACHE.ST */
1526 [ C(RESULT_MISS) ] = 0,
1528 [ C(OP_PREFETCH) ] = {
1529 [ C(RESULT_ACCESS) ] = 0x0,
1530 [ C(RESULT_MISS) ] = 0,
1533 [ C(L1I ) ] = {
1534 [ C(OP_READ) ] = {
1535 [ C(RESULT_ACCESS) ] = 0x0380, /* L1I.READS */
1536 [ C(RESULT_MISS) ] = 0x0280, /* L1I.MISSES */
1538 [ C(OP_WRITE) ] = {
1539 [ C(RESULT_ACCESS) ] = -1,
1540 [ C(RESULT_MISS) ] = -1,
1542 [ C(OP_PREFETCH) ] = {
1543 [ C(RESULT_ACCESS) ] = 0,
1544 [ C(RESULT_MISS) ] = 0,
1547 [ C(LL ) ] = {
1548 [ C(OP_READ) ] = {
1549 [ C(RESULT_ACCESS) ] = 0x4f29, /* L2_LD.MESI */
1550 [ C(RESULT_MISS) ] = 0x4129, /* L2_LD.ISTATE */
1552 [ C(OP_WRITE) ] = {
1553 [ C(RESULT_ACCESS) ] = 0x4f2A, /* L2_ST.MESI */
1554 [ C(RESULT_MISS) ] = 0x412A, /* L2_ST.ISTATE */
1556 [ C(OP_PREFETCH) ] = {
1557 [ C(RESULT_ACCESS) ] = 0,
1558 [ C(RESULT_MISS) ] = 0,
1561 [ C(DTLB) ] = {
1562 [ C(OP_READ) ] = {
1563 [ C(RESULT_ACCESS) ] = 0x2140, /* L1D_CACHE_LD.MESI (alias) */
1564 [ C(RESULT_MISS) ] = 0x0508, /* DTLB_MISSES.MISS_LD */
1566 [ C(OP_WRITE) ] = {
1567 [ C(RESULT_ACCESS) ] = 0x2240, /* L1D_CACHE_ST.MESI (alias) */
1568 [ C(RESULT_MISS) ] = 0x0608, /* DTLB_MISSES.MISS_ST */
1570 [ C(OP_PREFETCH) ] = {
1571 [ C(RESULT_ACCESS) ] = 0,
1572 [ C(RESULT_MISS) ] = 0,
1575 [ C(ITLB) ] = {
1576 [ C(OP_READ) ] = {
1577 [ C(RESULT_ACCESS) ] = 0x00c0, /* INST_RETIRED.ANY_P */
1578 [ C(RESULT_MISS) ] = 0x0282, /* ITLB.MISSES */
1580 [ C(OP_WRITE) ] = {
1581 [ C(RESULT_ACCESS) ] = -1,
1582 [ C(RESULT_MISS) ] = -1,
1584 [ C(OP_PREFETCH) ] = {
1585 [ C(RESULT_ACCESS) ] = -1,
1586 [ C(RESULT_MISS) ] = -1,
1589 [ C(BPU ) ] = {
1590 [ C(OP_READ) ] = {
1591 [ C(RESULT_ACCESS) ] = 0x00c4, /* BR_INST_RETIRED.ANY */
1592 [ C(RESULT_MISS) ] = 0x00c5, /* BP_INST_RETIRED.MISPRED */
1594 [ C(OP_WRITE) ] = {
1595 [ C(RESULT_ACCESS) ] = -1,
1596 [ C(RESULT_MISS) ] = -1,
1598 [ C(OP_PREFETCH) ] = {
1599 [ C(RESULT_ACCESS) ] = -1,
1600 [ C(RESULT_MISS) ] = -1,
1649 [ C(LL ) ] = {
1650 [ C(OP_READ) ] = {
1651 [ C(RESULT_ACCESS) ] = SLM_DMND_READ|SLM_LLC_ACCESS,
1652 [ C(RESULT_MISS) ] = 0,
1654 [ C(OP_WRITE) ] = {
1655 [ C(RESULT_ACCESS) ] = SLM_DMND_WRITE|SLM_LLC_ACCESS,
1656 [ C(RESULT_MISS) ] = SLM_DMND_WRITE|SLM_LLC_MISS,
1658 [ C(OP_PREFETCH) ] = {
1659 [ C(RESULT_ACCESS) ] = SLM_DMND_PREFETCH|SLM_LLC_ACCESS,
1660 [ C(RESULT_MISS) ] = SLM_DMND_PREFETCH|SLM_LLC_MISS,
1670 [ C(L1D) ] = {
1671 [ C(OP_READ) ] = {
1672 [ C(RESULT_ACCESS) ] = 0,
1673 [ C(RESULT_MISS) ] = 0x0104, /* LD_DCU_MISS */
1675 [ C(OP_WRITE) ] = {
1676 [ C(RESULT_ACCESS) ] = 0,
1677 [ C(RESULT_MISS) ] = 0,
1679 [ C(OP_PREFETCH) ] = {
1680 [ C(RESULT_ACCESS) ] = 0,
1681 [ C(RESULT_MISS) ] = 0,
1684 [ C(L1I ) ] = {
1685 [ C(OP_READ) ] = {
1686 [ C(RESULT_ACCESS) ] = 0x0380, /* ICACHE.ACCESSES */
1687 [ C(RESULT_MISS) ] = 0x0280, /* ICACHE.MISSES */
1689 [ C(OP_WRITE) ] = {
1690 [ C(RESULT_ACCESS) ] = -1,
1691 [ C(RESULT_MISS) ] = -1,
1693 [ C(OP_PREFETCH) ] = {
1694 [ C(RESULT_ACCESS) ] = 0,
1695 [ C(RESULT_MISS) ] = 0,
1698 [ C(LL ) ] = {
1699 [ C(OP_READ) ] = {
1701 [ C(RESULT_ACCESS) ] = 0x01b7,
1702 [ C(RESULT_MISS) ] = 0,
1704 [ C(OP_WRITE) ] = {
1706 [ C(RESULT_ACCESS) ] = 0x01b7,
1708 [ C(RESULT_MISS) ] = 0x01b7,
1710 [ C(OP_PREFETCH) ] = {
1712 [ C(RESULT_ACCESS) ] = 0x01b7,
1714 [ C(RESULT_MISS) ] = 0x01b7,
1717 [ C(DTLB) ] = {
1718 [ C(OP_READ) ] = {
1719 [ C(RESULT_ACCESS) ] = 0,
1720 [ C(RESULT_MISS) ] = 0x0804, /* LD_DTLB_MISS */
1722 [ C(OP_WRITE) ] = {
1723 [ C(RESULT_ACCESS) ] = 0,
1724 [ C(RESULT_MISS) ] = 0,
1726 [ C(OP_PREFETCH) ] = {
1727 [ C(RESULT_ACCESS) ] = 0,
1728 [ C(RESULT_MISS) ] = 0,
1731 [ C(ITLB) ] = {
1732 [ C(OP_READ) ] = {
1733 [ C(RESULT_ACCESS) ] = 0x00c0, /* INST_RETIRED.ANY_P */
1734 [ C(RESULT_MISS) ] = 0x40205, /* PAGE_WALKS.I_SIDE_WALKS */
1736 [ C(OP_WRITE) ] = {
1737 [ C(RESULT_ACCESS) ] = -1,
1738 [ C(RESULT_MISS) ] = -1,
1740 [ C(OP_PREFETCH) ] = {
1741 [ C(RESULT_ACCESS) ] = -1,
1742 [ C(RESULT_MISS) ] = -1,
1745 [ C(BPU ) ] = {
1746 [ C(OP_READ) ] = {
1747 [ C(RESULT_ACCESS) ] = 0x00c4, /* BR_INST_RETIRED.ANY */
1748 [ C(RESULT_MISS) ] = 0x00c5, /* BP_INST_RETIRED.MISPRED */
1750 [ C(OP_WRITE) ] = {
1751 [ C(RESULT_ACCESS) ] = -1,
1752 [ C(RESULT_MISS) ] = -1,
1754 [ C(OP_PREFETCH) ] = {
1755 [ C(RESULT_ACCESS) ] = -1,
1756 [ C(RESULT_MISS) ] = -1,
1790 #define GLM_DEMAND_RFO BIT_ULL(1)
1804 [C(L1D)] = {
1805 [C(OP_READ)] = {
1806 [C(RESULT_ACCESS)] = 0x81d0, /* MEM_UOPS_RETIRED.ALL_LOADS */
1807 [C(RESULT_MISS)] = 0x0,
1809 [C(OP_WRITE)] = {
1810 [C(RESULT_ACCESS)] = 0x82d0, /* MEM_UOPS_RETIRED.ALL_STORES */
1811 [C(RESULT_MISS)] = 0x0,
1813 [C(OP_PREFETCH)] = {
1814 [C(RESULT_ACCESS)] = 0x0,
1815 [C(RESULT_MISS)] = 0x0,
1818 [C(L1I)] = {
1819 [C(OP_READ)] = {
1820 [C(RESULT_ACCESS)] = 0x0380, /* ICACHE.ACCESSES */
1821 [C(RESULT_MISS)] = 0x0280, /* ICACHE.MISSES */
1823 [C(OP_WRITE)] = {
1824 [C(RESULT_ACCESS)] = -1,
1825 [C(RESULT_MISS)] = -1,
1827 [C(OP_PREFETCH)] = {
1828 [C(RESULT_ACCESS)] = 0x0,
1829 [C(RESULT_MISS)] = 0x0,
1832 [C(LL)] = {
1833 [C(OP_READ)] = {
1834 [C(RESULT_ACCESS)] = 0x1b7, /* OFFCORE_RESPONSE */
1835 [C(RESULT_MISS)] = 0x1b7, /* OFFCORE_RESPONSE */
1837 [C(OP_WRITE)] = {
1838 [C(RESULT_ACCESS)] = 0x1b7, /* OFFCORE_RESPONSE */
1839 [C(RESULT_MISS)] = 0x1b7, /* OFFCORE_RESPONSE */
1841 [C(OP_PREFETCH)] = {
1842 [C(RESULT_ACCESS)] = 0x1b7, /* OFFCORE_RESPONSE */
1843 [C(RESULT_MISS)] = 0x1b7, /* OFFCORE_RESPONSE */
1846 [C(DTLB)] = {
1847 [C(OP_READ)] = {
1848 [C(RESULT_ACCESS)] = 0x81d0, /* MEM_UOPS_RETIRED.ALL_LOADS */
1849 [C(RESULT_MISS)] = 0x0,
1851 [C(OP_WRITE)] = {
1852 [C(RESULT_ACCESS)] = 0x82d0, /* MEM_UOPS_RETIRED.ALL_STORES */
1853 [C(RESULT_MISS)] = 0x0,
1855 [C(OP_PREFETCH)] = {
1856 [C(RESULT_ACCESS)] = 0x0,
1857 [C(RESULT_MISS)] = 0x0,
1860 [C(ITLB)] = {
1861 [C(OP_READ)] = {
1862 [C(RESULT_ACCESS)] = 0x00c0, /* INST_RETIRED.ANY_P */
1863 [C(RESULT_MISS)] = 0x0481, /* ITLB.MISS */
1865 [C(OP_WRITE)] = {
1866 [C(RESULT_ACCESS)] = -1,
1867 [C(RESULT_MISS)] = -1,
1869 [C(OP_PREFETCH)] = {
1870 [C(RESULT_ACCESS)] = -1,
1871 [C(RESULT_MISS)] = -1,
1874 [C(BPU)] = {
1875 [C(OP_READ)] = {
1876 [C(RESULT_ACCESS)] = 0x00c4, /* BR_INST_RETIRED.ALL_BRANCHES */
1877 [C(RESULT_MISS)] = 0x00c5, /* BR_MISP_RETIRED.ALL_BRANCHES */
1879 [C(OP_WRITE)] = {
1880 [C(RESULT_ACCESS)] = -1,
1881 [C(RESULT_MISS)] = -1,
1883 [C(OP_PREFETCH)] = {
1884 [C(RESULT_ACCESS)] = -1,
1885 [C(RESULT_MISS)] = -1,
1894 [C(LL)] = {
1895 [C(OP_READ)] = {
1896 [C(RESULT_ACCESS)] = GLM_DEMAND_READ|
1898 [C(RESULT_MISS)] = GLM_DEMAND_READ|
1901 [C(OP_WRITE)] = {
1902 [C(RESULT_ACCESS)] = GLM_DEMAND_WRITE|
1904 [C(RESULT_MISS)] = GLM_DEMAND_WRITE|
1907 [C(OP_PREFETCH)] = {
1908 [C(RESULT_ACCESS)] = GLM_DEMAND_PREFETCH|
1910 [C(RESULT_MISS)] = GLM_DEMAND_PREFETCH|
1920 [C(L1D)] = {
1921 [C(OP_READ)] = {
1922 [C(RESULT_ACCESS)] = 0x81d0, /* MEM_UOPS_RETIRED.ALL_LOADS */
1923 [C(RESULT_MISS)] = 0x0,
1925 [C(OP_WRITE)] = {
1926 [C(RESULT_ACCESS)] = 0x82d0, /* MEM_UOPS_RETIRED.ALL_STORES */
1927 [C(RESULT_MISS)] = 0x0,
1929 [C(OP_PREFETCH)] = {
1930 [C(RESULT_ACCESS)] = 0x0,
1931 [C(RESULT_MISS)] = 0x0,
1934 [C(L1I)] = {
1935 [C(OP_READ)] = {
1936 [C(RESULT_ACCESS)] = 0x0380, /* ICACHE.ACCESSES */
1937 [C(RESULT_MISS)] = 0x0280, /* ICACHE.MISSES */
1939 [C(OP_WRITE)] = {
1940 [C(RESULT_ACCESS)] = -1,
1941 [C(RESULT_MISS)] = -1,
1943 [C(OP_PREFETCH)] = {
1944 [C(RESULT_ACCESS)] = 0x0,
1945 [C(RESULT_MISS)] = 0x0,
1948 [C(LL)] = {
1949 [C(OP_READ)] = {
1950 [C(RESULT_ACCESS)] = 0x1b7, /* OFFCORE_RESPONSE */
1951 [C(RESULT_MISS)] = 0x1b7, /* OFFCORE_RESPONSE */
1953 [C(OP_WRITE)] = {
1954 [C(RESULT_ACCESS)] = 0x1b7, /* OFFCORE_RESPONSE */
1955 [C(RESULT_MISS)] = 0x1b7, /* OFFCORE_RESPONSE */
1957 [C(OP_PREFETCH)] = {
1958 [C(RESULT_ACCESS)] = 0x0,
1959 [C(RESULT_MISS)] = 0x0,
1962 [C(DTLB)] = {
1963 [C(OP_READ)] = {
1964 [C(RESULT_ACCESS)] = 0x81d0, /* MEM_UOPS_RETIRED.ALL_LOADS */
1965 [C(RESULT_MISS)] = 0xe08, /* DTLB_LOAD_MISSES.WALK_COMPLETED */
1967 [C(OP_WRITE)] = {
1968 [C(RESULT_ACCESS)] = 0x82d0, /* MEM_UOPS_RETIRED.ALL_STORES */
1969 [C(RESULT_MISS)] = 0xe49, /* DTLB_STORE_MISSES.WALK_COMPLETED */
1971 [C(OP_PREFETCH)] = {
1972 [C(RESULT_ACCESS)] = 0x0,
1973 [C(RESULT_MISS)] = 0x0,
1976 [C(ITLB)] = {
1977 [C(OP_READ)] = {
1978 [C(RESULT_ACCESS)] = 0x00c0, /* INST_RETIRED.ANY_P */
1979 [C(RESULT_MISS)] = 0x0481, /* ITLB.MISS */
1981 [C(OP_WRITE)] = {
1982 [C(RESULT_ACCESS)] = -1,
1983 [C(RESULT_MISS)] = -1,
1985 [C(OP_PREFETCH)] = {
1986 [C(RESULT_ACCESS)] = -1,
1987 [C(RESULT_MISS)] = -1,
1990 [C(BPU)] = {
1991 [C(OP_READ)] = {
1992 [C(RESULT_ACCESS)] = 0x00c4, /* BR_INST_RETIRED.ALL_BRANCHES */
1993 [C(RESULT_MISS)] = 0x00c5, /* BR_MISP_RETIRED.ALL_BRANCHES */
1995 [C(OP_WRITE)] = {
1996 [C(RESULT_ACCESS)] = -1,
1997 [C(RESULT_MISS)] = -1,
1999 [C(OP_PREFETCH)] = {
2000 [C(RESULT_ACCESS)] = -1,
2001 [C(RESULT_MISS)] = -1,
2010 [C(LL)] = {
2011 [C(OP_READ)] = {
2012 [C(RESULT_ACCESS)] = GLM_DEMAND_READ|
2014 [C(RESULT_MISS)] = GLM_DEMAND_READ|
2017 [C(OP_WRITE)] = {
2018 [C(RESULT_ACCESS)] = GLM_DEMAND_WRITE|
2020 [C(RESULT_MISS)] = GLM_DEMAND_WRITE|
2023 [C(OP_PREFETCH)] = {
2024 [C(RESULT_ACCESS)] = 0x0,
2025 [C(RESULT_MISS)] = 0x0,
2042 [C(LL)] = {
2043 [C(OP_READ)] = {
2044 [C(RESULT_ACCESS)] = TNT_DEMAND_READ|
2046 [C(RESULT_MISS)] = TNT_DEMAND_READ|
2049 [C(OP_WRITE)] = {
2050 [C(RESULT_ACCESS)] = TNT_DEMAND_WRITE|
2052 [C(RESULT_MISS)] = TNT_DEMAND_WRITE|
2055 [C(OP_PREFETCH)] = {
2056 [C(RESULT_ACCESS)] = 0x0,
2057 [C(RESULT_MISS)] = 0x0,
2110 [C(LL)] = {
2111 [C(OP_READ)] = {
2112 [C(RESULT_ACCESS)] = KNL_L2_READ | KNL_L2_ACCESS,
2113 [C(RESULT_MISS)] = 0,
2115 [C(OP_WRITE)] = {
2116 [C(RESULT_ACCESS)] = KNL_L2_WRITE | KNL_L2_ACCESS,
2117 [C(RESULT_MISS)] = KNL_L2_WRITE | KNL_L2_MISS,
2119 [C(OP_PREFETCH)] = {
2120 [C(RESULT_ACCESS)] = KNL_L2_PREFETCH | KNL_L2_ACCESS,
2121 [C(RESULT_MISS)] = KNL_L2_PREFETCH | KNL_L2_MISS,
2218 * 1) Clear MSR_IA32_PEBS_ENABLE and MSR_CORE_PERF_GLOBAL_CTRL; in intel_pmu_nhm_workaround()
2228 * A) To reduce MSR operations, we don't run step 1) as they in intel_pmu_nhm_workaround()
2232 * C) With step 5), we do clear only when the PERFEVTSELx is in intel_pmu_nhm_workaround()
2376 case 0 ... INTEL_PMC_IDX_FIXED - 1: in intel_pmu_disable_event()
2380 case INTEL_PMC_IDX_FIXED ... INTEL_PMC_IDX_FIXED_BTS - 1: in intel_pmu_disable_event()
2495 * may be reduced from 1 to 0. If so, the bad_spec event value in __icl_update_topdown_event()
2515 for_each_set_bit(idx, cpuc->active_mask, metric_end + 1) { in update_saved_topdown_regs()
2547 for_each_set_bit(idx, cpuc->active_mask, metric_end + 1) { in intel_update_topdown_event()
2591 x86_pmu.num_topdown_events - 1); in icl_update_topdown_event()
2691 case 0 ... INTEL_PMC_IDX_FIXED - 1: in intel_pmu_enable_event()
2695 case INTEL_PMC_IDX_FIXED ... INTEL_PMC_IDX_FIXED_BTS - 1: in intel_pmu_enable_event()
2948 WARN(1, "perfevents: irq loop stuck!\n"); in intel_pmu_handle_irq()
2999 struct event_constraint *c = &vlbr_constraint; in intel_vlbr_constraints() local
3001 if (unlikely(constraint_match(c, event->hw.config))) in intel_vlbr_constraints()
3002 return c; in intel_vlbr_constraints()
3056 struct event_constraint *c = &emptyconstraint; in __intel_shared_reg_get_constraints() local
3099 reg->alloc = 1; in __intel_shared_reg_get_constraints()
3113 c = NULL; in __intel_shared_reg_get_constraints()
3123 return c; in __intel_shared_reg_get_constraints()
3156 struct event_constraint *c = NULL, *d; in intel_shared_regs_constraints() local
3161 c = __intel_shared_reg_get_constraints(cpuc, event, xreg); in intel_shared_regs_constraints()
3162 if (c == &emptyconstraint) in intel_shared_regs_constraints()
3163 return c; in intel_shared_regs_constraints()
3170 c = d; in intel_shared_regs_constraints()
3173 return c; in intel_shared_regs_constraints()
3181 struct event_constraint *c; in x86_get_event_constraints() local
3184 for_each_event_constraint(c, event_constraints) { in x86_get_event_constraints()
3185 if (constraint_match(c, event->hw.config)) { in x86_get_event_constraints()
3186 event->hw.flags |= c->flags; in x86_get_event_constraints()
3187 return c; in x86_get_event_constraints()
3199 struct event_constraint *c; in __intel_get_event_constraints() local
3201 c = intel_vlbr_constraints(event); in __intel_get_event_constraints()
3202 if (c) in __intel_get_event_constraints()
3203 return c; in __intel_get_event_constraints()
3205 c = intel_bts_constraints(event); in __intel_get_event_constraints()
3206 if (c) in __intel_get_event_constraints()
3207 return c; in __intel_get_event_constraints()
3209 c = intel_shared_regs_constraints(cpuc, event); in __intel_get_event_constraints()
3210 if (c) in __intel_get_event_constraints()
3211 return c; in __intel_get_event_constraints()
3213 c = intel_pebs_constraints(event); in __intel_get_event_constraints()
3214 if (c) in __intel_get_event_constraints()
3215 return c; in __intel_get_event_constraints()
3253 struct event_constraint *c = cpuc->event_constraint[idx]; in intel_commit_scheduling() local
3263 if (!(c->flags & PERF_X86_EVENT_DYNAMIC)) in intel_commit_scheduling()
3270 if (c->flags & PERF_X86_EVENT_EXCL) in intel_commit_scheduling()
3304 dyn_constraint(struct cpu_hw_events *cpuc, struct event_constraint *c, int idx) in dyn_constraint() argument
3308 if (!(c->flags & PERF_X86_EVENT_DYNAMIC)) { in dyn_constraint()
3320 *cx = *c; in dyn_constraint()
3326 c = cx; in dyn_constraint()
3329 return c; in dyn_constraint()
3334 int idx, struct event_constraint *c) in intel_get_excl_constraints() argument
3346 return c; in intel_get_excl_constraints()
3352 return c; in intel_get_excl_constraints()
3362 c = dyn_constraint(cpuc, c, idx); in intel_get_excl_constraints()
3374 xlo = &excl_cntrs->states[tid ^ 1]; in intel_get_excl_constraints()
3380 is_excl = c->flags & PERF_X86_EVENT_EXCL; in intel_get_excl_constraints()
3384 WRITE_ONCE(excl_cntrs->has_exclusive[tid], 1); in intel_get_excl_constraints()
3395 w = c->weight; in intel_get_excl_constraints()
3396 for_each_set_bit(i, c->idxmsk, X86_PMC_IDX_MAX) { in intel_get_excl_constraints()
3403 __clear_bit(i, c->idxmsk); in intel_get_excl_constraints()
3413 __clear_bit(i, c->idxmsk); in intel_get_excl_constraints()
3425 c = &emptyconstraint; in intel_get_excl_constraints()
3427 c->weight = w; in intel_get_excl_constraints()
3429 return c; in intel_get_excl_constraints()
3554 u64 alt_config = X86_CONFIG(.event=0xc0, .inv=1, .cmask=16); in intel_pebs_aliases_core2()
3582 u64 alt_config = X86_CONFIG(.event=0xc2, .umask=0x01, .inv=1, .cmask=16); in intel_pebs_aliases_snb()
3603 * only programmed on counter 1, but that seems like an in intel_pebs_aliases_precdist()
3606 u64 alt_config = X86_CONFIG(.event=0xc0, .umask=0x01, .inv=1, .cmask=16); in intel_pebs_aliases_precdist()
3678 ((x86_pmu.num_topdown_events - 1) << 8))
3878 *nr = 1; in intel_guest_get_msrs()
3889 arr[1].msr = MSR_IA32_PEBS_ENABLE; in intel_guest_get_msrs()
3890 arr[1].host = cpuc->pebs_enabled; in intel_guest_get_msrs()
3891 arr[1].guest = 0; in intel_guest_get_msrs()
4001 struct event_constraint *c; in hsw_get_event_constraints() local
4003 c = intel_get_event_constraints(cpuc, idx, event); in hsw_get_event_constraints()
4007 if (c->idxmsk64 & (1U << 2)) in hsw_get_event_constraints()
4012 return c; in hsw_get_event_constraints()
4034 struct event_constraint *c; in spr_get_event_constraints() local
4036 c = icl_get_event_constraints(cpuc, idx, event); in spr_get_event_constraints()
4046 if (c->idxmsk64 & BIT_ULL(0)) in spr_get_event_constraints()
4052 return c; in spr_get_event_constraints()
4059 struct event_constraint *c; in glp_get_event_constraints() local
4065 c = intel_get_event_constraints(cpuc, idx, event); in glp_get_event_constraints()
4067 return c; in glp_get_event_constraints()
4074 struct event_constraint *c; in tnt_get_event_constraints() local
4088 c = intel_get_event_constraints(cpuc, idx, event); in tnt_get_event_constraints()
4090 return c; in tnt_get_event_constraints()
4099 struct event_constraint *c = hsw_get_event_constraints(cpuc, idx, event); in tfa_get_event_constraints() local
4104 if (!allow_tsx_force_abort && test_bit(3, c->idxmsk)) { in tfa_get_event_constraints()
4105 c = dyn_constraint(cpuc, c, idx); in tfa_get_event_constraints()
4106 c->idxmsk64 &= ~(1ULL << 3); in tfa_get_event_constraints()
4107 c->weight--; in tfa_get_event_constraints()
4110 return c; in tfa_get_event_constraints()
4124 WARN_ON(1); in adl_get_event_constraints()
4137 WARN_ON(1); in adl_hw_config()
4226 regs->core_id = -1; in allocate_shared_regs()
4233 struct intel_excl_cntrs *c; in allocate_excl_cntrs() local
4235 c = kzalloc_node(sizeof(struct intel_excl_cntrs), in allocate_excl_cntrs()
4237 if (c) { in allocate_excl_cntrs()
4238 raw_spin_lock_init(&c->lock); in allocate_excl_cntrs()
4239 c->core_id = -1; in allocate_excl_cntrs()
4241 return c; in allocate_excl_cntrs()
4319 if (WARN_ON_ONCE(!pmu || (pmu->pmu.type == -1))) { in init_hybrid_pmu()
4373 if (x86_pmu.version > 1) in intel_pmu_cpu_starting()
4391 x86_pmu.intel_ctrl &= ~(1ULL << GLOBAL_CTRL_EN_PERF_METRICS); in intel_pmu_cpu_starting()
4419 struct intel_excl_cntrs *c; in intel_pmu_cpu_starting() local
4422 c = sibling->excl_cntrs; in intel_pmu_cpu_starting()
4423 if (c && c->core_id == core_id) { in intel_pmu_cpu_starting()
4424 cpuc->kfree_on_online[1] = cpuc->excl_cntrs; in intel_pmu_cpu_starting()
4425 cpuc->excl_cntrs = c; in intel_pmu_cpu_starting()
4427 cpuc->excl_thread_id = 1; in intel_pmu_cpu_starting()
4438 struct intel_excl_cntrs *c; in free_excl_cntrs() local
4440 c = cpuc->excl_cntrs; in free_excl_cntrs()
4441 if (c) { in free_excl_cntrs()
4442 if (c->core_id == -1 || --c->refcnt == 0) in free_excl_cntrs()
4443 kfree(c); in free_excl_cntrs()
4462 if (pc->core_id == -1 || --pc->refcnt == 0) in intel_cpuc_finish()
4568 .apic = 1,
4573 * so we install an artificial 1<<31 period regardless of
4576 .max_period = (1ULL<<31) - 1,
4586 * together with PMU version 1 and thus be using core_pmu with
4619 .apic = 1,
4623 * so we install an artificial 1<<31 period regardless of
4626 .max_period = (1ULL << 31) - 1,
4681 INTEL_CPU_DESC(INTEL_FAM6_HASWELL_L, 1, 0x0000001e),
4682 INTEL_CPU_DESC(INTEL_FAM6_HASWELL_G, 1, 0x00000015),
4686 INTEL_CPU_DESC(INTEL_FAM6_BROADWELL_G, 1, 0x00000014),
4691 INTEL_CPU_DESC(INTEL_FAM6_BROADWELL_X, 1, 0x0b000014),
4748 x86_pmu.pebs_broken = 1; in intel_snb_check_microcode()
4896 EVENT_ATTR_STR(cycles-t, cycles_t, "event=0x3c,in_tx=1");
4897 EVENT_ATTR_STR(cycles-ct, cycles_ct, "event=0x3c,in_tx=1,in_tx_cp=1");
5026 if (val > 1) in freeze_on_smi_store()
5037 on_each_cpu(flip_smm_bit, &val, 1); in freeze_on_smi_store()
5082 on_each_cpu(update_tfa_sched, NULL, 1); in set_sysctl_tfa()
5255 EVENT_ATTR_STR_HYBRID(cycles-t, cycles_t_adl, "event=0x3c,in_tx=1", hy…
5256 EVENT_ATTR_STR_HYBRID(cycles-ct, cycles_ct_adl, "event=0x3c,in_tx=1,in_tx_cp=1", hy…
5315 return (cpu >= nr_cpu_ids) ? -1 : cpu; in hybrid_find_supported_cpu()
5401 WARN(1, KERN_ERR "hw perf events %d > max(%d), clipping!", in intel_pmu_check_num_counters()
5405 *intel_ctrl = (1ULL << *num_counters) - 1; in intel_pmu_check_num_counters()
5408 WARN(1, KERN_ERR "hw perf events fixed %d > max(%d), clipping!", in intel_pmu_check_num_counters()
5421 struct event_constraint *c; in intel_pmu_check_event_constraints() local
5430 for_each_event_constraint(c, event_constraints) { in intel_pmu_check_event_constraints()
5435 if (c->idxmsk64 & INTEL_PMC_MSK_TOPDOWN) { in intel_pmu_check_event_constraints()
5441 c->idxmsk64 = 0; in intel_pmu_check_event_constraints()
5442 c->weight = hweight64(c->idxmsk64); in intel_pmu_check_event_constraints()
5446 if (c->cmask == FIXED_EVENT_FLAGS) { in intel_pmu_check_event_constraints()
5448 c->idxmsk64 &= intel_ctrl; in intel_pmu_check_event_constraints()
5450 if (c->idxmsk64 != INTEL_PMC_MSK_FIXED_REF_CYCLES) in intel_pmu_check_event_constraints()
5451 c->idxmsk64 |= (1ULL << num_counters) - 1; in intel_pmu_check_event_constraints()
5453 c->idxmsk64 &= in intel_pmu_check_event_constraints()
5455 c->weight = hweight64(c->idxmsk64); in intel_pmu_check_event_constraints()
5493 pmu->intel_ctrl |= 1ULL << GLOBAL_CTRL_EN_PERF_METRICS; in intel_pmu_check_hybrid_pmus()
5554 x86_pmu.cntval_mask = (1ULL << eax.split.bit_width) - 1; in intel_pmu_init()
5565 if (version > 1 && version < 5) { in intel_pmu_init()
5571 fixed_mask = (1L << x86_pmu.num_counters_fixed) - 1; in intel_pmu_init()
5647 X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
5648 /* UOPS_EXECUTED.CORE_ACTIVE_CYCLES,c=1,i=1 */ in intel_pmu_init()
5650 X86_CONFIG(.event=0xb1, .umask=0x3f, .inv=1, .cmask=1); in intel_pmu_init()
5654 x86_pmu.pebs_no_tlb = 1; in intel_pmu_init()
5763 hw_cache_event_ids[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = -1; in intel_pmu_init()
5804 X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
5805 /* UOPS_EXECUTED.CORE_ACTIVE_CYCLES,c=1,i=1 */ in intel_pmu_init()
5807 X86_CONFIG(.event=0xb1, .umask=0x3f, .inv=1, .cmask=1); in intel_pmu_init()
5842 /* UOPS_ISSUED.ANY,c=1,i=1 to count stall cycles */ in intel_pmu_init()
5844 X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
5845 /* UOPS_DISPATCHED.THREAD,c=1,i=1 to count stall cycles*/ in intel_pmu_init()
5847 X86_CONFIG(.event=0xb1, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
5861 …hw_cache_event_ids[C(DTLB)][C(OP_READ)][C(RESULT_MISS)] = 0x8108; /* DTLB_LOAD_MISSES.DEMAND_LD_MI… in intel_pmu_init()
5883 /* UOPS_ISSUED.ANY,c=1,i=1 to count stall cycles */ in intel_pmu_init()
5885 X86_CONFIG(.event=0x0e, .umask=0x01, .inv=1, .cmask=1); in intel_pmu_init()
5937 hw_cache_extra_regs[C(LL)][C(OP_READ)][C(RESULT_MISS)] = HSW_DEMAND_READ | in intel_pmu_init()
5939 hw_cache_extra_regs[C(LL)][C(OP_WRITE)][C(RESULT_MISS)] = HSW_DEMAND_WRITE|BDW_L3_MISS| in intel_pmu_init()
5941 hw_cache_extra_regs[C(NODE)][C(OP_READ)][C(RESULT_ACCESS)] = HSW_DEMAND_READ| in intel_pmu_init()
5943 hw_cache_extra_regs[C(NODE)][C(OP_WRITE)][C(RESULT_ACCESS)] = HSW_DEMAND_WRITE| in intel_pmu_init()
6004 /* INT_MISC.RECOVERY_CYCLES has umask 1 in Skylake */ in intel_pmu_init()
6006 "event=0xd,umask=0x1,cmask=1"; in intel_pmu_init()
6008 "event=0xd,umask=0x1,cmask=1,any=1"; in intel_pmu_init()
6058 hw_cache_event_ids[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = -1; in intel_pmu_init()
6179 pmu->num_counters_fixed = x86_pmu.num_counters_fixed + 1; in intel_pmu_init()
6186 __EVENT_CONSTRAINT(0, (1ULL << pmu->num_counters) - 1, in intel_pmu_init()
6189 pmu->intel_cap.perf_metrics = 1; in intel_pmu_init()
6207 __EVENT_CONSTRAINT(0, (1ULL << pmu->num_counters) - 1, in intel_pmu_init()
6211 pmu->intel_cap.pebs_output_pt_available = 1; in intel_pmu_init()
6215 pmu->hw_cache_event_ids[C(ITLB)][C(OP_READ)][C(RESULT_ACCESS)] = -1; in intel_pmu_init()
6225 case 1: in intel_pmu_init()
6294 x86_pmu.max_period = x86_pmu.cntval_mask >> 1; in intel_pmu_init()
6300 x86_pmu.intel_ctrl |= 1ULL << GLOBAL_CTRL_EN_PERF_METRICS; in intel_pmu_init()
6316 int c; in fixup_ht_bug() local
6323 if (topology_max_smt_threads() > 1) { in fixup_ht_bug()
6340 for_each_online_cpu(c) in fixup_ht_bug()
6341 free_excl_cntrs(&per_cpu(cpu_hw_events, c)); in fixup_ht_bug()