| 1 | /* SPDX-License-Identifier: GPL-2.0 |
| 2 | * |
| 3 | * Copyright 2020-2022 HabanaLabs, Ltd. |
| 4 | * All Rights Reserved. |
| 5 | * |
| 6 | */ |
| 7 | |
| 8 | #ifndef GAUDI2_MASKS_H_ |
| 9 | #define GAUDI2_MASKS_H_ |
| 10 | |
| 11 | #include "../include/gaudi2/asic_reg/gaudi2_regs.h" |
| 12 | |
| 13 | /* Useful masks for bits in various registers */ |
/*
 * QM_GLBL_ERR_CFG: enable error-message reporting for every queue engine
 * instance — 4 PQF bits (0xF), 5 CQF bits (0x1F) and 5 CP bits (0x1F).
 * Built from the PDMA0_QM field shifts but named generically; the QMAN
 * register layout is presumably identical across all engines' queue
 * managers (NOTE(review): confirm against the register spec).
 */
#define QMAN_GLBL_ERR_CFG_MSG_EN_MASK \
	((0xF << PDMA0_QM_GLBL_ERR_CFG_PQF_ERR_MSG_EN_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_ERR_CFG_CQF_ERR_MSG_EN_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_ERR_CFG_CP_ERR_MSG_EN_SHIFT))

/*
 * QM_GLBL_ERR_CFG: stop-on-error enable bits for the same PQF/CQF/CP
 * instances as above, plus the single ARB stop-on-error bit.
 */
#define QMAN_GLBL_ERR_CFG_STOP_ON_ERR_EN_MASK \
	((0xF << PDMA0_QM_GLBL_ERR_CFG_PQF_STOP_ON_ERR_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_ERR_CFG_CQF_STOP_ON_ERR_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_ERR_CFG_CP_STOP_ON_ERR_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_ERR_CFG_ARB_STOP_ON_ERR_SHIFT))

/* QM_GLBL_ERR_CFG1: single CQF error-message enable bit */
#define QMAN_GLBL_ERR_CFG1_MSG_EN_MASK \
	(0x1 << PDMA0_QM_GLBL_ERR_CFG1_CQF_ERR_MSG_EN_SHIFT)

/* QM_GLBL_ERR_CFG1: stop-on-error bits for the CQF and ARC CQF */
#define QMAN_GLBL_ERR_CFG1_STOP_ON_ERR_EN_MASK \
	((0x1 << PDMA0_QM_GLBL_ERR_CFG1_CQF_STOP_ON_ERR_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_ERR_CFG1_ARC_STOP_ON_ERR_SHIFT))
| 31 | |
/*
 * LBW write-data value for a PQC completion: sets a sync-object value of 1
 * with the increment bit set (SOB "+1" operation rather than overwrite).
 */
#define QM_PQC_LBW_WDATA \
	((1 << DCORE0_SYNC_MNGR_OBJS_SOB_OBJ_VAL_SHIFT) | \
	(1 << DCORE0_SYNC_MNGR_OBJS_SOB_OBJ_INC_SHIFT))

/*
 * QM_GLBL_PROT value that marks a QMAN as trusted: privilege the 4 PQF
 * instances plus the ERR and PQC paths.
 */
#define QMAN_MAKE_TRUSTED \
	((0xF << PDMA0_QM_GLBL_PROT_PQF_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_PROT_ERR_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_PROT_PQC_SHIFT))

/*
 * Test-mode variant of QMAN_MAKE_TRUSTED: additionally privileges the CQF
 * and CP instances (4 bits each here - NOTE(review): confirm widths match
 * the GLBL_PROT field layout, they differ from the 5-bit CQF/CP enable
 * fields used elsewhere in this file).
 */
#define QMAN_MAKE_TRUSTED_TEST_MODE \
	((0xF << PDMA0_QM_GLBL_PROT_PQF_SHIFT) | \
	(0xF << PDMA0_QM_GLBL_PROT_CQF_SHIFT) | \
	(0xF << PDMA0_QM_GLBL_PROT_CP_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_PROT_ERR_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_PROT_PQC_SHIFT))
| 47 | |
/*
 * QM_GLBL_CFG0 enable value for a generic QMAN: all 4 PQF, 5 CQF and
 * 5 CP instances plus the ARC CQF.
 */
#define QMAN_ENABLE \
	((0xF << PDMA0_QM_GLBL_CFG0_PQF_EN_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_CFG0_CQF_EN_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_CFG0_CP_EN_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_CFG0_ARC_CQF_EN_SHIFT))

/*
 * PDMA0-specific enable: identical to QMAN_ENABLE except only two PQF
 * instances are enabled (0x3 instead of 0xF).
 */
#define PDMA0_QMAN_ENABLE \
	((0x3 << PDMA0_QM_GLBL_CFG0_PQF_EN_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_CFG0_CQF_EN_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_CFG0_CP_EN_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_CFG0_ARC_CQF_EN_SHIFT))

/*
 * PDMA1-specific enable: identical to QMAN_ENABLE except only one PQF
 * instance is enabled (0x1 instead of 0xF).
 */
#define PDMA1_QMAN_ENABLE \
	((0x1 << PDMA0_QM_GLBL_CFG0_PQF_EN_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_CFG0_CQF_EN_SHIFT) | \
	(0x1F << PDMA0_QM_GLBL_CFG0_CP_EN_SHIFT) | \
	(0x1 << PDMA0_QM_GLBL_CFG0_ARC_CQF_EN_SHIFT))
| 65 | |
/* QM_IDLE_MASK is valid for the QM idle check of all engines */
#define QM_IDLE_MASK		(DCORE0_EDMA0_QM_GLBL_STS0_PQF_IDLE_MASK | \
				DCORE0_EDMA0_QM_GLBL_STS0_CQF_IDLE_MASK | \
				DCORE0_EDMA0_QM_GLBL_STS0_CP_IDLE_MASK)

/* ARC CQF idle bit in QM_GLBL_STS1, checked separately from QM_IDLE_MASK */
#define QM_ARC_IDLE_MASK	DCORE0_EDMA0_QM_GLBL_STS1_ARC_CQF_IDLE_MASK

/*
 * MME idle condition: all SB/AGU/WBC pipelines empty or idle and the
 * internal QM both idle and ready.
 */
#define MME_ARCH_IDLE_MASK \
	(DCORE0_MME_CTRL_LO_ARCH_STATUS_SB_IN_EMPTY_MASK | \
	DCORE0_MME_CTRL_LO_ARCH_STATUS_AGU_COUT_SM_IDLE_MASK | \
	DCORE0_MME_CTRL_LO_ARCH_STATUS_WBC_AXI_IDLE_MASK | \
	DCORE0_MME_CTRL_LO_ARCH_STATUS_SB_IN_AXI_IDLE_MASK | \
	DCORE0_MME_CTRL_LO_ARCH_STATUS_QM_IDLE_MASK | \
	DCORE0_MME_CTRL_LO_ARCH_STATUS_QM_RDY_MASK)

/*
 * TPC idle condition: scalar pipe, instruction queue and SB empty, and
 * the internal QM both idle and ready.
 */
#define TPC_IDLE_MASK		(DCORE0_TPC0_CFG_STATUS_SCALAR_PIPE_EMPTY_MASK | \
				DCORE0_TPC0_CFG_STATUS_IQ_EMPTY_MASK | \
				DCORE0_TPC0_CFG_STATUS_SB_EMPTY_MASK | \
				DCORE0_TPC0_CFG_STATUS_QM_IDLE_MASK | \
				DCORE0_TPC0_CFG_STATUS_QM_RDY_MASK)

/* CGM_STS agent-idle bit (bit 8) - not auto-generated, defined here */
#define DCORE0_TPC0_QM_CGM_STS_AGENT_IDLE_MASK 0x100

/* EML debug-counter exit bit (bit 6) - not auto-generated, defined here */
#define DCORE0_TPC0_EML_CFG_DBG_CNT_DBG_EXIT_MASK 0x40

/* CGM_IDLE_MASK is valid for the CGM idle check of all engines */
#define CGM_IDLE_MASK		DCORE0_TPC0_QM_CGM_STS_AGENT_IDLE_MASK
| 93 | |
/*
 * Generic aliases for the QM_GLBL_CFG1/CFG2 stop and flush control bits,
 * defined via the PDMA0 instance's field masks.
 */
#define QM_GLBL_CFG1_PQF_STOP		PDMA0_QM_GLBL_CFG1_PQF_STOP_MASK
#define QM_GLBL_CFG1_CQF_STOP		PDMA0_QM_GLBL_CFG1_CQF_STOP_MASK
#define QM_GLBL_CFG1_CP_STOP		PDMA0_QM_GLBL_CFG1_CP_STOP_MASK
#define QM_GLBL_CFG1_PQF_FLUSH		PDMA0_QM_GLBL_CFG1_PQF_FLUSH_MASK
#define QM_GLBL_CFG1_CQF_FLUSH		PDMA0_QM_GLBL_CFG1_CQF_FLUSH_MASK
#define QM_GLBL_CFG1_CP_FLUSH		PDMA0_QM_GLBL_CFG1_CP_FLUSH_MASK

#define QM_GLBL_CFG2_ARC_CQF_STOP	PDMA0_QM_GLBL_CFG2_ARC_CQF_STOP_MASK
#define QM_GLBL_CFG2_ARC_CQF_FLUSH	PDMA0_QM_GLBL_CFG2_ARC_CQF_FLUSH_MASK
| 103 | |
/*
 * Arbiter error-message enable bits. "CHOISE" is kept as-is (presumably a
 * misspelling of "CHOICE" inherited from the hardware spec); these names
 * are used by other driver files, so renaming would break callers.
 */
#define QM_ARB_ERR_MSG_EN_CHOISE_OVF_MASK	0x1	/* choice overflow */
#define QM_ARB_ERR_MSG_EN_CHOISE_WDT_MASK	0x2	/* choice watchdog */
#define QM_ARB_ERR_MSG_EN_AXI_LBW_ERR_MASK	0x4	/* LBW AXI error */

/* Enable all three arbiter error-message sources at once */
#define QM_ARB_ERR_MSG_EN_MASK	(\
				QM_ARB_ERR_MSG_EN_CHOISE_OVF_MASK |\
				QM_ARB_ERR_MSG_EN_CHOISE_WDT_MASK |\
				QM_ARB_ERR_MSG_EN_AXI_LBW_ERR_MASK)

/* PCIe auxiliary FLR (function-level reset) control bits */
#define PCIE_AUX_FLR_CTRL_HW_CTRL_MASK		0x1
#define PCIE_AUX_FLR_CTRL_INT_MASK_MASK		0x2
| 115 | |
/*
 * MME accumulator interrupt-mask bits: WBC error response (2 bits) and
 * the floating-point special-value detectors on source and result
 * (positive/negative infinity, NaN).
 */
#define MME_ACC_INTR_MASK_WBC_ERR_RESP_MASK		GENMASK(1, 0)
#define MME_ACC_INTR_MASK_AP_SRC_POS_INF_MASK		BIT(2)
#define MME_ACC_INTR_MASK_AP_SRC_NEG_INF_MASK		BIT(3)
#define MME_ACC_INTR_MASK_AP_SRC_NAN_MASK		BIT(4)
#define MME_ACC_INTR_MASK_AP_RESULT_POS_INF_MASK	BIT(5)
#define MME_ACC_INTR_MASK_AP_RESULT_NEG_INF_MASK	BIT(6)

/*
 * Sync-manager CQ LBW-to-HBW (L2H) address translation constants. An
 * address matches when (addr & MASK_VAL) == CMPR_VAL; the low 12 bits of
 * the translated address come from bits 31:20 (NOTE(review): exact
 * translation semantics inferred from the mask/shift values - confirm
 * against the sync-manager spec).
 */
#define SM_CQ_L2H_MASK_VAL	0xFFFFFFFFFC000000ull
#define SM_CQ_L2H_CMPR_VAL	0x1000007FFC000000ull
#define SM_CQ_L2H_LOW_MASK	GENMASK(31, 20)
#define SM_CQ_L2H_LOW_SHIFT	20
| 127 | |
/* HMMU field masks extracted from the auto-generated register headers */
#define MMU_STATIC_MULTI_PAGE_SIZE_HOP4_PAGE_SIZE_MASK \
	REG_FIELD_MASK(DCORE0_HMMU0_MMU_STATIC_MULTI_PAGE_SIZE, HOP4_PAGE_SIZE)
#define STLB_HOP_CONFIGURATION_ONLY_LARGE_PAGE_MASK \
	REG_FIELD_MASK(DCORE0_HMMU0_STLB_HOP_CONFIGURATION, ONLY_LARGE_PAGE)

/* AXUSER HB security fields: ASID in bits 9:0, MMU-bypass in bit 10 */
#define AXUSER_HB_SEC_ASID_MASK		0x3FF
#define AXUSER_HB_SEC_MMBP_MASK		0x400

/*
 * NOTE(review): despite the name, this covers BOTH the ASID field and the
 * MMU-bypass bit (bits 10:0) - it is a combined-field mask, not an
 * ASID-only mask.
 */
#define MMUBP_ASID_MASK (AXUSER_HB_SEC_ASID_MASK | AXUSER_HB_SEC_MMBP_MASK)

/* Rotator MSS halt bits: write-back, read-side and merged read-side buffers */
#define ROT_MSS_HALT_WBC_MASK		BIT(0)
#define ROT_MSS_HALT_RSB_MASK		BIT(1)
#define ROT_MSS_HALT_MRSB_MASK		BIT(2)
| 141 | |
/*
 * Hand-written shift/mask pairs for registers without auto-generated field
 * headers; in every pair below, MASK == 1 << SHIFT.
 */

/* MSI-X address-match enable bit in the PCIe DBI low address register */
#define PCIE_DBI_MSIX_ADDRESS_MATCH_LOW_OFF_MSIX_ADDRESS_MATCH_EN_SHIFT	0
#define PCIE_DBI_MSIX_ADDRESS_MATCH_LOW_OFF_MSIX_ADDRESS_MATCH_EN_MASK	0x1

/* Sync-object sign bit (bit 15) of the SM SOB value field */
#define DCORE0_SYNC_MNGR_OBJS_SOB_OBJ_SIGN_SHIFT			15
#define DCORE0_SYNC_MNGR_OBJS_SOB_OBJ_SIGN_MASK				0x8000

/*
 * PCIe wrap SEI interrupt indication: bits 2:0 are the pending-interrupt
 * flags (AXI error, LBW AXI error, bad access), bits 5:3 are the
 * corresponding per-source mask bits.
 */
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_AXI_ERR_INTR_SHIFT		0
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_AXI_ERR_INTR_MASK		0x1
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_AXI_LBW_ERR_INTR_SHIFT		1
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_AXI_LBW_ERR_INTR_MASK		0x2
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_BAD_ACCESS_INTR_SHIFT		2
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_BAD_ACCESS_INTR_MASK		0x4
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_AXI_ERR_INTR_MASK_SHIFT		3
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_AXI_ERR_INTR_MASK_MASK		0x8
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_AXI_LBW_ERR_INTR_MASK_SHIFT	4
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_AXI_LBW_ERR_INTR_MASK_MASK	0x10
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_BAD_ACCESS_INTR_MASK_SHIFT	5
#define PCIE_WRAP_PCIE_IC_SEI_INTR_IND_BAD_ACCESS_INTR_MASK_MASK	0x20
| 160 | |
| 161 | #endif /* GAUDI2_MASKS_H_ */ |
| 162 | |