--- a/drivers/gpu/drm/amd/amdgpu/amdgpu.h
+++ b/drivers/gpu/drm/amd/amdgpu/amdgpu.h
u32 ref_and_mask_cp9;
u32 ref_and_mask_sdma0;
u32 ref_and_mask_sdma1;
+ u32 ref_and_mask_sdma2;
+ u32 ref_and_mask_sdma3;
+ u32 ref_and_mask_sdma4;
+ u32 ref_and_mask_sdma5;
+ u32 ref_and_mask_sdma6;
+ u32 ref_and_mask_sdma7;
};
struct amdgpu_mmio_remap {
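This grows the HDP-flush table from two SDMA ref/mask fields to eight, one per SDMA instance on Arcturus-class (nbio v7.4.1) parts. For reference, a standalone sketch of the structure after the change, with the field list as in the upstream amdgpu.h of this era and u32 typedef'd so the snippet compiles outside the kernel:

#include <stdint.h>

typedef uint32_t u32;

struct nbio_hdp_flush_reg {
	u32 ref_and_mask_cp0;
	u32 ref_and_mask_cp1;
	u32 ref_and_mask_cp2;
	u32 ref_and_mask_cp3;
	u32 ref_and_mask_cp4;
	u32 ref_and_mask_cp5;
	u32 ref_and_mask_cp6;
	u32 ref_and_mask_cp7;
	u32 ref_and_mask_cp8;
	u32 ref_and_mask_cp9;
	u32 ref_and_mask_sdma0;
	u32 ref_and_mask_sdma1;
	u32 ref_and_mask_sdma2;	/* new: SDMA instances 2..7 */
	u32 ref_and_mask_sdma3;
	u32 ref_and_mask_sdma4;
	u32 ref_and_mask_sdma5;
	u32 ref_and_mask_sdma6;
	u32 ref_and_mask_sdma7;
};

Each field holds one engine's bit in the GPU_HDP_FLUSH_REQ/DONE register pair, so a ring can request a flush and poll for its own completion bit.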
--- a/drivers/gpu/drm/amd/amdgpu/nbio_v7_4.c
+++ b/drivers/gpu/drm/amd/amdgpu/nbio_v7_4.c
#define smnNBIF_MGCG_CTRL_LCLK 0x1013a21c
+/*
+ * These are nbio v7_4_1 register masks. Temporarily define them here since
+ * the nbio v7_4_1 header is incomplete.
+ */
+#define GPU_HDP_FLUSH_DONE__RSVD_ENG0_MASK 0x00001000L
+#define GPU_HDP_FLUSH_DONE__RSVD_ENG1_MASK 0x00002000L
+#define GPU_HDP_FLUSH_DONE__RSVD_ENG2_MASK 0x00004000L
+#define GPU_HDP_FLUSH_DONE__RSVD_ENG3_MASK 0x00008000L
+#define GPU_HDP_FLUSH_DONE__RSVD_ENG4_MASK 0x00010000L
+#define GPU_HDP_FLUSH_DONE__RSVD_ENG5_MASK 0x00020000L
+
static void nbio_v7_4_remap_hdp_registers(struct amdgpu_device *adev)
{
WREG32_SOC15(NBIO, 0, mmREMAP_HDP_MEM_FLUSH_CNTL,
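The RSVD_ENG0..5 masks added above occupy bits 12 through 17 of GPU_HDP_FLUSH_DONE, directly above SDMA0 (bit 10, mask 0x00000400L in the vega10 nbio headers) and SDMA1 (bit 11). A minimal compile-time sketch of that contiguity, which the shift introduced in the sdma_v4_0.c hunk below relies on; the SDMA0/SDMA1 values are restated here from the vega10 headers, not pulled in by an include:

#include <assert.h>	/* C11 static_assert */

#define SDMA0_MASK	0x00000400L	/* GPU_HDP_FLUSH_DONE__SDMA0_MASK, bit 10 */
#define SDMA1_MASK	0x00000800L	/* GPU_HDP_FLUSH_DONE__SDMA1_MASK, bit 11 */
#define RSVD_ENG0_MASK	0x00001000L	/* bit 12 */
#define RSVD_ENG5_MASK	0x00020000L	/* bit 17 */

/* the sdma0..sdma7 DONE bits must be adjacent for "sdma0 mask << me" to work */
static_assert(SDMA1_MASK == SDMA0_MASK << 1, "sdma1 bit follows sdma0");
static_assert(RSVD_ENG0_MASK == SDMA0_MASK << 2, "sdma2 (RSVD_ENG0) follows sdma1");
static_assert(RSVD_ENG5_MASK == SDMA0_MASK << 7, "sdma7 (RSVD_ENG5) is bit 17");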
static const struct nbio_hdp_flush_reg nbio_v7_4_hdp_flush_reg = {
.ref_and_mask_cp9 = GPU_HDP_FLUSH_DONE__CP9_MASK,
.ref_and_mask_sdma0 = GPU_HDP_FLUSH_DONE__SDMA0_MASK,
.ref_and_mask_sdma1 = GPU_HDP_FLUSH_DONE__SDMA1_MASK,
+ .ref_and_mask_sdma2 = GPU_HDP_FLUSH_DONE__RSVD_ENG0_MASK,
+ .ref_and_mask_sdma3 = GPU_HDP_FLUSH_DONE__RSVD_ENG1_MASK,
+ .ref_and_mask_sdma4 = GPU_HDP_FLUSH_DONE__RSVD_ENG2_MASK,
+ .ref_and_mask_sdma5 = GPU_HDP_FLUSH_DONE__RSVD_ENG3_MASK,
+ .ref_and_mask_sdma6 = GPU_HDP_FLUSH_DONE__RSVD_ENG4_MASK,
+ .ref_and_mask_sdma7 = GPU_HDP_FLUSH_DONE__RSVD_ENG5_MASK,
};
static void nbio_v7_4_detect_hw_virt(struct amdgpu_device *adev)
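With the initializer above, SDMA instance N always signals completion on bit 10 + N. A small userspace cross-check (mask values copied from this patch and the vega10 headers, nothing read from hardware) that the table agrees with the shift form the next hunk switches to:

#include <stdio.h>

int main(void)
{
	/* per-instance DONE masks, in initializer order */
	static const unsigned int table[8] = {
		0x00000400,	/* SDMA0 */
		0x00000800,	/* SDMA1 */
		0x00001000,	/* RSVD_ENG0 -> sdma2 */
		0x00002000,	/* RSVD_ENG1 -> sdma3 */
		0x00004000,	/* RSVD_ENG2 -> sdma4 */
		0x00008000,	/* RSVD_ENG3 -> sdma5 */
		0x00010000,	/* RSVD_ENG4 -> sdma6 */
		0x00020000,	/* RSVD_ENG5 -> sdma7 */
	};

	for (int me = 0; me < 8; me++)
		printf("sdma%d: table 0x%05x, shifted 0x%05x -> %s\n", me,
		       table[me], 0x400u << me,
		       table[me] == 0x400u << me ? "match" : "MISMATCH");
	return 0;
}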
--- a/drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
+++ b/drivers/gpu/drm/amd/amdgpu/sdma_v4_0.c
static void sdma_v4_0_ring_emit_hdp_flush(struct amdgpu_ring *ring)
{
struct amdgpu_device *adev = ring->adev;
u32 ref_and_mask = 0;
const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio_funcs->hdp_flush_reg;
- if (ring->me == 0)
- ref_and_mask = nbio_hf_reg->ref_and_mask_sdma0;
- else
- ref_and_mask = nbio_hf_reg->ref_and_mask_sdma1;
+ ref_and_mask = nbio_hf_reg->ref_and_mask_sdma0 << ring->me;
sdma_v4_0_wait_reg_mem(ring, 0, 1,
adev->nbio_funcs->get_hdp_flush_done_offset(adev),
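The removed branch could only select between two instances; because the eight DONE bits are adjacent, shifting the SDMA0 mask by ring->me covers sdma0 through sdma7 in one expression. A standalone sketch of the selection plus the request/poll cycle that the sdma_v4_0_wait_reg_mem() call encodes as an SDMA poll packet. mmio_read()/mmio_write() are hypothetical stand-ins for the driver's register accessors, and the real packet polls on the GPU with the retry interval passed as the final argument (10) rather than spinning on the CPU:

#include <stdint.h>

#define SDMA0_DONE_MASK 0x00000400u	/* GPU_HDP_FLUSH_DONE bit 10 */

extern uint32_t mmio_read(uint32_t offset);		/* hypothetical accessor */
extern void mmio_write(uint32_t offset, uint32_t val);	/* hypothetical accessor */

static void sdma_hdp_flush(unsigned int me, uint32_t req_off, uint32_t done_off)
{
	/* valid only while the DONE bits for sdma0..7 stay adjacent and
	 * me < 8; the kernel code trusts ring->me to be a valid index */
	uint32_t ref_and_mask = SDMA0_DONE_MASK << me;

	mmio_write(req_off, ref_and_mask);	/* kick the HDP flush */
	while ((mmio_read(done_off) & ref_and_mask) != ref_and_mask)
		;	/* wait for this instance's DONE bit */
}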