minerva: update to v1.4

- Correct Zqlatch period checks
- Update periodic training
- Simplify some logic
- Fix some mr13 values
- Separate EMC channel enums from macros
- Add extra reg flushes
- Fix tree margin comparison signedness
 Because the tree margins were compared with incorrect signedness, the delta taps would always be applied.
 Casting the margins to signed integers makes the check correctly decide whether delta taps should be applied to the new trimmers.
 This fixes a bug present in all of Nvidia's EMC DVFS code.
This commit is contained in:
CTCaer 2021-05-11 10:23:08 +03:00
parent 253de81a6b
commit 05833bb38c
3 changed files with 154 additions and 194 deletions

View file

@ -94,8 +94,8 @@ enum tree_update_mode_t
enum emc_channels
{
EMC_CH0 = 0,
EMC_CH1 = 1
EMC_CHANNEL0 = 0,
EMC_CHANNEL1 = 1
};
enum EMC_2X_CLK_SRC

View file

@ -179,7 +179,8 @@
#define TIMING_UPDATE_STALLED (1 << 23)
#define MRR_DIVLD (1 << 20)
#define IN_SELF_REFRESH_MASK (3 << 8)
#define IN_POWERDOWN_MASK (3 << 4)
#define IN_POWERDOWN_BOTH_MASK (3 << 4)
#define IN_POWERDOWN_1DEV_MASK (1 << 4)
#define REQ_FIFO_EMPTY (1 << 0)
#define EMC_CFG_2 0x2B8

View file

@ -32,6 +32,8 @@
//#define OVERCLOCK_FREQ 1862400
//#define OVERCLOCK_VOLTAGE 1200000 // Default is 1100mV and in HOS 1125mV.
#define PERF_HACK
bool emc_2X_clk_src_is_pllmb;
bool fsp_for_src_freq;
bool train_ram_patterns;
@ -1097,16 +1099,15 @@ done:
static void _request_mmr_data(u32 data, bool dual_channel)
{
EMC(EMC_MRR) = data;
_wait_emc_status(EMC_EMC_STATUS, MRR_DIVLD, true, EMC_CH0);
_wait_emc_status(EMC_EMC_STATUS, MRR_DIVLD, true, EMC_CHANNEL0);
if (dual_channel)
_wait_emc_status(EMC_EMC_STATUS, MRR_DIVLD, true, EMC_CH1);
_wait_emc_status(EMC_EMC_STATUS, MRR_DIVLD, true, EMC_CHANNEL1);
}
static u32 _start_periodic_compensation()
static void _start_periodic_compensation()
{
EMC(EMC_MPC) = 0x4B;
return EMC(EMC_MPC);
(void)EMC(EMC_MPC);
}
static bool _timing_update(u32 dual_channel)
@ -1114,9 +1115,9 @@ static bool _timing_update(u32 dual_channel)
bool err = 0;
EMC(EMC_TIMING_CONTROL) = 1;
err = _wait_emc_status(EMC_EMC_STATUS, TIMING_UPDATE_STALLED, false, EMC_CH0);
err = _wait_emc_status(EMC_EMC_STATUS, TIMING_UPDATE_STALLED, false, EMC_CHANNEL0);
if (dual_channel)
err |= _wait_emc_status(EMC_EMC_STATUS, TIMING_UPDATE_STALLED, false, EMC_CH1);
err |= _wait_emc_status(EMC_EMC_STATUS, TIMING_UPDATE_STALLED, false, EMC_CHANNEL1);
return err;
}
@ -1129,7 +1130,7 @@ static u32 _get_dram_temperature()
bool channel1_enabled = (EMC(EMC_FBIO_CFG7) >> 2) & 1;
u32 emc_cfg_o = EMC(EMC_CFG);
_wait_emc_status(EMC_EMC_STATUS, MRR_DIVLD, false, EMC_CH0);
_wait_emc_status(EMC_EMC_STATUS, MRR_DIVLD, false, EMC_CHANNEL0);
if (emc_cfg_o & 0x20000000)
{
@ -1137,7 +1138,7 @@ static u32 _get_dram_temperature()
_timing_update(channel1_enabled);
}
_request_mmr_data(0x80040000, EMC_CH0);
_request_mmr_data(0x80040000, EMC_CHANNEL0);
mr4_0 = EMC(EMC_MRR) & 0xFFFF;
if (mr4_0 < 0xF001)
@ -1150,7 +1151,7 @@ static u32 _get_dram_temperature()
if (channel1_enabled)
{
_request_mmr_data(0x40040000, EMC_CH1);
_request_mmr_data(0x40040000, EMC_CHANNEL1);
mr4_1 = EMC(EMC_MRR);
if (mr4_1 < 0xF001)
@ -1311,19 +1312,6 @@ static void _digital_dll_disable()
;
}
// Enable the EMC digital DLL and block until the enable bit reads back as
// set on every active channel.
// channel1_enabled: non-zero when EMC channel 1 is present; also forwarded
// to _timing_update() so the timing latch covers both channels.
static void _digital_dll_enable(u32 channel1_enabled)
{
// Set bit 0 of EMC_CFG_DIG_DLL (presumably the DLL enable field — it is the
// same bit polled below) and latch the change with a timing update.
EMC(EMC_CFG_DIG_DLL) |= 1;
_timing_update(channel1_enabled);
// Spin until channel 0 reports the DLL as enabled...
while (!(EMC(EMC_CFG_DIG_DLL) & 1))
;
// ...and likewise for channel 1, when present.
if (channel1_enabled)
while (!(EMC_CH1(EMC_CFG_DIG_DLL) & 1))
;
}
static void _digital_dll_enable_rs(u32 channel1_enabled)
{
EMC(EMC_CFG_DIG_DLL) = (EMC(EMC_CFG_DIG_DLL) & 0xFFFFFF24) | 0x89;
@ -1383,7 +1371,7 @@ static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_
else
{
pmacro_dq_pad = (pmacro_dq_pad & 0xFEFEFDFD) | 0x10200;
pmacro_cmd_pad_drvforceon = (pmacro_cmd_pad & 0xFEFEFDFD) | 0x4010200;
pmacro_cmd_pad_drvforceon = (pmacro_cmd_pad & 0xFAFEFDFD) | 0x4010200;
_ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_drvforceon, (u32)src_clk_per_pc);
_ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad, 0);
_ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFEEDFEED, 0);
@ -1408,7 +1396,7 @@ static u32 _dvfs_power_ramp_down(bool flip_backward, emc_table_t *src_emc_table_
ramp_down_wait += 400000;
_ccfifo_write(EMC_PMACRO_COMMON_PAD_TX_CTRL, pmacro_common_tx & 0xFFFFFFFA, (u32)src_clk_per_pc);
_ccfifo_write(EMC_PMACRO_COMMON_PAD_TX_CTRL, pmacro_common_tx & 0xFFFFFFF0, (u32)src_clk_per_pc);
_ccfifo_write(0, 0, (u32)src_clk_per_pc);
_ccfifo_write(EMC_INTSTATUS, 0, (u32)src_clk_per_pc);
}
return ramp_down_wait;
@ -1467,7 +1455,7 @@ static u32 _dvfs_power_ramp_up(bool flip_backward, emc_table_t *src_emc_table_en
pmacro_common_tx = dst_emc_table_entry->burst_regs.emc_pmacro_common_pad_tx_ctrl_idx;
}
pmacro_cmd_pad_data = (pmacro_cmd_pad & 0xFEFEFDFD) | 0x4000000;
pmacro_cmd_pad_data = (pmacro_cmd_pad & 0xFAFEFDFD) | 0x4000000;
if (dst_clock_period >= 1666) // Dvfs mid speed threshold.
{
@ -1489,10 +1477,8 @@ static u32 _dvfs_power_ramp_up(bool flip_backward, emc_table_t *src_emc_table_en
_ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFE40FE40, dst_clk_per_pc);
else
{
pmacro_cmd_pad_data = (pmacro_cmd_pad & 0xFEFEFDFD) | 0x4010200;
pmacro_dq_pad = (pmacro_dq_pad & 0xFEFEFDFD) | 0x10200;
_ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data, dst_clk_per_pc);
_ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad, 0);
_ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, (pmacro_cmd_pad & 0xFAFEFDFD) | 0x4010200, dst_clk_per_pc);
_ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, (pmacro_dq_pad & 0xFEFEFDFD) | 0x10200, 0);
_ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 & 0xFE40FE40, 0);
}
@ -1502,24 +1488,22 @@ static u32 _dvfs_power_ramp_up(bool flip_backward, emc_table_t *src_emc_table_en
_ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1, dst_clk_per_pc);
else
{
pmacro_cmd_pad_data |= 0x1010202u;
pmacro_dq_pad |= 0x1010202;
pmacro_cmd_pad_data = pmacro_cmd_pad | 0x5010202;
_ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data, dst_clk_per_pc);
_ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad, 0);
_ccfifo_write(EMC_PMACRO_DATA_PAD_TX_CTRL, pmacro_dq_pad | 0x1010202, 0);
_ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1, 0);
}
_ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, dst_clk_per_pc + 9);
ramp_up_wait = 500000 + (dst_clock_period * 10);
}
else // 1000 > dst_clock_period < 1666.
{
_ccfifo_write(EMC_PMACRO_BRICK_CTRL_RFU1, pmacro_rfu1 | 0x6000600, dst_clk_per_pc);
_ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, dst_clk_per_pc + 9);
ramp_up_wait = 200000 + (dst_clock_period * 10);
}
_ccfifo_write(EMC_FBIO_CFG5, pmacro_cfg5 & 0xFFFFFEFF, dst_clk_per_pc + 9);
}
_ccfifo_write(EMC_PMACRO_CMD_PAD_TX_CTRL, pmacro_cmd_pad_data & 0xFBFFFFFF, 5);
@ -1598,7 +1582,7 @@ static u32 _minerva_update_clock_tree_delay(emc_table_t *src_emc_entry, emc_tabl
tdelta = dst_emc_entry->current_dram_clktree_c0d0u0 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d0u0_idx / 100);
if (tdelta < 0)
tdelta = -tdelta;
tdelta *= -1;
adelta = tdelta;
if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdelta * 128) / 1000000) > dst_emc_entry->tree_margin)
dst_emc_entry->current_dram_clktree_c0d0u0 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d0u0_idx / 100;
@ -1634,8 +1618,8 @@ calc_td0_0:
tdelta = dst_emc_entry->current_dram_clktree_c0d0u1 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d0u1_idx / 100);
if (tdelta < 0)
tdelta = -tdelta;
if (tdelta > adelta)
tdelta *= -1;
if ((u32)tdelta > adelta)
adelta = tdelta;
if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdelta * 128) / 1000000) > dst_emc_entry->tree_margin)
dst_emc_entry->current_dram_clktree_c0d0u1 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d0u1_idx / 100;
@ -1673,8 +1657,8 @@ calc_td1_0:
tdelta = dst_emc_entry->current_dram_clktree_c1d0u0 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d0u0_idx / 100);
if (tdelta < 0)
tdelta = -tdelta;
if (tdelta > adelta)
tdelta *= -1;
if ((u32)tdelta > adelta)
adelta = tdelta;
if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdelta * 128) / 1000000) > dst_emc_entry->tree_margin)
dst_emc_entry->current_dram_clktree_c1d0u0 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d0u0_idx / 100;
@ -1710,8 +1694,8 @@ calc_td1_1:
tdelta = dst_emc_entry->current_dram_clktree_c1d0u1 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d0u1_idx / 100);
if (tdelta < 0)
tdelta = -tdelta;
if (tdelta > adelta)
tdelta *= -1;
if ((u32)tdelta > adelta)
adelta = tdelta;
if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdelta * 128) / 1000000) > dst_emc_entry->tree_margin)
dst_emc_entry->current_dram_clktree_c1d0u1 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d0u1_idx / 100;
@ -1772,8 +1756,8 @@ calc_dev2:
tdelta = dst_emc_entry->current_dram_clktree_c0d1u0 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d1u0_idx / 100);
if (tdelta < 0)
tdelta = -tdelta;
if (tdelta > adelta)
tdelta *= -1;
if ((u32)tdelta > adelta)
adelta = tdelta;
if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdelta * 128) / 1000000) > dst_emc_entry->tree_margin)
dst_emc_entry->current_dram_clktree_c0d1u0 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d1u0_idx / 100;
@ -1809,8 +1793,8 @@ calc_tmp_td0_1:
tdelta = dst_emc_entry->current_dram_clktree_c0d1u1 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d1u1_idx / 100);
if (tdelta < 0)
tdelta = -tdelta;
if (tdelta > adelta)
tdelta *= -1;
if ((u32)tdelta > adelta)
adelta = tdelta;
if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdelta * 128) / 1000000) > dst_emc_entry->tree_margin)
dst_emc_entry->current_dram_clktree_c0d1u1 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c0d1u1_idx / 100;
@ -1848,8 +1832,8 @@ calc_tmp_td1_0:
tdelta = dst_emc_entry->current_dram_clktree_c1d1u0 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u0_idx / 100);
if (tdelta < 0)
tdelta = -tdelta;
if (tdelta > adelta)
tdelta *= -1;
if ((u32)tdelta > adelta)
adelta = tdelta;
if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdelta * 128) / 1000000) > dst_emc_entry->tree_margin)
dst_emc_entry->current_dram_clktree_c1d1u0 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u0_idx / 100;
@ -1885,8 +1869,8 @@ calc_tmp_td1_1:
tdelta = dst_emc_entry->current_dram_clktree_c1d1u1 - (dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u1_idx / 100);
if (tdelta < 0)
tdelta = -tdelta;
if (tdelta > adelta)
tdelta *= -1;
if ((u32)tdelta > adelta)
adelta = tdelta;
if (update_type == TRAINING_UPDATE || ((dst_rate_mhz * tdelta * 128) / 1000000) > dst_emc_entry->tree_margin)
dst_emc_entry->current_dram_clktree_c1d1u1 = dst_emc_entry->ptfv_list.ptfv_dqsosc_movavg_c1d1u1_idx / 100;
@ -2024,17 +2008,18 @@ static u32 _minerva_apply_periodic_compensation_trimmer(emc_table_t *mtc_table_e
case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_2:
case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK0_3:
case EMC_DATA_BRLSHFT_0:
tree_delta[0] = (mtc_table_entry->current_dram_clktree_c0d0u0 - mtc_table_entry->trained_dram_clktree_c0d0u0) << 7;
tree_delta[1] = (mtc_table_entry->current_dram_clktree_c0d0u1 - mtc_table_entry->trained_dram_clktree_c0d0u1) << 7;
tree_delta[2] = (mtc_table_entry->current_dram_clktree_c1d0u0 - mtc_table_entry->trained_dram_clktree_c1d0u0) << 7;
tree_delta[3] = (mtc_table_entry->current_dram_clktree_c1d0u1 - mtc_table_entry->trained_dram_clktree_c1d0u1) << 7;
tree_delta[0] = (mtc_table_entry->current_dram_clktree_c0d0u0 - mtc_table_entry->trained_dram_clktree_c0d0u0) * 128;
tree_delta[1] = (mtc_table_entry->current_dram_clktree_c0d0u1 - mtc_table_entry->trained_dram_clktree_c0d0u1) * 128;
tree_delta[2] = (mtc_table_entry->current_dram_clktree_c1d0u0 - mtc_table_entry->trained_dram_clktree_c1d0u0) * 128;
tree_delta[3] = (mtc_table_entry->current_dram_clktree_c1d0u1 - mtc_table_entry->trained_dram_clktree_c1d0u1) * 128;
tree_delta_taps[0] = (tree_delta[0] * (s32)dst_rate_mhz) / 1000000;
tree_delta_taps[1] = (tree_delta[1] * (s32)dst_rate_mhz) / 1000000;
tree_delta_taps[2] = (tree_delta[2] * (s32)dst_rate_mhz) / 1000000;
tree_delta_taps[3] = (tree_delta[3] * (s32)dst_rate_mhz) / 1000000;
for (u32 i = 0; i < 4; i++)
{
if ((tree_delta_taps[i] > mtc_table_entry->tree_margin) || (tree_delta_taps[i] < (-1 * mtc_table_entry->tree_margin)))
// Check if tap exceeds margins and apply it.
if ((tree_delta_taps[i] > (s32)mtc_table_entry->tree_margin) || (tree_delta_taps[i] < (-1 * (s32)mtc_table_entry->tree_margin)))
{
new_trim[i * 2] += tree_delta_taps[i];
new_trim[i * 2 + 1] += tree_delta_taps[i];
@ -2056,17 +2041,18 @@ static u32 _minerva_apply_periodic_compensation_trimmer(emc_table_t *mtc_table_e
case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_2:
case EMC_PMACRO_OB_DDLL_LONG_DQ_RANK1_3:
case EMC_DATA_BRLSHFT_1:
tree_delta[0] = (mtc_table_entry->current_dram_clktree_c0d1u0 - mtc_table_entry->trained_dram_clktree_c0d1u0) << 7;
tree_delta[1] = (mtc_table_entry->current_dram_clktree_c0d1u1 - mtc_table_entry->trained_dram_clktree_c0d1u1) << 7;
tree_delta[2] = (mtc_table_entry->current_dram_clktree_c1d1u0 - mtc_table_entry->trained_dram_clktree_c1d1u0) << 7;
tree_delta[3] = (mtc_table_entry->current_dram_clktree_c1d1u1 - mtc_table_entry->trained_dram_clktree_c1d1u1) << 7;
tree_delta[0] = (mtc_table_entry->current_dram_clktree_c0d1u0 - mtc_table_entry->trained_dram_clktree_c0d1u0) * 128;
tree_delta[1] = (mtc_table_entry->current_dram_clktree_c0d1u1 - mtc_table_entry->trained_dram_clktree_c0d1u1) * 128;
tree_delta[2] = (mtc_table_entry->current_dram_clktree_c1d1u0 - mtc_table_entry->trained_dram_clktree_c1d1u0) * 128;
tree_delta[3] = (mtc_table_entry->current_dram_clktree_c1d1u1 - mtc_table_entry->trained_dram_clktree_c1d1u1) * 128;
tree_delta_taps[0] = (tree_delta[0] * (s32)dst_rate_mhz) / 1000000;
tree_delta_taps[1] = (tree_delta[1] * (s32)dst_rate_mhz) / 1000000;
tree_delta_taps[2] = (tree_delta[2] * (s32)dst_rate_mhz) / 1000000;
tree_delta_taps[3] = (tree_delta[3] * (s32)dst_rate_mhz) / 1000000;
for (u32 i = 0; i < 4; i++)
{
if ((tree_delta_taps[i] > mtc_table_entry->tree_margin) || (tree_delta_taps[i] < (-1 * mtc_table_entry->tree_margin)))
// Check if tap exceeds margins and apply it.
if ((tree_delta_taps[i] > (s32)mtc_table_entry->tree_margin) || (tree_delta_taps[i] < (-1 * (s32)mtc_table_entry->tree_margin)))
{
new_trim[8 + i * 2] += tree_delta_taps[i];
new_trim[8 + i * 2 + 1] += tree_delta_taps[i];
@ -2616,10 +2602,13 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
u32 tFC_lpddr4 = dst_emc_entry->dram_timings.t_fc_lpddr4 * 1000;
u32 tZQCAL_lpddr4 = 1000000;
if (src_clock_period <= 2000)
if (dst_clock_period <= 2000)
tZQCAL_lpddr4 -= tFC_lpddr4;
s32 tZQCAL_lpddr4_fc_adj = tZQCAL_lpddr4 / dst_clock_period;
(void)EMC(EMC_CFG);
(void)EMC(EMC_AUTO_CAL_CONFIG);
// Step 1 - Pre DVFS SW sequence.
EPRINTF("Step 1");
emc_dbg_o = EMC(EMC_DBG);
@ -2632,6 +2621,7 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
// Step 1.2 - Disable AUTOCAL temporarily.
EPRINTF("Step 1.2");
EMC(EMC_AUTO_CAL_CONFIG) = (dst_emc_entry->emc_auto_cal_config & 0x7FFFF9FF) | 0x600;
(void)EMC(EMC_AUTO_CAL_CONFIG);
// Step 1.3 - Disable other power features.
EPRINTF("Step 1.3");
@ -2644,20 +2634,20 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
{
if (dram_dev_num == TWO_RANK)
{
_wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, false, EMC_CH0);
_wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_BOTH_MASK, false, EMC_CHANNEL0);
if (channel1_enabled)
_wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, false, EMC_CH1);
_wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_BOTH_MASK, false, EMC_CHANNEL1);
}
else
{
_wait_emc_status(EMC_EMC_STATUS, 0x10, false, EMC_CH0);
_wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_1DEV_MASK, false, EMC_CHANNEL0);
if (channel1_enabled)
_wait_emc_status(EMC_EMC_STATUS, 0x10, false, EMC_CH1);
_wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_1DEV_MASK, false, EMC_CHANNEL1);
}
_wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, false, EMC_CH0);
_wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, false, EMC_CHANNEL0);
if (channel1_enabled)
_wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, false, EMC_CH1);
_wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, false, EMC_CHANNEL1);
// Reset clock tree delays.
dst_emc_entry->current_dram_clktree_c0d0u0 = dst_emc_entry->trained_dram_clktree_c0d0u0;
@ -2826,6 +2816,7 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
EMC(EMC_W2P) = W2P_war;
EMC(EMC_TRPAB) = TRPab_war;
EMC(EMC_DBG) = emc_dbg_o;
(void)EMC(EMC_TRPAB);
_usleep(1);
}
@ -3058,7 +3049,6 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
// Step 9 - LPDDR4.
EPRINTF("Step 9");
EMC(EMC_ZCAL_INTERVAL) = src_emc_entry->burst_regs.emc_zcal_interval_idx & 0xFF000000;
EMC(EMC_ZCAL_WAIT_CNT) = dst_emc_entry->burst_regs.emc_zcal_wait_cnt_idx & 0xFFFFF800;
EMC(EMC_DBG) = emc_dbg_o | 0x40000002;
@ -3081,12 +3071,11 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
_ccfifo_write(EMC_DBG, (emc_dbg_o & 0xF3FFFFFF) | 0x4000000, 0);
}
// Step 10 - Self refresh
EPRINTF("Step 10");
_ccfifo_write(EMC_SELF_REF, 0x101, 0);
if (!needs_ca_or_cavref_training && (src_clock_period <= 2000))
if (!needs_ca_or_cavref_training && (dst_clock_period <= 2000))
{
_ccfifo_write(EMC_MRW3, mr13_flip_fspwr ^ 0x40, 0);
_ccfifo_write(EMC_MRW6, (src_emc_entry->burst_regs.emc_mrw6_idx & 0xC0C0) | (dst_emc_entry->burst_regs.emc_mrw6_idx & 0xFFFF3F3F), 0);
@ -3155,7 +3144,7 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
// Step 13 - Ramp up.
EPRINTF("Step 13");
ramp_up_wait = _dvfs_power_ramp_up(false, src_emc_entry, dst_emc_entry, needs_training & 0xFF, dst_clock_period);
ramp_up_wait = _dvfs_power_ramp_up(false, src_emc_entry, dst_emc_entry, needs_training, dst_clock_period);
_ccfifo_write(EMC_DBG, emc_dbg_val, 0);
@ -3184,83 +3173,70 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
EPRINTF("Step 15");
if (!needs_ca_or_cavref_training)
{
s32 zq_latch_dvfs_wait_time = 0;
s32 T_PDEX_timing_final = 0;
s32 T_PDEX_timing = div_o3(dst_emc_entry->dram_timings.t_pdex * 1000, dst_clock_period);
s32 zq_latch_dvfs_wait_time;
u32 T_PDEX_timing = div_o3(dst_emc_entry->dram_timings.t_pdex * 1000, dst_clock_period);
if (src_clock_period > 2000)
zq_latch_dvfs_wait_time = (s32)tZQCAL_lpddr4_fc_adj - T_PDEX_timing;
if (dst_clock_period > 2000)
zq_latch_dvfs_wait_time = (s32)tZQCAL_lpddr4_fc_adj - (s32)T_PDEX_timing;
else
zq_latch_dvfs_wait_time =
(s32)tZQCAL_lpddr4_fc_adj - (ramp_up_wait + ramp_down_wait) / dst_clock_period;
if (dram_dev_num == ONE_RANK)
{
if (T_PDEX_timing < 0)
T_PDEX_timing = 0;
if (src_clock_period > 2000)
if (dst_clock_period > 2000)
_ccfifo_write(EMC_ZQ_CAL, 0x80000001, T_PDEX_timing);
if (!needs_tristate_training)
{
_ccfifo_write(EMC_MRW3, (mr13_flip_fspop & 0xF3FFFFF7) | 0xC000000, T_PDEX_timing);
_ccfifo_write(EMC_SELF_REF, 0x100, 0);
_ccfifo_write(EMC_REF, 0, 0);
}
emc_zq_cal = 0x80000002;
if (zq_latch_dvfs_wait_time < 0)
zq_latch_dvfs_wait_time = 0;
}
else if (zcal_resistor_shared)
{
if (src_clock_period > 2000)
{
T_PDEX_timing_final = T_PDEX_timing;
if (T_PDEX_timing < 0)
T_PDEX_timing_final = 0;
_ccfifo_write(EMC_ZQ_CAL, 0x80000001, T_PDEX_timing_final);
}
if (dst_clock_period > 2000)
_ccfifo_write(EMC_ZQ_CAL, 0x80000001, T_PDEX_timing);
T_PDEX_timing_final = zq_latch_dvfs_wait_time + T_PDEX_timing;
s32 T_PDEX_timing_final = zq_latch_dvfs_wait_time + (s32)T_PDEX_timing;
if ((zq_latch_dvfs_wait_time + T_PDEX_timing) < 0)
if (T_PDEX_timing_final < 0)
T_PDEX_timing_final = 0;
_ccfifo_write(EMC_ZQ_CAL, 0x80000002, T_PDEX_timing_final);
_ccfifo_write(EMC_ZQ_CAL, 0x40000001, 0);
if (!needs_tristate_training)
{
_ccfifo_write(EMC_MRW3, (mr13_flip_fspop & 0xF3FFFFF7) | 0xC000000, 0);
_ccfifo_write(EMC_SELF_REF, 0x100, 0);
_ccfifo_write(EMC_REF, 0, 0);
}
emc_zq_cal = 0x40000002;
zq_latch_dvfs_wait_time = 1000000 / dst_clock_period;
}
else
{
if (T_PDEX_timing < 0)
T_PDEX_timing = 0;
if (src_clock_period > 2000)
if (dst_clock_period > 2000)
_ccfifo_write(EMC_ZQ_CAL, 1, T_PDEX_timing);
if (!needs_tristate_training)
{
_ccfifo_write(EMC_MRW3, (mr13_flip_fspop & 0xF3FFFFF7) | 0xC000000, T_PDEX_timing);
_ccfifo_write(EMC_SELF_REF, 0x100, 0);
_ccfifo_write(EMC_REF, 0, 0);
}
emc_zq_cal = 2;
if (zq_latch_dvfs_wait_time < 0)
zq_latch_dvfs_wait_time = 0;
}
// Disable self-refresh.
if (!needs_tristate_training)
{
#ifdef PERF_HACK
// HACK: Setting ACTIVE_SELF_REF increases perf by 1-2%.
_ccfifo_write(EMC_SELF_REF, 0x100, 0);
#else
_ccfifo_write(EMC_SELF_REF, 0, 0);
#endif
_ccfifo_write(EMC_REF, 0, 0);
}
if (zq_latch_dvfs_wait_time < 0)
zq_latch_dvfs_wait_time = 0;
_ccfifo_write(EMC_ZQ_CAL, emc_zq_cal, (u32)zq_latch_dvfs_wait_time);
}
@ -3344,40 +3320,19 @@ static u32 _minerva_set_clock(emc_table_t *src_emc_entry, emc_table_t *dst_emc_e
_ccfifo_write(EMC_ZQ_CAL, 0x80000001, 0);
_ccfifo_write(EMC_ZQ_CAL, 0x80000002, 1000000 / src_clock_period);
if (zcal_resistor_shared && dram_dev_num == TWO_RANK)
if ((!needs_ca_or_cavref_training || needs_swap_rank_training) && dram_dev_num == TWO_RANK)
{
if (!needs_ca_or_cavref_training || needs_swap_rank_training)
{
_ccfifo_write(EMC_ZQ_CAL, 0x40000001, 0);
_ccfifo_write(EMC_ZQ_CAL, 0x40000002, 1000000 / src_clock_period);
if (!needs_ca_or_cavref_training)
_ccfifo_write(EMC_MRW3, ((mr13_flip_fspop ^ 0xC0) & 0xF3FFFFF7) | 0xC000000, 0);
}
_ccfifo_write(EMC_SELF_REF, 0x100, 0);
goto step_19_2;
}
else if (dram_dev_num == TWO_RANK)
{
if (needs_ca_or_cavref_training && !needs_swap_rank_training)
{
_ccfifo_write(EMC_SELF_REF, 0x100, 0);
goto step_19_2;
}
_ccfifo_write(EMC_ZQ_CAL, 0x40000001, 0);
_ccfifo_write(EMC_ZQ_CAL, 0x40000002, 1000000 / src_clock_period);
}
if (!needs_ca_or_cavref_training)
_ccfifo_write(EMC_MRW3, ((mr13_flip_fspop ^ 0xC0) & 0xF3FFFFF7) | 0xC000000, 0);
_ccfifo_write(EMC_MRW3, (mr13_flip_fspop & 0xF3FFFFF7) ^ 0xC0000C0, 0);
_ccfifo_write(EMC_SELF_REF, 0x100, 0);
_ccfifo_write(EMC_SELF_REF, 0, 0); // Was 0x100.
}
step_19_2:
// Step 19.2.
EPRINTF("Step 19.2");
if (bg_regulator_mode_change)
@ -3418,6 +3373,7 @@ step_19_2:
// Step 22 - Restore EMC_CFG_PIPE_CLK.
EPRINTF("Step 22");
//if (needs_tristate_training && dram_type == DRAM_TYPE_LPDDR4)////////////////
if (needs_tristate_training)
_ccfifo_write(EMC_SEL_DPD_CTRL, src_emc_entry->emc_sel_dpd_ctrl, 0);
@ -3434,14 +3390,25 @@ step_19_2:
// Step 23 - Clock Change.
EPRINTF("Step 23");
// During training save current clock.
if (needs_tristate_training)
{
CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC_SAFE) = (u32)CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC);
_change_dll_src(src_emc_entry, (u32)CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC));
u32 emc_clk_src = CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC);
CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC_SAFE) = emc_clk_src;
_change_dll_src(src_emc_entry, emc_clk_src);
}
EMC(EMC_CFG_DIG_DLL) = (EMC(EMC_CFG_DIG_DLL) & 0xFFFFFF24) | 0x88;
// Set CFG_DLL_MODE to RUN_PERIODIC.
EMC(EMC_CFG_DIG_DLL) = (EMC(EMC_CFG_DIG_DLL) & 0xFFFFFF24) | 0x88;
(void)EMC(EMC_CFG_DIG_DLL);
(void)EMC(EMC_FBIO_CFG7);
(void)MC(MC_EMEM_ADR_CFG);
(void)EMC(EMC_INTSTATUS);
// Do clock change.
CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC) = selected_clk_src_emc;
(void)CLOCK(CLK_RST_CONTROLLER_CLK_SOURCE_EMC);
if (_wait_emc_status(EMC_INTSTATUS, CLKCHANGE_COMPLETE_INT, true, 0))
return 4; // Clkchange handshake timeout error.
@ -3450,6 +3417,7 @@ step_19_2:
EPRINTF("Step 24");
if (needs_tristate_training)
{
(void)MC(MC_EMEM_ADR_CFG);
emc_dbg_val = EMC(EMC_DBG);
EMC(EMC_DBG) |= 1;
@ -3543,7 +3511,6 @@ static void _minerva_train_patterns(emc_table_t *src_emc_entry, emc_table_t *dst
u32 needs_training_emc_table[8] = {0};
u32 needs_training = dst_emc_entry->needs_training;
bool dual_channel = (EMC(EMC_FBIO_CFG7) >> 1) & ((EMC(EMC_FBIO_CFG7) >> 2) & 1);
// Must start as true.
if (train_ram_patterns)
@ -3558,29 +3525,21 @@ static void _minerva_train_patterns(emc_table_t *src_emc_entry, emc_table_t *dst
train_ram_patterns = false;
}
if (needs_training && !dst_emc_entry->trained)
if (!dst_emc_entry->trained)
{
needs_training_idx = needs_training & 3;
if (needs_training & 3)
{
needs_training_idx = 1;
needs_training_emc_table[0] = needs_training & 0x203;
needs_training_emc_table[needs_training_idx++] = needs_training & 0x203;
if (MC(MC_EMEM_ADR_CFG) & 1) // if mapping W8 (1KB page).
{
needs_training_idx = 2;
needs_training_emc_table[1] = needs_training & 0x303;
}
needs_training_emc_table[needs_training_idx++] = needs_training & 0x303;
}
if (MC(MC_EMEM_ADR_CFG) & 1 && needs_training & 0xC)
if (needs_training & 0xC)
{
needs_training_emc_table[needs_training_idx] = needs_training & 0x20C;
needs_training_emc_table[needs_training_idx + 1] = needs_training & 0x204;
needs_training_idx += 2;
}
else if (needs_training & 0xC)
needs_training_emc_table[needs_training_idx++] = needs_training & 0x20C;
if (MC(MC_EMEM_ADR_CFG) & 1)
needs_training_emc_table[needs_training_idx++] = needs_training & 0x204;
}
if (needs_training & 0xF0)
needs_training_emc_table[needs_training_idx++] = needs_training & 0x2F0;
@ -3589,6 +3548,8 @@ static void _minerva_train_patterns(emc_table_t *src_emc_entry, emc_table_t *dst
{
_minerva_set_clock(src_emc_entry, dst_emc_entry, needs_training_emc_table[i], selected_clk_src_emc);
bool dual_channel = (EMC(EMC_FBIO_CFG7) >> 1) & ((EMC(EMC_FBIO_CFG7) >> 2) & 1);
EMC(EMC_DBG) = (EMC(EMC_DBG) & 0xF3FFFFFF) | 0x8000000;
EMC(EMC_CFG_UPDATE) = (EMC(EMC_CFG_UPDATE) & 0xFFFFFFF9) | 4;
_timing_update(dual_channel);
@ -3614,10 +3575,10 @@ static void _minerva_train_patterns(emc_table_t *src_emc_entry, emc_table_t *dst
EMC(EMC_TRPAB) = src_emc_entry->burst_regs.emc_trpab_idx;
_timing_update(dual_channel);
}
dst_emc_entry->trained = 1;
EPRINTF("Trained");
dst_emc_entry->trained = 1;
}
if (switch_rate)
@ -3681,49 +3642,46 @@ u32 _minerva_do_periodic_compensation(emc_table_t *mtc_table_entry)
{
if (mtc_table_entry && mtc_table_entry->periodic_training)
{
u32 val = 0;
u32 dram_dev_num = (MC(MC_EMEM_ADR_CFG) & 1) + 1;
u32 pd_mask = (dram_dev_num == TWO_RANK) ? IN_POWERDOWN_BOTH_MASK : IN_POWERDOWN_1DEV_MASK;
bool channel1_enabled = (mtc_table_entry->burst_regs.emc_fbio_cfg7_idx >> 2) & 1;
//u32 emc_dbg_o = EMC(EMC_DBG);
(void)EMC(EMC_DBG);
// Safekeep current config.
u32 emc_cfg_o = EMC(EMC_CFG);
u32 emc_cfg = emc_cfg_o & 0xFFFFFFF;
u32 emc_cfg_dig_dll_o = EMC(EMC_CFG_DIG_DLL);
u32 emc_cfg_update_o = EMC(EMC_CFG_UPDATE);
// Step 1 - Disable other power features.
EMC(EMC_CFG) = emc_cfg;
// Step 1 - Disable digital DLL.
EMC(EMC_CFG_DIG_DLL) = emc_cfg_dig_dll_o & 0xFFFFFFFE;
_digital_dll_disable();
// Step 1.2 - Always update auto cal in clock change.
EMC(EMC_CFG_UPDATE) = (emc_cfg_update_o & 0xFFFFF9FF) | 0x400;
if (dram_dev_num == TWO_RANK)
{
_wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, 0, EMC_CH0);
if (channel1_enabled)
_wait_emc_status(EMC_EMC_STATUS, IN_POWERDOWN_MASK, 0, EMC_CH1);
}
else
{
_wait_emc_status(EMC_EMC_STATUS, 0x10, 0, 0);
if (channel1_enabled)
_wait_emc_status(EMC_EMC_STATUS, 0x10, 0, EMC_CH1);
}
// Step 1.3 - Disable other power features.
EMC(EMC_CFG) = emc_cfg_o & 0xFFFFFFF;
_wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, 0, EMC_CH0);
// Timing update and wait for everything to power down.
_timing_update(channel1_enabled);
_wait_emc_status(EMC_EMC_STATUS, pd_mask, 0, EMC_CHANNEL0);
if (channel1_enabled)
_wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, 0, EMC_CH1);
_wait_emc_status(EMC_EMC_STATUS, pd_mask, 0, EMC_CHANNEL1);
// Wait for request FIFO to get empty.
//_wait_emc_status(EMC_EMC_STATUS, REQ_FIFO_EMPTY, 0, EMC_CH0); //v1.6
//if (channel1_enabled)
// _wait_emc_status(EMC_EMC_STATUS, REQ_FIFO_EMPTY, 0, EMC_CH1); //v1.6
_wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, 0, EMC_CHANNEL0);
if (channel1_enabled)
_wait_emc_status(EMC_EMC_STATUS, IN_SELF_REFRESH_MASK, 0, EMC_CHANNEL1);
u32 emc_cfg_update = EMC(EMC_CFG_UPDATE);
EMC(EMC_CFG_UPDATE) = (emc_cfg_update & 0xFFFFF9FF) | 0x400;
_wait_emc_status(EMC_CFG_DIG_DLL, 1, 0, EMC_CHANNEL0);
if (channel1_enabled)
_wait_emc_status(EMC_CFG_DIG_DLL, 1, 0, EMC_CHANNEL1);
// Step 2 - Osc kick off - this assumes training and dvfs have set correct MR23.
_start_periodic_compensation();
// Step 3 - Let dram capture its clock tree delays.
_usleep(1000 * _actual_osc_clocks(mtc_table_entry->run_clocks) / mtc_table_entry->rate_khz + 1);
_usleep(1000 * _actual_osc_clocks(mtc_table_entry->run_clocks) / mtc_table_entry->rate_khz + 2);
// Step 4 - Check delta wrt previous values (save value if margin exceeds what is set in table).
u32 adelta = _minerva_update_clock_tree_delay(mtc_table_entry, mtc_table_entry, dram_dev_num, channel1_enabled, PERIODIC_TRAINING_UPDATE);
@ -3733,21 +3691,22 @@ u32 _minerva_do_periodic_compensation(emc_table_t *mtc_table_entry)
{
for (u32 i = 0; i < 10; i++)
{
val = _minerva_apply_periodic_compensation_trimmer(mtc_table_entry, periodic_training_addr[i]);
EMC(periodic_training_addr[i]) = val;
EMC(periodic_training_addr[i]) =
_minerva_apply_periodic_compensation_trimmer(mtc_table_entry, periodic_training_addr[i]);
}
}
// Step 6 - Restore other power features.
EMC(EMC_CFG) = emc_cfg_o;
// Step 6 - Timing update to apply the new trimmers.
// Step 6.1 - Restore the DLL.
EMC(EMC_CFG_DIG_DLL) = emc_cfg_dig_dll_o;
// Step 6.2 - Timing update for applying the new trimmers.
_timing_update(channel1_enabled);
// Step 6.1 - Restore the UPDATE_DLL_IN_UPDATE field.
EMC(EMC_CFG_UPDATE) = emc_cfg_update;
// Step 6.2 - Restore the DLL.
_digital_dll_enable(channel1_enabled);
// Step 6.3 - Restore the UPDATE_DLL_IN_UPDATE field.
EMC(EMC_CFG_UPDATE) = emc_cfg_update_o;
}
return 0;