ddr: altera: Clean up rw_mgr_mem_calibrate_read_test_patterns()

Rework this function such that the code is more readable. Zap
unused parameter "num_tries" while at it. Also make "bit_chk" a
local variable, as its value is not used outside this function.
Finally, fix the return value from this function to match the
common expectation, where 0 means success.

Signed-off-by: Marek Vasut <marex@denx.de>
Marek Vasut 2015-07-18 03:55:07 +02:00
parent 93dcfd8982
commit d844c7d434

@@ -1036,31 +1036,42 @@ static void rw_mgr_mem_handoff(void)
 	 */
 }
 
-/*
- * performs a guaranteed read on the patterns we are going to use during a
- * read test to ensure memory works
+/**
+ * rw_mgr_mem_calibrate_read_test_patterns() - Read back test patterns
+ * @rank_bgn:	Rank number
+ * @group:	Read/Write Group
+ * @all_ranks:	Test all ranks
+ *
+ * Performs a guaranteed read on the patterns we are going to use during a
+ * read test to ensure memory works.
  */
-static uint32_t rw_mgr_mem_calibrate_read_test_patterns(uint32_t rank_bgn,
-	uint32_t group, uint32_t num_tries, uint32_t *bit_chk,
-	uint32_t all_ranks)
+static int
+rw_mgr_mem_calibrate_read_test_patterns(const u32 rank_bgn, const u32 group,
+					const u32 all_ranks)
 {
-	uint32_t r, vg;
-	uint32_t correct_mask_vg;
-	uint32_t tmp_bit_chk;
-	uint32_t rank_end = all_ranks ? RW_MGR_MEM_NUMBER_OF_RANKS :
-		(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
-	uint32_t addr;
-	uint32_t base_rw_mgr;
+	const u32 addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
+			 RW_MGR_RUN_SINGLE_GROUP_OFFSET;
+	const u32 addr_offset =
+		(group * RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS) << 2;
+	const u32 rank_end = all_ranks ?
+				RW_MGR_MEM_NUMBER_OF_RANKS :
+				(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
+	const u32 shift_ratio = RW_MGR_MEM_DQ_PER_READ_DQS /
+				RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS;
+	const u32 correct_mask_vg = param->read_correct_mask_vg;
 
-	*bit_chk = param->read_correct_mask;
-	correct_mask_vg = param->read_correct_mask_vg;
+	u32 tmp_bit_chk, base_rw_mgr, bit_chk;
+	int vg, r;
+	int ret = 0;
+
+	bit_chk = param->read_correct_mask;
 
 	for (r = rank_bgn; r < rank_end; r++) {
+		/* Request to skip the rank */
 		if (param->skip_ranks[r])
-			/* request to skip the rank */
 			continue;
 
-		/* set rank */
+		/* Set rank */
 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
 
 		/* Load up a constant bursts of read commands */
@@ -1073,38 +1084,36 @@ static uint32_t rw_mgr_mem_calibrate_read_test_patterns(uint32_t rank_bgn,
 			&sdr_rw_load_jump_mgr_regs->load_jump_add1);
 
 		tmp_bit_chk = 0;
-		for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS-1; ; vg--) {
-			/* reset the fifos to get pointers to known state */
+		for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS - 1;
+		     vg >= 0; vg--) {
+			/* Reset the FIFOs to get pointers to known state. */
 			writel(0, &phy_mgr_cmd->fifo_reset);
 			writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
 				  RW_MGR_RESET_READ_DATAPATH_OFFSET);
-
-			tmp_bit_chk = tmp_bit_chk << (RW_MGR_MEM_DQ_PER_READ_DQS
-				/ RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS);
-
-			addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET;
-			writel(RW_MGR_GUARANTEED_READ, addr +
-			       ((group * RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS +
-				vg) << 2));
+			writel(RW_MGR_GUARANTEED_READ,
+			       addr + addr_offset + (vg << 2));
 
 			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
-			tmp_bit_chk = tmp_bit_chk | (correct_mask_vg & (~base_rw_mgr));
-
-			if (vg == 0)
-				break;
+			tmp_bit_chk <<= shift_ratio;
+			tmp_bit_chk |= correct_mask_vg & ~base_rw_mgr;
 		}
-		*bit_chk &= tmp_bit_chk;
+
+		bit_chk &= tmp_bit_chk;
 	}
 
-	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET;
 	writel(RW_MGR_CLEAR_DQS_ENABLE, addr + (group << 2));
 
 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
-	debug_cond(DLEVEL == 1, "%s:%d test_load_patterns(%u,ALL) => (%u == %u) =>\
-		   %lu\n", __func__, __LINE__, group, *bit_chk, param->read_correct_mask,
-		   (long unsigned int)(*bit_chk == param->read_correct_mask));
-	return *bit_chk == param->read_correct_mask;
+
+	if (bit_chk != param->read_correct_mask)
+		ret = -EIO;
+
+	debug_cond(DLEVEL == 1,
+		   "%s:%d test_load_patterns(%u,ALL) => (%u == %u) => %i\n",
+		   __func__, __LINE__, group, bit_chk,
+		   param->read_correct_mask, ret);
+
+	return ret;
 }
 
 /**
@@ -2200,7 +2209,6 @@ static uint32_t rw_mgr_mem_calibrate_vfifo_center(uint32_t rank_bgn,
 static int rw_mgr_mem_calibrate_guaranteed_write(const u32 rw_group,
 						 const u32 phase)
 {
-	u32 bit_chk;
 	int ret;
 
 	/* Set a particular DQ/DQS phase. */
@@ -2223,16 +2231,12 @@ static int rw_mgr_mem_calibrate_guaranteed_write(const u32 rw_group,
 	 * Altera EMI_RM 2015.05.04 :: Figure 1-26
 	 * Back-to-Back reads of the patterns used for calibration.
 	 */
-	ret = rw_mgr_mem_calibrate_read_test_patterns(0, rw_group, 1,
-						      &bit_chk, 1);
-	if (!ret) { /* FIXME: 0 means failure in this old code :-( */
+	ret = rw_mgr_mem_calibrate_read_test_patterns(0, rw_group, 1);
+	if (ret)
 		debug_cond(DLEVEL == 1,
 			   "%s:%d Guaranteed read test failed: g=%u p=%u\n",
 			   __func__, __LINE__, rw_group, phase);
-		return -EIO;
-	}
 
-	return 0;
+	return ret;
 }
 
 /**
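An aside on the first hunk: the reworked inner loop packs a per-virtual-group pass mask into tmp_bit_chk (shift left by shift_ratio, then OR in the DQ bits that read the pattern back correctly) and ANDs the result into bit_chk across ranks, so a DQ line only counts as good if it passed in every virtual group and on every rank. Below is a minimal, self-contained sketch of that accumulation under the new 0-means-success convention. It is illustrative only: VGROUPS_PER_DQS, DQ_PER_DQS and check_virtual_group() are hypothetical stand-ins for the RW_MGR_* macros and the readl() of the RW manager status register, and are not part of the driver.

/*
 * Illustrative sketch only -- not part of the patch. The constants and
 * check_virtual_group() are hypothetical stand-ins for the RW_MGR_*
 * macros and the RW manager status register read.
 */
#include <stdint.h>

#define VGROUPS_PER_DQS	2	/* virtual groups per read DQS (assumed) */
#define DQ_PER_DQS	8	/* DQ lines per read DQS (assumed) */

/* Stand-in: bitmask of DQ lines that read the test pattern back correctly. */
static uint32_t check_virtual_group(int rank, int vg)
{
	(void)rank;
	(void)vg;
	return 0xf;		/* pretend only the low nibble passed */
}

static int read_test_patterns(int rank_bgn, int rank_end, uint32_t correct_mask)
{
	const uint32_t shift_ratio = DQ_PER_DQS / VGROUPS_PER_DQS;
	uint32_t bit_chk = correct_mask;

	for (int r = rank_bgn; r < rank_end; r++) {
		uint32_t tmp_bit_chk = 0;

		/* Walk virtual groups high to low, packing per-group pass bits. */
		for (int vg = VGROUPS_PER_DQS - 1; vg >= 0; vg--) {
			tmp_bit_chk <<= shift_ratio;
			tmp_bit_chk |= check_virtual_group(r, vg);
		}

		/* A DQ line passes only if it passed on every rank. */
		bit_chk &= tmp_bit_chk;
	}

	/* New convention: 0 on success, negative error code on failure. */
	return bit_chk == correct_mask ? 0 : -5;	/* -5 == -EIO */
}

With this stand-in, read_test_patterns(0, 2, 0xffff) reports failure, since only the low nibble of each group "passed"; that mirrors how the reworked driver function returns -EIO as soon as any DQ line misses the guaranteed-read pattern on any rank.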