/*
 * Copyright Altera Corporation (C) 2012-2015
 *
 * SPDX-License-Identifier:    BSD-3-Clause
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/sdram.h>
#include <errno.h>
#include "sequencer.h"

static struct socfpga_sdr_rw_load_manager *sdr_rw_load_mgr_regs =
	(struct socfpga_sdr_rw_load_manager *)
		(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0x800);
static struct socfpga_sdr_rw_load_jump_manager *sdr_rw_load_jump_mgr_regs =
	(struct socfpga_sdr_rw_load_jump_manager *)
		(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0xC00);
static struct socfpga_sdr_reg_file *sdr_reg_file =
	(struct socfpga_sdr_reg_file *)SDR_PHYGRP_REGFILEGRP_ADDRESS;
static struct socfpga_sdr_scc_mgr *sdr_scc_mgr =
	(struct socfpga_sdr_scc_mgr *)
		(SDR_PHYGRP_SCCGRP_ADDRESS | 0xe00);
static struct socfpga_phy_mgr_cmd *phy_mgr_cmd =
	(struct socfpga_phy_mgr_cmd *)SDR_PHYGRP_PHYMGRGRP_ADDRESS;
static struct socfpga_phy_mgr_cfg *phy_mgr_cfg =
	(struct socfpga_phy_mgr_cfg *)
		(SDR_PHYGRP_PHYMGRGRP_ADDRESS | 0x40);
static struct socfpga_data_mgr *data_mgr =
	(struct socfpga_data_mgr *)SDR_PHYGRP_DATAMGRGRP_ADDRESS;
static struct socfpga_sdr_ctrl *sdr_ctrl =
	(struct socfpga_sdr_ctrl *)SDR_CTRLGRP_ADDRESS;

const struct socfpga_sdram_rw_mgr_config *rwcfg;
const struct socfpga_sdram_io_config *iocfg;
const struct socfpga_sdram_misc_config *misccfg;

#define DELTA_D		1

/*
 * In order to reduce ROM size, most of the selectable calibration steps are
 * decided at compile time based on the user's calibration mode selection,
 * as captured by the STATIC_CALIB_STEPS selection below.
 *
 * However, to support simulation-time selection of fast simulation mode, where
 * we skip everything except the bare minimum, we need a few of the steps to
 * be dynamic.  In those cases, we either use the DYNAMIC_CALIB_STEPS for the
 * check, which is based on the rtl-supplied value, or we dynamically compute
 * the value to use based on the dynamically-chosen calibration mode
 */

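/*
 * DLEVEL gates the debug_cond() diagnostics below; 0 keeps them silent.
 * The two STATIC_* flags are left at 0 for real hardware and would only
 * be set when running the sequencer in RTL simulation.
 */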
#define DLEVEL 0
#define STATIC_IN_RTL_SIM 0
#define STATIC_SKIP_DELAY_LOOPS 0

#define STATIC_CALIB_STEPS (STATIC_IN_RTL_SIM | CALIB_SKIP_FULL_TEST | \
	STATIC_SKIP_DELAY_LOOPS)

/* calibration steps requested by the rtl */
static u16 dyn_calib_steps;

/*
 * To make CALIB_SKIP_DELAY_LOOPS a dynamic conditional option
 * instead of static, we use boolean logic to select between
 * non-skip and skip values
 *
 * The mask is set to include all bits when not-skipping, but is
 * zero when skipping
 */

static u16 skip_delay_mask;	/* mask off bits when skipping/not-skipping */

#define SKIP_DELAY_LOOP_VALUE_OR_ZERO(non_skip_value) \
	((non_skip_value) & skip_delay_mask)

static struct gbl_type *gbl;
static struct param_type *param;

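/**
 * set_failing_group_stage() - Record the failing group and stage
 * @group:	Failing group
 * @stage:	Failing calibration stage
 * @substage:	Failing calibration substage
 *
 * Record the first calibration failure; later failures do not overwrite it.
 */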
static void set_failing_group_stage(u32 group, u32 stage,
	u32 substage)
{
	/*
	 * Only set the global stage if there has not been any other
	 * failing group
	 */
	if (gbl->error_stage == CAL_STAGE_NIL)	{
		gbl->error_substage = substage;
		gbl->error_stage = stage;
		gbl->error_group = group;
	}
}

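/**
 * reg_file_set_group() - Set the group into the debug register file
 * @set_group:	Group currently being calibrated
 *
 * Store the group in the upper half-word of the register file's
 * current-stage register.
 */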
static void reg_file_set_group(u16 set_group)
{
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff0000, set_group << 16);
}

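/**
 * reg_file_set_stage() - Set the stage into the debug register file
 * @set_stage:	Current calibration stage
 *
 * Store the stage in the low byte of the register file's current-stage
 * register; the substage byte is cleared as well.
 */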
static void reg_file_set_stage(u8 set_stage)
{
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff, set_stage & 0xff);
}

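/**
 * reg_file_set_sub_stage() - Set the substage into the debug register file
 * @set_sub_stage:	Current calibration substage
 *
 * Store the substage in bits 15:8 of the register file's current-stage
 * register.
 */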
static void reg_file_set_sub_stage(u8 set_sub_stage)
{
	set_sub_stage &= 0xff;
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xff00, set_sub_stage << 8);
}

/**
 * phy_mgr_initialize() - Initialize PHY Manager
 *
 * Initialize PHY Manager.
 */
static void phy_mgr_initialize(void)
{
	u32 ratio;

	debug("%s:%d\n", __func__, __LINE__);
	/* Calibration has control over path to memory */
	/*
	 * In Hard PHY this is a 2-bit control:
	 * 0: AFI Mux Select
	 * 1: DDIO Mux Select
	 */
	writel(0x3, &phy_mgr_cfg->mux_sel);

	/* USER memory clock is not stable, we begin initialization */
	writel(0, &phy_mgr_cfg->reset_mem_stbl);

	/* USER calibration status all set to zero */
	writel(0, &phy_mgr_cfg->cal_status);

	writel(0, &phy_mgr_cfg->cal_debug_info);

	/* Init params only if we do NOT skip calibration. */
	if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL)
		return;

	ratio = rwcfg->mem_dq_per_read_dqs /
		rwcfg->mem_virtual_groups_per_read_dqs;
	param->read_correct_mask_vg = (1 << ratio) - 1;
	param->write_correct_mask_vg = (1 << ratio) - 1;
	param->read_correct_mask = (1 << rwcfg->mem_dq_per_read_dqs) - 1;
	param->write_correct_mask = (1 << rwcfg->mem_dq_per_write_dqs) - 1;
}

/**
 * set_rank_and_odt_mask() - Set Rank and ODT mask
 * @rank:	Rank mask
 * @odt_mode:	ODT mode, OFF or READ_WRITE
 *
 * Set Rank and ODT mask (On-Die Termination).
 */
static void set_rank_and_odt_mask(const u32 rank, const u32 odt_mode)
{
	u32 odt_mask_0 = 0;
	u32 odt_mask_1 = 0;
	u32 cs_and_odt_mask;

	if (odt_mode == RW_MGR_ODT_MODE_OFF) {
		odt_mask_0 = 0x0;
		odt_mask_1 = 0x0;
	} else {	/* RW_MGR_ODT_MODE_READ_WRITE */
		switch (rwcfg->mem_number_of_ranks) {
		case 1:	/* 1 Rank */
			/* Read: ODT = 0 ; Write: ODT = 1 */
			odt_mask_0 = 0x0;
			odt_mask_1 = 0x1;
			break;
		case 2:	/* 2 Ranks */
			if (rwcfg->mem_number_of_cs_per_dimm == 1) {
				/*
				 * - Dual-Slot, Single-Rank (1 CS per DIMM)
				 *   OR
				 * - RDIMM, 4 total CS (2 CS per DIMM, 2 DIMM)
				 *
				 * Since MEM_NUMBER_OF_RANKS is 2, they
				 * are both single rank with 2 CS each
				 * (special for RDIMM).
				 *
				 * Read: Turn on ODT on the opposite rank
				 * Write: Turn on ODT on all ranks
				 */
				odt_mask_0 = 0x3 & ~(1 << rank);
				odt_mask_1 = 0x3;
			} else {
				/*
				 * - Single-Slot, Dual-Rank (2 CS per DIMM)
				 *
				 * Read: Turn off ODT on all ranks
				 * Write: Turn on ODT on active rank
				 */
				odt_mask_0 = 0x0;
				odt_mask_1 = 0x3 & (1 << rank);
			}
			break;
		case 4:	/* 4 Ranks */
			/*
			 * Read:
			 * ----------+-----------------------+
			 *           |         ODT           |
			 * Read From +-----------------------+
			 *   Rank    |  3  |  2  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *     0     |  0  |  1  |  0  |  0  |
			 *     1     |  1  |  0  |  0  |  0  |
			 *     2     |  0  |  0  |  0  |  1  |
			 *     3     |  0  |  0  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *
			 * Write:
			 * ----------+-----------------------+
			 *           |         ODT           |
			 * Write To  +-----------------------+
			 *   Rank    |  3  |  2  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *     0     |  0  |  1  |  0  |  1  |
			 *     1     |  1  |  0  |  1  |  0  |
			 *     2     |  0  |  1  |  0  |  1  |
			 *     3     |  1  |  0  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 */
			switch (rank) {
			case 0:
				odt_mask_0 = 0x4;
				odt_mask_1 = 0x5;
				break;
			case 1:
				odt_mask_0 = 0x8;
				odt_mask_1 = 0xA;
				break;
			case 2:
				odt_mask_0 = 0x1;
				odt_mask_1 = 0x5;
				break;
			case 3:
				odt_mask_0 = 0x2;
				odt_mask_1 = 0xA;
				break;
			}
			break;
		}
	}

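	/*
	 * Pack the result into the RW manager word: bits [7:0] hold the
	 * active-low chip-select mask for this rank, bits [15:8] the read
	 * ODT mask and bits [23:16] the write ODT mask.
	 */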
	cs_and_odt_mask = (0xFF & ~(1 << rank)) |
			  ((0xFF & odt_mask_0) << 8) |
			  ((0xFF & odt_mask_1) << 16);
	writel(cs_and_odt_mask, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_SET_CS_AND_ODT_MASK_OFFSET);
}

/**
 * scc_mgr_set() - Set SCC Manager register
 * @off:	Base offset in SCC Manager space
 * @grp:	Read/Write group
 * @val:	Value to be set
 *
 * This function sets the SCC Manager (Scan Chain Control Manager) register.
 */
static void scc_mgr_set(u32 off, u32 grp, u32 val)
{
	writel(val, SDR_PHYGRP_SCCGRP_ADDRESS | off | (grp << 2));
}

/**
 * scc_mgr_initialize() - Initialize SCC Manager registers
 *
 * Initialize SCC Manager registers.
 */
static void scc_mgr_initialize(void)
{
	/*
	 * Clear register file for HPS. 16 (2^4) is the size of the
	 * full register file in the scc mgr:
	 *	RFILE_DEPTH = 1 + log2(MEM_DQ_PER_DQS + 1 + MEM_DM_PER_DQS +
	 *                             MEM_IF_READ_DQS_WIDTH - 1);
	 */
	int i;

	for (i = 0; i < 16; i++) {
		debug_cond(DLEVEL >= 1, "%s:%d: Clearing SCC RFILE index %u\n",
			   __func__, __LINE__, i);
		scc_mgr_set(SCC_MGR_HHP_RFILE_OFFSET, i, 0);
	}
}

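/*
 * Thin per-group/per-pin wrappers around scc_mgr_set() for the individual
 * phase and delay settings, and the scc_mgr_load_*() helpers that make the
 * SCC manager pick the new values up.
 */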
static void scc_mgr_set_dqdqs_output_phase(u32 write_group, u32 phase)
{
	scc_mgr_set(SCC_MGR_DQDQS_OUT_PHASE_OFFSET, write_group, phase);
}

static void scc_mgr_set_dqs_bus_in_delay(u32 read_group, u32 delay)
{
	scc_mgr_set(SCC_MGR_DQS_IN_DELAY_OFFSET, read_group, delay);
}

static void scc_mgr_set_dqs_en_phase(u32 read_group, u32 phase)
{
	scc_mgr_set(SCC_MGR_DQS_EN_PHASE_OFFSET, read_group, phase);
}

static void scc_mgr_set_dqs_en_delay(u32 read_group, u32 delay)
{
	scc_mgr_set(SCC_MGR_DQS_EN_DELAY_OFFSET, read_group, delay);
}

static void scc_mgr_set_dq_in_delay(u32 dq_in_group, u32 delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, dq_in_group, delay);
}

static void scc_mgr_set_dqs_io_in_delay(u32 delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, rwcfg->mem_dq_per_write_dqs,
		    delay);
}

static void scc_mgr_set_dm_in_delay(u32 dm, u32 delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET,
		    rwcfg->mem_dq_per_write_dqs + 1 + dm,
		    delay);
}

static void scc_mgr_set_dq_out1_delay(u32 dq_in_group, u32 delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, dq_in_group, delay);
}

static void scc_mgr_set_dqs_out1_delay(u32 delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, rwcfg->mem_dq_per_write_dqs,
		    delay);
}

static void scc_mgr_set_dm_out1_delay(u32 dm, u32 delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET,
		    rwcfg->mem_dq_per_write_dqs + 1 + dm,
		    delay);
}

/* load up dqs config settings */
static void scc_mgr_load_dqs(u32 dqs)
{
	writel(dqs, &sdr_scc_mgr->dqs_ena);
}

/* load up dqs io config settings */
static void scc_mgr_load_dqs_io(void)
{
	writel(0, &sdr_scc_mgr->dqs_io_ena);
}

/* load up dq config settings */
static void scc_mgr_load_dq(u32 dq_in_group)
{
	writel(dq_in_group, &sdr_scc_mgr->dq_ena);
}

/* load up dm config settings */
static void scc_mgr_load_dm(u32 dm)
{
	writel(dm, &sdr_scc_mgr->dm_ena);
}

/**
 * scc_mgr_set_all_ranks() - Set SCC Manager register for all ranks
 * @off:	Base offset in SCC Manager space
 * @grp:	Read/Write group
 * @val:	Value to be set
 * @update:	If non-zero, trigger SCC Manager update for all ranks
 *
 * This function sets the SCC Manager (Scan Chain Control Manager) register
 * and optionally triggers the SCC update for all ranks.
 */
static void scc_mgr_set_all_ranks(const u32 off, const u32 grp, const u32 val,
				  const int update)
{
	u32 r;

	for (r = 0; r < rwcfg->mem_number_of_ranks;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		scc_mgr_set(off, grp, val);

		if (update || (r == 0)) {
			writel(grp, &sdr_scc_mgr->dqs_ena);
			writel(0, &sdr_scc_mgr->update);
		}
	}
}

static void scc_mgr_set_dqs_en_phase_all_ranks(u32 read_group, u32 phase)
{
	/*
	 * USER although the h/w doesn't support different phases per
	 * shadow register, for simplicity our scc manager modeling
	 * keeps different phase settings per shadow reg, and it's
	 * important for us to keep them in sync to match h/w.
	 * For efficiency, the scan chain update should occur only
	 * once to sr0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_PHASE_OFFSET,
			      read_group, phase, 0);
}

static void scc_mgr_set_dqdqs_output_phase_all_ranks(u32 write_group,
						     u32 phase)
{
	/*
	 * USER although the h/w doesn't support different phases per
	 * shadow register, for simplicity our scc manager modeling
	 * keeps different phase settings per shadow reg, and it's
	 * important for us to keep them in sync to match h/w.
	 * For efficiency, the scan chain update should occur only
	 * once to sr0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQDQS_OUT_PHASE_OFFSET,
			      write_group, phase, 0);
}

static void scc_mgr_set_dqs_en_delay_all_ranks(u32 read_group,
					       u32 delay)
{
	/*
	 * In shadow register mode, the T11 settings are stored in
	 * registers in the core, which are updated by the DQS_ENA
	 * signals. Not issuing the SCC_MGR_UPD command allows us to
	 * save lots of rank switching overhead, by calling
	 * select_shadow_regs_for_update with update_scan_chains
	 * set to 0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_DELAY_OFFSET,
			      read_group, delay, 1);
}

/**
 * scc_mgr_set_oct_out1_delay() - Set OCT output delay
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * This function sets the OCT output delay in SCC manager.
 */
static void scc_mgr_set_oct_out1_delay(const u32 write_group, const u32 delay)
{
	const int ratio = rwcfg->mem_if_read_dqs_width /
			  rwcfg->mem_if_write_dqs_width;
	const int base = write_group * ratio;
	int i;
	/*
	 * Load the setting in the SCC manager
	 * Although OCT affects only write data, the OCT delay is controlled
	 * by the DQS logic block which is instantiated once per read group.
	 * For protocols where a write group consists of multiple read groups,
	 * the setting must be set multiple times.
	 */
	for (i = 0; i < ratio; i++)
		scc_mgr_set(SCC_MGR_OCT_OUT1_DELAY_OFFSET, base + i, delay);
}

/**
 * scc_mgr_set_hhp_extras() - Set HHP extras.
 *
 * Load the fixed setting in the SCC manager HHP extras.
 */
static void scc_mgr_set_hhp_extras(void)
{
	/*
	 * Load the fixed setting in the SCC manager
	 * bits: 0:0 = 1'b1	- DQS bypass
	 * bits: 1:1 = 1'b1	- DQ bypass
	 * bits: 4:2 = 3'b001	- rfifo_mode
	 * bits: 6:5 = 2'b01	- rfifo clock_select
	 * bits: 7:7 = 1'b0	- separate gating from ungating setting
	 * bits: 8:8 = 1'b0	- separate OE from Output delay setting
	 */
	const u32 value = (0 << 8) | (0 << 7) | (1 << 5) |
			  (1 << 2) | (1 << 1) | (1 << 0);
	const u32 addr = SDR_PHYGRP_SCCGRP_ADDRESS |
			 SCC_MGR_HHP_GLOBALS_OFFSET |
			 SCC_MGR_HHP_EXTRAS_OFFSET;

	debug_cond(DLEVEL >= 1, "%s:%d Setting HHP Extras\n",
		   __func__, __LINE__);
	writel(value, addr);
	debug_cond(DLEVEL >= 1, "%s:%d Done Setting HHP Extras\n",
		   __func__, __LINE__);
}

/**
 * scc_mgr_zero_all() - Zero all DQS config
 *
 * Zero all DQS config.
 */
static void scc_mgr_zero_all(void)
{
	int i, r;

	/*
	 * USER Zero all DQS config settings, across all groups and all
	 * shadow registers
	 */
	for (r = 0; r < rwcfg->mem_number_of_ranks;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		for (i = 0; i < rwcfg->mem_if_read_dqs_width; i++) {
			/*
			 * The phases actually don't exist on a per-rank basis,
			 * but there's no harm updating them several times, so
			 * let's keep the code simple.
			 */
			scc_mgr_set_dqs_bus_in_delay(i, iocfg->dqs_in_reserve);
			scc_mgr_set_dqs_en_phase(i, 0);
			scc_mgr_set_dqs_en_delay(i, 0);
		}

		for (i = 0; i < rwcfg->mem_if_write_dqs_width; i++) {
			scc_mgr_set_dqdqs_output_phase(i, 0);
			/* Arria V/Cyclone V don't have out2. */
			scc_mgr_set_oct_out1_delay(i, iocfg->dqs_out_reserve);
		}
	}

	/* Multicast to all DQS group enables. */
	writel(0xff, &sdr_scc_mgr->dqs_ena);
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_set_bypass_mode() - Set bypass mode and trigger SCC update
 * @write_group:	Write group
 *
 * Set bypass mode and trigger SCC update.
 */
static void scc_set_bypass_mode(const u32 write_group)
{
	/* Multicast to all DQ enables. */
	writel(0xff, &sdr_scc_mgr->dq_ena);
	writel(0xff, &sdr_scc_mgr->dm_ena);

	/* Update current DQS IO enable. */
	writel(0, &sdr_scc_mgr->dqs_io_ena);

	/* Update the DQS logic. */
	writel(write_group, &sdr_scc_mgr->dqs_ena);

	/* Hit update. */
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_mgr_load_dqs_for_write_group() - Load DQS settings for Write Group
 * @write_group:	Write group
 *
 * Load DQS settings for Write Group, do not trigger SCC update.
 */
static void scc_mgr_load_dqs_for_write_group(const u32 write_group)
{
	const int ratio = rwcfg->mem_if_read_dqs_width /
			  rwcfg->mem_if_write_dqs_width;
	const int base = write_group * ratio;
	int i;
	/*
	 * Load the setting in the SCC manager
	 * Although OCT affects only write data, the OCT delay is controlled
	 * by the DQS logic block which is instantiated once per read group.
	 * For protocols where a write group consists of multiple read groups,
	 * the setting must be set multiple times.
	 */
	for (i = 0; i < ratio; i++)
		writel(base + i, &sdr_scc_mgr->dqs_ena);
}

/**
 * scc_mgr_zero_group() - Zero all configs for a group
 *
 * Zero DQ, DM, DQS and OCT configs for a group.
 */
static void scc_mgr_zero_group(const u32 write_group, const int out_only)
{
	int i, r;

	for (r = 0; r < rwcfg->mem_number_of_ranks;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		/* Zero all DQ config settings. */
		for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++) {
			scc_mgr_set_dq_out1_delay(i, 0);
			if (!out_only)
				scc_mgr_set_dq_in_delay(i, 0);
		}

		/* Multicast to all DQ enables. */
		writel(0xff, &sdr_scc_mgr->dq_ena);

		/* Zero all DM config settings. */
		for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++) {
			if (!out_only)
				scc_mgr_set_dm_in_delay(i, 0);
			scc_mgr_set_dm_out1_delay(i, 0);
		}

		/* Multicast to all DM enables. */
		writel(0xff, &sdr_scc_mgr->dm_ena);

		/* Zero all DQS IO settings. */
		if (!out_only)
			scc_mgr_set_dqs_io_in_delay(0);

		/* Arria V/Cyclone V don't have out2. */
		scc_mgr_set_dqs_out1_delay(iocfg->dqs_out_reserve);
		scc_mgr_set_oct_out1_delay(write_group, iocfg->dqs_out_reserve);
		scc_mgr_load_dqs_for_write_group(write_group);

		/* Multicast to all DQS IO enables (only 1 in total). */
		writel(0, &sdr_scc_mgr->dqs_io_ena);

		/* Hit update to zero everything. */
		writel(0, &sdr_scc_mgr->update);
	}
}

/*
 * apply and load a particular input delay for the DQ pins in a group
 * group_bgn is the index of the first dq pin (in the write group)
 */
static void scc_mgr_apply_group_dq_in_delay(u32 group_bgn, u32 delay)
{
	u32 i, p;

	for (i = 0, p = group_bgn; i < rwcfg->mem_dq_per_read_dqs; i++, p++) {
		scc_mgr_set_dq_in_delay(p, delay);
		scc_mgr_load_dq(p);
	}
}

/**
 * scc_mgr_apply_group_dq_out1_delay() - Apply and load an output delay for the DQ pins in a group
 * @delay:		Delay value
 *
 * Apply and load a particular output delay for the DQ pins in a group.
 */
static void scc_mgr_apply_group_dq_out1_delay(const u32 delay)
{
	int i;

	for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++) {
		scc_mgr_set_dq_out1_delay(i, delay);
		scc_mgr_load_dq(i);
	}
}

/* apply and load a particular output delay for the DM pins in a group */
static void scc_mgr_apply_group_dm_out1_delay(u32 delay1)
{
	u32 i;

	for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++) {
		scc_mgr_set_dm_out1_delay(i, delay1);
		scc_mgr_load_dm(i);
	}
}

/* apply and load delay on both DQS and OCT out1 */
static void scc_mgr_apply_group_dqs_io_and_oct_out1(u32 write_group,
						    u32 delay)
{
	scc_mgr_set_dqs_out1_delay(delay);
	scc_mgr_load_dqs_io();

	scc_mgr_set_oct_out1_delay(write_group, delay);
	scc_mgr_load_dqs_for_write_group(write_group);
}

/**
 * scc_mgr_apply_group_all_out_delay_add() - Apply a delay to the entire output side: DQ, DM, DQS, OCT
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * Apply a delay to the entire output side: DQ, DM, DQS, OCT.
 */
static void scc_mgr_apply_group_all_out_delay_add(const u32 write_group,
						  const u32 delay)
{
	u32 i, new_delay;

	/* DQ shift */
	for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++)
		scc_mgr_load_dq(i);

	/* DM shift */
	for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++)
		scc_mgr_load_dm(i);

	/* DQS shift */
	new_delay = READ_SCC_DQS_IO_OUT2_DELAY + delay;
	if (new_delay > iocfg->io_out2_delay_max) {
		debug_cond(DLEVEL >= 1,
			   "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n",
			   __func__, __LINE__, write_group, delay, new_delay,
			   iocfg->io_out2_delay_max,
			   new_delay - iocfg->io_out2_delay_max);
		new_delay -= iocfg->io_out2_delay_max;
		scc_mgr_set_dqs_out1_delay(new_delay);
	}

	scc_mgr_load_dqs_io();

	/* OCT shift */
	new_delay = READ_SCC_OCT_OUT2_DELAY + delay;
	if (new_delay > iocfg->io_out2_delay_max) {
		debug_cond(DLEVEL >= 1,
			   "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n",
			   __func__, __LINE__, write_group, delay,
			   new_delay, iocfg->io_out2_delay_max,
			   new_delay - iocfg->io_out2_delay_max);
		new_delay -= iocfg->io_out2_delay_max;
		scc_mgr_set_oct_out1_delay(write_group, new_delay);
	}

	scc_mgr_load_dqs_for_write_group(write_group);
}

/**
 * scc_mgr_apply_group_all_out_delay_add_all_ranks() - Apply a delay to the entire output side to all ranks
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * Apply a delay to the entire output side (DQ, DM, DQS, OCT) to all ranks.
 */
static void
scc_mgr_apply_group_all_out_delay_add_all_ranks(const u32 write_group,
						const u32 delay)
{
	int r;

	for (r = 0; r < rwcfg->mem_number_of_ranks;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		scc_mgr_apply_group_all_out_delay_add(write_group, delay);
		writel(0, &sdr_scc_mgr->update);
	}
}

/**
 * set_jump_as_return() - Return instruction optimization
 *
 * Optimization used to recover some slots in the ddr3 inst_rom; it could
 * be applied to other protocols as well if we wanted to.
 */
static void set_jump_as_return(void)
{
	/*
	 * To save space, we replace return with a jump to a special shared
	 * RETURN instruction, and set the counter to a large value so that
	 * we always jump.
	 */
	writel(0xff, &sdr_rw_load_mgr_regs->load_cntr0);
	writel(rwcfg->rreturn, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
}

/**
 * delay_for_n_mem_clocks() - Delay for N memory clocks
 * @clocks:	Length of the delay
 *
 * Delay for N memory clocks.
 */
static void delay_for_n_mem_clocks(const u32 clocks)
{
	u32 afi_clocks;
	u16 c_loop;
	u8 inner;
	u8 outer;

	debug("%s:%d: clocks=%u ... start\n", __func__, __LINE__, clocks);

	/* Scale (rounding up) to get afi clocks. */
	afi_clocks = DIV_ROUND_UP(clocks, misccfg->afi_rate_ratio);
	if (afi_clocks)	/* Temporary underflow protection */
		afi_clocks--;

	/*
	 * Note, we don't bother accounting for being off a little
	 * bit because of a few extra instructions in outer loops.
	 * Note, the loops have a test at the end, and do the test
	 * before the decrement, and so always perform the loop
	 * one more time than the counter value.
	 */
	c_loop = afi_clocks >> 16;
	outer = c_loop ? 0xff : (afi_clocks >> 8);
	inner = outer ? 0xff : afi_clocks;
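	/*
	 * Example: afi_clocks = 0x1234 yields c_loop = 0, outer = 0x12 and
	 * inner = 0xff, while afi_clocks < 0x100 leaves only the inner
	 * counter in use.
	 */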

	/*
	 * rom instructions are structured as follows:
	 *
	 *    IDLE_LOOP2: jnz cntr0, TARGET_A
	 *    IDLE_LOOP1: jnz cntr1, TARGET_B
	 *                return
	 *
	 * so, when doing nested loops, TARGET_A is set to IDLE_LOOP2, and
	 * TARGET_B is set to IDLE_LOOP2 as well
	 *
	 * if we have no outer loop, though, then we can use IDLE_LOOP1 only,
	 * and set TARGET_B to IDLE_LOOP1 and we skip IDLE_LOOP2 entirely
	 *
	 * a little confusing, but it helps save precious space in the inst_rom
	 * and sequencer rom and keeps the delays more accurate and reduces
	 * overhead
	 */
	if (afi_clocks < 0x100) {
		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner),
		       &sdr_rw_load_mgr_regs->load_cntr1);

		writel(rwcfg->idle_loop1,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		writel(rwcfg->idle_loop1, SDR_PHYGRP_RWMGRGRP_ADDRESS |
					  RW_MGR_RUN_SINGLE_GROUP_OFFSET);
	} else {
		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner),
		       &sdr_rw_load_mgr_regs->load_cntr0);

		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(outer),
		       &sdr_rw_load_mgr_regs->load_cntr1);

		writel(rwcfg->idle_loop2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);

		writel(rwcfg->idle_loop2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		do {
			writel(rwcfg->idle_loop2,
			       SDR_PHYGRP_RWMGRGRP_ADDRESS |
			       RW_MGR_RUN_SINGLE_GROUP_OFFSET);
		} while (c_loop-- != 0);
	}
	debug("%s:%d clocks=%u ... end\n", __func__, __LINE__, clocks);
}

/**
 * rw_mgr_mem_init_load_regs() - Load instruction registers
 * @cntr0:	Counter 0 value
 * @cntr1:	Counter 1 value
 * @cntr2:	Counter 2 value
 * @jump:	Jump instruction value
 *
 * Load instruction registers.
 */
static void rw_mgr_mem_init_load_regs(u32 cntr0, u32 cntr1, u32 cntr2, u32 jump)
{
	u32 grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
			   RW_MGR_RUN_SINGLE_GROUP_OFFSET;

	/* Load counters */
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr0),
	       &sdr_rw_load_mgr_regs->load_cntr0);
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr1),
	       &sdr_rw_load_mgr_regs->load_cntr1);
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr2),
	       &sdr_rw_load_mgr_regs->load_cntr2);

	/* Load jump address */
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add1);
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add2);

	/* Execute count instruction */
	writel(jump, grpaddr);
}

/**
 * rw_mgr_mem_load_user() - Load user calibration values
 * @fin1:	Final instruction 1
 * @fin2:	Final instruction 2
 * @precharge:	If 1, precharge the banks at the end
 *
 * Load user calibration values and optionally precharge the banks.
 */
static void rw_mgr_mem_load_user(const u32 fin1, const u32 fin2,
				 const int precharge)
{
	u32 grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
		      RW_MGR_RUN_SINGLE_GROUP_OFFSET;
	u32 r;

	for (r = 0; r < rwcfg->mem_number_of_ranks; r++) {
		/* set rank */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF);

		/* precharge all banks ... */
		if (precharge)
			writel(rwcfg->precharge_all, grpaddr);

		/*
		 * USER Use mirrored commands for odd ranks if address
		 * mirroring is on
		 */
		if ((rwcfg->mem_address_mirroring >> r) & 0x1) {
			set_jump_as_return();
			writel(rwcfg->mrs2_mirr, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(rwcfg->mrs3_mirr, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(rwcfg->mrs1_mirr, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(fin1, grpaddr);
		} else {
			set_jump_as_return();
			writel(rwcfg->mrs2, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(rwcfg->mrs3, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(rwcfg->mrs1, grpaddr);
			set_jump_as_return();
			writel(fin2, grpaddr);
		}

		if (precharge)
			continue;

		set_jump_as_return();
		writel(rwcfg->zqcl, grpaddr);

		/* tZQinit = tDLLK = 512 ck cycles */
		delay_for_n_mem_clocks(512);
	}
}

/**
 * rw_mgr_mem_initialize() - Initialize RW Manager
 *
 * Initialize RW Manager.
 */
static void rw_mgr_mem_initialize(void)
{
	debug("%s:%d\n", __func__, __LINE__);

	/* The reset / cke part of initialization is broadcasted to all ranks */
	writel(RW_MGR_RANK_ALL, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_SET_CS_AND_ODT_MASK_OFFSET);

	/*
	 * Here's how you load register for a loop
	 * Counters are located @ 0x800
	 * Jump address are located @ 0xC00
	 * For both, registers 0 to 3 are selected using bits 3 and 2, like
	 * in 0x800, 0x804, 0x808, 0x80C and 0xC00, 0xC04, 0xC08, 0xC0C
	 * I know this ain't pretty, but Avalon bus throws away the 2 least
	 * significant bits
	 */

	/* Start with memory RESET activated */

	/* tINIT = 200us */

	/*
	 * 200us @ 266MHz (3.75 ns) ~ 54000 clock cycles
	 * If a and b are the numbers of iterations of the 2 nested loops,
	 * it takes the following number of cycles to complete the operation:
	 * number_of_cycles = ((2 + n) * a + 2) * b
	 * where n is the number of instructions in the inner loop
	 * One possible solution is n = 0, a = 256, b = 106 => a = FF,
	 * b = 6A
	 */
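	/*
	 * Sanity check of the chosen values: with n = 0, a = 256, b = 106,
	 * ((2 + 0) * 256 + 2) * 106 = 54484 cycles, which covers the
	 * ~54000 cycles required for tINIT.
	 */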
	rw_mgr_mem_init_load_regs(misccfg->tinit_cntr0_val,
				  misccfg->tinit_cntr1_val,
				  misccfg->tinit_cntr2_val,
				  rwcfg->init_reset_0_cke_0);

	/* Indicate that memory is stable. */
	writel(1, &phy_mgr_cfg->reset_mem_stbl);

	/*
	 * transition the RESET to high
	 * Wait for 500us
	 */

	/*
	 * 500us @ 266MHz (3.75 ns) ~ 134000 clock cycles
	 * If a and b are the numbers of iterations of the 2 nested loops,
	 * it takes the following number of cycles to complete the operation
	 * number_of_cycles = ((2 + n) * a + 2) * b
	 * where n is the number of instructions in the inner loop
	 * One possible solution is n = 2, a = 131, b = 256 => a = 83,
	 * b = FF
	 */
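	/*
	 * Sanity check of the chosen values: with n = 2, a = 131, b = 256,
	 * ((2 + 2) * 131 + 2) * 256 = 134656 cycles, which covers the
	 * ~134000 cycles required for the 500us wait.
	 */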
	rw_mgr_mem_init_load_regs(misccfg->treset_cntr0_val,
				  misccfg->treset_cntr1_val,
				  misccfg->treset_cntr2_val,
				  rwcfg->init_reset_1_cke_0);

	/* Bring up clock enable. */

	/* tXRP < 250 ck cycles */
	delay_for_n_mem_clocks(250);

	rw_mgr_mem_load_user(rwcfg->mrs0_dll_reset_mirr, rwcfg->mrs0_dll_reset,
			     0);
}

/**
 * rw_mgr_mem_handoff() - Hand off the memory to user
 *
 * At the end of calibration we have to program the user settings in
 * and hand off the memory to the user.
 */
static void rw_mgr_mem_handoff(void)
{
	rw_mgr_mem_load_user(rwcfg->mrs0_user_mirr, rwcfg->mrs0_user, 1);
	/*
	 * Need to wait tMOD (12CK or 15ns) time before issuing other
	 * commands, but we will have plenty of NIOS cycles before actual
	 * handoff so it's okay.
	 */
}

/**
 * rw_mgr_mem_calibrate_write_test_issue() - Issue write test command
 * @group:	Write Group
 * @test_dm:	Use DM
 *
 * Issue write test command. Two variants are provided, one that just tests
 * a write pattern and another that tests datamask functionality.
 */
static void rw_mgr_mem_calibrate_write_test_issue(u32 group,
						  u32 test_dm)
{
	const u32 quick_write_mode =
		(STATIC_CALIB_STEPS & CALIB_SKIP_WRITES) &&
		misccfg->enable_super_quick_calibration;
	u32 mcc_instruction;
	u32 rw_wl_nop_cycles;

1037ad64769cSMarek Vasut 	/*
1038ad64769cSMarek Vasut 	 * Set counter and jump addresses for the right
1039ad64769cSMarek Vasut 	 * number of NOP cycles.
1040ad64769cSMarek Vasut 	 * The number of supported NOP cycles can range from -1 to infinity
1041ad64769cSMarek Vasut 	 * Three different cases are handled:
1042ad64769cSMarek Vasut 	 *
1043ad64769cSMarek Vasut 	 * 1. For a number of NOP cycles greater than 0, the RW Mgr looping
1044ad64769cSMarek Vasut 	 *    mechanism will be used to insert the right number of NOPs
1045ad64769cSMarek Vasut 	 *
1046ad64769cSMarek Vasut 	 * 2. For a number of NOP cycles equals to 0, the micro-instruction
1047ad64769cSMarek Vasut 	 *    issuing the write command will jump straight to the
1048ad64769cSMarek Vasut 	 *    micro-instruction that turns on DQS (for DDRx), or outputs write
1049ad64769cSMarek Vasut 	 *    data (for RLD), skipping the NOP micro-instruction altogether
1051ad64769cSMarek Vasut 	 *
1052ad64769cSMarek Vasut 	 * 3. A number of NOP cycles equal to -1 indicates that DQS must be
1053ad64769cSMarek Vasut 	 *    turned on in the same micro-instruction that issues the write
1054ad64769cSMarek Vasut 	 *    command. Then we need
1055ad64769cSMarek Vasut 	 *    to directly jump to the micro-instruction that sends out the data
1056ad64769cSMarek Vasut 	 *
1057ad64769cSMarek Vasut 	 * NOTE: Implementing this mechanism uses 2 RW Mgr jump-counters
1058ad64769cSMarek Vasut 	 *       (2 and 3). One jump-counter (0) is used to perform multiple
1059ad64769cSMarek Vasut 	 *       write-read operations.
1060ad64769cSMarek Vasut 	 *       That leaves one counter to issue this command in "multiple-group" mode.
1061ad64769cSMarek Vasut 	 */
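	/*
	 * Summary of the three cases as programmed below:
	 *   rw_wl_nop_cycles == -1: CNTR2 = 0xFF, jump straight to the
	 *                           instruction that sends out the data
	 *   rw_wl_nop_cycles ==  0: CNTR2 = 0xFF, jump straight to the
	 *                           DQS enable instruction
	 *   rw_wl_nop_cycles  >  0: CNTR2 = 0, CNTR3 = rw_wl_nop_cycles - 1,
	 *                           looping over the NOP instruction
	 */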
1062ad64769cSMarek Vasut 
1063ad64769cSMarek Vasut 	rw_wl_nop_cycles = gbl->rw_wl_nop_cycles;
1064ad64769cSMarek Vasut 
1065ad64769cSMarek Vasut 	if (rw_wl_nop_cycles == -1) {
1066ad64769cSMarek Vasut 		/*
1067ad64769cSMarek Vasut 		 * CNTR 2 - We want to execute the special write operation that
1068ad64769cSMarek Vasut 		 * turns on DQS right away and then skip directly to the
1069ad64769cSMarek Vasut 		 * instruction that sends out the data. We set the counter to a
1070ad64769cSMarek Vasut 		 * large number so that the jump is always taken.
1071ad64769cSMarek Vasut 		 */
1072ad64769cSMarek Vasut 		writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2);
1073ad64769cSMarek Vasut 
1074ad64769cSMarek Vasut 		/* CNTR 3 - Not used */
1075ad64769cSMarek Vasut 		if (test_dm) {
10761fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_dm_bank_0_wl_1;
10771fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_dm_bank_0_data,
1078ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
10791fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_dm_bank_0_nop,
1080ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
1081ad64769cSMarek Vasut 		} else {
10821fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_bank_0_wl_1;
10831fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_bank_0_data,
1084ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
10851fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_bank_0_nop,
1086ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
1087ad64769cSMarek Vasut 		}
1088ad64769cSMarek Vasut 	} else if (rw_wl_nop_cycles == 0) {
1089ad64769cSMarek Vasut 		/*
1090ad64769cSMarek Vasut 		 * CNTR 2 - We want to skip the NOP operation and go straight
1091ad64769cSMarek Vasut 		 * to the DQS enable instruction. We set the counter to a large
1092ad64769cSMarek Vasut 		 * number so that the jump is always taken.
1093ad64769cSMarek Vasut 		 */
1094ad64769cSMarek Vasut 		writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2);
1095ad64769cSMarek Vasut 
1096ad64769cSMarek Vasut 		/* CNTR 3 - Not used */
1097ad64769cSMarek Vasut 		if (test_dm) {
10981fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_dm_bank_0;
10991fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_dm_bank_0_dqs,
1100ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
1101ad64769cSMarek Vasut 		} else {
11021fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_bank_0;
11031fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_bank_0_dqs,
1104ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
1105ad64769cSMarek Vasut 		}
1106ad64769cSMarek Vasut 	} else {
1107ad64769cSMarek Vasut 		/*
1108ad64769cSMarek Vasut 		 * CNTR 2 - In this case we want to execute the next instruction
1109ad64769cSMarek Vasut 		 * and NOT take the jump. So we set the counter to 0. The jump
1110ad64769cSMarek Vasut 		 * address doesn't matter in this case.
1111ad64769cSMarek Vasut 		 */
1112ad64769cSMarek Vasut 		writel(0x0, &sdr_rw_load_mgr_regs->load_cntr2);
1113ad64769cSMarek Vasut 		writel(0x0, &sdr_rw_load_jump_mgr_regs->load_jump_add2);
1114ad64769cSMarek Vasut 
1115ad64769cSMarek Vasut 		/*
1116ad64769cSMarek Vasut 		 * CNTR 3 - Set the nop counter to the number of cycles we
1117ad64769cSMarek Vasut 		 * need to loop for, minus 1.
1118ad64769cSMarek Vasut 		 */
1119ad64769cSMarek Vasut 		writel(rw_wl_nop_cycles - 1, &sdr_rw_load_mgr_regs->load_cntr3);
1120ad64769cSMarek Vasut 		if (test_dm) {
11211fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_dm_bank_0;
11221fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_dm_bank_0_nop,
1123ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
1124ad64769cSMarek Vasut 		} else {
11251fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_bank_0;
11261fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_bank_0_nop,
1127ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
1128ad64769cSMarek Vasut 		}
1129ad64769cSMarek Vasut 	}
1130ad64769cSMarek Vasut 
1131ad64769cSMarek Vasut 	writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
1132ad64769cSMarek Vasut 		  RW_MGR_RESET_READ_DATAPATH_OFFSET);
1133ad64769cSMarek Vasut 
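	/*
	 * CNTR 0 - Number of back-to-back write-read iterations: 0x08 (8)
	 * in super-quick calibration mode, 0x40 (64) otherwise.
	 */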
1134ad64769cSMarek Vasut 	if (quick_write_mode)
1135ad64769cSMarek Vasut 		writel(0x08, &sdr_rw_load_mgr_regs->load_cntr0);
1136ad64769cSMarek Vasut 	else
1137ad64769cSMarek Vasut 		writel(0x40, &sdr_rw_load_mgr_regs->load_cntr0);
1138ad64769cSMarek Vasut 
1139ad64769cSMarek Vasut 	writel(mcc_instruction, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
1140ad64769cSMarek Vasut 
1141ad64769cSMarek Vasut 	/*
1142ad64769cSMarek Vasut 	 * CNTR 1 - This is used to ensure enough time elapses
1143ad64769cSMarek Vasut 	 * for read data to come back.
1144ad64769cSMarek Vasut 	 */
1145ad64769cSMarek Vasut 	writel(0x30, &sdr_rw_load_mgr_regs->load_cntr1);
1146ad64769cSMarek Vasut 
1147ad64769cSMarek Vasut 	if (test_dm) {
11481fa0c8c4SMarek Vasut 		writel(rwcfg->lfsr_wr_rd_dm_bank_0_wait,
1149ad64769cSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);
1150ad64769cSMarek Vasut 	} else {
11511fa0c8c4SMarek Vasut 		writel(rwcfg->lfsr_wr_rd_bank_0_wait,
1152ad64769cSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);
1153ad64769cSMarek Vasut 	}
1154ad64769cSMarek Vasut 
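	/*
	 * Kick off the selected sequence. Each group has its own word-wide
	 * slot in the run-single-group region, hence the (group << 2) byte
	 * offset.
	 */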
11558371c2eeSMarek Vasut 	writel(mcc_instruction, (SDR_PHYGRP_RWMGRGRP_ADDRESS |
11568371c2eeSMarek Vasut 				RW_MGR_RUN_SINGLE_GROUP_OFFSET) +
11578371c2eeSMarek Vasut 				(group << 2));
1158ad64769cSMarek Vasut }
1159ad64769cSMarek Vasut 
11604a82854bSMarek Vasut /**
11614a82854bSMarek Vasut  * rw_mgr_mem_calibrate_write_test() - Test writes, check for single/multiple pass
11624a82854bSMarek Vasut  * @rank_bgn:		Rank number
11634a82854bSMarek Vasut  * @write_group:	Write Group
11644a82854bSMarek Vasut  * @use_dm:		Use DM
11654a82854bSMarek Vasut  * @all_correct:	All bits must be correct in the mask
11664a82854bSMarek Vasut  * @bit_chk:		Resulting bit mask after the test
11674a82854bSMarek Vasut  * @all_ranks:		Test all ranks
11684a82854bSMarek Vasut  *
11694a82854bSMarek Vasut  * Test writes; can check for a single-bit or multiple-bit pass.
11704a82854bSMarek Vasut  */
1171b9452ea0SMarek Vasut static int
1172b9452ea0SMarek Vasut rw_mgr_mem_calibrate_write_test(const u32 rank_bgn, const u32 write_group,
1173b9452ea0SMarek Vasut 				const u32 use_dm, const u32 all_correct,
1174b9452ea0SMarek Vasut 				u32 *bit_chk, const u32 all_ranks)
1175ad64769cSMarek Vasut {
1176b9452ea0SMarek Vasut 	const u32 rank_end = all_ranks ?
11771fa0c8c4SMarek Vasut 				rwcfg->mem_number_of_ranks :
1178ad64769cSMarek Vasut 				(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
11791fa0c8c4SMarek Vasut 	const u32 shift_ratio = rwcfg->mem_dq_per_write_dqs /
11801fa0c8c4SMarek Vasut 				rwcfg->mem_virtual_groups_per_write_dqs;
1181b9452ea0SMarek Vasut 	const u32 correct_mask_vg = param->write_correct_mask_vg;
1182b9452ea0SMarek Vasut 
1183b9452ea0SMarek Vasut 	u32 tmp_bit_chk, base_rw_mgr;
1184b9452ea0SMarek Vasut 	int vg, r;
1185ad64769cSMarek Vasut 
1186ad64769cSMarek Vasut 	*bit_chk = param->write_correct_mask;
1187ad64769cSMarek Vasut 
1188ad64769cSMarek Vasut 	for (r = rank_bgn; r < rank_end; r++) {
1189b9452ea0SMarek Vasut 		/* Set rank */
1190ad64769cSMarek Vasut 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
1191ad64769cSMarek Vasut 
1192ad64769cSMarek Vasut 		tmp_bit_chk = 0;
11931fa0c8c4SMarek Vasut 		for (vg = rwcfg->mem_virtual_groups_per_write_dqs - 1;
1194b9452ea0SMarek Vasut 		     vg >= 0; vg--) {
1195b9452ea0SMarek Vasut 			/* Reset the FIFOs to get pointers to known state. */
1196ad64769cSMarek Vasut 			writel(0, &phy_mgr_cmd->fifo_reset);
1197ad64769cSMarek Vasut 
1198b9452ea0SMarek Vasut 			rw_mgr_mem_calibrate_write_test_issue(
1199b9452ea0SMarek Vasut 				write_group *
12001fa0c8c4SMarek Vasut 				rwcfg->mem_virtual_groups_per_write_dqs + vg,
1201ad64769cSMarek Vasut 				use_dm);
1202ad64769cSMarek Vasut 
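			/*
			 * Collect this virtual group's result: the RW manager
			 * status has a bit set for each failing DQ, so invert
			 * it, mask off the valid bits and shift them into the
			 * accumulator.
			 */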
1203b9452ea0SMarek Vasut 			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
1204b9452ea0SMarek Vasut 			tmp_bit_chk <<= shift_ratio;
1205b9452ea0SMarek Vasut 			tmp_bit_chk |= (correct_mask_vg & ~(base_rw_mgr));
1206ad64769cSMarek Vasut 		}
1207b9452ea0SMarek Vasut 
1208ad64769cSMarek Vasut 		*bit_chk &= tmp_bit_chk;
1209ad64769cSMarek Vasut 	}
1210ad64769cSMarek Vasut 
1211ad64769cSMarek Vasut 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
1212b9452ea0SMarek Vasut 	if (all_correct) {
1213ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
1214b9452ea0SMarek Vasut 			   "write_test(%u,%u,ALL) : %u == %u => %i\n",
1215b9452ea0SMarek Vasut 			   write_group, use_dm, *bit_chk,
1216b9452ea0SMarek Vasut 			   param->write_correct_mask,
1217b9452ea0SMarek Vasut 			   *bit_chk == param->write_correct_mask);
1218ad64769cSMarek Vasut 		return *bit_chk == param->write_correct_mask;
1219ad64769cSMarek Vasut 	} else {
1220ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
1221b9452ea0SMarek Vasut 			   "write_test(%u,%u,ONE) : %u != %i => %i\n",
1222b9452ea0SMarek Vasut 			   write_group, use_dm, *bit_chk, 0, *bit_chk != 0);
1223ad64769cSMarek Vasut 		return *bit_chk != 0x00;
1224ad64769cSMarek Vasut 	}
1225ad64769cSMarek Vasut }
1226ad64769cSMarek Vasut 
1227d844c7d4SMarek Vasut /**
1228d844c7d4SMarek Vasut  * rw_mgr_mem_calibrate_read_test_patterns() - Read back test patterns
1229d844c7d4SMarek Vasut  * @rank_bgn:	Rank number
1230d844c7d4SMarek Vasut  * @group:	Read/Write Group
1231d844c7d4SMarek Vasut  * @all_ranks:	Test all ranks
1232d844c7d4SMarek Vasut  *
1233d844c7d4SMarek Vasut  * Performs a guaranteed read on the patterns we are going to use during a
1234d844c7d4SMarek Vasut  * read test to ensure memory works.
12353da42859SDinh Nguyen  */
1236d844c7d4SMarek Vasut static int
1237d844c7d4SMarek Vasut rw_mgr_mem_calibrate_read_test_patterns(const u32 rank_bgn, const u32 group,
1238d844c7d4SMarek Vasut 					const u32 all_ranks)
12393da42859SDinh Nguyen {
1240d844c7d4SMarek Vasut 	const u32 addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
1241d844c7d4SMarek Vasut 			 RW_MGR_RUN_SINGLE_GROUP_OFFSET;
1242d844c7d4SMarek Vasut 	const u32 addr_offset =
12431fa0c8c4SMarek Vasut 			 (group * rwcfg->mem_virtual_groups_per_read_dqs) << 2;
1244d844c7d4SMarek Vasut 	const u32 rank_end = all_ranks ?
12451fa0c8c4SMarek Vasut 				rwcfg->mem_number_of_ranks :
12463da42859SDinh Nguyen 				(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
12471fa0c8c4SMarek Vasut 	const u32 shift_ratio = rwcfg->mem_dq_per_read_dqs /
12481fa0c8c4SMarek Vasut 				rwcfg->mem_virtual_groups_per_read_dqs;
1249d844c7d4SMarek Vasut 	const u32 correct_mask_vg = param->read_correct_mask_vg;
12503da42859SDinh Nguyen 
1251d844c7d4SMarek Vasut 	u32 tmp_bit_chk, base_rw_mgr, bit_chk;
1252d844c7d4SMarek Vasut 	int vg, r;
1253d844c7d4SMarek Vasut 	int ret = 0;
1254d844c7d4SMarek Vasut 
1255d844c7d4SMarek Vasut 	bit_chk = param->read_correct_mask;
12563da42859SDinh Nguyen 
12573da42859SDinh Nguyen 	for (r = rank_bgn; r < rank_end; r++) {
1258d844c7d4SMarek Vasut 		/* Set rank */
12593da42859SDinh Nguyen 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
12603da42859SDinh Nguyen 
12613da42859SDinh Nguyen 		/* Load up a constant burst of read commands */
12621273dd9eSMarek Vasut 		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0);
12631fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_read,
12641273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);
12653da42859SDinh Nguyen 
12661273dd9eSMarek Vasut 		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1);
12671fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_read_cont,
12681273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);
12693da42859SDinh Nguyen 
12703da42859SDinh Nguyen 		tmp_bit_chk = 0;
12711fa0c8c4SMarek Vasut 		for (vg = rwcfg->mem_virtual_groups_per_read_dqs - 1;
1272d844c7d4SMarek Vasut 		     vg >= 0; vg--) {
1273d844c7d4SMarek Vasut 			/* Reset the FIFOs to get pointers to known state. */
12741273dd9eSMarek Vasut 			writel(0, &phy_mgr_cmd->fifo_reset);
12751273dd9eSMarek Vasut 			writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
12761273dd9eSMarek Vasut 				  RW_MGR_RESET_READ_DATAPATH_OFFSET);
12771fa0c8c4SMarek Vasut 			writel(rwcfg->guaranteed_read,
1278d844c7d4SMarek Vasut 			       addr + addr_offset + (vg << 2));
12793da42859SDinh Nguyen 
12801273dd9eSMarek Vasut 			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
1281d844c7d4SMarek Vasut 			tmp_bit_chk <<= shift_ratio;
1282d844c7d4SMarek Vasut 			tmp_bit_chk |= correct_mask_vg & ~base_rw_mgr;
12833da42859SDinh Nguyen 		}
12843da42859SDinh Nguyen 
1285d844c7d4SMarek Vasut 		bit_chk &= tmp_bit_chk;
1286d844c7d4SMarek Vasut 	}
1287d844c7d4SMarek Vasut 
12881fa0c8c4SMarek Vasut 	writel(rwcfg->clear_dqs_enable, addr + (group << 2));
12893da42859SDinh Nguyen 
12903da42859SDinh Nguyen 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
1291d844c7d4SMarek Vasut 
1292d844c7d4SMarek Vasut 	if (bit_chk != param->read_correct_mask)
1293d844c7d4SMarek Vasut 		ret = -EIO;
1294d844c7d4SMarek Vasut 
1295ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1,
1296d844c7d4SMarek Vasut 		   "%s:%d test_load_patterns(%u,ALL) => (%u == %u) => %i\n",
1297d844c7d4SMarek Vasut 		   __func__, __LINE__, group, bit_chk,
1298d844c7d4SMarek Vasut 		   param->read_correct_mask, ret);
1299d844c7d4SMarek Vasut 
1300d844c7d4SMarek Vasut 	return ret;
13013da42859SDinh Nguyen }
13023da42859SDinh Nguyen 
1303b6cb7f9eSMarek Vasut /**
1304b6cb7f9eSMarek Vasut  * rw_mgr_mem_calibrate_read_load_patterns() - Load up the patterns for read test
1305b6cb7f9eSMarek Vasut  * @rank_bgn:	Rank number
1306b6cb7f9eSMarek Vasut  * @all_ranks:	Test all ranks
1307b6cb7f9eSMarek Vasut  *
1308b6cb7f9eSMarek Vasut  * Load up the patterns we are going to use during a read test.
1309b6cb7f9eSMarek Vasut  */
1310b6cb7f9eSMarek Vasut static void rw_mgr_mem_calibrate_read_load_patterns(const u32 rank_bgn,
1311b6cb7f9eSMarek Vasut 						    const int all_ranks)
13123da42859SDinh Nguyen {
1313b6cb7f9eSMarek Vasut 	const u32 rank_end = all_ranks ?
13141fa0c8c4SMarek Vasut 			rwcfg->mem_number_of_ranks :
13153da42859SDinh Nguyen 			(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
1316b6cb7f9eSMarek Vasut 	u32 r;
13173da42859SDinh Nguyen 
13183da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
1319b6cb7f9eSMarek Vasut 
13203da42859SDinh Nguyen 	for (r = rank_bgn; r < rank_end; r++) {
13213da42859SDinh Nguyen 		/* set rank */
13223da42859SDinh Nguyen 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
13233da42859SDinh Nguyen 
13243da42859SDinh Nguyen 		/* Load up a constant burst */
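		/*
		 * CNTR 0/1 run 0x20 (32) iterations of the guaranteed-write
		 * wait loops, CNTR 2/3 run 0x04 (4); each counter is paired
		 * with the jump address programmed right after it.
		 */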
13251273dd9eSMarek Vasut 		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0);
13263da42859SDinh Nguyen 
13271fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write_wait0,
13281273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);
13293da42859SDinh Nguyen 
13301273dd9eSMarek Vasut 		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1);
13313da42859SDinh Nguyen 
13321fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write_wait1,
13331273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);
13343da42859SDinh Nguyen 
13351273dd9eSMarek Vasut 		writel(0x04, &sdr_rw_load_mgr_regs->load_cntr2);
13363da42859SDinh Nguyen 
13371fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write_wait2,
13381273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
13393da42859SDinh Nguyen 
13401273dd9eSMarek Vasut 		writel(0x04, &sdr_rw_load_mgr_regs->load_cntr3);
13413da42859SDinh Nguyen 
13421fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write_wait3,
13431273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
13443da42859SDinh Nguyen 
13451fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write, SDR_PHYGRP_RWMGRGRP_ADDRESS |
13461273dd9eSMarek Vasut 						RW_MGR_RUN_SINGLE_GROUP_OFFSET);
13473da42859SDinh Nguyen 	}
13483da42859SDinh Nguyen 
13493da42859SDinh Nguyen 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
13503da42859SDinh Nguyen }
13513da42859SDinh Nguyen 
1352783fcf59SMarek Vasut /**
1353783fcf59SMarek Vasut  * rw_mgr_mem_calibrate_read_test() - Perform READ test on single rank
1354783fcf59SMarek Vasut  * @rank_bgn:		Rank number
1355783fcf59SMarek Vasut  * @group:		Read/Write group
1356783fcf59SMarek Vasut  * @num_tries:		Number of retries of the test
1357783fcf59SMarek Vasut  * @all_correct:	All bits must be correct in the mask
1358783fcf59SMarek Vasut  * @bit_chk:		Resulting bit mask after the test
1359783fcf59SMarek Vasut  * @all_groups:		Test all R/W groups
1360783fcf59SMarek Vasut  * @all_ranks:		Test all ranks
1361783fcf59SMarek Vasut  *
1362783fcf59SMarek Vasut  * Try a read and see if it returns correct data. The test has dummy reads
1363783fcf59SMarek Vasut  * inserted into the mix to align DQS enable, and performs more thorough
1364783fcf59SMarek Vasut  * checks than the regular read test.
13653da42859SDinh Nguyen  */
13663cb8bf3fSMarek Vasut static int
13673cb8bf3fSMarek Vasut rw_mgr_mem_calibrate_read_test(const u32 rank_bgn, const u32 group,
13683cb8bf3fSMarek Vasut 			       const u32 num_tries, const u32 all_correct,
13693cb8bf3fSMarek Vasut 			       u32 *bit_chk,
13703cb8bf3fSMarek Vasut 			       const u32 all_groups, const u32 all_ranks)
13713da42859SDinh Nguyen {
13721fa0c8c4SMarek Vasut 	const u32 rank_end = all_ranks ? rwcfg->mem_number_of_ranks :
13733da42859SDinh Nguyen 		(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
13743cb8bf3fSMarek Vasut 	const u32 quick_read_mode =
13753cb8bf3fSMarek Vasut 		((STATIC_CALIB_STEPS & CALIB_SKIP_DELAY_SWEEPS) &&
137696fd4362SMarek Vasut 		 misccfg->enable_super_quick_calibration);
13773cb8bf3fSMarek Vasut 	u32 correct_mask_vg = param->read_correct_mask_vg;
13783cb8bf3fSMarek Vasut 	u32 tmp_bit_chk;
13793cb8bf3fSMarek Vasut 	u32 base_rw_mgr;
13803cb8bf3fSMarek Vasut 	u32 addr;
13813cb8bf3fSMarek Vasut 
13823cb8bf3fSMarek Vasut 	int r, vg, ret;
13833da42859SDinh Nguyen 
13843da42859SDinh Nguyen 	*bit_chk = param->read_correct_mask;
13853da42859SDinh Nguyen 
13863da42859SDinh Nguyen 	for (r = rank_bgn; r < rank_end; r++) {
13873da42859SDinh Nguyen 		/* set rank */
13883da42859SDinh Nguyen 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
13893da42859SDinh Nguyen 
13901273dd9eSMarek Vasut 		writel(0x10, &sdr_rw_load_mgr_regs->load_cntr1);
13913da42859SDinh Nguyen 
13921fa0c8c4SMarek Vasut 		writel(rwcfg->read_b2b_wait1,
13931273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);
13943da42859SDinh Nguyen 
13951273dd9eSMarek Vasut 		writel(0x10, &sdr_rw_load_mgr_regs->load_cntr2);
13961fa0c8c4SMarek Vasut 		writel(rwcfg->read_b2b_wait2,
13971273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
13983da42859SDinh Nguyen 
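		/*
		 * CNTR 0 - Read burst count: 0x1 in quick read mode (two
		 * reads in total, see below), 0x06 when testing all groups,
		 * 0x32 otherwise.
		 */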
13993da42859SDinh Nguyen 		if (quick_read_mode)
14001273dd9eSMarek Vasut 			writel(0x1, &sdr_rw_load_mgr_regs->load_cntr0);
14013da42859SDinh Nguyen 			/* need at least two (1+1) reads to capture failures */
14023da42859SDinh Nguyen 		else if (all_groups)
14031273dd9eSMarek Vasut 			writel(0x06, &sdr_rw_load_mgr_regs->load_cntr0);
14043da42859SDinh Nguyen 		else
14051273dd9eSMarek Vasut 			writel(0x32, &sdr_rw_load_mgr_regs->load_cntr0);
14063da42859SDinh Nguyen 
14071fa0c8c4SMarek Vasut 		writel(rwcfg->read_b2b,
14081273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);
14093da42859SDinh Nguyen 		if (all_groups)
14101fa0c8c4SMarek Vasut 			writel(rwcfg->mem_if_read_dqs_width *
14111fa0c8c4SMarek Vasut 			       rwcfg->mem_virtual_groups_per_read_dqs - 1,
14121273dd9eSMarek Vasut 			       &sdr_rw_load_mgr_regs->load_cntr3);
14133da42859SDinh Nguyen 		else
14141273dd9eSMarek Vasut 			writel(0x0, &sdr_rw_load_mgr_regs->load_cntr3);
14153da42859SDinh Nguyen 
14161fa0c8c4SMarek Vasut 		writel(rwcfg->read_b2b,
14171273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
14183da42859SDinh Nguyen 
14193da42859SDinh Nguyen 		tmp_bit_chk = 0;
14201fa0c8c4SMarek Vasut 		for (vg = rwcfg->mem_virtual_groups_per_read_dqs - 1; vg >= 0;
14217ce23bb6SMarek Vasut 		     vg--) {
1422ba522c76SMarek Vasut 			/* Reset the FIFOs to get pointers to known state. */
14231273dd9eSMarek Vasut 			writel(0, &phy_mgr_cmd->fifo_reset);
14241273dd9eSMarek Vasut 			writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
14251273dd9eSMarek Vasut 				  RW_MGR_RESET_READ_DATAPATH_OFFSET);
14263da42859SDinh Nguyen 
1427ba522c76SMarek Vasut 			if (all_groups) {
1428ba522c76SMarek Vasut 				addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
1429ba522c76SMarek Vasut 				       RW_MGR_RUN_ALL_GROUPS_OFFSET;
1430ba522c76SMarek Vasut 			} else {
1431ba522c76SMarek Vasut 				addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
1432ba522c76SMarek Vasut 				       RW_MGR_RUN_SINGLE_GROUP_OFFSET;
1433ba522c76SMarek Vasut 			}
1434c4815f76SMarek Vasut 
14351fa0c8c4SMarek Vasut 			writel(rwcfg->read_b2b, addr +
1436139823ecSMarek Vasut 			       ((group *
1437139823ecSMarek Vasut 				 rwcfg->mem_virtual_groups_per_read_dqs +
14383da42859SDinh Nguyen 				 vg) << 2));
14393da42859SDinh Nguyen 
14401273dd9eSMarek Vasut 			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
14411fa0c8c4SMarek Vasut 			tmp_bit_chk <<= rwcfg->mem_dq_per_read_dqs /
14421fa0c8c4SMarek Vasut 					rwcfg->mem_virtual_groups_per_read_dqs;
1443ba522c76SMarek Vasut 			tmp_bit_chk |= correct_mask_vg & ~(base_rw_mgr);
14443da42859SDinh Nguyen 		}
14457ce23bb6SMarek Vasut 
14463da42859SDinh Nguyen 		*bit_chk &= tmp_bit_chk;
14473da42859SDinh Nguyen 	}
14483da42859SDinh Nguyen 
1449c4815f76SMarek Vasut 	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET;
14501fa0c8c4SMarek Vasut 	writel(rwcfg->clear_dqs_enable, addr + (group << 2));
14513da42859SDinh Nguyen 
14523853d65eSMarek Vasut 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
14533853d65eSMarek Vasut 
14543da42859SDinh Nguyen 	if (all_correct) {
14553853d65eSMarek Vasut 		ret = (*bit_chk == param->read_correct_mask);
1456ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
14573853d65eSMarek Vasut 			   "%s:%d read_test(%u,ALL,%u) => (%u == %u) => %i\n",
14583853d65eSMarek Vasut 			   __func__, __LINE__, group, all_groups, *bit_chk,
14593853d65eSMarek Vasut 			   param->read_correct_mask, ret);
14603da42859SDinh Nguyen 	} else	{
14613853d65eSMarek Vasut 		ret = (*bit_chk != 0x00);
1462ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
14633853d65eSMarek Vasut 			   "%s:%d read_test(%u,ONE,%u) => (%u != %u) => %i\n",
14643853d65eSMarek Vasut 			   __func__, __LINE__, group, all_groups, *bit_chk,
14653853d65eSMarek Vasut 			   0, ret);
14663da42859SDinh Nguyen 	}
14673853d65eSMarek Vasut 
14683853d65eSMarek Vasut 	return ret;
14693da42859SDinh Nguyen }
14703da42859SDinh Nguyen 
147196df6036SMarek Vasut /**
147296df6036SMarek Vasut  * rw_mgr_mem_calibrate_read_test_all_ranks() - Perform READ test on all ranks
147396df6036SMarek Vasut  * @grp:		Read/Write group
147496df6036SMarek Vasut  * @num_tries:		Number of retries of the test
147596df6036SMarek Vasut  * @all_correct:	All bits must be correct in the mask
147696df6036SMarek Vasut  * @all_groups:		Test all R/W groups
147796df6036SMarek Vasut  *
147896df6036SMarek Vasut  * Perform a READ test across all memory ranks.
147996df6036SMarek Vasut  */
148096df6036SMarek Vasut static int
148196df6036SMarek Vasut rw_mgr_mem_calibrate_read_test_all_ranks(const u32 grp, const u32 num_tries,
148296df6036SMarek Vasut 					 const u32 all_correct,
148396df6036SMarek Vasut 					 const u32 all_groups)
14843da42859SDinh Nguyen {
148596df6036SMarek Vasut 	u32 bit_chk;
148696df6036SMarek Vasut 	return rw_mgr_mem_calibrate_read_test(0, grp, num_tries, all_correct,
148796df6036SMarek Vasut 					      &bit_chk, all_groups, 1);
14883da42859SDinh Nguyen }
14893da42859SDinh Nguyen 
149060bb8a8aSMarek Vasut /**
149160bb8a8aSMarek Vasut  * rw_mgr_incr_vfifo() - Increase VFIFO value
149260bb8a8aSMarek Vasut  * @grp:	Read/Write group
149360bb8a8aSMarek Vasut  *
149460bb8a8aSMarek Vasut  * Increase VFIFO value.
149560bb8a8aSMarek Vasut  */
14968c887b6eSMarek Vasut static void rw_mgr_incr_vfifo(const u32 grp)
14973da42859SDinh Nguyen {
14981273dd9eSMarek Vasut 	writel(grp, &phy_mgr_cmd->inc_vfifo_hard_phy);
14993da42859SDinh Nguyen }
15003da42859SDinh Nguyen 
150160bb8a8aSMarek Vasut /**
150260bb8a8aSMarek Vasut  * rw_mgr_decr_vfifo() - Decrease VFIFO value
150360bb8a8aSMarek Vasut  * @grp:	Read/Write group
150460bb8a8aSMarek Vasut  *
150560bb8a8aSMarek Vasut  * Decrease VFIFO value.
150660bb8a8aSMarek Vasut  */
15078c887b6eSMarek Vasut static void rw_mgr_decr_vfifo(const u32 grp)
15083da42859SDinh Nguyen {
150960bb8a8aSMarek Vasut 	u32 i;
15103da42859SDinh Nguyen 
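	/*
	 * The VFIFO is a ring, so stepping it forward (size - 1) times
	 * is equivalent to stepping it back once.
	 */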
151196fd4362SMarek Vasut 	for (i = 0; i < misccfg->read_valid_fifo_size - 1; i++)
15128c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
15133da42859SDinh Nguyen }
15143da42859SDinh Nguyen 
1515d145ca9fSMarek Vasut /**
1516d145ca9fSMarek Vasut  * find_vfifo_failing_read() - Push VFIFO to get a failing read
1517d145ca9fSMarek Vasut  * @grp:	Read/Write group
1518d145ca9fSMarek Vasut  *
1519d145ca9fSMarek Vasut  * Push VFIFO until a failing read happens.
1520d145ca9fSMarek Vasut  */
1521d145ca9fSMarek Vasut static int find_vfifo_failing_read(const u32 grp)
15223da42859SDinh Nguyen {
152396df6036SMarek Vasut 	u32 v, ret, fail_cnt = 0;
15243da42859SDinh Nguyen 
152596fd4362SMarek Vasut 	for (v = 0; v < misccfg->read_valid_fifo_size; v++) {
1526ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "%s:%d: vfifo %u\n",
15273da42859SDinh Nguyen 			   __func__, __LINE__, v);
1528d145ca9fSMarek Vasut 		ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
152996df6036SMarek Vasut 						PASS_ONE_BIT, 0);
1530d145ca9fSMarek Vasut 		if (!ret) {
15313da42859SDinh Nguyen 			fail_cnt++;
15323da42859SDinh Nguyen 
15333da42859SDinh Nguyen 			if (fail_cnt == 2)
1534d145ca9fSMarek Vasut 				return v;
15353da42859SDinh Nguyen 		}
15363da42859SDinh Nguyen 
1537d145ca9fSMarek Vasut 		/* Fiddle with FIFO. */
15388c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
15393da42859SDinh Nguyen 	}
15403da42859SDinh Nguyen 
1541d145ca9fSMarek Vasut 	/* No failing read found! Something must have gone wrong. */
1542ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d: vfifo failed\n", __func__, __LINE__);
15433da42859SDinh Nguyen 	return 0;
15443da42859SDinh Nguyen }
15453da42859SDinh Nguyen 
1546192d6f9fSMarek Vasut /**
154752e8f217SMarek Vasut  * sdr_find_phase_delay() - Find DQS enable phase or delay
154852e8f217SMarek Vasut  * @working:	If 1, look for working phase/delay, if 0, look for non-working
154952e8f217SMarek Vasut  * @delay:	If 1, look for delay, if 0, look for phase
155052e8f217SMarek Vasut  * @grp:	Read/Write group
155152e8f217SMarek Vasut  * @work:	Working window position
155252e8f217SMarek Vasut  * @work_inc:	Working window increment
155352e8f217SMarek Vasut  * @pd:		DQS Phase/Delay Iterator
155452e8f217SMarek Vasut  *
155552e8f217SMarek Vasut  * Find working or non-working DQS enable phase setting.
155652e8f217SMarek Vasut  */
155752e8f217SMarek Vasut static int sdr_find_phase_delay(int working, int delay, const u32 grp,
155852e8f217SMarek Vasut 				u32 *work, const u32 work_inc, u32 *pd)
155952e8f217SMarek Vasut {
1560139823ecSMarek Vasut 	const u32 max = delay ? iocfg->dqs_en_delay_max :
1561139823ecSMarek Vasut 				iocfg->dqs_en_phase_max;
156296df6036SMarek Vasut 	u32 ret;
156352e8f217SMarek Vasut 
156452e8f217SMarek Vasut 	for (; *pd <= max; (*pd)++) {
156552e8f217SMarek Vasut 		if (delay)
156652e8f217SMarek Vasut 			scc_mgr_set_dqs_en_delay_all_ranks(grp, *pd);
156752e8f217SMarek Vasut 		else
156852e8f217SMarek Vasut 			scc_mgr_set_dqs_en_phase_all_ranks(grp, *pd);
156952e8f217SMarek Vasut 
157052e8f217SMarek Vasut 		ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
157196df6036SMarek Vasut 					PASS_ONE_BIT, 0);
157252e8f217SMarek Vasut 		if (!working)
157352e8f217SMarek Vasut 			ret = !ret;
157452e8f217SMarek Vasut 
157552e8f217SMarek Vasut 		if (ret)
157652e8f217SMarek Vasut 			return 0;
157752e8f217SMarek Vasut 
157852e8f217SMarek Vasut 		if (work)
157952e8f217SMarek Vasut 			*work += work_inc;
158052e8f217SMarek Vasut 	}
158152e8f217SMarek Vasut 
158252e8f217SMarek Vasut 	return -EINVAL;
158352e8f217SMarek Vasut }
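
/*
 * sdr_find_phase_delay() is the common sweep primitive used below:
 * sdr_find_phase() runs it across phase taps (work_inc =
 * iocfg->delay_per_opa_tap), while the dtap searches in
 * rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase() run it across delay
 * taps with work_inc = iocfg->delay_per_dqs_en_dchain_tap or 0.
 */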
158452e8f217SMarek Vasut /**
1585192d6f9fSMarek Vasut  * sdr_find_phase() - Find DQS enable phase
1586192d6f9fSMarek Vasut  * @working:	If 1, look for working phase, if 0, look for non-working phase
1587192d6f9fSMarek Vasut  * @grp:	Read/Write group
1588192d6f9fSMarek Vasut  * @work:	Working window position
1589192d6f9fSMarek Vasut  * @i:		Iterator
1590192d6f9fSMarek Vasut  * @p:		DQS Phase Iterator
1591192d6f9fSMarek Vasut  *
1592192d6f9fSMarek Vasut  * Find working or non-working DQS enable phase setting.
1593192d6f9fSMarek Vasut  */
15948c887b6eSMarek Vasut static int sdr_find_phase(int working, const u32 grp, u32 *work,
159586a39dc7SMarek Vasut 			  u32 *i, u32 *p)
1596192d6f9fSMarek Vasut {
159796fd4362SMarek Vasut 	const u32 end = misccfg->read_valid_fifo_size + (working ? 0 : 1);
159852e8f217SMarek Vasut 	int ret;
1599192d6f9fSMarek Vasut 
1600192d6f9fSMarek Vasut 	for (; *i < end; (*i)++) {
1601192d6f9fSMarek Vasut 		if (working)
1602192d6f9fSMarek Vasut 			*p = 0;
1603192d6f9fSMarek Vasut 
160452e8f217SMarek Vasut 		ret = sdr_find_phase_delay(working, 0, grp, work,
1605160695d8SMarek Vasut 					   iocfg->delay_per_opa_tap, p);
160652e8f217SMarek Vasut 		if (!ret)
1607192d6f9fSMarek Vasut 			return 0;
1608192d6f9fSMarek Vasut 
1609160695d8SMarek Vasut 		if (*p > iocfg->dqs_en_phase_max) {
1610192d6f9fSMarek Vasut 			/* Fiddle with FIFO. */
16118c887b6eSMarek Vasut 			rw_mgr_incr_vfifo(grp);
1612192d6f9fSMarek Vasut 			if (!working)
1613192d6f9fSMarek Vasut 				*p = 0;
1614192d6f9fSMarek Vasut 		}
1615192d6f9fSMarek Vasut 	}
1616192d6f9fSMarek Vasut 
1617192d6f9fSMarek Vasut 	return -EINVAL;
1618192d6f9fSMarek Vasut }
1619192d6f9fSMarek Vasut 
16204c5e584bSMarek Vasut /**
16214c5e584bSMarek Vasut  * sdr_working_phase() - Find working DQS enable phase
16224c5e584bSMarek Vasut  * @grp:	Read/Write group
16234c5e584bSMarek Vasut  * @work_bgn:	Working window start position
16244c5e584bSMarek Vasut  * @d:		dtaps output value
16254c5e584bSMarek Vasut  * @p:		DQS Phase Iterator
16264c5e584bSMarek Vasut  * @i:		Iterator
16274c5e584bSMarek Vasut  *
16284c5e584bSMarek Vasut  * Find working DQS enable phase setting.
16294c5e584bSMarek Vasut  */
16308c887b6eSMarek Vasut static int sdr_working_phase(const u32 grp, u32 *work_bgn, u32 *d,
16314c5e584bSMarek Vasut 			     u32 *p, u32 *i)
16323da42859SDinh Nguyen {
1633160695d8SMarek Vasut 	const u32 dtaps_per_ptap = iocfg->delay_per_opa_tap /
1634160695d8SMarek Vasut 				   iocfg->delay_per_dqs_en_dchain_tap;
1635192d6f9fSMarek Vasut 	int ret;
16363da42859SDinh Nguyen 
1637192d6f9fSMarek Vasut 	*work_bgn = 0;
1638192d6f9fSMarek Vasut 
1639192d6f9fSMarek Vasut 	for (*d = 0; *d <= dtaps_per_ptap; (*d)++) {
1640192d6f9fSMarek Vasut 		*i = 0;
1641521fe39cSMarek Vasut 		scc_mgr_set_dqs_en_delay_all_ranks(grp, *d);
16428c887b6eSMarek Vasut 		ret = sdr_find_phase(1, grp, work_bgn, i, p);
1643192d6f9fSMarek Vasut 		if (!ret)
1644192d6f9fSMarek Vasut 			return 0;
1645160695d8SMarek Vasut 		*work_bgn += iocfg->delay_per_dqs_en_dchain_tap;
16463da42859SDinh Nguyen 	}
16473da42859SDinh Nguyen 
164838ed6922SMarek Vasut 	/* Cannot find working solution */
1649ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d find_dqs_en_phase: no vfifo/ptap/dtap\n",
1650192d6f9fSMarek Vasut 		   __func__, __LINE__);
1651192d6f9fSMarek Vasut 	return -EINVAL;
16523da42859SDinh Nguyen }
16533da42859SDinh Nguyen 
16544c5e584bSMarek Vasut /**
16554c5e584bSMarek Vasut  * sdr_backup_phase() - Find DQS enable backup phase
16564c5e584bSMarek Vasut  * @grp:	Read/Write group
16574c5e584bSMarek Vasut  * @work_bgn:	Working window start position
16584c5e584bSMarek Vasut  * @p:		DQS Phase Iterator
16594c5e584bSMarek Vasut  *
16604c5e584bSMarek Vasut  * Find DQS enable backup phase setting.
16614c5e584bSMarek Vasut  */
16628c887b6eSMarek Vasut static void sdr_backup_phase(const u32 grp, u32 *work_bgn, u32 *p)
16633da42859SDinh Nguyen {
166496df6036SMarek Vasut 	u32 tmp_delay, d;
16654c5e584bSMarek Vasut 	int ret;
16663da42859SDinh Nguyen 
16673da42859SDinh Nguyen 	/* Special case: backing up below phase 0 wraps to the max phase and borrows one VFIFO cycle */
16683da42859SDinh Nguyen 	if (*p == 0) {
1669160695d8SMarek Vasut 		*p = iocfg->dqs_en_phase_max;
16708c887b6eSMarek Vasut 		rw_mgr_decr_vfifo(grp);
16713da42859SDinh Nguyen 	} else {
16723da42859SDinh Nguyen 		(*p)--;
16733da42859SDinh Nguyen 	}
1674160695d8SMarek Vasut 	tmp_delay = *work_bgn - iocfg->delay_per_opa_tap;
1675521fe39cSMarek Vasut 	scc_mgr_set_dqs_en_phase_all_ranks(grp, *p);
16763da42859SDinh Nguyen 
1677139823ecSMarek Vasut 	for (d = 0; d <= iocfg->dqs_en_delay_max && tmp_delay < *work_bgn;
1678139823ecSMarek Vasut 	     d++) {
167949891df6SMarek Vasut 		scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
16803da42859SDinh Nguyen 
16814c5e584bSMarek Vasut 		ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
168296df6036SMarek Vasut 					PASS_ONE_BIT, 0);
16834c5e584bSMarek Vasut 		if (ret) {
16843da42859SDinh Nguyen 			*work_bgn = tmp_delay;
16853da42859SDinh Nguyen 			break;
16863da42859SDinh Nguyen 		}
168749891df6SMarek Vasut 
1688160695d8SMarek Vasut 		tmp_delay += iocfg->delay_per_dqs_en_dchain_tap;
16893da42859SDinh Nguyen 	}
16903da42859SDinh Nguyen 
16914c5e584bSMarek Vasut 	/* Restore VFIFO to old state before we decremented it (if needed). */
16923da42859SDinh Nguyen 	(*p)++;
1693160695d8SMarek Vasut 	if (*p > iocfg->dqs_en_phase_max) {
16943da42859SDinh Nguyen 		*p = 0;
16958c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
16963da42859SDinh Nguyen 	}
16973da42859SDinh Nguyen 
1698521fe39cSMarek Vasut 	scc_mgr_set_dqs_en_delay_all_ranks(grp, 0);
16993da42859SDinh Nguyen }
17003da42859SDinh Nguyen 
17014c5e584bSMarek Vasut /**
17024c5e584bSMarek Vasut  * sdr_nonworking_phase() - Find non-working DQS enable phase
17034c5e584bSMarek Vasut  * @grp:	Read/Write group
17044c5e584bSMarek Vasut  * @work_end:	Working window end position
17054c5e584bSMarek Vasut  * @p:		DQS Phase Iterator
17064c5e584bSMarek Vasut  * @i:		Iterator
17074c5e584bSMarek Vasut  *
17084c5e584bSMarek Vasut  * Find non-working DQS enable phase setting.
17094c5e584bSMarek Vasut  */
17108c887b6eSMarek Vasut static int sdr_nonworking_phase(const u32 grp, u32 *work_end, u32 *p, u32 *i)
17113da42859SDinh Nguyen {
1712192d6f9fSMarek Vasut 	int ret;
17133da42859SDinh Nguyen 
17143da42859SDinh Nguyen 	(*p)++;
1715160695d8SMarek Vasut 	*work_end += iocfg->delay_per_opa_tap;
1716160695d8SMarek Vasut 	if (*p > iocfg->dqs_en_phase_max) {
1717192d6f9fSMarek Vasut 		/* Fiddle with FIFO. */
17183da42859SDinh Nguyen 		*p = 0;
17198c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
17203da42859SDinh Nguyen 	}
17213da42859SDinh Nguyen 
17228c887b6eSMarek Vasut 	ret = sdr_find_phase(0, grp, work_end, i, p);
1723192d6f9fSMarek Vasut 	if (ret) {
172438ed6922SMarek Vasut 		/* Cannot see edge of failing read. */
1725ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "%s:%d: end: failed\n",
1726192d6f9fSMarek Vasut 			   __func__, __LINE__);
1727192d6f9fSMarek Vasut 	}
1728192d6f9fSMarek Vasut 
1729192d6f9fSMarek Vasut 	return ret;
17303da42859SDinh Nguyen }
17313da42859SDinh Nguyen 
17320a13a0fbSMarek Vasut /**
17330a13a0fbSMarek Vasut  * sdr_find_window_center() - Find center of the working DQS window.
17340a13a0fbSMarek Vasut  * @grp:	Read/Write group
17350a13a0fbSMarek Vasut  * @work_bgn:	First working settings
17360a13a0fbSMarek Vasut  * @work_end:	Last working settings
17370a13a0fbSMarek Vasut  *
17380a13a0fbSMarek Vasut  * Find center of the working DQS enable window.
17390a13a0fbSMarek Vasut  */
17400a13a0fbSMarek Vasut static int sdr_find_window_center(const u32 grp, const u32 work_bgn,
17418c887b6eSMarek Vasut 				  const u32 work_end)
17423da42859SDinh Nguyen {
174396df6036SMarek Vasut 	u32 work_mid;
17443da42859SDinh Nguyen 	int tmp_delay = 0;
174528fd242aSMarek Vasut 	int i, p, d;
17463da42859SDinh Nguyen 
174728fd242aSMarek Vasut 	work_mid = (work_bgn + work_end) / 2;
17483da42859SDinh Nguyen 
1749ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "work_bgn=%d work_end=%d work_mid=%d\n",
175028fd242aSMarek Vasut 		   work_bgn, work_end, work_mid);
17513da42859SDinh Nguyen 	/* Get the middle delay to be less than a VFIFO delay */
1752160695d8SMarek Vasut 	tmp_delay = (iocfg->dqs_en_phase_max + 1) * iocfg->delay_per_opa_tap;
175328fd242aSMarek Vasut 
1754ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "vfifo ptap delay %d\n", tmp_delay);
1755cbb0b7e0SMarek Vasut 	work_mid %= tmp_delay;
1756ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "new work_mid %d\n", work_mid);
17573da42859SDinh Nguyen 
1758160695d8SMarek Vasut 	tmp_delay = rounddown(work_mid, iocfg->delay_per_opa_tap);
1759160695d8SMarek Vasut 	if (tmp_delay > iocfg->dqs_en_phase_max * iocfg->delay_per_opa_tap)
1760160695d8SMarek Vasut 		tmp_delay = iocfg->dqs_en_phase_max * iocfg->delay_per_opa_tap;
1761160695d8SMarek Vasut 	p = tmp_delay / iocfg->delay_per_opa_tap;
17623da42859SDinh Nguyen 
1763ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "new p %d, tmp_delay=%d\n", p, tmp_delay);
1764cbb0b7e0SMarek Vasut 
1765139823ecSMarek Vasut 	d = DIV_ROUND_UP(work_mid - tmp_delay,
1766139823ecSMarek Vasut 			 iocfg->delay_per_dqs_en_dchain_tap);
1767160695d8SMarek Vasut 	if (d > iocfg->dqs_en_delay_max)
1768160695d8SMarek Vasut 		d = iocfg->dqs_en_delay_max;
1769160695d8SMarek Vasut 	tmp_delay += d * iocfg->delay_per_dqs_en_dchain_tap;
1770cbb0b7e0SMarek Vasut 
1771ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "new d %d, tmp_delay=%d\n", d, tmp_delay);
177228fd242aSMarek Vasut 
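	/*
	 * Worked example with purely illustrative numbers: assume
	 * delay_per_opa_tap = 400, dqs_en_phase_max = 7,
	 * delay_per_dqs_en_dchain_tap = 50 and work_mid = 1500.
	 * The VFIFO ptap span is 8 * 400 = 3200, so work_mid stays 1500;
	 * p = rounddown(1500, 400) / 400 = 3 covers 1200 of it, and the
	 * remaining 300 is covered by d = DIV_ROUND_UP(300, 50) = 6 dtaps.
	 */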
1773cbb0b7e0SMarek Vasut 	scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
177428fd242aSMarek Vasut 	scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
17753da42859SDinh Nguyen 
17763da42859SDinh Nguyen 	/*
17773da42859SDinh Nguyen 	 * Push the VFIFO until we can successfully calibrate. We can do this
17783da42859SDinh Nguyen 	 * because the largest possible margin is 1 VFIFO cycle.
17793da42859SDinh Nguyen 	 */
178096fd4362SMarek Vasut 	for (i = 0; i < misccfg->read_valid_fifo_size; i++) {
1781ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "find_dqs_en_phase: center\n");
178228fd242aSMarek Vasut 		if (rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
17833da42859SDinh Nguyen 							     PASS_ONE_BIT,
178496df6036SMarek Vasut 							     0)) {
1785ea9aa241SMarek Vasut 			debug_cond(DLEVEL >= 2,
17868c887b6eSMarek Vasut 				   "%s:%d center: found: ptap=%u dtap=%u\n",
17878c887b6eSMarek Vasut 				   __func__, __LINE__, p, d);
17880a13a0fbSMarek Vasut 			return 0;
17893da42859SDinh Nguyen 		}
17900a13a0fbSMarek Vasut 
17910a13a0fbSMarek Vasut 		/* Fiddle with FIFO. */
17928c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
17930a13a0fbSMarek Vasut 	}
17940a13a0fbSMarek Vasut 
1795ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d center: failed.\n",
17960a13a0fbSMarek Vasut 		   __func__, __LINE__);
17970a13a0fbSMarek Vasut 	return -EINVAL;
17983da42859SDinh Nguyen }
17993da42859SDinh Nguyen 
180033756893SMarek Vasut /**
180133756893SMarek Vasut  * rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase() - Find a good DQS enable to use
180233756893SMarek Vasut  * @grp:	Read/Write Group
180333756893SMarek Vasut  *
180433756893SMarek Vasut  * Find a good DQS enable to use.
180533756893SMarek Vasut  */
1806914546e7SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(const u32 grp)
18073da42859SDinh Nguyen {
18085735540fSMarek Vasut 	u32 d, p, i;
18095735540fSMarek Vasut 	u32 dtaps_per_ptap;
18105735540fSMarek Vasut 	u32 work_bgn, work_end;
181135e47b71SMarek Vasut 	u32 found_passing_read, found_failing_read = 0, initial_failing_dtap;
18125735540fSMarek Vasut 	int ret;
18133da42859SDinh Nguyen 
18143da42859SDinh Nguyen 	debug("%s:%d %u\n", __func__, __LINE__, grp);
18153da42859SDinh Nguyen 
18163da42859SDinh Nguyen 	reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER);
18173da42859SDinh Nguyen 
18183da42859SDinh Nguyen 	scc_mgr_set_dqs_en_delay_all_ranks(grp, 0);
18193da42859SDinh Nguyen 	scc_mgr_set_dqs_en_phase_all_ranks(grp, 0);
18203da42859SDinh Nguyen 
18212f3589caSMarek Vasut 	/* Step 0: Determine number of delay taps for each phase tap. */
1822139823ecSMarek Vasut 	dtaps_per_ptap = iocfg->delay_per_opa_tap /
1823139823ecSMarek Vasut 			 iocfg->delay_per_dqs_en_dchain_tap;
18243da42859SDinh Nguyen 
18252f3589caSMarek Vasut 	/* Step 1: First push vfifo until we get a failing read. */
1826d145ca9fSMarek Vasut 	find_vfifo_failing_read(grp);
18273da42859SDinh Nguyen 
18282f3589caSMarek Vasut 	/* Step 2: Find first working phase, increment in ptaps. */
18293da42859SDinh Nguyen 	work_bgn = 0;
1830914546e7SMarek Vasut 	ret = sdr_working_phase(grp, &work_bgn, &d, &p, &i);
1831914546e7SMarek Vasut 	if (ret)
1832914546e7SMarek Vasut 		return ret;
18333da42859SDinh Nguyen 
18343da42859SDinh Nguyen 	work_end = work_bgn;
18353da42859SDinh Nguyen 
18363da42859SDinh Nguyen 	/*
18372f3589caSMarek Vasut 	 * If d is 0 then the working window covers a phase tap and we can
18382f3589caSMarek Vasut 	 * follow the old procedure. Otherwise, we've found the beginning
18393da42859SDinh Nguyen 	 * and we need to increment the dtaps until we find the end.
18403da42859SDinh Nguyen 	 */
18413da42859SDinh Nguyen 	if (d == 0) {
18422f3589caSMarek Vasut 		/*
18432f3589caSMarek Vasut 		 * Step 3a: If we have room, back off by one and
18442f3589caSMarek Vasut 		 *          increment in dtaps.
18452f3589caSMarek Vasut 		 */
18468c887b6eSMarek Vasut 		sdr_backup_phase(grp, &work_bgn, &p);
18473da42859SDinh Nguyen 
18482f3589caSMarek Vasut 		/*
18492f3589caSMarek Vasut 		 * Step 4a: go forward from working phase to non working
18502f3589caSMarek Vasut 		 * phase, increment in ptaps.
18512f3589caSMarek Vasut 		 */
1852914546e7SMarek Vasut 		ret = sdr_nonworking_phase(grp, &work_end, &p, &i);
1853914546e7SMarek Vasut 		if (ret)
1854914546e7SMarek Vasut 			return ret;
18553da42859SDinh Nguyen 
18562f3589caSMarek Vasut 		/* Step 5a: Back off one from last, increment in dtaps. */
18573da42859SDinh Nguyen 
18583da42859SDinh Nguyen 		/* Special case code for backing up a phase */
18593da42859SDinh Nguyen 		if (p == 0) {
1860160695d8SMarek Vasut 			p = iocfg->dqs_en_phase_max;
18618c887b6eSMarek Vasut 			rw_mgr_decr_vfifo(grp);
18623da42859SDinh Nguyen 		} else {
18633da42859SDinh Nguyen 			p = p - 1;
18643da42859SDinh Nguyen 		}
18653da42859SDinh Nguyen 
1866160695d8SMarek Vasut 		work_end -= iocfg->delay_per_opa_tap;
18673da42859SDinh Nguyen 		scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
18683da42859SDinh Nguyen 
18693da42859SDinh Nguyen 		d = 0;
18703da42859SDinh Nguyen 
1871ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "%s:%d p: ptap=%u\n",
18722f3589caSMarek Vasut 			   __func__, __LINE__, p);
18733da42859SDinh Nguyen 	}
18743da42859SDinh Nguyen 
18752f3589caSMarek Vasut 	/* The dtap increment to find the failing edge is done here. */
187652e8f217SMarek Vasut 	sdr_find_phase_delay(0, 1, grp, &work_end,
1877160695d8SMarek Vasut 			     iocfg->delay_per_dqs_en_dchain_tap, &d);
18783da42859SDinh Nguyen 
18793da42859SDinh Nguyen 	/* Go back to working dtap */
18803da42859SDinh Nguyen 	if (d != 0)
1881160695d8SMarek Vasut 		work_end -= iocfg->delay_per_dqs_en_dchain_tap;
18823da42859SDinh Nguyen 
1883ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2,
18842f3589caSMarek Vasut 		   "%s:%d p/d: ptap=%u dtap=%u end=%u\n",
18852f3589caSMarek Vasut 		   __func__, __LINE__, p, d - 1, work_end);
18863da42859SDinh Nguyen 
18873da42859SDinh Nguyen 	if (work_end < work_bgn) {
18883da42859SDinh Nguyen 		/* nil range */
1889ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "%s:%d end-2: failed\n",
18902f3589caSMarek Vasut 			   __func__, __LINE__);
1891914546e7SMarek Vasut 		return -EINVAL;
18923da42859SDinh Nguyen 	}
18933da42859SDinh Nguyen 
1894ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d found range [%u,%u]\n",
18953da42859SDinh Nguyen 		   __func__, __LINE__, work_bgn, work_end);
18963da42859SDinh Nguyen 
18973da42859SDinh Nguyen 	/*
18982f3589caSMarek Vasut 	 * We need to calculate the number of dtaps that equal a ptap.
18992f3589caSMarek Vasut 	 * To do that we'll back up a ptap and re-find the edge of the
19002f3589caSMarek Vasut 	 * window using dtaps
19013da42859SDinh Nguyen 	 */
1902ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d calculate dtaps_per_ptap for tracking\n",
19032f3589caSMarek Vasut 		   __func__, __LINE__);
19043da42859SDinh Nguyen 
19053da42859SDinh Nguyen 	/* Special case code for backing up a phase */
19063da42859SDinh Nguyen 	if (p == 0) {
1907160695d8SMarek Vasut 		p = iocfg->dqs_en_phase_max;
19088c887b6eSMarek Vasut 		rw_mgr_decr_vfifo(grp);
1909ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "%s:%d backedup cycle/phase: p=%u\n",
19102f3589caSMarek Vasut 			   __func__, __LINE__, p);
19113da42859SDinh Nguyen 	} else {
19123da42859SDinh Nguyen 		p = p - 1;
1913ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "%s:%d backedup phase only: p=%u\n",
19142f3589caSMarek Vasut 			   __func__, __LINE__, p);
19153da42859SDinh Nguyen 	}
19163da42859SDinh Nguyen 
19173da42859SDinh Nguyen 	scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
19183da42859SDinh Nguyen 
19193da42859SDinh Nguyen 	/*
19203da42859SDinh Nguyen 	 * Increase dtap until we first see a passing read (in case the
19212f3589caSMarek Vasut 	 * window is smaller than a ptap), and then a failing read to
19222f3589caSMarek Vasut 	 * mark the edge of the window again.
19233da42859SDinh Nguyen 	 */
19243da42859SDinh Nguyen 
19252f3589caSMarek Vasut 	/* Find a passing read. */
1926ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d find passing read\n",
19273da42859SDinh Nguyen 		   __func__, __LINE__);
192852e8f217SMarek Vasut 
19293da42859SDinh Nguyen 	initial_failing_dtap = d;
19303da42859SDinh Nguyen 
193152e8f217SMarek Vasut 	found_passing_read = !sdr_find_phase_delay(1, 1, grp, NULL, 0, &d);
19323da42859SDinh Nguyen 	if (found_passing_read) {
19332f3589caSMarek Vasut 		/* Find a failing read. */
1934ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "%s:%d find failing read\n",
19352f3589caSMarek Vasut 			   __func__, __LINE__);
193652e8f217SMarek Vasut 		d++;
193752e8f217SMarek Vasut 		found_failing_read = !sdr_find_phase_delay(0, 1, grp, NULL, 0,
193852e8f217SMarek Vasut 							   &d);
19393da42859SDinh Nguyen 	} else {
1940ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 1,
19412f3589caSMarek Vasut 			   "%s:%d failed to calculate dtaps per ptap. Fall back on static value\n",
19422f3589caSMarek Vasut 			   __func__, __LINE__);
19433da42859SDinh Nguyen 	}
19443da42859SDinh Nguyen 
19453da42859SDinh Nguyen 	/*
19463da42859SDinh Nguyen 	 * The dynamically calculated dtaps_per_ptap is only valid if we
19473da42859SDinh Nguyen 	 * found a passing/failing read. If we didn't, it means d hit the max
1948160695d8SMarek Vasut 	 * (iocfg->dqs_en_delay_max). Otherwise, dtaps_per_ptap retains its
19493da42859SDinh Nguyen 	 * statically calculated value.
19503da42859SDinh Nguyen 	 */
19513da42859SDinh Nguyen 	if (found_passing_read && found_failing_read)
19523da42859SDinh Nguyen 		dtaps_per_ptap = d - initial_failing_dtap;
19533da42859SDinh Nguyen 
19541273dd9eSMarek Vasut 	writel(dtaps_per_ptap, &sdr_reg_file->dtaps_per_ptap);
1955ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d dtaps_per_ptap=%u - %u = %u\n",
19562f3589caSMarek Vasut 		   __func__, __LINE__, d, initial_failing_dtap, dtaps_per_ptap);
19573da42859SDinh Nguyen 
19582f3589caSMarek Vasut 	/* Step 6: Find the centre of the window. */
1959914546e7SMarek Vasut 	ret = sdr_find_window_center(grp, work_bgn, work_end);
19603da42859SDinh Nguyen 
1961914546e7SMarek Vasut 	return ret;
19623da42859SDinh Nguyen }
19633da42859SDinh Nguyen 
1964c4907898SMarek Vasut /**
1965901dc36eSMarek Vasut  * search_stop_check() - Check if the detected edge is valid
1966901dc36eSMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
1967901dc36eSMarek Vasut  * @d:			DQS delay
1968901dc36eSMarek Vasut  * @rank_bgn:		Rank number
1969901dc36eSMarek Vasut  * @write_group:	Write Group
1970901dc36eSMarek Vasut  * @read_group:		Read Group
1971901dc36eSMarek Vasut  * @bit_chk:		Resulting bit mask after the test
1972901dc36eSMarek Vasut  * @sticky_bit_chk:	Resulting sticky bit mask after the test
1973901dc36eSMarek Vasut  * @use_read_test:	Perform read test
1974901dc36eSMarek Vasut  *
1975901dc36eSMarek Vasut  * Test if the found edge is valid.
1976901dc36eSMarek Vasut  */
1977901dc36eSMarek Vasut static u32 search_stop_check(const int write, const int d, const int rank_bgn,
1978901dc36eSMarek Vasut 			     const u32 write_group, const u32 read_group,
1979901dc36eSMarek Vasut 			     u32 *bit_chk, u32 *sticky_bit_chk,
1980901dc36eSMarek Vasut 			     const u32 use_read_test)
1981901dc36eSMarek Vasut {
19821fa0c8c4SMarek Vasut 	const u32 ratio = rwcfg->mem_if_read_dqs_width /
19831fa0c8c4SMarek Vasut 			  rwcfg->mem_if_write_dqs_width;
1984901dc36eSMarek Vasut 	const u32 correct_mask = write ? param->write_correct_mask :
1985901dc36eSMarek Vasut 					 param->read_correct_mask;
19861fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
19871fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
1988901dc36eSMarek Vasut 	u32 ret;
1989901dc36eSMarek Vasut 	/*
1990901dc36eSMarek Vasut 	 * Stop searching when the read test doesn't pass AND when
1991901dc36eSMarek Vasut 	 * we've seen a passing read on every bit.
1992901dc36eSMarek Vasut 	 */
1993901dc36eSMarek Vasut 	if (write) {			/* WRITE-ONLY */
1994901dc36eSMarek Vasut 		ret = !rw_mgr_mem_calibrate_write_test(rank_bgn, write_group,
1995901dc36eSMarek Vasut 							 0, PASS_ONE_BIT,
1996901dc36eSMarek Vasut 							 bit_chk, 0);
1997901dc36eSMarek Vasut 	} else if (use_read_test) {	/* READ-ONLY */
1998901dc36eSMarek Vasut 		ret = !rw_mgr_mem_calibrate_read_test(rank_bgn, read_group,
1999901dc36eSMarek Vasut 							NUM_READ_PB_TESTS,
2000901dc36eSMarek Vasut 							PASS_ONE_BIT, bit_chk,
2001901dc36eSMarek Vasut 							0, 0);
2002901dc36eSMarek Vasut 	} else {			/* READ-ONLY */
2003901dc36eSMarek Vasut 		rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 0,
2004901dc36eSMarek Vasut 						PASS_ONE_BIT, bit_chk, 0);
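		/*
		 * The write test reports one bit per write-group DQ; shift
		 * the result so the bits belonging to this read group (there
		 * are 'ratio' read groups per write group) land at bit 0.
		 */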
2005901dc36eSMarek Vasut 		*bit_chk = *bit_chk >> (per_dqs *
2006901dc36eSMarek Vasut 			(read_group - (write_group * ratio)));
2007901dc36eSMarek Vasut 		ret = (*bit_chk == 0);
2008901dc36eSMarek Vasut 	}
2009901dc36eSMarek Vasut 	*sticky_bit_chk = *sticky_bit_chk | *bit_chk;
2010901dc36eSMarek Vasut 	ret = ret && (*sticky_bit_chk == correct_mask);
2011ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2,
2012901dc36eSMarek Vasut 		   "%s:%d center(left): dtap=%u => %u == %u && %u\n",
2013901dc36eSMarek Vasut 		   __func__, __LINE__, d,
2014901dc36eSMarek Vasut 		   *sticky_bit_chk, correct_mask, ret);
2015901dc36eSMarek Vasut 	return ret;
2016901dc36eSMarek Vasut }
2017901dc36eSMarek Vasut 
2018901dc36eSMarek Vasut /**
201971120773SMarek Vasut  * search_left_edge() - Find left edge of DQ/DQS working phase
202071120773SMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
202171120773SMarek Vasut  * @rank_bgn:		Rank number
202271120773SMarek Vasut  * @write_group:	Write Group
202371120773SMarek Vasut  * @read_group:		Read Group
202471120773SMarek Vasut  * @test_bgn:		Rank number to begin the test
202571120773SMarek Vasut  * @sticky_bit_chk:	Resulting sticky bit mask after the test
202671120773SMarek Vasut  * @left_edge:		Left edge of the DQ/DQS phase
202771120773SMarek Vasut  * @right_edge:		Right edge of the DQ/DQS phase
202871120773SMarek Vasut  * @use_read_test:	Perform read test
202971120773SMarek Vasut  *
203071120773SMarek Vasut  * Find left edge of DQ/DQS working phase.
203171120773SMarek Vasut  */
203271120773SMarek Vasut static void search_left_edge(const int write, const int rank_bgn,
203371120773SMarek Vasut 	const u32 write_group, const u32 read_group, const u32 test_bgn,
20340c4be198SMarek Vasut 	u32 *sticky_bit_chk,
203571120773SMarek Vasut 	int *left_edge, int *right_edge, const u32 use_read_test)
203671120773SMarek Vasut {
2037139823ecSMarek Vasut 	const u32 delay_max = write ? iocfg->io_out1_delay_max :
2038139823ecSMarek Vasut 				      iocfg->io_in_delay_max;
2039139823ecSMarek Vasut 	const u32 dqs_max = write ? iocfg->io_out1_delay_max :
2040139823ecSMarek Vasut 				    iocfg->dqs_in_delay_max;
20411fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
20421fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
20430c4be198SMarek Vasut 	u32 stop, bit_chk;
204471120773SMarek Vasut 	int i, d;
204571120773SMarek Vasut 
204671120773SMarek Vasut 	for (d = 0; d <= dqs_max; d++) {
204771120773SMarek Vasut 		if (write)
204871120773SMarek Vasut 			scc_mgr_apply_group_dq_out1_delay(d);
204971120773SMarek Vasut 		else
205071120773SMarek Vasut 			scc_mgr_apply_group_dq_in_delay(test_bgn, d);
205171120773SMarek Vasut 
205271120773SMarek Vasut 		writel(0, &sdr_scc_mgr->update);
205371120773SMarek Vasut 
2054901dc36eSMarek Vasut 		stop = search_stop_check(write, d, rank_bgn, write_group,
20550c4be198SMarek Vasut 					 read_group, &bit_chk, sticky_bit_chk,
2056901dc36eSMarek Vasut 					 use_read_test);
205771120773SMarek Vasut 		if (stop == 1)
205871120773SMarek Vasut 			break;
205971120773SMarek Vasut 
206071120773SMarek Vasut 		/* stop != 1 */
206171120773SMarek Vasut 		for (i = 0; i < per_dqs; i++) {
20620c4be198SMarek Vasut 			if (bit_chk & 1) {
206371120773SMarek Vasut 				/*
206471120773SMarek Vasut 				 * Remember a passing test as
206571120773SMarek Vasut 				 * the left_edge.
206671120773SMarek Vasut 				 */
206771120773SMarek Vasut 				left_edge[i] = d;
206871120773SMarek Vasut 			} else {
206971120773SMarek Vasut 				/*
207071120773SMarek Vasut 				 * If a left edge has not been seen
207171120773SMarek Vasut 				 * yet, then a future passing test
207271120773SMarek Vasut 				 * will mark this edge as the right
207371120773SMarek Vasut 				 * edge.
207471120773SMarek Vasut 				 */
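				/*
				 * The candidate right edge is recorded as
				 * -(d + 1); the negative encoding marks it
				 * as provisional until a pass is seen.
				 */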
207571120773SMarek Vasut 				if (left_edge[i] == delay_max + 1)
207671120773SMarek Vasut 					right_edge[i] = -(d + 1);
207771120773SMarek Vasut 			}
20780c4be198SMarek Vasut 			bit_chk >>= 1;
207971120773SMarek Vasut 		}
208071120773SMarek Vasut 	}
208171120773SMarek Vasut 
208271120773SMarek Vasut 	/* Reset DQ delay chains to 0 */
208371120773SMarek Vasut 	if (write)
208471120773SMarek Vasut 		scc_mgr_apply_group_dq_out1_delay(0);
208571120773SMarek Vasut 	else
208671120773SMarek Vasut 		scc_mgr_apply_group_dq_in_delay(test_bgn, 0);
208771120773SMarek Vasut 
208871120773SMarek Vasut 	*sticky_bit_chk = 0;
208971120773SMarek Vasut 	for (i = per_dqs - 1; i >= 0; i--) {
2090ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
209171120773SMarek Vasut 			   "%s:%d vfifo_center: left_edge[%u]: %d right_edge[%u]: %d\n",
209271120773SMarek Vasut 			   __func__, __LINE__, i, left_edge[i],
209371120773SMarek Vasut 			   i, right_edge[i]);
209471120773SMarek Vasut 
209571120773SMarek Vasut 		/*
209671120773SMarek Vasut 		 * Check for cases where we haven't found the left edge,
209771120773SMarek Vasut 		 * which makes our assignment of the right edge invalid.
209871120773SMarek Vasut 		 * Reset it to the illegal value.
209971120773SMarek Vasut 		 */
210071120773SMarek Vasut 		if ((left_edge[i] == delay_max + 1) &&
210171120773SMarek Vasut 		    (right_edge[i] != delay_max + 1)) {
210271120773SMarek Vasut 			right_edge[i] = delay_max + 1;
2103ea9aa241SMarek Vasut 			debug_cond(DLEVEL >= 2,
210471120773SMarek Vasut 				   "%s:%d vfifo_center: reset right_edge[%u]: %d\n",
210571120773SMarek Vasut 				   __func__, __LINE__, i, right_edge[i]);
210671120773SMarek Vasut 		}
210771120773SMarek Vasut 
210871120773SMarek Vasut 		/*
210971120773SMarek Vasut 		 * Reset sticky bit
211071120773SMarek Vasut 		 * READ: except for bits where we have seen both
211171120773SMarek Vasut 		 *       the left and right edge.
211271120773SMarek Vasut 		 * WRITE: except for bits where we have seen the
211371120773SMarek Vasut 		 *        left edge.
211471120773SMarek Vasut 		 */
211571120773SMarek Vasut 		*sticky_bit_chk <<= 1;
211671120773SMarek Vasut 		if (write) {
211771120773SMarek Vasut 			if (left_edge[i] != delay_max + 1)
211871120773SMarek Vasut 				*sticky_bit_chk |= 1;
211971120773SMarek Vasut 		} else {
212071120773SMarek Vasut 			if ((left_edge[i] != delay_max + 1) &&
212171120773SMarek Vasut 			    (right_edge[i] != delay_max + 1))
212271120773SMarek Vasut 				*sticky_bit_chk |= 1;
212371120773SMarek Vasut 		}
212471120773SMarek Vasut 	}
212571120773SMarek Vasut }
212671120773SMarek Vasut 
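/*
 * Worked example of the edge bookkeeping in search_left_edge() above
 * (hypothetical numbers, assuming delay_max = 31): a DQ bit which fails
 * the test for d = 0..4 and passes for d = 5..12 leaves the sweep with
 *
 *	right_edge[i] = -(4 + 1) = -5	(set on the last fail before the
 *					 first pass)
 *	left_edge[i]  = 12		(the last passing tap)
 *
 * The negative right_edge records that the working window already began
 * within this sweep; the cleanup loop above then keeps the sticky bit for
 * this DQ, as both edges differ from the illegal value delay_max + 1.
 */
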
212771120773SMarek Vasut /**
2128c4907898SMarek Vasut  * search_right_edge() - Find right edge of DQ/DQS working phase
2129c4907898SMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
2130c4907898SMarek Vasut  * @rank_bgn:		Rank number
2131c4907898SMarek Vasut  * @write_group:	Write Group
2132c4907898SMarek Vasut  * @read_group:		Read Group
2133c4907898SMarek Vasut  * @start_dqs:		DQS start phase
2134c4907898SMarek Vasut  * @start_dqs_en:	DQS enable start phase
2135c4907898SMarek Vasut  * @sticky_bit_chk:	Resulting sticky bit mask after the test
2136c4907898SMarek Vasut  * @left_edge:		Left edge of the DQ/DQS phase
2137c4907898SMarek Vasut  * @right_edge:		Right edge of the DQ/DQS phase
2138c4907898SMarek Vasut  * @use_read_test:	Perform read test
2139c4907898SMarek Vasut  *
2140c4907898SMarek Vasut  * Find right edge of DQ/DQS working phase.
2141c4907898SMarek Vasut  */
2142c4907898SMarek Vasut static int search_right_edge(const int write, const int rank_bgn,
2143c4907898SMarek Vasut 	const u32 write_group, const u32 read_group,
2144c4907898SMarek Vasut 	const int start_dqs, const int start_dqs_en,
21450c4be198SMarek Vasut 	u32 *sticky_bit_chk,
2146c4907898SMarek Vasut 	int *left_edge, int *right_edge, const u32 use_read_test)
2147c4907898SMarek Vasut {
2148139823ecSMarek Vasut 	const u32 delay_max = write ? iocfg->io_out1_delay_max :
2149139823ecSMarek Vasut 				      iocfg->io_in_delay_max;
2150139823ecSMarek Vasut 	const u32 dqs_max = write ? iocfg->io_out1_delay_max :
2151139823ecSMarek Vasut 				    iocfg->dqs_in_delay_max;
21521fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
21531fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
21540c4be198SMarek Vasut 	u32 stop, bit_chk;
2155c4907898SMarek Vasut 	int i, d;
2156c4907898SMarek Vasut 
2157c4907898SMarek Vasut 	for (d = 0; d <= dqs_max - start_dqs; d++) {
2158c4907898SMarek Vasut 		if (write) {	/* WRITE-ONLY */
2159c4907898SMarek Vasut 			scc_mgr_apply_group_dqs_io_and_oct_out1(write_group,
2160c4907898SMarek Vasut 								d + start_dqs);
2161c4907898SMarek Vasut 		} else {	/* READ-ONLY */
2162c4907898SMarek Vasut 			scc_mgr_set_dqs_bus_in_delay(read_group, d + start_dqs);
2163160695d8SMarek Vasut 			if (iocfg->shift_dqs_en_when_shift_dqs) {
21645ded7320SMarek Vasut 				u32 delay = d + start_dqs_en;
2165160695d8SMarek Vasut 				if (delay > iocfg->dqs_en_delay_max)
2166160695d8SMarek Vasut 					delay = iocfg->dqs_en_delay_max;
2167c4907898SMarek Vasut 				scc_mgr_set_dqs_en_delay(read_group, delay);
2168c4907898SMarek Vasut 			}
2169c4907898SMarek Vasut 			scc_mgr_load_dqs(read_group);
2170c4907898SMarek Vasut 		}
2171c4907898SMarek Vasut 
2172c4907898SMarek Vasut 		writel(0, &sdr_scc_mgr->update);
2173c4907898SMarek Vasut 
2174901dc36eSMarek Vasut 		stop = search_stop_check(write, d, rank_bgn, write_group,
21750c4be198SMarek Vasut 					 read_group, &bit_chk, sticky_bit_chk,
2176901dc36eSMarek Vasut 					 use_read_test);
2177c4907898SMarek Vasut 		if (stop == 1) {
2178c4907898SMarek Vasut 			if (write && (d == 0)) {	/* WRITE-ONLY */
2179139823ecSMarek Vasut 				for (i = 0; i < rwcfg->mem_dq_per_write_dqs;
2180139823ecSMarek Vasut 				     i++) {
2181c4907898SMarek Vasut 					/*
2182c4907898SMarek Vasut 					 * d = 0 failed, but it passed when
2183c4907898SMarek Vasut 					 * testing the left edge, so it must be
2184c4907898SMarek Vasut 					 * marginal, set it to -1
2185c4907898SMarek Vasut 					 */
2186c4907898SMarek Vasut 					if (right_edge[i] == delay_max + 1 &&
2187c4907898SMarek Vasut 					    left_edge[i] != delay_max + 1)
2188c4907898SMarek Vasut 						right_edge[i] = -1;
2189c4907898SMarek Vasut 				}
2190c4907898SMarek Vasut 			}
2191c4907898SMarek Vasut 			break;
2192c4907898SMarek Vasut 		}
2193c4907898SMarek Vasut 
2194c4907898SMarek Vasut 		/* stop != 1 */
2195c4907898SMarek Vasut 		for (i = 0; i < per_dqs; i++) {
21960c4be198SMarek Vasut 			if (bit_chk & 1) {
2197c4907898SMarek Vasut 				/*
2198c4907898SMarek Vasut 				 * Remember a passing test as
2199c4907898SMarek Vasut 				 * the right_edge.
2200c4907898SMarek Vasut 				 */
2201c4907898SMarek Vasut 				right_edge[i] = d;
2202c4907898SMarek Vasut 			} else {
2203c4907898SMarek Vasut 				if (d != 0) {
2204c4907898SMarek Vasut 					/*
2205c4907898SMarek Vasut 					 * If a right edge has not
2206c4907898SMarek Vasut 					 * been seen yet, then a future
2207c4907898SMarek Vasut 					 * passing test will mark this
2208c4907898SMarek Vasut 					 * edge as the left edge.
2209c4907898SMarek Vasut 					 */
2210c4907898SMarek Vasut 					if (right_edge[i] == delay_max + 1)
2211c4907898SMarek Vasut 						left_edge[i] = -(d + 1);
2212c4907898SMarek Vasut 				} else {
2213c4907898SMarek Vasut 					/*
2214c4907898SMarek Vasut 					 * d = 0 failed, but it passed
2215c4907898SMarek Vasut 					 * when testing the left edge,
2216c4907898SMarek Vasut 					 * so it must be marginal, set
2217c4907898SMarek Vasut 					 * it to -1
2218c4907898SMarek Vasut 					 */
2219c4907898SMarek Vasut 					if (right_edge[i] == delay_max + 1 &&
2220c4907898SMarek Vasut 					    left_edge[i] != delay_max + 1)
2221c4907898SMarek Vasut 						right_edge[i] = -1;
2222c4907898SMarek Vasut 					/*
2223c4907898SMarek Vasut 					 * If a right edge has not been
2224c4907898SMarek Vasut 					 * seen yet, then a future
2225c4907898SMarek Vasut 					 * passing test will mark this
2226c4907898SMarek Vasut 					 * edge as the left edge.
2227c4907898SMarek Vasut 					 */
2228c4907898SMarek Vasut 					else if (right_edge[i] == delay_max + 1)
2229c4907898SMarek Vasut 						left_edge[i] = -(d + 1);
2230c4907898SMarek Vasut 				}
2231c4907898SMarek Vasut 			}
2232c4907898SMarek Vasut 
2233ea9aa241SMarek Vasut 			debug_cond(DLEVEL >= 2, "%s:%d center[r,d=%u]: ",
2234c4907898SMarek Vasut 				   __func__, __LINE__, d);
2235ea9aa241SMarek Vasut 			debug_cond(DLEVEL >= 2,
2236c4907898SMarek Vasut 				   "bit_chk_test=%i left_edge[%u]: %d ",
22370c4be198SMarek Vasut 				   bit_chk & 1, i, left_edge[i]);
2238ea9aa241SMarek Vasut 			debug_cond(DLEVEL >= 2, "right_edge[%u]: %d\n", i,
2239c4907898SMarek Vasut 				   right_edge[i]);
22400c4be198SMarek Vasut 			bit_chk >>= 1;
2241c4907898SMarek Vasut 		}
2242c4907898SMarek Vasut 	}
2243c4907898SMarek Vasut 
2244c4907898SMarek Vasut 	/* Check that all bits have a window */
2245c4907898SMarek Vasut 	for (i = 0; i < per_dqs; i++) {
2246ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
2247c4907898SMarek Vasut 			   "%s:%d write_center: left_edge[%u]: %d right_edge[%u]: %d",
2248c4907898SMarek Vasut 			   __func__, __LINE__, i, left_edge[i],
2249c4907898SMarek Vasut 			   i, right_edge[i]);
2250c4907898SMarek Vasut 		if ((left_edge[i] == dqs_max + 1) ||
2251c4907898SMarek Vasut 		    (right_edge[i] == dqs_max + 1))
2252c4907898SMarek Vasut 			return i + 1;	/* FIXME: If we fail, retval > 0 */
2253c4907898SMarek Vasut 	}
2254c4907898SMarek Vasut 
2255c4907898SMarek Vasut 	return 0;
2256c4907898SMarek Vasut }
2257c4907898SMarek Vasut 
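/*
 * A non-zero return value from search_right_edge() above encodes 1 plus
 * the index of the first bit without a working window; 0 means every bit
 * has one. As a sketch, assuming per_dqs = 8: if bit 3 never sees both
 * edges, the function returns 4 and rw_mgr_mem_calibrate_writes_center()
 * below reports the failing group as test_bgn + ret - 1.
 */
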
2258afb3eb84SMarek Vasut /**
2259afb3eb84SMarek Vasut  * get_window_mid_index() - Find the best middle setting of DQ/DQS phase
2260afb3eb84SMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
2261afb3eb84SMarek Vasut  * @left_edge:		Left edge of the DQ/DQS phase
2262afb3eb84SMarek Vasut  * @right_edge:		Right edge of the DQ/DQS phase
2263afb3eb84SMarek Vasut  * @mid_min:		Best DQ/DQS phase middle setting
2264afb3eb84SMarek Vasut  *
2265afb3eb84SMarek Vasut  * Find index and value of the middle of the DQ/DQS working phase.
2266afb3eb84SMarek Vasut  */
2267afb3eb84SMarek Vasut static int get_window_mid_index(const int write, int *left_edge,
2268afb3eb84SMarek Vasut 				int *right_edge, int *mid_min)
2269afb3eb84SMarek Vasut {
22701fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
22711fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
2272afb3eb84SMarek Vasut 	int i, mid, min_index;
2273afb3eb84SMarek Vasut 
2274afb3eb84SMarek Vasut 	/* Find middle of window for each DQ bit */
2275afb3eb84SMarek Vasut 	*mid_min = left_edge[0] - right_edge[0];
2276afb3eb84SMarek Vasut 	min_index = 0;
2277afb3eb84SMarek Vasut 	for (i = 1; i < per_dqs; i++) {
2278afb3eb84SMarek Vasut 		mid = left_edge[i] - right_edge[i];
2279afb3eb84SMarek Vasut 		if (mid < *mid_min) {
2280afb3eb84SMarek Vasut 			*mid_min = mid;
2281afb3eb84SMarek Vasut 			min_index = i;
2282afb3eb84SMarek Vasut 		}
2283afb3eb84SMarek Vasut 	}
2284afb3eb84SMarek Vasut 
2285afb3eb84SMarek Vasut 	/*
2286afb3eb84SMarek Vasut 	 * -mid_min/2 represents the amount that we need to move DQS.
2287afb3eb84SMarek Vasut 	 * If mid_min is odd and positive we'll need to add one to make
2288afb3eb84SMarek Vasut 	 * sure the rounding in further calculations is correct (always
2289afb3eb84SMarek Vasut 	 * bias to the right), so just add 1 for all positive values.
2290afb3eb84SMarek Vasut 	 */
2291afb3eb84SMarek Vasut 	if (*mid_min > 0)
2292afb3eb84SMarek Vasut 		(*mid_min)++;
2293afb3eb84SMarek Vasut 	*mid_min = *mid_min / 2;
2294afb3eb84SMarek Vasut 
2295ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1, "%s:%d vfifo_center: *mid_min=%d (index=%u)\n",
2296afb3eb84SMarek Vasut 		   __func__, __LINE__, *mid_min, min_index);
2297afb3eb84SMarek Vasut 	return min_index;
2298afb3eb84SMarek Vasut }
2299afb3eb84SMarek Vasut 
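/*
 * Worked example (hypothetical numbers): two DQ bits with (left_edge,
 * right_edge) of (12, -5) and (10, -3) give
 *
 *	mid = left_edge[i] - right_edge[i]	-> 17 and 13
 *	*mid_min = 13, min_index = 1		(smallest window)
 *	13 is positive and odd			-> bumped to 14
 *	14 / 2					-> *mid_min = 7
 *
 * so the callers move DQS by -7 taps (new_dqs = start_dqs - mid_min),
 * centering the narrowest window with the rounding biased to the right.
 */
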
2300ffb8b66eSMarek Vasut /**
2301ffb8b66eSMarek Vasut  * center_dq_windows() - Center the DQ/DQS windows
2302ffb8b66eSMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
2303ffb8b66eSMarek Vasut  * @left_edge:		Left edge of the DQ/DQS phase
2304ffb8b66eSMarek Vasut  * @right_edge:		Right edge of the DQ/DQS phase
2305ffb8b66eSMarek Vasut  * @mid_min:		Adjusted DQ/DQS phase middle setting
2306ffb8b66eSMarek Vasut  * @orig_mid_min:	Original DQ/DQS phase middle setting
2307ffb8b66eSMarek Vasut  * @min_index:		DQ/DQS phase middle setting index
2308ffb8b66eSMarek Vasut  * @test_bgn:		Rank number to begin the test
2309ffb8b66eSMarek Vasut  * @dq_margin:		Amount of shift for the DQ
2310ffb8b66eSMarek Vasut  * @dqs_margin:		Amount of shift for the DQS
2311ffb8b66eSMarek Vasut  *
2312ffb8b66eSMarek Vasut  * Align the DQ/DQS windows in each group.
2313ffb8b66eSMarek Vasut  */
2314ffb8b66eSMarek Vasut static void center_dq_windows(const int write, int *left_edge, int *right_edge,
2315ffb8b66eSMarek Vasut 			      const int mid_min, const int orig_mid_min,
2316ffb8b66eSMarek Vasut 			      const int min_index, const int test_bgn,
2317ffb8b66eSMarek Vasut 			      int *dq_margin, int *dqs_margin)
2318ffb8b66eSMarek Vasut {
2319*e026b984SMarek Vasut 	const s32 delay_max = write ? iocfg->io_out1_delay_max :
2320139823ecSMarek Vasut 				      iocfg->io_in_delay_max;
2321*e026b984SMarek Vasut 	const s32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
23221fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
2323*e026b984SMarek Vasut 	const s32 delay_off = write ? SCC_MGR_IO_OUT1_DELAY_OFFSET :
2324ffb8b66eSMarek Vasut 				      SCC_MGR_IO_IN_DELAY_OFFSET;
2325*e026b984SMarek Vasut 	const s32 addr = SDR_PHYGRP_SCCGRP_ADDRESS | delay_off;
2326ffb8b66eSMarek Vasut 
2327*e026b984SMarek Vasut 	s32 temp_dq_io_delay1;
2328ffb8b66eSMarek Vasut 	int shift_dq, i, p;
2329ffb8b66eSMarek Vasut 
2330ffb8b66eSMarek Vasut 	/* Initialize data for export structures */
2331ffb8b66eSMarek Vasut 	*dqs_margin = delay_max + 1;
2332ffb8b66eSMarek Vasut 	*dq_margin  = delay_max + 1;
2333ffb8b66eSMarek Vasut 
2334ffb8b66eSMarek Vasut 	/* add delay to bring centre of all DQ windows to the same "level" */
2335ffb8b66eSMarek Vasut 	for (i = 0, p = test_bgn; i < per_dqs; i++, p++) {
2336ffb8b66eSMarek Vasut 		/* Use values before divide by 2 to reduce round off error */
2337ffb8b66eSMarek Vasut 		shift_dq = (left_edge[i] - right_edge[i] -
2338ffb8b66eSMarek Vasut 			(left_edge[min_index] - right_edge[min_index]))/2  +
2339ffb8b66eSMarek Vasut 			(orig_mid_min - mid_min);
2340ffb8b66eSMarek Vasut 
2341ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
2342ffb8b66eSMarek Vasut 			   "vfifo_center: before: shift_dq[%u]=%d\n",
2343ffb8b66eSMarek Vasut 			   i, shift_dq);
2344ffb8b66eSMarek Vasut 
2345*e026b984SMarek Vasut 		temp_dq_io_delay1 = readl(addr + (i << 2));
2346ffb8b66eSMarek Vasut 
2347ffb8b66eSMarek Vasut 		if (shift_dq + temp_dq_io_delay1 > delay_max)
2348*e026b984SMarek Vasut 			shift_dq = delay_max - temp_dq_io_delay1;
2349ffb8b66eSMarek Vasut 		else if (shift_dq + temp_dq_io_delay1 < 0)
2350ffb8b66eSMarek Vasut 			shift_dq = -temp_dq_io_delay1;
2351ffb8b66eSMarek Vasut 
2352ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
2353ffb8b66eSMarek Vasut 			   "vfifo_center: after: shift_dq[%u]=%d\n",
2354ffb8b66eSMarek Vasut 			   i, shift_dq);
2355ffb8b66eSMarek Vasut 
2356ffb8b66eSMarek Vasut 		if (write)
2357139823ecSMarek Vasut 			scc_mgr_set_dq_out1_delay(i,
2358139823ecSMarek Vasut 						  temp_dq_io_delay1 + shift_dq);
2359ffb8b66eSMarek Vasut 		else
2360139823ecSMarek Vasut 			scc_mgr_set_dq_in_delay(p,
2361139823ecSMarek Vasut 						temp_dq_io_delay1 + shift_dq);
2362ffb8b66eSMarek Vasut 
2363ffb8b66eSMarek Vasut 		scc_mgr_load_dq(p);
2364ffb8b66eSMarek Vasut 
2365ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
2366ffb8b66eSMarek Vasut 			   "vfifo_center: margin[%u]=[%d,%d]\n", i,
2367ffb8b66eSMarek Vasut 			   left_edge[i] - shift_dq + (-mid_min),
2368ffb8b66eSMarek Vasut 			   right_edge[i] + shift_dq - (-mid_min));
2369ffb8b66eSMarek Vasut 
2370ffb8b66eSMarek Vasut 		/* To determine values for export structures */
2371ffb8b66eSMarek Vasut 		if (left_edge[i] - shift_dq + (-mid_min) < *dq_margin)
2372ffb8b66eSMarek Vasut 			*dq_margin = left_edge[i] - shift_dq + (-mid_min);
2373ffb8b66eSMarek Vasut 
2374ffb8b66eSMarek Vasut 		if (right_edge[i] + shift_dq - (-mid_min) < *dqs_margin)
2375ffb8b66eSMarek Vasut 			*dqs_margin = right_edge[i] + shift_dq - (-mid_min);
2376ffb8b66eSMarek Vasut 	}
2377ffb8b66eSMarek Vasut }
2378ffb8b66eSMarek Vasut 
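/*
 * Worked example of the shift_dq computation above (hypothetical numbers,
 * continuing the sketch after get_window_mid_index()): for the bit with
 * left_edge = 12 and right_edge = -5, with the min_index bit spanning
 * left_edge - right_edge = 13 and orig_mid_min == mid_min = 7:
 *
 *	shift_dq = (17 - 13) / 2 + 0 = 2
 *
 * With a current readback delay of 0 the clamps leave shift_dq at 2, the
 * bit's delay chain is programmed to 2, and the margin candidates become
 * dq_margin: 12 - 2 - 7 = 3 and dqs_margin: -5 + 2 + 7 = 4.
 */
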
2379ac63b9adSMarek Vasut /**
2380ac63b9adSMarek Vasut  * rw_mgr_mem_calibrate_vfifo_center() - Per-bit deskew DQ and centering
2381ac63b9adSMarek Vasut  * @rank_bgn:		Rank number
2382ac63b9adSMarek Vasut  * @rw_group:		Read/Write Group
2383ac63b9adSMarek Vasut  * @test_bgn:		Rank at which the test begins
2384ac63b9adSMarek Vasut  * @use_read_test:	Perform a read test
2385ac63b9adSMarek Vasut  * @update_fom:		Update FOM
2386ac63b9adSMarek Vasut  *
2387ac63b9adSMarek Vasut  * Per-bit deskew DQ and centering.
2388ac63b9adSMarek Vasut  */
23890113c3e1SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_center(const u32 rank_bgn,
23900113c3e1SMarek Vasut 			const u32 rw_group, const u32 test_bgn,
23910113c3e1SMarek Vasut 			const int use_read_test, const int update_fom)
23923da42859SDinh Nguyen {
23935d6db444SMarek Vasut 	const u32 addr =
23945d6db444SMarek Vasut 		SDR_PHYGRP_SCCGRP_ADDRESS + SCC_MGR_DQS_IN_DELAY_OFFSET +
23950113c3e1SMarek Vasut 		(rw_group << 2);
23963da42859SDinh Nguyen 	/*
23973da42859SDinh Nguyen 	 * Store these as signed since there are comparisons with
23983da42859SDinh Nguyen 	 * signed numbers.
23993da42859SDinh Nguyen 	 */
24005ded7320SMarek Vasut 	u32 sticky_bit_chk;
24011fa0c8c4SMarek Vasut 	int32_t left_edge[rwcfg->mem_dq_per_read_dqs];
24021fa0c8c4SMarek Vasut 	int32_t right_edge[rwcfg->mem_dq_per_read_dqs];
24033da42859SDinh Nguyen 	int32_t orig_mid_min, mid_min;
2404160695d8SMarek Vasut 	int32_t new_dqs, start_dqs, start_dqs_en = 0, final_dqs_en;
24053da42859SDinh Nguyen 	int32_t dq_margin, dqs_margin;
24065d6db444SMarek Vasut 	int i, min_index;
2407c4907898SMarek Vasut 	int ret;
24083da42859SDinh Nguyen 
24090113c3e1SMarek Vasut 	debug("%s:%d: %u %u", __func__, __LINE__, rw_group, test_bgn);
24103da42859SDinh Nguyen 
24115d6db444SMarek Vasut 	start_dqs = readl(addr);
2412160695d8SMarek Vasut 	if (iocfg->shift_dqs_en_when_shift_dqs)
2413160695d8SMarek Vasut 		start_dqs_en = readl(addr - iocfg->dqs_en_delay_offset);
24143da42859SDinh Nguyen 
24153da42859SDinh Nguyen 	/* set the left and right edge of each bit to an illegal value */
2416160695d8SMarek Vasut 	/* use (iocfg->io_in_delay_max + 1) as an illegal value */
24173da42859SDinh Nguyen 	sticky_bit_chk = 0;
24181fa0c8c4SMarek Vasut 	for (i = 0; i < rwcfg->mem_dq_per_read_dqs; i++) {
2419160695d8SMarek Vasut 		left_edge[i]  = iocfg->io_in_delay_max + 1;
2420160695d8SMarek Vasut 		right_edge[i] = iocfg->io_in_delay_max + 1;
24213da42859SDinh Nguyen 	}
24223da42859SDinh Nguyen 
24233da42859SDinh Nguyen 	/* Search for the left edge of the window for each bit */
24240113c3e1SMarek Vasut 	search_left_edge(0, rank_bgn, rw_group, rw_group, test_bgn,
24250c4be198SMarek Vasut 			 &sticky_bit_chk,
242671120773SMarek Vasut 			 left_edge, right_edge, use_read_test);
24273da42859SDinh Nguyen 
24293da42859SDinh Nguyen 	/* Search for the right edge of the window for each bit */
24300113c3e1SMarek Vasut 	ret = search_right_edge(0, rank_bgn, rw_group, rw_group,
2431c4907898SMarek Vasut 				start_dqs, start_dqs_en,
24320c4be198SMarek Vasut 				&sticky_bit_chk,
2433c4907898SMarek Vasut 				left_edge, right_edge, use_read_test);
2434c4907898SMarek Vasut 	if (ret) {
24353da42859SDinh Nguyen 		/*
24363da42859SDinh Nguyen 		 * Restore delay chain settings before letting the loop
24373da42859SDinh Nguyen 		 * in rw_mgr_mem_calibrate_vfifo to retry different
24383da42859SDinh Nguyen 		 * dqs/ck relationships.
24393da42859SDinh Nguyen 		 */
24400113c3e1SMarek Vasut 		scc_mgr_set_dqs_bus_in_delay(rw_group, start_dqs);
2441160695d8SMarek Vasut 		if (iocfg->shift_dqs_en_when_shift_dqs)
24420113c3e1SMarek Vasut 			scc_mgr_set_dqs_en_delay(rw_group, start_dqs_en);
2443c4907898SMarek Vasut 
24440113c3e1SMarek Vasut 		scc_mgr_load_dqs(rw_group);
24451273dd9eSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
24463da42859SDinh Nguyen 
2447ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 1,
2448c4907898SMarek Vasut 			   "%s:%d vfifo_center: failed to find edge [%u]: %d %d",
2449c4907898SMarek Vasut 			   __func__, __LINE__, i, left_edge[i], right_edge[i]);
24503da42859SDinh Nguyen 		if (use_read_test) {
24510113c3e1SMarek Vasut 			set_failing_group_stage(rw_group *
24521fa0c8c4SMarek Vasut 				rwcfg->mem_dq_per_read_dqs + i,
24533da42859SDinh Nguyen 				CAL_STAGE_VFIFO,
24543da42859SDinh Nguyen 				CAL_SUBSTAGE_VFIFO_CENTER);
24553da42859SDinh Nguyen 		} else {
24560113c3e1SMarek Vasut 			set_failing_group_stage(rw_group *
24571fa0c8c4SMarek Vasut 				rwcfg->mem_dq_per_read_dqs + i,
24583da42859SDinh Nguyen 				CAL_STAGE_VFIFO_AFTER_WRITES,
24593da42859SDinh Nguyen 				CAL_SUBSTAGE_VFIFO_CENTER);
24603da42859SDinh Nguyen 		}
246198668247SMarek Vasut 		return -EIO;
24623da42859SDinh Nguyen 	}
24633da42859SDinh Nguyen 
2464afb3eb84SMarek Vasut 	min_index = get_window_mid_index(0, left_edge, right_edge, &mid_min);
24653da42859SDinh Nguyen 
24663da42859SDinh Nguyen 	/* Determine the amount we can change DQS (which is -mid_min) */
24673da42859SDinh Nguyen 	orig_mid_min = mid_min;
24683da42859SDinh Nguyen 	new_dqs = start_dqs - mid_min;
2469160695d8SMarek Vasut 	if (new_dqs > iocfg->dqs_in_delay_max)
2470160695d8SMarek Vasut 		new_dqs = iocfg->dqs_in_delay_max;
24713da42859SDinh Nguyen 	else if (new_dqs < 0)
24723da42859SDinh Nguyen 		new_dqs = 0;
24733da42859SDinh Nguyen 
24743da42859SDinh Nguyen 	mid_min = start_dqs - new_dqs;
2475ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1, "vfifo_center: new mid_min=%d new_dqs=%d\n",
24763da42859SDinh Nguyen 		   mid_min, new_dqs);
24773da42859SDinh Nguyen 
2478160695d8SMarek Vasut 	if (iocfg->shift_dqs_en_when_shift_dqs) {
2479160695d8SMarek Vasut 		if (start_dqs_en - mid_min > iocfg->dqs_en_delay_max)
2480139823ecSMarek Vasut 			mid_min += start_dqs_en - mid_min -
2481139823ecSMarek Vasut 				   iocfg->dqs_en_delay_max;
24823da42859SDinh Nguyen 		else if (start_dqs_en - mid_min < 0)
24833da42859SDinh Nguyen 			mid_min += start_dqs_en - mid_min;
24843da42859SDinh Nguyen 	}
24853da42859SDinh Nguyen 	new_dqs = start_dqs - mid_min;
24863da42859SDinh Nguyen 
2487ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1,
2488f0712c35SMarek Vasut 		   "vfifo_center: start_dqs=%d start_dqs_en=%d new_dqs=%d mid_min=%d\n",
2489f0712c35SMarek Vasut 		   start_dqs,
2490160695d8SMarek Vasut 		   iocfg->shift_dqs_en_when_shift_dqs ? start_dqs_en : -1,
24913da42859SDinh Nguyen 		   new_dqs, mid_min);
24923da42859SDinh Nguyen 
2493ffb8b66eSMarek Vasut 	/* Add delay to bring centre of all DQ windows to the same "level". */
2494ffb8b66eSMarek Vasut 	center_dq_windows(0, left_edge, right_edge, mid_min, orig_mid_min,
2495ffb8b66eSMarek Vasut 			  min_index, test_bgn, &dq_margin, &dqs_margin);
24963da42859SDinh Nguyen 
24973da42859SDinh Nguyen 	/* Move DQS-en */
2498160695d8SMarek Vasut 	if (iocfg->shift_dqs_en_when_shift_dqs) {
24995d6db444SMarek Vasut 		final_dqs_en = start_dqs_en - mid_min;
25000113c3e1SMarek Vasut 		scc_mgr_set_dqs_en_delay(rw_group, final_dqs_en);
25010113c3e1SMarek Vasut 		scc_mgr_load_dqs(rw_group);
25023da42859SDinh Nguyen 	}
25033da42859SDinh Nguyen 
25043da42859SDinh Nguyen 	/* Move DQS */
25050113c3e1SMarek Vasut 	scc_mgr_set_dqs_bus_in_delay(rw_group, new_dqs);
25060113c3e1SMarek Vasut 	scc_mgr_load_dqs(rw_group);
2507ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2,
2508f0712c35SMarek Vasut 		   "%s:%d vfifo_center: dq_margin=%d dqs_margin=%d",
2509f0712c35SMarek Vasut 		   __func__, __LINE__, dq_margin, dqs_margin);
25103da42859SDinh Nguyen 
25113da42859SDinh Nguyen 	/*
25123da42859SDinh Nguyen 	 * Do not remove this line as it makes sure all of our decisions
25133da42859SDinh Nguyen 	 * have been applied. Apply the update bit.
25143da42859SDinh Nguyen 	 */
25151273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
25163da42859SDinh Nguyen 
251798668247SMarek Vasut 	if ((dq_margin < 0) || (dqs_margin < 0))
251898668247SMarek Vasut 		return -EINVAL;
251998668247SMarek Vasut 
252098668247SMarek Vasut 	return 0;
25213da42859SDinh Nguyen }
25223da42859SDinh Nguyen 
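/*
 * Note the new_dqs clamping above: with hypothetical start_dqs = 3 and
 * mid_min = 7, the raw new_dqs = 3 - 7 = -4 is clamped to 0 and mid_min
 * is recomputed as start_dqs - new_dqs = 3, so DQS itself only moves 3
 * taps; the remaining 4 taps of centering are absorbed by the per-bit DQ
 * shifts in center_dq_windows() via its (orig_mid_min - mid_min) term.
 */
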
2523bce24efaSMarek Vasut /**
252404372fb8SMarek Vasut  * rw_mgr_mem_calibrate_guaranteed_write() - Perform guaranteed write into the device
252504372fb8SMarek Vasut  * @rw_group:	Read/Write Group
252604372fb8SMarek Vasut  * @phase:	DQ/DQS phase
252704372fb8SMarek Vasut  *
252804372fb8SMarek Vasut  * Because initially no communication can be reliably performed with the memory
252904372fb8SMarek Vasut  * device, the sequencer uses a guaranteed write mechanism to write data into
253004372fb8SMarek Vasut  * the memory device.
253104372fb8SMarek Vasut  */
253204372fb8SMarek Vasut static int rw_mgr_mem_calibrate_guaranteed_write(const u32 rw_group,
253304372fb8SMarek Vasut 						 const u32 phase)
253404372fb8SMarek Vasut {
253504372fb8SMarek Vasut 	int ret;
253604372fb8SMarek Vasut 
253704372fb8SMarek Vasut 	/* Set a particular DQ/DQS phase. */
253804372fb8SMarek Vasut 	scc_mgr_set_dqdqs_output_phase_all_ranks(rw_group, phase);
253904372fb8SMarek Vasut 
2540ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1, "%s:%d guaranteed write: g=%u p=%u\n",
254104372fb8SMarek Vasut 		   __func__, __LINE__, rw_group, phase);
254204372fb8SMarek Vasut 
254304372fb8SMarek Vasut 	/*
254404372fb8SMarek Vasut 	 * Altera EMI_RM 2015.05.04 :: Figure 1-25
254504372fb8SMarek Vasut 	 * Load up the patterns used by read calibration using the
254604372fb8SMarek Vasut 	 * current DQDQS phase.
254704372fb8SMarek Vasut 	 */
254804372fb8SMarek Vasut 	rw_mgr_mem_calibrate_read_load_patterns(0, 1);
254904372fb8SMarek Vasut 
255004372fb8SMarek Vasut 	if (gbl->phy_debug_mode_flags & PHY_DEBUG_DISABLE_GUARANTEED_READ)
255104372fb8SMarek Vasut 		return 0;
255204372fb8SMarek Vasut 
255304372fb8SMarek Vasut 	/*
255404372fb8SMarek Vasut 	 * Altera EMI_RM 2015.05.04 :: Figure 1-26
255504372fb8SMarek Vasut 	 * Back-to-Back reads of the patterns used for calibration.
255604372fb8SMarek Vasut 	 */
2557d844c7d4SMarek Vasut 	ret = rw_mgr_mem_calibrate_read_test_patterns(0, rw_group, 1);
2558d844c7d4SMarek Vasut 	if (ret)
2559ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 1,
256004372fb8SMarek Vasut 			   "%s:%d Guaranteed read test failed: g=%u p=%u\n",
256104372fb8SMarek Vasut 			   __func__, __LINE__, rw_group, phase);
2562d844c7d4SMarek Vasut 	return ret;
256304372fb8SMarek Vasut }
256404372fb8SMarek Vasut 
256504372fb8SMarek Vasut /**
2566f09da11eSMarek Vasut  * rw_mgr_mem_calibrate_dqs_enable_calibration() - DQS Enable Calibration
2567f09da11eSMarek Vasut  * @rw_group:	Read/Write Group
2568f09da11eSMarek Vasut  * @test_bgn:	Rank at which the test begins
2569f09da11eSMarek Vasut  *
2570f09da11eSMarek Vasut  * DQS enable calibration ensures reliable capture of the DQ signal without
2571f09da11eSMarek Vasut  * glitches on the DQS line.
2572f09da11eSMarek Vasut  */
2573f09da11eSMarek Vasut static int rw_mgr_mem_calibrate_dqs_enable_calibration(const u32 rw_group,
2574f09da11eSMarek Vasut 						       const u32 test_bgn)
2575f09da11eSMarek Vasut {
2576f09da11eSMarek Vasut 	/*
2577f09da11eSMarek Vasut 	 * Altera EMI_RM 2015.05.04 :: Figure 1-27
2578f09da11eSMarek Vasut 	 * DQS and DQS Enable Signal Relationships.
2579f09da11eSMarek Vasut 	 */
258028ea827dSMarek Vasut 
258128ea827dSMarek Vasut 	/* We start at zero, so we have one less DQ to divide among. */
2582160695d8SMarek Vasut 	const u32 delay_step = iocfg->io_in_delay_max /
25831fa0c8c4SMarek Vasut 			       (rwcfg->mem_dq_per_read_dqs - 1);
2584914546e7SMarek Vasut 	int ret;
258528ea827dSMarek Vasut 	u32 i, p, d, r;
258628ea827dSMarek Vasut 
258728ea827dSMarek Vasut 	debug("%s:%d (%u,%u)\n", __func__, __LINE__, rw_group, test_bgn);
258828ea827dSMarek Vasut 
258928ea827dSMarek Vasut 	/* Try different dq_in_delays since the DQ path is shorter than DQS. */
25901fa0c8c4SMarek Vasut 	for (r = 0; r < rwcfg->mem_number_of_ranks;
259128ea827dSMarek Vasut 	     r += NUM_RANKS_PER_SHADOW_REG) {
259228ea827dSMarek Vasut 		for (i = 0, p = test_bgn, d = 0;
25931fa0c8c4SMarek Vasut 		     i < rwcfg->mem_dq_per_read_dqs;
259428ea827dSMarek Vasut 		     i++, p++, d += delay_step) {
2595ea9aa241SMarek Vasut 			debug_cond(DLEVEL >= 1,
259628ea827dSMarek Vasut 				   "%s:%d: g=%u r=%u i=%u p=%u d=%u\n",
259728ea827dSMarek Vasut 				   __func__, __LINE__, rw_group, r, i, p, d);
259828ea827dSMarek Vasut 
259928ea827dSMarek Vasut 			scc_mgr_set_dq_in_delay(p, d);
260028ea827dSMarek Vasut 			scc_mgr_load_dq(p);
260128ea827dSMarek Vasut 		}
260228ea827dSMarek Vasut 
260328ea827dSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
260428ea827dSMarek Vasut 	}
260528ea827dSMarek Vasut 
260628ea827dSMarek Vasut 	/*
260728ea827dSMarek Vasut 	 * Try rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase across different
260828ea827dSMarek Vasut 	 * dq_in_delay values
260928ea827dSMarek Vasut 	 */
2610914546e7SMarek Vasut 	ret = rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(rw_group);
261128ea827dSMarek Vasut 
2612ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1,
261328ea827dSMarek Vasut 		   "%s:%d: g=%u found=%u; Resetting delay chain to zero\n",
2614914546e7SMarek Vasut 		   __func__, __LINE__, rw_group, !ret);
261528ea827dSMarek Vasut 
26161fa0c8c4SMarek Vasut 	for (r = 0; r < rwcfg->mem_number_of_ranks;
261728ea827dSMarek Vasut 	     r += NUM_RANKS_PER_SHADOW_REG) {
261828ea827dSMarek Vasut 		scc_mgr_apply_group_dq_in_delay(test_bgn, 0);
261928ea827dSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
262028ea827dSMarek Vasut 	}
262128ea827dSMarek Vasut 
2622914546e7SMarek Vasut 	return ret;
2623f09da11eSMarek Vasut }
2624f09da11eSMarek Vasut 
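/*
 * Sketch of the staggered-delay trick above (hypothetical configuration
 * values): with io_in_delay_max = 31 and mem_dq_per_read_dqs = 8,
 * delay_step = 31 / 7 = 4, so the DQ pins of the group are given input
 * delays 0, 4, 8, ..., 28. A single pass of
 * rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase() then sees eight
 * different DQ-vs-DQS alignments at once, compensating for the DQ path
 * being shorter than the DQS path.
 */
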
2625f09da11eSMarek Vasut /**
262616cfc4b9SMarek Vasut  * rw_mgr_mem_calibrate_dq_dqs_centering() - Centering DQ/DQS
262716cfc4b9SMarek Vasut  * @rw_group:		Read/Write Group
262816cfc4b9SMarek Vasut  * @test_bgn:		Rank at which the test begins
262916cfc4b9SMarek Vasut  * @use_read_test:	Perform a read test
263016cfc4b9SMarek Vasut  * @update_fom:		Update FOM
263116cfc4b9SMarek Vasut  *
263216cfc4b9SMarek Vasut  * The centering DQ/DQS stage attempts to align DQ and DQS signals on reads
263316cfc4b9SMarek Vasut  * within a group.
263416cfc4b9SMarek Vasut  */
263516cfc4b9SMarek Vasut static int
263616cfc4b9SMarek Vasut rw_mgr_mem_calibrate_dq_dqs_centering(const u32 rw_group, const u32 test_bgn,
263716cfc4b9SMarek Vasut 				      const int use_read_test,
263816cfc4b9SMarek Vasut 				      const int update_fom)
263916cfc4b9SMarek Vasut 
264016cfc4b9SMarek Vasut {
264116cfc4b9SMarek Vasut 	int ret, grp_calibrated;
264216cfc4b9SMarek Vasut 	u32 rank_bgn, sr;
264316cfc4b9SMarek Vasut 
264416cfc4b9SMarek Vasut 	/*
264516cfc4b9SMarek Vasut 	 * Altera EMI_RM 2015.05.04 :: Figure 1-28
264616cfc4b9SMarek Vasut 	 * Read per-bit deskew can be done on a per shadow register basis.
264716cfc4b9SMarek Vasut 	 */
264816cfc4b9SMarek Vasut 	grp_calibrated = 1;
264916cfc4b9SMarek Vasut 	for (rank_bgn = 0, sr = 0;
26501fa0c8c4SMarek Vasut 	     rank_bgn < rwcfg->mem_number_of_ranks;
265116cfc4b9SMarek Vasut 	     rank_bgn += NUM_RANKS_PER_SHADOW_REG, sr++) {
265216cfc4b9SMarek Vasut 		ret = rw_mgr_mem_calibrate_vfifo_center(rank_bgn, rw_group,
26530113c3e1SMarek Vasut 							test_bgn,
265416cfc4b9SMarek Vasut 							use_read_test,
265516cfc4b9SMarek Vasut 							update_fom);
265698668247SMarek Vasut 		if (!ret)
265716cfc4b9SMarek Vasut 			continue;
265816cfc4b9SMarek Vasut 
265916cfc4b9SMarek Vasut 		grp_calibrated = 0;
266016cfc4b9SMarek Vasut 	}
266116cfc4b9SMarek Vasut 
266216cfc4b9SMarek Vasut 	if (!grp_calibrated)
266316cfc4b9SMarek Vasut 		return -EIO;
266416cfc4b9SMarek Vasut 
266516cfc4b9SMarek Vasut 	return 0;
266616cfc4b9SMarek Vasut }
266716cfc4b9SMarek Vasut 
266816cfc4b9SMarek Vasut /**
2669bce24efaSMarek Vasut  * rw_mgr_mem_calibrate_vfifo() - Calibrate the read valid prediction FIFO
2670bce24efaSMarek Vasut  * @rw_group:		Read/Write Group
2671bce24efaSMarek Vasut  * @test_bgn:		Rank at which the test begins
26723da42859SDinh Nguyen  *
2673bce24efaSMarek Vasut  * Stage 1: Calibrate the read valid prediction FIFO.
2674bce24efaSMarek Vasut  *
2675bce24efaSMarek Vasut  * This function implements UniPHY calibration Stage 1, as explained in
2676bce24efaSMarek Vasut  * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages".
2677bce24efaSMarek Vasut  *
2678bce24efaSMarek Vasut  * - read valid prediction will consist of finding:
2679bce24efaSMarek Vasut  *   - DQS enable phase and DQS enable delay (DQS Enable Calibration)
2680bce24efaSMarek Vasut  *   - DQS input phase  and DQS input delay (DQ/DQS Centering)
26813da42859SDinh Nguyen  *  - we also do a per-bit deskew on the DQ lines.
26823da42859SDinh Nguyen  */
2683c336ca3eSMarek Vasut static int rw_mgr_mem_calibrate_vfifo(const u32 rw_group, const u32 test_bgn)
26843da42859SDinh Nguyen {
26855ded7320SMarek Vasut 	u32 p, d;
26865ded7320SMarek Vasut 	u32 dtaps_per_ptap;
26875ded7320SMarek Vasut 	u32 failed_substage;
26883da42859SDinh Nguyen 
268904372fb8SMarek Vasut 	int ret;
269004372fb8SMarek Vasut 
2691c336ca3eSMarek Vasut 	debug("%s:%d: %u %u\n", __func__, __LINE__, rw_group, test_bgn);
26923da42859SDinh Nguyen 
26937c0a9df3SMarek Vasut 	/* Update info for sims */
26947c0a9df3SMarek Vasut 	reg_file_set_group(rw_group);
26953da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_VFIFO);
26967c0a9df3SMarek Vasut 	reg_file_set_sub_stage(CAL_SUBSTAGE_GUARANTEED_READ);
26973da42859SDinh Nguyen 
26987c0a9df3SMarek Vasut 	failed_substage = CAL_SUBSTAGE_GUARANTEED_READ;
26997c0a9df3SMarek Vasut 
27007c0a9df3SMarek Vasut 	/* Determine the number of delay taps for each phase tap. */
2701160695d8SMarek Vasut 	dtaps_per_ptap = DIV_ROUND_UP(iocfg->delay_per_opa_tap,
2702160695d8SMarek Vasut 				      iocfg->delay_per_dqs_en_dchain_tap) - 1;
27033da42859SDinh Nguyen 
2704fe2d0a2dSMarek Vasut 	for (d = 0; d <= dtaps_per_ptap; d += 2) {
27053da42859SDinh Nguyen 		/*
27063da42859SDinh Nguyen 		 * In RLDRAMX we may be messing with the delay of pins in
2707c336ca3eSMarek Vasut 		 * the same write rw_group but outside of the current read
2708c336ca3eSMarek Vasut 		 * rw_group, but that's OK because we haven't calibrated the
2709ac70d2f3SMarek Vasut 		 * output side yet.
27103da42859SDinh Nguyen 		 */
27113da42859SDinh Nguyen 		if (d > 0) {
2712f51a7d35SMarek Vasut 			scc_mgr_apply_group_all_out_delay_add_all_ranks(
2713c336ca3eSMarek Vasut 								rw_group, d);
27143da42859SDinh Nguyen 		}
27153da42859SDinh Nguyen 
2716160695d8SMarek Vasut 		for (p = 0; p <= iocfg->dqdqs_out_phase_max; p++) {
271704372fb8SMarek Vasut 			/* 1) Guaranteed Write */
271804372fb8SMarek Vasut 			ret = rw_mgr_mem_calibrate_guaranteed_write(rw_group, p);
271904372fb8SMarek Vasut 			if (ret)
27203da42859SDinh Nguyen 				break;
27213da42859SDinh Nguyen 
2722f09da11eSMarek Vasut 			/* 2) DQS Enable Calibration */
2723f09da11eSMarek Vasut 			ret = rw_mgr_mem_calibrate_dqs_enable_calibration(rw_group,
2724f09da11eSMarek Vasut 									  test_bgn);
2725f09da11eSMarek Vasut 			if (ret) {
2726fe2d0a2dSMarek Vasut 				failed_substage = CAL_SUBSTAGE_DQS_EN_PHASE;
2727fe2d0a2dSMarek Vasut 				continue;
2728fe2d0a2dSMarek Vasut 			}
2729fe2d0a2dSMarek Vasut 
273016cfc4b9SMarek Vasut 			/* 3) Centering DQ/DQS */
27313da42859SDinh Nguyen 			/*
273216cfc4b9SMarek Vasut 			 * If doing read after write calibration, do not update
273316cfc4b9SMarek Vasut 			 * FOM now. Do it then.
27343da42859SDinh Nguyen 			 */
273516cfc4b9SMarek Vasut 			ret = rw_mgr_mem_calibrate_dq_dqs_centering(rw_group,
273616cfc4b9SMarek Vasut 								test_bgn, 1, 0);
273716cfc4b9SMarek Vasut 			if (ret) {
2738d2ea4950SMarek Vasut 				failed_substage = CAL_SUBSTAGE_VFIFO_CENTER;
273916cfc4b9SMarek Vasut 				continue;
27403da42859SDinh Nguyen 			}
2741fe2d0a2dSMarek Vasut 
274216cfc4b9SMarek Vasut 			/* All done. */
2743fe2d0a2dSMarek Vasut 			goto cal_done_ok;
27443da42859SDinh Nguyen 		}
27453da42859SDinh Nguyen 	}
27463da42859SDinh Nguyen 
2747fe2d0a2dSMarek Vasut 	/* Calibration Stage 1 failed. */
2748c336ca3eSMarek Vasut 	set_failing_group_stage(rw_group, CAL_STAGE_VFIFO, failed_substage);
27493da42859SDinh Nguyen 	return 0;
27503da42859SDinh Nguyen 
2751fe2d0a2dSMarek Vasut 	/* Calibration Stage 1 completed OK. */
2752fe2d0a2dSMarek Vasut cal_done_ok:
27533da42859SDinh Nguyen 	/*
27543da42859SDinh Nguyen 	 * Reset the delay chains back to zero if they have moved > 1
27553da42859SDinh Nguyen 	 * (check for > 1 because the loop will increase d even when we
27563da42859SDinh Nguyen 	 * pass in the first case).
27573da42859SDinh Nguyen 	 */
27583da42859SDinh Nguyen 	if (d > 2)
2759c336ca3eSMarek Vasut 		scc_mgr_zero_group(rw_group, 1);
27603da42859SDinh Nguyen 
27613da42859SDinh Nguyen 	return 1;
27623da42859SDinh Nguyen }
27633da42859SDinh Nguyen 
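/*
 * Search-space sketch for Stage 1 above (hypothetical iocfg values):
 * with delay_per_opa_tap = 2500 ps and delay_per_dqs_en_dchain_tap =
 * 250 ps, dtaps_per_ptap = DIV_ROUND_UP(2500, 250) - 1 = 9, so the outer
 * loop tries d = 0, 2, 4, 6, 8 while the inner loop sweeps the DQ/DQS
 * phase p for each d. The first (d, p) pair surviving the guaranteed
 * write, DQS enable calibration and DQ/DQS centering wins; if d ended
 * above 2, the delay chains are zeroed again afterwards.
 */
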
276478cdd7d0SMarek Vasut /**
276578cdd7d0SMarek Vasut  * rw_mgr_mem_calibrate_vfifo_end() - DQ/DQS Centering.
276678cdd7d0SMarek Vasut  * @rw_group:		Read/Write Group
276778cdd7d0SMarek Vasut  * @test_bgn:		Rank at which the test begins
276878cdd7d0SMarek Vasut  *
276978cdd7d0SMarek Vasut  * Stage 3: DQ/DQS Centering.
277078cdd7d0SMarek Vasut  *
277178cdd7d0SMarek Vasut  * This function implements UniPHY calibration Stage 3, as explained in
277278cdd7d0SMarek Vasut  * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages".
277378cdd7d0SMarek Vasut  */
277478cdd7d0SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_end(const u32 rw_group,
277578cdd7d0SMarek Vasut 					  const u32 test_bgn)
27763da42859SDinh Nguyen {
277778cdd7d0SMarek Vasut 	int ret;
27783da42859SDinh Nguyen 
277978cdd7d0SMarek Vasut 	debug("%s:%d %u %u", __func__, __LINE__, rw_group, test_bgn);
27803da42859SDinh Nguyen 
278178cdd7d0SMarek Vasut 	/* Update info for sims. */
278278cdd7d0SMarek Vasut 	reg_file_set_group(rw_group);
27833da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_VFIFO_AFTER_WRITES);
27843da42859SDinh Nguyen 	reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER);
27853da42859SDinh Nguyen 
278678cdd7d0SMarek Vasut 	ret = rw_mgr_mem_calibrate_dq_dqs_centering(rw_group, test_bgn, 0, 1);
278778cdd7d0SMarek Vasut 	if (ret)
278878cdd7d0SMarek Vasut 		set_failing_group_stage(rw_group,
27893da42859SDinh Nguyen 					CAL_STAGE_VFIFO_AFTER_WRITES,
27903da42859SDinh Nguyen 					CAL_SUBSTAGE_VFIFO_CENTER);
279178cdd7d0SMarek Vasut 	return ret;
27923da42859SDinh Nguyen }
27933da42859SDinh Nguyen 
2794c984278aSMarek Vasut /**
2795c984278aSMarek Vasut  * rw_mgr_mem_calibrate_lfifo() - Minimize latency
2796c984278aSMarek Vasut  *
2797c984278aSMarek Vasut  * Stage 4: Minimize latency.
2798c984278aSMarek Vasut  *
2799c984278aSMarek Vasut  * This function implements UniPHY calibration Stage 4, as explained in
2800c984278aSMarek Vasut  * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages".
2801c984278aSMarek Vasut  * Calibrate LFIFO to find smallest read latency.
2802c984278aSMarek Vasut  */
28035ded7320SMarek Vasut static u32 rw_mgr_mem_calibrate_lfifo(void)
28043da42859SDinh Nguyen {
2805c984278aSMarek Vasut 	int found_one = 0;
28063da42859SDinh Nguyen 
28073da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
28083da42859SDinh Nguyen 
2809c984278aSMarek Vasut 	/* Update info for sims. */
28103da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_LFIFO);
28113da42859SDinh Nguyen 	reg_file_set_sub_stage(CAL_SUBSTAGE_READ_LATENCY);
28123da42859SDinh Nguyen 
28133da42859SDinh Nguyen 	/* Load up the patterns used by read calibration for all ranks */
28143da42859SDinh Nguyen 	rw_mgr_mem_calibrate_read_load_patterns(0, 1);
28153da42859SDinh Nguyen 
28163da42859SDinh Nguyen 	do {
28171273dd9eSMarek Vasut 		writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
2818ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2, "%s:%d lfifo: read_lat=%u",
28193da42859SDinh Nguyen 			   __func__, __LINE__, gbl->curr_read_lat);
28203da42859SDinh Nguyen 
2821c984278aSMarek Vasut 		if (!rw_mgr_mem_calibrate_read_test_all_ranks(0, NUM_READ_TESTS,
2822c984278aSMarek Vasut 							      PASS_ALL_BITS, 1))
28233da42859SDinh Nguyen 			break;
28243da42859SDinh Nguyen 
28253da42859SDinh Nguyen 		found_one = 1;
2826c984278aSMarek Vasut 		/*
2827c984278aSMarek Vasut 		 * Reduce read latency and see if things are
2828c984278aSMarek Vasut 		 * working correctly.
2829c984278aSMarek Vasut 		 */
28303da42859SDinh Nguyen 		gbl->curr_read_lat--;
28313da42859SDinh Nguyen 	} while (gbl->curr_read_lat > 0);
28323da42859SDinh Nguyen 
2833c984278aSMarek Vasut 	/* Reset the fifos to get pointers to known state. */
28341273dd9eSMarek Vasut 	writel(0, &phy_mgr_cmd->fifo_reset);
28353da42859SDinh Nguyen 
28363da42859SDinh Nguyen 	if (found_one) {
2837c984278aSMarek Vasut 		/* Add a fudge factor to the read latency that was determined */
28383da42859SDinh Nguyen 		gbl->curr_read_lat += 2;
28391273dd9eSMarek Vasut 		writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
2840ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
2841c984278aSMarek Vasut 			   "%s:%d lfifo: success: using read_lat=%u\n",
2842c984278aSMarek Vasut 			   __func__, __LINE__, gbl->curr_read_lat);
28433da42859SDinh Nguyen 	} else {
28443da42859SDinh Nguyen 		set_failing_group_stage(0xff, CAL_STAGE_LFIFO,
28453da42859SDinh Nguyen 					CAL_SUBSTAGE_READ_LATENCY);
28463da42859SDinh Nguyen 
2847ea9aa241SMarek Vasut 		debug_cond(DLEVEL >= 2,
2848c984278aSMarek Vasut 			   "%s:%d lfifo: failed at initial read_lat=%u\n",
2849c984278aSMarek Vasut 			   __func__, __LINE__, gbl->curr_read_lat);
28503da42859SDinh Nguyen 	}
2851c984278aSMarek Vasut 
2852c984278aSMarek Vasut 	return found_one;
28533da42859SDinh Nguyen }
28543da42859SDinh Nguyen 
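/*
 * Worked example of the latency walk above (hypothetical starting
 * value): with gbl->curr_read_lat initially 8, reads passing at 8, 7
 * and 6, and the read test failing at 5, the loop breaks with
 * curr_read_lat == 5; found_one is set, the +2 fudge factor is applied,
 * and the final read latency programmed into phy_rlat is 7.
 */
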
2855c8570afaSMarek Vasut /**
2856c8570afaSMarek Vasut  * search_window() - Search for the window, or part of it, with DM/DQS shift
2857c8570afaSMarek Vasut  * @search_dm:		If 1, search for the DM shift, if 0, search for DQS shift
2858c8570afaSMarek Vasut  * @rank_bgn:		Rank number
2859c8570afaSMarek Vasut  * @write_group:	Write Group
2860c8570afaSMarek Vasut  * @bgn_curr:		Current window begin
2861c8570afaSMarek Vasut  * @end_curr:		Current window end
2862c8570afaSMarek Vasut  * @bgn_best:		Current best window begin
2863c8570afaSMarek Vasut  * @end_best:		Current best window end
2864c8570afaSMarek Vasut  * @win_best:		Size of the best window
2865c8570afaSMarek Vasut  * @new_dqs:		New DQS value (only applicable if search_dm = 0).
2866c8570afaSMarek Vasut  *
2867c8570afaSMarek Vasut  * Search for the window, or part of it, with DM/DQS shift.
2868c8570afaSMarek Vasut  */
2869c8570afaSMarek Vasut static void search_window(const int search_dm,
2870c8570afaSMarek Vasut 			  const u32 rank_bgn, const u32 write_group,
2871c8570afaSMarek Vasut 			  int *bgn_curr, int *end_curr, int *bgn_best,
2872c8570afaSMarek Vasut 			  int *end_best, int *win_best, int new_dqs)
2873c8570afaSMarek Vasut {
2874c8570afaSMarek Vasut 	u32 bit_chk;
2875160695d8SMarek Vasut 	const int max = iocfg->io_out1_delay_max - new_dqs;
2876c8570afaSMarek Vasut 	int d, di;
2877c8570afaSMarek Vasut 
2878c8570afaSMarek Vasut 	/* Search for the window, or part of it, with DM/DQS shift. */
2879c8570afaSMarek Vasut 	for (di = max; di >= 0; di -= DELTA_D) {
2880c8570afaSMarek Vasut 		if (search_dm) {
2881c8570afaSMarek Vasut 			d = di;
2882c8570afaSMarek Vasut 			scc_mgr_apply_group_dm_out1_delay(d);
2883c8570afaSMarek Vasut 		} else {
2884c8570afaSMarek Vasut 			/* For DQS, we go from 0...max */
2885c8570afaSMarek Vasut 			d = max - di;
2886c8570afaSMarek Vasut 			/*
2887139823ecSMarek Vasut 			 * Note: This only shifts DQS, so we are limiting
2888139823ecSMarek Vasut 			 *       ourselves to the width of DQ unnecessarily.
2889c8570afaSMarek Vasut 			 */
2890c8570afaSMarek Vasut 			scc_mgr_apply_group_dqs_io_and_oct_out1(write_group,
2891c8570afaSMarek Vasut 								d + new_dqs);
2892c8570afaSMarek Vasut 		}
2893c8570afaSMarek Vasut 
2894c8570afaSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
2895c8570afaSMarek Vasut 
2896c8570afaSMarek Vasut 		if (rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 1,
2897c8570afaSMarek Vasut 						    PASS_ALL_BITS, &bit_chk,
2898c8570afaSMarek Vasut 						    0)) {
2899c8570afaSMarek Vasut 			/* Set current end of the window. */
2900c8570afaSMarek Vasut 			*end_curr = search_dm ? -d : d;
2901c8570afaSMarek Vasut 
2902c8570afaSMarek Vasut 			/*
2903c8570afaSMarek Vasut 			 * If a starting edge of our window has not been seen
2904c8570afaSMarek Vasut 			 * this is our current start of the DM window.
2905c8570afaSMarek Vasut 			 */
2906160695d8SMarek Vasut 			if (*bgn_curr == iocfg->io_out1_delay_max + 1)
2907c8570afaSMarek Vasut 				*bgn_curr = search_dm ? -d : d;
2908c8570afaSMarek Vasut 
2909c8570afaSMarek Vasut 			/*
2910c8570afaSMarek Vasut 			 * If current window is bigger than best seen.
2911c8570afaSMarek Vasut 			 * Set best seen to be current window.
2912c8570afaSMarek Vasut 			 */
2913c8570afaSMarek Vasut 			if ((*end_curr - *bgn_curr + 1) > *win_best) {
2914c8570afaSMarek Vasut 				*win_best = *end_curr - *bgn_curr + 1;
2915c8570afaSMarek Vasut 				*bgn_best = *bgn_curr;
2916c8570afaSMarek Vasut 				*end_best = *end_curr;
2917c8570afaSMarek Vasut 			}
2918c8570afaSMarek Vasut 		} else {
2919c8570afaSMarek Vasut 			/* We just saw a failing test. Reset temp edge. */
2920160695d8SMarek Vasut 			*bgn_curr = iocfg->io_out1_delay_max + 1;
2921160695d8SMarek Vasut 			*end_curr = iocfg->io_out1_delay_max + 1;
2922c8570afaSMarek Vasut 
2923c8570afaSMarek Vasut 			/* Early exit is only applicable to DQS. */
2924c8570afaSMarek Vasut 			if (search_dm)
2925c8570afaSMarek Vasut 				continue;
2926c8570afaSMarek Vasut 
2927c8570afaSMarek Vasut 			/*
2928c8570afaSMarek Vasut 			 * Early exit optimization: if the remaining delay
2929c8570afaSMarek Vasut 			 * chain space is less than already seen largest
2930c8570afaSMarek Vasut 			 * window we can exit.
2931c8570afaSMarek Vasut 			 */
2932160695d8SMarek Vasut 			if (*win_best - 1 > iocfg->io_out1_delay_max - new_dqs - d)
2933c8570afaSMarek Vasut 				break;
2934c8570afaSMarek Vasut 		}
2935c8570afaSMarek Vasut 	}
2936c8570afaSMarek Vasut }
2937c8570afaSMarek Vasut 
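/*
 * Early-exit sketch for search_window() (hypothetical numbers): with
 * io_out1_delay_max = 31 and new_dqs = 10, max = 21 and the DQS search
 * walks d = 0..21 (the DM search walks the same range downwards). If
 * win_best is already 15 and the test fails at d = 8, only
 * 31 - 10 - 8 = 13 taps remain, less than win_best - 1 = 14, so no
 * remaining window can beat the best one and the search stops early.
 */
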
29383da42859SDinh Nguyen /*
2939a386a50eSMarek Vasut  * rw_mgr_mem_calibrate_writes_center() - Center all windows
2940a386a50eSMarek Vasut  * @rank_bgn:		Rank number
2941a386a50eSMarek Vasut  * @write_group:	Write group
2942a386a50eSMarek Vasut  * @test_bgn:		Rank at which the test begins
2943a386a50eSMarek Vasut  *
2944a386a50eSMarek Vasut  * Center all windows. Do per-bit deskew to possibly increase the size of
29453da42859SDinh Nguyen  * certain windows.
29463da42859SDinh Nguyen  */
29473b44f55cSMarek Vasut static int
29483b44f55cSMarek Vasut rw_mgr_mem_calibrate_writes_center(const u32 rank_bgn, const u32 write_group,
29493b44f55cSMarek Vasut 				   const u32 test_bgn)
29503da42859SDinh Nguyen {
2951c8570afaSMarek Vasut 	int i;
29523b44f55cSMarek Vasut 	u32 sticky_bit_chk;
29533b44f55cSMarek Vasut 	u32 min_index;
29541fa0c8c4SMarek Vasut 	int left_edge[rwcfg->mem_dq_per_write_dqs];
29551fa0c8c4SMarek Vasut 	int right_edge[rwcfg->mem_dq_per_write_dqs];
29563b44f55cSMarek Vasut 	int mid;
29573b44f55cSMarek Vasut 	int mid_min, orig_mid_min;
29583b44f55cSMarek Vasut 	int new_dqs, start_dqs;
29593b44f55cSMarek Vasut 	int dq_margin, dqs_margin, dm_margin;
2960160695d8SMarek Vasut 	int bgn_curr = iocfg->io_out1_delay_max + 1;
2961160695d8SMarek Vasut 	int end_curr = iocfg->io_out1_delay_max + 1;
2962160695d8SMarek Vasut 	int bgn_best = iocfg->io_out1_delay_max + 1;
2963160695d8SMarek Vasut 	int end_best = iocfg->io_out1_delay_max + 1;
29643b44f55cSMarek Vasut 	int win_best = 0;
29653da42859SDinh Nguyen 
2966c4907898SMarek Vasut 	int ret;
2967c4907898SMarek Vasut 
29683da42859SDinh Nguyen 	debug("%s:%d %u %u", __func__, __LINE__, write_group, test_bgn);
29693da42859SDinh Nguyen 
29703da42859SDinh Nguyen 	dm_margin = 0;
29713da42859SDinh Nguyen 
2972c6540872SMarek Vasut 	start_dqs = readl((SDR_PHYGRP_SCCGRP_ADDRESS |
2973c6540872SMarek Vasut 			  SCC_MGR_IO_OUT1_DELAY_OFFSET) +
29741fa0c8c4SMarek Vasut 			  (rwcfg->mem_dq_per_write_dqs << 2));
29753da42859SDinh Nguyen 
29763b44f55cSMarek Vasut 	/* Per-bit deskew. */
29773da42859SDinh Nguyen 
29783da42859SDinh Nguyen 	/*
29793b44f55cSMarek Vasut 	 * Set the left and right edge of each bit to an illegal value.
2980160695d8SMarek Vasut 	 * Use (iocfg->io_out1_delay_max + 1) as an illegal value.
29813da42859SDinh Nguyen 	 */
29823da42859SDinh Nguyen 	sticky_bit_chk = 0;
29831fa0c8c4SMarek Vasut 	for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++) {
2984160695d8SMarek Vasut 		left_edge[i]  = iocfg->io_out1_delay_max + 1;
2985160695d8SMarek Vasut 		right_edge[i] = iocfg->io_out1_delay_max + 1;
29863da42859SDinh Nguyen 	}
29873da42859SDinh Nguyen 
29883b44f55cSMarek Vasut 	/* Search for the left edge of the window for each bit. */
298971120773SMarek Vasut 	search_left_edge(1, rank_bgn, write_group, 0, test_bgn,
29900c4be198SMarek Vasut 			 &sticky_bit_chk,
299171120773SMarek Vasut 			 left_edge, right_edge, 0);
29923da42859SDinh Nguyen 
29933b44f55cSMarek Vasut 	/* Search for the right edge of the window for each bit. */
2994c4907898SMarek Vasut 	ret = search_right_edge(1, rank_bgn, write_group, 0,
2995c4907898SMarek Vasut 				start_dqs, 0,
29960c4be198SMarek Vasut 				&sticky_bit_chk,
2997c4907898SMarek Vasut 				left_edge, right_edge, 0);
2998c4907898SMarek Vasut 	if (ret) {
2999c4907898SMarek Vasut 		set_failing_group_stage(test_bgn + ret - 1, CAL_STAGE_WRITES,
30003da42859SDinh Nguyen 					CAL_SUBSTAGE_WRITES_CENTER);
3001d043ee5bSMarek Vasut 		return -EINVAL;
30023da42859SDinh Nguyen 	}
30033da42859SDinh Nguyen 
3004afb3eb84SMarek Vasut 	min_index = get_window_mid_index(1, left_edge, right_edge, &mid_min);
30053da42859SDinh Nguyen 
30063b44f55cSMarek Vasut 	/* Determine the amount we can change DQS (which is -mid_min). */
30073da42859SDinh Nguyen 	orig_mid_min = mid_min;
30083da42859SDinh Nguyen 	new_dqs = start_dqs;
30093da42859SDinh Nguyen 	mid_min = 0;
3010ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1,
30113b44f55cSMarek Vasut 		   "%s:%d write_center: start_dqs=%d new_dqs=%d mid_min=%d\n",
30123b44f55cSMarek Vasut 		   __func__, __LINE__, start_dqs, new_dqs, mid_min);
30133da42859SDinh Nguyen 
3014ffb8b66eSMarek Vasut 	/* Add delay to bring centre of all DQ windows to the same "level". */
3015ffb8b66eSMarek Vasut 	center_dq_windows(1, left_edge, right_edge, mid_min, orig_mid_min,
3016ffb8b66eSMarek Vasut 			  min_index, 0, &dq_margin, &dqs_margin);
30173da42859SDinh Nguyen 
30183da42859SDinh Nguyen 	/* Move DQS */
30193da42859SDinh Nguyen 	scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs);
30201273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
30213da42859SDinh Nguyen 
30223da42859SDinh Nguyen 	/* Centre DM */
3023ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d write_center: DM\n", __func__, __LINE__);
30243da42859SDinh Nguyen 
30253da42859SDinh Nguyen 	/*
30263b44f55cSMarek Vasut 	 * Set the left and right edge of each bit to an illegal value.
3027160695d8SMarek Vasut 	 * Use (iocfg->io_out1_delay_max + 1) as an illegal value.
30283da42859SDinh Nguyen 	 */
3029160695d8SMarek Vasut 	left_edge[0]  = iocfg->io_out1_delay_max + 1;
3030160695d8SMarek Vasut 	right_edge[0] = iocfg->io_out1_delay_max + 1;
30313da42859SDinh Nguyen 
30323b44f55cSMarek Vasut 	/* Search for the window, or part of it, with DM shift. */
3033c8570afaSMarek Vasut 	search_window(1, rank_bgn, write_group, &bgn_curr, &end_curr,
3034c8570afaSMarek Vasut 		      &bgn_best, &end_best, &win_best, 0);
30353da42859SDinh Nguyen 
30363b44f55cSMarek Vasut 	/* Reset DM delay chains to 0. */
303732675249SMarek Vasut 	scc_mgr_apply_group_dm_out1_delay(0);
30383da42859SDinh Nguyen 
30393da42859SDinh Nguyen 	/*
30403da42859SDinh Nguyen 	 * Check to see if the current window nudges up against 0 delay.
30413da42859SDinh Nguyen 	 * If so, we need to continue the search by shifting DQS; otherwise
30423b44f55cSMarek Vasut 	 * the DQS search begins as a new search.
30433b44f55cSMarek Vasut 	 */
30443da42859SDinh Nguyen 	if (end_curr != 0) {
3045160695d8SMarek Vasut 		bgn_curr = iocfg->io_out1_delay_max + 1;
3046160695d8SMarek Vasut 		end_curr = iocfg->io_out1_delay_max + 1;
30473da42859SDinh Nguyen 	}
30483da42859SDinh Nguyen 
30493b44f55cSMarek Vasut 	/* Search for the window, or part of it, with DQS shifts. */
3050c8570afaSMarek Vasut 	search_window(0, rank_bgn, write_group, &bgn_curr, &end_curr,
3051c8570afaSMarek Vasut 		      &bgn_best, &end_best, &win_best, new_dqs);
30523da42859SDinh Nguyen 
30533b44f55cSMarek Vasut 	/* Assign left and right edge for cal and reporting. */
30543da42859SDinh Nguyen 	left_edge[0] = -1 * bgn_best;
30553da42859SDinh Nguyen 	right_edge[0] = end_best;
30563da42859SDinh Nguyen 
3057ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2, "%s:%d dm_calib: left=%d right=%d\n",
30583b44f55cSMarek Vasut 		   __func__, __LINE__, left_edge[0], right_edge[0]);
30593da42859SDinh Nguyen 
30603b44f55cSMarek Vasut 	/* Move DQS (back to orig). */
30613da42859SDinh Nguyen 	scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs);
30623da42859SDinh Nguyen 
30633da42859SDinh Nguyen 	/* Move DM */
30643da42859SDinh Nguyen 
30653b44f55cSMarek Vasut 	/* Find middle of window for the DM bit. */
30663da42859SDinh Nguyen 	mid = (left_edge[0] - right_edge[0]) / 2;
30673da42859SDinh Nguyen 
30683b44f55cSMarek Vasut 	/* Only move right, since we are not moving DQS/DQ. */
30693da42859SDinh Nguyen 	if (mid < 0)
30703da42859SDinh Nguyen 		mid = 0;
30713da42859SDinh Nguyen 
30723b44f55cSMarek Vasut 	/* dm_margin should fail if we never find a window. */
30733da42859SDinh Nguyen 	if (win_best == 0)
30743da42859SDinh Nguyen 		dm_margin = -1;
30753da42859SDinh Nguyen 	else
30763da42859SDinh Nguyen 		dm_margin = left_edge[0] - mid;
30773da42859SDinh Nguyen 
307832675249SMarek Vasut 	scc_mgr_apply_group_dm_out1_delay(mid);
30791273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
30803da42859SDinh Nguyen 
3081ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2,
30823b44f55cSMarek Vasut 		   "%s:%d dm_calib: left=%d right=%d mid=%d dm_margin=%d\n",
30833b44f55cSMarek Vasut 		   __func__, __LINE__, left_edge[0], right_edge[0],
30843b44f55cSMarek Vasut 		   mid, dm_margin);
30853b44f55cSMarek Vasut 	/* Export values. */
30863da42859SDinh Nguyen 	gbl->fom_out += dq_margin + dqs_margin;
30873da42859SDinh Nguyen 
3088ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 2,
30893b44f55cSMarek Vasut 		   "%s:%d write_center: dq_margin=%d dqs_margin=%d dm_margin=%d\n",
30903b44f55cSMarek Vasut 		   __func__, __LINE__, dq_margin, dqs_margin, dm_margin);
30913da42859SDinh Nguyen 
30923da42859SDinh Nguyen 	/*
30933da42859SDinh Nguyen 	 * Do not remove this line as it makes sure all of our
30943da42859SDinh Nguyen 	 * decisions have been applied.
30953da42859SDinh Nguyen 	 */
30961273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
30973b44f55cSMarek Vasut 
3098d043ee5bSMarek Vasut 	if ((dq_margin < 0) || (dqs_margin < 0) || (dm_margin < 0))
3099d043ee5bSMarek Vasut 		return -EINVAL;
3100d043ee5bSMarek Vasut 
3101d043ee5bSMarek Vasut 	return 0;
31023da42859SDinh Nguyen }
31033da42859SDinh Nguyen 
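/*
 * DM-centering sketch (hypothetical window): with bgn_best = -6 and
 * end_best = 3 above, left_edge[0] = 6 and right_edge[0] = 3, so
 * mid = (6 - 3) / 2 = 1; the DM out1 delay is set to 1 (moving right
 * only, since DQS/DQ stay put) and dm_margin = 6 - 1 = 5. If no window
 * was ever found (win_best == 0), dm_margin is forced to -1 and the
 * function fails with -EINVAL.
 */
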
3104db3a6061SMarek Vasut /**
3105db3a6061SMarek Vasut  * rw_mgr_mem_calibrate_writes() - Write Calibration Part One
3106db3a6061SMarek Vasut  * @rank_bgn:		Rank number
3107db3a6061SMarek Vasut  * @group:		Read/Write Group
3108db3a6061SMarek Vasut  * @test_bgn:		Rank at which the test begins
3109db3a6061SMarek Vasut  *
3110db3a6061SMarek Vasut  * Stage 2: Write Calibration Part One.
3111db3a6061SMarek Vasut  *
3112db3a6061SMarek Vasut  * This function implements UniPHY calibration Stage 2, as explained in
3113db3a6061SMarek Vasut  * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages".
3114db3a6061SMarek Vasut  */
3115db3a6061SMarek Vasut static int rw_mgr_mem_calibrate_writes(const u32 rank_bgn, const u32 group,
3116db3a6061SMarek Vasut 				       const u32 test_bgn)
31173da42859SDinh Nguyen {
3118db3a6061SMarek Vasut 	int ret;
31193da42859SDinh Nguyen 
3120db3a6061SMarek Vasut 	/* Update info for sims */
3121db3a6061SMarek Vasut 	debug("%s:%d %u %u\n", __func__, __LINE__, group, test_bgn);
3122db3a6061SMarek Vasut 
3123db3a6061SMarek Vasut 	reg_file_set_group(group);
31243da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_WRITES);
31253da42859SDinh Nguyen 	reg_file_set_sub_stage(CAL_SUBSTAGE_WRITES_CENTER);
31263da42859SDinh Nguyen 
3127db3a6061SMarek Vasut 	ret = rw_mgr_mem_calibrate_writes_center(rank_bgn, group, test_bgn);
3128d043ee5bSMarek Vasut 	if (ret)
3129db3a6061SMarek Vasut 		set_failing_group_stage(group, CAL_STAGE_WRITES,
31303da42859SDinh Nguyen 					CAL_SUBSTAGE_WRITES_CENTER);
31313da42859SDinh Nguyen 
3132d043ee5bSMarek Vasut 	return ret;
31333da42859SDinh Nguyen }
31343da42859SDinh Nguyen 
31354b0ac26aSMarek Vasut /**
31364b0ac26aSMarek Vasut  * mem_precharge_and_activate() - Precharge all banks and activate
31374b0ac26aSMarek Vasut  *
31384b0ac26aSMarek Vasut  * Precharge all banks and activate row 0 in bank "000..." and bank "111...".
31394b0ac26aSMarek Vasut  */
31403da42859SDinh Nguyen static void mem_precharge_and_activate(void)
31413da42859SDinh Nguyen {
31424b0ac26aSMarek Vasut 	int r;
31433da42859SDinh Nguyen 
31441fa0c8c4SMarek Vasut 	for (r = 0; r < rwcfg->mem_number_of_ranks; r++) {
31454b0ac26aSMarek Vasut 		/* Set rank. */
31463da42859SDinh Nguyen 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF);
31473da42859SDinh Nguyen 
31484b0ac26aSMarek Vasut 		/* Precharge all banks. */
31491fa0c8c4SMarek Vasut 		writel(rwcfg->precharge_all, SDR_PHYGRP_RWMGRGRP_ADDRESS |
31501273dd9eSMarek Vasut 					     RW_MGR_RUN_SINGLE_GROUP_OFFSET);
31513da42859SDinh Nguyen 
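		/*
		 * The load counter/jump-address pairs below program loops in
		 * the RW manager microcode: each counter sets how many times
		 * the sequence at the matching jump address is replayed.  The
		 * 0x0F values are assumed here to pad out the
		 * precharge-to-activate timing.
		 */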
31521273dd9eSMarek Vasut 		writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr0);
31531fa0c8c4SMarek Vasut 		writel(rwcfg->activate_0_and_1_wait1,
31541273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);
31553da42859SDinh Nguyen 
31561273dd9eSMarek Vasut 		writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr1);
31571fa0c8c4SMarek Vasut 		writel(rwcfg->activate_0_and_1_wait2,
31581273dd9eSMarek Vasut 		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);
31593da42859SDinh Nguyen 
31604b0ac26aSMarek Vasut 		/* Activate rows. */
31611fa0c8c4SMarek Vasut 		writel(rwcfg->activate_0_and_1, SDR_PHYGRP_RWMGRGRP_ADDRESS |
31621273dd9eSMarek Vasut 						RW_MGR_RUN_SINGLE_GROUP_OFFSET);
31633da42859SDinh Nguyen 	}
31643da42859SDinh Nguyen }
31653da42859SDinh Nguyen 
316616502a0bSMarek Vasut /**
316716502a0bSMarek Vasut  * mem_init_latency() - Configure memory RLAT and WLAT settings
316816502a0bSMarek Vasut  *
316916502a0bSMarek Vasut  * Configure memory RLAT and WLAT parameters.
317016502a0bSMarek Vasut  */
317116502a0bSMarek Vasut static void mem_init_latency(void)
31723da42859SDinh Nguyen {
317316502a0bSMarek Vasut 	/*
317416502a0bSMarek Vasut 	 * For AV/CV, LFIFO is hardened and always runs at full rate
317516502a0bSMarek Vasut 	 * so max latency in AFI clocks, used here, is correspondingly
317616502a0bSMarek Vasut 	 * smaller.
317716502a0bSMarek Vasut 	 */
317896fd4362SMarek Vasut 	const u32 max_latency = (1 << misccfg->max_latency_count_width) - 1;
317916502a0bSMarek Vasut 	u32 rlat, wlat;
31803da42859SDinh Nguyen 
31813da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
318216502a0bSMarek Vasut 
318316502a0bSMarek Vasut 	/*
318416502a0bSMarek Vasut 	 * Read in write latency.
318516502a0bSMarek Vasut 	 * WL for Hard PHY does not include additive latency.
318616502a0bSMarek Vasut 	 */
31871273dd9eSMarek Vasut 	wlat = readl(&data_mgr->t_wl_add);
31881273dd9eSMarek Vasut 	wlat += readl(&data_mgr->mem_t_add);
31893da42859SDinh Nguyen 
319016502a0bSMarek Vasut 	gbl->rw_wl_nop_cycles = wlat - 1;
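	/*
	 * For illustration, with hypothetical values t_wl_add = 5 and
	 * mem_t_add = 2, wlat = 7 and rw_wl_nop_cycles = 6.
	 */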
31913da42859SDinh Nguyen 
319216502a0bSMarek Vasut 	/* Read in read latency. */
31931273dd9eSMarek Vasut 	rlat = readl(&data_mgr->t_rl_add);
31943da42859SDinh Nguyen 
319516502a0bSMarek Vasut 	/* Set a pretty high read latency initially. */
31963da42859SDinh Nguyen 	gbl->curr_read_lat = rlat + 16;
31973da42859SDinh Nguyen 	if (gbl->curr_read_lat > max_latency)
31983da42859SDinh Nguyen 		gbl->curr_read_lat = max_latency;
31993da42859SDinh Nguyen 
32001273dd9eSMarek Vasut 	writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
32013da42859SDinh Nguyen 
320216502a0bSMarek Vasut 	/* Advertise write latency. */
320316502a0bSMarek Vasut 	writel(wlat, &phy_mgr_cfg->afi_wlat);
32043da42859SDinh Nguyen }
32053da42859SDinh Nguyen 
320651cea0b6SMarek Vasut /**
320751cea0b6SMarek Vasut  * mem_skip_calibrate() - Set VFIFO and LFIFO to instant-on settings
320851cea0b6SMarek Vasut  *
320951cea0b6SMarek Vasut  * Set VFIFO and LFIFO to instant-on settings in skip calibration mode.
321051cea0b6SMarek Vasut  */
32113da42859SDinh Nguyen static void mem_skip_calibrate(void)
32123da42859SDinh Nguyen {
32135ded7320SMarek Vasut 	u32 vfifo_offset;
32145ded7320SMarek Vasut 	u32 i, j, r;
32153da42859SDinh Nguyen 
32163da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
32173da42859SDinh Nguyen 	/* Need to update every shadow register set used by the interface */
32181fa0c8c4SMarek Vasut 	for (r = 0; r < rwcfg->mem_number_of_ranks;
32193da42859SDinh Nguyen 	     r += NUM_RANKS_PER_SHADOW_REG) {
32203da42859SDinh Nguyen 		/*
32213da42859SDinh Nguyen 		 * Set output phase alignment settings appropriate for
32223da42859SDinh Nguyen 		 * skip calibration.
32233da42859SDinh Nguyen 		 */
32241fa0c8c4SMarek Vasut 		for (i = 0; i < rwcfg->mem_if_read_dqs_width; i++) {
32253da42859SDinh Nguyen 			scc_mgr_set_dqs_en_phase(i, 0);
3226160695d8SMarek Vasut 			if (iocfg->dll_chain_length == 6)
32273da42859SDinh Nguyen 				scc_mgr_set_dqdqs_output_phase(i, 6);
3228160695d8SMarek Vasut 			else
32293da42859SDinh Nguyen 				scc_mgr_set_dqdqs_output_phase(i, 7);
32303da42859SDinh Nguyen 			/*
32313da42859SDinh Nguyen 			 * Case:33398
32323da42859SDinh Nguyen 			 *
32333da42859SDinh Nguyen 			 * Write data arrives to the I/O two cycles before write
32343da42859SDinh Nguyen 			 * latency is reached (720 deg).
32353da42859SDinh Nguyen 			 *   -> due to bit-slip in a/c bus
32363da42859SDinh Nguyen 			 *   -> to allow board skew where dqs is longer than ck
32373da42859SDinh Nguyen 			 *      -> how often can this happen!?
32383da42859SDinh Nguyen 			 *      -> can claim back some ptaps for high freq
32393da42859SDinh Nguyen 			 *       support if we can relax this, but i digress...
32403da42859SDinh Nguyen 			 *
32413da42859SDinh Nguyen 			 * The write_clk leads mem_ck by 90 deg
32423da42859SDinh Nguyen 			 * The minimum ptap of the OPA is 180 deg
32433da42859SDinh Nguyen 			 * Each ptap has (360 / iocfg->dll_chain_length) deg of delay
32443da42859SDinh Nguyen 			 * The write_clk is always delayed by 2 ptaps
32453da42859SDinh Nguyen 			 *
32463da42859SDinh Nguyen 			 * Hence, to make DQS aligned to CK, we need to delay
32473da42859SDinh Nguyen 			 * DQS by:
3248139823ecSMarek Vasut 			 *    (720 - 90 - 180 -
3249139823ecSMarek Vasut 			 *     2 * (360 / iocfg->dll_chain_length))
32503da42859SDinh Nguyen 			 *
3251160695d8SMarek Vasut 			 * Dividing the above by (360 / iocfg->dll_chain_length)
32523da42859SDinh Nguyen 			 * gives us the number of ptaps, which simplifies to:
32533da42859SDinh Nguyen 			 *
3254160695d8SMarek Vasut 			 *    (1.25 * iocfg->dll_chain_length - 2)
32553da42859SDinh Nguyen 			 */
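			/*
			 * Worked example, assuming a hypothetical
			 * dll_chain_length of 8: one ptap is 360 / 8 =
			 * 45 deg, so (720 - 90 - 180 - 2 * 45) / 45 =
			 * 1.25 * 8 - 2 = 8 ptaps of DQS delay.
			 */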
325651cea0b6SMarek Vasut 			scc_mgr_set_dqdqs_output_phase(i,
32576d7a3330SMarek Vasut 				       ((125 * iocfg->dll_chain_length) / 100) - 2);
32583da42859SDinh Nguyen 		}
32591273dd9eSMarek Vasut 		writel(0xff, &sdr_scc_mgr->dqs_ena);
32601273dd9eSMarek Vasut 		writel(0xff, &sdr_scc_mgr->dqs_io_ena);
32613da42859SDinh Nguyen 
32621fa0c8c4SMarek Vasut 		for (i = 0; i < rwcfg->mem_if_write_dqs_width; i++) {
32631273dd9eSMarek Vasut 			writel(i, SDR_PHYGRP_SCCGRP_ADDRESS |
32641273dd9eSMarek Vasut 				  SCC_MGR_GROUP_COUNTER_OFFSET);
32653da42859SDinh Nguyen 		}
32661273dd9eSMarek Vasut 		writel(0xff, &sdr_scc_mgr->dq_ena);
32671273dd9eSMarek Vasut 		writel(0xff, &sdr_scc_mgr->dm_ena);
32681273dd9eSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
32693da42859SDinh Nguyen 	}
32703da42859SDinh Nguyen 
32713da42859SDinh Nguyen 	/* Compensate for simulation model behaviour */
32721fa0c8c4SMarek Vasut 	for (i = 0; i < rwcfg->mem_if_read_dqs_width; i++) {
32733da42859SDinh Nguyen 		scc_mgr_set_dqs_bus_in_delay(i, 10);
32743da42859SDinh Nguyen 		scc_mgr_load_dqs(i);
32753da42859SDinh Nguyen 	}
32761273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
32773da42859SDinh Nguyen 
32783da42859SDinh Nguyen 	/*
32793da42859SDinh Nguyen 	 * Arria V has hard FIFOs that can only be initialized by
32803da42859SDinh Nguyen 	 * incrementing in the sequencer.
32813da42859SDinh Nguyen 	 */
328296fd4362SMarek Vasut 	vfifo_offset = misccfg->calib_vfifo_offset;
328351cea0b6SMarek Vasut 	for (j = 0; j < vfifo_offset; j++)
32841273dd9eSMarek Vasut 		writel(0xff, &phy_mgr_cmd->inc_vfifo_hard_phy);
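	/* Each write above advances the hard VFIFO pointer by one step. */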
32851273dd9eSMarek Vasut 	writel(0, &phy_mgr_cmd->fifo_reset);
32863da42859SDinh Nguyen 
32873da42859SDinh Nguyen 	/*
328851cea0b6SMarek Vasut 	 * For Arria V and Cyclone V with hard LFIFO, we get the skip-cal
328951cea0b6SMarek Vasut 	 * setting from generation-time constant.
32903da42859SDinh Nguyen 	 */
329196fd4362SMarek Vasut 	gbl->curr_read_lat = misccfg->calib_lfifo_offset;
32921273dd9eSMarek Vasut 	writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
32933da42859SDinh Nguyen }
32943da42859SDinh Nguyen 
32953589fbfbSMarek Vasut /**
32963589fbfbSMarek Vasut  * mem_calibrate() - Memory calibration entry point.
32973589fbfbSMarek Vasut  *
32983589fbfbSMarek Vasut  * Perform memory calibration.
32993589fbfbSMarek Vasut  */
33005ded7320SMarek Vasut static u32 mem_calibrate(void)
33013da42859SDinh Nguyen {
33025ded7320SMarek Vasut 	u32 i;
33035ded7320SMarek Vasut 	u32 rank_bgn, sr;
33045ded7320SMarek Vasut 	u32 write_group, write_test_bgn;
33055ded7320SMarek Vasut 	u32 read_group, read_test_bgn;
33065ded7320SMarek Vasut 	u32 run_groups, current_run;
33075ded7320SMarek Vasut 	u32 failing_groups = 0;
33085ded7320SMarek Vasut 	u32 group_failed = 0;
33093da42859SDinh Nguyen 
33101fa0c8c4SMarek Vasut 	const u32 rwdqs_ratio = rwcfg->mem_if_read_dqs_width /
33111fa0c8c4SMarek Vasut 				rwcfg->mem_if_write_dqs_width;
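	/*
	 * E.g. with a (hypothetical) read DQS width of 8 and write DQS
	 * width of 4, rwdqs_ratio = 2, i.e. two read groups are
	 * calibrated per write group in the loops below.
	 */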
331233c42bb8SMarek Vasut 
33133da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
33143da42859SDinh Nguyen 
331516502a0bSMarek Vasut 	/* Initialize the data settings */
33163da42859SDinh Nguyen 	gbl->error_substage = CAL_SUBSTAGE_NIL;
33173da42859SDinh Nguyen 	gbl->error_stage = CAL_STAGE_NIL;
33183da42859SDinh Nguyen 	gbl->error_group = 0xff;
33193da42859SDinh Nguyen 	gbl->fom_in = 0;
33203da42859SDinh Nguyen 	gbl->fom_out = 0;
33213da42859SDinh Nguyen 
332216502a0bSMarek Vasut 	/* Initialize WLAT and RLAT. */
332316502a0bSMarek Vasut 	mem_init_latency();
332416502a0bSMarek Vasut 
332516502a0bSMarek Vasut 	/* Precharge all banks and activate rows. */
332616502a0bSMarek Vasut 	mem_precharge_and_activate();
33273da42859SDinh Nguyen 
33281fa0c8c4SMarek Vasut 	for (i = 0; i < rwcfg->mem_if_read_dqs_width; i++) {
33291273dd9eSMarek Vasut 		writel(i, SDR_PHYGRP_SCCGRP_ADDRESS |
33301273dd9eSMarek Vasut 			  SCC_MGR_GROUP_COUNTER_OFFSET);
3331fa5d821bSMarek Vasut 		/* Only needed once to set all groups, pins, DQ, DQS, DM. */
3332fa5d821bSMarek Vasut 		if (i == 0)
3333fa5d821bSMarek Vasut 			scc_mgr_set_hhp_extras();
3334fa5d821bSMarek Vasut 
3335c5c5f537SMarek Vasut 		scc_set_bypass_mode(i);
33363da42859SDinh Nguyen 	}
33373da42859SDinh Nguyen 
3338722c9685SMarek Vasut 	/* Calibration is skipped. */
33393da42859SDinh Nguyen 	if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL) {
33403da42859SDinh Nguyen 		/*
33413da42859SDinh Nguyen 		 * Set VFIFO and LFIFO to instant-on settings in skip
33423da42859SDinh Nguyen 		 * calibration mode.
33433da42859SDinh Nguyen 		 */
33443da42859SDinh Nguyen 		mem_skip_calibrate();
3345722c9685SMarek Vasut 
3346722c9685SMarek Vasut 		/*
3347722c9685SMarek Vasut 		 * Do not remove this line as it makes sure all of our
3348722c9685SMarek Vasut 		 * decisions have been applied.
3349722c9685SMarek Vasut 		 */
3350722c9685SMarek Vasut 		writel(0, &sdr_scc_mgr->update);
3351722c9685SMarek Vasut 		return 1;
3352722c9685SMarek Vasut 	}
3353722c9685SMarek Vasut 
3354722c9685SMarek Vasut 	/* Calibration is not skipped. */
33553da42859SDinh Nguyen 	for (i = 0; i < NUM_CALIB_REPEAT; i++) {
33563da42859SDinh Nguyen 		/*
33573da42859SDinh Nguyen 		 * Zero all delay chain/phase settings for all
33583da42859SDinh Nguyen 		 * groups and all shadow register sets.
33593da42859SDinh Nguyen 		 */
33603da42859SDinh Nguyen 		scc_mgr_zero_all();
33613da42859SDinh Nguyen 
3362f085ac3bSMarek Vasut 		run_groups = ~0;
33633da42859SDinh Nguyen 
33643da42859SDinh Nguyen 		for (write_group = 0, write_test_bgn = 0; write_group
33651fa0c8c4SMarek Vasut 			< rwcfg->mem_if_write_dqs_width; write_group++,
33661fa0c8c4SMarek Vasut 			write_test_bgn += rwcfg->mem_dq_per_write_dqs) {
3367c452dcd0SMarek Vasut 			/* Initialize the group failure */
33683da42859SDinh Nguyen 			group_failed = 0;
33693da42859SDinh Nguyen 
33703da42859SDinh Nguyen 			current_run = run_groups & ((1 <<
33713da42859SDinh Nguyen 				RW_MGR_NUM_DQS_PER_WRITE_GROUP) - 1);
33723da42859SDinh Nguyen 			run_groups = run_groups >>
33733da42859SDinh Nguyen 				RW_MGR_NUM_DQS_PER_WRITE_GROUP;
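			/*
			 * E.g. with RW_MGR_NUM_DQS_PER_WRITE_GROUP == 1 (a
			 * hypothetical configuration), current_run is simply
			 * the LSB of run_groups and one bit is consumed per
			 * write group.
			 */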
33743da42859SDinh Nguyen 
33753da42859SDinh Nguyen 			if (current_run == 0)
33763da42859SDinh Nguyen 				continue;
33773da42859SDinh Nguyen 
33781273dd9eSMarek Vasut 			writel(write_group, SDR_PHYGRP_SCCGRP_ADDRESS |
33791273dd9eSMarek Vasut 					    SCC_MGR_GROUP_COUNTER_OFFSET);
3380d41ea93aSMarek Vasut 			scc_mgr_zero_group(write_group, 0);
33813da42859SDinh Nguyen 
338233c42bb8SMarek Vasut 			for (read_group = write_group * rwdqs_ratio,
33833da42859SDinh Nguyen 			     read_test_bgn = 0;
3384c452dcd0SMarek Vasut 			     read_group < (write_group + 1) * rwdqs_ratio;
338533c42bb8SMarek Vasut 			     read_group++,
33861fa0c8c4SMarek Vasut 			     read_test_bgn += rwcfg->mem_dq_per_read_dqs) {
338733c42bb8SMarek Vasut 				if (STATIC_CALIB_STEPS & CALIB_SKIP_VFIFO)
338833c42bb8SMarek Vasut 					continue;
33893da42859SDinh Nguyen 
339033c42bb8SMarek Vasut 				/* Calibrate the VFIFO */
339133c42bb8SMarek Vasut 				if (rw_mgr_mem_calibrate_vfifo(read_group,
339233c42bb8SMarek Vasut 							       read_test_bgn))
339333c42bb8SMarek Vasut 					continue;
339433c42bb8SMarek Vasut 
3395139823ecSMarek Vasut 				if (!(gbl->phy_debug_mode_flags &
3396139823ecSMarek Vasut 				      PHY_DEBUG_SWEEP_ALL_GROUPS))
33973da42859SDinh Nguyen 					return 0;
3398c452dcd0SMarek Vasut 
3399c452dcd0SMarek Vasut 				/* The group failed, we're done. */
3400c452dcd0SMarek Vasut 				goto grp_failed;
34013da42859SDinh Nguyen 			}
34023da42859SDinh Nguyen 
34033da42859SDinh Nguyen 			/* Calibrate the output side */
34044ac21610SMarek Vasut 			for (rank_bgn = 0, sr = 0;
34051fa0c8c4SMarek Vasut 			     rank_bgn < rwcfg->mem_number_of_ranks;
34064ac21610SMarek Vasut 			     rank_bgn += NUM_RANKS_PER_SHADOW_REG, sr++) {
34074ac21610SMarek Vasut 				if (STATIC_CALIB_STEPS & CALIB_SKIP_WRITES)
34084ac21610SMarek Vasut 					continue;
34094ac21610SMarek Vasut 
34104ac21610SMarek Vasut 				/* Not needed in quick mode! */
3411139823ecSMarek Vasut 				if (STATIC_CALIB_STEPS &
3412139823ecSMarek Vasut 				    CALIB_SKIP_DELAY_SWEEPS)
34134ac21610SMarek Vasut 					continue;
34144ac21610SMarek Vasut 
34154ac21610SMarek Vasut 				/* Calibrate WRITEs */
3416db3a6061SMarek Vasut 				if (!rw_mgr_mem_calibrate_writes(rank_bgn,
3417139823ecSMarek Vasut 								 write_group,
3418139823ecSMarek Vasut 								 write_test_bgn))
34194ac21610SMarek Vasut 					continue;
34204ac21610SMarek Vasut 
34213da42859SDinh Nguyen 				group_failed = 1;
3422139823ecSMarek Vasut 				if (!(gbl->phy_debug_mode_flags &
3423139823ecSMarek Vasut 				      PHY_DEBUG_SWEEP_ALL_GROUPS))
34244ac21610SMarek Vasut 					return 0;
34253da42859SDinh Nguyen 			}
34263da42859SDinh Nguyen 
3427c452dcd0SMarek Vasut 			/* Some group failed, we're done. */
3428c452dcd0SMarek Vasut 			if (group_failed)
3429c452dcd0SMarek Vasut 				goto grp_failed;
3430c452dcd0SMarek Vasut 
34318213609eSMarek Vasut 			for (read_group = write_group * rwdqs_ratio,
34323da42859SDinh Nguyen 			     read_test_bgn = 0;
3433c452dcd0SMarek Vasut 			     read_group < (write_group + 1) * rwdqs_ratio;
34348213609eSMarek Vasut 			     read_group++,
34351fa0c8c4SMarek Vasut 			     read_test_bgn += rwcfg->mem_dq_per_read_dqs) {
34368213609eSMarek Vasut 				if (STATIC_CALIB_STEPS & CALIB_SKIP_WRITES)
34378213609eSMarek Vasut 					continue;
34383da42859SDinh Nguyen 
343978cdd7d0SMarek Vasut 				if (!rw_mgr_mem_calibrate_vfifo_end(read_group,
34408213609eSMarek Vasut 								    read_test_bgn))
34418213609eSMarek Vasut 					continue;
34428213609eSMarek Vasut 
3443139823ecSMarek Vasut 				if (!(gbl->phy_debug_mode_flags &
3444139823ecSMarek Vasut 				      PHY_DEBUG_SWEEP_ALL_GROUPS))
34453da42859SDinh Nguyen 					return 0;
3446c452dcd0SMarek Vasut 
3447c452dcd0SMarek Vasut 				/* The group failed, we're done. */
3448c452dcd0SMarek Vasut 				goto grp_failed;
34493da42859SDinh Nguyen 			}
34503da42859SDinh Nguyen 
3451c452dcd0SMarek Vasut 			/* No group failed, continue as usual. */
3452c452dcd0SMarek Vasut 			continue;
3453c452dcd0SMarek Vasut 
3454c452dcd0SMarek Vasut grp_failed:		/* A group failed, increment the counter. */
34553da42859SDinh Nguyen 			failing_groups++;
34563da42859SDinh Nguyen 		}
34573da42859SDinh Nguyen 
34583da42859SDinh Nguyen 		/*
34593da42859SDinh Nguyen 		 * If there are any failing groups then report the failure.
34613da42859SDinh Nguyen 		 */
34623da42859SDinh Nguyen 		if (failing_groups != 0)
34633da42859SDinh Nguyen 			return 0;
34643da42859SDinh Nguyen 
3465c50ae303SMarek Vasut 		if (STATIC_CALIB_STEPS & CALIB_SKIP_LFIFO)
3466c50ae303SMarek Vasut 			continue;
3467c50ae303SMarek Vasut 
3468c50ae303SMarek Vasut 		/* Calibrate the LFIFO */
34693da42859SDinh Nguyen 		if (!rw_mgr_mem_calibrate_lfifo())
34703da42859SDinh Nguyen 			return 0;
34713da42859SDinh Nguyen 	}
34723da42859SDinh Nguyen 
34733da42859SDinh Nguyen 	/*
34743da42859SDinh Nguyen 	 * Do not remove this line as it makes sure all of our decisions
34753da42859SDinh Nguyen 	 * have been applied.
34763da42859SDinh Nguyen 	 */
34771273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
34783da42859SDinh Nguyen 	return 1;
34793da42859SDinh Nguyen }
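
/*
 * Summary of the calibration flow driven by mem_calibrate() above:
 *   Stage 1: rw_mgr_mem_calibrate_vfifo()     - per read group
 *   Stage 2: rw_mgr_mem_calibrate_writes()    - per write group and rank
 *   Stage 3: rw_mgr_mem_calibrate_vfifo_end() - per read group
 *   Stage 4: rw_mgr_mem_calibrate_lfifo()     - once per calibration repeat
 */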
34803da42859SDinh Nguyen 
348123a040c0SMarek Vasut /**
348223a040c0SMarek Vasut  * run_mem_calibrate() - Perform memory calibration
348323a040c0SMarek Vasut  *
348423a040c0SMarek Vasut  * This function triggers the entire memory calibration procedure.
348523a040c0SMarek Vasut  */
348623a040c0SMarek Vasut static int run_mem_calibrate(void)
34873da42859SDinh Nguyen {
348823a040c0SMarek Vasut 	int pass;
3489bba77110SMarek Vasut 	u32 ctrl_cfg;
34903da42859SDinh Nguyen 
34913da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
34923da42859SDinh Nguyen 
34933da42859SDinh Nguyen 	/* Reset pass/fail status shown on afi_cal_success/fail */
34941273dd9eSMarek Vasut 	writel(PHY_MGR_CAL_RESET, &phy_mgr_cfg->cal_status);
34953da42859SDinh Nguyen 
349623a040c0SMarek Vasut 	/* Stop tracking manager. */
3497bba77110SMarek Vasut 	ctrl_cfg = readl(&sdr_ctrl->ctrl_cfg);
3498bba77110SMarek Vasut 	writel(ctrl_cfg & ~SDR_CTRLGRP_CTRLCFG_DQSTRKEN_MASK,
3499bba77110SMarek Vasut 	       &sdr_ctrl->ctrl_cfg);
35003da42859SDinh Nguyen 
35019fa9c90eSMarek Vasut 	phy_mgr_initialize();
35023da42859SDinh Nguyen 	rw_mgr_mem_initialize();
35033da42859SDinh Nguyen 
350423a040c0SMarek Vasut 	/* Perform the actual memory calibration. */
35053da42859SDinh Nguyen 	pass = mem_calibrate();
35063da42859SDinh Nguyen 
35073da42859SDinh Nguyen 	mem_precharge_and_activate();
35081273dd9eSMarek Vasut 	writel(0, &phy_mgr_cmd->fifo_reset);
35093da42859SDinh Nguyen 
351023a040c0SMarek Vasut 	/* Handoff. */
35113da42859SDinh Nguyen 	rw_mgr_mem_handoff();
35123da42859SDinh Nguyen 	/*
35133da42859SDinh Nguyen 	 * In Hard PHY this is a 2-bit control:
35143da42859SDinh Nguyen 	 * 0: AFI Mux Select
35153da42859SDinh Nguyen 	 * 1: DDIO Mux Select
35163da42859SDinh Nguyen 	 */
35171273dd9eSMarek Vasut 	writel(0x2, &phy_mgr_cfg->mux_sel);
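	/*
	 * Writing 0x2 sets the DDIO mux select bit and clears the AFI mux
	 * select bit, which is taken here to hand the interface from the
	 * sequencer back to the controller after calibration.
	 */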
351823a040c0SMarek Vasut 
351923a040c0SMarek Vasut 	/* Start tracking manager. */
3520bba77110SMarek Vasut 	writel(ctrl_cfg, &sdr_ctrl->ctrl_cfg);
352123a040c0SMarek Vasut 
352223a040c0SMarek Vasut 	return pass;
35233da42859SDinh Nguyen }
35243da42859SDinh Nguyen 
352523a040c0SMarek Vasut /**
352623a040c0SMarek Vasut  * debug_mem_calibrate() - Report result of memory calibration
352723a040c0SMarek Vasut  * @pass:	Value indicating whether calibration passed or failed
352823a040c0SMarek Vasut  *
352923a040c0SMarek Vasut  * This function reports the results of the memory calibration
353023a040c0SMarek Vasut  * and writes debug information into the register file.
353123a040c0SMarek Vasut  */
353223a040c0SMarek Vasut static void debug_mem_calibrate(int pass)
353323a040c0SMarek Vasut {
35345ded7320SMarek Vasut 	u32 debug_info;
35353da42859SDinh Nguyen 
35363da42859SDinh Nguyen 	if (pass) {
35373da42859SDinh Nguyen 		printf("%s: CALIBRATION PASSED\n", __FILE__);
35383da42859SDinh Nguyen 
35393da42859SDinh Nguyen 		gbl->fom_in /= 2;
35403da42859SDinh Nguyen 		gbl->fom_out /= 2;
35413da42859SDinh Nguyen 
35423da42859SDinh Nguyen 		if (gbl->fom_in > 0xff)
35433da42859SDinh Nguyen 			gbl->fom_in = 0xff;
35443da42859SDinh Nguyen 
35453da42859SDinh Nguyen 		if (gbl->fom_out > 0xff)
35463da42859SDinh Nguyen 			gbl->fom_out = 0xff;
35473da42859SDinh Nguyen 
35483da42859SDinh Nguyen 		/* Update the FOM in the register file */
35493da42859SDinh Nguyen 		debug_info = gbl->fom_in;
35503da42859SDinh Nguyen 		debug_info |= gbl->fom_out << 8;
35511273dd9eSMarek Vasut 		writel(debug_info, &sdr_reg_file->fom);
35523da42859SDinh Nguyen 
35531273dd9eSMarek Vasut 		writel(debug_info, &phy_mgr_cfg->cal_debug_info);
35541273dd9eSMarek Vasut 		writel(PHY_MGR_CAL_SUCCESS, &phy_mgr_cfg->cal_status);
35553da42859SDinh Nguyen 	} else {
35563da42859SDinh Nguyen 		printf("%s: CALIBRATION FAILED\n", __FILE__);
35573da42859SDinh Nguyen 
35583da42859SDinh Nguyen 		debug_info = gbl->error_stage;
35593da42859SDinh Nguyen 		debug_info |= gbl->error_substage << 8;
35603da42859SDinh Nguyen 		debug_info |= gbl->error_group << 16;
35613da42859SDinh Nguyen 
35621273dd9eSMarek Vasut 		writel(debug_info, &sdr_reg_file->failing_stage);
35631273dd9eSMarek Vasut 		writel(debug_info, &phy_mgr_cfg->cal_debug_info);
35641273dd9eSMarek Vasut 		writel(PHY_MGR_CAL_FAIL, &phy_mgr_cfg->cal_status);
35653da42859SDinh Nguyen 
35663da42859SDinh Nguyen 		/* Update the failing group/stage in the register file */
35673da42859SDinh Nguyen 		debug_info = gbl->error_stage;
35683da42859SDinh Nguyen 		debug_info |= gbl->error_substage << 8;
35693da42859SDinh Nguyen 		debug_info |= gbl->error_group << 16;
35701273dd9eSMarek Vasut 		writel(debug_info, &sdr_reg_file->failing_stage);
35713da42859SDinh Nguyen 	}
35723da42859SDinh Nguyen 
357323a040c0SMarek Vasut 	printf("%s: Calibration complete\n", __FILE__);
35743da42859SDinh Nguyen }
35753da42859SDinh Nguyen 
3576bb06434bSMarek Vasut /**
3577bb06434bSMarek Vasut  * hc_initialize_rom_data() - Initialize ROM data
3578bb06434bSMarek Vasut  *
3579bb06434bSMarek Vasut  * Initialize ROM data.
3580bb06434bSMarek Vasut  */
35813da42859SDinh Nguyen static void hc_initialize_rom_data(void)
35823da42859SDinh Nguyen {
358304955cf2SMarek Vasut 	unsigned int nelem = 0;
358404955cf2SMarek Vasut 	const u32 *rom_init;
3585bb06434bSMarek Vasut 	u32 i, addr;
35863da42859SDinh Nguyen 
358704955cf2SMarek Vasut 	socfpga_get_seq_inst_init(&rom_init, &nelem);
3588c4815f76SMarek Vasut 	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_INST_ROM_WRITE_OFFSET;
358904955cf2SMarek Vasut 	for (i = 0; i < nelem; i++)
359004955cf2SMarek Vasut 		writel(rom_init[i], addr + (i << 2));
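	/* Each ROM entry is one 32-bit word, hence the (i << 2) byte offsets. */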
35913da42859SDinh Nguyen 
359204955cf2SMarek Vasut 	socfpga_get_seq_ac_init(&rom_init, &nelem);
3593c4815f76SMarek Vasut 	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_AC_ROM_WRITE_OFFSET;
359404955cf2SMarek Vasut 	for (i = 0; i < nelem; i++)
359504955cf2SMarek Vasut 		writel(rom_init[i], addr + (i << 2));
35963da42859SDinh Nguyen }
35973da42859SDinh Nguyen 
35989c1ab2caSMarek Vasut /**
35999c1ab2caSMarek Vasut  * initialize_reg_file() - Initialize SDR register file
36009c1ab2caSMarek Vasut  *
36019c1ab2caSMarek Vasut  * Initialize SDR register file.
36029c1ab2caSMarek Vasut  */
36033da42859SDinh Nguyen static void initialize_reg_file(void)
36043da42859SDinh Nguyen {
36053da42859SDinh Nguyen 	/* Initialize the register file with the correct data */
360696fd4362SMarek Vasut 	writel(misccfg->reg_file_init_seq_signature, &sdr_reg_file->signature);
36071273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->debug_data_addr);
36081273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->cur_stage);
36091273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->fom);
36101273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->failing_stage);
36111273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->debug1);
36121273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->debug2);
36133da42859SDinh Nguyen }
36143da42859SDinh Nguyen 
36152ca151f8SMarek Vasut /**
36162ca151f8SMarek Vasut  * initialize_hps_phy() - Initialize HPS PHY
36172ca151f8SMarek Vasut  *
36182ca151f8SMarek Vasut  * Initialize HPS PHY.
36192ca151f8SMarek Vasut  */
36203da42859SDinh Nguyen static void initialize_hps_phy(void)
36213da42859SDinh Nguyen {
36225ded7320SMarek Vasut 	u32 reg;
36233da42859SDinh Nguyen 	/*
36243da42859SDinh Nguyen 	 * Tracking also gets configured here because it's in the
36253da42859SDinh Nguyen 	 * same register.
36263da42859SDinh Nguyen 	 */
36275ded7320SMarek Vasut 	u32 trk_sample_count = 7500;
36293da42859SDinh Nguyen 	/*
36303da42859SDinh Nguyen 	 * Format is number of outer loops in the 16 MSB, sample
36313da42859SDinh Nguyen 	 * count in 16 LSB.
36323da42859SDinh Nguyen 	 */
36285ded7320SMarek Vasut 	u32 trk_long_idle_sample_count = (10 << 16) | 100;
36333da42859SDinh Nguyen 
36343da42859SDinh Nguyen 	reg = 0;
36353da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ACDELAYEN_SET(2);
36363da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQDELAYEN_SET(1);
36373da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSDELAYEN_SET(1);
36383da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSLOGICDELAYEN_SET(1);
36393da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_RESETDELAYEN_SET(0);
36403da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_LPDDRDIS_SET(1);
36413da42859SDinh Nguyen 	/*
36423da42859SDinh Nguyen 	 * This field selects the intrinsic latency to RDATA_EN/FULL path.
36433da42859SDinh Nguyen 	 * 00 - bypass, 01 - add 5 cycles, 10 - add 10 cycles, 11 - add 15 cycles.
36443da42859SDinh Nguyen 	 */
36453da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ADDLATSEL_SET(0);
36463da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_SET(
36473da42859SDinh Nguyen 		trk_sample_count);
36486cb9f167SMarek Vasut 	writel(reg, &sdr_ctrl->phy_ctrl0);
36493da42859SDinh Nguyen 
36503da42859SDinh Nguyen 	reg = 0;
36513da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_SAMPLECOUNT_31_20_SET(
36523da42859SDinh Nguyen 		trk_sample_count >>
36533da42859SDinh Nguyen 		SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_WIDTH);
36543da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_SET(
36553da42859SDinh Nguyen 		trk_long_idle_sample_count);
36566cb9f167SMarek Vasut 	writel(reg, &sdr_ctrl->phy_ctrl1);
36573da42859SDinh Nguyen 
36583da42859SDinh Nguyen 	reg = 0;
36593da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_2_LONGIDLESAMPLECOUNT_31_20_SET(
36603da42859SDinh Nguyen 		trk_long_idle_sample_count >>
36613da42859SDinh Nguyen 		SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_WIDTH);
36626cb9f167SMarek Vasut 	writel(reg, &sdr_ctrl->phy_ctrl2);
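	/*
	 * Worked example of the field split: trk_sample_count = 7500 fits
	 * entirely in the 20-bit SAMPLECOUNT_19_0 field of phy_ctrl0, so the
	 * SAMPLECOUNT_31_20 bits written to phy_ctrl1 above are zero.
	 * Likewise, (10 << 16) | 100 = 655460 fits in 20 bits, so the
	 * LONGIDLESAMPLECOUNT_31_20 bits in phy_ctrl2 are zero as well.
	 */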
36633da42859SDinh Nguyen }
36643da42859SDinh Nguyen 
3665880e46f2SMarek Vasut /**
3666880e46f2SMarek Vasut  * initialize_tracking() - Initialize tracking
3667880e46f2SMarek Vasut  *
3668880e46f2SMarek Vasut  * Initialize the register file with usable initial data.
3669880e46f2SMarek Vasut  */
36703da42859SDinh Nguyen static void initialize_tracking(void)
36713da42859SDinh Nguyen {
3672880e46f2SMarek Vasut 	/*
3673880e46f2SMarek Vasut 	 * Initialize the register file with the correct data.
3674880e46f2SMarek Vasut 	 * Compute usable version of value in case we skip full
3675880e46f2SMarek Vasut 	 * computation later.
3676880e46f2SMarek Vasut 	 */
3677139823ecSMarek Vasut 	writel(DIV_ROUND_UP(iocfg->delay_per_opa_tap,
3678139823ecSMarek Vasut 			    iocfg->delay_per_dchain_tap) - 1,
3679880e46f2SMarek Vasut 	       &sdr_reg_file->dtaps_per_ptap);
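	/*
	 * E.g. with hypothetical delays delay_per_opa_tap = 312 ps and
	 * delay_per_dchain_tap = 25 ps: DIV_ROUND_UP(312, 25) - 1 =
	 * 13 - 1 = 12 dtaps per ptap.
	 */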
3680880e46f2SMarek Vasut 
3681880e46f2SMarek Vasut 	/* trk_sample_count */
3682880e46f2SMarek Vasut 	writel(7500, &sdr_reg_file->trk_sample_count);
3683880e46f2SMarek Vasut 
3684880e46f2SMarek Vasut 	/* longidle outer loop [15:0] */
3685880e46f2SMarek Vasut 	writel((10 << 16) | (100 << 0), &sdr_reg_file->trk_longidle);
36863da42859SDinh Nguyen 
36873da42859SDinh Nguyen 	/*
3688880e46f2SMarek Vasut 	 * longidle sample count [31:24]
3689880e46f2SMarek Vasut 	 * trfc, worst case of 933 MHz 4Gb [23:16]
3690880e46f2SMarek Vasut 	 * trcd, worst case [15:8]
3691880e46f2SMarek Vasut 	 * vfifo wait [7:0]
36923da42859SDinh Nguyen 	 */
3693880e46f2SMarek Vasut 	writel((243 << 24) | (14 << 16) | (10 << 8) | (4 << 0),
3694880e46f2SMarek Vasut 	       &sdr_reg_file->delays);
36953da42859SDinh Nguyen 
36963da42859SDinh Nguyen 	/* mux delay */
36971fa0c8c4SMarek Vasut 	writel((rwcfg->idle << 24) | (rwcfg->activate_1 << 16) |
36981fa0c8c4SMarek Vasut 	       (rwcfg->sgle_read << 8) | (rwcfg->precharge_all << 0),
3699880e46f2SMarek Vasut 	       &sdr_reg_file->trk_rw_mgr_addr);
37003da42859SDinh Nguyen 
37011fa0c8c4SMarek Vasut 	writel(rwcfg->mem_if_read_dqs_width,
3702880e46f2SMarek Vasut 	       &sdr_reg_file->trk_read_dqs_width);
37033da42859SDinh Nguyen 
3704880e46f2SMarek Vasut 	/* refresh_all [31:24], trefi [15:0] */
37051fa0c8c4SMarek Vasut 	writel((rwcfg->refresh_all << 24) | (1000 << 0),
3706880e46f2SMarek Vasut 	       &sdr_reg_file->trk_rfsh);
37073da42859SDinh Nguyen }
37083da42859SDinh Nguyen 
37093da42859SDinh Nguyen int sdram_calibration_full(void)
37103da42859SDinh Nguyen {
37113da42859SDinh Nguyen 	struct param_type my_param;
37123da42859SDinh Nguyen 	struct gbl_type my_gbl;
37135ded7320SMarek Vasut 	u32 pass;
371484e0b0cfSMarek Vasut 
371584e0b0cfSMarek Vasut 	memset(&my_param, 0, sizeof(my_param));
371684e0b0cfSMarek Vasut 	memset(&my_gbl, 0, sizeof(my_gbl));
37173da42859SDinh Nguyen 
37183da42859SDinh Nguyen 	param = &my_param;
37193da42859SDinh Nguyen 	gbl = &my_gbl;
37203da42859SDinh Nguyen 
3721d718a26bSMarek Vasut 	rwcfg = socfpga_get_sdram_rwmgr_config();
372210c14261SMarek Vasut 	iocfg = socfpga_get_sdram_io_config();
3723042ff2d0SMarek Vasut 	misccfg = socfpga_get_sdram_misc_config();
3724d718a26bSMarek Vasut 
37253da42859SDinh Nguyen 	/* Set the calibration enabled by default */
37263da42859SDinh Nguyen 	gbl->phy_debug_mode_flags |= PHY_DEBUG_ENABLE_CAL_RPT;
37273da42859SDinh Nguyen 	/*
37283da42859SDinh Nguyen 	 * Only sweep all groups (regardless of fail state) by default.
37293da42859SDinh Nguyen 	 * The read test is enabled by default.
37303da42859SDinh Nguyen 	 */
37313da42859SDinh Nguyen #if DISABLE_GUARANTEED_READ
37323da42859SDinh Nguyen 	gbl->phy_debug_mode_flags |= PHY_DEBUG_DISABLE_GUARANTEED_READ;
37333da42859SDinh Nguyen #endif
37343da42859SDinh Nguyen 	/* Initialize the register file */
37353da42859SDinh Nguyen 	initialize_reg_file();
37363da42859SDinh Nguyen 
37373da42859SDinh Nguyen 	/* Initialize any PHY CSR */
37383da42859SDinh Nguyen 	initialize_hps_phy();
37393da42859SDinh Nguyen 
37403da42859SDinh Nguyen 	scc_mgr_initialize();
37413da42859SDinh Nguyen 
37423da42859SDinh Nguyen 	initialize_tracking();
37433da42859SDinh Nguyen 
37443da42859SDinh Nguyen 	printf("%s: Preparing to start memory calibration\n", __FILE__);
37453da42859SDinh Nguyen 
37463da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
3747ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1,
374823f62b36SMarek Vasut 		   "DDR3 FULL_RATE ranks=%u cs/dimm=%u dq/dqs=%u,%u vg/dqs=%u,%u ",
37491fa0c8c4SMarek Vasut 		   rwcfg->mem_number_of_ranks, rwcfg->mem_number_of_cs_per_dimm,
37501fa0c8c4SMarek Vasut 		   rwcfg->mem_dq_per_read_dqs, rwcfg->mem_dq_per_write_dqs,
37511fa0c8c4SMarek Vasut 		   rwcfg->mem_virtual_groups_per_read_dqs,
37521fa0c8c4SMarek Vasut 		   rwcfg->mem_virtual_groups_per_write_dqs);
3753ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1,
375423f62b36SMarek Vasut 		   "dqs=%u,%u dq=%u dm=%u ptap_delay=%u dtap_delay=%u ",
37551fa0c8c4SMarek Vasut 		   rwcfg->mem_if_read_dqs_width, rwcfg->mem_if_write_dqs_width,
37561fa0c8c4SMarek Vasut 		   rwcfg->mem_data_width, rwcfg->mem_data_mask_width,
3757160695d8SMarek Vasut 		   iocfg->delay_per_opa_tap, iocfg->delay_per_dchain_tap);
3758ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1, "dtap_dqsen_delay=%u, dll=%u",
3759160695d8SMarek Vasut 		   iocfg->delay_per_dqs_en_dchain_tap, iocfg->dll_chain_length);
3760ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1,
3761139823ecSMarek Vasut 		   "max values: en_p=%u dqdqs_p=%u en_d=%u dqs_in_d=%u ",
3762160695d8SMarek Vasut 		   iocfg->dqs_en_phase_max, iocfg->dqdqs_out_phase_max,
3763160695d8SMarek Vasut 		   iocfg->dqs_en_delay_max, iocfg->dqs_in_delay_max);
3764ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1, "io_in_d=%u io_out1_d=%u io_out2_d=%u ",
3765160695d8SMarek Vasut 		   iocfg->io_in_delay_max, iocfg->io_out1_delay_max,
3766160695d8SMarek Vasut 		   iocfg->io_out2_delay_max);
3767ea9aa241SMarek Vasut 	debug_cond(DLEVEL >= 1, "dqs_in_reserve=%u dqs_out_reserve=%u\n",
3768160695d8SMarek Vasut 		   iocfg->dqs_in_reserve, iocfg->dqs_out_reserve);
37693da42859SDinh Nguyen 
37703da42859SDinh Nguyen 	hc_initialize_rom_data();
37713da42859SDinh Nguyen 
37723da42859SDinh Nguyen 	/* update info for sims */
37733da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_NIL);
37743da42859SDinh Nguyen 	reg_file_set_group(0);
37753da42859SDinh Nguyen 
37763da42859SDinh Nguyen 	/*
37773da42859SDinh Nguyen 	 * Load global needed for those actions that require
37783da42859SDinh Nguyen 	 * some dynamic calibration support.
37793da42859SDinh Nguyen 	 */
37803da42859SDinh Nguyen 	dyn_calib_steps = STATIC_CALIB_STEPS;
37813da42859SDinh Nguyen 	/*
37823da42859SDinh Nguyen 	 * Load global to allow dynamic selection of delay loop settings
37833da42859SDinh Nguyen 	 * based on calibration mode.
37843da42859SDinh Nguyen 	 */
37853da42859SDinh Nguyen 	if (!(dyn_calib_steps & CALIB_SKIP_DELAY_LOOPS))
37863da42859SDinh Nguyen 		skip_delay_mask = 0xff;
37873da42859SDinh Nguyen 	else
37883da42859SDinh Nguyen 		skip_delay_mask = 0x0;
37893da42859SDinh Nguyen 
37903da42859SDinh Nguyen 	pass = run_mem_calibrate();
379123a040c0SMarek Vasut 	debug_mem_calibrate(pass);
37923da42859SDinh Nguyen 	return pass;
37933da42859SDinh Nguyen }
3794