xref: /rk3399_rockchip-uboot/drivers/ddr/altera/sequencer.c (revision 1fa0c8c4e30c4aae67b79263c749a66a67d42e5e)
/*
 * Copyright Altera Corporation (C) 2012-2015
 *
 * SPDX-License-Identifier:    BSD-3-Clause
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/sdram.h>
#include <errno.h>
#include "sequencer.h"

/*
 * FIXME: This path is temporary until the SDRAM driver gets
 *        a proper thorough cleanup.
 */
#include "../../../board/altera/socfpga/qts/sequencer_auto.h"
#include "../../../board/altera/socfpga/qts/sequencer_defines.h"

static struct socfpga_sdr_rw_load_manager *sdr_rw_load_mgr_regs =
	(struct socfpga_sdr_rw_load_manager *)(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0x800);

static struct socfpga_sdr_rw_load_jump_manager *sdr_rw_load_jump_mgr_regs =
	(struct socfpga_sdr_rw_load_jump_manager *)(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0xC00);

static struct socfpga_sdr_reg_file *sdr_reg_file =
	(struct socfpga_sdr_reg_file *)SDR_PHYGRP_REGFILEGRP_ADDRESS;

static struct socfpga_sdr_scc_mgr *sdr_scc_mgr =
	(struct socfpga_sdr_scc_mgr *)(SDR_PHYGRP_SCCGRP_ADDRESS | 0xe00);

static struct socfpga_phy_mgr_cmd *phy_mgr_cmd =
	(struct socfpga_phy_mgr_cmd *)SDR_PHYGRP_PHYMGRGRP_ADDRESS;

static struct socfpga_phy_mgr_cfg *phy_mgr_cfg =
	(struct socfpga_phy_mgr_cfg *)(SDR_PHYGRP_PHYMGRGRP_ADDRESS | 0x40);

static struct socfpga_data_mgr *data_mgr =
	(struct socfpga_data_mgr *)SDR_PHYGRP_DATAMGRGRP_ADDRESS;

static struct socfpga_sdr_ctrl *sdr_ctrl =
	(struct socfpga_sdr_ctrl *)SDR_CTRLGRP_ADDRESS;

const struct socfpga_sdram_rw_mgr_config *rwcfg;

#define DELTA_D		1

/*
 * In order to reduce ROM size, most of the selectable calibration steps are
 * decided at compile time based on the user's calibration mode selection,
 * as captured by the STATIC_CALIB_STEPS selection below.
 *
 * However, to support simulation-time selection of fast simulation mode, where
 * we skip everything except the bare minimum, we need a few of the steps to
 * be dynamic. In those cases, we either use the DYNAMIC_CALIB_STEPS for the
 * check, which is based on the rtl-supplied value, or we dynamically compute
 * the value to use based on the dynamically-chosen calibration mode.
 */

#define DLEVEL 0
#define STATIC_IN_RTL_SIM 0
#define STATIC_SKIP_DELAY_LOOPS 0

#define STATIC_CALIB_STEPS (STATIC_IN_RTL_SIM | CALIB_SKIP_FULL_TEST | \
	STATIC_SKIP_DELAY_LOOPS)

/* calibration steps requested by the rtl */
uint16_t dyn_calib_steps;

/*
 * To make CALIB_SKIP_DELAY_LOOPS a dynamic conditional option
 * instead of static, we use boolean logic to select between
 * non-skip and skip values
 *
 * The mask is set to include all bits when not-skipping, but is
 * zero when skipping
 */

uint16_t skip_delay_mask;	/* mask off bits when skipping/not-skipping */

#define SKIP_DELAY_LOOP_VALUE_OR_ZERO(non_skip_value) \
	((non_skip_value) & skip_delay_mask)
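
/*
 * Illustrative example (not from the original sources): with
 * skip_delay_mask = 0xffff (not skipping),
 * SKIP_DELAY_LOOP_VALUE_OR_ZERO(0x6a) evaluates to 0x6a; with
 * skip_delay_mask = 0 (skipping), it evaluates to 0, collapsing
 * every delay loop to its minimum length.
 */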

struct gbl_type *gbl;
struct param_type *param;

static void set_failing_group_stage(uint32_t group, uint32_t stage,
	uint32_t substage)
{
	/*
	 * Only set the global stage if there has not been any other
	 * failing group.
	 */
	if (gbl->error_stage == CAL_STAGE_NIL) {
		gbl->error_substage = substage;
		gbl->error_stage = stage;
		gbl->error_group = group;
	}
}

static void reg_file_set_group(u16 set_group)
{
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff0000, set_group << 16);
}

static void reg_file_set_stage(u8 set_stage)
{
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff, set_stage & 0xff);
}

static void reg_file_set_sub_stage(u8 set_sub_stage)
{
	set_sub_stage &= 0xff;
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xff00, set_sub_stage << 8);
}
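
/*
 * Note (inferred from the clrsetbits_le32() masks above, not from
 * separate documentation): the three helpers together pack the
 * register-file debug word as
 *	cur_stage[31:16] = group, cur_stage[15:8] = substage,
 *	cur_stage[7:0]   = stage.
 * Since reg_file_set_stage() clears the full 0xffff mask, it also
 * zeroes the substage field as a side effect.
 */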

/**
 * phy_mgr_initialize() - Initialize PHY Manager
 *
 * Initialize PHY Manager.
 */
static void phy_mgr_initialize(void)
{
	u32 ratio;

	debug("%s:%d\n", __func__, __LINE__);
	/* Calibration has control over path to memory */
	/*
	 * In Hard PHY this is a 2-bit control:
	 * 0: AFI Mux Select
	 * 1: DDIO Mux Select
	 */
	writel(0x3, &phy_mgr_cfg->mux_sel);

	/* USER memory clock is not stable, we begin initialization */
	writel(0, &phy_mgr_cfg->reset_mem_stbl);

	/* USER calibration status all set to zero */
	writel(0, &phy_mgr_cfg->cal_status);

	writel(0, &phy_mgr_cfg->cal_debug_info);

	/* Init params only if we do NOT skip calibration. */
	if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL)
		return;

	ratio = rwcfg->mem_dq_per_read_dqs /
		rwcfg->mem_virtual_groups_per_read_dqs;
	param->read_correct_mask_vg = (1 << ratio) - 1;
	param->write_correct_mask_vg = (1 << ratio) - 1;
	param->read_correct_mask = (1 << rwcfg->mem_dq_per_read_dqs) - 1;
	param->write_correct_mask = (1 << rwcfg->mem_dq_per_write_dqs) - 1;
}
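
/*
 * Illustrative arithmetic (hypothetical values, not taken from the QTS
 * headers): with mem_dq_per_read_dqs = mem_dq_per_write_dqs = 8 and
 * mem_virtual_groups_per_read_dqs = 1, ratio = 8 and all four
 * correct-masks above evaluate to 0xff, i.e. one bit per DQ pin.
 */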

/**
 * set_rank_and_odt_mask() - Set Rank and ODT mask
 * @rank:	Rank mask
 * @odt_mode:	ODT mode, OFF or READ_WRITE
 *
 * Set Rank and ODT mask (On-Die Termination).
 */
static void set_rank_and_odt_mask(const u32 rank, const u32 odt_mode)
{
	u32 odt_mask_0 = 0;
	u32 odt_mask_1 = 0;
	u32 cs_and_odt_mask;

	if (odt_mode == RW_MGR_ODT_MODE_OFF) {
		odt_mask_0 = 0x0;
		odt_mask_1 = 0x0;
	} else {	/* RW_MGR_ODT_MODE_READ_WRITE */
		switch (rwcfg->mem_number_of_ranks) {
		case 1:	/* 1 Rank */
			/* Read: ODT = 0 ; Write: ODT = 1 */
			odt_mask_0 = 0x0;
			odt_mask_1 = 0x1;
			break;
		case 2:	/* 2 Ranks */
			if (rwcfg->mem_number_of_cs_per_dimm == 1) {
				/*
				 * - Dual-Slot , Single-Rank (1 CS per DIMM)
				 *   OR
				 * - RDIMM, 4 total CS (2 CS per DIMM, 2 DIMM)
				 *
				 * Since MEM_NUMBER_OF_RANKS is 2, they
				 * are both single rank with 2 CS each
				 * (special for RDIMM).
				 *
				 * Read: Turn on ODT on the opposite rank
				 * Write: Turn on ODT on all ranks
				 */
				odt_mask_0 = 0x3 & ~(1 << rank);
				odt_mask_1 = 0x3;
			} else {
				/*
				 * - Single-Slot , Dual-Rank (2 CS per DIMM)
				 *
				 * Read: Turn off ODT on all ranks
				 * Write: Turn on ODT on active rank
				 */
				odt_mask_0 = 0x0;
				odt_mask_1 = 0x3 & (1 << rank);
			}
			break;
		case 4:	/* 4 Ranks */
			/*
			 * Read:
			 * ----------+-----------------------+
			 *           |         ODT           |
			 * Read From +-----------------------+
			 *   Rank    |  3  |  2  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *     0     |  0  |  1  |  0  |  0  |
			 *     1     |  1  |  0  |  0  |  0  |
			 *     2     |  0  |  0  |  0  |  1  |
			 *     3     |  0  |  0  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *
			 * Write:
			 * ----------+-----------------------+
			 *           |         ODT           |
			 * Write To  +-----------------------+
			 *   Rank    |  3  |  2  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *     0     |  0  |  1  |  0  |  1  |
			 *     1     |  1  |  0  |  1  |  0  |
			 *     2     |  0  |  1  |  0  |  1  |
			 *     3     |  1  |  0  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 */
			switch (rank) {
			case 0:
				odt_mask_0 = 0x4;
				odt_mask_1 = 0x5;
				break;
			case 1:
				odt_mask_0 = 0x8;
				odt_mask_1 = 0xA;
				break;
			case 2:
				odt_mask_0 = 0x1;
				odt_mask_1 = 0x5;
				break;
			case 3:
				odt_mask_0 = 0x2;
				odt_mask_1 = 0xA;
				break;
			}
			break;
		}
	}

	cs_and_odt_mask = (0xFF & ~(1 << rank)) |
			  ((0xFF & odt_mask_0) << 8) |
			  ((0xFF & odt_mask_1) << 16);
	writel(cs_and_odt_mask, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_SET_CS_AND_ODT_MASK_OFFSET);
}
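
/*
 * Worked example (illustrative): for rank 0 of a 4-rank setup,
 * cs_and_odt_mask = 0xFE | (0x04 << 8) | (0x05 << 16) = 0x0504FE.
 * Rank 0's bit is the one cleared in the chip-select field [7:0],
 * read ODT asserts on rank 2 (bits [15:8] = 0x04) and write ODT on
 * ranks 0 and 2 (bits [23:16] = 0x05), matching the tables above.
 */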

/**
 * scc_mgr_set() - Set SCC Manager register
 * @off:	Base offset in SCC Manager space
 * @grp:	Read/Write group
 * @val:	Value to be set
 *
 * This function sets the SCC Manager (Scan Chain Control Manager) register.
 */
static void scc_mgr_set(u32 off, u32 grp, u32 val)
{
	writel(val, SDR_PHYGRP_SCCGRP_ADDRESS | off | (grp << 2));
}
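
/*
 * Inferred from the address computation above: each per-group setting
 * occupies one 32-bit word (hence grp << 2), so group N of a given
 * parameter sits at off + 4 * N within the SCC space, assuming the OR
 * acts as an add here (which holds while the bit fields don't overlap).
 */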

/**
 * scc_mgr_initialize() - Initialize SCC Manager registers
 *
 * Initialize SCC Manager registers.
 */
static void scc_mgr_initialize(void)
{
	/*
	 * Clear register file for HPS. 16 (2^4) is the size of the
	 * full register file in the scc mgr:
	 *	RFILE_DEPTH = 1 + log2(MEM_DQ_PER_DQS + 1 + MEM_DM_PER_DQS +
	 *                             MEM_IF_READ_DQS_WIDTH - 1);
	 */
	int i;

	for (i = 0; i < 16; i++) {
		debug_cond(DLEVEL == 1, "%s:%d: Clearing SCC RFILE index %u\n",
			   __func__, __LINE__, i);
		scc_mgr_set(SCC_MGR_HHP_RFILE_OFFSET, 0, i);
	}
}

static void scc_mgr_set_dqdqs_output_phase(uint32_t write_group, uint32_t phase)
{
	scc_mgr_set(SCC_MGR_DQDQS_OUT_PHASE_OFFSET, write_group, phase);
}

static void scc_mgr_set_dqs_bus_in_delay(uint32_t read_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_DQS_IN_DELAY_OFFSET, read_group, delay);
}

static void scc_mgr_set_dqs_en_phase(uint32_t read_group, uint32_t phase)
{
	scc_mgr_set(SCC_MGR_DQS_EN_PHASE_OFFSET, read_group, phase);
}

static void scc_mgr_set_dqs_en_delay(uint32_t read_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_DQS_EN_DELAY_OFFSET, read_group, delay);
}

static void scc_mgr_set_dqs_io_in_delay(uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, rwcfg->mem_dq_per_write_dqs,
		    delay);
}

static void scc_mgr_set_dq_in_delay(uint32_t dq_in_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, dq_in_group, delay);
}

static void scc_mgr_set_dq_out1_delay(uint32_t dq_in_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, dq_in_group, delay);
}

static void scc_mgr_set_dqs_out1_delay(uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, rwcfg->mem_dq_per_write_dqs,
		    delay);
}

static void scc_mgr_set_dm_out1_delay(uint32_t dm, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET,
		    rwcfg->mem_dq_per_write_dqs + 1 + dm,
		    delay);
}

/* load up dqs config settings */
static void scc_mgr_load_dqs(uint32_t dqs)
{
	writel(dqs, &sdr_scc_mgr->dqs_ena);
}

/* load up dqs io config settings */
static void scc_mgr_load_dqs_io(void)
{
	writel(0, &sdr_scc_mgr->dqs_io_ena);
}

/* load up dq config settings */
static void scc_mgr_load_dq(uint32_t dq_in_group)
{
	writel(dq_in_group, &sdr_scc_mgr->dq_ena);
}

/* load up dm config settings */
static void scc_mgr_load_dm(uint32_t dm)
{
	writel(dm, &sdr_scc_mgr->dm_ena);
}

/**
 * scc_mgr_set_all_ranks() - Set SCC Manager register for all ranks
 * @off:	Base offset in SCC Manager space
 * @grp:	Read/Write group
 * @val:	Value to be set
 * @update:	If non-zero, trigger SCC Manager update for all ranks
 *
 * This function sets the SCC Manager (Scan Chain Control Manager) register
 * and optionally triggers the SCC update for all ranks.
 */
static void scc_mgr_set_all_ranks(const u32 off, const u32 grp, const u32 val,
				  const int update)
{
	u32 r;

	for (r = 0; r < rwcfg->mem_number_of_ranks;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		scc_mgr_set(off, grp, val);

		if (update || (r == 0)) {
			writel(grp, &sdr_scc_mgr->dqs_ena);
			writel(0, &sdr_scc_mgr->update);
		}
	}
}

static void scc_mgr_set_dqs_en_phase_all_ranks(u32 read_group, u32 phase)
{
	/*
	 * USER: Although the h/w doesn't support different phases per
	 * shadow register, for simplicity our scc manager modeling
	 * keeps different phase settings per shadow reg, and it's
	 * important for us to keep them in sync to match h/w.
	 * For efficiency, the scan chain update should occur only
	 * once to sr0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_PHASE_OFFSET,
			      read_group, phase, 0);
}

static void scc_mgr_set_dqdqs_output_phase_all_ranks(uint32_t write_group,
						     uint32_t phase)
{
	/*
	 * USER: Although the h/w doesn't support different phases per
	 * shadow register, for simplicity our scc manager modeling
	 * keeps different phase settings per shadow reg, and it's
	 * important for us to keep them in sync to match h/w.
	 * For efficiency, the scan chain update should occur only
	 * once to sr0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQDQS_OUT_PHASE_OFFSET,
			      write_group, phase, 0);
}

static void scc_mgr_set_dqs_en_delay_all_ranks(uint32_t read_group,
					       uint32_t delay)
{
	/*
	 * In shadow register mode, the T11 settings are stored in
	 * registers in the core, which are updated by the DQS_ENA
	 * signals. Not issuing the SCC_MGR_UPD command allows us to
	 * save lots of rank switching overhead, by calling
	 * select_shadow_regs_for_update with update_scan_chains
	 * set to 0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_DELAY_OFFSET,
			      read_group, delay, 1);
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_mgr_set_oct_out1_delay() - Set OCT output delay
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * This function sets the OCT output delay in SCC manager.
 */
static void scc_mgr_set_oct_out1_delay(const u32 write_group, const u32 delay)
{
	const int ratio = rwcfg->mem_if_read_dqs_width /
			  rwcfg->mem_if_write_dqs_width;
	const int base = write_group * ratio;
	int i;
	/*
	 * Load the setting in the SCC manager
	 * Although OCT affects only write data, the OCT delay is controlled
	 * by the DQS logic block which is instantiated once per read group.
	 * For protocols where a write group consists of multiple read groups,
	 * the setting must be set multiple times.
	 */
	for (i = 0; i < ratio; i++)
		scc_mgr_set(SCC_MGR_OCT_OUT1_DELAY_OFFSET, base + i, delay);
}

/**
 * scc_mgr_set_hhp_extras() - Set HHP extras.
 *
 * Load the fixed setting in the SCC manager HHP extras.
 */
static void scc_mgr_set_hhp_extras(void)
{
	/*
	 * Load the fixed setting in the SCC manager
	 * bits: 0:0 = 1'b1	- DQS bypass
	 * bits: 1:1 = 1'b1	- DQ bypass
	 * bits: 4:2 = 3'b001	- rfifo_mode
	 * bits: 6:5 = 2'b01	- rfifo clock_select
	 * bits: 7:7 = 1'b0	- separate gating from ungating setting
	 * bits: 8:8 = 1'b0	- separate OE from Output delay setting
	 */
	const u32 value = (0 << 8) | (0 << 7) | (1 << 5) |
			  (1 << 2) | (1 << 1) | (1 << 0);
	const u32 addr = SDR_PHYGRP_SCCGRP_ADDRESS |
			 SCC_MGR_HHP_GLOBALS_OFFSET |
			 SCC_MGR_HHP_EXTRAS_OFFSET;
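	/* Illustrative: with the bit assignments above, value == 0x27. */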

	debug_cond(DLEVEL == 1, "%s:%d Setting HHP Extras\n",
		   __func__, __LINE__);
	writel(value, addr);
	debug_cond(DLEVEL == 1, "%s:%d Done Setting HHP Extras\n",
		   __func__, __LINE__);
}

/**
 * scc_mgr_zero_all() - Zero all DQS config
 *
 * Zero all DQS config.
 */
static void scc_mgr_zero_all(void)
{
	int i, r;

	/*
	 * USER Zero all DQS config settings, across all groups and all
	 * shadow registers
	 */
	for (r = 0; r < rwcfg->mem_number_of_ranks;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		for (i = 0; i < rwcfg->mem_if_read_dqs_width; i++) {
			/*
			 * The phases actually don't exist on a per-rank basis,
			 * but there's no harm updating them several times, so
			 * let's keep the code simple.
			 */
			scc_mgr_set_dqs_bus_in_delay(i, IO_DQS_IN_RESERVE);
			scc_mgr_set_dqs_en_phase(i, 0);
			scc_mgr_set_dqs_en_delay(i, 0);
		}

		for (i = 0; i < rwcfg->mem_if_write_dqs_width; i++) {
			scc_mgr_set_dqdqs_output_phase(i, 0);
			/* Arria V/Cyclone V don't have out2. */
			scc_mgr_set_oct_out1_delay(i, IO_DQS_OUT_RESERVE);
		}
	}

	/* Multicast to all DQS group enables. */
	writel(0xff, &sdr_scc_mgr->dqs_ena);
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_set_bypass_mode() - Set bypass mode and trigger SCC update
 * @write_group:	Write group
 *
 * Set bypass mode and trigger SCC update.
 */
static void scc_set_bypass_mode(const u32 write_group)
{
	/* Multicast to all DQ enables. */
	writel(0xff, &sdr_scc_mgr->dq_ena);
	writel(0xff, &sdr_scc_mgr->dm_ena);

	/* Update current DQS IO enable. */
	writel(0, &sdr_scc_mgr->dqs_io_ena);

	/* Update the DQS logic. */
	writel(write_group, &sdr_scc_mgr->dqs_ena);

	/* Hit update. */
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_mgr_load_dqs_for_write_group() - Load DQS settings for Write Group
 * @write_group:	Write group
 *
 * Load DQS settings for Write Group, do not trigger SCC update.
 */
static void scc_mgr_load_dqs_for_write_group(const u32 write_group)
{
	const int ratio = rwcfg->mem_if_read_dqs_width /
			  rwcfg->mem_if_write_dqs_width;
	const int base = write_group * ratio;
	int i;
	/*
	 * Load the setting in the SCC manager
	 * Although OCT affects only write data, the OCT delay is controlled
	 * by the DQS logic block which is instantiated once per read group.
	 * For protocols where a write group consists of multiple read groups,
	 * the setting must be set multiple times.
	 */
	for (i = 0; i < ratio; i++)
		writel(base + i, &sdr_scc_mgr->dqs_ena);
}

/**
 * scc_mgr_zero_group() - Zero all configs for a group
 *
 * Zero DQ, DM, DQS and OCT configs for a group.
 */
static void scc_mgr_zero_group(const u32 write_group, const int out_only)
{
	int i, r;

	for (r = 0; r < rwcfg->mem_number_of_ranks;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		/* Zero all DQ config settings. */
		for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++) {
			scc_mgr_set_dq_out1_delay(i, 0);
			if (!out_only)
				scc_mgr_set_dq_in_delay(i, 0);
		}

		/* Multicast to all DQ enables. */
		writel(0xff, &sdr_scc_mgr->dq_ena);

		/* Zero all DM config settings. */
		for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++)
			scc_mgr_set_dm_out1_delay(i, 0);

		/* Multicast to all DM enables. */
		writel(0xff, &sdr_scc_mgr->dm_ena);

		/* Zero all DQS IO settings. */
		if (!out_only)
			scc_mgr_set_dqs_io_in_delay(0);

		/* Arria V/Cyclone V don't have out2. */
		scc_mgr_set_dqs_out1_delay(IO_DQS_OUT_RESERVE);
		scc_mgr_set_oct_out1_delay(write_group, IO_DQS_OUT_RESERVE);
		scc_mgr_load_dqs_for_write_group(write_group);

		/* Multicast to all DQS IO enables (only 1 in total). */
		writel(0, &sdr_scc_mgr->dqs_io_ena);

		/* Hit update to zero everything. */
		writel(0, &sdr_scc_mgr->update);
	}
}

/*
 * apply and load a particular input delay for the DQ pins in a group
 * group_bgn is the index of the first dq pin (in the write group)
 */
static void scc_mgr_apply_group_dq_in_delay(uint32_t group_bgn, uint32_t delay)
{
	uint32_t i, p;

	for (i = 0, p = group_bgn; i < rwcfg->mem_dq_per_read_dqs; i++, p++) {
		scc_mgr_set_dq_in_delay(p, delay);
		scc_mgr_load_dq(p);
	}
}

/**
 * scc_mgr_apply_group_dq_out1_delay() - Apply and load an output delay for the DQ pins in a group
 * @delay:		Delay value
 *
 * Apply and load a particular output delay for the DQ pins in a group.
 */
static void scc_mgr_apply_group_dq_out1_delay(const u32 delay)
{
	int i;

	for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++) {
		scc_mgr_set_dq_out1_delay(i, delay);
		scc_mgr_load_dq(i);
	}
}

/* apply and load a particular output delay for the DM pins in a group */
static void scc_mgr_apply_group_dm_out1_delay(uint32_t delay1)
{
	uint32_t i;

	for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++) {
		scc_mgr_set_dm_out1_delay(i, delay1);
		scc_mgr_load_dm(i);
	}
}

/* apply and load delay on both DQS and OCT out1 */
static void scc_mgr_apply_group_dqs_io_and_oct_out1(uint32_t write_group,
						    uint32_t delay)
{
	scc_mgr_set_dqs_out1_delay(delay);
	scc_mgr_load_dqs_io();

	scc_mgr_set_oct_out1_delay(write_group, delay);
	scc_mgr_load_dqs_for_write_group(write_group);
}

/**
 * scc_mgr_apply_group_all_out_delay_add() - Apply a delay to the entire output side: DQ, DM, DQS, OCT
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * Apply a delay to the entire output side: DQ, DM, DQS, OCT.
 */
static void scc_mgr_apply_group_all_out_delay_add(const u32 write_group,
						  const u32 delay)
{
	u32 i, new_delay;

	/* DQ shift */
	for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++)
		scc_mgr_load_dq(i);

	/* DM shift */
	for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++)
		scc_mgr_load_dm(i);

	/* DQS shift */
	new_delay = READ_SCC_DQS_IO_OUT2_DELAY + delay;
	if (new_delay > IO_IO_OUT2_DELAY_MAX) {
		debug_cond(DLEVEL == 1,
			   "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n",
			   __func__, __LINE__, write_group, delay, new_delay,
			   IO_IO_OUT2_DELAY_MAX,
			   new_delay - IO_IO_OUT2_DELAY_MAX);
		new_delay -= IO_IO_OUT2_DELAY_MAX;
		scc_mgr_set_dqs_out1_delay(new_delay);
	}

	scc_mgr_load_dqs_io();

	/* OCT shift */
	new_delay = READ_SCC_OCT_OUT2_DELAY + delay;
	if (new_delay > IO_IO_OUT2_DELAY_MAX) {
		debug_cond(DLEVEL == 1,
			   "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n",
			   __func__, __LINE__, write_group, delay,
			   new_delay, IO_IO_OUT2_DELAY_MAX,
			   new_delay - IO_IO_OUT2_DELAY_MAX);
		new_delay -= IO_IO_OUT2_DELAY_MAX;
		scc_mgr_set_oct_out1_delay(write_group, new_delay);
	}

	scc_mgr_load_dqs_for_write_group(write_group);
}

/**
 * scc_mgr_apply_group_all_out_delay_add_all_ranks() - Apply a delay to the entire output side to all ranks
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * Apply a delay to the entire output side (DQ, DM, DQS, OCT) to all ranks.
 */
static void
scc_mgr_apply_group_all_out_delay_add_all_ranks(const u32 write_group,
						const u32 delay)
{
	int r;

	for (r = 0; r < rwcfg->mem_number_of_ranks;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		scc_mgr_apply_group_all_out_delay_add(write_group, delay);
		writel(0, &sdr_scc_mgr->update);
	}
}

/**
 * set_jump_as_return() - Return instruction optimization
 *
 * Optimization used to recover some slots in the DDR3 inst_rom; it could
 * be applied to other protocols if we wanted to.
 */
static void set_jump_as_return(void)
{
	/*
	 * To save space, we replace return with a jump to a special shared
	 * RETURN instruction, and we set the counter to a large value so
	 * that we always jump.
	 */
	writel(0xff, &sdr_rw_load_mgr_regs->load_cntr0);
	writel(rwcfg->rreturn, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
}

/**
 * delay_for_n_mem_clocks() - Delay for N memory clocks
 * @clocks:	Length of the delay
 *
 * Delay for N memory clocks.
 */
static void delay_for_n_mem_clocks(const u32 clocks)
{
	u32 afi_clocks;
	u16 c_loop;
	u8 inner;
	u8 outer;

	debug("%s:%d: clocks=%u ... start\n", __func__, __LINE__, clocks);

	/* Scale (rounding up) to get afi clocks. */
	afi_clocks = DIV_ROUND_UP(clocks, AFI_RATE_RATIO);
	if (afi_clocks)	/* Temporary underflow protection */
		afi_clocks--;

	/*
	 * Note, we don't bother accounting for being off a little
	 * bit because of a few extra instructions in outer loops.
	 * Note, the loops have a test at the end, and do the test
	 * before the decrement, and so always perform the loop
	 * one time more than the counter value.
	 */
	c_loop = afi_clocks >> 16;
	outer = c_loop ? 0xff : (afi_clocks >> 8);
	inner = outer ? 0xff : afi_clocks;

	/*
	 * rom instructions are structured as follows:
	 *
	 *    IDLE_LOOP2: jnz cntr0, TARGET_A
	 *    IDLE_LOOP1: jnz cntr1, TARGET_B
	 *                return
	 *
	 * so, when doing nested loops, TARGET_A is set to IDLE_LOOP2, and
	 * TARGET_B is set to IDLE_LOOP2 as well
	 *
	 * if we have no outer loop, though, then we can use IDLE_LOOP1 only,
	 * and set TARGET_B to IDLE_LOOP1 and we skip IDLE_LOOP2 entirely
	 *
	 * a little confusing, but it helps save precious space in the inst_rom
	 * and sequencer rom and keeps the delays more accurate and reduces
	 * overhead
	 */
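	/*
	 * Worked example (illustrative): afi_clocks = 0x12345 yields
	 * c_loop = 1, outer = 0xff and inner = 0xff; the do/while in the
	 * else branch below then issues the idle-loop instruction
	 * c_loop + 1 = 2 times.
	 */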
	if (afi_clocks < 0x100) {
		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner),
			&sdr_rw_load_mgr_regs->load_cntr1);

		writel(rwcfg->idle_loop1,
			&sdr_rw_load_jump_mgr_regs->load_jump_add1);

		writel(rwcfg->idle_loop1, SDR_PHYGRP_RWMGRGRP_ADDRESS |
					  RW_MGR_RUN_SINGLE_GROUP_OFFSET);
	} else {
		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner),
			&sdr_rw_load_mgr_regs->load_cntr0);

		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(outer),
			&sdr_rw_load_mgr_regs->load_cntr1);

		writel(rwcfg->idle_loop2,
			&sdr_rw_load_jump_mgr_regs->load_jump_add0);

		writel(rwcfg->idle_loop2,
			&sdr_rw_load_jump_mgr_regs->load_jump_add1);

		do {
			writel(rwcfg->idle_loop2,
				SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_RUN_SINGLE_GROUP_OFFSET);
		} while (c_loop-- != 0);
	}
	debug("%s:%d clocks=%u ... end\n", __func__, __LINE__, clocks);
}

/**
 * rw_mgr_mem_init_load_regs() - Load instruction registers
 * @cntr0:	Counter 0 value
 * @cntr1:	Counter 1 value
 * @cntr2:	Counter 2 value
 * @jump:	Jump instruction value
 *
 * Load instruction registers.
 */
static void rw_mgr_mem_init_load_regs(u32 cntr0, u32 cntr1, u32 cntr2, u32 jump)
{
	uint32_t grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
			   RW_MGR_RUN_SINGLE_GROUP_OFFSET;

	/* Load counters */
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr0),
	       &sdr_rw_load_mgr_regs->load_cntr0);
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr1),
	       &sdr_rw_load_mgr_regs->load_cntr1);
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr2),
	       &sdr_rw_load_mgr_regs->load_cntr2);

	/* Load jump address */
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add1);
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add2);

	/* Execute count instruction */
	writel(jump, grpaddr);
}

/**
 * rw_mgr_mem_load_user() - Load user calibration values
 * @fin1:	Final instruction 1
 * @fin2:	Final instruction 2
 * @precharge:	If 1, precharge the banks at the end
 *
 * Load user calibration values and optionally precharge the banks.
 */
static void rw_mgr_mem_load_user(const u32 fin1, const u32 fin2,
				 const int precharge)
{
	u32 grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
		      RW_MGR_RUN_SINGLE_GROUP_OFFSET;
	u32 r;

	for (r = 0; r < rwcfg->mem_number_of_ranks; r++) {
		/* set rank */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF);

		/* precharge all banks ... */
		if (precharge)
			writel(rwcfg->precharge_all, grpaddr);

		/*
		 * USER Use mirrored commands for odd ranks if address
		 * mirroring is on
		 */
		if ((rwcfg->mem_address_mirroring >> r) & 0x1) {
			set_jump_as_return();
			writel(rwcfg->mrs2_mirr, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(rwcfg->mrs3_mirr, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(rwcfg->mrs1_mirr, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(fin1, grpaddr);
		} else {
			set_jump_as_return();
			writel(rwcfg->mrs2, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(rwcfg->mrs3, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(rwcfg->mrs1, grpaddr);
			set_jump_as_return();
			writel(fin2, grpaddr);
		}

		if (precharge)
			continue;

		set_jump_as_return();
		writel(rwcfg->zqcl, grpaddr);

		/* tZQinit = tDLLK = 512 ck cycles */
		delay_for_n_mem_clocks(512);
	}
}

/**
 * rw_mgr_mem_initialize() - Initialize RW Manager
 *
 * Initialize RW Manager.
 */
static void rw_mgr_mem_initialize(void)
{
	debug("%s:%d\n", __func__, __LINE__);

	/* The reset / cke part of initialization is broadcasted to all ranks */
	writel(RW_MGR_RANK_ALL, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_SET_CS_AND_ODT_MASK_OFFSET);

	/*
	 * Here's how you load register for a loop
	 * Counters are located @ 0x800
	 * Jump address are located @ 0xC00
	 * For both, registers 0 to 3 are selected using bits 3 and 2, like
	 * in 0x800, 0x804, 0x808, 0x80C and 0xC00, 0xC04, 0xC08, 0xC0C
	 * I know this ain't pretty, but Avalon bus throws away the 2 least
	 * significant bits
	 */

	/* Start with memory RESET activated */

	/* tINIT = 200us */

	/*
	 * 200us @ 266MHz (3.75 ns) ~ 54000 clock cycles
	 * If a and b are the number of iteration in 2 nested loops
	 * it takes the following number of cycles to complete the operation:
	 * number_of_cycles = ((2 + n) * a + 2) * b
	 * where n is the number of instruction in the inner loop
	 * One possible solution is n = 0 , a = 256 , b = 106 => a = FF,
	 * b = 6A
	 */
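	/* Illustrative check: ((2 + 0) * 256 + 2) * 106 = 54484 >= 54000. */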
	rw_mgr_mem_init_load_regs(SEQ_TINIT_CNTR0_VAL, SEQ_TINIT_CNTR1_VAL,
				  SEQ_TINIT_CNTR2_VAL,
				  rwcfg->init_reset_0_cke_0);

	/* Indicate that memory is stable. */
	writel(1, &phy_mgr_cfg->reset_mem_stbl);

	/*
	 * transition the RESET to high
	 * Wait for 500us
	 */

	/*
	 * 500us @ 266MHz (3.75 ns) ~ 134000 clock cycles
	 * If a and b are the number of iteration in 2 nested loops
	 * it takes the following number of cycles to complete the operation
	 * number_of_cycles = ((2 + n) * a + 2) * b
	 * where n is the number of instruction in the inner loop
	 * One possible solution is n = 2 , a = 131 , b = 256 => a = 83,
	 * b = FF
	 */
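	/* Illustrative check: ((2 + 2) * 131 + 2) * 256 = 134656 >= 134000. */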
	rw_mgr_mem_init_load_regs(SEQ_TRESET_CNTR0_VAL, SEQ_TRESET_CNTR1_VAL,
				  SEQ_TRESET_CNTR2_VAL,
				  rwcfg->init_reset_1_cke_0);

	/* Bring up clock enable. */

	/* tXRP < 250 ck cycles */
	delay_for_n_mem_clocks(250);

	rw_mgr_mem_load_user(rwcfg->mrs0_dll_reset_mirr, rwcfg->mrs0_dll_reset,
			     0);
}

/**
 * rw_mgr_mem_handoff() - Hand off the memory to user
 *
 * At the end of calibration we have to program the user settings in
 * and hand off the memory to the user.
 */
static void rw_mgr_mem_handoff(void)
{
	rw_mgr_mem_load_user(rwcfg->mrs0_user_mirr, rwcfg->mrs0_user, 1);
	/*
	 * Need to wait tMOD (12CK or 15ns) time before issuing other
	 * commands, but we will have plenty of NIOS cycles before actual
	 * handoff so it's okay.
	 */
}

/**
 * rw_mgr_mem_calibrate_write_test_issue() - Issue write test command
 * @group:	Write Group
 * @test_dm:	Use DM
 *
 * Issue write test command. Two variants are provided, one that just tests
 * a write pattern and another that tests datamask functionality.
 */
static void rw_mgr_mem_calibrate_write_test_issue(u32 group,
						  u32 test_dm)
{
	const u32 quick_write_mode =
		(STATIC_CALIB_STEPS & CALIB_SKIP_WRITES) &&
		ENABLE_SUPER_QUICK_CALIBRATION;
	u32 mcc_instruction;
	u32 rw_wl_nop_cycles;

	/*
	 * Set counter and jump addresses for the right
	 * number of NOP cycles.
	 * The number of supported NOP cycles can range from -1 to infinity.
	 * Three different cases are handled:
	 *
	 * 1. For a number of NOP cycles greater than 0, the RW Mgr looping
	 *    mechanism will be used to insert the right number of NOPs
	 *
	 * 2. For a number of NOP cycles equal to 0, the micro-instruction
	 *    issuing the write command will jump straight to the
	 *    micro-instruction that turns on DQS (for DDRx), or outputs write
	 *    data (for RLD), skipping
	 *    the NOP micro-instruction altogether
	 *
	 * 3. A number of NOP cycles equal to -1 indicates that DQS must be
	 *    turned on in the same micro-instruction that issues the write
	 *    command. Then we need
	 *    to directly jump to the micro-instruction that sends out the data
	 *
	 * NOTE: Implementing this mechanism uses 2 RW Mgr jump-counters
	 *       (2 and 3). One jump-counter (0) is used to perform multiple
	 *       write-read operations.
	 *       One counter is left to issue this command in "multiple-group"
	 *       mode.
	 */
1059ad64769cSMarek Vasut 
1060ad64769cSMarek Vasut 	rw_wl_nop_cycles = gbl->rw_wl_nop_cycles;
1061ad64769cSMarek Vasut 
1062ad64769cSMarek Vasut 	if (rw_wl_nop_cycles == -1) {
1063ad64769cSMarek Vasut 		/*
1064ad64769cSMarek Vasut 		 * CNTR 2 - We want to execute the special write operation that
1065ad64769cSMarek Vasut 		 * turns on DQS right away and then skip directly to the
1066ad64769cSMarek Vasut 		 * instruction that sends out the data. We set the counter to a
1067ad64769cSMarek Vasut 		 * large number so that the jump is always taken.
1068ad64769cSMarek Vasut 		 */
1069ad64769cSMarek Vasut 		writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2);
1070ad64769cSMarek Vasut 
1071ad64769cSMarek Vasut 		/* CNTR 3 - Not used */
1072ad64769cSMarek Vasut 		if (test_dm) {
1073*1fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_dm_bank_0_wl_1;
1074*1fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_dm_bank_0_data,
1075ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
1076*1fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_dm_bank_0_nop,
1077ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
1078ad64769cSMarek Vasut 		} else {
1079*1fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_bank_0_wl_1;
1080*1fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_bank_0_data,
1081ad64769cSMarek Vasut 				&sdr_rw_load_jump_mgr_regs->load_jump_add2);
1082*1fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_bank_0_nop,
1083ad64769cSMarek Vasut 				&sdr_rw_load_jump_mgr_regs->load_jump_add3);
1084ad64769cSMarek Vasut 		}
1085ad64769cSMarek Vasut 	} else if (rw_wl_nop_cycles == 0) {
1086ad64769cSMarek Vasut 		/*
1087ad64769cSMarek Vasut 		 * CNTR 2 - We want to skip the NOP operation and go straight
1088ad64769cSMarek Vasut 		 * to the DQS enable instruction. We set the counter to a large
1089ad64769cSMarek Vasut 		 * number so that the jump is always taken.
1090ad64769cSMarek Vasut 		 */
1091ad64769cSMarek Vasut 		writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2);
1092ad64769cSMarek Vasut 
1093ad64769cSMarek Vasut 		/* CNTR 3 - Not used */
1094ad64769cSMarek Vasut 		if (test_dm) {
1095*1fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_dm_bank_0;
1096*1fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_dm_bank_0_dqs,
1097ad64769cSMarek Vasut 			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
1098ad64769cSMarek Vasut 		} else {
1099*1fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_bank_0;
1100*1fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_bank_0_dqs,
1101ad64769cSMarek Vasut 				&sdr_rw_load_jump_mgr_regs->load_jump_add2);
1102ad64769cSMarek Vasut 		}
1103ad64769cSMarek Vasut 	} else {
1104ad64769cSMarek Vasut 		/*
1105ad64769cSMarek Vasut 		 * CNTR 2 - In this case we want to execute the next instruction
1106ad64769cSMarek Vasut 		 * and NOT take the jump. So we set the counter to 0. The jump
1107ad64769cSMarek Vasut 		 * address doesn't matter.
1108ad64769cSMarek Vasut 		 */
1109ad64769cSMarek Vasut 		writel(0x0, &sdr_rw_load_mgr_regs->load_cntr2);
1110ad64769cSMarek Vasut 		writel(0x0, &sdr_rw_load_jump_mgr_regs->load_jump_add2);
1111ad64769cSMarek Vasut 
1112ad64769cSMarek Vasut 		/*
1113ad64769cSMarek Vasut 		 * CNTR 3 - Set the nop counter to the number of cycles we
1114ad64769cSMarek Vasut 		 * need to loop for, minus 1.
1115ad64769cSMarek Vasut 		 */
1116ad64769cSMarek Vasut 		writel(rw_wl_nop_cycles - 1, &sdr_rw_load_mgr_regs->load_cntr3);
1117ad64769cSMarek Vasut 		if (test_dm) {
1118*1fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_dm_bank_0;
1119*1fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_dm_bank_0_nop,
1120ad64769cSMarek Vasut 				&sdr_rw_load_jump_mgr_regs->load_jump_add3);
1121ad64769cSMarek Vasut 		} else {
1122*1fa0c8c4SMarek Vasut 			mcc_instruction = rwcfg->lfsr_wr_rd_bank_0;
1123*1fa0c8c4SMarek Vasut 			writel(rwcfg->lfsr_wr_rd_bank_0_nop,
1124ad64769cSMarek Vasut 				&sdr_rw_load_jump_mgr_regs->load_jump_add3);
1125ad64769cSMarek Vasut 		}
1126ad64769cSMarek Vasut 	}
1127ad64769cSMarek Vasut 
1128ad64769cSMarek Vasut 	writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
1129ad64769cSMarek Vasut 		  RW_MGR_RESET_READ_DATAPATH_OFFSET);
1130ad64769cSMarek Vasut 
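	/*
	 * CNTR 0 sets how many write-read iterations are issued: 0x40
	 * normally, but only 0x08 when super-quick calibration skips most
	 * of the writes (quick_write_mode above).
	 */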
1131ad64769cSMarek Vasut 	if (quick_write_mode)
1132ad64769cSMarek Vasut 		writel(0x08, &sdr_rw_load_mgr_regs->load_cntr0);
1133ad64769cSMarek Vasut 	else
1134ad64769cSMarek Vasut 		writel(0x40, &sdr_rw_load_mgr_regs->load_cntr0);
1135ad64769cSMarek Vasut 
1136ad64769cSMarek Vasut 	writel(mcc_instruction, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
1137ad64769cSMarek Vasut 
1138ad64769cSMarek Vasut 	/*
1139ad64769cSMarek Vasut 	 * CNTR 1 - This is used to ensure enough time elapses
1140ad64769cSMarek Vasut 	 * for read data to come back.
1141ad64769cSMarek Vasut 	 */
1142ad64769cSMarek Vasut 	writel(0x30, &sdr_rw_load_mgr_regs->load_cntr1);
1143ad64769cSMarek Vasut 
1144ad64769cSMarek Vasut 	if (test_dm) {
1145*1fa0c8c4SMarek Vasut 		writel(rwcfg->lfsr_wr_rd_dm_bank_0_wait,
1146ad64769cSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add1);
1147ad64769cSMarek Vasut 	} else {
1148*1fa0c8c4SMarek Vasut 		writel(rwcfg->lfsr_wr_rd_bank_0_wait,
1149ad64769cSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add1);
1150ad64769cSMarek Vasut 	}
1151ad64769cSMarek Vasut 
11528371c2eeSMarek Vasut 	writel(mcc_instruction, (SDR_PHYGRP_RWMGRGRP_ADDRESS |
11538371c2eeSMarek Vasut 				RW_MGR_RUN_SINGLE_GROUP_OFFSET) +
11548371c2eeSMarek Vasut 				(group << 2));
1155ad64769cSMarek Vasut }
1156ad64769cSMarek Vasut 
11574a82854bSMarek Vasut /**
11584a82854bSMarek Vasut  * rw_mgr_mem_calibrate_write_test() - Test writes, check for single/multiple pass
11594a82854bSMarek Vasut  * @rank_bgn:		Rank number
11604a82854bSMarek Vasut  * @write_group:	Write Group
11614a82854bSMarek Vasut  * @use_dm:		Use DM
11624a82854bSMarek Vasut  * @all_correct:	All bits must be correct in the mask
11634a82854bSMarek Vasut  * @bit_chk:		Resulting bit mask after the test
11644a82854bSMarek Vasut  * @all_ranks:		Test all ranks
11654a82854bSMarek Vasut  *
11664a82854bSMarek Vasut  * Test writes; can check for a single-bit pass or a multiple-bit pass.
11674a82854bSMarek Vasut  */
1168b9452ea0SMarek Vasut static int
1169b9452ea0SMarek Vasut rw_mgr_mem_calibrate_write_test(const u32 rank_bgn, const u32 write_group,
1170b9452ea0SMarek Vasut 				const u32 use_dm, const u32 all_correct,
1171b9452ea0SMarek Vasut 				u32 *bit_chk, const u32 all_ranks)
1172ad64769cSMarek Vasut {
1173b9452ea0SMarek Vasut 	const u32 rank_end = all_ranks ?
1174*1fa0c8c4SMarek Vasut 				rwcfg->mem_number_of_ranks :
1175ad64769cSMarek Vasut 				(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
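	/* shift_ratio is the number of DQ bits covered by one virtual group. */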
1176*1fa0c8c4SMarek Vasut 	const u32 shift_ratio = rwcfg->mem_dq_per_write_dqs /
1177*1fa0c8c4SMarek Vasut 				rwcfg->mem_virtual_groups_per_write_dqs;
1178b9452ea0SMarek Vasut 	const u32 correct_mask_vg = param->write_correct_mask_vg;
1179b9452ea0SMarek Vasut 
1180b9452ea0SMarek Vasut 	u32 tmp_bit_chk, base_rw_mgr;
1181b9452ea0SMarek Vasut 	int vg, r;
1182ad64769cSMarek Vasut 
1183ad64769cSMarek Vasut 	*bit_chk = param->write_correct_mask;
1184ad64769cSMarek Vasut 
1185ad64769cSMarek Vasut 	for (r = rank_bgn; r < rank_end; r++) {
1186b9452ea0SMarek Vasut 		/* Set rank */
1187ad64769cSMarek Vasut 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
1188ad64769cSMarek Vasut 
1189ad64769cSMarek Vasut 		tmp_bit_chk = 0;
1190*1fa0c8c4SMarek Vasut 		for (vg = rwcfg->mem_virtual_groups_per_write_dqs - 1;
1191b9452ea0SMarek Vasut 		     vg >= 0; vg--) {
1192b9452ea0SMarek Vasut 			/* Reset the FIFOs to get pointers to known state. */
1193ad64769cSMarek Vasut 			writel(0, &phy_mgr_cmd->fifo_reset);
1194ad64769cSMarek Vasut 
1195b9452ea0SMarek Vasut 			rw_mgr_mem_calibrate_write_test_issue(
1196b9452ea0SMarek Vasut 				write_group *
1197*1fa0c8c4SMarek Vasut 				rwcfg->mem_virtual_groups_per_write_dqs + vg,
1198ad64769cSMarek Vasut 				use_dm);
1199ad64769cSMarek Vasut 
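			/*
			 * The RW Mgr status flags each failing DQ bit, so
			 * inverting it and masking with the virtual-group
			 * mask leaves a 1 for every bit that passed.
			 */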
1200b9452ea0SMarek Vasut 			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
1201b9452ea0SMarek Vasut 			tmp_bit_chk <<= shift_ratio;
1202b9452ea0SMarek Vasut 			tmp_bit_chk |= (correct_mask_vg & ~(base_rw_mgr));
1203ad64769cSMarek Vasut 		}
1204b9452ea0SMarek Vasut 
1205ad64769cSMarek Vasut 		*bit_chk &= tmp_bit_chk;
1206ad64769cSMarek Vasut 	}
1207ad64769cSMarek Vasut 
1208ad64769cSMarek Vasut 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
1209b9452ea0SMarek Vasut 	if (all_correct) {
1210b9452ea0SMarek Vasut 		debug_cond(DLEVEL == 2,
1211b9452ea0SMarek Vasut 			   "write_test(%u,%u,ALL) : %u == %u => %i\n",
1212b9452ea0SMarek Vasut 			   write_group, use_dm, *bit_chk,
1213b9452ea0SMarek Vasut 			   param->write_correct_mask,
1214b9452ea0SMarek Vasut 			   *bit_chk == param->write_correct_mask);
1215ad64769cSMarek Vasut 		return *bit_chk == param->write_correct_mask;
1216ad64769cSMarek Vasut 	} else {
1217ad64769cSMarek Vasut 		set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
1218b9452ea0SMarek Vasut 		debug_cond(DLEVEL == 2,
1219b9452ea0SMarek Vasut 			   "write_test(%u,%u,ONE) : %u != %i => %i\n",
1220b9452ea0SMarek Vasut 			   write_group, use_dm, *bit_chk, 0, *bit_chk != 0);
1221ad64769cSMarek Vasut 		return *bit_chk != 0x00;
1222ad64769cSMarek Vasut 	}
1223ad64769cSMarek Vasut }
1224ad64769cSMarek Vasut 
1225d844c7d4SMarek Vasut /**
1226d844c7d4SMarek Vasut  * rw_mgr_mem_calibrate_read_test_patterns() - Read back test patterns
1227d844c7d4SMarek Vasut  * @rank_bgn:	Rank number
1228d844c7d4SMarek Vasut  * @group:	Read/Write Group
1229d844c7d4SMarek Vasut  * @all_ranks:	Test all ranks
1230d844c7d4SMarek Vasut  *
1231d844c7d4SMarek Vasut  * Performs a guaranteed read on the patterns we are going to use during a
1232d844c7d4SMarek Vasut  * read test to ensure memory works.
12333da42859SDinh Nguyen  */
1234d844c7d4SMarek Vasut static int
1235d844c7d4SMarek Vasut rw_mgr_mem_calibrate_read_test_patterns(const u32 rank_bgn, const u32 group,
1236d844c7d4SMarek Vasut 					const u32 all_ranks)
12373da42859SDinh Nguyen {
1238d844c7d4SMarek Vasut 	const u32 addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
1239d844c7d4SMarek Vasut 			 RW_MGR_RUN_SINGLE_GROUP_OFFSET;
1240d844c7d4SMarek Vasut 	const u32 addr_offset =
1241*1fa0c8c4SMarek Vasut 			 (group * rwcfg->mem_virtual_groups_per_read_dqs) << 2;
1242d844c7d4SMarek Vasut 	const u32 rank_end = all_ranks ?
1243*1fa0c8c4SMarek Vasut 				rwcfg->mem_number_of_ranks :
12443da42859SDinh Nguyen 				(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
1245*1fa0c8c4SMarek Vasut 	const u32 shift_ratio = rwcfg->mem_dq_per_read_dqs /
1246*1fa0c8c4SMarek Vasut 				rwcfg->mem_virtual_groups_per_read_dqs;
1247d844c7d4SMarek Vasut 	const u32 correct_mask_vg = param->read_correct_mask_vg;
12483da42859SDinh Nguyen 
1249d844c7d4SMarek Vasut 	u32 tmp_bit_chk, base_rw_mgr, bit_chk;
1250d844c7d4SMarek Vasut 	int vg, r;
1251d844c7d4SMarek Vasut 	int ret = 0;
1252d844c7d4SMarek Vasut 
1253d844c7d4SMarek Vasut 	bit_chk = param->read_correct_mask;
12543da42859SDinh Nguyen 
12553da42859SDinh Nguyen 	for (r = rank_bgn; r < rank_end; r++) {
1256d844c7d4SMarek Vasut 		/* Set rank */
12573da42859SDinh Nguyen 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
12583da42859SDinh Nguyen 
12593da42859SDinh Nguyen 		/* Load up a constant burst of read commands */
12601273dd9eSMarek Vasut 		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0);
1261*1fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_read,
12621273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add0);
12633da42859SDinh Nguyen 
12641273dd9eSMarek Vasut 		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1);
1265*1fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_read_cont,
12661273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add1);
12673da42859SDinh Nguyen 
12683da42859SDinh Nguyen 		tmp_bit_chk = 0;
1269*1fa0c8c4SMarek Vasut 		for (vg = rwcfg->mem_virtual_groups_per_read_dqs - 1;
1270d844c7d4SMarek Vasut 		     vg >= 0; vg--) {
1271d844c7d4SMarek Vasut 			/* Reset the FIFOs to get pointers to known state. */
12721273dd9eSMarek Vasut 			writel(0, &phy_mgr_cmd->fifo_reset);
12731273dd9eSMarek Vasut 			writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
12741273dd9eSMarek Vasut 				  RW_MGR_RESET_READ_DATAPATH_OFFSET);
1275*1fa0c8c4SMarek Vasut 			writel(rwcfg->guaranteed_read,
1276d844c7d4SMarek Vasut 			       addr + addr_offset + (vg << 2));
12773da42859SDinh Nguyen 
12781273dd9eSMarek Vasut 			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
1279d844c7d4SMarek Vasut 			tmp_bit_chk <<= shift_ratio;
1280d844c7d4SMarek Vasut 			tmp_bit_chk |= correct_mask_vg & ~base_rw_mgr;
12813da42859SDinh Nguyen 		}
12823da42859SDinh Nguyen 
1283d844c7d4SMarek Vasut 		bit_chk &= tmp_bit_chk;
1284d844c7d4SMarek Vasut 	}
1285d844c7d4SMarek Vasut 
1286*1fa0c8c4SMarek Vasut 	writel(rwcfg->clear_dqs_enable, addr + (group << 2));
12873da42859SDinh Nguyen 
12883da42859SDinh Nguyen 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
1289d844c7d4SMarek Vasut 
1290d844c7d4SMarek Vasut 	if (bit_chk != param->read_correct_mask)
1291d844c7d4SMarek Vasut 		ret = -EIO;
1292d844c7d4SMarek Vasut 
1293d844c7d4SMarek Vasut 	debug_cond(DLEVEL == 1,
1294d844c7d4SMarek Vasut 		   "%s:%d test_load_patterns(%u,ALL) => (%u == %u) => %i\n",
1295d844c7d4SMarek Vasut 		   __func__, __LINE__, group, bit_chk,
1296d844c7d4SMarek Vasut 		   param->read_correct_mask, ret);
1297d844c7d4SMarek Vasut 
1298d844c7d4SMarek Vasut 	return ret;
12993da42859SDinh Nguyen }
13003da42859SDinh Nguyen 
1301b6cb7f9eSMarek Vasut /**
1302b6cb7f9eSMarek Vasut  * rw_mgr_mem_calibrate_read_load_patterns() - Load up the patterns for read test
1303b6cb7f9eSMarek Vasut  * @rank_bgn:	Rank number
1304b6cb7f9eSMarek Vasut  * @all_ranks:	Test all ranks
1305b6cb7f9eSMarek Vasut  *
1306b6cb7f9eSMarek Vasut  * Load up the patterns we are going to use during a read test.
1307b6cb7f9eSMarek Vasut  */
1308b6cb7f9eSMarek Vasut static void rw_mgr_mem_calibrate_read_load_patterns(const u32 rank_bgn,
1309b6cb7f9eSMarek Vasut 						    const int all_ranks)
13103da42859SDinh Nguyen {
1311b6cb7f9eSMarek Vasut 	const u32 rank_end = all_ranks ?
1312*1fa0c8c4SMarek Vasut 			rwcfg->mem_number_of_ranks :
13133da42859SDinh Nguyen 			(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
1314b6cb7f9eSMarek Vasut 	u32 r;
13153da42859SDinh Nguyen 
13163da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
1317b6cb7f9eSMarek Vasut 
13183da42859SDinh Nguyen 	for (r = rank_bgn; r < rank_end; r++) {
13193da42859SDinh Nguyen 		/* set rank */
13203da42859SDinh Nguyen 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
13213da42859SDinh Nguyen 
13223da42859SDinh Nguyen 		/* Load up a constant burst */
13231273dd9eSMarek Vasut 		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0);
13243da42859SDinh Nguyen 
1325*1fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write_wait0,
13261273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add0);
13273da42859SDinh Nguyen 
13281273dd9eSMarek Vasut 		writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1);
13293da42859SDinh Nguyen 
1330*1fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write_wait1,
13311273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add1);
13323da42859SDinh Nguyen 
13331273dd9eSMarek Vasut 		writel(0x04, &sdr_rw_load_mgr_regs->load_cntr2);
13343da42859SDinh Nguyen 
1335*1fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write_wait2,
13361273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add2);
13373da42859SDinh Nguyen 
13381273dd9eSMarek Vasut 		writel(0x04, &sdr_rw_load_mgr_regs->load_cntr3);
13393da42859SDinh Nguyen 
1340*1fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write_wait3,
13411273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add3);
13423da42859SDinh Nguyen 
1343*1fa0c8c4SMarek Vasut 		writel(rwcfg->guaranteed_write, SDR_PHYGRP_RWMGRGRP_ADDRESS |
13441273dd9eSMarek Vasut 						RW_MGR_RUN_SINGLE_GROUP_OFFSET);
13453da42859SDinh Nguyen 	}
13463da42859SDinh Nguyen 
13473da42859SDinh Nguyen 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
13483da42859SDinh Nguyen }
13493da42859SDinh Nguyen 
1350783fcf59SMarek Vasut /**
1351783fcf59SMarek Vasut  * rw_mgr_mem_calibrate_read_test() - Perform READ test on single rank
1352783fcf59SMarek Vasut  * @rank_bgn:		Rank number
1353783fcf59SMarek Vasut  * @group:		Read/Write group
1354783fcf59SMarek Vasut  * @num_tries:		Number of retries of the test
1355783fcf59SMarek Vasut  * @all_correct:	All bits must be correct in the mask
1356783fcf59SMarek Vasut  * @bit_chk:		Resulting bit mask after the test
1357783fcf59SMarek Vasut  * @all_groups:		Test all R/W groups
1358783fcf59SMarek Vasut  * @all_ranks:		Test all ranks
1359783fcf59SMarek Vasut  *
1360783fcf59SMarek Vasut  * Try a read and see if it returns correct data back. The test has dummy
1361783fcf59SMarek Vasut  * reads inserted into the mix, used to align DQS enable, and performs more
1362783fcf59SMarek Vasut  * thorough checks than the regular read test.
13633da42859SDinh Nguyen  */
13643cb8bf3fSMarek Vasut static int
13653cb8bf3fSMarek Vasut rw_mgr_mem_calibrate_read_test(const u32 rank_bgn, const u32 group,
13663cb8bf3fSMarek Vasut 			       const u32 num_tries, const u32 all_correct,
13673cb8bf3fSMarek Vasut 			       u32 *bit_chk,
13683cb8bf3fSMarek Vasut 			       const u32 all_groups, const u32 all_ranks)
13693da42859SDinh Nguyen {
1370*1fa0c8c4SMarek Vasut 	const u32 rank_end = all_ranks ? rwcfg->mem_number_of_ranks :
13713da42859SDinh Nguyen 		(rank_bgn + NUM_RANKS_PER_SHADOW_REG);
13723cb8bf3fSMarek Vasut 	const u32 quick_read_mode =
13733cb8bf3fSMarek Vasut 		((STATIC_CALIB_STEPS & CALIB_SKIP_DELAY_SWEEPS) &&
13743cb8bf3fSMarek Vasut 		 ENABLE_SUPER_QUICK_CALIBRATION);
13753cb8bf3fSMarek Vasut 	u32 correct_mask_vg = param->read_correct_mask_vg;
13763cb8bf3fSMarek Vasut 	u32 tmp_bit_chk;
13773cb8bf3fSMarek Vasut 	u32 base_rw_mgr;
13783cb8bf3fSMarek Vasut 	u32 addr;
13793cb8bf3fSMarek Vasut 
13803cb8bf3fSMarek Vasut 	int r, vg, ret;
13813da42859SDinh Nguyen 
13823da42859SDinh Nguyen 	*bit_chk = param->read_correct_mask;
13833da42859SDinh Nguyen 
13843da42859SDinh Nguyen 	for (r = rank_bgn; r < rank_end; r++) {
13853da42859SDinh Nguyen 		/* set rank */
13863da42859SDinh Nguyen 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE);
13873da42859SDinh Nguyen 
13881273dd9eSMarek Vasut 		writel(0x10, &sdr_rw_load_mgr_regs->load_cntr1);
13893da42859SDinh Nguyen 
1390*1fa0c8c4SMarek Vasut 		writel(rwcfg->read_b2b_wait1,
13911273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add1);
13923da42859SDinh Nguyen 
13931273dd9eSMarek Vasut 		writel(0x10, &sdr_rw_load_mgr_regs->load_cntr2);
1394*1fa0c8c4SMarek Vasut 		writel(rwcfg->read_b2b_wait2,
13951273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add2);
13963da42859SDinh Nguyen 
13973da42859SDinh Nguyen 		if (quick_read_mode)
13981273dd9eSMarek Vasut 			/* need at least two (1+1) reads to capture failures */
13993da42859SDinh Nguyen 			writel(0x1, &sdr_rw_load_mgr_regs->load_cntr0);
14003da42859SDinh Nguyen 		else if (all_groups)
14011273dd9eSMarek Vasut 			writel(0x06, &sdr_rw_load_mgr_regs->load_cntr0);
14023da42859SDinh Nguyen 		else
14031273dd9eSMarek Vasut 			writel(0x32, &sdr_rw_load_mgr_regs->load_cntr0);
14043da42859SDinh Nguyen 
1405*1fa0c8c4SMarek Vasut 		writel(rwcfg->read_b2b,
14061273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add0);
14073da42859SDinh Nguyen 		if (all_groups)
1408*1fa0c8c4SMarek Vasut 			writel(rwcfg->mem_if_read_dqs_width *
1409*1fa0c8c4SMarek Vasut 			       rwcfg->mem_virtual_groups_per_read_dqs - 1,
14101273dd9eSMarek Vasut 			       &sdr_rw_load_mgr_regs->load_cntr3);
14113da42859SDinh Nguyen 		else
14121273dd9eSMarek Vasut 			writel(0x0, &sdr_rw_load_mgr_regs->load_cntr3);
14133da42859SDinh Nguyen 
1414*1fa0c8c4SMarek Vasut 		writel(rwcfg->read_b2b,
14151273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add3);
14163da42859SDinh Nguyen 
14173da42859SDinh Nguyen 		tmp_bit_chk = 0;
1418*1fa0c8c4SMarek Vasut 		for (vg = rwcfg->mem_virtual_groups_per_read_dqs - 1; vg >= 0;
14197ce23bb6SMarek Vasut 		     vg--) {
1420ba522c76SMarek Vasut 			/* Reset the FIFOs to get pointers to known state. */
14211273dd9eSMarek Vasut 			writel(0, &phy_mgr_cmd->fifo_reset);
14221273dd9eSMarek Vasut 			writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS |
14231273dd9eSMarek Vasut 				  RW_MGR_RESET_READ_DATAPATH_OFFSET);
14243da42859SDinh Nguyen 
1425ba522c76SMarek Vasut 			if (all_groups) {
1426ba522c76SMarek Vasut 				addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
1427ba522c76SMarek Vasut 				       RW_MGR_RUN_ALL_GROUPS_OFFSET;
1428ba522c76SMarek Vasut 			} else {
1429ba522c76SMarek Vasut 				addr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
1430ba522c76SMarek Vasut 				       RW_MGR_RUN_SINGLE_GROUP_OFFSET;
1431ba522c76SMarek Vasut 			}
1432c4815f76SMarek Vasut 
1433*1fa0c8c4SMarek Vasut 			writel(rwcfg->read_b2b, addr +
1434*1fa0c8c4SMarek Vasut 			       ((group * rwcfg->mem_virtual_groups_per_read_dqs +
14353da42859SDinh Nguyen 			       vg) << 2));
14363da42859SDinh Nguyen 
14371273dd9eSMarek Vasut 			base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS);
1438*1fa0c8c4SMarek Vasut 			tmp_bit_chk <<= rwcfg->mem_dq_per_read_dqs /
1439*1fa0c8c4SMarek Vasut 					rwcfg->mem_virtual_groups_per_read_dqs;
1440ba522c76SMarek Vasut 			tmp_bit_chk |= correct_mask_vg & ~(base_rw_mgr);
14413da42859SDinh Nguyen 		}
14427ce23bb6SMarek Vasut 
14433da42859SDinh Nguyen 		*bit_chk &= tmp_bit_chk;
14443da42859SDinh Nguyen 	}
14453da42859SDinh Nguyen 
1446c4815f76SMarek Vasut 	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET;
1447*1fa0c8c4SMarek Vasut 	writel(rwcfg->clear_dqs_enable, addr + (group << 2));
14483da42859SDinh Nguyen 
14493853d65eSMarek Vasut 	set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF);
14503853d65eSMarek Vasut 
14513da42859SDinh Nguyen 	if (all_correct) {
14523853d65eSMarek Vasut 		ret = (*bit_chk == param->read_correct_mask);
14533853d65eSMarek Vasut 		debug_cond(DLEVEL == 2,
14543853d65eSMarek Vasut 			   "%s:%d read_test(%u,ALL,%u) => (%u == %u) => %i\n",
14553853d65eSMarek Vasut 			   __func__, __LINE__, group, all_groups, *bit_chk,
14563853d65eSMarek Vasut 			   param->read_correct_mask, ret);
14573da42859SDinh Nguyen 	} else	{
14583853d65eSMarek Vasut 		ret = (*bit_chk != 0x00);
14593853d65eSMarek Vasut 		debug_cond(DLEVEL == 2,
14603853d65eSMarek Vasut 			   "%s:%d read_test(%u,ONE,%u) => (%u != %u) => %i\n",
14613853d65eSMarek Vasut 			   __func__, __LINE__, group, all_groups, *bit_chk,
14623853d65eSMarek Vasut 			   0, ret);
14633da42859SDinh Nguyen 	}
14643853d65eSMarek Vasut 
14653853d65eSMarek Vasut 	return ret;
14663da42859SDinh Nguyen }
14673da42859SDinh Nguyen 
146896df6036SMarek Vasut /**
146996df6036SMarek Vasut  * rw_mgr_mem_calibrate_read_test_all_ranks() - Perform READ test on all ranks
147096df6036SMarek Vasut  * @grp:		Read/Write group
147196df6036SMarek Vasut  * @num_tries:		Number of retries of the test
147296df6036SMarek Vasut  * @all_correct:	All bits must be correct in the mask
147396df6036SMarek Vasut  * @all_groups:		Test all R/W groups
147496df6036SMarek Vasut  *
147596df6036SMarek Vasut  * Perform a READ test across all memory ranks.
147696df6036SMarek Vasut  */
147796df6036SMarek Vasut static int
147896df6036SMarek Vasut rw_mgr_mem_calibrate_read_test_all_ranks(const u32 grp, const u32 num_tries,
147996df6036SMarek Vasut 					 const u32 all_correct,
148096df6036SMarek Vasut 					 const u32 all_groups)
14813da42859SDinh Nguyen {
148296df6036SMarek Vasut 	u32 bit_chk;
148396df6036SMarek Vasut 	return rw_mgr_mem_calibrate_read_test(0, grp, num_tries, all_correct,
148496df6036SMarek Vasut 					      &bit_chk, all_groups, 1);
14853da42859SDinh Nguyen }
14863da42859SDinh Nguyen 
148760bb8a8aSMarek Vasut /**
148860bb8a8aSMarek Vasut  * rw_mgr_incr_vfifo() - Increase VFIFO value
148960bb8a8aSMarek Vasut  * @grp:	Read/Write group
149060bb8a8aSMarek Vasut  *
149160bb8a8aSMarek Vasut  * Increase VFIFO value.
149260bb8a8aSMarek Vasut  */
14938c887b6eSMarek Vasut static void rw_mgr_incr_vfifo(const u32 grp)
14943da42859SDinh Nguyen {
14951273dd9eSMarek Vasut 	writel(grp, &phy_mgr_cmd->inc_vfifo_hard_phy);
14963da42859SDinh Nguyen }
14973da42859SDinh Nguyen 
149860bb8a8aSMarek Vasut /**
149960bb8a8aSMarek Vasut  * rw_mgr_decr_vfifo() - Decrease VFIFO value
150060bb8a8aSMarek Vasut  * @grp:	Read/Write group
150160bb8a8aSMarek Vasut  *
150260bb8a8aSMarek Vasut  * Decrease VFIFO value.
150360bb8a8aSMarek Vasut  */
15048c887b6eSMarek Vasut static void rw_mgr_decr_vfifo(const u32 grp)
15053da42859SDinh Nguyen {
150660bb8a8aSMarek Vasut 	u32 i;
15073da42859SDinh Nguyen 
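	/*
	 * The VFIFO can only be stepped forward, so stepping back by one
	 * position is done by wrapping around with VFIFO_SIZE - 1
	 * increments.
	 */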
15083da42859SDinh Nguyen 	for (i = 0; i < VFIFO_SIZE - 1; i++)
15098c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
15103da42859SDinh Nguyen }
15113da42859SDinh Nguyen 
1512d145ca9fSMarek Vasut /**
1513d145ca9fSMarek Vasut  * find_vfifo_failing_read() - Push VFIFO to get a failing read
1514d145ca9fSMarek Vasut  * @grp:	Read/Write group
1515d145ca9fSMarek Vasut  *
1516d145ca9fSMarek Vasut  * Push VFIFO until a failing read happens.
1517d145ca9fSMarek Vasut  */
1518d145ca9fSMarek Vasut static int find_vfifo_failing_read(const u32 grp)
15193da42859SDinh Nguyen {
152096df6036SMarek Vasut 	u32 v, ret, fail_cnt = 0;
15213da42859SDinh Nguyen 
15228c887b6eSMarek Vasut 	for (v = 0; v < VFIFO_SIZE; v++) {
1523d145ca9fSMarek Vasut 		debug_cond(DLEVEL == 2, "%s:%d: vfifo %u\n",
15243da42859SDinh Nguyen 			   __func__, __LINE__, v);
1525d145ca9fSMarek Vasut 		ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
152696df6036SMarek Vasut 						PASS_ONE_BIT, 0);
1527d145ca9fSMarek Vasut 		if (!ret) {
15283da42859SDinh Nguyen 			fail_cnt++;
15293da42859SDinh Nguyen 
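			/*
			 * Require two failing reads before trusting the
			 * failure, presumably to filter out a single
			 * marginal read.
			 */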
15303da42859SDinh Nguyen 			if (fail_cnt == 2)
1531d145ca9fSMarek Vasut 				return v;
15323da42859SDinh Nguyen 		}
15333da42859SDinh Nguyen 
1534d145ca9fSMarek Vasut 		/* Fiddle with FIFO. */
15358c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
15363da42859SDinh Nguyen 	}
15373da42859SDinh Nguyen 
1538d145ca9fSMarek Vasut 	/* No failing read found! Something must have gone wrong. */
1539d145ca9fSMarek Vasut 	debug_cond(DLEVEL == 2, "%s:%d: vfifo failed\n", __func__, __LINE__);
15403da42859SDinh Nguyen 	return 0;
15413da42859SDinh Nguyen }
15423da42859SDinh Nguyen 
1543192d6f9fSMarek Vasut /**
154452e8f217SMarek Vasut  * sdr_find_phase_delay() - Find DQS enable phase or delay
154552e8f217SMarek Vasut  * @working:	If 1, look for working phase/delay, if 0, look for non-working
154652e8f217SMarek Vasut  * @delay:	If 1, look for delay, if 0, look for phase
154752e8f217SMarek Vasut  * @grp:	Read/Write group
154852e8f217SMarek Vasut  * @work:	Working window position
154952e8f217SMarek Vasut  * @work_inc:	Working window increment
155052e8f217SMarek Vasut  * @pd:		DQS Phase/Delay Iterator
155152e8f217SMarek Vasut  *
155252e8f217SMarek Vasut  * Find working or non-working DQS enable phase setting.
155352e8f217SMarek Vasut  */
155452e8f217SMarek Vasut static int sdr_find_phase_delay(int working, int delay, const u32 grp,
155552e8f217SMarek Vasut 				u32 *work, const u32 work_inc, u32 *pd)
155652e8f217SMarek Vasut {
155752e8f217SMarek Vasut 	const u32 max = delay ? IO_DQS_EN_DELAY_MAX : IO_DQS_EN_PHASE_MAX;
155896df6036SMarek Vasut 	u32 ret;
155952e8f217SMarek Vasut 
156052e8f217SMarek Vasut 	for (; *pd <= max; (*pd)++) {
156152e8f217SMarek Vasut 		if (delay)
156252e8f217SMarek Vasut 			scc_mgr_set_dqs_en_delay_all_ranks(grp, *pd);
156352e8f217SMarek Vasut 		else
156452e8f217SMarek Vasut 			scc_mgr_set_dqs_en_phase_all_ranks(grp, *pd);
156552e8f217SMarek Vasut 
156652e8f217SMarek Vasut 		ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
156796df6036SMarek Vasut 					PASS_ONE_BIT, 0);
156852e8f217SMarek Vasut 		if (!working)
156952e8f217SMarek Vasut 			ret = !ret;
157052e8f217SMarek Vasut 
157152e8f217SMarek Vasut 		if (ret)
157252e8f217SMarek Vasut 			return 0;
157352e8f217SMarek Vasut 
157452e8f217SMarek Vasut 		if (work)
157552e8f217SMarek Vasut 			*work += work_inc;
157652e8f217SMarek Vasut 	}
157752e8f217SMarek Vasut 
157852e8f217SMarek Vasut 	return -EINVAL;
157952e8f217SMarek Vasut }

158052e8f217SMarek Vasut /**
1581192d6f9fSMarek Vasut  * sdr_find_phase() - Find DQS enable phase
1582192d6f9fSMarek Vasut  * @working:	If 1, look for working phase, if 0, look for non-working phase
1583192d6f9fSMarek Vasut  * @grp:	Read/Write group
1584192d6f9fSMarek Vasut  * @work:	Working window position
1585192d6f9fSMarek Vasut  * @i:		Iterator
1586192d6f9fSMarek Vasut  * @p:		DQS Phase Iterator
1587192d6f9fSMarek Vasut  *
1588192d6f9fSMarek Vasut  * Find working or non-working DQS enable phase setting.
1589192d6f9fSMarek Vasut  */
15908c887b6eSMarek Vasut static int sdr_find_phase(int working, const u32 grp, u32 *work,
159186a39dc7SMarek Vasut 			  u32 *i, u32 *p)
1592192d6f9fSMarek Vasut {
1593192d6f9fSMarek Vasut 	const u32 end = VFIFO_SIZE + (working ? 0 : 1);
159452e8f217SMarek Vasut 	int ret;
1595192d6f9fSMarek Vasut 
1596192d6f9fSMarek Vasut 	for (; *i < end; (*i)++) {
1597192d6f9fSMarek Vasut 		if (working)
1598192d6f9fSMarek Vasut 			*p = 0;
1599192d6f9fSMarek Vasut 
160052e8f217SMarek Vasut 		ret = sdr_find_phase_delay(working, 0, grp, work,
160152e8f217SMarek Vasut 					   IO_DELAY_PER_OPA_TAP, p);
160252e8f217SMarek Vasut 		if (!ret)
1603192d6f9fSMarek Vasut 			return 0;
1604192d6f9fSMarek Vasut 
1605192d6f9fSMarek Vasut 		if (*p > IO_DQS_EN_PHASE_MAX) {
1606192d6f9fSMarek Vasut 			/* Fiddle with FIFO. */
16078c887b6eSMarek Vasut 			rw_mgr_incr_vfifo(grp);
1608192d6f9fSMarek Vasut 			if (!working)
1609192d6f9fSMarek Vasut 				*p = 0;
1610192d6f9fSMarek Vasut 		}
1611192d6f9fSMarek Vasut 	}
1612192d6f9fSMarek Vasut 
1613192d6f9fSMarek Vasut 	return -EINVAL;
1614192d6f9fSMarek Vasut }
1615192d6f9fSMarek Vasut 
16164c5e584bSMarek Vasut /**
16174c5e584bSMarek Vasut  * sdr_working_phase() - Find working DQS enable phase
16184c5e584bSMarek Vasut  * @grp:	Read/Write group
16194c5e584bSMarek Vasut  * @work_bgn:	Working window start position
16204c5e584bSMarek Vasut  * @d:		dtaps output value
16214c5e584bSMarek Vasut  * @p:		DQS Phase Iterator
16224c5e584bSMarek Vasut  * @i:		Iterator
16234c5e584bSMarek Vasut  *
16244c5e584bSMarek Vasut  * Find working DQS enable phase setting.
16254c5e584bSMarek Vasut  */
16268c887b6eSMarek Vasut static int sdr_working_phase(const u32 grp, u32 *work_bgn, u32 *d,
16274c5e584bSMarek Vasut 			     u32 *p, u32 *i)
16283da42859SDinh Nguyen {
162935ee867fSMarek Vasut 	const u32 dtaps_per_ptap = IO_DELAY_PER_OPA_TAP /
163035ee867fSMarek Vasut 				   IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
1631192d6f9fSMarek Vasut 	int ret;
16323da42859SDinh Nguyen 
1633192d6f9fSMarek Vasut 	*work_bgn = 0;
1634192d6f9fSMarek Vasut 
1635192d6f9fSMarek Vasut 	for (*d = 0; *d <= dtaps_per_ptap; (*d)++) {
1636192d6f9fSMarek Vasut 		*i = 0;
1637521fe39cSMarek Vasut 		scc_mgr_set_dqs_en_delay_all_ranks(grp, *d);
16388c887b6eSMarek Vasut 		ret = sdr_find_phase(1, grp, work_bgn, i, p);
1639192d6f9fSMarek Vasut 		if (!ret)
1640192d6f9fSMarek Vasut 			return 0;
1641192d6f9fSMarek Vasut 		*work_bgn += IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
16423da42859SDinh Nguyen 	}
16433da42859SDinh Nguyen 
164438ed6922SMarek Vasut 	/* Cannot find working solution */
1645192d6f9fSMarek Vasut 	debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: no vfifo/ptap/dtap\n",
1646192d6f9fSMarek Vasut 		   __func__, __LINE__);
1647192d6f9fSMarek Vasut 	return -EINVAL;
16483da42859SDinh Nguyen }
16493da42859SDinh Nguyen 
16504c5e584bSMarek Vasut /**
16514c5e584bSMarek Vasut  * sdr_backup_phase() - Find DQS enable backup phase
16524c5e584bSMarek Vasut  * @grp:	Read/Write group
16534c5e584bSMarek Vasut  * @work_bgn:	Working window start position
16544c5e584bSMarek Vasut  * @p:		DQS Phase Iterator
16554c5e584bSMarek Vasut  *
16564c5e584bSMarek Vasut  * Find DQS enable backup phase setting.
16574c5e584bSMarek Vasut  */
16588c887b6eSMarek Vasut static void sdr_backup_phase(const u32 grp, u32 *work_bgn, u32 *p)
16593da42859SDinh Nguyen {
166096df6036SMarek Vasut 	u32 tmp_delay, d;
16614c5e584bSMarek Vasut 	int ret;
16623da42859SDinh Nguyen 
16633da42859SDinh Nguyen 	/* Special case code for backing up a phase */
16643da42859SDinh Nguyen 	if (*p == 0) {
16653da42859SDinh Nguyen 		*p = IO_DQS_EN_PHASE_MAX;
16668c887b6eSMarek Vasut 		rw_mgr_decr_vfifo(grp);
16673da42859SDinh Nguyen 	} else {
16683da42859SDinh Nguyen 		(*p)--;
16693da42859SDinh Nguyen 	}
16703da42859SDinh Nguyen 	tmp_delay = *work_bgn - IO_DELAY_PER_OPA_TAP;
1671521fe39cSMarek Vasut 	scc_mgr_set_dqs_en_phase_all_ranks(grp, *p);
16723da42859SDinh Nguyen 
167349891df6SMarek Vasut 	for (d = 0; d <= IO_DQS_EN_DELAY_MAX && tmp_delay < *work_bgn; d++) {
167449891df6SMarek Vasut 		scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
16753da42859SDinh Nguyen 
16764c5e584bSMarek Vasut 		ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
167796df6036SMarek Vasut 					PASS_ONE_BIT, 0);
16784c5e584bSMarek Vasut 		if (ret) {
16793da42859SDinh Nguyen 			*work_bgn = tmp_delay;
16803da42859SDinh Nguyen 			break;
16813da42859SDinh Nguyen 		}
168249891df6SMarek Vasut 
168349891df6SMarek Vasut 		tmp_delay += IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
16843da42859SDinh Nguyen 	}
16853da42859SDinh Nguyen 
16864c5e584bSMarek Vasut 	/* Restore VFIFO to old state before we decremented it (if needed). */
16873da42859SDinh Nguyen 	(*p)++;
16883da42859SDinh Nguyen 	if (*p > IO_DQS_EN_PHASE_MAX) {
16893da42859SDinh Nguyen 		*p = 0;
16908c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
16913da42859SDinh Nguyen 	}
16923da42859SDinh Nguyen 
1693521fe39cSMarek Vasut 	scc_mgr_set_dqs_en_delay_all_ranks(grp, 0);
16943da42859SDinh Nguyen }
16953da42859SDinh Nguyen 
16964c5e584bSMarek Vasut /**
16974c5e584bSMarek Vasut  * sdr_nonworking_phase() - Find non-working DQS enable phase
16984c5e584bSMarek Vasut  * @grp:	Read/Write group
16994c5e584bSMarek Vasut  * @work_end:	Working window end position
17004c5e584bSMarek Vasut  * @p:		DQS Phase Iterator
17014c5e584bSMarek Vasut  * @i:		Iterator
17024c5e584bSMarek Vasut  *
17034c5e584bSMarek Vasut  * Find non-working DQS enable phase setting.
17044c5e584bSMarek Vasut  */
17058c887b6eSMarek Vasut static int sdr_nonworking_phase(const u32 grp, u32 *work_end, u32 *p, u32 *i)
17063da42859SDinh Nguyen {
1707192d6f9fSMarek Vasut 	int ret;
17083da42859SDinh Nguyen 
17093da42859SDinh Nguyen 	(*p)++;
17103da42859SDinh Nguyen 	*work_end += IO_DELAY_PER_OPA_TAP;
17113da42859SDinh Nguyen 	if (*p > IO_DQS_EN_PHASE_MAX) {
1712192d6f9fSMarek Vasut 		/* Fiddle with FIFO. */
17133da42859SDinh Nguyen 		*p = 0;
17148c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
17153da42859SDinh Nguyen 	}
17163da42859SDinh Nguyen 
17178c887b6eSMarek Vasut 	ret = sdr_find_phase(0, grp, work_end, i, p);
1718192d6f9fSMarek Vasut 	if (ret) {
171938ed6922SMarek Vasut 		/* Cannot see edge of failing read. */
1720192d6f9fSMarek Vasut 		debug_cond(DLEVEL == 2, "%s:%d: end: failed\n",
1721192d6f9fSMarek Vasut 			   __func__, __LINE__);
1722192d6f9fSMarek Vasut 	}
1723192d6f9fSMarek Vasut 
1724192d6f9fSMarek Vasut 	return ret;
17253da42859SDinh Nguyen }
17263da42859SDinh Nguyen 
17270a13a0fbSMarek Vasut /**
17280a13a0fbSMarek Vasut  * sdr_find_window_center() - Find center of the working DQS window.
17290a13a0fbSMarek Vasut  * @grp:	Read/Write group
17300a13a0fbSMarek Vasut  * @work_bgn:	First working settings
17310a13a0fbSMarek Vasut  * @work_end:	Last working settings
17320a13a0fbSMarek Vasut  *
17330a13a0fbSMarek Vasut  * Find center of the working DQS enable window.
17340a13a0fbSMarek Vasut  */
17350a13a0fbSMarek Vasut static int sdr_find_window_center(const u32 grp, const u32 work_bgn,
17368c887b6eSMarek Vasut 				  const u32 work_end)
17373da42859SDinh Nguyen {
173896df6036SMarek Vasut 	u32 work_mid;
17393da42859SDinh Nguyen 	int tmp_delay = 0;
174028fd242aSMarek Vasut 	int i, p, d;
17413da42859SDinh Nguyen 
174228fd242aSMarek Vasut 	work_mid = (work_bgn + work_end) / 2;
17433da42859SDinh Nguyen 
17443da42859SDinh Nguyen 	debug_cond(DLEVEL == 2, "work_bgn=%d work_end=%d work_mid=%d\n",
174528fd242aSMarek Vasut 		   work_bgn, work_end, work_mid);
17463da42859SDinh Nguyen 	/* Get the middle delay to be less than a VFIFO delay */
1747cbb0b7e0SMarek Vasut 	tmp_delay = (IO_DQS_EN_PHASE_MAX + 1) * IO_DELAY_PER_OPA_TAP;
174828fd242aSMarek Vasut 
17493da42859SDinh Nguyen 	debug_cond(DLEVEL == 2, "vfifo ptap delay %d\n", tmp_delay);
1750cbb0b7e0SMarek Vasut 	work_mid %= tmp_delay;
175128fd242aSMarek Vasut 	debug_cond(DLEVEL == 2, "new work_mid %d\n", work_mid);
17523da42859SDinh Nguyen 
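	/*
	 * Split work_mid into coarse and fine delay: p takes as many whole
	 * phase taps (ptaps) as fit, and d covers the remainder in
	 * delay-chain taps (dtaps); both are clamped to their maxima.
	 */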
1753cbb0b7e0SMarek Vasut 	tmp_delay = rounddown(work_mid, IO_DELAY_PER_OPA_TAP);
1754cbb0b7e0SMarek Vasut 	if (tmp_delay > IO_DQS_EN_PHASE_MAX * IO_DELAY_PER_OPA_TAP)
1755cbb0b7e0SMarek Vasut 		tmp_delay = IO_DQS_EN_PHASE_MAX * IO_DELAY_PER_OPA_TAP;
1756cbb0b7e0SMarek Vasut 	p = tmp_delay / IO_DELAY_PER_OPA_TAP;
17573da42859SDinh Nguyen 
1758cbb0b7e0SMarek Vasut 	debug_cond(DLEVEL == 2, "new p %d, tmp_delay=%d\n", p, tmp_delay);
1759cbb0b7e0SMarek Vasut 
1760cbb0b7e0SMarek Vasut 	d = DIV_ROUND_UP(work_mid - tmp_delay, IO_DELAY_PER_DQS_EN_DCHAIN_TAP);
1761cbb0b7e0SMarek Vasut 	if (d > IO_DQS_EN_DELAY_MAX)
1762cbb0b7e0SMarek Vasut 		d = IO_DQS_EN_DELAY_MAX;
1763cbb0b7e0SMarek Vasut 	tmp_delay += d * IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
1764cbb0b7e0SMarek Vasut 
176528fd242aSMarek Vasut 	debug_cond(DLEVEL == 2, "new d %d, tmp_delay=%d\n", d, tmp_delay);
176628fd242aSMarek Vasut 
1767cbb0b7e0SMarek Vasut 	scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
176828fd242aSMarek Vasut 	scc_mgr_set_dqs_en_delay_all_ranks(grp, d);
17693da42859SDinh Nguyen 
17703da42859SDinh Nguyen 	/*
17713da42859SDinh Nguyen 	 * Push the VFIFO until we can successfully calibrate. We can do this
17723da42859SDinh Nguyen 	 * because the largest possible margin is 1 VFIFO cycle.
17733da42859SDinh Nguyen 	 */
17743da42859SDinh Nguyen 	for (i = 0; i < VFIFO_SIZE; i++) {
17758c887b6eSMarek Vasut 		debug_cond(DLEVEL == 2, "find_dqs_en_phase: center\n");
177628fd242aSMarek Vasut 		if (rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1,
17773da42859SDinh Nguyen 							     PASS_ONE_BIT,
177896df6036SMarek Vasut 							     0)) {
177928fd242aSMarek Vasut 			debug_cond(DLEVEL == 2,
17808c887b6eSMarek Vasut 				   "%s:%d center: found: ptap=%u dtap=%u\n",
17818c887b6eSMarek Vasut 				   __func__, __LINE__, p, d);
17820a13a0fbSMarek Vasut 			return 0;
17833da42859SDinh Nguyen 		}
17840a13a0fbSMarek Vasut 
17850a13a0fbSMarek Vasut 		/* Fiddle with FIFO. */
17868c887b6eSMarek Vasut 		rw_mgr_incr_vfifo(grp);
17870a13a0fbSMarek Vasut 	}
17880a13a0fbSMarek Vasut 
17890a13a0fbSMarek Vasut 	debug_cond(DLEVEL == 2, "%s:%d center: failed.\n",
17900a13a0fbSMarek Vasut 		   __func__, __LINE__);
17910a13a0fbSMarek Vasut 	return -EINVAL;
17923da42859SDinh Nguyen }
17933da42859SDinh Nguyen 
179433756893SMarek Vasut /**
179533756893SMarek Vasut  * rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase() - Find a good DQS enable to use
179633756893SMarek Vasut  * @grp:	Read/Write Group
179733756893SMarek Vasut  *
179833756893SMarek Vasut  * Find a good DQS enable to use.
179933756893SMarek Vasut  */
1800914546e7SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(const u32 grp)
18013da42859SDinh Nguyen {
18025735540fSMarek Vasut 	u32 d, p, i;
18035735540fSMarek Vasut 	u32 dtaps_per_ptap;
18045735540fSMarek Vasut 	u32 work_bgn, work_end;
18055735540fSMarek Vasut 	u32 found_passing_read, found_failing_read, initial_failing_dtap;
18065735540fSMarek Vasut 	int ret;
18073da42859SDinh Nguyen 
18083da42859SDinh Nguyen 	debug("%s:%d %u\n", __func__, __LINE__, grp);
18093da42859SDinh Nguyen 
18103da42859SDinh Nguyen 	reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER);
18113da42859SDinh Nguyen 
18123da42859SDinh Nguyen 	scc_mgr_set_dqs_en_delay_all_ranks(grp, 0);
18133da42859SDinh Nguyen 	scc_mgr_set_dqs_en_phase_all_ranks(grp, 0);
18143da42859SDinh Nguyen 
18152f3589caSMarek Vasut 	/* Step 0: Determine number of delay taps for each phase tap. */
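	/* I.e., one coarse phase tap expressed in fine delay-chain taps. */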
18163da42859SDinh Nguyen 	dtaps_per_ptap = IO_DELAY_PER_OPA_TAP / IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
18173da42859SDinh Nguyen 
18182f3589caSMarek Vasut 	/* Step 1: First push vfifo until we get a failing read. */
1819d145ca9fSMarek Vasut 	find_vfifo_failing_read(grp);
18203da42859SDinh Nguyen 
18212f3589caSMarek Vasut 	/* Step 2: Find first working phase, increment in ptaps. */
18223da42859SDinh Nguyen 	work_bgn = 0;
1823914546e7SMarek Vasut 	ret = sdr_working_phase(grp, &work_bgn, &d, &p, &i);
1824914546e7SMarek Vasut 	if (ret)
1825914546e7SMarek Vasut 		return ret;
18263da42859SDinh Nguyen 
18273da42859SDinh Nguyen 	work_end = work_bgn;
18283da42859SDinh Nguyen 
18293da42859SDinh Nguyen 	/*
18302f3589caSMarek Vasut 	 * If d is 0 then the working window covers a phase tap and we can
18312f3589caSMarek Vasut 	 * follow the old procedure. Otherwise, we've found the beginning
18323da42859SDinh Nguyen 	 * and we need to increment the dtaps until we find the end.
18333da42859SDinh Nguyen 	 */
18343da42859SDinh Nguyen 	if (d == 0) {
18352f3589caSMarek Vasut 		/*
18362f3589caSMarek Vasut 		 * Step 3a: If we have room, back off by one and
18372f3589caSMarek Vasut 		 *          increment in dtaps.
18382f3589caSMarek Vasut 		 */
18398c887b6eSMarek Vasut 		sdr_backup_phase(grp, &work_bgn, &p);
18403da42859SDinh Nguyen 
18412f3589caSMarek Vasut 		/*
18422f3589caSMarek Vasut 		 * Step 4a: go forward from working phase to non working
18432f3589caSMarek Vasut 		 * phase, increment in ptaps.
18442f3589caSMarek Vasut 		 */
1845914546e7SMarek Vasut 		ret = sdr_nonworking_phase(grp, &work_end, &p, &i);
1846914546e7SMarek Vasut 		if (ret)
1847914546e7SMarek Vasut 			return ret;
18483da42859SDinh Nguyen 
18492f3589caSMarek Vasut 		/* Step 5a: Back off one from last, increment in dtaps. */
18503da42859SDinh Nguyen 
18513da42859SDinh Nguyen 		/* Special case code for backing up a phase */
18523da42859SDinh Nguyen 		if (p == 0) {
18533da42859SDinh Nguyen 			p = IO_DQS_EN_PHASE_MAX;
18548c887b6eSMarek Vasut 			rw_mgr_decr_vfifo(grp);
18553da42859SDinh Nguyen 		} else {
18563da42859SDinh Nguyen 			p = p - 1;
18573da42859SDinh Nguyen 		}
18583da42859SDinh Nguyen 
18593da42859SDinh Nguyen 		work_end -= IO_DELAY_PER_OPA_TAP;
18603da42859SDinh Nguyen 		scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
18613da42859SDinh Nguyen 
18623da42859SDinh Nguyen 		d = 0;
18633da42859SDinh Nguyen 
18642f3589caSMarek Vasut 		debug_cond(DLEVEL == 2, "%s:%d p: ptap=%u\n",
18652f3589caSMarek Vasut 			   __func__, __LINE__, p);
18663da42859SDinh Nguyen 	}
18673da42859SDinh Nguyen 
18682f3589caSMarek Vasut 	/* The dtap increment to find the failing edge is done here. */
186952e8f217SMarek Vasut 	sdr_find_phase_delay(0, 1, grp, &work_end,
187052e8f217SMarek Vasut 			     IO_DELAY_PER_DQS_EN_DCHAIN_TAP, &d);
18713da42859SDinh Nguyen 
18723da42859SDinh Nguyen 	/* Go back to working dtap */
18733da42859SDinh Nguyen 	if (d != 0)
18743da42859SDinh Nguyen 		work_end -= IO_DELAY_PER_DQS_EN_DCHAIN_TAP;
18753da42859SDinh Nguyen 
18762f3589caSMarek Vasut 	debug_cond(DLEVEL == 2,
18772f3589caSMarek Vasut 		   "%s:%d p/d: ptap=%u dtap=%u end=%u\n",
18782f3589caSMarek Vasut 		   __func__, __LINE__, p, d - 1, work_end);
18793da42859SDinh Nguyen 
18803da42859SDinh Nguyen 	if (work_end < work_bgn) {
18813da42859SDinh Nguyen 		/* nil range */
18822f3589caSMarek Vasut 		debug_cond(DLEVEL == 2, "%s:%d end-2: failed\n",
18832f3589caSMarek Vasut 			   __func__, __LINE__);
1884914546e7SMarek Vasut 		return -EINVAL;
18853da42859SDinh Nguyen 	}
18863da42859SDinh Nguyen 
18872f3589caSMarek Vasut 	debug_cond(DLEVEL == 2, "%s:%d found range [%u,%u]\n",
18883da42859SDinh Nguyen 		   __func__, __LINE__, work_bgn, work_end);
18893da42859SDinh Nguyen 
18903da42859SDinh Nguyen 	/*
18912f3589caSMarek Vasut 	 * We need to calculate the number of dtaps that equal a ptap.
18922f3589caSMarek Vasut 	 * To do that we'll back up a ptap and re-find the edge of the
18932f3589caSMarek Vasut 	 * window using dtaps
18943da42859SDinh Nguyen 	 */
18952f3589caSMarek Vasut 	debug_cond(DLEVEL == 2, "%s:%d calculate dtaps_per_ptap for tracking\n",
18962f3589caSMarek Vasut 		   __func__, __LINE__);
18973da42859SDinh Nguyen 
18983da42859SDinh Nguyen 	/* Special case code for backing up a phase */
18993da42859SDinh Nguyen 	if (p == 0) {
19003da42859SDinh Nguyen 		p = IO_DQS_EN_PHASE_MAX;
19018c887b6eSMarek Vasut 		rw_mgr_decr_vfifo(grp);
19022f3589caSMarek Vasut 		debug_cond(DLEVEL == 2, "%s:%d backed up cycle/phase: p=%u\n",
19032f3589caSMarek Vasut 			   __func__, __LINE__, p);
19043da42859SDinh Nguyen 	} else {
19053da42859SDinh Nguyen 		p = p - 1;
19062f3589caSMarek Vasut 		debug_cond(DLEVEL == 2, "%s:%d backed up phase only: p=%u\n",
19072f3589caSMarek Vasut 			   __func__, __LINE__, p);
19083da42859SDinh Nguyen 	}
19093da42859SDinh Nguyen 
19103da42859SDinh Nguyen 	scc_mgr_set_dqs_en_phase_all_ranks(grp, p);
19113da42859SDinh Nguyen 
19123da42859SDinh Nguyen 	/*
19133da42859SDinh Nguyen 	 * Increase dtap until we first see a passing read (in case the
19142f3589caSMarek Vasut 	 * window is smaller than a ptap), and then a failing read to
19152f3589caSMarek Vasut 	 * mark the edge of the window again.
19163da42859SDinh Nguyen 	 */
19173da42859SDinh Nguyen 
19182f3589caSMarek Vasut 	/* Find a passing read. */
19192f3589caSMarek Vasut 	debug_cond(DLEVEL == 2, "%s:%d find passing read\n",
19203da42859SDinh Nguyen 		   __func__, __LINE__);
192152e8f217SMarek Vasut 
19223da42859SDinh Nguyen 	initial_failing_dtap = d;
19233da42859SDinh Nguyen 
192452e8f217SMarek Vasut 	found_passing_read = !sdr_find_phase_delay(1, 1, grp, NULL, 0, &d);
19253da42859SDinh Nguyen 	if (found_passing_read) {
19262f3589caSMarek Vasut 		/* Find a failing read. */
19272f3589caSMarek Vasut 		debug_cond(DLEVEL == 2, "%s:%d find failing read\n",
19282f3589caSMarek Vasut 			   __func__, __LINE__);
192952e8f217SMarek Vasut 		d++;
193052e8f217SMarek Vasut 		found_failing_read = !sdr_find_phase_delay(0, 1, grp, NULL, 0,
193152e8f217SMarek Vasut 							   &d);
19323da42859SDinh Nguyen 	} else {
19332f3589caSMarek Vasut 		debug_cond(DLEVEL == 1,
19342f3589caSMarek Vasut 			   "%s:%d failed to calculate dtaps per ptap. Fall back on static value\n",
19352f3589caSMarek Vasut 			   __func__, __LINE__);
19363da42859SDinh Nguyen 	}
19373da42859SDinh Nguyen 
19383da42859SDinh Nguyen 	/*
19393da42859SDinh Nguyen 	 * The dynamically calculated dtaps_per_ptap is only valid if we
19403da42859SDinh Nguyen 	 * found a passing/failing read. If we didn't, it means d hit the max
19413da42859SDinh Nguyen 	 * (IO_DQS_EN_DELAY_MAX). Otherwise, dtaps_per_ptap retains its
19423da42859SDinh Nguyen 	 * statically calculated value.
19433da42859SDinh Nguyen 	 */
19443da42859SDinh Nguyen 	if (found_passing_read && found_failing_read)
19453da42859SDinh Nguyen 		dtaps_per_ptap = d - initial_failing_dtap;
19463da42859SDinh Nguyen 
19471273dd9eSMarek Vasut 	writel(dtaps_per_ptap, &sdr_reg_file->dtaps_per_ptap);
19482f3589caSMarek Vasut 	debug_cond(DLEVEL == 2, "%s:%d dtaps_per_ptap=%u - %u = %u\n",
19492f3589caSMarek Vasut 		   __func__, __LINE__, d, initial_failing_dtap, dtaps_per_ptap);
19503da42859SDinh Nguyen 
19512f3589caSMarek Vasut 	/* Step 6: Find the centre of the window. */
1952914546e7SMarek Vasut 	ret = sdr_find_window_center(grp, work_bgn, work_end);
19533da42859SDinh Nguyen 
1954914546e7SMarek Vasut 	return ret;
19553da42859SDinh Nguyen }
19563da42859SDinh Nguyen 
1957c4907898SMarek Vasut /**
1958901dc36eSMarek Vasut  * search_stop_check() - Check if the detected edge is valid
1959901dc36eSMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
1960901dc36eSMarek Vasut  * @d:			DQS delay
1961901dc36eSMarek Vasut  * @rank_bgn:		Rank number
1962901dc36eSMarek Vasut  * @write_group:	Write Group
1963901dc36eSMarek Vasut  * @read_group:		Read Group
1964901dc36eSMarek Vasut  * @bit_chk:		Resulting bit mask after the test
1965901dc36eSMarek Vasut  * @sticky_bit_chk:	Resulting sticky bit mask after the test
1966901dc36eSMarek Vasut  * @use_read_test:	Perform read test
1967901dc36eSMarek Vasut  *
1968901dc36eSMarek Vasut  * Test if the found edge is valid.
1969901dc36eSMarek Vasut  */
1970901dc36eSMarek Vasut static u32 search_stop_check(const int write, const int d, const int rank_bgn,
1971901dc36eSMarek Vasut 			     const u32 write_group, const u32 read_group,
1972901dc36eSMarek Vasut 			     u32 *bit_chk, u32 *sticky_bit_chk,
1973901dc36eSMarek Vasut 			     const u32 use_read_test)
1974901dc36eSMarek Vasut {
1975*1fa0c8c4SMarek Vasut 	const u32 ratio = rwcfg->mem_if_read_dqs_width /
1976*1fa0c8c4SMarek Vasut 			  rwcfg->mem_if_write_dqs_width;
1977901dc36eSMarek Vasut 	const u32 correct_mask = write ? param->write_correct_mask :
1978901dc36eSMarek Vasut 					 param->read_correct_mask;
1979*1fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
1980*1fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
1981901dc36eSMarek Vasut 	u32 ret;
1982901dc36eSMarek Vasut 	/*
1983901dc36eSMarek Vasut 	 * Stop searching when the read test doesn't pass AND when
1984901dc36eSMarek Vasut 	 * we've seen a passing read on every bit.
1985901dc36eSMarek Vasut 	 */
1986901dc36eSMarek Vasut 	if (write) {			/* WRITE-ONLY */
1987901dc36eSMarek Vasut 		ret = !rw_mgr_mem_calibrate_write_test(rank_bgn, write_group,
1988901dc36eSMarek Vasut 							 0, PASS_ONE_BIT,
1989901dc36eSMarek Vasut 							 bit_chk, 0);
1990901dc36eSMarek Vasut 	} else if (use_read_test) {	/* READ-ONLY */
1991901dc36eSMarek Vasut 		ret = !rw_mgr_mem_calibrate_read_test(rank_bgn, read_group,
1992901dc36eSMarek Vasut 							NUM_READ_PB_TESTS,
1993901dc36eSMarek Vasut 							PASS_ONE_BIT, bit_chk,
1994901dc36eSMarek Vasut 							0, 0);
1995901dc36eSMarek Vasut 	} else {			/* READ-ONLY */
1996901dc36eSMarek Vasut 		rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 0,
1997901dc36eSMarek Vasut 						PASS_ONE_BIT, bit_chk, 0);
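		/*
		 * The write test reports the whole write group; shift the
		 * result so only the bits of the read group under test
		 * remain (ratio maps read groups onto write groups).
		 */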
1998901dc36eSMarek Vasut 		*bit_chk = *bit_chk >> (per_dqs *
1999901dc36eSMarek Vasut 			(read_group - (write_group * ratio)));
2000901dc36eSMarek Vasut 		ret = (*bit_chk == 0);
2001901dc36eSMarek Vasut 	}
2002901dc36eSMarek Vasut 	*sticky_bit_chk = *sticky_bit_chk | *bit_chk;
2003901dc36eSMarek Vasut 	ret = ret && (*sticky_bit_chk == correct_mask);
2004901dc36eSMarek Vasut 	debug_cond(DLEVEL == 2,
2005901dc36eSMarek Vasut 		   "%s:%d search_stop_check: dtap=%u => %u == %u && %u\n",
2006901dc36eSMarek Vasut 		   __func__, __LINE__, d,
2007901dc36eSMarek Vasut 		   *sticky_bit_chk, correct_mask, ret);
2008901dc36eSMarek Vasut 	return ret;
2009901dc36eSMarek Vasut }
2010901dc36eSMarek Vasut 
2011901dc36eSMarek Vasut /**
201271120773SMarek Vasut  * search_left_edge() - Find left edge of DQ/DQS working phase
201371120773SMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
201471120773SMarek Vasut  * @rank_bgn:		Rank number
201571120773SMarek Vasut  * @write_group:	Write Group
201671120773SMarek Vasut  * @read_group:		Read Group
201771120773SMarek Vasut  * @test_bgn:		Rank number to begin the test
201871120773SMarek Vasut  * @sticky_bit_chk:	Resulting sticky bit mask after the test
201971120773SMarek Vasut  * @left_edge:		Left edge of the DQ/DQS phase
202071120773SMarek Vasut  * @right_edge:		Right edge of the DQ/DQS phase
202171120773SMarek Vasut  * @use_read_test:	Perform read test
202271120773SMarek Vasut  *
202371120773SMarek Vasut  * Find left edge of DQ/DQS working phase.
202471120773SMarek Vasut  */
202571120773SMarek Vasut static void search_left_edge(const int write, const int rank_bgn,
202671120773SMarek Vasut 	const u32 write_group, const u32 read_group, const u32 test_bgn,
20270c4be198SMarek Vasut 	u32 *sticky_bit_chk,
202871120773SMarek Vasut 	int *left_edge, int *right_edge, const u32 use_read_test)
202971120773SMarek Vasut {
203071120773SMarek Vasut 	const u32 delay_max = write ? IO_IO_OUT1_DELAY_MAX : IO_IO_IN_DELAY_MAX;
203171120773SMarek Vasut 	const u32 dqs_max = write ? IO_IO_OUT1_DELAY_MAX : IO_DQS_IN_DELAY_MAX;
2032*1fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
2033*1fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
20340c4be198SMarek Vasut 	u32 stop, bit_chk;
203571120773SMarek Vasut 	int i, d;
203671120773SMarek Vasut 
203771120773SMarek Vasut 	for (d = 0; d <= dqs_max; d++) {
203871120773SMarek Vasut 		if (write)
203971120773SMarek Vasut 			scc_mgr_apply_group_dq_out1_delay(d);
204071120773SMarek Vasut 		else
204171120773SMarek Vasut 			scc_mgr_apply_group_dq_in_delay(test_bgn, d);
204271120773SMarek Vasut 
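		/* Push the new delay settings out to the hardware. */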
204371120773SMarek Vasut 		writel(0, &sdr_scc_mgr->update);
204471120773SMarek Vasut 
2045901dc36eSMarek Vasut 		stop = search_stop_check(write, d, rank_bgn, write_group,
20460c4be198SMarek Vasut 					 read_group, &bit_chk, sticky_bit_chk,
2047901dc36eSMarek Vasut 					 use_read_test);
204871120773SMarek Vasut 		if (stop == 1)
204971120773SMarek Vasut 			break;
205071120773SMarek Vasut 
205171120773SMarek Vasut 		/* stop != 1 */
205271120773SMarek Vasut 		for (i = 0; i < per_dqs; i++) {
20530c4be198SMarek Vasut 			if (bit_chk & 1) {
205471120773SMarek Vasut 				/*
205571120773SMarek Vasut 				 * Remember a passing test as
205671120773SMarek Vasut 				 * the left_edge.
205771120773SMarek Vasut 				 */
205871120773SMarek Vasut 				left_edge[i] = d;
205971120773SMarek Vasut 			} else {
206071120773SMarek Vasut 				/*
206171120773SMarek Vasut 				 * If a left edge has not been seen
206271120773SMarek Vasut 				 * yet, then a future passing test
206371120773SMarek Vasut 				 * will mark this edge as the right
206471120773SMarek Vasut 				 * edge.
206571120773SMarek Vasut 				 */
206671120773SMarek Vasut 				if (left_edge[i] == delay_max + 1)
206771120773SMarek Vasut 					right_edge[i] = -(d + 1);
206871120773SMarek Vasut 			}
20690c4be198SMarek Vasut 			bit_chk >>= 1;
207071120773SMarek Vasut 		}
207171120773SMarek Vasut 	}
207271120773SMarek Vasut 
207371120773SMarek Vasut 	/* Reset DQ delay chains to 0 */
207471120773SMarek Vasut 	if (write)
207571120773SMarek Vasut 		scc_mgr_apply_group_dq_out1_delay(0);
207671120773SMarek Vasut 	else
207771120773SMarek Vasut 		scc_mgr_apply_group_dq_in_delay(test_bgn, 0);
207871120773SMarek Vasut 
207971120773SMarek Vasut 	*sticky_bit_chk = 0;
208071120773SMarek Vasut 	for (i = per_dqs - 1; i >= 0; i--) {
208171120773SMarek Vasut 		debug_cond(DLEVEL == 2,
208271120773SMarek Vasut 			   "%s:%d vfifo_center: left_edge[%u]: %d right_edge[%u]: %d\n",
208371120773SMarek Vasut 			   __func__, __LINE__, i, left_edge[i],
208471120773SMarek Vasut 			   i, right_edge[i]);
208571120773SMarek Vasut 
208671120773SMarek Vasut 		/*
208771120773SMarek Vasut 		 * Check for cases where we haven't found the left edge,
208871120773SMarek Vasut 		 * which makes our assignment of the right edge invalid.
208971120773SMarek Vasut 		 * Reset it to the illegal value.
209071120773SMarek Vasut 		 */
209171120773SMarek Vasut 		if ((left_edge[i] == delay_max + 1) &&
209271120773SMarek Vasut 		    (right_edge[i] != delay_max + 1)) {
209371120773SMarek Vasut 			right_edge[i] = delay_max + 1;
209471120773SMarek Vasut 			debug_cond(DLEVEL == 2,
209571120773SMarek Vasut 				   "%s:%d vfifo_center: reset right_edge[%u]: %d\n",
209671120773SMarek Vasut 				   __func__, __LINE__, i, right_edge[i]);
209771120773SMarek Vasut 		}
209871120773SMarek Vasut 
209971120773SMarek Vasut 		/*
210071120773SMarek Vasut 		 * Reset sticky bit
210171120773SMarek Vasut 		 * READ: except for bits where we have seen both
210271120773SMarek Vasut 		 *       the left and right edge.
210371120773SMarek Vasut 		 * WRITE: except for bits where we have seen the
210471120773SMarek Vasut 		 *        left edge.
210571120773SMarek Vasut 		 */
210671120773SMarek Vasut 		*sticky_bit_chk <<= 1;
210771120773SMarek Vasut 		if (write) {
210871120773SMarek Vasut 			if (left_edge[i] != delay_max + 1)
210971120773SMarek Vasut 				*sticky_bit_chk |= 1;
211071120773SMarek Vasut 		} else {
211171120773SMarek Vasut 			if ((left_edge[i] != delay_max + 1) &&
211271120773SMarek Vasut 			    (right_edge[i] != delay_max + 1))
211371120773SMarek Vasut 				*sticky_bit_chk |= 1;
211471120773SMarek Vasut 		}
211571120773SMarek Vasut 	}
211671120773SMarek Vasut 
211871120773SMarek Vasut }
211971120773SMarek Vasut 
212071120773SMarek Vasut /**
2121c4907898SMarek Vasut  * search_right_edge() - Find right edge of DQ/DQS working phase
2122c4907898SMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
2123c4907898SMarek Vasut  * @rank_bgn:		Rank number
2124c4907898SMarek Vasut  * @write_group:	Write Group
2125c4907898SMarek Vasut  * @read_group:		Read Group
2126c4907898SMarek Vasut  * @start_dqs:		DQS start phase
2127c4907898SMarek Vasut  * @start_dqs_en:	DQS enable start phase
2128c4907898SMarek Vasut  * @sticky_bit_chk:	Resulting sticky bit mask after the test
2129c4907898SMarek Vasut  * @left_edge:		Left edge of the DQ/DQS phase
2130c4907898SMarek Vasut  * @right_edge:		Right edge of the DQ/DQS phase
2131c4907898SMarek Vasut  * @use_read_test:	Perform read test
2132c4907898SMarek Vasut  *
2133c4907898SMarek Vasut  * Find right edge of DQ/DQS working phase.
2134c4907898SMarek Vasut  */
2135c4907898SMarek Vasut static int search_right_edge(const int write, const int rank_bgn,
2136c4907898SMarek Vasut 	const u32 write_group, const u32 read_group,
2137c4907898SMarek Vasut 	const int start_dqs, const int start_dqs_en,
21380c4be198SMarek Vasut 	u32 *sticky_bit_chk,
2139c4907898SMarek Vasut 	int *left_edge, int *right_edge, const u32 use_read_test)
2140c4907898SMarek Vasut {
2141c4907898SMarek Vasut 	const u32 delay_max = write ? IO_IO_OUT1_DELAY_MAX : IO_IO_IN_DELAY_MAX;
2142c4907898SMarek Vasut 	const u32 dqs_max = write ? IO_IO_OUT1_DELAY_MAX : IO_DQS_IN_DELAY_MAX;
2143*1fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
2144*1fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
21450c4be198SMarek Vasut 	u32 stop, bit_chk;
2146c4907898SMarek Vasut 	int i, d;
2147c4907898SMarek Vasut 
2148c4907898SMarek Vasut 	for (d = 0; d <= dqs_max - start_dqs; d++) {
2149c4907898SMarek Vasut 		if (write) {	/* WRITE-ONLY */
2150c4907898SMarek Vasut 			scc_mgr_apply_group_dqs_io_and_oct_out1(write_group,
2151c4907898SMarek Vasut 								d + start_dqs);
2152c4907898SMarek Vasut 		} else {	/* READ-ONLY */
2153c4907898SMarek Vasut 			scc_mgr_set_dqs_bus_in_delay(read_group, d + start_dqs);
2154c4907898SMarek Vasut 			if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) {
2155c4907898SMarek Vasut 				uint32_t delay = d + start_dqs_en;
2156c4907898SMarek Vasut 				if (delay > IO_DQS_EN_DELAY_MAX)
2157c4907898SMarek Vasut 					delay = IO_DQS_EN_DELAY_MAX;
2158c4907898SMarek Vasut 				scc_mgr_set_dqs_en_delay(read_group, delay);
2159c4907898SMarek Vasut 			}
2160c4907898SMarek Vasut 			scc_mgr_load_dqs(read_group);
2161c4907898SMarek Vasut 		}
2162c4907898SMarek Vasut 
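		/* Apply the update bit to latch the new delay settings. */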
2163c4907898SMarek Vasut 		writel(0, &sdr_scc_mgr->update);
2164c4907898SMarek Vasut 
2165901dc36eSMarek Vasut 		stop = search_stop_check(write, d, rank_bgn, write_group,
21660c4be198SMarek Vasut 					 read_group, &bit_chk, sticky_bit_chk,
2167901dc36eSMarek Vasut 					 use_read_test);
2168c4907898SMarek Vasut 		if (stop == 1) {
2169c4907898SMarek Vasut 			if (write && (d == 0)) {	/* WRITE-ONLY */
2170*1fa0c8c4SMarek Vasut 				for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++) {
2171c4907898SMarek Vasut 					/*
2172c4907898SMarek Vasut 					 * d = 0 failed, but it passed when
2173c4907898SMarek Vasut 					 * testing the left edge, so it must be
2174c4907898SMarek Vasut 					 * marginal, set it to -1
2175c4907898SMarek Vasut 					 */
2176c4907898SMarek Vasut 					if (right_edge[i] == delay_max + 1 &&
2177c4907898SMarek Vasut 					    left_edge[i] != delay_max + 1)
2178c4907898SMarek Vasut 						right_edge[i] = -1;
2179c4907898SMarek Vasut 				}
2180c4907898SMarek Vasut 			}
2181c4907898SMarek Vasut 			break;
2182c4907898SMarek Vasut 		}
2183c4907898SMarek Vasut 
2184c4907898SMarek Vasut 		/* stop != 1 */
2185c4907898SMarek Vasut 		for (i = 0; i < per_dqs; i++) {
21860c4be198SMarek Vasut 			if (bit_chk & 1) {
2187c4907898SMarek Vasut 				/*
2188c4907898SMarek Vasut 				 * Remember a passing test as
2189c4907898SMarek Vasut 				 * the right_edge.
2190c4907898SMarek Vasut 				 */
2191c4907898SMarek Vasut 				right_edge[i] = d;
2192c4907898SMarek Vasut 			} else {
2193c4907898SMarek Vasut 				if (d != 0) {
2194c4907898SMarek Vasut 					/*
2195c4907898SMarek Vasut 					 * If a right edge has not
2196c4907898SMarek Vasut 					 * been seen yet, then a future
2197c4907898SMarek Vasut 					 * passing test will mark this
2198c4907898SMarek Vasut 					 * edge as the left edge.
2199c4907898SMarek Vasut 					 */
2200c4907898SMarek Vasut 					if (right_edge[i] == delay_max + 1)
2201c4907898SMarek Vasut 						left_edge[i] = -(d + 1);
2202c4907898SMarek Vasut 				} else {
2203c4907898SMarek Vasut 					/*
2204c4907898SMarek Vasut 					 * d = 0 failed, but it passed
2205c4907898SMarek Vasut 					 * when testing the left edge,
2206c4907898SMarek Vasut 					 * so it must be marginal, set
2207c4907898SMarek Vasut 					 * it to -1
2208c4907898SMarek Vasut 					 */
2209c4907898SMarek Vasut 					if (right_edge[i] == delay_max + 1 &&
2210c4907898SMarek Vasut 					    left_edge[i] != delay_max + 1)
2211c4907898SMarek Vasut 						right_edge[i] = -1;
2212c4907898SMarek Vasut 					/*
2213c4907898SMarek Vasut 					 * If a right edge has not been
2214c4907898SMarek Vasut 					 * seen yet, then a future
2215c4907898SMarek Vasut 					 * passing test will mark this
2216c4907898SMarek Vasut 					 * edge as the left edge.
2217c4907898SMarek Vasut 					 */
2218c4907898SMarek Vasut 					else if (right_edge[i] == delay_max + 1)
2219c4907898SMarek Vasut 						left_edge[i] = -(d + 1);
2220c4907898SMarek Vasut 				}
2221c4907898SMarek Vasut 			}
2222c4907898SMarek Vasut 
2223c4907898SMarek Vasut 			debug_cond(DLEVEL == 2, "%s:%d center[r,d=%u]: ",
2224c4907898SMarek Vasut 				   __func__, __LINE__, d);
2225c4907898SMarek Vasut 			debug_cond(DLEVEL == 2,
2226c4907898SMarek Vasut 				   "bit_chk_test=%i left_edge[%u]: %d ",
22270c4be198SMarek Vasut 				   bit_chk & 1, i, left_edge[i]);
2228c4907898SMarek Vasut 			debug_cond(DLEVEL == 2, "right_edge[%u]: %d\n", i,
2229c4907898SMarek Vasut 				   right_edge[i]);
22300c4be198SMarek Vasut 			bit_chk >>= 1;
2231c4907898SMarek Vasut 		}
2232c4907898SMarek Vasut 	}
2233c4907898SMarek Vasut 
2234c4907898SMarek Vasut 	/* Check that all bits have a window */
2235c4907898SMarek Vasut 	for (i = 0; i < per_dqs; i++) {
2236c4907898SMarek Vasut 		debug_cond(DLEVEL == 2,
2237c4907898SMarek Vasut 			   "%s:%d write_center: left_edge[%u]: %d right_edge[%u]: %d",
2238c4907898SMarek Vasut 			   __func__, __LINE__, i, left_edge[i],
2239c4907898SMarek Vasut 			   i, right_edge[i]);
2240c4907898SMarek Vasut 		if ((left_edge[i] == dqs_max + 1) ||
2241c4907898SMarek Vasut 		    (right_edge[i] == dqs_max + 1))
2242c4907898SMarek Vasut 			return i + 1;	/* FIXME: If we fail, retval > 0 */
2243c4907898SMarek Vasut 	}
2244c4907898SMarek Vasut 
2245c4907898SMarek Vasut 	return 0;
2246c4907898SMarek Vasut }
2247c4907898SMarek Vasut 
2248afb3eb84SMarek Vasut /**
2249afb3eb84SMarek Vasut  * get_window_mid_index() - Find the best middle setting of DQ/DQS phase
2250afb3eb84SMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
2251afb3eb84SMarek Vasut  * @left_edge:		Left edge of the DQ/DQS phase
2252afb3eb84SMarek Vasut  * @right_edge:		Right edge of the DQ/DQS phase
2253afb3eb84SMarek Vasut  * @mid_min:		Best DQ/DQS phase middle setting
2254afb3eb84SMarek Vasut  *
2255afb3eb84SMarek Vasut  * Find index and value of the middle of the DQ/DQS working phase.
2256afb3eb84SMarek Vasut  */
2257afb3eb84SMarek Vasut static int get_window_mid_index(const int write, int *left_edge,
2258afb3eb84SMarek Vasut 				int *right_edge, int *mid_min)
2259afb3eb84SMarek Vasut {
2260*1fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
2261*1fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
2262afb3eb84SMarek Vasut 	int i, mid, min_index;
2263afb3eb84SMarek Vasut 
2264afb3eb84SMarek Vasut 	/* Find middle of window for each DQ bit */
2265afb3eb84SMarek Vasut 	*mid_min = left_edge[0] - right_edge[0];
2266afb3eb84SMarek Vasut 	min_index = 0;
2267afb3eb84SMarek Vasut 	for (i = 1; i < per_dqs; i++) {
2268afb3eb84SMarek Vasut 		mid = left_edge[i] - right_edge[i];
2269afb3eb84SMarek Vasut 		if (mid < *mid_min) {
2270afb3eb84SMarek Vasut 			*mid_min = mid;
2271afb3eb84SMarek Vasut 			min_index = i;
2272afb3eb84SMarek Vasut 		}
2273afb3eb84SMarek Vasut 	}
2274afb3eb84SMarek Vasut 
2275afb3eb84SMarek Vasut 	/*
2276afb3eb84SMarek Vasut 	 * -mid_min/2 represents the amount that we need to move DQS.
2277afb3eb84SMarek Vasut 	 * If mid_min is odd and positive we'll need to add one to make
2278afb3eb84SMarek Vasut 	 * sure the rounding in further calculations is correct (always
2279afb3eb84SMarek Vasut 	 * bias to the right), so just add 1 for all positive values.
2280afb3eb84SMarek Vasut 	 */
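	/* E.g. mid_min = 5 becomes (5 + 1) / 2 = 3 rather than 5 / 2 = 2. */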
2281afb3eb84SMarek Vasut 	if (*mid_min > 0)
2282afb3eb84SMarek Vasut 		(*mid_min)++;
2283afb3eb84SMarek Vasut 	*mid_min = *mid_min / 2;
2284afb3eb84SMarek Vasut 
2285afb3eb84SMarek Vasut 	debug_cond(DLEVEL == 1, "%s:%d vfifo_center: *mid_min=%d (index=%u)\n",
2286afb3eb84SMarek Vasut 		   __func__, __LINE__, *mid_min, min_index);
2287afb3eb84SMarek Vasut 	return min_index;
2288afb3eb84SMarek Vasut }
2289afb3eb84SMarek Vasut 
2290ffb8b66eSMarek Vasut /**
2291ffb8b66eSMarek Vasut  * center_dq_windows() - Center the DQ/DQS windows
2292ffb8b66eSMarek Vasut  * @write:		Perform read (Stage 2) or write (Stage 3) calibration
2293ffb8b66eSMarek Vasut  * @left_edge:		Left edge of the DQ/DQS phase
2294ffb8b66eSMarek Vasut  * @right_edge:		Right edge of the DQ/DQS phase
2295ffb8b66eSMarek Vasut  * @mid_min:		Adjusted DQ/DQS phase middle setting
2296ffb8b66eSMarek Vasut  * @orig_mid_min:	Original DQ/DQS phase middle setting
2297ffb8b66eSMarek Vasut  * @min_index:		DQ/DQS phase middle setting index
2298ffb8b66eSMarek Vasut  * @test_bgn:		Rank at which the test begins
2299ffb8b66eSMarek Vasut  * @dq_margin:		Amount of shift for the DQ
2300ffb8b66eSMarek Vasut  * @dqs_margin:		Amount of shift for the DQS
2301ffb8b66eSMarek Vasut  *
2302ffb8b66eSMarek Vasut  * Align the DQ/DQS windows in each group.
2303ffb8b66eSMarek Vasut  */
2304ffb8b66eSMarek Vasut static void center_dq_windows(const int write, int *left_edge, int *right_edge,
2305ffb8b66eSMarek Vasut 			      const int mid_min, const int orig_mid_min,
2306ffb8b66eSMarek Vasut 			      const int min_index, const int test_bgn,
2307ffb8b66eSMarek Vasut 			      int *dq_margin, int *dqs_margin)
2308ffb8b66eSMarek Vasut {
2309ffb8b66eSMarek Vasut 	const u32 delay_max = write ? IO_IO_OUT1_DELAY_MAX : IO_IO_IN_DELAY_MAX;
2310*1fa0c8c4SMarek Vasut 	const u32 per_dqs = write ? rwcfg->mem_dq_per_write_dqs :
2311*1fa0c8c4SMarek Vasut 				    rwcfg->mem_dq_per_read_dqs;
2312ffb8b66eSMarek Vasut 	const u32 delay_off = write ? SCC_MGR_IO_OUT1_DELAY_OFFSET :
2313ffb8b66eSMarek Vasut 				      SCC_MGR_IO_IN_DELAY_OFFSET;
2314ffb8b66eSMarek Vasut 	const u32 addr = SDR_PHYGRP_SCCGRP_ADDRESS | delay_off;
2315ffb8b66eSMarek Vasut 
2316ffb8b66eSMarek Vasut 	u32 temp_dq_io_delay1, temp_dq_io_delay2;
2317ffb8b66eSMarek Vasut 	int shift_dq, i, p;
2318ffb8b66eSMarek Vasut 
2319ffb8b66eSMarek Vasut 	/* Initialize data for export structures */
2320ffb8b66eSMarek Vasut 	*dqs_margin = delay_max + 1;
2321ffb8b66eSMarek Vasut 	*dq_margin  = delay_max + 1;
2322ffb8b66eSMarek Vasut 
2323ffb8b66eSMarek Vasut 	/* add delay to bring centre of all DQ windows to the same "level" */
2324ffb8b66eSMarek Vasut 	for (i = 0, p = test_bgn; i < per_dqs; i++, p++) {
2325ffb8b66eSMarek Vasut 		/* Use values before divide by 2 to reduce round off error */
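		/*
		 * shift_dq is the extra delay needed to line this bit's
		 * window centre up with the centre of the reference
		 * (min_index) window, adjusted for any clamping of the
		 * DQS move (orig_mid_min - mid_min).
		 */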
2326ffb8b66eSMarek Vasut 		shift_dq = (left_edge[i] - right_edge[i] -
2327ffb8b66eSMarek Vasut 			(left_edge[min_index] - right_edge[min_index]))/2  +
2328ffb8b66eSMarek Vasut 			(orig_mid_min - mid_min);
2329ffb8b66eSMarek Vasut 
2330ffb8b66eSMarek Vasut 		debug_cond(DLEVEL == 2,
2331ffb8b66eSMarek Vasut 			   "vfifo_center: before: shift_dq[%u]=%d\n",
2332ffb8b66eSMarek Vasut 			   i, shift_dq);
2333ffb8b66eSMarek Vasut 
2334ffb8b66eSMarek Vasut 		temp_dq_io_delay1 = readl(addr + (p << 2));
2335ffb8b66eSMarek Vasut 		temp_dq_io_delay2 = readl(addr + (i << 2));
2336ffb8b66eSMarek Vasut 
2337ffb8b66eSMarek Vasut 		if (shift_dq + temp_dq_io_delay1 > delay_max)
2338ffb8b66eSMarek Vasut 			shift_dq = delay_max - temp_dq_io_delay2;
2339ffb8b66eSMarek Vasut 		else if (shift_dq + temp_dq_io_delay1 < 0)
2340ffb8b66eSMarek Vasut 			shift_dq = -temp_dq_io_delay1;
2341ffb8b66eSMarek Vasut 
2342ffb8b66eSMarek Vasut 		debug_cond(DLEVEL == 2,
2343ffb8b66eSMarek Vasut 			   "vfifo_center: after: shift_dq[%u]=%d\n",
2344ffb8b66eSMarek Vasut 			   i, shift_dq);
2345ffb8b66eSMarek Vasut 
2346ffb8b66eSMarek Vasut 		if (write)
2347ffb8b66eSMarek Vasut 			scc_mgr_set_dq_out1_delay(i, temp_dq_io_delay1 + shift_dq);
2348ffb8b66eSMarek Vasut 		else
2349ffb8b66eSMarek Vasut 			scc_mgr_set_dq_in_delay(p, temp_dq_io_delay1 + shift_dq);
2350ffb8b66eSMarek Vasut 
2351ffb8b66eSMarek Vasut 		scc_mgr_load_dq(p);
2352ffb8b66eSMarek Vasut 
2353ffb8b66eSMarek Vasut 		debug_cond(DLEVEL == 2,
2354ffb8b66eSMarek Vasut 			   "vfifo_center: margin[%u]=[%d,%d]\n", i,
2355ffb8b66eSMarek Vasut 			   left_edge[i] - shift_dq + (-mid_min),
2356ffb8b66eSMarek Vasut 			   right_edge[i] + shift_dq - (-mid_min));
2357ffb8b66eSMarek Vasut 
2358ffb8b66eSMarek Vasut 		/* To determine values for export structures */
2359ffb8b66eSMarek Vasut 		if (left_edge[i] - shift_dq + (-mid_min) < *dq_margin)
2360ffb8b66eSMarek Vasut 			*dq_margin = left_edge[i] - shift_dq + (-mid_min);
2361ffb8b66eSMarek Vasut 
2362ffb8b66eSMarek Vasut 		if (right_edge[i] + shift_dq - (-mid_min) < *dqs_margin)
2363ffb8b66eSMarek Vasut 			*dqs_margin = right_edge[i] + shift_dq - (-mid_min);
2364ffb8b66eSMarek Vasut 	}
2366ffb8b66eSMarek Vasut }
2367ffb8b66eSMarek Vasut 
2368ac63b9adSMarek Vasut /**
2369ac63b9adSMarek Vasut  * rw_mgr_mem_calibrate_vfifo_center() - Per-bit deskew DQ and centering
2370ac63b9adSMarek Vasut  * @rank_bgn:		Rank number
2371ac63b9adSMarek Vasut  * @rw_group:		Read/Write Group
2372ac63b9adSMarek Vasut  * @test_bgn:		Rank at which the test begins
2373ac63b9adSMarek Vasut  * @use_read_test:	Perform a read test
2374ac63b9adSMarek Vasut  * @update_fom:		Update FOM
2375ac63b9adSMarek Vasut  *
2376ac63b9adSMarek Vasut  * Per-bit deskew DQ and centering.
2377ac63b9adSMarek Vasut  */
23780113c3e1SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_center(const u32 rank_bgn,
23790113c3e1SMarek Vasut 			const u32 rw_group, const u32 test_bgn,
23800113c3e1SMarek Vasut 			const int use_read_test, const int update_fom)
23813da42859SDinh Nguyen {
23825d6db444SMarek Vasut 	const u32 addr =
23835d6db444SMarek Vasut 		SDR_PHYGRP_SCCGRP_ADDRESS + SCC_MGR_DQS_IN_DELAY_OFFSET +
23840113c3e1SMarek Vasut 		(rw_group << 2);
23853da42859SDinh Nguyen 	/*
23863da42859SDinh Nguyen 	 * Store these as signed since there are comparisons with
23873da42859SDinh Nguyen 	 * signed numbers.
23883da42859SDinh Nguyen 	 */
23893da42859SDinh Nguyen 	uint32_t sticky_bit_chk;
2390*1fa0c8c4SMarek Vasut 	int32_t left_edge[rwcfg->mem_dq_per_read_dqs];
2391*1fa0c8c4SMarek Vasut 	int32_t right_edge[rwcfg->mem_dq_per_read_dqs];
23923da42859SDinh Nguyen 	int32_t orig_mid_min, mid_min;
23935d6db444SMarek Vasut 	int32_t new_dqs, start_dqs, start_dqs_en, final_dqs_en;
23943da42859SDinh Nguyen 	int32_t dq_margin, dqs_margin;
23955d6db444SMarek Vasut 	int i, min_index;
2396c4907898SMarek Vasut 	int ret;
23973da42859SDinh Nguyen 
23980113c3e1SMarek Vasut 	debug("%s:%d: %u %u", __func__, __LINE__, rw_group, test_bgn);
23993da42859SDinh Nguyen 
24005d6db444SMarek Vasut 	start_dqs = readl(addr);
24013da42859SDinh Nguyen 	if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS)
24025d6db444SMarek Vasut 		start_dqs_en = readl(addr - IO_DQS_EN_DELAY_OFFSET);
24033da42859SDinh Nguyen 
24043da42859SDinh Nguyen 	/* set the left and right edge of each bit to an illegal value */
24053da42859SDinh Nguyen 	/* use (IO_IO_IN_DELAY_MAX + 1) as an illegal value */
24063da42859SDinh Nguyen 	sticky_bit_chk = 0;
2407*1fa0c8c4SMarek Vasut 	for (i = 0; i < rwcfg->mem_dq_per_read_dqs; i++) {
24083da42859SDinh Nguyen 		left_edge[i]  = IO_IO_IN_DELAY_MAX + 1;
24093da42859SDinh Nguyen 		right_edge[i] = IO_IO_IN_DELAY_MAX + 1;
24103da42859SDinh Nguyen 	}
24113da42859SDinh Nguyen 
24123da42859SDinh Nguyen 	/* Search for the left edge of the window for each bit */
24130113c3e1SMarek Vasut 	search_left_edge(0, rank_bgn, rw_group, rw_group, test_bgn,
24140c4be198SMarek Vasut 			 &sticky_bit_chk,
241571120773SMarek Vasut 			 left_edge, right_edge, use_read_test);
24163da42859SDinh Nguyen 
24183da42859SDinh Nguyen 	/* Search for the right edge of the window for each bit */
24190113c3e1SMarek Vasut 	ret = search_right_edge(0, rank_bgn, rw_group, rw_group,
2420c4907898SMarek Vasut 				start_dqs, start_dqs_en,
24210c4be198SMarek Vasut 				&sticky_bit_chk,
2422c4907898SMarek Vasut 				left_edge, right_edge, use_read_test);
2423c4907898SMarek Vasut 	if (ret) {
24243da42859SDinh Nguyen 		/*
24253da42859SDinh Nguyen 		 * Restore delay chain settings before letting the loop
24263da42859SDinh Nguyen 		 * in rw_mgr_mem_calibrate_vfifo to retry different
24273da42859SDinh Nguyen 		 * dqs/ck relationships.
24283da42859SDinh Nguyen 		 */
24290113c3e1SMarek Vasut 		scc_mgr_set_dqs_bus_in_delay(rw_group, start_dqs);
2430c4907898SMarek Vasut 		if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS)
24310113c3e1SMarek Vasut 			scc_mgr_set_dqs_en_delay(rw_group, start_dqs_en);
2432c4907898SMarek Vasut 
24330113c3e1SMarek Vasut 		scc_mgr_load_dqs(rw_group);
24341273dd9eSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
24353da42859SDinh Nguyen 
2436c4907898SMarek Vasut 		debug_cond(DLEVEL == 1,
2437c4907898SMarek Vasut 			   "%s:%d vfifo_center: failed to find edge [%u]: %d %d",
2438c4907898SMarek Vasut 			   __func__, __LINE__, ret - 1, left_edge[ret - 1], right_edge[ret - 1]);
24393da42859SDinh Nguyen 		if (use_read_test) {
24400113c3e1SMarek Vasut 			set_failing_group_stage(rw_group *
2441*1fa0c8c4SMarek Vasut 				rwcfg->mem_dq_per_read_dqs + ret - 1,
24423da42859SDinh Nguyen 				CAL_STAGE_VFIFO,
24433da42859SDinh Nguyen 				CAL_SUBSTAGE_VFIFO_CENTER);
24443da42859SDinh Nguyen 		} else {
24450113c3e1SMarek Vasut 			set_failing_group_stage(rw_group *
2446*1fa0c8c4SMarek Vasut 				rwcfg->mem_dq_per_read_dqs + ret - 1,
24473da42859SDinh Nguyen 				CAL_STAGE_VFIFO_AFTER_WRITES,
24483da42859SDinh Nguyen 				CAL_SUBSTAGE_VFIFO_CENTER);
24493da42859SDinh Nguyen 		}
245098668247SMarek Vasut 		return -EIO;
24513da42859SDinh Nguyen 	}
24523da42859SDinh Nguyen 
2453afb3eb84SMarek Vasut 	min_index = get_window_mid_index(0, left_edge, right_edge, &mid_min);
24543da42859SDinh Nguyen 
24553da42859SDinh Nguyen 	/* Determine the amount we can change DQS (which is -mid_min) */
24563da42859SDinh Nguyen 	orig_mid_min = mid_min;
24573da42859SDinh Nguyen 	new_dqs = start_dqs - mid_min;
24583da42859SDinh Nguyen 	if (new_dqs > IO_DQS_IN_DELAY_MAX)
24593da42859SDinh Nguyen 		new_dqs = IO_DQS_IN_DELAY_MAX;
24603da42859SDinh Nguyen 	else if (new_dqs < 0)
24613da42859SDinh Nguyen 		new_dqs = 0;
24623da42859SDinh Nguyen 
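	/*
	 * Worked example: start_dqs = 10 with mid_min = 13 would yield
	 * new_dqs = -3, clamped to 0 above; mid_min is then recomputed
	 * as 10 so the DQ-side compensation stays consistent.
	 */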
24633da42859SDinh Nguyen 	mid_min = start_dqs - new_dqs;
24643da42859SDinh Nguyen 	debug_cond(DLEVEL == 1, "vfifo_center: new mid_min=%d new_dqs=%d\n",
24653da42859SDinh Nguyen 		   mid_min, new_dqs);
24663da42859SDinh Nguyen 
24673da42859SDinh Nguyen 	if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) {
24683da42859SDinh Nguyen 		if (start_dqs_en - mid_min > IO_DQS_EN_DELAY_MAX)
24693da42859SDinh Nguyen 			mid_min += start_dqs_en - mid_min - IO_DQS_EN_DELAY_MAX;
24703da42859SDinh Nguyen 		else if (start_dqs_en - mid_min < 0)
24713da42859SDinh Nguyen 			mid_min += start_dqs_en - mid_min;
24723da42859SDinh Nguyen 	}
24733da42859SDinh Nguyen 	new_dqs = start_dqs - mid_min;
24743da42859SDinh Nguyen 
2475f0712c35SMarek Vasut 	debug_cond(DLEVEL == 1,
2476f0712c35SMarek Vasut 		   "vfifo_center: start_dqs=%d start_dqs_en=%d new_dqs=%d mid_min=%d\n",
2477f0712c35SMarek Vasut 		   start_dqs,
24783da42859SDinh Nguyen 		   IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS ? start_dqs_en : -1,
24793da42859SDinh Nguyen 		   new_dqs, mid_min);
24803da42859SDinh Nguyen 
2481ffb8b66eSMarek Vasut 	/* Add delay to bring centre of all DQ windows to the same "level". */
2482ffb8b66eSMarek Vasut 	center_dq_windows(0, left_edge, right_edge, mid_min, orig_mid_min,
2483ffb8b66eSMarek Vasut 			  min_index, test_bgn, &dq_margin, &dqs_margin);
24843da42859SDinh Nguyen 
24853da42859SDinh Nguyen 	/* Move DQS-en */
24863da42859SDinh Nguyen 	if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) {
24875d6db444SMarek Vasut 		final_dqs_en = start_dqs_en - mid_min;
24880113c3e1SMarek Vasut 		scc_mgr_set_dqs_en_delay(rw_group, final_dqs_en);
24890113c3e1SMarek Vasut 		scc_mgr_load_dqs(rw_group);
24903da42859SDinh Nguyen 	}
24913da42859SDinh Nguyen 
24923da42859SDinh Nguyen 	/* Move DQS */
24930113c3e1SMarek Vasut 	scc_mgr_set_dqs_bus_in_delay(rw_group, new_dqs);
24940113c3e1SMarek Vasut 	scc_mgr_load_dqs(rw_group);
2495f0712c35SMarek Vasut 	debug_cond(DLEVEL == 2,
2496f0712c35SMarek Vasut 		   "%s:%d vfifo_center: dq_margin=%d dqs_margin=%d",
2497f0712c35SMarek Vasut 		   __func__, __LINE__, dq_margin, dqs_margin);
24983da42859SDinh Nguyen 
24993da42859SDinh Nguyen 	/*
25003da42859SDinh Nguyen 	 * Do not remove this line as it makes sure all of our decisions
25013da42859SDinh Nguyen 	 * have been applied. Apply the update bit.
25023da42859SDinh Nguyen 	 */
25031273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
25043da42859SDinh Nguyen 
250598668247SMarek Vasut 	if ((dq_margin < 0) || (dqs_margin < 0))
250698668247SMarek Vasut 		return -EINVAL;
250798668247SMarek Vasut 
250898668247SMarek Vasut 	return 0;
25093da42859SDinh Nguyen }
25103da42859SDinh Nguyen 
2511bce24efaSMarek Vasut /**
251204372fb8SMarek Vasut  * rw_mgr_mem_calibrate_guaranteed_write() - Perform guaranteed write into the device
251304372fb8SMarek Vasut  * @rw_group:	Read/Write Group
251404372fb8SMarek Vasut  * @phase:	DQ/DQS phase
251504372fb8SMarek Vasut  *
251604372fb8SMarek Vasut  * Because initially no communication can be reliably performed with the memory
251704372fb8SMarek Vasut  * device, the sequencer uses a guaranteed write mechanism to write data into
251804372fb8SMarek Vasut  * the memory device.
251904372fb8SMarek Vasut  */
252004372fb8SMarek Vasut static int rw_mgr_mem_calibrate_guaranteed_write(const u32 rw_group,
252104372fb8SMarek Vasut 						 const u32 phase)
252204372fb8SMarek Vasut {
252304372fb8SMarek Vasut 	int ret;
252404372fb8SMarek Vasut 
252504372fb8SMarek Vasut 	/* Set a particular DQ/DQS phase. */
252604372fb8SMarek Vasut 	scc_mgr_set_dqdqs_output_phase_all_ranks(rw_group, phase);
252704372fb8SMarek Vasut 
252804372fb8SMarek Vasut 	debug_cond(DLEVEL == 1, "%s:%d guaranteed write: g=%u p=%u\n",
252904372fb8SMarek Vasut 		   __func__, __LINE__, rw_group, phase);
253004372fb8SMarek Vasut 
253104372fb8SMarek Vasut 	/*
253204372fb8SMarek Vasut 	 * Altera EMI_RM 2015.05.04 :: Figure 1-25
253304372fb8SMarek Vasut 	 * Load up the patterns used by read calibration using the
253404372fb8SMarek Vasut 	 * current DQDQS phase.
253504372fb8SMarek Vasut 	 */
253604372fb8SMarek Vasut 	rw_mgr_mem_calibrate_read_load_patterns(0, 1);
253704372fb8SMarek Vasut 
253804372fb8SMarek Vasut 	if (gbl->phy_debug_mode_flags & PHY_DEBUG_DISABLE_GUARANTEED_READ)
253904372fb8SMarek Vasut 		return 0;
254004372fb8SMarek Vasut 
254104372fb8SMarek Vasut 	/*
254204372fb8SMarek Vasut 	 * Altera EMI_RM 2015.05.04 :: Figure 1-26
254304372fb8SMarek Vasut 	 * Back-to-Back reads of the patterns used for calibration.
254404372fb8SMarek Vasut 	 */
2545d844c7d4SMarek Vasut 	ret = rw_mgr_mem_calibrate_read_test_patterns(0, rw_group, 1);
2546d844c7d4SMarek Vasut 	if (ret)
254704372fb8SMarek Vasut 		debug_cond(DLEVEL == 1,
254804372fb8SMarek Vasut 			   "%s:%d Guaranteed read test failed: g=%u p=%u\n",
254904372fb8SMarek Vasut 			   __func__, __LINE__, rw_group, phase);
2550d844c7d4SMarek Vasut 	return ret;
255104372fb8SMarek Vasut }
255204372fb8SMarek Vasut 
255304372fb8SMarek Vasut /**
2554f09da11eSMarek Vasut  * rw_mgr_mem_calibrate_dqs_enable_calibration() - DQS Enable Calibration
2555f09da11eSMarek Vasut  * @rw_group:	Read/Write Group
2556f09da11eSMarek Vasut  * @test_bgn:	Rank at which the test begins
2557f09da11eSMarek Vasut  *
2558f09da11eSMarek Vasut  * DQS enable calibration ensures reliable capture of the DQ signal without
2559f09da11eSMarek Vasut  * glitches on the DQS line.
2560f09da11eSMarek Vasut  */
2561f09da11eSMarek Vasut static int rw_mgr_mem_calibrate_dqs_enable_calibration(const u32 rw_group,
2562f09da11eSMarek Vasut 						       const u32 test_bgn)
2563f09da11eSMarek Vasut {
2564f09da11eSMarek Vasut 	/*
2565f09da11eSMarek Vasut 	 * Altera EMI_RM 2015.05.04 :: Figure 1-27
2566f09da11eSMarek Vasut 	 * DQS and DQS Enable Signal Relationships.
2567f09da11eSMarek Vasut 	 */
256828ea827dSMarek Vasut 
256928ea827dSMarek Vasut 	/* We start at zero, so we have one less dq to divide among. */
257028ea827dSMarek Vasut 	const u32 delay_step = IO_IO_IN_DELAY_MAX /
2571*1fa0c8c4SMarek Vasut 			       (rwcfg->mem_dq_per_read_dqs - 1);
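	/*
	 * E.g. (values are device-dependent) IO_IO_IN_DELAY_MAX = 31 with
	 * 8 DQ bits per DQS gives delay_step = 31 / 7 = 4 taps per pin.
	 */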
2572914546e7SMarek Vasut 	int ret;
257328ea827dSMarek Vasut 	u32 i, p, d, r;
257428ea827dSMarek Vasut 
257528ea827dSMarek Vasut 	debug("%s:%d (%u,%u)\n", __func__, __LINE__, rw_group, test_bgn);
257628ea827dSMarek Vasut 
257728ea827dSMarek Vasut 	/* Try different dq_in_delays since the DQ path is shorter than DQS. */
2578*1fa0c8c4SMarek Vasut 	for (r = 0; r < rwcfg->mem_number_of_ranks;
257928ea827dSMarek Vasut 	     r += NUM_RANKS_PER_SHADOW_REG) {
258028ea827dSMarek Vasut 		for (i = 0, p = test_bgn, d = 0;
2581*1fa0c8c4SMarek Vasut 		     i < rwcfg->mem_dq_per_read_dqs;
258228ea827dSMarek Vasut 		     i++, p++, d += delay_step) {
258328ea827dSMarek Vasut 			debug_cond(DLEVEL == 1,
258428ea827dSMarek Vasut 				   "%s:%d: g=%u r=%u i=%u p=%u d=%u\n",
258528ea827dSMarek Vasut 				   __func__, __LINE__, rw_group, r, i, p, d);
258628ea827dSMarek Vasut 
258728ea827dSMarek Vasut 			scc_mgr_set_dq_in_delay(p, d);
258828ea827dSMarek Vasut 			scc_mgr_load_dq(p);
258928ea827dSMarek Vasut 		}
259028ea827dSMarek Vasut 
259128ea827dSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
259228ea827dSMarek Vasut 	}
259328ea827dSMarek Vasut 
259428ea827dSMarek Vasut 	/*
259528ea827dSMarek Vasut 	 * Try rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase across different
259628ea827dSMarek Vasut 	 * dq_in_delay values
259728ea827dSMarek Vasut 	 */
2598914546e7SMarek Vasut 	ret = rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(rw_group);
259928ea827dSMarek Vasut 
260028ea827dSMarek Vasut 	debug_cond(DLEVEL == 1,
260128ea827dSMarek Vasut 		   "%s:%d: g=%u found=%u; Resetting delay chain to zero\n",
2602914546e7SMarek Vasut 		   __func__, __LINE__, rw_group, !ret);
260328ea827dSMarek Vasut 
2604*1fa0c8c4SMarek Vasut 	for (r = 0; r < rwcfg->mem_number_of_ranks;
260528ea827dSMarek Vasut 	     r += NUM_RANKS_PER_SHADOW_REG) {
260628ea827dSMarek Vasut 		scc_mgr_apply_group_dq_in_delay(test_bgn, 0);
260728ea827dSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
260828ea827dSMarek Vasut 	}
260928ea827dSMarek Vasut 
2610914546e7SMarek Vasut 	return ret;
2611f09da11eSMarek Vasut }
2612f09da11eSMarek Vasut 
2613f09da11eSMarek Vasut /**
261416cfc4b9SMarek Vasut  * rw_mgr_mem_calibrate_dq_dqs_centering() - Centering DQ/DQS
261516cfc4b9SMarek Vasut  * @rw_group:		Read/Write Group
261616cfc4b9SMarek Vasut  * @test_bgn:		Rank at which the test begins
261716cfc4b9SMarek Vasut  * @use_read_test:	Perform a read test
261816cfc4b9SMarek Vasut  * @update_fom:		Update FOM
261916cfc4b9SMarek Vasut  *
262016cfc4b9SMarek Vasut  * The centering DQ/DQS stage attempts to align DQ and DQS signals on reads
262116cfc4b9SMarek Vasut  * within a group.
262216cfc4b9SMarek Vasut  */
262316cfc4b9SMarek Vasut static int
262416cfc4b9SMarek Vasut rw_mgr_mem_calibrate_dq_dqs_centering(const u32 rw_group, const u32 test_bgn,
262516cfc4b9SMarek Vasut 				      const int use_read_test,
262616cfc4b9SMarek Vasut 				      const int update_fom)
262816cfc4b9SMarek Vasut {
262916cfc4b9SMarek Vasut 	int ret, grp_calibrated;
263016cfc4b9SMarek Vasut 	u32 rank_bgn, sr;
263116cfc4b9SMarek Vasut 
263216cfc4b9SMarek Vasut 	/*
263316cfc4b9SMarek Vasut 	 * Altera EMI_RM 2015.05.04 :: Figure 1-28
263416cfc4b9SMarek Vasut 	 * Read per-bit deskew can be done on a per shadow register basis.
263516cfc4b9SMarek Vasut 	 */
263616cfc4b9SMarek Vasut 	grp_calibrated = 1;
263716cfc4b9SMarek Vasut 	for (rank_bgn = 0, sr = 0;
2638*1fa0c8c4SMarek Vasut 	     rank_bgn < rwcfg->mem_number_of_ranks;
263916cfc4b9SMarek Vasut 	     rank_bgn += NUM_RANKS_PER_SHADOW_REG, sr++) {
264016cfc4b9SMarek Vasut 		ret = rw_mgr_mem_calibrate_vfifo_center(rank_bgn, rw_group,
26410113c3e1SMarek Vasut 							test_bgn,
264216cfc4b9SMarek Vasut 							use_read_test,
264316cfc4b9SMarek Vasut 							update_fom);
264498668247SMarek Vasut 		if (!ret)
264516cfc4b9SMarek Vasut 			continue;
264616cfc4b9SMarek Vasut 
264716cfc4b9SMarek Vasut 		grp_calibrated = 0;
264816cfc4b9SMarek Vasut 	}
264916cfc4b9SMarek Vasut 
265016cfc4b9SMarek Vasut 	if (!grp_calibrated)
265116cfc4b9SMarek Vasut 		return -EIO;
265216cfc4b9SMarek Vasut 
265316cfc4b9SMarek Vasut 	return 0;
265416cfc4b9SMarek Vasut }
265516cfc4b9SMarek Vasut 
265616cfc4b9SMarek Vasut /**
2657bce24efaSMarek Vasut  * rw_mgr_mem_calibrate_vfifo() - Calibrate the read valid prediction FIFO
2658bce24efaSMarek Vasut  * @rw_group:		Read/Write Group
2659bce24efaSMarek Vasut  * @test_bgn:		Rank at which the test begins
26603da42859SDinh Nguyen  *
2661bce24efaSMarek Vasut  * Stage 1: Calibrate the read valid prediction FIFO.
2662bce24efaSMarek Vasut  *
2663bce24efaSMarek Vasut  * This function implements UniPHY calibration Stage 1, as explained in
2664bce24efaSMarek Vasut  * detail in Altera EMI_RM 2015.05.04, "UniPHY Calibration Stages".
2665bce24efaSMarek Vasut  *
2666bce24efaSMarek Vasut  * - read valid prediction will consist of finding:
2667bce24efaSMarek Vasut  *   - DQS enable phase and DQS enable delay (DQS Enable Calibration)
2668bce24efaSMarek Vasut  *   - DQS input phase and DQS input delay (DQ/DQS Centering)
26693da42859SDinh Nguyen  *   - we also do a per-bit deskew on the DQ lines.
26703da42859SDinh Nguyen  */
2671c336ca3eSMarek Vasut static int rw_mgr_mem_calibrate_vfifo(const u32 rw_group, const u32 test_bgn)
26723da42859SDinh Nguyen {
267316cfc4b9SMarek Vasut 	uint32_t p, d;
26743da42859SDinh Nguyen 	uint32_t dtaps_per_ptap;
26753da42859SDinh Nguyen 	uint32_t failed_substage;
26763da42859SDinh Nguyen 
267704372fb8SMarek Vasut 	int ret;
267804372fb8SMarek Vasut 
2679c336ca3eSMarek Vasut 	debug("%s:%d: %u %u\n", __func__, __LINE__, rw_group, test_bgn);
26803da42859SDinh Nguyen 
26817c0a9df3SMarek Vasut 	/* Update info for sims */
26827c0a9df3SMarek Vasut 	reg_file_set_group(rw_group);
26833da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_VFIFO);
26847c0a9df3SMarek Vasut 	reg_file_set_sub_stage(CAL_SUBSTAGE_GUARANTEED_READ);
26853da42859SDinh Nguyen 
26867c0a9df3SMarek Vasut 	failed_substage = CAL_SUBSTAGE_GUARANTEED_READ;
26877c0a9df3SMarek Vasut 
26887c0a9df3SMarek Vasut 	/* USER Determine number of delay taps for each phase tap. */
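	/*
	 * E.g. (hypothetical values) IO_DELAY_PER_OPA_TAP = 416 ps and
	 * IO_DELAY_PER_DQS_EN_DCHAIN_TAP = 25 ps give
	 * DIV_ROUND_UP(416, 25) - 1 = 17 - 1 = 16 dtaps per ptap.
	 */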
2689d32badbdSMarek Vasut 	dtaps_per_ptap = DIV_ROUND_UP(IO_DELAY_PER_OPA_TAP,
2690d32badbdSMarek Vasut 				      IO_DELAY_PER_DQS_EN_DCHAIN_TAP) - 1;
26913da42859SDinh Nguyen 
2692fe2d0a2dSMarek Vasut 	for (d = 0; d <= dtaps_per_ptap; d += 2) {
26933da42859SDinh Nguyen 		/*
26943da42859SDinh Nguyen 		 * In RLDRAMX we may be messing with the delay of pins in
2695c336ca3eSMarek Vasut 		 * the same write rw_group but outside of the current read
2696c336ca3eSMarek Vasut 		 * rw_group; that's OK because we haven't calibrated the
2697ac70d2f3SMarek Vasut 		 * output side yet.
26983da42859SDinh Nguyen 		 */
26993da42859SDinh Nguyen 		if (d > 0) {
2700f51a7d35SMarek Vasut 			scc_mgr_apply_group_all_out_delay_add_all_ranks(
2701c336ca3eSMarek Vasut 								rw_group, d);
27023da42859SDinh Nguyen 		}
27033da42859SDinh Nguyen 
2704fe2d0a2dSMarek Vasut 		for (p = 0; p <= IO_DQDQS_OUT_PHASE_MAX; p++) {
270504372fb8SMarek Vasut 			/* 1) Guaranteed Write */
270604372fb8SMarek Vasut 			ret = rw_mgr_mem_calibrate_guaranteed_write(rw_group, p);
270704372fb8SMarek Vasut 			if (ret)
27083da42859SDinh Nguyen 				break;
27093da42859SDinh Nguyen 
2710f09da11eSMarek Vasut 			/* 2) DQS Enable Calibration */
2711f09da11eSMarek Vasut 			ret = rw_mgr_mem_calibrate_dqs_enable_calibration(rw_group,
2712f09da11eSMarek Vasut 									  test_bgn);
2713f09da11eSMarek Vasut 			if (ret) {
2714fe2d0a2dSMarek Vasut 				failed_substage = CAL_SUBSTAGE_DQS_EN_PHASE;
2715fe2d0a2dSMarek Vasut 				continue;
2716fe2d0a2dSMarek Vasut 			}
2717fe2d0a2dSMarek Vasut 
271816cfc4b9SMarek Vasut 			/* 3) Centering DQ/DQS */
27193da42859SDinh Nguyen 			/*
272016cfc4b9SMarek Vasut 			 * If doing read after write calibration, do not update
272116cfc4b9SMarek Vasut 			 * FOM now. Do it then.
27223da42859SDinh Nguyen 			 */
272316cfc4b9SMarek Vasut 			ret = rw_mgr_mem_calibrate_dq_dqs_centering(rw_group,
272416cfc4b9SMarek Vasut 								test_bgn, 1, 0);
272516cfc4b9SMarek Vasut 			if (ret) {
2726d2ea4950SMarek Vasut 				failed_substage = CAL_SUBSTAGE_VFIFO_CENTER;
272716cfc4b9SMarek Vasut 				continue;
27283da42859SDinh Nguyen 			}
2729fe2d0a2dSMarek Vasut 
273016cfc4b9SMarek Vasut 			/* All done. */
2731fe2d0a2dSMarek Vasut 			goto cal_done_ok;
27323da42859SDinh Nguyen 		}
27333da42859SDinh Nguyen 	}
27343da42859SDinh Nguyen 
2735fe2d0a2dSMarek Vasut 	/* Calibration Stage 1 failed. */
2736c336ca3eSMarek Vasut 	set_failing_group_stage(rw_group, CAL_STAGE_VFIFO, failed_substage);
27373da42859SDinh Nguyen 	return 0;
27383da42859SDinh Nguyen 
2739fe2d0a2dSMarek Vasut 	/* Calibration Stage 1 completed OK. */
2740fe2d0a2dSMarek Vasut cal_done_ok:
27413da42859SDinh Nguyen 	/*
27423da42859SDinh Nguyen 	 * Reset the delay chains back to zero if they have moved > 1
27433da42859SDinh Nguyen 	 * (check for > 1 because loop will increase d even when pass in
27443da42859SDinh Nguyen 	 * first case).
27453da42859SDinh Nguyen 	 */
27463da42859SDinh Nguyen 	if (d > 2)
2747c336ca3eSMarek Vasut 		scc_mgr_zero_group(rw_group, 1);
27483da42859SDinh Nguyen 
27493da42859SDinh Nguyen 	return 1;
27503da42859SDinh Nguyen }
27513da42859SDinh Nguyen 
275278cdd7d0SMarek Vasut /**
275378cdd7d0SMarek Vasut  * rw_mgr_mem_calibrate_vfifo_end() - DQ/DQS Centering.
275478cdd7d0SMarek Vasut  * @rw_group:		Read/Write Group
275578cdd7d0SMarek Vasut  * @test_bgn:		Rank at which the test begins
275678cdd7d0SMarek Vasut  *
275778cdd7d0SMarek Vasut  * Stage 3: DQ/DQS Centering.
275878cdd7d0SMarek Vasut  *
275978cdd7d0SMarek Vasut  * This function implements UniPHY calibration Stage 3, as explained in
276078cdd7d0SMarek Vasut  * detail in Altera EMI_RM 2015.05.04, "UniPHY Calibration Stages".
276178cdd7d0SMarek Vasut  */
276278cdd7d0SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_end(const u32 rw_group,
276378cdd7d0SMarek Vasut 					  const u32 test_bgn)
27643da42859SDinh Nguyen {
276578cdd7d0SMarek Vasut 	int ret;
27663da42859SDinh Nguyen 
276778cdd7d0SMarek Vasut 	debug("%s:%d %u %u", __func__, __LINE__, rw_group, test_bgn);
27683da42859SDinh Nguyen 
276978cdd7d0SMarek Vasut 	/* Update info for sims. */
277078cdd7d0SMarek Vasut 	reg_file_set_group(rw_group);
27713da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_VFIFO_AFTER_WRITES);
27723da42859SDinh Nguyen 	reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER);
27733da42859SDinh Nguyen 
277478cdd7d0SMarek Vasut 	ret = rw_mgr_mem_calibrate_dq_dqs_centering(rw_group, test_bgn, 0, 1);
277578cdd7d0SMarek Vasut 	if (ret)
277678cdd7d0SMarek Vasut 		set_failing_group_stage(rw_group,
27773da42859SDinh Nguyen 					CAL_STAGE_VFIFO_AFTER_WRITES,
27783da42859SDinh Nguyen 					CAL_SUBSTAGE_VFIFO_CENTER);
277978cdd7d0SMarek Vasut 	return ret;
27803da42859SDinh Nguyen }
27813da42859SDinh Nguyen 
2782c984278aSMarek Vasut /**
2783c984278aSMarek Vasut  * rw_mgr_mem_calibrate_lfifo() - Minimize latency
2784c984278aSMarek Vasut  *
2785c984278aSMarek Vasut  * Stage 4: Minimize latency.
2786c984278aSMarek Vasut  *
2787c984278aSMarek Vasut  * This function implements UniPHY calibration Stage 4, as explained in
2788c984278aSMarek Vasut  * detail in Altera EMI_RM 2015.05.04, "UniPHY Calibration Stages".
2789c984278aSMarek Vasut  * Calibrate LFIFO to find smallest read latency.
2790c984278aSMarek Vasut  */
27913da42859SDinh Nguyen static uint32_t rw_mgr_mem_calibrate_lfifo(void)
27923da42859SDinh Nguyen {
2793c984278aSMarek Vasut 	int found_one = 0;
27943da42859SDinh Nguyen 
27953da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
27963da42859SDinh Nguyen 
2797c984278aSMarek Vasut 	/* Update info for sims. */
27983da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_LFIFO);
27993da42859SDinh Nguyen 	reg_file_set_sub_stage(CAL_SUBSTAGE_READ_LATENCY);
28003da42859SDinh Nguyen 
28013da42859SDinh Nguyen 	/* Load up the patterns used by read calibration for all ranks */
28023da42859SDinh Nguyen 	rw_mgr_mem_calibrate_read_load_patterns(0, 1);
28033da42859SDinh Nguyen 
28043da42859SDinh Nguyen 	do {
28051273dd9eSMarek Vasut 		writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
28063da42859SDinh Nguyen 		debug_cond(DLEVEL == 2, "%s:%d lfifo: read_lat=%u",
28073da42859SDinh Nguyen 			   __func__, __LINE__, gbl->curr_read_lat);
28083da42859SDinh Nguyen 
2809c984278aSMarek Vasut 		if (!rw_mgr_mem_calibrate_read_test_all_ranks(0, NUM_READ_TESTS,
2810c984278aSMarek Vasut 							      PASS_ALL_BITS, 1))
28113da42859SDinh Nguyen 			break;
28123da42859SDinh Nguyen 
28133da42859SDinh Nguyen 		found_one = 1;
2814c984278aSMarek Vasut 		/*
2815c984278aSMarek Vasut 		 * Reduce read latency and see if things are
2816c984278aSMarek Vasut 		 * working correctly.
2817c984278aSMarek Vasut 		 */
28183da42859SDinh Nguyen 		gbl->curr_read_lat--;
28193da42859SDinh Nguyen 	} while (gbl->curr_read_lat > 0);
28203da42859SDinh Nguyen 
2821c984278aSMarek Vasut 	/* Reset the fifos to get pointers to known state. */
28221273dd9eSMarek Vasut 	writel(0, &phy_mgr_cmd->fifo_reset);
28233da42859SDinh Nguyen 
28243da42859SDinh Nguyen 	if (found_one) {
2825c984278aSMarek Vasut 		/* Add a fudge factor to the read latency that was determined */
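		/* This settles one AFI clock above the smallest passing latency. */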
28263da42859SDinh Nguyen 		gbl->curr_read_lat += 2;
28271273dd9eSMarek Vasut 		writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
2828c984278aSMarek Vasut 		debug_cond(DLEVEL == 2,
2829c984278aSMarek Vasut 			   "%s:%d lfifo: success: using read_lat=%u\n",
2830c984278aSMarek Vasut 			   __func__, __LINE__, gbl->curr_read_lat);
28313da42859SDinh Nguyen 	} else {
28323da42859SDinh Nguyen 		set_failing_group_stage(0xff, CAL_STAGE_LFIFO,
28333da42859SDinh Nguyen 					CAL_SUBSTAGE_READ_LATENCY);
28343da42859SDinh Nguyen 
2835c984278aSMarek Vasut 		debug_cond(DLEVEL == 2,
2836c984278aSMarek Vasut 			   "%s:%d lfifo: failed at initial read_lat=%u\n",
2837c984278aSMarek Vasut 			   __func__, __LINE__, gbl->curr_read_lat);
28383da42859SDinh Nguyen 	}
2839c984278aSMarek Vasut 
2840c984278aSMarek Vasut 	return found_one;
28413da42859SDinh Nguyen }
28423da42859SDinh Nguyen 
2843c8570afaSMarek Vasut /**
2844c8570afaSMarek Vasut  * search_window() - Search for (a part of) the window with DM/DQS shift
2845c8570afaSMarek Vasut  * @search_dm:		If 1, search for the DM shift, if 0, search for DQS shift
2846c8570afaSMarek Vasut  * @rank_bgn:		Rank number
2847c8570afaSMarek Vasut  * @write_group:	Write Group
2848c8570afaSMarek Vasut  * @bgn_curr:		Current window begin
2849c8570afaSMarek Vasut  * @end_curr:		Current window end
2850c8570afaSMarek Vasut  * @bgn_best:		Current best window begin
2851c8570afaSMarek Vasut  * @end_best:		Current best window end
2852c8570afaSMarek Vasut  * @win_best:		Size of the best window
2853c8570afaSMarek Vasut  * @new_dqs:		New DQS value (only applicable if search_dm = 0).
2854c8570afaSMarek Vasut  *
2855c8570afaSMarek Vasut  * Search for (a part of) the window with DM/DQS shift.
2856c8570afaSMarek Vasut  */
2857c8570afaSMarek Vasut static void search_window(const int search_dm,
2858c8570afaSMarek Vasut 			  const u32 rank_bgn, const u32 write_group,
2859c8570afaSMarek Vasut 			  int *bgn_curr, int *end_curr, int *bgn_best,
2860c8570afaSMarek Vasut 			  int *end_best, int *win_best, int new_dqs)
2861c8570afaSMarek Vasut {
2862c8570afaSMarek Vasut 	u32 bit_chk;
2863c8570afaSMarek Vasut 	const int max = IO_IO_OUT1_DELAY_MAX - new_dqs;
2864c8570afaSMarek Vasut 	int d, di;
2865c8570afaSMarek Vasut 
2866c8570afaSMarek Vasut 	/* Search for (a part of) the window with DM/DQS shift. */
2867c8570afaSMarek Vasut 	for (di = max; di >= 0; di -= DELTA_D) {
2868c8570afaSMarek Vasut 		if (search_dm) {
2869c8570afaSMarek Vasut 			d = di;
2870c8570afaSMarek Vasut 			scc_mgr_apply_group_dm_out1_delay(d);
2871c8570afaSMarek Vasut 		} else {
2872c8570afaSMarek Vasut 			/* For DQS, we go from 0...max */
2873c8570afaSMarek Vasut 			d = max - di;
2874c8570afaSMarek Vasut 			/*
2875c8570afaSMarek Vasut 			 * Note: This only shifts DQS, so we may be limiting
2876c8570afaSMarek Vasut 			 * ourselves to the width of DQ unnecessarily.
2877c8570afaSMarek Vasut 			 */
2878c8570afaSMarek Vasut 			scc_mgr_apply_group_dqs_io_and_oct_out1(write_group,
2879c8570afaSMarek Vasut 								d + new_dqs);
2880c8570afaSMarek Vasut 		}
2881c8570afaSMarek Vasut 
2882c8570afaSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
2883c8570afaSMarek Vasut 
2884c8570afaSMarek Vasut 		if (rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 1,
2885c8570afaSMarek Vasut 						    PASS_ALL_BITS, &bit_chk,
2886c8570afaSMarek Vasut 						    0)) {
2887c8570afaSMarek Vasut 			/* Set current end of the window. */
2888c8570afaSMarek Vasut 			*end_curr = search_dm ? -d : d;
2889c8570afaSMarek Vasut 
2890c8570afaSMarek Vasut 			/*
2891c8570afaSMarek Vasut 			 * If a starting edge of our window has not been seen
2892c8570afaSMarek Vasut 			 * this is our current start of the DM window.
2893c8570afaSMarek Vasut 			 * yet, this is our current start of the window.
2894c8570afaSMarek Vasut 			if (*bgn_curr == IO_IO_OUT1_DELAY_MAX + 1)
2895c8570afaSMarek Vasut 				*bgn_curr = search_dm ? -d : d;
2896c8570afaSMarek Vasut 
2897c8570afaSMarek Vasut 			/*
2898c8570afaSMarek Vasut 			 * If current window is bigger than best seen.
2899c8570afaSMarek Vasut 			 * If the current window is bigger than the best seen
2900c8570afaSMarek Vasut 			 * so far, record it as the new best.
2901c8570afaSMarek Vasut 			if ((*end_curr - *bgn_curr + 1) > *win_best) {
2902c8570afaSMarek Vasut 				*win_best = *end_curr - *bgn_curr + 1;
2903c8570afaSMarek Vasut 				*bgn_best = *bgn_curr;
2904c8570afaSMarek Vasut 				*end_best = *end_curr;
2905c8570afaSMarek Vasut 			}
2906c8570afaSMarek Vasut 		} else {
2907c8570afaSMarek Vasut 			/* We just saw a failing test. Reset temp edge. */
2908c8570afaSMarek Vasut 			*bgn_curr = IO_IO_OUT1_DELAY_MAX + 1;
2909c8570afaSMarek Vasut 			*end_curr = IO_IO_OUT1_DELAY_MAX + 1;
2910c8570afaSMarek Vasut 
2911c8570afaSMarek Vasut 			/* Early exit is only applicable to DQS. */
2912c8570afaSMarek Vasut 			if (search_dm)
2913c8570afaSMarek Vasut 				continue;
2914c8570afaSMarek Vasut 
2915c8570afaSMarek Vasut 			/*
2916c8570afaSMarek Vasut 			 * Early exit optimization: if the remaining delay
2917c8570afaSMarek Vasut 			 * chain space is smaller than the largest window
2918c8570afaSMarek Vasut 			 * already seen, we can exit.
2919c8570afaSMarek Vasut 			 */
2920c8570afaSMarek Vasut 			if (*win_best - 1 > IO_IO_OUT1_DELAY_MAX - new_dqs - d)
2921c8570afaSMarek Vasut 				break;
2922c8570afaSMarek Vasut 		}
2923c8570afaSMarek Vasut 	}
2924c8570afaSMarek Vasut }
2925c8570afaSMarek Vasut 
29263da42859SDinh Nguyen /*
2927a386a50eSMarek Vasut  * rw_mgr_mem_calibrate_writes_center() - Center all windows
2928a386a50eSMarek Vasut  * @rank_bgn:		Rank number
2929a386a50eSMarek Vasut  * @write_group:	Write group
2930a386a50eSMarek Vasut  * @test_bgn:		Rank at which the test begins
2931a386a50eSMarek Vasut  *
2932a386a50eSMarek Vasut  * Center all windows. Do per-bit-deskew to possibly increase size of
29333da42859SDinh Nguyen  * certain windows.
29343da42859SDinh Nguyen  */
29353b44f55cSMarek Vasut static int
29363b44f55cSMarek Vasut rw_mgr_mem_calibrate_writes_center(const u32 rank_bgn, const u32 write_group,
29373b44f55cSMarek Vasut 				   const u32 test_bgn)
29383da42859SDinh Nguyen {
2939c8570afaSMarek Vasut 	int i;
29403b44f55cSMarek Vasut 	u32 sticky_bit_chk;
29413b44f55cSMarek Vasut 	u32 min_index;
2942*1fa0c8c4SMarek Vasut 	int left_edge[rwcfg->mem_dq_per_write_dqs];
2943*1fa0c8c4SMarek Vasut 	int right_edge[rwcfg->mem_dq_per_write_dqs];
29443b44f55cSMarek Vasut 	int mid;
29453b44f55cSMarek Vasut 	int mid_min, orig_mid_min;
29463b44f55cSMarek Vasut 	int new_dqs, start_dqs;
29473b44f55cSMarek Vasut 	int dq_margin, dqs_margin, dm_margin;
29483b44f55cSMarek Vasut 	int bgn_curr = IO_IO_OUT1_DELAY_MAX + 1;
29493b44f55cSMarek Vasut 	int end_curr = IO_IO_OUT1_DELAY_MAX + 1;
29503b44f55cSMarek Vasut 	int bgn_best = IO_IO_OUT1_DELAY_MAX + 1;
29513b44f55cSMarek Vasut 	int end_best = IO_IO_OUT1_DELAY_MAX + 1;
29523b44f55cSMarek Vasut 	int win_best = 0;
29533da42859SDinh Nguyen 
2954c4907898SMarek Vasut 	int ret;
2955c4907898SMarek Vasut 
29563da42859SDinh Nguyen 	debug("%s:%d %u %u", __func__, __LINE__, write_group, test_bgn);
29573da42859SDinh Nguyen 
29583da42859SDinh Nguyen 	dm_margin = 0;
29593da42859SDinh Nguyen 
2960c6540872SMarek Vasut 	start_dqs = readl((SDR_PHYGRP_SCCGRP_ADDRESS |
2961c6540872SMarek Vasut 			  SCC_MGR_IO_OUT1_DELAY_OFFSET) +
2962*1fa0c8c4SMarek Vasut 			  (rwcfg->mem_dq_per_write_dqs << 2));
29633da42859SDinh Nguyen 
29643b44f55cSMarek Vasut 	/* Per-bit deskew. */
29653da42859SDinh Nguyen 
29663da42859SDinh Nguyen 	/*
29673b44f55cSMarek Vasut 	 * Set the left and right edge of each bit to an illegal value.
29683b44f55cSMarek Vasut 	 * Use (IO_IO_OUT1_DELAY_MAX + 1) as an illegal value.
29693da42859SDinh Nguyen 	 */
29703da42859SDinh Nguyen 	sticky_bit_chk = 0;
2971*1fa0c8c4SMarek Vasut 	for (i = 0; i < rwcfg->mem_dq_per_write_dqs; i++) {
29723da42859SDinh Nguyen 		left_edge[i]  = IO_IO_OUT1_DELAY_MAX + 1;
29733da42859SDinh Nguyen 		right_edge[i] = IO_IO_OUT1_DELAY_MAX + 1;
29743da42859SDinh Nguyen 	}
29753da42859SDinh Nguyen 
29763b44f55cSMarek Vasut 	/* Search for the left edge of the window for each bit. */
297771120773SMarek Vasut 	search_left_edge(1, rank_bgn, write_group, 0, test_bgn,
29780c4be198SMarek Vasut 			 &sticky_bit_chk,
297971120773SMarek Vasut 			 left_edge, right_edge, 0);
29803da42859SDinh Nguyen 
29813b44f55cSMarek Vasut 	/* Search for the right edge of the window for each bit. */
2982c4907898SMarek Vasut 	ret = search_right_edge(1, rank_bgn, write_group, 0,
2983c4907898SMarek Vasut 				start_dqs, 0,
29840c4be198SMarek Vasut 				&sticky_bit_chk,
2985c4907898SMarek Vasut 				left_edge, right_edge, 0);
2986c4907898SMarek Vasut 	if (ret) {
2987c4907898SMarek Vasut 		set_failing_group_stage(test_bgn + ret - 1, CAL_STAGE_WRITES,
29883da42859SDinh Nguyen 					CAL_SUBSTAGE_WRITES_CENTER);
2989d043ee5bSMarek Vasut 		return -EINVAL;
29903da42859SDinh Nguyen 	}
29913da42859SDinh Nguyen 
2992afb3eb84SMarek Vasut 	min_index = get_window_mid_index(1, left_edge, right_edge, &mid_min);
29933da42859SDinh Nguyen 
29943b44f55cSMarek Vasut 	/* Determine the amount we can change DQS (which is -mid_min). */
29953da42859SDinh Nguyen 	orig_mid_min = mid_min;
29963da42859SDinh Nguyen 	new_dqs = start_dqs;
29973da42859SDinh Nguyen 	mid_min = 0;
29983b44f55cSMarek Vasut 	debug_cond(DLEVEL == 1,
29993b44f55cSMarek Vasut 		   "%s:%d write_center: start_dqs=%d new_dqs=%d mid_min=%d\n",
30003b44f55cSMarek Vasut 		   __func__, __LINE__, start_dqs, new_dqs, mid_min);
30013da42859SDinh Nguyen 
3002ffb8b66eSMarek Vasut 	/* Add delay to bring centre of all DQ windows to the same "level". */
3003ffb8b66eSMarek Vasut 	center_dq_windows(1, left_edge, right_edge, mid_min, orig_mid_min,
3004ffb8b66eSMarek Vasut 			  min_index, 0, &dq_margin, &dqs_margin);
30053da42859SDinh Nguyen 
30063da42859SDinh Nguyen 	/* Move DQS */
30073da42859SDinh Nguyen 	scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs);
30081273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
30093da42859SDinh Nguyen 
30103da42859SDinh Nguyen 	/* Centre DM */
30113da42859SDinh Nguyen 	debug_cond(DLEVEL == 2, "%s:%d write_center: DM\n", __func__, __LINE__);
30123da42859SDinh Nguyen 
30133da42859SDinh Nguyen 	/*
30143b44f55cSMarek Vasut 	 * Set the left and right edge of each bit to an illegal value.
30153b44f55cSMarek Vasut 	 * Use (IO_IO_OUT1_DELAY_MAX + 1) as an illegal value.
30163da42859SDinh Nguyen 	 */
30173da42859SDinh Nguyen 	left_edge[0]  = IO_IO_OUT1_DELAY_MAX + 1;
30183da42859SDinh Nguyen 	right_edge[0] = IO_IO_OUT1_DELAY_MAX + 1;
30193da42859SDinh Nguyen 
30203b44f55cSMarek Vasut 	/* Search for (a part of) the window with DM shift. */
3021c8570afaSMarek Vasut 	search_window(1, rank_bgn, write_group, &bgn_curr, &end_curr,
3022c8570afaSMarek Vasut 		      &bgn_best, &end_best, &win_best, 0);
30233da42859SDinh Nguyen 
30243b44f55cSMarek Vasut 	/* Reset DM delay chains to 0. */
302532675249SMarek Vasut 	scc_mgr_apply_group_dm_out1_delay(0);
30263da42859SDinh Nguyen 
30273da42859SDinh Nguyen 	/*
30283da42859SDinh Nguyen 	 * Check to see if the current window nudges up against 0 delay.
30293da42859SDinh Nguyen 	 * If so, we need to continue the search by shifting DQS;
30303b44f55cSMarek Vasut 	 * otherwise the DQS search begins as a new search.
30313b44f55cSMarek Vasut 	 */
30323da42859SDinh Nguyen 	if (end_curr != 0) {
30333da42859SDinh Nguyen 		bgn_curr = IO_IO_OUT1_DELAY_MAX + 1;
30343da42859SDinh Nguyen 		end_curr = IO_IO_OUT1_DELAY_MAX + 1;
30353da42859SDinh Nguyen 	}
30363da42859SDinh Nguyen 
30373b44f55cSMarek Vasut 	/* Search for (a part of) the window with DQS shifts. */
3038c8570afaSMarek Vasut 	search_window(0, rank_bgn, write_group, &bgn_curr, &end_curr,
3039c8570afaSMarek Vasut 		      &bgn_best, &end_best, &win_best, new_dqs);
30403da42859SDinh Nguyen 
30413b44f55cSMarek Vasut 	/* Assign left and right edge for cal and reporting. */
30423da42859SDinh Nguyen 	left_edge[0] = -1 * bgn_best;
30433da42859SDinh Nguyen 	right_edge[0] = end_best;
30443da42859SDinh Nguyen 
30453b44f55cSMarek Vasut 	debug_cond(DLEVEL == 2, "%s:%d dm_calib: left=%d right=%d\n",
30463b44f55cSMarek Vasut 		   __func__, __LINE__, left_edge[0], right_edge[0]);
30473da42859SDinh Nguyen 
30483b44f55cSMarek Vasut 	/* Move DQS (back to orig). */
30493da42859SDinh Nguyen 	scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs);
30503da42859SDinh Nguyen 
30513da42859SDinh Nguyen 	/* Move DM */
30523da42859SDinh Nguyen 
30533b44f55cSMarek Vasut 	/* Find middle of window for the DM bit. */
30543da42859SDinh Nguyen 	mid = (left_edge[0] - right_edge[0]) / 2;
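	/*
	 * E.g. bgn_best = -3 and end_best = 5 give left_edge[0] = 3,
	 * right_edge[0] = 5 and mid = (3 - 5) / 2 = -1, which is then
	 * clamped to 0 below since we may only move right.
	 */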
30553da42859SDinh Nguyen 
30563b44f55cSMarek Vasut 	/* Only move right, since we are not moving DQS/DQ. */
30573da42859SDinh Nguyen 	if (mid < 0)
30583da42859SDinh Nguyen 		mid = 0;
30593da42859SDinh Nguyen 
30603b44f55cSMarek Vasut 	/* dm_margin should fail if we never find a window. */
30613da42859SDinh Nguyen 	if (win_best == 0)
30623da42859SDinh Nguyen 		dm_margin = -1;
30633da42859SDinh Nguyen 	else
30643da42859SDinh Nguyen 		dm_margin = left_edge[0] - mid;
30653da42859SDinh Nguyen 
306632675249SMarek Vasut 	scc_mgr_apply_group_dm_out1_delay(mid);
30671273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
30683da42859SDinh Nguyen 
30693b44f55cSMarek Vasut 	debug_cond(DLEVEL == 2,
30703b44f55cSMarek Vasut 		   "%s:%d dm_calib: left=%d right=%d mid=%d dm_margin=%d\n",
30713b44f55cSMarek Vasut 		   __func__, __LINE__, left_edge[0], right_edge[0],
30723b44f55cSMarek Vasut 		   mid, dm_margin);
30733b44f55cSMarek Vasut 	/* Export values. */
30743da42859SDinh Nguyen 	gbl->fom_out += dq_margin + dqs_margin;
30753da42859SDinh Nguyen 
30763b44f55cSMarek Vasut 	debug_cond(DLEVEL == 2,
30773b44f55cSMarek Vasut 		   "%s:%d write_center: dq_margin=%d dqs_margin=%d dm_margin=%d\n",
30783b44f55cSMarek Vasut 		   __func__, __LINE__, dq_margin, dqs_margin, dm_margin);
30793da42859SDinh Nguyen 
30803da42859SDinh Nguyen 	/*
30813da42859SDinh Nguyen 	 * Do not remove this line as it makes sure all of our
30823da42859SDinh Nguyen 	 * decisions have been applied.
30833da42859SDinh Nguyen 	 */
30841273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
30853b44f55cSMarek Vasut 
3086d043ee5bSMarek Vasut 	if ((dq_margin < 0) || (dqs_margin < 0) || (dm_margin < 0))
3087d043ee5bSMarek Vasut 		return -EINVAL;
3088d043ee5bSMarek Vasut 
3089d043ee5bSMarek Vasut 	return 0;
30903da42859SDinh Nguyen }
30913da42859SDinh Nguyen 
3092db3a6061SMarek Vasut /**
3093db3a6061SMarek Vasut  * rw_mgr_mem_calibrate_writes() - Write Calibration Part One
3094db3a6061SMarek Vasut  * @rank_bgn:		Rank number
3095db3a6061SMarek Vasut  * @group:		Read/Write Group
3096db3a6061SMarek Vasut  * @test_bgn:		Rank at which the test begins
3097db3a6061SMarek Vasut  *
3098db3a6061SMarek Vasut  * Stage 2: Write Calibration Part One.
3099db3a6061SMarek Vasut  *
3100db3a6061SMarek Vasut  * This function implements UniPHY calibration Stage 2, as explained in
3101db3a6061SMarek Vasut  * detail in Altera EMI_RM 2015.05.04, "UniPHY Calibration Stages".
3102db3a6061SMarek Vasut  */
3103db3a6061SMarek Vasut static int rw_mgr_mem_calibrate_writes(const u32 rank_bgn, const u32 group,
3104db3a6061SMarek Vasut 				       const u32 test_bgn)
31053da42859SDinh Nguyen {
3106db3a6061SMarek Vasut 	int ret;
31073da42859SDinh Nguyen 
3108db3a6061SMarek Vasut 	/* Update info for sims */
3109db3a6061SMarek Vasut 	debug("%s:%d %u %u\n", __func__, __LINE__, group, test_bgn);
3110db3a6061SMarek Vasut 
3111db3a6061SMarek Vasut 	reg_file_set_group(group);
31123da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_WRITES);
31133da42859SDinh Nguyen 	reg_file_set_sub_stage(CAL_SUBSTAGE_WRITES_CENTER);
31143da42859SDinh Nguyen 
3115db3a6061SMarek Vasut 	ret = rw_mgr_mem_calibrate_writes_center(rank_bgn, group, test_bgn);
3116d043ee5bSMarek Vasut 	if (ret)
3117db3a6061SMarek Vasut 		set_failing_group_stage(group, CAL_STAGE_WRITES,
31183da42859SDinh Nguyen 					CAL_SUBSTAGE_WRITES_CENTER);
31193da42859SDinh Nguyen 
3120d043ee5bSMarek Vasut 	return ret;
31213da42859SDinh Nguyen }
31223da42859SDinh Nguyen 
31234b0ac26aSMarek Vasut /**
31244b0ac26aSMarek Vasut  * mem_precharge_and_activate() - Precharge all banks and activate
31254b0ac26aSMarek Vasut  *
31264b0ac26aSMarek Vasut  * Precharge all banks and activate row 0 in bank "000..." and bank "111...".
31274b0ac26aSMarek Vasut  */
31283da42859SDinh Nguyen static void mem_precharge_and_activate(void)
31293da42859SDinh Nguyen {
31304b0ac26aSMarek Vasut 	int r;
31313da42859SDinh Nguyen 
3132*1fa0c8c4SMarek Vasut 	for (r = 0; r < rwcfg->mem_number_of_ranks; r++) {
31334b0ac26aSMarek Vasut 		/* Set rank. */
31343da42859SDinh Nguyen 		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF);
31353da42859SDinh Nguyen 
31364b0ac26aSMarek Vasut 		/* Precharge all banks. */
3137*1fa0c8c4SMarek Vasut 		writel(rwcfg->precharge_all, SDR_PHYGRP_RWMGRGRP_ADDRESS |
31381273dd9eSMarek Vasut 					     RW_MGR_RUN_SINGLE_GROUP_OFFSET);
31393da42859SDinh Nguyen 
31401273dd9eSMarek Vasut 		writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr0);
3141*1fa0c8c4SMarek Vasut 		writel(rwcfg->activate_0_and_1_wait1,
31421273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add0);
31433da42859SDinh Nguyen 
31441273dd9eSMarek Vasut 		writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr1);
3145*1fa0c8c4SMarek Vasut 		writel(rwcfg->activate_0_and_1_wait2,
31461273dd9eSMarek Vasut 			&sdr_rw_load_jump_mgr_regs->load_jump_add1);
31473da42859SDinh Nguyen 
31484b0ac26aSMarek Vasut 		/* Activate rows. */
3149*1fa0c8c4SMarek Vasut 		writel(rwcfg->activate_0_and_1, SDR_PHYGRP_RWMGRGRP_ADDRESS |
31501273dd9eSMarek Vasut 						RW_MGR_RUN_SINGLE_GROUP_OFFSET);
31513da42859SDinh Nguyen 	}
31523da42859SDinh Nguyen }
31533da42859SDinh Nguyen 
315416502a0bSMarek Vasut /**
315516502a0bSMarek Vasut  * mem_init_latency() - Configure memory RLAT and WLAT settings
315616502a0bSMarek Vasut  *
315716502a0bSMarek Vasut  * Configure memory RLAT and WLAT parameters.
315816502a0bSMarek Vasut  */
315916502a0bSMarek Vasut static void mem_init_latency(void)
31603da42859SDinh Nguyen {
316116502a0bSMarek Vasut 	/*
316216502a0bSMarek Vasut 	 * For AV/CV, LFIFO is hardened and always runs at full rate
316316502a0bSMarek Vasut 	 * so max latency in AFI clocks, used here, is correspondingly
316416502a0bSMarek Vasut 	 * smaller.
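	 * E.g. a MAX_LATENCY_COUNT_WIDTH of 5 (the value is device-specific)
	 * would cap the latency at (1 << 5) - 1 = 31 AFI clocks.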
316516502a0bSMarek Vasut 	 */
316616502a0bSMarek Vasut 	const u32 max_latency = (1 << MAX_LATENCY_COUNT_WIDTH) - 1;
316716502a0bSMarek Vasut 	u32 rlat, wlat;
31683da42859SDinh Nguyen 
31693da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
317016502a0bSMarek Vasut 
317116502a0bSMarek Vasut 	/*
317216502a0bSMarek Vasut 	 * Read in write latency.
317316502a0bSMarek Vasut 	 * WL for Hard PHY does not include additive latency.
317416502a0bSMarek Vasut 	 */
31751273dd9eSMarek Vasut 	wlat = readl(&data_mgr->t_wl_add);
31761273dd9eSMarek Vasut 	wlat += readl(&data_mgr->mem_t_add);
31773da42859SDinh Nguyen 
317816502a0bSMarek Vasut 	gbl->rw_wl_nop_cycles = wlat - 1;
31793da42859SDinh Nguyen 
318016502a0bSMarek Vasut 	/* Read in read latency. */
31811273dd9eSMarek Vasut 	rlat = readl(&data_mgr->t_rl_add);
31823da42859SDinh Nguyen 
318316502a0bSMarek Vasut 	/* Set a pretty high read latency initially. */
31843da42859SDinh Nguyen 	gbl->curr_read_lat = rlat + 16;
31853da42859SDinh Nguyen 	if (gbl->curr_read_lat > max_latency)
31863da42859SDinh Nguyen 		gbl->curr_read_lat = max_latency;
31873da42859SDinh Nguyen 
31881273dd9eSMarek Vasut 	writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
31893da42859SDinh Nguyen 
319016502a0bSMarek Vasut 	/* Advertise write latency. */
319116502a0bSMarek Vasut 	writel(wlat, &phy_mgr_cfg->afi_wlat);
31923da42859SDinh Nguyen }
31933da42859SDinh Nguyen 
319451cea0b6SMarek Vasut /**
319551cea0b6SMarek Vasut  * mem_skip_calibrate() - Set VFIFO and LFIFO to instant-on settings
319651cea0b6SMarek Vasut  *
319751cea0b6SMarek Vasut  * Set VFIFO and LFIFO to instant-on settings in skip calibration mode.
319851cea0b6SMarek Vasut  */
31993da42859SDinh Nguyen static void mem_skip_calibrate(void)
32003da42859SDinh Nguyen {
32013da42859SDinh Nguyen 	uint32_t vfifo_offset;
32023da42859SDinh Nguyen 	uint32_t i, j, r;
32033da42859SDinh Nguyen 
32043da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
32053da42859SDinh Nguyen 	/* Need to update every shadow register set used by the interface */
3206*1fa0c8c4SMarek Vasut 	for (r = 0; r < rwcfg->mem_number_of_ranks;
32073da42859SDinh Nguyen 	     r += NUM_RANKS_PER_SHADOW_REG) {
32083da42859SDinh Nguyen 		/*
32093da42859SDinh Nguyen 		 * Set output phase alignment settings appropriate for
32103da42859SDinh Nguyen 		 * skip calibration.
32113da42859SDinh Nguyen 		 */
3212*1fa0c8c4SMarek Vasut 		for (i = 0; i < rwcfg->mem_if_read_dqs_width; i++) {
32133da42859SDinh Nguyen 			scc_mgr_set_dqs_en_phase(i, 0);
32143da42859SDinh Nguyen #if IO_DLL_CHAIN_LENGTH == 6
32153da42859SDinh Nguyen 			scc_mgr_set_dqdqs_output_phase(i, 6);
32163da42859SDinh Nguyen #else
32173da42859SDinh Nguyen 			scc_mgr_set_dqdqs_output_phase(i, 7);
32183da42859SDinh Nguyen #endif
32193da42859SDinh Nguyen 			/*
32203da42859SDinh Nguyen 			 * Case:33398
32213da42859SDinh Nguyen 			 *
32223da42859SDinh Nguyen 			 * Write data arrives at the I/O two cycles before write
32233da42859SDinh Nguyen 			 * latency is reached (720 deg).
32243da42859SDinh Nguyen 			 *   -> due to bit-slip in a/c bus
32253da42859SDinh Nguyen 			 *   -> to allow board skew where dqs is longer than ck
32263da42859SDinh Nguyen 			 *      -> how often can this happen in practice?
32273da42859SDinh Nguyen 			 *      -> we could claim back some ptaps for high-freq
32283da42859SDinh Nguyen 			 *         support if this were relaxed, but I digress...
32293da42859SDinh Nguyen 			 *
32303da42859SDinh Nguyen 			 * The write_clk leads mem_ck by 90 deg
32313da42859SDinh Nguyen 			 * The minimum ptap of the OPA is 180 deg
32323da42859SDinh Nguyen 			 * Each ptap has (360 / IO_DLL_CHAIN_LENGTH) deg of delay
32333da42859SDinh Nguyen 			 * The write_clk is always delayed by 2 ptaps
32343da42859SDinh Nguyen 			 *
32353da42859SDinh Nguyen 			 * Hence, to make DQS aligned to CK, we need to delay
32363da42859SDinh Nguyen 			 * DQS by:
32373da42859SDinh Nguyen 			 *    (720 - 90 - 180 - 2 * (360 / IO_DLL_CHAIN_LENGTH))
32383da42859SDinh Nguyen 			 *
32393da42859SDinh Nguyen 			 * Dividing the above by (360 / IO_DLL_CHAIN_LENGTH)
32403da42859SDinh Nguyen 			 * gives us the number of ptaps, which simplifies to:
32413da42859SDinh Nguyen 			 *
32423da42859SDinh Nguyen 			 *    (1.25 * IO_DLL_CHAIN_LENGTH - 2)
32433da42859SDinh Nguyen 			 */
324451cea0b6SMarek Vasut 			scc_mgr_set_dqdqs_output_phase(i,
324551cea0b6SMarek Vasut 					1.25 * IO_DLL_CHAIN_LENGTH - 2);
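			/*
			 * Sanity check of the formula above, assuming
			 * IO_DLL_CHAIN_LENGTH == 8 for illustration: each
			 * ptap is 360 / 8 = 45 deg, the required DQS delay
			 * is 720 - 90 - 180 - 2 * 45 = 360 deg, i.e.
			 * 360 / 45 = 8 ptaps, matching 1.25 * 8 - 2 = 8.
			 */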
32463da42859SDinh Nguyen 		}
32471273dd9eSMarek Vasut 		writel(0xff, &sdr_scc_mgr->dqs_ena);
32481273dd9eSMarek Vasut 		writel(0xff, &sdr_scc_mgr->dqs_io_ena);
32493da42859SDinh Nguyen 
3250*1fa0c8c4SMarek Vasut 		for (i = 0; i < rwcfg->mem_if_write_dqs_width; i++) {
32511273dd9eSMarek Vasut 			writel(i, SDR_PHYGRP_SCCGRP_ADDRESS |
32521273dd9eSMarek Vasut 				  SCC_MGR_GROUP_COUNTER_OFFSET);
32533da42859SDinh Nguyen 		}
32541273dd9eSMarek Vasut 		writel(0xff, &sdr_scc_mgr->dq_ena);
32551273dd9eSMarek Vasut 		writel(0xff, &sdr_scc_mgr->dm_ena);
32561273dd9eSMarek Vasut 		writel(0, &sdr_scc_mgr->update);
32573da42859SDinh Nguyen 	}
32583da42859SDinh Nguyen 
32593da42859SDinh Nguyen 	/* Compensate for simulation model behaviour */
3260*1fa0c8c4SMarek Vasut 	for (i = 0; i < rwcfg->mem_if_read_dqs_width; i++) {
32613da42859SDinh Nguyen 		scc_mgr_set_dqs_bus_in_delay(i, 10);
32623da42859SDinh Nguyen 		scc_mgr_load_dqs(i);
32633da42859SDinh Nguyen 	}
32641273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
32653da42859SDinh Nguyen 
32663da42859SDinh Nguyen 	/*
32673da42859SDinh Nguyen 	 * Arria V has hard FIFOs that can only be initialized by incrementing
32683da42859SDinh Nguyen 	 * them in the sequencer.
32693da42859SDinh Nguyen 	 */
32703da42859SDinh Nguyen 	vfifo_offset = CALIB_VFIFO_OFFSET;
327151cea0b6SMarek Vasut 	for (j = 0; j < vfifo_offset; j++)
32721273dd9eSMarek Vasut 		writel(0xff, &phy_mgr_cmd->inc_vfifo_hard_phy);
32731273dd9eSMarek Vasut 	writel(0, &phy_mgr_cmd->fifo_reset);
32743da42859SDinh Nguyen 
32753da42859SDinh Nguyen 	/*
327651cea0b6SMarek Vasut 	 * For Arria V and Cyclone V with hard LFIFO, we get the skip-cal
327751cea0b6SMarek Vasut 	 * setting from generation-time constant.
32783da42859SDinh Nguyen 	 */
32793da42859SDinh Nguyen 	gbl->curr_read_lat = CALIB_LFIFO_OFFSET;
32801273dd9eSMarek Vasut 	writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
32813da42859SDinh Nguyen }
32823da42859SDinh Nguyen 
32833589fbfbSMarek Vasut /**
32843589fbfbSMarek Vasut  * mem_calibrate() - Memory calibration entry point.
32853589fbfbSMarek Vasut  *
32863589fbfbSMarek Vasut  * Perform memory calibration.
32873589fbfbSMarek Vasut  */
32883da42859SDinh Nguyen static uint32_t mem_calibrate(void)
32893da42859SDinh Nguyen {
32903da42859SDinh Nguyen 	uint32_t i;
32913da42859SDinh Nguyen 	uint32_t rank_bgn, sr;
32923da42859SDinh Nguyen 	uint32_t write_group, write_test_bgn;
32933da42859SDinh Nguyen 	uint32_t read_group, read_test_bgn;
32943da42859SDinh Nguyen 	uint32_t run_groups, current_run;
32953da42859SDinh Nguyen 	uint32_t failing_groups = 0;
32963da42859SDinh Nguyen 	uint32_t group_failed = 0;
32973da42859SDinh Nguyen 
3298*1fa0c8c4SMarek Vasut 	const u32 rwdqs_ratio = rwcfg->mem_if_read_dqs_width /
3299*1fa0c8c4SMarek Vasut 				rwcfg->mem_if_write_dqs_width;
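	/*
	 * For example, with hypothetical widths of 4 read DQS groups and
	 * 2 write DQS groups, rwdqs_ratio = 2 and write group N covers
	 * read groups 2N and 2N + 1 in the loops below.
	 */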
330033c42bb8SMarek Vasut 
33013da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
33023da42859SDinh Nguyen 
330316502a0bSMarek Vasut 	/* Initialize the data settings */
33043da42859SDinh Nguyen 	gbl->error_substage = CAL_SUBSTAGE_NIL;
33053da42859SDinh Nguyen 	gbl->error_stage = CAL_STAGE_NIL;
33063da42859SDinh Nguyen 	gbl->error_group = 0xff;
33073da42859SDinh Nguyen 	gbl->fom_in = 0;
33083da42859SDinh Nguyen 	gbl->fom_out = 0;
33093da42859SDinh Nguyen 
331016502a0bSMarek Vasut 	/* Initialize WLAT and RLAT. */
331116502a0bSMarek Vasut 	mem_init_latency();
331216502a0bSMarek Vasut 
331316502a0bSMarek Vasut 	/* Initialize bit slips. */
331416502a0bSMarek Vasut 	mem_precharge_and_activate();
33153da42859SDinh Nguyen 
3316*1fa0c8c4SMarek Vasut 	for (i = 0; i < rwcfg->mem_if_read_dqs_width; i++) {
33171273dd9eSMarek Vasut 		writel(i, SDR_PHYGRP_SCCGRP_ADDRESS |
33181273dd9eSMarek Vasut 			  SCC_MGR_GROUP_COUNTER_OFFSET);
3319fa5d821bSMarek Vasut 		/* Only needed once to set all groups, pins, DQ, DQS, DM. */
3320fa5d821bSMarek Vasut 		if (i == 0)
3321fa5d821bSMarek Vasut 			scc_mgr_set_hhp_extras();
3322fa5d821bSMarek Vasut 
3323c5c5f537SMarek Vasut 		scc_set_bypass_mode(i);
33243da42859SDinh Nguyen 	}
33253da42859SDinh Nguyen 
3326722c9685SMarek Vasut 	/* Calibration is skipped. */
33273da42859SDinh Nguyen 	if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL) {
33283da42859SDinh Nguyen 		/*
33293da42859SDinh Nguyen 		 * Set VFIFO and LFIFO to instant-on settings in skip
33303da42859SDinh Nguyen 		 * calibration mode.
33313da42859SDinh Nguyen 		 */
33323da42859SDinh Nguyen 		mem_skip_calibrate();
3333722c9685SMarek Vasut 
3334722c9685SMarek Vasut 		/*
3335722c9685SMarek Vasut 		 * Do not remove this line as it makes sure all of our
3336722c9685SMarek Vasut 		 * decisions have been applied.
3337722c9685SMarek Vasut 		 */
3338722c9685SMarek Vasut 		writel(0, &sdr_scc_mgr->update);
3339722c9685SMarek Vasut 		return 1;
3340722c9685SMarek Vasut 	}
3341722c9685SMarek Vasut 
3342722c9685SMarek Vasut 	/* Calibration is not skipped. */
33433da42859SDinh Nguyen 	for (i = 0; i < NUM_CALIB_REPEAT; i++) {
33443da42859SDinh Nguyen 		/*
33453da42859SDinh Nguyen 		 * Zero all delay chain/phase settings for all
33463da42859SDinh Nguyen 		 * groups and all shadow register sets.
33473da42859SDinh Nguyen 		 */
33483da42859SDinh Nguyen 		scc_mgr_zero_all();
33493da42859SDinh Nguyen 
3350f085ac3bSMarek Vasut 		run_groups = ~0;
33513da42859SDinh Nguyen 
33523da42859SDinh Nguyen 		for (write_group = 0, write_test_bgn = 0; write_group
3353*1fa0c8c4SMarek Vasut 			< rwcfg->mem_if_write_dqs_width; write_group++,
3354*1fa0c8c4SMarek Vasut 			write_test_bgn += rwcfg->mem_dq_per_write_dqs) {
3355c452dcd0SMarek Vasut 
3356c452dcd0SMarek Vasut 			/* Initialize the group failure flag. */
33573da42859SDinh Nguyen 			group_failed = 0;
33583da42859SDinh Nguyen 
33593da42859SDinh Nguyen 			current_run = run_groups & ((1 <<
33603da42859SDinh Nguyen 				RW_MGR_NUM_DQS_PER_WRITE_GROUP) - 1);
33613da42859SDinh Nguyen 			run_groups = run_groups >>
33623da42859SDinh Nguyen 				RW_MGR_NUM_DQS_PER_WRITE_GROUP;
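			/*
			 * For instance, were RW_MGR_NUM_DQS_PER_WRITE_GROUP
			 * defined as 1, current_run would be just bit 0 of
			 * run_groups, with one bit consumed per write group.
			 */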
33633da42859SDinh Nguyen 
33643da42859SDinh Nguyen 			if (current_run == 0)
33653da42859SDinh Nguyen 				continue;
33663da42859SDinh Nguyen 
33671273dd9eSMarek Vasut 			writel(write_group, SDR_PHYGRP_SCCGRP_ADDRESS |
33681273dd9eSMarek Vasut 					    SCC_MGR_GROUP_COUNTER_OFFSET);
3369d41ea93aSMarek Vasut 			scc_mgr_zero_group(write_group, 0);
33703da42859SDinh Nguyen 
337133c42bb8SMarek Vasut 			for (read_group = write_group * rwdqs_ratio,
33723da42859SDinh Nguyen 			     read_test_bgn = 0;
3373c452dcd0SMarek Vasut 			     read_group < (write_group + 1) * rwdqs_ratio;
337433c42bb8SMarek Vasut 			     read_group++,
3375*1fa0c8c4SMarek Vasut 			     read_test_bgn += rwcfg->mem_dq_per_read_dqs) {
337633c42bb8SMarek Vasut 				if (STATIC_CALIB_STEPS & CALIB_SKIP_VFIFO)
337733c42bb8SMarek Vasut 					continue;
33783da42859SDinh Nguyen 
337933c42bb8SMarek Vasut 				/* Calibrate the VFIFO */
338033c42bb8SMarek Vasut 				if (rw_mgr_mem_calibrate_vfifo(read_group,
338133c42bb8SMarek Vasut 							       read_test_bgn))
338233c42bb8SMarek Vasut 					continue;
338333c42bb8SMarek Vasut 
338433c42bb8SMarek Vasut 				if (!(gbl->phy_debug_mode_flags & PHY_DEBUG_SWEEP_ALL_GROUPS))
33853da42859SDinh Nguyen 					return 0;
3386c452dcd0SMarek Vasut 
3387c452dcd0SMarek Vasut 				/* The group failed, we're done. */
3388c452dcd0SMarek Vasut 				goto grp_failed;
33893da42859SDinh Nguyen 			}
33903da42859SDinh Nguyen 
33913da42859SDinh Nguyen 			/* Calibrate the output side */
33924ac21610SMarek Vasut 			for (rank_bgn = 0, sr = 0;
3393*1fa0c8c4SMarek Vasut 			     rank_bgn < rwcfg->mem_number_of_ranks;
33944ac21610SMarek Vasut 			     rank_bgn += NUM_RANKS_PER_SHADOW_REG, sr++) {
33954ac21610SMarek Vasut 				if (STATIC_CALIB_STEPS & CALIB_SKIP_WRITES)
33964ac21610SMarek Vasut 					continue;
33974ac21610SMarek Vasut 
33984ac21610SMarek Vasut 				/* Not needed in quick mode! */
33994ac21610SMarek Vasut 				if (STATIC_CALIB_STEPS & CALIB_SKIP_DELAY_SWEEPS)
34004ac21610SMarek Vasut 					continue;
34014ac21610SMarek Vasut 
34024ac21610SMarek Vasut 				/* Calibrate WRITEs */
3403db3a6061SMarek Vasut 				if (!rw_mgr_mem_calibrate_writes(rank_bgn,
34044ac21610SMarek Vasut 						write_group, write_test_bgn))
34054ac21610SMarek Vasut 					continue;
34064ac21610SMarek Vasut 
34073da42859SDinh Nguyen 				group_failed = 1;
34084ac21610SMarek Vasut 				if (!(gbl->phy_debug_mode_flags & PHY_DEBUG_SWEEP_ALL_GROUPS))
34094ac21610SMarek Vasut 					return 0;
34103da42859SDinh Nguyen 			}
34113da42859SDinh Nguyen 
3412c452dcd0SMarek Vasut 			/* Some group failed, we're done. */
3413c452dcd0SMarek Vasut 			if (group_failed)
3414c452dcd0SMarek Vasut 				goto grp_failed;
3415c452dcd0SMarek Vasut 
34168213609eSMarek Vasut 			for (read_group = write_group * rwdqs_ratio,
34173da42859SDinh Nguyen 			     read_test_bgn = 0;
3418c452dcd0SMarek Vasut 			     read_group < (write_group + 1) * rwdqs_ratio;
34198213609eSMarek Vasut 			     read_group++,
3420*1fa0c8c4SMarek Vasut 			     read_test_bgn += rwcfg->mem_dq_per_read_dqs) {
34218213609eSMarek Vasut 				if (STATIC_CALIB_STEPS & CALIB_SKIP_WRITES)
34228213609eSMarek Vasut 					continue;
34233da42859SDinh Nguyen 
342478cdd7d0SMarek Vasut 				if (!rw_mgr_mem_calibrate_vfifo_end(read_group,
34258213609eSMarek Vasut 								read_test_bgn))
34268213609eSMarek Vasut 					continue;
34278213609eSMarek Vasut 
34288213609eSMarek Vasut 				if (!(gbl->phy_debug_mode_flags & PHY_DEBUG_SWEEP_ALL_GROUPS))
34293da42859SDinh Nguyen 					return 0;
3430c452dcd0SMarek Vasut 
3431c452dcd0SMarek Vasut 				/* The group failed, we're done. */
3432c452dcd0SMarek Vasut 				goto grp_failed;
34333da42859SDinh Nguyen 			}
34343da42859SDinh Nguyen 
3435c452dcd0SMarek Vasut 			/* No group failed, continue as usual. */
3436c452dcd0SMarek Vasut 			continue;
3437c452dcd0SMarek Vasut 
3438c452dcd0SMarek Vasut grp_failed:		/* A group failed, increment the counter. */
34393da42859SDinh Nguyen 			failing_groups++;
34403da42859SDinh Nguyen 		}
34413da42859SDinh Nguyen 
34423da42859SDinh Nguyen 		/*
34433da42859SDinh Nguyen 		 * If there are any failing groups, report the failure.
34453da42859SDinh Nguyen 		 */
34463da42859SDinh Nguyen 		if (failing_groups != 0)
34473da42859SDinh Nguyen 			return 0;
34483da42859SDinh Nguyen 
3449c50ae303SMarek Vasut 		if (STATIC_CALIB_STEPS & CALIB_SKIP_LFIFO)
3450c50ae303SMarek Vasut 			continue;
3451c50ae303SMarek Vasut 
3452c50ae303SMarek Vasut 		/* Calibrate the LFIFO */
34533da42859SDinh Nguyen 		if (!rw_mgr_mem_calibrate_lfifo())
34543da42859SDinh Nguyen 			return 0;
34553da42859SDinh Nguyen 	}
34563da42859SDinh Nguyen 
34573da42859SDinh Nguyen 	/*
34583da42859SDinh Nguyen 	 * Do not remove this line as it makes sure all of our decisions
34593da42859SDinh Nguyen 	 * have been applied.
34603da42859SDinh Nguyen 	 */
34611273dd9eSMarek Vasut 	writel(0, &sdr_scc_mgr->update);
34623da42859SDinh Nguyen 	return 1;
34633da42859SDinh Nguyen }
34643da42859SDinh Nguyen 
346523a040c0SMarek Vasut /**
346623a040c0SMarek Vasut  * run_mem_calibrate() - Perform memory calibration
346723a040c0SMarek Vasut  *
346823a040c0SMarek Vasut  * This function triggers the entire memory calibration procedure.
346923a040c0SMarek Vasut  */
347023a040c0SMarek Vasut static int run_mem_calibrate(void)
34713da42859SDinh Nguyen {
347223a040c0SMarek Vasut 	int pass;
34733da42859SDinh Nguyen 
34743da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
34753da42859SDinh Nguyen 
34763da42859SDinh Nguyen 	/* Reset pass/fail status shown on afi_cal_success/fail */
34771273dd9eSMarek Vasut 	writel(PHY_MGR_CAL_RESET, &phy_mgr_cfg->cal_status);
34783da42859SDinh Nguyen 
347923a040c0SMarek Vasut 	/* Stop tracking manager. */
348023a040c0SMarek Vasut 	clrbits_le32(&sdr_ctrl->ctrl_cfg, 1 << 22);
34813da42859SDinh Nguyen 
34829fa9c90eSMarek Vasut 	phy_mgr_initialize();
34833da42859SDinh Nguyen 	rw_mgr_mem_initialize();
34843da42859SDinh Nguyen 
348523a040c0SMarek Vasut 	/* Perform the actual memory calibration. */
34863da42859SDinh Nguyen 	pass = mem_calibrate();
34873da42859SDinh Nguyen 
34883da42859SDinh Nguyen 	mem_precharge_and_activate();
34891273dd9eSMarek Vasut 	writel(0, &phy_mgr_cmd->fifo_reset);
34903da42859SDinh Nguyen 
349123a040c0SMarek Vasut 	/* Handoff. */
34923da42859SDinh Nguyen 	rw_mgr_mem_handoff();
34933da42859SDinh Nguyen 	/*
34943da42859SDinh Nguyen 	 * In Hard PHY this is a 2-bit control:
34953da42859SDinh Nguyen 	 * 0: AFI Mux Select
34963da42859SDinh Nguyen 	 * 1: DDIO Mux Select
34973da42859SDinh Nguyen 	 */
34981273dd9eSMarek Vasut 	writel(0x2, &phy_mgr_cfg->mux_sel);
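	/*
	 * Writing 0x2 therefore sets DDIO Mux Select (bit 1) and leaves
	 * AFI Mux Select (bit 0) clear.
	 */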
349923a040c0SMarek Vasut 
350023a040c0SMarek Vasut 	/* Start tracking manager. */
350123a040c0SMarek Vasut 	setbits_le32(&sdr_ctrl->ctrl_cfg, 1 << 22);
350223a040c0SMarek Vasut 
350323a040c0SMarek Vasut 	return pass;
35043da42859SDinh Nguyen }
35053da42859SDinh Nguyen 
350623a040c0SMarek Vasut /**
350723a040c0SMarek Vasut  * debug_mem_calibrate() - Report result of memory calibration
350823a040c0SMarek Vasut  * @pass:	Value indicating whether calibration passed or failed
350923a040c0SMarek Vasut  *
351023a040c0SMarek Vasut  * This function reports the results of the memory calibration
351123a040c0SMarek Vasut  * and writes debug information into the register file.
351223a040c0SMarek Vasut  */
351323a040c0SMarek Vasut static void debug_mem_calibrate(int pass)
351423a040c0SMarek Vasut {
351523a040c0SMarek Vasut 	uint32_t debug_info;
35163da42859SDinh Nguyen 
35173da42859SDinh Nguyen 	if (pass) {
35183da42859SDinh Nguyen 		printf("%s: CALIBRATION PASSED\n", __FILE__);
35193da42859SDinh Nguyen 
35203da42859SDinh Nguyen 		gbl->fom_in /= 2;
35213da42859SDinh Nguyen 		gbl->fom_out /= 2;
35223da42859SDinh Nguyen 
35233da42859SDinh Nguyen 		if (gbl->fom_in > 0xff)
35243da42859SDinh Nguyen 			gbl->fom_in = 0xff;
35253da42859SDinh Nguyen 
35263da42859SDinh Nguyen 		if (gbl->fom_out > 0xff)
35273da42859SDinh Nguyen 			gbl->fom_out = 0xff;
35283da42859SDinh Nguyen 
35293da42859SDinh Nguyen 		/* Update the FOM in the register file */
35303da42859SDinh Nguyen 		debug_info = gbl->fom_in;
35313da42859SDinh Nguyen 		debug_info |= gbl->fom_out << 8;
35321273dd9eSMarek Vasut 		writel(debug_info, &sdr_reg_file->fom);
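		/*
		 * E.g. hypothetical figures of merit fom_in = 0x20 and
		 * fom_out = 0x30 pack into debug_info = 0x3020.
		 */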
35333da42859SDinh Nguyen 
35341273dd9eSMarek Vasut 		writel(debug_info, &phy_mgr_cfg->cal_debug_info);
35351273dd9eSMarek Vasut 		writel(PHY_MGR_CAL_SUCCESS, &phy_mgr_cfg->cal_status);
35363da42859SDinh Nguyen 	} else {
35373da42859SDinh Nguyen 		printf("%s: CALIBRATION FAILED\n", __FILE__);
35383da42859SDinh Nguyen 
35393da42859SDinh Nguyen 		debug_info = gbl->error_stage;
35403da42859SDinh Nguyen 		debug_info |= gbl->error_substage << 8;
35413da42859SDinh Nguyen 		debug_info |= gbl->error_group << 16;
35423da42859SDinh Nguyen 
35431273dd9eSMarek Vasut 		/* Update the failing group/stage in the register file */
35431273dd9eSMarek Vasut 		writel(debug_info, &sdr_reg_file->failing_stage);
35441273dd9eSMarek Vasut 		writel(debug_info, &phy_mgr_cfg->cal_debug_info);
35451273dd9eSMarek Vasut 		writel(PHY_MGR_CAL_FAIL, &phy_mgr_cfg->cal_status);
35523da42859SDinh Nguyen 	}
35533da42859SDinh Nguyen 
355423a040c0SMarek Vasut 	printf("%s: Calibration complete\n", __FILE__);
35553da42859SDinh Nguyen }
35563da42859SDinh Nguyen 
3557bb06434bSMarek Vasut /**
3558bb06434bSMarek Vasut  * hc_initialize_rom_data() - Initialize ROM data
3559bb06434bSMarek Vasut  *
3560bb06434bSMarek Vasut  * Initialize ROM data.
3561bb06434bSMarek Vasut  */
35623da42859SDinh Nguyen static void hc_initialize_rom_data(void)
35633da42859SDinh Nguyen {
356404955cf2SMarek Vasut 	unsigned int nelem = 0;
356504955cf2SMarek Vasut 	const u32 *rom_init;
3566bb06434bSMarek Vasut 	u32 i, addr;
35673da42859SDinh Nguyen 
356804955cf2SMarek Vasut 	socfpga_get_seq_inst_init(&rom_init, &nelem);
3569c4815f76SMarek Vasut 	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_INST_ROM_WRITE_OFFSET;
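	/* Each ROM entry is one 32-bit word, hence the (i << 2) byte offset. */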
357004955cf2SMarek Vasut 	for (i = 0; i < nelem; i++)
357104955cf2SMarek Vasut 		writel(rom_init[i], addr + (i << 2));
35723da42859SDinh Nguyen 
357304955cf2SMarek Vasut 	socfpga_get_seq_ac_init(&rom_init, &nelem);
3574c4815f76SMarek Vasut 	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_AC_ROM_WRITE_OFFSET;
357504955cf2SMarek Vasut 	for (i = 0; i < nelem; i++)
357604955cf2SMarek Vasut 		writel(rom_init[i], addr + (i << 2));
35773da42859SDinh Nguyen }
35783da42859SDinh Nguyen 
35799c1ab2caSMarek Vasut /**
35809c1ab2caSMarek Vasut  * initialize_reg_file() - Initialize SDR register file
35819c1ab2caSMarek Vasut  *
35829c1ab2caSMarek Vasut  * Initialize SDR register file.
35839c1ab2caSMarek Vasut  */
35843da42859SDinh Nguyen static void initialize_reg_file(void)
35853da42859SDinh Nguyen {
35863da42859SDinh Nguyen 	/* Initialize the register file with the correct data */
35871273dd9eSMarek Vasut 	writel(REG_FILE_INIT_SEQ_SIGNATURE, &sdr_reg_file->signature);
35881273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->debug_data_addr);
35891273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->cur_stage);
35901273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->fom);
35911273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->failing_stage);
35921273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->debug1);
35931273dd9eSMarek Vasut 	writel(0, &sdr_reg_file->debug2);
35943da42859SDinh Nguyen }
35953da42859SDinh Nguyen 
35962ca151f8SMarek Vasut /**
35972ca151f8SMarek Vasut  * initialize_hps_phy() - Initialize HPS PHY
35982ca151f8SMarek Vasut  *
35992ca151f8SMarek Vasut  * Initialize HPS PHY.
36002ca151f8SMarek Vasut  */
36013da42859SDinh Nguyen static void initialize_hps_phy(void)
36023da42859SDinh Nguyen {
36033da42859SDinh Nguyen 	uint32_t reg;
36043da42859SDinh Nguyen 	/*
36053da42859SDinh Nguyen 	 * Tracking also gets configured here because it's in the
36063da42859SDinh Nguyen 	 * same register.
36073da42859SDinh Nguyen 	 */
36083da42859SDinh Nguyen 	uint32_t trk_sample_count = 7500;
36093da42859SDinh Nguyen 	uint32_t trk_long_idle_sample_count = (10 << 16) | 100;
36103da42859SDinh Nguyen 	/*
36113da42859SDinh Nguyen 	 * Format is number of outer loops in the 16 MSB, sample
36123da42859SDinh Nguyen 	 * count in 16 LSB.
36133da42859SDinh Nguyen 	 */
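	/*
	 * For example, (10 << 16) | 100 above encodes 10 outer loops of
	 * 100 samples each, i.e. 0x000a0064.
	 */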
36143da42859SDinh Nguyen 
36153da42859SDinh Nguyen 	reg = 0;
36163da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ACDELAYEN_SET(2);
36173da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQDELAYEN_SET(1);
36183da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSDELAYEN_SET(1);
36193da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSLOGICDELAYEN_SET(1);
36203da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_RESETDELAYEN_SET(0);
36213da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_LPDDRDIS_SET(1);
36223da42859SDinh Nguyen 	/*
36233da42859SDinh Nguyen 	 * This field selects the intrinsic latency to RDATA_EN/FULL path.
36243da42859SDinh Nguyen 	 * 00 - bypass, 01 - add 5 cycles, 10 - add 10 cycles, 11 - add 15 cycles.
36253da42859SDinh Nguyen 	 */
36263da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ADDLATSEL_SET(0);
36273da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_SET(
36283da42859SDinh Nguyen 		trk_sample_count);
36296cb9f167SMarek Vasut 	writel(reg, &sdr_ctrl->phy_ctrl0);
36303da42859SDinh Nguyen 
36313da42859SDinh Nguyen 	reg = 0;
36323da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_SAMPLECOUNT_31_20_SET(
36333da42859SDinh Nguyen 		trk_sample_count >>
36343da42859SDinh Nguyen 		SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_WIDTH);
36353da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_SET(
36363da42859SDinh Nguyen 		trk_long_idle_sample_count);
36376cb9f167SMarek Vasut 	writel(reg, &sdr_ctrl->phy_ctrl1);
36383da42859SDinh Nguyen 
36393da42859SDinh Nguyen 	reg = 0;
36403da42859SDinh Nguyen 	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_2_LONGIDLESAMPLECOUNT_31_20_SET(
36413da42859SDinh Nguyen 		trk_long_idle_sample_count >>
36423da42859SDinh Nguyen 		SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_WIDTH);
36436cb9f167SMarek Vasut 	writel(reg, &sdr_ctrl->phy_ctrl2);
36443da42859SDinh Nguyen }
36453da42859SDinh Nguyen 
3646880e46f2SMarek Vasut /**
3647880e46f2SMarek Vasut  * initialize_tracking() - Initialize tracking
3648880e46f2SMarek Vasut  *
3649880e46f2SMarek Vasut  * Initialize the register file with usable initial data.
3650880e46f2SMarek Vasut  */
36513da42859SDinh Nguyen static void initialize_tracking(void)
36523da42859SDinh Nguyen {
3653880e46f2SMarek Vasut 	/*
3654880e46f2SMarek Vasut 	 * Initialize the register file with the correct data.
3655880e46f2SMarek Vasut 	 * Compute usable version of value in case we skip full
3656880e46f2SMarek Vasut 	 * computation later.
3657880e46f2SMarek Vasut 	 */
3658880e46f2SMarek Vasut 	writel(DIV_ROUND_UP(IO_DELAY_PER_OPA_TAP, IO_DELAY_PER_DCHAIN_TAP) - 1,
3659880e46f2SMarek Vasut 	       &sdr_reg_file->dtaps_per_ptap);
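	/*
	 * As an illustration with hypothetical timings: were
	 * IO_DELAY_PER_OPA_TAP 416 ps and IO_DELAY_PER_DCHAIN_TAP 25 ps,
	 * this would store DIV_ROUND_UP(416, 25) - 1 = 17 - 1 = 16
	 * delay-chain taps per phase tap.
	 */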
3660880e46f2SMarek Vasut 
3661880e46f2SMarek Vasut 	/* trk_sample_count */
3662880e46f2SMarek Vasut 	writel(7500, &sdr_reg_file->trk_sample_count);
3663880e46f2SMarek Vasut 
3664880e46f2SMarek Vasut 	/* longidle outer loop [15:0] */
3665880e46f2SMarek Vasut 	writel((10 << 16) | (100 << 0), &sdr_reg_file->trk_longidle);
36663da42859SDinh Nguyen 
36673da42859SDinh Nguyen 	/*
3668880e46f2SMarek Vasut 	 * longidle sample count [31:24]
3669880e46f2SMarek Vasut 	 * trfc, worst case of 933MHz 4Gb [23:16]
3670880e46f2SMarek Vasut 	 * trcd, worst case [15:8]
3671880e46f2SMarek Vasut 	 * vfifo wait [7:0]
36723da42859SDinh Nguyen 	 */
3673880e46f2SMarek Vasut 	writel((243 << 24) | (14 << 16) | (10 << 8) | (4 << 0),
3674880e46f2SMarek Vasut 	       &sdr_reg_file->delays);
36753da42859SDinh Nguyen 
36763da42859SDinh Nguyen 	/* mux delay */
3677*1fa0c8c4SMarek Vasut 	writel((rwcfg->idle << 24) | (rwcfg->activate_1 << 16) |
3678*1fa0c8c4SMarek Vasut 	       (rwcfg->sgle_read << 8) | (rwcfg->precharge_all << 0),
3679880e46f2SMarek Vasut 	       &sdr_reg_file->trk_rw_mgr_addr);
36803da42859SDinh Nguyen 
3681*1fa0c8c4SMarek Vasut 	writel(rwcfg->mem_if_read_dqs_width,
3682880e46f2SMarek Vasut 	       &sdr_reg_file->trk_read_dqs_width);
36833da42859SDinh Nguyen 
3684880e46f2SMarek Vasut 	/* trefi [7:0] */
3685*1fa0c8c4SMarek Vasut 	writel((rwcfg->refresh_all << 24) | (1000 << 0),
3686880e46f2SMarek Vasut 	       &sdr_reg_file->trk_rfsh);
36873da42859SDinh Nguyen }
36883da42859SDinh Nguyen 
36893da42859SDinh Nguyen int sdram_calibration_full(void)
36903da42859SDinh Nguyen {
36913da42859SDinh Nguyen 	struct param_type my_param;
36923da42859SDinh Nguyen 	struct gbl_type my_gbl;
36933da42859SDinh Nguyen 	uint32_t pass;
369484e0b0cfSMarek Vasut 
369584e0b0cfSMarek Vasut 	memset(&my_param, 0, sizeof(my_param));
369684e0b0cfSMarek Vasut 	memset(&my_gbl, 0, sizeof(my_gbl));
36973da42859SDinh Nguyen 
36983da42859SDinh Nguyen 	param = &my_param;
36993da42859SDinh Nguyen 	gbl = &my_gbl;
37003da42859SDinh Nguyen 
3701d718a26bSMarek Vasut 	rwcfg = socfpga_get_sdram_rwmgr_config();
3702d718a26bSMarek Vasut 
37033da42859SDinh Nguyen 	/* Set the calibration enabled by default */
37043da42859SDinh Nguyen 	gbl->phy_debug_mode_flags |= PHY_DEBUG_ENABLE_CAL_RPT;
37053da42859SDinh Nguyen 	/*
37063da42859SDinh Nguyen 	 * Only sweep all groups (regardless of fail state) by default
37073da42859SDinh Nguyen 	 * Set enabled read test by default.
37083da42859SDinh Nguyen 	 */
37093da42859SDinh Nguyen #if DISABLE_GUARANTEED_READ
37103da42859SDinh Nguyen 	gbl->phy_debug_mode_flags |= PHY_DEBUG_DISABLE_GUARANTEED_READ;
37113da42859SDinh Nguyen #endif
37123da42859SDinh Nguyen 	/* Initialize the register file */
37133da42859SDinh Nguyen 	initialize_reg_file();
37143da42859SDinh Nguyen 
37153da42859SDinh Nguyen 	/* Initialize any PHY CSR */
37163da42859SDinh Nguyen 	initialize_hps_phy();
37173da42859SDinh Nguyen 
37183da42859SDinh Nguyen 	scc_mgr_initialize();
37193da42859SDinh Nguyen 
37203da42859SDinh Nguyen 	initialize_tracking();
37213da42859SDinh Nguyen 
37223da42859SDinh Nguyen 	printf("%s: Preparing to start memory calibration\n", __FILE__);
37233da42859SDinh Nguyen 
37243da42859SDinh Nguyen 	debug("%s:%d\n", __func__, __LINE__);
372523f62b36SMarek Vasut 	debug_cond(DLEVEL == 1,
372623f62b36SMarek Vasut 		   "DDR3 FULL_RATE ranks=%u cs/dimm=%u dq/dqs=%u,%u vg/dqs=%u,%u ",
3727*1fa0c8c4SMarek Vasut 		   rwcfg->mem_number_of_ranks, rwcfg->mem_number_of_cs_per_dimm,
3728*1fa0c8c4SMarek Vasut 		   rwcfg->mem_dq_per_read_dqs, rwcfg->mem_dq_per_write_dqs,
3729*1fa0c8c4SMarek Vasut 		   rwcfg->mem_virtual_groups_per_read_dqs,
3730*1fa0c8c4SMarek Vasut 		   rwcfg->mem_virtual_groups_per_write_dqs);
373123f62b36SMarek Vasut 	debug_cond(DLEVEL == 1,
373223f62b36SMarek Vasut 		   "dqs=%u,%u dq=%u dm=%u ptap_delay=%u dtap_delay=%u ",
3733*1fa0c8c4SMarek Vasut 		   rwcfg->mem_if_read_dqs_width, rwcfg->mem_if_write_dqs_width,
3734*1fa0c8c4SMarek Vasut 		   rwcfg->mem_data_width, rwcfg->mem_data_mask_width,
373523f62b36SMarek Vasut 		   IO_DELAY_PER_OPA_TAP, IO_DELAY_PER_DCHAIN_TAP);
373623f62b36SMarek Vasut 	debug_cond(DLEVEL == 1, "dtap_dqsen_delay=%u, dll=%u",
373723f62b36SMarek Vasut 		   IO_DELAY_PER_DQS_EN_DCHAIN_TAP, IO_DLL_CHAIN_LENGTH);
373823f62b36SMarek Vasut 	debug_cond(DLEVEL == 1, "max values: en_p=%u dqdqs_p=%u en_d=%u dqs_in_d=%u ",
373923f62b36SMarek Vasut 		   IO_DQS_EN_PHASE_MAX, IO_DQDQS_OUT_PHASE_MAX,
374023f62b36SMarek Vasut 		   IO_DQS_EN_DELAY_MAX, IO_DQS_IN_DELAY_MAX);
374123f62b36SMarek Vasut 	debug_cond(DLEVEL == 1, "io_in_d=%u io_out1_d=%u io_out2_d=%u ",
374223f62b36SMarek Vasut 		   IO_IO_IN_DELAY_MAX, IO_IO_OUT1_DELAY_MAX,
374323f62b36SMarek Vasut 		   IO_IO_OUT2_DELAY_MAX);
374423f62b36SMarek Vasut 	debug_cond(DLEVEL == 1, "dqs_in_reserve=%u dqs_out_reserve=%u\n",
374523f62b36SMarek Vasut 		   IO_DQS_IN_RESERVE, IO_DQS_OUT_RESERVE);
37463da42859SDinh Nguyen 
37473da42859SDinh Nguyen 	hc_initialize_rom_data();
37483da42859SDinh Nguyen 
37493da42859SDinh Nguyen 	/* update info for sims */
37503da42859SDinh Nguyen 	reg_file_set_stage(CAL_STAGE_NIL);
37513da42859SDinh Nguyen 	reg_file_set_group(0);
37523da42859SDinh Nguyen 
37533da42859SDinh Nguyen 	/*
37543da42859SDinh Nguyen 	 * Load global needed for those actions that require
37553da42859SDinh Nguyen 	 * some dynamic calibration support.
37563da42859SDinh Nguyen 	 */
37573da42859SDinh Nguyen 	dyn_calib_steps = STATIC_CALIB_STEPS;
37583da42859SDinh Nguyen 	/*
37593da42859SDinh Nguyen 	 * Load global to allow dynamic selection of delay loop settings
37603da42859SDinh Nguyen 	 * based on calibration mode.
37613da42859SDinh Nguyen 	 */
37623da42859SDinh Nguyen 	if (!(dyn_calib_steps & CALIB_SKIP_DELAY_LOOPS))
37633da42859SDinh Nguyen 		skip_delay_mask = 0xff;
37643da42859SDinh Nguyen 	else
37653da42859SDinh Nguyen 		skip_delay_mask = 0x0;
37663da42859SDinh Nguyen 
37673da42859SDinh Nguyen 	pass = run_mem_calibrate();
376823a040c0SMarek Vasut 	debug_mem_calibrate(pass);
37693da42859SDinh Nguyen 	return pass;
37703da42859SDinh Nguyen }
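
/*
 * A minimal usage sketch (illustrative; the actual caller lives
 * elsewhere in the SoCFPGA SDRAM code): platform code is expected to
 * run full calibration once the controller and PHY are configured and
 * to treat a zero return as failure, e.g.
 *
 *	if (!sdram_calibration_full())
 *		hang();
 */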
3771