/*
 * Copyright Altera Corporation (C) 2012-2015
 *
 * SPDX-License-Identifier:	BSD-3-Clause
 */

#include <common.h>
#include <asm/io.h>
#include <asm/arch/sdram.h>
#include <errno.h>
#include "sequencer.h"
#include "sequencer_auto.h"
#include "sequencer_auto_ac_init.h"
#include "sequencer_auto_inst_init.h"
#include "sequencer_defines.h"

static struct socfpga_sdr_rw_load_manager *sdr_rw_load_mgr_regs =
	(struct socfpga_sdr_rw_load_manager *)(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0x800);

static struct socfpga_sdr_rw_load_jump_manager *sdr_rw_load_jump_mgr_regs =
	(struct socfpga_sdr_rw_load_jump_manager *)(SDR_PHYGRP_RWMGRGRP_ADDRESS | 0xC00);

static struct socfpga_sdr_reg_file *sdr_reg_file =
	(struct socfpga_sdr_reg_file *)SDR_PHYGRP_REGFILEGRP_ADDRESS;

static struct socfpga_sdr_scc_mgr *sdr_scc_mgr =
	(struct socfpga_sdr_scc_mgr *)(SDR_PHYGRP_SCCGRP_ADDRESS | 0xe00);

static struct socfpga_phy_mgr_cmd *phy_mgr_cmd =
	(struct socfpga_phy_mgr_cmd *)SDR_PHYGRP_PHYMGRGRP_ADDRESS;

static struct socfpga_phy_mgr_cfg *phy_mgr_cfg =
	(struct socfpga_phy_mgr_cfg *)(SDR_PHYGRP_PHYMGRGRP_ADDRESS | 0x40);

static struct socfpga_data_mgr *data_mgr =
	(struct socfpga_data_mgr *)SDR_PHYGRP_DATAMGRGRP_ADDRESS;

static struct socfpga_sdr_ctrl *sdr_ctrl =
	(struct socfpga_sdr_ctrl *)SDR_CTRLGRP_ADDRESS;

#define DELTA_D		1

/*
 * In order to reduce ROM size, most of the selectable calibration steps are
 * decided at compile time based on the user's calibration mode selection,
 * as captured by the STATIC_CALIB_STEPS selection below.
 *
 * However, to support simulation-time selection of fast simulation mode, where
 * we skip everything except the bare minimum, we need a few of the steps to
 * be dynamic. In those cases, we either use the DYNAMIC_CALIB_STEPS for the
 * check, which is based on the rtl-supplied value, or we dynamically compute
 * the value to use based on the dynamically-chosen calibration mode
 */

#define DLEVEL 0
#define STATIC_IN_RTL_SIM 0
#define STATIC_SKIP_DELAY_LOOPS 0

#define STATIC_CALIB_STEPS (STATIC_IN_RTL_SIM | CALIB_SKIP_FULL_TEST | \
	STATIC_SKIP_DELAY_LOOPS)

/* calibration steps requested by the rtl */
uint16_t dyn_calib_steps;

/*
 * To make CALIB_SKIP_DELAY_LOOPS a dynamic conditional option
 * instead of static, we use boolean logic to select between
 * non-skip and skip values
 *
 * The mask is set to include all bits when not-skipping, but is
 * zero when skipping
 */

uint16_t skip_delay_mask;	/* mask off bits when skipping/not-skipping */

#define SKIP_DELAY_LOOP_VALUE_OR_ZERO(non_skip_value) \
	((non_skip_value) & skip_delay_mask)

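/*
 * For illustration: with skip_delay_mask == 0xffff the macro above is an
 * identity, e.g. SKIP_DELAY_LOOP_VALUE_OR_ZERO(0x6a) == 0x6a, while with
 * skip_delay_mask == 0 every delay-loop counter collapses to 0 and the
 * delay loops are reduced to their minimum length.
 */
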
struct gbl_type *gbl;
struct param_type *param;
uint32_t curr_shadow_reg;

static void set_failing_group_stage(uint32_t group, uint32_t stage,
	uint32_t substage)
{
	/*
	 * Only set the global stage if there has not been any other
	 * failing group
	 */
	if (gbl->error_stage == CAL_STAGE_NIL) {
		gbl->error_substage = substage;
		gbl->error_stage = stage;
		gbl->error_group = group;
	}
}

static void reg_file_set_group(u16 set_group)
{
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff0000, set_group << 16);
}

static void reg_file_set_stage(u8 set_stage)
{
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xffff, set_stage & 0xff);
}

static void reg_file_set_sub_stage(u8 set_sub_stage)
{
	set_sub_stage &= 0xff;
	clrsetbits_le32(&sdr_reg_file->cur_stage, 0xff00, set_sub_stage << 8);
}

/**
 * phy_mgr_initialize() - Initialize PHY Manager
 *
 * Initialize PHY Manager.
 */
static void phy_mgr_initialize(void)
{
	u32 ratio;

	debug("%s:%d\n", __func__, __LINE__);
	/* Calibration has control over path to memory */
	/*
	 * In Hard PHY this is a 2-bit control:
	 *   0: AFI Mux Select
	 *   1: DDIO Mux Select
	 */
	writel(0x3, &phy_mgr_cfg->mux_sel);

	/* USER memory clock is not stable, we begin initialization */
	writel(0, &phy_mgr_cfg->reset_mem_stbl);

	/* USER calibration status all set to zero */
	writel(0, &phy_mgr_cfg->cal_status);

	writel(0, &phy_mgr_cfg->cal_debug_info);

	/* Init params only if we do NOT skip calibration. */
	if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL)
		return;

	ratio = RW_MGR_MEM_DQ_PER_READ_DQS /
		RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS;
	param->read_correct_mask_vg = (1 << ratio) - 1;
	param->write_correct_mask_vg = (1 << ratio) - 1;
	param->read_correct_mask = (1 << RW_MGR_MEM_DQ_PER_READ_DQS) - 1;
	param->write_correct_mask = (1 << RW_MGR_MEM_DQ_PER_WRITE_DQS) - 1;
	ratio = RW_MGR_MEM_DATA_WIDTH /
		RW_MGR_MEM_DATA_MASK_WIDTH;
	param->dm_correct_mask = (1 << ratio) - 1;
}

/**
 * set_rank_and_odt_mask() - Set Rank and ODT mask
 * @rank:	Rank mask
 * @odt_mode:	ODT mode, OFF or READ_WRITE
 *
 * Set Rank and ODT mask (On-Die Termination).
 */
static void set_rank_and_odt_mask(const u32 rank, const u32 odt_mode)
{
	u32 odt_mask_0 = 0;
	u32 odt_mask_1 = 0;
	u32 cs_and_odt_mask;

	if (odt_mode == RW_MGR_ODT_MODE_OFF) {
		odt_mask_0 = 0x0;
		odt_mask_1 = 0x0;
	} else {	/* RW_MGR_ODT_MODE_READ_WRITE */
		switch (RW_MGR_MEM_NUMBER_OF_RANKS) {
		case 1:	/* 1 Rank */
			/* Read: ODT = 0 ; Write: ODT = 1 */
			odt_mask_0 = 0x0;
			odt_mask_1 = 0x1;
			break;
		case 2:	/* 2 Ranks */
			if (RW_MGR_MEM_NUMBER_OF_CS_PER_DIMM == 1) {
				/*
				 * - Dual-Slot , Single-Rank (1 CS per DIMM)
				 *   OR
				 * - RDIMM, 4 total CS (2 CS per DIMM, 2 DIMM)
				 *
				 * Since MEM_NUMBER_OF_RANKS is 2, they
				 * are both single rank with 2 CS each
				 * (special for RDIMM).
				 *
				 * Read: Turn on ODT on the opposite rank
				 * Write: Turn on ODT on all ranks
				 */
				odt_mask_0 = 0x3 & ~(1 << rank);
				odt_mask_1 = 0x3;
			} else {
				/*
				 * - Single-Slot , Dual-Rank (2 CS per DIMM)
				 *
				 * Read: Turn off ODT on all ranks
				 * Write: Turn on ODT on active rank
				 */
				odt_mask_0 = 0x0;
				odt_mask_1 = 0x3 & (1 << rank);
			}
			break;
		case 4:	/* 4 Ranks */
			/* Read:
			 * ----------+-----------------------+
			 *           |         ODT           |
			 * Read From +-----------------------+
			 *   Rank    |  3  |  2  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *     0     |  0  |  1  |  0  |  0  |
			 *     1     |  1  |  0  |  0  |  0  |
			 *     2     |  0  |  0  |  0  |  1  |
			 *     3     |  0  |  0  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *
			 * Write:
			 * ----------+-----------------------+
			 *           |         ODT           |
			 * Write To  +-----------------------+
			 *   Rank    |  3  |  2  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 *     0     |  0  |  1  |  0  |  1  |
			 *     1     |  1  |  0  |  1  |  0  |
			 *     2     |  0  |  1  |  0  |  1  |
			 *     3     |  1  |  0  |  1  |  0  |
			 * ----------+-----+-----+-----+-----+
			 */
			switch (rank) {
			case 0:
				odt_mask_0 = 0x4;
				odt_mask_1 = 0x5;
				break;
			case 1:
				odt_mask_0 = 0x8;
				odt_mask_1 = 0xA;
				break;
			case 2:
				odt_mask_0 = 0x1;
				odt_mask_1 = 0x5;
				break;
			case 3:
				odt_mask_0 = 0x2;
				odt_mask_1 = 0xA;
				break;
			}
			break;
		}
	}

	cs_and_odt_mask = (0xFF & ~(1 << rank)) |
			  ((0xFF & odt_mask_0) << 8) |
			  ((0xFF & odt_mask_1) << 16);
	writel(cs_and_odt_mask, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_SET_CS_AND_ODT_MASK_OFFSET);
}

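/*
 * For illustration: with 4 ranks and rank = 1 in READ_WRITE mode, the
 * tables above give odt_mask_0 = 0x8 and odt_mask_1 = 0xA, so
 * set_rank_and_odt_mask() packs
 *	cs_and_odt_mask = (0xFF & ~(1 << 1)) | (0x8 << 8) | (0xA << 16)
 *	                = 0xFD | 0x800 | 0xA0000 = 0x000A08FD
 * i.e. the low byte carries the CS mask with the selected rank's bit
 * cleared, bits [15:8] carry odt_mask_0 (read ODT) and bits [23:16]
 * carry odt_mask_1 (write ODT).
 */
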
/**
 * scc_mgr_set() - Set SCC Manager register
 * @off:	Base offset in SCC Manager space
 * @grp:	Read/Write group
 * @val:	Value to be set
 *
 * This function sets the SCC Manager (Scan Chain Control Manager) register.
 */
static void scc_mgr_set(u32 off, u32 grp, u32 val)
{
	writel(val, SDR_PHYGRP_SCCGRP_ADDRESS | off | (grp << 2));
}

/**
 * scc_mgr_initialize() - Initialize SCC Manager registers
 *
 * Initialize SCC Manager registers.
 */
static void scc_mgr_initialize(void)
{
	/*
	 * Clear register file for HPS. 16 (2^4) is the size of the
	 * full register file in the scc mgr:
	 *	RFILE_DEPTH = 1 + log2(MEM_DQ_PER_DQS + 1 + MEM_DM_PER_DQS +
	 *			       MEM_IF_READ_DQS_WIDTH - 1);
	 */
	int i;

	for (i = 0; i < 16; i++) {
		debug_cond(DLEVEL == 1, "%s:%d: Clearing SCC RFILE index %u\n",
			   __func__, __LINE__, i);
		scc_mgr_set(SCC_MGR_HHP_RFILE_OFFSET, 0, i);
	}
}

static void scc_mgr_set_dqdqs_output_phase(uint32_t write_group, uint32_t phase)
{
	scc_mgr_set(SCC_MGR_DQDQS_OUT_PHASE_OFFSET, write_group, phase);
}

static void scc_mgr_set_dqs_bus_in_delay(uint32_t read_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_DQS_IN_DELAY_OFFSET, read_group, delay);
}

static void scc_mgr_set_dqs_en_phase(uint32_t read_group, uint32_t phase)
{
	scc_mgr_set(SCC_MGR_DQS_EN_PHASE_OFFSET, read_group, phase);
}

static void scc_mgr_set_dqs_en_delay(uint32_t read_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_DQS_EN_DELAY_OFFSET, read_group, delay);
}

static void scc_mgr_set_dqs_io_in_delay(uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, RW_MGR_MEM_DQ_PER_WRITE_DQS,
		    delay);
}

static void scc_mgr_set_dq_in_delay(uint32_t dq_in_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_IN_DELAY_OFFSET, dq_in_group, delay);
}

static void scc_mgr_set_dq_out1_delay(uint32_t dq_in_group, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, dq_in_group, delay);
}

static void scc_mgr_set_dqs_out1_delay(uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET, RW_MGR_MEM_DQ_PER_WRITE_DQS,
		    delay);
}

static void scc_mgr_set_dm_out1_delay(uint32_t dm, uint32_t delay)
{
	scc_mgr_set(SCC_MGR_IO_OUT1_DELAY_OFFSET,
		    RW_MGR_MEM_DQ_PER_WRITE_DQS + 1 + dm,
		    delay);
}

/* load up dqs config settings */
static void scc_mgr_load_dqs(uint32_t dqs)
{
	writel(dqs, &sdr_scc_mgr->dqs_ena);
}

/* load up dqs io config settings */
static void scc_mgr_load_dqs_io(void)
{
	writel(0, &sdr_scc_mgr->dqs_io_ena);
}

/* load up dq config settings */
static void scc_mgr_load_dq(uint32_t dq_in_group)
{
	writel(dq_in_group, &sdr_scc_mgr->dq_ena);
}

/* load up dm config settings */
static void scc_mgr_load_dm(uint32_t dm)
{
	writel(dm, &sdr_scc_mgr->dm_ena);
}

/**
 * scc_mgr_set_all_ranks() - Set SCC Manager register for all ranks
 * @off:	Base offset in SCC Manager space
 * @grp:	Read/Write group
 * @val:	Value to be set
 * @update:	If non-zero, trigger SCC Manager update for all ranks
 *
 * This function sets the SCC Manager (Scan Chain Control Manager) register
 * and optionally triggers the SCC update for all ranks.
 */
static void scc_mgr_set_all_ranks(const u32 off, const u32 grp, const u32 val,
				  const int update)
{
	u32 r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		scc_mgr_set(off, grp, val);

		if (update || (r == 0)) {
			writel(grp, &sdr_scc_mgr->dqs_ena);
			writel(0, &sdr_scc_mgr->update);
		}
	}
}

static void scc_mgr_set_dqs_en_phase_all_ranks(u32 read_group, u32 phase)
{
	/*
	 * USER although the h/w doesn't support different phases per
	 * shadow register, for simplicity our scc manager modeling
	 * keeps different phase settings per shadow reg, and it's
	 * important for us to keep them in sync to match h/w.
	 * for efficiency, the scan chain update should occur only
	 * once to sr0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_PHASE_OFFSET,
			      read_group, phase, 0);
}

static void scc_mgr_set_dqdqs_output_phase_all_ranks(uint32_t write_group,
						     uint32_t phase)
{
	/*
	 * USER although the h/w doesn't support different phases per
	 * shadow register, for simplicity our scc manager modeling
	 * keeps different phase settings per shadow reg, and it's
	 * important for us to keep them in sync to match h/w.
	 * for efficiency, the scan chain update should occur only
	 * once to sr0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQDQS_OUT_PHASE_OFFSET,
			      write_group, phase, 0);
}

static void scc_mgr_set_dqs_en_delay_all_ranks(uint32_t read_group,
					       uint32_t delay)
{
	/*
	 * In shadow register mode, the T11 settings are stored in
	 * registers in the core, which are updated by the DQS_ENA
	 * signals. Not issuing the SCC_MGR_UPD command allows us to
	 * save lots of rank switching overhead, by calling
	 * select_shadow_regs_for_update with update_scan_chains
	 * set to 0.
	 */
	scc_mgr_set_all_ranks(SCC_MGR_DQS_EN_DELAY_OFFSET,
			      read_group, delay, 1);
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_mgr_set_oct_out1_delay() - Set OCT output delay
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * This function sets the OCT output delay in SCC manager.
 */
static void scc_mgr_set_oct_out1_delay(const u32 write_group, const u32 delay)
{
	const int ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH /
			  RW_MGR_MEM_IF_WRITE_DQS_WIDTH;
	const int base = write_group * ratio;
	int i;
	/*
	 * Load the setting in the SCC manager
	 * Although OCT affects only write data, the OCT delay is controlled
	 * by the DQS logic block which is instantiated once per read group.
	 * For protocols where a write group consists of multiple read groups,
	 * the setting must be set multiple times.
	 */
	for (i = 0; i < ratio; i++)
		scc_mgr_set(SCC_MGR_OCT_OUT1_DELAY_OFFSET, base + i, delay);
}

/**
 * scc_mgr_set_hhp_extras() - Set HHP extras.
 *
 * Load the fixed setting in the SCC manager HHP extras.
 */
static void scc_mgr_set_hhp_extras(void)
{
	/*
	 * Load the fixed setting in the SCC manager
	 * bits: 0:0 = 1'b1	- DQS bypass
	 * bits: 1:1 = 1'b1	- DQ bypass
	 * bits: 4:2 = 3'b001	- rfifo_mode
	 * bits: 6:5 = 2'b01	- rfifo clock_select
	 * bits: 7:7 = 1'b0	- separate gating from ungating setting
	 * bits: 8:8 = 1'b0	- separate OE from Output delay setting
	 */
	const u32 value = (0 << 8) | (0 << 7) | (1 << 5) |
			  (1 << 2) | (1 << 1) | (1 << 0);
	const u32 addr = SDR_PHYGRP_SCCGRP_ADDRESS |
			 SCC_MGR_HHP_GLOBALS_OFFSET |
			 SCC_MGR_HHP_EXTRAS_OFFSET;

	debug_cond(DLEVEL == 1, "%s:%d Setting HHP Extras\n",
		   __func__, __LINE__);
	writel(value, addr);
	debug_cond(DLEVEL == 1, "%s:%d Done Setting HHP Extras\n",
		   __func__, __LINE__);
}

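/*
 * For reference, the bit fields listed in scc_mgr_set_hhp_extras() above
 * compose into a single constant:
 *	(1 << 5) | (1 << 2) | (1 << 1) | (1 << 0) = 0x27
 * which is the value written to the HHP extras register.
 */
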
/**
 * scc_mgr_zero_all() - Zero all DQS config
 *
 * Zero all DQS config.
 */
static void scc_mgr_zero_all(void)
{
	int i, r;

	/*
	 * USER Zero all DQS config settings, across all groups and all
	 * shadow registers
	 */
	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) {
			/*
			 * The phases actually don't exist on a per-rank basis,
			 * but there's no harm updating them several times, so
			 * let's keep the code simple.
			 */
			scc_mgr_set_dqs_bus_in_delay(i, IO_DQS_IN_RESERVE);
			scc_mgr_set_dqs_en_phase(i, 0);
			scc_mgr_set_dqs_en_delay(i, 0);
		}

		for (i = 0; i < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; i++) {
			scc_mgr_set_dqdqs_output_phase(i, 0);
			/* Arria V/Cyclone V don't have out2. */
			scc_mgr_set_oct_out1_delay(i, IO_DQS_OUT_RESERVE);
		}
	}

	/* Multicast to all DQS group enables. */
	writel(0xff, &sdr_scc_mgr->dqs_ena);
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_set_bypass_mode() - Set bypass mode and trigger SCC update
 * @write_group:	Write group
 *
 * Set bypass mode and trigger SCC update.
 */
static void scc_set_bypass_mode(const u32 write_group)
{
	/* Multicast to all DQ enables. */
	writel(0xff, &sdr_scc_mgr->dq_ena);
	writel(0xff, &sdr_scc_mgr->dm_ena);

	/* Update current DQS IO enable. */
	writel(0, &sdr_scc_mgr->dqs_io_ena);

	/* Update the DQS logic. */
	writel(write_group, &sdr_scc_mgr->dqs_ena);

	/* Hit update. */
	writel(0, &sdr_scc_mgr->update);
}

/**
 * scc_mgr_load_dqs_for_write_group() - Load DQS settings for Write Group
 * @write_group:	Write group
 *
 * Load DQS settings for Write Group, do not trigger SCC update.
 */
static void scc_mgr_load_dqs_for_write_group(const u32 write_group)
{
	const int ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH /
			  RW_MGR_MEM_IF_WRITE_DQS_WIDTH;
	const int base = write_group * ratio;
	int i;
	/*
	 * Load the setting in the SCC manager
	 * Although OCT affects only write data, the OCT delay is controlled
	 * by the DQS logic block which is instantiated once per read group.
	 * For protocols where a write group consists of multiple read groups,
	 * the setting must be set multiple times.
	 */
	for (i = 0; i < ratio; i++)
		writel(base + i, &sdr_scc_mgr->dqs_ena);
}

/**
 * scc_mgr_zero_group() - Zero all configs for a group
 *
 * Zero DQ, DM, DQS and OCT configs for a group.
 */
static void scc_mgr_zero_group(const u32 write_group, const int out_only)
{
	int i, r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		/* Zero all DQ config settings. */
		for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
			scc_mgr_set_dq_out1_delay(i, 0);
			if (!out_only)
				scc_mgr_set_dq_in_delay(i, 0);
		}

		/* Multicast to all DQ enables. */
		writel(0xff, &sdr_scc_mgr->dq_ena);

		/* Zero all DM config settings. */
		for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++)
			scc_mgr_set_dm_out1_delay(i, 0);

		/* Multicast to all DM enables. */
		writel(0xff, &sdr_scc_mgr->dm_ena);

		/* Zero all DQS IO settings. */
		if (!out_only)
			scc_mgr_set_dqs_io_in_delay(0);

		/* Arria V/Cyclone V don't have out2. */
		scc_mgr_set_dqs_out1_delay(IO_DQS_OUT_RESERVE);
		scc_mgr_set_oct_out1_delay(write_group, IO_DQS_OUT_RESERVE);
		scc_mgr_load_dqs_for_write_group(write_group);

		/* Multicast to all DQS IO enables (only 1 in total). */
		writel(0, &sdr_scc_mgr->dqs_io_ena);

		/* Hit update to zero everything. */
		writel(0, &sdr_scc_mgr->update);
	}
}

/*
 * apply and load a particular input delay for the DQ pins in a group
 * group_bgn is the index of the first dq pin (in the write group)
 */
static void scc_mgr_apply_group_dq_in_delay(uint32_t group_bgn, uint32_t delay)
{
	uint32_t i, p;

	for (i = 0, p = group_bgn; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++, p++) {
		scc_mgr_set_dq_in_delay(p, delay);
		scc_mgr_load_dq(p);
	}
}

/**
 * scc_mgr_apply_group_dq_out1_delay() - Apply and load an output delay for the DQ pins in a group
 * @delay:		Delay value
 *
 * Apply and load a particular output delay for the DQ pins in a group.
 */
static void scc_mgr_apply_group_dq_out1_delay(const u32 delay)
{
	int i;

	for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) {
		scc_mgr_set_dq_out1_delay(i, delay);
		scc_mgr_load_dq(i);
	}
}

/* apply and load a particular output delay for the DM pins in a group */
static void scc_mgr_apply_group_dm_out1_delay(uint32_t delay1)
{
	uint32_t i;

	for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++) {
		scc_mgr_set_dm_out1_delay(i, delay1);
		scc_mgr_load_dm(i);
	}
}

/* apply and load delay on both DQS and OCT out1 */
static void scc_mgr_apply_group_dqs_io_and_oct_out1(uint32_t write_group,
						    uint32_t delay)
{
	scc_mgr_set_dqs_out1_delay(delay);
	scc_mgr_load_dqs_io();

	scc_mgr_set_oct_out1_delay(write_group, delay);
	scc_mgr_load_dqs_for_write_group(write_group);
}

/**
 * scc_mgr_apply_group_all_out_delay_add() - Apply a delay to the entire output side: DQ, DM, DQS, OCT
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * Apply a delay to the entire output side: DQ, DM, DQS, OCT.
 */
static void scc_mgr_apply_group_all_out_delay_add(const u32 write_group,
						  const u32 delay)
{
	u32 i, new_delay;

	/* DQ shift */
	for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++)
		scc_mgr_load_dq(i);

	/* DM shift */
	for (i = 0; i < RW_MGR_NUM_DM_PER_WRITE_GROUP; i++)
		scc_mgr_load_dm(i);

	/* DQS shift */
	new_delay = READ_SCC_DQS_IO_OUT2_DELAY + delay;
	if (new_delay > IO_IO_OUT2_DELAY_MAX) {
		debug_cond(DLEVEL == 1,
			   "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n",
			   __func__, __LINE__, write_group, delay, new_delay,
			   IO_IO_OUT2_DELAY_MAX,
			   new_delay - IO_IO_OUT2_DELAY_MAX);
		new_delay -= IO_IO_OUT2_DELAY_MAX;
		scc_mgr_set_dqs_out1_delay(new_delay);
	}

	scc_mgr_load_dqs_io();

	/* OCT shift */
	new_delay = READ_SCC_OCT_OUT2_DELAY + delay;
	if (new_delay > IO_IO_OUT2_DELAY_MAX) {
		debug_cond(DLEVEL == 1,
			   "%s:%d (%u, %u) DQS: %u > %d; adding %u to OUT1\n",
			   __func__, __LINE__, write_group, delay,
			   new_delay, IO_IO_OUT2_DELAY_MAX,
			   new_delay - IO_IO_OUT2_DELAY_MAX);
		new_delay -= IO_IO_OUT2_DELAY_MAX;
		scc_mgr_set_oct_out1_delay(write_group, new_delay);
	}

	scc_mgr_load_dqs_for_write_group(write_group);
}

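/*
 * For illustration of the OUT1/OUT2 handling above (the numbers are
 * hypothetical): the requested shift is added on top of the current OUT2
 * setting, and only the excess over IO_IO_OUT2_DELAY_MAX is programmed
 * into OUT1. E.g. with a current OUT2 delay of 10, delay = 8 and a
 * maximum of 15, OUT1 would be set to 18 - 15 = 3 taps.
 */
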
/**
 * scc_mgr_apply_group_all_out_delay_add_all_ranks() - Apply a delay to the entire output side to all ranks
 * @write_group:	Write group
 * @delay:		Delay value
 *
 * Apply a delay to the entire output side (DQ, DM, DQS, OCT) to all ranks.
 */
static void
scc_mgr_apply_group_all_out_delay_add_all_ranks(const u32 write_group,
						const u32 delay)
{
	int r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
	     r += NUM_RANKS_PER_SHADOW_REG) {
		scc_mgr_apply_group_all_out_delay_add(write_group, delay);
		writel(0, &sdr_scc_mgr->update);
	}
}

/**
 * set_jump_as_return() - Return instruction optimization
 *
 * Optimization used to recover some slots in ddr3 inst_rom; could be
 * applied to other protocols if we wanted to.
 */
static void set_jump_as_return(void)
{
	/*
	 * To save space, we replace return with jump to special shared
	 * RETURN instruction, so we set the counter to a large value so that
	 * we always jump.
	 */
	writel(0xff, &sdr_rw_load_mgr_regs->load_cntr0);
	writel(RW_MGR_RETURN, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
}

/*
 * should always use constants as argument to ensure all computations are
 * performed at compile time
 */
static void delay_for_n_mem_clocks(const u32 clocks)
{
	u32 afi_clocks;
	u16 c_loop = 0;
	u8 inner = 0;
	u8 outer = 0;

	debug("%s:%d: clocks=%u ... start\n", __func__, __LINE__, clocks);

	/* Scale (rounding up) to get afi clocks. */
	afi_clocks = DIV_ROUND_UP(clocks, AFI_RATE_RATIO);
	if (afi_clocks) /* Temporary underflow protection */
		afi_clocks--;

	/*
	 * Note, we don't bother accounting for being off a little
	 * bit because of a few extra instructions in outer loops.
	 * Note, the loops have a test at the end, and do the test
	 * before the decrement, and so always perform the loop
	 * 1 time more than the counter value
	 */
	if (afi_clocks == 0) {
		;
	} else if (afi_clocks < 0x100) {
		inner = afi_clocks;
		outer = 0;
		c_loop = 0;
	} else if (afi_clocks < 0x10000) {
		inner = 0xff;
		outer = afi_clocks >> 8;
		c_loop = 0;
	} else {	/* >= 0x10000 */
		inner = 0xff;
		outer = 0xff;
		c_loop = afi_clocks >> 16;
	}

	/*
	 * rom instructions are structured as follows:
	 *
	 *    IDLE_LOOP2: jnz cntr0, TARGET_A
	 *    IDLE_LOOP1: jnz cntr1, TARGET_B
	 *                return
	 *
	 * so, when doing nested loops, TARGET_A is set to IDLE_LOOP2, and
	 * TARGET_B is set to IDLE_LOOP2 as well
	 *
	 * if we have no outer loop, though, then we can use IDLE_LOOP1 only,
	 * and set TARGET_B to IDLE_LOOP1 and we skip IDLE_LOOP2 entirely
	 *
	 * a little confusing, but it helps save precious space in the inst_rom
	 * and sequencer rom and keeps the delays more accurate and reduces
	 * overhead
	 */
	if (afi_clocks < 0x100) {
		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner),
		       &sdr_rw_load_mgr_regs->load_cntr1);

		writel(RW_MGR_IDLE_LOOP1,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		writel(RW_MGR_IDLE_LOOP1, SDR_PHYGRP_RWMGRGRP_ADDRESS |
					  RW_MGR_RUN_SINGLE_GROUP_OFFSET);
	} else {
		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(inner),
		       &sdr_rw_load_mgr_regs->load_cntr0);

		writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(outer),
		       &sdr_rw_load_mgr_regs->load_cntr1);

		writel(RW_MGR_IDLE_LOOP2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add0);

		writel(RW_MGR_IDLE_LOOP2,
		       &sdr_rw_load_jump_mgr_regs->load_jump_add1);

		/* hack to get around compiler not being smart enough */
		if (afi_clocks <= 0x10000) {
			/* only need to run once */
			writel(RW_MGR_IDLE_LOOP2, SDR_PHYGRP_RWMGRGRP_ADDRESS |
						  RW_MGR_RUN_SINGLE_GROUP_OFFSET);
		} else {
			do {
				writel(RW_MGR_IDLE_LOOP2,
				       SDR_PHYGRP_RWMGRGRP_ADDRESS |
				       RW_MGR_RUN_SINGLE_GROUP_OFFSET);
			} while (c_loop-- != 0);
		}
	}
	debug("%s:%d clocks=%u ... end\n", __func__, __LINE__, clocks);
}

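/*
 * Worked examples of the decomposition above (the values are only
 * illustrative):
 *	afi_clocks = 0x7c    -> inner = 0x7c, outer = 0,    c_loop = 0
 *	                        (single IDLE_LOOP1 run)
 *	afi_clocks = 0x1234  -> inner = 0xff, outer = 0x12, c_loop = 0
 *	                        (one IDLE_LOOP2 run)
 *	afi_clocks = 0x12345 -> inner = 0xff, outer = 0xff, c_loop = 0x1
 *	                        (IDLE_LOOP2 issued c_loop + 1 times)
 */
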
/**
 * rw_mgr_mem_init_load_regs() - Load instruction registers
 * @cntr0:	Counter 0 value
 * @cntr1:	Counter 1 value
 * @cntr2:	Counter 2 value
 * @jump:	Jump instruction value
 *
 * Load instruction registers.
 */
static void rw_mgr_mem_init_load_regs(u32 cntr0, u32 cntr1, u32 cntr2, u32 jump)
{
	uint32_t grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
			   RW_MGR_RUN_SINGLE_GROUP_OFFSET;

	/* Load counters */
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr0),
	       &sdr_rw_load_mgr_regs->load_cntr0);
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr1),
	       &sdr_rw_load_mgr_regs->load_cntr1);
	writel(SKIP_DELAY_LOOP_VALUE_OR_ZERO(cntr2),
	       &sdr_rw_load_mgr_regs->load_cntr2);

	/* Load jump address */
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add0);
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add1);
	writel(jump, &sdr_rw_load_jump_mgr_regs->load_jump_add2);

	/* Execute count instruction */
	writel(jump, grpaddr);
}

/**
 * rw_mgr_mem_load_user() - Load user calibration values
 * @fin1:	Final instruction 1
 * @fin2:	Final instruction 2
 * @precharge:	If 1, precharge the banks at the end
 *
 * Load user calibration values and optionally precharge the banks.
 */
static void rw_mgr_mem_load_user(const u32 fin1, const u32 fin2,
				 const int precharge)
{
	u32 grpaddr = SDR_PHYGRP_RWMGRGRP_ADDRESS |
		      RW_MGR_RUN_SINGLE_GROUP_OFFSET;
	u32 r;

	for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; r++) {
		if (param->skip_ranks[r]) {
			/* request to skip the rank */
			continue;
		}

		/* set rank */
		set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF);

		/* precharge all banks ... */
		if (precharge)
			writel(RW_MGR_PRECHARGE_ALL, grpaddr);

		/*
		 * USER Use mirrored commands for odd ranks if address
		 * mirroring is on
		 */
		if ((RW_MGR_MEM_ADDRESS_MIRRORING >> r) & 0x1) {
			set_jump_as_return();
			writel(RW_MGR_MRS2_MIRR, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(RW_MGR_MRS3_MIRR, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(RW_MGR_MRS1_MIRR, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(fin1, grpaddr);
		} else {
			set_jump_as_return();
			writel(RW_MGR_MRS2, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(RW_MGR_MRS3, grpaddr);
			delay_for_n_mem_clocks(4);
			set_jump_as_return();
			writel(RW_MGR_MRS1, grpaddr);
			set_jump_as_return();
			writel(fin2, grpaddr);
		}

		if (precharge)
			continue;

		set_jump_as_return();
		writel(RW_MGR_ZQCL, grpaddr);

		/* tZQinit = tDLLK = 512 ck cycles */
		delay_for_n_mem_clocks(512);
	}
}

/**
 * rw_mgr_mem_initialize() - Initialize RW Manager
 *
 * Initialize RW Manager.
 */
static void rw_mgr_mem_initialize(void)
{
	debug("%s:%d\n", __func__, __LINE__);

	/* The reset / cke part of initialization is broadcast to all ranks */
	writel(RW_MGR_RANK_ALL, SDR_PHYGRP_RWMGRGRP_ADDRESS |
				RW_MGR_SET_CS_AND_ODT_MASK_OFFSET);

	/*
	 * Here's how you load registers for a loop
	 * Counters are located @ 0x800
	 * Jump addresses are located @ 0xC00
	 * For both, registers 0 to 3 are selected using bits 3 and 2, like
	 * in 0x800, 0x804, 0x808, 0x80C and 0xC00, 0xC04, 0xC08, 0xC0C
	 * I know this ain't pretty, but Avalon bus throws away the 2 least
	 * significant bits
	 */

	/* Start with memory RESET activated */

	/* tINIT = 200us */

	/*
	 * 200us @ 266MHz (3.75 ns) ~ 54000 clock cycles
	 * If a and b are the numbers of iterations in 2 nested loops,
	 * it takes the following number of cycles to complete the operation:
	 * number_of_cycles = ((2 + n) * a + 2) * b
	 * where n is the number of instructions in the inner loop
	 * One possible solution is n = 0 , a = 256 , b = 106 => a = FF,
	 * b = 6A (see the worked check after this function)
	 */
	rw_mgr_mem_init_load_regs(SEQ_TINIT_CNTR0_VAL, SEQ_TINIT_CNTR1_VAL,
				  SEQ_TINIT_CNTR2_VAL,
				  RW_MGR_INIT_RESET_0_CKE_0);

	/* Indicate that memory is stable. */
	writel(1, &phy_mgr_cfg->reset_mem_stbl);

	/*
	 * transition the RESET to high
	 * Wait for 500us
	 */

	/*
	 * 500us @ 266MHz (3.75 ns) ~ 134000 clock cycles
	 * If a and b are the numbers of iterations in 2 nested loops,
	 * it takes the following number of cycles to complete the operation
	 * number_of_cycles = ((2 + n) * a + 2) * b
	 * where n is the number of instructions in the inner loop
	 * One possible solution is n = 2 , a = 131 , b = 256 => a = 83,
	 * b = FF
	 */
	rw_mgr_mem_init_load_regs(SEQ_TRESET_CNTR0_VAL, SEQ_TRESET_CNTR1_VAL,
				  SEQ_TRESET_CNTR2_VAL,
				  RW_MGR_INIT_RESET_1_CKE_0);

	/* Bring up clock enable. */

	/* tXRP < 250 ck cycles */
	delay_for_n_mem_clocks(250);

	rw_mgr_mem_load_user(RW_MGR_MRS0_DLL_RESET_MIRR, RW_MGR_MRS0_DLL_RESET,
			     0);
}

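/*
 * Worked check of the cycle-count formula used in rw_mgr_mem_initialize()
 * above, using the a and b values quoted in the comments:
 *	tINIT:  ((2 + 0) * 256 + 2) * 106 = 54,484 cycles, which covers
 *	        200us / 3.75ns ~= 53,333 cycles
 *	tRESET: ((2 + 2) * 131 + 2) * 256 = 134,656 cycles, which covers
 *	        500us / 3.75ns ~= 133,333 cycles
 */
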
/**
 * rw_mgr_mem_handoff() - Hand off the memory to user
 *
 * At the end of calibration we have to program the user settings in
 * and hand off the memory to the user.
 */
static void rw_mgr_mem_handoff(void)
{
	rw_mgr_mem_load_user(RW_MGR_MRS0_USER_MIRR, RW_MGR_MRS0_USER, 1);
	/*
	 * Need to wait tMOD (12CK or 15ns) time before issuing other
	 * commands, but we will have plenty of NIOS cycles before actual
	 * handoff so it's okay.
	 */
}

/**
 * rw_mgr_mem_calibrate_write_test_issue() - Issue write test command
 * @group:	Write Group
 * @test_dm:	Use DM
 *
 * Issue write test command. Two variants are provided, one that just tests
 * a write pattern and another that tests datamask functionality.
 */
static void rw_mgr_mem_calibrate_write_test_issue(u32 group,
						  u32 test_dm)
{
	const u32 quick_write_mode =
		(STATIC_CALIB_STEPS & CALIB_SKIP_WRITES) &&
		ENABLE_SUPER_QUICK_CALIBRATION;
	u32 mcc_instruction;
	u32 rw_wl_nop_cycles;

	/*
	 * Set counter and jump addresses for the right
	 * number of NOP cycles.
	 * The number of supported NOP cycles can range from -1 to infinity.
	 * Three different cases are handled:
	 *
	 * 1. For a number of NOP cycles greater than 0, the RW Mgr looping
	 *    mechanism will be used to insert the right number of NOPs
	 *
	 * 2. For a number of NOP cycles equal to 0, the micro-instruction
	 *    issuing the write command will jump straight to the
	 *    micro-instruction that turns on DQS (for DDRx), or outputs write
	 *    data (for RLD), skipping
	 *    the NOP micro-instruction all together
	 *
	 * 3. A number of NOP cycles equal to -1 indicates that DQS must be
	 *    turned on in the same micro-instruction that issues the write
	 *    command. Then we need
	 *    to directly jump to the micro-instruction that sends out the data
	 *
	 * NOTE: Implementing this mechanism uses 2 RW Mgr jump-counters
	 *    (2 and 3). One jump-counter (0) is used to perform multiple
	 *    write-read operations.
	 *    one counter left to issue this command in "multiple-group" mode
	 */

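	/*
	 * Quick reference for the three cases handled below (derived from
	 * the code that follows):
	 *  rw_wl_nop_cycles == -1: CNTR 2 = 0xFF, jump 2 -> *_DATA;
	 *                          CNTR 3 not loaded, jump 3 -> *_NOP
	 *  rw_wl_nop_cycles ==  0: CNTR 2 = 0xFF, jump 2 -> *_DQS;
	 *                          CNTR 3 unused
	 *  rw_wl_nop_cycles  >  0: CNTR 2 = 0 (jump not taken);
	 *                          CNTR 3 = rw_wl_nop_cycles - 1,
	 *                          jump 3 -> *_NOP
	 */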
	rw_wl_nop_cycles = gbl->rw_wl_nop_cycles;

	if (rw_wl_nop_cycles == -1) {
		/*
		 * CNTR 2 - We want to execute the special write operation that
		 * turns on DQS right away and then skip directly to the
		 * instruction that sends out the data. We set the counter to a
		 * large number so that the jump is always taken.
		 */
		writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2);

		/* CNTR 3 - Not used */
		if (test_dm) {
			mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0_WL_1;
			writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_DATA,
			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
			writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_NOP,
			       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
		} else {
			mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0_WL_1;
			writel(RW_MGR_LFSR_WR_RD_BANK_0_DATA,
			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
			writel(RW_MGR_LFSR_WR_RD_BANK_0_NOP,
			       &sdr_rw_load_jump_mgr_regs->load_jump_add3);
		}
	} else if (rw_wl_nop_cycles == 0) {
		/*
		 * CNTR 2 - We want to skip the NOP operation and go straight
		 * to the DQS enable instruction. We set the counter to a large
		 * number so that the jump is always taken.
		 */
		writel(0xFF, &sdr_rw_load_mgr_regs->load_cntr2);

		/* CNTR 3 - Not used */
		if (test_dm) {
			mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0;
			writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_DQS,
			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
		} else {
			mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0;
			writel(RW_MGR_LFSR_WR_RD_BANK_0_DQS,
			       &sdr_rw_load_jump_mgr_regs->load_jump_add2);
		}
	} else {
		/*
		 * CNTR 2 - In this case we want to execute the next instruction
		 * and NOT take the jump. So we set the counter to 0. The jump
		 * address doesn't count.
		 */
		writel(0x0, &sdr_rw_load_mgr_regs->load_cntr2);
		writel(0x0, &sdr_rw_load_jump_mgr_regs->load_jump_add2);

		/*
		 * CNTR 3 - Set the nop counter to the number of cycles we
		 * need to loop for, minus 1.
1136ad64769cSMarek Vasut */ 1137ad64769cSMarek Vasut writel(rw_wl_nop_cycles - 1, &sdr_rw_load_mgr_regs->load_cntr3); 1138ad64769cSMarek Vasut if (test_dm) { 1139ad64769cSMarek Vasut mcc_instruction = RW_MGR_LFSR_WR_RD_DM_BANK_0; 1140ad64769cSMarek Vasut writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_NOP, 1141ad64769cSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add3); 1142ad64769cSMarek Vasut } else { 1143ad64769cSMarek Vasut mcc_instruction = RW_MGR_LFSR_WR_RD_BANK_0; 1144ad64769cSMarek Vasut writel(RW_MGR_LFSR_WR_RD_BANK_0_NOP, 1145ad64769cSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add3); 1146ad64769cSMarek Vasut } 1147ad64769cSMarek Vasut } 1148ad64769cSMarek Vasut 1149ad64769cSMarek Vasut writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS | 1150ad64769cSMarek Vasut RW_MGR_RESET_READ_DATAPATH_OFFSET); 1151ad64769cSMarek Vasut 1152ad64769cSMarek Vasut if (quick_write_mode) 1153ad64769cSMarek Vasut writel(0x08, &sdr_rw_load_mgr_regs->load_cntr0); 1154ad64769cSMarek Vasut else 1155ad64769cSMarek Vasut writel(0x40, &sdr_rw_load_mgr_regs->load_cntr0); 1156ad64769cSMarek Vasut 1157ad64769cSMarek Vasut writel(mcc_instruction, &sdr_rw_load_jump_mgr_regs->load_jump_add0); 1158ad64769cSMarek Vasut 1159ad64769cSMarek Vasut /* 1160ad64769cSMarek Vasut * CNTR 1 - This is used to ensure enough time elapses 1161ad64769cSMarek Vasut * for read data to come back. 1162ad64769cSMarek Vasut */ 1163ad64769cSMarek Vasut writel(0x30, &sdr_rw_load_mgr_regs->load_cntr1); 1164ad64769cSMarek Vasut 1165ad64769cSMarek Vasut if (test_dm) { 1166ad64769cSMarek Vasut writel(RW_MGR_LFSR_WR_RD_DM_BANK_0_WAIT, 1167ad64769cSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add1); 1168ad64769cSMarek Vasut } else { 1169ad64769cSMarek Vasut writel(RW_MGR_LFSR_WR_RD_BANK_0_WAIT, 1170ad64769cSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add1); 1171ad64769cSMarek Vasut } 1172ad64769cSMarek Vasut 11738371c2eeSMarek Vasut writel(mcc_instruction, (SDR_PHYGRP_RWMGRGRP_ADDRESS | 11748371c2eeSMarek Vasut RW_MGR_RUN_SINGLE_GROUP_OFFSET) + 11758371c2eeSMarek Vasut (group << 2)); 1176ad64769cSMarek Vasut } 1177ad64769cSMarek Vasut 11784a82854bSMarek Vasut /** 11794a82854bSMarek Vasut * rw_mgr_mem_calibrate_write_test() - Test writes, check for single/multiple pass 11804a82854bSMarek Vasut * @rank_bgn: Rank number 11814a82854bSMarek Vasut * @write_group: Write Group 11824a82854bSMarek Vasut * @use_dm: Use DM 11834a82854bSMarek Vasut * @all_correct: All bits must be correct in the mask 11844a82854bSMarek Vasut * @bit_chk: Resulting bit mask after the test 11854a82854bSMarek Vasut * @all_ranks: Test all ranks 11864a82854bSMarek Vasut * 11874a82854bSMarek Vasut * Test writes, can check for a single bit pass or multiple bit pass. 11884a82854bSMarek Vasut */ 1189b9452ea0SMarek Vasut static int 1190b9452ea0SMarek Vasut rw_mgr_mem_calibrate_write_test(const u32 rank_bgn, const u32 write_group, 1191b9452ea0SMarek Vasut const u32 use_dm, const u32 all_correct, 1192b9452ea0SMarek Vasut u32 *bit_chk, const u32 all_ranks) 1193ad64769cSMarek Vasut { 1194b9452ea0SMarek Vasut const u32 rank_end = all_ranks ? 
1195b9452ea0SMarek Vasut RW_MGR_MEM_NUMBER_OF_RANKS : 1196ad64769cSMarek Vasut (rank_bgn + NUM_RANKS_PER_SHADOW_REG); 1197b9452ea0SMarek Vasut const u32 shift_ratio = RW_MGR_MEM_DQ_PER_WRITE_DQS / 1198b9452ea0SMarek Vasut RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS; 1199b9452ea0SMarek Vasut const u32 correct_mask_vg = param->write_correct_mask_vg; 1200b9452ea0SMarek Vasut 1201b9452ea0SMarek Vasut u32 tmp_bit_chk, base_rw_mgr; 1202b9452ea0SMarek Vasut int vg, r; 1203ad64769cSMarek Vasut 1204ad64769cSMarek Vasut *bit_chk = param->write_correct_mask; 1205ad64769cSMarek Vasut 1206ad64769cSMarek Vasut for (r = rank_bgn; r < rank_end; r++) { 1207b9452ea0SMarek Vasut /* Request to skip the rank */ 1208b9452ea0SMarek Vasut if (param->skip_ranks[r]) 1209ad64769cSMarek Vasut continue; 1210ad64769cSMarek Vasut 1211b9452ea0SMarek Vasut /* Set rank */ 1212ad64769cSMarek Vasut set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE); 1213ad64769cSMarek Vasut 1214ad64769cSMarek Vasut tmp_bit_chk = 0; 1215b9452ea0SMarek Vasut for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS - 1; 1216b9452ea0SMarek Vasut vg >= 0; vg--) { 1217b9452ea0SMarek Vasut /* Reset the FIFOs to get pointers to known state. */ 1218ad64769cSMarek Vasut writel(0, &phy_mgr_cmd->fifo_reset); 1219ad64769cSMarek Vasut 1220b9452ea0SMarek Vasut rw_mgr_mem_calibrate_write_test_issue( 1221b9452ea0SMarek Vasut write_group * 1222ad64769cSMarek Vasut RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS + vg, 1223ad64769cSMarek Vasut use_dm); 1224ad64769cSMarek Vasut 1225b9452ea0SMarek Vasut base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS); 1226b9452ea0SMarek Vasut tmp_bit_chk <<= shift_ratio; 1227b9452ea0SMarek Vasut tmp_bit_chk |= (correct_mask_vg & ~(base_rw_mgr)); 1228ad64769cSMarek Vasut } 1229b9452ea0SMarek Vasut 1230ad64769cSMarek Vasut *bit_chk &= tmp_bit_chk; 1231ad64769cSMarek Vasut } 1232ad64769cSMarek Vasut 1233ad64769cSMarek Vasut set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); 1234b9452ea0SMarek Vasut if (all_correct) { 1235b9452ea0SMarek Vasut debug_cond(DLEVEL == 2, 1236b9452ea0SMarek Vasut "write_test(%u,%u,ALL) : %u == %u => %i\n", 1237b9452ea0SMarek Vasut write_group, use_dm, *bit_chk, 1238b9452ea0SMarek Vasut param->write_correct_mask, 1239b9452ea0SMarek Vasut *bit_chk == param->write_correct_mask); 1240ad64769cSMarek Vasut return *bit_chk == param->write_correct_mask; 1241ad64769cSMarek Vasut } else { 1242ad64769cSMarek Vasut set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); 1243b9452ea0SMarek Vasut debug_cond(DLEVEL == 2, 1244b9452ea0SMarek Vasut "write_test(%u,%u,ONE) : %u != %i => %i\n", 1245b9452ea0SMarek Vasut write_group, use_dm, *bit_chk, 0, *bit_chk != 0); 1246ad64769cSMarek Vasut return *bit_chk != 0x00; 1247ad64769cSMarek Vasut } 1248ad64769cSMarek Vasut } 1249ad64769cSMarek Vasut 1250d844c7d4SMarek Vasut /** 1251d844c7d4SMarek Vasut * rw_mgr_mem_calibrate_read_test_patterns() - Read back test patterns 1252d844c7d4SMarek Vasut * @rank_bgn: Rank number 1253d844c7d4SMarek Vasut * @group: Read/Write Group 1254d844c7d4SMarek Vasut * @all_ranks: Test all ranks 1255d844c7d4SMarek Vasut * 1256d844c7d4SMarek Vasut * Performs a guaranteed read on the patterns we are going to use during a 1257d844c7d4SMarek Vasut * read test to ensure memory works. 
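 *
 * Return: 0 if the guaranteed read pattern comes back intact on every
 * tested rank, -EIO otherwise.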
12583da42859SDinh Nguyen */ 1259d844c7d4SMarek Vasut static int 1260d844c7d4SMarek Vasut rw_mgr_mem_calibrate_read_test_patterns(const u32 rank_bgn, const u32 group, 1261d844c7d4SMarek Vasut const u32 all_ranks) 12623da42859SDinh Nguyen { 1263d844c7d4SMarek Vasut const u32 addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | 1264d844c7d4SMarek Vasut RW_MGR_RUN_SINGLE_GROUP_OFFSET; 1265d844c7d4SMarek Vasut const u32 addr_offset = 1266d844c7d4SMarek Vasut (group * RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS) << 2; 1267d844c7d4SMarek Vasut const u32 rank_end = all_ranks ? 1268d844c7d4SMarek Vasut RW_MGR_MEM_NUMBER_OF_RANKS : 12693da42859SDinh Nguyen (rank_bgn + NUM_RANKS_PER_SHADOW_REG); 1270d844c7d4SMarek Vasut const u32 shift_ratio = RW_MGR_MEM_DQ_PER_READ_DQS / 1271d844c7d4SMarek Vasut RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS; 1272d844c7d4SMarek Vasut const u32 correct_mask_vg = param->read_correct_mask_vg; 12733da42859SDinh Nguyen 1274d844c7d4SMarek Vasut u32 tmp_bit_chk, base_rw_mgr, bit_chk; 1275d844c7d4SMarek Vasut int vg, r; 1276d844c7d4SMarek Vasut int ret = 0; 1277d844c7d4SMarek Vasut 1278d844c7d4SMarek Vasut bit_chk = param->read_correct_mask; 12793da42859SDinh Nguyen 12803da42859SDinh Nguyen for (r = rank_bgn; r < rank_end; r++) { 1281d844c7d4SMarek Vasut /* Request to skip the rank */ 12823da42859SDinh Nguyen if (param->skip_ranks[r]) 12833da42859SDinh Nguyen continue; 12843da42859SDinh Nguyen 1285d844c7d4SMarek Vasut /* Set rank */ 12863da42859SDinh Nguyen set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE); 12873da42859SDinh Nguyen 12883da42859SDinh Nguyen /* Load up a constant bursts of read commands */ 12891273dd9eSMarek Vasut writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0); 12901273dd9eSMarek Vasut writel(RW_MGR_GUARANTEED_READ, 12911273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add0); 12923da42859SDinh Nguyen 12931273dd9eSMarek Vasut writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1); 12941273dd9eSMarek Vasut writel(RW_MGR_GUARANTEED_READ_CONT, 12951273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add1); 12963da42859SDinh Nguyen 12973da42859SDinh Nguyen tmp_bit_chk = 0; 1298d844c7d4SMarek Vasut for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS - 1; 1299d844c7d4SMarek Vasut vg >= 0; vg--) { 1300d844c7d4SMarek Vasut /* Reset the FIFOs to get pointers to known state. 
*/ 13011273dd9eSMarek Vasut writel(0, &phy_mgr_cmd->fifo_reset); 13021273dd9eSMarek Vasut writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS | 13031273dd9eSMarek Vasut RW_MGR_RESET_READ_DATAPATH_OFFSET); 1304d844c7d4SMarek Vasut writel(RW_MGR_GUARANTEED_READ, 1305d844c7d4SMarek Vasut addr + addr_offset + (vg << 2)); 13063da42859SDinh Nguyen 13071273dd9eSMarek Vasut base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS); 1308d844c7d4SMarek Vasut tmp_bit_chk <<= shift_ratio; 1309d844c7d4SMarek Vasut tmp_bit_chk |= correct_mask_vg & ~base_rw_mgr; 13103da42859SDinh Nguyen } 13113da42859SDinh Nguyen 1312d844c7d4SMarek Vasut bit_chk &= tmp_bit_chk; 1313d844c7d4SMarek Vasut } 1314d844c7d4SMarek Vasut 131517fdc916SMarek Vasut writel(RW_MGR_CLEAR_DQS_ENABLE, addr + (group << 2)); 13163da42859SDinh Nguyen 13173da42859SDinh Nguyen set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); 1318d844c7d4SMarek Vasut 1319d844c7d4SMarek Vasut if (bit_chk != param->read_correct_mask) 1320d844c7d4SMarek Vasut ret = -EIO; 1321d844c7d4SMarek Vasut 1322d844c7d4SMarek Vasut debug_cond(DLEVEL == 1, 1323d844c7d4SMarek Vasut "%s:%d test_load_patterns(%u,ALL) => (%u == %u) => %i\n", 1324d844c7d4SMarek Vasut __func__, __LINE__, group, bit_chk, 1325d844c7d4SMarek Vasut param->read_correct_mask, ret); 1326d844c7d4SMarek Vasut 1327d844c7d4SMarek Vasut return ret; 13283da42859SDinh Nguyen } 13293da42859SDinh Nguyen 1330b6cb7f9eSMarek Vasut /** 1331b6cb7f9eSMarek Vasut * rw_mgr_mem_calibrate_read_load_patterns() - Load up the patterns for read test 1332b6cb7f9eSMarek Vasut * @rank_bgn: Rank number 1333b6cb7f9eSMarek Vasut * @all_ranks: Test all ranks 1334b6cb7f9eSMarek Vasut * 1335b6cb7f9eSMarek Vasut * Load up the patterns we are going to use during a read test. 1336b6cb7f9eSMarek Vasut */ 1337b6cb7f9eSMarek Vasut static void rw_mgr_mem_calibrate_read_load_patterns(const u32 rank_bgn, 1338b6cb7f9eSMarek Vasut const int all_ranks) 13393da42859SDinh Nguyen { 1340b6cb7f9eSMarek Vasut const u32 rank_end = all_ranks ? 
1341b6cb7f9eSMarek Vasut RW_MGR_MEM_NUMBER_OF_RANKS : 13423da42859SDinh Nguyen (rank_bgn + NUM_RANKS_PER_SHADOW_REG); 1343b6cb7f9eSMarek Vasut u32 r; 13443da42859SDinh Nguyen 13453da42859SDinh Nguyen debug("%s:%d\n", __func__, __LINE__); 1346b6cb7f9eSMarek Vasut 13473da42859SDinh Nguyen for (r = rank_bgn; r < rank_end; r++) { 13483da42859SDinh Nguyen if (param->skip_ranks[r]) 13493da42859SDinh Nguyen /* request to skip the rank */ 13503da42859SDinh Nguyen continue; 13513da42859SDinh Nguyen 13523da42859SDinh Nguyen /* set rank */ 13533da42859SDinh Nguyen set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE); 13543da42859SDinh Nguyen 13553da42859SDinh Nguyen /* Load up a constant bursts */ 13561273dd9eSMarek Vasut writel(0x20, &sdr_rw_load_mgr_regs->load_cntr0); 13573da42859SDinh Nguyen 13581273dd9eSMarek Vasut writel(RW_MGR_GUARANTEED_WRITE_WAIT0, 13591273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add0); 13603da42859SDinh Nguyen 13611273dd9eSMarek Vasut writel(0x20, &sdr_rw_load_mgr_regs->load_cntr1); 13623da42859SDinh Nguyen 13631273dd9eSMarek Vasut writel(RW_MGR_GUARANTEED_WRITE_WAIT1, 13641273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add1); 13653da42859SDinh Nguyen 13661273dd9eSMarek Vasut writel(0x04, &sdr_rw_load_mgr_regs->load_cntr2); 13673da42859SDinh Nguyen 13681273dd9eSMarek Vasut writel(RW_MGR_GUARANTEED_WRITE_WAIT2, 13691273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add2); 13703da42859SDinh Nguyen 13711273dd9eSMarek Vasut writel(0x04, &sdr_rw_load_mgr_regs->load_cntr3); 13723da42859SDinh Nguyen 13731273dd9eSMarek Vasut writel(RW_MGR_GUARANTEED_WRITE_WAIT3, 13741273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add3); 13753da42859SDinh Nguyen 13761273dd9eSMarek Vasut writel(RW_MGR_GUARANTEED_WRITE, SDR_PHYGRP_RWMGRGRP_ADDRESS | 13771273dd9eSMarek Vasut RW_MGR_RUN_SINGLE_GROUP_OFFSET); 13783da42859SDinh Nguyen } 13793da42859SDinh Nguyen 13803da42859SDinh Nguyen set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); 13813da42859SDinh Nguyen } 13823da42859SDinh Nguyen 1383783fcf59SMarek Vasut /** 1384783fcf59SMarek Vasut * rw_mgr_mem_calibrate_read_test() - Perform READ test on single rank 1385783fcf59SMarek Vasut * @rank_bgn: Rank number 1386783fcf59SMarek Vasut * @group: Read/Write group 1387783fcf59SMarek Vasut * @num_tries: Number of retries of the test 1388783fcf59SMarek Vasut * @all_correct: All bits must be correct in the mask 1389783fcf59SMarek Vasut * @bit_chk: Resulting bit mask after the test 1390783fcf59SMarek Vasut * @all_groups: Test all R/W groups 1391783fcf59SMarek Vasut * @all_ranks: Test all ranks 1392783fcf59SMarek Vasut * 1393783fcf59SMarek Vasut * Try a read and see if it returns correct data back. Test has dummy reads 1394783fcf59SMarek Vasut * inserted into the mix used to align DQS enable. Test has more thorough 1395783fcf59SMarek Vasut * checks than the regular read test. 13963da42859SDinh Nguyen */ 13973cb8bf3fSMarek Vasut static int 13983cb8bf3fSMarek Vasut rw_mgr_mem_calibrate_read_test(const u32 rank_bgn, const u32 group, 13993cb8bf3fSMarek Vasut const u32 num_tries, const u32 all_correct, 14003cb8bf3fSMarek Vasut u32 *bit_chk, 14013cb8bf3fSMarek Vasut const u32 all_groups, const u32 all_ranks) 14023da42859SDinh Nguyen { 14033cb8bf3fSMarek Vasut const u32 rank_end = all_ranks ? 
RW_MGR_MEM_NUMBER_OF_RANKS : 14043da42859SDinh Nguyen (rank_bgn + NUM_RANKS_PER_SHADOW_REG); 14053cb8bf3fSMarek Vasut const u32 quick_read_mode = 14063cb8bf3fSMarek Vasut ((STATIC_CALIB_STEPS & CALIB_SKIP_DELAY_SWEEPS) && 14073cb8bf3fSMarek Vasut ENABLE_SUPER_QUICK_CALIBRATION); 14083cb8bf3fSMarek Vasut u32 correct_mask_vg = param->read_correct_mask_vg; 14093cb8bf3fSMarek Vasut u32 tmp_bit_chk; 14103cb8bf3fSMarek Vasut u32 base_rw_mgr; 14113cb8bf3fSMarek Vasut u32 addr; 14123cb8bf3fSMarek Vasut 14133cb8bf3fSMarek Vasut int r, vg, ret; 14143da42859SDinh Nguyen 14153da42859SDinh Nguyen *bit_chk = param->read_correct_mask; 14163da42859SDinh Nguyen 14173da42859SDinh Nguyen for (r = rank_bgn; r < rank_end; r++) { 14183da42859SDinh Nguyen if (param->skip_ranks[r]) 14193da42859SDinh Nguyen /* request to skip the rank */ 14203da42859SDinh Nguyen continue; 14213da42859SDinh Nguyen 14223da42859SDinh Nguyen /* set rank */ 14233da42859SDinh Nguyen set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_READ_WRITE); 14243da42859SDinh Nguyen 14251273dd9eSMarek Vasut writel(0x10, &sdr_rw_load_mgr_regs->load_cntr1); 14263da42859SDinh Nguyen 14271273dd9eSMarek Vasut writel(RW_MGR_READ_B2B_WAIT1, 14281273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add1); 14293da42859SDinh Nguyen 14301273dd9eSMarek Vasut writel(0x10, &sdr_rw_load_mgr_regs->load_cntr2); 14311273dd9eSMarek Vasut writel(RW_MGR_READ_B2B_WAIT2, 14321273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add2); 14333da42859SDinh Nguyen 14343da42859SDinh Nguyen if (quick_read_mode) 14351273dd9eSMarek Vasut writel(0x1, &sdr_rw_load_mgr_regs->load_cntr0); 14363da42859SDinh Nguyen /* need at least two (1+1) reads to capture failures */ 14373da42859SDinh Nguyen else if (all_groups) 14381273dd9eSMarek Vasut writel(0x06, &sdr_rw_load_mgr_regs->load_cntr0); 14393da42859SDinh Nguyen else 14401273dd9eSMarek Vasut writel(0x32, &sdr_rw_load_mgr_regs->load_cntr0); 14413da42859SDinh Nguyen 14421273dd9eSMarek Vasut writel(RW_MGR_READ_B2B, 14431273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add0); 14443da42859SDinh Nguyen if (all_groups) 14453da42859SDinh Nguyen writel(RW_MGR_MEM_IF_READ_DQS_WIDTH * 14463da42859SDinh Nguyen RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS - 1, 14471273dd9eSMarek Vasut &sdr_rw_load_mgr_regs->load_cntr3); 14483da42859SDinh Nguyen else 14491273dd9eSMarek Vasut writel(0x0, &sdr_rw_load_mgr_regs->load_cntr3); 14503da42859SDinh Nguyen 14511273dd9eSMarek Vasut writel(RW_MGR_READ_B2B, 14521273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add3); 14533da42859SDinh Nguyen 14543da42859SDinh Nguyen tmp_bit_chk = 0; 14557ce23bb6SMarek Vasut for (vg = RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS - 1; vg >= 0; 14567ce23bb6SMarek Vasut vg--) { 1457ba522c76SMarek Vasut /* Reset the FIFOs to get pointers to known state. 
*/ 14581273dd9eSMarek Vasut writel(0, &phy_mgr_cmd->fifo_reset); 14591273dd9eSMarek Vasut writel(0, SDR_PHYGRP_RWMGRGRP_ADDRESS | 14601273dd9eSMarek Vasut RW_MGR_RESET_READ_DATAPATH_OFFSET); 14613da42859SDinh Nguyen 1462ba522c76SMarek Vasut if (all_groups) { 1463ba522c76SMarek Vasut addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | 1464ba522c76SMarek Vasut RW_MGR_RUN_ALL_GROUPS_OFFSET; 1465ba522c76SMarek Vasut } else { 1466ba522c76SMarek Vasut addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | 1467ba522c76SMarek Vasut RW_MGR_RUN_SINGLE_GROUP_OFFSET; 1468ba522c76SMarek Vasut } 1469c4815f76SMarek Vasut 147017fdc916SMarek Vasut writel(RW_MGR_READ_B2B, addr + 14713da42859SDinh Nguyen ((group * RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS + 14723da42859SDinh Nguyen vg) << 2)); 14733da42859SDinh Nguyen 14741273dd9eSMarek Vasut base_rw_mgr = readl(SDR_PHYGRP_RWMGRGRP_ADDRESS); 1475ba522c76SMarek Vasut tmp_bit_chk <<= RW_MGR_MEM_DQ_PER_READ_DQS / 1476ba522c76SMarek Vasut RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS; 1477ba522c76SMarek Vasut tmp_bit_chk |= correct_mask_vg & ~(base_rw_mgr); 14783da42859SDinh Nguyen } 14797ce23bb6SMarek Vasut 14803da42859SDinh Nguyen *bit_chk &= tmp_bit_chk; 14813da42859SDinh Nguyen } 14823da42859SDinh Nguyen 1483c4815f76SMarek Vasut addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_RUN_SINGLE_GROUP_OFFSET; 148417fdc916SMarek Vasut writel(RW_MGR_CLEAR_DQS_ENABLE, addr + (group << 2)); 14853da42859SDinh Nguyen 14863853d65eSMarek Vasut set_rank_and_odt_mask(0, RW_MGR_ODT_MODE_OFF); 14873853d65eSMarek Vasut 14883da42859SDinh Nguyen if (all_correct) { 14893853d65eSMarek Vasut ret = (*bit_chk == param->read_correct_mask); 14903853d65eSMarek Vasut debug_cond(DLEVEL == 2, 14913853d65eSMarek Vasut "%s:%d read_test(%u,ALL,%u) => (%u == %u) => %i\n", 14923853d65eSMarek Vasut __func__, __LINE__, group, all_groups, *bit_chk, 14933853d65eSMarek Vasut param->read_correct_mask, ret); 14943da42859SDinh Nguyen } else { 14953853d65eSMarek Vasut ret = (*bit_chk != 0x00); 14963853d65eSMarek Vasut debug_cond(DLEVEL == 2, 14973853d65eSMarek Vasut "%s:%d read_test(%u,ONE,%u) => (%u != %u) => %i\n", 14983853d65eSMarek Vasut __func__, __LINE__, group, all_groups, *bit_chk, 14993853d65eSMarek Vasut 0, ret); 15003da42859SDinh Nguyen } 15013853d65eSMarek Vasut 15023853d65eSMarek Vasut return ret; 15033da42859SDinh Nguyen } 15043da42859SDinh Nguyen 150596df6036SMarek Vasut /** 150696df6036SMarek Vasut * rw_mgr_mem_calibrate_read_test_all_ranks() - Perform READ test on all ranks 150796df6036SMarek Vasut * @grp: Read/Write group 150896df6036SMarek Vasut * @num_tries: Number of retries of the test 150996df6036SMarek Vasut * @all_correct: All bits must be correct in the mask 151096df6036SMarek Vasut * @all_groups: Test all R/W groups 151196df6036SMarek Vasut * 151296df6036SMarek Vasut * Perform a READ test across all memory ranks. 
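 *
 * This is a thin wrapper: it calls rw_mgr_mem_calibrate_read_test() with
 * rank_bgn = 0 and all_ranks = 1, and discards the resulting per-bit mask.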
151396df6036SMarek Vasut */ 151496df6036SMarek Vasut static int 151596df6036SMarek Vasut rw_mgr_mem_calibrate_read_test_all_ranks(const u32 grp, const u32 num_tries, 151696df6036SMarek Vasut const u32 all_correct, 151796df6036SMarek Vasut const u32 all_groups) 15183da42859SDinh Nguyen { 151996df6036SMarek Vasut u32 bit_chk; 152096df6036SMarek Vasut return rw_mgr_mem_calibrate_read_test(0, grp, num_tries, all_correct, 152196df6036SMarek Vasut &bit_chk, all_groups, 1); 15223da42859SDinh Nguyen } 15233da42859SDinh Nguyen 152460bb8a8aSMarek Vasut /** 152560bb8a8aSMarek Vasut * rw_mgr_incr_vfifo() - Increase VFIFO value 152660bb8a8aSMarek Vasut * @grp: Read/Write group 152760bb8a8aSMarek Vasut * 152860bb8a8aSMarek Vasut * Increase VFIFO value. 152960bb8a8aSMarek Vasut */ 15308c887b6eSMarek Vasut static void rw_mgr_incr_vfifo(const u32 grp) 15313da42859SDinh Nguyen { 15321273dd9eSMarek Vasut writel(grp, &phy_mgr_cmd->inc_vfifo_hard_phy); 15333da42859SDinh Nguyen } 15343da42859SDinh Nguyen 153560bb8a8aSMarek Vasut /** 153660bb8a8aSMarek Vasut * rw_mgr_decr_vfifo() - Decrease VFIFO value 153760bb8a8aSMarek Vasut * @grp: Read/Write group 153860bb8a8aSMarek Vasut * 153960bb8a8aSMarek Vasut * Decrease VFIFO value. 154060bb8a8aSMarek Vasut */ 15418c887b6eSMarek Vasut static void rw_mgr_decr_vfifo(const u32 grp) 15423da42859SDinh Nguyen { 154360bb8a8aSMarek Vasut u32 i; 15443da42859SDinh Nguyen 15453da42859SDinh Nguyen for (i = 0; i < VFIFO_SIZE - 1; i++) 15468c887b6eSMarek Vasut rw_mgr_incr_vfifo(grp); 15473da42859SDinh Nguyen } 15483da42859SDinh Nguyen 1549d145ca9fSMarek Vasut /** 1550d145ca9fSMarek Vasut * find_vfifo_failing_read() - Push VFIFO to get a failing read 1551d145ca9fSMarek Vasut * @grp: Read/Write group 1552d145ca9fSMarek Vasut * 1553d145ca9fSMarek Vasut * Push VFIFO until a failing read happens. 1554d145ca9fSMarek Vasut */ 1555d145ca9fSMarek Vasut static int find_vfifo_failing_read(const u32 grp) 15563da42859SDinh Nguyen { 155796df6036SMarek Vasut u32 v, ret, fail_cnt = 0; 15583da42859SDinh Nguyen 15598c887b6eSMarek Vasut for (v = 0; v < VFIFO_SIZE; v++) { 1560d145ca9fSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d: vfifo %u\n", 15613da42859SDinh Nguyen __func__, __LINE__, v); 1562d145ca9fSMarek Vasut ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1, 156396df6036SMarek Vasut PASS_ONE_BIT, 0); 1564d145ca9fSMarek Vasut if (!ret) { 15653da42859SDinh Nguyen fail_cnt++; 15663da42859SDinh Nguyen 15673da42859SDinh Nguyen if (fail_cnt == 2) 1568d145ca9fSMarek Vasut return v; 15693da42859SDinh Nguyen } 15703da42859SDinh Nguyen 1571d145ca9fSMarek Vasut /* Fiddle with FIFO. */ 15728c887b6eSMarek Vasut rw_mgr_incr_vfifo(grp); 15733da42859SDinh Nguyen } 15743da42859SDinh Nguyen 1575d145ca9fSMarek Vasut /* No failing read found! Something must have gone wrong. 
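 * Note that 0 is still returned rather than an error code; the caller in
 * rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase() ignores the return value
 * and carries on with the VFIFO left wherever the sweep ended.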
*/ 1576d145ca9fSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d: vfifo failed\n", __func__, __LINE__); 15773da42859SDinh Nguyen return 0; 15783da42859SDinh Nguyen } 15793da42859SDinh Nguyen 1580192d6f9fSMarek Vasut /** 158152e8f217SMarek Vasut * sdr_find_phase_delay() - Find DQS enable phase or delay 158252e8f217SMarek Vasut * @working: If 1, look for working phase/delay, if 0, look for non-working 158352e8f217SMarek Vasut * @delay: If 1, look for delay, if 0, look for phase 158452e8f217SMarek Vasut * @grp: Read/Write group 158552e8f217SMarek Vasut * @work: Working window position 158652e8f217SMarek Vasut * @work_inc: Working window increment 158752e8f217SMarek Vasut * @pd: DQS Phase/Delay Iterator 158852e8f217SMarek Vasut * 158952e8f217SMarek Vasut * Find working or non-working DQS enable phase setting. 159052e8f217SMarek Vasut */ 159152e8f217SMarek Vasut static int sdr_find_phase_delay(int working, int delay, const u32 grp, 159252e8f217SMarek Vasut u32 *work, const u32 work_inc, u32 *pd) 159352e8f217SMarek Vasut { 159452e8f217SMarek Vasut const u32 max = delay ? IO_DQS_EN_DELAY_MAX : IO_DQS_EN_PHASE_MAX; 159596df6036SMarek Vasut u32 ret; 159652e8f217SMarek Vasut 159752e8f217SMarek Vasut for (; *pd <= max; (*pd)++) { 159852e8f217SMarek Vasut if (delay) 159952e8f217SMarek Vasut scc_mgr_set_dqs_en_delay_all_ranks(grp, *pd); 160052e8f217SMarek Vasut else 160152e8f217SMarek Vasut scc_mgr_set_dqs_en_phase_all_ranks(grp, *pd); 160252e8f217SMarek Vasut 160352e8f217SMarek Vasut ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1, 160496df6036SMarek Vasut PASS_ONE_BIT, 0); 160552e8f217SMarek Vasut if (!working) 160652e8f217SMarek Vasut ret = !ret; 160752e8f217SMarek Vasut 160852e8f217SMarek Vasut if (ret) 160952e8f217SMarek Vasut return 0; 161052e8f217SMarek Vasut 161152e8f217SMarek Vasut if (work) 161252e8f217SMarek Vasut *work += work_inc; 161352e8f217SMarek Vasut } 161452e8f217SMarek Vasut 161552e8f217SMarek Vasut return -EINVAL; 161652e8f217SMarek Vasut } 161752e8f217SMarek Vasut /** 1618192d6f9fSMarek Vasut * sdr_find_phase() - Find DQS enable phase 1619192d6f9fSMarek Vasut * @working: If 1, look for working phase, if 0, look for non-working phase 1620192d6f9fSMarek Vasut * @grp: Read/Write group 1621192d6f9fSMarek Vasut * @work: Working window position 1622192d6f9fSMarek Vasut * @i: Iterator 1623192d6f9fSMarek Vasut * @p: DQS Phase Iterator 1624192d6f9fSMarek Vasut * 1625192d6f9fSMarek Vasut * Find working or non-working DQS enable phase setting. 1626192d6f9fSMarek Vasut */ 16278c887b6eSMarek Vasut static int sdr_find_phase(int working, const u32 grp, u32 *work, 162886a39dc7SMarek Vasut u32 *i, u32 *p) 1629192d6f9fSMarek Vasut { 1630192d6f9fSMarek Vasut const u32 end = VFIFO_SIZE + (working ? 0 : 1); 163152e8f217SMarek Vasut int ret; 1632192d6f9fSMarek Vasut 1633192d6f9fSMarek Vasut for (; *i < end; (*i)++) { 1634192d6f9fSMarek Vasut if (working) 1635192d6f9fSMarek Vasut *p = 0; 1636192d6f9fSMarek Vasut 163752e8f217SMarek Vasut ret = sdr_find_phase_delay(working, 0, grp, work, 163852e8f217SMarek Vasut IO_DELAY_PER_OPA_TAP, p); 163952e8f217SMarek Vasut if (!ret) 1640192d6f9fSMarek Vasut return 0; 1641192d6f9fSMarek Vasut 1642192d6f9fSMarek Vasut if (*p > IO_DQS_EN_PHASE_MAX) { 1643192d6f9fSMarek Vasut /* Fiddle with FIFO. 
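 * The phase iterator ran past IO_DQS_EN_PHASE_MAX without hitting the
 * target, so bump the VFIFO one step; when hunting for the failing edge
 * (!working), the phase search also restarts from 0.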
*/ 16448c887b6eSMarek Vasut rw_mgr_incr_vfifo(grp); 1645192d6f9fSMarek Vasut if (!working) 1646192d6f9fSMarek Vasut *p = 0; 1647192d6f9fSMarek Vasut } 1648192d6f9fSMarek Vasut } 1649192d6f9fSMarek Vasut 1650192d6f9fSMarek Vasut return -EINVAL; 1651192d6f9fSMarek Vasut } 1652192d6f9fSMarek Vasut 16534c5e584bSMarek Vasut /** 16544c5e584bSMarek Vasut * sdr_working_phase() - Find working DQS enable phase 16554c5e584bSMarek Vasut * @grp: Read/Write group 16564c5e584bSMarek Vasut * @work_bgn: Working window start position 16574c5e584bSMarek Vasut * @d: dtaps output value 16584c5e584bSMarek Vasut * @p: DQS Phase Iterator 16594c5e584bSMarek Vasut * @i: Iterator 16604c5e584bSMarek Vasut * 16614c5e584bSMarek Vasut * Find working DQS enable phase setting. 16624c5e584bSMarek Vasut */ 16638c887b6eSMarek Vasut static int sdr_working_phase(const u32 grp, u32 *work_bgn, u32 *d, 16644c5e584bSMarek Vasut u32 *p, u32 *i) 16653da42859SDinh Nguyen { 166635ee867fSMarek Vasut const u32 dtaps_per_ptap = IO_DELAY_PER_OPA_TAP / 166735ee867fSMarek Vasut IO_DELAY_PER_DQS_EN_DCHAIN_TAP; 1668192d6f9fSMarek Vasut int ret; 16693da42859SDinh Nguyen 1670192d6f9fSMarek Vasut *work_bgn = 0; 1671192d6f9fSMarek Vasut 1672192d6f9fSMarek Vasut for (*d = 0; *d <= dtaps_per_ptap; (*d)++) { 1673192d6f9fSMarek Vasut *i = 0; 1674521fe39cSMarek Vasut scc_mgr_set_dqs_en_delay_all_ranks(grp, *d); 16758c887b6eSMarek Vasut ret = sdr_find_phase(1, grp, work_bgn, i, p); 1676192d6f9fSMarek Vasut if (!ret) 1677192d6f9fSMarek Vasut return 0; 1678192d6f9fSMarek Vasut *work_bgn += IO_DELAY_PER_DQS_EN_DCHAIN_TAP; 16793da42859SDinh Nguyen } 16803da42859SDinh Nguyen 168138ed6922SMarek Vasut /* Cannot find working solution */ 1682192d6f9fSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d find_dqs_en_phase: no vfifo/ptap/dtap\n", 1683192d6f9fSMarek Vasut __func__, __LINE__); 1684192d6f9fSMarek Vasut return -EINVAL; 16853da42859SDinh Nguyen } 16863da42859SDinh Nguyen 16874c5e584bSMarek Vasut /** 16884c5e584bSMarek Vasut * sdr_backup_phase() - Find DQS enable backup phase 16894c5e584bSMarek Vasut * @grp: Read/Write group 16904c5e584bSMarek Vasut * @work_bgn: Working window start position 16914c5e584bSMarek Vasut * @p: DQS Phase Iterator 16924c5e584bSMarek Vasut * 16934c5e584bSMarek Vasut * Find DQS enable backup phase setting. 
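 *
 * Back off one phase tap (decrementing the VFIFO if the phase wraps below
 * zero), then sweep the DQS enable delay taps forward; if a read passes
 * before the delay climbs back to the original *work_bgn, pull *work_bgn
 * back to that earlier point. The VFIFO and phase iterator are then
 * restored and the delay cleared again before returning.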
16944c5e584bSMarek Vasut */ 16958c887b6eSMarek Vasut static void sdr_backup_phase(const u32 grp, u32 *work_bgn, u32 *p) 16963da42859SDinh Nguyen { 169796df6036SMarek Vasut u32 tmp_delay, d; 16984c5e584bSMarek Vasut int ret; 16993da42859SDinh Nguyen 17003da42859SDinh Nguyen /* Special case code for backing up a phase */ 17013da42859SDinh Nguyen if (*p == 0) { 17023da42859SDinh Nguyen *p = IO_DQS_EN_PHASE_MAX; 17038c887b6eSMarek Vasut rw_mgr_decr_vfifo(grp); 17043da42859SDinh Nguyen } else { 17053da42859SDinh Nguyen (*p)--; 17063da42859SDinh Nguyen } 17073da42859SDinh Nguyen tmp_delay = *work_bgn - IO_DELAY_PER_OPA_TAP; 1708521fe39cSMarek Vasut scc_mgr_set_dqs_en_phase_all_ranks(grp, *p); 17093da42859SDinh Nguyen 171049891df6SMarek Vasut for (d = 0; d <= IO_DQS_EN_DELAY_MAX && tmp_delay < *work_bgn; d++) { 171149891df6SMarek Vasut scc_mgr_set_dqs_en_delay_all_ranks(grp, d); 17123da42859SDinh Nguyen 17134c5e584bSMarek Vasut ret = rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1, 171496df6036SMarek Vasut PASS_ONE_BIT, 0); 17154c5e584bSMarek Vasut if (ret) { 17163da42859SDinh Nguyen *work_bgn = tmp_delay; 17173da42859SDinh Nguyen break; 17183da42859SDinh Nguyen } 171949891df6SMarek Vasut 172049891df6SMarek Vasut tmp_delay += IO_DELAY_PER_DQS_EN_DCHAIN_TAP; 17213da42859SDinh Nguyen } 17223da42859SDinh Nguyen 17234c5e584bSMarek Vasut /* Restore VFIFO to old state before we decremented it (if needed). */ 17243da42859SDinh Nguyen (*p)++; 17253da42859SDinh Nguyen if (*p > IO_DQS_EN_PHASE_MAX) { 17263da42859SDinh Nguyen *p = 0; 17278c887b6eSMarek Vasut rw_mgr_incr_vfifo(grp); 17283da42859SDinh Nguyen } 17293da42859SDinh Nguyen 1730521fe39cSMarek Vasut scc_mgr_set_dqs_en_delay_all_ranks(grp, 0); 17313da42859SDinh Nguyen } 17323da42859SDinh Nguyen 17334c5e584bSMarek Vasut /** 17344c5e584bSMarek Vasut * sdr_nonworking_phase() - Find non-working DQS enable phase 17354c5e584bSMarek Vasut * @grp: Read/Write group 17364c5e584bSMarek Vasut * @work_end: Working window end position 17374c5e584bSMarek Vasut * @p: DQS Phase Iterator 17384c5e584bSMarek Vasut * @i: Iterator 17394c5e584bSMarek Vasut * 17404c5e584bSMarek Vasut * Find non-working DQS enable phase setting. 17414c5e584bSMarek Vasut */ 17428c887b6eSMarek Vasut static int sdr_nonworking_phase(const u32 grp, u32 *work_end, u32 *p, u32 *i) 17433da42859SDinh Nguyen { 1744192d6f9fSMarek Vasut int ret; 17453da42859SDinh Nguyen 17463da42859SDinh Nguyen (*p)++; 17473da42859SDinh Nguyen *work_end += IO_DELAY_PER_OPA_TAP; 17483da42859SDinh Nguyen if (*p > IO_DQS_EN_PHASE_MAX) { 1749192d6f9fSMarek Vasut /* Fiddle with FIFO. */ 17503da42859SDinh Nguyen *p = 0; 17518c887b6eSMarek Vasut rw_mgr_incr_vfifo(grp); 17523da42859SDinh Nguyen } 17533da42859SDinh Nguyen 17548c887b6eSMarek Vasut ret = sdr_find_phase(0, grp, work_end, i, p); 1755192d6f9fSMarek Vasut if (ret) { 175638ed6922SMarek Vasut /* Cannot see edge of failing read. */ 1757192d6f9fSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d: end: failed\n", 1758192d6f9fSMarek Vasut __func__, __LINE__); 1759192d6f9fSMarek Vasut } 1760192d6f9fSMarek Vasut 1761192d6f9fSMarek Vasut return ret; 17623da42859SDinh Nguyen } 17633da42859SDinh Nguyen 17640a13a0fbSMarek Vasut /** 17650a13a0fbSMarek Vasut * sdr_find_window_center() - Find center of the working DQS window. 
17660a13a0fbSMarek Vasut * @grp: Read/Write group 17670a13a0fbSMarek Vasut * @work_bgn: First working settings 17680a13a0fbSMarek Vasut * @work_end: Last working settings 17690a13a0fbSMarek Vasut * 17700a13a0fbSMarek Vasut * Find center of the working DQS enable window. 17710a13a0fbSMarek Vasut */ 17720a13a0fbSMarek Vasut static int sdr_find_window_center(const u32 grp, const u32 work_bgn, 17738c887b6eSMarek Vasut const u32 work_end) 17743da42859SDinh Nguyen { 177596df6036SMarek Vasut u32 work_mid; 17763da42859SDinh Nguyen int tmp_delay = 0; 177728fd242aSMarek Vasut int i, p, d; 17783da42859SDinh Nguyen 177928fd242aSMarek Vasut work_mid = (work_bgn + work_end) / 2; 17803da42859SDinh Nguyen 17813da42859SDinh Nguyen debug_cond(DLEVEL == 2, "work_bgn=%d work_end=%d work_mid=%d\n", 178228fd242aSMarek Vasut work_bgn, work_end, work_mid); 17833da42859SDinh Nguyen /* Get the middle delay to be less than a VFIFO delay */ 1784cbb0b7e0SMarek Vasut tmp_delay = (IO_DQS_EN_PHASE_MAX + 1) * IO_DELAY_PER_OPA_TAP; 178528fd242aSMarek Vasut 17863da42859SDinh Nguyen debug_cond(DLEVEL == 2, "vfifo ptap delay %d\n", tmp_delay); 1787cbb0b7e0SMarek Vasut work_mid %= tmp_delay; 178828fd242aSMarek Vasut debug_cond(DLEVEL == 2, "new work_mid %d\n", work_mid); 17893da42859SDinh Nguyen 1790cbb0b7e0SMarek Vasut tmp_delay = rounddown(work_mid, IO_DELAY_PER_OPA_TAP); 1791cbb0b7e0SMarek Vasut if (tmp_delay > IO_DQS_EN_PHASE_MAX * IO_DELAY_PER_OPA_TAP) 1792cbb0b7e0SMarek Vasut tmp_delay = IO_DQS_EN_PHASE_MAX * IO_DELAY_PER_OPA_TAP; 1793cbb0b7e0SMarek Vasut p = tmp_delay / IO_DELAY_PER_OPA_TAP; 17943da42859SDinh Nguyen 1795cbb0b7e0SMarek Vasut debug_cond(DLEVEL == 2, "new p %d, tmp_delay=%d\n", p, tmp_delay); 1796cbb0b7e0SMarek Vasut 1797cbb0b7e0SMarek Vasut d = DIV_ROUND_UP(work_mid - tmp_delay, IO_DELAY_PER_DQS_EN_DCHAIN_TAP); 1798cbb0b7e0SMarek Vasut if (d > IO_DQS_EN_DELAY_MAX) 1799cbb0b7e0SMarek Vasut d = IO_DQS_EN_DELAY_MAX; 1800cbb0b7e0SMarek Vasut tmp_delay += d * IO_DELAY_PER_DQS_EN_DCHAIN_TAP; 1801cbb0b7e0SMarek Vasut 180228fd242aSMarek Vasut debug_cond(DLEVEL == 2, "new d %d, tmp_delay=%d\n", d, tmp_delay); 180328fd242aSMarek Vasut 1804cbb0b7e0SMarek Vasut scc_mgr_set_dqs_en_phase_all_ranks(grp, p); 180528fd242aSMarek Vasut scc_mgr_set_dqs_en_delay_all_ranks(grp, d); 18063da42859SDinh Nguyen 18073da42859SDinh Nguyen /* 18083da42859SDinh Nguyen * push vfifo until we can successfully calibrate. We can do this 18093da42859SDinh Nguyen * because the largest possible margin in 1 VFIFO cycle. 18103da42859SDinh Nguyen */ 18113da42859SDinh Nguyen for (i = 0; i < VFIFO_SIZE; i++) { 18128c887b6eSMarek Vasut debug_cond(DLEVEL == 2, "find_dqs_en_phase: center\n"); 181328fd242aSMarek Vasut if (rw_mgr_mem_calibrate_read_test_all_ranks(grp, 1, 18143da42859SDinh Nguyen PASS_ONE_BIT, 181596df6036SMarek Vasut 0)) { 181628fd242aSMarek Vasut debug_cond(DLEVEL == 2, 18178c887b6eSMarek Vasut "%s:%d center: found: ptap=%u dtap=%u\n", 18188c887b6eSMarek Vasut __func__, __LINE__, p, d); 18190a13a0fbSMarek Vasut return 0; 18203da42859SDinh Nguyen } 18210a13a0fbSMarek Vasut 18220a13a0fbSMarek Vasut /* Fiddle with FIFO. 
*/ 18238c887b6eSMarek Vasut rw_mgr_incr_vfifo(grp); 18240a13a0fbSMarek Vasut } 18250a13a0fbSMarek Vasut 18260a13a0fbSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d center: failed.\n", 18270a13a0fbSMarek Vasut __func__, __LINE__); 18280a13a0fbSMarek Vasut return -EINVAL; 18293da42859SDinh Nguyen } 18303da42859SDinh Nguyen 183133756893SMarek Vasut /** 183233756893SMarek Vasut * rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase() - Find a good DQS enable to use 183333756893SMarek Vasut * @grp: Read/Write Group 183433756893SMarek Vasut * 183533756893SMarek Vasut * Find a good DQS enable to use. 183633756893SMarek Vasut */ 1837914546e7SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(const u32 grp) 18383da42859SDinh Nguyen { 18395735540fSMarek Vasut u32 d, p, i; 18405735540fSMarek Vasut u32 dtaps_per_ptap; 18415735540fSMarek Vasut u32 work_bgn, work_end; 18425735540fSMarek Vasut u32 found_passing_read, found_failing_read, initial_failing_dtap; 18435735540fSMarek Vasut int ret; 18443da42859SDinh Nguyen 18453da42859SDinh Nguyen debug("%s:%d %u\n", __func__, __LINE__, grp); 18463da42859SDinh Nguyen 18473da42859SDinh Nguyen reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER); 18483da42859SDinh Nguyen 18493da42859SDinh Nguyen scc_mgr_set_dqs_en_delay_all_ranks(grp, 0); 18503da42859SDinh Nguyen scc_mgr_set_dqs_en_phase_all_ranks(grp, 0); 18513da42859SDinh Nguyen 18522f3589caSMarek Vasut /* Step 0: Determine number of delay taps for each phase tap. */ 18533da42859SDinh Nguyen dtaps_per_ptap = IO_DELAY_PER_OPA_TAP / IO_DELAY_PER_DQS_EN_DCHAIN_TAP; 18543da42859SDinh Nguyen 18552f3589caSMarek Vasut /* Step 1: First push vfifo until we get a failing read. */ 1856d145ca9fSMarek Vasut find_vfifo_failing_read(grp); 18573da42859SDinh Nguyen 18582f3589caSMarek Vasut /* Step 2: Find first working phase, increment in ptaps. */ 18593da42859SDinh Nguyen work_bgn = 0; 1860914546e7SMarek Vasut ret = sdr_working_phase(grp, &work_bgn, &d, &p, &i); 1861914546e7SMarek Vasut if (ret) 1862914546e7SMarek Vasut return ret; 18633da42859SDinh Nguyen 18643da42859SDinh Nguyen work_end = work_bgn; 18653da42859SDinh Nguyen 18663da42859SDinh Nguyen /* 18672f3589caSMarek Vasut * If d is 0 then the working window covers a phase tap and we can 18682f3589caSMarek Vasut * follow the old procedure. Otherwise, we've found the beginning 18693da42859SDinh Nguyen * and we need to increment the dtaps until we find the end. 18703da42859SDinh Nguyen */ 18713da42859SDinh Nguyen if (d == 0) { 18722f3589caSMarek Vasut /* 18732f3589caSMarek Vasut * Step 3a: If we have room, back off by one and 18742f3589caSMarek Vasut * increment in dtaps. 18752f3589caSMarek Vasut */ 18768c887b6eSMarek Vasut sdr_backup_phase(grp, &work_bgn, &p); 18773da42859SDinh Nguyen 18782f3589caSMarek Vasut /* 18792f3589caSMarek Vasut * Step 4a: go forward from working phase to non working 18802f3589caSMarek Vasut * phase, increment in ptaps. 18812f3589caSMarek Vasut */ 1882914546e7SMarek Vasut ret = sdr_nonworking_phase(grp, &work_end, &p, &i); 1883914546e7SMarek Vasut if (ret) 1884914546e7SMarek Vasut return ret; 18853da42859SDinh Nguyen 18862f3589caSMarek Vasut /* Step 5a: Back off one from last, increment in dtaps. 
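 * (Same backup trick as sdr_backup_phase(): drop one ptap, wrapping the
 * VFIFO if the phase underflows, then let sdr_find_phase_delay() below
 * walk the dtaps forward to find the failing edge again.)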
*/ 18873da42859SDinh Nguyen 18883da42859SDinh Nguyen /* Special case code for backing up a phase */ 18893da42859SDinh Nguyen if (p == 0) { 18903da42859SDinh Nguyen p = IO_DQS_EN_PHASE_MAX; 18918c887b6eSMarek Vasut rw_mgr_decr_vfifo(grp); 18923da42859SDinh Nguyen } else { 18933da42859SDinh Nguyen p = p - 1; 18943da42859SDinh Nguyen } 18953da42859SDinh Nguyen 18963da42859SDinh Nguyen work_end -= IO_DELAY_PER_OPA_TAP; 18973da42859SDinh Nguyen scc_mgr_set_dqs_en_phase_all_ranks(grp, p); 18983da42859SDinh Nguyen 18993da42859SDinh Nguyen d = 0; 19003da42859SDinh Nguyen 19012f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d p: ptap=%u\n", 19022f3589caSMarek Vasut __func__, __LINE__, p); 19033da42859SDinh Nguyen } 19043da42859SDinh Nguyen 19052f3589caSMarek Vasut /* The dtap increment to find the failing edge is done here. */ 190652e8f217SMarek Vasut sdr_find_phase_delay(0, 1, grp, &work_end, 190752e8f217SMarek Vasut IO_DELAY_PER_DQS_EN_DCHAIN_TAP, &d); 19083da42859SDinh Nguyen 19093da42859SDinh Nguyen /* Go back to working dtap */ 19103da42859SDinh Nguyen if (d != 0) 19113da42859SDinh Nguyen work_end -= IO_DELAY_PER_DQS_EN_DCHAIN_TAP; 19123da42859SDinh Nguyen 19132f3589caSMarek Vasut debug_cond(DLEVEL == 2, 19142f3589caSMarek Vasut "%s:%d p/d: ptap=%u dtap=%u end=%u\n", 19152f3589caSMarek Vasut __func__, __LINE__, p, d - 1, work_end); 19163da42859SDinh Nguyen 19173da42859SDinh Nguyen if (work_end < work_bgn) { 19183da42859SDinh Nguyen /* nil range */ 19192f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d end-2: failed\n", 19202f3589caSMarek Vasut __func__, __LINE__); 1921914546e7SMarek Vasut return -EINVAL; 19223da42859SDinh Nguyen } 19233da42859SDinh Nguyen 19242f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d found range [%u,%u]\n", 19253da42859SDinh Nguyen __func__, __LINE__, work_bgn, work_end); 19263da42859SDinh Nguyen 19273da42859SDinh Nguyen /* 19282f3589caSMarek Vasut * We need to calculate the number of dtaps that equal a ptap. 19292f3589caSMarek Vasut * To do that we'll back up a ptap and re-find the edge of the 19302f3589caSMarek Vasut * window using dtaps 19313da42859SDinh Nguyen */ 19322f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d calculate dtaps_per_ptap for tracking\n", 19332f3589caSMarek Vasut __func__, __LINE__); 19343da42859SDinh Nguyen 19353da42859SDinh Nguyen /* Special case code for backing up a phase */ 19363da42859SDinh Nguyen if (p == 0) { 19373da42859SDinh Nguyen p = IO_DQS_EN_PHASE_MAX; 19388c887b6eSMarek Vasut rw_mgr_decr_vfifo(grp); 19392f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d backedup cycle/phase: p=%u\n", 19402f3589caSMarek Vasut __func__, __LINE__, p); 19413da42859SDinh Nguyen } else { 19423da42859SDinh Nguyen p = p - 1; 19432f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d backedup phase only: p=%u", 19442f3589caSMarek Vasut __func__, __LINE__, p); 19453da42859SDinh Nguyen } 19463da42859SDinh Nguyen 19473da42859SDinh Nguyen scc_mgr_set_dqs_en_phase_all_ranks(grp, p); 19483da42859SDinh Nguyen 19493da42859SDinh Nguyen /* 19503da42859SDinh Nguyen * Increase dtap until we first see a passing read (in case the 19512f3589caSMarek Vasut * window is smaller than a ptap), and then a failing read to 19522f3589caSMarek Vasut * mark the edge of the window again. 19533da42859SDinh Nguyen */ 19543da42859SDinh Nguyen 19552f3589caSMarek Vasut /* Find a passing read. 
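 * initial_failing_dtap below remembers where the failing region started;
 * once a passing and then a failing dtap have been found, the difference
 * d - initial_failing_dtap is the measured dtaps_per_ptap.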
*/ 19562f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d find passing read\n", 19573da42859SDinh Nguyen __func__, __LINE__); 195852e8f217SMarek Vasut 19593da42859SDinh Nguyen initial_failing_dtap = d; 19603da42859SDinh Nguyen 196152e8f217SMarek Vasut found_passing_read = !sdr_find_phase_delay(1, 1, grp, NULL, 0, &d); 19623da42859SDinh Nguyen if (found_passing_read) { 19632f3589caSMarek Vasut /* Find a failing read. */ 19642f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d find failing read\n", 19652f3589caSMarek Vasut __func__, __LINE__); 196652e8f217SMarek Vasut d++; 196752e8f217SMarek Vasut found_failing_read = !sdr_find_phase_delay(0, 1, grp, NULL, 0, 196852e8f217SMarek Vasut &d); 19693da42859SDinh Nguyen } else { 19702f3589caSMarek Vasut debug_cond(DLEVEL == 1, 19712f3589caSMarek Vasut "%s:%d failed to calculate dtaps per ptap. Fall back on static value\n", 19722f3589caSMarek Vasut __func__, __LINE__); 19733da42859SDinh Nguyen } 19743da42859SDinh Nguyen 19753da42859SDinh Nguyen /* 19763da42859SDinh Nguyen * The dynamically calculated dtaps_per_ptap is only valid if we 19773da42859SDinh Nguyen * found a passing/failing read. If we didn't, it means d hit the max 19783da42859SDinh Nguyen * (IO_DQS_EN_DELAY_MAX). Otherwise, dtaps_per_ptap retains its 19793da42859SDinh Nguyen * statically calculated value. 19803da42859SDinh Nguyen */ 19813da42859SDinh Nguyen if (found_passing_read && found_failing_read) 19823da42859SDinh Nguyen dtaps_per_ptap = d - initial_failing_dtap; 19833da42859SDinh Nguyen 19841273dd9eSMarek Vasut writel(dtaps_per_ptap, &sdr_reg_file->dtaps_per_ptap); 19852f3589caSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d dtaps_per_ptap=%u - %u = %u", 19862f3589caSMarek Vasut __func__, __LINE__, d, initial_failing_dtap, dtaps_per_ptap); 19873da42859SDinh Nguyen 19882f3589caSMarek Vasut /* Step 6: Find the centre of the window. */ 1989914546e7SMarek Vasut ret = sdr_find_window_center(grp, work_bgn, work_end); 19903da42859SDinh Nguyen 1991914546e7SMarek Vasut return ret; 19923da42859SDinh Nguyen } 19933da42859SDinh Nguyen 1994c4907898SMarek Vasut /** 1995901dc36eSMarek Vasut * search_stop_check() - Check if the detected edge is valid 1996901dc36eSMarek Vasut * @write: Perform read (Stage 2) or write (Stage 3) calibration 1997901dc36eSMarek Vasut * @d: DQS delay 1998901dc36eSMarek Vasut * @rank_bgn: Rank number 1999901dc36eSMarek Vasut * @write_group: Write Group 2000901dc36eSMarek Vasut * @read_group: Read Group 2001901dc36eSMarek Vasut * @bit_chk: Resulting bit mask after the test 2002901dc36eSMarek Vasut * @sticky_bit_chk: Resulting sticky bit mask after the test 2003901dc36eSMarek Vasut * @use_read_test: Perform read test 2004901dc36eSMarek Vasut * 2005901dc36eSMarek Vasut * Test if the found edge is valid. 2006901dc36eSMarek Vasut */ 2007901dc36eSMarek Vasut static u32 search_stop_check(const int write, const int d, const int rank_bgn, 2008901dc36eSMarek Vasut const u32 write_group, const u32 read_group, 2009901dc36eSMarek Vasut u32 *bit_chk, u32 *sticky_bit_chk, 2010901dc36eSMarek Vasut const u32 use_read_test) 2011901dc36eSMarek Vasut { 2012901dc36eSMarek Vasut const u32 ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH / 2013901dc36eSMarek Vasut RW_MGR_MEM_IF_WRITE_DQS_WIDTH; 2014901dc36eSMarek Vasut const u32 correct_mask = write ? param->write_correct_mask : 2015901dc36eSMarek Vasut param->read_correct_mask; 2016901dc36eSMarek Vasut const u32 per_dqs = write ? 
RW_MGR_MEM_DQ_PER_WRITE_DQS : 2017901dc36eSMarek Vasut RW_MGR_MEM_DQ_PER_READ_DQS; 2018901dc36eSMarek Vasut u32 ret; 2019901dc36eSMarek Vasut /* 2020901dc36eSMarek Vasut * Stop searching when the read test doesn't pass AND when 2021901dc36eSMarek Vasut * we've seen a passing read on every bit. 2022901dc36eSMarek Vasut */ 2023901dc36eSMarek Vasut if (write) { /* WRITE-ONLY */ 2024901dc36eSMarek Vasut ret = !rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 2025901dc36eSMarek Vasut 0, PASS_ONE_BIT, 2026901dc36eSMarek Vasut bit_chk, 0); 2027901dc36eSMarek Vasut } else if (use_read_test) { /* READ-ONLY */ 2028901dc36eSMarek Vasut ret = !rw_mgr_mem_calibrate_read_test(rank_bgn, read_group, 2029901dc36eSMarek Vasut NUM_READ_PB_TESTS, 2030901dc36eSMarek Vasut PASS_ONE_BIT, bit_chk, 2031901dc36eSMarek Vasut 0, 0); 2032901dc36eSMarek Vasut } else { /* READ-ONLY */ 2033901dc36eSMarek Vasut rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 0, 2034901dc36eSMarek Vasut PASS_ONE_BIT, bit_chk, 0); 2035901dc36eSMarek Vasut *bit_chk = *bit_chk >> (per_dqs * 2036901dc36eSMarek Vasut (read_group - (write_group * ratio))); 2037901dc36eSMarek Vasut ret = (*bit_chk == 0); 2038901dc36eSMarek Vasut } 2039901dc36eSMarek Vasut *sticky_bit_chk = *sticky_bit_chk | *bit_chk; 2040901dc36eSMarek Vasut ret = ret && (*sticky_bit_chk == correct_mask); 2041901dc36eSMarek Vasut debug_cond(DLEVEL == 2, 2042901dc36eSMarek Vasut "%s:%d center(left): dtap=%u => %u == %u && %u", 2043901dc36eSMarek Vasut __func__, __LINE__, d, 2044901dc36eSMarek Vasut *sticky_bit_chk, correct_mask, ret); 2045901dc36eSMarek Vasut return ret; 2046901dc36eSMarek Vasut } 2047901dc36eSMarek Vasut 2048901dc36eSMarek Vasut /** 204971120773SMarek Vasut * search_left_edge() - Find left edge of DQ/DQS working phase 205071120773SMarek Vasut * @write: Perform read (Stage 2) or write (Stage 3) calibration 205171120773SMarek Vasut * @rank_bgn: Rank number 205271120773SMarek Vasut * @write_group: Write Group 205371120773SMarek Vasut * @read_group: Read Group 205471120773SMarek Vasut * @test_bgn: Rank number to begin the test 205571120773SMarek Vasut * @sticky_bit_chk: Resulting sticky bit mask after the test 205671120773SMarek Vasut * @left_edge: Left edge of the DQ/DQS phase 205771120773SMarek Vasut * @right_edge: Right edge of the DQ/DQS phase 205871120773SMarek Vasut * @use_read_test: Perform read test 205971120773SMarek Vasut * 206071120773SMarek Vasut * Find left edge of DQ/DQS working phase. 206171120773SMarek Vasut */ 206271120773SMarek Vasut static void search_left_edge(const int write, const int rank_bgn, 206371120773SMarek Vasut const u32 write_group, const u32 read_group, const u32 test_bgn, 20640c4be198SMarek Vasut u32 *sticky_bit_chk, 206571120773SMarek Vasut int *left_edge, int *right_edge, const u32 use_read_test) 206671120773SMarek Vasut { 206771120773SMarek Vasut const u32 delay_max = write ? IO_IO_OUT1_DELAY_MAX : IO_IO_IN_DELAY_MAX; 206871120773SMarek Vasut const u32 dqs_max = write ? IO_IO_OUT1_DELAY_MAX : IO_DQS_IN_DELAY_MAX; 206971120773SMarek Vasut const u32 per_dqs = write ? 
RW_MGR_MEM_DQ_PER_WRITE_DQS : 207071120773SMarek Vasut RW_MGR_MEM_DQ_PER_READ_DQS; 20710c4be198SMarek Vasut u32 stop, bit_chk; 207271120773SMarek Vasut int i, d; 207371120773SMarek Vasut 207471120773SMarek Vasut for (d = 0; d <= dqs_max; d++) { 207571120773SMarek Vasut if (write) 207671120773SMarek Vasut scc_mgr_apply_group_dq_out1_delay(d); 207771120773SMarek Vasut else 207871120773SMarek Vasut scc_mgr_apply_group_dq_in_delay(test_bgn, d); 207971120773SMarek Vasut 208071120773SMarek Vasut writel(0, &sdr_scc_mgr->update); 208171120773SMarek Vasut 2082901dc36eSMarek Vasut stop = search_stop_check(write, d, rank_bgn, write_group, 20830c4be198SMarek Vasut read_group, &bit_chk, sticky_bit_chk, 2084901dc36eSMarek Vasut use_read_test); 208571120773SMarek Vasut if (stop == 1) 208671120773SMarek Vasut break; 208771120773SMarek Vasut 208871120773SMarek Vasut /* stop != 1 */ 208971120773SMarek Vasut for (i = 0; i < per_dqs; i++) { 20900c4be198SMarek Vasut if (bit_chk & 1) { 209171120773SMarek Vasut /* 209271120773SMarek Vasut * Remember a passing test as 209371120773SMarek Vasut * the left_edge. 209471120773SMarek Vasut */ 209571120773SMarek Vasut left_edge[i] = d; 209671120773SMarek Vasut } else { 209771120773SMarek Vasut /* 209871120773SMarek Vasut * If a left edge has not been seen 209971120773SMarek Vasut * yet, then a future passing test 210071120773SMarek Vasut * will mark this edge as the right 210171120773SMarek Vasut * edge. 210271120773SMarek Vasut */ 210371120773SMarek Vasut if (left_edge[i] == delay_max + 1) 210471120773SMarek Vasut right_edge[i] = -(d + 1); 210571120773SMarek Vasut } 21060c4be198SMarek Vasut bit_chk >>= 1; 210771120773SMarek Vasut } 210871120773SMarek Vasut } 210971120773SMarek Vasut 211071120773SMarek Vasut /* Reset DQ delay chains to 0 */ 211171120773SMarek Vasut if (write) 211271120773SMarek Vasut scc_mgr_apply_group_dq_out1_delay(0); 211371120773SMarek Vasut else 211471120773SMarek Vasut scc_mgr_apply_group_dq_in_delay(test_bgn, 0); 211571120773SMarek Vasut 211671120773SMarek Vasut *sticky_bit_chk = 0; 211771120773SMarek Vasut for (i = per_dqs - 1; i >= 0; i--) { 211871120773SMarek Vasut debug_cond(DLEVEL == 2, 211971120773SMarek Vasut "%s:%d vfifo_center: left_edge[%u]: %d right_edge[%u]: %d\n", 212071120773SMarek Vasut __func__, __LINE__, i, left_edge[i], 212171120773SMarek Vasut i, right_edge[i]); 212271120773SMarek Vasut 212371120773SMarek Vasut /* 212471120773SMarek Vasut * Check for cases where we haven't found the left edge, 212571120773SMarek Vasut * which makes our assignment of the the right edge invalid. 212671120773SMarek Vasut * Reset it to the illegal value. 212771120773SMarek Vasut */ 212871120773SMarek Vasut if ((left_edge[i] == delay_max + 1) && 212971120773SMarek Vasut (right_edge[i] != delay_max + 1)) { 213071120773SMarek Vasut right_edge[i] = delay_max + 1; 213171120773SMarek Vasut debug_cond(DLEVEL == 2, 213271120773SMarek Vasut "%s:%d vfifo_center: reset right_edge[%u]: %d\n", 213371120773SMarek Vasut __func__, __LINE__, i, right_edge[i]); 213471120773SMarek Vasut } 213571120773SMarek Vasut 213671120773SMarek Vasut /* 213771120773SMarek Vasut * Reset sticky bit 213871120773SMarek Vasut * READ: except for bits where we have seen both 213971120773SMarek Vasut * the left and right edge. 214071120773SMarek Vasut * WRITE: except for bits where we have seen the 214171120773SMarek Vasut * left edge. 
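 *
 * For example (purely illustrative), on a read where only bits 2 and 5 of
 * an 8-bit DQS group have both edges recorded, the loop below rebuilds
 * *sticky_bit_chk as 0x24, so the subsequent right-edge search only has to
 * satisfy the remaining bits.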
214271120773SMarek Vasut */ 214371120773SMarek Vasut *sticky_bit_chk <<= 1; 214471120773SMarek Vasut if (write) { 214571120773SMarek Vasut if (left_edge[i] != delay_max + 1) 214671120773SMarek Vasut *sticky_bit_chk |= 1; 214771120773SMarek Vasut } else { 214871120773SMarek Vasut if ((left_edge[i] != delay_max + 1) && 214971120773SMarek Vasut (right_edge[i] != delay_max + 1)) 215071120773SMarek Vasut *sticky_bit_chk |= 1; 215171120773SMarek Vasut } 215271120773SMarek Vasut } 215371120773SMarek Vasut 215471120773SMarek Vasut 215571120773SMarek Vasut } 215671120773SMarek Vasut 215771120773SMarek Vasut /** 2158c4907898SMarek Vasut * search_right_edge() - Find right edge of DQ/DQS working phase 2159c4907898SMarek Vasut * @write: Perform read (Stage 2) or write (Stage 3) calibration 2160c4907898SMarek Vasut * @rank_bgn: Rank number 2161c4907898SMarek Vasut * @write_group: Write Group 2162c4907898SMarek Vasut * @read_group: Read Group 2163c4907898SMarek Vasut * @start_dqs: DQS start phase 2164c4907898SMarek Vasut * @start_dqs_en: DQS enable start phase 2165c4907898SMarek Vasut * @sticky_bit_chk: Resulting sticky bit mask after the test 2166c4907898SMarek Vasut * @left_edge: Left edge of the DQ/DQS phase 2167c4907898SMarek Vasut * @right_edge: Right edge of the DQ/DQS phase 2168c4907898SMarek Vasut * @use_read_test: Perform read test 2169c4907898SMarek Vasut * 2170c4907898SMarek Vasut * Find right edge of DQ/DQS working phase. 2171c4907898SMarek Vasut */ 2172c4907898SMarek Vasut static int search_right_edge(const int write, const int rank_bgn, 2173c4907898SMarek Vasut const u32 write_group, const u32 read_group, 2174c4907898SMarek Vasut const int start_dqs, const int start_dqs_en, 21750c4be198SMarek Vasut u32 *sticky_bit_chk, 2176c4907898SMarek Vasut int *left_edge, int *right_edge, const u32 use_read_test) 2177c4907898SMarek Vasut { 2178c4907898SMarek Vasut const u32 delay_max = write ? IO_IO_OUT1_DELAY_MAX : IO_IO_IN_DELAY_MAX; 2179c4907898SMarek Vasut const u32 dqs_max = write ? IO_IO_OUT1_DELAY_MAX : IO_DQS_IN_DELAY_MAX; 2180c4907898SMarek Vasut const u32 per_dqs = write ? 
RW_MGR_MEM_DQ_PER_WRITE_DQS : 2181c4907898SMarek Vasut RW_MGR_MEM_DQ_PER_READ_DQS; 21820c4be198SMarek Vasut u32 stop, bit_chk; 2183c4907898SMarek Vasut int i, d; 2184c4907898SMarek Vasut 2185c4907898SMarek Vasut for (d = 0; d <= dqs_max - start_dqs; d++) { 2186c4907898SMarek Vasut if (write) { /* WRITE-ONLY */ 2187c4907898SMarek Vasut scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, 2188c4907898SMarek Vasut d + start_dqs); 2189c4907898SMarek Vasut } else { /* READ-ONLY */ 2190c4907898SMarek Vasut scc_mgr_set_dqs_bus_in_delay(read_group, d + start_dqs); 2191c4907898SMarek Vasut if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) { 2192c4907898SMarek Vasut uint32_t delay = d + start_dqs_en; 2193c4907898SMarek Vasut if (delay > IO_DQS_EN_DELAY_MAX) 2194c4907898SMarek Vasut delay = IO_DQS_EN_DELAY_MAX; 2195c4907898SMarek Vasut scc_mgr_set_dqs_en_delay(read_group, delay); 2196c4907898SMarek Vasut } 2197c4907898SMarek Vasut scc_mgr_load_dqs(read_group); 2198c4907898SMarek Vasut } 2199c4907898SMarek Vasut 2200c4907898SMarek Vasut writel(0, &sdr_scc_mgr->update); 2201c4907898SMarek Vasut 2202901dc36eSMarek Vasut stop = search_stop_check(write, d, rank_bgn, write_group, 22030c4be198SMarek Vasut read_group, &bit_chk, sticky_bit_chk, 2204901dc36eSMarek Vasut use_read_test); 2205c4907898SMarek Vasut if (stop == 1) { 2206c4907898SMarek Vasut if (write && (d == 0)) { /* WRITE-ONLY */ 2207c4907898SMarek Vasut for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { 2208c4907898SMarek Vasut /* 2209c4907898SMarek Vasut * d = 0 failed, but it passed when 2210c4907898SMarek Vasut * testing the left edge, so it must be 2211c4907898SMarek Vasut * marginal, set it to -1 2212c4907898SMarek Vasut */ 2213c4907898SMarek Vasut if (right_edge[i] == delay_max + 1 && 2214c4907898SMarek Vasut left_edge[i] != delay_max + 1) 2215c4907898SMarek Vasut right_edge[i] = -1; 2216c4907898SMarek Vasut } 2217c4907898SMarek Vasut } 2218c4907898SMarek Vasut break; 2219c4907898SMarek Vasut } 2220c4907898SMarek Vasut 2221c4907898SMarek Vasut /* stop != 1 */ 2222c4907898SMarek Vasut for (i = 0; i < per_dqs; i++) { 22230c4be198SMarek Vasut if (bit_chk & 1) { 2224c4907898SMarek Vasut /* 2225c4907898SMarek Vasut * Remember a passing test as 2226c4907898SMarek Vasut * the right_edge. 2227c4907898SMarek Vasut */ 2228c4907898SMarek Vasut right_edge[i] = d; 2229c4907898SMarek Vasut } else { 2230c4907898SMarek Vasut if (d != 0) { 2231c4907898SMarek Vasut /* 2232c4907898SMarek Vasut * If a right edge has not 2233c4907898SMarek Vasut * been seen yet, then a future 2234c4907898SMarek Vasut * passing test will mark this 2235c4907898SMarek Vasut * edge as the left edge. 2236c4907898SMarek Vasut */ 2237c4907898SMarek Vasut if (right_edge[i] == delay_max + 1) 2238c4907898SMarek Vasut left_edge[i] = -(d + 1); 2239c4907898SMarek Vasut } else { 2240c4907898SMarek Vasut /* 2241c4907898SMarek Vasut * d = 0 failed, but it passed 2242c4907898SMarek Vasut * when testing the left edge, 2243c4907898SMarek Vasut * so it must be marginal, set 2244c4907898SMarek Vasut * it to -1 2245c4907898SMarek Vasut */ 2246c4907898SMarek Vasut if (right_edge[i] == delay_max + 1 && 2247c4907898SMarek Vasut left_edge[i] != delay_max + 1) 2248c4907898SMarek Vasut right_edge[i] = -1; 2249c4907898SMarek Vasut /* 2250c4907898SMarek Vasut * If a right edge has not been 2251c4907898SMarek Vasut * seen yet, then a future 2252c4907898SMarek Vasut * passing test will mark this 2253c4907898SMarek Vasut * edge as the left edge. 
2254c4907898SMarek Vasut */ 2255c4907898SMarek Vasut else if (right_edge[i] == delay_max + 1) 2256c4907898SMarek Vasut left_edge[i] = -(d + 1); 2257c4907898SMarek Vasut } 2258c4907898SMarek Vasut } 2259c4907898SMarek Vasut 2260c4907898SMarek Vasut debug_cond(DLEVEL == 2, "%s:%d center[r,d=%u]: ", 2261c4907898SMarek Vasut __func__, __LINE__, d); 2262c4907898SMarek Vasut debug_cond(DLEVEL == 2, 2263c4907898SMarek Vasut "bit_chk_test=%i left_edge[%u]: %d ", 22640c4be198SMarek Vasut bit_chk & 1, i, left_edge[i]); 2265c4907898SMarek Vasut debug_cond(DLEVEL == 2, "right_edge[%u]: %d\n", i, 2266c4907898SMarek Vasut right_edge[i]); 22670c4be198SMarek Vasut bit_chk >>= 1; 2268c4907898SMarek Vasut } 2269c4907898SMarek Vasut } 2270c4907898SMarek Vasut 2271c4907898SMarek Vasut /* Check that all bits have a window */ 2272c4907898SMarek Vasut for (i = 0; i < per_dqs; i++) { 2273c4907898SMarek Vasut debug_cond(DLEVEL == 2, 2274c4907898SMarek Vasut "%s:%d write_center: left_edge[%u]: %d right_edge[%u]: %d", 2275c4907898SMarek Vasut __func__, __LINE__, i, left_edge[i], 2276c4907898SMarek Vasut i, right_edge[i]); 2277c4907898SMarek Vasut if ((left_edge[i] == dqs_max + 1) || 2278c4907898SMarek Vasut (right_edge[i] == dqs_max + 1)) 2279c4907898SMarek Vasut return i + 1; /* FIXME: If we fail, retval > 0 */ 2280c4907898SMarek Vasut } 2281c4907898SMarek Vasut 2282c4907898SMarek Vasut return 0; 2283c4907898SMarek Vasut } 2284c4907898SMarek Vasut 2285afb3eb84SMarek Vasut /** 2286afb3eb84SMarek Vasut * get_window_mid_index() - Find the best middle setting of DQ/DQS phase 2287afb3eb84SMarek Vasut * @write: Perform read (Stage 2) or write (Stage 3) calibration 2288afb3eb84SMarek Vasut * @left_edge: Left edge of the DQ/DQS phase 2289afb3eb84SMarek Vasut * @right_edge: Right edge of the DQ/DQS phase 2290afb3eb84SMarek Vasut * @mid_min: Best DQ/DQS phase middle setting 2291afb3eb84SMarek Vasut * 2292afb3eb84SMarek Vasut * Find index and value of the middle of the DQ/DQS working phase. 2293afb3eb84SMarek Vasut */ 2294afb3eb84SMarek Vasut static int get_window_mid_index(const int write, int *left_edge, 2295afb3eb84SMarek Vasut int *right_edge, int *mid_min) 2296afb3eb84SMarek Vasut { 2297afb3eb84SMarek Vasut const u32 per_dqs = write ? RW_MGR_MEM_DQ_PER_WRITE_DQS : 2298afb3eb84SMarek Vasut RW_MGR_MEM_DQ_PER_READ_DQS; 2299afb3eb84SMarek Vasut int i, mid, min_index; 2300afb3eb84SMarek Vasut 2301afb3eb84SMarek Vasut /* Find middle of window for each DQ bit */ 2302afb3eb84SMarek Vasut *mid_min = left_edge[0] - right_edge[0]; 2303afb3eb84SMarek Vasut min_index = 0; 2304afb3eb84SMarek Vasut for (i = 1; i < per_dqs; i++) { 2305afb3eb84SMarek Vasut mid = left_edge[i] - right_edge[i]; 2306afb3eb84SMarek Vasut if (mid < *mid_min) { 2307afb3eb84SMarek Vasut *mid_min = mid; 2308afb3eb84SMarek Vasut min_index = i; 2309afb3eb84SMarek Vasut } 2310afb3eb84SMarek Vasut } 2311afb3eb84SMarek Vasut 2312afb3eb84SMarek Vasut /* 2313afb3eb84SMarek Vasut * -mid_min/2 represents the amount that we need to move DQS. 2314afb3eb84SMarek Vasut * If mid_min is odd and positive we'll need to add one to make 2315afb3eb84SMarek Vasut * sure the rounding in further calculations is correct (always 2316afb3eb84SMarek Vasut * bias to the right), so just add 1 for all positive values. 
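 *
 * Worked example (hypothetical window): left_edge[min_index] = 7 and
 * right_edge[min_index] = 3 give a raw mid_min of 4; the increment below
 * makes it 5 and the divide yields 2, so DQS is shifted by -2. A raw
 * mid_min of 5 would become 6 and then 3.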
2317afb3eb84SMarek Vasut */ 2318afb3eb84SMarek Vasut if (*mid_min > 0) 2319afb3eb84SMarek Vasut (*mid_min)++; 2320afb3eb84SMarek Vasut *mid_min = *mid_min / 2; 2321afb3eb84SMarek Vasut 2322afb3eb84SMarek Vasut debug_cond(DLEVEL == 1, "%s:%d vfifo_center: *mid_min=%d (index=%u)\n", 2323afb3eb84SMarek Vasut __func__, __LINE__, *mid_min, min_index); 2324afb3eb84SMarek Vasut return min_index; 2325afb3eb84SMarek Vasut } 2326afb3eb84SMarek Vasut 2327ffb8b66eSMarek Vasut /** 2328ffb8b66eSMarek Vasut * center_dq_windows() - Center the DQ/DQS windows 2329ffb8b66eSMarek Vasut * @write: Perform read (Stage 2) or write (Stage 3) calibration 2330ffb8b66eSMarek Vasut * @left_edge: Left edge of the DQ/DQS phase 2331ffb8b66eSMarek Vasut * @right_edge: Right edge of the DQ/DQS phase 2332ffb8b66eSMarek Vasut * @mid_min: Adjusted DQ/DQS phase middle setting 2333ffb8b66eSMarek Vasut * @orig_mid_min: Original DQ/DQS phase middle setting 2334ffb8b66eSMarek Vasut * @min_index: DQ/DQS phase middle setting index 2335ffb8b66eSMarek Vasut * @test_bgn: Rank number to begin the test 2336ffb8b66eSMarek Vasut * @dq_margin: Amount of shift for the DQ 2337ffb8b66eSMarek Vasut * @dqs_margin: Amount of shift for the DQS 2338ffb8b66eSMarek Vasut * 2339ffb8b66eSMarek Vasut * Align the DQ/DQS windows in each group. 2340ffb8b66eSMarek Vasut */ 2341ffb8b66eSMarek Vasut static void center_dq_windows(const int write, int *left_edge, int *right_edge, 2342ffb8b66eSMarek Vasut const int mid_min, const int orig_mid_min, 2343ffb8b66eSMarek Vasut const int min_index, const int test_bgn, 2344ffb8b66eSMarek Vasut int *dq_margin, int *dqs_margin) 2345ffb8b66eSMarek Vasut { 2346ffb8b66eSMarek Vasut const u32 delay_max = write ? IO_IO_OUT1_DELAY_MAX : IO_IO_IN_DELAY_MAX; 2347ffb8b66eSMarek Vasut const u32 per_dqs = write ? RW_MGR_MEM_DQ_PER_WRITE_DQS : 2348ffb8b66eSMarek Vasut RW_MGR_MEM_DQ_PER_READ_DQS; 2349ffb8b66eSMarek Vasut const u32 delay_off = write ? 
SCC_MGR_IO_OUT1_DELAY_OFFSET : 2350ffb8b66eSMarek Vasut SCC_MGR_IO_IN_DELAY_OFFSET; 2351ffb8b66eSMarek Vasut const u32 addr = SDR_PHYGRP_SCCGRP_ADDRESS | delay_off; 2352ffb8b66eSMarek Vasut 2353ffb8b66eSMarek Vasut u32 temp_dq_io_delay1, temp_dq_io_delay2; 2354ffb8b66eSMarek Vasut int shift_dq, i, p; 2355ffb8b66eSMarek Vasut 2356ffb8b66eSMarek Vasut /* Initialize data for export structures */ 2357ffb8b66eSMarek Vasut *dqs_margin = delay_max + 1; 2358ffb8b66eSMarek Vasut *dq_margin = delay_max + 1; 2359ffb8b66eSMarek Vasut 2360ffb8b66eSMarek Vasut /* add delay to bring centre of all DQ windows to the same "level" */ 2361ffb8b66eSMarek Vasut for (i = 0, p = test_bgn; i < per_dqs; i++, p++) { 2362ffb8b66eSMarek Vasut /* Use values before divide by 2 to reduce round off error */ 2363ffb8b66eSMarek Vasut shift_dq = (left_edge[i] - right_edge[i] - 2364ffb8b66eSMarek Vasut (left_edge[min_index] - right_edge[min_index]))/2 + 2365ffb8b66eSMarek Vasut (orig_mid_min - mid_min); 2366ffb8b66eSMarek Vasut 2367ffb8b66eSMarek Vasut debug_cond(DLEVEL == 2, 2368ffb8b66eSMarek Vasut "vfifo_center: before: shift_dq[%u]=%d\n", 2369ffb8b66eSMarek Vasut i, shift_dq); 2370ffb8b66eSMarek Vasut 2371ffb8b66eSMarek Vasut temp_dq_io_delay1 = readl(addr + (p << 2)); 2372ffb8b66eSMarek Vasut temp_dq_io_delay2 = readl(addr + (i << 2)); 2373ffb8b66eSMarek Vasut 2374ffb8b66eSMarek Vasut if (shift_dq + temp_dq_io_delay1 > delay_max) 2375ffb8b66eSMarek Vasut shift_dq = delay_max - temp_dq_io_delay2; 2376ffb8b66eSMarek Vasut else if (shift_dq + temp_dq_io_delay1 < 0) 2377ffb8b66eSMarek Vasut shift_dq = -temp_dq_io_delay1; 2378ffb8b66eSMarek Vasut 2379ffb8b66eSMarek Vasut debug_cond(DLEVEL == 2, 2380ffb8b66eSMarek Vasut "vfifo_center: after: shift_dq[%u]=%d\n", 2381ffb8b66eSMarek Vasut i, shift_dq); 2382ffb8b66eSMarek Vasut 2383ffb8b66eSMarek Vasut if (write) 2384ffb8b66eSMarek Vasut scc_mgr_set_dq_out1_delay(i, temp_dq_io_delay1 + shift_dq); 2385ffb8b66eSMarek Vasut else 2386ffb8b66eSMarek Vasut scc_mgr_set_dq_in_delay(p, temp_dq_io_delay1 + shift_dq); 2387ffb8b66eSMarek Vasut 2388ffb8b66eSMarek Vasut scc_mgr_load_dq(p); 2389ffb8b66eSMarek Vasut 2390ffb8b66eSMarek Vasut debug_cond(DLEVEL == 2, 2391ffb8b66eSMarek Vasut "vfifo_center: margin[%u]=[%d,%d]\n", i, 2392ffb8b66eSMarek Vasut left_edge[i] - shift_dq + (-mid_min), 2393ffb8b66eSMarek Vasut right_edge[i] + shift_dq - (-mid_min)); 2394ffb8b66eSMarek Vasut 2395ffb8b66eSMarek Vasut /* To determine values for export structures */ 2396ffb8b66eSMarek Vasut if (left_edge[i] - shift_dq + (-mid_min) < *dq_margin) 2397ffb8b66eSMarek Vasut *dq_margin = left_edge[i] - shift_dq + (-mid_min); 2398ffb8b66eSMarek Vasut 2399ffb8b66eSMarek Vasut if (right_edge[i] + shift_dq - (-mid_min) < *dqs_margin) 2400ffb8b66eSMarek Vasut *dqs_margin = right_edge[i] + shift_dq - (-mid_min); 2401ffb8b66eSMarek Vasut } 2402ffb8b66eSMarek Vasut 2403ffb8b66eSMarek Vasut } 2404ffb8b66eSMarek Vasut 2405ac63b9adSMarek Vasut /** 2406ac63b9adSMarek Vasut * rw_mgr_mem_calibrate_vfifo_center() - Per-bit deskew DQ and centering 2407ac63b9adSMarek Vasut * @rank_bgn: Rank number 2408ac63b9adSMarek Vasut * @rw_group: Read/Write Group 2409ac63b9adSMarek Vasut * @test_bgn: Rank at which the test begins 2410ac63b9adSMarek Vasut * @use_read_test: Perform a read test 2411ac63b9adSMarek Vasut * @update_fom: Update FOM 2412ac63b9adSMarek Vasut * 2413ac63b9adSMarek Vasut * Per-bit deskew DQ and centering. 
2414ac63b9adSMarek Vasut */ 24150113c3e1SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_center(const u32 rank_bgn, 24160113c3e1SMarek Vasut const u32 rw_group, const u32 test_bgn, 24170113c3e1SMarek Vasut const int use_read_test, const int update_fom) 24183da42859SDinh Nguyen { 24195d6db444SMarek Vasut const u32 addr = 24205d6db444SMarek Vasut SDR_PHYGRP_SCCGRP_ADDRESS + SCC_MGR_DQS_IN_DELAY_OFFSET + 24210113c3e1SMarek Vasut (rw_group << 2); 24223da42859SDinh Nguyen /* 24233da42859SDinh Nguyen * Store these as signed since there are comparisons with 24243da42859SDinh Nguyen * signed numbers. 24253da42859SDinh Nguyen */ 24263da42859SDinh Nguyen uint32_t sticky_bit_chk; 24273da42859SDinh Nguyen int32_t left_edge[RW_MGR_MEM_DQ_PER_READ_DQS]; 24283da42859SDinh Nguyen int32_t right_edge[RW_MGR_MEM_DQ_PER_READ_DQS]; 24293da42859SDinh Nguyen int32_t orig_mid_min, mid_min; 24305d6db444SMarek Vasut int32_t new_dqs, start_dqs, start_dqs_en, final_dqs_en; 24313da42859SDinh Nguyen int32_t dq_margin, dqs_margin; 24325d6db444SMarek Vasut int i, min_index; 2433c4907898SMarek Vasut int ret; 24343da42859SDinh Nguyen 24350113c3e1SMarek Vasut debug("%s:%d: %u %u", __func__, __LINE__, rw_group, test_bgn); 24363da42859SDinh Nguyen 24375d6db444SMarek Vasut start_dqs = readl(addr); 24383da42859SDinh Nguyen if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) 24395d6db444SMarek Vasut start_dqs_en = readl(addr - IO_DQS_EN_DELAY_OFFSET); 24403da42859SDinh Nguyen 24413da42859SDinh Nguyen /* set the left and right edge of each bit to an illegal value */ 24423da42859SDinh Nguyen /* use (IO_IO_IN_DELAY_MAX + 1) as an illegal value */ 24433da42859SDinh Nguyen sticky_bit_chk = 0; 24443da42859SDinh Nguyen for (i = 0; i < RW_MGR_MEM_DQ_PER_READ_DQS; i++) { 24453da42859SDinh Nguyen left_edge[i] = IO_IO_IN_DELAY_MAX + 1; 24463da42859SDinh Nguyen right_edge[i] = IO_IO_IN_DELAY_MAX + 1; 24473da42859SDinh Nguyen } 24483da42859SDinh Nguyen 24493da42859SDinh Nguyen /* Search for the left edge of the window for each bit */ 24500113c3e1SMarek Vasut search_left_edge(0, rank_bgn, rw_group, rw_group, test_bgn, 24510c4be198SMarek Vasut &sticky_bit_chk, 245271120773SMarek Vasut left_edge, right_edge, use_read_test); 24533da42859SDinh Nguyen 2454f0712c35SMarek Vasut 24553da42859SDinh Nguyen /* Search for the right edge of the window for each bit */ 24560113c3e1SMarek Vasut ret = search_right_edge(0, rank_bgn, rw_group, rw_group, 2457c4907898SMarek Vasut start_dqs, start_dqs_en, 24580c4be198SMarek Vasut &sticky_bit_chk, 2459c4907898SMarek Vasut left_edge, right_edge, use_read_test); 2460c4907898SMarek Vasut if (ret) { 24613da42859SDinh Nguyen /* 24623da42859SDinh Nguyen * Restore delay chain settings before letting the loop 24633da42859SDinh Nguyen * in rw_mgr_mem_calibrate_vfifo to retry different 24643da42859SDinh Nguyen * dqs/ck relationships. 
24653da42859SDinh Nguyen */ 24660113c3e1SMarek Vasut scc_mgr_set_dqs_bus_in_delay(rw_group, start_dqs); 2467c4907898SMarek Vasut if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) 24680113c3e1SMarek Vasut scc_mgr_set_dqs_en_delay(rw_group, start_dqs_en); 2469c4907898SMarek Vasut 24700113c3e1SMarek Vasut scc_mgr_load_dqs(rw_group); 24711273dd9eSMarek Vasut writel(0, &sdr_scc_mgr->update); 24723da42859SDinh Nguyen 2473c4907898SMarek Vasut debug_cond(DLEVEL == 1, 2474c4907898SMarek Vasut "%s:%d vfifo_center: failed to find edge [%u]: %d %d", 2475c4907898SMarek Vasut __func__, __LINE__, i, left_edge[i], right_edge[i]); 24763da42859SDinh Nguyen if (use_read_test) { 24770113c3e1SMarek Vasut set_failing_group_stage(rw_group * 24783da42859SDinh Nguyen RW_MGR_MEM_DQ_PER_READ_DQS + i, 24793da42859SDinh Nguyen CAL_STAGE_VFIFO, 24803da42859SDinh Nguyen CAL_SUBSTAGE_VFIFO_CENTER); 24813da42859SDinh Nguyen } else { 24820113c3e1SMarek Vasut set_failing_group_stage(rw_group * 24833da42859SDinh Nguyen RW_MGR_MEM_DQ_PER_READ_DQS + i, 24843da42859SDinh Nguyen CAL_STAGE_VFIFO_AFTER_WRITES, 24853da42859SDinh Nguyen CAL_SUBSTAGE_VFIFO_CENTER); 24863da42859SDinh Nguyen } 248798668247SMarek Vasut return -EIO; 24883da42859SDinh Nguyen } 24893da42859SDinh Nguyen 2490afb3eb84SMarek Vasut min_index = get_window_mid_index(0, left_edge, right_edge, &mid_min); 24913da42859SDinh Nguyen 24923da42859SDinh Nguyen /* Determine the amount we can change DQS (which is -mid_min) */ 24933da42859SDinh Nguyen orig_mid_min = mid_min; 24943da42859SDinh Nguyen new_dqs = start_dqs - mid_min; 24953da42859SDinh Nguyen if (new_dqs > IO_DQS_IN_DELAY_MAX) 24963da42859SDinh Nguyen new_dqs = IO_DQS_IN_DELAY_MAX; 24973da42859SDinh Nguyen else if (new_dqs < 0) 24983da42859SDinh Nguyen new_dqs = 0; 24993da42859SDinh Nguyen 25003da42859SDinh Nguyen mid_min = start_dqs - new_dqs; 25013da42859SDinh Nguyen debug_cond(DLEVEL == 1, "vfifo_center: new mid_min=%d new_dqs=%d\n", 25023da42859SDinh Nguyen mid_min, new_dqs); 25033da42859SDinh Nguyen 25043da42859SDinh Nguyen if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) { 25053da42859SDinh Nguyen if (start_dqs_en - mid_min > IO_DQS_EN_DELAY_MAX) 25063da42859SDinh Nguyen mid_min += start_dqs_en - mid_min - IO_DQS_EN_DELAY_MAX; 25073da42859SDinh Nguyen else if (start_dqs_en - mid_min < 0) 25083da42859SDinh Nguyen mid_min += start_dqs_en - mid_min; 25093da42859SDinh Nguyen } 25103da42859SDinh Nguyen new_dqs = start_dqs - mid_min; 25113da42859SDinh Nguyen 2512f0712c35SMarek Vasut debug_cond(DLEVEL == 1, 2513f0712c35SMarek Vasut "vfifo_center: start_dqs=%d start_dqs_en=%d new_dqs=%d mid_min=%d\n", 2514f0712c35SMarek Vasut start_dqs, 25153da42859SDinh Nguyen IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS ? start_dqs_en : -1, 25163da42859SDinh Nguyen new_dqs, mid_min); 25173da42859SDinh Nguyen 2518ffb8b66eSMarek Vasut /* Add delay to bring centre of all DQ windows to the same "level". 
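 * Editor's note, illustrative example with assumed numbers: for a bit whose
 * window is left_edge = 12 / right_edge = 4 while the narrowest bit
 * (min_index) has left_edge = 6 / right_edge = 4, center_dq_windows()
 * computes shift_dq = ((12 - 4) - (6 - 4)) / 2 + (orig_mid_min - mid_min) = 3
 * when mid_min was not clamped, i.e. that DQ line gets three extra taps so
 * its window centre lines up with the narrowest bit's centre.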
*/
2519ffb8b66eSMarek Vasut center_dq_windows(0, left_edge, right_edge, mid_min, orig_mid_min,
2520ffb8b66eSMarek Vasut min_index, test_bgn, &dq_margin, &dqs_margin);
25213da42859SDinh Nguyen
25223da42859SDinh Nguyen /* Move DQS-en */
25233da42859SDinh Nguyen if (IO_SHIFT_DQS_EN_WHEN_SHIFT_DQS) {
25245d6db444SMarek Vasut final_dqs_en = start_dqs_en - mid_min;
25250113c3e1SMarek Vasut scc_mgr_set_dqs_en_delay(rw_group, final_dqs_en);
25260113c3e1SMarek Vasut scc_mgr_load_dqs(rw_group);
25273da42859SDinh Nguyen }
25283da42859SDinh Nguyen
25293da42859SDinh Nguyen /* Move DQS */
25300113c3e1SMarek Vasut scc_mgr_set_dqs_bus_in_delay(rw_group, new_dqs);
25310113c3e1SMarek Vasut scc_mgr_load_dqs(rw_group);
2532f0712c35SMarek Vasut debug_cond(DLEVEL == 2,
2533f0712c35SMarek Vasut "%s:%d vfifo_center: dq_margin=%d dqs_margin=%d",
2534f0712c35SMarek Vasut __func__, __LINE__, dq_margin, dqs_margin);
25353da42859SDinh Nguyen
25363da42859SDinh Nguyen /*
25373da42859SDinh Nguyen * Do not remove this line as it makes sure all of our decisions
25383da42859SDinh Nguyen * have been applied. Apply the update bit.
25393da42859SDinh Nguyen */
25401273dd9eSMarek Vasut writel(0, &sdr_scc_mgr->update);
25413da42859SDinh Nguyen
254298668247SMarek Vasut if ((dq_margin < 0) || (dqs_margin < 0))
254398668247SMarek Vasut return -EINVAL;
254498668247SMarek Vasut
254598668247SMarek Vasut return 0;
25463da42859SDinh Nguyen }
25473da42859SDinh Nguyen
2548bce24efaSMarek Vasut /**
254904372fb8SMarek Vasut * rw_mgr_mem_calibrate_guaranteed_write() - Perform guaranteed write into the device
255004372fb8SMarek Vasut * @rw_group: Read/Write Group
255104372fb8SMarek Vasut * @phase: DQ/DQS phase
255204372fb8SMarek Vasut *
255304372fb8SMarek Vasut * Because initially no communication can be reliably performed with the memory
255404372fb8SMarek Vasut * device, the sequencer uses a guaranteed write mechanism to write data into
255504372fb8SMarek Vasut * the memory device.
255604372fb8SMarek Vasut */
255704372fb8SMarek Vasut static int rw_mgr_mem_calibrate_guaranteed_write(const u32 rw_group,
255804372fb8SMarek Vasut const u32 phase)
255904372fb8SMarek Vasut {
256004372fb8SMarek Vasut int ret;
256104372fb8SMarek Vasut
256204372fb8SMarek Vasut /* Set a particular DQ/DQS phase. */
256304372fb8SMarek Vasut scc_mgr_set_dqdqs_output_phase_all_ranks(rw_group, phase);
256404372fb8SMarek Vasut
256504372fb8SMarek Vasut debug_cond(DLEVEL == 1, "%s:%d guaranteed write: g=%u p=%u\n",
256604372fb8SMarek Vasut __func__, __LINE__, rw_group, phase);
256704372fb8SMarek Vasut
256804372fb8SMarek Vasut /*
256904372fb8SMarek Vasut * Altera EMI_RM 2015.05.04 :: Figure 1-25
257004372fb8SMarek Vasut * Load up the patterns used by read calibration using the
257104372fb8SMarek Vasut * current DQDQS phase.
257204372fb8SMarek Vasut */
257304372fb8SMarek Vasut rw_mgr_mem_calibrate_read_load_patterns(0, 1);
257404372fb8SMarek Vasut
257504372fb8SMarek Vasut if (gbl->phy_debug_mode_flags & PHY_DEBUG_DISABLE_GUARANTEED_READ)
257604372fb8SMarek Vasut return 0;
257704372fb8SMarek Vasut
257804372fb8SMarek Vasut /*
257904372fb8SMarek Vasut * Altera EMI_RM 2015.05.04 :: Figure 1-26
258004372fb8SMarek Vasut * Back-to-Back reads of the patterns used for calibration.
258104372fb8SMarek Vasut */
2582d844c7d4SMarek Vasut ret = rw_mgr_mem_calibrate_read_test_patterns(0, rw_group, 1);
2583d844c7d4SMarek Vasut if (ret)
258404372fb8SMarek Vasut debug_cond(DLEVEL == 1,
258504372fb8SMarek Vasut "%s:%d Guaranteed read test failed: g=%u p=%u\n",
258604372fb8SMarek Vasut __func__, __LINE__, rw_group, phase);
2587d844c7d4SMarek Vasut return ret;
258804372fb8SMarek Vasut }
258904372fb8SMarek Vasut
259004372fb8SMarek Vasut /**
2591f09da11eSMarek Vasut * rw_mgr_mem_calibrate_dqs_enable_calibration() - DQS Enable Calibration
2592f09da11eSMarek Vasut * @rw_group: Read/Write Group
2593f09da11eSMarek Vasut * @test_bgn: Rank at which the test begins
2594f09da11eSMarek Vasut *
2595f09da11eSMarek Vasut * DQS enable calibration ensures reliable capture of the DQ signal without
2596f09da11eSMarek Vasut * glitches on the DQS line.
2597f09da11eSMarek Vasut */
2598f09da11eSMarek Vasut static int rw_mgr_mem_calibrate_dqs_enable_calibration(const u32 rw_group,
2599f09da11eSMarek Vasut const u32 test_bgn)
2600f09da11eSMarek Vasut {
2601f09da11eSMarek Vasut /*
2602f09da11eSMarek Vasut * Altera EMI_RM 2015.05.04 :: Figure 1-27
2603f09da11eSMarek Vasut * DQS and DQS Enable Signal Relationships.
2604f09da11eSMarek Vasut */
260528ea827dSMarek Vasut
260628ea827dSMarek Vasut /* We start at zero, so we have one less dq to divide among */
260728ea827dSMarek Vasut const u32 delay_step = IO_IO_IN_DELAY_MAX /
260828ea827dSMarek Vasut (RW_MGR_MEM_DQ_PER_READ_DQS - 1);
2609914546e7SMarek Vasut int ret;
261028ea827dSMarek Vasut u32 i, p, d, r;
261128ea827dSMarek Vasut
261228ea827dSMarek Vasut debug("%s:%d (%u,%u)\n", __func__, __LINE__, rw_group, test_bgn);
261328ea827dSMarek Vasut
261428ea827dSMarek Vasut /* Try different dq_in_delays since the DQ path is shorter than DQS.
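 * Editor's note, illustrative example with assumed generated values
 * (e.g. IO_IO_IN_DELAY_MAX = 31, RW_MGR_MEM_DQ_PER_READ_DQS = 8):
 * delay_step = 31 / 7 = 4, so the eight DQ bits of the group are given
 * input delays 0, 4, 8, ..., 28, spreading them across the delay chain
 * before the DQS enable phase sweep below.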
*/
261528ea827dSMarek Vasut for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
261628ea827dSMarek Vasut r += NUM_RANKS_PER_SHADOW_REG) {
261728ea827dSMarek Vasut for (i = 0, p = test_bgn, d = 0;
261828ea827dSMarek Vasut i < RW_MGR_MEM_DQ_PER_READ_DQS;
261928ea827dSMarek Vasut i++, p++, d += delay_step) {
262028ea827dSMarek Vasut debug_cond(DLEVEL == 1,
262128ea827dSMarek Vasut "%s:%d: g=%u r=%u i=%u p=%u d=%u\n",
262228ea827dSMarek Vasut __func__, __LINE__, rw_group, r, i, p, d);
262328ea827dSMarek Vasut
262428ea827dSMarek Vasut scc_mgr_set_dq_in_delay(p, d);
262528ea827dSMarek Vasut scc_mgr_load_dq(p);
262628ea827dSMarek Vasut }
262728ea827dSMarek Vasut
262828ea827dSMarek Vasut writel(0, &sdr_scc_mgr->update);
262928ea827dSMarek Vasut }
263028ea827dSMarek Vasut
263128ea827dSMarek Vasut /*
263228ea827dSMarek Vasut * Try rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase across different
263328ea827dSMarek Vasut * dq_in_delay values
263428ea827dSMarek Vasut */
2635914546e7SMarek Vasut ret = rw_mgr_mem_calibrate_vfifo_find_dqs_en_phase(rw_group);
263628ea827dSMarek Vasut
263728ea827dSMarek Vasut debug_cond(DLEVEL == 1,
263828ea827dSMarek Vasut "%s:%d: g=%u found=%u; Resetting delay chain to zero\n",
2639914546e7SMarek Vasut __func__, __LINE__, rw_group, !ret);
264028ea827dSMarek Vasut
264128ea827dSMarek Vasut for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS;
264228ea827dSMarek Vasut r += NUM_RANKS_PER_SHADOW_REG) {
264328ea827dSMarek Vasut scc_mgr_apply_group_dq_in_delay(test_bgn, 0);
264428ea827dSMarek Vasut writel(0, &sdr_scc_mgr->update);
264528ea827dSMarek Vasut }
264628ea827dSMarek Vasut
2647914546e7SMarek Vasut return ret;
2648f09da11eSMarek Vasut }
2649f09da11eSMarek Vasut
2650f09da11eSMarek Vasut /**
265116cfc4b9SMarek Vasut * rw_mgr_mem_calibrate_dq_dqs_centering() - Centering DQ/DQS
265216cfc4b9SMarek Vasut * @rw_group: Read/Write Group
265316cfc4b9SMarek Vasut * @test_bgn: Rank at which the test begins
265416cfc4b9SMarek Vasut * @use_read_test: Perform a read test
265516cfc4b9SMarek Vasut * @update_fom: Update FOM
265616cfc4b9SMarek Vasut *
265716cfc4b9SMarek Vasut * The centering DQ/DQS stage attempts to align DQ and DQS signals on reads
265816cfc4b9SMarek Vasut * within a group.
265916cfc4b9SMarek Vasut */
266016cfc4b9SMarek Vasut static int
266116cfc4b9SMarek Vasut rw_mgr_mem_calibrate_dq_dqs_centering(const u32 rw_group, const u32 test_bgn,
266216cfc4b9SMarek Vasut const int use_read_test,
266316cfc4b9SMarek Vasut const int update_fom)
266416cfc4b9SMarek Vasut
266516cfc4b9SMarek Vasut {
266616cfc4b9SMarek Vasut int ret, grp_calibrated;
266716cfc4b9SMarek Vasut u32 rank_bgn, sr;
266816cfc4b9SMarek Vasut
266916cfc4b9SMarek Vasut /*
267016cfc4b9SMarek Vasut * Altera EMI_RM 2015.05.04 :: Figure 1-28
267116cfc4b9SMarek Vasut * Read per-bit deskew can be done on a per shadow register basis.
267216cfc4b9SMarek Vasut */
267316cfc4b9SMarek Vasut grp_calibrated = 1;
267416cfc4b9SMarek Vasut for (rank_bgn = 0, sr = 0;
267516cfc4b9SMarek Vasut rank_bgn < RW_MGR_MEM_NUMBER_OF_RANKS;
267616cfc4b9SMarek Vasut rank_bgn += NUM_RANKS_PER_SHADOW_REG, sr++) {
267716cfc4b9SMarek Vasut /* Check if this set of ranks should be skipped entirely.
*/ 267816cfc4b9SMarek Vasut if (param->skip_shadow_regs[sr]) 267916cfc4b9SMarek Vasut continue; 268016cfc4b9SMarek Vasut 268116cfc4b9SMarek Vasut ret = rw_mgr_mem_calibrate_vfifo_center(rank_bgn, rw_group, 26820113c3e1SMarek Vasut test_bgn, 268316cfc4b9SMarek Vasut use_read_test, 268416cfc4b9SMarek Vasut update_fom); 268598668247SMarek Vasut if (!ret) 268616cfc4b9SMarek Vasut continue; 268716cfc4b9SMarek Vasut 268816cfc4b9SMarek Vasut grp_calibrated = 0; 268916cfc4b9SMarek Vasut } 269016cfc4b9SMarek Vasut 269116cfc4b9SMarek Vasut if (!grp_calibrated) 269216cfc4b9SMarek Vasut return -EIO; 269316cfc4b9SMarek Vasut 269416cfc4b9SMarek Vasut return 0; 269516cfc4b9SMarek Vasut } 269616cfc4b9SMarek Vasut 269716cfc4b9SMarek Vasut /** 2698bce24efaSMarek Vasut * rw_mgr_mem_calibrate_vfifo() - Calibrate the read valid prediction FIFO 2699bce24efaSMarek Vasut * @rw_group: Read/Write Group 2700bce24efaSMarek Vasut * @test_bgn: Rank at which the test begins 27013da42859SDinh Nguyen * 2702bce24efaSMarek Vasut * Stage 1: Calibrate the read valid prediction FIFO. 2703bce24efaSMarek Vasut * 2704bce24efaSMarek Vasut * This function implements UniPHY calibration Stage 1, as explained in 2705bce24efaSMarek Vasut * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages". 2706bce24efaSMarek Vasut * 2707bce24efaSMarek Vasut * - read valid prediction will consist of finding: 2708bce24efaSMarek Vasut * - DQS enable phase and DQS enable delay (DQS Enable Calibration) 2709bce24efaSMarek Vasut * - DQS input phase and DQS input delay (DQ/DQS Centering) 27103da42859SDinh Nguyen * - we also do a per-bit deskew on the DQ lines. 27113da42859SDinh Nguyen */ 2712c336ca3eSMarek Vasut static int rw_mgr_mem_calibrate_vfifo(const u32 rw_group, const u32 test_bgn) 27133da42859SDinh Nguyen { 271416cfc4b9SMarek Vasut uint32_t p, d; 27153da42859SDinh Nguyen uint32_t dtaps_per_ptap; 27163da42859SDinh Nguyen uint32_t failed_substage; 27173da42859SDinh Nguyen 271804372fb8SMarek Vasut int ret; 271904372fb8SMarek Vasut 2720c336ca3eSMarek Vasut debug("%s:%d: %u %u\n", __func__, __LINE__, rw_group, test_bgn); 27213da42859SDinh Nguyen 27227c0a9df3SMarek Vasut /* Update info for sims */ 27237c0a9df3SMarek Vasut reg_file_set_group(rw_group); 27243da42859SDinh Nguyen reg_file_set_stage(CAL_STAGE_VFIFO); 27257c0a9df3SMarek Vasut reg_file_set_sub_stage(CAL_SUBSTAGE_GUARANTEED_READ); 27263da42859SDinh Nguyen 27277c0a9df3SMarek Vasut failed_substage = CAL_SUBSTAGE_GUARANTEED_READ; 27287c0a9df3SMarek Vasut 27297c0a9df3SMarek Vasut /* USER Determine number of delay taps for each phase tap. */ 2730d32badbdSMarek Vasut dtaps_per_ptap = DIV_ROUND_UP(IO_DELAY_PER_OPA_TAP, 2731d32badbdSMarek Vasut IO_DELAY_PER_DQS_EN_DCHAIN_TAP) - 1; 27323da42859SDinh Nguyen 2733fe2d0a2dSMarek Vasut for (d = 0; d <= dtaps_per_ptap; d += 2) { 27343da42859SDinh Nguyen /* 27353da42859SDinh Nguyen * In RLDRAMX we may be messing the delay of pins in 2736c336ca3eSMarek Vasut * the same write rw_group but outside of the current read 2737c336ca3eSMarek Vasut * the rw_group, but that's ok because we haven't calibrated 2738ac70d2f3SMarek Vasut * output side yet. 
27393da42859SDinh Nguyen */ 27403da42859SDinh Nguyen if (d > 0) { 2741f51a7d35SMarek Vasut scc_mgr_apply_group_all_out_delay_add_all_ranks( 2742c336ca3eSMarek Vasut rw_group, d); 27433da42859SDinh Nguyen } 27443da42859SDinh Nguyen 2745fe2d0a2dSMarek Vasut for (p = 0; p <= IO_DQDQS_OUT_PHASE_MAX; p++) { 274604372fb8SMarek Vasut /* 1) Guaranteed Write */ 274704372fb8SMarek Vasut ret = rw_mgr_mem_calibrate_guaranteed_write(rw_group, p); 274804372fb8SMarek Vasut if (ret) 27493da42859SDinh Nguyen break; 27503da42859SDinh Nguyen 2751f09da11eSMarek Vasut /* 2) DQS Enable Calibration */ 2752f09da11eSMarek Vasut ret = rw_mgr_mem_calibrate_dqs_enable_calibration(rw_group, 2753f09da11eSMarek Vasut test_bgn); 2754f09da11eSMarek Vasut if (ret) { 2755fe2d0a2dSMarek Vasut failed_substage = CAL_SUBSTAGE_DQS_EN_PHASE; 2756fe2d0a2dSMarek Vasut continue; 2757fe2d0a2dSMarek Vasut } 2758fe2d0a2dSMarek Vasut 275916cfc4b9SMarek Vasut /* 3) Centering DQ/DQS */ 27603da42859SDinh Nguyen /* 276116cfc4b9SMarek Vasut * If doing read after write calibration, do not update 276216cfc4b9SMarek Vasut * FOM now. Do it then. 27633da42859SDinh Nguyen */ 276416cfc4b9SMarek Vasut ret = rw_mgr_mem_calibrate_dq_dqs_centering(rw_group, 276516cfc4b9SMarek Vasut test_bgn, 1, 0); 276616cfc4b9SMarek Vasut if (ret) { 2767d2ea4950SMarek Vasut failed_substage = CAL_SUBSTAGE_VFIFO_CENTER; 276816cfc4b9SMarek Vasut continue; 27693da42859SDinh Nguyen } 2770fe2d0a2dSMarek Vasut 277116cfc4b9SMarek Vasut /* All done. */ 2772fe2d0a2dSMarek Vasut goto cal_done_ok; 27733da42859SDinh Nguyen } 27743da42859SDinh Nguyen } 27753da42859SDinh Nguyen 2776fe2d0a2dSMarek Vasut /* Calibration Stage 1 failed. */ 2777c336ca3eSMarek Vasut set_failing_group_stage(rw_group, CAL_STAGE_VFIFO, failed_substage); 27783da42859SDinh Nguyen return 0; 27793da42859SDinh Nguyen 2780fe2d0a2dSMarek Vasut /* Calibration Stage 1 completed OK. */ 2781fe2d0a2dSMarek Vasut cal_done_ok: 27823da42859SDinh Nguyen /* 27833da42859SDinh Nguyen * Reset the delay chains back to zero if they have moved > 1 27843da42859SDinh Nguyen * (check for > 1 because loop will increase d even when pass in 27853da42859SDinh Nguyen * first case). 27863da42859SDinh Nguyen */ 27873da42859SDinh Nguyen if (d > 2) 2788c336ca3eSMarek Vasut scc_mgr_zero_group(rw_group, 1); 27893da42859SDinh Nguyen 27903da42859SDinh Nguyen return 1; 27913da42859SDinh Nguyen } 27923da42859SDinh Nguyen 279378cdd7d0SMarek Vasut /** 279478cdd7d0SMarek Vasut * rw_mgr_mem_calibrate_vfifo_end() - DQ/DQS Centering. 279578cdd7d0SMarek Vasut * @rw_group: Read/Write Group 279678cdd7d0SMarek Vasut * @test_bgn: Rank at which the test begins 279778cdd7d0SMarek Vasut * 279878cdd7d0SMarek Vasut * Stage 3: DQ/DQS Centering. 279978cdd7d0SMarek Vasut * 280078cdd7d0SMarek Vasut * This function implements UniPHY calibration Stage 3, as explained in 280178cdd7d0SMarek Vasut * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages". 280278cdd7d0SMarek Vasut */ 280378cdd7d0SMarek Vasut static int rw_mgr_mem_calibrate_vfifo_end(const u32 rw_group, 280478cdd7d0SMarek Vasut const u32 test_bgn) 28053da42859SDinh Nguyen { 280678cdd7d0SMarek Vasut int ret; 28073da42859SDinh Nguyen 280878cdd7d0SMarek Vasut debug("%s:%d %u %u", __func__, __LINE__, rw_group, test_bgn); 28093da42859SDinh Nguyen 281078cdd7d0SMarek Vasut /* Update info for sims. 
*/ 281178cdd7d0SMarek Vasut reg_file_set_group(rw_group); 28123da42859SDinh Nguyen reg_file_set_stage(CAL_STAGE_VFIFO_AFTER_WRITES); 28133da42859SDinh Nguyen reg_file_set_sub_stage(CAL_SUBSTAGE_VFIFO_CENTER); 28143da42859SDinh Nguyen 281578cdd7d0SMarek Vasut ret = rw_mgr_mem_calibrate_dq_dqs_centering(rw_group, test_bgn, 0, 1); 281678cdd7d0SMarek Vasut if (ret) 281778cdd7d0SMarek Vasut set_failing_group_stage(rw_group, 28183da42859SDinh Nguyen CAL_STAGE_VFIFO_AFTER_WRITES, 28193da42859SDinh Nguyen CAL_SUBSTAGE_VFIFO_CENTER); 282078cdd7d0SMarek Vasut return ret; 28213da42859SDinh Nguyen } 28223da42859SDinh Nguyen 2823c984278aSMarek Vasut /** 2824c984278aSMarek Vasut * rw_mgr_mem_calibrate_lfifo() - Minimize latency 2825c984278aSMarek Vasut * 2826c984278aSMarek Vasut * Stage 4: Minimize latency. 2827c984278aSMarek Vasut * 2828c984278aSMarek Vasut * This function implements UniPHY calibration Stage 4, as explained in 2829c984278aSMarek Vasut * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages". 2830c984278aSMarek Vasut * Calibrate LFIFO to find smallest read latency. 2831c984278aSMarek Vasut */ 28323da42859SDinh Nguyen static uint32_t rw_mgr_mem_calibrate_lfifo(void) 28333da42859SDinh Nguyen { 2834c984278aSMarek Vasut int found_one = 0; 28353da42859SDinh Nguyen 28363da42859SDinh Nguyen debug("%s:%d\n", __func__, __LINE__); 28373da42859SDinh Nguyen 2838c984278aSMarek Vasut /* Update info for sims. */ 28393da42859SDinh Nguyen reg_file_set_stage(CAL_STAGE_LFIFO); 28403da42859SDinh Nguyen reg_file_set_sub_stage(CAL_SUBSTAGE_READ_LATENCY); 28413da42859SDinh Nguyen 28423da42859SDinh Nguyen /* Load up the patterns used by read calibration for all ranks */ 28433da42859SDinh Nguyen rw_mgr_mem_calibrate_read_load_patterns(0, 1); 28443da42859SDinh Nguyen 28453da42859SDinh Nguyen do { 28461273dd9eSMarek Vasut writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat); 28473da42859SDinh Nguyen debug_cond(DLEVEL == 2, "%s:%d lfifo: read_lat=%u", 28483da42859SDinh Nguyen __func__, __LINE__, gbl->curr_read_lat); 28493da42859SDinh Nguyen 2850c984278aSMarek Vasut if (!rw_mgr_mem_calibrate_read_test_all_ranks(0, NUM_READ_TESTS, 2851c984278aSMarek Vasut PASS_ALL_BITS, 1)) 28523da42859SDinh Nguyen break; 28533da42859SDinh Nguyen 28543da42859SDinh Nguyen found_one = 1; 2855c984278aSMarek Vasut /* 2856c984278aSMarek Vasut * Reduce read latency and see if things are 2857c984278aSMarek Vasut * working correctly. 2858c984278aSMarek Vasut */ 28593da42859SDinh Nguyen gbl->curr_read_lat--; 28603da42859SDinh Nguyen } while (gbl->curr_read_lat > 0); 28613da42859SDinh Nguyen 2862c984278aSMarek Vasut /* Reset the fifos to get pointers to known state. 
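 * Editor's note, illustrative trace with assumed values: if reads still
 * pass at curr_read_lat = 4 but fail once it is decremented to 3, the
 * loop above exits with curr_read_lat = 3 and the +2 fudge factor below
 * gives a final read latency of 5.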
*/ 28631273dd9eSMarek Vasut writel(0, &phy_mgr_cmd->fifo_reset); 28643da42859SDinh Nguyen 28653da42859SDinh Nguyen if (found_one) { 2866c984278aSMarek Vasut /* Add a fudge factor to the read latency that was determined */ 28673da42859SDinh Nguyen gbl->curr_read_lat += 2; 28681273dd9eSMarek Vasut writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat); 2869c984278aSMarek Vasut debug_cond(DLEVEL == 2, 2870c984278aSMarek Vasut "%s:%d lfifo: success: using read_lat=%u\n", 2871c984278aSMarek Vasut __func__, __LINE__, gbl->curr_read_lat); 28723da42859SDinh Nguyen } else { 28733da42859SDinh Nguyen set_failing_group_stage(0xff, CAL_STAGE_LFIFO, 28743da42859SDinh Nguyen CAL_SUBSTAGE_READ_LATENCY); 28753da42859SDinh Nguyen 2876c984278aSMarek Vasut debug_cond(DLEVEL == 2, 2877c984278aSMarek Vasut "%s:%d lfifo: failed at initial read_lat=%u\n", 2878c984278aSMarek Vasut __func__, __LINE__, gbl->curr_read_lat); 28793da42859SDinh Nguyen } 2880c984278aSMarek Vasut 2881c984278aSMarek Vasut return found_one; 28823da42859SDinh Nguyen } 28833da42859SDinh Nguyen 2884c8570afaSMarek Vasut /** 2885c8570afaSMarek Vasut * search_window() - Search for the/part of the window with DM/DQS shift 2886c8570afaSMarek Vasut * @search_dm: If 1, search for the DM shift, if 0, search for DQS shift 2887c8570afaSMarek Vasut * @rank_bgn: Rank number 2888c8570afaSMarek Vasut * @write_group: Write Group 2889c8570afaSMarek Vasut * @bgn_curr: Current window begin 2890c8570afaSMarek Vasut * @end_curr: Current window end 2891c8570afaSMarek Vasut * @bgn_best: Current best window begin 2892c8570afaSMarek Vasut * @end_best: Current best window end 2893c8570afaSMarek Vasut * @win_best: Size of the best window 2894c8570afaSMarek Vasut * @new_dqs: New DQS value (only applicable if search_dm = 0). 2895c8570afaSMarek Vasut * 2896c8570afaSMarek Vasut * Search for the/part of the window with DM/DQS shift. 2897c8570afaSMarek Vasut */ 2898c8570afaSMarek Vasut static void search_window(const int search_dm, 2899c8570afaSMarek Vasut const u32 rank_bgn, const u32 write_group, 2900c8570afaSMarek Vasut int *bgn_curr, int *end_curr, int *bgn_best, 2901c8570afaSMarek Vasut int *end_best, int *win_best, int new_dqs) 2902c8570afaSMarek Vasut { 2903c8570afaSMarek Vasut u32 bit_chk; 2904c8570afaSMarek Vasut const int max = IO_IO_OUT1_DELAY_MAX - new_dqs; 2905c8570afaSMarek Vasut int d, di; 2906c8570afaSMarek Vasut 2907c8570afaSMarek Vasut /* Search for the/part of the window with DM/DQS shift. */ 2908c8570afaSMarek Vasut for (di = max; di >= 0; di -= DELTA_D) { 2909c8570afaSMarek Vasut if (search_dm) { 2910c8570afaSMarek Vasut d = di; 2911c8570afaSMarek Vasut scc_mgr_apply_group_dm_out1_delay(d); 2912c8570afaSMarek Vasut } else { 2913c8570afaSMarek Vasut /* For DQS, we go from 0...max */ 2914c8570afaSMarek Vasut d = max - di; 2915c8570afaSMarek Vasut /* 2916c8570afaSMarek Vasut * Note: This only shifts DQS, so are we limiting ourselve to 2917c8570afaSMarek Vasut * width of DQ unnecessarily. 2918c8570afaSMarek Vasut */ 2919c8570afaSMarek Vasut scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, 2920c8570afaSMarek Vasut d + new_dqs); 2921c8570afaSMarek Vasut } 2922c8570afaSMarek Vasut 2923c8570afaSMarek Vasut writel(0, &sdr_scc_mgr->update); 2924c8570afaSMarek Vasut 2925c8570afaSMarek Vasut if (rw_mgr_mem_calibrate_write_test(rank_bgn, write_group, 1, 2926c8570afaSMarek Vasut PASS_ALL_BITS, &bit_chk, 2927c8570afaSMarek Vasut 0)) { 2928c8570afaSMarek Vasut /* Set current end of the window. */ 2929c8570afaSMarek Vasut *end_curr = search_dm ? 
-d : d; 2930c8570afaSMarek Vasut 2931c8570afaSMarek Vasut /* 2932c8570afaSMarek Vasut * If a starting edge of our window has not been seen 2933c8570afaSMarek Vasut * this is our current start of the DM window. 2934c8570afaSMarek Vasut */ 2935c8570afaSMarek Vasut if (*bgn_curr == IO_IO_OUT1_DELAY_MAX + 1) 2936c8570afaSMarek Vasut *bgn_curr = search_dm ? -d : d; 2937c8570afaSMarek Vasut 2938c8570afaSMarek Vasut /* 2939c8570afaSMarek Vasut * If current window is bigger than best seen. 2940c8570afaSMarek Vasut * Set best seen to be current window. 2941c8570afaSMarek Vasut */ 2942c8570afaSMarek Vasut if ((*end_curr - *bgn_curr + 1) > *win_best) { 2943c8570afaSMarek Vasut *win_best = *end_curr - *bgn_curr + 1; 2944c8570afaSMarek Vasut *bgn_best = *bgn_curr; 2945c8570afaSMarek Vasut *end_best = *end_curr; 2946c8570afaSMarek Vasut } 2947c8570afaSMarek Vasut } else { 2948c8570afaSMarek Vasut /* We just saw a failing test. Reset temp edge. */ 2949c8570afaSMarek Vasut *bgn_curr = IO_IO_OUT1_DELAY_MAX + 1; 2950c8570afaSMarek Vasut *end_curr = IO_IO_OUT1_DELAY_MAX + 1; 2951c8570afaSMarek Vasut 2952c8570afaSMarek Vasut /* Early exit is only applicable to DQS. */ 2953c8570afaSMarek Vasut if (search_dm) 2954c8570afaSMarek Vasut continue; 2955c8570afaSMarek Vasut 2956c8570afaSMarek Vasut /* 2957c8570afaSMarek Vasut * Early exit optimization: if the remaining delay 2958c8570afaSMarek Vasut * chain space is less than already seen largest 2959c8570afaSMarek Vasut * window we can exit. 2960c8570afaSMarek Vasut */ 2961c8570afaSMarek Vasut if (*win_best - 1 > IO_IO_OUT1_DELAY_MAX - new_dqs - d) 2962c8570afaSMarek Vasut break; 2963c8570afaSMarek Vasut } 2964c8570afaSMarek Vasut } 2965c8570afaSMarek Vasut } 2966c8570afaSMarek Vasut 29673da42859SDinh Nguyen /* 2968a386a50eSMarek Vasut * rw_mgr_mem_calibrate_writes_center() - Center all windows 2969a386a50eSMarek Vasut * @rank_bgn: Rank number 2970a386a50eSMarek Vasut * @write_group: Write group 2971a386a50eSMarek Vasut * @test_bgn: Rank at which the test begins 2972a386a50eSMarek Vasut * 2973a386a50eSMarek Vasut * Center all windows. Do per-bit-deskew to possibly increase size of 29743da42859SDinh Nguyen * certain windows. 
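 *
 * Editor's note: the flow below is per-bit DQ output deskew (the same
 * left/right edge search used on the read path), followed by a DM window
 * search and a DQS-shift window search via search_window(), and finally
 * recentring of the DM bit.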
29753da42859SDinh Nguyen */ 29763b44f55cSMarek Vasut static int 29773b44f55cSMarek Vasut rw_mgr_mem_calibrate_writes_center(const u32 rank_bgn, const u32 write_group, 29783b44f55cSMarek Vasut const u32 test_bgn) 29793da42859SDinh Nguyen { 2980c8570afaSMarek Vasut int i; 29813b44f55cSMarek Vasut u32 sticky_bit_chk; 29823b44f55cSMarek Vasut u32 min_index; 29833b44f55cSMarek Vasut int left_edge[RW_MGR_MEM_DQ_PER_WRITE_DQS]; 29843b44f55cSMarek Vasut int right_edge[RW_MGR_MEM_DQ_PER_WRITE_DQS]; 29853b44f55cSMarek Vasut int mid; 29863b44f55cSMarek Vasut int mid_min, orig_mid_min; 29873b44f55cSMarek Vasut int new_dqs, start_dqs; 29883b44f55cSMarek Vasut int dq_margin, dqs_margin, dm_margin; 29893b44f55cSMarek Vasut int bgn_curr = IO_IO_OUT1_DELAY_MAX + 1; 29903b44f55cSMarek Vasut int end_curr = IO_IO_OUT1_DELAY_MAX + 1; 29913b44f55cSMarek Vasut int bgn_best = IO_IO_OUT1_DELAY_MAX + 1; 29923b44f55cSMarek Vasut int end_best = IO_IO_OUT1_DELAY_MAX + 1; 29933b44f55cSMarek Vasut int win_best = 0; 29943da42859SDinh Nguyen 2995c4907898SMarek Vasut int ret; 2996c4907898SMarek Vasut 29973da42859SDinh Nguyen debug("%s:%d %u %u", __func__, __LINE__, write_group, test_bgn); 29983da42859SDinh Nguyen 29993da42859SDinh Nguyen dm_margin = 0; 30003da42859SDinh Nguyen 3001c6540872SMarek Vasut start_dqs = readl((SDR_PHYGRP_SCCGRP_ADDRESS | 3002c6540872SMarek Vasut SCC_MGR_IO_OUT1_DELAY_OFFSET) + 30033da42859SDinh Nguyen (RW_MGR_MEM_DQ_PER_WRITE_DQS << 2)); 30043da42859SDinh Nguyen 30053b44f55cSMarek Vasut /* Per-bit deskew. */ 30063da42859SDinh Nguyen 30073da42859SDinh Nguyen /* 30083b44f55cSMarek Vasut * Set the left and right edge of each bit to an illegal value. 30093b44f55cSMarek Vasut * Use (IO_IO_OUT1_DELAY_MAX + 1) as an illegal value. 30103da42859SDinh Nguyen */ 30113da42859SDinh Nguyen sticky_bit_chk = 0; 30123da42859SDinh Nguyen for (i = 0; i < RW_MGR_MEM_DQ_PER_WRITE_DQS; i++) { 30133da42859SDinh Nguyen left_edge[i] = IO_IO_OUT1_DELAY_MAX + 1; 30143da42859SDinh Nguyen right_edge[i] = IO_IO_OUT1_DELAY_MAX + 1; 30153da42859SDinh Nguyen } 30163da42859SDinh Nguyen 30173b44f55cSMarek Vasut /* Search for the left edge of the window for each bit. */ 301871120773SMarek Vasut search_left_edge(1, rank_bgn, write_group, 0, test_bgn, 30190c4be198SMarek Vasut &sticky_bit_chk, 302071120773SMarek Vasut left_edge, right_edge, 0); 30213da42859SDinh Nguyen 30223b44f55cSMarek Vasut /* Search for the right edge of the window for each bit. */ 3023c4907898SMarek Vasut ret = search_right_edge(1, rank_bgn, write_group, 0, 3024c4907898SMarek Vasut start_dqs, 0, 30250c4be198SMarek Vasut &sticky_bit_chk, 3026c4907898SMarek Vasut left_edge, right_edge, 0); 3027c4907898SMarek Vasut if (ret) { 3028c4907898SMarek Vasut set_failing_group_stage(test_bgn + ret - 1, CAL_STAGE_WRITES, 30293da42859SDinh Nguyen CAL_SUBSTAGE_WRITES_CENTER); 3030d043ee5bSMarek Vasut return -EINVAL; 30313da42859SDinh Nguyen } 30323da42859SDinh Nguyen 3033afb3eb84SMarek Vasut min_index = get_window_mid_index(1, left_edge, right_edge, &mid_min); 30343da42859SDinh Nguyen 30353b44f55cSMarek Vasut /* Determine the amount we can change DQS (which is -mid_min). 
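 * Editor's note: unlike the read path, DQS is not actually shifted at this
 * point on the write path; mid_min is forced to 0 just below and new_dqs
 * stays at start_dqs, so centring is done purely by moving the individual
 * DQ output delays in center_dq_windows().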
*/
30363da42859SDinh Nguyen orig_mid_min = mid_min;
30373da42859SDinh Nguyen new_dqs = start_dqs;
30383da42859SDinh Nguyen mid_min = 0;
30393b44f55cSMarek Vasut debug_cond(DLEVEL == 1,
30403b44f55cSMarek Vasut "%s:%d write_center: start_dqs=%d new_dqs=%d mid_min=%d\n",
30413b44f55cSMarek Vasut __func__, __LINE__, start_dqs, new_dqs, mid_min);
30423da42859SDinh Nguyen
3043ffb8b66eSMarek Vasut /* Add delay to bring centre of all DQ windows to the same "level". */
3044ffb8b66eSMarek Vasut center_dq_windows(1, left_edge, right_edge, mid_min, orig_mid_min,
3045ffb8b66eSMarek Vasut min_index, 0, &dq_margin, &dqs_margin);
30463da42859SDinh Nguyen
30473da42859SDinh Nguyen /* Move DQS */
30483da42859SDinh Nguyen scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs);
30491273dd9eSMarek Vasut writel(0, &sdr_scc_mgr->update);
30503da42859SDinh Nguyen
30513da42859SDinh Nguyen /* Centre DM */
30523da42859SDinh Nguyen debug_cond(DLEVEL == 2, "%s:%d write_center: DM\n", __func__, __LINE__);
30533da42859SDinh Nguyen
30543da42859SDinh Nguyen /*
30553b44f55cSMarek Vasut * Set the left and right edge of each bit to an illegal value.
30563b44f55cSMarek Vasut * Use (IO_IO_OUT1_DELAY_MAX + 1) as an illegal value.
30573da42859SDinh Nguyen */
30583da42859SDinh Nguyen left_edge[0] = IO_IO_OUT1_DELAY_MAX + 1;
30593da42859SDinh Nguyen right_edge[0] = IO_IO_OUT1_DELAY_MAX + 1;
30603da42859SDinh Nguyen
30613b44f55cSMarek Vasut /* Search for the/part of the window with DM shift. */
3062c8570afaSMarek Vasut search_window(1, rank_bgn, write_group, &bgn_curr, &end_curr,
3063c8570afaSMarek Vasut &bgn_best, &end_best, &win_best, 0);
30643da42859SDinh Nguyen
30653b44f55cSMarek Vasut /* Reset DM delay chains to 0. */
306632675249SMarek Vasut scc_mgr_apply_group_dm_out1_delay(0);
30673da42859SDinh Nguyen
30683da42859SDinh Nguyen /*
30693da42859SDinh Nguyen * Check to see if the current window nudges up against 0 delay.
30703da42859SDinh Nguyen * If so we need to continue the search by shifting DQS otherwise DQS
30713b44f55cSMarek Vasut * search begins as a new search.
30723b44f55cSMarek Vasut */
30733da42859SDinh Nguyen if (end_curr != 0) {
30743da42859SDinh Nguyen bgn_curr = IO_IO_OUT1_DELAY_MAX + 1;
30753da42859SDinh Nguyen end_curr = IO_IO_OUT1_DELAY_MAX + 1;
30763da42859SDinh Nguyen }
30773da42859SDinh Nguyen
30783b44f55cSMarek Vasut /* Search for the/part of the window with DQS shifts. */
3079c8570afaSMarek Vasut search_window(0, rank_bgn, write_group, &bgn_curr, &end_curr,
3080c8570afaSMarek Vasut &bgn_best, &end_best, &win_best, new_dqs);
30813da42859SDinh Nguyen
30823b44f55cSMarek Vasut /* Assign left and right edge for cal and reporting. */
30833da42859SDinh Nguyen left_edge[0] = -1 * bgn_best;
30843da42859SDinh Nguyen right_edge[0] = end_best;
30853da42859SDinh Nguyen
30863b44f55cSMarek Vasut debug_cond(DLEVEL == 2, "%s:%d dm_calib: left=%d right=%d\n",
30873b44f55cSMarek Vasut __func__, __LINE__, left_edge[0], right_edge[0]);
30883da42859SDinh Nguyen
30893b44f55cSMarek Vasut /* Move DQS (back to orig). */
30903da42859SDinh Nguyen scc_mgr_apply_group_dqs_io_and_oct_out1(write_group, new_dqs);
30913da42859SDinh Nguyen
30923da42859SDinh Nguyen /* Move DM */
30933da42859SDinh Nguyen
30943b44f55cSMarek Vasut /* Find middle of window for the DM bit. */
30953da42859SDinh Nguyen mid = (left_edge[0] - right_edge[0]) / 2;
30963da42859SDinh Nguyen
30973b44f55cSMarek Vasut /* Only move right, since we are not moving DQS/DQ.
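 * Editor's note, illustrative example with assumed window values: if the
 * searches above found bgn_best = -3 and end_best = 6, then
 * left_edge[0] = 3, right_edge[0] = 6 and mid = (3 - 6) / 2 = -1, which is
 * clamped to 0 below; dm_margin then becomes left_edge[0] - mid = 3.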
*/
30983da42859SDinh Nguyen if (mid < 0)
30993da42859SDinh Nguyen mid = 0;
31003da42859SDinh Nguyen
31013b44f55cSMarek Vasut /* dm_margin should fail if we never find a window. */
31023da42859SDinh Nguyen if (win_best == 0)
31033da42859SDinh Nguyen dm_margin = -1;
31043da42859SDinh Nguyen else
31053da42859SDinh Nguyen dm_margin = left_edge[0] - mid;
31063da42859SDinh Nguyen
310732675249SMarek Vasut scc_mgr_apply_group_dm_out1_delay(mid);
31081273dd9eSMarek Vasut writel(0, &sdr_scc_mgr->update);
31093da42859SDinh Nguyen
31103b44f55cSMarek Vasut debug_cond(DLEVEL == 2,
31113b44f55cSMarek Vasut "%s:%d dm_calib: left=%d right=%d mid=%d dm_margin=%d\n",
31123b44f55cSMarek Vasut __func__, __LINE__, left_edge[0], right_edge[0],
31133b44f55cSMarek Vasut mid, dm_margin);
31143b44f55cSMarek Vasut /* Export values. */
31153da42859SDinh Nguyen gbl->fom_out += dq_margin + dqs_margin;
31163da42859SDinh Nguyen
31173b44f55cSMarek Vasut debug_cond(DLEVEL == 2,
31183b44f55cSMarek Vasut "%s:%d write_center: dq_margin=%d dqs_margin=%d dm_margin=%d\n",
31193b44f55cSMarek Vasut __func__, __LINE__, dq_margin, dqs_margin, dm_margin);
31203da42859SDinh Nguyen
31213da42859SDinh Nguyen /*
31223da42859SDinh Nguyen * Do not remove this line as it makes sure all of our
31233da42859SDinh Nguyen * decisions have been applied.
31243da42859SDinh Nguyen */
31251273dd9eSMarek Vasut writel(0, &sdr_scc_mgr->update);
31263b44f55cSMarek Vasut
3127d043ee5bSMarek Vasut if ((dq_margin < 0) || (dqs_margin < 0) || (dm_margin < 0))
3128d043ee5bSMarek Vasut return -EINVAL;
3129d043ee5bSMarek Vasut
3130d043ee5bSMarek Vasut return 0;
31313da42859SDinh Nguyen }
31323da42859SDinh Nguyen
3133db3a6061SMarek Vasut /**
3134db3a6061SMarek Vasut * rw_mgr_mem_calibrate_writes() - Write Calibration Part One
3135db3a6061SMarek Vasut * @rank_bgn: Rank number
3136db3a6061SMarek Vasut * @group: Read/Write Group
3137db3a6061SMarek Vasut * @test_bgn: Rank at which the test begins
3138db3a6061SMarek Vasut *
3139db3a6061SMarek Vasut * Stage 2: Write Calibration Part One.
3140db3a6061SMarek Vasut *
3141db3a6061SMarek Vasut * This function implements UniPHY calibration Stage 2, as explained in
3142db3a6061SMarek Vasut * detail in Altera EMI_RM 2015.05.04 , "UniPHY Calibration Stages".
3143db3a6061SMarek Vasut */
3144db3a6061SMarek Vasut static int rw_mgr_mem_calibrate_writes(const u32 rank_bgn, const u32 group,
3145db3a6061SMarek Vasut const u32 test_bgn)
31463da42859SDinh Nguyen {
3147db3a6061SMarek Vasut int ret;
31483da42859SDinh Nguyen
3149db3a6061SMarek Vasut /* Update info for sims */
3150db3a6061SMarek Vasut debug("%s:%d %u %u\n", __func__, __LINE__, group, test_bgn);
3151db3a6061SMarek Vasut
3152db3a6061SMarek Vasut reg_file_set_group(group);
31533da42859SDinh Nguyen reg_file_set_stage(CAL_STAGE_WRITES);
31543da42859SDinh Nguyen reg_file_set_sub_stage(CAL_SUBSTAGE_WRITES_CENTER);
31553da42859SDinh Nguyen
3156db3a6061SMarek Vasut ret = rw_mgr_mem_calibrate_writes_center(rank_bgn, group, test_bgn);
3157d043ee5bSMarek Vasut if (ret)
3158db3a6061SMarek Vasut set_failing_group_stage(group, CAL_STAGE_WRITES,
31593da42859SDinh Nguyen CAL_SUBSTAGE_WRITES_CENTER);
31603da42859SDinh Nguyen
3161d043ee5bSMarek Vasut return ret;
31623da42859SDinh Nguyen }
31633da42859SDinh Nguyen
31644b0ac26aSMarek Vasut /**
31654b0ac26aSMarek Vasut * mem_precharge_and_activate() - Precharge all banks and activate
31664b0ac26aSMarek Vasut *
31674b0ac26aSMarek Vasut * Precharge all banks and activate row 0 in bank "000..." and bank "111...".
31684b0ac26aSMarek Vasut */ 31693da42859SDinh Nguyen static void mem_precharge_and_activate(void) 31703da42859SDinh Nguyen { 31714b0ac26aSMarek Vasut int r; 31723da42859SDinh Nguyen 31733da42859SDinh Nguyen for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; r++) { 31744b0ac26aSMarek Vasut /* Test if the rank should be skipped. */ 31754b0ac26aSMarek Vasut if (param->skip_ranks[r]) 31763da42859SDinh Nguyen continue; 31773da42859SDinh Nguyen 31784b0ac26aSMarek Vasut /* Set rank. */ 31793da42859SDinh Nguyen set_rank_and_odt_mask(r, RW_MGR_ODT_MODE_OFF); 31803da42859SDinh Nguyen 31814b0ac26aSMarek Vasut /* Precharge all banks. */ 31821273dd9eSMarek Vasut writel(RW_MGR_PRECHARGE_ALL, SDR_PHYGRP_RWMGRGRP_ADDRESS | 31831273dd9eSMarek Vasut RW_MGR_RUN_SINGLE_GROUP_OFFSET); 31843da42859SDinh Nguyen 31851273dd9eSMarek Vasut writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr0); 31861273dd9eSMarek Vasut writel(RW_MGR_ACTIVATE_0_AND_1_WAIT1, 31871273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add0); 31883da42859SDinh Nguyen 31891273dd9eSMarek Vasut writel(0x0F, &sdr_rw_load_mgr_regs->load_cntr1); 31901273dd9eSMarek Vasut writel(RW_MGR_ACTIVATE_0_AND_1_WAIT2, 31911273dd9eSMarek Vasut &sdr_rw_load_jump_mgr_regs->load_jump_add1); 31923da42859SDinh Nguyen 31934b0ac26aSMarek Vasut /* Activate rows. */ 31941273dd9eSMarek Vasut writel(RW_MGR_ACTIVATE_0_AND_1, SDR_PHYGRP_RWMGRGRP_ADDRESS | 31951273dd9eSMarek Vasut RW_MGR_RUN_SINGLE_GROUP_OFFSET); 31963da42859SDinh Nguyen } 31973da42859SDinh Nguyen } 31983da42859SDinh Nguyen 319916502a0bSMarek Vasut /** 320016502a0bSMarek Vasut * mem_init_latency() - Configure memory RLAT and WLAT settings 320116502a0bSMarek Vasut * 320216502a0bSMarek Vasut * Configure memory RLAT and WLAT parameters. 320316502a0bSMarek Vasut */ 320416502a0bSMarek Vasut static void mem_init_latency(void) 32053da42859SDinh Nguyen { 320616502a0bSMarek Vasut /* 320716502a0bSMarek Vasut * For AV/CV, LFIFO is hardened and always runs at full rate 320816502a0bSMarek Vasut * so max latency in AFI clocks, used here, is correspondingly 320916502a0bSMarek Vasut * smaller. 321016502a0bSMarek Vasut */ 321116502a0bSMarek Vasut const u32 max_latency = (1 << MAX_LATENCY_COUNT_WIDTH) - 1; 321216502a0bSMarek Vasut u32 rlat, wlat; 32133da42859SDinh Nguyen 32143da42859SDinh Nguyen debug("%s:%d\n", __func__, __LINE__); 321516502a0bSMarek Vasut 321616502a0bSMarek Vasut /* 321716502a0bSMarek Vasut * Read in write latency. 321816502a0bSMarek Vasut * WL for Hard PHY does not include additive latency. 321916502a0bSMarek Vasut */ 32201273dd9eSMarek Vasut wlat = readl(&data_mgr->t_wl_add); 32211273dd9eSMarek Vasut wlat += readl(&data_mgr->mem_t_add); 32223da42859SDinh Nguyen 322316502a0bSMarek Vasut gbl->rw_wl_nop_cycles = wlat - 1; 32243da42859SDinh Nguyen 322516502a0bSMarek Vasut /* Read in readl latency. */ 32261273dd9eSMarek Vasut rlat = readl(&data_mgr->t_rl_add); 32273da42859SDinh Nguyen 322816502a0bSMarek Vasut /* Set a pretty high read latency initially. */ 32293da42859SDinh Nguyen gbl->curr_read_lat = rlat + 16; 32303da42859SDinh Nguyen if (gbl->curr_read_lat > max_latency) 32313da42859SDinh Nguyen gbl->curr_read_lat = max_latency; 32323da42859SDinh Nguyen 32331273dd9eSMarek Vasut writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat); 32343da42859SDinh Nguyen 323516502a0bSMarek Vasut /* Advertise write latency. 
*/ 323616502a0bSMarek Vasut writel(wlat, &phy_mgr_cfg->afi_wlat); 32373da42859SDinh Nguyen } 32383da42859SDinh Nguyen 323951cea0b6SMarek Vasut /** 324051cea0b6SMarek Vasut * @mem_skip_calibrate() - Set VFIFO and LFIFO to instant-on settings 324151cea0b6SMarek Vasut * 324251cea0b6SMarek Vasut * Set VFIFO and LFIFO to instant-on settings in skip calibration mode. 324351cea0b6SMarek Vasut */ 32443da42859SDinh Nguyen static void mem_skip_calibrate(void) 32453da42859SDinh Nguyen { 32463da42859SDinh Nguyen uint32_t vfifo_offset; 32473da42859SDinh Nguyen uint32_t i, j, r; 32483da42859SDinh Nguyen 32493da42859SDinh Nguyen debug("%s:%d\n", __func__, __LINE__); 32503da42859SDinh Nguyen /* Need to update every shadow register set used by the interface */ 32513da42859SDinh Nguyen for (r = 0; r < RW_MGR_MEM_NUMBER_OF_RANKS; 32523da42859SDinh Nguyen r += NUM_RANKS_PER_SHADOW_REG) { 32533da42859SDinh Nguyen /* 32543da42859SDinh Nguyen * Set output phase alignment settings appropriate for 32553da42859SDinh Nguyen * skip calibration. 32563da42859SDinh Nguyen */ 32573da42859SDinh Nguyen for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) { 32583da42859SDinh Nguyen scc_mgr_set_dqs_en_phase(i, 0); 32593da42859SDinh Nguyen #if IO_DLL_CHAIN_LENGTH == 6 32603da42859SDinh Nguyen scc_mgr_set_dqdqs_output_phase(i, 6); 32613da42859SDinh Nguyen #else 32623da42859SDinh Nguyen scc_mgr_set_dqdqs_output_phase(i, 7); 32633da42859SDinh Nguyen #endif 32643da42859SDinh Nguyen /* 32653da42859SDinh Nguyen * Case:33398 32663da42859SDinh Nguyen * 32673da42859SDinh Nguyen * Write data arrives to the I/O two cycles before write 32683da42859SDinh Nguyen * latency is reached (720 deg). 32693da42859SDinh Nguyen * -> due to bit-slip in a/c bus 32703da42859SDinh Nguyen * -> to allow board skew where dqs is longer than ck 32713da42859SDinh Nguyen * -> how often can this happen!? 32723da42859SDinh Nguyen * -> can claim back some ptaps for high freq 32733da42859SDinh Nguyen * support if we can relax this, but i digress... 
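 * (Editor's note, illustrative: with IO_DLL_CHAIN_LENGTH = 8 the formula
 * derived below gives 1.25 * 8 - 2 = 8 ptaps of DQS output phase; with a
 * chain length of 6 it gives 5.5, i.e. 5 after the implicit integer
 * conversion in the call below.)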
32743da42859SDinh Nguyen *
32753da42859SDinh Nguyen * The write_clk leads mem_ck by 90 deg
32763da42859SDinh Nguyen * The minimum ptap of the OPA is 180 deg
32773da42859SDinh Nguyen * Each ptap has (360 / IO_DLL_CHAIN_LENGTH) deg of delay
32783da42859SDinh Nguyen * The write_clk is always delayed by 2 ptaps
32793da42859SDinh Nguyen *
32803da42859SDinh Nguyen * Hence, to make DQS aligned to CK, we need to delay
32813da42859SDinh Nguyen * DQS by:
32823da42859SDinh Nguyen * (720 - 90 - 180 - 2 * (360 / IO_DLL_CHAIN_LENGTH))
32833da42859SDinh Nguyen *
32843da42859SDinh Nguyen * Dividing the above by (360 / IO_DLL_CHAIN_LENGTH)
32853da42859SDinh Nguyen * gives us the number of ptaps, which simplifies to:
32863da42859SDinh Nguyen *
32873da42859SDinh Nguyen * (1.25 * IO_DLL_CHAIN_LENGTH - 2)
32883da42859SDinh Nguyen */
328951cea0b6SMarek Vasut scc_mgr_set_dqdqs_output_phase(i,
329051cea0b6SMarek Vasut 1.25 * IO_DLL_CHAIN_LENGTH - 2);
32913da42859SDinh Nguyen }
32921273dd9eSMarek Vasut writel(0xff, &sdr_scc_mgr->dqs_ena);
32931273dd9eSMarek Vasut writel(0xff, &sdr_scc_mgr->dqs_io_ena);
32943da42859SDinh Nguyen
32953da42859SDinh Nguyen for (i = 0; i < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; i++) {
32961273dd9eSMarek Vasut writel(i, SDR_PHYGRP_SCCGRP_ADDRESS |
32971273dd9eSMarek Vasut SCC_MGR_GROUP_COUNTER_OFFSET);
32983da42859SDinh Nguyen }
32991273dd9eSMarek Vasut writel(0xff, &sdr_scc_mgr->dq_ena);
33001273dd9eSMarek Vasut writel(0xff, &sdr_scc_mgr->dm_ena);
33011273dd9eSMarek Vasut writel(0, &sdr_scc_mgr->update);
33023da42859SDinh Nguyen }
33033da42859SDinh Nguyen
33043da42859SDinh Nguyen /* Compensate for simulation model behaviour */
33053da42859SDinh Nguyen for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) {
33063da42859SDinh Nguyen scc_mgr_set_dqs_bus_in_delay(i, 10);
33073da42859SDinh Nguyen scc_mgr_load_dqs(i);
33083da42859SDinh Nguyen }
33091273dd9eSMarek Vasut writel(0, &sdr_scc_mgr->update);
33103da42859SDinh Nguyen
33113da42859SDinh Nguyen /*
33123da42859SDinh Nguyen * Arria V has hard FIFOs that can only be initialized by incrementing
33133da42859SDinh Nguyen * in sequencer.
33143da42859SDinh Nguyen */
33153da42859SDinh Nguyen vfifo_offset = CALIB_VFIFO_OFFSET;
331651cea0b6SMarek Vasut for (j = 0; j < vfifo_offset; j++)
33171273dd9eSMarek Vasut writel(0xff, &phy_mgr_cmd->inc_vfifo_hard_phy);
33181273dd9eSMarek Vasut writel(0, &phy_mgr_cmd->fifo_reset);
33193da42859SDinh Nguyen
33203da42859SDinh Nguyen /*
332151cea0b6SMarek Vasut * For Arria V and Cyclone V with hard LFIFO, we get the skip-cal
332251cea0b6SMarek Vasut * setting from generation-time constant.
33233da42859SDinh Nguyen */
33243da42859SDinh Nguyen gbl->curr_read_lat = CALIB_LFIFO_OFFSET;
33251273dd9eSMarek Vasut writel(gbl->curr_read_lat, &phy_mgr_cfg->phy_rlat);
33263da42859SDinh Nguyen }
33273da42859SDinh Nguyen
33283589fbfbSMarek Vasut /**
33293589fbfbSMarek Vasut * mem_calibrate() - Memory calibration entry point.
33303589fbfbSMarek Vasut *
33313589fbfbSMarek Vasut * Perform memory calibration.
33323589fbfbSMarek Vasut */ 33333da42859SDinh Nguyen static uint32_t mem_calibrate(void) 33343da42859SDinh Nguyen { 33353da42859SDinh Nguyen uint32_t i; 33363da42859SDinh Nguyen uint32_t rank_bgn, sr; 33373da42859SDinh Nguyen uint32_t write_group, write_test_bgn; 33383da42859SDinh Nguyen uint32_t read_group, read_test_bgn; 33393da42859SDinh Nguyen uint32_t run_groups, current_run; 33403da42859SDinh Nguyen uint32_t failing_groups = 0; 33413da42859SDinh Nguyen uint32_t group_failed = 0; 33423da42859SDinh Nguyen 334333c42bb8SMarek Vasut const u32 rwdqs_ratio = RW_MGR_MEM_IF_READ_DQS_WIDTH / 334433c42bb8SMarek Vasut RW_MGR_MEM_IF_WRITE_DQS_WIDTH; 334533c42bb8SMarek Vasut 33463da42859SDinh Nguyen debug("%s:%d\n", __func__, __LINE__); 33473da42859SDinh Nguyen 334816502a0bSMarek Vasut /* Initialize the data settings */ 33493da42859SDinh Nguyen gbl->error_substage = CAL_SUBSTAGE_NIL; 33503da42859SDinh Nguyen gbl->error_stage = CAL_STAGE_NIL; 33513da42859SDinh Nguyen gbl->error_group = 0xff; 33523da42859SDinh Nguyen gbl->fom_in = 0; 33533da42859SDinh Nguyen gbl->fom_out = 0; 33543da42859SDinh Nguyen 335516502a0bSMarek Vasut /* Initialize WLAT and RLAT. */ 335616502a0bSMarek Vasut mem_init_latency(); 335716502a0bSMarek Vasut 335816502a0bSMarek Vasut /* Initialize bit slips. */ 335916502a0bSMarek Vasut mem_precharge_and_activate(); 33603da42859SDinh Nguyen 33613da42859SDinh Nguyen for (i = 0; i < RW_MGR_MEM_IF_READ_DQS_WIDTH; i++) { 33621273dd9eSMarek Vasut writel(i, SDR_PHYGRP_SCCGRP_ADDRESS | 33631273dd9eSMarek Vasut SCC_MGR_GROUP_COUNTER_OFFSET); 3364fa5d821bSMarek Vasut /* Only needed once to set all groups, pins, DQ, DQS, DM. */ 3365fa5d821bSMarek Vasut if (i == 0) 3366fa5d821bSMarek Vasut scc_mgr_set_hhp_extras(); 3367fa5d821bSMarek Vasut 3368c5c5f537SMarek Vasut scc_set_bypass_mode(i); 33693da42859SDinh Nguyen } 33703da42859SDinh Nguyen 3371722c9685SMarek Vasut /* Calibration is skipped. */ 33723da42859SDinh Nguyen if ((dyn_calib_steps & CALIB_SKIP_ALL) == CALIB_SKIP_ALL) { 33733da42859SDinh Nguyen /* 33743da42859SDinh Nguyen * Set VFIFO and LFIFO to instant-on settings in skip 33753da42859SDinh Nguyen * calibration mode. 33763da42859SDinh Nguyen */ 33773da42859SDinh Nguyen mem_skip_calibrate(); 3378722c9685SMarek Vasut 3379722c9685SMarek Vasut /* 3380722c9685SMarek Vasut * Do not remove this line as it makes sure all of our 3381722c9685SMarek Vasut * decisions have been applied. 3382722c9685SMarek Vasut */ 3383722c9685SMarek Vasut writel(0, &sdr_scc_mgr->update); 3384722c9685SMarek Vasut return 1; 3385722c9685SMarek Vasut } 3386722c9685SMarek Vasut 3387722c9685SMarek Vasut /* Calibration is not skipped. */ 33883da42859SDinh Nguyen for (i = 0; i < NUM_CALIB_REPEAT; i++) { 33893da42859SDinh Nguyen /* 33903da42859SDinh Nguyen * Zero all delay chain/phase settings for all 33913da42859SDinh Nguyen * groups and all shadow register sets. 
33923da42859SDinh Nguyen */ 33933da42859SDinh Nguyen scc_mgr_zero_all(); 33943da42859SDinh Nguyen 33953da42859SDinh Nguyen run_groups = ~param->skip_groups; 33963da42859SDinh Nguyen 33973da42859SDinh Nguyen for (write_group = 0, write_test_bgn = 0; write_group 33983da42859SDinh Nguyen < RW_MGR_MEM_IF_WRITE_DQS_WIDTH; write_group++, 33993da42859SDinh Nguyen write_test_bgn += RW_MGR_MEM_DQ_PER_WRITE_DQS) { 3400c452dcd0SMarek Vasut 3401c452dcd0SMarek Vasut /* Initialize the group failure */ 34023da42859SDinh Nguyen group_failed = 0; 34033da42859SDinh Nguyen 34043da42859SDinh Nguyen current_run = run_groups & ((1 << 34053da42859SDinh Nguyen RW_MGR_NUM_DQS_PER_WRITE_GROUP) - 1); 34063da42859SDinh Nguyen run_groups = run_groups >> 34073da42859SDinh Nguyen RW_MGR_NUM_DQS_PER_WRITE_GROUP; 34083da42859SDinh Nguyen 34093da42859SDinh Nguyen if (current_run == 0) 34103da42859SDinh Nguyen continue; 34113da42859SDinh Nguyen 34121273dd9eSMarek Vasut writel(write_group, SDR_PHYGRP_SCCGRP_ADDRESS | 34131273dd9eSMarek Vasut SCC_MGR_GROUP_COUNTER_OFFSET); 3414d41ea93aSMarek Vasut scc_mgr_zero_group(write_group, 0); 34153da42859SDinh Nguyen 341633c42bb8SMarek Vasut for (read_group = write_group * rwdqs_ratio, 34173da42859SDinh Nguyen read_test_bgn = 0; 3418c452dcd0SMarek Vasut read_group < (write_group + 1) * rwdqs_ratio; 341933c42bb8SMarek Vasut read_group++, 342033c42bb8SMarek Vasut read_test_bgn += RW_MGR_MEM_DQ_PER_READ_DQS) { 342133c42bb8SMarek Vasut if (STATIC_CALIB_STEPS & CALIB_SKIP_VFIFO) 342233c42bb8SMarek Vasut continue; 34233da42859SDinh Nguyen 342433c42bb8SMarek Vasut /* Calibrate the VFIFO */ 342533c42bb8SMarek Vasut if (rw_mgr_mem_calibrate_vfifo(read_group, 342633c42bb8SMarek Vasut read_test_bgn)) 342733c42bb8SMarek Vasut continue; 342833c42bb8SMarek Vasut 342933c42bb8SMarek Vasut if (!(gbl->phy_debug_mode_flags & PHY_DEBUG_SWEEP_ALL_GROUPS)) 34303da42859SDinh Nguyen return 0; 3431c452dcd0SMarek Vasut 3432c452dcd0SMarek Vasut /* The group failed, we're done. */ 3433c452dcd0SMarek Vasut goto grp_failed; 34343da42859SDinh Nguyen } 34353da42859SDinh Nguyen 34363da42859SDinh Nguyen /* Calibrate the output side */ 34374ac21610SMarek Vasut for (rank_bgn = 0, sr = 0; 34384ac21610SMarek Vasut rank_bgn < RW_MGR_MEM_NUMBER_OF_RANKS; 34394ac21610SMarek Vasut rank_bgn += NUM_RANKS_PER_SHADOW_REG, sr++) { 34404ac21610SMarek Vasut if (STATIC_CALIB_STEPS & CALIB_SKIP_WRITES) 34414ac21610SMarek Vasut continue; 34424ac21610SMarek Vasut 34434ac21610SMarek Vasut /* Not needed in quick mode! */ 34444ac21610SMarek Vasut if (STATIC_CALIB_STEPS & CALIB_SKIP_DELAY_SWEEPS) 34454ac21610SMarek Vasut continue; 34464ac21610SMarek Vasut 34473da42859SDinh Nguyen /* 34484ac21610SMarek Vasut * Determine if this set of ranks 34494ac21610SMarek Vasut * should be skipped entirely. 34503da42859SDinh Nguyen */ 34514ac21610SMarek Vasut if (param->skip_shadow_regs[sr]) 34524ac21610SMarek Vasut continue; 34534ac21610SMarek Vasut 34544ac21610SMarek Vasut /* Calibrate WRITEs */ 3455db3a6061SMarek Vasut if (!rw_mgr_mem_calibrate_writes(rank_bgn, 34564ac21610SMarek Vasut write_group, write_test_bgn)) 34574ac21610SMarek Vasut continue; 34584ac21610SMarek Vasut 34593da42859SDinh Nguyen group_failed = 1; 34604ac21610SMarek Vasut if (!(gbl->phy_debug_mode_flags & PHY_DEBUG_SWEEP_ALL_GROUPS)) 34614ac21610SMarek Vasut return 0; 34623da42859SDinh Nguyen } 34633da42859SDinh Nguyen 3464c452dcd0SMarek Vasut /* Some group failed, we're done. 
			/* Some group failed, we're done. */
			if (group_failed)
				goto grp_failed;

			for (read_group = write_group * rwdqs_ratio,
			     read_test_bgn = 0;
			     read_group < (write_group + 1) * rwdqs_ratio;
			     read_group++,
			     read_test_bgn += RW_MGR_MEM_DQ_PER_READ_DQS) {
				if (STATIC_CALIB_STEPS & CALIB_SKIP_WRITES)
					continue;

				if (!rw_mgr_mem_calibrate_vfifo_end(read_group,
								    read_test_bgn))
					continue;

				if (!(gbl->phy_debug_mode_flags & PHY_DEBUG_SWEEP_ALL_GROUPS))
					return 0;

				/* The group failed, we're done. */
				goto grp_failed;
			}

			/* No group failed, continue as usual. */
			continue;

grp_failed:		/* A group failed, increment the counter. */
			failing_groups++;
		}

		/*
		 * If there are any failing groups then report
		 * the failure.
		 */
		if (failing_groups != 0)
			return 0;

		if (STATIC_CALIB_STEPS & CALIB_SKIP_LFIFO)
			continue;

		/*
		 * If we're skipping groups as part of debug,
		 * don't calibrate LFIFO.
		 */
		if (param->skip_groups != 0)
			continue;

		/* Calibrate the LFIFO */
		if (!rw_mgr_mem_calibrate_lfifo())
			return 0;
	}

	/*
	 * Do not remove this line as it makes sure all of our decisions
	 * have been applied.
	 */
	writel(0, &sdr_scc_mgr->update);
	return 1;
}
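
/*
 * How the group-skip mask in mem_calibrate() is consumed (sketch; the widths
 * come from the generated sequencer_defines.h): run_groups starts out as
 * ~param->skip_groups, and each write_group iteration takes the low
 * RW_MGR_NUM_DQS_PER_WRITE_GROUP bits as current_run, then shifts them out.
 * Assuming one DQS per write group, skip_groups = 0x2 would yield
 * current_run = 1, 0, 1, ... so only write group 1 is skipped.
 */
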
/**
 * run_mem_calibrate() - Perform memory calibration
 *
 * This function triggers the entire memory calibration procedure.
 */
static int run_mem_calibrate(void)
{
	int pass;

	debug("%s:%d\n", __func__, __LINE__);

	/* Reset pass/fail status shown on afi_cal_success/fail */
	writel(PHY_MGR_CAL_RESET, &phy_mgr_cfg->cal_status);

	/* Stop tracking manager. */
	clrbits_le32(&sdr_ctrl->ctrl_cfg, 1 << 22);

	phy_mgr_initialize();
	rw_mgr_mem_initialize();

	/* Perform the actual memory calibration. */
	pass = mem_calibrate();

	mem_precharge_and_activate();
	writel(0, &phy_mgr_cmd->fifo_reset);

	/* Handoff. */
	rw_mgr_mem_handoff();
	/*
	 * In Hard PHY this is a 2-bit control:
	 * 0: AFI Mux Select
	 * 1: DDIO Mux Select
	 */
	writel(0x2, &phy_mgr_cfg->mux_sel);

	/* Start tracking manager. */
	setbits_le32(&sdr_ctrl->ctrl_cfg, 1 << 22);

	return pass;
}

/**
 * debug_mem_calibrate() - Report result of memory calibration
 * @pass:	Value indicating whether calibration passed or failed
 *
 * This function reports the results of the memory calibration
 * and writes debug information into the register file.
 */
static void debug_mem_calibrate(int pass)
{
	uint32_t debug_info;

	if (pass) {
		printf("%s: CALIBRATION PASSED\n", __FILE__);

		gbl->fom_in /= 2;
		gbl->fom_out /= 2;

		if (gbl->fom_in > 0xff)
			gbl->fom_in = 0xff;

		if (gbl->fom_out > 0xff)
			gbl->fom_out = 0xff;

		/* Update the FOM in the register file */
		debug_info = gbl->fom_in;
		debug_info |= gbl->fom_out << 8;
		writel(debug_info, &sdr_reg_file->fom);

		writel(debug_info, &phy_mgr_cfg->cal_debug_info);
		writel(PHY_MGR_CAL_SUCCESS, &phy_mgr_cfg->cal_status);
	} else {
		printf("%s: CALIBRATION FAILED\n", __FILE__);

		debug_info = gbl->error_stage;
		debug_info |= gbl->error_substage << 8;
		debug_info |= gbl->error_group << 16;

		writel(debug_info, &sdr_reg_file->failing_stage);
		writel(debug_info, &phy_mgr_cfg->cal_debug_info);
		writel(PHY_MGR_CAL_FAIL, &phy_mgr_cfg->cal_status);

		/* Update the failing group/stage in the register file */
		debug_info = gbl->error_stage;
		debug_info |= gbl->error_substage << 8;
		debug_info |= gbl->error_group << 16;
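
		/*
		 * Packed layout: error stage in bits [7:0], substage in
		 * bits [15:8], failing group in bits [23:16].
		 */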
		writel(debug_info, &sdr_reg_file->failing_stage);
	}

	printf("%s: Calibration complete\n", __FILE__);
}

/**
 * hc_initialize_rom_data() - Initialize ROM data
 *
 * Initialize ROM data.
 */
static void hc_initialize_rom_data(void)
{
	u32 i, addr;

	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_INST_ROM_WRITE_OFFSET;
	for (i = 0; i < ARRAY_SIZE(inst_rom_init); i++)
		writel(inst_rom_init[i], addr + (i << 2));

	addr = SDR_PHYGRP_RWMGRGRP_ADDRESS | RW_MGR_AC_ROM_WRITE_OFFSET;
	for (i = 0; i < ARRAY_SIZE(ac_rom_init); i++)
		writel(ac_rom_init[i], addr + (i << 2));
}

/**
 * initialize_reg_file() - Initialize SDR register file
 *
 * Initialize SDR register file.
 */
static void initialize_reg_file(void)
{
	/* Initialize the register file with the correct data */
	writel(REG_FILE_INIT_SEQ_SIGNATURE, &sdr_reg_file->signature);
	writel(0, &sdr_reg_file->debug_data_addr);
	writel(0, &sdr_reg_file->cur_stage);
	writel(0, &sdr_reg_file->fom);
	writel(0, &sdr_reg_file->failing_stage);
	writel(0, &sdr_reg_file->debug1);
	writel(0, &sdr_reg_file->debug2);
}

/**
 * initialize_hps_phy() - Initialize HPS PHY
 *
 * Initialize HPS PHY.
 */
static void initialize_hps_phy(void)
{
	uint32_t reg;
	/*
	 * Tracking also gets configured here because it's in the
	 * same register.
	 */
	uint32_t trk_sample_count = 7500;
	uint32_t trk_long_idle_sample_count = (10 << 16) | 100;
	/*
	 * Format is number of outer loops in the 16 MSB, sample
	 * count in 16 LSB.
	 */

	reg = 0;
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ACDELAYEN_SET(2);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQDELAYEN_SET(1);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSDELAYEN_SET(1);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_DQSLOGICDELAYEN_SET(1);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_RESETDELAYEN_SET(0);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_LPDDRDIS_SET(1);
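
	/*
	 * Note on the tracking sample counters (field widths assumed from
	 * the macro names, e.g. SAMPLECOUNT_19_0 covering bits [19:0]):
	 * phy_ctrl0 below takes the low 20 bits of trk_sample_count and
	 * phy_ctrl1 the remainder.  With the values used here, 7500 < 2^20
	 * and ((10 << 16) | 100) = 655460 < 2^20, so the _31_20 portions
	 * written to phy_ctrl1 and phy_ctrl2 both end up as zero.
	 */
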
	/*
	 * This field selects the intrinsic latency to RDATA_EN/FULL path.
	 * 00 - bypass, 01 - add 5 cycles, 10 - add 10 cycles,
	 * 11 - add 15 cycles.
	 */
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_ADDLATSEL_SET(0);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_SET(
		trk_sample_count);
	writel(reg, &sdr_ctrl->phy_ctrl0);

	reg = 0;
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_SAMPLECOUNT_31_20_SET(
		trk_sample_count >>
		SDR_CTRLGRP_PHYCTRL_PHYCTRL_0_SAMPLECOUNT_19_0_WIDTH);
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_SET(
		trk_long_idle_sample_count);
	writel(reg, &sdr_ctrl->phy_ctrl1);

	reg = 0;
	reg |= SDR_CTRLGRP_PHYCTRL_PHYCTRL_2_LONGIDLESAMPLECOUNT_31_20_SET(
		trk_long_idle_sample_count >>
		SDR_CTRLGRP_PHYCTRL_PHYCTRL_1_LONGIDLESAMPLECOUNT_19_0_WIDTH);
	writel(reg, &sdr_ctrl->phy_ctrl2);
}

/**
 * initialize_tracking() - Initialize tracking
 *
 * Initialize the register file with usable initial data.
 */
static void initialize_tracking(void)
{
	/*
	 * Initialize the register file with the correct data.
	 * Compute usable version of value in case we skip full
	 * computation later.
	 */
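
	/*
	 * Worked example with made-up timings (the real ones come from the
	 * generated sequencer_defines.h): if IO_DELAY_PER_OPA_TAP were
	 * 416 ps and IO_DELAY_PER_DCHAIN_TAP were 25 ps, then
	 * DIV_ROUND_UP(416, 25) = 17, so 16 would be programmed below as
	 * dtaps_per_ptap.
	 */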
	writel(DIV_ROUND_UP(IO_DELAY_PER_OPA_TAP, IO_DELAY_PER_DCHAIN_TAP) - 1,
	       &sdr_reg_file->dtaps_per_ptap);

	/* trk_sample_count */
	writel(7500, &sdr_reg_file->trk_sample_count);

	/* longidle outer loop [15:0] */
	writel((10 << 16) | (100 << 0), &sdr_reg_file->trk_longidle);

	/*
	 * longidle sample count [31:24]
	 * trfc, worst case of 933 MHz 4Gb [23:16]
	 * trcd, worst case [15:8]
	 * vfifo wait [7:0]
	 */
	writel((243 << 24) | (14 << 16) | (10 << 8) | (4 << 0),
	       &sdr_reg_file->delays);

	/* mux delay */
	writel((RW_MGR_IDLE << 24) | (RW_MGR_ACTIVATE_1 << 16) |
	       (RW_MGR_SGLE_READ << 8) | (RW_MGR_PRECHARGE_ALL << 0),
	       &sdr_reg_file->trk_rw_mgr_addr);

	writel(RW_MGR_MEM_IF_READ_DQS_WIDTH,
	       &sdr_reg_file->trk_read_dqs_width);

	/* trefi [7:0] */
	writel((RW_MGR_REFRESH_ALL << 24) | (1000 << 0),
	       &sdr_reg_file->trk_rfsh);
}

int sdram_calibration_full(void)
{
	struct param_type my_param;
	struct gbl_type my_gbl;
	uint32_t pass;

	memset(&my_param, 0, sizeof(my_param));
	memset(&my_gbl, 0, sizeof(my_gbl));

	param = &my_param;
	gbl = &my_gbl;

	/* Set the calibration enabled by default */
	gbl->phy_debug_mode_flags |= PHY_DEBUG_ENABLE_CAL_RPT;
	/*
	 * Only sweep all groups (regardless of fail state) by default.
	 * Set enabled read test by default.
	 */
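
	/*
	 * Sketch of how the remaining debug flag would be enabled for
	 * bring-up (not done by default): OR-ing in
	 *
	 *	gbl->phy_debug_mode_flags |= PHY_DEBUG_SWEEP_ALL_GROUPS;
	 *
	 * here makes mem_calibrate() keep sweeping the remaining groups
	 * after a failure instead of returning on the first failing group.
	 */
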
#if DISABLE_GUARANTEED_READ
	gbl->phy_debug_mode_flags |= PHY_DEBUG_DISABLE_GUARANTEED_READ;
#endif
	/* Initialize the register file */
	initialize_reg_file();

	/* Initialize any PHY CSR */
	initialize_hps_phy();

	scc_mgr_initialize();

	initialize_tracking();

	printf("%s: Preparing to start memory calibration\n", __FILE__);

	debug("%s:%d\n", __func__, __LINE__);
	debug_cond(DLEVEL == 1,
		   "DDR3 FULL_RATE ranks=%u cs/dimm=%u dq/dqs=%u,%u vg/dqs=%u,%u ",
		   RW_MGR_MEM_NUMBER_OF_RANKS, RW_MGR_MEM_NUMBER_OF_CS_PER_DIMM,
		   RW_MGR_MEM_DQ_PER_READ_DQS, RW_MGR_MEM_DQ_PER_WRITE_DQS,
		   RW_MGR_MEM_VIRTUAL_GROUPS_PER_READ_DQS,
		   RW_MGR_MEM_VIRTUAL_GROUPS_PER_WRITE_DQS);
	debug_cond(DLEVEL == 1,
		   "dqs=%u,%u dq=%u dm=%u ptap_delay=%u dtap_delay=%u ",
		   RW_MGR_MEM_IF_READ_DQS_WIDTH, RW_MGR_MEM_IF_WRITE_DQS_WIDTH,
		   RW_MGR_MEM_DATA_WIDTH, RW_MGR_MEM_DATA_MASK_WIDTH,
		   IO_DELAY_PER_OPA_TAP, IO_DELAY_PER_DCHAIN_TAP);
	debug_cond(DLEVEL == 1, "dtap_dqsen_delay=%u, dll=%u",
		   IO_DELAY_PER_DQS_EN_DCHAIN_TAP, IO_DLL_CHAIN_LENGTH);
	debug_cond(DLEVEL == 1, "max values: en_p=%u dqdqs_p=%u en_d=%u dqs_in_d=%u ",
		   IO_DQS_EN_PHASE_MAX, IO_DQDQS_OUT_PHASE_MAX,
		   IO_DQS_EN_DELAY_MAX, IO_DQS_IN_DELAY_MAX);
	debug_cond(DLEVEL == 1, "io_in_d=%u io_out1_d=%u io_out2_d=%u ",
		   IO_IO_IN_DELAY_MAX, IO_IO_OUT1_DELAY_MAX,
		   IO_IO_OUT2_DELAY_MAX);
	debug_cond(DLEVEL == 1, "dqs_in_reserve=%u dqs_out_reserve=%u\n",
		   IO_DQS_IN_RESERVE, IO_DQS_OUT_RESERVE);

	hc_initialize_rom_data();

	/* update info for sims */
	reg_file_set_stage(CAL_STAGE_NIL);
	reg_file_set_group(0);

	/*
	 * Load global needed for those actions that require
	 * some dynamic calibration support.
	 */
	dyn_calib_steps = STATIC_CALIB_STEPS;
	/*
	 * Load global to allow dynamic selection of delay loop settings
	 * based on calibration mode.
	 */
	if (!(dyn_calib_steps & CALIB_SKIP_DELAY_LOOPS))
		skip_delay_mask = 0xff;
	else
		skip_delay_mask = 0x0;

	pass = run_mem_calibrate();
	debug_mem_calibrate(pass);
	return pass;
}