/*
 * Copyright (C) 2013, Intel Corporation
 * Copyright (C) 2015, Bin Meng <bmeng.cn@gmail.com>
 *
 * Ported from Intel released Quark UEFI BIOS
 * QuarkSocPkg/QuarkNorthCluster/MemoryInit/Pei
 *
 * SPDX-License-Identifier: Intel
 */

#include <common.h>
#include <asm/arch/mrc.h>
#include <asm/arch/msg_port.h>
#include "mrc_util.h"
#include "hte.h"

/**
 * Enable HTE to detect all possible errors for the given training parameters
 * (per-bit or full byte lane).
 */
static void hte_enable_all_errors(void)
{
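	/*
	 * These offsets are HTE message port registers whose names are not
	 * given here; the values written (all-ones, 0xff, zero) are
	 * consistent with unmasking every error the engine can report, as
	 * the function name suggests.
	 */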
	msg_port_write(HTE, 0x000200a2, 0xffffffff);
	msg_port_write(HTE, 0x000200a3, 0x000000ff);
	msg_port_write(HTE, 0x000200a4, 0x00000000);
}

/**
 * Read the HTE status register to check for any errors
 *
 * @return: The errors detected in the HTE status register
 */
static u32 hte_check_errors(void)
{
	return msg_port_read(HTE, 0x000200a7);
}

/**
 * Wait until HTE finishes
 */
static void hte_wait_for_complete(void)
{
	u32 tmp;

	ENTERFN();

	do {} while ((msg_port_read(HTE, 0x00020012) & (1 << 30)) != 0);

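	/*
	 * The engine reports done; the read-modify-write below appears to
	 * acknowledge completion by setting bit 9 and clearing bits 13:12
	 * of the HTE control register at offset 0x00020011.
	 */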
	tmp = msg_port_read(HTE, 0x00020011);
	tmp |= (1 << 9);
	tmp &= ~((1 << 12) | (1 << 13));
	msg_port_write(HTE, 0x00020011, tmp);

	LEAVEFN();
}

/**
 * Clear the error-related registers in the HTE
 */
static void hte_clear_error_regs(void)
{
	u32 tmp;

	/*
	 * Clear all HTE errors and enable error checking
	 * for burst and chunk.
	 */
	tmp = msg_port_read(HTE, 0x000200a1);
	tmp |= (1 << 8);
	msg_port_write(HTE, 0x000200a1, tmp);
}

/**
 * Execute a basic single-cache-line write/read/verify memory test using a
 * simple constant pattern, which differs between READ_TRAIN and WRITE_TRAIN
 * modes.
 *
 * See hte_basic_write_read() which is the externally visible wrapper.
 *
 * @mrc_params: host structure for all MRC global data
 * @addr: memory address being tested (must hit a specific channel/rank)
 * @first_run: if set, the HTE registers are configured; otherwise it is
 * assumed configuration is done and we just re-run the test
 * @mode: READ_TRAIN or WRITE_TRAIN (the difference is in the pattern)
 *
 * @return: byte lane failure on each bit (for Quark only bit0 and bit1)
 */
static u16 hte_basic_data_cmp(struct mrc_params *mrc_params, u32 addr,
			      u8 first_run, u8 mode)
{
	u32 pattern;
	u32 offset;

	if (first_run) {
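		/*
		 * One-time setup: program the HTE command sequence, the
		 * target cache-line address (addr / 64) and what appear to
		 * be the pattern seed registers (0x00020063-0x00020065).
		 */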
		msg_port_write(HTE, 0x00020020, 0x01b10021);
		msg_port_write(HTE, 0x00020021, 0x06000000);
		msg_port_write(HTE, 0x00020022, addr >> 6);
		msg_port_write(HTE, 0x00020062, 0x00800015);
		msg_port_write(HTE, 0x00020063, 0xaaaaaaaa);
		msg_port_write(HTE, 0x00020064, 0xcccccccc);
		msg_port_write(HTE, 0x00020065, 0xf0f0f0f0);
		msg_port_write(HTE, 0x00020061, 0x00030008);

		if (mode == WRITE_TRAIN)
			pattern = 0xc33c0000;
		else /* READ_TRAIN */
			pattern = 0xaa5555aa;

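		/*
		 * Fill all 16 dwords of what appears to be the HTE pattern
		 * buffer (offsets 0x80-0x8f) with the constant training
		 * pattern selected above.
		 */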
		for (offset = 0x80; offset <= 0x8f; offset++)
			msg_port_write(HTE, offset, pattern);
	}

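	/*
	 * Clear the error status, then write the control register at
	 * 0x00020011 twice; the second write differs only in bit 8, which
	 * presumably kicks off the test sequence.
	 */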
	msg_port_write(HTE, 0x000200a1, 0xffff1000);
	msg_port_write(HTE, 0x00020011, 0x00011000);
	msg_port_write(HTE, 0x00020011, 0x00011100);

	hte_wait_for_complete();

	/*
	 * Return bits 15:8 of HTE_CH0_ERR_XSTAT to check for
	 * any bytelane errors.
	 */
	return (hte_check_errors() >> 8) & 0xff;
}

/**
 * Examine a single cache line of memory with a write/read/verify test using
 * multiple data patterns (victim-aggressor algorithm).
 *
 * See hte_write_stress_bit_lanes() which is the externally visible wrapper.
 *
 * @mrc_params: host structure for all MRC global data
 * @addr: memory address being tested (must hit a specific channel/rank)
 * @loop_cnt: number of test iterations
 * @seed_victim: victim data pattern seed
 * @seed_aggressor: aggressor data pattern seed
 * @victim_bit: should be 0 as the auto-rotate feature is in use
 * @first_run: if set, the HTE registers are configured; otherwise it is
 * assumed configuration is done and we just re-run the test
 *
 * @return: byte lane failure on each bit (for Quark only bit0 and bit1)
 */
static u16 hte_rw_data_cmp(struct mrc_params *mrc_params, u32 addr,
			   u8 loop_cnt, u32 seed_victim, u32 seed_aggressor,
			   u8 victim_bit, u8 first_run)
{
	u32 offset;
	u32 tmp;

	if (first_run) {
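		/*
		 * One-time setup: the pairs of writes below appear to program
		 * two HTE sub-sequences against the same cache-line address
		 * (addr / 64), with the victim and aggressor LFSR seeds
		 * loaded into the seed registers.
		 */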
		msg_port_write(HTE, 0x00020020, 0x00910024);
		msg_port_write(HTE, 0x00020023, 0x00810024);
		msg_port_write(HTE, 0x00020021, 0x06070000);
		msg_port_write(HTE, 0x00020024, 0x06070000);
		msg_port_write(HTE, 0x00020022, addr >> 6);
		msg_port_write(HTE, 0x00020025, addr >> 6);
		msg_port_write(HTE, 0x00020062, 0x0000002a);
		msg_port_write(HTE, 0x00020063, seed_victim);
		msg_port_write(HTE, 0x00020064, seed_aggressor);
		msg_port_write(HTE, 0x00020065, seed_victim);

		/*
		 * Write the pattern buffers to select the victim bit
		 *
		 * Start with bit0
		 */
		for (offset = 0x80; offset <= 0x8f; offset++) {
			if ((offset % 8) == victim_bit)
				msg_port_write(HTE, offset, 0x55555555);
			else
				msg_port_write(HTE, offset, 0xcccccccc);
		}

		msg_port_write(HTE, 0x00020061, 0x00000000);
		msg_port_write(HTE, 0x00020066, 0x03440000);
		msg_port_write(HTE, 0x000200a1, 0xffff1000);
	}

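	/*
	 * Place the iteration count in bits 23:16 of the control word, then
	 * write it twice; the second write additionally sets bit 8, which
	 * presumably starts the victim-aggressor test.
	 */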
	tmp = 0x10001000 | (loop_cnt << 16);
	msg_port_write(HTE, 0x00020011, tmp);
	msg_port_write(HTE, 0x00020011, tmp | (1 << 8));

	hte_wait_for_complete();

	/*
	 * Return bits 15:8 of HTE_CH0_ERR_XSTAT to check for
	 * any bytelane errors.
	 */
	return (hte_check_errors() >> 8) & 0xff;
}

/**
 * Use the HW HTE engine to initialize or test all memory attached to a given
 * DUNIT. If flag is MRC_MEM_INIT, this routine writes 0s to all memory
 * locations to initialize ECC. If flag is MRC_MEM_TEST, this routine sends
 * a 5AA55AA5 pattern to all memory locations on the RankMask and then reads
 * it back. Then it sends an A55AA55A pattern to all memory locations on the
 * RankMask and reads it back.
 *
 * @mrc_params: host structure for all MRC global data
 * @flag: MRC_MEM_INIT or MRC_MEM_TEST
 *
 * @return: errors register showing HTE failures. Also prints out which rank
 * failed the HTE test if a failure occurs. For rank detection to work,
 * the address map must be left in its default state. If the MRC changes
 * the address map, this function must be modified to change it back
 * to default at the beginning, then restore it at the end.
 */
u32 hte_mem_init(struct mrc_params *mrc_params, u8 flag)
{
	u32 offset;
	int test_num;
	int i;

	/*
	 * Clear out the error registers at the start of each memory
	 * init or memory test run.
	 */
	hte_clear_error_regs();

	msg_port_write(HTE, 0x00020062, 0x00000015);

	for (offset = 0x80; offset <= 0x8f; offset++)
		msg_port_write(HTE, offset, ((offset & 1) ? 0xa55a : 0x5aa5));

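	/*
	 * Program the test range and pattern seeds. The value written to
	 * offset 0x00020022 is the index of the last 64-byte cache line,
	 * assuming mrc_params->mem_size is the total memory size in bytes.
	 */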
	msg_port_write(HTE, 0x00020021, 0x00000000);
	msg_port_write(HTE, 0x00020022, (mrc_params->mem_size >> 6) - 1);
	msg_port_write(HTE, 0x00020063, 0xaaaaaaaa);
	msg_port_write(HTE, 0x00020064, 0xcccccccc);
	msg_port_write(HTE, 0x00020065, 0xf0f0f0f0);
	msg_port_write(HTE, 0x00020066, 0x03000000);

	switch (flag) {
	case MRC_MEM_INIT:
		/*
		 * Only 1 write pass through memory is needed
		 * to initialize ECC
		 */
		test_num = 1;
		break;
	case MRC_MEM_TEST:
		/* Write/read then write/read with inverted pattern */
		test_num = 4;
		break;
	default:
		DPF(D_INFO, "Unknown parameter for flag: %d\n", flag);
		return 0xffffffff;
	}

	DPF(D_INFO, "hte_mem_init");

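	/*
	 * Run up to test_num passes: even passes write, odd passes read back
	 * and verify. Passes 2 and 3 repeat the sequence with what appears
	 * to be an inverted pattern selected via offset 0x00020061.
	 */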
	for (i = 0; i < test_num; i++) {
		DPF(D_INFO, ".");

		if (i == 0) {
			msg_port_write(HTE, 0x00020061, 0x00000000);
			msg_port_write(HTE, 0x00020020, 0x00110010);
		} else if (i == 1) {
			msg_port_write(HTE, 0x00020061, 0x00000000);
			msg_port_write(HTE, 0x00020020, 0x00010010);
		} else if (i == 2) {
			msg_port_write(HTE, 0x00020061, 0x00010100);
			msg_port_write(HTE, 0x00020020, 0x00110010);
		} else {
			msg_port_write(HTE, 0x00020061, 0x00010100);
			msg_port_write(HTE, 0x00020020, 0x00010010);
		}

		msg_port_write(HTE, 0x00020011, 0x00111000);
		msg_port_write(HTE, 0x00020011, 0x00111100);

		hte_wait_for_complete();

		/* If this is a READ pass, check for errors at the end */
		if ((i % 2) == 1) {
			/* Return immediately if error */
			if (hte_check_errors())
				break;
		}
	}

	DPF(D_INFO, "done\n");

	return hte_check_errors();
}

/**
 * Execute a basic single-cache-line write/read/verify memory test using a
 * simple constant pattern, which differs between READ_TRAIN and WRITE_TRAIN
 * modes.
 *
 * @mrc_params: host structure for all MRC global data
 * @addr: memory address being tested (must hit a specific channel/rank)
 * @first_run: if set, the HTE registers are configured; otherwise it is
 * assumed configuration is done and we just re-run the test
 * @mode: READ_TRAIN or WRITE_TRAIN (the difference is in the pattern)
 *
 * @return: byte lane failure on each bit (for Quark only bit0 and bit1)
 */
u16 hte_basic_write_read(struct mrc_params *mrc_params, u32 addr,
			 u8 first_run, u8 mode)
{
	u16 errors;

	ENTERFN();

	/* Enable all error reporting in preparation for HTE test */
	hte_enable_all_errors();
	hte_clear_error_regs();

	errors = hte_basic_data_cmp(mrc_params, addr, first_run, mode);

	LEAVEFN();

	return errors;
}

/**
 * Examine a single cache line of memory with a write/read/verify test using
 * multiple data patterns (victim-aggressor algorithm).
 *
 * @mrc_params: host structure for all MRC global data
 * @addr: memory address being tested (must hit a specific channel/rank)
 * @first_run: if set, the HTE registers are configured; otherwise it is
 * assumed configuration is done and we just re-run the test
 *
 * @return: byte lane failure on each bit (for Quark only bit0 and bit1)
 */
u16 hte_write_stress_bit_lanes(struct mrc_params *mrc_params,
			       u32 addr, u8 first_run)
{
	u16 errors;
	u8 victim_bit = 0;

	ENTERFN();

	/* Enable all error reporting in preparation for HTE test */
	hte_enable_all_errors();
	hte_clear_error_regs();

	/*
	 * Loop through each bit in the bytelane.
	 *
	 * Each pass creates a victim bit while keeping all other bits the same
	 * as aggressors. AVN HTE adds an auto-rotate feature which allows us
	 * to program the entire victim/aggressor sequence in 1 step.
	 *
	 * The victim bit rotates on each pass so no need to have software
	 * implement a victim bit loop like on VLV.
	 */
	errors = hte_rw_data_cmp(mrc_params, addr, HTE_LOOP_CNT,
				 HTE_LFSR_VICTIM_SEED, HTE_LFSR_AGRESSOR_SEED,
				 victim_bit, first_run);

	LEAVEFN();

	return errors;
}

/**
 * Execute a basic single-cache-line memory write or read.
 * This is just for receive enable / fine write-levelling purposes.
 *
 * @addr: memory address being tested (must hit a specific channel/rank)
 * @first_run: if set, the HTE registers are configured; otherwise it is
 * assumed configuration is done and we just re-run the test
 * @is_write: if non-zero, a memory write operation is executed; otherwise
 * a read is executed
 */
void hte_mem_op(u32 addr, u8 first_run, u8 is_write)
{
	u32 offset;
	u32 tmp;

	hte_enable_all_errors();
	hte_clear_error_regs();

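	/*
	 * The command word differs between the write and read cases in a
	 * single nibble; the rest of the one-time setup mirrors
	 * hte_basic_data_cmp(): cache-line address (addr / 64), seed
	 * registers and a constant pattern buffer.
	 */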
	if (first_run) {
		tmp = is_write ? 0x01110021 : 0x01010021;
		msg_port_write(HTE, 0x00020020, tmp);

		msg_port_write(HTE, 0x00020021, 0x06000000);
		msg_port_write(HTE, 0x00020022, addr >> 6);
		msg_port_write(HTE, 0x00020062, 0x00800015);
		msg_port_write(HTE, 0x00020063, 0xaaaaaaaa);
		msg_port_write(HTE, 0x00020064, 0xcccccccc);
		msg_port_write(HTE, 0x00020065, 0xf0f0f0f0);
		msg_port_write(HTE, 0x00020061, 0x00030008);

		for (offset = 0x80; offset <= 0x8f; offset++)
			msg_port_write(HTE, offset, 0xc33c0000);
	}

	msg_port_write(HTE, 0x000200a1, 0xffff1000);
	msg_port_write(HTE, 0x00020011, 0x00011000);
	msg_port_write(HTE, 0x00020011, 0x00011100);

	hte_wait_for_complete();
}