// SPDX-License-Identifier: BSD-2-Clause
/*
 * Copyright (c) 2025, Linaro Limited
 */

#include <kernel/transfer_list.h>
#include <mm/core_mmu.h>
#include <mm/phys_mem.h>
#include <pta_invoke_tests.h>
#include <stdlib.h>
#include <string.h>
#include <trace.h>
#include <util.h>

#include "misc.h"

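/* Arbitrary sizes, tag IDs and payload strings used only by this self test */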
#define TEST_TL_MAX_SIZE 256
#define TEST_TE2_ALIGN_POWER 4
#define TEST_TE1_ID 0xf000
#define TEST_TE2_ID 0xf0f0
#define TEST_TE3_ID 0xff00

static const char test_str1[] = "first added entry";
static const char test_str2[] = "second added entry";
static const char test_str3[] = "last added entry";

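/*
 * Add an entry to the TL, optionally with an alignment order, and verify
 * the entry header, the data placement and padding, and the TL checksum.
 */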
static TEE_Result test_add_te(struct transfer_list_header *tl,
                              uint16_t tag_id, uint32_t data_size,
                              const void *data, uint8_t align,
                              struct transfer_list_entry **tle)
{
        uint8_t *te_dat = NULL;
        uint8_t old_tl_align = 0;
        vaddr_t old_tl_ev = (vaddr_t)tl + tl->size;
        struct transfer_list_entry *tl_e = NULL;

        old_tl_align = tl->alignment;

        if (!align)
                tl_e = transfer_list_add(tl, tag_id, data_size, data);
        else
                tl_e = transfer_list_add_with_align(tl, tag_id, data_size,
                                                    data, align);

        if (!tl_e)
                return TEE_ERROR_GENERIC;

        if (tl->alignment != MAX(old_tl_align, align))
                return TEE_ERROR_CORRUPT_OBJECT;

        if (tl_e->tag_id != tag_id || tl_e->hdr_size != sizeof(*tl_e) ||
            tl_e->data_size != data_size)
                return TEE_ERROR_CORRUPT_OBJECT;

        te_dat = transfer_list_entry_data(tl_e);

        if (!te_dat || te_dat != (uint8_t *)tl_e + sizeof(*tl_e))
                return TEE_ERROR_CORRUPT_OBJECT;

        /*
         * If an alignment argument is passed in:
         * 1. The entry data must start at an aligned address, and
         * 2. Only the minimum gap needed to align the entry data should be
         *    inserted before the new entry (gap_min below: the padding that
         *    rounds the unaligned data address up to the next alignment
         *    boundary).
         */
        if (align) {
                vaddr_t mask = TL_ALIGNMENT_FROM_ORDER(align) - 1;
                size_t gap_min = (~(old_tl_ev + sizeof(*tl_e)) + 1) & mask;

                if (((vaddr_t)tl_e - old_tl_ev != gap_min) ||
                    ((vaddr_t)te_dat & mask))
                        return TEE_ERROR_CORRUPT_OBJECT;
        }

        if (memcmp(te_dat, data, tl_e->data_size))
                return TEE_ERROR_CORRUPT_OBJECT;

        if (!transfer_list_verify_checksum(tl))
                return TEE_ERROR_CORRUPT_OBJECT;

        *tle = tl_e;

        return TEE_SUCCESS;
}

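/*
 * Remove the entry matching @tag_id from the TL and check that it can no
 * longer be found and that the checksum is still valid.
 */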
static TEE_Result test_rm_te(struct transfer_list_header *tl,
                             uint16_t tag_id)
{
        struct transfer_list_entry *tl_e = NULL;

        tl_e = transfer_list_find(tl, tag_id);
        if (!tl_e)
                return TEE_ERROR_ITEM_NOT_FOUND;

        if (!transfer_list_rem(tl, tl_e))
                return TEE_ERROR_GENERIC;

        if (transfer_list_find(tl, tag_id))
                return TEE_ERROR_CORRUPT_OBJECT;

        if (!transfer_list_verify_checksum(tl))
                return TEE_ERROR_CORRUPT_OBJECT;

        return TEE_SUCCESS;
}

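/*
 * Resize an existing entry and check that the following entries are moved
 * by a distance aligned to the maximum alignment of the TL.
 */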
static TEE_Result test_set_te_data_size(struct transfer_list_header *tl,
                                        struct transfer_list_entry *tl_e,
                                        uint32_t new_data_size)
{
        struct transfer_list_entry *old_te_next = NULL;
        struct transfer_list_entry *new_te_next = NULL;
        size_t mov_dis = 0;

        old_te_next = transfer_list_next(tl, tl_e);

        if (!transfer_list_set_data_size(tl, tl_e, new_data_size))
                return TEE_ERROR_GENERIC;

        if (!transfer_list_verify_checksum(tl))
                return TEE_ERROR_CORRUPT_OBJECT;

        new_te_next = transfer_list_next(tl, tl_e);

        /* Skip any void entries inserted by the resize */
        while (new_te_next && new_te_next->tag_id == TL_TAG_EMPTY)
                new_te_next = transfer_list_next(tl, new_te_next);

        /*
         * The distance the following entry was moved by must be aligned
         * to the maximum alignment of the TL.
         */
        if (new_te_next > old_te_next)
                mov_dis = (vaddr_t)new_te_next - (vaddr_t)old_te_next;
        else
                mov_dis = (vaddr_t)old_te_next - (vaddr_t)new_te_next;

        if (!IS_ALIGNED(mov_dis, TL_ALIGNMENT_FROM_ORDER(tl->alignment)))
                return TEE_ERROR_CORRUPT_OBJECT;

        return TEE_SUCCESS;
}

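/*
 * Exercise the transfer list API: init a TL in freshly allocated memory,
 * then add, find, resize and remove entries, verifying the TL state at
 * each step.
 */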
static TEE_Result transfer_list_tests(void)
{
        struct transfer_list_header *tl = NULL;
        struct transfer_list_entry *te1 = NULL;
        struct transfer_list_entry *te2 = NULL;
        struct transfer_list_entry *te3 = NULL;
        TEE_Result ret = TEE_SUCCESS;
        tee_mm_entry_t *mm = NULL;

        mm = phys_mem_core_alloc(SMALL_PAGE_SIZE);
        if (!mm)
                return TEE_ERROR_OUT_OF_MEMORY;

        tl = transfer_list_init(tee_mm_get_smem(mm), TEST_TL_MAX_SIZE);
        if (!tl) {
                ret = TEE_ERROR_GENERIC;
                goto free_tl;
        }

        if (transfer_list_check_header(tl) == TL_OPS_NONE) {
                ret = TEE_ERROR_CORRUPT_OBJECT;
                goto unmap_tl;
        }

        if (tl->hdr_size != sizeof(*tl) ||
            tl->alignment != TRANSFER_LIST_INIT_MAX_ALIGN ||
            tl->size != sizeof(*tl) ||
            tl->max_size != TEST_TL_MAX_SIZE ||
            tl->flags != TL_FLAGS_HAS_CHECKSUM ||
            !transfer_list_verify_checksum(tl)) {
                ret = TEE_ERROR_CORRUPT_OBJECT;
                goto unmap_tl;
        }

        /* Append a new entry at the tail, with no data alignment required */
        ret = test_add_te(tl, TEST_TE1_ID, sizeof(test_str1), test_str1, 0,
                          &te1);
        if (ret)
                goto unmap_tl;

        /* Add an aligned entry, expecting padding to be inserted before it */
        ret = test_add_te(tl, TEST_TE2_ID, sizeof(test_str2), test_str2,
                          TEST_TE2_ALIGN_POWER, &te2);
        if (ret)
                goto unmap_tl;

        /* Append a new entry at the tail, with no data alignment required */
        ret = test_add_te(tl, TEST_TE3_ID, sizeof(test_str3), test_str3, 0,
                          &te3);
        if (ret)
                goto unmap_tl;

        if (transfer_list_find(tl, TEST_TE1_ID) != te1 ||
            transfer_list_find(tl, TEST_TE2_ID) != te2 ||
            transfer_list_find(tl, TEST_TE3_ID) != te3) {
                ret = TEE_ERROR_ITEM_NOT_FOUND;
                goto unmap_tl;
        }

        ret = test_set_te_data_size(tl, te1, sizeof(test_str1) + 10);
        if (ret)
                goto unmap_tl;

        /* The following TEs were shifted by the resize; look te2 up again */
        te2 = transfer_list_find(tl, TEST_TE2_ID);
        ret = test_set_te_data_size(tl, te2, sizeof(test_str2) - 10);
        if (ret)
                goto unmap_tl;

        /* The following TEs were shifted by the resize; look te3 up again */
        te3 = transfer_list_find(tl, TEST_TE3_ID);
        ret = test_set_te_data_size(tl, te3, sizeof(test_str3) + 10);
        if (ret)
                goto unmap_tl;

        ret = test_rm_te(tl, TEST_TE2_ID);
        if (ret)
                goto unmap_tl;

        ret = test_rm_te(tl, TEST_TE3_ID);
        if (ret)
                goto unmap_tl;

        ret = test_rm_te(tl, TEST_TE1_ID);

unmap_tl:
        transfer_list_unmap_sync(tl);

free_tl:
        tee_mm_free(mm);
        return ret;
}

/* Exported entrypoint for transfer_list tests */
TEE_Result core_transfer_list_tests(uint32_t nParamTypes __unused,
                                    TEE_Param pParams[TEE_NUM_PARAMS] __unused)
{
        if (transfer_list_tests())
                return TEE_ERROR_GENERIC;

        return TEE_SUCCESS;
}