xref: /OK3568_Linux_fs/kernel/drivers/net/wireless/nxp/mlan/mlan_util.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /** @file mlan_util.h
2*4882a593Smuzhiyun  *
3*4882a593Smuzhiyun  *  @brief This file contains wrappers for linked-list,
4*4882a593Smuzhiyun  *  spinlock and timer defines.
5*4882a593Smuzhiyun  *
6*4882a593Smuzhiyun  *
7*4882a593Smuzhiyun  *  Copyright 2008-2021 NXP
8*4882a593Smuzhiyun  *
9*4882a593Smuzhiyun  *  This software file (the File) is distributed by NXP
10*4882a593Smuzhiyun  *  under the terms of the GNU General Public License Version 2, June 1991
11*4882a593Smuzhiyun  *  (the License).  You may use, redistribute and/or modify the File in
12*4882a593Smuzhiyun  *  accordance with the terms and conditions of the License, a copy of which
13*4882a593Smuzhiyun  *  is available by writing to the Free Software Foundation, Inc.,
14*4882a593Smuzhiyun  *  51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA or on the
15*4882a593Smuzhiyun  *  worldwide web at http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
16*4882a593Smuzhiyun  *
17*4882a593Smuzhiyun  *  THE FILE IS DISTRIBUTED AS-IS, WITHOUT WARRANTY OF ANY KIND, AND THE
18*4882a593Smuzhiyun  *  IMPLIED WARRANTIES OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE
19*4882a593Smuzhiyun  *  ARE EXPRESSLY DISCLAIMED.  The License provides additional details about
20*4882a593Smuzhiyun  *  this warranty disclaimer.
21*4882a593Smuzhiyun  *
22*4882a593Smuzhiyun  */
23*4882a593Smuzhiyun 
24*4882a593Smuzhiyun /******************************************************
25*4882a593Smuzhiyun Change log:
26*4882a593Smuzhiyun     10/28/2008: initial version
27*4882a593Smuzhiyun ******************************************************/
28*4882a593Smuzhiyun 
29*4882a593Smuzhiyun #ifndef _MLAN_UTIL_H_
30*4882a593Smuzhiyun #define _MLAN_UTIL_H_
31*4882a593Smuzhiyun 
/** Circular doubly linked list node.
 *  Embed this as the first member of any structure that is to be
 *  queued on an mlan list; an empty list's head points at itself.
 */
typedef struct _mlan_linked_list {
	/** Pointer to previous node */
	struct _mlan_linked_list *pprev;
	/** Pointer to next node */
	struct _mlan_linked_list *pnext;
} mlan_linked_list, *pmlan_linked_list;
39*4882a593Smuzhiyun 
/** List head.
 *  NOTE: the first two members must stay identical (in order and type)
 *  to mlan_linked_list — the list helpers below cast a pmlan_list_head
 *  to pmlan_linked_list and rely on this overlay.
 */
typedef struct _mlan_list_head {
	/** Pointer to previous node */
	struct _mlan_linked_list *pprev;
	/** Pointer to next node */
	struct _mlan_linked_list *pnext;
	/** Pointer to lock (MNULL when the list is unlocked) */
	t_void *plock;
} mlan_list_head, *pmlan_list_head;
49*4882a593Smuzhiyun 
50*4882a593Smuzhiyun /** MLAN MNULL pointer */
51*4882a593Smuzhiyun #define MNULL ((void *)0)
52*4882a593Smuzhiyun 
53*4882a593Smuzhiyun /**
54*4882a593Smuzhiyun  *  @brief This function initializes a list without locking
55*4882a593Smuzhiyun  *
56*4882a593Smuzhiyun  *  @param phead		List head
57*4882a593Smuzhiyun  *
58*4882a593Smuzhiyun  *  @return			N/A
59*4882a593Smuzhiyun  */
util_init_list(pmlan_linked_list phead)60*4882a593Smuzhiyun static INLINE t_void util_init_list(pmlan_linked_list phead)
61*4882a593Smuzhiyun {
62*4882a593Smuzhiyun 	/* Both next and prev point to self */
63*4882a593Smuzhiyun 	phead->pprev = phead->pnext = (pmlan_linked_list)phead;
64*4882a593Smuzhiyun }
65*4882a593Smuzhiyun 
66*4882a593Smuzhiyun /**
67*4882a593Smuzhiyun  *  @brief This function initializes a list
68*4882a593Smuzhiyun  *
69*4882a593Smuzhiyun  *  @param phead		List head
70*4882a593Smuzhiyun  *  @param lock_required	A flag for spinlock requirement
71*4882a593Smuzhiyun  *  @param moal_init_lock	A pointer to init lock handler
72*4882a593Smuzhiyun  *
73*4882a593Smuzhiyun  *  @return			N/A
74*4882a593Smuzhiyun  */
util_init_list_head(t_void * pmoal_handle,pmlan_list_head phead,t_u8 lock_required,mlan_status (* moal_init_lock)(t_void * handle,t_void ** pplock))75*4882a593Smuzhiyun static INLINE t_void util_init_list_head(
76*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_list_head phead, t_u8 lock_required,
77*4882a593Smuzhiyun 	mlan_status (*moal_init_lock)(t_void *handle, t_void **pplock))
78*4882a593Smuzhiyun {
79*4882a593Smuzhiyun 	/* Both next and prev point to self */
80*4882a593Smuzhiyun 	util_init_list((pmlan_linked_list)phead);
81*4882a593Smuzhiyun 	if (lock_required)
82*4882a593Smuzhiyun 		moal_init_lock(pmoal_handle, &phead->plock);
83*4882a593Smuzhiyun 	else
84*4882a593Smuzhiyun 		phead->plock = MNULL;
85*4882a593Smuzhiyun }
86*4882a593Smuzhiyun 
87*4882a593Smuzhiyun /**
88*4882a593Smuzhiyun  *  @brief This function frees a list
89*4882a593Smuzhiyun  *
90*4882a593Smuzhiyun  *  @param phead		List head
91*4882a593Smuzhiyun  *  @param moal_free_lock	A pointer to free lock handler
92*4882a593Smuzhiyun  *
93*4882a593Smuzhiyun  *  @return			N/A
94*4882a593Smuzhiyun  */
util_free_list_head(t_void * pmoal_handle,pmlan_list_head phead,mlan_status (* moal_free_lock)(t_void * handle,t_void * plock))95*4882a593Smuzhiyun static INLINE t_void util_free_list_head(
96*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_list_head phead,
97*4882a593Smuzhiyun 	mlan_status (*moal_free_lock)(t_void *handle, t_void *plock))
98*4882a593Smuzhiyun {
99*4882a593Smuzhiyun 	phead->pprev = phead->pnext = MNULL;
100*4882a593Smuzhiyun 	if (phead->plock)
101*4882a593Smuzhiyun 		moal_free_lock(pmoal_handle, phead->plock);
102*4882a593Smuzhiyun }
103*4882a593Smuzhiyun 
104*4882a593Smuzhiyun /**
105*4882a593Smuzhiyun  *  @brief This function peeks into a list
106*4882a593Smuzhiyun  *
107*4882a593Smuzhiyun  *  @param phead		List head
108*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
109*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
110*4882a593Smuzhiyun  *
111*4882a593Smuzhiyun  *  @return			List node
112*4882a593Smuzhiyun  */
113*4882a593Smuzhiyun static INLINE pmlan_linked_list
util_peek_list(t_void * pmoal_handle,pmlan_list_head phead,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))114*4882a593Smuzhiyun util_peek_list(t_void *pmoal_handle, pmlan_list_head phead,
115*4882a593Smuzhiyun 	       mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
116*4882a593Smuzhiyun 	       mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
117*4882a593Smuzhiyun {
118*4882a593Smuzhiyun 	pmlan_linked_list pnode = MNULL;
119*4882a593Smuzhiyun 
120*4882a593Smuzhiyun 	if (moal_spin_lock)
121*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, phead->plock);
122*4882a593Smuzhiyun 	if (phead->pnext != (pmlan_linked_list)phead)
123*4882a593Smuzhiyun 		pnode = phead->pnext;
124*4882a593Smuzhiyun 	if (moal_spin_unlock)
125*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, phead->plock);
126*4882a593Smuzhiyun 	return pnode;
127*4882a593Smuzhiyun }
128*4882a593Smuzhiyun 
129*4882a593Smuzhiyun /**
130*4882a593Smuzhiyun  *  @brief This function queues a node at the list tail
131*4882a593Smuzhiyun  *
132*4882a593Smuzhiyun  *  @param phead		List head
133*4882a593Smuzhiyun  *  @param pnode		List node to queue
134*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
135*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
136*4882a593Smuzhiyun  *
137*4882a593Smuzhiyun  *  @return			N/A
138*4882a593Smuzhiyun  */
util_enqueue_list_tail(t_void * pmoal_handle,pmlan_list_head phead,pmlan_linked_list pnode,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))139*4882a593Smuzhiyun static INLINE t_void util_enqueue_list_tail(
140*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_list_head phead, pmlan_linked_list pnode,
141*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
142*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
143*4882a593Smuzhiyun {
144*4882a593Smuzhiyun 	pmlan_linked_list pold_last;
145*4882a593Smuzhiyun 
146*4882a593Smuzhiyun 	if (moal_spin_lock)
147*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, phead->plock);
148*4882a593Smuzhiyun 	pold_last = phead->pprev;
149*4882a593Smuzhiyun 	pnode->pprev = pold_last;
150*4882a593Smuzhiyun 	pnode->pnext = (pmlan_linked_list)phead;
151*4882a593Smuzhiyun 
152*4882a593Smuzhiyun 	phead->pprev = pold_last->pnext = pnode;
153*4882a593Smuzhiyun 	if (moal_spin_unlock)
154*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, phead->plock);
155*4882a593Smuzhiyun }
156*4882a593Smuzhiyun 
157*4882a593Smuzhiyun /**
158*4882a593Smuzhiyun  *  @brief This function adds a node at the list head
159*4882a593Smuzhiyun  *
160*4882a593Smuzhiyun  *  @param phead		List head
161*4882a593Smuzhiyun  *  @param pnode		List node to add
162*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
163*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
164*4882a593Smuzhiyun  *
165*4882a593Smuzhiyun  *  @return			N/A
166*4882a593Smuzhiyun  */
util_enqueue_list_head(t_void * pmoal_handle,pmlan_list_head phead,pmlan_linked_list pnode,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))167*4882a593Smuzhiyun static INLINE t_void util_enqueue_list_head(
168*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_list_head phead, pmlan_linked_list pnode,
169*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
170*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
171*4882a593Smuzhiyun {
172*4882a593Smuzhiyun 	pmlan_linked_list pold_first;
173*4882a593Smuzhiyun 
174*4882a593Smuzhiyun 	if (moal_spin_lock)
175*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, phead->plock);
176*4882a593Smuzhiyun 	pold_first = phead->pnext;
177*4882a593Smuzhiyun 	pnode->pprev = (pmlan_linked_list)phead;
178*4882a593Smuzhiyun 	pnode->pnext = pold_first;
179*4882a593Smuzhiyun 
180*4882a593Smuzhiyun 	phead->pnext = pold_first->pprev = pnode;
181*4882a593Smuzhiyun 	if (moal_spin_unlock)
182*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, phead->plock);
183*4882a593Smuzhiyun }
184*4882a593Smuzhiyun 
185*4882a593Smuzhiyun /**
186*4882a593Smuzhiyun  *  @brief This function removes a node from the list
187*4882a593Smuzhiyun  *
188*4882a593Smuzhiyun  *  @param phead		List head
189*4882a593Smuzhiyun  *  @param pnode		List node to remove
190*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
191*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
192*4882a593Smuzhiyun  *
193*4882a593Smuzhiyun  *  @return			N/A
194*4882a593Smuzhiyun  */
util_unlink_list(t_void * pmoal_handle,pmlan_list_head phead,pmlan_linked_list pnode,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))195*4882a593Smuzhiyun static INLINE t_void util_unlink_list(
196*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_list_head phead, pmlan_linked_list pnode,
197*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
198*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
199*4882a593Smuzhiyun {
200*4882a593Smuzhiyun 	pmlan_linked_list pmy_prev;
201*4882a593Smuzhiyun 	pmlan_linked_list pmy_next;
202*4882a593Smuzhiyun 
203*4882a593Smuzhiyun 	if (moal_spin_lock)
204*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, phead->plock);
205*4882a593Smuzhiyun 	pmy_prev = pnode->pprev;
206*4882a593Smuzhiyun 	pmy_next = pnode->pnext;
207*4882a593Smuzhiyun 	pmy_next->pprev = pmy_prev;
208*4882a593Smuzhiyun 	pmy_prev->pnext = pmy_next;
209*4882a593Smuzhiyun 
210*4882a593Smuzhiyun 	pnode->pnext = pnode->pprev = MNULL;
211*4882a593Smuzhiyun 	if (moal_spin_unlock)
212*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, phead->plock);
213*4882a593Smuzhiyun }
214*4882a593Smuzhiyun 
215*4882a593Smuzhiyun /**
216*4882a593Smuzhiyun  *  @brief This function dequeues a node from the list
217*4882a593Smuzhiyun  *
218*4882a593Smuzhiyun  *  @param phead		List head
219*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
220*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
221*4882a593Smuzhiyun  *
222*4882a593Smuzhiyun  *  @return			List node
223*4882a593Smuzhiyun  */
util_dequeue_list(t_void * pmoal_handle,pmlan_list_head phead,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))224*4882a593Smuzhiyun static INLINE pmlan_linked_list util_dequeue_list(
225*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_list_head phead,
226*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
227*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
228*4882a593Smuzhiyun {
229*4882a593Smuzhiyun 	pmlan_linked_list pnode;
230*4882a593Smuzhiyun 
231*4882a593Smuzhiyun 	if (moal_spin_lock)
232*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, phead->plock);
233*4882a593Smuzhiyun 	pnode = phead->pnext;
234*4882a593Smuzhiyun 	if (pnode && (pnode != (pmlan_linked_list)phead))
235*4882a593Smuzhiyun 		util_unlink_list(pmoal_handle, phead, pnode, MNULL, MNULL);
236*4882a593Smuzhiyun 	else
237*4882a593Smuzhiyun 		pnode = MNULL;
238*4882a593Smuzhiyun 	if (moal_spin_unlock)
239*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, phead->plock);
240*4882a593Smuzhiyun 	return pnode;
241*4882a593Smuzhiyun }
242*4882a593Smuzhiyun 
/** Access controlled scalar variable.
 *  A signed 32-bit value whose reads/updates are serialized through
 *  the caller-provided spin lock handlers.
 */
typedef struct _mlan_scalar {
	/** Value */
	t_s32 value;
	/** Pointer to lock (created or shared; see util_scalar_init) */
	t_void *plock;
	/** Control flags (e.g. MLAN_SCALAR_FLAG_UNIQUE_LOCK) */
	t_u32 flags;
} mlan_scalar, *pmlan_scalar;
252*4882a593Smuzhiyun 
/** Flag: the scalar owns a uniquely-created lock and must free it */
#define MLAN_SCALAR_FLAG_UNIQUE_LOCK MBIT(16)
255*4882a593Smuzhiyun 
/** Scalar conditional value list.
 *  Comparison operators used by util_scalar_conditional_write,
 *  evaluated as (current value OP compare value).
 */
typedef enum _MLAN_SCALAR_CONDITIONAL {
	/** current == compare */
	MLAN_SCALAR_COND_EQUAL,
	/** current != compare */
	MLAN_SCALAR_COND_NOT_EQUAL,
	/** current > compare */
	MLAN_SCALAR_COND_GREATER_THAN,
	/** current >= compare */
	MLAN_SCALAR_COND_GREATER_OR_EQUAL,
	/** current < compare */
	MLAN_SCALAR_COND_LESS_THAN,
	/** current <= compare */
	MLAN_SCALAR_COND_LESS_OR_EQUAL
} MLAN_SCALAR_CONDITIONAL;
265*4882a593Smuzhiyun 
266*4882a593Smuzhiyun /**
267*4882a593Smuzhiyun  *  @brief This function initializes a scalar
268*4882a593Smuzhiyun  *
269*4882a593Smuzhiyun  *  @param pscalar			Pointer to scalar
270*4882a593Smuzhiyun  *  @param val				Initial scalar value
271*4882a593Smuzhiyun  *  @param plock_to_use		A new lock is created if NULL, else lock to use
272*4882a593Smuzhiyun  *  @param moal_init_lock	A pointer to init lock handler
273*4882a593Smuzhiyun  *
274*4882a593Smuzhiyun  *  @return					N/A
275*4882a593Smuzhiyun  */
276*4882a593Smuzhiyun static INLINE t_void
util_scalar_init(t_void * pmoal_handle,pmlan_scalar pscalar,t_s32 val,t_void * plock_to_use,mlan_status (* moal_init_lock)(t_void * handle,t_void ** pplock))277*4882a593Smuzhiyun util_scalar_init(t_void *pmoal_handle, pmlan_scalar pscalar, t_s32 val,
278*4882a593Smuzhiyun 		 t_void *plock_to_use,
279*4882a593Smuzhiyun 		 mlan_status (*moal_init_lock)(t_void *handle, t_void **pplock))
280*4882a593Smuzhiyun {
281*4882a593Smuzhiyun 	pscalar->value = val;
282*4882a593Smuzhiyun 	pscalar->flags = 0;
283*4882a593Smuzhiyun 	if (plock_to_use) {
284*4882a593Smuzhiyun 		pscalar->flags &= ~MLAN_SCALAR_FLAG_UNIQUE_LOCK;
285*4882a593Smuzhiyun 		pscalar->plock = plock_to_use;
286*4882a593Smuzhiyun 	} else {
287*4882a593Smuzhiyun 		pscalar->flags |= MLAN_SCALAR_FLAG_UNIQUE_LOCK;
288*4882a593Smuzhiyun 		moal_init_lock(pmoal_handle, &pscalar->plock);
289*4882a593Smuzhiyun 	}
290*4882a593Smuzhiyun }
291*4882a593Smuzhiyun 
292*4882a593Smuzhiyun /**
293*4882a593Smuzhiyun  *  @brief This function frees a scalar
294*4882a593Smuzhiyun  *
295*4882a593Smuzhiyun  *  @param pscalar			Pointer to scalar
296*4882a593Smuzhiyun  *  @param moal_free_lock	A pointer to free lock handler
297*4882a593Smuzhiyun  *
298*4882a593Smuzhiyun  *  @return			N/A
299*4882a593Smuzhiyun  */
300*4882a593Smuzhiyun static INLINE t_void
util_scalar_free(t_void * pmoal_handle,pmlan_scalar pscalar,mlan_status (* moal_free_lock)(t_void * handle,t_void * plock))301*4882a593Smuzhiyun util_scalar_free(t_void *pmoal_handle, pmlan_scalar pscalar,
302*4882a593Smuzhiyun 		 mlan_status (*moal_free_lock)(t_void *handle, t_void *plock))
303*4882a593Smuzhiyun {
304*4882a593Smuzhiyun 	if (pscalar->flags & MLAN_SCALAR_FLAG_UNIQUE_LOCK)
305*4882a593Smuzhiyun 		moal_free_lock(pmoal_handle, pscalar->plock);
306*4882a593Smuzhiyun }
307*4882a593Smuzhiyun 
308*4882a593Smuzhiyun /**
309*4882a593Smuzhiyun  *  @brief This function reads value from scalar
310*4882a593Smuzhiyun  *
311*4882a593Smuzhiyun  *  @param pscalar			Pointer to scalar
312*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
313*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
314*4882a593Smuzhiyun  *
315*4882a593Smuzhiyun  *  @return					Stored value
316*4882a593Smuzhiyun  */
317*4882a593Smuzhiyun static INLINE t_s32
util_scalar_read(t_void * pmoal_handle,pmlan_scalar pscalar,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))318*4882a593Smuzhiyun util_scalar_read(t_void *pmoal_handle, pmlan_scalar pscalar,
319*4882a593Smuzhiyun 		 mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
320*4882a593Smuzhiyun 		 mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
321*4882a593Smuzhiyun {
322*4882a593Smuzhiyun 	t_s32 val;
323*4882a593Smuzhiyun 
324*4882a593Smuzhiyun 	if (moal_spin_lock)
325*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, pscalar->plock);
326*4882a593Smuzhiyun 	val = pscalar->value;
327*4882a593Smuzhiyun 	if (moal_spin_unlock)
328*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, pscalar->plock);
329*4882a593Smuzhiyun 
330*4882a593Smuzhiyun 	return val;
331*4882a593Smuzhiyun }
332*4882a593Smuzhiyun 
333*4882a593Smuzhiyun /**
334*4882a593Smuzhiyun  *  @brief This function writes value to scalar
335*4882a593Smuzhiyun  *
336*4882a593Smuzhiyun  *  @param pscalar			Pointer to scalar
337*4882a593Smuzhiyun  *  @param val				Value to write
338*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
339*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
340*4882a593Smuzhiyun  *
341*4882a593Smuzhiyun  *  @return					N/A
342*4882a593Smuzhiyun  */
util_scalar_write(t_void * pmoal_handle,pmlan_scalar pscalar,t_s32 val,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))343*4882a593Smuzhiyun static INLINE t_void util_scalar_write(
344*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_scalar pscalar, t_s32 val,
345*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
346*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
347*4882a593Smuzhiyun {
348*4882a593Smuzhiyun 	if (moal_spin_lock)
349*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, pscalar->plock);
350*4882a593Smuzhiyun 	pscalar->value = val;
351*4882a593Smuzhiyun 	if (moal_spin_unlock)
352*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, pscalar->plock);
353*4882a593Smuzhiyun }
354*4882a593Smuzhiyun 
355*4882a593Smuzhiyun /**
356*4882a593Smuzhiyun  *  @brief This function increments the value in scalar
357*4882a593Smuzhiyun  *
358*4882a593Smuzhiyun  *  @param pscalar			Pointer to scalar
359*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
360*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
361*4882a593Smuzhiyun  *
362*4882a593Smuzhiyun  *  @return					N/A
363*4882a593Smuzhiyun  */
util_scalar_increment(t_void * pmoal_handle,pmlan_scalar pscalar,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))364*4882a593Smuzhiyun static INLINE t_void util_scalar_increment(
365*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_scalar pscalar,
366*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
367*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
368*4882a593Smuzhiyun {
369*4882a593Smuzhiyun 	if (moal_spin_lock)
370*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, pscalar->plock);
371*4882a593Smuzhiyun 	pscalar->value++;
372*4882a593Smuzhiyun 	if (moal_spin_unlock)
373*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, pscalar->plock);
374*4882a593Smuzhiyun }
375*4882a593Smuzhiyun 
376*4882a593Smuzhiyun /**
377*4882a593Smuzhiyun  *  @brief This function decrements the value in scalar
378*4882a593Smuzhiyun  *
379*4882a593Smuzhiyun  *  @param pscalar			Pointer to scalar
380*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
381*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
382*4882a593Smuzhiyun  *
383*4882a593Smuzhiyun  *  @return					N/A
384*4882a593Smuzhiyun  */
util_scalar_decrement(t_void * pmoal_handle,pmlan_scalar pscalar,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))385*4882a593Smuzhiyun static INLINE t_void util_scalar_decrement(
386*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_scalar pscalar,
387*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
388*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
389*4882a593Smuzhiyun {
390*4882a593Smuzhiyun 	if (moal_spin_lock)
391*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, pscalar->plock);
392*4882a593Smuzhiyun 	pscalar->value--;
393*4882a593Smuzhiyun 	if (moal_spin_unlock)
394*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, pscalar->plock);
395*4882a593Smuzhiyun }
396*4882a593Smuzhiyun 
397*4882a593Smuzhiyun /**
398*4882a593Smuzhiyun  *  @brief This function adds an offset to the value in scalar,
399*4882a593Smuzhiyun  *         and returns the new value
400*4882a593Smuzhiyun  *
401*4882a593Smuzhiyun  *  @param pscalar			Pointer to scalar
402*4882a593Smuzhiyun  *  @param offset			Offset value (can be negative)
403*4882a593Smuzhiyun  *  @param moal_spin_lock	A pointer to spin lock handler
404*4882a593Smuzhiyun  *  @param moal_spin_unlock	A pointer to spin unlock handler
405*4882a593Smuzhiyun  *
406*4882a593Smuzhiyun  *  @return					Value after offset
407*4882a593Smuzhiyun  */
util_scalar_offset(t_void * pmoal_handle,pmlan_scalar pscalar,t_s32 offset,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))408*4882a593Smuzhiyun static INLINE t_s32 util_scalar_offset(
409*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_scalar pscalar, t_s32 offset,
410*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
411*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
412*4882a593Smuzhiyun {
413*4882a593Smuzhiyun 	t_s32 newval;
414*4882a593Smuzhiyun 
415*4882a593Smuzhiyun 	if (moal_spin_lock)
416*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, pscalar->plock);
417*4882a593Smuzhiyun 	newval = (pscalar->value += offset);
418*4882a593Smuzhiyun 	if (moal_spin_unlock)
419*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, pscalar->plock);
420*4882a593Smuzhiyun 
421*4882a593Smuzhiyun 	return newval;
422*4882a593Smuzhiyun }
423*4882a593Smuzhiyun 
424*4882a593Smuzhiyun /**
425*4882a593Smuzhiyun  *  @brief This function writes the value to the scalar
426*4882a593Smuzhiyun  *         if existing value compared with other value is true.
427*4882a593Smuzhiyun  *
428*4882a593Smuzhiyun  *  @param pscalar          Pointer to scalar
429*4882a593Smuzhiyun  *  @param condition        Condition to check
430*4882a593Smuzhiyun  *  @param val_compare      Value to compare against current value
431*4882a593Smuzhiyun  *                          ((A X B), where B = val_compare)
432*4882a593Smuzhiyun  *  @param val_to_set       Value to set if comparison is true
433*4882a593Smuzhiyun  *  @param moal_spin_lock   A pointer to spin lock handler
434*4882a593Smuzhiyun  *  @param moal_spin_unlock A pointer to spin unlock handler
435*4882a593Smuzhiyun  *
436*4882a593Smuzhiyun  *  @return                 Comparison result (MTRUE or MFALSE)
437*4882a593Smuzhiyun  */
util_scalar_conditional_write(t_void * pmoal_handle,pmlan_scalar pscalar,MLAN_SCALAR_CONDITIONAL condition,t_s32 val_compare,t_s32 val_to_set,mlan_status (* moal_spin_lock)(t_void * handle,t_void * plock),mlan_status (* moal_spin_unlock)(t_void * handle,t_void * plock))438*4882a593Smuzhiyun static INLINE t_u8 util_scalar_conditional_write(
439*4882a593Smuzhiyun 	t_void *pmoal_handle, pmlan_scalar pscalar,
440*4882a593Smuzhiyun 	MLAN_SCALAR_CONDITIONAL condition, t_s32 val_compare, t_s32 val_to_set,
441*4882a593Smuzhiyun 	mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
442*4882a593Smuzhiyun 	mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
443*4882a593Smuzhiyun {
444*4882a593Smuzhiyun 	t_u8 update;
445*4882a593Smuzhiyun 	if (moal_spin_lock)
446*4882a593Smuzhiyun 		moal_spin_lock(pmoal_handle, pscalar->plock);
447*4882a593Smuzhiyun 
448*4882a593Smuzhiyun 	switch (condition) {
449*4882a593Smuzhiyun 	case MLAN_SCALAR_COND_EQUAL:
450*4882a593Smuzhiyun 		update = (pscalar->value == val_compare);
451*4882a593Smuzhiyun 		break;
452*4882a593Smuzhiyun 	case MLAN_SCALAR_COND_NOT_EQUAL:
453*4882a593Smuzhiyun 		update = (pscalar->value != val_compare);
454*4882a593Smuzhiyun 		break;
455*4882a593Smuzhiyun 	case MLAN_SCALAR_COND_GREATER_THAN:
456*4882a593Smuzhiyun 		update = (pscalar->value > val_compare);
457*4882a593Smuzhiyun 		break;
458*4882a593Smuzhiyun 	case MLAN_SCALAR_COND_GREATER_OR_EQUAL:
459*4882a593Smuzhiyun 		update = (pscalar->value >= val_compare);
460*4882a593Smuzhiyun 		break;
461*4882a593Smuzhiyun 	case MLAN_SCALAR_COND_LESS_THAN:
462*4882a593Smuzhiyun 		update = (pscalar->value < val_compare);
463*4882a593Smuzhiyun 		break;
464*4882a593Smuzhiyun 	case MLAN_SCALAR_COND_LESS_OR_EQUAL:
465*4882a593Smuzhiyun 		update = (pscalar->value <= val_compare);
466*4882a593Smuzhiyun 		break;
467*4882a593Smuzhiyun 	default:
468*4882a593Smuzhiyun 		update = MFALSE;
469*4882a593Smuzhiyun 		break;
470*4882a593Smuzhiyun 	}
471*4882a593Smuzhiyun 	if (update)
472*4882a593Smuzhiyun 		pscalar->value = val_to_set;
473*4882a593Smuzhiyun 
474*4882a593Smuzhiyun 	if (moal_spin_unlock)
475*4882a593Smuzhiyun 		moal_spin_unlock(pmoal_handle, pscalar->plock);
476*4882a593Smuzhiyun 	return (update) ? MTRUE : MFALSE;
477*4882a593Smuzhiyun }
478*4882a593Smuzhiyun 
479*4882a593Smuzhiyun /**
480*4882a593Smuzhiyun  *  @brief This function counts the bits of unsigned int number
481*4882a593Smuzhiyun  *
482*4882a593Smuzhiyun  *  @param num  number
483*4882a593Smuzhiyun  *  @return     number of bits
484*4882a593Smuzhiyun  */
bitcount(t_u32 num)485*4882a593Smuzhiyun static INLINE t_u32 bitcount(t_u32 num)
486*4882a593Smuzhiyun {
487*4882a593Smuzhiyun 	t_u32 count = 0;
488*4882a593Smuzhiyun 	static t_u32 nibblebits[] = {0, 1, 1, 2, 1, 2, 2, 3,
489*4882a593Smuzhiyun 				     1, 2, 2, 3, 2, 3, 3, 4};
490*4882a593Smuzhiyun 	for (; num != 0; num >>= 4)
491*4882a593Smuzhiyun 		count += nibblebits[num & 0x0f];
492*4882a593Smuzhiyun 	return count;
493*4882a593Smuzhiyun }
494*4882a593Smuzhiyun 
495*4882a593Smuzhiyun #endif /* !_MLAN_UTIL_H_ */
496