/** @file mlan_util.h
 *
 *  @brief This file contains wrappers for linked-list,
 *  spinlock and timer defines.
 *
 *  Copyright 2008-2024 NXP
 *
 *  SPDX-License-Identifier: BSD-3-Clause
 *
 */

/******************************************************
Change log:
    10/28/2008: initial version
******************************************************/

#ifndef _MLAN_UTIL_H_
#define _MLAN_UTIL_H_

#ifndef MNULL
#define MNULL ((void *)0)
#endif

/** Circular doubly linked list */
typedef struct _mlan_linked_list
{
    /** Pointer to previous node */
    struct _mlan_linked_list *pprev;
    /** Pointer to next node */
    struct _mlan_linked_list *pnext;
} mlan_linked_list, *pmlan_linked_list;

/** List head */
typedef struct _mlan_list_head
{
    /** Pointer to previous node */
    struct _mlan_linked_list *pprev;
    /** Pointer to next node */
    struct _mlan_linked_list *pnext;
    /** Pointer to lock */
    OSA_SEMAPHORE_HANDLE_DEFINE(plock);
} mlan_list_head, *pmlan_list_head;
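
/*
 * Illustrative sketch (not part of this header's API): the list types above
 * are intrusive -- a caller typically embeds mlan_linked_list as the first
 * member of its own entry structure, so node pointers returned by the
 * helpers below can be cast back to the entry type.  The entry type and
 * field names here are hypothetical.
 *
 *   typedef struct _my_entry
 *   {
 *       mlan_linked_list link; // assumed to be the first member so the cast is valid
 *       t_u32 payload;
 *   } my_entry;
 */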

/**
 *  @brief This function initializes a list without locking
 *
 *  @param phead		List head
 *
 *  @return			N/A
 */
static INLINE t_void util_init_list(pmlan_linked_list phead)
{
    /* Both next and prev point to self */
    phead->pnext = phead->pprev = (pmlan_linked_list)phead;
}

/**
 *  @brief This function initializes a list
 *
 *  @param phead		List head
 *  @param lock_required	A flag for spinlock requirement
 *  @param moal_init_lock	A pointer to init lock handler
 *
 *  @return			N/A
 */
static INLINE t_void util_init_list_head(t_void *pmoal_handle,
                                         pmlan_list_head phead,
                                         bool lock_required,
                                         mlan_status (*moal_init_lock)(t_void *handle, t_void *plock))
{
    /* Both next and prev point to self */
    util_init_list((pmlan_linked_list)(void *)phead);
    if (lock_required != 0U)
    {
        (void)moal_init_lock(pmoal_handle, &phead->plock);
    }
}

/**
 *  @brief This function frees a list
 *
 *  @param phead		List head
 *  @param moal_free_lock	A pointer to free lock handler
 *
 *  @return			N/A
 */
static INLINE t_void util_free_list_head(t_void *pmoal_handle,
                                         pmlan_list_head phead,
                                         mlan_status (*moal_free_lock)(t_void *handle, t_void *plock))
{
    phead->pprev = phead->pnext = MNULL;

    if (moal_free_lock != MNULL)
    {
        (void)moal_free_lock(pmoal_handle, phead->plock);
    }
}

/**
 *  @brief This function peeks into a list
 *
 *  @param phead		List head
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return			List node
 */
static INLINE pmlan_linked_list util_peek_list(t_void *pmoal_handle,
                                               pmlan_list_head phead,
                                               mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                               mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    pmlan_linked_list pnode = MNULL;

    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, phead->plock);
    }
    if (phead->pnext != (pmlan_linked_list)(void *)phead)
    {
        pnode = phead->pnext;
    }
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, phead->plock);
    }
    return pnode;
}

/**
 *  @brief This function queues a node at the list tail
 *
 *  @param phead		List head
 *  @param pnode		List node to queue
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return			N/A
 */
static INLINE t_void util_enqueue_list_tail(t_void *pmoal_handle,
                                            pmlan_list_head phead,
                                            pmlan_linked_list pnode,
                                            mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                            mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    pmlan_linked_list pold_last;

    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, phead->plock);
    }
    pold_last    = phead->pprev;
    pnode->pprev = pold_last;
    pnode->pnext = (pmlan_linked_list)(void *)phead;

    phead->pprev = pold_last->pnext = pnode;
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, phead->plock);
    }
}

/**
 *  @brief This function adds a node at the list head
 *
 *  @param phead		List head
 *  @param pnode		List node to add
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return			N/A
 */
static INLINE t_void util_enqueue_list_head(t_void *pmoal_handle,
                                            pmlan_list_head phead,
                                            pmlan_linked_list pnode,
                                            mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                            mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    pmlan_linked_list pold_first;

    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, phead->plock);
    }
    pold_first   = phead->pnext;
    pnode->pprev = (pmlan_linked_list)(void *)phead;
    pnode->pnext = pold_first;

    phead->pnext = pold_first->pprev = pnode;
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, phead->plock);
    }
}

/**
 *  @brief This function removes a node from the list
 *
 *  @param phead		List head
 *  @param pnode		List node to remove
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return			N/A
 */
static INLINE t_void util_unlink_list(t_void *pmoal_handle,
                                      pmlan_list_head phead,
                                      pmlan_linked_list pnode,
                                      mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                      mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    pmlan_linked_list pmy_prev;
    pmlan_linked_list pmy_next;

    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, phead->plock);
    }
    pmy_prev        = pnode->pprev;
    pmy_next        = pnode->pnext;
    pmy_next->pprev = pmy_prev;
    pmy_prev->pnext = pmy_next;

    pnode->pnext = pnode->pprev = MNULL;
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, phead->plock);
    }
}

/**
 *  @brief This function dequeues a node from the list
 *
 *  @param phead		List head
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return			List node
 */
static INLINE pmlan_linked_list util_dequeue_list(t_void *pmoal_handle,
                                                  pmlan_list_head phead,
                                                  mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                                  mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    pmlan_linked_list pnode;

    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, phead->plock);
    }
    pnode = phead->pnext;
    if (pnode != MNULL && (pnode != (pmlan_linked_list)(void *)phead))
    {
        util_unlink_list(pmoal_handle, phead, pnode, MNULL, MNULL);
    }
    else
    {
        pnode = MNULL;
    }
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, phead->plock);
    }
    return pnode;
}
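
/*
 * Minimal usage sketch for the list helpers above (illustrative only; "pmoal"
 * and "my_entry" are hypothetical, see the note after the type definitions).
 * With lock_required set to MFALSE and MNULL lock callbacks the helpers run
 * unprotected, which is the caller's choice.
 *
 *   mlan_list_head free_q;
 *   my_entry e;
 *   pmlan_linked_list pnode;
 *
 *   util_init_list_head(pmoal, &free_q, MFALSE, MNULL);
 *   util_enqueue_list_tail(pmoal, &free_q, &e.link, MNULL, MNULL);
 *   pnode = util_dequeue_list(pmoal, &free_q, MNULL, MNULL); // returns &e.link
 *   if (pnode != MNULL)
 *   {
 *       my_entry *pentry = (my_entry *)(void *)pnode;
 *       (void)pentry;
 *   }
 *   util_free_list_head(pmoal, &free_q, MNULL);
 */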

/** Access controlled scalar variable */
typedef struct _mlan_scalar
{
    /** Value */
    t_s32 value;
    /** Pointer to lock */
    t_void *plock;
    /** Control flags */
    t_u32 flags;
} mlan_scalar, *pmlan_scalar;

/** Flag indicating the scalar has its own unique lock */
#define MLAN_SCALAR_FLAG_UNIQUE_LOCK MBIT(16)

/** scalar conditional value list */
typedef enum _MLAN_SCALAR_CONDITIONAL
{
    MLAN_SCALAR_COND_EQUAL,
    MLAN_SCALAR_COND_NOT_EQUAL,
    MLAN_SCALAR_COND_GREATER_THAN,
    MLAN_SCALAR_COND_GREATER_OR_EQUAL,
    MLAN_SCALAR_COND_LESS_THAN,
    MLAN_SCALAR_COND_LESS_OR_EQUAL
} MLAN_SCALAR_CONDITIONAL;

/**
 *  @brief This function initializes a scalar
 *
 *  @param pscalar			Pointer to scalar
 *  @param val				Initial scalar value
 *  @param plock_to_use		A new lock is created if NULL, else lock to use
 *  @param moal_init_lock	A pointer to init lock handler
 *
 *  @return					N/A
 */
static INLINE t_void util_scalar_init(t_void *pmoal_handle,
                                      pmlan_scalar pscalar,
                                      t_s32 val,
                                      t_void *plock_to_use,
                                      mlan_status (*moal_init_lock)(t_void *handle, t_void *plock))
{
    pscalar->value = val;
    pscalar->flags = 0;
    if (plock_to_use != MNULL)
    {
        pscalar->flags &= ~MLAN_SCALAR_FLAG_UNIQUE_LOCK;
        pscalar->plock = plock_to_use;
    }
    else
    {
        pscalar->flags |= MLAN_SCALAR_FLAG_UNIQUE_LOCK;
        (void)moal_init_lock(pmoal_handle, &pscalar->plock);
    }
}

/**
 *  @brief This function frees a scalar
 *
 *  @param pscalar			Pointer to scalar
 *  @param moal_free_lock	A pointer to free lock handler
 *
 *  @return			N/A
 */
static INLINE t_void util_scalar_free(t_void *pmoal_handle,
                                      pmlan_scalar pscalar,
                                      mlan_status (*moal_free_lock)(t_void *handle, t_void *plock))
{
    if ((pscalar->flags & MLAN_SCALAR_FLAG_UNIQUE_LOCK) > 0U)
    {
        (void)moal_free_lock(pmoal_handle, &pscalar->plock);
    }
}

/**
 *  @brief This function reads value from scalar
 *
 *  @param pscalar			Pointer to scalar
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return					Stored value
 */
static INLINE t_s32 util_scalar_read(t_void *pmoal_handle,
                                     pmlan_scalar pscalar,
                                     mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                     mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    t_s32 val;

    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, pscalar->plock);
    }
    val = pscalar->value;
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, pscalar->plock);
    }

    return val;
}

/**
 *  @brief This function writes value to scalar
 *
 *  @param pscalar			Pointer to scalar
 *  @param val				Value to write
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return					N/A
 */
static INLINE t_void util_scalar_write(t_void *pmoal_handle,
                                       pmlan_scalar pscalar,
                                       t_s32 val,
                                       mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                       mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, pscalar->plock);
    }
    pscalar->value = val;
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, pscalar->plock);
    }
}

/**
 *  @brief This function increments the value in scalar
 *
 *  @param pscalar			Pointer to scalar
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return					N/A
 */
static INLINE t_void util_scalar_increment(t_void *pmoal_handle,
                                           pmlan_scalar pscalar,
                                           mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                           mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, pscalar->plock);
    }
    pscalar->value++;
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, pscalar->plock);
    }
}

/**
 *  @brief This function decrements the value in scalar
 *
 *  @param pscalar			Pointer to scalar
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return					N/A
 */
static INLINE t_void util_scalar_decrement(t_void *pmoal_handle,
                                           pmlan_scalar pscalar,
                                           mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                           mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, pscalar->plock);
    }
    pscalar->value--;
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, pscalar->plock);
    }
}

/**
 *  @brief This function adds an offset to the value in scalar,
 *         and returns the new value
 *
 *  @param pscalar			Pointer to scalar
 *  @param offset			Offset value (can be negative)
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return					Value after offset
 */
static INLINE t_s32 util_scalar_offset(t_void *pmoal_handle,
                                       pmlan_scalar pscalar,
                                       t_s32 offset,
                                       mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                       mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    t_s32 newval;

    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, pscalar->plock);
    }
    newval = (pscalar->value += offset);
    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, pscalar->plock);
    }

    return newval;
}

/**
 *  @brief This function writes a value to the scalar
 *         if the comparison of the existing value with another value is true.
 *
 *  @param pscalar			Pointer to scalar
 *  @param condition		Condition to check
 *  @param val_compare		Value to compare against the current value
 *  						(A cond B, where A is the current value and B is val_compare)
 *  @param val_to_set		Value to set if the comparison is true
 *  @param moal_spin_lock	A pointer to spin lock handler
 *  @param moal_spin_unlock	A pointer to spin unlock handler
 *
 *  @return					Comparison result (MTRUE or MFALSE)
 */
static INLINE t_u8 util_scalar_conditional_write(t_void *pmoal_handle,
                                                 pmlan_scalar pscalar,
                                                 MLAN_SCALAR_CONDITIONAL condition,
                                                 t_s32 val_compare,
                                                 t_s32 val_to_set,
                                                 mlan_status (*moal_spin_lock)(t_void *handle, t_void *plock),
                                                 mlan_status (*moal_spin_unlock)(t_void *handle, t_void *plock))
{
    t_u8 update;
    if (moal_spin_lock != MNULL)
    {
        (void)moal_spin_lock(pmoal_handle, pscalar->plock);
    }

    switch (condition)
    {
        case MLAN_SCALAR_COND_EQUAL:
            update = (pscalar->value == val_compare);
            break;
        case MLAN_SCALAR_COND_NOT_EQUAL:
            update = (pscalar->value != val_compare);
            break;
        case MLAN_SCALAR_COND_GREATER_THAN:
            update = (pscalar->value > val_compare);
            break;
        case MLAN_SCALAR_COND_GREATER_OR_EQUAL:
            update = (pscalar->value >= val_compare);
            break;
        case MLAN_SCALAR_COND_LESS_THAN:
            update = (pscalar->value < val_compare);
            break;
        case MLAN_SCALAR_COND_LESS_OR_EQUAL:
            update = (pscalar->value <= val_compare);
            break;
        default:
            update = MFALSE;
            break;
    }
    if (update != 0U)
    {
        pscalar->value = val_to_set;
    }

    if (moal_spin_unlock != MNULL)
    {
        (void)moal_spin_unlock(pmoal_handle, pscalar->plock);
    }
    return (update) ? MTRUE : MFALSE;
}
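
/*
 * Minimal usage sketch for the scalar helpers above (illustrative only;
 * "pmoal" and the moal_* callback names are hypothetical caller-supplied
 * handlers).  A typical pattern is a lock-protected counter with a
 * compare-and-set style update:
 *
 *   mlan_scalar pending;
 *
 *   util_scalar_init(pmoal, &pending, 0, MNULL, moal_init_lock);
 *   util_scalar_increment(pmoal, &pending, moal_spin_lock, moal_spin_unlock);
 *   if (util_scalar_conditional_write(pmoal, &pending, MLAN_SCALAR_COND_GREATER_THAN,
 *                                     0, 0, moal_spin_lock, moal_spin_unlock) == MTRUE)
 *   {
 *       // the value was > 0 and has been reset to 0
 *   }
 *   util_scalar_free(pmoal, &pending, moal_free_lock);
 */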

#endif /* !_MLAN_UTIL_H_ */