1//
2// cache_asm.S - assembly language cache management routines
3//
4// $Id: //depot/rel/Foxhill/dot.8/Xtensa/OS/hal/cache_asm.S#1 $
5
6// Copyright (c) 1999-2015 Cadence Design Systems, Inc.
7//
8// Permission is hereby granted, free of charge, to any person obtaining
9// a copy of this software and associated documentation files (the
10// "Software"), to deal in the Software without restriction, including
11// without limitation the rights to use, copy, modify, merge, publish,
12// distribute, sublicense, and/or sell copies of the Software, and to
13// permit persons to whom the Software is furnished to do so, subject to
14// the following conditions:
15//
16// The above copyright notice and this permission notice shall be included
17// in all copies or substantial portions of the Software.
18//
19// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
20// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
21// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
22// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
23// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
24// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
25// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
26
27#include <xtensa/cacheasm.h>
28#include <xtensa/cacheattrasm.h>
29#include <xtensa/xtensa-versions.h>
30
31
32
33
34//----------------------------------------------------------------------
35//  Huge Range cache routines
36//----------------------------------------------------------------------
37
38	//  void  xthal_dcache_hugerange_<name>(void *addr, unsigned size);
39	//
40	//  Invalidate and/or writeback dcache entries for an arbitrary large
41	//  virtual address range with a single scan of the dcache.
42	//  Assumes no address translation, i.e. virtual = physical.
43	//
44	//  a2 = ptr to range
45	//  a3 = size of range
46	//
47	//  Note:  -128 is a valid immediate for ADDI, but +128 is not,
48	//  and ADDI can relax to ADDMI for multiples of 256.  So scanning
49	//  cache backwards (from end to start) allows all cache line sizes
50	//  without creating an extra instruction for the ADDI.
51	//
	//  Register roles inside the generated function (all caller-saved
	//  scratch under the Xtensa ABI entered via abi_entry):
	//    a2 = address handed to \instruction (clamped into range)
	//    a4 = low end of range, rounded down to a cache line
	//    a5 = high end of range, rounded to a cache line
	//    a6 = candidate address rebuilt from tag|index
	//    a7 = line count, then tag value read by LDCT
	//    a9, a10 = index extraction,  a11 = saved PREFCTL
	.macro dcache_hugefunc  name, instruction
	.text
	.align	4
	.type	xthal_dcache_hugerange_\name,@function
	.global	xthal_dcache_hugerange_\name
xthal_dcache_hugerange_\name:
	abi_entry
// Huge-range scan needs a dcache, the cache TEST option (for LDCT),
// MINMAX (MAXU/MINU) and zero-overhead loops; not used on NX configs.
#if (!defined(XCHAL_HAVE_NX) || XCHAL_HAVE_NX == 0) && XCHAL_DCACHE_SIZE > 0 \
	&& XCHAL_HAVE_DCACHE_TEST && XCHAL_HAVE_MINMAX && XCHAL_HAVE_LOOPS
	movi	a4, XCHAL_DCACHE_SIZE*2		// size at which to use huge algorithm
	movi	a7, -XCHAL_DCACHE_LINESIZE	// for rounding to cache line size
	bltu	a3, a4, 7f			// use normal (line-by-line hit) function
#if XCHAL_HAVE_PREFETCH
	movi	a11, 0
	xsr.prefctl a11		// temporarily disable prefetch (invalidates prefetch bufs!)
#endif
	add	a5, a3, a2			// a5 = end of range
	and	a4, a2, a7			// a4 = low end, rounded to containing cache line
	addi	a5, a5, /*XCHAL_DCACHE_LINESIZE*/-1	// NOTE(review): icache variant adds LINESIZE-1 here — confirm the -1 asymmetry is intended
	and	a5, a5, a7			// a5 = high end, rounded to containing cache line
	movi	a7, XCHAL_DCACHE_SIZE/XCHAL_DCACHE_LINESIZE	// a7 = number of lines in dcache
	movi	a3, XCHAL_DCACHE_SIZE-XCHAL_DCACHE_LINESIZE	// way index
	mov	a6, a5
	//movi	a8, -XCHAL_DCACHE_SETSIZE	// use if LDCT gives non-zero index bits
	movi	a10, (XCHAL_DCACHE_SIZE/XCHAL_DCACHE_WAYS) - 1	// mask of set+offset bits within one way

	// Scan every dcache entry backwards; for each, rebuild the cached
	// address from its tag and apply \instruction to it, clamped into
	// [a4,a5] so out-of-range entries just re-hit a boundary line.
	loopgtz a7, 1f
	ldct	a7, a3				// a7 = cache tag for cache entry [a3]
	\instruction	a2, 0
	.begin schedule
	//extui	a9, a3, 0, XCHAL_DCACHE_SETWIDTH+XCHAL_DCACHE_LINEWIDTH
	and	a9, a3, a10			// a9 = set+offset index of this entry
	addi	a3, a3, -XCHAL_DCACHE_LINESIZE	// step back one line
	.end schedule
	.begin schedule
	//and	a7, a7, a8	// uncomment if LDCT reports non-zero index bits
	maxu	a6, a6, a4	// a4 = low end of range
	minu	a2, a6, a5	// a5 = high end of range
	or	a6, a7, a9	// a6 = cached address (tag | index)
	.end schedule
1:

	// Drain the two software-pipelined addresses still in flight.
	\instruction	a2, 0
	maxu	a6, a6, a4
	minu	a2, a6, a5
	\instruction	a2, 0
#if XCHAL_HAVE_PREFETCH
	wsr.prefctl a11		// restore prefetch
#endif
	isync_return_nop
	abi_return
#endif /* dcache supports hugerange */
// Jump to non-huge routine, skipping its abi_entry (already done above)
7:	j.l	xthal_dcache_region_\name + ABI_ENTRY_MINSIZE, a4
	.size xthal_dcache_hugerange_\name, . - xthal_dcache_hugerange_\name
	.endm
108
109
110
111	//  void  xthal_icache_hugerange_<name>(void *addr, unsigned size);
112	//
113	//  Invalidate icache entries for an arbitrary large
114	//  virtual address range with a single scan of the icache.
115	//  Assumes no address translation, i.e. virtual = physical.
116	//
117	//  a2 = ptr to range
118	//  a3 = size of range
119	//
120	//  Note:  -128 is a valid immediate for ADDI, but +128 is not,
121	//  and ADDI can relax to ADDMI for multiples of 256.  So scanning
122	//  cache backwards (from end to start) allows all cache line sizes
123	//  without creating an extra instruction for the ADDI.
124	//
	//  Register roles mirror dcache_hugefunc above (a2 = op address,
	//  a4/a5 = clamped range, a6 = tag|index address, a7 = count/tag,
	//  a9/a10 = index extraction).  No PREFCTL handling for icache.
	.macro icache_hugefunc  name, instruction
	.text
	.align	4
	.type	xthal_icache_hugerange_\name,@function
	.global	xthal_icache_hugerange_\name
xthal_icache_hugerange_\name:
	abi_entry
// Huge-range scan needs an icache, the cache TEST option (for LICT),
// MINMAX (MAXU/MINU) and zero-overhead loops; not used on NX configs.
#if (!defined(XCHAL_HAVE_NX) || XCHAL_HAVE_NX == 0) &&XCHAL_ICACHE_SIZE > 0 && \
	XCHAL_HAVE_ICACHE_TEST && XCHAL_HAVE_MINMAX && XCHAL_HAVE_LOOPS
	movi	a4, XCHAL_ICACHE_SIZE*2		// size at which to use huge algorithm
	movi	a7, -XCHAL_ICACHE_LINESIZE	// for rounding to cache line size
	bltu	a3, a4, 7f			// use normal (line-by-line hit) function
	add	a5, a3, a2			// a5 = end of range
	and	a4, a2, a7			// a4 = low end, rounded to containing cache line
	addi	a5, a5, XCHAL_ICACHE_LINESIZE-1
	and	a5, a5, a7			// a5 = high end, rounded to containing cache line
	movi	a7, XCHAL_ICACHE_SIZE/XCHAL_ICACHE_LINESIZE	// a7 = number of lines in icache
	movi	a3, XCHAL_ICACHE_SIZE-XCHAL_ICACHE_LINESIZE	// way index
	mov	a6, a5
	//movi	a8, -XCHAL_ICACHE_SETSIZE	// use if LICT gives non-zero index bits
	movi	a10, (XCHAL_ICACHE_SIZE/XCHAL_ICACHE_WAYS) - 1	// mask of set+offset bits within one way

	// Scan every icache entry backwards; for each, rebuild the cached
	// address from its tag and apply \instruction to it, clamped into
	// [a4,a5] so out-of-range entries just re-hit a boundary line.
	loopgtz a7, 1f
	lict	a7, a3				// a7 = cache tag for cache entry [a3]
	\instruction	a2, 0
	.begin schedule
	//extui	a9, a3, 0, XCHAL_ICACHE_SETWIDTH+XCHAL_ICACHE_LINEWIDTH
	and	a9, a3, a10			// a9 = set+offset index of this entry
	addi	a3, a3, -XCHAL_ICACHE_LINESIZE	// step back one line
	.end schedule
	.begin schedule
	//and	a7, a7, a8	// uncomment if LICT reports non-zero index bits
	maxu	a6, a6, a4	// a4 = low end of range
	minu	a2, a6, a5	// a5 = high end of range
	or	a6, a7, a9	// a6 = cached address (tag | index)
	.end schedule
1:

	// Drain the two software-pipelined addresses still in flight.
	\instruction	a2, 0
	maxu	a6, a6, a4
	minu	a2, a6, a5
	\instruction	a2, 0
	isync_return_nop
	abi_return
#endif /* icache supports hugerange */
// Jump to non-huge routine, skipping its abi_entry (already done above)
7:	j.l	xthal_icache_region_\name + ABI_ENTRY_MINSIZE, a4
	.size xthal_icache_hugerange_\name, . - xthal_icache_hugerange_\name
	.endm
173
174
175
176
177	.text
178
179//----------------------------------------------------------------------
180// Read CACHEATTR register
181//----------------------------------------------------------------------
182
#if defined(__SPLIT__get_cacheattr) ||\
    defined(__SPLIT__get_cacheattr_nw)

//  unsigned xthal_get_cacheattr(void);
//  unsigned xthal_get_dcacheattr(void);
//
//  Return the data-side cache attribute state (CACHEATTR register or
//  its MMU-based equivalent) in a2.

DECLFUNC(xthal_get_cacheattr)
DECLFUNC(xthal_get_dcacheattr)
# if XCHAL_HAVE_CACHEATTR	/* single CACHEATTR register used for both I and D */
DECLFUNC(xthal_get_icacheattr)
# endif
	abi_entry
	dcacheattr_get		// leaves attribute value in a2 (return reg)
	abi_return
	endfunc

#endif

#if defined(__SPLIT__get_icacheattr) ||\
    defined(__SPLIT__get_icacheattr_nw)

//  unsigned xthal_get_icacheattr(void);
//
//  Instruction-side variant; only needed as a separate function when
//  there is no single shared CACHEATTR register.

# if !XCHAL_HAVE_CACHEATTR	/* possibly independent CACHEATTR states used for I and D */
DECLFUNC(xthal_get_icacheattr)
	abi_entry
	icacheattr_get		// leaves attribute value in a2 (return reg)
	abi_return
	endfunc
# endif

#endif /*split*/
214
215
216//----------------------------------------------------------------------
217//  Write CACHEATTR register, or equivalent.
218//----------------------------------------------------------------------
219
220/*
221 *  Set CACHEATTR register in a safe manner.
222 *
223 *	void  xthal_set_cacheattr( unsigned new_cacheattr );
224 *	void  xthal_set_icacheattr( unsigned new_cacheattr );
225 *	void  xthal_set_dcacheattr( unsigned new_cacheattr );
226 */
227
#if defined(__SPLIT__set_cacheattr) ||\
	defined(__SPLIT__set_cacheattr_nw)

//  void xthal_set_cacheattr( unsigned new_cacheattr );
//
//  a2 = new attribute value.  With a single CACHEATTR register the
//  icache/dcache entry points are just aliases of this function.

# if XCHAL_HAVE_CACHEATTR	/* single CACHEATTR register used for both I and D accesses */
DECLFUNC(xthal_set_icacheattr)
DECLFUNC(xthal_set_dcacheattr)
# endif
DECLFUNC(xthal_set_cacheattr)
	abi_entry
	cacheattr_set		// applies a2 safely (see cacheattrasm.h)
	abi_return
	endfunc

#endif /*split*/
242
243
#if XCHAL_HAVE_CACHEATTR

	/*
	 *  Already done above.
	 *
	 *  Since we can't enable/disable the icache and dcache independently,
	 *  and don't have a nice place to store a state which would enable
	 *  us to only enable them both when both have been requested to be
	 *  enabled, we simply enable both for any request to enable either,
	 *  and disable both for any request to disable either cache.
	 */

#elif XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR || (XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY)

# if defined(__SPLIT__set_icacheattr) \
	 || defined(__SPLIT__set_icacheattr_nw)

//  void xthal_set_icacheattr( unsigned new_cacheattr );  a2 = new value.

DECLFUNC(xthal_set_icacheattr)
	abi_entry
	icacheattr_set
	isync_return_nop	// sync instruction fetch with the new attributes
	abi_return
	endfunc

# endif

# if defined(__SPLIT__set_dcacheattr) \
	 || defined(__SPLIT__set_dcacheattr_nw)

//  void xthal_set_dcacheattr( unsigned new_cacheattr );  a2 = new value.

DECLFUNC(xthal_set_dcacheattr)
	abi_entry
	dcacheattr_set
	abi_return
	endfunc

# endif /*split*/

#else /* full MMU (pre-v3): */

# if defined(__SPLIT__set_idcacheattr) \
	 || defined(__SPLIT__set_idcacheattr_nw)

//  These functions aren't applicable to arbitrary MMU configurations.
//  Do nothing in this case.

DECLFUNC(xthal_set_icacheattr)
DECLFUNC(xthal_set_dcacheattr)
	abi_entry
	abi_return
	endfunc

# endif /*split*/

#endif /* cacheattr/MMU type */
298
299
300//----------------------------------------------------------------------
301// Determine (guess) whether caches are "enabled"
302//----------------------------------------------------------------------
303
304/*
305 *  There is no "cache enable" bit in the Xtensa architecture,
306 *  but we can use CACHEATTR (if it or its equivalent exists)
307 *  as an indication that caches have been enabled.
308 */
309
#if XCHAL_HAVE_CACHEATTR

# if defined(__SPLIT__idcache_is_enabled) || \
     defined(__SPLIT__idcache_is_enabled_nw)

//  int xthal_icache_is_enabled(void);  int xthal_dcache_is_enabled(void);
//  Shared body: returns 1 in a2 if CACHEATTR indicates caches enabled,
//  else 0.  (The *_is_enabled macros branch to 2f when enabled.)

DECLFUNC(xthal_icache_is_enabled)
DECLFUNC(xthal_dcache_is_enabled)
	abi_entry
	cacheattr_is_enabled	2f
	movi	a2, 0		// fall-through: not enabled
	abi_return
2:	movi	a2, 1		// branch target: enabled
	abi_return
	endfunc

# endif /*split*/

#elif XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR

# if defined(__SPLIT__icache_is_enabled) || \
     defined(__SPLIT__icache_is_enabled_nw)

//  int xthal_icache_is_enabled(void);  returns 0/1 in a2.

DECLFUNC(xthal_icache_is_enabled)
	abi_entry
	icacheattr_is_enabled	2f
	movi	a2, 0		// fall-through: not enabled
	abi_return
2:	movi	a2, 1		// branch target: enabled
	abi_return
	endfunc

# endif

# if defined(__SPLIT__dcache_is_enabled) || \
     defined(__SPLIT__dcache_is_enabled_nw)

//  int xthal_dcache_is_enabled(void);  returns 0/1 in a2.

DECLFUNC(xthal_dcache_is_enabled)
	abi_entry
	dcacheattr_is_enabled	2f
	movi	a2, 0		// fall-through: not enabled
	abi_return
2:	movi	a2, 1		// branch target: enabled
	abi_return
	endfunc

# endif /*split*/

#else

//  These functions aren't applicable to arbitrary MMU configurations.
//  Assume caches are enabled in this case (!).

# if defined(__SPLIT__idcache_is_enabled) || \
     defined(__SPLIT__idcache_is_enabled_nw)

DECLFUNC(xthal_icache_is_enabled)
DECLFUNC(xthal_dcache_is_enabled)
	abi_entry
	movi	a2, 1		// always report "enabled"
	abi_return
	endfunc
# endif /*split*/

#endif
374
375
376
377//----------------------------------------------------------------------
378// invalidate the icache
379//----------------------------------------------------------------------
380
#if defined(__SPLIT__icache_all_invalidate) || \
    defined(__SPLIT__icache_all_invalidate_nw)

// void xthal_icache_all_invalidate(void);
// Invalidate the entire icache.  a2,a3 are scratch for the macro.

DECLFUNC(xthal_icache_all_invalidate)
	abi_entry
	icache_invalidate_all	a2, a3
	isync_return_nop	// sync instruction fetch before returning
	abi_return
	endfunc

//----------------------------------------------------------------------
// invalidate the dcache
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_all_invalidate) || \
    defined(__SPLIT__dcache_all_invalidate_nw)

// void xthal_dcache_all_invalidate(void);
// Invalidate the entire dcache (dirty data is discarded, not written back).

DECLFUNC(xthal_dcache_all_invalidate)
	abi_entry
	dcache_invalidate_all	a2, a3
	abi_return
	endfunc

//----------------------------------------------------------------------
// write dcache dirty data
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_all_writeback) || \
    defined(__SPLIT__dcache_all_writeback_nw)

// void xthal_dcache_all_writeback(void);
// Write back all dirty dcache data; lines stay valid.

DECLFUNC(xthal_dcache_all_writeback)
	abi_entry
	dcache_writeback_all	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// write dcache dirty data and invalidate
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_all_writeback_inv) || \
    defined(__SPLIT__dcache_all_writeback_inv_nw)

// void xthal_dcache_all_writeback_inv(void);
// Write back all dirty dcache data, then invalidate every line.

DECLFUNC(xthal_dcache_all_writeback_inv)
	abi_entry
	dcache_writeback_inv_all	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock instructions from icache
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__icache_all_unlock) || \
    defined(__SPLIT__icache_all_unlock_nw)

// void xthal_icache_all_unlock(void);
// Unlock all locked icache lines.

DECLFUNC(xthal_icache_all_unlock)
	abi_entry
	icache_unlock_all	a2, a3
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock data from dcache
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_all_unlock) || \
    defined(__SPLIT__dcache_all_unlock_nw)

// void xthal_dcache_all_unlock(void);
// Unlock all locked dcache lines.

DECLFUNC(xthal_dcache_all_unlock)
	abi_entry
	dcache_unlock_all	a2, a3
	abi_return
	endfunc

//----------------------------------------------------------------------
// invalidate the address range in the icache
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__icache_region_invalidate) || \
    defined(__SPLIT__icache_region_invalidate_nw)

// void xthal_icache_region_invalidate( void *addr, unsigned size );
// a2 = addr, a3 = size; a4 is scratch.

DECLFUNC(xthal_icache_region_invalidate)
	abi_entry
	icache_invalidate_region	a2, a3, a4
	isync_return_nop	// sync instruction fetch before returning
	abi_return
	endfunc

#endif
497
// Instantiate the hugerange functions defined by the macros above.
// Each expansion supplies the per-line cache-op instruction to apply.

#if defined(__SPLIT__icache_hugerange_invalidate)

// void xthal_icache_hugerange_invalidate( void *addr, unsigned size );
icache_hugefunc	invalidate,	ihi

#endif

#if defined(__SPLIT__icache_hugerange_unlock)

# if XCHAL_ICACHE_LINE_LOCKABLE
// void xthal_icache_hugerange_unlock( void *addr, unsigned size );
icache_hugefunc	unlock,		ihu
# endif

#endif

#if defined(__SPLIT__dcache_hugerange_invalidate)

// void xthal_dcache_hugerange_invalidate( void *addr, unsigned size );
dcache_hugefunc	invalidate,	dhi

#endif

#if defined(__SPLIT__dcache_hugerange_unlock)

# if XCHAL_DCACHE_LINE_LOCKABLE
// void xthal_dcache_hugerange_unlock( void *addr, unsigned size );
dcache_hugefunc	unlock,		dhu
# endif

#endif

#if defined(__SPLIT__dcache_hugerange_writeback)

// void xthal_dcache_hugerange_writeback( void *addr, unsigned size );
dcache_hugefunc	writeback,	dhwb

#endif

#if defined(__SPLIT__dcache_hugerange_writeback_inv)

// void xthal_dcache_hugerange_writeback_inv( void *addr, unsigned size );
dcache_hugefunc	writeback_inv,	dhwbi



//----------------------------------------------------------------------
// invalidate the address range in the dcache
//----------------------------------------------------------------------

#endif
549
#if defined(__SPLIT__dcache_region_invalidate) || \
    defined(__SPLIT__dcache_region_invalidate_nw)

// void xthal_dcache_region_invalidate( void *addr, unsigned size );
// a2 = addr, a3 = size; a4 is scratch.

DECLFUNC(xthal_dcache_region_invalidate)
	abi_entry
	dcache_invalidate_region	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// write dcache region dirty data
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_region_writeback) || \
    defined(__SPLIT__dcache_region_writeback_nw)

// void xthal_dcache_region_writeback( void *addr, unsigned size );
// a2 = addr, a3 = size; a4,a5 are scratch.

DECLFUNC(xthal_dcache_region_writeback)
	abi_entry
	dcache_writeback_region		a2, a3, a4, a5
	abi_return
	endfunc

//----------------------------------------------------------------------
// write dcache region dirty data and invalidate
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_region_writeback_inv) || \
	defined(__SPLIT__dcache_region_writeback_inv_nw)

// void xthal_dcache_region_writeback_inv( void *addr, unsigned size );
// a2 = addr, a3 = size; a4,a5 are scratch.

DECLFUNC(xthal_dcache_region_writeback_inv)
	abi_entry
	dcache_writeback_inv_region	a2, a3, a4, a5
	abi_return
	endfunc

//----------------------------------------------------------------------
// lock instructions in icache region
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__icache_region_lock) || \
	defined(__SPLIT__icache_region_lock_nw)

// void xthal_icache_region_lock( void *addr, unsigned size );
// a2 = addr, a3 = size; a4 is scratch.

DECLFUNC(xthal_icache_region_lock)
	abi_entry
	icache_lock_region	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// lock data in dcache region
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_region_lock) || \
	defined(__SPLIT__dcache_region_lock_nw)

// void xthal_dcache_region_lock( void *addr, unsigned size );
// a2 = addr, a3 = size; a4 is scratch.

DECLFUNC(xthal_dcache_region_lock)
	abi_entry
	dcache_lock_region	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock instructions from icache region
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__icache_region_unlock) || \
	defined(__SPLIT__icache_region_unlock_nw)

// void xthal_icache_region_unlock( void *addr, unsigned size );
// a2 = addr, a3 = size; a4 is scratch.

DECLFUNC(xthal_icache_region_unlock)
	abi_entry
	icache_unlock_region	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock data from dcache region
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_region_unlock) || \
	defined(__SPLIT__dcache_region_unlock_nw)

// void xthal_dcache_region_unlock( void *addr, unsigned size );
// a2 = addr, a3 = size; a4 is scratch.

DECLFUNC(xthal_dcache_region_unlock)
	abi_entry
	dcache_unlock_region	a2, a3, a4
	abi_return
	endfunc


//----------------------------------------------------------------------
// invalidate single icache line
//----------------------------------------------------------------------

#endif
669
#if	defined(__SPLIT__icache_line_invalidate) || \
	defined(__SPLIT__icache_line_invalidate_nw)

// void xthal_icache_line_invalidate(void *addr);
// a2 = address whose containing icache line is invalidated.

DECLFUNC(xthal_icache_line_invalidate)
	abi_entry
	icache_invalidate_line	a2, 0
	isync_return_nop	// sync instruction fetch before returning
	abi_return
	endfunc


//----------------------------------------------------------------------
// invalidate single dcache line
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_line_invalidate) || \
	defined(__SPLIT__dcache_line_invalidate_nw)

// void xthal_dcache_line_invalidate(void *addr);
// a2 = address whose containing dcache line is invalidated.

DECLFUNC(xthal_dcache_line_invalidate)
	abi_entry
	dcache_invalidate_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// write single dcache line dirty data
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_line_writeback) || \
	defined(__SPLIT__dcache_line_writeback_nw)

// void xthal_dcache_line_writeback(void *addr);
// a2 = address whose containing dcache line is written back.

DECLFUNC(xthal_dcache_line_writeback)
	abi_entry
	dcache_writeback_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// write single dcache line dirty data and invalidate
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_line_writeback_inv) || \
	defined(__SPLIT__dcache_line_writeback_inv_nw)

// void xthal_dcache_line_writeback_inv(void *addr);
// a2 = address whose containing dcache line is written back + invalidated.

DECLFUNC(xthal_dcache_line_writeback_inv)
	abi_entry
	dcache_writeback_inv_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// lock instructions in icache line
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__icache_line_lock) || \
	defined(__SPLIT__icache_line_lock_nw)

// void xthal_icache_line_lock(void *addr);
// a2 = address whose containing icache line is locked.

DECLFUNC(xthal_icache_line_lock)
	abi_entry
	icache_lock_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// lock data in dcache line
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_line_lock) || \
	defined(__SPLIT__dcache_line_lock_nw)

// void xthal_dcache_line_lock(void *addr);
// a2 = address whose containing dcache line is locked.

DECLFUNC(xthal_dcache_line_lock)
	abi_entry
	dcache_lock_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock instructions from icache line
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__icache_line_unlock) || \
	defined(__SPLIT__icache_line_unlock_nw)

// void xthal_icache_line_unlock(void *addr);
// a2 = address whose containing icache line is unlocked.

DECLFUNC(xthal_icache_line_unlock)
	abi_entry
	icache_unlock_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock data from dcache line
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_line_unlock) || \
	defined(__SPLIT__dcache_line_unlock_nw)

// void xthal_dcache_line_unlock(void *addr);
// a2 = address whose containing dcache line is unlocked.

DECLFUNC(xthal_dcache_line_unlock)
	abi_entry
	dcache_unlock_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// sync icache and memory (???)
//----------------------------------------------------------------------

#endif
807
#if defined(__SPLIT__icache_sync) || \
	defined(__SPLIT__icache_sync_nw)

// void xthal_icache_sync(void);
// Synchronize instruction fetch with memory; a2 is scratch.

DECLFUNC(xthal_icache_sync)
	abi_entry
	icache_sync	a2
	isync_return_nop
	abi_return
	endfunc

//----------------------------------------------------------------------
// sync dcache and memory (???)
//----------------------------------------------------------------------

#endif

#if defined(__SPLIT__dcache_sync) || \
	defined(__SPLIT__dcache_sync_nw)

// void xthal_dcache_sync(void);
// Synchronize data accesses with memory; a2 is scratch.

DECLFUNC(xthal_dcache_sync)
	abi_entry
	dcache_sync	a2
	abi_return
	endfunc

//----------------------------------------------------------------------
// Get/Set icache number of ways enabled
//----------------------------------------------------------------------

#endif
842
#if defined (__SPLIT__icache_get_ways) || \
	defined (__SPLIT__icache_get_ways_nw)

// unsigned int xthal_icache_get_ways(void);
// Returns number of icache ways enabled, in a2.

DECLFUNC(xthal_icache_get_ways)
	abi_entry
	icache_get_ways	a2
	abi_return
	endfunc

#endif

#if defined (__SPLIT__icache_set_ways) || \
	defined(__SPLIT__icache_set_ways_nw)

// void xthal_icache_set_ways(unsigned int ways);
// a2 = number of ways to enable; a3,a4 are scratch.

DECLFUNC(xthal_icache_set_ways)
	abi_entry
	icache_set_ways	a2 a3 a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// Get/Set dcache number of ways enabled
//----------------------------------------------------------------------

#endif

#if defined (__SPLIT__dcache_get_ways) || \
	defined (__SPLIT__dcache_get_ways_nw)

// unsigned int xthal_dcache_get_ways(void);
// Returns number of dcache ways enabled, in a2.

DECLFUNC(xthal_dcache_get_ways)
	abi_entry
	dcache_get_ways a2
	abi_return
	endfunc

#endif

#if defined (__SPLIT__dcache_set_ways) || \
	defined (__SPLIT__dcache_set_ways_nw)

// void xthal_dcache_set_ways(unsigned int ways);
// a2 = number of ways to enable; a3,a4 are scratch.

DECLFUNC(xthal_dcache_set_ways)
	abi_entry
	dcache_set_ways a2 a3 a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// opt into and out of coherence
//----------------------------------------------------------------------

#endif
902
#if defined(__SPLIT__cache_coherence_on) || \
	defined(__SPLIT__cache_coherence_on_nw)

// The opt-in routine assumes cache was initialized at reset,
// so it's equivalent to the low-level coherence_on routine.

// void xthal_cache_coherence_optin(void)
// void xthal_cache_coherence_on(void)
// a2,a3 are scratch for the macro.

DECLFUNC(xthal_cache_coherence_optin)
DECLFUNC(xthal_cache_coherence_on)
	abi_entry
	cache_coherence_on	a2, a3
	abi_return
	endfunc

#endif

#if defined(__SPLIT__cache_coherence_off) || \
	defined(__SPLIT__cache_coherence_off_nw)

// The coherence_off routines should not normally be called directly.
// Use the xthal_cache_coherence_optout() C routine instead
// (which first empties the cache).

// void xthal_cache_coherence_off(void)
// a2,a3 are scratch for the macro.

DECLFUNC(xthal_cache_coherence_off)
	abi_entry
	cache_coherence_off	a2, a3
	abi_return
	endfunc


//----------------------------------------------------------------------
// Control cache prefetch
//----------------------------------------------------------------------

#endif
942
#if defined(__SPLIT__set_cache_prefetch_long) || \
	defined(__SPLIT__set_cache_prefetch_long_nw)

// The 64-bit argument arrives in the a2/a3 register pair; which half
// holds the PREFCTL mask vs. value depends on endianness, hence the
// aH/aL aliases below.
# if XCHAL_HAVE_BE
#  define aH a2	/* msb word = prefctl mask */
#  define aL a3 /* lsb word = prefctl value */
# else
#  define aH a3	/* msb word = prefctl mask */
#  define aL a2 /* lsb word = prefctl value */
# endif

// Set cache prefetch state (-1=enable, 0=disable, and see XTHAL_*PREFETCH_*),
// and return previous one.
//
// int  xthal_set_cache_prefetch_long( unsigned long long );
//
// Returns the previous PREFCTL value in a2 (0 if no prefetch option).
//
DECLFUNC(xthal_set_cache_prefetch_long)
	abi_entry
# if XCHAL_HAVE_PREFETCH
	movi	a5, XCHAL_CACHE_PREFCTL_DEFAULT
	addi	a4, aL, 1	// does prefctl value aL == -1 ?
	moveqz	aL, a5, a4	// if yes (XTHAL_PREFETCH_ENABLE), set it to default
        movgez  a2, aL, aL      // if the high bit is not set, then we want to transfer the contents of aL to prefctl
				// so we move it to a2
	bgez	aL, 1f		// high bit set indicates masked update
	// Masked update: {aH,aL} halves are rotated into a 32-bit value
	// (a5) and a 32-bit mask (a3); only masked bits of PREFCTL change.
	ssai	16		// 16-bit right shifts
	src	a5, aL, aH	// get 16-bit-swapped 32-bit value
	src	a5, a5, a5	// get 32-bit value (rotate by 16)
	rsr.prefctl a4
	src	a3, aH, aL	// get 32-bit mask
	or	a4, a4, a3	// set masked bits
	xor	a4, a4, a3	// clear masked bits
	and	a5, a5, a3	// only use masked bits
	or	a2, a4, a5	// combine masked bits
1:
#  if XCHAL_HW_MIN_VERSION <= XTENSA_HWVERSION_RC_2010_1    /* for erratum #325 */
	j 1f ; .align 8 ; 1: xsr.prefctl a2 ; isync	// ensure XSR.PREFCTL;ISYNC wholly within an icache line
#  else
	xsr.prefctl a2	// swap in new value; old PREFCTL becomes return value
#  endif
# else
	movi	a2, 0	// no prefetch option: report previous state as 0
# endif
	abi_return
	endfunc

//----------------------------------------------------------------------

#endif
992
#if defined(__SPLIT__set_cache_prefetch) || \
	defined(__SPLIT__set_cache_prefetch_nw)

// FOR BACKWARD COMPATIBILITY WITH PRE-RF RELEASE OBJECT CODE ONLY.
// Set cache prefetch state (-1=enable, 0=disable, and see the
//   definitions of XTHAL_*PREFETCH_* with only the lower 32 bits set),
// and return previous one.
// int  xthal_set_cache_prefetch( int )
//
// a2 = new state (bit 31 set => masked update, mask in bits 16..30);
// returns previous PREFCTL value in a2 (0 if no prefetch option).
//
DECLFUNC(xthal_set_cache_prefetch)
	abi_entry
# if XCHAL_HAVE_PREFETCH
	movi	a3, XCHAL_CACHE_PREFCTL_DEFAULT
	addi	a4, a2, 1	// does a2 == -1 ?
	moveqz	a2, a3, a4	// if yes (XTHAL_PREFETCH_ENABLE), set it to default
	bbci.l	a2, 31, 1f	// high bit set indicates masked update
	rsr.prefctl a4
	extui	a5, a2, 16, 15	// a5 = update mask from bits 16..30
	or	a4, a4, a5	// set masked bits
	xor	a4, a4, a5	// clear masked bits
	and	a2, a2, a5	// only use masked bits
	or	a2, a4, a2	// combine masked bits
1:
#  if XCHAL_HW_MIN_VERSION <= XTENSA_HWVERSION_RC_2010_1    /* for erratum #325 */
	j 1f ; .align 8 ; 1: xsr.prefctl a2 ; isync	// ensure XSR.PREFCTL;ISYNC wholly within an icache line
#  else
	xsr.prefctl a2	// swap in new value; old PREFCTL becomes return value
#  endif
# else
	movi	a2, 0	// no prefetch option: report previous state as 0
# endif
	abi_return
	endfunc

//----------------------------------------------------------------------

#endif
1030
#if defined(__SPLIT__get_cache_prefetch) ||\
	defined(__SPLIT__get_cache_prefetch_nw)

// Return current cache prefetch state.
// int  xthal_get_cache_prefetch( void )
// Returns PREFCTL in a2, or 0 if the config has no prefetch option.
DECLFUNC(xthal_get_cache_prefetch)
	abi_entry
# if XCHAL_HAVE_PREFETCH
	rsr.prefctl a2
# else
	movi	a2, 0
# endif
	abi_return
	endfunc

//----------------------------------------------------------------------
// Misc configuration info
//----------------------------------------------------------------------
#endif
1050
// Eventually these will move to their own file:
// Absolute symbols exporting the hardware config IDs and HAL release
// numbers this file was built against.
#if defined(__SPLIT__hw_configid0)
	.set	xthals_hw_configid0, XCHAL_HW_CONFIGID0
#endif

#if defined(__SPLIT__hw_configid1)
	.set	xthals_hw_configid1, XCHAL_HW_CONFIGID1
#endif

#if defined(__SPLIT__release_major)
	.set	xthals_release_major, XTHAL_RELEASE_MAJOR
#endif

#if defined(__SPLIT__release_minor)
	.set	xthals_release_minor, XTHAL_RELEASE_MINOR

#endif /*split*/

	.global	xthals_hw_configid0, xthals_hw_configid1
	.global	xthals_release_major, xthals_release_minor
1071
1072//----------------------------------------------------------------------
1073
1074