//
// cache_asm.S - assembly language cache management routines
//
// $Id: //depot/rel/Foxhill/dot.8/Xtensa/OS/hal/cache_asm.S#1 $

// Copyright (c) 1999-2015 Cadence Design Systems, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

#include <xtensa/cacheasm.h>
#include <xtensa/cacheattrasm.h>
#include <xtensa/xtensa-versions.h>




//----------------------------------------------------------------------
//  Huge Range cache routines
//----------------------------------------------------------------------

	//  void  xthal_dcache_hugerange_<name>(void *addr, unsigned size);
	//
	//  Invalidate and/or write back dcache entries for an arbitrarily large
	//  virtual address range with a single scan of the dcache.
	//  Assumes no address translation, i.e. virtual = physical.
	//
	//  a2 = ptr to range
	//  a3 = size of range
	//
	//  Note:  -128 is a valid immediate for ADDI, but +128 is not,
	//  and ADDI can relax to ADDMI for multiples of 256.  So scanning
	//  cache backwards (from end to start) allows all cache line sizes
	//  without creating an extra instruction for the ADDI.
	//
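	//  Example (illustrative C sketch only; the buffer and its size are
	//  hypothetical):  writing back and invalidating a range much larger
	//  than the dcache with one of the hugerange routines generated below.
	//
	//	extern char frame_buf[1 << 20];		/* hypothetical 1 MB buffer */
	//	/* assumes an identity (virtual == physical) mapping */
	//	xthal_dcache_hugerange_writeback_inv(frame_buf, sizeof(frame_buf));
	//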
	.macro dcache_hugefunc  name, instruction
	.text
	.align	4
	.type	xthal_dcache_hugerange_\name,@function
	.global	xthal_dcache_hugerange_\name
xthal_dcache_hugerange_\name:
	abi_entry
#if (!defined(XCHAL_HAVE_NX) || XCHAL_HAVE_NX == 0) && XCHAL_DCACHE_SIZE > 0 \
	&& XCHAL_HAVE_DCACHE_TEST && XCHAL_HAVE_MINMAX && XCHAL_HAVE_LOOPS
	movi	a4, XCHAL_DCACHE_SIZE*2		// size at which to use huge algorithm
	movi	a7, -XCHAL_DCACHE_LINESIZE	// for rounding to cache line size
	bltu	a3, a4, 7f			// use normal (line-by-line hit) function
#if XCHAL_HAVE_PREFETCH
	movi	a11, 0
	xsr.prefctl a11		// temporarily disable prefetch (invalidates prefetch bufs!)
#endif
	add	a5, a3, a2			// a5 = end of range
	and	a4, a2, a7			// a4 = low end, rounded to containing cache line
	addi	a5, a5, /*XCHAL_DCACHE_LINESIZE*/-1
	and	a5, a5, a7			// a5 = high end, rounded to containing cache line
	movi	a7, XCHAL_DCACHE_SIZE/XCHAL_DCACHE_LINESIZE	// a7 = number of lines in dcache
	movi	a3, XCHAL_DCACHE_SIZE-XCHAL_DCACHE_LINESIZE	// way index
	mov	a6, a5
	//movi	a8, -XCHAL_DCACHE_SETSIZE	// use if LDCT gives non-zero index bits
	movi	a10, (XCHAL_DCACHE_SIZE/XCHAL_DCACHE_WAYS) - 1

	loopgtz a7, 1f
	ldct	a7, a3				// a7 = cache tag for dcache entry at index a3
	\instruction	a2, 0
	.begin schedule
	//extui	a9, a3, 0, XCHAL_DCACHE_SETWIDTH+XCHAL_DCACHE_LINEWIDTH
	and	a9, a3, a10
	addi	a3, a3, -XCHAL_DCACHE_LINESIZE
	.end schedule
	.begin schedule
	//and	a7, a7, a8	// uncomment if LDCT reports non-zero index bits
	maxu	a6, a6, a4	// a4 = low end of range
	minu	a2, a6, a5	// a5 = high end of range
	or	a6, a7, a9
	.end schedule
1:

	\instruction	a2, 0
	maxu	a6, a6, a4
	minu	a2, a6, a5
	\instruction	a2, 0
#if XCHAL_HAVE_PREFETCH
	wsr.prefctl a11		// restore prefetch
#endif
	isync_return_nop
	abi_return
#endif /* dcache supports hugerange */
// Jump to non-huge routine
7:	j.l	xthal_dcache_region_\name + ABI_ENTRY_MINSIZE, a4
	.size xthal_dcache_hugerange_\name, . - xthal_dcache_hugerange_\name
	.endm



	//  void  xthal_icache_hugerange_<name>(void *addr, unsigned size);
	//
	//  Invalidate icache entries for an arbitrarily large
	//  virtual address range with a single scan of the icache.
	//  Assumes no address translation, i.e. virtual = physical.
	//
	//  a2 = ptr to range
	//  a3 = size of range
	//
	//  Note:  -128 is a valid immediate for ADDI, but +128 is not,
	//  and ADDI can relax to ADDMI for multiples of 256.  So scanning
	//  cache backwards (from end to start) allows all cache line sizes
	//  without creating an extra instruction for the ADDI.
	//
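	//  Example (illustrative C sketch; code_dst and code_size are
	//  hypothetical):  after copying a large code image into place, stale
	//  icache contents can be discarded in a single cache scan.
	//
	//	xthal_dcache_hugerange_writeback(code_dst, code_size);	/* make new code visible */
	//	xthal_icache_hugerange_invalidate(code_dst, code_size);
	//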
	.macro icache_hugefunc  name, instruction
	.text
	.align	4
	.type	xthal_icache_hugerange_\name,@function
	.global	xthal_icache_hugerange_\name
xthal_icache_hugerange_\name:
	abi_entry
#if (!defined(XCHAL_HAVE_NX) || XCHAL_HAVE_NX == 0) && XCHAL_ICACHE_SIZE > 0 && \
	XCHAL_HAVE_ICACHE_TEST && XCHAL_HAVE_MINMAX && XCHAL_HAVE_LOOPS
	movi	a4, XCHAL_ICACHE_SIZE*2		// size at which to use huge algorithm
	movi	a7, -XCHAL_ICACHE_LINESIZE	// for rounding to cache line size
	bltu	a3, a4, 7f			// use normal (line-by-line hit) function
	add	a5, a3, a2			// a5 = end of range
	and	a4, a2, a7			// a4 = low end, rounded to containing cache line
	addi	a5, a5, XCHAL_ICACHE_LINESIZE-1
	and	a5, a5, a7			// a5 = high end, rounded to containing cache line
	movi	a7, XCHAL_ICACHE_SIZE/XCHAL_ICACHE_LINESIZE	// a7 = number of lines in icache
	movi	a3, XCHAL_ICACHE_SIZE-XCHAL_ICACHE_LINESIZE	// way index
	mov	a6, a5
	//movi	a8, -XCHAL_ICACHE_SETSIZE	// use if LICT gives non-zero index bits
	movi	a10, (XCHAL_ICACHE_SIZE/XCHAL_ICACHE_WAYS) - 1

	loopgtz a7, 1f
	lict	a7, a3				// a7 = cache tag for icache entry at index a3
	\instruction	a2, 0
	.begin schedule
	//extui	a9, a3, 0, XCHAL_ICACHE_SETWIDTH+XCHAL_ICACHE_LINEWIDTH
	and	a9, a3, a10
	addi	a3, a3, -XCHAL_ICACHE_LINESIZE
	.end schedule
	.begin schedule
	//and	a7, a7, a8	// uncomment if LICT reports non-zero index bits
	maxu	a6, a6, a4	// a4 = low end of range
	minu	a2, a6, a5	// a5 = high end of range
	or	a6, a7, a9
	.end schedule
1:

	\instruction	a2, 0
	maxu	a6, a6, a4
	minu	a2, a6, a5
	\instruction	a2, 0
	isync_return_nop
	abi_return
#endif /* icache supports hugerange */
7:	j.l	xthal_icache_region_\name + ABI_ENTRY_MINSIZE, a4
	.size xthal_icache_hugerange_\name, . - xthal_icache_hugerange_\name
	.endm




	.text

//----------------------------------------------------------------------
// Read CACHEATTR register
//----------------------------------------------------------------------


//  unsigned xthal_get_cacheattr(void);

DECLFUNC(xthal_get_cacheattr)
DECLFUNC(xthal_get_dcacheattr)
# if XCHAL_HAVE_CACHEATTR	/* single CACHEATTR register used for both I and D */
DECLFUNC(xthal_get_icacheattr)
# endif
	abi_entry
	dcacheattr_get
	abi_return
	endfunc



//  unsigned xthal_get_icacheattr(void);

# if !XCHAL_HAVE_CACHEATTR	/* possibly independent CACHEATTR states used for I and D */
DECLFUNC(xthal_get_icacheattr)
	abi_entry
	icacheattr_get
	abi_return
	endfunc
# endif



//----------------------------------------------------------------------
//  Write CACHEATTR register, or equivalent.
//----------------------------------------------------------------------

/*
 *  Set CACHEATTR register in a safe manner.
 *
 *	void  xthal_set_cacheattr( unsigned new_cacheattr );
 *	void  xthal_set_icacheattr( unsigned new_cacheattr );
 *	void  xthal_set_dcacheattr( unsigned new_cacheattr );
 */
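
/*
 *  Example (illustrative C sketch; the attribute value shown is hypothetical
 *  and configuration-dependent):  a read-modify-write of CACHEATTR that
 *  changes only the 4-bit attribute field of one 512 MB region.
 *
 *	unsigned attr = xthal_get_cacheattr();
 *	attr = (attr & ~(0xFu << 28)) | (0x2u << 28);	// region 7 -> attribute 0x2
 *	xthal_set_cacheattr(attr);
 */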


# if XCHAL_HAVE_CACHEATTR	/* single CACHEATTR register used for both I and D accesses */
DECLFUNC(xthal_set_icacheattr)
DECLFUNC(xthal_set_dcacheattr)
# endif
DECLFUNC(xthal_set_cacheattr)
	abi_entry
	cacheattr_set
	abi_return
	endfunc



#if XCHAL_HAVE_CACHEATTR

	/*
	 *  Already done above.
	 *
	 *  Since the icache and dcache cannot be enabled or disabled
	 *  independently, and there is no convenient place to store state
	 *  that would let us enable them only once both have been requested,
	 *  we simply enable both caches on any request to enable either one,
	 *  and disable both caches on any request to disable either one.
	 */

#elif XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR || (XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY)


DECLFUNC(xthal_set_icacheattr)
	abi_entry
	icacheattr_set
	isync_return_nop
	abi_return
	endfunc



DECLFUNC(xthal_set_dcacheattr)
	abi_entry
	dcacheattr_set
	abi_return
	endfunc


#else /* full MMU (pre-v3): */


//  These functions aren't applicable to arbitrary MMU configurations.
//  Do nothing in this case.

DECLFUNC(xthal_set_icacheattr)
DECLFUNC(xthal_set_dcacheattr)
	abi_entry
	abi_return
	endfunc


#endif /* cacheattr/MMU type */


//----------------------------------------------------------------------
// Determine (guess) whether caches are "enabled"
//----------------------------------------------------------------------

/*
 *  There is no "cache enable" bit in the Xtensa architecture,
 *  but we can use CACHEATTR (if it or its equivalent exists)
 *  as an indication that caches have been enabled.
 */
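
/*
 *  Example (illustrative C sketch; buf and len are hypothetical):  using the
 *  heuristic query before doing cache maintenance that would be pointless
 *  with the caches disabled.
 *
 *	if (xthal_dcache_is_enabled())
 *	    xthal_dcache_region_writeback(buf, len);
 */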

#if XCHAL_HAVE_CACHEATTR


DECLFUNC(xthal_icache_is_enabled)
DECLFUNC(xthal_dcache_is_enabled)
	abi_entry
	cacheattr_is_enabled	2f
	movi	a2, 0
	abi_return
2:	movi	a2, 1
	abi_return
	endfunc


#elif XCHAL_HAVE_MIMIC_CACHEATTR || XCHAL_HAVE_XLT_CACHEATTR


DECLFUNC(xthal_icache_is_enabled)
	abi_entry
	icacheattr_is_enabled	2f
	movi	a2, 0
	abi_return
2:	movi	a2, 1
	abi_return
	endfunc



DECLFUNC(xthal_dcache_is_enabled)
	abi_entry
	dcacheattr_is_enabled	2f
	movi	a2, 0
	abi_return
2:	movi	a2, 1
	abi_return
	endfunc


#else

//  These functions aren't applicable to arbitrary MMU configurations.
//  Assume caches are enabled in this case (!).


DECLFUNC(xthal_icache_is_enabled)
DECLFUNC(xthal_dcache_is_enabled)
	abi_entry
	movi	a2, 1
	abi_return
	endfunc

#endif



//----------------------------------------------------------------------
// invalidate the icache
//----------------------------------------------------------------------


// void xthal_icache_all_invalidate(void);

DECLFUNC(xthal_icache_all_invalidate)
	abi_entry
	icache_invalidate_all	a2, a3
	isync_return_nop
	abi_return
	endfunc

//----------------------------------------------------------------------
// invalidate the dcache
//----------------------------------------------------------------------



// void xthal_dcache_all_invalidate(void);

DECLFUNC(xthal_dcache_all_invalidate)
	abi_entry
	dcache_invalidate_all	a2, a3
	abi_return
	endfunc

//----------------------------------------------------------------------
// write dcache dirty data
//----------------------------------------------------------------------



// void xthal_dcache_all_writeback(void);

DECLFUNC(xthal_dcache_all_writeback)
	abi_entry
	dcache_writeback_all	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// write dcache dirty data and invalidate
//----------------------------------------------------------------------



// void xthal_dcache_all_writeback_inv(void);

DECLFUNC(xthal_dcache_all_writeback_inv)
	abi_entry
	dcache_writeback_inv_all	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock instructions from icache
//----------------------------------------------------------------------



// void xthal_icache_all_unlock(void);

DECLFUNC(xthal_icache_all_unlock)
	abi_entry
	icache_unlock_all	a2, a3
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock data from dcache
//----------------------------------------------------------------------



// void xthal_dcache_all_unlock(void);

DECLFUNC(xthal_dcache_all_unlock)
	abi_entry
	dcache_unlock_all	a2, a3
	abi_return
	endfunc

//----------------------------------------------------------------------
// invalidate the address range in the icache
//----------------------------------------------------------------------



// void xthal_icache_region_invalidate( void *addr, unsigned size );

DECLFUNC(xthal_icache_region_invalidate)
	abi_entry
	icache_invalidate_region	a2, a3, a4
	isync_return_nop
	abi_return
	endfunc



// void xthal_icache_hugerange_invalidate( void *addr, unsigned size );
icache_hugefunc	invalidate,	ihi



# if XCHAL_ICACHE_LINE_LOCKABLE
// void xthal_icache_hugerange_unlock( void *addr, unsigned size );
icache_hugefunc	unlock,		ihu
# endif



// void xthal_dcache_hugerange_invalidate( void *addr, unsigned size );
dcache_hugefunc	invalidate,	dhi



# if XCHAL_DCACHE_LINE_LOCKABLE
// void xthal_dcache_hugerange_unlock( void *addr, unsigned size );
dcache_hugefunc	unlock,		dhu
# endif



// void xthal_dcache_hugerange_writeback( void *addr, unsigned size );
dcache_hugefunc	writeback,	dhwb



// void xthal_dcache_hugerange_writeback_inv( void *addr, unsigned size );
dcache_hugefunc	writeback_inv,	dhwbi



//----------------------------------------------------------------------
// invalidate the address range in the dcache
//----------------------------------------------------------------------



// void xthal_dcache_region_invalidate( void *addr, unsigned size );

DECLFUNC(xthal_dcache_region_invalidate)
	abi_entry
	dcache_invalidate_region	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// write dcache region dirty data
//----------------------------------------------------------------------



// void xthal_dcache_region_writeback( void *addr, unsigned size );

DECLFUNC(xthal_dcache_region_writeback)
	abi_entry
	dcache_writeback_region		a2, a3, a4, a5
	abi_return
	endfunc

//----------------------------------------------------------------------
// write dcache region dirty data and invalidate
//----------------------------------------------------------------------



// void xthal_dcache_region_writeback_inv( void *addr, unsigned size );

DECLFUNC(xthal_dcache_region_writeback_inv)
	abi_entry
	dcache_writeback_inv_region	a2, a3, a4, a5
	abi_return
	endfunc
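
//  Example (illustrative C sketch; the buffers and the start_dma() driver
//  call are hypothetical):  typical use of the region routines around a DMA
//  transfer on a non-coherent configuration.
//
//	xthal_dcache_region_writeback(tx_buf, tx_len);	/* device will read memory */
//	start_dma(tx_buf, tx_len, rx_buf, rx_len);	/* hypothetical driver call */
//	/* ... wait for DMA completion ... */
//	xthal_dcache_region_invalidate(rx_buf, rx_len);	/* before CPU reads DMA'd data */
//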

//----------------------------------------------------------------------
// lock instructions in icache region
//----------------------------------------------------------------------



// void xthal_icache_region_lock( void *addr, unsigned size );

DECLFUNC(xthal_icache_region_lock)
	abi_entry
	icache_lock_region	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// lock data in dcache region
//----------------------------------------------------------------------



// void xthal_dcache_region_lock( void *addr, unsigned size );

DECLFUNC(xthal_dcache_region_lock)
	abi_entry
	dcache_lock_region	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock instructions from icache region
//----------------------------------------------------------------------



// void xthal_icache_region_unlock( void *addr, unsigned size );

DECLFUNC(xthal_icache_region_unlock)
	abi_entry
	icache_unlock_region	a2, a3, a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock data from dcache region
//----------------------------------------------------------------------



// void xthal_dcache_region_unlock( void *addr, unsigned size );

DECLFUNC(xthal_dcache_region_unlock)
	abi_entry
	dcache_unlock_region	a2, a3, a4
	abi_return
	endfunc


//----------------------------------------------------------------------
// invalidate single icache line
//----------------------------------------------------------------------



// void xthal_icache_line_invalidate(void *addr);

DECLFUNC(xthal_icache_line_invalidate)
	abi_entry
	icache_invalidate_line	a2, 0
	isync_return_nop
	abi_return
	endfunc


//----------------------------------------------------------------------
// invalidate single dcache line
//----------------------------------------------------------------------



// void xthal_dcache_line_invalidate(void *addr);

DECLFUNC(xthal_dcache_line_invalidate)
	abi_entry
	dcache_invalidate_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// write single dcache line dirty data
//----------------------------------------------------------------------



// void xthal_dcache_line_writeback(void *addr);

DECLFUNC(xthal_dcache_line_writeback)
	abi_entry
	dcache_writeback_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// write single dcache line dirty data and invalidate
//----------------------------------------------------------------------



// void xthal_dcache_line_writeback_inv(void *addr);

DECLFUNC(xthal_dcache_line_writeback_inv)
	abi_entry
	dcache_writeback_inv_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// lock instructions in icache line
//----------------------------------------------------------------------



// void xthal_icache_line_lock(void *addr);

DECLFUNC(xthal_icache_line_lock)
	abi_entry
	icache_lock_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// lock data in dcache line
//----------------------------------------------------------------------



// void xthal_dcache_line_lock(void *addr);

DECLFUNC(xthal_dcache_line_lock)
	abi_entry
	dcache_lock_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock instructions from icache line
//----------------------------------------------------------------------



// void xthal_icache_line_unlock(void *addr);

DECLFUNC(xthal_icache_line_unlock)
	abi_entry
	icache_unlock_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// unlock data from dcache line
//----------------------------------------------------------------------



// void xthal_dcache_line_unlock(void *addr);

DECLFUNC(xthal_dcache_line_unlock)
	abi_entry
	dcache_unlock_line	a2, 0
	abi_return
	endfunc

//----------------------------------------------------------------------
// sync icache and memory (???)
//----------------------------------------------------------------------



// void xthal_icache_sync(void);

DECLFUNC(xthal_icache_sync)
	abi_entry
	icache_sync	a2
	isync_return_nop
	abi_return
	endfunc

//----------------------------------------------------------------------
// sync dcache and memory (???)
//----------------------------------------------------------------------



// void xthal_dcache_sync(void);

DECLFUNC(xthal_dcache_sync)
	abi_entry
	dcache_sync	a2
	abi_return
	endfunc

//----------------------------------------------------------------------
// Get/Set icache number of ways enabled
//----------------------------------------------------------------------



// unsigned int xthal_icache_get_ways(void);

DECLFUNC(xthal_icache_get_ways)
	abi_entry
	icache_get_ways	a2
	abi_return
	endfunc



// void xthal_icache_set_ways(unsigned int ways);

DECLFUNC(xthal_icache_set_ways)
	abi_entry
	icache_set_ways	a2 a3 a4
	abi_return
	endfunc

//----------------------------------------------------------------------
// Get/Set dcache number of ways enabled
//----------------------------------------------------------------------



// unsigned int xthal_dcache_get_ways(void);

DECLFUNC(xthal_dcache_get_ways)
	abi_entry
	dcache_get_ways a2
	abi_return
	endfunc



// void xthal_dcache_set_ways(unsigned int ways);

DECLFUNC(xthal_dcache_set_ways)
	abi_entry
	dcache_set_ways a2 a3 a4
	abi_return
	endfunc
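
//  Example (illustrative C sketch):  reducing the number of enabled dcache
//  ways, e.g. to save power, and restoring the previous setting later.
//
//	unsigned ways = xthal_dcache_get_ways();
//	xthal_dcache_set_ways(1);		/* keep a single way enabled */
//	/* ... */
//	xthal_dcache_set_ways(ways);		/* restore previous number of ways */
//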

//----------------------------------------------------------------------
// opt into and out of coherence
//----------------------------------------------------------------------



// The opt-in routine assumes cache was initialized at reset,
// so it's equivalent to the low-level coherence_on routine.

// void xthal_cache_coherence_optin(void)
// void xthal_cache_coherence_on(void)

DECLFUNC(xthal_cache_coherence_optin)
DECLFUNC(xthal_cache_coherence_on)
	abi_entry
	cache_coherence_on	a2, a3
	abi_return
	endfunc



// The coherence_off routines should not normally be called directly.
// Use the xthal_cache_coherence_optout() C routine instead
// (which first empties the cache).

// void xthal_cache_coherence_off

DECLFUNC(xthal_cache_coherence_off)
	abi_entry
	cache_coherence_off	a2, a3
	abi_return
	endfunc
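
//  Example (illustrative C sketch):  a core joining and later leaving the
//  coherence domain.  Leaving should go through the xthal_cache_coherence_optout()
//  C routine, which empties the caches before calling the low-level routine above.
//
//	xthal_cache_coherence_optin();		/* start participating in coherence */
//	/* ... */
//	xthal_cache_coherence_optout();		/* flush caches, then drop out of coherence */
//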


//----------------------------------------------------------------------
// Control cache prefetch
//----------------------------------------------------------------------



# if XCHAL_HAVE_BE
#  define aH a2	/* msb word = prefctl mask */
#  define aL a3 /* lsb word = prefctl value */
# else
#  define aH a3	/* msb word = prefctl mask */
#  define aL a2 /* lsb word = prefctl value */
# endif

// Set cache prefetch state (-1=enable, 0=disable, and see XTHAL_*PREFETCH_*),
// and return previous one.
//
// int  xthal_set_cache_prefetch_long( unsigned long long );
//
DECLFUNC(xthal_set_cache_prefetch_long)
	abi_entry
# if XCHAL_HAVE_PREFETCH
	movi	a5, XCHAL_CACHE_PREFCTL_DEFAULT
	addi	a4, aL, 1	// does prefctl value aL == -1 ?
	moveqz	aL, a5, a4	// if yes (XTHAL_PREFETCH_ENABLE), set it to default
	movgez	a2, aL, aL	// if the high bit of aL is clear, aL is the new prefctl value, so move it to a2
	bgez	aL, 1f		// high bit set indicates masked update
	ssai	16		// 16-bit right shifts
	src	a5, aL, aH	// get 16-bit-swapped 32-bit value
	src	a5, a5, a5	// get 32-bit value (rotate by 16)
	rsr.prefctl a4
	src	a3, aH, aL	// get 32-bit mask
	or	a4, a4, a3	// set masked bits
	xor	a4, a4, a3	// clear masked bits
	and	a5, a5, a3	// only use masked bits
	or	a2, a4, a5	// combine masked bits
1:
#  if XCHAL_HW_MIN_VERSION <= XTENSA_HWVERSION_RC_2010_1    /* for erratum #325 */
	j 1f ; .align 8 ; 1: xsr.prefctl a2 ; isync	// ensure XSR.PREFCTL;ISYNC wholly within an icache line
#  else
	xsr.prefctl a2
#  endif
# else
	movi	a2, 0
# endif
	abi_return
	endfunc
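
//  Example (illustrative C sketch):  simple enable/disable of hardware
//  prefetch; the previous PREFCTL value is returned by each call.
//
//	int old = xthal_set_cache_prefetch_long(-1);	/* -1 = enable (default PREFCTL value) */
//	/* ... */
//	(void) xthal_set_cache_prefetch_long(0);	/* 0 = disable prefetch */
//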

//----------------------------------------------------------------------



// FOR BACKWARD COMPATIBILITY WITH PRE-RF RELEASE OBJECT CODE ONLY.
// Set cache prefetch state (-1=enable, 0=disable, and see the
//   definitions of XTHAL_*PREFETCH_* with only the lower 32 bits set),
// and return previous one.
// int  xthal_set_cache_prefetch( int )
//
DECLFUNC(xthal_set_cache_prefetch)
	abi_entry
# if XCHAL_HAVE_PREFETCH
	movi	a3, XCHAL_CACHE_PREFCTL_DEFAULT
	addi	a4, a2, 1	// does a2 == -1 ?
	moveqz	a2, a3, a4	// if yes (XTHAL_PREFETCH_ENABLE), set it to default
	bbci.l	a2, 31, 1f	// high bit set indicates masked update
	rsr.prefctl a4
	extui	a5, a2, 16, 15
	or	a4, a4, a5	// set masked bits
	xor	a4, a4, a5	// clear masked bits
	and	a2, a2, a5	// only use masked bits
	or	a2, a4, a2	// combine masked bits
1:
#  if XCHAL_HW_MIN_VERSION <= XTENSA_HWVERSION_RC_2010_1    /* for erratum #325 */
	j 1f ; .align 8 ; 1: xsr.prefctl a2 ; isync	// ensure XSR.PREFCTL;ISYNC wholly within an icache line
#  else
	xsr.prefctl a2
#  endif
# else
	movi	a2, 0
# endif
	abi_return
	endfunc

//----------------------------------------------------------------------



// Return current cache prefetch state.
// int  xthal_get_cache_prefetch( void )
DECLFUNC(xthal_get_cache_prefetch)
	abi_entry
# if XCHAL_HAVE_PREFETCH
	rsr.prefctl a2
# else
	movi	a2, 0
# endif
	abi_return
	endfunc

//----------------------------------------------------------------------
// Misc configuration info
//----------------------------------------------------------------------

// Eventually these will move to their own file:
	.set	xthals_hw_configid0, XCHAL_HW_CONFIGID0

	.set	xthals_hw_configid1, XCHAL_HW_CONFIGID1

	.set	xthals_release_major, XTHAL_RELEASE_MAJOR

	.set	xthals_release_minor, XTHAL_RELEASE_MINOR


	.global	xthals_hw_configid0, xthals_hw_configid1
	.global	xthals_release_major, xthals_release_minor

//----------------------------------------------------------------------

