; apple/xnu (xnu-344.32) — osfmk/ppc/savearea_asm.s
1 /*
2 * Copyright (c) 2000 Apple Computer, Inc. All rights reserved.
3 *
4 * @APPLE_LICENSE_HEADER_START@
5 *
6 * The contents of this file constitute Original Code as defined in and
7 * are subject to the Apple Public Source License Version 1.1 (the
8 * "License"). You may not use this file except in compliance with the
9 * License. Please obtain a copy of the License at
10 * http://www.apple.com/publicsource and read it before using this file.
11 *
12 * This Original Code and all software distributed under the License are
13 * distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, EITHER
14 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
15 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT. Please see the
17 * License for the specific language governing rights and limitations
18 * under the License.
19 *
20 * @APPLE_LICENSE_HEADER_END@
21 */
22
23 #define FPVECDBG 0
24
25 #include <assym.s>
26 #include <debug.h>
27 #include <cpus.h>
28 #include <db_machine_commands.h>
29 #include <mach_rt.h>
30
31 #include <mach_debug.h>
32 #include <ppc/asm.h>
33 #include <ppc/proc_reg.h>
34 #include <ppc/exception.h>
35 #include <ppc/Performance.h>
36 #include <ppc/exception.h>
37 #include <ppc/pmap_internals.h>
38 #include <ppc/savearea.h>
39 #include <mach/ppc/vm_param.h>
40
41 .text
42
43 /*
44 * This routine will add a savearea block to the free list.
45 * Note really well: we can take NO exceptions of any kind,
46 * including a PTE miss once the savearea lock is held. That's
 * a guaranteed deadlock. That means we must disable interruptions
48 * and turn all translation off.
49 *
50 * We also queue the block to the free pool list. This is a
51 * circular double linked list. Because this block has no free entries,
52 * it gets queued to the end of the list
53 *
54 */
55
56 .align 5
57 .globl EXT(save_queue)
58
59 LEXT(save_queue)
60
61 mfsprg r9,2 ; Get the feature flags
62 mr r11,r3 ; Save the block
63 mtcrf 0x04,r9 ; Set the features
64 mfmsr r12 ; Get the MSR
65 rlwinm r12,r12,0,MSR_FP_BIT+1,MSR_FP_BIT-1 ; Force floating point off
66 rlwinm r12,r12,0,MSR_VEC_BIT+1,MSR_VEC_BIT-1 ; Force vectors off
67 lis r10,hi16(EXT(saveanchor)) ; Get the high part of the anchor
68 andi. r3,r12,0x7FCF ; Turn off all translation and rupts
69 ori r10,r10,lo16(EXT(saveanchor)) ; Bottom half of the anchor
70
71 bt pfNoMSRirb,sqNoMSR ; No MSR...
72
73 mtmsr r3 ; Translation and all off
74 isync ; Toss prefetch
75 b sqNoMSRx
76
77 sqNoMSR: li r0,loadMSR ; Get the MSR setter SC
78 sc ; Set it
79 sqNoMSRx:
80
81 rlwinm. r3,r11,0,0,19 ; (TEST/DEBUG)
82 #if 0
83 bne+ notrapit ; (TEST/DEBUG)
84 BREAKPOINT_TRAP ; (TEST/DEBUG)
85 notrapit: ; (TEST/DEBUG)
86 #endif
87
88
89 li r8,sac_cnt ; Get the number of saveareas per page
90 mr r4,r11 ; Point to start of chain
91 li r0,SAVempty ; Get empty marker
92
93 sqchain: addic. r8,r8,-1 ; Keep track of how many we did
94 stb r0,SAVflags+2(r4) ; Set empty
95 addi r9,r4,SAVsize ; Point to the next slot
96 ble- sqchaindn ; We are done with the chain
97 stw r9,SAVprev(r4) ; Set this chain
98 mr r4,r9 ; Step to the next
99 b sqchain ; Fill the whole block...
100
101 .align 5
102
103 sqchaindn: mflr r9 ; Save the return address
104 bl savelock ; Go lock the save anchor
105
106 lwz r7,SVfree(r10) ; Get the free save area list anchor
107 lwz r6,SVfreecnt(r10) ; Get the number of free saveareas
108
109 stw r11,SVfree(r10) ; Queue in the new one
110 addi r6,r6,sac_cnt ; Count the ones we are linking in
111 stw r7,SAVprev(r4) ; Queue the old first one off of us
112 stw r6,SVfreecnt(r10) ; Save the new count
113
114 bl saveunlock ; Unlock the list and set the adjust count
115
116 mtlr r9 ; Restore the return
117 mtmsr r12 ; Restore interrupts and translation
118 isync ; Dump any speculations
119
120 #if FPVECDBG
121 mfsprg r2,0 ; (TEST/DEBUG)
122 lwz r2,next_savearea(r2) ; (TEST/DEBUG)
123 mr. r2,r2 ; (TEST/DEBUG)
124 beqlr- ; (TEST/DEBUG)
125 lis r0,hi16(CutTrace) ; (TEST/DEBUG)
126 li r2,0x2201 ; (TEST/DEBUG)
127 oris r0,r0,lo16(CutTrace) ; (TEST/DEBUG)
128 sc ; (TEST/DEBUG)
129 #endif
130
131 blr ; Leave...
132
133 /*
134 * This routine will obtain a savearea.
135 * Note really well: we can take NO exceptions of any kind,
136 * including a PTE miss during this process. That's
 * a guaranteed deadlock or screwup. That means we must disable interruptions
138 * and turn all translation off.
139 *
140 * We pass back the virtual address of the one we just obtained
141 * or a zero if none to allocate.
142 *
143 * First we try the local list. If that is below a threshold, we will
144 * lock the free list and replenish.
145 *
146 * If there are no saveareas in either list, we will install the
147 * backpocket and choke.
148 *
149 * The save_get_phys call assumes that translation and interruptions are
150 * already off and that the returned address is physical.
151 *
152 * Note that save_get_init is used in initial processor startup only. It
153 * is used because translation is on, but no tables exist yet and we have
154 * no V=R BAT registers that cover the entire physical memory.
155 *
156 *
157 * NOTE!!! NEVER USE R0, R2, or R12 IN HERE THAT WAY WE DON'T NEED A
158 * STACK FRAME IN FPU_SAVE, FPU_SWITCH, VEC_SAVE, OR VEC_SWITCH.
159 */
160
161 .align 5
162 .globl EXT(save_get_init)
163
164 LEXT(save_get_init)
165
166 mfsprg r9,2 ; Get the feature flags
167 mfmsr r12 ; Get the MSR
168 rlwinm r12,r12,0,MSR_FP_BIT+1,MSR_FP_BIT-1 ; Force floating point off
169 rlwinm r12,r12,0,MSR_VEC_BIT+1,MSR_VEC_BIT-1 ; Force vectors off
170 mtcrf 0x04,r9 ; Set the features
171 andi. r3,r12,0x7FCF ; Turn off all translation and interrupts
172
173 bt pfNoMSRirb,sgiNoMSR ; No MSR...
174
175 mtmsr r3 ; Translation and all off
176 isync ; Toss prefetch
177 b sgiGetPhys ; Go get the savearea...
178
179 sgiNoMSR: li r0,loadMSR ; Get the MSR setter SC
180 sc ; Set it
181
182 sgiGetPhys: mflr r11 ; Save R11 (save_get_phys does not use this one)
183 bl EXT(save_get_phys) ; Get a savearea
184 mtlr r11 ; Restore return
185
186 mtmsr r12 ; Restore translation and exceptions
187 isync ; Make sure about it
188 blr ; Return...
189
190 .align 5
191 .globl EXT(save_get)
192
193 LEXT(save_get)
194
195 crclr cr1_eq ; Clear CR1_eq to indicate we want virtual address
196 mfsprg r9,2 ; Get the feature flags
197 mfmsr r11 ; Get the MSR
198 rlwinm. r3,r11,0,MSR_EE_BIT,MSR_EE_BIT ; Are interrupts enabled here?
199 beq+ sgnomess ; Nope, do not mess with fp or vec...
200 rlwinm r11,r11,0,MSR_FP_BIT+1,MSR_FP_BIT-1 ; Force floating point off
201 rlwinm r11,r11,0,MSR_VEC_BIT+1,MSR_VEC_BIT-1 ; Force vectors off
202
203 sgnomess: mtcrf 0x04,r9 ; Set the features
204 andi. r3,r11,0x7FCF ; Turn off all translation and interrupts
205
206 bt pfNoMSRirb,sgNoMSR ; No MSR...
207
208 mtmsr r3 ; Translation and all off
209 isync ; Toss prefetch
210 b csaveget
211
212 sgNoMSR: mr r9,r0 ; Save this
213 li r0,loadMSR ; Get the MSR setter SC
214 sc ; Set it
215 mr r0,r9 ; Restore it
216
217 b csaveget ; Join the common...
218
219 .align 5
220 .globl EXT(save_get_phys)
221
222 LEXT(save_get_phys)
223
224 crset cr1_eq ; Clear CR1_ne to indicate we want physical address
225
226 csaveget: mfsprg r9,0 ; Get the per proc
227 lis r10,hi16(EXT(saveanchor)) ; Get the high part of the anchor
228 lwz r8,lclfreecnt(r9) ; Get the count
229 lwz r3,lclfree(r9) ; Get the start of local savearea list
230 cmplwi r8,LocalSaveMin ; Are we too low?
231 ori r10,r10,lo16(EXT(saveanchor)) ; Bottom half of the anchor
232 ble- sglow ; We are too low and need to grow list...
233
234 sgreserve: lis r10,0x5555 ; Get top of empty indication
235 li r6,0 ; zero value
236 lwz r4,SAVprev(r3) ; Chain to the next one
237 stw r6,SAVflags(r3) ; Clear flags
238 ori r10,r10,0x5555 ; And the bottom
239 subi r8,r8,1 ; Back down count
240 stw r10,SAVprev(r3) ; Trash this
241 stw r10,SAVlevel(r3) ; Trash this
242 stw r4,lclfree(r9) ; Unchain first savearea
243 rlwinm r5,r3,0,0,19 ; Back up to first page where SAC is
244 stw r10,SAVact(r3) ; Trash this
245 stw r8,lclfreecnt(r9) ; Set new count
246
247 btlr+ cr1_eq ; Return now if physical request
248
249 lwz r5,SACvrswap(r5) ; Get the virtual to real translation
250
251 mtmsr r11 ; Restore translation and exceptions
252 isync ; Make sure about it
253
254 #if FPVECDBG
255 ; Note: we do not trace the physical request because this ususally comes from the
256 ; exception vector code
257
258 mr r6,r0 ; (TEST/DEBUG)
259 mr r7,r2 ; (TEST/DEBUG)
260 lis r0,HIGH_ADDR(CutTrace) ; (TEST/DEBUG)
261 li r2,0x2203 ; (TEST/DEBUG)
262 oris r0,r0,LOW_ADDR(CutTrace) ; (TEST/DEBUG)
263 sc ; (TEST/DEBUG)
264 mr r0,r6 ; (TEST/DEBUG)
265 mr r2,r7 ; (TEST/DEBUG)
266 #endif
267
268 xor r3,r3,r5 ; Get the virtual address
269 blr ; Leave...
270
271 ;
272 ; Here is the slow path which is executed when there are not enough in the local list
273 ;
274
275 .align 5
276
277 sglow: mflr r9 ; Save the return
278 bl savelock ; Go lock up the anchor
279 mtlr r9 ; Restore the return
280
281 subfic r5,r8,LocalSaveTarget ; Get the number of saveareas we need to grab to get to target
282 lwz r9,SVfreecnt(r10) ; Get the number on this list
283 lwz r8,SVfree(r10) ; Get the head of the save area list
284
285 sub r3,r9,r5 ; Get number left after we swipe enough for local list
286 srawi r3,r3,31 ; Get 0 if enough or 0xFFFFFFFF if not
287 andc r4,r5,r3 ; Get number to get if there are enough, 0 otherwise
288 and r5,r9,r3 ; Get 0 if there are enough, number on list otherwise
289 or. r5,r4,r5 ; Get the number we will move
290 beq- sgnofree ; There are none to get...
291
292 mtctr r5 ; Get loop count
293 mr r6,r8 ; Remember the first in the list
294
295 sgtrimf: bdz sgtfdone ; Count down and branch when we hit 0...
296 lwz r8,SAVprev(r8) ; Get the next
297 b sgtrimf ; Keep going...
298
299 .align 5
300
301 sgtfdone: lwz r7,SAVprev(r8) ; Get the next one
302 lwz r4,SVinuse(r10) ; Get the in use count
303 sub r9,r9,r5 ; Count down what we stole
304 stw r7,SVfree(r10) ; Set the new first in list
305 add r4,r4,r5 ; Count the ones we just put in the local list as "in use"
306 stw r9,SVfreecnt(r10) ; Set the new count
307 mfsprg r9,0 ; Get the per proc
308 stw r4,SVinuse(r10) ; Set the new in use count
309
310 lwz r4,lclfree(r9) ; Get the old head of list
311 lwz r3,lclfreecnt(r9) ; Get the old count
312 stw r6,lclfree(r9) ; Set the new head of the list
313 add r3,r3,r5 ; Get the new count
314 stw r4,SAVprev(r8) ; Point to the old head
315 stw r3,lclfreecnt(r9) ; Set the new count
316
317 mflr r9 ; Save the return
318 bl saveunlock ; Update the adjust field and unlock
319 mtlr r9 ; Restore return
320 b csaveget ; Start over and finally allocate the savearea...
321
322 ;
323 ; The local list is below the repopulate threshold and the free list is empty.
324 ; First we check if there are any left in the local list and if so, we allow
325 ; them to be allocated. If not, we release the backpocket list and choke.
326 ; There is nothing more that we can do at this point. Hopefully we stay alive
327 ; long enough to grab some much-needed panic information.
328 ;
329
330 sgnofree: mfsprg r9,0 ; Get the per proc
331 lwz r8,lclfreecnt(r9) ; Get the count
332 lwz r3,lclfree(r9) ; Get the start of local savearea list
333 mr. r8,r8 ; Are there any reserve to get?
334
335 mflr r9 ; Save the return
336 beq- sgchoke ; No, go choke and die...
337 bl saveunlock ; Update the adjust field and unlock
338 mtlr r9 ; Restore return
339
340 mfsprg r9,0 ; Get the per proc again
341 lwz r3,lclfree(r9) ; Get the start of local savearea list
342 lwz r8,lclfreecnt(r9) ; Get the count
343 b sgreserve ; We have some left, dip on in...
344
345 ;
346 ; We who are about to die salute you. The savearea chain is messed up or
347 ; empty. Add in a few so we have enough to take down the system.
348 ;
349
350 sgchoke: lis r9,hi16(EXT(backpocket)) ; Get high order of back pocket
351 ori r9,r9,lo16(EXT(backpocket)) ; and low part
352
353 lwz r8,SVfreecnt(r9) ; Get the new number of free elements
354 lwz r7,SVfree(r9) ; Get the head of the chain
355 lwz r6,SVinuse(r10) ; Get total in the old list
356
357 stw r8,SVfreecnt(r10) ; Set the new number of free elements
358 add r6,r6,r8 ; Add in the new ones
359 stw r7,SVfree(r10) ; Set the new head of the chain
360 stw r6,SVinuse(r10) ; Set total in the new list
361
362 lis r0,hi16(Choke) ; Set choke firmware call
363 li r7,0 ; Get a clear register to unlock
364 ori r0,r0,lo16(Choke) ; Set the rest of the choke call
365 li r3,failNoSavearea ; Set failure code
366
367 sync ; Make sure all is committed
368 stw r7,SVlock(r10) ; Unlock the free list
369 sc ; System ABEND
370
371
372
373 /*
374 * This routine will return a savearea to the free list.
375 * Note really well: we can take NO exceptions of any kind,
376 * including a PTE miss once the savearea lock is held. That's
 * a guaranteed deadlock. That means we must disable interruptions
378 * and turn all translation off.
379 *
380 * We take a virtual address for save_ret. For save_ret_phys we
381 * assume we are already physical/interrupts off and the address is physical.
382 *
383 * Here's a tricky bit, and important:
384 *
385 * When we trim the list, we NEVER trim the very first one. This is because that is
386 * the very last one released and the exception exit code will release the savearea
387 * BEFORE it is done using it. Wouldn't be too good if another processor started
388 * using it, eh? So for this case, we are safe so long as the savearea stays on
389 * the local list. (Note: the exit routine needs to do this because it is in the
390 * process of restoring all context and it needs to keep it until the last second.)
391 *
392 */
393
394 ;
395 ; Note: when called from interrupt enabled code, we want to turn off vector and
396 ; floating point because we can not guarantee that the enablement will not change
397 ; while we hold a copy of the MSR. We force it off so that the lazy switcher will
; turn it back on if used. However, we need to NOT change it if save_ret or save_get
; is called with interrupts disabled. This is because both of these routines are
; called from within the context switcher and changing the enablement would be
; very, very bad..... (especially from within the lazy switcher)
402 ;
403
404 .align 5
405 .globl EXT(save_ret)
406
407 LEXT(save_ret)
408
409 mfmsr r12 ; Get the MSR
410 rlwinm. r9,r12,0,MSR_EE_BIT,MSR_EE_BIT ; Are interrupts enabled here?
411 beq+ EXT(save_ret_join) ; Nope, do not mess with fp or vec...
412 rlwinm r12,r12,0,MSR_FP_BIT+1,MSR_FP_BIT-1 ; Force floating point off
413 rlwinm r12,r12,0,MSR_VEC_BIT+1,MSR_VEC_BIT-1 ; Force vectors off
414
415 .globl EXT(save_ret_join)
416
417 LEXT(save_ret_join)
418 crclr cr1_eq ; Clear CR1_ne to indicate we have virtual address
419 mfsprg r9,2 ; Get the feature flags
420 rlwinm r6,r3,0,0,19 ; Round back down to the savearea page block
421 lwz r5,SACvrswap(r6) ; Get the conversion to real
422 mtcrf 0x04,r9 ; Set the features
423 mfsprg r9,0 ; Get the per proc
424 xor r8,r3,r5 ; Get the real address of the savearea
425 andi. r3,r12,0x7FCF ; Turn off all translation and rupts
426
427 bt pfNoMSRirb,srNoMSR ; No MSR...
428
429 mtmsr r3 ; Translation and all off
430 isync ; Toss prefetch
431 b srcommon
432
433 .align 5
434
435 srNoMSR: li r0,loadMSR ; Get the MSR setter SC
436 sc ; Set it
437 srNoMSRx: b srcommon ; Join up below...
438
439
440 .align 5
441 .globl EXT(save_ret_phys)
442
443 LEXT(save_ret_phys)
444
445 mfsprg r9,0 ; Get the per proc
446 crset cr1_eq ; Clear CR1_ne to indicate we have physical address
447 mr r8,r3 ; Save the savearea address
448
449 nop
450
451 srcommon:
452 li r0,SAVempty ; Get marker for free savearea
453 lwz r7,lclfreecnt(r9) ; Get the local count
454 lwz r6,lclfree(r9) ; Get the old local header
455 addi r7,r7,1 ; Pop up the free count
456 stw r6,SAVprev(r8) ; Plant free chain pointer
457 cmplwi r7,LocalSaveMax ; Has the list gotten too long?
458 stb r0,SAVflags+2(r8) ; Mark savearea free
459 stw r8,lclfree(r9) ; Chain us on in
460 stw r7,lclfreecnt(r9) ; Bump up the count
461 bgt- srtrim ; List is too long, go trim it...
462
463 btlr cr1_eq ; Leave if we were a physical request...
464
465 mtmsr r12 ; Restore translation and exceptions
466 isync ; Make sure about it
467
468 #if FPVECDBG
469 lis r0,HIGH_ADDR(CutTrace) ; (TEST/DEBUG)
470 li r2,0x2204 ; (TEST/DEBUG)
471 mr r3,r8 ; (TEST/DEBUG)
472 oris r0,r0,LOW_ADDR(CutTrace) ; (TEST/DEBUG)
473 sc ; (TEST/DEBUG)
474 #endif
475 blr ; Leave...
476
477 ;
478 ; The local savearea chain has gotten too long. Trim it down to the target.
479 ; Note: never trim the first one, just skip over it.
480 ;
481
482 .align 5
483
484 srtrim:
485 mr r2,r8 ; Save the guy we are releasing
486 lwz r8,SAVprev(r8) ; Skip over the first
487 subi r7,r7,LocalSaveTarget ; Figure out how much to trim
488 mr r6,r8 ; Save the first one to trim
489 mr r5,r7 ; Save the number we are trimming
490
491 srtrimming: addic. r7,r7,-1 ; Any left to do?
492 ble- srtrimmed ; Nope...
493 lwz r8,SAVprev(r8) ; Skip to the next one
494 b srtrimming ; Keep going...
495
496 .align 5
497
498 srtrimmed: lis r10,hi16(EXT(saveanchor)) ; Get the high part of the anchor
499 lwz r7,SAVprev(r8) ; Point to the next one
500 ori r10,r10,lo16(EXT(saveanchor)) ; Bottom half of the anchor
501 li r4,LocalSaveTarget ; Set the target count
502 stw r7,SAVprev(r2) ; Trim stuff leaving the one just released as first
503 stw r4,lclfreecnt(r9) ; Set the current count
504
505 mflr r9 ; Save the return
506 bl savelock ; Lock up the anchor
507
508 lwz r3,SVfree(r10) ; Get the old head of the free list
509 lwz r4,SVfreecnt(r10) ; Get the number of free ones
510 lwz r7,SVinuse(r10) ; Get the number that are in use
511 stw r6,SVfree(r10) ; Point to the first trimmed savearea
512 add r4,r4,r5 ; Add number trimmed to free count
513 stw r3,SAVprev(r8) ; Chain the old head to the tail of the trimmed guys
514 sub r7,r7,r5 ; Remove the trims from the in use count
515 stw r4,SVfreecnt(r10) ; Set new free count
516 stw r7,SVinuse(r10) ; Set new in use count
517
518 bl saveunlock ; Set adjust count and unlock the saveanchor
519
520 mtlr r9 ; Restore the return
521
522 btlr+ cr1_eq ; Leave if we were a physical request...
523
524 mtmsr r12 ; Restore translation and exceptions
525 isync ; Make sure about it
526
527 #if FPVECDBG
528 lis r0,HIGH_ADDR(CutTrace) ; (TEST/DEBUG)
529 mr r3,r2 ; (TEST/DEBUG)
530 li r2,0x2205 ; (TEST/DEBUG)
531 oris r0,r0,LOW_ADDR(CutTrace) ; (TEST/DEBUG)
532 sc ; (TEST/DEBUG)
533 #endif
534 blr ; Leave...
535
536
537 ;
; NOTE: This is the most complicated part of savearea maintenance.
539 ; Expect errors here.......
540 ;
541 ; save_trim_free - this routine will trim the free list down to the target count.
542 ; It trims the list and, if the pool page was fully allocated, puts that page on
543 ; the start of the pool list.
544 ;
545 ; If the savearea being released is the last on a pool page (i.e., all entries
546 ; are released), the page is dequeued from the pool and queued to any other
547 ; found during this scan. Note that this queue is maintained virtually.
548 ;
549 ; When the scan is done, the saveanchor lock is released and the list of
550 ; freed pool pages is returned.
551
552
553 ; For latency sake we may want to revisit this code. If we are trimming a
554 ; large number of saveareas, we could be disabled and holding the savearea lock
555 ; for quite a while. It may be that we want to break the trim down into parts.
556 ; Possibly trimming the free list, then individually pushing them into the free pool.
557 ;
558 ; This function expects to be called with translation on and a valid stack.
559 ;
560
561 .align 5
562 .globl EXT(save_trim_free)
563
564 LEXT(save_trim_free)
565
566 subi r1,r1,(FM_ALIGN(16)+FM_SIZE) ; Make space for 4 registers on stack
567 mfsprg r9,2 ; Get the feature flags
568 stw r28,FM_SIZE+0(r1) ; Save R28
569 mfmsr r12 ; Get the MSR
570 rlwinm r12,r12,0,MSR_FP_BIT+1,MSR_FP_BIT-1 ; Force floating point off
571 rlwinm r12,r12,0,MSR_VEC_BIT+1,MSR_VEC_BIT-1 ; Force vectors off
572 stw r29,FM_SIZE+4(r1) ; Save R28
573 mtcrf 0x04,r9 ; Set the features
574 stw r30,FM_SIZE+8(r1) ; Save R28
575 lis r10,hi16(EXT(saveanchor)) ; Get the high part of the anchor
576 stw r31,FM_SIZE+12(r1) ; Save R28
577 andi. r3,r12,0x7FCF ; Turn off all translation and rupts
578 ori r10,r10,lo16(EXT(saveanchor)) ; Bottom half of the anchor
579 mflr r9 ; Save the return
580
581 bt pfNoMSRirb,stNoMSR ; No MSR...
582
583 mtmsr r3 ; Translation and all off
584 isync ; Toss prefetch
585 b stNoMSRx
586
587 .align 5
588
589 stNoMSR: li r0,loadMSR ; Get the MSR setter SC
590 sc ; Set it
591
592 stNoMSRx: bl savelock ; Go lock up the anchor
593
594 lwz r8,SVadjust(r10) ; How many do we need to clear out?
595 li r3,0 ; Get a 0
596 neg. r8,r8 ; Get the actual we need to toss (adjust is neg if too many)
597 lwz r7,SVfree(r10) ; Get the first on the free list
598 bgt+ stneedtrim ; Yeah, we still need it...
599
600 mtlr r9 ; Restore return
601 stw r3,SVlock(r10) ; Quick unlock (no need for sync or to set adjust, nothing changed)
602
603 mtmsr r12 ; Restore translation and exceptions
604 isync ; Make sure about it
605
606 #if FPVECDBG
607 lis r0,HIGH_ADDR(CutTrace) ; (TEST/DEBUG)
608 li r2,0x2206 ; (TEST/DEBUG)
609 oris r0,r0,LOW_ADDR(CutTrace) ; (TEST/DEBUG)
610 sc ; (TEST/DEBUG)
611 #endif
612 addi r1,r1,(FM_ALIGN(16)+FM_SIZE) ; Pop stack - have not trashed register so no need to reload
613 blr ; Leave...
614
615 .align 5
616
617 stneedtrim: mr r6,r7 ; Save the first one
618 mr r5,r8 ; Save the number we are trimming
619
620 nop
621 nop
622
623 sttrimming: addic. r5,r5,-1 ; Any left to do?
624 ble- sttrimmed ; Nope...
625 lwz r7,SAVprev(r7) ; Skip to the next one
626 b sttrimming ; Keep going...
627
628 .align 5
629
630 sttrimmed: lwz r5,SAVprev(r7) ; Get the next one (for new head of free list)
631 lwz r4,SVfreecnt(r10) ; Get the free count
632 stw r5,SVfree(r10) ; Set new head
633 sub r4,r4,r8 ; Calculate the new free count
634 li r31,0 ; Show we have no free pool blocks yet
635 cmplwi cr1,r5,0 ; Make sure this is not equal
636 stw r4,SVfreecnt(r10) ; Set new free count
637 lis r30,hi16(sac_empty) ; Get what empty looks like
638
639 ;
640 ; NOTE: The savearea size must be 640 (0x280). We are doing a divide by shifts and stuff
641 ; here.
642 ;
643 #if SAVsize != 640
644 #error Savearea size is not 640!!!!!!!!!!!!
645 #endif
646
647 sttoss: beq+ cr1,stdone ; All done now...
648
649 cmplw cr1,r6,r7 ; Have we finished the loop?
650
651 lis r0,0x0044 ; Get top of table
652 rlwinm r2,r6,0,0,19 ; Back down to the savearea control stuff
653 ori r0,r0,0x2200 ; Finish shift table
654 rlwinm r4,r6,25,27,30 ; Get (addr >> 7) & 0x1E (same as twice high nybble)
655 lwz r5,SACalloc(r2) ; Get the allocation bits
656 addi r4,r4,1 ; Shift 1 extra
657 rlwinm r3,r6,25,31,31 ; Get (addr >> 7) & 1
658 rlwnm r0,r0,r4,29,31 ; Get partial index
659 lis r4,lo16(0x8000) ; Get the bit mask
660 add r0,r0,r3 ; Make the real index
661 srw r4,r4,r0 ; Get the allocation mask
662 or r5,r5,r4 ; Free this entry
663 cmplw r5,r4 ; Is this the only free entry?
664 lwz r6,SAVprev(r6) ; Chain to the next trimmed savearea
665 cmplw cr7,r30,r5 ; Does this look empty?
666 stw r5,SACalloc(r2) ; Save back the allocation bits
667 beq- stputpool ; First free entry, go put it into the pool...
668 bne+ cr7,sttoss ; Not an empty block
669
670 ;
671 ; We have an empty block. Remove it from the pool list.
672 ;
673
674 lwz r29,SACflags(r2) ; Get the flags
675 cmplwi cr5,r31,0 ; Is this guy on the release list?
676 lwz r28,SACnext(r2) ; Get the forward chain
677
678 rlwinm. r0,r29,0,sac_permb,sac_permb ; Is this a permanently allocated area? (also sets 0 needed below)
679 bne- sttoss ; This is permanent entry, do not try to release...
680
681 lwz r29,SACprev(r2) ; and the previous
682 beq- cr5,stnot1st ; Not first
683 lwz r0,SACvrswap(r31) ; Load the previous pool page vr conversion
684
685 stnot1st: stw r28,SACnext(r29) ; Previous guy points to my next
686 xor r0,r0,r31 ; Make the last guy virtual
687 stw r29,SACprev(r28) ; Next guy points back to my previous
688 stw r0,SAVprev(r2) ; Store the old top virtual as my back chain
689 mr r31,r2 ; My physical is now the head of the chain
690 b sttoss ; Get the next one...
691
692 ;
693 ; A pool block that had no free entries now has one. Stick it on the pool list.
694 ;
695
696 .align 5
697
698 stputpool: lwz r28,SVpoolfwd(r10) ; Get the first guy on the list
699 stw r2,SVpoolfwd(r10) ; Put us on the top of the list
700 stw r28,SACnext(r2) ; We point to the old top
701 stw r2,SACprev(r28) ; Old top guy points back to us
702 stw r10,SACprev(r2) ; Our back points to the anchor
703 b sttoss ; Go on to the next one...
704
705 ;
706 ; We are all done. Relocate pool release head, restore all, and go.
707 ;
708
709 .align 5
710
711 stdone: bl saveunlock ; Unlock the saveanchor and set adjust field
712
713 mr. r3,r31 ; Move release chain and see if there are any
714 li r5,0 ; Assume either V=R or no release chain
715 beq- stnorel ; Nothing to release...
716 lwz r5,SACvrswap(r31) ; Get the vr conversion
717
718 stnorel: mtmsr r12 ; Restore translation and exceptions
719 isync ; Make sure about it
720
721 mtlr r9 ; Restore the return
722
723 lwz r28,FM_SIZE+0(r1) ; Restore R28
724 lwz r29,FM_SIZE+4(r1) ; Restore R29
725 lwz r30,FM_SIZE+8(r1) ; Restore R30
726 lwz r31,FM_SIZE+12(r1) ; Restore R31
727 addi r1,r1,(FM_ALIGN(16)+FM_SIZE) ; Pop the stack
728 xor r3,r3,r5 ; Convert release chain address to virtual
729
730 #if FPVECDBG
731 lis r0,HIGH_ADDR(CutTrace) ; (TEST/DEBUG)
732 li r2,0x2207 ; (TEST/DEBUG)
733 oris r0,r0,LOW_ADDR(CutTrace) ; (TEST/DEBUG)
734 sc ; (TEST/DEBUG)
735 #endif
736 blr ; Return...
737
738 ;
739 ; save_recover - here we scan the free pool and see if we can get
740 ; enough free saveareas to hit target.
741 ;
742 ; If we empty a pool block, remove it from the pool list
743 ;
744 ;
745
746 .align 5
747 .globl EXT(save_recover)
748
749 LEXT(save_recover)
750 mfsprg r9,2 ; Get the feature flags
751 mfmsr r12 ; Get the MSR
752 rlwinm r12,r12,0,MSR_FP_BIT+1,MSR_FP_BIT-1 ; Force floating point off
753 rlwinm r12,r12,0,MSR_VEC_BIT+1,MSR_VEC_BIT-1 ; Force vectors off
754 mtcrf 0x04,r9 ; Set the features
755 lis r10,hi16(EXT(saveanchor)) ; Get the high part of the anchor
756 andi. r3,r12,0x7FCF ; Turn off all translation and rupts
757 ori r10,r10,lo16(EXT(saveanchor)) ; Bottom half of the anchor
758 mflr r9 ; Save the return
759
760 bt pfNoMSRirb,srcNoMSR ; No MSR...
761
762 mtmsr r3 ; Translation and all off
763 isync ; Toss prefetch
764 b srcNoMSRx
765
766 .align 5
767
768 srcNoMSR: li r0,loadMSR ; Get the MSR setter SC
769 sc ; Set it
770
771 srcNoMSRx: bl savelock ; Go lock up the anchor
772
773 lwz r8,SVadjust(r10) ; How many do we need to clear get?
774 li r3,0 ; Get a 0
775 mr. r8,r8 ; Do we need any?
776 bgt+ srcneedmore ; Yeah, we still need it...
777
778 mtlr r9 ; Restore return
779 stw r3,SVlock(r10) ; Quick unlock (no need for sync or to set adjust, nothing changed)
780
781 mtmsr r12 ; Restore translation and exceptions
782 isync ; Make sure about it
783
784 #if FPVECDBG
785 lis r0,HIGH_ADDR(CutTrace) ; (TEST/DEBUG)
786 li r2,0x2208 ; (TEST/DEBUG)
787 oris r0,r0,LOW_ADDR(CutTrace) ; (TEST/DEBUG)
788 sc ; (TEST/DEBUG)
789 #endif
790 blr ; Leave...
791
792 .align 5
793
794 srcneedmore:
795 mr r6,r10 ; Start at pool anchor
796 cmplwi cr1,r10,0 ; Make sure we start as not equal
797 lwz r7,SVfreecnt(r10) ; Get the current free count
798
799 srcnpool: lwz r6,SACnext(r6) ; Point to the next one
800 cmplw r6,r10 ; Have we wrapped?
801 beq- srcdone ; Yes, did not have enough...
802
803 lwz r5,SACalloc(r6) ; Pick up the allocation for this pool block
804
805 ;
806 ; NOTE: The savearea size must be 640 (0x280). We are doing a multiply by shifts and add.
807 ; offset = (index << 9) + (index << 7)
808 ;
809 #if SAVsize != 640
810 #error Savearea size is not 640!!!!!!!!!!!!
811 #endif
812
813 srcnext: beq- cr1,srcdone ; We have no more to get...
814
815 lis r3,0x8000 ; Get the top bit on
816 cntlzw r4,r5 ; Find a free slot
817 addi r7,r7,1 ; Bump up the free count
818 srw r3,r3,r4 ; Make a mask
819 slwi r0,r4,7 ; First multiply by 128
820 subi r8,r8,1 ; Decrement the need count
821 slwi r2,r4,9 ; Then multiply by 512
822 andc. r5,r5,r3 ; Clear out the "free" bit
823 add r2,r2,r0 ; Sum to multiply by 640
824
825 stw r5,SACalloc(r6) ; Set new allocation bits
826
827 add r2,r2,r6 ; Get the actual address of the savearea
828 lwz r3,SVfree(r10) ; Get the head of the chain
829 cmplwi cr1,r8,0 ; Do we actually need any more?
830 stw r2,SVfree(r10) ; Push ourselves in the front
831 stw r3,SAVprev(r2) ; Chain the rest of the list behind
832
833 bne+ srcnext ; The pool block is not empty yet, try for another...
834
835 lwz r2,SACnext(r6) ; Get the next pointer
836 lwz r3,SACprev(r6) ; Get the previous pointer
837 stw r3,SACprev(r2) ; The previous of my next points to my previous
838 stw r2,SACnext(r3) ; The next of my previous points to my next
839 bne+ cr1,srcnpool ; We still have more to do...
840
841 srcdone: stw r7,SVfreecnt(r10) ; Set the new free count
842 bl saveunlock ; Unlock the save and set adjust field
843
844 mtlr r9 ; Restore the return
845 mtmsr r12 ; Restore translation and exceptions
846 isync ; Make sure about it
847
848 #if FPVECDBG
849 lis r0,HIGH_ADDR(CutTrace) ; (TEST/DEBUG)
850 li r2,0x2209 ; (TEST/DEBUG)
851 oris r0,r0,LOW_ADDR(CutTrace) ; (TEST/DEBUG)
852 sc ; (TEST/DEBUG)
853 #endif
854 blr ; Leave...
855
856 ;
857 ; Here is where we lock the saveanchor lock
858 ; We assume R10 points to the saveanchor
859 ; We trash R7 and R3
860 ;
861
862 .align 5
863
864 savelock: lwarx r7,0,r10 ; Grab the lock value
865 li r3,1 ; Use part of the delay time
866 mr. r7,r7 ; Is it locked? */
867 bne- sllcks ; Yeah, wait for it to clear...
868 stwcx. r3,0,r10 ; Try to seize that there durn lock
869 beq+ sllckd ; Got it...
870 b savelock ; Collision, try again...
871
872 .align 5
873
874 sllcks: lwz r7,SVlock(r10) ; Get that lock in here
875 mr. r7,r7 ; Is it free yet?
876 beq+ savelock ; Yeah, try for it again...
877 b sllcks ; Sniff away...
878
879 nop ; Force isync to last in ifetch buffer
880 nop
881 nop
882
883 sllckd: isync ; Make sure translation is off
884 blr ; Return....
885
886
887 ;
888 ; This is the common routine that sets the saveadjust field and unlocks the savearea
889 ; anchor.
890 ;
891 ; Note that we can not use R9 here because we use it to save the LR across the call.
892 ; Also, R10 is assumed to point to the saveanchor. R3 is also reserved.
893 ;
894
895 .align 5
896
897 saveunlock:
898 lwz r6,SVfreecnt(r10) ; and the number on the free list
899 lwz r5,SVinuse(r10) ; Pick up the in use count
900 cmplwi r6,FreeListMin ; Do we have at least the minimum?
901 blt- sutooshort ; Do not have minumum....
902 lwz r7,SVtarget(r10) ; Get the target
903
904 add r6,r6,r5 ; Get the total number of saveareas
905 addi r5,r7,-SaveLowHysteresis ; Find bottom
906 sub r5,r6,r5 ; Make everything below hysteresis negative
907 sub r7,r7,r6 ; Get the distance from the target
908 rlwinm r5,r5,0,0,31 ; Clear negative bit
909 addi r5,r5,-(SaveLowHysteresis + SaveHighHysteresis + 1) ; Subtract full hysteresis range
910 srawi r5,r5,31 ; Get 0xFFFFFFFF if outside range or 0 if inside
911 and r7,r7,r5 ; Get 0 if in range or distance to target if not
912
913 li r8,0 ; Set a clear value
914 stw r7,SVadjust(r10) ; Set the adjustment value
915
916 sync ; Make sure everything is done
917 stw r8,SVlock(r10) ; Unlock the savearea chain
918 blr
919
920 .align 5
921
922 sutooshort: subfic r6,r6,FreeListMin ; Get the number needed to hit minimum
923 li r8,0 ; Set a clear value
924 stw r6,SVadjust(r10) ; Set the adjustment value
925
926 sync ; Make sure everything is done
927 stw r8,SVlock(r10) ; Unlock the savearea chain
928 blr
929
930
931
932
933 /*
934 * struct savearea *save_cpv(struct savearea *); Converts a physical savearea address to virtual
935 */
936
937 .align 5
938 .globl EXT(save_cpv)
939
940 LEXT(save_cpv)
941
942 mfmsr r10 ; Get the current MSR
943 rlwinm r10,r10,0,MSR_FP_BIT+1,MSR_FP_BIT-1 ; Force floating point off
944 rlwinm r10,r10,0,MSR_VEC_BIT+1,MSR_VEC_BIT-1 ; Force vectors off
945 rlwinm r4,r3,0,0,19 ; Round back to the start of the physical savearea block
946 andi. r9,r10,0x7FEF ; Turn off interrupts and data translation
947 mtmsr r9 ; Disable DR and EE
948 isync
949
950 lwz r4,SACvrswap(r4) ; Get the conversion to virtual
951 mtmsr r10 ; Interrupts and DR back on
952 isync
953 xor r3,r3,r4 ; Convert to physical
954 blr