/*
 * Copyright (c) 1999 Apple Computer, Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * Portions Copyright (c) 1999 Apple Computer, Inc.  All Rights
 * Reserved.  This file contains Original Code and/or Modifications of
 * Original Code as defined in and that are subject to the Apple Public
 * Source License Version 1.1 (the "License").  You may not use this file
 * except in compliance with the License.  Please obtain a copy of the
 * License at http://www.apple.com/publicsource and read it before using
 * this file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE OR NON-INFRINGEMENT.  Please see the
 * License for the specific language governing rights and limitations
 * under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */
#import <mach/boolean.h>
#import <mach/i386/vm_types.h>
#import <architecture/i386/cpu.h>
/*
 * Primitives for manipulating the processor caches.
 */

        "wbinvd; mov %0,%%cr0"

    asm volatile("wbinvd");
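
/*
 * The two fragments above come from routines whose surrounding lines are
 * not shown.  The following is a minimal sketch of what such primitives
 * typically look like, assuming GCC-style inline assembly; the names
 * set_cr0() and flush_cache() are illustrative, not necessarily those
 * used in the original file.
 */
static inline void
set_cr0(unsigned int value)
{
    /* Write back and invalidate the caches, then load the new CR0. */
    asm volatile(
        "wbinvd; mov %0,%%cr0"
        : : "r" (value));
}

static inline void
flush_cache(void)
{
    /* Write back and invalidate the caches. */
    asm volatile("wbinvd");
}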
/*
 * SCAN_INCR    Size of memory scanning segment.
 * SCAN_LEN     Length to actually test, per segment,
 *              starting at the front of the segment.
 * SCAN_LIM     Highest address to test for existence of memory.
 *
 * Primary assumption is that SCAN_INCR and SCAN_LEN are chosen
 * and the stack is positioned in its segment such that the tested
 * portion of the segment does not overlap with any portion of the
 * stack's location in that segment. The best way to accomplish this
 * is to position the stack at the high end of a segment, and to make
 * the segment size large enough to prevent it from overlapping the
 * test area. The stack needs to be large enough to contain the low
 * memory save area as well as the code for the scan function.
 */
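
/*
 * Purely illustrative choices that satisfy the constraints above.  In
 * this file the actual values arrive as the SCAN_INCR, SCAN_LEN and
 * SCAN_LIM parameters of the scan routines below, so both the EXAMPLE_*
 * names and the numbers here are assumptions, not taken from the
 * original code.
 */
#define EXAMPLE_SCAN_INCR   (1024 * 1024)       /* scan in 1 MB segments           */
#define EXAMPLE_SCAN_LEN    (4 * 1024)          /* test first 4 KB of each segment */
#define EXAMPLE_SCAN_LIM    (64 * 1024 * 1024)  /* stop probing at 64 MB           */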
#define KB(x)   (1024*(x))
#define MB(x)   (1024*KB(x))

#define SCAN_PAT0   0x76543210
#define SCAN_PAT1   0x89abcdef
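
/*
 * The definition of struct test_datum is not shown in this excerpt.
 * From its use below (two word-sized members, word0 and word1, written
 * and compared as an eight-byte pattern) it is presumably equivalent to
 * the following sketch:
 */
struct test_datum {
    unsigned int word0;
    unsigned int word1;
};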
    vm_offset_t end_of_memory,
    vm_offset_t end_of_cnvmem,
    unsigned int SCAN_INCR,
    unsigned int SCAN_LEN,
    unsigned int SCAN_LIM
)
{
    struct test_datum zero_pat = { 0, 0 };
    /*
     * Make sure that the cache(s) are flushed.
     */
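    /*
     * The flush itself is not shown in this excerpt; presumably one of
     * the cache primitives sketched near the top of the file is called
     * here, for example (assumed helper name):
     */
    flush_cache();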
    /*
     * Round the starting address to the next
     * segment boundary. This is where we will
     * start scanning.
     */
    end_of_memory = (end_of_memory + (SCAN_INCR - 1)) & ~(SCAN_INCR - 1);
    /*
     * Zero out the test area of each segment
     * which is located in extended memory.
     */
    while (memory < end_of_memory) {
        struct test_datum *memory_ptr;

        memory_ptr = (struct test_datum *)memory;

        while ((vm_offset_t)memory_ptr < memory + SCAN_LEN)
            *memory_ptr++ = zero_pat;

        memory += SCAN_INCR;
    }
    /*
     * Code for segment scanning function.
     */
    extern unsigned int Scan_segment_code[],
                        Scan_segment_code_end[];

    /*
     * Location on the stack to where this
     * function is copied and then executed
     * from!! N.B. This code must be position
     * independent.
     */
    unsigned int scan_func[
        Scan_segment_code_end - Scan_segment_code];

    /*
     * Copy the scan function onto the stack.
     */
    memcpy(scan_func, Scan_segment_code, sizeof (scan_func));
    while (end_of_memory < SCAN_LIM) {
        display_kbytes(end_of_memory);

        if (!((vm_offset_t (*)())scan_func)(
                end_of_memory, end_of_cnvmem, SCAN_INCR, SCAN_LEN))
            break;

        end_of_memory += SCAN_INCR;
    }

    display_kbytes(end_of_memory);

    return (end_of_memory);
    unsigned int quant, dig, done = 0, mag = 1000000000;

    quant = address / 1024;

        dig = (quant / mag) - done;

        if (done > 0 || mag == 1) {
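
/*
 * The fragments above appear to implement decimal printing of the memory
 * size in kilobytes with leading-zero suppression: mag walks down the
 * powers of ten, done accumulates (times ten) the digits already
 * emitted, and dig is the next digit.  Below is a self-contained sketch
 * of that technique; print_digit() stands in for whatever console output
 * primitive the loader really uses, so it is an assumption rather than
 * the original routine.
 */
static void
example_display_kbytes(vm_offset_t address, void (*print_digit)(int))
{
    unsigned int quant = address / 1024;    /* size in kilobytes          */
    unsigned int mag = 1000000000;          /* current power of ten       */
    unsigned int done = 0;                  /* digits emitted so far, x10 */
    unsigned int dig;

    while (mag != 0) {
        dig = (quant / mag) - done;
        done += dig;

        if (done > 0 || mag == 1)           /* suppress leading zeros     */
            print_digit('0' + dig);

        done *= 10;
        mag /= 10;
    }
}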
/*
 * Memory scan function, which tests one segment of memory.
 * This code is copied onto the stack and executed there to
 * avoid problems when memory aliasing occurs. If it detects
 * problems due to aliasing to low memory, it restores the
 * low memory segments before returning.
 *
 * end_of_memory    Address to start testing at;
 *                  this is rounded to the start of
 *                  the next segment internally.
 * end_of_cnvmem    Address where conventional
 *                  memory ends.
 * SCAN_INCR        Size of each segment.
 * SCAN_LEN         Size of per-segment test area,
 *                  located at the front of the segment.
 * SCAN_LIM         Address of the next segment after the
 *                  highest address to be tested.
 */
    vm_offset_t start_of_segment,
    vm_offset_t end_of_cnvmem,
    unsigned int SCAN_INCR,
    unsigned int SCAN_LEN
)
{
    /*
     * Location on the stack where the test
     * area of each segment of low memory is
     * saved, appended together. The copy is
     * used to detect memory aliasing and to
     * restore memory on that occasion.
     */
    unsigned int copy_area[
        ((KB(640) / SCAN_INCR) * SCAN_LEN)
        / sizeof (unsigned int)];
    struct test_datum *test_ptr,
        test_pat = { SCAN_PAT0, SCAN_PAT1 },
        zero_pat = { 0, 0 };
    vm_offset_t memory, copy;
    /*
     * Copy the test area of each low memory
     * segment to the save area. Low memory
     * begins at zero, and runs to the end of
     * conventional memory.
     */
    copy = (vm_offset_t)copy_area;
    memory = 0;

    while (memory < KB(640)) {
        unsigned int *memory_ptr, *copy_ptr;

        if (memory <= (end_of_cnvmem - SCAN_LEN)) {
            memory_ptr = (unsigned int *)memory;
            copy_ptr = (unsigned int *)copy;

            while ((vm_offset_t)memory_ptr < memory + SCAN_LEN)
                *copy_ptr++ = *memory_ptr++;
        }

        memory += SCAN_INCR; copy += SCAN_LEN;
    }
    /*
     * Write the test pattern in the test
     * area of the current segment.
     */
    test_ptr = (struct test_datum *)start_of_segment;

    while ((vm_offset_t)test_ptr < start_of_segment + SCAN_LEN)
        *test_ptr++ = test_pat;

    /*
     * Flush the data cache to ensure that the
     * data actually gets written to main memory.
     * This will provoke aliasing to occur if
     * it is in fact present.
     */
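    /*
     * The flush call itself is not shown in this excerpt.  Because this
     * routine executes position-independently from the stack, it most
     * likely flushes inline rather than calling another function; a
     * write-back-and-invalidate, as in the fragment near the top of the
     * file, would do (assumption):
     */
    asm volatile("wbinvd");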
    /*
     * Compare low memory against the save
     * area, breaking out immediately if
     * an inconsistency is observed.
     */
    copy = (vm_offset_t)copy_area;
    memory = 0;

    while (memory < KB(640)) {
        struct test_datum *memory_ptr, *copy_ptr;

        if (memory <= (end_of_cnvmem - SCAN_LEN)) {
            memory_ptr = (struct test_datum *)memory;
            copy_ptr = (struct test_datum *)copy;

            while ((vm_offset_t)memory_ptr < memory + SCAN_LEN) {
                if ( memory_ptr->word0 != copy_ptr->word0 ||
                     memory_ptr->word1 != copy_ptr->word1 )
                    break;

                memory_ptr++; copy_ptr++;
            }

            if ((vm_offset_t)memory_ptr < memory + SCAN_LEN)
                break;
        }

        memory += SCAN_INCR; copy += SCAN_LEN;
    }
    /*
     * If an inconsistency was found in low
     * memory, restore the entire region from
     * the save area and return a failure.
     */
    if (memory < KB(640)) {
        copy = (vm_offset_t)copy_area;
        memory = 0;

        while (memory < KB(640)) {
            unsigned int *memory_ptr, *copy_ptr;

            if (memory <= (end_of_cnvmem - SCAN_LEN)) {
                memory_ptr = (unsigned int *)memory;
                copy_ptr = (unsigned int *)copy;

                while ((vm_offset_t)memory_ptr < memory + SCAN_LEN)
                    *memory_ptr++ = *copy_ptr++;
            }

            memory += SCAN_INCR; copy += SCAN_LEN;
        }

        return (FALSE);
    }
    /*
     * Check the memory we have already scanned
     * to see whether aliasing occurred there.
     * The test area of each segment should contain
     * only zeros.
     */
    while (memory < start_of_segment) {
        struct test_datum *memory_ptr;

        memory_ptr = (struct test_datum *)memory;

        while ((vm_offset_t)memory_ptr < memory + SCAN_LEN) {
            if ( memory_ptr->word0 != zero_pat.word0 ||
                 memory_ptr->word1 != zero_pat.word1 )
                break;

            memory_ptr++;
        }

        if ((vm_offset_t)memory_ptr < memory + SCAN_LEN)
            break;

        memory += SCAN_INCR;
    }

    if (memory < start_of_segment)
        return (FALSE);
    /*
     * Now check the current segment to see
     * whether the test pattern was correctly
     * written.
     */
    test_ptr = (struct test_datum *)start_of_segment;

    while ((vm_offset_t)test_ptr < start_of_segment + SCAN_LEN) {
        if ( test_ptr->word0 != test_pat.word0 ||
             test_ptr->word1 != test_pat.word1 )
            break;

        test_ptr++;
    }

    if ((vm_offset_t)test_ptr < start_of_segment + SCAN_LEN)
        return (FALSE);
    /*
     * Zero the current segment, which has now
     * been successfully tested.
     */
    test_ptr = (struct test_datum *)start_of_segment;

    while ((vm_offset_t)test_ptr < start_of_segment + SCAN_LEN)
        *test_ptr++ = zero_pat;
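
    /*
     * The remainder of the routine is not shown.  Since the caller
     * treats a zero return as failure, it presumably ends by reporting
     * success, e.g. (assumption):
     */
    return (TRUE);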