+
+ /* slide kxld relocations: rebase every packed pointer offset by vm_kernel_slide */
+ if (kaslrOffsets && vm_kernel_slide > 0) {
+ int slidKextAddrCount = 0; /* words successfully slid */
+ int badSlideAddr = 0; /* offsets pointing outside the prelinked segments */
+ int badSlideTarget = 0; /* slid values landing outside the prelinked segments */
+
+ kaslrPackedOffsets * myOffsets = (kaslrPackedOffsets *) kaslrOffsets->getBytesNoCopy();
+
+ for (uint32_t j = 0; j < myOffsets->count; j++) {
+
+ /* slideOffset is a byte offset from the start of the prelinked image
+ * to the pointer-sized word that needs to be rebased */
+ uint64_t slideOffset = (uint64_t) myOffsets->offsetsArray[j];
+ uintptr_t * slideAddr = (uintptr_t *) ((uint64_t)prelinkData + slideOffset);
+ int slideAddrSegIndex = -1;
+ int addrToSlideSegIndex = -1;
+
+ /* the word we are about to slide must itself lie in a prelinked segment */
+ slideAddrSegIndex = __whereIsAddr( (vm_offset_t)slideAddr, &plk_segSizes[0], &plk_segAddrs[0], PLK_SEGMENTS );
+ if (slideAddrSegIndex < 0) {
+ badSlideAddr++;
+ continue;
+ }
+
+ /* the slid value must point back into a prelinked segment */
+ addrToSlideSegIndex = __whereIsAddr( (vm_offset_t)(*slideAddr + vm_kernel_slide), &plk_segSizes[0], &plk_segAddrs[0], PLK_SEGMENTS );
+ if (addrToSlideSegIndex < 0) {
+ badSlideTarget++;
+ continue;
+ }
+
+ slidKextAddrCount++;
+ *slideAddr += vm_kernel_slide;
+ } /* for each packed offset */
+
+ /* all kexts are now slid; set VM protections for them */
+ OSKext::setAllVMAttributes();
+ }
+
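For reference, the blob exposed by `getBytesNoCopy()` is walked as a count followed by a flexible array of 32-bit offsets. A minimal sketch of that layout, inferred from how the loop above indexes it (the canonical definition lives in the prelink headers, so the comments here are illustrative):

    /* Sketch of the packed KASLR offsets blob consumed above. */
    typedef struct kaslrPackedOffsets {
        uint32_t count;          /* number of entries in offsetsArray */
        uint32_t offsetsArray[]; /* byte offsets from prelinkData to each word to slide */
    } kaslrPackedOffsets;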
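Both sanity checks go through `__whereIsAddr()`, which is not part of this hunk. From its call sites it takes an address plus parallel arrays of segment sizes and base addresses, and returns the index of the segment containing the address, or -1 if none does. A minimal sketch consistent with that usage:

    /* Sketch inferred from the call sites above: return the index of the
     * segment containing theAddr, or -1 if no segment contains it. */
    static int
    __whereIsAddr(vm_offset_t theAddr, unsigned long * segSizes,
        vm_offset_t * segAddrs, int segCount)
    {
        for (int i = 0; i < segCount; i++) {
            if (theAddr >= segAddrs[i] && theAddr < segAddrs[i] + segSizes[i]) {
                return i;
            }
        }
        return -1;
    }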