+# Availability of DWARF allows DTrace CTF (compressed type format) to be constructed.
+# ctf_insert creates the CTF section. It needs reserved padding in the
+# headers for the load command segment and the CTF section structures.
+#
+# Link flags common to every kernel configuration:
+#   -nostdlib / -fapple-kext         : freestanding kext-style link, no host runtime
+#   -Wl,-e,__start                   : set the entry point symbol to __start
+#   -Wl,-sectalign,...,0x1000        : 4KB-align the listed sections
+#   -Wl,-sectcreate,__PRELINK_*      : create empty placeholder sections
+#                                      (later populated with prelinked kexts/info)
+#   -Wl,-pagezero_size,0x0           : no __PAGEZERO segment
+#   -Wl,-headerpad,152               : reserve Mach-O header space so
+#                                      ctf_insert can add the CTF section (see above)
+LDFLAGS_KERNEL_GEN = \
+ -nostdlib \
+ -fapple-kext \
+ -Wl,-e,__start \
+ -Wl,-sectalign,__TEXT,__text,0x1000 \
+ -Wl,-sectalign,__DATA,__common,0x1000 \
+ -Wl,-sectalign,__DATA,__bss,0x1000 \
+ -Wl,-sectcreate,__PRELINK_TEXT,__text,/dev/null \
+ -Wl,-sectcreate,__PRELINK_INFO,__info,/dev/null \
+ -Wl,-new_linker \
+ -Wl,-pagezero_size,0x0 \
+ -Wl,-version_load_command \
+ -Wl,-function_starts \
+ -Wl,-headerpad,152
+
+# Search path for kernel-private static libraries shipped in the SDK.
+# The variant linking firehose_kernel is currently disabled:
+# LDFLAGS_KERNEL_SDK = -L$(SDKROOT)/usr/local/lib/kernel -lfirehose_kernel
+LDFLAGS_KERNEL_SDK = -L$(SDKROOT)/usr/local/lib/kernel
+
+# Per-kernel-configuration link flags, folded into LDFLAGS_KERNEL below.
+# All empty by default; KASAN inherits whatever DEVELOPMENT uses.
+LDFLAGS_KERNEL_RELEASE =
+LDFLAGS_KERNEL_DEVELOPMENT =
+LDFLAGS_KERNEL_KASAN = $(LDFLAGS_KERNEL_DEVELOPMENT)
+LDFLAGS_KERNEL_DEBUG =
+LDFLAGS_KERNEL_PROFILE =
+
+# KASLR static slide config:
+ifndef SLIDE
+SLIDE=0x00
+endif
+KERNEL_MIN_ADDRESS = 0xffffff8000000000
+KERNEL_BASE_OFFSET = 0x100000
+# Use := so each $(shell ...) runs exactly once at parse time (a recursive
+# `=` would re-invoke printf on every expansion), and POSIX $(( ))
+# arithmetic instead of the deprecated, bash-only $[ ] form.
+# The slide is SLIDE shifted left by 21 (i.e. in units of 2MB).
+KERNEL_STATIC_SLIDE := $(shell printf "0x%016x" \
+ $$(( $(SLIDE) << 21 )))
+# Unslid static base: minimum kernel address plus the fixed base offset.
+KERNEL_STATIC_BASE := $(shell printf "0x%016x" \
+ $$(( $(KERNEL_MIN_ADDRESS) + $(KERNEL_BASE_OFFSET) )))
+# __HIB lands at the (possibly slid) static base; __TEXT 1MB above it.
+KERNEL_HIB_SECTION_BASE := $(shell printf "0x%016x" \
+ $$(( $(KERNEL_STATIC_BASE) + $(KERNEL_STATIC_SLIDE) )))
+KERNEL_TEXT_BASE := $(shell printf "0x%016x" \
+ $$(( $(KERNEL_HIB_SECTION_BASE) + 0x100000 )))
+
+# x86_64 RELEASE link flags:
+#   -pie with -segaddr,__HIB / -image_base places __HIB at the static base
+#   and __TEXT 1MB above it (KERNEL_TEXT_BASE, computed above)
+#   -seg_page_size,__TEXT,0x200000: __TEXT uses 2MB (large-page) granularity
+#   every __HIB section is 4KB-aligned
+#   -rename_section moves __DATA,__const into the __DATA_CONST segment
+#   -no_zero_fill_sections: do not emit zero-fill sections
+#     NOTE(review): presumably required so __HIB contents are fully
+#     materialized — confirm intent
+LDFLAGS_KERNEL_RELEASEX86_64 = \
+ -Wl,-pie \
+ -Wl,-segaddr,__HIB,$(KERNEL_HIB_SECTION_BASE) \
+ -Wl,-image_base,$(KERNEL_TEXT_BASE) \
+ -Wl,-seg_page_size,__TEXT,0x200000 \
+ -Wl,-sectalign,__HIB,__bootPT,0x1000 \
+ -Wl,-sectalign,__HIB,__desc,0x1000 \
+ -Wl,-sectalign,__HIB,__data,0x1000 \
+ -Wl,-sectalign,__HIB,__text,0x1000 \
+ -Wl,-sectalign,__HIB,__const,0x1000 \
+ -Wl,-sectalign,__HIB,__bss,0x1000 \
+ -Wl,-sectalign,__HIB,__common,0x1000 \
+ -Wl,-sectalign,__HIB,__llvm_prf_cnts,0x1000 \
+ -Wl,-sectalign,__HIB,__llvm_prf_names,0x1000 \
+ -Wl,-sectalign,__HIB,__llvm_prf_data,0x1000 \
+ -Wl,-sectalign,__HIB,__textcoal_nt,0x1000 \
+ -Wl,-rename_section,__DATA,__const,__DATA_CONST,__const \
+ -Wl,-no_zero_fill_sections \
+ $(LDFLAGS_NOSTRIP_FLAG)
+
+# Sanitizer builds add instrumentation sections to __HIB; those must be
+# 4KB-aligned like the rest of the segment.
+ifeq ($(SAN),1)
+LDFLAGS_KERNEL_RELEASEX86_64 += \
+ -Wl,-sectalign,__HIB,__cstring,0x1000
+endif
+
+# Likewise for the kernel sanitizer-coverage guard/PC tables.
+ifeq ($(KSANCOV),1)
+LDFLAGS_KERNEL_RELEASEX86_64 += \
+ -Wl,-sectalign,__HIB,__sancov_guards,0x1000 \
+ -Wl,-sectalign,__HIB,__sancov_pcs,0x1000
+endif
+
+# Pass KERNEL_BASE_OFFSET to the compiler so the offset is known at
+# compile time (same value used for the link-time layout above):
+CFLAGS_X86_64 += -DKERNEL_BASE_OFFSET=$(KERNEL_BASE_OFFSET)
+CFLAGS_X86_64H += -DKERNEL_BASE_OFFSET=$(KERNEL_BASE_OFFSET)
+
+# DEBUG/DEVELOPMENT/PROFILE x86_64 configs reuse the RELEASE flags.
+LDFLAGS_KERNEL_DEBUGX86_64 = $(LDFLAGS_KERNEL_RELEASEX86_64)
+LDFLAGS_KERNEL_DEVELOPMENTX86_64 = $(LDFLAGS_KERNEL_RELEASEX86_64)
+# KASAN additionally 4KB-aligns the ASan bookkeeping sections and renames
+# __HIB's __mod_init_func/__eh_frame into a __NULL segment.
+LDFLAGS_KERNEL_KASANX86_64 = $(LDFLAGS_KERNEL_DEVELOPMENTX86_64) \
+ -Wl,-sectalign,__HIB,__asan_globals,0x1000 \
+ -Wl,-sectalign,__HIB,__asan_liveness,0x1000 \
+ -Wl,-sectalign,__HIB,__mod_term_func,0x1000 \
+ -Wl,-rename_section,__HIB,__mod_init_func,__NULL,__mod_init_func \
+ -Wl,-rename_section,__HIB,__eh_frame,__NULL,__eh_frame
+LDFLAGS_KERNEL_PROFILEX86_64 = $(LDFLAGS_KERNEL_RELEASEX86_64)
+
+# x86_64h mirrors the x86_64 flags config-for-config.
+LDFLAGS_KERNEL_RELEASEX86_64H = $(LDFLAGS_KERNEL_RELEASEX86_64)
+LDFLAGS_KERNEL_DEBUGX86_64H = $(LDFLAGS_KERNEL_RELEASEX86_64H)
+LDFLAGS_KERNEL_DEVELOPMENTX86_64H = $(LDFLAGS_KERNEL_RELEASEX86_64H)
+LDFLAGS_KERNEL_KASANX86_64H = $(LDFLAGS_KERNEL_KASANX86_64)
+LDFLAGS_KERNEL_PROFILEX86_64H = $(LDFLAGS_KERNEL_RELEASEX86_64H)
+
+# We preload ___udivmoddi4 in order to work around an issue with building
+# LTO on armv7.
+# armv7 common flags: static PIE based at 0x80001000 with page-aligned
+# __DATA,__const; -u forces ___udivmoddi4 to be kept (see note above).
+LDFLAGS_KERNEL_GENARM = \
+ -Wl,-pie \
+ -Wl,-static \
+ -Wl,-image_base,0x80001000 \
+ -Wl,-sectalign,__DATA,__const,0x1000 \
+ -Wl,-u,___udivmoddi4
+
+# armv7 RELEASE: generic flags plus LTO-aware stripping.
+LDFLAGS_KERNEL_RELEASEARM = \
+ $(LDFLAGS_KERNEL_GENARM) \
+ $(LDFLAGS_KERNEL_STRIP_LTO)
+
+# RELEASE exports are restricted to the generated KPI symbol list.
+LDFLAGS_KERNEL_EXPORTS_RELEASEARM = \
+ -Wl,-exported_symbols_list,$(TARGET)/all-kpi.exp
+
+# DEVELOPMENT keeps symbols (no strip) and exports everything.
+LDFLAGS_KERNEL_DEVELOPMENTARM = \
+ $(LDFLAGS_KERNEL_GENARM) \
+ $(LDFLAGS_NOSTRIP_FLAG)
+
+LDFLAGS_KERNEL_EXPORTS_DEVELOPMENTARM =
+
+# DEBUG mirrors DEVELOPMENT.
+LDFLAGS_KERNEL_DEBUGARM = $(LDFLAGS_KERNEL_DEVELOPMENTARM)
+LDFLAGS_KERNEL_EXPORTS_DEBUGARM = $(LDFLAGS_KERNEL_EXPORTS_DEVELOPMENTARM)
+
+# Offset image base by page to have iBoot load kernel TEXT correctly.
+# First page is used for various purposes : sleep token, reset vector.
+# We also need a 32MB offset, as this is the minimum block mapping size
+# for a 16KB page runtime, and we wish to use the first virtual block
+# to map the low globals page. We also need another 4MB to account for
+# the address space reserved by L4 (because the reservation is not a
+# multiple of the block size in alignment/length, we will implicitly map
+# it with our block mapping, and we therefore must reflect that the
+# first 4MB of the block mapping for xnu do not belong to xnu).
+# For the moment, kaliber has a unique memory layout (monitor at the top
+# of memory). Support this by breaking 16KB on other platforms and
+# mandating 32MB alignment. Image base (i.e. __TEXT) must be 16KB
+# aligned since ld64 will link with 16KB alignment for ARM64.
+#
+# We currently offset by an additional 32MB in order to reclaim memory.
+# We need a dedicated virtual page for the low globals. Our bootloader
+# may have a significant chunk of memory (up to an L2 entry in size)
+# that lies before the kernel. The additional 32MB of virtual padding
+# ensures that we have enough virtual address space to map all of that
+# memory as part of the V-to-P mapping.
+# 23355738 - put __PRELINK_TEXT first. We reserve enough room
+# for 0x0000000003000000 = 48MB of kexts
+#
+# 0xfffffff000000000 (32MB range for low globals)
+# 0xfffffff002000000 (32MB range to allow for large page physical slide)
+# 0xfffffff004000000 (16KB range to reserve the first available page)
+# 0xfffffff004004000 (48MB range for kexts)
+# 0xfffffff007004000 (Start of xnu proper).
+# arm64 common flags. Addresses implement the virtual layout documented in
+# the map above: __PRELINK_TEXT (kexts) at 0xfffffff004004000, xnu proper
+# (-image_base) at 0xfffffff007004000. Mutable-looking but actually
+# constant sections are renamed into __DATA_CONST, executable text into
+# __TEXT_EXEC, and the prelink (__PLK_*) placeholder sections are created
+# empty for the kext prelinking step to fill in.
+LDFLAGS_KERNEL_GENARM64 = \
+ -Wl,-pie \
+ -Wl,-static \
+ -Wl,-segaddr,__PRELINK_TEXT,0xfffffff004004000 \
+ -Wl,-image_base,0xfffffff007004000 \
+ -Wl,-sectalign,__DATA,__const,0x4000 \
+ -Wl,-rename_section,__DATA,__mod_init_func,__DATA_CONST,__mod_init_func \
+ -Wl,-rename_section,__DATA,__mod_term_func,__DATA_CONST,__mod_term_func \
+ -Wl,-rename_section,__DATA,__auth_ptr,__DATA_CONST,__auth_ptr \
+ -Wl,-rename_section,__DATA,__auth_got,__DATA_CONST,__auth_got \
+ -Wl,-rename_section,__DATA,__const,__DATA_CONST,__const \
+ -Wl,-rename_section,__TEXT,__text,__TEXT_EXEC,__text \
+ -Wl,-rename_section,__TEXT,__stubs,__TEXT_EXEC,__stubs \
+ -Wl,-rename_section,__TEXT,initcode,__TEXT_EXEC,initcode \
+ -Wl,-sectcreate,"__PLK_TEXT_EXEC",__text,/dev/null \
+ -Wl,-sectcreate,__PRELINK_DATA,__data,/dev/null \
+ -Wl,-sectcreate,"__PLK_DATA_CONST",__data,/dev/null \
+ -Wl,-sectcreate,"__PLK_LLVM_COV",__llvm_covmap,/dev/null \
+ -Wl,-sectcreate,"__PLK_LINKEDIT",__data,/dev/null
+
+
+# Segment ordering for arm64; ?= so a platform makefile (or the
+# environment) can override the order before this file is parsed.
+LDFLAGS_KERNEL_SEGARM64 ?= \
+ -Wl,-segment_order,__TEXT:__DATA_CONST:__LINKEDIT:__TEXT_EXEC:__LAST:__KLD:__DATA:__BOOTDATA
+
+# arm64 RELEASE: generic + segment-order flags plus LTO-aware stripping.
+LDFLAGS_KERNEL_RELEASEARM64 = \
+ $(LDFLAGS_KERNEL_GENARM64) \
+ $(LDFLAGS_KERNEL_SEGARM64) \
+ $(LDFLAGS_KERNEL_STRIP_LTO)
+
+# RELEASE exports are restricted to the generated KPI symbol list.
+LDFLAGS_KERNEL_EXPORTS_RELEASEARM64 = \
+ -Wl,-exported_symbols_list,$(TARGET)/all-kpi.exp
+
+# DEVELOPMENT keeps symbols (no strip) and exports everything.
+LDFLAGS_KERNEL_DEVELOPMENTARM64 = \
+ $(LDFLAGS_KERNEL_GENARM64) \
+ $(LDFLAGS_KERNEL_SEGARM64) \
+ $(LDFLAGS_NOSTRIP_FLAG)
+
+LDFLAGS_KERNEL_EXPORTS_DEVELOPMENTARM64 =
+
+# KASAN and DEBUG mirror DEVELOPMENT.
+LDFLAGS_KERNEL_KASANARM64 = $(LDFLAGS_KERNEL_DEVELOPMENTARM64)
+LDFLAGS_KERNEL_DEBUGARM64 = $(LDFLAGS_KERNEL_DEVELOPMENTARM64)
+
+LDFLAGS_KERNEL_EXPORTS_KASANARM64 = $(LDFLAGS_KERNEL_EXPORTS_DEVELOPMENTARM64)
+LDFLAGS_KERNEL_EXPORTS_DEBUGARM64 = $(LDFLAGS_KERNEL_EXPORTS_DEVELOPMENTARM64)
+
+# Final link flags: generic + SDK + per-arch + per-config + per-(config,arch).
+# Each $(addsuffix ...) builds a variable *name* (e.g.
+# LDFLAGS_KERNEL_RELEASEX86_64 from CURRENT_KERNEL_CONFIG=RELEASE and
+# CURRENT_ARCH_CONFIG=X86_64), which the outer $( ) then expands.
+LDFLAGS_KERNEL = $(LDFLAGS_KERNEL_GEN) \
+ $(LDFLAGS_KERNEL_SDK) \
+ $($(addsuffix $(CURRENT_ARCH_CONFIG),ARCH_FLAGS_)) \
+ $($(addsuffix $(CURRENT_ARCH_CONFIG),LDFLAGS_KERNEL_)) \
+ $($(addsuffix $(CURRENT_KERNEL_CONFIG),LDFLAGS_KERNEL_)) \
+ $($(addsuffix $(CURRENT_ARCH_CONFIG), $(addsuffix $(CURRENT_KERNEL_CONFIG),LDFLAGS_KERNEL_))) \
+ $(DEPLOYMENT_TARGET_FLAGS)
+
+
+# Export-control flags, selected the same indirect way, plus the alias list.
+LDFLAGS_KERNEL_EXPORTS = \
+ $($(addsuffix $(CURRENT_ARCH_CONFIG), $(addsuffix $(CURRENT_KERNEL_CONFIG),LDFLAGS_KERNEL_EXPORTS_))) \
+ -Wl,-alias_list,$(TARGET)/all-alias.exp