-		pte = pmap_pte(pmap, va);
-		attributes |= *pte & (PHYS_MODIFIED|PHYS_REFERENCED);
-
-		pmap_update_pte(pte, *pte, (*pte & ~bits));
-		/* Ensure all processors using this translation
-		 * invalidate this TLB entry. The invalidation *must*
-		 * follow the PTE update, to ensure that the TLB
-		 * shadow of the 'D' bit (in particular) is
-		 * synchronized with the updated PTE.
-		 */
-		PMAP_UPDATE_TLBS(pmap, va, va + PAGE_SIZE);
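+		/*
+		 * Clear modify and/or reference bits.
+		 */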
+		if (pte_bits) {
+			pmap_update_pte(pte, bits, 0);
+
+			/* Ensure all processors using this translation
+			 * invalidate this TLB entry. The invalidation
+			 * *must* follow the PTE update, to ensure that
+			 * the TLB shadow of the 'D' bit (in particular)
+			 * is synchronized with the updated PTE.
+			 */
+			if (! (options & PMAP_OPTIONS_NOFLUSH)) {
+				/* flush TLBS now */
+				PMAP_UPDATE_TLBS(pmap,
+						 va,
+						 va + PAGE_SIZE);
+			} else if (arg) {
+				/* delayed TLB flush: add "pmap" info */
+				PMAP_UPDATE_TLBS_DELAYED(
+					pmap,
+					va,
+					va + PAGE_SIZE,
+					(pmap_flush_context *)arg);
+			} else {
+				/* no TLB flushing at all */
+			}
+		}
+
+		/* update pmap "reusable" stats */
+		if ((options & PMAP_OPTIONS_CLEAR_REUSABLE) &&
+		    is_reusable &&
+		    pmap != kernel_pmap) {
+			/* one less "reusable" */
+			assert(pmap->stats.reusable > 0);
+			OSAddAtomic(-1, &pmap->stats.reusable);
+			if (is_internal) {
+				/* one more "internal" */
+				OSAddAtomic(+1, &pmap->stats.internal);
+				PMAP_STATS_PEAK(pmap->stats.internal);
+			} else {
+				/* one more "external" */
+				OSAddAtomic(+1, &pmap->stats.external);
+				PMAP_STATS_PEAK(pmap->stats.external);
+			}
+		} else if ((options & PMAP_OPTIONS_SET_REUSABLE) &&
+			   !is_reusable &&
+			   pmap != kernel_pmap) {
+			/* one more "reusable" */
+			OSAddAtomic(+1, &pmap->stats.reusable);
+			PMAP_STATS_PEAK(pmap->stats.reusable);
+			if (is_internal) {
+				/* one less "internal" */
+				assert(pmap->stats.internal > 0);
+				OSAddAtomic(-1, &pmap->stats.internal);
+			} else {
+				/* one less "external" */
+				assert(pmap->stats.external > 0);
+				OSAddAtomic(-1, &pmap->stats.external);
+			}
+		}
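
The delayed-flush path above only fires when the caller passes PMAP_OPTIONS_NOFLUSH together with a pmap_flush_context pointer in "arg", and then issues a single batched invalidation afterwards. The sketch below shows that caller-side pattern under stated assumptions: it relies on the pmap_flush_context_init()/pmap_flush() helpers and the pmap_clear_refmod_options() entry point that funnels into the code above, and mark_pages_reusable(), page_list and page_count are hypothetical names used only for illustration.

#include <vm/pmap.h>	/* kernel-internal: pmap_* interfaces, pmap_flush_context */

/*
 * Illustrative only: clear ref/mod bits on a batch of pages, mark them
 * "reusable", and perform one TLB flush round for the whole batch.
 */
static void
mark_pages_reusable(ppnum_t *page_list, unsigned int page_count)
{
	pmap_flush_context	pfc;
	unsigned int		i;

	pmap_flush_context_init(&pfc);

	for (i = 0; i < page_count; i++) {
		/*
		 * Clear the ref/mod bits and move the page into the
		 * "reusable" bucket.  PMAP_OPTIONS_NOFLUSH defers the
		 * per-page TLB invalidation: the affected pmaps/cpus are
		 * recorded in "pfc" instead (the "else if (arg)" path).
		 */
		pmap_clear_refmod_options(page_list[i],
		    VM_MEM_MODIFIED | VM_MEM_REFERENCED,
		    PMAP_OPTIONS_NOFLUSH | PMAP_OPTIONS_SET_REUSABLE,
		    (void *)&pfc);
	}

	/* One interprocessor flush round for the whole batch. */
	pmap_flush(&pfc);
}

The intent of the delayed path is to amortize interprocessor TLB shoot-downs: each call records the affected pmaps and cpus in the context instead of raising an IPI per page, and pmap_flush() then performs a single shoot-down round covering every deferred invalidation.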