/*
 * Copyright (C) 2008 Apple Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"
#include "JSStack.h"
#include "JSStackInlines.h"

#include "ConservativeRoots.h"
#include "Interpreter.h"

namespace JSC {

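// Process-wide count of bytes currently committed across all JSStack
// instances. All reads and writes go through stackStatisticsMutex().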
static size_t committedBytesCount = 0;

static Mutex& stackStatisticsMutex()
{
    DEFINE_STATIC_LOCAL(Mutex, staticMutex, ());
    return staticMutex;
}

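// Reserve the whole stack's address range up front; no pages are committed
// here. Pages are committed lazily by growSlowCase() as the stack grows.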
JSStack::JSStack(VM& vm, size_t capacity)
    : m_end(0)
    , m_topCallFrame(vm.topCallFrame)
{
    ASSERT(capacity && isPageAligned(capacity));

    m_reservation = PageReservation::reserve(roundUpAllocationSize(capacity * sizeof(Register), commitSize), OSAllocator::JSVMStackPages);
    m_end = static_cast<Register*>(m_reservation.base());
    m_commitEnd = static_cast<Register*>(m_reservation.base());

    disableErrorStackReserve();

    m_topCallFrame = 0;
}

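// Decommit whatever was committed, update the global statistic, and release
// the reservation.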
JSStack::~JSStack()
{
    void* base = m_reservation.base();
    m_reservation.decommit(base, reinterpret_cast<intptr_t>(m_commitEnd) - reinterpret_cast<intptr_t>(base));
    addToCommittedByteCount(-(reinterpret_cast<intptr_t>(m_commitEnd) - reinterpret_cast<intptr_t>(base)));
    m_reservation.deallocate();
}

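// Slow path for growing the stack. The fast path is expected to be an inline
// grow() check against m_end (presumably in JSStack.h / JSStackInlines.h,
// which this file includes); we only land here when more pages may need to
// be committed. A minimal, hypothetical sketch of a caller:
//
//     Register* newEnd = ...; // the register slot the caller needs to reach
//     if (!stack.grow(newEnd))
//         return false; // stack overflow: the budget (m_useableEnd) was hit
//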
bool JSStack::growSlowCase(Register* newEnd)
{
    // If we have already committed enough memory to satisfy this request,
    // just update the end pointer and return.
    if (newEnd <= m_commitEnd) {
        m_end = newEnd;
        return true;
    }

    // Compute the chunk size of additional memory to commit, and check
    // that it is still within our budget. If not, we'll fail to grow and
    // return false.
    long delta = roundUpAllocationSize(reinterpret_cast<char*>(newEnd) - reinterpret_cast<char*>(m_commitEnd), commitSize);
    if (reinterpret_cast<char*>(m_commitEnd) + delta > reinterpret_cast<char*>(m_useableEnd))
        return false;

    // Otherwise, the growth is still within our budget. Go ahead and commit
    // it and return true.
    m_reservation.commit(m_commitEnd, delta);
    addToCommittedByteCount(delta);
    m_commitEnd = reinterpret_cast_ptr<Register*>(reinterpret_cast<char*>(m_commitEnd) + delta);
    m_end = newEnd;
    return true;
}

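// Let the garbage collector conservatively scan the live portion of the
// stack (from begin() up to the current top) for possible heap pointers.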
void JSStack::gatherConservativeRoots(ConservativeRoots& conservativeRoots)
{
    conservativeRoots.add(begin(), getTopOfStack());
}

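// Same as above, but additionally lets JIT stub routines and DFG code blocks
// that are only referenced from the stack be kept alive by the scan.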
void JSStack::gatherConservativeRoots(ConservativeRoots& conservativeRoots, JITStubRoutineSet& jitStubRoutines, DFGCodeBlocks& dfgCodeBlocks)
{
    conservativeRoots.add(begin(), getTopOfStack(), jitStubRoutines, dfgCodeBlocks);
}

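// Return all committed stack pages to the OS. The reservation itself is kept,
// so the stack can grow and commit pages again later.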
void JSStack::releaseExcessCapacity()
{
    ptrdiff_t delta = reinterpret_cast<uintptr_t>(m_commitEnd) - reinterpret_cast<uintptr_t>(m_reservation.base());
    m_reservation.decommit(m_reservation.base(), delta);
    addToCommittedByteCount(-delta);
    m_commitEnd = static_cast<Register*>(m_reservation.base());
}

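// Touch the statistics mutex once up front so its lazy construction happens
// before multiple threads can race to create it.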
void JSStack::initializeThreading()
{
    stackStatisticsMutex();
}

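// Thread-safe accessors for the process-wide committed byte count, used for
// memory statistics.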
size_t JSStack::committedByteCount()
{
    MutexLocker locker(stackStatisticsMutex());
    return committedBytesCount;
}

void JSStack::addToCommittedByteCount(long byteCount)
{
    MutexLocker locker(stackStatisticsMutex());
    ASSERT(static_cast<long>(committedBytesCount) + byteCount > -1);
    committedBytesCount += byteCount;
}

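// Error stack reserve: disableErrorStackReserve() keeps the last commitSize
// chunk of the reservation out of the normal growth budget, while
// enableErrorStackReserve() temporarily makes it available. As the comment in
// disableErrorStackReserve() indicates, Interpreter::ErrorHandlingMode enables
// the reserve so error handling has headroom to run near the stack limit.
// A hedged, hypothetical sketch of the expected RAII usage (the real
// ErrorHandlingMode lives with the Interpreter):
//
//     {
//         Interpreter::ErrorHandlingMode mode(...); // enables the reserve
//         // ... build and throw the error using the extra headroom ...
//     } // destruction disables the reserve again
//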
void JSStack::enableErrorStackReserve()
{
    m_useableEnd = reservationEnd();
}

void JSStack::disableErrorStackReserve()
{
    char* useableEnd = reinterpret_cast<char*>(reservationEnd()) - commitSize;
    m_useableEnd = reinterpret_cast_ptr<Register*>(useableEnd);

    // By the time we get here, we are guaranteed to be destroying the last
    // Interpreter::ErrorHandlingMode that enabled this reserve in the first
    // place. That means the stack space beyond the pre-reserve m_useableEnd
    // was not in use before the reserve was enabled. Hence, it is safe to
    // shrink back to that m_useableEnd.
    if (m_end > m_useableEnd) {
        ASSERT(m_topCallFrame->frameExtent() <= m_useableEnd);
        shrink(m_useableEnd);
    }
}

} // namespace JSC