/*
 * Copyright (C) 2008-2015 Apple Inc. All rights reserved.
 * Copyright (C) 2008 Cameron Zwarich <cwzwarich@uwaterloo.ca>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of Apple Inc. ("Apple") nor the names of
 *    its contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
 * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CodeBlock_h
#define CodeBlock_h

#include "ArrayProfile.h"
#include "ByValInfo.h"
#include "BytecodeConventions.h"
#include "BytecodeLivenessAnalysis.h"
#include "CallLinkInfo.h"
#include "CallReturnOffsetToBytecodeOffset.h"
#include "CodeBlockHash.h"
#include "CodeBlockSet.h"
#include "ConcurrentJITLock.h"
#include "CodeOrigin.h"
#include "CodeType.h"
#include "CompactJITCodeMap.h"
#include "DFGCommon.h"
#include "DFGCommonData.h"
#include "DFGExitProfile.h"
#include "DeferredCompilationCallback.h"
#include "EvalCodeCache.h"
#include "ExecutionCounter.h"
#include "ExpressionRangeInfo.h"
#include "HandlerInfo.h"
#include "ObjectAllocationProfile.h"
#include "Options.h"
#include "PutPropertySlot.h"
#include "Instruction.h"
#include "JITCode.h"
#include "JITWriteBarrier.h"
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "LLIntCallLinkInfo.h"
#include "LazyOperandValueProfile.h"
#include "ProfilerCompilation.h"
#include "ProfilerJettisonReason.h"
#include "RegExpObject.h"
#include "StructureStubInfo.h"
#include "UnconditionalFinalizer.h"
#include "ValueProfile.h"
#include "VirtualRegister.h"
#include "Watchpoint.h"
#include <wtf/Bag.h>
#include <wtf/FastMalloc.h>
#include <wtf/RefCountedArray.h>
#include <wtf/RefPtr.h>
#include <wtf/SegmentedVector.h>
#include <wtf/Vector.h>
#include <wtf/text/WTFString.h>

namespace JSC {

class ExecState;
class LLIntOffsetsExtractor;
class RepatchBuffer;
class TypeLocation;

enum ReoptimizationMode { DontCountReoptimization, CountReoptimization };

class CodeBlock : public ThreadSafeRefCounted<CodeBlock>, public UnconditionalFinalizer, public WeakReferenceHarvester {
    WTF_MAKE_FAST_ALLOCATED;
    friend class BytecodeLivenessAnalysis;
    friend class JIT;
    friend class LLIntOffsetsExtractor;
public:
    enum CopyParsedBlockTag { CopyParsedBlock };
protected:
    CodeBlock(CopyParsedBlockTag, CodeBlock& other);

    CodeBlock(ScriptExecutable* ownerExecutable, UnlinkedCodeBlock*, JSScope*, PassRefPtr<SourceProvider>, unsigned sourceOffset, unsigned firstLineColumnOffset);

    WriteBarrier<JSGlobalObject> m_globalObject;
    Heap* m_heap;

public:
    JS_EXPORT_PRIVATE virtual ~CodeBlock();

    UnlinkedCodeBlock* unlinkedCodeBlock() const { return m_unlinkedCode.get(); }

    CString inferredName() const;
    CodeBlockHash hash() const;
    bool hasHash() const;
    bool isSafeToComputeHash() const;
    CString hashAsStringIfPossible() const;
    CString sourceCodeForTools() const; // Not quite the actual source we parsed; this will do things like prefix the source for a function with a reified signature.
    CString sourceCodeOnOneLine() const; // As sourceCodeForTools(), but replaces all whitespace runs with a single space.
    void dumpAssumingJITType(PrintStream&, JITCode::JITType) const;
    void dump(PrintStream&) const;

    int numParameters() const { return m_numParameters; }
    void setNumParameters(int newValue);

    int* addressOfNumParameters() { return &m_numParameters; }
    static ptrdiff_t offsetOfNumParameters() { return OBJECT_OFFSETOF(CodeBlock, m_numParameters); }

    CodeBlock* alternative() { return m_alternative.get(); }
    PassRefPtr<CodeBlock> releaseAlternative() { return m_alternative.release(); }
    void setAlternative(PassRefPtr<CodeBlock> alternative) { m_alternative = alternative; }

    template <typename Functor> void forEachRelatedCodeBlock(Functor&& functor)
    {
        Functor f(std::forward<Functor>(functor));
        Vector<CodeBlock*, 4> codeBlocks;
        codeBlocks.append(this);

        while (!codeBlocks.isEmpty()) {
            CodeBlock* currentCodeBlock = codeBlocks.takeLast();
            f(currentCodeBlock);

            if (CodeBlock* alternative = currentCodeBlock->alternative())
                codeBlocks.append(alternative);
            if (CodeBlock* osrEntryBlock = currentCodeBlock->specialOSREntryBlockOrNull())
                codeBlocks.append(osrEntryBlock);
        }
    }
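
    // A minimal usage sketch (hypothetical caller; any copyable callable works
    // as the Functor):
    //
    //     codeBlock->forEachRelatedCodeBlock([] (CodeBlock* block) {
    //         block->dump(WTF::dataFile());
    //     });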

    CodeSpecializationKind specializationKind() const
    {
        return specializationFromIsConstruct(m_isConstructor);
    }

    CodeBlock* baselineAlternative();

    // FIXME: Get rid of this.
    // https://bugs.webkit.org/show_bug.cgi?id=123677
    CodeBlock* baselineVersion();

    void visitAggregate(SlotVisitor&);

    void dumpSource();
    void dumpSource(PrintStream&);

    void dumpBytecode();
    void dumpBytecode(PrintStream&);
    void dumpBytecode(
        PrintStream&, unsigned bytecodeOffset,
        const StubInfoMap& = StubInfoMap(), const CallLinkInfoMap& = CallLinkInfoMap());
    void printStructures(PrintStream&, const Instruction*);
    void printStructure(PrintStream&, const char* name, const Instruction*, int operand);

    bool isStrictMode() const { return m_isStrictMode; }
    ECMAMode ecmaMode() const { return isStrictMode() ? StrictMode : NotStrictMode; }

    inline bool isKnownNotImmediate(int index)
    {
        if (index == m_thisRegister.offset() && !m_isStrictMode)
            return true;

        if (isConstantRegisterIndex(index))
            return getConstant(index).isCell();

        return false;
    }

    ALWAYS_INLINE bool isTemporaryRegisterIndex(int index)
    {
        return index >= m_numVars;
    }

    enum class RequiredHandler {
        CatchHandler,
        AnyHandler
    };
    HandlerInfo* handlerForBytecodeOffset(unsigned bytecodeOffset, RequiredHandler = RequiredHandler::AnyHandler);
    unsigned lineNumberForBytecodeOffset(unsigned bytecodeOffset);
    unsigned columnNumberForBytecodeOffset(unsigned bytecodeOffset);
    void expressionRangeForBytecodeOffset(unsigned bytecodeOffset, int& divot,
        int& startOffset, int& endOffset, unsigned& line, unsigned& column);

    void getStubInfoMap(const ConcurrentJITLocker&, StubInfoMap& result);
    void getStubInfoMap(StubInfoMap& result);

    void getCallLinkInfoMap(const ConcurrentJITLocker&, CallLinkInfoMap& result);
    void getCallLinkInfoMap(CallLinkInfoMap& result);

#if ENABLE(JIT)
    StructureStubInfo* addStubInfo();
    Bag<StructureStubInfo>::iterator stubInfoBegin() { return m_stubInfos.begin(); }
    Bag<StructureStubInfo>::iterator stubInfoEnd() { return m_stubInfos.end(); }

    // O(n) operation. Use getStubInfoMap() unless you really only intend to get one
    // stub info.
    StructureStubInfo* findStubInfo(CodeOrigin);

    void resetStub(StructureStubInfo&);

    ByValInfo& getByValInfo(unsigned bytecodeIndex)
    {
        return *(binarySearch<ByValInfo, unsigned>(m_byValInfos, m_byValInfos.size(), bytecodeIndex, getByValInfoBytecodeIndex));
    }

    CallLinkInfo* addCallLinkInfo();
    Bag<CallLinkInfo>::iterator callLinkInfosBegin() { return m_callLinkInfos.begin(); }
    Bag<CallLinkInfo>::iterator callLinkInfosEnd() { return m_callLinkInfos.end(); }

    // This is a slow function call used primarily for compiling OSR exits in the
    // case where there has been inlining. Chances are if you want to use this,
    // you're really looking for a CallLinkInfoMap to amortize the cost of
    // calling this.
    CallLinkInfo* getCallLinkInfoForBytecodeIndex(unsigned bytecodeIndex);
#endif // ENABLE(JIT)

    void unlinkIncomingCalls();

#if ENABLE(JIT)
    void unlinkCalls();

    void linkIncomingCall(ExecState* callerFrame, CallLinkInfo*);
    void linkIncomingPolymorphicCall(ExecState* callerFrame, PolymorphicCallNode*);
#endif // ENABLE(JIT)

    void linkIncomingCall(ExecState* callerFrame, LLIntCallLinkInfo*);

    void setJITCodeMap(std::unique_ptr<CompactJITCodeMap> jitCodeMap)
    {
        m_jitCodeMap = WTF::move(jitCodeMap);
    }
    CompactJITCodeMap* jitCodeMap()
    {
        return m_jitCodeMap.get();
    }

    unsigned bytecodeOffset(Instruction* returnAddress)
    {
        RELEASE_ASSERT(returnAddress >= instructions().begin() && returnAddress < instructions().end());
        return returnAddress - instructions().begin();
    }

    unsigned numberOfInstructions() const { return m_instructions.size(); }
    RefCountedArray<Instruction>& instructions() { return m_instructions; }
    const RefCountedArray<Instruction>& instructions() const { return m_instructions; }

    size_t predictedMachineCodeSize();

    bool usesOpcode(OpcodeID);

    unsigned instructionCount() const { return m_instructions.size(); }

    // Exactly equivalent to codeBlock->ownerExecutable()->installCode(codeBlock);
    void install();

    // Exactly equivalent to codeBlock->ownerExecutable()->newReplacementCodeBlockFor(codeBlock->specializationKind())
    PassRefPtr<CodeBlock> newReplacement();

    void setJITCode(PassRefPtr<JITCode> code)
    {
        ASSERT(m_heap->isDeferred());
        m_heap->reportExtraMemoryAllocated(code->size());
        ConcurrentJITLocker locker(m_lock);
        WTF::storeStoreFence(); // This is probably not needed because the lock will also do something similar, but it's good to be paranoid.
        m_jitCode = code;
    }
    PassRefPtr<JITCode> jitCode() { return m_jitCode; }
    JITCode::JITType jitType() const
    {
        JITCode* jitCode = m_jitCode.get();
        WTF::loadLoadFence();
        JITCode::JITType result = JITCode::jitTypeFor(jitCode);
        WTF::loadLoadFence(); // This probably isn't needed. Oh well, paranoia is good.
        return result;
    }

    bool hasBaselineJITProfiling() const
    {
        return jitType() == JITCode::BaselineJIT;
    }

#if ENABLE(JIT)
    virtual CodeBlock* replacement() = 0;

    virtual DFG::CapabilityLevel capabilityLevelInternal() = 0;
    DFG::CapabilityLevel capabilityLevel();
    DFG::CapabilityLevel capabilityLevelState() { return m_capabilityLevelState; }

    bool hasOptimizedReplacement(JITCode::JITType typeToReplace);
    bool hasOptimizedReplacement(); // the typeToReplace is my JITType
#endif

    void jettison(Profiler::JettisonReason, ReoptimizationMode = DontCountReoptimization, const FireDetail* = nullptr);

    ScriptExecutable* ownerExecutable() const { return m_ownerExecutable.get(); }

    void setVM(VM* vm) { m_vm = vm; }
    VM* vm() { return m_vm; }

    void setThisRegister(VirtualRegister thisRegister) { m_thisRegister = thisRegister; }
    VirtualRegister thisRegister() const { return m_thisRegister; }

    bool usesEval() const { return m_unlinkedCode->usesEval(); }

    void setScopeRegister(VirtualRegister scopeRegister)
    {
        ASSERT(scopeRegister.isLocal() || !scopeRegister.isValid());
        m_scopeRegister = scopeRegister;
    }

    VirtualRegister scopeRegister() const
    {
        return m_scopeRegister;
    }

    void setActivationRegister(VirtualRegister activationRegister)
    {
        m_lexicalEnvironmentRegister = activationRegister;
    }

    VirtualRegister activationRegister() const
    {
        ASSERT(m_lexicalEnvironmentRegister.isValid());
        return m_lexicalEnvironmentRegister;
    }

    VirtualRegister uncheckedActivationRegister()
    {
        return m_lexicalEnvironmentRegister;
    }

    bool needsActivation() const
    {
        ASSERT(m_lexicalEnvironmentRegister.isValid() == m_needsActivation);
        return m_needsActivation;
    }

    CodeType codeType() const { return m_unlinkedCode->codeType(); }
    PutPropertySlot::Context putByIdContext() const
    {
        if (codeType() == EvalCode)
            return PutPropertySlot::PutByIdEval;
        return PutPropertySlot::PutById;
    }

    SourceProvider* source() const { return m_source.get(); }
    unsigned sourceOffset() const { return m_sourceOffset; }
    unsigned firstLineColumnOffset() const { return m_firstLineColumnOffset; }

    size_t numberOfJumpTargets() const { return m_unlinkedCode->numberOfJumpTargets(); }
    unsigned jumpTarget(int index) const { return m_unlinkedCode->jumpTarget(index); }

    void clearEvalCache();

    String nameForRegister(VirtualRegister);

#if ENABLE(JIT)
    void setNumberOfByValInfos(size_t size) { m_byValInfos.resizeToFit(size); }
    size_t numberOfByValInfos() const { return m_byValInfos.size(); }
    ByValInfo& byValInfo(size_t index) { return m_byValInfos[index]; }
#endif

    unsigned numberOfArgumentValueProfiles()
    {
        ASSERT(m_numParameters >= 0);
        ASSERT(m_argumentValueProfiles.size() == static_cast<unsigned>(m_numParameters));
        return m_argumentValueProfiles.size();
    }
    ValueProfile* valueProfileForArgument(unsigned argumentIndex)
    {
        ValueProfile* result = &m_argumentValueProfiles[argumentIndex];
        ASSERT(result->m_bytecodeOffset == -1);
        return result;
    }

    unsigned numberOfValueProfiles() { return m_valueProfiles.size(); }
    ValueProfile* valueProfile(int index) { return &m_valueProfiles[index]; }
    ValueProfile* valueProfileForBytecodeOffset(int bytecodeOffset);
    SpeculatedType valueProfilePredictionForBytecodeOffset(const ConcurrentJITLocker& locker, int bytecodeOffset)
    {
        return valueProfileForBytecodeOffset(bytecodeOffset)->computeUpdatedPrediction(locker);
    }

    unsigned totalNumberOfValueProfiles()
    {
        return numberOfArgumentValueProfiles() + numberOfValueProfiles();
    }
    ValueProfile* getFromAllValueProfiles(unsigned index)
    {
        if (index < numberOfArgumentValueProfiles())
            return valueProfileForArgument(index);
        return valueProfile(index - numberOfArgumentValueProfiles());
    }
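
    // Sketch of how a caller might walk every profile, argument profiles first
    // (this is the indexing getFromAllValueProfiles() implements):
    //
    //     for (unsigned i = 0; i < codeBlock->totalNumberOfValueProfiles(); ++i) {
    //         ValueProfile* profile = codeBlock->getFromAllValueProfiles(i);
    //         ... // e.g. read profile->m_bytecodeOffset (-1 for argument profiles)
    //     }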

    RareCaseProfile* addRareCaseProfile(int bytecodeOffset)
    {
        m_rareCaseProfiles.append(RareCaseProfile(bytecodeOffset));
        return &m_rareCaseProfiles.last();
    }
    unsigned numberOfRareCaseProfiles() { return m_rareCaseProfiles.size(); }
    RareCaseProfile* rareCaseProfile(int index) { return &m_rareCaseProfiles[index]; }
    RareCaseProfile* rareCaseProfileForBytecodeOffset(int bytecodeOffset);

    bool likelyToTakeSlowCase(int bytecodeOffset)
    {
        if (!hasBaselineJITProfiling())
            return false;
        unsigned value = rareCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
        return value >= Options::likelyToTakeSlowCaseMinimumCount();
    }

    bool couldTakeSlowCase(int bytecodeOffset)
    {
        if (!hasBaselineJITProfiling())
            return false;
        unsigned value = rareCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
        return value >= Options::couldTakeSlowCaseMinimumCount();
    }

    RareCaseProfile* addSpecialFastCaseProfile(int bytecodeOffset)
    {
        m_specialFastCaseProfiles.append(RareCaseProfile(bytecodeOffset));
        return &m_specialFastCaseProfiles.last();
    }
    unsigned numberOfSpecialFastCaseProfiles() { return m_specialFastCaseProfiles.size(); }
    RareCaseProfile* specialFastCaseProfile(int index) { return &m_specialFastCaseProfiles[index]; }
    RareCaseProfile* specialFastCaseProfileForBytecodeOffset(int bytecodeOffset)
    {
        return tryBinarySearch<RareCaseProfile, int>(
            m_specialFastCaseProfiles, m_specialFastCaseProfiles.size(), bytecodeOffset,
            getRareCaseProfileBytecodeOffset);
    }

    bool likelyToTakeSpecialFastCase(int bytecodeOffset)
    {
        if (!hasBaselineJITProfiling())
            return false;
        unsigned specialFastCaseCount = specialFastCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
        return specialFastCaseCount >= Options::likelyToTakeSlowCaseMinimumCount();
    }

    bool couldTakeSpecialFastCase(int bytecodeOffset)
    {
        if (!hasBaselineJITProfiling())
            return false;
        unsigned specialFastCaseCount = specialFastCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
        return specialFastCaseCount >= Options::couldTakeSlowCaseMinimumCount();
    }

    bool likelyToTakeDeepestSlowCase(int bytecodeOffset)
    {
        if (!hasBaselineJITProfiling())
            return false;
        unsigned slowCaseCount = rareCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
        unsigned specialFastCaseCount = specialFastCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
        unsigned value = slowCaseCount - specialFastCaseCount;
        return value >= Options::likelyToTakeSlowCaseMinimumCount();
    }

    bool likelyToTakeAnySlowCase(int bytecodeOffset)
    {
        if (!hasBaselineJITProfiling())
            return false;
        unsigned slowCaseCount = rareCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
        unsigned specialFastCaseCount = specialFastCaseProfileForBytecodeOffset(bytecodeOffset)->m_counter;
        unsigned value = slowCaseCount + specialFastCaseCount;
        return value >= Options::likelyToTakeSlowCaseMinimumCount();
    }
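
    // These predicates are how the profiling above typically gets consumed;
    // e.g. an optimizing compiler might do something like (sketch;
    // planForSlowPath() is a hypothetical helper, not part of this class):
    //
    //     if (profiledBlock->likelyToTakeSlowCase(bytecodeOffset))
    //         planForSlowPath(node);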

    unsigned numberOfArrayProfiles() const { return m_arrayProfiles.size(); }
    const ArrayProfileVector& arrayProfiles() { return m_arrayProfiles; }
    ArrayProfile* addArrayProfile(unsigned bytecodeOffset)
    {
        m_arrayProfiles.append(ArrayProfile(bytecodeOffset));
        return &m_arrayProfiles.last();
    }
    ArrayProfile* getArrayProfile(unsigned bytecodeOffset);
    ArrayProfile* getOrAddArrayProfile(unsigned bytecodeOffset);

    // Exception handling support

    size_t numberOfExceptionHandlers() const { return m_rareData ? m_rareData->m_exceptionHandlers.size() : 0; }
    HandlerInfo& exceptionHandler(int index) { RELEASE_ASSERT(m_rareData); return m_rareData->m_exceptionHandlers[index]; }

    bool hasExpressionInfo() { return m_unlinkedCode->hasExpressionInfo(); }

#if ENABLE(DFG_JIT)
    Vector<CodeOrigin, 0, UnsafeVectorOverflow>& codeOrigins()
    {
        return m_jitCode->dfgCommon()->codeOrigins;
    }

    // Having code origins implies that there has been some inlining.
    bool hasCodeOrigins()
    {
        return JITCode::isOptimizingJIT(jitType());
    }

    bool canGetCodeOrigin(unsigned index)
    {
        if (!hasCodeOrigins())
            return false;
        return index < codeOrigins().size();
    }

    CodeOrigin codeOrigin(unsigned index)
    {
        return codeOrigins()[index];
    }

    bool addFrequentExitSite(const DFG::FrequentExitSite& site)
    {
        ASSERT(JITCode::isBaselineCode(jitType()));
        ConcurrentJITLocker locker(m_lock);
        return m_exitProfile.add(locker, site);
    }

    bool hasExitSite(const ConcurrentJITLocker& locker, const DFG::FrequentExitSite& site) const
    {
        return m_exitProfile.hasExitSite(locker, site);
    }
    bool hasExitSite(const DFG::FrequentExitSite& site) const
    {
        ConcurrentJITLocker locker(m_lock);
        return hasExitSite(locker, site);
    }

    DFG::ExitProfile& exitProfile() { return m_exitProfile; }

    CompressedLazyOperandValueProfileHolder& lazyOperandValueProfiles()
    {
        return m_lazyOperandValueProfiles;
    }
#endif // ENABLE(DFG_JIT)

    // Constant Pool
#if ENABLE(DFG_JIT)
    size_t numberOfIdentifiers() const { return m_unlinkedCode->numberOfIdentifiers() + numberOfDFGIdentifiers(); }
    size_t numberOfDFGIdentifiers() const
    {
        if (!JITCode::isOptimizingJIT(jitType()))
            return 0;

        return m_jitCode->dfgCommon()->dfgIdentifiers.size();
    }

    const Identifier& identifier(int index) const
    {
        size_t unlinkedIdentifiers = m_unlinkedCode->numberOfIdentifiers();
        if (static_cast<unsigned>(index) < unlinkedIdentifiers)
            return m_unlinkedCode->identifier(index);
        ASSERT(JITCode::isOptimizingJIT(jitType()));
        return m_jitCode->dfgCommon()->dfgIdentifiers[index - unlinkedIdentifiers];
    }
#else
    size_t numberOfIdentifiers() const { return m_unlinkedCode->numberOfIdentifiers(); }
    const Identifier& identifier(int index) const { return m_unlinkedCode->identifier(index); }
#endif

    Vector<WriteBarrier<Unknown>>& constants() { return m_constantRegisters; }
    Vector<SourceCodeRepresentation>& constantsSourceCodeRepresentation() { return m_constantsSourceCodeRepresentation; }
    unsigned addConstant(JSValue v)
    {
        unsigned result = m_constantRegisters.size();
        m_constantRegisters.append(WriteBarrier<Unknown>());
        m_constantRegisters.last().set(m_globalObject->vm(), m_ownerExecutable.get(), v);
        m_constantsSourceCodeRepresentation.append(SourceCodeRepresentation::Other);
        return result;
    }

    unsigned addConstantLazily()
    {
        unsigned result = m_constantRegisters.size();
        m_constantRegisters.append(WriteBarrier<Unknown>());
        m_constantsSourceCodeRepresentation.append(SourceCodeRepresentation::Other);
        return result;
    }

    WriteBarrier<Unknown>& constantRegister(int index) { return m_constantRegisters[index - FirstConstantRegisterIndex]; }
    ALWAYS_INLINE bool isConstantRegisterIndex(int index) const { return index >= FirstConstantRegisterIndex; }
    ALWAYS_INLINE JSValue getConstant(int index) const { return m_constantRegisters[index - FirstConstantRegisterIndex].get(); }
    ALWAYS_INLINE SourceCodeRepresentation constantSourceCodeRepresentation(int index) const { return m_constantsSourceCodeRepresentation[index - FirstConstantRegisterIndex]; }
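    // Note: constants are addressed as operands at or above FirstConstantRegisterIndex
    // (see BytecodeConventions.h), so constant pool entry k corresponds to operand
    // index FirstConstantRegisterIndex + k; the subtractions above undo that mapping.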

    FunctionExecutable* functionDecl(int index) { return m_functionDecls[index].get(); }
    int numberOfFunctionDecls() { return m_functionDecls.size(); }
    FunctionExecutable* functionExpr(int index) { return m_functionExprs[index].get(); }

    RegExp* regexp(int index) const { return m_unlinkedCode->regexp(index); }

    unsigned numberOfConstantBuffers() const
    {
        if (!m_rareData)
            return 0;
        return m_rareData->m_constantBuffers.size();
    }
    unsigned addConstantBuffer(const Vector<JSValue>& buffer)
    {
        createRareDataIfNecessary();
        unsigned size = m_rareData->m_constantBuffers.size();
        m_rareData->m_constantBuffers.append(buffer);
        return size;
    }

    Vector<JSValue>& constantBufferAsVector(unsigned index)
    {
        ASSERT(m_rareData);
        return m_rareData->m_constantBuffers[index];
    }
    JSValue* constantBuffer(unsigned index)
    {
        return constantBufferAsVector(index).data();
    }

    Heap* heap() const { return m_heap; }
    JSGlobalObject* globalObject() { return m_globalObject.get(); }

    JSGlobalObject* globalObjectFor(CodeOrigin);

    BytecodeLivenessAnalysis& livenessAnalysis()
    {
        {
            ConcurrentJITLocker locker(m_lock);
            if (!!m_livenessAnalysis)
                return *m_livenessAnalysis;
        }
        std::unique_ptr<BytecodeLivenessAnalysis> analysis =
            std::make_unique<BytecodeLivenessAnalysis>(this);
        {
            ConcurrentJITLocker locker(m_lock);
            if (!m_livenessAnalysis)
                m_livenessAnalysis = WTF::move(analysis);
            return *m_livenessAnalysis;
        }
    }
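
    // Note the double-checked pattern above: the analysis is computed outside
    // the lock, and if another thread won the race to install one first, the
    // local copy is simply discarded.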

    void validate();

    // Jump Tables

    size_t numberOfSwitchJumpTables() const { return m_rareData ? m_rareData->m_switchJumpTables.size() : 0; }
    SimpleJumpTable& addSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_switchJumpTables.append(SimpleJumpTable()); return m_rareData->m_switchJumpTables.last(); }
    SimpleJumpTable& switchJumpTable(int tableIndex) { RELEASE_ASSERT(m_rareData); return m_rareData->m_switchJumpTables[tableIndex]; }
    void clearSwitchJumpTables()
    {
        if (!m_rareData)
            return;
        m_rareData->m_switchJumpTables.clear();
    }

    size_t numberOfStringSwitchJumpTables() const { return m_rareData ? m_rareData->m_stringSwitchJumpTables.size() : 0; }
    StringJumpTable& addStringSwitchJumpTable() { createRareDataIfNecessary(); m_rareData->m_stringSwitchJumpTables.append(StringJumpTable()); return m_rareData->m_stringSwitchJumpTables.last(); }
    StringJumpTable& stringSwitchJumpTable(int tableIndex) { RELEASE_ASSERT(m_rareData); return m_rareData->m_stringSwitchJumpTables[tableIndex]; }

    SymbolTable* symbolTable() const { return m_symbolTable.get(); }

    EvalCodeCache& evalCodeCache() { createRareDataIfNecessary(); return m_rareData->m_evalCodeCache; }

    enum ShrinkMode {
        // Shrink prior to generating machine code that may point directly into vectors.
        EarlyShrink,

        // Shrink after generating machine code, and after possibly creating new vectors
        // and appending to others. At this time it is not safe to shrink certain vectors
        // because we would have generated machine code that references them directly.
        LateShrink
    };
    void shrinkToFit(ShrinkMode);

    // Functions for controlling when JITting kicks in, in a mixed mode
    // execution world.

    bool checkIfJITThresholdReached()
    {
        return m_llintExecuteCounter.checkIfThresholdCrossedAndSet(this);
    }

    void dontJITAnytimeSoon()
    {
        m_llintExecuteCounter.deferIndefinitely();
    }

    void jitAfterWarmUp()
    {
        m_llintExecuteCounter.setNewThreshold(Options::thresholdForJITAfterWarmUp(), this);
    }

    void jitSoon()
    {
        m_llintExecuteCounter.setNewThreshold(Options::thresholdForJITSoon(), this);
    }

    const BaselineExecutionCounter& llintExecuteCounter() const
    {
        return m_llintExecuteCounter;
    }

    // Functions for controlling when tiered compilation kicks in. This
    // controls both when the optimizing compiler is invoked and when OSR
    // entry happens. Two triggers exist: the loop trigger and the return
    // trigger. In either case, when an addition to m_jitExecuteCounter
    // causes it to become non-negative, the optimizing compiler is
    // invoked. This includes a fast check to see if this CodeBlock has
    // already been optimized (i.e. replacement() returns a CodeBlock
    // that was optimized with a higher tier JIT than this one). In the
    // case of the loop trigger, if the optimized compilation succeeds
    // (or has already succeeded in the past) then OSR is attempted to
    // redirect program flow into the optimized code.

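    // For example, a loop back edge in baseline code conceptually does the
    // following (a sketch, not the exact emitted sequence; the option name is
    // an assumption about Options.h):
    //
    //     m_jitExecuteCounter.m_counter += Options::executionCounterIncrementForLoop();
    //     if (m_jitExecuteCounter.m_counter >= 0)
    //         ... // slow path: checkIfOptimizationThresholdReached(), maybe OSR
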
    // These functions are called from within the optimization triggers,
    // and are used as a single point at which we define the heuristics
    // for how much warm-up is mandated before the next optimization
    // trigger fires. All CodeBlocks start out with optimizeAfterWarmUp(),
    // as this is called from the CodeBlock constructor.

    // When we observe a lot of speculation failures, we trigger a
    // reoptimization. But each time, we increase the optimization trigger
    // to avoid thrashing.
    JS_EXPORT_PRIVATE unsigned reoptimizationRetryCounter() const;
    void countReoptimization();
#if ENABLE(JIT)
    unsigned numberOfDFGCompiles();

    int32_t codeTypeThresholdMultiplier() const;

    int32_t adjustedCounterValue(int32_t desiredThreshold);

    int32_t* addressOfJITExecuteCounter()
    {
        return &m_jitExecuteCounter.m_counter;
    }

    static ptrdiff_t offsetOfJITExecuteCounter() { return OBJECT_OFFSETOF(CodeBlock, m_jitExecuteCounter) + OBJECT_OFFSETOF(BaselineExecutionCounter, m_counter); }
    static ptrdiff_t offsetOfJITExecutionActiveThreshold() { return OBJECT_OFFSETOF(CodeBlock, m_jitExecuteCounter) + OBJECT_OFFSETOF(BaselineExecutionCounter, m_activeThreshold); }
    static ptrdiff_t offsetOfJITExecutionTotalCount() { return OBJECT_OFFSETOF(CodeBlock, m_jitExecuteCounter) + OBJECT_OFFSETOF(BaselineExecutionCounter, m_totalCount); }

    const BaselineExecutionCounter& jitExecuteCounter() const { return m_jitExecuteCounter; }

    unsigned optimizationDelayCounter() const { return m_optimizationDelayCounter; }

    // Check if the optimization threshold has been reached, and if not,
    // adjust the heuristics accordingly. Returns true if the threshold has
    // been reached.
    bool checkIfOptimizationThresholdReached();

    // Call this to force the next optimization trigger to fire. This is
    // rarely wise, since optimization triggers are typically more
    // expensive than executing baseline code.
    void optimizeNextInvocation();

    // Call this to prevent optimization from happening again. Note that
    // optimization will still happen after roughly 2^29 invocations,
    // so this is really meant to delay that as much as possible. This
    // is called if optimization failed, and we expect it to fail in
    // the future as well.
    void dontOptimizeAnytimeSoon();

    // Call this to reinitialize the counter to its starting state,
    // forcing a warm-up to happen before the next optimization trigger
    // fires. This is called in the CodeBlock constructor. It also
    // makes sense to call this if an OSR exit occurred. Note that
    // OSR exit code is code generated, so the value of the execute
    // counter that this corresponds to is also available directly.
    void optimizeAfterWarmUp();

    // Call this to force an optimization trigger to fire only after
    // a lot of warm-up.
    void optimizeAfterLongWarmUp();

    // Call this to cause an optimization trigger to fire soon, but
    // not necessarily the next one. This makes sense if optimization
    // succeeds. Successful optimization means that all calls are
    // relinked to the optimized code, so this only affects call
    // frames that are still executing this CodeBlock. The value here
    // is tuned to strike a balance between the cost of OSR entry
    // (which is too high to warrant making every loop back edge
    // trigger OSR immediately) and the cost of executing baseline
    // code (which is high enough that we don't necessarily want to
    // have a full warm-up). The intuition for calling this instead of
    // optimizeNextInvocation() is for the case of recursive functions
    // with loops. Consider that there may be N call frames of some
    // recursive function, for a reasonably large value of N. The top
    // one triggers optimization, and then returns, and then all of
    // the others return. We don't want optimization to be triggered on
    // each return, as that would be superfluous. It only makes sense
    // to trigger optimization if one of those functions becomes hot
    // in the baseline code.
    void optimizeSoon();

    void forceOptimizationSlowPathConcurrently();

    void setOptimizationThresholdBasedOnCompilationResult(CompilationResult);

    uint32_t osrExitCounter() const { return m_osrExitCounter; }

    void countOSRExit() { m_osrExitCounter++; }

    uint32_t* addressOfOSRExitCounter() { return &m_osrExitCounter; }

    static ptrdiff_t offsetOfOSRExitCounter() { return OBJECT_OFFSETOF(CodeBlock, m_osrExitCounter); }

    uint32_t adjustedExitCountThreshold(uint32_t desiredThreshold);
    uint32_t exitCountThresholdForReoptimization();
    uint32_t exitCountThresholdForReoptimizationFromLoop();
    bool shouldReoptimizeNow();
    bool shouldReoptimizeFromLoopNow();
#else // No JIT
    void optimizeAfterWarmUp() { }
    unsigned numberOfDFGCompiles() { return 0; }
#endif

    bool shouldOptimizeNow();
    void updateAllValueProfilePredictions();
    void updateAllArrayPredictions();
    void updateAllPredictions();

    unsigned frameRegisterCount();
    int stackPointerOffset();

    bool hasOpDebugForLineAndColumn(unsigned line, unsigned column);

    bool hasDebuggerRequests() const { return m_debuggerRequests; }
    void* debuggerRequestsAddress() { return &m_debuggerRequests; }

    void addBreakpoint(unsigned numBreakpoints);
    void removeBreakpoint(unsigned numBreakpoints)
    {
        ASSERT(m_numBreakpoints >= numBreakpoints);
        m_numBreakpoints -= numBreakpoints;
    }

    enum SteppingMode {
        SteppingModeDisabled,
        SteppingModeEnabled
    };
    void setSteppingMode(SteppingMode);

    void clearDebuggerRequests()
    {
        m_steppingMode = SteppingModeDisabled;
        m_numBreakpoints = 0;
    }

    // FIXME: Make these remaining members private.

    int m_numCalleeRegisters;
    int m_numVars;
    bool m_isConstructor : 1;

    // This is intentionally public; it's the responsibility of anyone doing any
    // of the following to hold the lock:
    //
    // - Modifying any inline cache in this code block.
    //
    // - Querying any inline cache in this code block, from a thread other than
    //   the main thread.
    //
    // Additionally, it's only legal to modify the inline cache on the main
    // thread. This means that the main thread can query the inline cache without
    // locking. This is crucial since executing the inline cache is effectively
    // "querying" it.
    //
    // Another exception to the rules is that the GC can do whatever it wants
    // without holding any locks, because the GC is guaranteed to wait until any
    // concurrent compilation threads finish what they're doing.
    mutable ConcurrentJITLock m_lock;
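
    // So a concurrent (non-main) thread would read the inline caches roughly
    // like this (sketch):
    //
    //     StubInfoMap stubInfos;
    //     {
    //         ConcurrentJITLocker locker(codeBlock->m_lock);
    //         codeBlock->getStubInfoMap(locker, stubInfos);
    //     }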

    bool m_shouldAlwaysBeInlined; // Not a bitfield because the JIT wants to store to it.
    bool m_allTransitionsHaveBeenMarked : 1; // Initialized and used on every GC.

    bool m_didFailFTLCompilation : 1;
    bool m_hasBeenCompiledWithFTL : 1;

    // Internal methods for use by validation code. They would be private if it
    // weren't for the fact that we use them from anonymous namespaces.
    void beginValidationDidFail();
    NO_RETURN_DUE_TO_CRASH void endValidationDidFail();

    bool isKnownToBeLiveDuringGC(); // Will only return valid results when called during GC. Assumes that you've already established that the owner executable is live.

    struct RareData {
        WTF_MAKE_FAST_ALLOCATED;
    public:
        Vector<HandlerInfo> m_exceptionHandlers;

        // Buffers used for large array literals
        Vector<Vector<JSValue>> m_constantBuffers;

        // Jump Tables
        Vector<SimpleJumpTable> m_switchJumpTables;
        Vector<StringJumpTable> m_stringSwitchJumpTables;

        EvalCodeCache m_evalCodeCache;
    };

protected:
    virtual void visitWeakReferences(SlotVisitor&) override;
    virtual void finalizeUnconditionally() override;

#if ENABLE(DFG_JIT)
    void tallyFrequentExitSites();
#else
    void tallyFrequentExitSites() { }
#endif

private:
    friend class CodeBlockSet;

    CodeBlock* specialOSREntryBlockOrNull();

    void noticeIncomingCall(ExecState* callerFrame);

    double optimizationThresholdScalingFactor();

    void updateAllPredictionsAndCountLiveness(unsigned& numberOfLiveNonArgumentValueProfiles, unsigned& numberOfSamplesInProfiles);

    void setConstantRegisters(const Vector<WriteBarrier<Unknown>>& constants, const Vector<SourceCodeRepresentation>& constantsSourceCodeRepresentation)
    {
        ASSERT(constants.size() == constantsSourceCodeRepresentation.size());
        size_t count = constants.size();
        m_constantRegisters.resizeToFit(count);
        for (size_t i = 0; i < count; i++)
            m_constantRegisters[i].set(*m_vm, ownerExecutable(), constants[i].get());
        m_constantsSourceCodeRepresentation = constantsSourceCodeRepresentation;
    }

    void dumpBytecode(
        PrintStream&, ExecState*, const Instruction* begin, const Instruction*&,
        const StubInfoMap& = StubInfoMap(), const CallLinkInfoMap& = CallLinkInfoMap());

    CString registerName(int r) const;
    CString constantName(int index) const;
    void printUnaryOp(PrintStream&, ExecState*, int location, const Instruction*&, const char* op);
    void printBinaryOp(PrintStream&, ExecState*, int location, const Instruction*&, const char* op);
    void printConditionalJump(PrintStream&, ExecState*, const Instruction*, const Instruction*&, int location, const char* op);
    void printGetByIdOp(PrintStream&, ExecState*, int location, const Instruction*&);
    void printGetByIdCacheStatus(PrintStream&, ExecState*, int location, const StubInfoMap&);
    enum CacheDumpMode { DumpCaches, DontDumpCaches };
    void printCallOp(PrintStream&, ExecState*, int location, const Instruction*&, const char* op, CacheDumpMode, bool& hasPrintedProfiling, const CallLinkInfoMap&);
    void printPutByIdOp(PrintStream&, ExecState*, int location, const Instruction*&, const char* op);
    void printPutByIdCacheStatus(PrintStream&, ExecState*, int location, const StubInfoMap&);
    void printLocationAndOp(PrintStream&, ExecState*, int location, const Instruction*&, const char* op);
    void printLocationOpAndRegisterOperand(PrintStream&, ExecState*, int location, const Instruction*& it, const char* op, int operand);

    void beginDumpProfiling(PrintStream&, bool& hasPrintedProfiling);
    void dumpValueProfiling(PrintStream&, const Instruction*&, bool& hasPrintedProfiling);
    void dumpArrayProfiling(PrintStream&, const Instruction*&, bool& hasPrintedProfiling);
    void dumpRareCaseProfile(PrintStream&, const char* name, RareCaseProfile*, bool& hasPrintedProfiling);

    bool shouldImmediatelyAssumeLivenessDuringScan();

    void propagateTransitions(SlotVisitor&);
    void determineLiveness(SlotVisitor&);

    void stronglyVisitStrongReferences(SlotVisitor&);
    void stronglyVisitWeakReferences(SlotVisitor&);

    void createRareDataIfNecessary()
    {
        if (!m_rareData)
            m_rareData = std::make_unique<RareData>();
    }

    void insertBasicBlockBoundariesForControlFlowProfiler(Vector<Instruction, 0, UnsafeVectorOverflow>&);

#if ENABLE(JIT)
    void resetStubInternal(RepatchBuffer&, StructureStubInfo&);
    void resetStubDuringGCInternal(RepatchBuffer&, StructureStubInfo&);
#endif
    WriteBarrier<UnlinkedCodeBlock> m_unlinkedCode;
    int m_numParameters;
    union {
        unsigned m_debuggerRequests;
        struct {
            unsigned m_hasDebuggerStatement : 1;
            unsigned m_steppingMode : 1;
            unsigned m_numBreakpoints : 30;
        };
    };
    WriteBarrier<ScriptExecutable> m_ownerExecutable;
    VM* m_vm;

    RefCountedArray<Instruction> m_instructions;
    WriteBarrier<SymbolTable> m_symbolTable;
    VirtualRegister m_thisRegister;
    VirtualRegister m_scopeRegister;
    VirtualRegister m_lexicalEnvironmentRegister;

    bool m_isStrictMode;
    bool m_needsActivation;
    bool m_mayBeExecuting;
    Atomic<bool> m_visitAggregateHasBeenCalled;

    RefPtr<SourceProvider> m_source;
    unsigned m_sourceOffset;
    unsigned m_firstLineColumnOffset;
    unsigned m_codeType;

    Vector<LLIntCallLinkInfo> m_llintCallLinkInfos;
    SentinelLinkedList<LLIntCallLinkInfo, BasicRawSentinelNode<LLIntCallLinkInfo>> m_incomingLLIntCalls;
    RefPtr<JITCode> m_jitCode;
#if ENABLE(JIT)
    Bag<StructureStubInfo> m_stubInfos;
    Vector<ByValInfo> m_byValInfos;
    Bag<CallLinkInfo> m_callLinkInfos;
    SentinelLinkedList<CallLinkInfo, BasicRawSentinelNode<CallLinkInfo>> m_incomingCalls;
    SentinelLinkedList<PolymorphicCallNode, BasicRawSentinelNode<PolymorphicCallNode>> m_incomingPolymorphicCalls;
#endif
    std::unique_ptr<CompactJITCodeMap> m_jitCodeMap;
#if ENABLE(DFG_JIT)
    // This is relevant to non-DFG code blocks that serve as the profiled code block
    // for DFG code blocks.
    DFG::ExitProfile m_exitProfile;
    CompressedLazyOperandValueProfileHolder m_lazyOperandValueProfiles;
#endif
    Vector<ValueProfile> m_argumentValueProfiles;
    Vector<ValueProfile> m_valueProfiles;
    SegmentedVector<RareCaseProfile, 8> m_rareCaseProfiles;
    SegmentedVector<RareCaseProfile, 8> m_specialFastCaseProfiles;
    Vector<ArrayAllocationProfile> m_arrayAllocationProfiles;
    ArrayProfileVector m_arrayProfiles;
    Vector<ObjectAllocationProfile> m_objectAllocationProfiles;

    // Constant Pool
    COMPILE_ASSERT(sizeof(Register) == sizeof(WriteBarrier<Unknown>), Register_must_be_same_size_as_WriteBarrier_Unknown);
    // TODO: This could just be a pointer to m_unlinkedCodeBlock's data, but the DFG mutates
    // it, so we're stuck with it for now.
    Vector<WriteBarrier<Unknown>> m_constantRegisters;
    Vector<SourceCodeRepresentation> m_constantsSourceCodeRepresentation;
    Vector<WriteBarrier<FunctionExecutable>> m_functionDecls;
    Vector<WriteBarrier<FunctionExecutable>> m_functionExprs;

    RefPtr<CodeBlock> m_alternative;

    BaselineExecutionCounter m_llintExecuteCounter;

    BaselineExecutionCounter m_jitExecuteCounter;
    int32_t m_totalJITExecutions;
    uint32_t m_osrExitCounter;
    uint16_t m_optimizationDelayCounter;
    uint16_t m_reoptimizationRetryCounter;

    mutable CodeBlockHash m_hash;

    std::unique_ptr<BytecodeLivenessAnalysis> m_livenessAnalysis;

    std::unique_ptr<RareData> m_rareData;
#if ENABLE(JIT)
    DFG::CapabilityLevel m_capabilityLevelState;
#endif
};

// Program code is not marked by any function, so we make the global object
// responsible for marking it.

class GlobalCodeBlock : public CodeBlock {
protected:
    GlobalCodeBlock(CopyParsedBlockTag, GlobalCodeBlock& other)
        : CodeBlock(CopyParsedBlock, other)
    {
    }

    GlobalCodeBlock(ScriptExecutable* ownerExecutable, UnlinkedCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
        : CodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, sourceOffset, firstLineColumnOffset)
    {
    }
};

class ProgramCodeBlock : public GlobalCodeBlock {
public:
    ProgramCodeBlock(CopyParsedBlockTag, ProgramCodeBlock& other)
        : GlobalCodeBlock(CopyParsedBlock, other)
    {
    }

    ProgramCodeBlock(ProgramExecutable* ownerExecutable, UnlinkedProgramCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned firstLineColumnOffset)
        : GlobalCodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, 0, firstLineColumnOffset)
    {
    }

#if ENABLE(JIT)
protected:
    virtual CodeBlock* replacement() override;
    virtual DFG::CapabilityLevel capabilityLevelInternal() override;
#endif
};

class EvalCodeBlock : public GlobalCodeBlock {
public:
    EvalCodeBlock(CopyParsedBlockTag, EvalCodeBlock& other)
        : GlobalCodeBlock(CopyParsedBlock, other)
    {
    }

    EvalCodeBlock(EvalExecutable* ownerExecutable, UnlinkedEvalCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider)
        : GlobalCodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, 0, 1)
    {
    }

    const Identifier& variable(unsigned index) { return unlinkedEvalCodeBlock()->variable(index); }
    unsigned numVariables() { return unlinkedEvalCodeBlock()->numVariables(); }

#if ENABLE(JIT)
protected:
    virtual CodeBlock* replacement() override;
    virtual DFG::CapabilityLevel capabilityLevelInternal() override;
#endif

private:
    UnlinkedEvalCodeBlock* unlinkedEvalCodeBlock() const { return jsCast<UnlinkedEvalCodeBlock*>(unlinkedCodeBlock()); }
};

class FunctionCodeBlock : public CodeBlock {
public:
    FunctionCodeBlock(CopyParsedBlockTag, FunctionCodeBlock& other)
        : CodeBlock(CopyParsedBlock, other)
    {
    }

    FunctionCodeBlock(FunctionExecutable* ownerExecutable, UnlinkedFunctionCodeBlock* unlinkedCodeBlock, JSScope* scope, PassRefPtr<SourceProvider> sourceProvider, unsigned sourceOffset, unsigned firstLineColumnOffset)
        : CodeBlock(ownerExecutable, unlinkedCodeBlock, scope, sourceProvider, sourceOffset, firstLineColumnOffset)
    {
    }

#if ENABLE(JIT)
protected:
    virtual CodeBlock* replacement() override;
    virtual DFG::CapabilityLevel capabilityLevelInternal() override;
#endif
};

inline CodeBlock* baselineCodeBlockForInlineCallFrame(InlineCallFrame* inlineCallFrame)
{
    RELEASE_ASSERT(inlineCallFrame);
    ExecutableBase* executable = inlineCallFrame->executable.get();
    RELEASE_ASSERT(executable->structure()->classInfo() == FunctionExecutable::info());
    return static_cast<FunctionExecutable*>(executable)->baselineCodeBlockFor(inlineCallFrame->specializationKind());
}

inline CodeBlock* baselineCodeBlockForOriginAndBaselineCodeBlock(const CodeOrigin& codeOrigin, CodeBlock* baselineCodeBlock)
{
    if (codeOrigin.inlineCallFrame)
        return baselineCodeBlockForInlineCallFrame(codeOrigin.inlineCallFrame);
    return baselineCodeBlock;
}

inline Register& ExecState::r(int index)
{
    CodeBlock* codeBlock = this->codeBlock();
    if (codeBlock->isConstantRegisterIndex(index))
        return *reinterpret_cast<Register*>(&codeBlock->constantRegister(index));
    return this[index];
}

inline Register& ExecState::r(VirtualRegister reg)
{
    return r(reg.offset());
}

inline Register& ExecState::uncheckedR(int index)
{
    RELEASE_ASSERT(index < FirstConstantRegisterIndex);
    return this[index];
}

inline Register& ExecState::uncheckedR(VirtualRegister reg)
{
    return uncheckedR(reg.offset());
}

inline void CodeBlockSet::mark(void* candidateCodeBlock)
{
    // We have to check for 0 and -1 because those are used by the HashMap as markers.
    uintptr_t value = reinterpret_cast<uintptr_t>(candidateCodeBlock);

    // This checks for both of those nasty cases in one go.
    // 0 + 1 = 1
    // -1 + 1 = 0
    if (value + 1 <= 1)
        return;

    CodeBlock* codeBlock = static_cast<CodeBlock*>(candidateCodeBlock);
    if (!m_oldCodeBlocks.contains(codeBlock) && !m_newCodeBlocks.contains(codeBlock))
        return;

    mark(codeBlock);
}

inline void CodeBlockSet::mark(CodeBlock* codeBlock)
{
    if (!codeBlock)
        return;

    if (codeBlock->m_mayBeExecuting)
        return;

    codeBlock->m_mayBeExecuting = true;
    // We might not have cleared the marks for this CodeBlock, but we need to visit it.
    codeBlock->m_visitAggregateHasBeenCalled.store(false, std::memory_order_relaxed);
#if ENABLE(GGC)
    m_currentlyExecuting.append(codeBlock);
#endif
}

template <typename Functor> inline void ScriptExecutable::forEachCodeBlock(Functor&& functor)
{
    switch (type()) {
    case ProgramExecutableType: {
        if (CodeBlock* codeBlock = jsCast<ProgramExecutable*>(this)->m_programCodeBlock.get())
            codeBlock->forEachRelatedCodeBlock(std::forward<Functor>(functor));
        break;
    }

    case EvalExecutableType: {
        if (CodeBlock* codeBlock = jsCast<EvalExecutable*>(this)->m_evalCodeBlock.get())
            codeBlock->forEachRelatedCodeBlock(std::forward<Functor>(functor));
        break;
    }

    case FunctionExecutableType: {
        Functor f(std::forward<Functor>(functor));
        FunctionExecutable* executable = jsCast<FunctionExecutable*>(this);
        if (CodeBlock* codeBlock = executable->m_codeBlockForCall.get())
            codeBlock->forEachRelatedCodeBlock(f);
        if (CodeBlock* codeBlock = executable->m_codeBlockForConstruct.get())
            codeBlock->forEachRelatedCodeBlock(f);
        break;
    }
    default:
        RELEASE_ASSERT_NOT_REACHED();
    }
}

} // namespace JSC

#endif // CodeBlock_h