#include "JSAPIValueWrapper.h"
#include "JSGlobalObject.h"
#include "JSValue.h"
-#include <wtf/Platform.h>
#include <wtf/UnusedParam.h>
namespace JSC {
#include "CallFrame.h"
#include "JSLock.h"
+#include <wtf/WTFThreadData.h>
namespace JSC {
protected:
APIEntryShimWithoutLock(JSGlobalData* globalData, bool registerThread)
: m_globalData(globalData)
- , m_entryIdentifierTable(setCurrentIdentifierTable(globalData->identifierTable))
+ , m_entryIdentifierTable(wtfThreadData().setCurrentIdentifierTable(globalData->identifierTable))
{
if (registerThread)
globalData->heap.registerThread();
~APIEntryShimWithoutLock()
{
m_globalData->timeoutChecker.stop();
- setCurrentIdentifierTable(m_entryIdentifierTable);
+ wtfThreadData().setCurrentIdentifierTable(m_entryIdentifierTable);
}
private:
// JSPropertyNameAccumulator only has a globalData.
APIEntryShim(JSGlobalData* globalData, bool registerThread = true)
: APIEntryShimWithoutLock(globalData, registerThread)
- , m_lock(globalData->isSharedInstance ? LockForReal : SilenceAssertionsOnly)
+ , m_lock(globalData->isSharedInstance() ? LockForReal : SilenceAssertionsOnly)
{
}
: m_dropAllLocks(exec)
, m_globalData(&exec->globalData())
{
- resetCurrentIdentifierTable();
- m_globalData->timeoutChecker.start();
+ wtfThreadData().resetCurrentIdentifierTable();
}
~APICallbackShim()
{
- m_globalData->timeoutChecker.stop();
- setCurrentIdentifierTable(m_globalData->identifierTable);
+ wtfThreadData().setCurrentIdentifierTable(m_globalData->identifierTable);
}
private:
#include <runtime/JSGlobalObject.h>
#include <runtime/JSLock.h>
#include <runtime/JSObject.h>
+#include <wtf/text/StringHash.h>
using namespace JSC;
*/
#include "config.h"
-#include <wtf/Platform.h>
#include "JSCallbackFunction.h"
#include "APIShims.h"
#include "JSCallbackObject.h"
#include "Collector.h"
+#include <wtf/text/StringHash.h>
namespace JSC {
namespace JSC {
+struct JSCallbackObjectData {
+ JSCallbackObjectData(void* privateData, JSClassRef jsClass)
+ : privateData(privateData)
+ , jsClass(jsClass)
+ {
+ JSClassRetain(jsClass);
+ }
+
+ ~JSCallbackObjectData()
+ {
+ JSClassRelease(jsClass);
+ }
+
+ JSValue getPrivateProperty(const Identifier& propertyName) const
+ {
+ if (!m_privateProperties)
+ return JSValue();
+ return m_privateProperties->getPrivateProperty(propertyName);
+ }
+
+ void setPrivateProperty(const Identifier& propertyName, JSValue value)
+ {
+ if (!m_privateProperties)
+ m_privateProperties.set(new JSPrivatePropertyMap);
+ m_privateProperties->setPrivateProperty(propertyName, value);
+ }
+
+ void deletePrivateProperty(const Identifier& propertyName)
+ {
+ if (!m_privateProperties)
+ return;
+ m_privateProperties->deletePrivateProperty(propertyName);
+ }
+
+ void markChildren(MarkStack& markStack)
+ {
+ if (!m_privateProperties)
+ return;
+ m_privateProperties->markChildren(markStack);
+ }
+
+ void* privateData;
+ JSClassRef jsClass;
+ struct JSPrivatePropertyMap {
+ JSValue getPrivateProperty(const Identifier& propertyName) const
+ {
+ PrivatePropertyMap::const_iterator location = m_propertyMap.find(propertyName.ustring().rep());
+ if (location == m_propertyMap.end())
+ return JSValue();
+ return location->second;
+ }
+
+ void setPrivateProperty(const Identifier& propertyName, JSValue value)
+ {
+ m_propertyMap.set(propertyName.ustring().rep(), value);
+ }
+
+ void deletePrivateProperty(const Identifier& propertyName)
+ {
+ m_propertyMap.remove(propertyName.ustring().rep());
+ }
+
+ void markChildren(MarkStack& markStack)
+ {
+ for (PrivatePropertyMap::iterator ptr = m_propertyMap.begin(); ptr != m_propertyMap.end(); ++ptr) {
+ if (ptr->second)
+ markStack.append(ptr->second);
+ }
+ }
+
+ private:
+ typedef HashMap<RefPtr<UString::Rep>, JSValue, IdentifierRepHash> PrivatePropertyMap;
+ PrivatePropertyMap m_propertyMap;
+ };
+ OwnPtr<JSPrivatePropertyMap> m_privateProperties;
+};
+
+
template <class Base>
class JSCallbackObject : public Base {
public:
{
return Structure::create(proto, TypeInfo(ObjectType, StructureFlags), Base::AnonymousSlotCount);
}
+
+ JSValue getPrivateProperty(const Identifier& propertyName) const
+ {
+ return m_callbackObjectData->getPrivateProperty(propertyName);
+ }
+
+ void setPrivateProperty(const Identifier& propertyName, JSValue value)
+ {
+ m_callbackObjectData->setPrivateProperty(propertyName, value);
+ }
+
+ void deletePrivateProperty(const Identifier& propertyName)
+ {
+ m_callbackObjectData->deletePrivateProperty(propertyName);
+ }
protected:
static const unsigned StructureFlags = OverridesGetOwnPropertySlot | ImplementsHasInstance | OverridesHasInstance | OverridesMarkChildren | OverridesGetPropertyNames | Base::StructureFlags;
virtual CallType getCallData(CallData&);
virtual const ClassInfo* classInfo() const { return &info; }
+ virtual void markChildren(MarkStack& markStack)
+ {
+ Base::markChildren(markStack);
+ m_callbackObjectData->markChildren(markStack);
+ }
+
void init(ExecState*);
static JSCallbackObject* asCallbackObject(JSValue);
static JSValue JSC_HOST_CALL call(ExecState*, JSObject* functionObject, JSValue thisValue, const ArgList&);
static JSObject* construct(ExecState*, JSObject* constructor, const ArgList&);
- static JSValue staticValueGetter(ExecState*, const Identifier&, const PropertySlot&);
- static JSValue staticFunctionGetter(ExecState*, const Identifier&, const PropertySlot&);
- static JSValue callbackGetter(ExecState*, const Identifier&, const PropertySlot&);
-
- struct JSCallbackObjectData {
- JSCallbackObjectData(void* privateData, JSClassRef jsClass)
- : privateData(privateData)
- , jsClass(jsClass)
- {
- JSClassRetain(jsClass);
- }
-
- ~JSCallbackObjectData()
- {
- JSClassRelease(jsClass);
- }
-
- void* privateData;
- JSClassRef jsClass;
- };
-
+ static JSValue staticValueGetter(ExecState*, JSValue, const Identifier&);
+ static JSValue staticFunctionGetter(ExecState*, JSValue, const Identifier&);
+ static JSValue callbackGetter(ExecState*, JSValue, const Identifier&);
+
OwnPtr<JSCallbackObjectData> m_callbackObjectData;
};
}
template <class Base>
-JSValue JSCallbackObject<Base>::staticValueGetter(ExecState* exec, const Identifier& propertyName, const PropertySlot& slot)
+JSValue JSCallbackObject<Base>::staticValueGetter(ExecState* exec, JSValue slotBase, const Identifier& propertyName)
{
- JSCallbackObject* thisObj = asCallbackObject(slot.slotBase());
+ JSCallbackObject* thisObj = asCallbackObject(slotBase);
JSObjectRef thisRef = toRef(thisObj);
RefPtr<OpaqueJSString> propertyNameRef;
}
template <class Base>
-JSValue JSCallbackObject<Base>::staticFunctionGetter(ExecState* exec, const Identifier& propertyName, const PropertySlot& slot)
+JSValue JSCallbackObject<Base>::staticFunctionGetter(ExecState* exec, JSValue slotBase, const Identifier& propertyName)
{
- JSCallbackObject* thisObj = asCallbackObject(slot.slotBase());
+ JSCallbackObject* thisObj = asCallbackObject(slotBase);
// Check for cached or override property.
PropertySlot slot2(thisObj);
}
template <class Base>
-JSValue JSCallbackObject<Base>::callbackGetter(ExecState* exec, const Identifier& propertyName, const PropertySlot& slot)
+JSValue JSCallbackObject<Base>::callbackGetter(ExecState* exec, JSValue slotBase, const Identifier& propertyName)
{
- JSCallbackObject* thisObj = asCallbackObject(slot.slotBase());
+ JSCallbackObject* thisObj = asCallbackObject(slotBase);
JSObjectRef thisRef = toRef(thisObj);
RefPtr<OpaqueJSString> propertyNameRef;
#include <runtime/JSGlobalObject.h>
#include <runtime/ObjectPrototype.h>
#include <runtime/Identifier.h>
+#include <wtf/text/StringHash.h>
+#include <wtf/unicode/UTF8.h>
using namespace std;
using namespace JSC;
+using namespace WTF::Unicode;
const JSClassDefinition kJSClassDefinitionEmpty = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
+static inline UString tryCreateStringFromUTF8(const char* string)
+{
+ if (!string)
+ return UString::null();
+
+ size_t length = strlen(string);
+ Vector<UChar, 1024> buffer(length);
+ UChar* p = buffer.data();
+ if (conversionOK != convertUTF8ToUTF16(&string, string + length, &p, p + length))
+ return UString::null();
+
+ return UString(buffer.data(), p - buffer.data());
+}
+
OpaqueJSClass::OpaqueJSClass(const JSClassDefinition* definition, OpaqueJSClass* protoClass)
: parentClass(definition->parentClass)
, prototypeClass(0)
, callAsConstructor(definition->callAsConstructor)
, hasInstance(definition->hasInstance)
, convertToType(definition->convertToType)
- , m_className(UString::createFromUTF8(definition->className).rep()->ref())
+ , m_className(tryCreateStringFromUTF8(definition->className))
, m_staticValues(0)
, m_staticFunctions(0)
{
if (const JSStaticValue* staticValue = definition->staticValues) {
m_staticValues = new OpaqueJSClassStaticValuesTable();
while (staticValue->name) {
- // Use a local variable here to sidestep an RVCT compiler bug.
- StaticValueEntry* entry = new StaticValueEntry(staticValue->getProperty, staticValue->setProperty, staticValue->attributes);
- m_staticValues->add(UString::createFromUTF8(staticValue->name).rep()->ref(), entry);
+ UString valueName = tryCreateStringFromUTF8(staticValue->name);
+ if (!valueName.isNull()) {
+ // Use a local variable here to sidestep an RVCT compiler bug.
+ StaticValueEntry* entry = new StaticValueEntry(staticValue->getProperty, staticValue->setProperty, staticValue->attributes);
+ UStringImpl* impl = valueName.rep();
+ impl->ref();
+ m_staticValues->add(impl, entry);
+ }
++staticValue;
}
}
if (const JSStaticFunction* staticFunction = definition->staticFunctions) {
m_staticFunctions = new OpaqueJSClassStaticFunctionsTable();
while (staticFunction->name) {
- // Use a local variable here to sidestep an RVCT compiler bug.
- StaticFunctionEntry* entry = new StaticFunctionEntry(staticFunction->callAsFunction, staticFunction->attributes);
- m_staticFunctions->add(UString::createFromUTF8(staticFunction->name).rep()->ref(), entry);
+ UString functionName = tryCreateStringFromUTF8(staticFunction->name);
+ if (!functionName.isNull()) {
+ // Use a local variable here to sidestep an RVCT compiler bug.
+ StaticFunctionEntry* entry = new StaticFunctionEntry(staticFunction->callAsFunction, staticFunction->attributes);
+ UStringImpl* impl = functionName.rep();
+ impl->ref();
+ m_staticFunctions->add(impl, entry);
+ }
++staticFunction;
}
}
OpaqueJSClass::~OpaqueJSClass()
{
- ASSERT(!m_className.rep()->isIdentifier());
+    // The empty string is shared across threads & is an identifier; in all other cases we should have done a deep copy in className(), below.
+ ASSERT(!m_className.size() || !m_className.rep()->isIdentifier());
if (m_staticValues) {
OpaqueJSClassStaticValuesTable::const_iterator end = m_staticValues->end();
ASSERT(!it->first->isIdentifier());
// Use a local variable here to sidestep an RVCT compiler bug.
StaticValueEntry* entry = new StaticValueEntry(it->second->getProperty, it->second->setProperty, it->second->attributes);
- staticValues->add(UString::Rep::create(it->first->data(), it->first->size()), entry);
-
+ staticValues->add(UString::Rep::create(it->first->characters(), it->first->length()), entry);
}
-
} else
staticValues = 0;
-
if (jsClass->m_staticFunctions) {
staticFunctions = new OpaqueJSClassStaticFunctionsTable;
ASSERT(!it->first->isIdentifier());
// Use a local variable here to sidestep an RVCT compiler bug.
StaticFunctionEntry* entry = new StaticFunctionEntry(it->second->callAsFunction, it->second->attributes);
- staticFunctions->add(UString::Rep::create(it->first->data(), it->first->size()), entry);
+ staticFunctions->add(UString::Rep::create(it->first->characters(), it->first->length()), entry);
}
} else
#include "JSClassRef.h"
#include "JSGlobalObject.h"
#include "JSObject.h"
-#include <wtf/Platform.h>
+#include <wtf/text/StringHash.h>
#if OS(DARWIN)
#include <mach-o/dyld.h>
JSContextGroupRef JSContextGroupCreate()
{
initializeThreading();
- return toRef(JSGlobalData::createNonDefault().releaseRef());
+ return toRef(JSGlobalData::createContextGroup(ThreadStackTypeSmall).releaseRef());
}
JSContextGroupRef JSContextGroupRetain(JSContextGroupRef group)
initializeThreading();
JSLock lock(LockForReal);
- RefPtr<JSGlobalData> globalData = group ? PassRefPtr<JSGlobalData>(toJS(group)) : JSGlobalData::createNonDefault();
+ RefPtr<JSGlobalData> globalData = group ? PassRefPtr<JSGlobalData>(toJS(group)) : JSGlobalData::createContextGroup(ThreadStackTypeSmall);
APIEntryShim entryShim(globalData.get(), false);
JSLock lock(exec);
JSGlobalData& globalData = exec->globalData();
- IdentifierTable* savedIdentifierTable = setCurrentIdentifierTable(globalData.identifierTable);
-
- gcUnprotect(exec->dynamicGlobalObject());
-
- if (globalData.refCount() == 2) { // One reference is held by JSGlobalObject, another added by JSGlobalContextRetain().
- // The last reference was released, this is our last chance to collect.
+ JSGlobalObject* dgo = exec->dynamicGlobalObject();
+ IdentifierTable* savedIdentifierTable = wtfThreadData().setCurrentIdentifierTable(globalData.identifierTable);
+
+ // One reference is held by JSGlobalObject, another added by JSGlobalContextRetain().
+ bool releasingContextGroup = globalData.refCount() == 2;
+ bool releasingGlobalObject = Heap::heap(dgo)->unprotect(dgo);
+    // If this is the last reference to the global data, it should also
+    // be the only remaining reference to the global object!
+ ASSERT(!releasingContextGroup || releasingGlobalObject);
+
+ // An API 'JSGlobalContextRef' retains two things - a global object and a
+ // global data (or context group, in API terminology).
+ // * If this is the last reference to any contexts in the given context group,
+ // call destroy on the heap (the global data is being freed).
+ // * If this was the last reference to the global object, then unprotecting
+ // it may release a lot of GC memory - run the garbage collector now.
+    // * If there are more references remaining to the global object, then do nothing
+    //   (specifically, these are additional protects, which we assume come from other JSGlobalContextRefs).
+ if (releasingContextGroup)
globalData.heap.destroy();
- } else
+ else if (releasingGlobalObject)
globalData.heap.collectAllGarbage();
globalData.deref();
- setCurrentIdentifierTable(savedIdentifierTable);
+ wtfThreadData().setCurrentIdentifierTable(savedIdentifierTable);
}
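+
+// A minimal caller-side sketch of the lifecycle described above (illustrative only;
+// the NULL group and class arguments are just the simplest possible choice):
+//
+//     JSGlobalContextRef ctx = JSGlobalContextCreateInGroup(NULL, NULL);
+//     // ... use the context ...
+//     JSGlobalContextRelease(ctx); // may destroy the heap or run a full GC, as described above
+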
JSObjectRef JSContextGetGlobalObject(JSContextRef ctx)
#include "config.h"
#include "JSObjectRef.h"
+#include "JSObjectRefPrivate.h"
#include "APICast.h"
#include "CodeBlock.h"
#include "ObjectPrototype.h"
#include "PropertyNameArray.h"
#include "RegExpConstructor.h"
-#include <wtf/Platform.h>
using namespace JSC;
return false;
}
+JSValueRef JSObjectGetPrivateProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName)
+{
+ ExecState* exec = toJS(ctx);
+ APIEntryShim entryShim(exec);
+ JSObject* jsObject = toJS(object);
+ JSValue result;
+ Identifier name(propertyName->identifier(&exec->globalData()));
+ if (jsObject->inherits(&JSCallbackObject<JSGlobalObject>::info))
+ result = static_cast<JSCallbackObject<JSGlobalObject>*>(jsObject)->getPrivateProperty(name);
+ else if (jsObject->inherits(&JSCallbackObject<JSObject>::info))
+ result = static_cast<JSCallbackObject<JSObject>*>(jsObject)->getPrivateProperty(name);
+ return toRef(exec, result);
+}
+
+bool JSObjectSetPrivateProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName, JSValueRef value)
+{
+ ExecState* exec = toJS(ctx);
+ APIEntryShim entryShim(exec);
+ JSObject* jsObject = toJS(object);
+ JSValue jsValue = toJS(exec, value);
+ Identifier name(propertyName->identifier(&exec->globalData()));
+ if (jsObject->inherits(&JSCallbackObject<JSGlobalObject>::info)) {
+ static_cast<JSCallbackObject<JSGlobalObject>*>(jsObject)->setPrivateProperty(name, jsValue);
+ return true;
+ }
+ if (jsObject->inherits(&JSCallbackObject<JSObject>::info)) {
+ static_cast<JSCallbackObject<JSObject>*>(jsObject)->setPrivateProperty(name, jsValue);
+ return true;
+ }
+ return false;
+}
+
+bool JSObjectDeletePrivateProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName)
+{
+ ExecState* exec = toJS(ctx);
+ APIEntryShim entryShim(exec);
+ JSObject* jsObject = toJS(object);
+ Identifier name(propertyName->identifier(&exec->globalData()));
+ if (jsObject->inherits(&JSCallbackObject<JSGlobalObject>::info)) {
+ static_cast<JSCallbackObject<JSGlobalObject>*>(jsObject)->deletePrivateProperty(name);
+ return true;
+ }
+ if (jsObject->inherits(&JSCallbackObject<JSObject>::info)) {
+ static_cast<JSCallbackObject<JSObject>*>(jsObject)->deletePrivateProperty(name);
+ return true;
+ }
+ return false;
+}
+
bool JSObjectIsFunction(JSContextRef, JSObjectRef object)
{
CallData callData;
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef JSObjectRefPrivate_h
+#define JSObjectRefPrivate_h
+
+#include <JavaScriptCore/JSObjectRef.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*!
+ @function
+ @abstract Sets a private property on an object. This private property cannot be accessed from within JavaScript.
+ @param ctx The execution context to use.
+ @param object The JSObject whose private property you want to set.
+ @param propertyName A JSString containing the property's name.
+ @param value A JSValue to use as the property's value. This may be NULL.
+ @result true if object can store private data, otherwise false.
+ @discussion This API allows you to store JS values directly on an object in a way that ensures they are kept alive without exposing them to JavaScript code and without introducing the reference cycles that may occur when using JSValueProtect.
+
+ The default object class does not allocate storage for private data. Only objects created with a non-NULL JSClass can store private properties.
+ */
+JS_EXPORT bool JSObjectSetPrivateProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName, JSValueRef value);
+
+/*!
+ @function
+ @abstract Gets a private property from an object.
+ @param ctx The execution context to use.
+ @param object The JSObject whose private property you want to get.
+ @param propertyName A JSString containing the property's name.
+ @result The property's value if object has the property, otherwise NULL.
+ */
+JS_EXPORT JSValueRef JSObjectGetPrivateProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName);
+
+/*!
+ @function
+ @abstract Deletes a private property from an object.
+ @param ctx The execution context to use.
+ @param object The JSObject whose private property you want to delete.
+ @param propertyName A JSString containing the property's name.
+ @result true if object can store private data, otherwise false.
+ @discussion The default object class does not allocate storage for private data. Only objects created with a non-NULL JSClass can store private data.
+ */
+JS_EXPORT bool JSObjectDeletePrivateProperty(JSContextRef ctx, JSObjectRef object, JSStringRef propertyName);
+
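+/*
+ A minimal usage sketch (illustrative only; "jsClass" stands for any non-NULL JSClassRef
+ the caller has already created, e.g. with JSClassCreate, since only JSClass-backed
+ objects can store private data):
+
+    JSObjectRef obj = JSObjectMake(ctx, jsClass, NULL);
+    JSStringRef name = JSStringCreateWithUTF8CString("cachedValue");
+    JSValueRef cached = NULL;
+    if (JSObjectSetPrivateProperty(ctx, obj, name, JSValueMakeNumber(ctx, 42)))
+        cached = JSObjectGetPrivateProperty(ctx, obj, name);  // kept alive, invisible to script
+    JSObjectDeletePrivateProperty(ctx, obj, name);
+    JSStringRelease(name);
+*/
+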
+#ifdef __cplusplus
+}
+#endif
+
+#endif // JSObjectRefPrivate_h
extern "C" {
#endif
-#if !defined(WIN32) && !defined(_WIN32) && !defined(__WINSCW__)
+#if !defined(WIN32) && !defined(_WIN32) && !defined(__WINSCW__) \
+ && !((defined(__CC_ARM) || defined(__ARMCC__)) && defined(__SYMBIAN32__)) /* RVCT */
/*!
@typedef JSChar
@abstract A Unicode character.
#include "config.h"
#include "JSValueRef.h"
-#include <wtf/Platform.h>
#include "APICast.h"
#include "APIShims.h"
#include "JSCallbackObject.h"
#include <runtime/JSGlobalObject.h>
+#include <runtime/JSONObject.h>
#include <runtime/JSString.h>
+#include <runtime/LiteralParser.h>
#include <runtime/Operations.h>
#include <runtime/Protect.h>
#include <runtime/UString.h>
#include <runtime/JSValue.h>
#include <wtf/Assertions.h>
+#include <wtf/text/StringHash.h>
#include <algorithm> // for std::min
ExecState* exec = toJS(ctx);
APIEntryShim entryShim(exec);
+ // Our JSValue representation relies on a standard bit pattern for NaN. NaNs
+ // generated internally to JavaScriptCore naturally have that representation,
+ // but an external NaN might not.
+ if (isnan(value))
+ value = NaN;
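+    // (For example, a NaN whose bits were assembled by hand or read from external data
+    // could otherwise be misinterpreted by the internal value encoding.)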
+
return toRef(exec, jsNumber(exec, value));
}
return toRef(exec, jsString(exec, string->ustring()));
}
+JSValueRef JSValueMakeFromJSONString(JSContextRef ctx, JSStringRef string)
+{
+ ExecState* exec = toJS(ctx);
+ APIEntryShim entryShim(exec);
+ LiteralParser parser(exec, string->ustring(), LiteralParser::StrictJSON);
+ return toRef(exec, parser.tryLiteralParse());
+}
+
+JSStringRef JSValueCreateJSONString(JSContextRef ctx, JSValueRef apiValue, unsigned indent, JSValueRef* exception)
+{
+ ExecState* exec = toJS(ctx);
+ APIEntryShim entryShim(exec);
+ JSValue value = toJS(exec, apiValue);
+ UString result = JSONStringify(exec, value, indent);
+ if (exception)
+ *exception = 0;
+ if (exec->hadException()) {
+ if (exception)
+ *exception = toRef(exec, exec->exception());
+ exec->clearException();
+ return 0;
+ }
+ return OpaqueJSString::create(result).releaseRef();
+}
+
bool JSValueToBoolean(JSContextRef ctx, JSValueRef value)
{
ExecState* exec = toJS(ctx);
#define JSValueRef_h
#include <JavaScriptCore/JSBase.h>
+#include <JavaScriptCore/WebKitAvailability.h>
#ifndef __cplusplus
#include <stdbool.h>
*/
JS_EXPORT JSValueRef JSValueMakeString(JSContextRef ctx, JSStringRef string);
+/* Converting to and from JSON formatted strings */
+
+/*!
+ @function
+ @abstract Creates a JavaScript value from a JSON formatted string.
+ @param ctx The execution context to use.
+ @param string The JSString containing the JSON string to be parsed.
+ @result A JSValue containing the parsed value, or NULL if the input is invalid.
+ */
+JS_EXPORT JSValueRef JSValueMakeFromJSONString(JSContextRef ctx, JSStringRef string) AVAILABLE_AFTER_WEBKIT_VERSION_4_0;
+
+/*!
+ @function
+ @abstract Creates a JavaScript string containing the JSON serialized representation of a JS value.
+ @param ctx The execution context to use.
+ @param value The value to serialize.
+ @param indent The number of spaces to indent when nesting. If 0, the resulting JSON will not contain newlines. The size of the indent is clamped to 10 spaces.
+ @param exception A pointer to a JSValueRef in which to store an exception, if any. Pass NULL if you do not care to store an exception.
+ @result A JSString with the result of serialization, or NULL if an exception is thrown.
+ */
+JS_EXPORT JSStringRef JSValueCreateJSONString(JSContextRef ctx, JSValueRef value, unsigned indent, JSValueRef* exception) AVAILABLE_AFTER_WEBKIT_VERSION_4_0;
+
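+/*
+ A minimal round-trip sketch (illustrative only; error handling trimmed):
+
+    JSStringRef json = JSStringCreateWithUTF8CString("{\"a\":1}");
+    JSValueRef value = JSValueMakeFromJSONString(ctx, json);  // NULL if the JSON is invalid
+    JSStringRelease(json);
+
+    JSValueRef exception = NULL;
+    JSStringRef out = JSValueCreateJSONString(ctx, value, 2, &exception);  // NULL if serialization throws
+    if (out)
+        JSStringRelease(out);
+*/
+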
/* Converting to primitive values */
/*!
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef JSWeakObjectMapRefInternal_h
+#define JSWeakObjectMapRefInternal_h
+
+#include "WeakGCMap.h"
+#include <wtf/RefCounted.h>
+
+namespace JSC {
+
+class JSObject;
+
+}
+
+typedef void (*JSWeakMapDestroyedCallback)(struct OpaqueJSWeakObjectMap*, void*);
+
+typedef JSC::WeakGCMap<void*, JSC::JSObject*> WeakMapType;
+
+struct OpaqueJSWeakObjectMap : public RefCounted<OpaqueJSWeakObjectMap> {
+public:
+ static PassRefPtr<OpaqueJSWeakObjectMap> create(void* data, JSWeakMapDestroyedCallback callback)
+ {
+ return adoptRef(new OpaqueJSWeakObjectMap(data, callback));
+ }
+
+ WeakMapType& map() { return m_map; }
+
+ ~OpaqueJSWeakObjectMap()
+ {
+ m_callback(this, m_data);
+ }
+
+private:
+ OpaqueJSWeakObjectMap(void* data, JSWeakMapDestroyedCallback callback)
+ : m_data(data)
+ , m_callback(callback)
+ {
+ }
+ WeakMapType m_map;
+ void* m_data;
+ JSWeakMapDestroyedCallback m_callback;
+};
+
+
+#endif // JSWeakObjectMapRefInternal_h
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "JSWeakObjectMapRefPrivate.h"
+
+#include "APICast.h"
+#include "APIShims.h"
+#include "JSCallbackObject.h"
+#include "JSValue.h"
+#include "JSWeakObjectMapRefInternal.h"
+#include <wtf/HashMap.h>
+#include <wtf/RefCounted.h>
+#include <wtf/text/StringHash.h>
+
+using namespace WTF;
+using namespace JSC;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+JSWeakObjectMapRef JSWeakObjectMapCreate(JSContextRef context, void* privateData, JSWeakMapDestroyedCallback callback)
+{
+ ExecState* exec = toJS(context);
+ APIEntryShim entryShim(exec);
+ RefPtr<OpaqueJSWeakObjectMap> map = OpaqueJSWeakObjectMap::create(privateData, callback);
+ exec->lexicalGlobalObject()->registerWeakMap(map.get());
+ return map.get();
+}
+
+void JSWeakObjectMapSet(JSContextRef ctx, JSWeakObjectMapRef map, void* key, JSObjectRef object)
+{
+ ExecState* exec = toJS(ctx);
+ APIEntryShim entryShim(exec);
+ JSObject* obj = toJS(object);
+ if (!obj)
+ return;
+ ASSERT(obj->inherits(&JSCallbackObject<JSGlobalObject>::info) || obj->inherits(&JSCallbackObject<JSObject>::info));
+ map->map().set(key, obj);
+}
+
+JSObjectRef JSWeakObjectMapGet(JSContextRef ctx, JSWeakObjectMapRef map, void* key)
+{
+ ExecState* exec = toJS(ctx);
+ APIEntryShim entryShim(exec);
+ return toRef(static_cast<JSObject*>(map->map().get(key)));
+}
+
+bool JSWeakObjectMapClear(JSContextRef ctx, JSWeakObjectMapRef map, void* key, JSObjectRef object)
+{
+ ExecState* exec = toJS(ctx);
+ APIEntryShim entryShim(exec);
+ JSObject* obj = toJS(object);
+ if (map->map().uncheckedRemove(key, obj))
+ return true;
+ return false;
+}
+
+#ifdef __cplusplus
+}
+#endif
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef JSWeakObjectMapRefPrivate_h
+#define JSWeakObjectMapRefPrivate_h
+
+#include <JavaScriptCore/JSContextRef.h>
+#include <JavaScriptCore/JSValueRef.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*! @typedef JSWeakObjectMapRef A weak map for storing JSObjectRefs */
+typedef struct OpaqueJSWeakObjectMap* JSWeakObjectMapRef;
+
+/*!
+ @typedef JSWeakMapDestroyedCallback
+ @abstract The callback invoked when a JSWeakObjectMapRef is being destroyed.
+ @param map The map that is being destroyed.
+ @param data The private data (if any) that was associated with the map instance.
+ */
+typedef void (*JSWeakMapDestroyedCallback)(JSWeakObjectMapRef map, void* data);
+
+/*!
+ @function
+ @abstract Creates a weak value map that can be used to reference user defined objects without preventing them from being collected.
+ @param ctx The execution context to use.
+ @param data A void* to set as the map's private data. Pass NULL to specify no private data.
+ @param destructor A function to call when the weak map is destroyed.
+ @result A JSWeakObjectMapRef bound to the given context, data and destructor.
+ @discussion The JSWeakObjectMapRef can be used as a storage mechanism to hold custom JS objects without forcing those objects to
+ remain live as JSValueProtect would. Any objects that are intended to be stored in a weak map must be user defined objects that
+ remove themselves from the map in their finalizer.
+ */
+JS_EXPORT JSWeakObjectMapRef JSWeakObjectMapCreate(JSContextRef ctx, void* data, JSWeakMapDestroyedCallback destructor);
+
+/*!
+ @function
+ @abstract Associates a JSObjectRef with the given key in a JSWeakObjectMap.
+ @param ctx The execution context to use.
+ @param map The map to operate on.
+ @param key The key to associate a weak reference with.
+ @param object The user defined object to associate with the key.
+ */
+JS_EXPORT void JSWeakObjectMapSet(JSContextRef ctx, JSWeakObjectMapRef map, void* key, JSObjectRef object);
+
+/*!
+ @function
+ @abstract Retrieves the JSObjectRef associated with a key.
+ @param ctx The execution context to use.
+ @param map The map to query.
+ @param key The key to search for.
+ @result Either the live object associated with the provided key, or NULL.
+ */
+JS_EXPORT JSObjectRef JSWeakObjectMapGet(JSContextRef ctx, JSWeakObjectMapRef map, void* key);
+
+/*!
+ @function
+ @abstract Clears the association between a key and an object in a JSWeakObjectMapRef.
+ @param ctx The execution context to use.
+ @param map The map to clear the key association from.
+ @param key The key to use.
+ @param object The old object value.
+ @result Returns true if the key/object association was present in map, and has been removed.
+ */
+JS_EXPORT bool JSWeakObjectMapClear(JSContextRef ctx, JSWeakObjectMapRef map, void* key, JSObjectRef object);
+
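+/*
+ A minimal usage sketch (illustrative only; "mapDestroyed", "userData", "key" and "wrapper"
+ are placeholder names, and "wrapper" must be a user defined (JSClass-backed) object whose
+ finalize callback removes its key from the map):
+
+    static void mapDestroyed(JSWeakObjectMapRef map, void* data) { }  // release `data` here
+
+    JSWeakObjectMapRef map = JSWeakObjectMapCreate(ctx, userData, mapDestroyed);
+    JSWeakObjectMapSet(ctx, map, key, wrapper);
+    JSObjectRef live = JSWeakObjectMapGet(ctx, map, key);  // NULL once wrapper has been collected
+*/
+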
+#ifdef __cplusplus
+}
+#endif
+
+#endif // JSWeakObjectMapRefPrivate_h
/*
- * Copyright (C) 2008 Apple Inc. All Rights Reserved.
+ * Copyright (C) 2008, 2009, 2010 Apple Inc. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#define WEBKIT_VERSION_LATEST 0x9999
#ifdef __APPLE__
-#import <AvailabilityMacros.h>
+#include <AvailabilityMacros.h>
#else
/*
* For non-Mac platforms, require the newest version.
#elif !defined(MAC_OS_X_VERSION_10_6) || MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_6
/* WebKit 3.0 is the version that shipped on Mac OS X 10.5. */
#define WEBKIT_VERSION_MIN_REQUIRED WEBKIT_VERSION_3_0
+ #elif !defined(MAC_OS_X_VERSION_10_7) || MAC_OS_X_VERSION_MIN_REQUIRED < MAC_OS_X_VERSION_10_7
+ /* WebKit 4.0 is the version that shipped on Mac OS X 10.6. */
+ #define WEBKIT_VERSION_MIN_REQUIRED WEBKIT_VERSION_4_0
#else
#define WEBKIT_VERSION_MIN_REQUIRED WEBKIT_VERSION_LATEST
#endif
*
* Used on declarations introduced in WebKit 4.0
*/
-#if WEBKIT_VERSION_MAX_ALLOWED < WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MAX_ALLOWED < WEBKIT_VERSION_4_0
#define AVAILABLE_IN_WEBKIT_VERSION_4_0 UNAVAILABLE_ATTRIBUTE
-#elif WEBKIT_VERSION_MIN_REQUIRED < WEBKIT_VERSION_LATEST
+#elif WEBKIT_VERSION_MIN_REQUIRED < WEBKIT_VERSION_4_0
#define AVAILABLE_IN_WEBKIT_VERSION_4_0 WEAK_IMPORT_ATTRIBUTE
#else
#define AVAILABLE_IN_WEBKIT_VERSION_4_0
* Used on declarations introduced in WebKit 4.0,
* and deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define AVAILABLE_IN_WEBKIT_VERSION_4_0_BUT_DEPRECATED DEPRECATED_ATTRIBUTE
#else
#define AVAILABLE_IN_WEBKIT_VERSION_4_0_BUT_DEPRECATED AVAILABLE_IN_WEBKIT_VERSION_4_0
* Used on declarations introduced in WebKit 1.0,
* but later deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define AVAILABLE_WEBKIT_VERSION_1_0_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
#else
#define AVAILABLE_WEBKIT_VERSION_1_0_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_1_0_AND_LATER
* Used on declarations introduced in WebKit 1.1,
* but later deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define AVAILABLE_WEBKIT_VERSION_1_1_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
#else
#define AVAILABLE_WEBKIT_VERSION_1_1_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_1_1_AND_LATER
* Used on declarations introduced in WebKit 1.2,
* but later deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define AVAILABLE_WEBKIT_VERSION_1_2_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
#else
#define AVAILABLE_WEBKIT_VERSION_1_2_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_1_2_AND_LATER
* Used on declarations introduced in WebKit 1.3,
* but later deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define AVAILABLE_WEBKIT_VERSION_1_3_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
#else
#define AVAILABLE_WEBKIT_VERSION_1_3_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_1_3_AND_LATER
* Used on declarations introduced in WebKit 2.0,
* but later deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define AVAILABLE_WEBKIT_VERSION_2_0_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
#else
#define AVAILABLE_WEBKIT_VERSION_2_0_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_2_0_AND_LATER
* Used on declarations introduced in WebKit 3.0,
* but later deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define AVAILABLE_WEBKIT_VERSION_3_0_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
#else
#define AVAILABLE_WEBKIT_VERSION_3_0_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_3_0_AND_LATER
* Used on declarations introduced in WebKit 3.1,
* but later deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define AVAILABLE_WEBKIT_VERSION_3_1_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
#else
#define AVAILABLE_WEBKIT_VERSION_3_1_AND_LATER_BUT_DEPRECATED_IN_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_3_1_AND_LATER
*
* Used on types deprecated in WebKit 4.0
*/
-#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_4_0
#define DEPRECATED_IN_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
#else
#define DEPRECATED_IN_WEBKIT_VERSION_4_0
#endif
+
+/*
+ * AVAILABLE_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MAX_ALLOWED < WEBKIT_VERSION_LATEST
+ #define AVAILABLE_AFTER_WEBKIT_VERSION_4_0 UNAVAILABLE_ATTRIBUTE
+#elif WEBKIT_VERSION_MIN_REQUIRED < WEBKIT_VERSION_LATEST
+ #define AVAILABLE_AFTER_WEBKIT_VERSION_4_0 WEAK_IMPORT_ATTRIBUTE
+#else
+ #define AVAILABLE_AFTER_WEBKIT_VERSION_4_0
+#endif
+
+/*
+ * AVAILABLE_AFTER_WEBKIT_VERSION_4_0_BUT_DEPRECATED
+ *
+ * Used on declarations introduced after WebKit 4.0,
+ * and deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_AFTER_WEBKIT_VERSION_4_0_BUT_DEPRECATED DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_AFTER_WEBKIT_VERSION_4_0_BUT_DEPRECATED AVAILABLE_AFTER_WEBKIT_VERSION_4_0
+#endif
+
+/*
+ * AVAILABLE_WEBKIT_VERSION_1_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced in WebKit 1.0,
+ * but later deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_WEBKIT_VERSION_1_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_WEBKIT_VERSION_1_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_1_0_AND_LATER
+#endif
+
+/*
+ * AVAILABLE_WEBKIT_VERSION_1_1_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced in WebKit 1.1,
+ * but later deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_WEBKIT_VERSION_1_1_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_WEBKIT_VERSION_1_1_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_1_1_AND_LATER
+#endif
+
+/*
+ * AVAILABLE_WEBKIT_VERSION_1_2_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced in WebKit 1.2,
+ * but later deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_WEBKIT_VERSION_1_2_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_WEBKIT_VERSION_1_2_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_1_2_AND_LATER
+#endif
+
+/*
+ * AVAILABLE_WEBKIT_VERSION_1_3_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced in WebKit 1.3,
+ * but later deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_WEBKIT_VERSION_1_3_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_WEBKIT_VERSION_1_3_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_1_3_AND_LATER
+#endif
+
+/*
+ * AVAILABLE_WEBKIT_VERSION_2_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced in WebKit 2.0,
+ * but later deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_WEBKIT_VERSION_2_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_WEBKIT_VERSION_2_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_2_0_AND_LATER
+#endif
+
+/*
+ * AVAILABLE_WEBKIT_VERSION_3_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced in WebKit 3.0,
+ * but later deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_WEBKIT_VERSION_3_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_WEBKIT_VERSION_3_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_3_0_AND_LATER
+#endif
+
+/*
+ * AVAILABLE_WEBKIT_VERSION_3_1_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced in WebKit 3.1,
+ * but later deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_WEBKIT_VERSION_3_1_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_WEBKIT_VERSION_3_1_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_3_1_AND_LATER
+#endif
+
+/*
+ * AVAILABLE_WEBKIT_VERSION_4_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on declarations introduced in WebKit 4.0
+ * but later deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define AVAILABLE_WEBKIT_VERSION_4_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define AVAILABLE_WEBKIT_VERSION_4_0_AND_LATER_BUT_DEPRECATED_AFTER_WEBKIT_VERSION_4_0 AVAILABLE_WEBKIT_VERSION_4_0_AND_LATER
+#endif
+
+/*
+ * DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+ *
+ * Used on types deprecated after WebKit 4.0
+ */
+#if WEBKIT_VERSION_MIN_REQUIRED >= WEBKIT_VERSION_LATEST
+ #define DEPRECATED_AFTER_WEBKIT_VERSION_4_0 DEPRECATED_ATTRIBUTE
+#else
+ #define DEPRECATED_AFTER_WEBKIT_VERSION_4_0
+#endif
+
+
#endif /* __WebKitAvailability__ */
#include "JavaScriptCore.h"
#include "JSBasePrivate.h"
#include "JSContextRefPrivate.h"
+#include "JSObjectRefPrivate.h"
#include <math.h>
#define ASSERT_DISABLED 0
#include <wtf/Assertions.h>
static JSValueRef jsNumberValue = NULL;
+static JSObjectRef aHeapRef = NULL;
+
static void makeGlobalNumberValue(JSContextRef context) {
JSValueRef v = JSValueMakeNumber(context, 420);
JSValueProtect(context, v);
JSObjectSetProperty(context, globalObject, EmptyObjectIString, EmptyObject, kJSPropertyAttributeNone, NULL);
JSStringRelease(EmptyObjectIString);
+ JSStringRef lengthStr = JSStringCreateWithUTF8CString("length");
+ aHeapRef = JSObjectMakeArray(context, 0, 0, 0);
+ JSObjectSetProperty(context, aHeapRef, lengthStr, JSValueMakeNumber(context, 10), 0, 0);
+ JSStringRef privatePropertyName = JSStringCreateWithUTF8CString("privateProperty");
+ if (!JSObjectSetPrivateProperty(context, myObject, privatePropertyName, aHeapRef)) {
+ printf("FAIL: Could not set private property.\n");
+ failed = 1;
+ } else {
+ printf("PASS: Set private property.\n");
+ }
+ if (JSObjectSetPrivateProperty(context, aHeapRef, privatePropertyName, aHeapRef)) {
+ printf("FAIL: JSObjectSetPrivateProperty should fail on non-API objects.\n");
+ failed = 1;
+ } else {
+ printf("PASS: Did not allow JSObjectSetPrivateProperty on a non-API object.\n");
+ }
+ if (JSObjectGetPrivateProperty(context, myObject, privatePropertyName) != aHeapRef) {
+ printf("FAIL: Could not retrieve private property.\n");
+ failed = 1;
+ } else
+ printf("PASS: Retrieved private property.\n");
+ if (JSObjectGetPrivateProperty(context, aHeapRef, privatePropertyName)) {
+ printf("FAIL: JSObjectGetPrivateProperty should return NULL when called on a non-API object.\n");
+ failed = 1;
+ } else
+        printf("PASS: JSObjectGetPrivateProperty returned NULL.\n");
+
+ if (JSObjectGetProperty(context, myObject, privatePropertyName, 0) == aHeapRef) {
+ printf("FAIL: Accessed private property through ordinary property lookup.\n");
+ failed = 1;
+ } else
+ printf("PASS: Cannot access private property through ordinary property lookup.\n");
+
+ JSGarbageCollect(context);
+
+ for (int i = 0; i < 10000; i++)
+ JSObjectMake(context, 0, 0);
+
+ if (JSValueToNumber(context, JSObjectGetProperty(context, aHeapRef, lengthStr, 0), 0) != 10) {
+ printf("FAIL: Private property has been collected.\n");
+ failed = 1;
+ } else
+ printf("PASS: Private property does not appear to have been collected.\n");
+ JSStringRelease(lengthStr);
+
+ JSStringRef validJSON = JSStringCreateWithUTF8CString("{\"aProperty\":true}");
+ JSValueRef jsonObject = JSValueMakeFromJSONString(context, validJSON);
+ JSStringRelease(validJSON);
+ if (!JSValueIsObject(context, jsonObject)) {
+ printf("FAIL: Did not parse valid JSON correctly\n");
+ failed = 1;
+ } else
+ printf("PASS: Parsed valid JSON string.\n");
+ JSStringRef propertyName = JSStringCreateWithUTF8CString("aProperty");
+ assertEqualsAsBoolean(JSObjectGetProperty(context, JSValueToObject(context, jsonObject, 0), propertyName, 0), true);
+ JSStringRelease(propertyName);
+ JSStringRef invalidJSON = JSStringCreateWithUTF8CString("fail!");
+ if (JSValueMakeFromJSONString(context, invalidJSON)) {
+ printf("FAIL: Should return null for invalid JSON data\n");
+ failed = 1;
+ } else
+ printf("PASS: Correctly returned null for invalid JSON data.\n");
JSValueRef exception;
+ JSStringRef str = JSValueCreateJSONString(context, jsonObject, 0, 0);
+ if (!JSStringIsEqualToUTF8CString(str, "{\"aProperty\":true}")) {
+ printf("FAIL: Did not correctly serialise with indent of 0.\n");
+ failed = 1;
+ } else
+ printf("PASS: Correctly serialised with indent of 0.\n");
+ JSStringRelease(str);
+ str = JSValueCreateJSONString(context, jsonObject, 4, 0);
+ if (!JSStringIsEqualToUTF8CString(str, "{\n \"aProperty\": true\n}")) {
+ printf("FAIL: Did not correctly serialise with indent of 4.\n");
+ failed = 1;
+ } else
+ printf("PASS: Correctly serialised with indent of 4.\n");
+ JSStringRelease(str);
+ JSStringRef src = JSStringCreateWithUTF8CString("({get a(){ throw '';}})");
+ JSValueRef unstringifiableObj = JSEvaluateScript(context, src, NULL, NULL, 1, NULL);
+
+ str = JSValueCreateJSONString(context, unstringifiableObj, 4, 0);
+ if (str) {
+ printf("FAIL: Didn't return null when attempting to serialize unserializable value.\n");
+ JSStringRelease(str);
+ failed = 1;
+ } else
+ printf("PASS: returned null when attempting to serialize unserializable value.\n");
+
+ str = JSValueCreateJSONString(context, unstringifiableObj, 4, &exception);
+ if (str) {
+ printf("FAIL: Didn't return null when attempting to serialize unserializable value.\n");
+ JSStringRelease(str);
+ failed = 1;
+ } else
+ printf("PASS: returned null when attempting to serialize unserializable value.\n");
+ if (!exception) {
+ printf("FAIL: Did not set exception on serialisation error\n");
+ failed = 1;
+ } else
+ printf("PASS: set exception on serialisation error\n");
// Conversions that throw exceptions
exception = NULL;
ASSERT(NULL == JSValueToObject(context, jsNull, &exception));
interpreter/Interpreter.cpp \
interpreter/RegisterFile.cpp \
\
+ jit/ExecutableAllocator.cpp\
+ jit/ExecutableAllocatorFixedVMPool.cpp \
+ jit/ExecutableAllocatorPosix.cpp \
+ jit/JIT.cpp \
+ jit/JITArithmetic.cpp \
+ jit/JITCall.cpp \
+ jit/JITOpcodes.cpp \
+ jit/JITPropertyAccess.cpp \
+ jit/JITStubs.cpp \
+ \
parser/Lexer.cpp \
parser/Nodes.cpp \
parser/Parser.cpp \
runtime/TimeoutChecker.cpp \
runtime/UString.cpp \
\
- wrec/CharacterClass.cpp \
- wrec/CharacterClassConstructor.cpp \
- wrec/WREC.cpp \
- wrec/WRECFunctors.cpp \
- wrec/WRECGenerator.cpp \
- wrec/WRECParser.cpp \
- \
wtf/Assertions.cpp \
wtf/ByteArray.cpp \
wtf/CurrentTime.cpp \
wtf/ThreadIdentifierDataPthreads.cpp \
wtf/Threading.cpp \
wtf/ThreadingPthreads.cpp \
+ wtf/WTFThreadData.cpp \
\
wtf/TypeTraits.cpp \
wtf/dtoa.cpp \
\
wtf/android/MainThreadAndroid.cpp \
\
+ wtf/text/AtomicString.cpp \
+ wtf/text/CString.cpp \
+ wtf/text/StringImpl.cpp \
+ wtf/text/WTFString.cpp \
+ \
wtf/unicode/CollatorDefault.cpp \
wtf/unicode/UTF8.cpp \
\
- wtf/unicode/icu/CollatorICU.cpp
+ wtf/unicode/icu/CollatorICU.cpp \
+ \
+ yarr/RegexCompiler.cpp \
+ yarr/RegexInterpreter.cpp \
+ yarr/RegexJIT.cpp
# Rule to build grammar.y with our custom bison.
GEN := $(intermediates)/parser/Grammar.cpp
wtf/ThreadIdentifierDataPthreads.cpp \
wtf/Threading.cpp \
wtf/ThreadingPthreads.cpp \
- \
- wtf/android/MainThreadAndroid.cpp \
+ wtf/WTFThreadData.cpp \
\
wtf/TypeTraits.cpp \
wtf/dtoa.cpp \
\
+ wtf/android/MainThreadAndroid.cpp \
+ \
+ wtf/text/AtomicString.cpp \
+ wtf/text/CString.cpp \
+ wtf/text/StringImpl.cpp \
+ wtf/text/WTFString.cpp \
+ \
wtf/unicode/CollatorDefault.cpp \
wtf/unicode/UTF8.cpp \
\
RegExpObject.lut.h \
StringPrototype.lut.h \
docs/bytecode.html \
+ RegExpJitTables.h \
#
# lookup tables for classes
docs/bytecode.html: make-bytecode-docs.pl Interpreter.cpp
perl $^ $@
+
+#character tables for Yarr
+RegExpJitTables.h: create_regex_tables
+ python $^ > $@
# GENERATOR 3: JIT Stub functions for RVCT
rvctstubs.output = $${JSC_GENERATED_SOURCES_DIR}$${QMAKE_DIR_SEP}Generated${QMAKE_FILE_BASE}_RVCT.h
-rvctstubs.wkScript = $$PWD/create_rvct_stubs
-rvctstubs.commands = perl $$rvctstubs.wkScript ${QMAKE_FILE_NAME} -i > ${QMAKE_FILE_OUT}
+rvctstubs.wkScript = $$PWD/create_jit_stubs
+rvctstubs.commands = perl -i $$rvctstubs.wkScript --prefix RVCT ${QMAKE_FILE_NAME} > ${QMAKE_FILE_OUT}
rvctstubs.depends = ${QMAKE_FILE_NAME}
rvctstubs.input = RVCT_STUB_FILES
rvctstubs.CONFIG += no_link
ctgen.clean = ${QMAKE_FILE_OUT} ${QMAKE_VAR_JSC_GENERATED_SOURCES_DIR}${QMAKE_FILE_BASE}
addExtraCompiler(ctgen)
+# GENERATOR: "RegExpJitTables.h": tables used by Yarr
+retgen.output = $$JSC_GENERATED_SOURCES_DIR/RegExpJitTables.h
+retgen.wkScript = $$PWD/create_regex_tables
+retgen.input = retgen.wkScript
+retgen.commands = python $$retgen.wkScript > ${QMAKE_FILE_OUT}
+addExtraCompiler(retgen)
-I$(srcdir)/JavaScriptCore/pcre \
-I$(srcdir)/JavaScriptCore/profiler \
-I$(srcdir)/JavaScriptCore/runtime \
- -I$(srcdir)/JavaScriptCore/wrec \
-I$(srcdir)/JavaScriptCore/jit \
-I$(srcdir)/JavaScriptCore/assembler \
+ -I$(srcdir)/JavaScriptCore/wtf \
+ -I$(srcdir)/JavaScriptCore/wtf/text \
-I$(srcdir)/JavaScriptCore/wtf/unicode \
-I$(srcdir)/JavaScriptCore/yarr \
+ -I$(top_builddir)/JavaScriptCore \
-I$(top_builddir)/JavaScriptCore/pcre \
-I$(top_builddir)/JavaScriptCore/parser \
-I$(top_builddir)/JavaScriptCore/runtime
javascriptcore_built_nosources += \
DerivedSources/Lexer.lut.h \
+ JavaScriptCore/RegExpJitTables.h \
JavaScriptCore/runtime/ArrayPrototype.lut.h \
JavaScriptCore/runtime/DatePrototype.lut.h \
JavaScriptCore/runtime/JSONObject.lut.h \
JavaScriptCore/jit/ExecutableAllocator.h \
JavaScriptCore/jit/JIT.cpp \
JavaScriptCore/jit/JITOpcodes.cpp \
+ JavaScriptCore/jit/JITOpcodes32_64.cpp \
JavaScriptCore/jit/JITCall.cpp \
JavaScriptCore/jit/JITCode.h \
JavaScriptCore/jit/JITPropertyAccess.cpp \
+ JavaScriptCore/jit/JITPropertyAccess32_64.cpp \
JavaScriptCore/jit/JITArithmetic.cpp \
+ JavaScriptCore/jit/JITArithmetic32_64.cpp \
JavaScriptCore/jit/ExecutableAllocator.cpp \
JavaScriptCore/jit/JIT.h \
JavaScriptCore/jit/JITInlineMethods.h \
JavaScriptCore/jit/JITStubs.cpp \
JavaScriptCore/jit/JITStubs.h \
JavaScriptCore/jit/JITStubCall.h \
+ JavaScriptCore/jit/JSInterfaceJIT.h \
+ JavaScriptCore/jit/SpecializedThunkJIT.h \
+ JavaScriptCore/jit/ThunkGenerators.cpp \
+ JavaScriptCore/jit/ThunkGenerators.h \
JavaScriptCore/bytecode/StructureStubInfo.cpp \
JavaScriptCore/bytecode/StructureStubInfo.h \
JavaScriptCore/bytecode/CodeBlock.cpp \
JavaScriptCore/runtime/JSONObject.h \
JavaScriptCore/runtime/JSPropertyNameIterator.cpp \
JavaScriptCore/runtime/JSPropertyNameIterator.h \
+ JavaScriptCore/runtime/JSStringBuilder.h \
JavaScriptCore/runtime/JSZombie.h \
JavaScriptCore/runtime/LiteralParser.cpp \
JavaScriptCore/runtime/LiteralParser.h \
JavaScriptCore/runtime/StructureChain.cpp \
JavaScriptCore/runtime/StructureChain.h \
JavaScriptCore/runtime/StructureTransitionTable.h \
+ JavaScriptCore/runtime/Terminator.h \
JavaScriptCore/runtime/TimeoutChecker.cpp \
JavaScriptCore/runtime/TimeoutChecker.h \
JavaScriptCore/runtime/JSTypeInfo.h \
JavaScriptCore/runtime/WeakGCMap.h \
JavaScriptCore/runtime/WeakGCPtr.h \
- JavaScriptCore/wrec/CharacterClass.h \
- JavaScriptCore/wrec/CharacterClassConstructor.h \
- JavaScriptCore/wrec/Escapes.h \
- JavaScriptCore/wrec/Quantifier.h \
- JavaScriptCore/wrec/WREC.h \
- JavaScriptCore/wrec/WRECFunctors.h \
- JavaScriptCore/wrec/WRECGenerator.h \
- JavaScriptCore/wrec/WRECParser.h \
JavaScriptCore/wtf/ASCIICType.h \
JavaScriptCore/wtf/AVLTree.h \
JavaScriptCore/wtf/AlwaysInline.h \
JavaScriptCore/wtf/Assertions.cpp \
JavaScriptCore/wtf/Assertions.h \
+ JavaScriptCore/wtf/Atomics.h \
JavaScriptCore/wtf/ByteArray.cpp \
JavaScriptCore/wtf/ByteArray.h \
JavaScriptCore/wtf/CrossThreadRefCounted.h \
JavaScriptCore/wtf/ListHashSet.h \
JavaScriptCore/wtf/ListRefPtr.h \
JavaScriptCore/wtf/Locker.h \
+ JavaScriptCore/wtf/MD5.cpp \
+ JavaScriptCore/wtf/MD5.h \
JavaScriptCore/wtf/MainThread.cpp \
JavaScriptCore/wtf/MainThread.h \
JavaScriptCore/wtf/MathExtras.h \
JavaScriptCore/wtf/PassRefPtr.h \
JavaScriptCore/wtf/Platform.h \
JavaScriptCore/wtf/PossiblyNull.h \
- JavaScriptCore/wtf/PtrAndFlags.h \
JavaScriptCore/wtf/RandomNumber.cpp \
JavaScriptCore/wtf/RandomNumber.h \
JavaScriptCore/wtf/RandomNumberSeed.h \
JavaScriptCore/wtf/RefPtrHashMap.h \
JavaScriptCore/wtf/RetainPtr.h \
JavaScriptCore/wtf/SegmentedVector.h \
+ JavaScriptCore/wtf/StaticConstructors.h \
JavaScriptCore/wtf/StdLibExtras.h \
JavaScriptCore/wtf/StringExtras.h \
JavaScriptCore/wtf/StringHashFunctions.h \
JavaScriptCore/wtf/ThreadIdentifierDataPthreads.h \
JavaScriptCore/wtf/Threading.cpp \
JavaScriptCore/wtf/Threading.h \
+ JavaScriptCore/wtf/ThreadingPrimitives.h \
JavaScriptCore/wtf/ThreadingPthreads.cpp \
+ JavaScriptCore/wtf/ThreadSafeShared.h \
JavaScriptCore/wtf/ThreadSpecific.h \
JavaScriptCore/wtf/TypeTraits.cpp \
JavaScriptCore/wtf/TypeTraits.h \
JavaScriptCore/wtf/UnusedParam.h \
+ JavaScriptCore/wtf/ValueCheck.h \
JavaScriptCore/wtf/Vector.h \
JavaScriptCore/wtf/VectorTraits.h \
- JavaScriptCore/wtf/gtk/GOwnPtr.cpp \
- JavaScriptCore/wtf/gtk/GOwnPtr.h \
- JavaScriptCore/wtf/gtk/GRefPtr.cpp \
- JavaScriptCore/wtf/gtk/GRefPtr.h \
+ JavaScriptCore/wtf/WTFThreadData.cpp \
+ JavaScriptCore/wtf/WTFThreadData.h \
+ JavaScriptCore/wtf/gobject/GOwnPtr.cpp \
+ JavaScriptCore/wtf/gobject/GOwnPtr.h \
+ JavaScriptCore/wtf/gobject/GRefPtr.cpp \
+ JavaScriptCore/wtf/gobject/GRefPtr.h \
JavaScriptCore/wtf/gtk/MainThreadGtk.cpp \
JavaScriptCore/wtf/gtk/ThreadingGtk.cpp \
+ JavaScriptCore/wtf/text/AtomicString.cpp \
+ JavaScriptCore/wtf/text/AtomicString.h \
+ JavaScriptCore/wtf/text/AtomicStringImpl.h \
+ JavaScriptCore/wtf/text/CString.cpp \
+ JavaScriptCore/wtf/text/CString.h \
+ JavaScriptCore/wtf/text/StringHash.h \
+ JavaScriptCore/wtf/text/StringImpl.cpp \
+ JavaScriptCore/wtf/text/StringImpl.h \
+ JavaScriptCore/wtf/text/StringStatics.cpp \
+ JavaScriptCore/wtf/text/WTFString.cpp \
+ JavaScriptCore/wtf/text/WTFString.h \
JavaScriptCore/wtf/unicode/Collator.h \
JavaScriptCore/wtf/unicode/CollatorDefault.cpp \
JavaScriptCore/wtf/unicode/UTF8.cpp \
JavaScriptCore/runtime/PutPropertySlot.h \
JavaScriptCore/runtime/RegExp.cpp \
JavaScriptCore/runtime/RegExp.h \
+ JavaScriptCore/runtime/RegExpCache.cpp \
+ JavaScriptCore/runtime/RegExpCache.h \
JavaScriptCore/runtime/RegExpConstructor.cpp \
JavaScriptCore/runtime/RegExpConstructor.h \
+ JavaScriptCore/runtime/RegExpKey.h \
JavaScriptCore/runtime/RegExpMatchesArray.h \
JavaScriptCore/runtime/RegExpObject.cpp \
JavaScriptCore/runtime/RegExpObject.h \
JavaScriptCore/runtime/RegExpPrototype.cpp \
JavaScriptCore/runtime/RegExpPrototype.h \
+ JavaScriptCore/runtime/RopeImpl.cpp \
+ JavaScriptCore/runtime/RopeImpl.h \
JavaScriptCore/runtime/ScopeChain.cpp \
JavaScriptCore/runtime/ScopeChain.h \
JavaScriptCore/runtime/ScopeChainMark.h \
JavaScriptCore/runtime/Tracing.h \
JavaScriptCore/runtime/UString.cpp \
JavaScriptCore/runtime/UString.h \
- JavaScriptCore/runtime/UStringImpl.cpp \
JavaScriptCore/runtime/UStringImpl.h \
JavaScriptCore/runtime/WeakRandom.h \
JavaScriptCore/wtf/FastAllocBase.h \
JavaScriptCore/%.lut.h: $(CREATE_HASH_TABLE) $(srcdir)/JavaScriptCore/%.cpp
$(PERL) $^ -i > $@
+JavaScriptCore/RegExpJitTables.h: $(srcdir)/JavaScriptCore/create_regex_tables
+ $(PYTHON) $(srcdir)/JavaScriptCore/create_regex_tables > $@
+
JavaScriptCore/pcre/chartables.c: $(srcdir)/JavaScriptCore/pcre/dftables
$(PERL) $^ $@
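As a side note, the generated-header rules above just run a generator script and capture its stdout into the target file. A minimal Python sketch of doing the same by hand for the new RegExpJitTables.h rule (the srcdir/output paths are assumptions about the checkout layout, not part of the patch):

```python
import subprocess
from pathlib import Path

# Illustration only: the make rule runs
#   $(PYTHON) $(srcdir)/JavaScriptCore/create_regex_tables > RegExpJitTables.h
# which amounts to the following when done by hand.
srcdir = Path("JavaScriptCore")                      # assumed checkout layout
out = Path("JavaScriptCore/RegExpJitTables.h")       # assumed build location

result = subprocess.run(
    ["python", str(srcdir / "create_regex_tables")],
    check=True, capture_output=True, text=True)
out.write_text(result.stdout)
```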
javascriptcore_dist += \
$(CREATE_HASH_TABLE) \
+ $(CREATE_REGEXP_TABLES) \
JavaScriptCore/AUTHORS \
JavaScriptCore/COPYING.LIB \
JavaScriptCore/ChangeLog \
+__ZN7WebCore10StringImpl6createEPKcj
+__ZN7WebCore12AtomicString11addSlowCaseEPNS_10StringImplE
_JSCheckScriptSyntax
_JSClassCreate
_JSClassRelease
_JSObjectCallAsConstructor
_JSObjectCallAsFunction
_JSObjectCopyPropertyNames
+_JSObjectDeletePrivateProperty
_JSObjectDeleteProperty
_JSObjectGetPrivate
+_JSObjectGetPrivateProperty
_JSObjectGetProperty
_JSObjectGetPropertyAtIndex
_JSObjectGetPrototype
_JSObjectMakeFunctionWithCallback
_JSObjectMakeRegExp
_JSObjectSetPrivate
+_JSObjectSetPrivateProperty
_JSObjectSetProperty
_JSObjectSetPropertyAtIndex
_JSObjectSetPrototype
_JSStringIsEqualToUTF8CString
_JSStringRelease
_JSStringRetain
+_JSValueCreateJSONString
_JSValueGetType
_JSValueIsBoolean
_JSValueIsEqual
_JSValueIsString
_JSValueIsUndefined
_JSValueMakeBoolean
+_JSValueMakeFromJSONString
_JSValueMakeNull
_JSValueMakeNumber
_JSValueMakeString
_JSValueToObject
_JSValueToStringCopy
_JSValueUnprotect
+_JSWeakObjectMapClear
+_JSWeakObjectMapCreate
+_JSWeakObjectMapGet
+_JSWeakObjectMapSet
_WebCoreWebThreadIsLockedOrDisabled
_WTFLog
_WTFLogVerbose
__Z15jsRegExpCompilePKti24JSRegExpIgnoreCaseOption23JSRegExpMultilineOptionPjPPKc
__Z15jsRegExpExecutePK8JSRegExpPKtiiPii
__ZN14OpaqueJSString6createERKN3JSC7UStringE
-__ZN3JSC10Identifier11addSlowCaseEPNS_12JSGlobalDataEPNS_11UStringImplE
-__ZN3JSC10Identifier11addSlowCaseEPNS_9ExecStateEPNS_11UStringImplE
-__ZN3JSC10Identifier24checkSameIdentifierTableEPNS_12JSGlobalDataEPNS_11UStringImplE
-__ZN3JSC10Identifier24checkSameIdentifierTableEPNS_9ExecStateEPNS_11UStringImplE
+__ZN3JSC10Identifier11addSlowCaseEPNS_12JSGlobalDataEPN7WebCore10StringImplE
+__ZN3JSC10Identifier11addSlowCaseEPNS_9ExecStateEPN7WebCore10StringImplE
+__ZN3JSC10Identifier27checkCurrentIdentifierTableEPNS_12JSGlobalDataE
+__ZN3JSC10Identifier27checkCurrentIdentifierTableEPNS_9ExecStateE
__ZN3JSC10Identifier3addEPNS_9ExecStateEPKc
-__ZN3JSC10Identifier5equalEPKNS_11UStringImplEPKc
+__ZN3JSC10Identifier4fromEPNS_9ExecStateEi
+__ZN3JSC10Identifier4fromEPNS_9ExecStateEj
+__ZN3JSC10Identifier5equalEPKN7WebCore10StringImplEPKc
__ZN3JSC10JSFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectESA_RKNS_7ArgListEE
__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeE
__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeEPKc
__ZN3JSC11JSByteArray15createStructureENS_7JSValueE
__ZN3JSC11JSByteArrayC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS3_9ByteArrayEPKNS_9ClassInfoE
__ZN3JSC11ParserArena5resetEv
-__ZN3JSC11UStringImpl12sharedBufferEv
-__ZN3JSC11UStringImpl6s_nullE
-__ZN3JSC11UStringImpl7s_emptyE
-__ZN3JSC11UStringImplD1Ev
__ZN3JSC11checkSyntaxEPNS_9ExecStateERKNS_10SourceCodeE
__ZN3JSC12DateInstance4infoE
__ZN3JSC12DateInstanceC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEd
__ZN3JSC12DateInstanceC1EPNS_9ExecStateEd
__ZN3JSC12JSGlobalData10ClientDataD2Ev
__ZN3JSC12JSGlobalData11jsArrayVPtrE
-__ZN3JSC12JSGlobalData12createLeakedEv
+__ZN3JSC12JSGlobalData12createLeakedENS_15ThreadStackTypeE
__ZN3JSC12JSGlobalData12jsStringVPtrE
__ZN3JSC12JSGlobalData12stopSamplingEv
__ZN3JSC12JSGlobalData13startSamplingEv
__ZN3JSC12JSGlobalData14dumpSampleDataEPNS_9ExecStateE
__ZN3JSC12JSGlobalData14resetDateCacheEv
__ZN3JSC12JSGlobalData14sharedInstanceEv
-__ZN3JSC12JSGlobalData6createEv
+__ZN3JSC12JSGlobalData6createENS_15ThreadStackTypeE
__ZN3JSC12JSGlobalDataD1Ev
__ZN3JSC12SamplingTool5setupEv
__ZN3JSC12SmallStrings17createEmptyStringEPNS_12JSGlobalDataE
__ZN3JSC16JSVariableObject19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
__ZN3JSC16toUInt32SlowCaseEdRb
__ZN3JSC17BytecodeGenerator21setDumpsGeneratedCodeEb
-__ZN3JSC17PropertyNameArray3addEPNS_11UStringImplE
+__ZN3JSC17PropertyNameArray3addEPN7WebCore10StringImplE
__ZN3JSC17PrototypeFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectESA_RKNS_7ArgListEE
__ZN3JSC17PrototypeFunctionC1EPNS_9ExecStateEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectES6_RKNS_7ArgListEE
__ZN3JSC17constructFunctionEPNS_9ExecStateERKNS_7ArgListERKNS_10IdentifierERKNS_7UStringEi
__ZN3JSC23setUpStaticFunctionSlotEPNS_9ExecStateEPKNS_9HashEntryEPNS_8JSObjectERKNS_10IdentifierERNS_12PropertySlotE
__ZN3JSC24createStackOverflowErrorEPNS_9ExecStateE
__ZN3JSC25evaluateInGlobalCallFrameERKNS_7UStringERNS_7JSValueEPNS_14JSGlobalObjectE
-__ZN3JSC25g_identifierTableSpecificE
-__ZN3JSC29createIdentifierTableSpecificEv
__ZN3JSC35createInterruptedExecutionExceptionEPNS_12JSGlobalDataE
__ZN3JSC3NaNE
__ZN3JSC4Heap14primaryHeapEndEv
__ZN3JSC4Heap15recordExtraCostEm
+__ZN3JSC4Heap16objectTypeCountsEv
__ZN3JSC4Heap16primaryHeapBeginEv
__ZN3JSC4Heap17collectAllGarbageEv
__ZN3JSC4Heap17globalObjectCountEv
__ZN3JSC4Heap8allocateEm
__ZN3JSC4Heap9unprotectENS_7JSValueE
__ZN3JSC4callEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataES2_RKNS_7ArgListE
-__ZN3JSC5equalEPKNS_11UStringImplES2_
__ZN3JSC6JSCell11getCallDataERNS_8CallDataE
__ZN3JSC6JSCell11getJSNumberEv
__ZN3JSC6JSCell14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
__ZN3JSC6JSCell14deletePropertyEPNS_9ExecStateEj
-__ZN3JSC6JSCell14toThisJSStringEPNS_9ExecStateE
__ZN3JSC6JSCell16getConstructDataERNS_13ConstructDataE
__ZN3JSC6JSCell18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
__ZN3JSC6JSCell18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
__ZN3JSC6JSLock9lockCountEv
__ZN3JSC6JSLockC1EPNS_9ExecStateE
__ZN3JSC6Parser5parseEPNS_12JSGlobalDataEPiPNS_7UStringE
-__ZN3JSC7CStringD1Ev
-__ZN3JSC7CStringaSERKS0_
+__ZN3JSC7JSArray12markChildrenERNS_9MarkStackE
+__ZN3JSC7JSArray15setSubclassDataEPv
__ZN3JSC7JSArray4infoE
__ZN3JSC7JSArray9setLengthEj
__ZN3JSC7JSArrayC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
__ZN3JSC7JSArrayC1EN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7ArgListE
+__ZN3JSC7JSArrayC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC7JSArrayD2Ev
__ZN3JSC7Profile10restoreAllEv
__ZN3JSC7Profile5focusEPKNS_11ProfileNodeE
__ZN3JSC7Profile7excludeEPKNS_11ProfileNodeE
__ZN3JSC7UString4fromEj
__ZN3JSC7UString4fromEl
__ZN3JSC7UStringC1EPKc
-__ZN3JSC7UStringC1EPKti
-__ZN3JSC7UStringaSEPKc
+__ZN3JSC7UStringC1EPKtj
__ZN3JSC8Debugger23recompileAllJSFunctionsEPNS_12JSGlobalDataE
__ZN3JSC8Debugger6attachEPNS_14JSGlobalObjectE
__ZN3JSC8Debugger6detachEPNS_14JSGlobalObjectE
__ZN3JSC8JSObject24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
__ZN3JSC8JSObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
__ZN3JSC8JSObject3putEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC8JSString4RopeD1Ev
__ZN3JSC8Profiler13stopProfilingEPNS_9ExecStateERKNS_7UStringE
__ZN3JSC8Profiler14startProfilingEPNS_9ExecStateERKNS_7UStringE
__ZN3JSC8Profiler8profilerEv
__ZN3JSC9MarkStack12releaseStackEPvm
__ZN3JSC9MarkStack13allocateStackEm
__ZN3JSC9MarkStack18initializePagesizeEv
-__ZN3JSC9Structure13hasTransitionEPNS_11UStringImplEj
+__ZN3JSC9Structure13hasTransitionEPN7WebCore10StringImplEj
__ZN3JSC9Structure17stopIgnoringLeaksEv
__ZN3JSC9Structure18startIgnoringLeaksEv
__ZN3JSC9Structure21addPropertyTransitionEPS0_RKNS_10IdentifierEjPNS_6JSCellERm
__ZN3JSC9Structure27despecifyDictionaryFunctionERKNS_10IdentifierE
__ZN3JSC9Structure27despecifyFunctionTransitionEPS0_RKNS_10IdentifierE
__ZN3JSC9Structure28addPropertyWithoutTransitionERKNS_10IdentifierEjPNS_6JSCellE
-__ZN3JSC9Structure3getEPKNS_11UStringImplERjRPNS_6JSCellE
+__ZN3JSC9Structure3getEPKN7WebCore10StringImplERjRPNS_6JSCellE
__ZN3JSC9Structure40addPropertyTransitionToExistingStructureEPS0_RKNS_10IdentifierEjPNS_6JSCellERm
__ZN3JSC9StructureC1ENS_7JSValueERKNS_8TypeInfoEj
__ZN3JSC9StructureD1Ev
__ZN3JSCltERKNS_7UStringES2_
__ZN3WTF10fastCallocEmm
__ZN3WTF10fastMallocEm
+__ZN3WTF10fastStrDupEPKc
__ZN3WTF11currentTimeEv
__ZN3WTF11fastReallocEPvm
__ZN3WTF12createThreadEPFPvS0_ES0_
__ZN3WTF12detachThreadEj
__ZN3WTF12isMainThreadEv
__ZN3WTF12randomNumberEv
+__ZN3WTF13WTFThreadData10staticDataE
+__ZN3WTF13WTFThreadDataC1Ev
+__ZN3WTF13WTFThreadDataD1Ev
__ZN3WTF13currentThreadEv
__ZN3WTF13tryFastCallocEmm
__ZN3WTF13tryFastMallocEm
+__ZN3WTF14fastMallocSizeEPKv
__ZN3WTF15ThreadCondition4waitERNS_5MutexE
__ZN3WTF15ThreadCondition6signalEv
__ZN3WTF15ThreadCondition9broadcastEv
__ZN3WTF18monthFromDayInYearEib
__ZN3WTF19initializeThreadingEv
__ZN3WTF20fastMallocStatisticsEv
+__ZN3WTF20initializeMainThreadEv
__ZN3WTF21RefCountedLeakCounter16suppressMessagesEPKc
__ZN3WTF21RefCountedLeakCounter24cancelMessageSuppressionEPKc
__ZN3WTF21RefCountedLeakCounter9decrementEv
__ZN3WTF36lockAtomicallyInitializedStaticMutexEv
__ZN3WTF37parseDateFromNullTerminatedCharactersEPKc
__ZN3WTF38unlockAtomicallyInitializedStaticMutexEv
+__ZN3WTF39initializeMainThreadToProcessMainThreadEv
+__ZN3WTF3MD58addBytesEPKhm
+__ZN3WTF3MD58checksumEv
+__ZN3WTF3MD5C1Ev
__ZN3WTF5Mutex4lockEv
__ZN3WTF5Mutex6unlockEv
__ZN3WTF5Mutex7tryLockEv
__ZN3WTF5MutexC1Ev
__ZN3WTF5MutexD1Ev
__ZN3WTF6strtodEPKcPPc
+__ZN3WTF7CString11mutableDataEv
+__ZN3WTF7CString16newUninitializedEmRPc
+__ZN3WTF7CStringC1EPKc
+__ZN3WTF7CStringC1EPKcj
__ZN3WTF7Unicode18convertUTF16ToUTF8EPPKtS2_PPcS4_b
+__ZN3WTF7Unicode18convertUTF8ToUTF16EPPKcS2_PPtS4_b
__ZN3WTF8Collator18setOrderLowerFirstEb
__ZN3WTF8CollatorC1EPKc
__ZN3WTF8CollatorD1Ev
__ZN3WTF8fastFreeEPv
__ZN3WTF8msToYearEd
-__ZN3WTF9dayInYearEdi
__ZN3WTF9ByteArray6createEm
+__ZN3WTF9dayInYearEdi
+__ZN3WTFeqERKNS_7CStringES2_
+__ZN7WebCore10StringImpl11reverseFindEPS0_ib
+__ZN7WebCore10StringImpl11reverseFindEti
+__ZN7WebCore10StringImpl12sharedBufferEv
+__ZN7WebCore10StringImpl18simplifyWhiteSpaceEv
+__ZN7WebCore10StringImpl19characterStartingAtEj
+__ZN7WebCore10StringImpl19createUninitializedEjRPt
+__ZN7WebCore10StringImpl22containsOnlyWhitespaceEv
+__ZN7WebCore10StringImpl23defaultWritingDirectionEv
+__ZN7WebCore10StringImpl37createStrippingNullCharactersSlowCaseEPKtj
+__ZN7WebCore10StringImpl4findEPFbtEi
+__ZN7WebCore10StringImpl4findEPKcib
+__ZN7WebCore10StringImpl4findEPS0_ib
+__ZN7WebCore10StringImpl4findEti
+__ZN7WebCore10StringImpl5adoptERNS_12StringBufferE
+__ZN7WebCore10StringImpl5emptyEv
+__ZN7WebCore10StringImpl5lowerEv
+__ZN7WebCore10StringImpl5toIntEPb
+__ZN7WebCore10StringImpl5upperEv
+__ZN7WebCore10StringImpl6createEPKc
+__ZN7WebCore10StringImpl6createEPKtj
+__ZN7WebCore10StringImpl6createEPKtjN3WTF10PassRefPtrINS3_21CrossThreadRefCountedINS3_16OwnFastMallocPtrIS1_EEEEEE
+__ZN7WebCore10StringImpl6secureEtb
+__ZN7WebCore10StringImpl7replaceEPS0_S1_
+__ZN7WebCore10StringImpl7replaceEjjPS0_
+__ZN7WebCore10StringImpl7replaceEtPS0_
+__ZN7WebCore10StringImpl7replaceEtt
+__ZN7WebCore10StringImpl8endsWithEPS0_b
+__ZN7WebCore10StringImpl9substringEjj
+__ZN7WebCore10StringImplD1Ev
+__ZN7WebCore11commentAtomE
+__ZN7WebCore12AtomicString3addEPKc
+__ZN7WebCore12AtomicString3addEPKt
+__ZN7WebCore12AtomicString3addEPKtj
+__ZN7WebCore12AtomicString3addEPKtjj
+__ZN7WebCore12AtomicString4findEPKtjj
+__ZN7WebCore12AtomicString4initEv
+__ZN7WebCore15charactersToIntEPKtmPb
+__ZN7WebCore17charactersToFloatEPKtmPb
+__ZN7WebCore17equalIgnoringCaseEPKtPKcj
+__ZN7WebCore17equalIgnoringCaseEPNS_10StringImplEPKc
+__ZN7WebCore17equalIgnoringCaseEPNS_10StringImplES1_
+__ZN7WebCore18charactersToDoubleEPKtmPb
+__ZN7WebCore20equalIgnoringNullityEPNS_10StringImplES1_
+__ZN7WebCore21charactersToIntStrictEPKtmPbi
+__ZN7WebCore22charactersToUIntStrictEPKtmPbi
+__ZN7WebCore5equalEPKNS_10StringImplEPKc
+__ZN7WebCore5equalEPKNS_10StringImplES2_
+__ZN7WebCore6String26fromUTF8WithLatin1FallbackEPKcm
+__ZN7WebCore6String29charactersWithNullTerminationEv
+__ZN7WebCore6String6appendEPKtj
+__ZN7WebCore6String6appendERKS0_
+__ZN7WebCore6String6appendEc
+__ZN7WebCore6String6appendEt
+__ZN7WebCore6String6formatEPKcz
+__ZN7WebCore6String6insertERKS0_j
+__ZN7WebCore6String6numberEd
+__ZN7WebCore6String6numberEi
+__ZN7WebCore6String6numberEj
+__ZN7WebCore6String6numberEl
+__ZN7WebCore6String6numberEm
+__ZN7WebCore6String6numberEt
+__ZN7WebCore6String6numberEx
+__ZN7WebCore6String6numberEy
+__ZN7WebCore6String6removeEji
+__ZN7WebCore6String8fromUTF8EPKc
+__ZN7WebCore6String8fromUTF8EPKcm
+__ZN7WebCore6String8truncateEj
+__ZN7WebCore6StringC1EPKt
+__ZN7WebCore7xmlAtomE
+__ZN7WebCore8nullAtomE
+__ZN7WebCore8starAtomE
+__ZN7WebCore8textAtomE
+__ZN7WebCore9emptyAtomE
+__ZN7WebCore9xmlnsAtomE
+__ZN7WebCoreeqERKNS_12AtomicStringEPKc
+__ZN7WebCoreplEPKcRKNS_6StringE
+__ZN7WebCoreplERKNS_6StringEPKc
+__ZN7WebCoreplERKNS_6StringES2_
__ZNK3JSC10JSFunction23isHostFunctionNonInlineEv
__ZNK3JSC11Interpreter14retrieveCallerEPNS_9ExecStateEPNS_16InternalFunctionE
__ZNK3JSC11Interpreter18retrieveLastCallerEPNS_9ExecStateERiRlRNS_7UStringERNS_7JSValueE
-__ZNK3JSC14JSGlobalObject14isDynamicScopeEv
+__ZNK3JSC12PropertySlot14functionGetterEPNS_9ExecStateE
+__ZNK3JSC14JSGlobalObject14isDynamicScopeERb
__ZNK3JSC16InternalFunction9classInfoEv
__ZNK3JSC16JSVariableObject16isVariableObjectEv
__ZNK3JSC17DebuggerCallFrame10thisObjectEv
__ZNK3JSC4Heap11objectCountEv
__ZNK3JSC6JSCell11toPrimitiveEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
__ZNK3JSC6JSCell12toThisObjectEPNS_9ExecStateE
-__ZNK3JSC6JSCell12toThisStringEPNS_9ExecStateE
__ZNK3JSC6JSCell14isGetterSetterEv
__ZNK3JSC6JSCell8toNumberEPNS_9ExecStateE
__ZNK3JSC6JSCell8toObjectEPNS_9ExecStateE
__ZNK3JSC6JSCell9getUInt32ERj
__ZNK3JSC6JSCell9toBooleanEPNS_9ExecStateE
__ZNK3JSC7ArgList8getSliceEiRS0_
+__ZNK3JSC7JSArray12subclassDataEv
__ZNK3JSC7JSValue16toObjectSlowCaseEPNS_9ExecStateE
__ZNK3JSC7JSValue19synthesizePrototypeEPNS_9ExecStateE
__ZNK3JSC7JSValue20toThisObjectSlowCaseEPNS_9ExecStateE
__ZNK3JSC7UString14toStrictUInt32EPb
__ZNK3JSC7UString5asciiEv
__ZNK3JSC7UString6is8BitEv
-__ZNK3JSC7UString6substrEii
+__ZNK3JSC7UString6substrEjj
__ZNK3JSC7UString8toUInt32EPb
__ZNK3JSC7UString8toUInt32EPbb
__ZNK3JSC8JSObject11hasPropertyEPNS_9ExecStateERKNS_10IdentifierE
__ZNK3JSC9HashTable11createTableEPNS_12JSGlobalDataE
__ZNK3JSC9HashTable11deleteTableEv
__ZNK3WTF8Collator7collateEPKtmS2_m
+__ZNK7WebCore12AtomicString5lowerEv
+__ZNK7WebCore6String11toIntStrictEPbi
+__ZNK7WebCore6String12toUIntStrictEPbi
+__ZNK7WebCore6String14threadsafeCopyEv
+__ZNK7WebCore6String15stripWhiteSpaceEv
+__ZNK7WebCore6String16removeCharactersEPFbtE
+__ZNK7WebCore6String17crossThreadStringEv
+__ZNK7WebCore6String18simplifyWhiteSpaceEv
+__ZNK7WebCore6String19characterStartingAtEj
+__ZNK7WebCore6String4utf8Ev
+__ZNK7WebCore6String5asciiEv
+__ZNK7WebCore6String5lowerEv
+__ZNK7WebCore6String5splitERKS0_RN3WTF6VectorIS0_Lm0EEE
+__ZNK7WebCore6String5splitERKS0_bRN3WTF6VectorIS0_Lm0EEE
+__ZNK7WebCore6String5splitEtRN3WTF6VectorIS0_Lm0EEE
+__ZNK7WebCore6String5splitEtbRN3WTF6VectorIS0_Lm0EEE
+__ZNK7WebCore6String5toIntEPb
+__ZNK7WebCore6String5upperEv
+__ZNK7WebCore6String6latin1Ev
+__ZNK7WebCore6String6toUIntEPb
+__ZNK7WebCore6String7toFloatEPb
+__ZNK7WebCore6String8foldCaseEv
+__ZNK7WebCore6String8toDoubleEPb
+__ZNK7WebCore6String8toIntPtrEPb
+__ZNK7WebCore6String8toUInt64EPb
+__ZNK7WebCore6String9substringEjj
__ZTVN3JSC12StringObjectE
__ZTVN3JSC14JSGlobalObjectE
__ZTVN3JSC15JSWrapperObjectE
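The mangled names in the export list above are easier to audit once demangled. A minimal Python sketch, assuming a `c++filt` binary is available on PATH (it is not part of this patch): strip the Mach-O leading underscore, then pipe the remaining Itanium-mangled name through `c++filt`.

```python
import subprocess

# Illustrative helper, not part of the patch: demangle one exported symbol.
# Assumes a binutils/LLVM `c++filt` is installed and on PATH.
def demangle(symbol):
    # Mach-O export lists carry a leading underscore; drop it so the
    # remaining "_ZN..." form is what c++filt expects on stdin.
    name = symbol[1:] if symbol.startswith("__Z") else symbol
    out = subprocess.run(["c++filt"], input=name + "\n",
                         capture_output=True, text=True, check=True)
    return out.stdout.strip()

print(demangle("__ZN7WebCore10StringImpl6createEPKcj"))
# Expected: WebCore::StringImpl::create(char const*, unsigned int)
```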
# ... Then include what we want.
['include', '../wtf/'],
# GLib/GTK-specific sources, even though the directory names don't really indicate it.
- ['exclude', '/(gtk|glib)/.*\\.(cpp|h)$'],
+ ['exclude', '/(gtk|glib|gobject)/.*\\.(cpp|h)$'],
['exclude', '(Default|Gtk|Mac|None|Qt|Win|Wx)\\.(cpp|mm)$'],
['exclude', 'wtf/CurrentTime\\.cpp$'],
+ ['exclude', 'wtf/MainThread.cpp$'],
['exclude', 'wtf/TC.*\\.(cpp|h)$'],
],
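For context on how ordered [action, regex] pairs of this shape filter a source list, here is a rough Python sketch. It is an illustration only, not GYP's actual implementation, and the sample paths are made up:

```python
import re

# Illustration: apply ordered include/exclude regex filters, in the spirit of
# the 'sources/' rules above, to a list of candidate paths. Not GYP itself.
def apply_filters(paths, filters):
    kept = list(paths)
    for action, pattern in filters:
        rx = re.compile(pattern)
        if action == 'exclude':
            kept = [p for p in kept if not rx.search(p)]
        elif action == 'include':
            # Re-admit previously dropped paths that match the pattern.
            kept += [p for p in paths if rx.search(p) and p not in kept]
    return kept

filters = [
    ['exclude', r'/(gtk|glib|gobject)/.*\.(cpp|h)$'],
    ['exclude', r'wtf/TC.*\.(cpp|h)$'],
]
sources = [
    '../wtf/gobject/GOwnPtr.cpp',   # dropped by the gtk|glib|gobject rule
    '../wtf/TCSystemAlloc.cpp',     # dropped by the wtf/TC.* rule
    '../wtf/text/WTFString.cpp',    # kept
]
print(apply_filters(sources, filters))
# -> ['../wtf/text/WTFString.cpp']
```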
'direct_dependent_settings': {
'jit/JIT.cpp',
'jit/JIT.h',
'jit/JITArithmetic.cpp',
+ 'jit/JITArithmetic32_64.cpp',
'jit/JITCall.cpp',
'jit/JITCode.h',
'jit/JITInlineMethods.h',
'jit/JITOpcodes.cpp',
+ 'jit/JITOpcodes32_64.cpp',
'jit/JITPropertyAccess.cpp',
+ 'jit/JITPropertyAccess32_64.cpp',
'jit/JITStubCall.h',
'jit/JITStubs.cpp',
'jit/JITStubs.h',
'runtime/PutPropertySlot.h',
'runtime/RegExp.cpp',
'runtime/RegExp.h',
+ 'runtime/RegExpCache.cpp',
+ 'runtime/RegExpCache.h',
'runtime/RegExpConstructor.cpp',
'runtime/RegExpConstructor.h',
+ 'runtime/RegExpKey.h',
'runtime/RegExpMatchesArray.h',
'runtime/RegExpObject.cpp',
'runtime/RegExpObject.h',
'runtime/StructureChain.h',
'runtime/StructureTransitionTable.h',
'runtime/SymbolTable.h',
+ 'runtime/Terminator.h',
'runtime/TimeoutChecker.cpp',
'runtime/TimeoutChecker.h',
'runtime/Tracing.h',
'runtime/UString.cpp',
'runtime/UString.h',
'runtime/WeakRandom.h',
- 'wrec/CharacterClass.cpp',
- 'wrec/CharacterClass.h',
- 'wrec/CharacterClassConstructor.cpp',
- 'wrec/CharacterClassConstructor.h',
- 'wrec/Escapes.h',
- 'wrec/Quantifier.h',
- 'wrec/WREC.cpp',
- 'wrec/WREC.h',
- 'wrec/WRECFunctors.cpp',
- 'wrec/WRECFunctors.h',
- 'wrec/WRECGenerator.cpp',
- 'wrec/WRECGenerator.h',
- 'wrec/WRECParser.cpp',
- 'wrec/WRECParser.h',
'wtf/AlwaysInline.h',
'wtf/ASCIICType.h',
'wtf/Assertions.cpp',
'wtf/Assertions.h',
+ 'wtf/Atomics.h',
'wtf/AVLTree.h',
'wtf/ByteArray.cpp',
'wtf/ByteArray.h',
'wtf/FastMalloc.h',
'wtf/Forward.h',
'wtf/GetPtr.h',
- 'wtf/gtk/GOwnPtr.cpp',
- 'wtf/gtk/GOwnPtr.h',
+ 'wtf/gobject/GOwnPtr.cpp',
+ 'wtf/gobject/GOwnPtr.h',
'wtf/gtk/MainThreadGtk.cpp',
'wtf/gtk/ThreadingGtk.cpp',
'wtf/HashCountedSet.h',
'wtf/ListHashSet.h',
'wtf/ListRefPtr.h',
'wtf/Locker.h',
+ 'wtf/MD5.cpp',
+ 'wtf/MD5.h',
'wtf/MainThread.cpp',
'wtf/MainThread.h',
'wtf/MallocZoneSupport.h',
'wtf/OwnFastMallocPtr.h',
'wtf/OwnPtr.h',
'wtf/OwnPtrCommon.h',
- 'wtf/OwnPtrWin.cpp',
'wtf/PassOwnPtr.h',
'wtf/PassRefPtr.h',
'wtf/Platform.h',
'wtf/RefPtrHashMap.h',
'wtf/RetainPtr.h',
'wtf/SegmentedVector.h',
+ 'wtf/StaticConstructors.h',
'wtf/StdLibExtras.h',
'wtf/StringExtras.h',
'wtf/StringHashFunctions.h',
'wtf/TCPackedCache.h',
'wtf/qt/MainThreadQt.cpp',
+ 'wtf/qt/StringQt.cpp',
'wtf/qt/ThreadingQt.cpp',
'wtf/TCPageMap.h',
'wtf/TCSpinLock.h',
'wtf/Threading.cpp',
'wtf/Threading.h',
'wtf/ThreadingNone.cpp',
+ 'wtf/ThreadingPrimitives.h',
'wtf/ThreadingPthreads.cpp',
'wtf/ThreadingWin.cpp',
+ 'wtf/ThreadSafeShared.h',
'wtf/ThreadSpecific.h',
'wtf/ThreadSpecificWin.cpp',
'wtf/TypeTraits.cpp',
'wtf/TypeTraits.h',
+ 'wtf/text/AtomicString.cpp',
+ 'wtf/text/AtomicString.h',
+ 'wtf/text/AtomicStringImpl.h',
+ 'wtf/text/CString.cpp',
+ 'wtf/text/CString.h',
+ 'wtf/text/StringHash.h',
+ 'wtf/text/StringImpl.cpp',
+ 'wtf/text/StringImpl.h',
+ 'wtf/text/StringStatics.cpp',
+ 'wtf/text/WTFString.cpp',
+ 'wtf/text/WTFString.h',
'wtf/unicode/Collator.h',
'wtf/unicode/CollatorDefault.cpp',
'wtf/unicode/glib/UnicodeGLib.cpp',
'wtf/unicode/UTF8.cpp',
'wtf/unicode/UTF8.h',
'wtf/UnusedParam.h',
+ 'wtf/ValueCheck.h',
'wtf/Vector.h',
'wtf/VectorTraits.h',
'wtf/VMTags.h',
+ 'wtf/WTFThreadData.cpp',
+ 'wtf/WTFThreadData.h',
'wtf/win/MainThreadWin.cpp',
+ 'wtf/win/OwnPtrWin.cpp',
'wtf/wx/MainThreadWx.cpp',
'yarr/RegexCompiler.cpp',
'yarr/RegexCompiler.h',
+++ /dev/null
-__ZN3WTF10fastMallocEm
-__ZN3WTF10fastMallocILb1EEEPvm
-__ZN3WTF20TCMalloc_ThreadCache10InitModuleEv
- stub helpers
-__ZN3WTFL15InitSizeClassesEv
-__Z20TCMalloc_SystemAllocmPmm
-__ZN3WTF20TCMalloc_ThreadCache22CreateCacheIfNecessaryEv
-__ZN3WTF20TCMalloc_ThreadCache7NewHeapEP17_opaque_pthread_t
-__ZN3WTF20TCMalloc_ThreadCache14PickNextSampleEm
-__ZN3WTF25TCMalloc_Central_FreeList11RemoveRangeEPPvS2_Pi
-__ZN3WTF25TCMalloc_Central_FreeList18FetchFromSpansSafeEv
-__ZN3WTF17TCMalloc_PageHeap3NewEm
-__ZN3WTF17TCMalloc_PageHeap10AllocLargeEm
-__ZN3WTF17TCMalloc_PageHeap8GrowHeapEm
-__ZN3WTFL13MetaDataAllocEm
-__ZN3WTF17TCMalloc_PageHeap6DeleteEPNS_4SpanE
-__ZN3WTF17TCMalloc_PageHeap19IncrementalScavengeEm
-__Z22TCMalloc_SystemReleasePvm
-__ZN3WTF17TCMalloc_PageHeap5CarveEPNS_4SpanEmb
-__ZN3WTF16fastZeroedMallocEm
-__ZN3WTF8fastFreeEPv
-__ZN3WTF12isMainThreadEv
-__ZN3JSC19initializeThreadingEv
-__ZN3JSCL23initializeThreadingOnceEv
-__ZN3WTF19initializeThreadingEv
-__ZN3WTF20initializeMainThreadEv
-__ZN3WTF5MutexC1Ev
-__ZN3WTF28initializeMainThreadPlatformEv
-__ZN3JSC17initializeUStringEv
-__ZN3JSC12JSGlobalData10storeVPtrsEv
-__ZN3JSC9StructureC1ENS_7JSValueERKNS_8TypeInfoEj
-__ZN3JSC7JSArrayC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC7JSArrayD1Ev
-__ZN3JSC7JSArrayD2Ev
-__ZN3JSC9StructureD1Ev
-__ZN3JSC9StructureD2Ev
-__ZN3JSC11JSByteArray15createStructureENS_7JSValueE
-__ZN3JSC11JSByteArrayD1Ev
-__ZN3JSC8JSStringD1Ev
-__ZN3JSC8JSStringD2Ev
-__ZN3JSC10JSFunctionC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC10JSFunctionD1Ev
-__ZN3JSC10JSFunctionD2Ev
-__ZN3JSC18VPtrHackExecutableD0Ev
-__ZN3JSC16InternalFunctionD2Ev
-__ZN3WTF15initializeDatesEv
-__ZN3WTF20equivalentYearForDSTEi
-__ZN3WTF11currentTimeEv
-__ZN3WTF14FastMallocZone4sizeEP14_malloc_zone_tPKv
-__ZN3WTF36lockAtomicallyInitializedStaticMutexEv
-__ZN3WTF38unlockAtomicallyInitializedStaticMutexEv
-__ZN3WTF5Mutex4lockEv
-__ZN3WTF5Mutex6unlockEv
-__ZN3WTF6strtodEPKcPPc
-__ZN3JSC6JSLock12DropAllLocksC1ENS_14JSLockBehaviorE
-__ZN3JSC6JSLock12DropAllLocksC2ENS_14JSLockBehaviorE
-__ZN3JSCL17createJSLockCountEv
-__ZN3JSC6JSLock12DropAllLocksD1Ev
-__ZN3WTF20TCMalloc_ThreadCache18DestroyThreadCacheEPv
-__ZN3WTF20TCMalloc_ThreadCache11DeleteCacheEPS0_
-__ZN3WTF20TCMalloc_ThreadCache21ReleaseToCentralCacheEmi
-__ZN3WTF25TCMalloc_Central_FreeList18ReleaseListToSpansEPv
-__ZN3WTF25TCMalloc_Central_FreeList14MakeCacheSpaceEv
-__ZN3WTF16callOnMainThreadEPFvPvES0_
-__ZN3WTF5DequeINS_19FunctionWithContextEE14expandCapacityEv
-__ZN3WTF37scheduleDispatchFunctionsOnMainThreadEv
--[WTFMainThreadCaller call]
-__ZN3WTF31dispatchFunctionsFromMainThreadEv
-__ZN3WTF15ThreadConditionC1Ev
-__ZN3WTF12createThreadEPFPvS0_ES0_PKc
-__ZN3WTF20createThreadInternalEPFPvS0_ES0_PKc
-__ZN3WTFL35establishIdentifierForPthreadHandleERKP17_opaque_pthread_t
-__ZN3WTF9HashTableIjSt4pairIjP17_opaque_pthread_tENS_18PairFirstExtractorIS4_EENS_7IntHashIjEENS_14PairHashTraitsINS_10HashTraitsIjEENSA_IS3_EEEESB_E6rehashEi
-__ZN3WTF9HashTableIjSt4pairIjP17_opaque_pthread_tENS_18PairFirstExtractorIS4_EENS_7IntHashIjEENS_14PairHashTraitsINS_10HashTraitsIjEENSA_IS3_EEEESB_E4findIjNS_22IdentityHashTranslatorIjS4_S8_EEEENS_17HashTableIteratorIjS4_S6_S8_SD_SB_EERKT_
-__ZN3WTF9HashTableIjSt4pairIjP17_opaque_pthread_tENS_18PairFirstExtractorIS4_EENS_7IntHashIjEENS_14PairHashTraitsINS_10HashTraitsIjEENSA_IS3_EEEESB_E3addIjS3_NS_17HashMapTranslatorIS4_SD_S8_EEEES1_INS_17HashTableIteratorIjS4_S6_S8_SD_SB_EEbERKT_RKT0_
-__ZN3WTF15ThreadCondition4waitERNS_5MutexE
-__ZN3WTFL16threadEntryPointEPv
-__ZN3WTF31initializeCurrentThreadInternalEPKc
-__ZN3WTF20ThreadIdentifierData10initializeEj
-__ZN3WTF20ThreadIdentifierData23initializeKeyOnceHelperEv
-__ZN3WTF5MutexD1Ev
-__ZN3WTF13currentThreadEv
-__ZN3WTF20ThreadIdentifierData10identifierEv
-__ZN3WTF15ThreadCondition9broadcastEv
-__ZN3WTF15ThreadCondition6signalEv
-__ZN3JSC12JSGlobalData12createLeakedEv
-__ZN3JSC9Structure18startIgnoringLeaksEv
-__ZN3JSC12JSGlobalDataC2Eb
-__ZN3JSC21createIdentifierTableEv
-__ZN3JSC17CommonIdentifiersC1EPNS_12JSGlobalDataE
-__ZN3JSC17CommonIdentifiersC2EPNS_12JSGlobalDataE
-__ZN3JSC10Identifier3addEPNS_12JSGlobalDataEPKc
-__ZNK3JSC11UStringImpl4hashEv
-__ZN3WTF9HashTableIPKcSt4pairIS2_NS_6RefPtrIN3JSC11UStringImplEEEENS_18PairFirstExtractorIS8_EENS_7PtrHashIS2_EENS_14PairHashTraitsINS_10HashTraitsIS2_EENSE_IS7_EEEESF_E4findIS2_NS_22IdentityHashTranslatorIS2_S8_SC_EEEENS_17HashTableIteratorIS2_S8_SA_SC_SH_SF_EERKT_
-__ZN3WTF9HashTableIPKcSt4pairIS2_NS_6RefPtrIN3JSC11UStringImplEEEENS_18PairFirstExtractorIS8_EENS_7PtrHashIS2_EENS_14PairHashTraitsINS_10HashTraitsIS2_EENSE_IS7_EEEESF_E6rehashEi
-__ZN3WTF9HashTableIPKcSt4pairIS2_NS_6RefPtrIN3JSC11UStringImplEEEENS_18PairFirstExtractorIS8_EENS_7PtrHashIS2_EENS_14PairHashTraitsINS_10HashTraitsIS2_EENSE_IS7_EEEESF_E3addIS2_S7_NS_17HashMapTranslatorIS8_SH_SC_EEEES3_INS_17HashTableIteratorIS2_S8_SA_SC_SH_SF_EEbERKT_RKT0_
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7StrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
-__ZN3JSC5equalEPKNS_11UStringImplES2_
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7StrHashIS3_EENS_10HashTraitsIS3_EES9_E4findIS3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEENS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EERKT_
-__ZN3JSC12SmallStringsC1Ev
-__ZN3JSC5LexerC1EPNS_12JSGlobalDataE
-__ZN3JSC11ParserArenaC1Ev
-__ZN3JSC11InterpreterC1Ev
-__ZN3JSC11InterpreterC2Ev
-__ZN3JSC11Interpreter14privateExecuteENS0_13ExecutionFlagEPNS_12RegisterFileEPNS_9ExecStateEPNS_7JSValueE
-__ZN3WTF9HashTableIPKvSt4pairIS2_N3JSC8OpcodeIDEENS_18PairFirstExtractorIS6_EENS_7PtrHashIS2_EENS_14PairHashTraitsINS_10HashTraitsIS2_EENSC_IS5_EEEESD_E4findIS2_NS_22IdentityHashTranslatorIS2_S6_SA_EEEENS_17HashTableIteratorIS2_S6_S8_SA_SF_SD_EERKT_
-__ZN3WTF9HashTableIPKvSt4pairIS2_N3JSC8OpcodeIDEENS_18PairFirstExtractorIS6_EENS_7PtrHashIS2_EENS_14PairHashTraitsINS_10HashTraitsIS2_EENSC_IS5_EEEESD_E6rehashEi
-__ZN3WTF9HashTableIPKvSt4pairIS2_N3JSC8OpcodeIDEENS_18PairFirstExtractorIS6_EENS_7PtrHashIS2_EENS_14PairHashTraitsINS_10HashTraitsIS2_EENSC_IS5_EEEESD_E3addIS2_S5_NS_17HashMapTranslatorIS6_SF_SA_EEEES3_INS_17HashTableIteratorIS2_S6_S8_SA_SF_SD_EEbERKT_RKT0_
-__ZN3JSC14TimeoutCheckerC1Ev
-__ZN3JSC4HeapC1EPNS_12JSGlobalDataE
-__ZN3JSC4Heap13allocateBlockEv
-__ZN3WTF11fastReallocEPvm
-__ZN3WTF11fastReallocILb1EEEPvS1_m
-__ZN3JSC9MarkStack18initializePagesizeEv
-__ZN3JSC9MarkStack13allocateStackEm
-__ZN3JSC27startProfilerServerIfNeededEv
-+[ProfilerServer sharedProfileServer]
--[ProfilerServer init]
-__ZN3JSC25setDefaultIdentifierTableEPNS_15IdentifierTableE
-__ZN3JSC29createIdentifierTableSpecificEv
-__ZN3JSCL37createIdentifierTableSpecificCallbackEv
-__ZN3JSC25setCurrentIdentifierTableEPNS_15IdentifierTableE
-__ZN3JSC9Structure17stopIgnoringLeaksEv
-__Z15jsRegExpCompilePKti24JSRegExpIgnoreCaseOption23JSRegExpMultilineOptionPjPPKc
-__ZL30calculateCompiledPatternLengthPKti24JSRegExpIgnoreCaseOptionR11CompileDataR9ErrorCode
-__ZL11checkEscapePPKtS0_P9ErrorCodeib
-__ZL13compileBranchiPiPPhPPKtS3_P9ErrorCodeS_S_R11CompileData
-__Z15jsRegExpExecutePK8JSRegExpPKtiiPii
-__ZL5matchPKtPKhiR9MatchData
-__ZN3JSC7UStringC1EPKti
-__ZN3JSC7UStringC2EPKti
-__ZN3WTF13tryFastMallocEm
-__ZN3WTF10fastMallocILb0EEEPvm
-__ZN3JSC11UStringImpl12sharedBufferEv
-__ZN3JSC4Heap8allocateEm
-__ZN3JSC6JSCellD1Ev
-__ZN3JSC4Heap7protectENS_7JSValueE
-__ZN3WTF9HashTableIPN3JSC6JSCellESt4pairIS3_jENS_18PairFirstExtractorIS5_EENS_7PtrHashIS3_EENS_14PairHashTraitsINS_10HashTraitsIS3_EENSB_IjEEEESC_E6rehashEi
-__ZN3WTF9HashTableIPN3JSC6JSCellESt4pairIS3_jENS_18PairFirstExtractorIS5_EENS_7PtrHashIS3_EENS_14PairHashTraitsINS_10HashTraitsIS3_EENSB_IjEEEESC_E4findIS3_NS_22IdentityHashTranslatorIS3_S5_S9_EEEENS_17HashTableIteratorIS3_S5_S7_S9_SE_SC_EERKT_
-__ZN3WTF9HashTableIPN3JSC6JSCellESt4pairIS3_jENS_18PairFirstExtractorIS5_EENS_7PtrHashIS3_EENS_14PairHashTraitsINS_10HashTraitsIS3_EENSB_IjEEEESC_E3addIS3_jNS_17HashMapTranslatorIS5_SE_S9_EEEES4_INS_17HashTableIteratorIS3_S5_S7_S9_SE_SC_EEbERKT_RKT0_
-__ZN3WTF9HashTableIPN3JSC6JSCellESt4pairIS3_jENS_18PairFirstExtractorIS5_EENS_7PtrHashIS3_EENS_14PairHashTraitsINS_10HashTraitsIS3_EENSB_IjEEEESC_E3addIS3_S5_NS_22IdentityHashTranslatorIS3_S5_S9_EEEES4_INS_17HashTableIteratorIS3_S5_S7_S9_SE_SC_EEbERKT_RKT0_
-__ZN3JSC14JSGlobalObjectnwEmPNS_12JSGlobalDataE
-__ZN3JSC14JSGlobalObject4initEPNS_8JSObjectE
-__ZN3JSC14JSGlobalObject5resetENS_7JSValueE
-__ZN3JSC17FunctionPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC17FunctionPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC16InternalFunctionC2EPNS_12JSGlobalDataEN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_10IdentifierE
-__ZN3JSC8jsStringEPNS_12JSGlobalDataERKNS_7UStringE
-__ZN3JSC12SmallStrings17createEmptyStringEPNS_12JSGlobalDataE
-__ZN3JSC7UStringC1EPKc
-__ZN3JSCL9createRepEPKc
-__ZN3JSC8JSObject17putDirectInternalERKNS_10IdentifierENS_7JSValueEjbRNS_15PutPropertySlotEPNS_6JSCellE
-__ZN3JSC9Structure40addPropertyTransitionToExistingStructureEPS0_RKNS_10IdentifierEjPNS_6JSCellERm
-__ZNK3JSC24StructureTransitionTable3getERKSt4pairIN3WTF6RefPtrINS_11UStringImplEEEjEPNS_6JSCellE
-__ZN3JSC9Structure3getEPKNS_11UStringImplERjRPNS_6JSCellE
-__ZN3JSC9Structure21addPropertyTransitionEPS0_RKNS_10IdentifierEjPNS_6JSCellERm
-__ZN3JSC9Structure3putERKNS_10IdentifierEjPNS_6JSCellE
-__ZN3JSC24StructureTransitionTable3addERKSt4pairIN3WTF6RefPtrINS_11UStringImplEEEjEPNS_9StructureEPNS_6JSCellE
-__ZN3JSC9Structure28addPropertyWithoutTransitionERKNS_10IdentifierEjPNS_6JSCellE
-__ZN3JSC17FunctionPrototype21addFunctionPropertiesEPNS_9ExecStateEPNS_9StructureEPPNS_17PrototypeFunctionES7_
-__ZN3JSC17PrototypeFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectESA_RKNS_7ArgListEE
-__ZN3JSC17PrototypeFunctionC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectESA_RKNS_7ArgListEE
-__ZN3JSC8JSObject34putDirectFunctionWithoutTransitionEPNS_9ExecStateEPNS_16InternalFunctionEj
-__ZN3JSC16InternalFunction4nameEPNS_9ExecStateE
-__ZN3JSC9Structure3getERKNS_10IdentifierE
-__ZN3JSC15ObjectPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC15ObjectPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC9Structure26rehashPropertyMapHashTableEj
-__ZN3JSC8JSObject17createInheritorIDEv
-__ZN3JSC14ArrayPrototypeC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC7JSArrayC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC15StringPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC15StringPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC12StringObjectC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC16BooleanPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC16BooleanPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC13BooleanObjectC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC15NumberPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC15NumberPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC12NumberObjectC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC13DatePrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC12DateInstanceC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC12nonInlineNaNEv
-__ZN3JSC15RegExpPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC15RegExpPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC14ErrorPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC14ErrorPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC13ErrorInstanceC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC18jsNontrivialStringEPNS_12JSGlobalDataEPKc
-__ZN3JSC20NativeErrorPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7UStringES9_
-__ZN3JSC17ObjectConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15ObjectPrototypeEPS5_
-__ZN3JSC17ObjectConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15ObjectPrototypeEPS5_
-__ZN3JSC10Identifier3addEPNS_9ExecStateEPKc
-__ZN3JSC19FunctionConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_17FunctionPrototypeE
-__ZN3JSC19FunctionConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_17FunctionPrototypeE
-__ZNK3JSC16InternalFunction9classInfoEv
-__ZN3JSC16ArrayConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_14ArrayPrototypeEPS5_
-__ZN3JSC16ArrayConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_14ArrayPrototypeEPS5_
-__ZNK3JSC14ArrayPrototype9classInfoEv
-__ZN3JSC17StringConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_PNS_15StringPrototypeE
-__ZN3JSC17StringConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_PNS_15StringPrototypeE
-__ZNK3JSC15StringPrototype9classInfoEv
-__ZN3JSC18BooleanConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_16BooleanPrototypeE
-__ZN3JSC18BooleanConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_16BooleanPrototypeE
-__ZNK3JSC13BooleanObject9classInfoEv
-__ZN3JSC17NumberConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15NumberPrototypeE
-__ZN3JSC17NumberConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15NumberPrototypeE
-__ZN3JSC15DateConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_PNS_13DatePrototypeE
-__ZN3JSC15DateConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_PNS_13DatePrototypeE
-__ZNK3JSC13DatePrototype9classInfoEv
-__ZN3JSC17RegExpConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15RegExpPrototypeE
-__ZN3JSC17RegExpConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15RegExpPrototypeE
-__ZN3JSC16ErrorConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_14ErrorPrototypeE
-__ZN3JSC16ErrorConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_14ErrorPrototypeE
-__ZNK3JSC13ErrorInstance9classInfoEv
-__ZN3JSC22NativeErrorConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_20NativeErrorPrototypeE
-__ZN3JSC22NativeErrorConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_20NativeErrorPrototypeE
-__ZNK3JSC6JSCell9getStringEPNS_9ExecStateE
-__ZN3JSC10Identifier11addSlowCaseEPNS_9ExecStateEPNS_11UStringImplE
-__ZN3JSC10Identifier11addSlowCaseEPNS_12JSGlobalDataEPNS_11UStringImplE
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7StrHashIS3_EENS_10HashTraitsIS3_EES9_E3addIS3_S3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEESt4pairINS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EEbERKT_RKT0_
-__ZN3JSC8JSObject23allocatePropertyStorageEmm
-__ZN3JSC10MathObjectC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC10MathObjectC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC12SmallStrings24singleCharacterStringRepEh
-__ZN3JSC19SmallStringsStorageC2Ev
-__ZN3JSC14JSGlobalObject16addStaticGlobalsEPNS0_18GlobalPropertyInfoEi
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS2_16SymbolTableEntryEENS_18PairFirstExtractorIS7_EENS2_17IdentifierRepHashENS_14PairHashTraitsINS_10HashTraitsIS4_EENS2_26SymbolTableIndexHashTraitsEEESD_E6rehashEi
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS2_16SymbolTableEntryEENS_18PairFirstExtractorIS7_EENS2_17IdentifierRepHashENS_14PairHashTraitsINS_10HashTraitsIS4_EENS2_26SymbolTableIndexHashTraitsEEESD_E3addIPS3_S6_NS_29RefPtrHashMapRawKeyTranslatorISI_S7_SF_SA_EEEES5_INS_17HashTableIteratorIS4_S7_S9_SA_SF_SD_EEbERKT_RKT0_
-__ZN3JSC18GlobalEvalFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectESA_RKNS_7ArgListEEPNS_14JSGlobalObjectE
-__ZN3JSC14JSGlobalObject14resetPrototypeENS_7JSValueE
-__ZN3JSC9Structure25changePrototypeTransitionEPS0_NS_7JSValueE
-__ZN3JSC9Structure17copyPropertyTableEv
-__ZN3JSC14JSGlobalObject10globalExecEv
-__ZN3JSC4Heap9unprotectENS_7JSValueE
-__ZNK3JSC9HashTable11createTableEPNS_12JSGlobalDataE
-__ZN3JSC14JSGlobalObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC8JSObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC14TimeoutChecker5resetEv
-__ZN3JSC8evaluateEPNS_9ExecStateERNS_10ScopeChainERKNS_10SourceCodeENS_7JSValueE
-__ZN3JSC6JSLockC1EPNS_9ExecStateE
-__ZN3JSC17ProgramExecutable7compileEPNS_9ExecStateEPNS_14ScopeChainNodeE
-__ZN3JSC6Parser5parseINS_11ProgramNodeEEEN3WTF10PassRefPtrIT_EEPNS_12JSGlobalDataEPNS_8DebuggerEPNS_9ExecStateERKNS_10SourceCodeEPiPNS_7UStringE
-__ZN3JSC6Parser5parseEPNS_12JSGlobalDataEPiPNS_7UStringE
-__ZN3JSC7UStringaSEPKc
-__ZN3JSC5Lexer7setCodeERKNS_10SourceCodeERNS_11ParserArenaE
-__Z10jscyyparsePv
-__ZN3JSC5Lexer3lexEPvS1_
-__ZN3JSC10Identifier3addEPNS_12JSGlobalDataEPKti
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7StrHashIS3_EENS_10HashTraitsIS3_EES9_E18addPassingHashCodeINS1_11UCharBufferESC_NS_24HashSetTranslatorAdapterIS3_S9_SC_NS1_21UCharBufferTranslatorEEEEESt4pairINS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EEbERKT_RKT0_
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7StrHashIS3_EENS_10HashTraitsIS3_EES9_E18addPassingHashCodeIPKcSD_NS_24HashSetTranslatorAdapterIS3_S9_SD_NS1_17CStringTranslatorEEEEESt4pairINS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EEbERKT_RKT0_
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7StrHashIS3_EENS_10HashTraitsIS3_EES9_E20fullLookupForWritingINS1_11UCharBufferENS_24HashSetTranslatorAdapterIS3_S9_SC_NS1_21UCharBufferTranslatorEEEEESt4pairISG_IPS3_bEjERKT_
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7StrHashIS3_EENS_10HashTraitsIS3_EES9_E20fullLookupForWritingIPKcNS_24HashSetTranslatorAdapterIS3_S9_SD_NS1_17CStringTranslatorEEEEESt4pairISH_IPS3_bEjERKT_
-__ZN3WTF15SegmentedVectorIN3JSC10IdentifierELm64EE6appendIS2_EEvRKT_
-__ZN3JSC16FunctionBodyNode6createEPNS_12JSGlobalDataE
-__ZN3JSC11ParserArena14derefWithArenaEN3WTF10PassRefPtrINS_21ParserArenaRefCountedEEE
-__ZN3JSC5Lexer10sourceCodeEiii
-__ZN3JSC11ParserArena20allocateFreeablePoolEv
-__ZN3JSC16FunctionBodyNode13finishParsingERKNS_10SourceCodeEPNS_13ParameterNodeERKNS_10IdentifierE
-__ZN3JSC18FunctionParametersC2EPNS_13ParameterNodeE
-__ZN3WTF6VectorIPN3JSC20ParserArenaDeletableELm0EE14expandCapacityEmPKS3_
-__ZN3JSC13StatementNode6setLocEii
-__ZN3WTF6VectorIPN3JSC16FunctionBodyNodeELm0EE14expandCapacityEmPKS3_
-__ZN3JSC14SourceElements6appendEPNS_13StatementNodeE
-__ZNK3JSC13StatementNode16isEmptyStatementEv
-__ZN3WTF6VectorIPN3JSC13StatementNodeELm0EE14expandCapacityEmPKS3_
-__ZN3WTF6VectorIN3JSC10IdentifierELm0EE15reserveCapacityEm
-__Z21mergeDeclarationListsIPN3JSC15ParserArenaDataIN3WTF6VectorISt4pairIPKNS0_10IdentifierEjELm0EEEEEET_SC_SC_
-__Z21mergeDeclarationListsIPN3JSC15ParserArenaDataIN3WTF6VectorIPNS0_16FunctionBodyNodeELm0EEEEEET_S9_S9_
-__ZN3JSC5Lexer10scanRegExpERPKNS_10IdentifierES4_t
-__ZN3WTF6VectorItLm0EE14expandCapacityEmPKt
-__ZL20makeFunctionCallNodePN3JSC12JSGlobalDataENS_8NodeInfoIPNS_14ExpressionNodeEEENS2_IPNS_13ArgumentsNodeEEEiii
-__ZNK3JSC15DotAccessorNode10isLocationEv
-__ZNK3JSC14ExpressionNode13isResolveNodeEv
-__ZNK3JSC14ExpressionNode21isBracketAccessorNodeEv
-__ZN3JSC5Lexer10skipRegExpEv
-__ZL14makeAssignNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeENS_8OperatorES3_bbiii
-__ZNK3JSC11ResolveNode10isLocationEv
-__ZNK3JSC11ResolveNode13isResolveNodeEv
-__ZN3WTF6VectorISt4pairIPKN3JSC10IdentifierEjELm0EE14expandCapacityEmPKS6_
-__ZNK3JSC14ExpressionNode10isLocationEv
-__ZN3WTF6VectorIPNS0_IN3JSC10IdentifierELm64EEELm32EE14expandCapacityEmPKS4_
-__ZN3WTF6VectorIPvLm0EE14expandCapacityEmPKS1_
-__ZL11makeAddNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
-__ZNK3JSC14ExpressionNode8isNumberEv
-__ZN3JSC6Parser16didFinishParsingEPNS_14SourceElementsEPNS_15ParserArenaDataIN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEEEPNS3_INS5_IPNS_16FunctionBodyNodeELm0EEEEEjii
-__ZN3JSC5Lexer5clearEv
-__ZN3JSC11ProgramNode6createEPNS_12JSGlobalDataEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEPNS6_IPNS_16FunctionBodyNodeELm0EEERKNS_10SourceCodeEji
-__ZN3JSC9ScopeNodeC2EPNS_12JSGlobalDataERKNS_10SourceCodeEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEPNS9_IPNS_16FunctionBodyNodeELm0EEEji
-__ZN3JSC13ScopeNodeDataC2ERNS_11ParserArenaEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEPNS6_IPNS_16FunctionBodyNodeELm0EEEi
-__ZN3JSC11ParserArena10removeLastEv
-__ZN3JSC11ParserArena5resetEv
-__ZN3WTF15SegmentedVectorIN3JSC10IdentifierELm64EE5clearEv
-__ZN3WTF15SegmentedVectorIN3JSC10IdentifierELm64EE17deleteAllSegmentsEv
-__ZN3WTF6VectorIN3JSC10IdentifierELm64EE14shrinkCapacityEm
-__ZN3WTF6VectorINS_6RefPtrIN3JSC21ParserArenaRefCountedEEELm0EE14shrinkCapacityEm
-__ZN3JSC9CodeBlockC2EPNS_16ScriptExecutableENS_8CodeTypeEN3WTF10PassRefPtrINS_14SourceProviderEEEjPNS4_7HashMapINS4_6RefPtrINS_11UStringImplEEENS_16SymbolTableEntryENS_17IdentifierRepHashENS4_10HashTraitsISB_EENS_26SymbolTableIndexHashTraitsEEE
-__ZN3WTF9HashTableIPN3JSC15GlobalCodeBlockES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E3addIS3_S3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEESt4pairINS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EEbERKT_RKT0_
-__ZN3WTF9HashTableIPN3JSC15GlobalCodeBlockES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
-__ZN3JSC17BytecodeGeneratorC1EPNS_11ProgramNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_11UStringImplEEENS_16SymbolTableEntryENS_17IdentifierRepHashENS9_10HashTraitsISD_EENS_26SymbolTableIndexHashTraitsEEEPNS_16ProgramCodeBlockE
-__ZN3JSC17BytecodeGeneratorC2EPNS_11ProgramNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_11UStringImplEEENS_16SymbolTableEntryENS_17IdentifierRepHashENS9_10HashTraitsISD_EENS_26SymbolTableIndexHashTraitsEEEPNS_16ProgramCodeBlockE
-__ZN3JSC17BytecodeGenerator10emitOpcodeENS_8OpcodeIDE
-__ZN3WTF15SegmentedVectorIN3JSC10RegisterIDELm32EE17ensureSegmentsForEm
-__ZN3WTF6VectorIN3JSC10RegisterIDELm32EE4growEm
-__ZN3JSC26BatchedTransitionOptimizerC2EPNS_8JSObjectE
-__ZN3JSC9Structure31toCacheableDictionaryTransitionEPS0_
-__ZN3JSC9Structure22toDictionaryTransitionEPS0_NS0_14DictionaryKindE
-__ZN3JSC24StructureTransitionTable6removeERKSt4pairIN3WTF6RefPtrINS_11UStringImplEEEjEPNS_6JSCellE
-__ZN3JSC8JSObject12removeDirectERKNS_10IdentifierE
-__ZN3JSC9Structure24removePropertyTransitionEPS0_RKNS_10IdentifierERm
-__ZN3JSC9Structure6removeERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator12addGlobalVarERKNS_10IdentifierEbRPNS_10RegisterIDE
-__ZN3WTF15SegmentedVectorIN3JSC10RegisterIDELm32EE6appendIiEEvRKT_
-__ZN3JSC17BytecodeGenerator15emitNewFunctionEPNS_10RegisterIDEPNS_16FunctionBodyNodeE
-__ZN3JSC9CodeBlock15addFunctionDeclEN3WTF17NonNullPassRefPtrINS_18FunctionExecutableEEE
-__ZN3JSC9Structure31removePropertyWithoutTransitionERKNS_10IdentifierE
-__ZNK3JSC8JSObject11hasPropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZNK3JSC7UString14toStrictUInt32EPb
-__ZN3JSC17BytecodeGenerator16addConstantValueENS_7JSValueE
-__ZN3WTF6VectorIN3JSC8RegisterELm0EE14expandCapacityEmPKS2_
-__ZN3JSC17BytecodeGenerator8emitMoveEPNS_10RegisterIDES2_
-__ZN3JSC9Structure26flattenDictionaryStructureEPNS_8JSObjectE
-__ZN3JSCL30comparePropertyMapEntryIndicesEPKvS1_
-__ZN3JSC17BytecodeGenerator8generateEv
-__ZN3JSC11ProgramNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator13emitDebugHookENS_11DebugHookIDEii
-__ZN3JSC17BytecodeGenerator12newTemporaryEv
-__ZN3WTF15SegmentedVectorIN3JSC10RegisterIDELm32EE6appendImEEvRKT_
-__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDENS_7JSValueE
-__ZN3JSC17BytecodeGenerator8emitNodeEPNS_10RegisterIDEPNS_4NodeE
-__ZN3WTF6VectorIN3JSC8LineInfoELm0EE14expandCapacityEm
-__ZN3JSC12FuncDeclNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC6IfNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator8newLabelEv
-__ZN3WTF15SegmentedVectorIN3JSC5LabelELm32EE6appendIPNS1_9CodeBlockEEEvRKT_
-__ZNK3JSC13LogicalOpNode26hasConditionContextCodegenEv
-__ZN3JSC17BytecodeGenerator26emitNodeInConditionContextEPNS_14ExpressionNodeEPNS_5LabelES4_b
-__ZN3JSC13LogicalOpNode30emitBytecodeInConditionContextERNS_17BytecodeGeneratorEPNS_5LabelES4_b
-__ZNK3JSC14ExpressionNode26hasConditionContextCodegenEv
-__ZN3JSC12BinaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC14ExpressionNode6isNullEv
-__ZNK3JSC14ExpressionNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC15DotAccessorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC11ResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator11registerForERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator18emitExpressionInfoEjjj
-__ZN3WTF6VectorIN3JSC19ExpressionRangeInfoELm0EE14expandCapacityEmPKS2_
-__ZN3JSC17BytecodeGenerator11emitGetByIdEPNS_10RegisterIDES2_RKNS_10IdentifierE
-__ZN3WTF6VectorIjLm0EE14expandCapacityEmPKj
-__ZN3JSC17BytecodeGenerator11addConstantERKNS_10IdentifierE
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_iENS_18PairFirstExtractorIS6_EENS2_17IdentifierRepHashENS_14PairHashTraitsINS_10HashTraitsIS4_EENS2_17BytecodeGenerator28IdentifierMapIndexHashTraitsEEESC_E6rehashEi
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_iENS_18PairFirstExtractorIS6_EENS2_17IdentifierRepHashENS_14PairHashTraitsINS_10HashTraitsIS4_EENS2_17BytecodeGenerator28IdentifierMapIndexHashTraitsEEESC_E3addIPS3_iNS_29RefPtrHashMapRawKeyTranslatorISI_S6_SF_S9_EEEES5_INS_17HashTableIteratorIS4_S6_S8_S9_SF_SC_EEbERKT_RKT0_
-__ZN3JSC9CodeBlock13addIdentifierERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator12emitBinaryOpENS_8OpcodeIDEPNS_10RegisterIDES3_S3_NS_12OperandTypesE
-__ZN3JSC17BytecodeGenerator15emitJumpIfFalseEPNS_10RegisterIDEPNS_5LabelE
-__ZNK3JSC14LogicalNotNode26hasConditionContextCodegenEv
-__ZN3JSC11UnaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC19FunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator15emitMethodCheckEv
-__ZN3JSC17BytecodeGenerator8emitCallEPNS_10RegisterIDES2_S2_PNS_13ArgumentsNodeEjjj
-__ZN3JSC17BytecodeGenerator8emitCallENS_8OpcodeIDEPNS_10RegisterIDES3_S3_PNS_13ArgumentsNodeEjjj
-__ZN3JSC16ArgumentListNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC10RegExpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC6RegExp6createEPNS_12JSGlobalDataERKNS_7UStringES5_
-__ZN3JSC6RegExpC2EPNS_12JSGlobalDataERKNS_7UStringES5_
-__ZNK3JSC7UString4findEti
-__ZN3JSC17BytecodeGenerator13emitNewRegExpEPNS_10RegisterIDEPNS_6RegExpE
-__ZN3JSC9CodeBlock9addRegExpEPNS_6RegExpE
-__ZN3JSC17BytecodeGenerator11emitUnaryOpENS_8OpcodeIDEPNS_10RegisterIDES3_
-__ZN3JSC17BytecodeGenerator9emitLabelEPNS_5LabelE
-__ZN3JSC9BlockNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17ExprStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator11emitResolveEPNS_10RegisterIDERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator18findScopedPropertyERKNS_10IdentifierERiRmbRPNS_8JSObjectE
-__ZN3JSC13AssignDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC12FuncExprNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator25emitNewFunctionExpressionEPNS_10RegisterIDEPNS_12FuncExprNodeE
-__ZN3JSC9CodeBlock15addFunctionExprEN3WTF17NonNullPassRefPtrINS_18FunctionExecutableEEE
-__ZN3JSC17BytecodeGenerator11emitPutByIdEPNS_10RegisterIDERKNS_10IdentifierES2_
-__ZN3JSC17AssignResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator15emitResolveBaseEPNS_10RegisterIDERKNS_10IdentifierE
-__ZN3JSC16VarStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator15isLocalConstantERKNS_10IdentifierE
-__ZN3JSC13LogicalOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3WTF15SegmentedVectorIN3JSC5LabelELm32EE10removeLastEv
-__ZN3JSC17BytecodeGenerator14emitJumpIfTrueEPNS_10RegisterIDEPNS_5LabelE
-__ZN3JSC17ObjectLiteralNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator13emitNewObjectEPNS_10RegisterIDE
-__ZNK3JSC10StringNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC10StringNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDERKNS_10IdentifierE
-__ZN3WTF9HashTableIPN3JSC11UStringImplESt4pairIS3_PNS1_8JSStringEENS_18PairFirstExtractorIS7_EENS1_17IdentifierRepHashENS_14PairHashTraitsINS_10HashTraitsIS3_EENSC_IS6_EEEESD_E6rehashEi
-__ZN3WTF9HashTableIPN3JSC11UStringImplESt4pairIS3_PNS1_8JSStringEENS_18PairFirstExtractorIS7_EENS1_17IdentifierRepHashENS_14PairHashTraitsINS_10HashTraitsIS3_EENSC_IS6_EEEESD_E3addIS3_S6_NS_17HashMapTranslatorIS7_SF_SA_EEEES4_INS_17HashTableIteratorIS3_S7_S9_SA_SF_SD_EEbERKT_RKT0_
-__ZN3JSC13jsOwnedStringEPNS_12JSGlobalDataERKNS_7UStringE
-__ZN3JSC21FunctionCallValueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC16PropertyListNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZL14compileBracketiPiPPhPPKtS3_P9ErrorCodeiS_S_R11CompileData
-__ZNK3JSC11BooleanNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC11BooleanNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDEb
-__ZL33bracketFindFirstAssertedCharacterPKhb
-__ZL32branchFindFirstAssertedCharacterPKhb
-__ZL20branchNeedsLineStartPKhjj
-__ZNK3JSC11ResolveNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC17BytecodeGenerator7isLocalERKNS_10IdentifierE
-__ZN3JSC10NumberNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDEd
-__ZN3JSC9ArrayNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator12emitNewArrayEPNS_10RegisterIDEPNS_11ElementNodeE
-__ZN3WTF9HashTableIxSt4pairIxjENS_18PairFirstExtractorIS2_EENS_7IntHashIxEENS_14PairHashTraitsIN3JSC24EncodedJSValueHashTraitsENS_10HashTraitsIjEEEES9_E4findIxNS_22IdentityHashTranslatorIxS2_S6_EEEENS_17HashTableIteratorIxS2_S4_S6_SC_S9_EERKT_
-__ZN3WTF9HashTableIxSt4pairIxjENS_18PairFirstExtractorIS2_EENS_7IntHashIxEENS_14PairHashTraitsIN3JSC24EncodedJSValueHashTraitsENS_10HashTraitsIjEEEES9_E6rehashEi
-__ZN3WTF9HashTableIxSt4pairIxjENS_18PairFirstExtractorIS2_EENS_7IntHashIxEENS_14PairHashTraitsIN3JSC24EncodedJSValueHashTraitsENS_10HashTraitsIjEEEES9_E3addIxjNS_17HashMapTranslatorIS2_SC_S6_EEEES1_INS_17HashTableIteratorIxS2_S4_S6_SC_S9_EEbERKT_RKT0_
-__ZN3JSC8ThisNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC10NumberNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC10IfElseNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator8emitJumpEPNS_5LabelE
-__ZN3JSC23FunctionCallResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC14ExpressionNode5isAddEv
-__ZN3JSC21ThrowableBinaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC15StrictEqualNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator14emitEqualityOpENS_8OpcodeIDEPNS_10RegisterIDES3_S3_
-__ZN3JSC11NewExprNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator13emitConstructEPNS_10RegisterIDES2_PNS_13ArgumentsNodeEjjj
-__ZN3WTF6VectorIN3JSC20GetByIdExceptionInfoELm0EE14expandCapacityEmPKS2_
-__ZN3JSC17BytecodeGenerator16emitUnaryNoDstOpENS_8OpcodeIDEPNS_10RegisterIDE
-__ZN3JSC12JSGlobalData22numericCompareFunctionEPNS_9ExecStateE
-__ZNK3JSC21UStringSourceProvider6lengthEv
-__ZN3JSC18FunctionExecutable14fromGlobalCodeERKNS_10IdentifierEPNS_9ExecStateEPNS_8DebuggerERKNS_10SourceCodeEPiPNS_7UStringE
-__ZNK3JSC21UStringSourceProvider4dataEv
-__ZNK3JSC9ScopeNode15singleStatementEv
-__ZNK3JSC17ExprStatementNode15isExprStatementEv
-__ZNK3JSC12FuncExprNode14isFuncExprNodeEv
-__ZThn12_N3JSC11ProgramNodeD0Ev
-__ZN3JSC9ScopeNodeD2Ev
-__ZN3JSC11ParserArenaD1Ev
-__ZN3JSC14SourceElementsD1Ev
-__ZThn12_N3JSC16FunctionBodyNodeD0Ev
-__ZN3WTF10RefCountedIN3JSC18FunctionParametersEE5derefEv
-__ZN3WTF15SegmentedVectorIN3JSC10IdentifierELm64EED1Ev
-__ZN3JSC18FunctionExecutable7compileEPNS_9ExecStateEPNS_14ScopeChainNodeE
-__ZN3JSC6Parser5parseINS_16FunctionBodyNodeEEEN3WTF10PassRefPtrIT_EEPNS_12JSGlobalDataEPNS_8DebuggerEPNS_9ExecStateERKNS_10SourceCodeEPiPNS_7UStringE
-__ZL11makeSubNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
-__ZN3JSC14ExpressionNode14stripUnaryPlusEv
-__ZN3JSC16FunctionBodyNode6createEPNS_12JSGlobalDataEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEPNS6_IPS0_Lm0EEERKNS_10SourceCodeEji
-__ZN3JSC16FunctionBodyNode13finishParsingEN3WTF10PassRefPtrINS_18FunctionParametersEEERKNS_10IdentifierE
-__ZN3JSC17BytecodeGeneratorC1EPNS_16FunctionBodyNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_11UStringImplEEENS_16SymbolTableEntryENS_17IdentifierRepHashENS9_10HashTraitsISD_EENS_26SymbolTableIndexHashTraitsEEEPNS_9CodeBlockE
-__ZN3JSC17BytecodeGeneratorC2EPNS_16FunctionBodyNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_11UStringImplEEENS_16SymbolTableEntryENS_17IdentifierRepHashENS9_10HashTraitsISD_EENS_26SymbolTableIndexHashTraitsEEEPNS_9CodeBlockE
-__ZN3JSC17BytecodeGenerator12addParameterERKNS_10IdentifierE
-__ZN3JSC16FunctionBodyNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC10ReturnNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator10emitReturnEPNS_10RegisterIDE
-__ZNK3JSC9BlockNode7isBlockEv
-__ZNK3JSC10ReturnNode12isReturnNodeEv
-__ZN3WTF6OwnPtrIN3JSC13ExceptionInfoEE5clearEv
-__ZN3JSC9CodeBlock11shrinkToFitEv
-__ZN3WTF6VectorIN3JSC10IdentifierELm0EE14shrinkCapacityEm
-__ZN3WTF6VectorINS_6RefPtrIN3JSC18FunctionExecutableEEELm0EE14shrinkCapacityEm
-__ZN3JSC17BytecodeGeneratorD2Ev
-__ZN3WTF15SegmentedVectorIN3JSC10LabelScopeELm8EED1Ev
-__ZN3WTF15SegmentedVectorIN3JSC10LabelScopeELm8EE17deleteAllSegmentsEv
-__ZN3WTF15SegmentedVectorIN3JSC5LabelELm32EED1Ev
-__ZN3WTF15SegmentedVectorIN3JSC5LabelELm32EE17deleteAllSegmentsEv
-__ZN3WTF15SegmentedVectorIN3JSC10RegisterIDELm32EED1Ev
-__ZN3WTF6VectorIN3JSC11InstructionELm0EEaSERKS3_
-__ZN3JSC18FunctionExecutableD0Ev
-__ZN3JSC17FunctionCodeBlockD0Ev
-__ZN3JSC9CodeBlockD2Ev
-__ZN3WTF6OwnPtrIN3JSC13ExceptionInfoEED1Ev
-__ZN3JSC11UStringImplD1Ev
-__ZN3JSC11UStringImplD2Ev
-__ZN3JSC10Identifier6removeEPNS_11UStringImplE
-__ZN3JSC22currentIdentifierTableEv
-__ZN3JSC21UStringSourceProviderD0Ev
-__ZN3WTF6VectorINS_6RefPtrIN3JSC6RegExpEEELm0EE14shrinkCapacityEm
-__ZN3WTF6VectorIN3JSC15SimpleJumpTableELm0EE14shrinkCapacityEm
-__ZN3WTF6VectorIN3JSC15StringJumpTableELm0EE14shrinkCapacityEm
-__ZN3JSC15ParserArenaDataIN3WTF6VectorIPNS_16FunctionBodyNodeELm0EEEED1Ev
-__ZN3JSC15ParserArenaDataIN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEED1Ev
-__ZN3WTF25TCMalloc_Central_FreeList11ShrinkCacheEib
-__ZNK3JSC8JSObject8toObjectEPNS_9ExecStateE
-__ZN3JSC11Interpreter7executeEPNS_17ProgramExecutableEPNS_9ExecStateEPNS_14ScopeChainNodeEPNS_8JSObjectEPNS_7JSValueE
-__ZN3JSC12JSGlobalData14resetDateCacheEv
-__ZN3JSC14JSGlobalObject13copyGlobalsToERNS_12RegisterFileE
-__ZN3JSC18FunctionExecutable4makeEPNS_9ExecStateEPNS_14ScopeChainNodeE
-__ZN3JSC10JSFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_18FunctionExecutableEEEPNS_14ScopeChainNodeE
-__ZN3JSC12SmallStrings27createSingleCharacterStringEPNS_12JSGlobalDataEh
-__ZNK3JSC7JSValue3getEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC11Interpreter15tryCacheGetByIDEPNS_9ExecStateEPNS_9CodeBlockEPNS_11InstructionENS_7JSValueERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC7JSValue13equalSlowCaseEPNS_9ExecStateES0_S0_
-__ZN3JSC10JSFunction18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC9Structure13hasTransitionEPNS_11UStringImplEj
-__ZN3JSC11Interpreter15tryCachePutByIDEPNS_9ExecStateEPNS_9CodeBlockEPNS_11InstructionENS_7JSValueERKNS_15PutPropertySlotE
-__ZN3JSC24StructureTransitionTable21reifySingleTransitionEv
-__ZN3WTF9HashTableISt4pairINS_6RefPtrIN3JSC11UStringImplEEEjES1_IS6_S1_IPNS3_9StructureES8_EENS_18PairFirstExtractorISA_EENS3_28StructureTransitionTableHashENS_14PairHashTraitsINS3_34StructureTransitionTableHashTraitsENS_10HashTraitsIS9_EEEESF_E4findIS6_NS_22IdentityHashTranslatorIS6_SA_SD_EEEENS_17HashTableIteratorIS6_SA_SC_SD_SI_SF_EERKT_
-__ZN3WTF9HashTableISt4pairINS_6RefPtrIN3JSC11UStringImplEEEjES1_IS6_S1_IPNS3_9StructureES8_EENS_18PairFirstExtractorISA_EENS3_28StructureTransitionTableHashENS_14PairHashTraitsINS3_34StructureTransitionTableHashTraitsENS_10HashTraitsIS9_EEEESF_E6rehashEi
-__ZN3WTF9HashTableISt4pairINS_6RefPtrIN3JSC11UStringImplEEEjES1_IS6_S1_IPNS3_9StructureES8_EENS_18PairFirstExtractorISA_EENS3_28StructureTransitionTableHashENS_14PairHashTraitsINS3_34StructureTransitionTableHashTraitsENS_10HashTraitsIS9_EEEESF_E3addIS6_S9_NS_17HashMapTranslatorISA_SI_SD_EEEES1_INS_17HashTableIteratorIS6_SA_SC_SD_SI_SF_EEbERKT_RKT0_
-__ZN3JSC10JSFunction11getCallDataERNS_8CallDataE
-__ZL15makePostfixNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeENS_8OperatorEiii
-__ZNK3JSC10NumberNode8isNumberEv
-__ZN3JSC17BytecodeGenerator6addVarERKNS_10IdentifierEbRPNS_10RegisterIDE
-__ZNK3JSC16JSVariableObject16isVariableObjectEv
-__ZN3JSC17BytecodeGenerator16emitGetScopedVarEPNS_10RegisterIDEmiNS_7JSValueE
-__ZN3JSC7ForNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator13newLabelScopeENS_10LabelScope4TypeEPKNS_10IdentifierE
-__ZN3WTF15SegmentedVectorIN3JSC10LabelScopeELm8EE6appendIS2_EEvRKT_
-__ZN3JSC15ConditionalNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC14JSGlobalObject14isDynamicScopeEv
-__ZN3JSC9BreakNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator11breakTargetERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator14emitJumpScopesEPNS_5LabelEi
-__ZN3JSC18PostfixResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator10emitPreIncEPNS_10RegisterIDE
-__ZN3JSC19BracketAccessorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator12emitGetByValEPNS_10RegisterIDES2_S2_
-__ZN3JSC23setUpStaticFunctionSlotEPNS_9ExecStateEPKNS_9HashEntryEPNS_8JSObjectERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC17PrototypeFunction11getCallDataERNS_8CallDataE
-__ZNK3JSC7JSValue3getEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZNK3JSC12PropertySlot8getValueEPNS_9ExecStateEj
-__ZN3JSC7UString4fromEj
-__ZNK3JSC8JSObject9toBooleanEPNS_9ExecStateE
-__ZNK3JSC8JSString9toBooleanEPNS_9ExecStateE
-__ZN3JSC12RegExpObjectC1EN3WTF17NonNullPassRefPtrINS_9StructureEEENS2_INS_6RegExpEEE
-__ZN3JSC12RegExpObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC18getStaticValueSlotINS_12RegExpObjectENS_8JSObjectEEEbPNS_9ExecStateEPKNS_9HashTableEPT_RKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL19regExpProtoFuncTestEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12RegExpObject9classInfoEv
-__ZN3JSC12RegExpObject4testEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC12RegExpObject5matchEPNS_9ExecStateERKNS_7ArgListE
-__ZNK3JSC7JSValue8toStringEPNS_9ExecStateE
-__ZN3JSC17RegExpConstructor12performMatchEPNS_6RegExpERKNS_7UStringEiRiS6_PPi
-__ZN3JSC6RegExp5matchERKNS_7UStringEiPN3WTF6VectorIiLm32EEE
-__ZN3JSC11Interpreter13resolveGlobalEPNS_9ExecStateEPNS_11InstructionERNS_7JSValueE
-__ZN3JSC17RegExpConstructor18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC18getStaticValueSlotINS_17RegExpConstructorENS_16InternalFunctionEEEbPNS_9ExecStateEPKNS_9HashTableEPT_RKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar1EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC11jsSubstringEPNS_12JSGlobalDataERKNS_7UStringEjj
-__ZN3JSC8JSString18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC15StringPrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC12StringObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL22stringProtoFuncIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC8JSString12toThisStringEPNS_9ExecStateE
-__ZNK3JSC7UString4findERKS0_i
-__ZN3JSC7JSValue11strictEqualEPNS_9ExecStateES0_S0_
-__ZNK3JSC18EmptyStatementNode16isEmptyStatementEv
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEES4_NS_17IdentityExtractorIS4_EENS2_17IdentifierRepHashENS_10HashTraitsIS4_EES9_E6rehashEi
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEES4_NS_17IdentityExtractorIS4_EENS2_17IdentifierRepHashENS_10HashTraitsIS4_EES9_E3addIS4_S4_NS_22IdentityHashTranslatorIS4_S4_S7_EEEESt4pairINS_17HashTableIteratorIS4_S4_S6_S7_S9_S9_EEbERKT_RKT0_
-__ZNK3JSC13StatementNode12isReturnNodeEv
-__ZN3JSC12JSActivationC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_18FunctionExecutableEEE
-__ZNK3JSC8JSObject12toThisObjectEPNS_9ExecStateE
-__ZN3JSC12JSActivation13copyRegistersEPNS_9ArgumentsE
-__ZN3JSC9EqualNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC23CallFunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator25emitJumpIfNotFunctionCallEPNS_10RegisterIDEPNS_5LabelE
-__ZN3JSC19ReverseBinaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC23objectProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7JSValue20toThisObjectSlowCaseEPNS_9ExecStateE
-__ZNK3JSC8JSObject9classNameEv
-__ZN3JSC10makeStringIPKcNS_7UStringES2_EES3_T_T0_T1_
-__ZN3JSC18jsNontrivialStringEPNS_12JSGlobalDataERKNS_7UStringE
-__ZN3JSC15TypeOfValueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC8NullNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC20jsTypeStringForValueEPNS_9ExecStateENS_7JSValueE
-__ZN3JSC6JSCell11getCallDataERNS_8CallDataE
-__ZNK3JSC19BracketAccessorNode10isLocationEv
-__ZNK3JSC19BracketAccessorNode21isBracketAccessorNodeEv
-__ZN3JSC9ForInNodeC2EPNS_12JSGlobalDataERKNS_10IdentifierEPNS_14ExpressionNodeES7_PNS_13StatementNodeEiii
-__ZN3JSC20ParserArenaDeletablenwEmPNS_12JSGlobalDataE
-__ZN3JSC9ForInNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator20emitGetPropertyNamesEPNS_10RegisterIDES2_S2_S2_PNS_5LabelE
-__ZN3JSC17BytecodeGenerator18pushOptimisedForInEPNS_10RegisterIDES2_S2_S2_
-__ZN3WTF6VectorIN3JSC12ForInContextELm0EE14expandCapacityEmPKS2_
-__ZN3WTF11VectorMoverILb0EN3JSC12ForInContextEE4moveEPKS2_S5_PS2_
-__ZN3JSC17AssignBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator12emitPutByValEPNS_10RegisterIDES2_S2_
-__ZN3JSC17BytecodeGenerator20emitNextPropertyNameEPNS_10RegisterIDES2_S2_S2_S2_PNS_5LabelE
-__ZN3JSC22JSPropertyNameIterator6createEPNS_9ExecStateEPNS_8JSObjectE
-__ZN3JSC8JSObject16getPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
-__ZN3JSC8JSObject19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
-__ZN3JSC9Structure16getPropertyNamesERNS_17PropertyNameArrayENS_15EnumerationModeE
-__ZN3JSC17PropertyNameArray14addKnownUniqueEPNS_11UStringImplE
-__ZNK3JSC6JSCell9classInfoEv
-__ZNK3JSC9Structure14prototypeChainEPNS_9ExecStateE
-__ZN3JSC14StructureChainC1EPNS_9StructureE
-__ZN3JSC14StructureChainC2EPNS_9StructureE
-__ZN3WTF10RefCountedIN3JSC21PropertyNameArrayDataEE5derefEv
-__ZN3JSC22JSPropertyNameIterator3getEPNS_9ExecStateEPNS_8JSObjectEm
-__ZN3JSC9Structure27despecifyFunctionTransitionEPS0_RKNS_10IdentifierE
-__ZN3JSC9Structure17despecifyFunctionERKNS_10IdentifierE
-__ZN3JSC14ArrayPrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC7JSArray18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC17RegExpConstructor3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC9lookupPutINS_17RegExpConstructorEEEbPNS_9ExecStateERKNS_10IdentifierENS_7JSValueEPKNS_9HashTableEPT_
-__ZNK3JSC14ExpressionNode11isCommaNodeEv
-__ZN3JSC9CommaNodeC1EPNS_12JSGlobalDataEPNS_14ExpressionNodeES4_
-__ZN3JSC9CommaNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC12JSActivation14isDynamicScopeEv
-__ZN3JSC9CommaNodeD1Ev
-__ZN3JSC9ArgumentsC2EPNS_9ExecStateE
-__ZN3JSC17BytecodeGenerator19emitResolveWithBaseEPNS_10RegisterIDES2_RKNS_10IdentifierE
-__ZN3JSC9WhileNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator11emitPostDecEPNS_10RegisterIDES2_
-__ZN3JSC17ObjectConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL21callObjectConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL19isInvalidParamForInEPNS_9ExecStateEPNS_9CodeBlockEPKNS_11InstructionENS_7JSValueERS7_
-__ZN3JSC9Arguments18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC16ArrayConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL29constructWithArrayConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSCL27constructArrayWithSizeQuirkEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC7JSArrayC1EN3WTF17NonNullPassRefPtrINS_9StructureEEEj
-__ZN3JSC9Arguments18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC17TypeOfResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSCeqERKNS_7UStringEPKc
-__ZN3JSC16jsIsFunctionTypeENS_7JSValueE
-__ZN3JSC10JSFunction3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC9Structure22materializePropertyMapEv
-__ZN3JSC7JSArrayC1EN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7ArgListE
-__ZN3JSCL18arrayProtoFuncPushEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArray4pushEPNS_9ExecStateENS_7JSValueE
-__ZN3JSC7JSArray20increaseVectorLengthEj
-__ZN3WTF14tryFastReallocEPvm
-__ZN3WTF11fastReallocILb0EEEPvS1_m
-__ZNK3JSC9CodeBlock13refStructuresEPNS_11InstructionE
-__ZN3WTF6VectorIN3JSC10IdentifierELm20EE15reserveCapacityEm
-__ZN3JSC12StringObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC11Interpreter14uncacheGetByIDEPNS_9CodeBlockEPNS_11InstructionE
-__ZNK3JSC9CodeBlock15derefStructuresEPNS_11InstructionE
-__ZNK3JSC9CommaNode11isCommaNodeEv
-__ZN3JSC9CommaNode6appendEPNS_14ExpressionNodeE
-__ZN3JSC7JSArray3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSCL20arrayProtoFuncConcatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7JSArray9classInfoEv
-__ZNK3JSC9Arguments9classInfoEv
-__ZN3JSC7JSArray3putEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC7JSArray11putSlowCaseEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC7JSArray9setLengthEj
-__ZN3JSC17NumberConstructor18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC18getStaticValueSlotINS_17NumberConstructorENS_16InternalFunctionEEEbPNS_9ExecStateEPKNS_9HashTableEPT_RKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC7TryNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator9emitCatchEPNS_10RegisterIDEPNS_5LabelES4_
-__ZN3JSC9CodeBlock19addExceptionHandlerERKNS_11HandlerInfoE
-__ZN3WTF6VectorIN3JSC11HandlerInfoELm0EE14expandCapacityEmPKS2_
-__ZN3JSC17BytecodeGenerator16emitPushNewScopeEPNS_10RegisterIDERKNS_10IdentifierES2_
-__ZN3WTF6VectorIN3JSC18ControlFlowContextELm0EE14expandCapacityEmPKS2_
-__ZN3JSC9ThrowNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator12emitPopScopeEv
-__ZN3JSCL21arrayProtoFuncForEachEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC8JSObject3getEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC11Interpreter20prepareForRepeatCallEPNS_18FunctionExecutableEPNS_9ExecStateEPNS_10JSFunctionEiPNS_14ScopeChainNodeEPNS_7JSValueE
-__ZN3JSC17BytecodeGenerator11emitPostIncEPNS_10RegisterIDES2_
-__ZN3JSC17BytecodeGenerator16emitPutScopedVarEmiPNS_10RegisterIDENS_7JSValueE
-__ZN3JSC11Interpreter13endRepeatCallERNS_16CallFrameClosureE
-__ZN3JSCL19arrayProtoFuncShiftEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArray18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC7JSArray14deletePropertyEPNS_9ExecStateEj
-__ZN3JSC10JSFunction16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL25functionProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC10JSFunction9classInfoEv
-__ZNK3JSC7UStringixEi
-__ZNK3JSC18FunctionExecutable11paramStringEv
-__ZN3WTF6VectorItLm64EE6appendIcEEvPKT_m
-__ZN3JSC13StringBuilder7releaseEv
-__ZN3JSC10makeStringIPKcNS_7UStringES2_S3_S2_S3_EES3_T_T0_T1_T2_T3_T4_
-__ZN3JSCL20stringProtoFuncMatchEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC17RegExpConstructor14arrayOfMatchesEPNS_9ExecStateE
-__ZN3JSC18RegExpMatchesArrayC2EPNS_9ExecStateEPNS_24RegExpConstructorPrivateE
-__ZN3JSC7JSArrayC2EN3WTF17NonNullPassRefPtrINS_9StructureEEEj
-__ZN3JSC7JSArray19setLazyCreationDataEPv
-__ZN3JSC18RegExpMatchesArray18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC7JSArray16lazyCreationDataEv
-__ZN3JSC18RegExpMatchesArray17fillArrayInstanceEPNS_9ExecStateE
-__ZN3JSC24RegExpConstructorPrivateD1Ev
-__ZN3JSCL22stringProtoFuncReplaceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JSString14toThisJSStringEPNS_9ExecStateE
-__ZNK3JSC7UString30spliceSubstringsWithSeparatorsEPKNS0_5RangeEiPKS0_i
-__ZN3JSCL20stringProtoFuncSplitEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL19arrayProtoFuncSliceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7JSValue9toIntegerEPNS_9ExecStateE
-__ZN3JSC4Heap15recordExtraCostEm
-__ZNK3JSC8JSObject3getEPNS_9ExecStateEj
-__ZN3JSC15ObjectPrototype18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC4Heap9markRootsEv
-__ZN3JSC4Heap30markStackObjectsConservativelyERNS_9MarkStackE
-__ZN3JSC4Heap31markCurrentThreadConservativelyERNS_9MarkStackE
-__ZN3JSC4Heap39markCurrentThreadConservativelyInternalERNS_9MarkStackE
-__ZN3JSC4Heap18markConservativelyERNS_9MarkStackEPvS3_
-__ZN3JSC9MarkStack5drainEv
-__ZN3JSC9MarkStack12markChildrenEPNS_6JSCellE
-__ZN3JSC8JSObject12markChildrenERNS_9MarkStackE
-__ZN3JSC14JSGlobalObject12markChildrenERNS_9MarkStackE
-__ZN3JSC9CodeBlock13markAggregateERNS_9MarkStackE
-__ZN3JSC18FunctionExecutable13markAggregateERNS_9MarkStackE
-__ZN3JSC10JSFunction12markChildrenERNS_9MarkStackE
-__ZN3JSC7JSArray12markChildrenERNS_9MarkStackE
-__ZN3JSC7JSArray18markChildrenDirectERNS_9MarkStackE
-__ZN3JSC15JSWrapperObject12markChildrenERNS_9MarkStackE
-__ZN3JSC18GlobalEvalFunction12markChildrenERNS_9MarkStackE
-__ZN3JSC12JSActivation12markChildrenERNS_9MarkStackE
-__ZN3JSC9Arguments12markChildrenERNS_9MarkStackE
-__ZN3JSCL12markIfNeededERNS_9MarkStackENS_7JSValueE
-__ZN3JSC4Heap20markProtectedObjectsERNS_9MarkStackE
-__ZN3JSC12SmallStrings12markChildrenERNS_9MarkStackE
-__ZN3JSC9MarkStack7compactEv
-__ZN3JSC4Heap12resizeBlocksEv
-__ZNK3JSC4Heap11markedCellsEmm
-__ZN3JSC17PrototypeFunctionD1Ev
-__ZN3JSC12RegExpObjectD1Ev
-__ZN3JSC8JSObjectD1Ev
-__ZN3JSC22JSPropertyNameIteratorD1Ev
-__ZN3WTF10RefCountedIN3JSC14StructureChainEE5derefEv
-__ZN3JSC12JSActivationD1Ev
-__ZN3JSC12JSActivation16JSActivationDataD1Ev
-__ZN3JSC16JSVariableObjectD2Ev
-__ZN3JSC23normalizePrototypeChainEPNS_9ExecStateENS_7JSValueES2_RKNS_10IdentifierERm
-__ZN3JSC9ArgumentsD1Ev
-__ZN3JSC9ArgumentsD2Ev
-__ZN3JSC18RegExpMatchesArrayD1Ev
-__ZNK3JSC7UString8toUInt32EPbb
-__ZNK3JSC7UString8toDoubleEbb
-__ZNK3JSC7UString10getCStringERN3WTF6VectorIcLm32EEE
-__ZN3JSCL26stringProtoFuncToUpperCaseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17DeleteBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator15emitDeleteByValEPNS_10RegisterIDES2_S2_
-__ZN3JSC10MathObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC21getStaticFunctionSlotINS_8JSObjectEEEbPNS_9ExecStateEPKNS_9HashTableEPS1_RKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL19mathProtoFuncRandomEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC13jsAddSlowCaseEPNS_9ExecStateENS_7JSValueES2_
-__ZNK3JSC8JSString11toPrimitiveEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
-__ZN3JSC14NumericStrings3addEd
-__ZN3JSC7UString4fromEd
-__ZN3WTF32doubleToStringInJavaScriptFormatEdPcPj
-__ZN3WTF4dtoaEPcdiPiS1_PS0_
-__ZN3WTFL4multERNS_6BigIntERKS0_
-__ZN3WTF6VectorIjLm16EEaSERKS1_
-__ZN3WTF6BigInt6appendEj
-__ZN3JSC7UStringC1EPKci
-__ZN3JSCL20stringProtoFuncSliceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC8JSString11resolveRopeEPNS_9ExecStateE
-__ZN3JSC8JSStringC2EPNS_12JSGlobalDataEjPS0_S3_
-__ZN3JSC8JSObject14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC12ContinueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator14continueTargetERKNS_10IdentifierE
-__ZN3JSC13DeleteDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator14emitDeleteByIdEPNS_10RegisterIDES2_RKNS_10IdentifierE
-__ZN3JSC10JSFunction14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC15IdentifierArena21makeNumericIdentifierEPNS_12JSGlobalDataEd
-__ZN3JSC11Interpreter7executeERNS_16CallFrameClosureEPNS_7JSValueE
-__ZN3JSC9Arguments13copyRegistersEv
-__ZN3JSC14PostfixDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC8JSObject18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC8JSObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC24ApplyFunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC14ExpressionNode13isSimpleArrayEv
-__ZN3JSC17BytecodeGenerator26emitJumpIfNotFunctionApplyEPNS_10RegisterIDEPNS_5LabelE
-__ZN3JSC17BytecodeGenerator22willResolveToArgumentsERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator29uncheckedRegisterForArgumentsEv
-__ZN3JSC17BytecodeGenerator15emitLoadVarargsEPNS_10RegisterIDES2_
-__ZN3JSC17BytecodeGenerator15emitCallVarargsEPNS_10RegisterIDES2_S2_S2_jjj
-__ZN3JSC14InstanceOfNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator14emitInstanceOfEPNS_10RegisterIDES2_S2_S2_
-__ZN3JSCL27isInvalidParamForInstanceOfEPNS_9ExecStateEPNS_9CodeBlockEPKNS_11InstructionENS_7JSValueERS7_
-__ZN3JSC8JSObject11hasInstanceEPNS_9ExecStateENS_7JSValueES3_
-__ZN3JSC8JSObject3putEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSCL21arrayProtoFuncIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC8JSString12toThisObjectEPNS_9ExecStateE
-__ZN3JSC12StringObjectC1EN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_8JSStringE
-__ZNK3JSC6JSCell12toThisStringEPNS_9ExecStateE
-__ZNK3JSC8JSObject8toStringEPNS_9ExecStateE
-__ZNK3JSC8JSObject11toPrimitiveEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
-__ZNK3JSC8JSObject12defaultValueEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
-__ZN3JSC4callEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataES2_RKNS_7ArgListE
-__ZN3JSCL23stringProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12StringObject9classInfoEv
-__ZN3JSC14TimeoutChecker10didTimeOutEPNS_9ExecStateE
-__ZN3JSC16ErrorConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL29constructWithErrorConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC14constructErrorEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC13ErrorInstanceC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZL14makePrefixNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeENS_8OperatorEiii
-__ZNK3JSC7AddNode5isAddEv
-__ZN3JSC12BinaryOpNode10emitStrcatERNS_17BytecodeGeneratorEPNS_10RegisterIDES4_PNS_21ReadModifyResolveNodeE
-__ZNK3JSC10StringNode8isStringEv
-__ZNK3JSC14ExpressionNode8isStringEv
-__ZN3JSC17BytecodeGenerator15emitToPrimitiveEPNS_10RegisterIDES2_
-__ZN3JSC17BytecodeGenerator10emitStrcatEPNS_10RegisterIDES2_i
-__ZN3JSC13PrefixDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC8JSString40appendValueInConstructAndIncrementLengthEPNS_9ExecStateERjNS_7JSValueE
-__ZN3JSC17ProgramExecutableD0Ev
-__ZN3JSC16ProgramCodeBlockD0Ev
-__ZN3JSC15GlobalCodeBlockD2Ev
-__ZN3WTF9HashTableIPN3JSC15GlobalCodeBlockES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E4findIS3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEENS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EERKT_
-__ZN3JSC9CodeBlock8RareDataD2Ev
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS1_INS2_14EvalExecutableEEEENS_18PairFirstExtractorIS8_EENS_7StrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSE_IS7_EEEESF_E15deallocateTableEPS8_i
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS1_INS2_14EvalExecutableEEEENS_18PairFirstExtractorIS8_EENS_7StrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSE_IS7_EEEESF_E4findIS4_NS_22IdentityHashTranslatorIS4_S8_SC_EEEENS_17HashTableIteratorIS4_S8_SA_SC_SH_SF_EERKT_
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS1_INS2_14EvalExecutableEEEENS_18PairFirstExtractorIS8_EENS_7StrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSE_IS7_EEEESF_E6rehashEi
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS1_INS2_14EvalExecutableEEEENS_18PairFirstExtractorIS8_EENS_7StrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSE_IS7_EEEESF_E3addIPS3_S7_NS_29RefPtrHashMapRawKeyTranslatorISK_S8_SH_SC_EEEES5_INS_17HashTableIteratorIS4_S8_SA_SC_SH_SF_EEbERKT_RKT0_
-__ZN3WTF6VectorIN3JSC15StringJumpTableELm0EED1Ev
-__ZN3JSC6RegExpD1Ev
-__Z12jsRegExpFreeP8JSRegExp
-__ZN3JSC5Lexer19shiftLineTerminatorEv
-__ZNK3JSC8JSString8toNumberEPNS_9ExecStateE
-__ZNK3JSC7UString8toDoubleEv
-__ZNK3JSC7ArgList8getSliceEiRS0_
-__ZN3JSC17RegExpConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL30constructWithRegExpConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC15constructRegExpEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC18globalFuncUnescapeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF6VectorItLm64EE14expandCapacityEmPKt
-__ZN3WTF6VectorIPN3JSC14ExpressionNodeELm8EE14expandCapacityEmPKS3_
-__ZN3JSC10JSFunction4callEPNS_9ExecStateENS_7JSValueERKNS_7ArgListE
-__ZN3JSC11Interpreter7executeEPNS_18FunctionExecutableEPNS_9ExecStateEPNS_10JSFunctionEPNS_8JSObjectERKNS_7ArgListEPNS_14ScopeChainNodeEPNS_7JSValueE
-__ZL11makeDivNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
-__ZNK3JSC8NullNode6isNullEv
-__ZN3JSC21ReadModifyResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC15DateConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL28constructWithDateConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC13constructDateEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC12DateInstanceC1EPNS_9ExecStateEd
-__ZN3WTF8timeClipEd
-__ZN3JSC13DatePrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL19dateProtoFuncGetDayEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12DateInstance9classInfoEv
-__ZNK3JSC12DateInstance26calculateGregorianDateTimeEPNS_9ExecStateE
-__ZN3JSC17DateInstanceCache3addEd
-__ZN3JSC21msToGregorianDateTimeEPNS_9ExecStateEdbRNS_17GregorianDateTimeE
-__ZN3JSC12getUTCOffsetEPNS_9ExecStateE
-__ZN3JSCL12getDSTOffsetEPNS_9ExecStateEdd
-__ZN3WTFL18calculateDSTOffsetEdd
-__ZN3WTF23dayInMonthFromDayInYearEib
-__ZN3JSCL21dateProtoFuncGetHoursEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL23dateProtoFuncGetMinutesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL30dateProtoFuncGetTimezoneOffsetEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC14NumericStrings3addEi
-__ZN3JSC7UString4fromEi
-__ZN3JSCL26stringProtoFuncToLowerCaseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JSString4Rope20destructNonRecursiveEv
-__ZNK3JSC19JSStaticScopeObject14isDynamicScopeEv
-__ZN3JSC17ReadModifyDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC18BooleanConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL22callBooleanConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZL17bracketIsAnchoredPKh
-__ZN3JSCL18arrayProtoFuncJoinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF9HashTableIPN3JSC8JSObjectES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E3addIS3_S3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEESt4pairINS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EEbERKT_RKT0_
-__ZN3WTF9HashTableIPN3JSC8JSObjectES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
-__ZN3WTF6VectorItLm256EE14expandCapacityEmPKt
-__ZN3WTF9HashTableIPN3JSC8JSObjectES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E4findIS3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEENS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EERKT_
-__ZN3JSC19FunctionConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL32constructWithFunctionConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC17constructFunctionEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC17constructFunctionEPNS_9ExecStateERKNS_7ArgListERKNS_10IdentifierERKNS_7UStringEi
-__ZN3JSC8WithNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator13emitPushScopeEPNS_10RegisterIDE
-__ZN3JSC11Interpreter7resolveEPNS_9ExecStateEPNS_11InstructionERNS_7JSValueE
-__ZN3JSC12JSActivation18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC10SwitchNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC13CaseBlockNode20emitBytecodeForBlockERNS_17BytecodeGeneratorEPNS_10RegisterIDES4_
-__ZN3JSC13CaseBlockNode18tryOptimizedSwitchERN3WTF6VectorIPNS_14ExpressionNodeELm8EEERiS7_
-__ZN3JSCL17processClauseListEPNS_14ClauseListNodeERN3WTF6VectorIPNS_14ExpressionNodeELm8EEERNS_10SwitchKindERbRiSB_
-__ZN3JSC17BytecodeGenerator11beginSwitchEPNS_10RegisterIDENS_10SwitchInfo10SwitchTypeE
-__ZN3WTF6VectorIN3JSC10SwitchInfoELm0EE14expandCapacityEm
-__ZN3WTF6VectorISt4pairIiiELm8EE14expandCapacityEmPKS2_
-__ZN3JSC17BytecodeGenerator9endSwitchEjPN3WTF6RefPtrINS_5LabelEEEPPNS_14ExpressionNodeEPS3_ii
-__ZN3JSC9CodeBlock24addStringSwitchJumpTableEv
-__ZN3WTF6VectorIN3JSC15StringJumpTableELm0EE14expandCapacityEmPKS2_
-__ZN3WTF11VectorMoverILb0EN3JSC15StringJumpTableEE4moveEPKS2_S5_PS2_
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS2_14OffsetLocationEENS_18PairFirstExtractorIS7_EENS_7StrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSD_IS6_EEEESE_E6rehashEi
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS2_14OffsetLocationEENS_18PairFirstExtractorIS7_EENS_7StrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSD_IS6_EEEESE_E4findIS4_NS_22IdentityHashTranslatorIS4_S7_SB_EEEENS_17HashTableIteratorIS4_S7_S9_SB_SG_SE_EERKT_
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS2_14OffsetLocationEENS_18PairFirstExtractorIS7_EENS_7StrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSD_IS6_EEEESE_E3addIS4_S7_NS_22IdentityHashTranslatorIS4_S7_SB_EEEES5_INS_17HashTableIteratorIS4_S7_S9_SB_SG_SE_EEbERKT_RKT0_
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC11UStringImplEEESt4pairIS4_NS2_14OffsetLocationEENS_18PairFirstExtractorIS7_EENS_7StrHashIS4_EENS_14PairHashTraitsINS_10HashTraitsIS4_EENSD_IS6_EEEESE_E3addIPS3_S6_NS_29RefPtrHashMapRawKeyTranslatorISJ_S7_SG_SB_EEEES5_INS_17HashTableIteratorIS4_S7_S9_SB_SG_SE_EEbERKT_RKT0_
-__ZN3JSC15StringJumpTable14offsetForValueEPNS_11UStringImplEi
-__ZN3JSC28globalFuncEncodeURIComponentEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL6encodeEPNS_9ExecStateERKNS_7ArgListEPKc
-__ZNK3JSC7UString10UTF8StringEb
-__ZN3WTF7Unicode18convertUTF16ToUTF8EPPKtS2_PPcS4_b
-__ZN3JSC7CStringD1Ev
-__ZN3JSC11Interpreter22resolveBaseAndPropertyEPNS_9ExecStateEPNS_11InstructionERNS_7JSValueE
-__ZN3JSCL24regExpConstructorDollar2EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar3EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar4EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZNK3JSC11Interpreter18retrieveLastCallerEPNS_9ExecStateERiRlRNS_7UStringERNS_7JSValueE
-__ZN3JSC9CodeBlock27lineNumberForBytecodeOffsetEPNS_9ExecStateEj
-__ZN3JSC9CodeBlock34reparseForExceptionInfoIfNecessaryEPNS_9ExecStateE
-__ZN3JSC18FunctionExecutable20reparseExceptionInfoEPNS_12JSGlobalDataEPNS_14ScopeChainNodeEPNS_9CodeBlockE
-__ZN3WTF6OwnPtrIN3JSC13ExceptionInfoEE3setEPS2_
-__ZN3WTF6VectorIPN3JSC14ExpressionNodeELm16EE14expandCapacityEmPKS3_
-__ZN3JSC8jsLessEqEPNS_9ExecStateENS_7JSValueES2_
-__ZN3WTF6VectorIN3JSC8JSString4Rope5FiberELm32EE14expandCapacityEm
-__ZL12makeMultNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
-__ZN3JSCL17mathProtoFuncCeilEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7JSValue17toPrimitiveStringEPNS_9ExecStateE
-__ZN3JSC20EvalFunctionCallNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator12emitCallEvalEPNS_10RegisterIDES2_S2_PNS_13ArgumentsNodeEjjj
-__ZN3JSC15globalFuncIsNaNEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC11Interpreter8callEvalEPNS_9ExecStateEPNS_12RegisterFileEPNS_8RegisterEiiRNS_7JSValueE
-__ZN3JSC13LiteralParser5Lexer3lexERNS1_18LiteralParserTokenE
-__ZN3JSC13LiteralParser5parseENS0_11ParserStateE
-__ZN3JSC20MarkedArgumentBufferD1Ev
-__ZN3JSC13EvalCodeCache3getEPNS_9ExecStateERKNS_7UStringEPNS_14ScopeChainNodeERNS_7JSValueE
-__ZN3JSC14EvalExecutable7compileEPNS_9ExecStateEPNS_14ScopeChainNodeE
-__ZN3JSC6Parser5parseINS_8EvalNodeEEEN3WTF10PassRefPtrIT_EEPNS_12JSGlobalDataEPNS_8DebuggerEPNS_9ExecStateERKNS_10SourceCodeEPiPNS_7UStringE
-__ZN3JSC8EvalNode6createEPNS_12JSGlobalDataEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEPNS6_IPNS_16FunctionBodyNodeELm0EEERKNS_10SourceCodeEji
-__ZNK3JSC10ScopeChain10localDepthEv
-__ZN3JSC13EvalCodeBlockC2EPNS_14EvalExecutableEPNS_14JSGlobalObjectEN3WTF10PassRefPtrINS_14SourceProviderEEEi
-__ZN3JSC17BytecodeGeneratorC1EPNS_8EvalNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_11UStringImplEEENS_16SymbolTableEntryENS_17IdentifierRepHashENS9_10HashTraitsISD_EENS_26SymbolTableIndexHashTraitsEEEPNS_13EvalCodeBlockE
-__ZN3JSC17BytecodeGeneratorC2EPNS_8EvalNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_11UStringImplEEENS_16SymbolTableEntryENS_17IdentifierRepHashENS9_10HashTraitsISD_EENS_26SymbolTableIndexHashTraitsEEEPNS_13EvalCodeBlockE
-__ZN3JSC8EvalNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZThn12_N3JSC8EvalNodeD0Ev
-__ZN3WTF29RefPtrHashMapRawKeyTranslatorIPN3JSC11UStringImplESt4pairINS_6RefPtrIS2_EENS5_INS1_14EvalExecutableEEEENS_14PairHashTraitsINS_10HashTraitsIS6_EENSB_IS8_EEEENS_7StrHashIS6_EEE9translateERS9_S3_RKS8_
-__ZN3JSC9ExecState9thisValueEv
-__ZN3JSC11Interpreter7executeEPNS_14EvalExecutableEPNS_9ExecStateEPNS_8JSObjectEiPNS_14ScopeChainNodeEPNS_7JSValueE
-__ZN3JSC11Interpreter11resolveBaseEPNS_9ExecStateEPNS_11InstructionE
-__ZN3JSC16globalFuncEscapeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL26stringProtoFuncLastIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7JSValue20toIntegerPreserveNaNEPNS_9ExecStateE
-__ZNK3JSC7UString5rfindERKS0_i
-__ZN3JSCL24stringProtoFuncSubstringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL21stringProtoFuncSubstrEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC14EvalExecutableD0Ev
-__ZN3JSC13EvalCodeBlockD0Ev
-__ZN3WTF21CrossThreadRefCountedINS_16OwnFastMallocPtrItEEE5derefEv
-__ZN3JSC10JSONObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL22JSONProtoFuncStringifyEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC11StringifierC2EPNS_9ExecStateENS_7JSValueES3_
-__ZN3JSCL20unwrapBoxedPrimitiveEPNS_9ExecStateENS_7JSValueE
-__ZN3JSC11Stringifier9stringifyENS_7JSValueE
-__ZN3JSC11Stringifier22appendStringifiedValueERNS_13StringBuilderENS_7JSValueEPNS_8JSObjectERKNS_27PropertyNameForFunctionCallE
-__ZN3JSC11Stringifier6toJSONENS_7JSValueERKNS_27PropertyNameForFunctionCallE
-__ZNK3JSC6JSCell9getStringEPNS_9ExecStateERNS_7UStringE
-__ZN3JSC11Stringifier6Holder18appendNextPropertyERS0_RNS_13StringBuilderE
-__ZN3JSC11Stringifier6indentEv
-__ZNK3JSC7UString6substrEii
-__ZNK3JSC11Stringifier12startNewLineERNS_13StringBuilderE
-__ZN3JSC11Stringifier18appendQuotedStringERNS_13StringBuilderERKNS_7UStringE
-__ZN3JSC11Stringifier8unindentEv
-__ZN3JSC11StringifierD2Ev
-__ZN3JSC19JSStaticScopeObject12markChildrenERNS_9MarkStackE
-__ZN3JSC17PrefixResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSCL18JSONProtoFuncParseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC13LiteralParser5Lexer9lexStringILNS0_10ParserModeE0EEENS0_9TokenTypeERNS1_18LiteralParserTokenE
-__ZN3JSC13LiteralParser5Lexer9lexNumberERNS1_18LiteralParserTokenE
-__ZN3JSC14LogicalNotNode30emitBytecodeInConditionContextERNS_17BytecodeGeneratorEPNS_5LabelES4_b
-__ZN3JSCL18regExpObjectSourceEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC18RegExpMatchesArray18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL19stringProtoFuncTrimEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL10trimStringEPNS_9ExecStateENS_7JSValueEi
-__ZN3JSC15isStrWhiteSpaceEt
-__ZN3JSC17StringConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL21callStringConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC23FunctionCallBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC14jsIsObjectTypeENS_7JSValueE
-__ZN3JSC14StructureChainD1Ev
-__ZN3WTF11OwnArrayPtrINS_6RefPtrIN3JSC9StructureEEEE10safeDeleteEv
-__ZN3JSC11Interpreter14throwExceptionERPNS_9ExecStateERNS_7JSValueEjb
-__ZNK3JSC8JSObject22isNotAnObjectErrorStubEv
-__ZN3JSC9CodeBlock32expressionRangeForBytecodeOffsetEPNS_9ExecStateEjRiS3_S3_
-__ZN3JSC9CodeBlock43hasGlobalResolveInstructionAtBytecodeOffsetEj
-__ZN3JSC8JSObject17putWithAttributesEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueEj
-__ZNK3JSC8JSObject19isWatchdogExceptionEv
-__ZN3JSC9CodeBlock24handlerForBytecodeOffsetEj
-__ZN3JSC11Interpreter15unwindCallFrameERPNS_9ExecStateENS_7JSValueERjRPNS_9CodeBlockE
-__ZNK3JSC12JSActivation9classInfoEv
-__ZN3JSC11Interpreter20createExceptionScopeEPNS_9ExecStateEPKNS_11InstructionE
-__ZN3JSC19JSStaticScopeObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC4Heap17collectAllGarbageEv
-__ZN3JSC4Heap5sweepEv
-__ZN3JSC12StringObjectD1Ev
-__ZN3JSC15JSWrapperObjectD2Ev
-__ZN3JSC19JSStaticScopeObjectD1Ev
-__ZN3JSC19JSStaticScopeObjectD2Ev
-__ZN3JSC12DateInstanceD1Ev
-__ZN3JSC14JSGlobalObjectD2Ev
-__ZN3JSC17FunctionPrototypeD1Ev
-__ZN3JSC15ObjectPrototypeD1Ev
-__ZN3JSC14ArrayPrototypeD1Ev
-__ZN3JSC15StringPrototypeD1Ev
-__ZN3JSC16BooleanPrototypeD1Ev
-__ZN3JSC15NumberPrototypeD1Ev
-__ZN3JSC13DatePrototypeD1Ev
-__ZN3JSC15RegExpPrototypeD1Ev
-__ZN3JSC14ErrorPrototypeD1Ev
-__ZN3JSC13ErrorInstanceD2Ev
-__ZN3JSC20NativeErrorPrototypeD1Ev
-__ZN3JSC17ObjectConstructorD1Ev
-__ZN3JSC19FunctionConstructorD1Ev
-__ZN3JSC16ArrayConstructorD1Ev
-__ZN3JSC17StringConstructorD1Ev
-__ZN3JSC18BooleanConstructorD1Ev
-__ZN3JSC17NumberConstructorD1Ev
-__ZN3JSC15DateConstructorD1Ev
-__ZN3JSC17RegExpConstructorD1Ev
-__ZN3JSC16ErrorConstructorD1Ev
-__ZN3JSC22NativeErrorConstructorD1Ev
-__ZN3JSC10MathObjectD1Ev
-__ZN3JSC10JSONObjectD1Ev
-__ZN3JSC18GlobalEvalFunctionD1Ev
-__ZN3JSC7JSArray15copyToRegistersEPNS_9ExecStateEPNS_8RegisterEj
-__ZN3JSC11DoWhileNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC11Interpreter14uncachePutByIDEPNS_9CodeBlockEPNS_11InstructionE
-__ZN3JSC9parseDateEPNS_9ExecStateERKNS_7UStringE
-__ZN3JSC37parseDateFromNullTerminatedCharactersEPNS_9ExecStateEPKc
-__ZN3WTFL37parseDateFromNullTerminatedCharactersEPKcRbRi
-__ZN3JSCL21dateProtoFuncSetHoursEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL23setNewValueFromTimeArgsEPNS_9ExecStateENS_7JSValueERKNS_7ArgListEib
-__ZN3JSC21gregorianDateTimeToMSEPNS_9ExecStateERKNS_17GregorianDateTimeEdb
-__ZN3JSCL20dateProtoFuncGetTimeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL18mathProtoFuncRoundEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC21UntypedPtrAndBitfieldanEm
-__ZN3JSCL18mathProtoFuncFloorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC18globalFuncParseIntEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC15toInt32SlowCaseEdRb
-__ZN3JSCL19regExpProtoFuncExecEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC12RegExpObject4execEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC20globalFuncParseFloatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC9CodeBlock27addImmediateSwitchJumpTableEv
-__ZN3WTF6VectorIN3JSC15SimpleJumpTableELm0EE15reserveCapacityEm
-__ZN3WTF6VectorIiLm0EE4fillERKim
-__ZN3JSC15SimpleJumpTable14offsetForValueEii
-__ZN3JSCL29objectProtoFuncHasOwnPropertyEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC8JSObject14hasOwnPropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC17NumberConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL21callNumberConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF37parseDateFromNullTerminatedCharactersEPKc
-__ZN3JSC6JSCell14toThisJSStringEPNS_9ExecStateE
-__ZN3JSCL22arrayProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7UString12replaceRangeEiiRKS0_
-__ZN3JSC28globalFuncDecodeURIComponentEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL6decodeEPNS_9ExecStateERKNS_7ArgListEPKcb
-__ZN3JSC17BytecodeGenerator10emitPreDecEPNS_10RegisterIDE
-__ZN3JSCL8parseIntERKNS_7UStringEi
-__ZNK3JSC7JSValue19synthesizePrototypeEPNS_9ExecStateE
-__ZN3JSC26createNotAnObjectErrorStubEPNS_9ExecStateEb
-__ZN3JSC13JSNotAnObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZNK3JSC22JSNotAnObjectErrorStub22isNotAnObjectErrorStubEv
-__ZN3JSC22createNotAnObjectErrorEPNS_9ExecStateEPNS_22JSNotAnObjectErrorStubEjPNS_9CodeBlockE
-__ZN3JSC9CodeBlock37getByIdExceptionInfoForBytecodeOffsetEPNS_9ExecStateEjRNS_8OpcodeIDE
-__ZN3JSCL18createErrorMessageEPNS_9ExecStateEPNS_9CodeBlockEiiiNS_7JSValueENS_7UStringE
-__ZN3JSC10makeStringIPKcNS_7UStringES2_S3_S2_S3_S2_EES3_T_T0_T1_T2_T3_T4_T5_
-__ZN3JSC5Error6createEPNS_9ExecStateENS_9ErrorTypeERKNS_7UStringEilS6_
-__ZN3JSC22NativeErrorConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSC9constructEPNS_9ExecStateENS_7JSValueENS_13ConstructTypeERKNS_13ConstructDataERKNS_7ArgListE
-__ZN3JSCL35constructWithNativeErrorConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC22NativeErrorConstructor9constructEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSCL20dateProtoFuncSetTimeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL21dateProtoFuncGetMonthEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL20dateProtoFuncGetYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL24dateProtoFuncToGMTStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12DateInstance29calculateGregorianDateTimeUTCEPNS_9ExecStateE
-__ZN3JSC20formatDateUTCVariantERKNS_17GregorianDateTimeERA100_c
-__ZN3JSC13formatTimeUTCERKNS_17GregorianDateTimeERA100_c
-__ZN3JSC10makeStringIPcPKcS1_EENS_7UStringET_T0_T1_
-__ZN3JSCL23numberProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL28substituteBackreferencesSlowERKNS_7UStringES2_PKiPNS_6RegExpEi
-__ZN3JSCL21dateProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC10formatDateERKNS_17GregorianDateTimeERA100_c
-__ZN3JSC10formatTimeERKNS_17GregorianDateTimeERA100_c
-__ZNK3JSC8JSObject8toNumberEPNS_9ExecStateE
-__ZN3JSCL20dateProtoFuncGetDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC21ReadModifyBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZL17makeLeftShiftNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
-__ZL18makeRightShiftNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
-__ZN3JSCL25stringProtoFuncCharCodeAtEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16mathProtoFuncMaxEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17RegExpConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL21callRegExpConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17PrefixBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC22JSNotAnObjectErrorStubD1Ev
-__ZN3JSC13JSNotAnObjectD1Ev
-__ZN3JSC13ErrorInstanceD1Ev
-__ZN3JSC17ObjectConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL30constructWithObjectConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3WTF9HashTableIdSt4pairIdN3JSC7JSValueEENS_18PairFirstExtractorIS4_EENS_9FloatHashIdEENS_14PairHashTraitsINS_10HashTraitsIdEENSA_IS3_EEEESB_E4findIdNS_22IdentityHashTranslatorIdS4_S8_EEEENS_17HashTableIteratorIdS4_S6_S8_SD_SB_EERKT_
-__ZN3WTF9HashTableIdSt4pairIdN3JSC7JSValueEENS_18PairFirstExtractorIS4_EENS_9FloatHashIdEENS_14PairHashTraitsINS_10HashTraitsIdEENSA_IS3_EEEESB_E6rehashEi
-__ZN3WTF9HashTableIdSt4pairIdN3JSC7JSValueEENS_18PairFirstExtractorIS4_EENS_9FloatHashIdEENS_14PairHashTraitsINS_10HashTraitsIdEENSA_IS3_EEEESB_E3addIdS3_NS_17HashMapTranslatorIS4_SD_S8_EEEES1_INS_17HashTableIteratorIdS4_S6_S8_SD_SB_EEbERKT_RKT0_
-__ZN3JSC21UntypedPtrAndBitfieldoREm
-__ZN3JSC6RegExp6createEPNS_12JSGlobalDataERKNS_7UStringE
-__ZN3JSC17PropertyNameArray3addEPNS_11UStringImplE
-__ZN3JSCL22objectProtoFuncValueOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17StringConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL30constructWithStringConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC12StringObjectC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7UStringE
-__ZN3JSC12RegExpObject11getCallDataERNS_8CallDataE
-__ZN3JSC8VoidNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC15constructNumberEPNS_9ExecStateENS_7JSValueE
-__ZNK3JSC12NumberObject9classInfoEv
-__ZN3JSC12NumberObjectD1Ev
-__ZN3JSC4Heap9freeBlockEm
-__ZN3JSC4Heap12freeBlockPtrEPNS_14CollectorBlockE
-__ZN3JSCL20arrayProtoFuncSpliceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL18arrayProtoFuncSortEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArray4sortEPNS_9ExecStateE
-__ZN3JSC7JSArray17compactForSortingEv
-__ZN3JSC16JSVariableObject14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC9Structure27despecifyDictionaryFunctionERKNS_10IdentifierE
-__ZN3JSCL18stringFromCharCodeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC23jsSingleCharacterStringEPNS_12JSGlobalDataEt
-__ZN3WTF6VectorIN3JSC7UString5RangeELm16EE15reserveCapacityEm
-__ZN3WTF6VectorIN3JSC7UStringELm16EE15reserveCapacityEm
-__ZN3JSC19globalFuncEncodeURIEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC9ArrayNode13isSimpleArrayEv
-__ZNK3JSC9ArrayNode14toArgumentListEPNS_12JSGlobalDataE
-__ZN3JSCL17arrayProtoFuncPopEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArray3popEv
-__ZN3WTF6VectorIPN3JSC9StructureELm8EE14expandCapacityEmPKS3_
-__ZNK3JSC9Arguments20numProvidedArgumentsEPNS_9ExecStateE
-__ZN3JSC9Arguments15copyToRegistersEPNS_9ExecStateEPNS_8RegisterEj
-__ZN3JSC12RegExpObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC9lookupPutINS_12RegExpObjectEEEbPNS_9ExecStateERKNS_10IdentifierENS_7JSValueEPKNS_9HashTableEPT_
-__ZN3JSCL24setRegExpObjectLastIndexEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueE
-__Z15jsc_pcre_xclassiPKh
-__ZN3JSC18PostfixBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSCL28regExpConstructorLeftContextEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC18RegExpMatchesArray3putEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC12JSActivation3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC36constructBooleanFromImmediateBooleanEPNS_9ExecStateENS_7JSValueE
-__ZN3JSC13BooleanObjectC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
-__ZN3JSC28createUndefinedVariableErrorEPNS_9ExecStateERKNS_10IdentifierEjPNS_9CodeBlockE
-__ZN3JSC10makeStringIPKcNS_7UStringEEES3_T_T0_
-__ZN3JSCL22errorProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC10makeStringINS_7UStringEPKcS1_EES1_T_T0_T1_
-__ZN3JSCL16mathProtoFuncAbsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC6JSCell9getObjectEv
-__ZN3JSC9MarkStack12releaseStackEPvm
-__ZN3JSC8JSObject14deletePropertyEPNS_9ExecStateEj
-__ZNK3JSC8JSObject21findPropertyHashEntryEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC13BooleanObjectD1Ev
-__ZN3JSC22JSPropertyNameIterator12markChildrenERNS_9MarkStackE
-__ZN3JSCL21arrayProtoFuncUnShiftEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL21stringProtoFuncCharAtEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC26jsSingleCharacterSubstringEPNS_12JSGlobalDataERKNS_7UStringEj
-__ZN3JSC13UnaryPlusNode14stripUnaryPlusEv
-__ZN3JSC8JSString18getPrimitiveNumberEPNS_9ExecStateERdRNS_7JSValueE
-__ZN3JSC9Arguments3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC9LabelNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSCL21stringProtoFuncConcatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL24booleanProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL9dateParseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC10JSFunction19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E3addIS3_S3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEESt4pairINS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EEbERKT_RKT0_
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
-__ZN3JSC11Interpreter11resolveSkipEPNS_9ExecStateEPNS_11InstructionERNS_7JSValueE
-__ZN3JSCL21arrayProtoFuncReverseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF7Unicode18UTF8SequenceLengthEc
-__ZN3WTF7Unicode18decodeUTF8SequenceEPKc
-__ZN3JSC23createNotAFunctionErrorEPNS_9ExecStateENS_7JSValueEjPNS_9CodeBlockE
-__ZN3JSCL24dateProtoFuncGetFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC16ArrayConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL20callArrayConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArray19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
-__ZN3WTF9HashTableIPN3JSC11UStringImplES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E4findIS3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEENS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EERKT_
-__ZN3JSC9ExecState10arrayTableEPS0_
-__ZN3JSCL23dateProtoFuncGetSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL20dateProtoFuncSetYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC13LiteralParser5Lexer9lexStringILNS0_10ParserModeE1EEENS0_9TokenTypeERNS1_18LiteralParserTokenE
-__ZN3WTFL3i2bERNS_6BigIntEi
-__ZN3JSC6JSCell3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZNK3JSC8JSString8toObjectEPNS_9ExecStateE
-__ZNK3JSC8JSObject11hasPropertyEPNS_9ExecStateEj
-__ZN3WTFL3b2dERKNS_6BigIntEPi
-__ZN3WTF6VectorIPNS0_IN3JSC10RegisterIDELm32EEELm32EE14expandCapacityEmPKS4_
-__ZN3JSC7JSArray4sortEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataE
-__ZN3WTF6VectorIN3JSC26AVLTreeNodeForArrayCompareELm0EE6resizeEm
-__ZN3WTF7AVLTreeIN3JSC32AVLTreeAbstractorForArrayCompareELj44ENS_18AVLTreeDefaultBSetILj44EEEE6insertEi
-__ZN3JSC32AVLTreeAbstractorForArrayCompare15compare_key_keyENS_7JSValueES1_
-__ZN3WTF7AVLTreeIN3JSC32AVLTreeAbstractorForArrayCompareELj44ENS_18AVLTreeDefaultBSetILj44EEEE7balanceEi
-__ZN3JSCL22numberProtoFuncToFixedEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16integerPartNoExpEd
-__ZN3JSC10makeStringINS_7UStringES1_PKcS1_EES1_T_T0_T1_T2_
-__ZN3JSCL16mathProtoFuncLogEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16mathProtoFuncPowEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF13tryFastCallocEmm
-__ZN3WTF10fastCallocILb0EEEPvmm
-__ZN3JSC7JSArray11sortNumericEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataE
-__ZN3JSCL22compareNumbersForQSortEPKvS1_
-__ZN3JSC10makeStringINS_7UStringES1_EES1_T_T0_
-__ZN3WTF6VectorIPN3JSC10RegisterIDELm32EE14expandCapacityEmPKS3_
-__ZN3JSC16ErrorConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL20callErrorConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL21stringProtoFuncSearchEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL17arrayProtoFuncMapEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7JSValue16synthesizeObjectEPNS_9ExecStateE
-__ZN3WTF15ThreadCondition9timedWaitERNS_5MutexEd
-__ZN3JSCL7dateNowEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC18globalFuncIsFiniteEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17DeleteResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC21UStringSourceProvider8getRangeEii
-__ZN3JSC12JSActivation14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSCL22numberProtoFuncValueOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16mathProtoFuncCosEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC12JSGlobalData6createEv
-__ZN3JSC8JSObject17putDirectFunctionEPNS_9ExecStateEPNS_16InternalFunctionEj
-__ZN3JSC12JSGlobalData13startSamplingEv
-__ZN3JSC11Interpreter13startSamplingEv
-__ZN3JSC12JSGlobalData12stopSamplingEv
-__ZN3JSC11Interpreter12stopSamplingEv
-__ZN3JSCL17mathProtoFuncSqrtEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16mathProtoFuncSinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC16toUInt32SlowCaseEdRb
-__ZN3JSC9CodeBlock27addCharacterSwitchJumpTableEv
-__ZN3JSC20MarkedArgumentBuffer10slowAppendENS_7JSValueE
-__ZN3WTF9HashTableIPN3JSC20MarkedArgumentBufferES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E3addIS3_S3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEESt4pairINS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EEbERKT_RKT0_
-__ZN3WTF9HashTableIPN3JSC20MarkedArgumentBufferES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
-__ZN3WTF9HashTableIPN3JSC20MarkedArgumentBufferES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E4findIS3_NS_22IdentityHashTranslatorIS3_S3_S7_EEEENS_17HashTableIteratorIS3_S3_S5_S7_S9_S9_EERKT_
-__ZN3JSC15ObjectPrototype3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSCltERKNS_7UStringES2_
-__ZN3JSC12JSGlobalData14dumpSampleDataEPNS_9ExecStateE
-__ZN3JSC11Interpreter14dumpSampleDataEPNS_9ExecStateE
-__ZN3JSC4Heap7destroyEv
-__ZN3JSC4Heap10freeBlocksEv
-__ZN3JSC14JSGlobalObject25destroyJSGlobalObjectDataEPv
-__ZN3JSC14JSGlobalObject18JSGlobalObjectDataD2Ev
-__ZN3JSC12JSGlobalDataD1Ev
-__ZN3JSC12JSGlobalDataD2Ev
-__ZN3JSC12RegisterFileD1Ev
-__ZNK3JSC9HashTable11deleteTableEv
-__ZN3JSC5LexerD1Ev
-__ZN3JSC17CommonIdentifiersD2Ev
-__ZN3JSC21deleteIdentifierTableEPNS_15IdentifierTableE
-__ZN3JSC15IdentifierTableD2Ev
-__ZN3JSC4HeapD1Ev
-__ZN3JSC14NumericStringsD1Ev
-__ZN3JSC12SmallStringsD1Ev
__ZN3WTF10fastMallocEm
-__ZN3WTF10fastMallocILb1EEEPvm
__ZN3WTF20TCMalloc_ThreadCache10InitModuleEv
__ZN3WTFL15InitSizeClassesEv
__Z20TCMalloc_SystemAllocmPmm
__ZN3WTFL13MetaDataAllocEm
+__ZN3WTF17TCMalloc_PageHeap19initializeScavengerEv
__ZN3WTF20TCMalloc_ThreadCache22CreateCacheIfNecessaryEv
__ZN3WTF25TCMalloc_Central_FreeList11RemoveRangeEPPvS2_Pi
__ZN3WTF25TCMalloc_Central_FreeList18FetchFromSpansSafeEv
__ZN3WTF17TCMalloc_PageHeap10AllocLargeEm
__ZN3WTF17TCMalloc_PageHeap8GrowHeapEm
+__ZN3WTF17TCMalloc_PageHeap6DeleteEPNS_4SpanE
+__ZN3WTF8fastFreeEPv
__ZN3WTF19initializeThreadingEv
-__ZN3WTF20initializeMainThreadEv
-__ZN3WTF5MutexC1Ev
-__ZN3WTF28initializeMainThreadPlatformEv
__ZN3WTF36lockAtomicallyInitializedStaticMutexEv
-__ZN3WTF8fastFreeEPv
__ZN3WTF38unlockAtomicallyInitializedStaticMutexEv
__ZN3JSC19initializeThreadingEv
__ZN3JSCL23initializeThreadingOnceEv
+__ZN3WTF13WTFThreadDataC1Ev
__ZN3JSC17initializeUStringEv
-__ZN3JSC12initDateMathEv
+__ZN7WebCore10StringImpl5emptyEv
+__ZN3JSC12JSGlobalData10storeVPtrsEv
+__ZN3JSC9StructureC1ENS_7JSValueERKNS_8TypeInfoEj
+__ZN3JSC7JSArrayC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3WTF16fastZeroedMallocEm
+__ZN3JSC7JSArrayD1Ev
+__ZN3JSC7JSArrayD2Ev
+__ZN3JSC9StructureD1Ev
+__ZN3JSC9StructureD2Ev
+__ZN3JSC11JSByteArray15createStructureENS_7JSValueE
+__ZN3JSC11JSByteArrayD1Ev
+__ZN3JSC8JSStringD1Ev
+__ZN3JSC10JSFunctionC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC10JSFunctionC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC10JSFunctionD1Ev
+__ZN3JSC10JSFunctionD2Ev
+__ZN3JSC18VPtrHackExecutableD0Ev
+__ZN3WTF5MutexC1Ev
+__ZN3WTF15initializeDatesEv
__ZN3WTF11currentTimeEv
+__ZN3WTF8msToYearEd
+__ZN3WTF39initializeMainThreadToProcessMainThreadEv
+__ZN3WTFL43initializeMainThreadToProcessMainThreadOnceEv
+__ZN3WTF47initializeMainThreadToProcessMainThreadPlatformEv
+__ZN3WTF20initializeMainThreadEv
__ZN3WTF15ThreadConditionC1Ev
+__ZN7WebCore10StringImpl6createEPKtj
+__ZN7WebCore10StringImpl19createUninitializedEjRPt
__ZN3WTF5Mutex4lockEv
__ZN3WTF5Mutex6unlockEv
+__ZNK7WebCore6String17crossThreadStringEv
+__ZN7WebCore10StringImpl17crossThreadStringEv
+__ZN7WebCore10StringImpl12sharedBufferEv
+__ZN7WebCore6StringC1EPKc
+__ZN7WebCore10StringImpl6createEPKc
+__ZN7WebCore10StringImpl6createEPKcj
+__ZNK7WebCore6String14threadsafeCopyEv
+__ZNK7WebCore10StringImpl14threadsafeCopyEv
+__ZN7WebCore10StringImpl8endsWithEPS0_b
+__ZN7WebCore10StringImpl4findEPS0_ib
+__ZN7WebCore10StringImplD1Ev
+__ZN7WebCore10StringImplD2Ev
+__ZN7WebCoreplERKNS_6StringEPKc
+__ZN7WebCore6String6appendERKS0_
+__ZN7WebCoreplERKNS_6StringES2_
__ZN3WTF12createThreadEPFPvS0_ES0_PKc
__ZN3WTF20createThreadInternalEPFPvS0_ES0_PKc
-__ZN3WTFL35establishIdentifierForPthreadHandleERP17_opaque_pthread_t
+__ZN3WTFL35establishIdentifierForPthreadHandleERKP17_opaque_pthread_t
__ZN3WTF9HashTableIjSt4pairIjP17_opaque_pthread_tENS_18PairFirstExtractorIS4_EENS_7IntHashIjEENS_14PairHashTraitsINS_10HashTrai
__ZN3WTFL16threadEntryPointEPv
-__ZN3WTF16fastZeroedMallocEm
-__ZN3WTF21setThreadNameInternalEPKc
+__ZN3WTF31initializeCurrentThreadInternalEPKc
+__ZN3WTF20ThreadIdentifierData10initializeEj
+__ZN3WTF20ThreadIdentifierData23initializeKeyOnceHelperEv
__ZN3WTF5MutexD1Ev
-__ZN3WTF25TCMalloc_Central_FreeList11InsertRangeEPvS1_i
-__ZN3WTF25TCMalloc_Central_FreeList18ReleaseListToSpansEPv
__ZN3WTF12isMainThreadEv
+__ZN3WTF7CString16newUninitializedEmRPc
+__ZNK3WTF7CString4dataEv
__ZN3WTF14FastMallocZone4sizeEP14_malloc_zone_tPKv
+__ZN7WebCore6String29charactersWithNullTerminationEv
+__ZN7WebCore10StringImpl34createWithTerminatingNullCharacterERKS0_
__ZN3WTF13currentThreadEv
+__ZN3WTF20ThreadIdentifierData10identifierEv
+__ZNK7WebCore6String15stripWhiteSpaceEv
+__ZN7WebCore10StringImpl15stripWhiteSpaceEv
+__ZN7WebCore5equalEPKNS_10StringImplES2_
+__ZN7WebCoreplEPKcRKNS_6StringE
+__ZN7WebCore6StringC1EPKt
+__ZN7WebCore6StringC2EPKt
+__ZNK7WebCore6String7isEmptyEv
__ZN3WTF16callOnMainThreadEPFvPvES0_
__ZN3WTF5DequeINS_19FunctionWithContextEE14expandCapacityEv
__ZN3WTF37scheduleDispatchFunctionsOnMainThreadEv
__ZN3WTF15ThreadCondition4waitERNS_5MutexE
-__ZN3JSC8DebuggerC2Ev
+__ZN3WTF17TCMalloc_PageHeap3NewEm
+__ZN7WebCore12AtomicString4initEv
+__ZN3WTF7HashSetIPN7WebCore10StringImplENS1_10StringHashENS_10HashTraitsIS3_EEE3addIPKcNS1_17CStringTranslatorEEESt4pairINS_24H
+__ZN3WTF9HashTableIPN7WebCore10StringImplES3_NS_17IdentityExtractorIS3_EENS1_10StringHashENS_10HashTraitsIS3_EES8_E6rehashEi
+__ZN7WebCore12AtomicString3addEPKc
+__ZN7WebCore12AtomicString3addEPNS_10StringImplE
+__ZN7WebCore6StringC1EPKcj
+__ZN7WebCore5equalEPKNS_10StringImplEPKc
+__ZNK7WebCore6String6lengthEv
+__ZNK7WebCore6StringixEj
+__ZNK7WebCore6String9substringEjj
+__ZN7WebCore10StringImpl9substringEjj
+__ZNK7WebCore6String5lowerEv
+__ZN7WebCore10StringImpl5lowerEv
+__ZN7WebCore10StringImpl4findEti
+__ZNK7WebCore6String10charactersEv
+__ZN7WebCore17equalIgnoringCaseEPNS_10StringImplES1_
+__ZN7WebCore17equalIgnoringCaseEPNS_10StringImplEPKc
+__ZN7WebCore12AtomicString3addEPKtj
+__ZN3WTF7HashSetIPN7WebCore10StringImplENS1_10StringHashENS_10HashTraitsIS3_EEE3addINS1_11UCharBufferENS1_21UCharBufferTranslat
+__ZN7WebCore18charactersToDoubleEPKtmPb
__ZN3WTF6strtodEPKcPPc
-__ZN3WTF15ThreadCondition6signalEv
-__ZN3WTF15ThreadCondition9timedWaitERNS_5MutexEd
__ZN3WTF15ThreadCondition9broadcastEv
--[WTFMainThreadCaller call]
-__ZN3WTF31dispatchFunctionsFromMainThreadEv
-__ZN3WTF14FastMallocZone9forceLockEP14_malloc_zone_t
-__ZN3WTF11fastReallocEPvm
-__ZN3WTF11fastReallocILb1EEEPvS1_m
-__ZN3JSC7UStringC1EPKti
-__ZN3JSC7UStringC2EPKti
-__ZN3JSC12JSGlobalData12createLeakedEv
+__ZN3WTF15ThreadCondition6signalEv
+__ZN7WebCore12AtomicString6removeEPNS_10StringImplE
+__ZN3JSC12JSGlobalData12createLeakedENS_15ThreadStackTypeE
__ZN3JSC9Structure18startIgnoringLeaksEv
-__ZN3JSC7VPtrSetC2Ev
-__ZN3JSC9StructureC1ENS_7JSValueERKNS_8TypeInfoE
-__ZN3JSC7JSArrayC1EN3WTF10PassRefPtrINS_9StructureEEE
-__ZN3JSC7JSArrayD1Ev
-__ZN3JSC7JSArrayD2Ev
-__ZN3WTF10RefCountedIN3JSC9StructureEE5derefEv
-__ZN3JSC9StructureD1Ev
-__ZN3JSC9StructureD2Ev
-__ZN3JSC11JSByteArray15createStructureENS_7JSValueE
-__ZN3JSC11JSByteArrayD1Ev
-__ZN3JSC8JSStringD1Ev
-__ZN3JSC10JSFunctionD1Ev
-__ZN3JSC10JSFunctionD2Ev
-__ZN3JSC8JSObjectD2Ev
-__ZN3JSC12JSGlobalDataC2EbRKNS_7VPtrSetE
-__ZN3JSC21createIdentifierTableEv
+__ZN3JSC12JSGlobalDataC2ENS0_14GlobalDataTypeENS_15ThreadStackTypeE
__ZN3JSC17CommonIdentifiersC1EPNS_12JSGlobalDataE
__ZN3JSC17CommonIdentifiersC2EPNS_12JSGlobalDataE
__ZN3JSC10Identifier3addEPNS_12JSGlobalDataEPKc
-__ZN3WTF7HashSetIPN3JSC7UString3RepENS_7StrHashIS4_EENS_10HashTraitsIS4_EEE3addIPKcNS1_17CStringTranslatorEEESt4pairINS_24HashT
-__ZN3WTF9HashTableIPN3JSC7UString3RepES4_NS_17IdentityExtractorIS4_EENS_7StrHashIS4_EENS_10HashTraitsIS4_EESA_E6rehashEi
-__ZN3WTF9HashTableIPKcSt4pairIS2_NS_6RefPtrIN3JSC7UString3RepEEEENS_18PairFirstExtractorIS9_EENS_7PtrHashIS2_EENS_14PairHashTra
-__ZN3WTF6RefPtrIN3JSC7UString3RepEED1Ev
+__ZN3WTF7HashSetIPN7WebCore10StringImplENS1_10StringHashENS_10HashTraitsIS3_EEE3addIPKcN3JSC27IdentifierCStringTranslatorEEESt4
+__ZN3WTF9HashTableIPN7WebCore10StringImplES3_NS_17IdentityExtractorIS3_EENS1_10StringHashENS_10HashTraitsIS3_EES8_E6expandEv
+__ZN3WTF9HashTableIPKcSt4pairIS2_NS_6RefPtrIN7WebCore10StringImplEEEENS_18PairFirstExtractorIS8_EENS_7PtrHashIS2_EENS_14PairHas
+__ZN3JSC10IdentifierC1EPNS_12JSGlobalDataEPKc
__ZN3JSC12SmallStringsC1Ev
__ZN3JSC19ExecutableAllocator17intializePageSizeEv
__ZN3JSC14ExecutablePool11systemAllocEm
__ZN3JSC5LexerC1EPNS_12JSGlobalDataE
-__ZN3JSC5LexerC2EPNS_12JSGlobalDataE
+__ZN3JSC11ParserArenaC1Ev
+__ZN3JSC11ParserArenaC2Ev
__ZN3JSC11InterpreterC1Ev
__ZN3JSC11InterpreterC2Ev
-__ZN3JSC11Interpreter14privateExecuteENS0_13ExecutionFlagEPNS_12RegisterFileEPNS_9ExecStateEPNS_7JSValueE
-__ZN3WTF7HashMapIPvN3JSC8OpcodeIDENS_7PtrHashIS1_EENS_10HashTraitsIS1_EENS6_IS3_EEE3addERKS1_RKS3_
-__ZN3WTF9HashTableIPvSt4pairIS1_N3JSC8OpcodeIDEENS_18PairFirstExtractorIS5_EENS_7PtrHashIS1_EENS_14PairHashTraitsINS_10HashTrai
-__ZN3JSC8JITStubsC1EPNS_12JSGlobalDataE
+__ZN3JSC9JITThunksC1EPNS_12JSGlobalDataE
__ZN3JSC3JITC1EPNS_12JSGlobalDataEPNS_9CodeBlockE
__ZN3JSC3JITC2EPNS_12JSGlobalDataEPNS_9CodeBlockE
-__ZN3JSC3JIT35privateCompileCTIMachineTrampolinesEPN3WTF6RefPtrINS_14ExecutablePoolEEEPNS_12JSGlobalDataEPPvS9_S9_S9_S9_S9_
-__ZN3JSC12X86Assembler23X86InstructionFormatter11oneByteOp64ENS0_15OneByteOpcodeIDEiNS_3X8610RegisterIDE
+__ZN3JSC3JIT35privateCompileCTIMachineTrampolinesEPN3WTF6RefPtrINS_14ExecutablePoolEEEPNS_12JSGlobalDataEPNS_19TrampolineStruct
+__ZN3JSC20MacroAssemblerX86_6413branchTestPtrENS_23MacroAssemblerX86Common9ConditionENS_12X86Registers10RegisterIDES4_
+__ZN3JSC12X86Assembler23X86InstructionFormatter11oneByteOp64ENS0_15OneByteOpcodeIDEiNS_12X86Registers10RegisterIDE
__ZN3JSC12X86Assembler3jCCENS0_9ConditionE
-__ZN3JSC23MacroAssemblerX86Common4moveENS_22AbstractMacroAssemblerINS_12X86AssemblerEE6ImmPtrENS_3X8610RegisterIDE
-__ZN3JSC12X86Assembler23X86InstructionFormatter11oneByteOp64ENS0_15OneByteOpcodeIDEiNS_3X8610RegisterIDEi
-__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDEiNS_3X8610RegisterIDE
+__ZN3JSC12X86Assembler23X86InstructionFormatter11oneByteOp64ENS0_15OneByteOpcodeIDEiNS_12X86Registers10RegisterIDEi
+__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDEiNS_12X86Registers10RegisterIDEi
+__ZN3JSC12X86Assembler23X86InstructionFormatter11memoryModRMEiNS_12X86Registers10RegisterIDEi
+__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDEiNS_12X86Registers10RegisterIDE
__ZN3JSC15AssemblerBuffer11ensureSpaceEi
-__ZN3JSC20MacroAssemblerX86_6413branchTestPtrENS_23MacroAssemblerX86Common9ConditionENS_3X8610RegisterIDENS_22AbstractMacroAsse
-__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDENS_3X8610RegisterIDE
__ZN3JSC20MacroAssemblerX86_644callEv
-__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDEiNS_3X8610RegisterIDEi
+__ZN3JSC20MacroAssemblerX86_647loadPtrENS_22AbstractMacroAssemblerINS_12X86AssemblerEE15ImplicitAddressENS_12X86Registers10Regi
+__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDENS_12X86Registers10RegisterIDE
__ZN3JSC3JIT32compileOpCallInitializeCallFrameEv
-__ZN3JSC12X86Assembler23X86InstructionFormatter11memoryModRMEiNS_3X8610RegisterIDEi
-__ZN3JSC20MacroAssemblerX86_6421makeTailRecursiveCallENS_22AbstractMacroAssemblerINS_12X86AssemblerEE4JumpE
+__ZN3JSC15AssemblerBuffer4growEi
+__ZN3WTF11fastReallocEPvm
+__ZN3WTF11fastReallocILb1EEEPvS1_m
+__ZN3WTF10RefCountedIN3JSC14ExecutablePoolEE5derefEv
__ZN3JSC14TimeoutCheckerC1Ev
__ZN3JSC4HeapC1EPNS_12JSGlobalDataE
+__ZN3JSC4Heap13allocateBlockEv
+__ZN3JSC9MarkStack18initializePagesizeEv
+__ZN3JSC9MarkStack13allocateStackEm
__ZN3JSC27startProfilerServerIfNeededEv
+[ProfilerServer sharedProfileServer]
-[ProfilerServer init]
__ZN3JSC9Structure17stopIgnoringLeaksEv
+__ZNK7WebCore6String6latin1Ev
+__ZNK3WTF7CString6lengthEv
+__ZN7WebCore10StringImpl22containsOnlyWhitespaceEv
+__ZN7WebCore12AtomicString3addEPKt
+__ZN7WebCore10StringImpl11reverseFindEPS0_ib
+__ZN7WebCore10StringImpl5adoptERNS_12StringBufferE
+__ZNK7WebCore6String5splitEtRN3WTF6VectorIS0_Lm0EEE
+__ZNK7WebCore6String5splitERKS0_bRN3WTF6VectorIS0_Lm0EEE
+-[WTFMainThreadCaller call]
+__ZN3WTF31dispatchFunctionsFromMainThreadEv
+__ZN3WTF15ThreadCondition9timedWaitERNS_5MutexEd
+__ZN3WTF14FastMallocZone9forceLockEP14_malloc_zone_t
+__ZN3WTF14FastMallocZone11forceUnlockEP14_malloc_zone_t
+__ZN3WTF20TCMalloc_ThreadCache18DestroyThreadCacheEPv
+__ZN3WTF20TCMalloc_ThreadCache11DeleteCacheEPS0_
+__ZN3WTF25TCMalloc_Central_FreeList11InsertRangeEPvS1_i
+__ZN3WTF25TCMalloc_Central_FreeList18ReleaseListToSpansEPv
+__ZN3WTF20ThreadIdentifierData8destructEPv
+__ZN3WTF31clearPthreadHandleForIdentifierEj
+__ZN3WTF12detachThreadEj
+__ZN3WTFL26pthreadHandleForIdentifierEj
+__ZN7WebCore6StringC1EPKtj
+__Z15jsRegExpCompilePKti24JSRegExpIgnoreCaseOption23JSRegExpMultilineOptionPjPPKc
+__ZL30calculateCompiledPatternLengthPKti24JSRegExpIgnoreCaseOptionR11CompileDataR9ErrorCode
+__ZL11checkEscapePPKtS0_P9ErrorCodeib
+__ZL13compileBranchiPiPPhPPKtS3_P9ErrorCodeS_S_R11CompileData
+__Z15jsRegExpExecutePK8JSRegExpPKtiiPii
+__ZL5matchPKtPKhiR9MatchData
+__ZN7WebCore6String6formatEPKcz
+__ZNK7WebCore6String19characterStartingAtEj
+__ZN7WebCore10StringImpl19characterStartingAtEj
+__ZN3WTF25TCMalloc_Central_FreeList11ShrinkCacheEib
+__ZNK7WebCore6String16removeCharactersEPFbtE
+__ZN7WebCore10StringImpl16removeCharactersEPFbtE
+__ZN7WebCore10StringImpl7replaceEtt
+__ZNK7WebCore6String4utf8Ev
+__ZN3WTF7Unicode18convertUTF16ToUTF8EPPKtS2_PPcS4_b
+__ZN3WTF7CStringC1EPKcj
+__ZN3WTF7CString4initEPKcj
+__ZN7WebCore10StringImpl4findEPFbtEi
__ZN3JSC4Heap8allocateEm
-__ZN3JSCL13allocateBlockILNS_8HeapTypeE0EEEPNS_14CollectorBlockEv
-__ZN3JSC4Heap4heapENS_7JSValueE
+__ZN3JSC6JSCellD1Ev
__ZN3JSC4Heap7protectENS_7JSValueE
-__ZN3WTF7HashMapIPN3JSC6JSCellEjNS_7PtrHashIS3_EENS_10HashTraitsIS3_EENS6_IjEEE3addERKS3_RKj
__ZN3WTF9HashTableIPN3JSC6JSCellESt4pairIS3_jENS_18PairFirstExtractorIS5_EENS_7PtrHashIS3_EENS_14PairHashTraitsINS_10HashTraits
__ZN3JSC14JSGlobalObjectnwEmPNS_12JSGlobalDataE
__ZN3JSC14JSGlobalObject4initEPNS_8JSObjectE
__ZN3JSC14JSGlobalObject5resetENS_7JSValueE
-__ZN3JSC4Heap12heapAllocateILNS_8HeapTypeE0EEEPvm
-__ZN3JSC8jsStringEPNS_12JSGlobalDataERKNS_7UStringE
-__ZN3JSC12SmallStrings17createEmptyStringEPNS_12JSGlobalDataE
+__ZN3JSC17FunctionPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC17FunctionPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC16InternalFunctionC2EPNS_12JSGlobalDataEN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_10IdentifierE
__ZN3JSC7UStringC1EPKc
-__ZN3JSCL9createRepEPKc
-__ZN3JSC8JSObject9putDirectERKNS_10IdentifierENS_7JSValueEjbRNS_15PutPropertySlotE
-__ZN3JSC9Structure40addPropertyTransitionToExistingStructureEPS0_RKNS_10IdentifierEjRm
-__ZN3JSC9Structure3getERKNS_10IdentifierERj
-__ZN3JSC9Structure21addPropertyTransitionEPS0_RKNS_10IdentifierEjRm
-__ZN3JSC9Structure3putERKNS_10IdentifierEj
-__ZN3JSC8JSObject26putDirectWithoutTransitionERKNS_10IdentifierENS_7JSValueEj
-__ZN3JSC9Structure28addPropertyWithoutTransitionERKNS_10IdentifierEj
+__ZN3JSC12SmallStrings17createEmptyStringEPNS_12JSGlobalDataE
+__ZN3JSC8JSObject17putDirectInternalERKNS_10IdentifierENS_7JSValueEjbRNS_15PutPropertySlotEPNS_6JSCellE
+__ZN3JSC9Structure40addPropertyTransitionToExistingStructureEPS0_RKNS_10IdentifierEjPNS_6JSCellERm
+__ZN3JSC9Structure3getEPKN7WebCore10StringImplERjRPNS_6JSCellE
+__ZN3JSC9Structure21addPropertyTransitionEPS0_RKNS_10IdentifierEjPNS_6JSCellERm
+__ZN3JSC9Structure3putERKNS_10IdentifierEjPNS_6JSCellE
+__ZN3JSC9Structure28addPropertyWithoutTransitionERKNS_10IdentifierEjPNS_6JSCellE
__ZN3JSC17FunctionPrototype21addFunctionPropertiesEPNS_9ExecStateEPNS_9StructureEPPNS_10JSFunctionES7_
-__ZN3JSC10JSFunctionC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectESA_RK
-__ZN3JSC12JSGlobalData17createNativeThunkEv
-__ZN3JSC16FunctionBodyNode17createNativeThunkEPNS_12JSGlobalDataE
-__ZN3WTF6VectorINS_6RefPtrIN3JSC21ParserArenaRefCountedEEELm0EE15reserveCapacityEm
-__ZN3JSC11ParserArena5resetEv
+__ZN3JSC10JSFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjec
+__ZN3JSC10JSFunctionC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjec
__ZN3JSC8JSObject34putDirectFunctionWithoutTransitionEPNS_9ExecStateEPNS_16InternalFunctionEj
-__ZN3JSC15ObjectPrototypeC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC16InternalFunction4nameEPNS_9ExecStateE
+__ZN3JSC15ObjectPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC15ObjectPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
__ZN3JSC9Structure26rehashPropertyMapHashTableEj
-__ZN3JSC15StringPrototypeC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEE
-__ZN3JSC16BooleanPrototypeC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC15NumberPrototypeC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC15RegExpPrototypeC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC14ErrorPrototypeC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPS5_
-__ZN3JSC20NativeErrorPrototypeC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEERKNS_7UStringES9_
-__ZN3JSC17ObjectConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS_15ObjectPrototypeE
-__ZN3JSC19FunctionConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS_17FunctionPrototypeE
+__ZN3JSC8JSObject17createInheritorIDEv
+__ZN3JSC14ArrayPrototypeC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC7JSArrayC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC15StringPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC15StringPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC12StringObjectC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC16BooleanPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC16BooleanPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC13BooleanObjectC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC15NumberPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC15NumberPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC12NumberObjectC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC13DatePrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC12DateInstanceC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC15RegExpPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC15RegExpPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC14ErrorPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC14ErrorPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_
+__ZN3JSC13ErrorInstanceC2EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC20NativeErrorPrototypeC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7UStringES9_
+__ZN3JSC20NativeErrorPrototypeC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7UStringES9_
+__ZN3JSC17ObjectConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15ObjectPrototypeEPS5_
+__ZN3JSC17ObjectConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15ObjectPrototypeEPS5_
+__ZN3JSC10Identifier3addEPNS_9ExecStateEPKc
+__ZN3JSC19FunctionConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_17FunctionPrototypeE
+__ZN3JSC19FunctionConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_17FunctionPrototypeE
__ZNK3JSC16InternalFunction9classInfoEv
-__ZN3JSC16ArrayConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS_14ArrayPrototypeE
+__ZN3JSC16ArrayConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_14ArrayPrototypeEPS5_
+__ZN3JSC16ArrayConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_14ArrayPrototypeEPS5_
__ZNK3JSC14ArrayPrototype9classInfoEv
-__ZN3JSC17StringConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPS5_PNS_15StringPrototypeE
+__ZN3JSC17StringConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_PNS_15StringPrototypeE
+__ZN3JSC17StringConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_PNS_15StringPrototypeE
__ZNK3JSC15StringPrototype9classInfoEv
-__ZN3JSC18BooleanConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS_16BooleanPrototypeE
+__ZN3JSC9JITThunks16specializedThunkEPNS_12JSGlobalDataEPFN3WTF10PassRefPtrINS_16NativeExecutableEEES2_PNS_14ExecutablePoolEE
+__ZN3JSC26fromCharCodeThunkGeneratorEPNS_12JSGlobalDataEPNS_14ExecutablePoolE
+__ZN3JSC22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpList6appendENS2_4JumpE
+__ZN3JSCL12charToStringERNS_19SpecializedThunkJITEPNS_12JSGlobalDataENS_12X86Registers10RegisterIDES5_S5_
+__ZN3JSC19SpecializedThunkJIT8finalizeEv
+__ZN3JSC10JSFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPNS_16NativeExecutableEPFNS_7
+__ZN3JSC10JSFunctionC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPNS_16NativeExecutableEPFNS_7
+__ZN3JSC8JSObject23allocatePropertyStorageEmm
+__ZN3JSC18BooleanConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_16BooleanPrototypeE
+__ZN3JSC18BooleanConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_16BooleanPrototypeE
__ZNK3JSC13BooleanObject9classInfoEv
-__ZN3JSC17NumberConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS_15NumberPrototypeE
-__ZN3JSC15DateConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPS5_PNS_13DatePrototypeE
+__ZN3JSC17NumberConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15NumberPrototypeE
+__ZN3JSC17NumberConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15NumberPrototypeE
+__ZN3JSC15DateConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_PNS_13DatePrototypeE
+__ZN3JSC15DateConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPS5_PNS_13DatePrototypeE
__ZNK3JSC13DatePrototype9classInfoEv
-__ZN3JSC17RegExpConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS_15RegExpPrototypeE
-__ZN3JSC16ErrorConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS_14ErrorPrototypeE
+__ZN3JSC17RegExpConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15RegExpPrototypeE
+__ZN3JSC17RegExpConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_15RegExpPrototypeE
+__ZN3JSC16ErrorConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_14ErrorPrototypeE
+__ZN3JSC16ErrorConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_14ErrorPrototypeE
__ZNK3JSC13ErrorInstance9classInfoEv
-__ZN3JSC22NativeErrorConstructorC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS_20NativeErrorPrototypeE
-__ZN3JSC10Identifier11addSlowCaseEPNS_12JSGlobalDataEPNS_7UString3RepE
-__ZN3WTF7HashSetIPN3JSC7UString3RepENS_7StrHashIS4_EENS_10HashTraitsIS4_EEE3addERKS4_
-__ZN3JSC10MathObjectC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEE
+__ZN3JSC22NativeErrorConstructorC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_20NativeErrorPrototypeE
+__ZN3JSC22NativeErrorConstructorC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_20NativeErrorPrototypeE
+__ZNK3JSC6JSCell9getStringEPNS_9ExecStateE
+__ZN3JSC10Identifier11addSlowCaseEPNS_9ExecStateEPN7WebCore10StringImplE
+__ZN3WTF7HashSetIPN7WebCore10StringImplENS1_10StringHashENS_10HashTraitsIS3_EEE3addERKS3_
+__ZN3JSC10MathObjectC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC10MathObjectC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
__ZN3JSC12SmallStrings24singleCharacterStringRepEh
-__ZN3WTF7HashMapINS_6RefPtrIN3JSC7UString3RepEEENS2_16SymbolTableEntryENS2_17IdentifierRepHashENS_10HashTraitsIS5_EENS2_26Symbo
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC7UString3RepEEESt4pairIS5_NS2_16SymbolTableEntryEENS_18PairFirstExtractorIS8_EENS2_17Identif
-__ZN3JSC17PrototypeFunctionC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjec
+__ZN3JSC19SmallStringsStorageC2Ev
+__ZN3JSC18GlobalEvalFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_
+__ZN3JSC17PrototypeFunctionC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8
__ZN3JSC9Structure25changePrototypeTransitionEPS0_NS_7JSValueE
__ZN3JSC9Structure17copyPropertyTableEv
+__ZN3WTF10RefCountedIN3JSC9StructureEE5derefEv
__ZN3JSC14JSGlobalObject10globalExecEv
-__ZN3JSC10Identifier3addEPNS_9ExecStateEPKc
__ZN3JSC4Heap9unprotectENS_7JSValueE
-__ZN3JSC6JSCellnwEmPNS_9ExecStateE
__ZN3JSC14TimeoutChecker5resetEv
__ZN3JSC8evaluateEPNS_9ExecStateERNS_10ScopeChainERKNS_10SourceCodeENS_7JSValueE
-__ZN3JSC6JSLock4lockEb
-__ZN3JSC6Parser5parseINS_11ProgramNodeEEEN3WTF10PassRefPtrIT_EEPNS_9ExecStateEPNS_8DebuggerERKNS_10SourceCodeEPiPNS_7UStringE
+__ZN3JSC6JSLockC1EPNS_9ExecStateE
+__ZN3JSC6JSLock4lockENS_14JSLockBehaviorE
+__ZN3JSC17ProgramExecutable7compileEPNS_9ExecStateEPNS_14ScopeChainNodeE
+__ZN3JSC6Parser5parseINS_11ProgramNodeEEEN3WTF10PassRefPtrIT_EEPNS_12JSGlobalDataEPNS_8DebuggerEPNS_9ExecStateERKNS_10SourceCod
__ZN3JSC6Parser5parseEPNS_12JSGlobalDataEPiPNS_7UStringE
-__ZN3JSC7UStringaSEPKc
+__ZN3JSC5Lexer7setCodeERKNS_10SourceCodeERNS_11ParserArenaE
__Z10jscyyparsePv
__ZN3JSC5Lexer3lexEPvS1_
__ZN3JSC10Identifier3addEPNS_12JSGlobalDataEPKti
-__ZN3WTF7HashSetIPN3JSC7UString3RepENS_7StrHashIS4_EENS_10HashTraitsIS4_EEE3addINS1_11UCharBufferENS1_21UCharBufferTranslatorEE
-__ZN3WTF15SegmentedVectorINS_10IdentifierELm64EE6appendIS1_EEvRKT_
+__ZN3WTF7HashSetIPN7WebCore10StringImplENS1_10StringHashENS_10HashTraitsIS3_EEE3addIN3JSC11UCharBufferENS9_31IdentifierUCharBuf
__ZNK3JSC9HashTable11createTableEPNS_12JSGlobalDataE
-__ZN3JSC20ParserArenaDeletablenwEmPNS_12JSGlobalDataE
-__ZN3WTF6VectorIPN3JSC20ParserArenaDeletableELm0EE15reserveCapacityEm
-__ZN3JSC5Lexer10sourceCodeEiii
-__ZN3JSC16FunctionBodyNode13finishParsingERKNS_10SourceCodeEPNS_13ParameterNodeE
-__ZN3WTF6VectorIN3JSC10IdentifierELm0EE14expandCapacityEm
-__ZN3WTF6VectorIPN3JSC12FuncDeclNodeELm0EE14expandCapacityEm
+__ZN3JSC11ParserArena20allocateFreeablePoolEv
+__ZL20makeFunctionCallNodePN3JSC12JSGlobalDataENS_8NodeInfoIPNS_14ExpressionNodeEEENS2_IPNS_13ArgumentsNodeEEEiii
+__ZNK3JSC15DotAccessorNode10isLocationEv
+__ZNK3JSC14ExpressionNode13isResolveNodeEv
+__ZNK3JSC14ExpressionNode21isBracketAccessorNodeEv
+__ZL14makeAssignNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeENS_8OperatorES3_bbiii
+__ZNK3JSC11ResolveNode10isLocationEv
+__ZNK3JSC11ResolveNode13isResolveNodeEv
+__ZN3JSC13StatementNode6setLocEii
+__ZN3WTF6VectorIPN3JSC20ParserArenaDeletableELm0EE14expandCapacityEm
__ZN3JSC14SourceElements6appendEPNS_13StatementNodeE
__ZNK3JSC13StatementNode16isEmptyStatementEv
__ZN3WTF6VectorIPN3JSC13StatementNodeELm0EE14expandCapacityEm
-__ZL20makeFunctionCallNodePvN3JSC8NodeInfoIPNS0_14ExpressionNodeEEENS1_IPNS0_13ArgumentsNodeEEEiii
-__ZNK3JSC11ResolveNode10isLocationEv
-__ZNK3JSC11ResolveNode13isResolveNodeEv
-__ZN3JSC5Lexer7record8Ei
-__ZN3JSC5Lexer10scanRegExpEv
-__ZN3JSC7UStringC2ERKN3WTF6VectorItLm0EEE
-__ZN3JSC7UString3Rep7destroyEv
+__ZN3JSC6Parser16didFinishParsingEPNS_14SourceElementsEPNS_15ParserArenaDataIN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEEEP
__ZN3JSC5Lexer5clearEv
-__ZN3JSC10Identifier6removeEPNS_7UString3RepE
-__ZN3WTF6VectorIN3JSC10IdentifierELm64EE14shrinkCapacityEm
-__ZN3JSC9ScopeNodeC2EPNS_12JSGlobalDataERKNS_10SourceCodeEPNS_14SourceElementsEPN3WTF6VectorISt4pairINS_10IdentifierEjELm0EEEPN
-__ZN3WTF6VectorIPN3JSC13StatementNodeELm0EE14shrinkCapacityEm
+__ZN3JSC11ProgramNode6createEPNS_12JSGlobalDataEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEPNS6_IPNS_
+__ZN3JSC9ScopeNodeC2EPNS_12JSGlobalDataERKNS_10SourceCodeEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEE
+__ZN3JSC11ParserArena14derefWithArenaEN3WTF10PassRefPtrINS_21ParserArenaRefCountedEEE
__ZN3JSC11ParserArena10removeLastEv
-__ZNK3JSC8JSObject8toObjectEPNS_9ExecStateE
-__ZN3JSC11Interpreter7executeEPNS_11ProgramNodeEPNS_9ExecStateEPNS_14ScopeChainNodeEPNS_8JSObjectEPNS_7JSValueE
-__ZN3JSC11ProgramNode16generateBytecodeEPNS_14ScopeChainNodeE
-__ZN3JSC9CodeBlockC2EPNS_9ScopeNodeENS_8CodeTypeEN3WTF10PassRefPtrINS_14SourceProviderEEEj
-__ZN3WTF7HashSetIPN3JSC16ProgramCodeBlockENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEE3addERKS3_
-__ZN3WTF9HashTableIPN3JSC16ProgramCodeBlockES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
-__ZN3JSC17BytecodeGeneratorC2EPNS_11ProgramNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_7UString3RepEEEN
-__ZN3WTF6VectorIN3JSC11InstructionELm0EE14expandCapacityEm
-__ZN3JSC9Structure22toDictionaryTransitionEPS0_
-__ZN3JSC8JSObject12removeDirectERKNS_10IdentifierE
-__ZN3JSC9Structure31removePropertyWithoutTransitionERKNS_10IdentifierE
-__ZN3JSC9Structure6removeERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator12addGlobalVarERKNS_10IdentifierEbRPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator15emitNewFunctionEPNS_10RegisterIDEPNS_12FuncDeclNodeE
-__ZN3JSC9CodeBlock25createRareDataIfNecessaryEv
-__ZN3JSC17BytecodeGenerator11newRegisterEv
-__ZN3JSC9Structure24fromDictionaryTransitionEPS0_
+__ZN3JSC11ParserArena5resetEv
+__ZN3WTF6VectorIN3JSC10IdentifierELm64EE14shrinkCapacityEm
+__ZN3JSC9CodeBlockC2EPNS_16ScriptExecutableENS_8CodeTypeEN3WTF10PassRefPtrINS_14SourceProviderEEEjPNS4_7HashMapINS4_6RefPtrIN7W
+__ZN3WTF7HashSetIPN3JSC15GlobalCodeBlockENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEE3addERKS3_
+__ZN3WTF9HashTableIPN3JSC15GlobalCodeBlockES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
+__ZN3JSC17BytecodeGeneratorC1EPNS_11ProgramNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrIN7WebCore10StringIm
+__ZN3JSC17BytecodeGeneratorC2EPNS_11ProgramNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrIN7WebCore10StringIm
+__ZN3WTF6VectorIN3JSC11InstructionELm0EE15reserveCapacityEm
+__ZN3JSC9Structure31toCacheableDictionaryTransitionEPS0_
+__ZN3JSC9Structure22toDictionaryTransitionEPS0_NS0_14DictionaryKindE
+__ZN3JSC9Structure26flattenDictionaryStructureEPNS_8JSObjectE
__ZN3JSC17BytecodeGenerator8generateEv
__ZN3JSC11ProgramNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC17BytecodeGenerator13emitDebugHookENS_11DebugHookIDEii
-__ZN3JSC17BytecodeGenerator11addConstantENS_7JSValueE
-__ZN3WTF9HashTableIPvSt4pairIS1_jENS_18PairFirstExtractorIS3_EENS_7PtrHashIS1_EENS_14PairHashTraitsIN3JSC17JSValueHashTraitsENS
-__ZN3WTF6VectorIN3JSC8RegisterELm0EE14expandCapacityEm
-__ZN3JSC17BytecodeGenerator8emitMoveEPNS_10RegisterIDES2_
-__ZN3JSC17BytecodeGenerator8emitNodeEPNS_10RegisterIDEPNS_4NodeE
+__ZN3JSC17BytecodeGenerator12newTemporaryEv
+__ZN3JSC17BytecodeGenerator11newRegisterEv
+__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDENS_7JSValueE
+__ZN3JSC17BytecodeGenerator16addConstantValueENS_7JSValueE
+__ZN3WTF9HashTableIPvSt4pairIS1_jENS_18PairFirstExtractorIS3_EENS_7PtrHashIS1_EENS_14PairHashTraitsIN3JSC24EncodedJSValueHashTr
+__ZN3WTF6VectorIN3JSC8RegisterELm0EE15reserveCapacityEm
__ZN3WTF6VectorIN3JSC8LineInfoELm0EE14expandCapacityEm
-__ZN3JSC12FuncDeclNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC17ExprStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC23FunctionCallResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17AssignResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC17BytecodeGenerator11registerForERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator8emitCallENS_8OpcodeIDEPNS_10RegisterIDES3_S3_PNS_13ArgumentsNodeEjjj
-__ZN3JSC16ArgumentListNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC12FuncExprNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator25emitNewFunctionExpressionEPNS_10RegisterIDEPNS_12FuncExprNodeE
-__ZN3WTF6VectorIN3JSC19ExpressionRangeInfoELm0EE14expandCapacityEm
-__ZN3WTF6VectorIN3JSC12CallLinkInfoELm0EE14expandCapacityEm
+__ZN3JSC17BytecodeGenerator18findScopedPropertyERKNS_10IdentifierERiRmbRbRPNS_8JSObjectE
+__ZN3JSC17BytecodeGenerator15emitResolveBaseEPNS_10RegisterIDERKNS_10IdentifierE
+__ZN3JSC19FunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC11NewExprNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC11ResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3WTF6VectorIN3JSC19ExpressionRangeInfoELm0EE14expandCapacityEm
+__ZN3JSC17BytecodeGenerator11emitResolveEPNS_10RegisterIDERKNS_10IdentifierE
+__ZN3WTF6VectorIN3JSC17GlobalResolveInfoELm0EE14expandCapacityEm
+__ZN3JSC17BytecodeGenerator11addConstantERKNS_10IdentifierE
+__ZN3WTF9HashTableINS_6RefPtrIN7WebCore10StringImplEEESt4pairIS4_iENS_18PairFirstExtractorIS6_EEN3JSC17IdentifierRepHashENS_14P
+__ZN3WTF6VectorIN3JSC10IdentifierELm0EE14expandCapacityEmPKS2_
+__ZN3WTF6VectorIN3JSC10IdentifierELm0EE15reserveCapacityEm
+__ZN3JSC17BytecodeGenerator13emitConstructEPNS_10RegisterIDES2_PNS_13ArgumentsNodeEjjj
+__ZN3WTF6VectorIN3JSC20GetByIdExceptionInfoELm0EE14expandCapacityEm
+__ZN3JSC17BytecodeGenerator11emitGetByIdEPNS_10RegisterIDES2_RKNS_10IdentifierE
+__ZN3WTF6VectorIN3JSC17StructureStubInfoELm0EE14expandCapacityEm
+__ZN3WTF6VectorIN3JSC12CallLinkInfoELm0EE15reserveCapacityEm
+__ZN3JSC17BytecodeGenerator15emitMethodCheckEv
+__ZN3JSC17BytecodeGenerator8emitCallEPNS_10RegisterIDES2_S2_PNS_13ArgumentsNodeEjjj
+__ZN3JSC17BytecodeGenerator8emitCallENS_8OpcodeIDEPNS_10RegisterIDES3_S3_PNS_13ArgumentsNodeEjjj
+__ZN3JSC17BytecodeGenerator11emitPutByIdEPNS_10RegisterIDERKNS_10IdentifierES2_
+__ZN3JSC17BytecodeGenerator16emitUnaryNoDstOpENS_8OpcodeIDEPNS_10RegisterIDE
__ZN3JSC12JSGlobalData22numericCompareFunctionEPNS_9ExecStateE
__ZNK3JSC21UStringSourceProvider6lengthEv
+__ZN3JSC18FunctionExecutable14fromGlobalCodeERKNS_10IdentifierEPNS_9ExecStateEPNS_8DebuggerERKNS_10SourceCodeEPiPNS_7UStringE
__ZNK3JSC21UStringSourceProvider4dataEv
-__ZN3JSC19extractFunctionBodyEPNS_11ProgramNodeE
+__ZN3JSC16FunctionBodyNode6createEPNS_12JSGlobalDataE
+__ZN3JSC5Lexer10sourceCodeEiii
+__ZN3JSC16FunctionBodyNode13finishParsingERKNS_10SourceCodeEPNS_13ParameterNodeERKNS_10IdentifierE
+__ZN3WTF6VectorIN3JSC10IdentifierELm0EE14expandCapacityEm
+__ZN3JSC16FunctionBodyNode13finishParsingEN3WTF10PassRefPtrINS_18FunctionParametersEEERKNS_10IdentifierE
+__ZNK3JSC9ScopeNode15singleStatementEv
__ZNK3JSC17ExprStatementNode15isExprStatementEv
__ZNK3JSC12FuncExprNode14isFuncExprNodeEv
-__ZN3JSC16FunctionBodyNode16generateBytecodeEPNS_14ScopeChainNodeE
-__ZN3JSC6Parser14reparseInPlaceEPNS_12JSGlobalDataEPNS_16FunctionBodyNodeE
-__ZL11makeSubNodePvPN3JSC14ExpressionNodeES2_b
+__ZThn16_N3JSC11ProgramNodeD0Ev
+__ZN3JSC11ProgramNodeD0Ev
+__ZN3JSC11ParserArenaD1Ev
+__ZN3JSC14SourceElementsD1Ev
+__ZThn16_N3JSC16FunctionBodyNodeD0Ev
+__ZN3JSC16FunctionBodyNodeD0Ev
+__ZN3JSC18FunctionExecutable7compileEPNS_9ExecStateEPNS_14ScopeChainNodeE
+__ZN3JSC6Parser5parseINS_16FunctionBodyNodeEEEN3WTF10PassRefPtrIT_EEPNS_12JSGlobalDataEPNS_8DebuggerEPNS_9ExecStateERKNS_10Sour
+__ZL11makeSubNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
__ZN3JSC14ExpressionNode14stripUnaryPlusEv
__ZNK3JSC14ExpressionNode8isNumberEv
-__ZN3JSC9CodeBlockC1EPNS_9ScopeNodeENS_8CodeTypeEN3WTF10PassRefPtrINS_14SourceProviderEEEj
-__ZN3JSC17BytecodeGeneratorC2EPNS_16FunctionBodyNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_7UString3Re
+__ZN3JSC16FunctionBodyNode6createEPNS_12JSGlobalDataEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEPNS6_
+__ZN3JSC17BytecodeGeneratorC1EPNS_16FunctionBodyNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrIN7WebCore10Str
+__ZN3JSC17BytecodeGeneratorC2EPNS_16FunctionBodyNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrIN7WebCore10Str
__ZN3JSC17BytecodeGenerator12addParameterERKNS_10IdentifierE
__ZN3JSC16FunctionBodyNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC9BlockNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC10ReturnNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC12BinaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZNK3JSC11ResolveNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC17BytecodeGenerator12newTemporaryEv
+__ZN3JSC17BytecodeGenerator7isLocalERKNS_10IdentifierE
__ZN3JSC17BytecodeGenerator12emitBinaryOpENS_8OpcodeIDEPNS_10RegisterIDES3_S3_NS_12OperandTypesE
__ZN3JSC17BytecodeGenerator10emitReturnEPNS_10RegisterIDE
__ZNK3JSC9BlockNode7isBlockEv
__ZNK3JSC10ReturnNode12isReturnNodeEv
__ZN3JSC9CodeBlock11shrinkToFitEv
__ZN3WTF6VectorIN3JSC11InstructionELm0EE14shrinkCapacityEm
-__ZN3WTF6VectorIN3JSC17StructureStubInfoELm0EE14shrinkCapacityEm
__ZN3WTF6VectorIPN3JSC12CallLinkInfoELm0EE14shrinkCapacityEm
__ZN3WTF6VectorIN3JSC10IdentifierELm0EE14shrinkCapacityEm
-__ZN3JSC11ParserArenaD1Ev
-__ZN3JSC11ResolveNodeD0Ev
-__ZN3JSC7SubNodeD0Ev
-__ZN3JSC10ReturnNodeD0Ev
-__ZN3JSC14SourceElementsD0Ev
-__ZN3JSC9BlockNodeD0Ev
__ZN3JSC17BytecodeGeneratorD2Ev
__ZN3WTF6VectorIN3JSC11InstructionELm0EEaSERKS3_
-__ZThn16_N3JSC11ProgramNodeD0Ev
-__ZN3JSC11ProgramNodeD0Ev
-__ZN3JSC13ParameterNodeD0Ev
-__ZN3JSC17ExprStatementNodeD0Ev
-__ZThn16_N3JSC12FuncExprNodeD0Ev
-__ZN3JSC12FuncExprNodeD0Ev
-__ZThn16_N3JSC16FunctionBodyNodeD0Ev
-__ZN3JSC16FunctionBodyNodeD0Ev
-__ZN3JSC9CodeBlockD1Ev
+__ZN3JSC18FunctionExecutableD0Ev
+__ZN3JSC17FunctionCodeBlockD0Ev
__ZN3JSC9CodeBlockD2Ev
__ZN3JSC21UStringSourceProviderD0Ev
-__ZN3WTF6VectorIN3JSC19ExpressionRangeInfoELm0EE14shrinkCapacityEm
-__ZN3WTF6VectorIN3JSC8LineInfoELm0EE14shrinkCapacityEm
-__ZN3WTF6VectorINS_6RefPtrIN3JSC12FuncDeclNodeEEELm0EE14shrinkCapacityEm
-__ZN3WTF6VectorIN3JSC15SimpleJumpTableELm0EE14shrinkCapacityEm
-__ZN3WTF6VectorIN3JSC15StringJumpTableELm0EE14shrinkCapacityEm
-__ZN3JSC15ParserArenaDataIN3WTF6VectorIPNS_12FuncDeclNodeELm0EEEED0Ev
-__ZN3JSC16ArgumentListNodeD0Ev
-__ZN3JSC13ArgumentsNodeD0Ev
-__ZN3JSC23FunctionCallResolveNodeD0Ev
+__ZNK3JSC8JSObject8toObjectEPNS_9ExecStateE
+__ZN3JSC11Interpreter7executeEPNS_17ProgramExecutableEPNS_9ExecStateEPNS_14ScopeChainNodeEPNS_8JSObjectEPNS_7JSValueE
+__ZN3JSC12JSGlobalData14resetDateCacheEv
+__ZN3JSC12RegisterFile12globalObjectEv
__ZN3JSC14JSGlobalObject13copyGlobalsToERNS_12RegisterFileE
+__ZN3JSC12RegisterFile15setGlobalObjectEPNS_14JSGlobalObjectE
+__ZN3JSC17ProgramExecutable15generateJITCodeEPNS_9ExecStateEPNS_14ScopeChainNodeE
__ZN3JSC3JIT14privateCompileEv
__ZN3JSC3JIT22privateCompileMainPassEv
__ZN3JSC3JIT13emit_op_enterEPNS_11InstructionE
-__ZN3JSC3JIT16emit_op_new_funcEPNS_11InstructionE
-__ZN3JSC20MacroAssemblerX86_648storePtrENS_22AbstractMacroAssemblerINS_12X86AssemblerEE6ImmPtrENS3_15ImplicitAddressE
-__ZN3JSC11JITStubCall4callEj
-__ZN3WTF6VectorIN3JSC10CallRecordELm0EE14expandCapacityEm
__ZN3JSC3JIT11emit_op_movEPNS_11InstructionE
-__ZN3JSC3JIT20emit_op_new_func_expEPNS_11InstructionE
-__ZN3JSC3JIT12emit_op_callEPNS_11InstructionE
-__ZN3JSC3JIT13compileOpCallENS_8OpcodeIDEPNS_11InstructionEj
+__ZN3JSC3JIT22emit_op_resolve_globalEPNS_11InstructionEb
+__ZN3JSC23MacroAssemblerX86Common4moveENS_12X86Registers10RegisterIDES2_
+__ZN3WTF6VectorIN3JSC13SlowCaseEntryELm0EE14expandCapacityEmPKS2_
__ZN3WTF6VectorIN3JSC13SlowCaseEntryELm0EE14expandCapacityEm
-__ZN3JSC3JIT11emit_op_endEPNS_11InstructionE
+__ZN3JSC3JIT17emit_op_get_by_idEPNS_11InstructionE
+__ZN3JSC3JIT21compileGetByIdHotPathEiiPNS_10IdentifierEj
+__ZN3JSC3JIT17emit_op_constructEPNS_11InstructionE
+__ZN3JSC3JIT13compileOpCallENS_8OpcodeIDEPNS_11InstructionEj
__ZN3JSC11JITStubCall4callEv
+__ZN3WTF6VectorIN3JSC10CallRecordELm0EE14expandCapacityEm
+__ZN3JSC3JIT24emit_op_construct_verifyEPNS_11InstructionE
+__ZN3JSC3JIT20emit_op_method_checkEPNS_11InstructionE
+__ZN3WTF6VectorIN3JSC25MethodCallCompilationInfoELm0EE14expandCapacityEm
+__ZN3JSC3JIT12emit_op_callEPNS_11InstructionE
+__ZN3JSC3JIT17emit_op_put_by_idEPNS_11InstructionE
+__ZN3JSC3JIT11emit_op_endEPNS_11InstructionE
__ZN3WTF6VectorIN3JSC9JumpTableELm0EE14shrinkCapacityEm
__ZN3JSC3JIT23privateCompileSlowCasesEv
-__ZN3JSC3JIT16emitSlow_op_callEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT26emitSlow_op_resolve_globalEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC20MacroAssemblerX86_648storePtrENS_22AbstractMacroAssemblerINS_12X86AssemblerEE6ImmPtrENS3_15ImplicitAddressE
+__ZN3JSC11JITStubCall4callEj
+__ZN3JSC3JIT21emitSlow_op_get_by_idEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT22compileGetByIdSlowCaseEiiPNS_10IdentifierERPNS_13SlowCaseEntryEb
+__ZN3JSC3JIT21emitSlow_op_constructEPNS_11InstructionERPNS_13SlowCaseEntryE
__ZN3JSC3JIT21compileOpCallSlowCaseEPNS_11InstructionERPNS_13SlowCaseEntryEjNS_8OpcodeIDE
+__ZN3JSC3JIT27compileOpConstructSetupArgsEPNS_11InstructionE
+__ZN3JSC3JIT28emitSlow_op_construct_verifyEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT24emitSlow_op_method_checkEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT16emitSlow_op_callEPNS_11InstructionERPNS_13SlowCaseEntryE
__ZN3JSC3JIT22compileOpCallSetupArgsEPNS_11InstructionE
-__ZN3JSC9CodeBlock10setJITCodeERNS_10JITCodeRefE
+__ZN3JSC3JIT21emitSlow_op_put_by_idEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3WTF6VectorIN3JSC18MethodCallLinkInfoELm0EE14expandCapacityEm
__ZN3JSC17BytecodeGenerator18dumpsGeneratedCodeEv
-__ZN3WTF10RefCountedIN3JSC14ExecutablePoolEE5derefEv
_ctiTrampoline
-__ZN3JSC8JITStubs15cti_op_new_funcEPPv
-__ZN3JSC12FuncDeclNode12makeFunctionEPNS_9ExecStateEPNS_14ScopeChainNodeE
-__ZN3JSC8JITStubs19cti_op_new_func_expEPPv
-__ZN3JSC12FuncExprNode12makeFunctionEPNS_9ExecStateEPNS_14ScopeChainNodeE
-__ZN3JSC8JITStubs22cti_op_call_JSFunctionEPPv
-__ZN3JSC16FunctionBodyNode15generateJITCodeEPNS_14ScopeChainNodeE
-__ZN3JSC10IfElseNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator8newLabelEv
+_cti_op_resolve_global
+_cti_op_get_by_id
+__ZNK3JSC7JSValue3getEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+_cti_op_construct_NotJSConstruct
+__ZN3JSC15DateConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSCL28constructWithDateConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
+__ZN3JSC13constructDateEPNS_9ExecStateERKNS_7ArgListE
+__ZN3JSC12DateInstanceC1EPNS_9ExecStateEd
+__ZN3JSC12DateInstanceC2EPNS_9ExecStateEd
+__ZN3WTF8timeClipEd
+_cti_op_get_by_id_method_check
+__ZN3JSC13DatePrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC23setUpStaticFunctionSlotEPNS_9ExecStateEPKNS_9HashEntryEPNS_8JSObjectERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC9Structure13hasTransitionEPN7WebCore10StringImplEj
+_cti_vm_lazyLinkCall
+__ZN3JSCL20dateProtoFuncGetTimeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC12DateInstance9classInfoEv
+_cti_op_put_by_id
+__ZN3JSC14JSGlobalObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSC8JSObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSC17ProgramExecutableD0Ev
+__ZN3JSC16ProgramCodeBlockD0Ev
+__ZN3WTF9HashTableIPN3JSC15GlobalCodeBlockES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E4findIS3_NS
+__ZN3JSC17StructureStubInfo5derefEv
+__ZN7WebCore6String8truncateEj
+__ZN3JSC5Lexer10skipRegExpEv
+__ZNK3JSC14ExpressionNode10isLocationEv
+__ZN3WTF6VectorIPN3JSC16FunctionBodyNodeELm0EE14expandCapacityEm
+__ZNK3JSC18EmptyStatementNode16isEmptyStatementEv
+__ZN3WTF6VectorISt4pairIPKN3JSC10IdentifierEjELm0EE14expandCapacityEm
+__ZN3JSC8JSObject12removeDirectERKNS_10IdentifierE
+__ZN3JSC9Structure24removePropertyTransitionEPS0_RKNS_10IdentifierERm
+__ZN3JSC9Structure6removeERKNS_10IdentifierE
+__ZN3JSC17BytecodeGenerator12addGlobalVarERKNS_10IdentifierEbRPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator15emitNewFunctionEPNS_10RegisterIDEPNS_16FunctionBodyNodeE
+__ZN3WTF6VectorINS_6RefPtrIN3JSC18FunctionExecutableEEELm0EE14expandCapacityEm
+__ZN3JSC9Structure31removePropertyWithoutTransitionERKNS_10IdentifierE
+__ZNK3JSC8JSObject11hasPropertyEPNS_9ExecStateERKNS_10IdentifierE
+__ZNK3JSC7UString14toStrictUInt32EPb
+__ZN7WebCore12AtomicString4findEPKtjj
+__ZN3WTF9HashTableINS_6RefPtrIN7WebCore10StringImplEEESt4pairIS4_N3JSC16SymbolTableEntryEENS_18PairFirstExtractorIS8_EENS6_17Id
+__ZN3JSCL30comparePropertyMapEntryIndicesEPKvS1_
+__ZN3JSC13AssignDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZNK3JSC14ExpressionNode6isPureERNS_17BytecodeGeneratorE
+__ZN3JSC17BytecodeGenerator8emitMoveEPNS_10RegisterIDES2_
+__ZN3JSC17ObjectLiteralNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator13emitNewObjectEPNS_10RegisterIDE
+__ZN3JSC21FunctionCallValueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC12FuncExprNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator25emitNewFunctionExpressionEPNS_10RegisterIDEPNS_12FuncExprNodeE
+__ZN3JSC12FuncDeclNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC15DotAccessorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator11emitResolveEPNS_10RegisterIDERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator18findScopedPropertyERKNS_10IdentifierERiRmbRPNS_8JSObjectE
+__ZN3JSC16VarStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator15isLocalConstantERKNS_10IdentifierE
+__ZN3JSC16ArgumentListNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC11BooleanNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDEb
+__ZN3JSC9ArrayNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator12emitNewArrayEPNS_10RegisterIDEPNS_11ElementNodeE
+__ZN3JSC23FunctionCallResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator19emitResolveWithBaseEPNS_10RegisterIDES2_RKNS_10IdentifierE
+__ZN3JSC10NumberNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDEd
+__ZN3WTF9HashTableIdSt4pairIdN3JSC7JSValueEENS_18PairFirstExtractorIS4_EENS_9FloatHashIdEENS_14PairHashTraitsINS_10HashTraitsId
+__ZN3JSC10StringNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDERKNS_10IdentifierE
+__ZN3WTF9HashTableIPN7WebCore10StringImplESt4pairIS3_PN3JSC8JSStringEENS_18PairFirstExtractorIS8_EENS5_17IdentifierRepHashENS_1
+__ZN3JSC12SmallStrings27createSingleCharacterStringEPNS_12JSGlobalDataEh
+__ZN3JSC8NullNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC15ParserArenaDataIN3WTF6VectorIPNS_16FunctionBodyNodeELm0EEEED1Ev
+__ZN3JSC15ParserArenaDataIN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEED1Ev
+__ZN3JSC3JIT16emit_op_new_funcEPNS_11InstructionE
+__ZN3JSC3JIT18emit_op_new_objectEPNS_11InstructionE
+__ZN3JSC3JIT20emit_op_new_func_expEPNS_11InstructionE
+__ZN3JSC3JIT17emit_op_new_arrayEPNS_11InstructionE
+__ZN3JSC3JIT25emit_op_resolve_with_baseEPNS_11InstructionE
+_cti_op_new_func
+__ZN3JSC10JSFunctionC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_18FunctionExecutableEEEPNS_14ScopeChainNodeE
+_cti_op_new_object
+_cti_op_new_func_exp
+_cti_op_call_JSFunction
+__ZN3JSC18FunctionExecutable15generateJITCodeEPNS_9ExecStateEPNS_14ScopeChainNodeE
+__ZN3WTF7HashSetINS_6RefPtrIN7WebCore10StringImplEEEN3JSC17IdentifierRepHashENS_10HashTraitsIS4_EEE3addERKS4_
+__ZN3WTF9HashTableINS_6RefPtrIN7WebCore10StringImplEEES4_NS_17IdentityExtractorIS4_EEN3JSC17IdentifierRepHashENS_10HashTraitsIS
+__ZN3JSC17BytecodeGenerator6addVarERKNS_10IdentifierEbRPNS_10RegisterIDE
__ZNK3JSC16JSVariableObject16isVariableObjectEv
__ZN3JSC17BytecodeGenerator16emitGetScopedVarEPNS_10RegisterIDEmiNS_7JSValueE
-__ZN3JSC17BytecodeGenerator11emitGetByIdEPNS_10RegisterIDES2_RKNS_10IdentifierE
-__ZN3WTF6VectorIN3JSC17StructureStubInfoELm0EE14expandCapacityEm
-__ZN3JSC17BytecodeGenerator11addConstantERKNS_10IdentifierE
-__ZN3WTF7HashMapINS_6RefPtrIN3JSC7UString3RepEEEiNS2_17IdentifierRepHashENS_10HashTraitsIS5_EENS2_17BytecodeGenerator28Identifi
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC7UString3RepEEESt4pairIS5_iENS_18PairFirstExtractorIS7_EENS2_17IdentifierRepHashENS_14PairHa
+__ZNK3JSC13StatementNode12isReturnNodeEv
+__ZN3JSC3JIT29emit_op_enter_with_activationEPNS_11InstructionE
+__ZN3JSC3JIT22emit_op_get_global_varEPNS_11InstructionE
+__ZN3JSC3JIT29emitGetVariableObjectRegisterENS_12X86Registers10RegisterIDEiS2_
+__ZN3JSC3JIT27emit_op_tear_off_activationEPNS_11InstructionE
+__ZN3JSC11JITStubCall11addArgumentEjNS_12X86Registers10RegisterIDE
+__ZN3JSC3JIT11emit_op_retEPNS_11InstructionE
+_cti_register_file_check
+_cti_op_push_activation
+__ZN3JSC12JSActivationC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_18FunctionExecutableEEE
+__ZN3JSC12JSActivationC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_18FunctionExecutableEEE
+_cti_op_tear_off_activation
+_cti_op_ret_scopeChain
+__ZN3JSC10JSFunction18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC9Structure18transitionTableAddERKSt4pairIN3WTF6RefPtrIN7WebCore10StringImplEEEjEPS0_PNS_6JSCellE
+__ZN3WTF7HashMapISt4pairINS_6RefPtrIN7WebCore10StringImplEEEjES1_IPN3JSC9StructureES9_ENS7_28StructureTransitionTableHashENS7_3
+__ZN3WTF9HashTableISt4pairINS_6RefPtrIN7WebCore10StringImplEEEjES1_IS6_S1_IPN3JSC9StructureES9_EENS_18PairFirstExtractorISB_EEN
+_cti_op_new_array
+__ZN3JSC7JSArrayC1EN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7ArgListE
+_cti_op_construct_JSConstruct
+__ZNK3JSC10NumberNode6isPureERNS_17BytecodeGeneratorE
+__ZN3JSC8ThisNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC3JIT20emit_op_convert_thisEPNS_11InstructionE
+__ZN3JSC3JIT24emitSlow_op_convert_thisEPNS_11InstructionERPNS_13SlowCaseEntryE
+_cti_op_resolve_with_base
+__ZN3JSC10JSFunction11getCallDataERNS_8CallDataE
+__ZNK3JSC7ArgList8getSliceEiRS0_
+__ZN3JSC9Structure22materializePropertyMapEv
+_cti_op_end
+__ZNK7WebCore6String7toFloatEPb
+__ZN7WebCore10StringImpl7toFloatEPb
+__ZN7WebCore17charactersToFloatEPKtmPb
+__ZN7WebCoreeqERKNS_12AtomicStringEPKc
+__ZNK7WebCore6String5toIntEPb
+__ZN7WebCore10StringImpl5toIntEPb
+__ZN7WebCore15charactersToIntEPKtmPb
+__ZN7WebCore6String6numberEi
+__ZNK7WebCore6String11toIntStrictEPbi
+__ZN7WebCore10StringImpl11toIntStrictEPbi
+__ZN7WebCore21charactersToIntStrictEPKtmPbi
+__ZN7WebCore10StringImpl7replaceEPS0_S1_
+__ZN7WebCore6String6numberEt
+__ZL11makeDivNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
+__ZNK3JSC10NumberNode8isNumberEv
+__ZN3WTF6VectorIPvLm0EE14expandCapacityEm
+__ZN3WTF6VectorIjLm16EE6resizeEm
+__ZN3WTFL7multaddERNS_6BigIntEii
+__ZN3WTF6VectorIjLm16EEaSERKS1_
+__ZN3WTFL4multERNS_6BigIntERKS0_
+__ZL11makeAddNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
+__ZN3WTF6VectorIPNS0_IN3JSC10IdentifierELm64EEELm32EE15reserveCapacityEm
+__ZL14makePrefixNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeENS_8OperatorEiii
+__ZN3WTF6VectorIPN3JSC10RegisterIDELm32EE15reserveCapacityEm
+__ZN3JSC16PropertyListNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC15ConditionalNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator8newLabelEv
+__ZNK3JSC14ExpressionNode26hasConditionContextCodegenEv
__ZN3JSC17BytecodeGenerator15emitJumpIfFalseEPNS_10RegisterIDEPNS_5LabelE
-__ZNK3JSC14JSGlobalObject14isDynamicScopeEv
-__ZN3JSC17BytecodeGenerator19emitResolveFunctionEPNS_10RegisterIDES2_RKNS_10IdentifierE
-__ZN3JSC10StringNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDERKNS_10IdentifierE
-__ZN3WTF9HashTableIPN3JSC7UString3RepESt4pairIS4_PNS1_8JSStringEENS_18PairFirstExtractorIS8_EENS1_17IdentifierRepHashENS_14Pair
-__ZN3JSC11BooleanNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC17BytecodeGenerator8emitJumpEPNS_5LabelE
__ZN3JSC17BytecodeGenerator9emitLabelEPNS_5LabelE
-__ZN3WTF6VectorIjLm0EE15reserveCapacityEm
+__ZN3WTF6VectorIjLm0EE14expandCapacityEm
+__ZN3JSC17BytecodeGenerator14emitPutByIndexEPNS_10RegisterIDEjS2_
__ZN3JSC6IfNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC13StatementNode12isReturnNodeEv
-__ZN3JSC15DotAccessorNodeD0Ev
-__ZN3JSC10StringNodeD0Ev
-__ZN3JSC11BooleanNodeD0Ev
-__ZN3JSC6IfNodeD0Ev
-__ZN3JSC10IfElseNodeD0Ev
-__ZN3JSC3JIT22emit_op_get_global_varEPNS_11InstructionE
-__ZN3JSC3JIT29emitGetVariableObjectRegisterENS_3X8610RegisterIDEiS2_
-__ZN3JSC3JIT17emit_op_get_by_idEPNS_11InstructionE
-__ZN3JSC3JIT21compileGetByIdHotPathEiiPNS_10IdentifierEj
-__ZN3WTF6VectorIN3JSC13SlowCaseEntryELm0EE14expandCapacityEmPKS2_
+__ZNK3JSC14LogicalNotNode26hasConditionContextCodegenEv
+__ZN3JSC11UnaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator11emitUnaryOpENS_8OpcodeIDEPNS_10RegisterIDES3_
+__ZNK3JSC10StringNode6isPureERNS_17BytecodeGeneratorE
+__ZNK3JSC14ExpressionNode5isAddEv
+__ZN3JSC7ForNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator13newLabelScopeENS_10LabelScope4TypeEPKNS_10IdentifierE
+__ZN3JSC19BracketAccessorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator12emitGetByValEPNS_10RegisterIDES2_S2_
+__ZN3JSC17PrefixResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator10emitPreIncEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator14emitJumpIfTrueEPNS_10RegisterIDEPNS_5LabelE
+__ZN3JSC3JIT11emit_op_divEPNS_11InstructionE
+__ZN3JSC20MacroAssemblerX86_649branchPtrENS_23MacroAssemblerX86Common9ConditionENS_12X86Registers10RegisterIDES4_
+__ZN3JSC12X86Assembler11cvtsi2sd_rrENS_12X86Registers10RegisterIDENS1_13XMMRegisterIDE
+__ZN3JSC12X86Assembler23X86InstructionFormatter9twoByteOpENS0_15TwoByteOpcodeIDEiNS_12X86Registers10RegisterIDE
+__ZN3JSC12X86Assembler7movq_rrENS_12X86Registers10RegisterIDENS1_13XMMRegisterIDE
+__ZN3JSC23MacroAssemblerX86Common4moveENS_22AbstractMacroAssemblerINS_12X86AssemblerEE5Imm32ENS_12X86Registers10RegisterIDE
+__ZN3JSC15AssemblerBuffer7putByteEi
__ZN3JSC3JIT14emit_op_jfalseEPNS_11InstructionE
-__ZN3JSC20MacroAssemblerX86_649branchPtrENS_23MacroAssemblerX86Common9ConditionENS_3X8610RegisterIDENS_22AbstractMacroAssembler
-__ZN3JSC20MacroAssemblerX86_649branchPtrENS_23MacroAssemblerX86Common9ConditionENS_3X8610RegisterIDES4_
+__ZN3JSC20MacroAssemblerX86_649branchPtrENS_23MacroAssemblerX86Common9ConditionENS_12X86Registers10RegisterIDENS_22AbstractMacr
__ZN3WTF6VectorIN3JSC9JumpTableELm0EE14expandCapacityEmPKS2_
__ZN3WTF6VectorIN3JSC9JumpTableELm0EE14expandCapacityEm
-__ZN3JSC3JIT20emit_op_resolve_funcEPNS_11InstructionE
__ZN3JSC3JIT11emit_op_jmpEPNS_11InstructionE
-__ZN3JSC3JIT11emit_op_retEPNS_11InstructionE
-__ZN3JSC3JIT21emitSlow_op_get_by_idEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT22compileGetByIdSlowCaseEiiPNS_10IdentifierERPNS_13SlowCaseEntryEj
-__ZN3JSC3JIT18emitSlow_op_jfalseEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC23MacroAssemblerX86Common12branchTest32ENS0_9ConditionENS_3X8610RegisterIDENS_22AbstractMacroAssemblerINS_12X86Assemble
-__ZN3JSC8JITStubs23cti_vm_dontLazyLinkCallEPPv
-__ZN3JSC31ctiPatchNearCallByReturnAddressENS_22AbstractMacroAssemblerINS_12X86AssemblerEE22ProcessorReturnAddressEPv
-__ZN3JSC8JITStubs23cti_register_file_checkEPPv
-__ZN3JSC8JITStubs16cti_op_get_by_idEPPv
-__ZNK3JSC7JSValue3getEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC23setUpStaticFunctionSlotEPNS_9ExecStateEPKNS_9HashEntryEPNS_8JSObjectERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC27ctiPatchCallByReturnAddressENS_22AbstractMacroAssemblerINS_12X86AssemblerEE22ProcessorReturnAddressEPv
-__ZN3JSC8JITStubs12cti_op_jtrueEPPv
-__ZNK3JSC8JSObject9toBooleanEPNS_9ExecStateE
-__ZN3JSC8JITStubs19cti_op_resolve_funcEPPv
+__ZN3JSC3JIT20emit_op_put_by_indexEPNS_11InstructionE
+__ZN3JSC3JIT13emit_op_jtrueEPNS_11InstructionE
+__ZN3JSC3JIT11emit_op_addEPNS_11InstructionE
+__ZN3JSC3JIT18emit_op_get_by_valEPNS_11InstructionE
+__ZN3JSC20MacroAssemblerX86_649branchPtrENS_23MacroAssemblerX86Common9ConditionENS_22AbstractMacroAssemblerINS_12X86AssemblerEE
+__ZN3JSC12X86Assembler23X86InstructionFormatter11oneByteOp64ENS0_15OneByteOpcodeIDEiNS_12X86Registers10RegisterIDES4_ii
+__ZN3JSC3JIT15emit_op_pre_incEPNS_11InstructionE
+__ZN3JSC3JIT16emitTimeoutCheckEv
+__ZN3JSC3JIT13emit_op_jlessEPNS_11InstructionE
+__ZN3JSC3JIT15emitSlow_op_divEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT18emitSlow_op_jfalseEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC23MacroAssemblerX86Common12branchTest32ENS0_9ConditionENS_12X86Registers10RegisterIDENS_22AbstractMacroAssemblerINS_12X
+__ZN3JSC3JIT17emitSlow_op_jtrueEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT22emitSlow_op_get_by_valEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT27stringGetByValStubGeneratorEPNS_12JSGlobalDataEPNS_14ExecutablePoolE
+__ZN3JSC15AssemblerBuffer14executableCopyEPNS_14ExecutablePoolE
+__ZN3JSC3JIT19emitSlow_op_pre_incEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC11JITStubCall11addArgumentENS_12X86Registers10RegisterIDE
+__ZN3JSC3JIT17emitSlow_op_jlessEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC23MacroAssemblerX86Common12branchDoubleENS0_15DoubleConditionENS_12X86Registers13XMMRegisterIDES3_
+__ZNK3JSC13LogicalOpNode26hasConditionContextCodegenEv
+__ZN3JSC13LogicalOpNode30emitBytecodeInConditionContextERNS_17BytecodeGeneratorEPNS_5LabelES4_b
+__ZN3JSC21ReadModifyResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC3JIT11emit_op_subEPNS_11InstructionE
+__ZN3JSC3JIT20compileBinaryArithOpENS_8OpcodeIDEjjjNS_12OperandTypesE
+__ZN3JSC3JIT15emitSlow_op_subEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT28compileBinaryArithOpSlowCaseENS_8OpcodeIDERPNS_13SlowCaseEntryEjjjNS_12OperandTypesEbb
+__ZN3JSC3JIT15emitSlow_op_addEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZNK3JSC14JSGlobalObject14isDynamicScopeERb
+_cti_op_jtrue
+__ZNK3JSC8JSObject9toBooleanEPNS_9ExecStateE
+__ZNK3JSC8JSObject24getPropertySpecificValueEPNS_9ExecStateERKNS_10IdentifierERPNS_6JSCellE
+__ZN3JSC27ctiPatchCallByReturnAddressEPNS_9CodeBlockENS_16ReturnAddressPtrENS_11FunctionPtrE
+__ZN3JSC3JIT8linkCallEPNS_10JSFunctionEPNS_9CodeBlockES4_RNS_7JITCodeEPNS_12CallLinkInfoEiPNS_12JSGlobalDataE
+_cti_op_add
+__ZN3JSC13jsAddSlowCaseEPNS_9ExecStateENS_7JSValueES2_
+__ZNK3JSC8JSString11toPrimitiveEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
+__ZNK3JSC7JSValue8toStringEPNS_9ExecStateE
+__ZN3JSC7UString4fromEi
+__ZNK3JSC8JSString11resolveRopeEPNS_9ExecStateE
+__ZN3WTF13tryFastMallocEm
+__ZN7WebCore10StringImpl4findEPKcib
+__ZN3JSC13LogicalOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZNK3JSC14ExpressionNode6isNullEv
+__ZN3JSC3JIT11emit_op_neqEPNS_11InstructionE
+__ZN3JSC12X86Assembler8shll_i8rEiNS_12X86Registers10RegisterIDE
+__ZN3JSC12X86Assembler6orl_irEiNS_12X86Registers10RegisterIDE
+__ZN3JSC3JIT15emitSlow_op_neqEPNS_11InstructionERPNS_13SlowCaseEntryE
+_cti_op_call_arityCheck
+_cti_op_eq
+__ZN3JSC9JITThunks15tryCacheGetByIDEPNS_9ExecStateEPNS_9CodeBlockENS_16ReturnAddressPtrENS_7JSValueERKNS_10IdentifierERKNS_12Pr
+__ZN3JSC14StructureChainC1EPNS_9StructureE
+__ZN3JSC14StructureChainC2EPNS_9StructureE
+__ZN3JSC3JIT26privateCompileGetByIdChainEPNS_17StructureStubInfoEPNS_9StructureEPNS_14StructureChainEmRKNS_10IdentifierERKNS_12
+__ZN3JSC3JIT22compileGetDirectOffsetEPNS_8JSObjectENS_12X86Registers10RegisterIDES4_m
+_cti_op_get_by_id_self_fail
+__ZN3JSC3JIT29privateCompileGetByIdSelfListEPNS_17StructureStubInfoEPNS_30PolymorphicAccessStructureListEiPNS_9StructureERKNS_1
+_cti_op_get_by_id_custom_stub
+__ZN3JSC10MathObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSCL16mathProtoFuncMaxEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZL17combineCommaNodesPN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_
+__ZNK3JSC14ExpressionNode11isCommaNodeEv
+__ZNK3JSC9CommaNode11isCommaNodeEv
+__ZN3JSC9CommaNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC19ReverseBinaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC9BreakNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator11breakTargetERKNS_10IdentifierE
+__ZN3JSC17BytecodeGenerator14emitJumpScopesEPNS_5LabelEi
+__ZN3JSC9CommaNodeD1Ev
+__ZN3JSC3JIT14emit_op_jnlessEPNS_11InstructionE
+__ZN3JSC3JIT18emitSlow_op_jnlessEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC7JSArray18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+_cti_op_jless
+__ZN3JSC3JIT33privateCompilePatchGetArrayLengthENS_16ReturnAddressPtrE
+__ZN3JSC3JIT16patchGetByIdSelfEPNS_9CodeBlockEPNS_17StructureStubInfoEPNS_9StructureEmNS_16ReturnAddressPtrE
+__ZNK3JSC7AddNode5isAddEv
+__ZN3JSC9JITThunks15tryCachePutByIDEPNS_9ExecStateEPNS_9CodeBlockENS_16ReturnAddressPtrENS_7JSValueERKNS_15PutPropertySlotEPNS_
+_cti_op_negate
+__ZN3JSC7UString4fromEj
+__ZL12makeMultNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
+__ZN3JSC9EqualNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator14emitEqualityOpENS_8OpcodeIDEPNS_10RegisterIDES3_S3_
+__ZN3JSC3JIT11emit_op_notEPNS_11InstructionE
+__ZN3JSC3JIT11emit_op_mulEPNS_11InstructionE
+__ZN3JSC3JIT10emit_op_eqEPNS_11InstructionE
+__ZN3JSC3JIT15emitSlow_op_notEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT15emitSlow_op_mulEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT14emitSlow_op_eqEPNS_11InstructionERPNS_13SlowCaseEntryE
+_cti_op_not
+__ZN3JSC17ReadModifyDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator10emitPreDecEPNS_10RegisterIDE
+__ZN3JSC3JIT16emit_op_jnlesseqEPNS_11InstructionE
+__ZN3JSC3JIT15emit_op_jlesseqEPNS_11InstructionEb
+__ZN3JSC3JIT15emit_op_pre_decEPNS_11InstructionE
+__ZN3JSC3JIT22emit_op_loop_if_lesseqEPNS_11InstructionE
+__ZN3JSC3JIT20emitSlow_op_jnlesseqEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT19emitSlow_op_jlesseqEPNS_11InstructionERPNS_13SlowCaseEntryEb
+__ZN3JSC3JIT19emitSlow_op_pre_decEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT26emitSlow_op_loop_if_lesseqEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC17powThunkGeneratorEPNS_12JSGlobalDataEPNS_14ExecutablePoolE
+__ZN3JSC19SpecializedThunkJIT12returnDoubleENS_12X86Registers13XMMRegisterIDE
+__ZN3JSC19SpecializedThunkJITD1Ev
+__ZN3JSCL16mathProtoFuncSinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL16mathProtoFuncMinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL18mathProtoFuncRoundEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL16mathProtoFuncLogEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC3JIT20patchMethodCallProtoEPNS_9CodeBlockERNS_18MethodCallLinkInfoEPNS_10JSFunctionEPNS_9StructureEPNS_8JSObjectENS_16R
+__ZN3JSC3JIT31privateCompilePutByIdTransitionEPNS_17StructureStubInfoEPNS_9StructureES4_mPNS_14StructureChainENS_16ReturnAddres
+__ZN3JSC3JIT13testPrototypeEPNS_9StructureERNS_22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpListE
+__ZN3JSCL16mathProtoFuncAbsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_jlesseq
+__ZN3WTF6VectorIPN3JSC12CallLinkInfoELm0EE15reserveCapacityEm
+__ZN3JSC10IfElseNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17TypeOfResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC7TryNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator9emitCatchEPNS_10RegisterIDEPNS_5LabelES4_
+__ZN3JSC9CodeBlock25createRareDataIfNecessaryEv
+__ZN3WTF6VectorIN3JSC11HandlerInfoELm0EE14expandCapacityEm
+__ZN3JSC17BytecodeGenerator16emitPushNewScopeEPNS_10RegisterIDERKNS_10IdentifierES2_
+__ZN3WTF6VectorIN3JSC18ControlFlowContextELm0EE14expandCapacityEm
+__ZN3JSC15TypeOfValueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSCeqERKNS_7UStringEPKc
+__ZN3JSC17BytecodeGenerator12emitPopScopeEv
+__ZN3WTF6VectorIN3JSC15SimpleJumpTableELm0EE14shrinkCapacityEm
+__ZN3WTF6VectorIN3JSC15StringJumpTableELm0EE14shrinkCapacityEm
+__ZN3JSC3JIT13emit_op_catchEPNS_11InstructionE
+__ZN3JSC3JIT22emit_op_push_new_scopeEPNS_11InstructionE
+__ZN3JSC3JIT15emit_op_resolveEPNS_11InstructionE
+__ZN3JSC3JIT20emit_op_resolve_baseEPNS_11InstructionE
+__ZN3JSC3JIT17emit_op_pop_scopeEPNS_11InstructionE
+__ZL15makePostfixNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeENS_8OperatorEiii
+__ZN3JSC12BinaryOpNode10emitStrcatERNS_17BytecodeGeneratorEPNS_10RegisterIDES4_PNS_21ReadModifyResolveNodeE
+__ZNK3JSC10StringNode8isStringEv
+__ZNK3JSC14ExpressionNode8isStringEv
+__ZN3JSC17BytecodeGenerator15emitToPrimitiveEPNS_10RegisterIDES2_
+__ZN3JSC17BytecodeGenerator10emitStrcatEPNS_10RegisterIDES2_i
+__ZN3JSC18PostfixResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC3JIT11emit_op_modEPNS_11InstructionE
+__ZN3JSC23MacroAssemblerX86Common8branch32ENS0_9ConditionENS_12X86Registers10RegisterIDENS_22AbstractMacroAssemblerINS_12X86Ass
+__ZN3JSC3JIT20emit_op_to_primitiveEPNS_11InstructionE
+__ZN3JSC23MacroAssemblerX86Common4moveENS_22AbstractMacroAssemblerINS_12X86AssemblerEE6ImmPtrENS_12X86Registers10RegisterIDE
+__ZN3JSC3JIT14emit_op_strcatEPNS_11InstructionE
+__ZN3JSC3JIT15emitSlow_op_modEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT24emitSlow_op_to_primitiveEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSCL18mathProtoFuncFloorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL17mathProtoFuncCeilEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_strcat
+__ZNK3JSC19BracketAccessorNode10isLocationEv
+__ZNK3JSC19BracketAccessorNode21isBracketAccessorNodeEv
+__ZN3JSC17AssignBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator12emitPutByValEPNS_10RegisterIDES2_S2_
+__ZN3JSC3JIT18emit_op_put_by_valEPNS_11InstructionE
+__ZN3JSC3JIT22emitSlow_op_put_by_valEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC14ArrayPrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC14LogicalNotNode30emitBytecodeInConditionContextERNS_17BytecodeGeneratorEPNS_5LabelES4_b
+__ZN3JSC8JSString18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC15StringPrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC12StringObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSCL21stringProtoFuncSubstrEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC7JSValue9toIntegerEPNS_9ExecStateE
+__ZN7WebCore10StringImpl6createEN3WTF10PassRefPtrIS0_EEjj
+__ZN3WTF6RefPtrIN7WebCore10StringImplEED1Ev
+__ZN3JSCL18arrayProtoFuncJoinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZNK3JSC8JSObject12toThisObjectEPNS_9ExecStateE
-__ZNK3JSC8JSString8toStringEPNS_9ExecStateE
-__ZN3JSC8JITStubs23cti_op_get_by_id_secondEPPv
-__ZN3JSC8JITStubs15tryCacheGetByIDEPNS_9ExecStateEPNS_9CodeBlockEPvNS_7JSValueERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC3JIT26privateCompileGetByIdProtoEPNS_17StructureStubInfoEPNS_9StructureES4_mNS_22AbstractMacroAssemblerINS_12X86Assembl
-__ZN3JSC3JIT22compileGetDirectOffsetEPNS_8JSObjectENS_3X8610RegisterIDES4_m
-__ZN3JSC8JITStubs19cti_vm_lazyLinkCallEPPv
-__ZN3JSC3JIT8linkCallEPNS_10JSFunctionEPNS_9CodeBlockENS_7JITCodeEPNS_12CallLinkInfoEi
-__ZN3JSC8JITStubs10cti_op_endEPPv
-__ZThn16_N3JSC12FuncDeclNodeD0Ev
-__ZN3JSC12FuncDeclNodeD0Ev
-__ZN3WTF25TCMalloc_Central_FreeList11ShrinkCacheEib
-__ZN3JSC10JSFunction18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC10JSFunction11getCallDataERNS_8CallDataE
-__ZN3JSC4callEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataES2_RKNS_7ArgListE
-__ZN3JSC11Interpreter7executeEPNS_16FunctionBodyNodeEPNS_9ExecStateEPNS_10JSFunctionEPNS_8JSObjectERKNS_7ArgListEPNS_14ScopeCha
-__ZNK3JSC15DotAccessorNode10isLocationEv
-__ZNK3JSC14ExpressionNode13isResolveNodeEv
-__ZNK3JSC14ExpressionNode21isBracketAccessorNodeEv
-__ZN3JSC19FunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC19FunctionCallDotNodeD0Ev
-__ZL26appendToVarDeclarationListPvRPN3JSC15ParserArenaDataIN3WTF6VectorISt4pairINS0_10IdentifierEjELm0EEEEERKS5_j
-__ZN3WTF6VectorISt4pairIN3JSC10IdentifierEjELm0EE14expandCapacityEm
-__ZL14makeAssignNodePvPN3JSC14ExpressionNodeENS0_8OperatorES2_bbiii
-__ZL11makeAddNodePvPN3JSC14ExpressionNodeES2_b
-__ZN3JSC16VarStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17AssignResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC11UnaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3WTF7HashSetIPN3JSC8JSObjectENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEE3addERKS3_
+__ZN3WTF9HashTableIPN3JSC8JSObjectES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
+__ZNK3JSC8JSObject3getEPNS_9ExecStateERKNS_10IdentifierE
+__ZN3JSC15JSStringBuilder5buildEPNS_9ExecStateE
+__ZN3WTF6VectorItLm64EE17tryExpandCapacityEm
+__ZN3JSCL18arrayProtoFuncPushEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC7JSArray4pushEPNS_9ExecStateENS_7JSValueE
+__ZN3WTF14tryFastReallocEPvm
+__ZN3JSC4Heap15recordExtraCostEm
+__ZN3JSC17BytecodeGenerator16emitPutScopedVarEmiPNS_10RegisterIDENS_7JSValueE
+__ZN3JSC3JIT22emit_op_put_scoped_varEPNS_11InstructionE
+__ZN3JSC3JIT29emitPutVariableObjectRegisterENS_12X86Registers10RegisterIDES2_i
+__ZN3JSC3JIT22emit_op_put_global_varEPNS_11InstructionE
+_cti_op_put_by_index
+__ZN3JSC7JSArray3putEPNS_9ExecStateEjNS_7JSValueE
+__ZN3JSC7JSArray11putSlowCaseEPNS_9ExecStateEjNS_7JSValueE
+__ZNK3JSC11BooleanNode6isPureERNS_17BytecodeGeneratorE
+__ZN3JSC10JSFunction3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZNK7WebCore12AtomicString5lowerEv
+_cti_op_get_by_id_proto_list
+__ZN3JSC3JIT30privateCompileGetByIdChainListEPNS_17StructureStubInfoEPNS_30PolymorphicAccessStructureListEiPNS_9StructureEPNS_1
+__ZN3JSC3JIT22emit_op_get_scoped_varEPNS_11InstructionE
+_cti_op_put_by_val
+__ZNK3JSC12JSActivation14isDynamicScopeERb
+__ZN3JSC24ApplyFunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZNK3JSC14ExpressionNode13isSimpleArrayEv
+__ZN3JSC17BytecodeGenerator26emitJumpIfNotFunctionApplyEPNS_10RegisterIDEPNS_5LabelE
+__ZN3JSC17BytecodeGenerator22willResolveToArgumentsERKNS_10IdentifierE
+__ZN3JSC17BytecodeGenerator15emitLoadVarargsEPNS_10RegisterIDES2_
+__ZN3JSC17BytecodeGenerator15emitCallVarargsEPNS_10RegisterIDES2_S2_S2_jjj
+__ZN3JSC3JIT16emit_op_jneq_ptrEPNS_11InstructionE
+__ZN3JSC3JIT20emit_op_load_varargsEPNS_11InstructionE
+__ZN3JSC3JIT20emit_op_call_varargsEPNS_11InstructionE
+__ZN3JSC3JIT20compileOpCallVarargsEPNS_11InstructionE
+__ZN3JSC3JIT29compileOpCallVarargsSetupArgsEPNS_11InstructionE
+__ZN3JSC3JIT24emitSlow_op_call_varargsEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT28compileOpCallVarargsSlowCaseEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC14ExecutablePool13systemReleaseERKNS0_10AllocationE
+_JSStringCreateWithCFString
+_JSStringRetain
+_JSStringRelease
+_JSEvaluateScript
+__ZN3JSC4Heap14registerThreadEv
+__ZNK14OpaqueJSString7ustringEv
+__ZN3JSC7UStringC1EPKtj
+__ZN3JSC5Lexer10scanRegExpERPKNS_10IdentifierES4_t
+__ZN3JSC18ConstStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC13ConstDeclNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC13ConstDeclNode14emitCodeSingleERNS_17BytecodeGeneratorE
+__ZN3JSC17BytecodeGenerator16constRegisterForERKNS_10IdentifierE
__ZN3JSC10RegExpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZN3JSC6RegExp6createEPNS_12JSGlobalDataERKNS_7UStringES5_
+__ZNK3JSC7UString4findEtj
__ZN3JSC4Yarr15jitCompileRegexEPNS_12JSGlobalDataERNS0_14RegexCodeBlockERKNS_7UStringERjRPKcbb
__ZN3JSC4Yarr12compileRegexERKNS_7UStringERNS0_12RegexPatternE
+__ZN3JSC4Yarr6ParserINS0_23RegexPatternConstructorEE5parseEv
__ZN3JSC4Yarr18PatternDisjunction17addNewAlternativeEv
-__ZN3WTF6VectorIPN3JSC4Yarr18PatternAlternativeELm0EE14expandCapacityEm
-__ZN3JSC4Yarr6ParserINS0_23RegexPatternConstructorEE11parseTokensEv
+__ZN3WTF6VectorIPN3JSC4Yarr18PatternAlternativeELm0EE15reserveCapacityEm
+__ZN3JSC4Yarr23RegexPatternConstructor20atomPatternCharacterEt
__ZN3WTF6VectorIN3JSC4Yarr11PatternTermELm0EE14expandCapacityEmPKS3_
__ZN3WTF6VectorIN3JSC4Yarr11PatternTermELm0EE14expandCapacityEm
__ZN3JSC4Yarr6ParserINS0_23RegexPatternConstructorEE11parseEscapeILb0ES2_EEbRT0_
-__ZN3JSC4Yarr23RegexPatternConstructor25atomBuiltInCharacterClassENS0_23BuiltInCharacterClassIDEb
-__ZN3JSC4Yarr14wordcharCreateEv
-__ZN3WTF6VectorItLm0EE14expandCapacityEm
-__ZN3WTF6VectorIN3JSC4Yarr14CharacterRangeELm0EE14expandCapacityEmPKS3_
-__ZN3WTF6VectorIN3JSC4Yarr14CharacterRangeELm0EE14expandCapacityEm
-__ZN3WTF6VectorIPN3JSC4Yarr14CharacterClassELm0EE14expandCapacityEmPKS4_
-__ZN3WTF6VectorIPN3JSC4Yarr14CharacterClassELm0EE14expandCapacityEm
__ZN3JSC4Yarr14RegexGenerator19generateDisjunctionEPNS0_18PatternDisjunctionE
-__ZN3JSC12X86Assembler7addl_irEiNS_3X8610RegisterIDE
-__ZN3JSC23MacroAssemblerX86Common8branch32ENS0_9ConditionENS_3X8610RegisterIDES3_
-__ZN3JSC22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpList6appendENS2_4JumpE
+__ZN3JSC12X86Assembler7addl_irEiNS_12X86Registers10RegisterIDE
+__ZN3JSC23MacroAssemblerX86Common8branch32ENS0_9ConditionENS_12X86Registers10RegisterIDES3_
__ZN3JSC4Yarr14RegexGenerator12generateTermERNS1_19TermGenerationStateE
-__ZN3JSC23MacroAssemblerX86Common8branch32ENS0_9ConditionENS_3X8610RegisterIDENS_22AbstractMacroAssemblerINS_12X86AssemblerEE5I
+__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDEiNS_12X86Registers10RegisterIDES4_ii
__ZN3JSC4Yarr14RegexGenerator19TermGenerationState15jumpToBacktrackENS_22AbstractMacroAssemblerINS_12X86AssemblerEE4JumpEPNS_14
-__ZN3JSC4Yarr14RegexGenerator13readCharacterEiNS_3X8610RegisterIDE
-__ZN3JSC4Yarr14RegexGenerator19matchCharacterClassENS_3X8610RegisterIDERNS_22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpLis
-__ZN3JSC4Yarr14RegexGenerator24matchCharacterClassRangeENS_3X8610RegisterIDERNS_22AbstractMacroAssemblerINS_12X86AssemblerEE8Ju
+__ZN3JSC4Yarr14RegexGenerator19jumpIfCharNotEqualsEti
+__ZN3JSC12X86Assembler7subl_irEiNS_12X86Registers10RegisterIDE
__ZN3JSC22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpList4linkEPS2_
-__ZN3JSC23MacroAssemblerX86Common4jumpEv
-__ZN3WTF6VectorIN3JSC22AbstractMacroAssemblerINS1_12X86AssemblerEE4JumpELm16EED1Ev
-__ZN3JSC4Yarr14RegexGenerator28generateCharacterClassGreedyERNS1_19TermGenerationStateE
-__ZN3JSC12X86Assembler7subl_irEiNS_3X8610RegisterIDE
-__ZN3JSC15AssemblerBuffer4growEv
+__ZN3WTF15deleteAllValuesIPN3JSC4Yarr18PatternDisjunctionELm4EEEvRKNS_6VectorIT_XT0_EEE
__ZN3WTF15deleteAllValuesIPN3JSC4Yarr14CharacterClassELm0EEEvRKNS_6VectorIT_XT0_EEE
-__ZN3JSC17BytecodeGenerator13emitNewRegExpEPNS_10RegisterIDEPNS_6RegExpE
-__ZN3JSC15ConditionalNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC9EqualNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC14ExpressionNode6isNullEv
-__ZNK3JSC10StringNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC19BracketAccessorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC10NumberNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC10NumberNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDEd
-__ZN3JSC17BytecodeGenerator12emitGetByValEPNS_10RegisterIDES2_S2_
-__ZN3JSC17BytecodeGenerator14emitEqualityOpENS_8OpcodeIDEPNS_10RegisterIDES3_S3_
-__ZN3JSC19ReverseBinaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC14ExpressionNode5isAddEv
-__ZN3JSC12SmallStrings27createSingleCharacterStringEPNS_12JSGlobalDataEh
-__ZN3JSC13AssignDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator11emitPutByIdEPNS_10RegisterIDERKNS_10IdentifierES2_
-__ZN3JSC17AssignResolveNodeD0Ev
-__ZN3JSC15ParserArenaDataIN3WTF6VectorISt4pairINS_10IdentifierEjELm0EEEED0Ev
-__ZN3JSC16VarStatementNodeD0Ev
-__ZN3JSC14LogicalNotNodeD0Ev
-__ZN3JSC10RegExpNodeD0Ev
-__ZN3JSC10NumberNodeD0Ev
-__ZN3JSC19BracketAccessorNodeD0Ev
-__ZN3JSC9EqualNodeD0Ev
-__ZN3JSC15ConditionalNodeD0Ev
-__ZN3JSC7AddNodeD0Ev
-__ZN3JSC13GreaterEqNodeD0Ev
-__ZN3JSC13AssignDotNodeD0Ev
-__ZN3JSC3JIT13emit_op_jtrueEPNS_11InstructionE
-__ZN3JSC3JIT18emit_op_new_regexpEPNS_11InstructionE
-__ZN3JSC3JIT18emit_op_get_by_valEPNS_11InstructionE
-__ZN3JSC3JIT10emit_op_eqEPNS_11InstructionE
-__ZN3JSC3JIT11emit_op_addEPNS_11InstructionE
-__ZN3JSC11JITStubCall11addArgumentEjNS_3X8610RegisterIDE
-__ZN3JSC3JIT16emit_op_jnlesseqEPNS_11InstructionE
-__ZN3JSC3JIT17emit_op_put_by_idEPNS_11InstructionE
-__ZN3JSC3JIT21compilePutByIdHotPathEiPNS_10IdentifierEij
-__ZN3JSC3JIT17emitSlow_op_jtrueEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT22emitSlow_op_get_by_valEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT14emitSlow_op_eqEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT20emitSlow_op_jnlesseqEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC20MacroAssemblerX86_6413branchTestPtrENS_23MacroAssemblerX86Common9ConditionENS_3X8610RegisterIDES4_
-__ZN3JSC12X86Assembler23X86InstructionFormatter9twoByteOpENS0_15TwoByteOpcodeIDEiNS_3X8610RegisterIDE
-__ZN3JSC23MacroAssemblerX86Common12branchDoubleENS0_15DoubleConditionENS_3X8613XMMRegisterIDES3_
-__ZN3JSC3JIT21emitSlow_op_put_by_idEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT22compilePutByIdSlowCaseEiPNS_10IdentifierEiRPNS_13SlowCaseEntryEj
-__ZN3JSC13LogicalOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3WTF6VectorIN3JSC17GlobalResolveInfoELm0EE14expandCapacityEm
-__ZN3JSC17BytecodeGenerator14emitJumpIfTrueEPNS_10RegisterIDEPNS_5LabelE
-__ZN3JSC13LogicalOpNodeD0Ev
-__ZN3JSC3JIT22emit_op_resolve_globalEPNS_11InstructionE
-__ZN3JSC8JITStubs21cti_op_resolve_globalEPPv
-__ZNK3JSC8JSString9toBooleanEPNS_9ExecStateE
-__ZN3JSC8JSString18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC15StringPrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC12StringObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL20stringProtoFuncMatchEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC8JSString12toThisStringEPNS_9ExecStateE
-__ZNK3JSC6JSCell8isObjectEPKNS_9ClassInfoE
-__ZNK3JSC6JSCell9classInfoEv
-__ZN3JSC4Yarr23RegexPatternConstructor20atomPatternCharacterEt
+__ZN3JSC17BytecodeGenerator8emitLoadEPNS_10RegisterIDEPNS_6RegExpE
+__ZN3JSC12RegExpObjectC1EN3WTF17NonNullPassRefPtrINS_9StructureEEENS2_INS_6RegExpEEE
+__ZN3JSC12RegExpObjectC2EN3WTF17NonNullPassRefPtrINS_9StructureEEENS2_INS_6RegExpEEE
+__ZN3JSC4Yarr23RegexPatternConstructor30atomParenthesesSubpatternBeginEb
__ZN3JSC4Yarr25CharacterClassConstructor7putCharEt
__ZN3JSC4Yarr25CharacterClassConstructor9addSortedERN3WTF6VectorItLm0EEEt
+__ZN3WTF6VectorItLm0EE14expandCapacityEm
__ZN3JSC4Yarr23RegexPatternConstructor21atomCharacterClassEndEv
+__ZN3WTF6VectorIPN3JSC4Yarr14CharacterClassELm0EE14expandCapacityEmPKS4_
+__ZN3WTF6VectorIPN3JSC4Yarr14CharacterClassELm0EE14expandCapacityEm
+__ZN3WTF6VectorIPN3JSC4Yarr18PatternDisjunctionELm4EE14expandCapacityEm
__ZN3JSC4Yarr23RegexPatternConstructor23setupDisjunctionOffsetsEPNS0_18PatternDisjunctionEjj
__ZN3JSC4Yarr14RegexGenerator25generateParenthesesSingleERNS1_19TermGenerationStateE
__ZN3JSC4Yarr14RegexGenerator30generateParenthesesDisjunctionERNS0_11PatternTermERNS1_19TermGenerationStateEj
__ZN3WTF6VectorIN3JSC4Yarr14RegexGenerator26AlternativeBacktrackRecordELm0EE14expandCapacityEm
-__ZN3JSC4Yarr14RegexGenerator19jumpIfCharNotEqualsEti
-__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDEiNS_3X8610RegisterIDES4_ii
-__ZN3JSC4Yarr14RegexGenerator19TermGenerationState15jumpToBacktrackERNS_22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpListEP
-__ZN3JSC17RegExpConstructor12performMatchEPNS_6RegExpERKNS_7UStringEiRiS6_PPi
-__ZN3JSC6RegExp5matchERKNS_7UStringEiPN3WTF11OwnArrayPtrIiEE
-__ZN3JSC4Yarr12executeRegexERNS0_14RegexCodeBlockEPKtjjPii
-__ZN3JSC8JITStubs17cti_op_new_regexpEPPv
-__ZN3JSC12RegExpObjectC1EN3WTF10PassRefPtrINS_9StructureEEENS2_INS_6RegExpEEE
+__ZN3JSC4Yarr14RegexGenerator28generateCharacterClassGreedyERNS1_19TermGenerationStateE
+__ZN3JSC4Yarr14RegexGenerator19matchCharacterClassENS_12X86Registers10RegisterIDERNS_22AbstractMacroAssemblerINS_12X86Assembler
+__ZN3JSC23MacroAssemblerX86Common4jumpEv
+__ZN3JSC4Yarr14RegexGenerator13readCharacterEiNS_12X86Registers10RegisterIDE
+__ZN3JSC12StringObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+_JSContextGetGlobalObject
+__ZN3WTF13wtfThreadDataEv
+_JSClassCreate
+__ZN13OpaqueJSClass6createEPK17JSClassDefinition
+__ZN13OpaqueJSClassC2EPK17JSClassDefinitionPS_
+__ZN3WTF7Unicode18convertUTF8ToUTF16EPPKcS2_PPtS4_b
+_JSClassRetain
+_JSObjectMake
+__ZN3JSC16JSCallbackObjectINS_8JSObjectEE4initEPNS_9ExecStateE
+__ZN13OpaqueJSClass9prototypeEPN3JSC9ExecStateE
+__ZN13OpaqueJSClass11contextDataEPN3JSC9ExecStateE
+__ZN3WTF9HashTableIP13OpaqueJSClassSt4pairIS2_P24OpaqueJSClassContextDataENS_18PairFirstExtractorIS6_EENS_7PtrHashIS2_EENS_14Pa
+__ZN24OpaqueJSClassContextDataC2EP13OpaqueJSClass
+_JSStringCreateWithUTF8CString
+_JSObjectSetProperty
+__ZN3JSC12APIEntryShimC1EPNS_9ExecStateEb
+__ZNK14OpaqueJSString10identifierEPN3JSC12JSGlobalDataE
+_JSClassRelease
+_JSContextGetGlobalContext
+_JSGlobalContextRetain
+_JSObjectGetProperty
+_JSValueToObject
+_JSValueProtect
+_JSObjectIsFunction
+_JSObjectCallAsFunction
+__ZN3JSC4callEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataES2_RKNS_7ArgListE
+__ZN3JSC10JSFunction4callEPNS_9ExecStateENS_7JSValueERKNS_7ArgListE
+__ZN3JSC11Interpreter7executeEPNS_18FunctionExecutableEPNS_9ExecStateEPNS_10JSFunctionEPNS_8JSObjectERKNS_7ArgListEPNS_14ScopeC
+__ZNK3JSC8NullNode6isNullEv
+__ZN3JSC3JIT16emit_op_neq_nullEPNS_11InstructionE
+__ZNK3JSC19JSStaticScopeObject14isDynamicScopeERb
+__ZN3JSC12RegExpObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSCL19regExpProtoFuncTestEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZNK3JSC12RegExpObject9classInfoEv
-__ZN3JSC18RegExpMatchesArrayC2EPNS_9ExecStateEPNS_24RegExpConstructorPrivateE
-__ZN3JSC8JITStubs17cti_op_get_by_valEPPv
-__ZN3JSC18RegExpMatchesArray18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC18RegExpMatchesArray17fillArrayInstanceEPNS_9ExecStateE
-__ZN3JSC11jsSubstringEPNS_12JSGlobalDataERKNS_7UStringEjj
-__ZN3JSC7JSArray3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC8JSObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC7JSArray18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC8JITStubs9cti_op_eqEPPv
-__ZN3JSCeqERKNS_7UStringES2_
-__ZN3JSC8JITStubs10cti_op_addEPPv
-__ZN3JSC11concatenateEPNS_7UString3RepES2_
-__ZN3JSCL22stringProtoFuncIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7UString4findERKS0_i
-__ZN3JSC8JITStubs16cti_op_put_by_idEPPv
+__ZN3JSC12RegExpObject4testEPNS_9ExecStateERKNS_7ArgListE
+__ZN3JSC12RegExpObject5matchEPNS_9ExecStateERKNS_7ArgListE
+__ZN3JSC6RegExp5matchERKNS_7UStringEiPN3WTF6VectorIiLm32EEE
+__ZN3JSC9WhileNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC15StrictEqualNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC3JIT16emit_op_stricteqEPNS_11InstructionE
+__ZN3JSC3JIT17compileOpStrictEqEPNS_11InstructionENS0_21CompileOpStrictEqTypeE
+__ZN3JSC3JIT17emit_op_nstricteqEPNS_11InstructionE
+__ZN3JSC3JIT20emitSlow_op_stricteqEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT21emitSlow_op_nstricteqEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC12ContinueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator14continueTargetERKNS_10IdentifierE
+__ZN3JSCL7dateNowEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_get_by_val
+__ZN7WebCore10StringImpl5upperEv
+_cti_op_get_by_id_generic
+_cti_op_put_by_id_fail
__ZNK3JSC7UString8toUInt32EPbb
__ZNK3JSC7UString8toDoubleEbb
-__ZNK3JSC7UString10getCStringERN3WTF6VectorIcLm32EEE
-__ZN3WTF14FastMallocZone11forceUnlockEP14_malloc_zone_t
-__Z15jsRegExpCompilePKti24JSRegExpIgnoreCaseOption23JSRegExpMultilineOptionPjPPKc
-__ZL30calculateCompiledPatternLengthPKti24JSRegExpIgnoreCaseOptionR11CompileDataR9ErrorCode
-__ZL11checkEscapePPKtS0_P9ErrorCodeib
-__ZL13compileBranchiPiPPhPPKtS3_P9ErrorCodeS_S_R11CompileData
-__Z15jsRegExpExecutePK8JSRegExpPKtiiPii
-__ZL5matchPKtPKhiR9MatchData
-__ZNK3JSC7UString14toStrictUInt32EPb
-__ZN3JSC17ObjectLiteralNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC16PropertyListNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC7TryNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator9emitCatchEPNS_10RegisterIDEPNS_5LabelES4_
-__ZN3WTF6VectorIN3JSC11HandlerInfoELm0EE14expandCapacityEm
-__ZN3JSC17BytecodeGenerator16emitPushNewScopeEPNS_10RegisterIDERNS_10IdentifierES2_
-__ZN3WTF6VectorIN3JSC18ControlFlowContextELm0EE14expandCapacityEm
-__ZNK3JSC14ExpressionNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC12PropertyNodeD0Ev
-__ZN3JSC16PropertyListNodeD0Ev
-__ZN3JSC17ObjectLiteralNodeD0Ev
-__ZN3JSC7TryNodeD0Ev
-__ZN3JSC3JIT18emit_op_new_objectEPNS_11InstructionE
-__ZN3JSC3JIT13emit_op_catchEPNS_11InstructionE
-__ZN3JSC3JIT22emit_op_push_new_scopeEPNS_11InstructionE
-__ZN3JSC3JIT15emit_op_resolveEPNS_11InstructionE
-__ZN3JSC3JIT17emit_op_pop_scopeEPNS_11InstructionE
-__ZN3JSC8JITStubs17cti_op_new_objectEPPv
-__ZN3JSC20constructEmptyObjectEPNS_9ExecStateE
-__ZN3JSC17StructureStubInfo5derefEv
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC7UString3RepEEES5_NS_17IdentityExtractorIS5_EENS2_17IdentifierRepHashENS_10HashTraitsIS5_EES
-__ZN3JSC8ThisNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC21ThrowableBinaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC8ThisNodeD0Ev
-__ZN3JSC6InNodeD0Ev
-__ZN3JSC3JIT29emit_op_enter_with_activationEPNS_11InstructionE
-__ZN3JSC3JIT20emit_op_convert_thisEPNS_11InstructionE
-__ZN3JSC3JIT27emit_op_tear_off_activationEPNS_11InstructionE
-__ZN3JSC3JIT24emitSlow_op_convert_thisEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs22cti_op_push_activationEPPv
-__ZN3JSC12JSActivationC1EPNS_9ExecStateEN3WTF10PassRefPtrINS_16FunctionBodyNodeEEE
-__ZN3JSC12JSActivationC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_16FunctionBodyNodeEEE
+__ZNK3JSC7UString10UTF8StringEb
+__ZN3JSC18globalFuncParseIntEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC12nonInlineNaNEv
+__ZN3JSC15toInt32SlowCaseEdRb
+__ZN3JSCL8parseIntERKNS_7UStringEi
+__ZN3JSC15globalFuncIsNaNEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK7WebCore6String8toDoubleEPb
+__ZN7WebCore10StringImpl8toDoubleEPb
__ZN3JSC4Yarr6ParserINS0_23RegexPatternConstructorEE11parseEscapeILb1ENS3_28CharacterClassParserDelegateEEEbRT0_
-__ZN3JSC4Yarr12digitsCreateEv
+__ZN3JSC4Yarr12spacesCreateEv
+__ZN3WTF6VectorIN3JSC4Yarr14CharacterRangeELm0EE14expandCapacityEmPKS3_
+__ZN3WTF6VectorIN3JSC4Yarr14CharacterRangeELm0EE14expandCapacityEm
__ZN3JSC4Yarr25CharacterClassConstructor6appendEPKNS0_14CharacterClassE
__ZN3JSC4Yarr25CharacterClassConstructor14addSortedRangeERN3WTF6VectorINS0_14CharacterRangeELm0EEEtt
__ZN3JSC4Yarr6ParserINS0_23RegexPatternConstructorEE28CharacterClassParserDelegate20atomPatternCharacterEt
-__ZN3JSC11GreaterNodeD0Ev
-__ZN3JSCL26stringProtoFuncToLowerCaseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JSString14toThisJSStringEPNS_9ExecStateE
-__ZN3JSC7UStringC2EPtib
-__ZN3JSC18globalFuncParseIntEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC11JSImmediate12nonInlineNaNEv
-__ZN3JSC8JITStubs11cti_op_lessEPPv
-__ZN3JSC8JITStubs9cti_op_inEPPv
-__ZNK3JSC6JSCell9getUInt32ERj
-__ZNK3JSC8JSObject11hasPropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZL14makePrefixNodePvPN3JSC14ExpressionNodeENS0_8OperatorEiii
-__ZN3JSC7ForNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator13newLabelScopeENS_10LabelScope4TypeEPKNS_10IdentifierE
-__ZN3JSC12ContinueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator14continueTargetERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator14emitJumpScopesEPNS_5LabelEi
-__ZN3JSC17PrefixResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC21ReadModifyResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC11NewExprNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator13emitConstructEPNS_10RegisterIDES2_PNS_13ArgumentsNodeEjjj
-__ZN3WTF6VectorIN3JSC20GetByIdExceptionInfoELm0EE14expandCapacityEm
-__ZN3JSC8LessNodeD0Ev
-__ZN3JSC17PrefixResolveNodeD0Ev
-__ZN3JSC12ContinueNodeD0Ev
-__ZN3JSC7ForNodeD0Ev
-__ZN3JSC21ReadModifyResolveNodeD0Ev
-__ZN3JSC11NewExprNodeD0Ev
-__ZN3JSC3JIT11emit_op_notEPNS_11InstructionE
-__ZN3JSC3JIT15emit_op_pre_incEPNS_11InstructionE
-__ZN3JSC3JIT20emit_op_loop_if_lessEPNS_11InstructionE
-__ZN3JSC3JIT16emitTimeoutCheckEv
-__ZN3JSC3JIT20compileBinaryArithOpENS_8OpcodeIDEjjjNS_12OperandTypesE
-__ZN3JSC3JIT11emit_op_subEPNS_11InstructionE
-__ZN3JSC3JIT17emit_op_constructEPNS_11InstructionE
-__ZN3JSC3JIT24emit_op_construct_verifyEPNS_11InstructionE
-__ZN3JSC3JIT15emitSlow_op_notEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT19emitSlow_op_pre_incEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT24emitSlow_op_loop_if_lessEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT15emitSlow_op_addEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT28compileBinaryArithOpSlowCaseENS_8OpcodeIDERPNS_13SlowCaseEntryEjjjNS_12OperandTypesE
-__ZN3JSC15AssemblerBuffer7putByteEi
-__ZN3JSC12X86Assembler23X86InstructionFormatter11twoByteOp64ENS0_15TwoByteOpcodeIDEiNS_3X8610RegisterIDE
-__ZN3JSC3JIT15emitSlow_op_subEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT21emitSlow_op_constructEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT27compileOpConstructSetupArgsEPNS_11InstructionE
-__ZN3JSC3JIT28emitSlow_op_construct_verifyEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC7UString4fromEj
-__ZN3JSC10Identifier11addSlowCaseEPNS_9ExecStateEPNS_7UString3RepE
-__ZN3JSC8JITStubs10cti_op_notEPPv
-__ZN3JSC8JITStubs24cti_op_get_by_id_genericEPPv
-__ZN3JSC7JSArrayC2EN3WTF10PassRefPtrINS_9StructureEEERKNS_7ArgListE
-__ZN3JSC7JSArray18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL24stringProtoFuncSubstringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs31cti_op_construct_NotJSConstructEPPv
-__ZN3JSC3JIT33privateCompilePatchGetArrayLengthENS_22AbstractMacroAssemblerINS_12X86AssemblerEE22ProcessorReturnAddressE
-__ZN3JSC8JITStubs27cti_op_get_by_id_proto_listEPPv
-__ZN3JSC3JIT30privateCompileGetByIdProtoListEPNS_17StructureStubInfoEPNS_30PolymorphicAccessStructureListEiPNS_9StructureES6_mP
-__ZN3JSC3JIT16patchGetByIdSelfEPNS_17StructureStubInfoEPNS_9StructureEmNS_22AbstractMacroAssemblerINS_12X86AssemblerEE22Process
-__ZN3JSC14StructureChainC1EPNS_9StructureE
-__ZN3JSC14StructureChainC2EPNS_9StructureE
-__ZN3JSC3JIT26privateCompileGetByIdChainEPNS_17StructureStubInfoEPNS_9StructureEPNS_14StructureChainEmmNS_22AbstractMacroAssemb
-__ZN3JSC8JITStubs23cti_op_put_by_id_secondEPPv
-__ZN3JSC8JITStubs15tryCachePutByIDEPNS_9ExecStateEPNS_9CodeBlockEPvNS_7JSValueERKNS_15PutPropertySlotE
-__ZN3JSC8JITStubs24cti_op_put_by_id_genericEPPv
-__ZN3JSC8JITStubs26cti_op_tear_off_activationEPPv
-__ZN3JSC8JITStubs21cti_op_ret_scopeChainEPPv
-__ZN3JSC17BytecodeGenerator16emitPutScopedVarEmiPNS_10RegisterIDENS_7JSValueE
-__ZN3JSC3JIT22emit_op_get_scoped_varEPNS_11InstructionE
-__ZN3JSC3JIT22emit_op_put_scoped_varEPNS_11InstructionE
-__ZN3JSC3JIT29emitPutVariableObjectRegisterENS_3X8610RegisterIDES2_i
-__ZN3JSC12X86Assembler7movq_rrENS_3X8610RegisterIDENS1_13XMMRegisterIDE
-__ZN3WTF20TCMalloc_ThreadCache18DestroyThreadCacheEPv
-__ZN3WTF20TCMalloc_ThreadCache11DeleteCacheEPS0_
-__ZN3JSC15StrictEqualNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC15StrictEqualNodeD0Ev
-__ZN3JSC3JIT16emit_op_stricteqEPNS_11InstructionE
-__ZN3JSC3JIT17compileOpStrictEqEPNS_11InstructionENS0_21CompileOpStrictEqTypeE
-__ZN3JSC3JIT20emitSlow_op_stricteqEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs15cti_op_stricteqEPPv
-__ZN3WTF12detachThreadEj
-__ZN3WTFL26pthreadHandleForIdentifierEj
-__ZN3WTFL31clearPthreadHandleForIdentifierEj
-__ZN3WTF6VectorIPNS0_IN3JSC10IdentifierELm64EEELm32EE14expandCapacityEmPKS4_
-__ZN3WTF6VectorIPNS0_IN3JSC10IdentifierELm64EEELm32EE15reserveCapacityEm
-__ZN3JSC8NullNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC8NullNodeD0Ev
-__ZN3WTF7HashMapISt4pairINS_6RefPtrIN3JSC7UString3RepEEEjEPNS3_9StructureENS3_28StructureTransitionTableHashENS3_34StructureTra
-__ZN3WTF9HashTableISt4pairINS_6RefPtrIN3JSC7UString3RepEEEjES1_IS7_PNS3_9StructureEENS_18PairFirstExtractorISA_EENS3_28Structur
-__ZN3JSC9Structure22materializePropertyMapEv
-__ZN3JSC15TypeOfValueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC15TypeOfValueNodeD0Ev
-__ZN3JSC12NotEqualNodeD0Ev
-__ZN3JSC3JIT11emit_op_neqEPNS_11InstructionE
-__ZN3JSC3JIT15emitSlow_op_neqEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs13cti_op_typeofEPPv
-__ZN3JSC20jsTypeStringForValueEPNS_9ExecStateENS_7JSValueE
-__ZN3JSC8JITStubs10cti_op_neqEPPv
-__ZN3JSC14ExecutablePool13systemReleaseERKNS0_10AllocationE
-__ZN3WTF6VectorItLm0EE14expandCapacityEmPKt
-__ZNK3JSC10NumberNode8isNumberEv
-__ZNK3JSC14ExpressionNode10isLocationEv
-__ZN3WTF6VectorIPN3JSC10RegisterIDELm32EE14expandCapacityEm
-__ZNK3JSC11BooleanNode6isPureERNS_17BytecodeGeneratorE
-__ZN3JSC4Yarr13newlineCreateEv
-__ZN3JSC12X86Assembler23X86InstructionFormatter15emitRexIfNeededEiii
-__ZN3JSC12X86Assembler23X86InstructionFormatter11memoryModRMEiNS_3X8610RegisterIDES3_ii
-__ZN3JSC17TypeOfResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator15emitResolveBaseEPNS_10RegisterIDERKNS_10IdentifierE
-__ZN3JSC17BytecodeGenerator20emitLoadGlobalObjectEPNS_10RegisterIDEPNS_8JSObjectE
-__ZN3WTF6VectorIN3JSC7JSValueELm0EE14expandCapacityEm
-__ZNK3JSC7AddNode5isAddEv
-__ZN3JSC12BinaryOpNode10emitStrcatERNS_17BytecodeGeneratorEPNS_10RegisterIDES4_PNS_21ReadModifyResolveNodeE
-__ZNK3JSC10StringNode8isStringEv
-__ZNK3JSC14ExpressionNode8isStringEv
-__ZN3JSC17BytecodeGenerator10emitStrcatEPNS_10RegisterIDES2_i
-__ZN3JSC4Yarr12spacesCreateEv
-__ZN3JSC4Yarr15nonspacesCreateEv
-__ZN3JSC8WithNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator13emitPushScopeEPNS_10RegisterIDE
-__ZN3JSC23MacroAssemblerX86Common4moveENS_22AbstractMacroAssemblerINS_12X86AssemblerEE5Imm32ENS_3X8610RegisterIDE
-__ZN3JSC14MacroAssembler4peekENS_3X8610RegisterIDEi
-__ZN3JSC4Yarr14RegexGenerator12atEndOfInputEv
-__ZN3JSC22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpList6linkToENS2_5LabelEPS2_
-__ZN3JSC14MacroAssembler4pokeENS_3X8610RegisterIDEi
-__ZN3JSC21FunctionCallValueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC9ArrayNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator12emitNewArrayEPNS_10RegisterIDEPNS_11ElementNodeE
-__ZN3JSC23CallFunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator25emitJumpIfNotFunctionCallEPNS_10RegisterIDEPNS_5LabelE
-__ZN3JSC4Yarr14RegexGenerator29generateAssertionWordBoundaryERNS1_19TermGenerationStateE
-__ZN3JSC4Yarr14RegexGenerator22matchAssertionWordcharERNS1_19TermGenerationStateERNS_22AbstractMacroAssemblerINS_12X86Assembler
-__ZN3WTF6VectorIPN3JSC4Yarr18PatternDisjunctionELm4EE14expandCapacityEm
-__ZL14compileBracketiPiPPhPPKtS3_P9ErrorCodeiS_S_R11CompileData
-__ZN3JSC9ThrowNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC9CommaNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3WTF9HashTableIdSt4pairIdN3JSC7JSValueEENS_18PairFirstExtractorIS4_EENS_9FloatHashIdEENS_14PairHashTraitsINS_10HashTraitsId
-__ZN3JSC17TypeOfResolveNodeD0Ev
-__ZN3JSC18NotStrictEqualNodeD0Ev
-__ZN3JSC8WithNodeD0Ev
-__ZN3JSC21FunctionCallValueNodeD0Ev
-__ZN3JSC9ArrayNodeD0Ev
-__ZN3JSC11ElementNodeD0Ev
-__ZN3JSC23CallFunctionCallDotNodeD0Ev
-__ZN3JSC9ThrowNodeD0Ev
-__ZN3JSC9CommaNodeD0Ev
-__ZN3JSC3JIT23emit_op_unexpected_loadEPNS_11InstructionE
-__ZN3JSC3JIT20emit_op_to_primitiveEPNS_11InstructionE
-__ZN3JSC3JIT14emit_op_strcatEPNS_11InstructionE
-__ZN3JSC3JIT17emit_op_nstricteqEPNS_11InstructionE
-__ZN3JSC3JIT18emit_op_push_scopeEPNS_11InstructionE
-__ZN3JSC3JIT17emit_op_new_arrayEPNS_11InstructionE
-__ZN3JSC3JIT16emit_op_jneq_ptrEPNS_11InstructionE
-__ZN3JSC3JIT13emit_op_throwEPNS_11InstructionE
-__ZN3JSC3JIT14emit_op_jnlessEPNS_11InstructionE
-__ZN3JSC3JIT24emitSlow_op_to_primitiveEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT21emitSlow_op_nstricteqEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT18emitSlow_op_jnlessEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZL15makePostfixNodePvPN3JSC14ExpressionNodeENS0_8OperatorEiii
-__ZN3JSC18PostfixResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC18PostfixResolveNodeD0Ev
-__ZN3JSC8JITStubs22cti_op_call_arityCheckEPPv
-__ZN3JSC19FunctionConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL32constructWithFunctionConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC17constructFunctionEPNS_9ExecStateERKNS_7ArgListERKNS_10IdentifierERKNS_7UStringEi
-__ZN3JSCplERKNS_7UStringES2_
-__ZN3JSC7UString6appendERKS0_
-__ZN3JSC7UString17expandPreCapacityEi
-__ZN3WTF11fastReallocILb0EEEPvS1_m
-__ZN3JSC14JSGlobalObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZL11makeDivNodePvPN3JSC14ExpressionNodeES2_b
-__ZL12makeMultNodePvPN3JSC14ExpressionNodeES2_b
-__ZN3JSC9WhileNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC7ModNodeD0Ev
-__ZN3JSC7DivNodeD0Ev
-__ZN3JSC8MultNodeD0Ev
-__ZN3JSC9WhileNodeD0Ev
-__ZN3JSC3JIT11emit_op_modEPNS_11InstructionE
-__ZN3JSC3JIT11emit_op_mulEPNS_11InstructionE
-__ZN3JSC3JIT20emit_op_loop_if_trueEPNS_11InstructionE
-__ZN3JSC3JIT15emitSlow_op_modEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT15emitSlow_op_mulEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT24emitSlow_op_loop_if_trueEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSCL26stringProtoFuncLastIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7JSValue20toIntegerPreserveNaNEPNS_9ExecStateE
-__ZN3JSC8JITStubs10cti_op_divEPPv
-__ZN3JSC3JIT22emit_op_loop_if_lesseqEPNS_11InstructionE
-__ZN3JSC3JIT26emitSlow_op_loop_if_lesseqEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs13cti_op_lesseqEPPv
-__ZN3JSCL20stringProtoFuncSplitEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC19constructEmptyArrayEPNS_9ExecStateE
-__ZN3JSC7JSArray3putEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC7JSArray11putSlowCaseEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC14ArrayPrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL18arrayProtoFuncJoinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF7HashSetIPN3JSC8JSObjectENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEE3addERKS3_
-__ZN3WTF9HashTableIPN3JSC8JSObjectES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
-__ZN3WTF6VectorItLm256EE6appendItEEvPKT_m
-__ZN3WTF6VectorItLm256EE14expandCapacityEm
-__ZN3WTF6VectorIPN3JSC12CallLinkInfoELm0EE15reserveCapacityEm
-__ZN3JSC4Heap7collectEv
-__ZN3JSC4Heap30markStackObjectsConservativelyEv
-__ZN3JSC4Heap31markCurrentThreadConservativelyEv
-__ZN3JSC4Heap39markCurrentThreadConservativelyInternalEv
-__ZN3JSC4Heap18markConservativelyEPvS1_
-__ZN3JSC7JSArray4markEv
-__ZN3JSC8JSObject4markEv
-__ZN3JSC10JSFunction4markEv
-__ZN3JSC6JSCell4markEv
-__ZN3JSC14JSGlobalObject4markEv
-__ZN3JSC15JSWrapperObject4markEv
-__ZN3JSC18GlobalEvalFunction4markEv
-__ZN3JSC16FunctionBodyNode4markEv
-__ZN3JSC9CodeBlock4markEv
-__ZN3JSC4Heap20markProtectedObjectsEv
-__ZN3JSC12SmallStrings4markEv
-__ZN3JSC4Heap5sweepILNS_8HeapTypeE0EEEmv
-__ZN3JSC14JSGlobalObjectD2Ev
-__ZN3JSC17FunctionPrototypeD1Ev
-__ZN3JSC15ObjectPrototypeD1Ev
-__ZN3JSC14ArrayPrototypeD1Ev
-__ZN3JSC15StringPrototypeD1Ev
-__ZN3JSC16BooleanPrototypeD1Ev
-__ZN3JSC15NumberPrototypeD1Ev
-__ZN3JSC13DatePrototypeD1Ev
-__ZN3JSC12DateInstanceD2Ev
-__ZN3JSC15RegExpPrototypeD1Ev
-__ZN3JSC14ErrorPrototypeD1Ev
-__ZN3JSC20NativeErrorPrototypeD1Ev
-__ZN3JSC17ObjectConstructorD1Ev
-__ZN3JSC19FunctionConstructorD1Ev
-__ZN3JSC16ArrayConstructorD1Ev
-__ZN3JSC17StringConstructorD1Ev
-__ZN3JSC18BooleanConstructorD1Ev
-__ZN3JSC17NumberConstructorD1Ev
-__ZN3JSC15DateConstructorD1Ev
-__ZN3JSC17RegExpConstructorD1Ev
-__ZN3JSC16ErrorConstructorD1Ev
-__ZN3JSC22NativeErrorConstructorD1Ev
-__ZN3JSC10MathObjectD1Ev
-__ZN3JSC18GlobalEvalFunctionD1Ev
-__ZN3JSC8JSObjectD1Ev
+__ZN3JSC4Yarr14RegexGenerator24matchCharacterClassRangeENS_12X86Registers10RegisterIDERNS_22AbstractMacroAssemblerINS_12X86Asse
+__ZN3WTF6VectorIN3JSC22AbstractMacroAssemblerINS1_12X86AssemblerEE4JumpELm16EED1Ev
+_cti_op_nstricteq
+__ZN3JSC27ctiPatchCallByReturnAddressEPNS_9CodeBlockENS_16ReturnAddressPtrENS_21MacroAssemblerCodePtrE
+__ZNK3JSC8JSString9toBooleanEPNS_9ExecStateE
+_cti_op_is_string
+__ZN3JSCL16mathProtoFuncPowEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_get_by_id_proto_list_full
+_cti_op_put_by_id_transition_realloc
+_cti_timeout_check
+__ZN3JSC14TimeoutChecker10didTimeOutEPNS_9ExecStateE
+_cti_op_mul
+__ZN3JSC4Yarr23RegexPatternConstructor25atomBuiltInCharacterClassENS0_23BuiltInCharacterClassIDEb
+_cti_op_convert_this
+__ZNK3JSC8JSString12toThisObjectEPNS_9ExecStateE
+__ZN3JSC12StringObjectC1EN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS_8JSStringE
+__ZN3JSCL22stringProtoFuncReplaceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC7JSValue12toThisObjectEPNS_9ExecStateE
+__ZNK3JSC8JSObject8toStringEPNS_9ExecStateE
+__ZNK3JSC8JSObject11toPrimitiveEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
+__ZNK3JSC8JSObject12defaultValueEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
+__ZN3JSCL23stringProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC12StringObject9classInfoEv
+__ZN7WebCore10StringImpl4costEv
+__ZN3JSC6JSCell11getCallDataERNS_8CallDataE
+__ZN3JSC24charCodeAtThunkGeneratorEPNS_12JSGlobalDataEPNS_14ExecutablePoolE
+__ZN3JSCL14stringCharLoadERNS_19SpecializedThunkJITE
+_cti_op_get_by_val_string
+_JSValueToBoolean
+_JSValueUnprotect
+_JSGlobalContextRelease
+__ZN3JSC4Heap17collectAllGarbageEv
+__ZN3JSC4Heap9markRootsEv
+__ZN3JSC4Heap30markStackObjectsConservativelyERNS_9MarkStackE
+__ZN3JSC4Heap31markCurrentThreadConservativelyERNS_9MarkStackE
+__ZN3JSC4Heap39markCurrentThreadConservativelyInternalERNS_9MarkStackE
+__ZN3JSC4Heap18markConservativelyERNS_9MarkStackEPvS3_
+__ZN3JSC9MarkStack12markChildrenEPNS_6JSCellE
+__ZN3JSC8JSObject12markChildrenERNS_9MarkStackE
+__ZN3JSC14JSGlobalObject12markChildrenERNS_9MarkStackE
+__ZN3JSCL12markIfNeededERNS_9MarkStackENS_7JSValueE
+__ZN3JSC10JSFunction12markChildrenERNS_9MarkStackE
+__ZN3JSC18FunctionExecutable13markAggregateERNS_9MarkStackE
+__ZN3JSC12JSActivation12markChildrenERNS_9MarkStackE
+__ZN3JSC15JSWrapperObject12markChildrenERNS_9MarkStackE
+__ZN3JSC7JSArray12markChildrenERNS_9MarkStackE
+__ZN3JSC18GlobalEvalFunction12markChildrenERNS_9MarkStackE
+__ZN3JSC9CodeBlock13markAggregateERNS_9MarkStackE
+__ZN3JSC16JSCallbackObjectINS_8JSObjectEE12markChildrenERNS_9MarkStackE
+__ZN3JSC19JSStaticScopeObject12markChildrenERNS_9MarkStackE
+__ZN3JSC9MarkStack12releaseStackEPvm
+__ZN3JSC4Heap20markProtectedObjectsERNS_9MarkStackE
+__ZN3JSC12SmallStrings12markChildrenERNS_9MarkStackE
+__ZN3JSC9MarkStack7compactEv
+__ZN3JSC4Heap5sweepEv
+__ZN3JSC12DateInstanceD1Ev
__ZN3JSC9CodeBlock13unlinkCallersEv
-__ZN3WTF6VectorINS_6RefPtrIN3JSC6RegExpEEELm0EE6shrinkEm
+__ZN3JSC8JSObjectD1Ev
__ZN3JSC12JSActivationD1Ev
__ZN3JSC12JSActivationD2Ev
-__ZN3JSC12RegExpObjectD1Ev
-__ZN3JSC18RegExpMatchesArrayD1Ev
-__ZN3JSC4Heap5sweepILNS_8HeapTypeE1EEEmv
-__ZN3JSC20globalFuncParseFloatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF17TCMalloc_PageHeap3NewEm
-__ZN3JSC8JITStubs28cti_op_construct_JSConstructEPPv
-__ZN3JSC8JSObject17createInheritorIDEv
-__ZNK3JSC19BracketAccessorNode10isLocationEv
-__ZNK3JSC19BracketAccessorNode21isBracketAccessorNodeEv
-__ZN3JSC17AssignBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator12emitPutByValEPNS_10RegisterIDES2_S2_
-__ZN3JSC14PostfixDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17ReadModifyDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17AssignBracketNodeD0Ev
-__ZN3JSC14PostfixDotNodeD0Ev
-__ZN3JSC17ReadModifyDotNodeD0Ev
-__ZN3JSC3JIT18emit_op_put_by_valEPNS_11InstructionE
-__ZN3JSC3JIT22emitSlow_op_put_by_valEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC16ArrayConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL29constructWithArrayConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSCL27constructArrayWithSizeQuirkEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC8JITStubs23cti_op_put_by_val_arrayEPPv
-__ZN3JSC8JITStubs13cti_op_strcatEPPv
-__ZN3JSC7UString3Rep15reserveCapacityEi
-__ZN3JSC7UString13appendNumericEi
-__ZN3JSC11concatenateEPNS_7UString3RepEi
-__ZN3JSC12JSActivation18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL18stringFromCharCodeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC16globalFuncEscapeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL26stringProtoFuncToUpperCaseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12JSActivation14isDynamicScopeEv
+__ZN3JSC12StringObjectD1Ev
+__ZN3JSC4Heap12resizeBlocksEv
+__ZN3WTF6VectorIPN3JSC14ExpressionNodeELm8EE14expandCapacityEm
+__ZN3WTF6VectorIPNS0_IN3JSC10RegisterIDELm32EEELm32EE14expandCapacityEmPKS4_
+__ZN3WTF6VectorIPNS0_IN3JSC10RegisterIDELm32EEELm32EE15reserveCapacityEm
+__ZN3JSC8VoidNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC4Yarr6ParserINS0_23RegexPatternConstructorEE15parseQuantifierEbjj
+__ZN3JSC4Yarr14RegexGenerator19TermGenerationState15jumpToBacktrackERNS_22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpListEP
+__ZN3JSC4Yarr14wordcharCreateEv
+__ZN3JSC4Yarr12digitsCreateEv
+__ZN3JSC4Yarr14RegexGenerator30generatePatternCharacterGreedyERNS1_19TermGenerationStateE
__ZN3WTF6VectorINS_6RefPtrIN3JSC10RegisterIDEEELm16EE14expandCapacityEm
-__ZN3JSC17ObjectConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL30constructWithObjectConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC8JITStubs17cti_op_put_by_valEPPv
-__ZN3JSC15DateConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL28constructWithDateConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC13constructDateEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC8JITStubs18cti_op_is_functionEPPv
-__ZN3JSC16jsIsFunctionTypeENS_7JSValueE
-__ZN3JSC10Identifier5equalEPKNS_7UString3RepEPKc
-__ZN3JSC11JSImmediate8toStringENS_7JSValueE
-__ZN3JSC7UString4fromEi
-__ZN3JSC7UString3Rep11computeHashEPKti
-__ZNK3JSC8NullNode6isNullEv
-__ZN3JSC9BreakNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator11breakTargetERKNS_10IdentifierE
-__ZN3JSC9BreakNodeD0Ev
-__ZN3JSC3JIT15emit_op_eq_nullEPNS_11InstructionE
-__ZN3JSC8JITStubs19cti_op_is_undefinedEPPv
-__ZN3JSC12JSActivation4markEv
-__ZN3JSC12DateInstanceD1Ev
-__ZNK3JSC18EmptyStatementNode16isEmptyStatementEv
-__ZN3JSC18EmptyStatementNodeD0Ev
-__ZN3JSC3JIT15emit_op_pre_decEPNS_11InstructionE
-__ZN3JSC3JIT19emitSlow_op_pre_decEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3WTF13tryFastMallocEm
-__ZN3JSC8JITStubs17cti_timeout_checkEPPv
-__ZN3JSC14TimeoutChecker10didTimeOutEPNS_9ExecStateE
-__ZN3JSC8JITStubs14cti_op_pre_decEPPv
-__ZN3JSC13jsAddSlowCaseEPNS_9ExecStateENS_7JSValueES2_
-__ZNK3JSC8JSString11toPrimitiveEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
-__ZNK3JSC8JSObject11toPrimitiveEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
-__ZNK3JSC8JSObject12defaultValueEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
-__ZN3JSCL22objectProtoFuncValueOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL25functionProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC10JSFunction9classInfoEv
-__ZNK3JSC21UStringSourceProvider8getRangeEii
-__ZNK3JSC7UString6substrEii
-__ZN3JSC8JITStubs26cti_op_get_by_id_self_failEPPv
-__ZN3JSC3JIT29privateCompileGetByIdSelfListEPNS_17StructureStubInfoEPNS_30PolymorphicAccessStructureListEiPNS_9StructureEm
-__ZN3JSC8JITStubs16cti_op_nstricteqEPPv
-__ZN3JSC9ForInNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator20emitNextPropertyNameEPNS_10RegisterIDES2_PNS_5LabelE
-__ZN3JSC9ForInNodeD0Ev
-__ZN3JSC3JIT18emit_op_next_pnameEPNS_11InstructionE
-__ZN3JSC8JITStubs17cti_op_get_pnamesEPPv
-__ZN3JSC8JSObject16getPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayE
-__ZN3JSC9Structure26getEnumerablePropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayEPNS_8JSObjectE
-__ZN3JSC9Structure35getEnumerableNamesFromPropertyTableERNS_17PropertyNameArrayE
-__ZN3JSC8JITStubs17cti_op_next_pnameEPPv
-__ZN3JSC13jsOwnedStringEPNS_12JSGlobalDataERKNS_7UStringE
-__ZN3JSC22JSPropertyNameIterator10invalidateEv
+___initializeScavenger_block_invoke_1
+__Z22TCMalloc_SystemReleasePvm
+__Z21TCMalloc_SystemCommitPvm
+__ZNK3JSC7JSValue20toThisObjectSlowCaseEPNS_9ExecStateE
+__ZN3JSCL19mathProtoFuncRandomEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC7JSValue19synthesizePrototypeEPNS_9ExecStateE
+__ZN3JSCL23numberProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC17NumberConstructor18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSCL25numberConstructorMaxValueEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC18sqrtThunkGeneratorEPNS_12JSGlobalDataEPNS_14ExecutablePoolE
+__ZN3JSC3JIT26privateCompileGetByIdProtoEPNS_17StructureStubInfoEPNS_9StructureES4_RKNS_10IdentifierERKNS_12PropertySlotEmNS_16
+__ZN3JSC8JSObject3putEPNS_9ExecStateEjNS_7JSValueE
+__ZN3JSC10Identifier4fromEPNS_9ExecStateEj
+__ZN3JSC4Yarr13newlineCreateEv
+__ZN3JSC4Yarr14RegexGenerator29generateAssertionWordBoundaryERNS1_19TermGenerationStateE
+__ZN3JSC4Yarr14RegexGenerator22matchAssertionWordcharERNS1_19TermGenerationStateERNS_22AbstractMacroAssemblerINS_12X86Assembler
+__ZN3JSCL26stringProtoFuncToLowerCaseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL22stringProtoFuncIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC7UString4findERKS0_j
+__ZN3JSC17RegExpConstructor18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC17RegExpConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSCL30constructWithRegExpConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
+__ZN3JSC15constructRegExpEPNS_9ExecStateERKNS_7ArgListE
+__ZNK3JSC6JSCell9classInfoEv
+__ZN3JSCL19regExpProtoFuncExecEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC12RegExpObject4execEPNS_9ExecStateERKNS_7ArgListE
+__ZNK3JSC17RegExpConstructor14arrayOfMatchesEPNS_9ExecStateE
+__ZN3JSC18RegExpMatchesArrayC2EPNS_9ExecStateEPNS_24RegExpConstructorPrivateE
+__ZN3JSC7JSArrayC2EN3WTF17NonNullPassRefPtrINS_9StructureEEEj
+__ZN3JSC7JSArray15setSubclassDataEPv
+__ZN3JSCL24regExpConstructorDollar1EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZNK3JSC17RegExpConstructor10getBackrefEPNS_9ExecStateEj
+__ZN3JSC20globalFuncParseFloatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_put_by_id_generic
+__ZN3JSC17BytecodeGenerator29uncheckedRegisterForArgumentsEv
__ZN3JSC3JIT22emit_op_init_argumentsEPNS_11InstructionE
__ZN3JSC3JIT24emit_op_create_argumentsEPNS_11InstructionE
-__ZN3JSC8JITStubs33cti_op_create_arguments_no_paramsEPPv
-__ZN3JSC9Arguments18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC3JIT16emit_op_post_decEPNS_11InstructionE
-__ZN3JSC3JIT20emitSlow_op_post_decEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs15cti_op_post_decEPPv
-__ZN3JSC9Arguments18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC17RegExpConstructor18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC17RegExpConstructor3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC6JSCell11getCallDataERNS_8CallDataE
-__ZN3JSC10JSFunction3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC8JITStubs16cti_op_new_arrayEPPv
-__ZN3JSC14constructArrayEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSCL18arrayProtoFuncPushEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL30comparePropertyMapEntryIndicesEPKvS1_
-__ZN3WTF6VectorIN3JSC10IdentifierELm20EE15reserveCapacityEm
-__ZN3JSC12StringObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC8JITStubs17cti_op_push_scopeEPPv
-__ZN3JSC8JITStubs14cti_op_resolveEPPv
-__ZN3JSC8JITStubs16cti_op_pop_scopeEPPv
-__ZN3JSC3JIT31privateCompilePutByIdTransitionEPNS_17StructureStubInfoEPNS_9StructureES4_mPNS_14StructureChainENS_22AbstractMacr
-__ZN3JSC20MacroAssemblerX86_649branchPtrENS_23MacroAssemblerX86Common9ConditionENS_22AbstractMacroAssemblerINS_12X86AssemblerEE
-__ZN3JSC3JIT19patchPutByIdReplaceEPNS_17StructureStubInfoEPNS_9StructureEmNS_22AbstractMacroAssemblerINS_12X86AssemblerEE22Proc
-__ZN3JSC17NumberConstructor18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC8JITStubs16cti_op_is_stringEPPv
-__ZN3JSC8JITStubs19cti_op_convert_thisEPPv
-__ZNK3JSC8JSString12toThisObjectEPNS_9ExecStateE
-__ZN3JSCL22stringProtoFuncReplaceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC12StringObject14toThisJSStringEPNS_9ExecStateE
-__ZN3JSCL21arrayProtoFuncForEachEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC11Interpreter20prepareForRepeatCallEPNS_16FunctionBodyNodeEPNS_9ExecStateEPNS_10JSFunctionEiPNS_14ScopeChainNodeEPNS_7J
-__ZN3JSC3JIT16emit_op_post_incEPNS_11InstructionE
-__ZN3JSC3JIT20emitSlow_op_post_incEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC11Interpreter7executeERNS_16CallFrameClosureEPNS_7JSValueE
-__ZN3JSC10MathObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC11Interpreter13endRepeatCallERNS_16CallFrameClosureE
-__ZN3JSCL21resizePropertyStorageEPNS_8JSObjectEii
-__ZN3JSC8JSObject23allocatePropertyStorageEmm
-__ZN3JSC14ExecutablePool12poolAllocateEm
-__ZN3JSC9Arguments4markEv
-__ZN3JSC22JSPropertyNameIterator4markEv
-__ZN3JSC3JIT10unlinkCallEPNS_12CallLinkInfoE
-__ZN3JSC22JSPropertyNameIteratorD1Ev
-__ZN3JSC9ArgumentsD1Ev
-__ZN3JSC9ArgumentsD2Ev
-__ZN3JSC12StringObjectD1Ev
-__ZN3WTF6VectorIPN3JSC9StructureELm8EE14expandCapacityEmPKS3_
-__ZN3WTF6VectorIPN3JSC9StructureELm8EE15reserveCapacityEm
+__ZN3JSC20MacroAssemblerX86_6413branchTestPtrENS_23MacroAssemblerX86Common9ConditionENS_22AbstractMacroAssemblerINS_12X86Assemb
+_cti_op_load_varargs
+__ZN3JSC3JIT19patchPutByIdReplaceEPNS_9CodeBlockEPNS_17StructureStubInfoEPNS_9StructureEmNS_16ReturnAddressPtrE
+__ZN3JSC21ThrowableBinaryOpNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSCL20stringProtoFuncSplitEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_in
__ZN3JSCL19arrayProtoFuncShiftEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL11getPropertyEPNS_9ExecStateEPNS_8JSObjectEj
+__ZNK3JSC7JSValue8toUInt32EPNS_9ExecStateE
+__ZN3JSC7JSArray18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
__ZN3JSC7JSArray14deletePropertyEPNS_9ExecStateEj
+__ZN3JSC7JSArray3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
__ZN3JSC7JSArray9setLengthEj
-__ZN3JSC7UString6appendEPKc
-__ZN3JSC8JITStubs23cti_op_create_argumentsEPPv
+__ZN3JSC23CallFunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator25emitJumpIfNotFunctionCallEPNS_10RegisterIDEPNS_5LabelE
+_cti_op_create_arguments
+__ZN3JSC9Arguments18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC3JIT30privateCompileGetByIdProtoListEPNS_17StructureStubInfoEPNS_30PolymorphicAccessStructureListEiPNS_9StructureES6_RK
+__ZN3JSC9ForInNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator20emitGetPropertyNamesEPNS_10RegisterIDES2_S2_S2_PNS_5LabelE
+__ZN3WTF6VectorIN3JSC12ForInContextELm0EE15reserveCapacityEm
+__ZN3JSC17BytecodeGenerator20emitNextPropertyNameEPNS_10RegisterIDES2_S2_S2_S2_PNS_5LabelE
+__ZN3JSC3JIT18emit_op_get_pnamesEPNS_11InstructionE
+__ZN3JSC3JIT20emit_op_get_by_pnameEPNS_11InstructionE
+__ZN3JSC3JIT22compileGetDirectOffsetENS_12X86Registers10RegisterIDES2_S2_S2_S2_
+__ZN3JSC3JIT18emit_op_next_pnameEPNS_11InstructionE
+__ZN3JSC3JIT24emitSlow_op_get_by_pnameEPNS_11InstructionERPNS_13SlowCaseEntryE
+_cti_op_get_pnames
+__ZN3JSC22JSPropertyNameIterator6createEPNS_9ExecStateEPNS_8JSObjectE
+__ZN3JSC8JSObject16getPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZN3JSC8JSObject19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZN3JSC9Structure16getPropertyNamesERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZN3JSCL29objectProtoFuncHasOwnPropertyEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC8JSObject14hasOwnPropertyEPNS_9ExecStateERKNS_10IdentifierE
+__ZN3JSC8JSObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+_cti_op_typeof
+__ZN3JSC20jsTypeStringForValueEPNS_9ExecStateENS_7JSValueE
+__ZN3JSC9Structure27despecifyFunctionTransitionEPS0_RKNS_10IdentifierE
+_cti_has_property
+__ZN3JSC3JIT26emit_op_tear_off_argumentsEPNS_11InstructionE
__ZN3JSCL19arrayProtoFuncSliceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7JSValue9toIntegerEPNS_9ExecStateE
-__ZN3JSC24ApplyFunctionCallDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZNK3JSC14ExpressionNode13isSimpleArrayEv
-__ZN3JSC17BytecodeGenerator26emitJumpIfNotFunctionApplyEPNS_10RegisterIDEPNS_5LabelE
-__ZN3JSC17BytecodeGenerator15emitCallVarargsEPNS_10RegisterIDES2_S2_S2_jjj
-__ZN3JSC24ApplyFunctionCallDotNodeD0Ev
-__ZN3JSC3JIT20emit_op_load_varargsEPNS_11InstructionE
-__ZN3JSC3JIT20emit_op_call_varargsEPNS_11InstructionE
-__ZN3JSC3JIT20compileOpCallVarargsEPNS_11InstructionE
-__ZN3JSC3JIT29compileOpCallVarargsSetupArgsEPNS_11InstructionE
-__ZN3JSC3JIT24emitSlow_op_call_varargsEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC3JIT28compileOpCallVarargsSlowCaseEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs19cti_op_load_varargsEPPv
-__ZNK3JSC7JSArray9classInfoEv
-__ZN3JSC7JSArray15copyToRegistersEPNS_9ExecStateEPNS_8RegisterEj
-__ZNK3JSC7UString30spliceSubstringsWithSeparatorsEPKNS0_5RangeEiPKS0_i
+__ZN3JSCL11getPropertyEPNS_9ExecStateEPNS_8JSObjectEj
+__ZN3JSC9Arguments18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
+_cti_op_tear_off_arguments
+__ZN3JSC10JSFunction19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZNK3JSC10JSFunction9classInfoEv
+__ZN3JSC17DeleteBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator15emitDeleteByValEPNS_10RegisterIDES2_S2_
__ZN3JSC8JSObject18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC8JSObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC7UString4fromEd
-__ZN3WTF4dtoaEPcdiPiS1_PS0_
-__ZN3JSC8JITStubs21cti_op_put_by_id_failEPPv
-__ZN3JSC13DeleteDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator14emitDeleteByIdEPNS_10RegisterIDES2_RKNS_10IdentifierE
-__ZN3JSC13DeleteDotNodeD0Ev
-__ZN3JSC3JIT17emit_op_del_by_idEPNS_11InstructionE
-__ZN3JSC8JITStubs16cti_op_del_by_idEPPv
-__ZN3JSC10JSFunction14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
+_cti_op_del_by_val
__ZN3JSC8JSObject14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZNK3JSC7ArgList8getSliceEiRS0_
-__ZN3JSC3JIT26emit_op_tear_off_argumentsEPNS_11InstructionE
-__ZN3JSC8JITStubs25cti_op_tear_off_argumentsEPPv
-__ZNK3JSC12StringObject12toThisStringEPNS_9ExecStateE
-__ZN3JSC13PrefixDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC13PrefixDotNodeD0Ev
-__ZNK3JSC8JSObject8toStringEPNS_9ExecStateE
-__ZN3JSCL22arrayProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL21arrayProtoFuncIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC16ErrorConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL29constructWithErrorConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC14constructErrorEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSCL21stringProtoFuncCharAtEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs32cti_op_get_by_id_proto_list_fullEPPv
-__ZN3JSC14InstanceOfNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator14emitInstanceOfEPNS_10RegisterIDES2_S2_S2_
-__ZN3JSC14InstanceOfNodeD0Ev
-__ZN3JSC3JIT18emit_op_instanceofEPNS_11InstructionE
-__ZN3JSC3JIT22emitSlow_op_instanceofEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC12X86Assembler6orl_irEiNS_3X8610RegisterIDE
-__ZN3JSC17RegExpConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL30constructWithRegExpConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC15constructRegExpEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC13DatePrototype18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL20dateProtoFuncGetTimeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12DateInstance9classInfoEv
-__ZN3JSC12RegExpObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL19regExpProtoFuncTestEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC12RegExpObject5matchEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSC3JIT18emit_op_jmp_scopesEPNS_11InstructionE
-__ZN3JSC3JIT30privateCompileGetByIdChainListEPNS_17StructureStubInfoEPNS_30PolymorphicAccessStructureListEiPNS_9StructureEPNS_1
-__ZN3JSC18globalFuncUnescapeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7UString6appendEt
-__ZN3JSC8JSObject3putEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC17PropertyNameArray3addEPNS_7UString3RepE
-__ZN3WTF7HashSetIPN3JSC7UString3RepENS_7PtrHashIS4_EENS_10HashTraitsIS4_EEE3addERKS4_
-__ZN3WTF9HashTableIPN3JSC7UString3RepES4_NS_17IdentityExtractorIS4_EENS_7PtrHashIS4_EENS_10HashTraitsIS4_EESA_E6rehashEi
-__ZN3WTF6VectorIN3JSC10IdentifierELm20EE14expandCapacityEm
-__ZN3JSCL20arrayProtoFuncConcatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC9ArrayNode13isSimpleArrayEv
-__ZN3JSC8JITStubs10cti_op_mulEPPv
-__ZN3JSC8JITStubs16cti_op_is_objectEPPv
-__ZN3JSC14jsIsObjectTypeENS_7JSValueE
-__ZNK3JSC11Interpreter18retrieveLastCallerEPNS_9ExecStateERiRlRNS_7UStringERNS_7JSValueE
-__ZN3JSC9CodeBlock34reparseForExceptionInfoIfNecessaryEPNS_9ExecStateE
-__ZNK3JSC10ScopeChain10localDepthEv
-__ZNK3JSC12JSActivation9classInfoEv
-__ZN3JSC6Parser7reparseINS_16FunctionBodyNodeEEEN3WTF10PassRefPtrIT_EEPNS_12JSGlobalDataEPS5_
-__ZN3JSC16FunctionBodyNode6createEPNS_12JSGlobalDataEPNS_14SourceElementsEPN3WTF6VectorISt4pairINS_10IdentifierEjELm0EEEPNS6_IP
-__ZN3JSC13StatementNode6setLocEii
-__ZN3JSC16FunctionBodyNode14copyParametersEv
-__ZN3JSC16FunctionBodyNode13finishParsingEPNS_10IdentifierEm
-__ZN3JSC16FunctionBodyNode31bytecodeForExceptionInfoReparseEPNS_14ScopeChainNodeEPNS_9CodeBlockE
-__ZN3JSC9CodeBlock36hasGlobalResolveInfoAtBytecodeOffsetEj
-__ZN3JSC9CodeBlock27lineNumberForBytecodeOffsetEPNS_9ExecStateEj
-__ZN3WTF6VectorIPvLm0EE14expandCapacityEmPKS1_
-__ZN3WTF6VectorIPvLm0EE15reserveCapacityEm
-__ZN3JSC3JIT16emit_op_jeq_nullEPNS_11InstructionE
-__ZN3JSC8JITStubs16cti_op_is_numberEPPv
-__ZN3JSCL23stringProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12StringObject9classInfoEv
-__ZN3JSC8JITStubs28cti_op_get_by_id_string_failEPPv
-__ZN3JSC11JSImmediate9prototypeENS_7JSValueEPNS_9ExecStateE
-__ZN3JSCL23numberProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC3JIT16emit_op_neq_nullEPNS_11InstructionE
+_cti_op_is_boolean
+__ZN3JSC10SwitchNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC13CaseBlockNode20emitBytecodeForBlockERNS_17BytecodeGeneratorEPNS_10RegisterIDES4_
+__ZN3JSC13CaseBlockNode18tryOptimizedSwitchERN3WTF6VectorIPNS_14ExpressionNodeELm8EEERiS7_
+__ZN3JSCL17processClauseListEPNS_14ClauseListNodeERN3WTF6VectorIPNS_14ExpressionNodeELm8EEERNS_10SwitchKindERbRiSB_
+__ZN3JSC17BytecodeGenerator11beginSwitchEPNS_10RegisterIDENS_10SwitchInfo10SwitchTypeE
+__ZN3WTF6VectorIN3JSC10SwitchInfoELm0EE14expandCapacityEm
+__ZN3JSC17BytecodeGenerator9endSwitchEjPN3WTF6RefPtrINS_5LabelEEEPPNS_14ExpressionNodeEPS3_ii
+__ZN3WTF6VectorIN3JSC15SimpleJumpTableELm0EE14expandCapacityEm
+__ZNK3JSC5Label4bindEii
+__ZN3JSC3JIT18emit_op_switch_immEPNS_11InstructionE
+__ZN3WTF6VectorIN3JSC12SwitchRecordELm0EE14expandCapacityEm
+__ZN3WTF6VectorIN3JSC17CodeLocationLabelELm0EE4growEm
+_cti_op_switch_imm
__ZN3JSC4Yarr23RegexPatternConstructor8copyTermERNS0_11PatternTermE
-__ZL17bracketIsAnchoredPKh
+__ZL14compileBracketiPiPPhPPKtS3_P9ErrorCodeiS_S_R11CompileData
__ZL32branchFindFirstAssertedCharacterPKhb
__ZL20branchNeedsLineStartPKhjj
+__ZL17bracketIsAnchoredPKh
+_cti_op_get_by_id_array_fail
+__ZN3JSC17PropertyNameArray3addEPN7WebCore10StringImplE
+__ZN3WTF7HashSetIPN7WebCore10StringImplENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEE3addERKS3_
+__ZN3WTF9HashTableIPN7WebCore10StringImplES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
+__ZN3WTF6VectorIN3JSC10IdentifierELm20EE14expandCapacityEm
+__ZN3JSCL28numberConstructorPosInfinityEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL28numberConstructorNegInfinityEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC4Heap5resetEv
+__ZN3JSC9Arguments12markChildrenERNS_9MarkStackE
+__ZN3JSC22JSPropertyNameIterator12markChildrenERNS_9MarkStackE
+__ZN3JSC3JIT10unlinkCallEPNS_12CallLinkInfoE
+__ZN3JSC9LabelNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC12RegExpObjectD1Ev
+__ZN3JSC18RegExpMatchesArrayD1Ev
+__ZNK3JSC7JSArray12subclassDataEv
+__ZN3JSC15ObjectPrototype18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
+__ZN3JSC22JSPropertyNameIteratorD1Ev
+__ZN3JSC22JSPropertyNameIteratorD2Ev
+__ZNK3JSC7JSArray9classInfoEv
+__ZN3JSC7JSArray15copyToRegistersEPNS_9ExecStateEPNS_8RegisterEj
+__ZN3JSC9ArgumentsD1Ev
+__ZN3JSC9ArgumentsD2Ev
+__ZN3JSC7UString4fromEd
+__ZN3WTF32doubleToStringInJavaScriptFormatEdPcPj
+__ZN3WTF4dtoaEPcdiPiS1_PS0_
+__ZN3JSC10Identifier11addSlowCaseEPNS_12JSGlobalDataEPN7WebCore10StringImplE
+__ZN3WTF6VectorIPN3JSC9StructureELm8EE14expandCapacityEm
+_cti_op_resolve_base
+__ZN3JSC12JSActivation18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC20charAtThunkGeneratorEPNS_12JSGlobalDataEPNS_14ExecutablePoolE
+__ZN3JSCL21stringProtoFuncCharAtEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC17PrefixBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC14PostfixDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator11emitPostIncEPNS_10RegisterIDES2_
+__ZN3JSC3JIT16emit_op_post_incEPNS_11InstructionE
+__ZN3JSC3JIT20emitSlow_op_post_incEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC18PostfixBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator18pushFinallyContextEPNS_5LabelEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator17popFinallyContextEv
+__ZN3JSC17BytecodeGenerator19highestUsedRegisterEv
+__ZN3JSC17BytecodeGenerator18emitJumpSubroutineEPNS_10RegisterIDEPNS_5LabelE
+__ZN3JSC17BytecodeGenerator20emitSubroutineReturnEPNS_10RegisterIDE
+__ZN3JSC3JIT11emit_op_jsrEPNS_11InstructionE
+__ZN3WTF6VectorIN3JSC3JIT7JSRInfoELm0EE14expandCapacityEm
+__ZN3JSC3JIT13emit_op_throwEPNS_11InstructionE
+__ZN3JSC3JIT12emit_op_sretEPNS_11InstructionE
+__ZN3JSC21ReadModifyBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+_cti_op_lesseq
+__ZN3JSCL20stringProtoFuncMatchEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC18RegExpMatchesArray18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
+__ZN3JSC18RegExpMatchesArray17fillArrayInstanceEPNS_9ExecStateE
+__ZN3JSC14jsReplaceRangeEPNS_9ExecStateERKNS_7UStringEiiS4_
__ZN3JSC18RegExpMatchesArray18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSCL20stringProtoFuncSliceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC3JIT17emit_op_jneq_nullEPNS_11InstructionE
-__ZN3JSC8JITStubs25cti_op_call_NotJSFunctionEPPv
-__ZN3JSC17StringConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL21callStringConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12StringObject8toStringEPNS_9ExecStateE
+__ZNK3JSC8JSObject11hasPropertyEPNS_9ExecStateEj
+__ZN3JSCL24stringProtoFuncSubstringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_create_arguments_no_params
+__ZN3JSCL21arrayProtoFuncUnShiftEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_pre_dec
+_cti_op_is_undefined
+__ZN3JSC13DeleteDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator14emitDeleteByIdEPNS_10RegisterIDES2_RKNS_10IdentifierE
__ZN3JSC23FunctionCallBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC3JIT17emit_op_del_by_idEPNS_11InstructionE
+_cti_op_del_by_id
+__ZN3WTF37parseDateFromNullTerminatedCharactersEPKc
+__ZN3WTFL37parseDateFromNullTerminatedCharactersEPKcRbRi
+_cti_op_get_by_id_proto_fail
+__ZN3JSC17constructFunctionEPNS_9ExecStateERKNS_7ArgListERKNS_10IdentifierERKNS_7UStringEi
+__ZN3WTF6VectorItLm64EE15reserveCapacityEm
__ZN3JSC20EvalFunctionCallNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator19emitResolveWithBaseEPNS_10RegisterIDES2_RKNS_10IdentifierE
-__ZN3JSC23FunctionCallBracketNodeD0Ev
-__ZN3JSC20EvalFunctionCallNodeD0Ev
-__ZN3JSC3JIT25emit_op_resolve_with_baseEPNS_11InstructionE
+__ZN3JSC17BytecodeGenerator12emitCallEvalEPNS_10RegisterIDES2_S2_PNS_13ArgumentsNodeEjjj
__ZN3JSC3JIT17emit_op_call_evalEPNS_11InstructionE
__ZN3JSC3JIT21emitSlow_op_call_evalEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC14MacroAssembler4jumpENS_22AbstractMacroAssemblerINS_12X86AssemblerEE5LabelE
-__ZN3JSCL19regExpProtoFuncExecEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7UString12replaceRangeEiiRKS0_
-__ZN3JSC8JITStubs17cti_op_is_booleanEPPv
-__ZN3JSC3JIT22emit_op_put_global_varEPNS_11InstructionE
-__ZN3JSCL23regExpProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL18regExpObjectSourceEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL18regExpObjectGlobalEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL22regExpObjectIgnoreCaseEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL21regExpObjectMultilineEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC4Yarr14RegexGenerator30generatePatternCharacterGreedyERNS1_19TermGenerationStateE
-__ZN3JSC8JITStubs27cti_op_get_by_id_proto_failEPPv
-__ZN3JSC17DeleteResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17DeleteResolveNodeD0Ev
-__ZN3JSC3JIT20emit_op_resolve_baseEPNS_11InstructionE
-__ZN3JSC8JITStubs19cti_op_resolve_baseEPPv
-__ZN3JSC12JSActivation14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC16JSVariableObject14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZNK3JSC8JSString8toNumberEPNS_9ExecStateE
-__ZN3JSC8JITStubs24cti_op_resolve_with_baseEPPv
-__ZN3JSC8JITStubs16cti_op_call_evalEPPv
+_cti_op_resolve
+_cti_op_call_eval
__ZN3JSC11Interpreter8callEvalEPNS_9ExecStateEPNS_12RegisterFileEPNS_8RegisterEiiRNS_7JSValueE
__ZN3JSC13LiteralParser5Lexer3lexERNS1_18LiteralParserTokenE
-__ZN3JSC13LiteralParser14parseStatementEv
-__ZN3JSC13LiteralParser15parseExpressionEv
-__ZN3JSC13LiteralParser10parseArrayEv
-__ZN3JSC13LiteralParser11parseObjectEv
-__ZN3JSC10Identifier3addEPNS_9ExecStateEPKti
-__ZN3JSC7JSArray4pushEPNS_9ExecStateENS_7JSValueE
-__ZN3JSCL19mathProtoFuncRandomEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF16weakRandomNumberEv
-__ZN3JSCL18mathProtoFuncFloorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC4Heap15recordExtraCostEm
-__ZN3JSC6Parser5parseINS_8EvalNodeEEEN3WTF10PassRefPtrIT_EEPNS_9ExecStateEPNS_8DebuggerERKNS_10SourceCodeEPiPNS_7UStringE
-__ZN3JSC9ExecState9thisValueEv
-__ZN3JSC11Interpreter7executeEPNS_8EvalNodeEPNS_9ExecStateEPNS_8JSObjectEiPNS_14ScopeChainNodeEPNS_7JSValueE
-__ZN3JSC8EvalNode16generateBytecodeEPNS_14ScopeChainNodeE
-__ZN3JSC17BytecodeGeneratorC2EPNS_8EvalNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrINS_7UString3RepEEENS_16
+__ZN3JSC13LiteralParser5parseENS0_11ParserStateE
+__ZN3JSC14EvalExecutable7compileEPNS_9ExecStateEPNS_14ScopeChainNodeE
+__ZN3JSC6Parser5parseINS_8EvalNodeEEEN3WTF10PassRefPtrIT_EEPNS_12JSGlobalDataEPNS_8DebuggerEPNS_9ExecStateERKNS_10SourceCodeEPi
+__ZN3JSC8EvalNode6createEPNS_12JSGlobalDataEPNS_14SourceElementsEPN3WTF6VectorISt4pairIPKNS_10IdentifierEjELm0EEEPNS6_IPNS_16Fu
+__ZNK3JSC10ScopeChain10localDepthEv
+__ZNK3JSC12JSActivation9classInfoEv
+__ZN3JSC17BytecodeGeneratorC1EPNS_8EvalNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrIN7WebCore10StringImplEE
+__ZN3JSC17BytecodeGeneratorC2EPNS_8EvalNodeEPKNS_8DebuggerERKNS_10ScopeChainEPN3WTF7HashMapINS9_6RefPtrIN7WebCore10StringImplEE
__ZN3JSC8EvalNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
__ZThn16_N3JSC8EvalNodeD0Ev
__ZN3JSC8EvalNodeD0Ev
-__ZN3JSC23objectProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC8JSObject9classNameEv
-__ZN3JSC11JSImmediate12toThisObjectENS_7JSValueEPNS_9ExecStateE
-__ZNK3JSC6JSCell17getTruncatedInt32ERi
-__ZN3JSC15toInt32SlowCaseEdRb
-__ZN3JSCL20dateProtoFuncSetYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12DateInstance21msToGregorianDateTimeEdbRNS_17GregorianDateTimeE
-__ZN3JSC21msToGregorianDateTimeEdbRNS_17GregorianDateTimeE
-__ZN3JSCL12getDSTOffsetEdd
-__ZN3JSC21gregorianDateTimeToMSERKNS_17GregorianDateTimeEdb
-__ZN3JSCL15dateToDayInYearEiii
-__ZN3JSC8JITStubs19cti_op_to_primitiveEPPv
-__ZN3JSCL21dateProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC10formatTimeERKNS_17GregorianDateTimeEb
-__ZN3JSCL24dateProtoFuncToGMTStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7UString13appendNumericEd
-__ZN3JSC11concatenateEPNS_7UString3RepEd
-__ZN3JSCL20dateProtoFuncGetYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL20dateProtoFuncGetDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF9HashTableINS_6RefPtrIN7WebCore10StringImplEEESt4pairIS4_NS1_IN3JSC14EvalExecutableEEEENS_18PairFirstExtractorIS9_EENS2
+__ZN3JSC9ExecState9thisValueEv
+__ZN3JSC11Interpreter7executeEPNS_14EvalExecutableEPNS_9ExecStateEPNS_8JSObjectEiPNS_14ScopeChainNodeEPNS_7JSValueE
+__ZN3JSC14EvalExecutable15generateJITCodeEPNS_9ExecStateEPNS_14ScopeChainNodeE
+__ZN3JSC9ThrowNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+_cti_op_throw
+__ZN3JSC9CodeBlock34reparseForExceptionInfoIfNecessaryEPNS_9ExecStateE
+__ZN3JSC18FunctionExecutable20reparseExceptionInfoEPNS_12JSGlobalDataEPNS_14ScopeChainNodeEPNS_9CodeBlockE
+__ZN3JSC11Interpreter14throwExceptionERPNS_9ExecStateERNS_7JSValueEjb
+__ZN3JSC9CodeBlock24handlerForBytecodeOffsetEj
+__ZN3JSC11Interpreter15unwindCallFrameERPNS_9ExecStateENS_7JSValueERjRPNS_9CodeBlockE
+__ZN3JSC9CodeBlock36hasGlobalResolveInfoAtBytecodeOffsetEj
+_cti_op_push_new_scope
+__ZN3WTF7HashMapINS_6RefPtrIN7WebCore10StringImplEEEN3JSC16SymbolTableEntryENS5_17IdentifierRepHashENS_10HashTraitsIS4_EENS5_26
+__ZN3JSC19JSStaticScopeObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC12JSActivation3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+_cti_op_pop_scope
+_cti_op_is_number
+__ZN3JSCL20arrayProtoFuncConcatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_stricteq
+__ZN3JSCL20arrayProtoFuncSpliceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC11Interpreter18retrieveLastCallerEPNS_9ExecStateERiRlRNS_7UStringERNS_7JSValueE
+__ZN3JSC9CodeBlock27lineNumberForBytecodeOffsetEPNS_9ExecStateEj
+__ZN3JSC8RopeImpl20destructNonRecursiveEv
+__ZN3JSC8RopeImpl23derefFibersNonRecursiveERN3WTF6VectorIPS0_Lm32EEE
+__ZN3JSCL17arrayProtoFuncPopEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC7JSArray3popEv
+__ZN7WebCore6String6numberEd
+__ZN3JSC10Identifier5equalEPKN7WebCore10StringImplEPKc
+__ZNK3JSC9Arguments9classInfoEv
+__ZN3JSC9Arguments15copyToRegistersEPNS_9ExecStateEPNS_8RegisterEj
+__ZN3JSC14InstanceOfNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator14emitInstanceOfEPNS_10RegisterIDES2_S2_S2_
+__ZN3JSC3JIT18emit_op_instanceofEPNS_11InstructionE
+__ZN3JSC3JIT22emitSlow_op_instanceofEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC17BytecodeGenerator11emitPostDecEPNS_10RegisterIDES2_
+__ZN3JSC3JIT16emit_op_post_decEPNS_11InstructionE
+__ZN3JSC3JIT20emitSlow_op_post_decEPNS_11InstructionERPNS_13SlowCaseEntryE
+_cti_op_less
+__ZN3JSC13PrefixDotNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+_cti_op_call_NotJSFunction
+__ZN3JSC17StringConstructor11getCallDataERNS_8CallDataE
+__ZN3JSCL21callStringConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC8JSString8toNumberEPNS_9ExecStateE
+__ZNK3JSC7UString8toDoubleEv
+__ZN3JSC18BooleanConstructor11getCallDataERNS_8CallDataE
+__ZN3JSCL22callBooleanConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC7JSArray19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZN3JSC9ExecState10arrayTableEPS0_
+__ZN3JSC17DeleteResolveNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSCL18arrayProtoFuncSortEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC7JSArray4sortEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataE
+__ZN3WTF6VectorIN3JSC26AVLTreeNodeForArrayCompareELm0EE14expandCapacityEm
+__ZN3JSC11Interpreter20prepareForRepeatCallEPNS_18FunctionExecutableEPNS_9ExecStateEPNS_10JSFunctionEiPNS_14ScopeChainNodeEPNS_
+__ZN3WTF7AVLTreeIN3JSC32AVLTreeAbstractorForArrayCompareELj44ENS_18AVLTreeDefaultBSetILj44EEEE6insertEi
+__ZN3JSC11Interpreter7executeERNS_16CallFrameClosureEPNS_7JSValueE
+__ZN3WTF7AVLTreeIN3JSC32AVLTreeAbstractorForArrayCompareELj44ENS_18AVLTreeDefaultBSetILj44EEEE7balanceEi
+__ZN3JSC11Interpreter13endRepeatCallERNS_16CallFrameClosureE
+__ZN3JSCL16mathProtoFuncExpEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL17mathProtoFuncATanEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZL17makeLeftShiftNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
+__ZN3JSC3JIT14emit_op_lshiftEPNS_11InstructionE
+__ZN3JSC3JIT14emit_op_bitandEPNS_11InstructionE
+__ZN3JSC3JIT14emit_op_rshiftEPNS_11InstructionE
+__ZN3JSC3JIT18emitSlow_op_lshiftEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT18emitSlow_op_bitandEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC3JIT18emitSlow_op_rshiftEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC32jsSpliceSubstringsWithSeparatorsEPNS_9ExecStateEPNS_8JSStringERKNS_7UStringEPKNS_11StringRangeEiPS5_i
+__ZN3JSCL26stringProtoFuncLastIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC7JSValue20toIntegerPreserveNaNEPNS_9ExecStateE
+__ZNK3JSC7UString5rfindERKS0_j
+__ZN3WTF6VectorIiLm0EE15reserveCapacityEm
+__ZN3WTF6VectorINS_6RefPtrIN3JSC5LabelEEELm8EE15reserveCapacityEm
+__ZN3WTF6VectorISt4pairIiiELm8EE14expandCapacityEm
+__ZN3JSC11DoWhileNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC3JIT19emit_op_switch_charEPNS_11InstructionE
+_cti_op_switch_char
+__ZN3JSCL21stringProtoFuncSearchEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF10RefCountedIN3JSC6RegExpEE5derefEv
+__ZN3JSC14JSGlobalObjectD2Ev
+__ZN3JSC12RegisterFile17clearGlobalObjectEPNS_14JSGlobalObjectE
+__ZN3JSC17FunctionPrototypeD1Ev
+__ZN3JSC15ObjectPrototypeD1Ev
+__ZN3JSC14ArrayPrototypeD1Ev
+__ZN3JSC15StringPrototypeD1Ev
+__ZN3JSC16BooleanPrototypeD1Ev
+__ZN3JSC15NumberPrototypeD1Ev
+__ZN3JSC13DatePrototypeD1Ev
+__ZN3JSC15RegExpPrototypeD1Ev
+__ZN3JSC14ErrorPrototypeD1Ev
+__ZN3JSC20NativeErrorPrototypeD1Ev
+__ZN3JSC17ObjectConstructorD1Ev
+__ZN3JSC19FunctionConstructorD1Ev
+__ZN3JSC16ArrayConstructorD1Ev
+__ZN3JSC17StringConstructorD1Ev
+__ZN3JSC18BooleanConstructorD1Ev
+__ZN3JSC17NumberConstructorD1Ev
+__ZN3JSC15DateConstructorD1Ev
+__ZN3JSC17RegExpConstructorD1Ev
+__ZN3JSC16ErrorConstructorD1Ev
+__ZN3JSC22NativeErrorConstructorD1Ev
+__ZN3JSC10MathObjectD1Ev
+__ZN3JSC10JSONObjectD1Ev
+__ZN3JSC18GlobalEvalFunctionD1Ev
+__ZN3JSC19JSStaticScopeObjectD1Ev
+__ZN3JSC19JSStaticScopeObjectD2Ev
+__ZN3WTF6VectorIPvLm0EE14expandCapacityEmPKS1_
+__ZN3WTF6VectorIPvLm0EE15reserveCapacityEm
+__ZN3JSC16JSCallbackObjectINS_8JSObjectEED1Ev
+__ZL25clearReferenceToPrototypeP13OpaqueJSValue
+_JSObjectGetPrivate
+__ZNK3JSC16JSCallbackObjectINS_8JSObjectEE9classInfoEv
+__ZN3JSC28globalFuncEncodeURIComponentEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL6encodeEPNS_9ExecStateERKNS_7ArgListEPKc
+__ZN3WTF6VectorItLm64EE9tryAppendItEEbPKT_m
+__ZN3WTF6VectorItLm64EE18tryReserveCapacityEm
+__ZN3JSC28globalFuncDecodeURIComponentEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL6decodeEPNS_9ExecStateERKNS_7ArgListEPKcb
+__ZN3WTF7Unicode18UTF8SequenceLengthEc
+__ZN3WTF7Unicode18decodeUTF8SequenceEPKc
+__ZN3JSC18EmptyStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSCltERKNS_7UStringES2_
+__ZN3WTF6VectorItLm0EE14expandCapacityEmPKt
+__ZN3WTF6VectorItLm0EE15reserveCapacityEm
+__ZN3JSC18RegExpMatchesArray3putEPNS_9ExecStateEjNS_7JSValueE
+__ZN3JSC18RegExpMatchesArray14deletePropertyEPNS_9ExecStateEj
+__ZN3JSC18RegExpMatchesArray3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSC14EvalExecutableD0Ev
+__ZN3JSC13EvalCodeBlockD0Ev
+__ZN3JSC3JIT30emit_op_resolve_global_dynamicEPNS_11InstructionE
+__ZN3JSC3JIT34emitSlow_op_resolve_global_dynamicEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSC19FunctionConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSCL32constructWithFunctionConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
+__ZN3JSC17constructFunctionEPNS_9ExecStateERKNS_7ArgListE
+__ZN3JSC3JIT18emit_op_jmp_scopesEPNS_11InstructionE
+__ZN3JSC8WithNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC17BytecodeGenerator13emitPushScopeEPNS_10RegisterIDE
+__ZN3JSC3JIT18emit_op_push_scopeEPNS_11InstructionE
+_cti_op_push_scope
+_cti_op_jmp_scopes
+__ZN3WTF6VectorIN3JSC14ExecutablePool10AllocationELm2EE14expandCapacityEm
+__ZN3JSCL26stringProtoFuncToUpperCaseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL21arrayProtoFuncIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL16mathProtoFuncCosEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL17mathProtoFuncASinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_loop_if_lesseq
+__ZN3JSC7JSArray4sortEPNS_9ExecStateE
+__ZN3JSC7JSArray17compactForSortingEv
+__ZL18makeRightShiftNodePN3JSC12JSGlobalDataEPNS_14ExpressionNodeES3_b
+__ZN3JSC3JIT13emit_op_bitorEPNS_11InstructionE
+__ZN3JSC3JIT17emitSlow_op_bitorEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3WTF6VectorIN3JSC11StringRangeELm16EE14expandCapacityEm
+__ZN3WTF6VectorIN3JSC7UStringELm16EE14expandCapacityEmPKS2_
+__ZN3WTF6VectorIN3JSC7UStringELm16EE15reserveCapacityEm
+__ZNK3JSC8JSString8toObjectEPNS_9ExecStateE
+__ZN3JSC12StringObject14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
+_cti_op_post_dec
+__ZN3JSC3JIT14emit_op_bitxorEPNS_11InstructionE
+__ZN3JSC3JIT18emitSlow_op_bitxorEPNS_11InstructionERPNS_13SlowCaseEntryE
+_cti_op_bitand
+_cti_op_bitor
+__ZN7WebCore10StringImpl18simplifyWhiteSpaceEv
+_cti_op_is_object
+__ZN3JSC14jsIsObjectTypeENS_7JSValueE
+__ZN3JSC17PrototypeFunction11getCallDataERNS_8CallDataE
+__ZN3JSC14globalFuncEvalEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL19isNonASCIIIdentPartEi
+__ZN3JSC11Interpreter7executeEPNS_14EvalExecutableEPNS_9ExecStateEPNS_8JSObjectEPNS_14ScopeChainNodeEPNS_7JSValueE
+_cti_op_mod
+__ZN7WebCore6String6appendEc
+__ZN7WebCore6String6appendEPKtj
+__ZN3JSC16globalFuncEscapeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK7WebCore6String8foldCaseEv
+__ZN7WebCore10StringImpl8foldCaseEv
+__ZN3JSC10JSFunction15argumentsGetterEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZNK3JSC11Interpreter17retrieveArgumentsEPNS_9ExecStateEPNS_10JSFunctionE
+__ZN3WTF6VectorIPN3JSC14ExpressionNodeELm16EE14expandCapacityEm
+__ZN3WTF6VectorINS_6RefPtrIN3JSC10RegisterIDEEELm16EE15reserveCapacityEm
+__ZN3WTF6VectorIN7WebCore6StringELm0EE14expandCapacityEm
+__ZN3JSC8JSObject14deletePropertyEPNS_9ExecStateEj
+__ZN3JSC18globalFuncUnescapeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF6VectorItLm64EE14expandCapacityEm
+__ZN3JSC3JIT16emit_op_jeq_nullEPNS_11InstructionE
+__ZN3WTF7CStringC1EPKc
+__ZN3WTFeqERKNS_7CStringES2_
+__ZN7WebCore17equalIgnoringCaseEPKtPKcj
+__ZNK7WebCore6String6toUIntEPb
+__ZN7WebCore10StringImpl6toUIntEPb
+__ZN7WebCore16charactersToUIntEPKtmPb
+__ZN7WebCore10StringImpl11reverseFindEti
+__ZN3WTF23waitForThreadCompletionEjPPv
+__ZN3WTF14FastMallocZone10statisticsEP14_malloc_zone_tP19malloc_statistics_t
+__ZNK7WebCore6String18simplifyWhiteSpaceEv
+__ZN7WebCore10StringImpl23defaultWritingDirectionEv
+__ZN3JSCL20dateProtoFuncSetTimeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL21dateProtoFuncGetMonthEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC12DateInstance26calculateGregorianDateTimeEPNS_9ExecStateE
+__ZN3JSC21msToGregorianDateTimeEPNS_9ExecStateEdbRNS_17GregorianDateTimeE
+__ZN3WTFL18calculateDSTOffsetEdd
+__ZN3JSCL20dateProtoFuncGetDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC7JSValue8inheritsEPKNS_9ClassInfoE
+__ZN3JSCL20dateProtoFuncGetYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL21dateProtoFuncGetHoursEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL23dateProtoFuncGetMinutesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC3JIT17emit_op_jneq_nullEPNS_11InstructionE
+__ZN3JSCL20dateProtoFuncSetYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC7JSValue7toInt32EPNS_9ExecStateERb
+__ZN3JSC21gregorianDateTimeToMSEPNS_9ExecStateERKNS_17GregorianDateTimeEdb
+__ZN3WTF18dateToDaysFrom1970Eiii
+__ZN3JSCL21dateProtoFuncSetMonthEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL23setNewValueFromDateArgsEPNS_9ExecStateENS_7JSValueERKNS_7ArgListEib
+__ZN3JSCL20dateProtoFuncSetDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL24dateProtoFuncToGMTStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC12DateInstance29calculateGregorianDateTimeUTCEPNS_9ExecStateE
+__ZN3JSC20formatDateUTCVariantERKNS_17GregorianDateTimeERA100_c
+__ZN3JSC13formatTimeUTCERKNS_17GregorianDateTimeERA100_c
+__ZN3JSC13tryMakeStringIPcPKcS1_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_
+__ZN3JSC9parseDateEPNS_9ExecStateERKNS_7UStringE
+__ZN3JSC37parseDateFromNullTerminatedCharactersEPNS_9ExecStateEPKc
+__ZN3JSC16ArrayConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSCL29constructWithArrayConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
+__ZN3JSCL27constructArrayWithSizeQuirkEPNS_9ExecStateERKNS_7ArgListE
+__ZN3JSC7JSArrayC1EN3WTF17NonNullPassRefPtrINS_9StructureEEEj
+__ZN3JSC17StringConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSCL30constructWithStringConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
+__ZN3JSC12StringObjectC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7UStringE
+__ZN3JSC12StringObjectC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEERKNS_7UStringE
+_cti_op_pre_inc
__ZN3JSCL23dateProtoFuncGetSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL19dateProtoFuncGetDayEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN7WebCore6String6removeEji
+__ZN3JSC23MacroAssemblerX86Common12branchTest32ENS0_9ConditionENS_22AbstractMacroAssemblerINS_12X86AssemblerEE7AddressENS4_5Imm
+__ZN3JSC23MacroAssemblerX86Common8branch32ENS0_9ConditionENS_22AbstractMacroAssemblerINS_12X86AssemblerEE7AddressENS4_5Imm32E
+__ZN3JSC12X86Assembler23X86InstructionFormatter9twoByteOpENS0_15TwoByteOpcodeIDEiNS_12X86Registers10RegisterIDEi
+__ZNK3JSC7UStringixEj
+__ZN3JSC3JIT19emit_op_to_jsnumberEPNS_11InstructionE
+__ZN3JSC3JIT23emitSlow_op_to_jsnumberEPNS_11InstructionERPNS_13SlowCaseEntryE
+_cti_op_to_jsnumber
__ZN3JSCL30dateProtoFuncGetTimezoneOffsetEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC28createUndefinedVariableErrorEPNS_9ExecStateERKNS_10IdentifierEjPNS_9CodeBlockE
-__ZN3JSC9CodeBlock32expressionRangeForBytecodeOffsetEPNS_9ExecStateEjRiS3_S3_
__ZN3JSC5Error6createEPNS_9ExecStateENS_9ErrorTypeERKNS_7UStringEilS6_
__ZN3JSC22NativeErrorConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSC9constructEPNS_9ExecStateENS_7JSValueENS_13ConstructTypeERKNS_13ConstructDataERKNS_7ArgListE
__ZN3JSCL35constructWithNativeErrorConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
__ZN3JSC22NativeErrorConstructor9constructEPNS_9ExecStateERKNS_7ArgListE
+__ZN3JSC13ErrorInstanceC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
__ZN3JSC8JSObject17putWithAttributesEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueEj
-__ZN3JSCL23returnToThrowTrampolineEPNS_12JSGlobalDataEPvRS2_
+__ZNK3JSC8JSObject13exceptionTypeEv
+__ZN3JSCL22errorProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC13tryMakeStringINS_7UStringEPKcS1_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_
+__ZN3JSC13ErrorInstanceD1Ev
+__ZN3JSC4Heap9freeBlockEm
+__ZN3JSC4Heap12freeBlockPtrEPNS_14CollectorBlockE
+__ZN3JSC28createUndefinedVariableErrorEPNS_9ExecStateERKNS_10IdentifierEjPNS_9CodeBlockE
+__ZN3JSC9CodeBlock32expressionRangeForBytecodeOffsetEPNS_9ExecStateEjRiS3_S3_
+__ZN3JSC10makeStringIPKcNS_7UStringEEES3_T_T0_
+__ZN3JSC13tryMakeStringIPKcNS_7UStringEEEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_
+__ZN3JSCL23returnToThrowTrampolineEPNS_12JSGlobalDataENS_16ReturnAddressPtrERS2_
_ctiVMThrowTrampoline
-__ZN3JSC8JITStubs12cti_vm_throwEPPv
-__ZN3JSC11Interpreter14throwExceptionERPNS_9ExecStateERNS_7JSValueEjb
+_cti_vm_throw
__ZNK3JSC8JSObject22isNotAnObjectErrorStubEv
-__ZNK3JSC8JSObject19isWatchdogExceptionEv
-__ZN3JSC9CodeBlock24handlerForBytecodeOffsetEj
-__ZN3JSC8JITStubs21cti_op_push_new_scopeEPPv
-__ZN3WTF6VectorIN3JSC22AbstractMacroAssemblerINS1_12X86AssemblerEE4JumpELm16EE14expandCapacityEm
-__ZN3JSCL20dateProtoFuncSetTimeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3WTF7HashMapINS_6RefPtrIN3JSC7UString3RepEEENS1_INS2_8EvalNodeEEENS_7StrHashIS5_EENS_10HashTraitsIS5_EENSA_IS7_EEE3getEPS4
-__ZN3WTF7HashMapINS_6RefPtrIN3JSC7UString3RepEEENS1_INS2_8EvalNodeEEENS_7StrHashIS5_EENS_10HashTraitsIS5_EENSA_IS7_EEE3setEPS4_
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC7UString3RepEEESt4pairIS5_NS1_INS2_8EvalNodeEEEENS_18PairFirstExtractorIS9_EENS_7StrHashIS5_
-__ZN3JSC10LessEqNodeD0Ev
-__ZN3JSC8JITStubs14cti_op_jlesseqEPPv
-__ZN3JSC8JSString18getPrimitiveNumberEPNS_9ExecStateERdRNS_7JSValueE
-__ZL18makeRightShiftNodePvPN3JSC14ExpressionNodeES2_b
-__ZN3JSC14RightShiftNodeD0Ev
-__ZN3JSC3JIT14emit_op_rshiftEPNS_11InstructionE
-__ZN3JSC3JIT18emitSlow_op_rshiftEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC18PostfixBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC18PostfixBracketNodeD0Ev
-__ZN3JSC21ReadModifyBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC21ReadModifyBracketNodeD0Ev
-__ZN3JSC11Interpreter15unwindCallFrameERPNS_9ExecStateENS_7JSValueERjRPNS_9CodeBlockE
-__ZN3JSCL22errorProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF23waitForThreadCompletionEjPPv
-__ZN3WTF15ThreadConditionD1Ev
-__ZN3JSC9Structure24removePropertyTransitionEPS0_RKNS_10IdentifierERm
-__ZN3JSC12JSActivation3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZNK3JSC7UString5asciiEv
+__ZN3JSC6JSLock12DropAllLocksC1ENS_14JSLockBehaviorE
+__ZN3JSCL17createJSLockCountEv
+__ZN3JSC6JSLock12DropAllLocksD1Ev
+__ZNK7WebCore6String5upperEv
+__ZN3JSC4Yarr23RegexPatternConstructor31atomParentheticalAssertionBeginEb
+__ZN3JSC4Yarr14RegexGenerator30generateParentheticalAssertionERNS1_19TermGenerationStateE
+__ZNK3JSC8JSObject8toNumberEPNS_9ExecStateE
+__ZN3WTF6VectorIPN3JSC4Yarr18PatternDisjunctionELm4EE14expandCapacityEmPKS4_
+__ZN3JSCL18regExpObjectSourceEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC17RegExpConstructor11getCallDataERNS_8CallDataE
+__ZN3JSCL21callRegExpConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC23objectProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC8JSObject9classNameEv
+__ZN3JSC13tryMakeStringIPKcNS_7UStringES2_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_
+__ZN3JSC16JSVariableObject14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
+__ZN3JSC9Structure27despecifyDictionaryFunctionERKNS_10IdentifierE
+__ZN3JSC36constructBooleanFromImmediateBooleanEPNS_9ExecStateENS_7JSValueE
+__ZN3JSC13BooleanObjectC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC17ObjectConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSCL30constructWithObjectConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
+__ZNK3JSC9ArrayNode13isSimpleArrayEv
+__ZN3JSC7JSArray14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
+__ZN3JSCL27compareByStringPairForQSortEPKvS1_
+__ZN3JSC7compareERKNS_7UStringES2_
+__ZN3JSCL24dateProtoFuncToUTCStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC17NumberConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSCL30constructWithNumberConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
+__ZN3JSC12NumberObjectC1EN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSCL20isNonASCIIIdentStartEi
+__ZN3JSC9Arguments3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSC14MacroAssembler4peekENS_12X86Registers10RegisterIDEi
+__ZN3JSC4Yarr14RegexGenerator12atEndOfInputEv
+__ZN3JSC14MacroAssembler8branch32ENS_23MacroAssemblerX86Common9ConditionENS_12X86Registers10RegisterIDENS_22AbstractMacroAssemb
+__ZN3JSC22AbstractMacroAssemblerINS_12X86AssemblerEE8JumpList6linkToENS2_5LabelEPS2_
+__ZN3JSC14MacroAssembler4pokeENS_12X86Registers10RegisterIDEi
+__ZN3JSCL28substituteBackreferencesSlowERKNS_7UStringES2_PKiPNS_6RegExpEj
+__ZN3WTF6VectorItLm0EE6appendItEEvPKT_m
+_cti_op_to_primitive
+__ZN3JSC15constructNumberEPNS_9ExecStateENS_7JSValueE
+__ZNK3JSC12NumberObject9classInfoEv
+__Z12jsRegExpFreeP8JSRegExp
+__ZN3JSC13BooleanObjectD1Ev
+__ZN3JSCL18stringFromCharCodeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC10makeStringIPKcNS_7UStringES2_EES3_T_T0_T1_
+__ZN3JSCL19dateProtoFuncGetDayEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC14EvalExecutable20reparseExceptionInfoEPNS_12JSGlobalDataEPNS_14ScopeChainNodeEPNS_9CodeBlockE
+__ZN3JSCL22objectProtoFuncValueOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC8JSObject18getPrimitiveNumberEPNS_9ExecStateERdRNS_7JSValueE
+__ZN3JSCL24dateProtoFuncGetFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC12RegExpObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSCL24setRegExpObjectLastIndexEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueE
+__Z15jsc_pcre_xclassiPKh
+__ZN3JSCL28regExpConstructorLeftContextEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZNK3JSC17RegExpConstructor14getLeftContextEPNS_9ExecStateE
+_cti_to_object
+__ZNK3JSC7JSValue16toObjectSlowCaseEPNS_9ExecStateE
+_cti_op_sub
+__ZN3JSC17NumberConstructor11getCallDataERNS_8CallDataE
+__ZN3JSCL21callNumberConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+_cti_op_bitxor
+_cti_op_is_function
+__ZN3JSC16jsIsFunctionTypeENS_7JSValueE
+__ZN3JSC12NumberObjectD1Ev
+__ZN3WTF6VectorIN3JSC22AbstractMacroAssemblerINS1_12X86AssemblerEE4JumpELm16EE14expandCapacityEm
+__ZN3JSC8JSString18getPrimitiveNumberEPNS_9ExecStateERdRNS_7JSValueE
+__ZN7WebCore6String26fromUTF8WithLatin1FallbackEPKcm
+__ZN7WebCore6String8fromUTF8EPKcm
+__ZN3WTF10fastStrDupEPKc
+__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeE
+__ZN3WTF6VectorIN3JSC15StringJumpTableELm0EE14expandCapacityEm
+__ZN3WTF9HashTableINS_6RefPtrIN7WebCore10StringImplEEESt4pairIS4_N3JSC14OffsetLocationEENS_18PairFirstExtractorIS8_EENS2_10Stri
+__ZN3JSC3JIT21emit_op_switch_stringEPNS_11InstructionE
+__ZN3JSC13LiteralParser5Lexer9lexNumberERNS1_18LiteralParserTokenE
+__ZN3WTF6VectorIPNS_14StringImplBaseELm32EE14expandCapacityEm
+__ZN3JSC4Yarr17nonwordcharCreateEv
+__ZN3JSC3JIT15emit_op_eq_nullEPNS_11InstructionE
+_cti_op_switch_string
+__ZN3JSC12X86Assembler23X86InstructionFormatter15emitRexIfNeededEiii
+__ZN3JSC12X86Assembler23X86InstructionFormatter11memoryModRMEiNS_12X86Registers10RegisterIDES3_ii
+__ZN3JSC4Yarr6ParserINS0_23RegexPatternConstructorEE28CharacterClassParserDelegate25atomBuiltInCharacterClassENS0_23BuiltInChar
+__ZN3JSC4Yarr15nonspacesCreateEv
+__ZN3JSCL25functionProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC21UStringSourceProvider8getRangeEii
+__ZNK3JSC7UString6substrEjj
+__ZN3JSC10makeStringINS_7UStringEPKcS1_EES1_T_T0_T1_
+__ZNK3JSC18FunctionExecutable11paramStringEv
+__ZN3WTF6VectorItLm64EE6appendItEEvPKT_m
+__ZN3JSC13tryMakeStringIPKcNS_7UStringES2_S3_S2_S3_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_T2_T3_T4_
+__ZN3JSC17RegExpConstructor3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSCL21arrayProtoFuncForEachEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC10JSFunction14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
+__ZN3JSCL22arrayProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC16ErrorConstructor16getConstructDataERNS_13ConstructDataE
+__ZN3JSCL29constructWithErrorConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
+__ZN3JSC14constructErrorEPNS_9ExecStateERKNS_7ArgListE
+_cti_op_instanceof
+__ZN3JSC6RegExp6createEPNS_12JSGlobalDataERKNS_7UStringE
+__ZN3WTF9HashTableIPN7WebCore10StringImplES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E4findIS3_NS_
+_cti_op_get_by_id_string_fail
+__ZN3JSCL20stringProtoFuncSliceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC14MacroAssembler4jumpENS_22AbstractMacroAssemblerINS_12X86AssemblerEE5LabelE
+__ZN3JSCL23regExpProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL18regExpObjectGlobalEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL22regExpObjectIgnoreCaseEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL21regExpObjectMultilineEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC13tryMakeStringIPKcNS_7UStringEPcEEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_
+__ZN3JSC12JSActivation14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
+__ZN3WTFL10timerFiredEP16__CFRunLoopTimerPv
+__ZN7WebCore22charactersToUIntStrictEPKtmPbi
+__ZN3WTF9HashTableIjSt4pairIjN3JSC7JSValueEENS_18PairFirstExtractorIS4_EENS_7IntHashIjEENS_14PairHashTraitsINS_10HashTraitsIjEE
+__ZN3JSCL23dateProtoFuncSetMinutesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL23setNewValueFromTimeArgsEPNS_9ExecStateENS_7JSValueERKNS_7ArgListEib
+__ZN3JSCL23dateProtoFuncSetSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL28dateProtoFuncSetMilliSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL9dateParseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL21dateProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC10formatDateERKNS_17GregorianDateTimeERA100_c
+__ZN3JSC10formatTimeERKNS_17GregorianDateTimeERA100_c
+__ZN3JSC19globalFuncEncodeURIEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC13UnaryPlusNode14stripUnaryPlusEv
+__ZN3JSCL20arrayProtoFuncFilterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL27objectProtoFuncLookupGetterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC8JSObject12lookupGetterEPNS_9ExecStateERKNS_10IdentifierE
+__ZNK3JSC6JSCell14isGetterSetterEv
+__ZN3JSCL27objectProtoFuncDefineSetterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC8JSObject12defineSetterEPNS_9ExecStateERKNS_10IdentifierEPS0_j
+__ZN3JSCL27objectProtoFuncDefineGetterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC8JSObject12defineGetterEPNS_9ExecStateERKNS_10IdentifierEPS0_j
+__ZNK3JSC12GetterSetter14isGetterSetterEv
+__ZN3JSC8JSObject15unwrappedObjectEv
+__ZN3JSC10JSFunction12callerGetterEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZNK3JSC11Interpreter14retrieveCallerEPNS_9ExecStateEPNS_16InternalFunctionE
+__ZN3JSC16toUInt32SlowCaseEdRb
+__ZN3JSC8JSObject22fillGetterPropertySlotERNS_12PropertySlotEPNS_7JSValueE
+__ZNK3JSC12PropertySlot14functionGetterEPNS_9ExecStateE
+_cti_op_get_by_id_getter_stub
+__ZN3JSC12GetterSetter12markChildrenERNS_9MarkStackE
+__ZN3JSCL17mathProtoFuncACosEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF13tryFastCallocEmm
+__ZN3JSC3JIT20emit_op_resolve_skipEPNS_11InstructionE
+_cti_op_resolve_skip
+__ZN3JSC3JIT15emit_op_urshiftEPNS_11InstructionE
+__ZN3JSC3JIT19emitSlow_op_urshiftEPNS_11InstructionERPNS_13SlowCaseEntryE
+__ZN3JSCL25stringProtoFuncCharCodeAtEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF6VectorIcLm0EE14expandCapacityEm
+__ZN3JSC15AssignErrorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC23ThrowableExpressionData14emitThrowErrorERNS_17BytecodeGeneratorENS_9ErrorTypeEPKc
+__ZN3JSC17BytecodeGenerator12emitNewErrorEPNS_10RegisterIDENS_9ErrorTypeENS_7JSValueE
+__ZN3JSC3JIT17emit_op_new_errorEPNS_11InstructionE
+__ZN3JSCL27objectProtoFuncLookupSetterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC8JSObject12lookupSetterEPNS_9ExecStateERKNS_10IdentifierE
__ZN3JSC26createNotAnObjectErrorStubEPNS_9ExecStateEb
__ZN3JSC13JSNotAnObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
__ZNK3JSC22JSNotAnObjectErrorStub22isNotAnObjectErrorStubEv
__ZN3JSC22createNotAnObjectErrorEPNS_9ExecStateEPNS_22JSNotAnObjectErrorStubEjPNS_9CodeBlockE
__ZN3JSC9CodeBlock37getByIdExceptionInfoForBytecodeOffsetEPNS_9ExecStateEjRNS_8OpcodeIDE
__ZN3JSCL18createErrorMessageEPNS_9ExecStateEPNS_9CodeBlockEiiiNS_7JSValueENS_7UStringE
-__ZN3JSC13ErrorInstanceD1Ev
+__ZN3JSC10makeStringIPKcNS_7UStringES2_S3_S2_S3_S2_EES3_T_T0_T1_T2_T3_T4_T5_
+__ZN3JSC13tryMakeStringIPKcNS_7UStringES2_S3_S2_S3_S2_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_T2_T3_T4_T5_
__ZN3JSC22JSNotAnObjectErrorStubD1Ev
__ZN3JSC13JSNotAnObjectD1Ev
-__ZN3JSC19JSStaticScopeObjectD1Ev
-__ZN3JSC19JSStaticScopeObjectD2Ev
-__ZN3JSC17DeleteBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17BytecodeGenerator15emitDeleteByValEPNS_10RegisterIDES2_S2_
-__ZN3JSC17DeleteBracketNodeD0Ev
-__ZN3JSC8JITStubs17cti_op_del_by_valEPPv
-__ZN3JSC8JSObject14deletePropertyEPNS_9ExecStateEj
-__ZN3JSC28globalFuncEncodeURIComponentEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL6encodeEPNS_9ExecStateERKNS_7ArgListEPKc
-__ZNK3JSC7UString10UTF8StringEb
-__ZN3WTF7Unicode18convertUTF16ToUTF8EPPKtS2_PPcS4_b
-__ZN3JSC10NegateNodeD0Ev
-__ZN3JSC8JITStubs13cti_op_negateEPPv
-__ZN3JSCL17mathProtoFuncSqrtEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16mathProtoFuncAbsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL18mathProtoFuncRoundEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16mathProtoFuncCosEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16mathProtoFuncSinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs10cti_op_subEPPv
-__ZNK3JSC8JSObject8toNumberEPNS_9ExecStateE
+__ZN3JSC12GetterSetterD1Ev
+__ZN3JSC20FixedVMPoolAllocator17coalesceFreeSpaceEv
+__ZN3WTF6VectorIPN3JSC13FreeListEntryELm0EE15reserveCapacityEm
+__ZN3JSCL35reverseSortFreeListEntriesByPointerEPKvS1_
+__ZN3JSCL33reverseSortCommonSizedAllocationsEPKvS1_
+__ZN7WebCore20equalIgnoringNullityEPNS_10StringImplES1_
+__ZN3JSC19globalFuncDecodeURIEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC16ErrorConstructor11getCallDataERNS_8CallDataE
+__ZN3JSCL20callErrorConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL22functionProtoFuncApplyEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC9Arguments11fillArgListEPNS_9ExecStateERNS_20MarkedArgumentBufferE
+__ZN3JSCL21functionProtoFuncCallEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC10JSONObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+_cti_op_lshift
+__ZN3JSCL29regExpConstructorRightContextEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZNK3JSC17RegExpConstructor15getRightContextEPNS_9ExecStateE
+__ZN3JSC20MarkedArgumentBuffer10slowAppendENS_7JSValueE
+__ZN3WTF9HashTableIPN3JSC20MarkedArgumentBufferES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6expan
+__ZN3WTF6VectorIN3JSC8RegisterELm8EE15reserveCapacityEm
+__ZN3WTF9HashTableIPN3JSC20MarkedArgumentBufferES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E4findI
+__ZN3JSCL26stringFromCharCodeSlowCaseEPNS_9ExecStateERKNS_7ArgListE
+__ZN3JSC4Yarr12RegexPattern21newlineCharacterClassEv
+__ZN3JSC10JSFunction12lengthGetterEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC18globalFuncIsFiniteEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSC16ArrayConstructor11getCallDataERNS_8CallDataE
__ZN3JSCL20callArrayConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs10cti_op_modEPPv
-__ZN3JSC8JITStubs12cti_op_jlessEPPv
-__ZL17makeLeftShiftNodePvPN3JSC14ExpressionNodeES2_b
-__ZN3JSC13LeftShiftNodeD0Ev
-__ZN3JSC3JIT14emit_op_lshiftEPNS_11InstructionE
-__ZN3JSC3JIT18emitSlow_op_lshiftEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC11JITStubCall11addArgumentENS_3X8610RegisterIDE
-__ZN3JSCL16mathProtoFuncMaxEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC10BitAndNodeD0Ev
-__ZN3JSC3JIT14emit_op_bitandEPNS_11InstructionE
-__ZN3JSC3JIT18emitSlow_op_bitandEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs13cti_op_bitandEPPv
-__ZN3JSC14BitwiseNotNodeD0Ev
+__ZN3JSCL19stringProtoFuncTrimEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC15isStrWhiteSpaceEt
+__ZN3JSC6JSCell9getObjectEv
+_ctiOpThrowNotCaught
+__ZN3JSCL22numberProtoFuncValueOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC12NumberObject11getJSNumberEv
+__ZN7WebCore10StringImpl6secureEt
+__ZN3JSC23createNotAFunctionErrorEPNS_9ExecStateENS_7JSValueEjPNS_9CodeBlockE
+_cti_op_rshift
+__ZN3JSC13JSNotAnObject18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
+__ZN3WTF6VectorIN3JSC22AbstractMacroAssemblerINS1_12X86AssemblerEE4JumpELm16EEC2ERKS6_
+__ZN3JSCL21arrayProtoFuncReverseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF21CrossThreadRefCountedINS_16OwnFastMallocPtrIKtEEE5derefEv
+_cti_op_post_inc
+__ZN3JSCL21regExpObjectLastIndexEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
__ZN3JSC3JIT14emit_op_bitnotEPNS_11InstructionE
__ZN3JSC3JIT18emitSlow_op_bitnotEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC22UnsignedRightShiftNodeD0Ev
-__ZN3JSC10BitXOrNodeD0Ev
-__ZN3JSC3JIT14emit_op_bitxorEPNS_11InstructionE
-__ZN3JSC3JIT18emitSlow_op_bitxorEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSCL25stringProtoFuncCharCodeAtEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs14cti_op_urshiftEPPv
-__ZN3JSC16toUInt32SlowCaseEdRb
-__ZN3JSCL17mathProtoFuncCeilEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC6JSCell18getTruncatedUInt32ERj
-__ZN3JSC3JIT13emit_op_bitorEPNS_11InstructionE
-__ZN3JSC3JIT17emitSlow_op_bitorEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs12cti_op_bitorEPPv
-__ZN3JSC9BitOrNodeD0Ev
-__ZN3JSC8JITStubs13cti_op_rshiftEPPv
-__ZN3JSC8JITStubs13cti_op_bitxorEPPv
-__ZN3JSC9parseDateERKNS_7UStringE
-__ZN3WTF6VectorIN3JSC10CallRecordELm0EE14expandCapacityEmPKS2_
-__ZNK3JSC12JSActivation12toThisObjectEPNS_9ExecStateE
-__ZN3JSC3JIT20emit_op_resolve_skipEPNS_11InstructionE
-__ZN3JSC8JITStubs19cti_op_resolve_skipEPPv
-__ZN3JSCL24dateProtoFuncGetFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17StringConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL30constructWithStringConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC5equalEPKNS_7UString3RepES3_
-__ZN3JSC8EvalNode4markEv
-__ZN3JSC10SwitchNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC13CaseBlockNode20emitBytecodeForBlockERNS_17BytecodeGeneratorEPNS_10RegisterIDES4_
-__ZN3JSC13CaseBlockNode18tryOptimizedSwitchERN3WTF6VectorIPNS_14ExpressionNodeELm8EEERiS7_
-__ZN3JSCL17processClauseListEPNS_14ClauseListNodeERN3WTF6VectorIPNS_14ExpressionNodeELm8EEERNS_10SwitchKindERbRiSB_
-__ZN3WTF6VectorIPN3JSC14ExpressionNodeELm8EE14expandCapacityEm
-__ZN3WTF6VectorINS_6RefPtrIN3JSC5LabelEEELm8EE14expandCapacityEm
-__ZN3JSC17BytecodeGenerator11beginSwitchEPNS_10RegisterIDENS_10SwitchInfo10SwitchTypeE
-__ZN3WTF6VectorIN3JSC10SwitchInfoELm0EE14expandCapacityEm
-__ZN3JSC17BytecodeGenerator9endSwitchEjPN3WTF6RefPtrINS_5LabelEEEPPNS_14ExpressionNodeEPS3_ii
-__ZN3WTF6VectorIN3JSC15SimpleJumpTableELm0EE14expandCapacityEm
-__ZN3WTF6VectorIiLm0EE15reserveCapacityEm
-__ZN3JSC14CaseClauseNodeD0Ev
-__ZN3JSC14ClauseListNodeD0Ev
-__ZN3JSC13CaseBlockNodeD0Ev
-__ZN3JSC10SwitchNodeD0Ev
-__ZN3JSC3JIT19emit_op_switch_charEPNS_11InstructionE
-__ZN3WTF6VectorIN3JSC12SwitchRecordELm0EE14expandCapacityEm
-__ZN3WTF6VectorIN3JSC22AbstractMacroAssemblerINS1_12X86AssemblerEE17CodeLocationLabelELm0EE4growEm
-__ZN3JSC8JITStubs18cti_op_switch_charEPPv
-__ZN3JSCL16mathProtoFuncPowEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF6VectorIcLm0EE14expandCapacityEm
-__ZN3WTF6VectorIN3JSC7UString5RangeELm16EE14expandCapacityEm
-__ZN3WTF6VectorIN3JSC7UStringELm16EE14expandCapacityEmPKS2_
-__ZN3WTF6VectorIN3JSC7UStringELm16EE15reserveCapacityEm
-__ZN3JSC7JSArray16getPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayE
-__ZN3JSC9ExecState10arrayTableEPS0_
-__ZN3JSC20MarkedArgumentBuffer10slowAppendENS_7JSValueE
-__ZN3WTF9HashTableIPN3JSC20MarkedArgumentBufferES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehas
-__ZN3JSC8JITStubs24cti_op_get_by_val_stringEPPv
-__ZN3JSCL16mathProtoFuncLogEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7UString8toDoubleEv
-__ZN3WTF9HashTableIPN3JSC7UString3RepES4_NS_17IdentityExtractorIS4_EENS_7PtrHashIS4_EENS_10HashTraitsIS4_EESA_E4findIS4_NS_22Id
-__ZN3JSCL29objectProtoFuncHasOwnPropertyEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL18arrayProtoFuncSortEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArray4sortEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataE
-__ZN3WTF7AVLTreeIN3JSC32AVLTreeAbstractorForArrayCompareELj44ENS_18AVLTreeDefaultBSetILj44EEEE6insertEi
-__ZN3JSCltERKNS_7UStringES2_
-__ZN3WTF7AVLTreeIN3JSC32AVLTreeAbstractorForArrayCompareELj44ENS_18AVLTreeDefaultBSetILj44EEEE7balanceEi
-__Z12jsRegExpFreeP8JSRegExp
-__ZN3JSCL21stringProtoFuncConcatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC19globalFuncEncodeURIEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC19globalFuncDecodeURIEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL6decodeEPNS_9ExecStateERKNS_7ArgListEPKcb
-__ZN3WTF7Unicode18UTF8SequenceLengthEc
-__ZN3WTF7Unicode18decodeUTF8SequenceEPKc
-__ZN3JSCL22numberProtoFuncToFixedEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16integerPartNoExpEd
-__ZN3WTF14FastMallocZone10statisticsEP14_malloc_zone_tP19malloc_statistics_t
-__ZN3JSC4Heap26protectedGlobalObjectCountEv
-__ZN3JSC10JSFunction15argumentsGetterEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZNK3JSC11Interpreter17retrieveArgumentsEPNS_9ExecStateEPNS_10JSFunctionE
-__ZN3JSCL21dateProtoFuncSetMonthEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL23setNewValueFromDateArgsEPNS_9ExecStateENS_7JSValueERKNS_7ArgListEib
-__ZN3JSCL20dateProtoFuncSetDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF6VectorIPNS0_IN3JSC10RegisterIDELm32EEELm32EE14expandCapacityEm
-__ZN3JSC8JITStubs14cti_op_pre_incEPPv
-__ZN3WTF6VectorIPN3JSC14ExpressionNodeELm16EE14expandCapacityEm
-__ZN3JSC13UnaryPlusNodeD0Ev
-__ZN3JSC3JIT19emit_op_to_jsnumberEPNS_11InstructionE
-__ZN3JSC3JIT23emitSlow_op_to_jsnumberEPNS_11InstructionERPNS_13SlowCaseEntryE
-__ZN3JSC8JITStubs18cti_op_to_jsnumberEPPv
-__ZN3JSC6JSLock12DropAllLocksC1Eb
-__ZN3JSCL17createJSLockCountEv
-__ZN3JSC6JSLock12DropAllLocksD1Ev
-__ZN3JSCL24dateProtoFuncSetFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF6VectorIN3JSC15StringJumpTableELm0EE15reserveCapacityEm
-__ZN3WTF7HashMapINS_6RefPtrIN3JSC7UString3RepEEENS2_14OffsetLocationENS_7StrHashIS5_EENS_10HashTraitsIS5_EENS9_IS6_EEE3addEPS4_
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC7UString3RepEEESt4pairIS5_NS2_14OffsetLocationEENS_18PairFirstExtractorIS8_EENS_7StrHashIS5_
-__ZN3JSC3JIT21emit_op_switch_stringEPNS_11InstructionE
-__ZN3JSC8JITStubs20cti_op_switch_stringEPPv
-__ZN3WTF6VectorIN3JSC14ExecutablePool10AllocationELm2EE14expandCapacityEm
-__ZN3JSC12JSGlobalData6createEb
-__ZN3JSCL13allocateBlockILNS_8HeapTypeE1EEEPNS_14CollectorBlockEv
-__ZN3JSC7JSValueC1EPNS_9ExecStateEd
-__ZN3JSC10JSFunctionC1EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectESA_RK
-__ZN3JSC8JSObject17putDirectFunctionEPNS_9ExecStateEPNS_16InternalFunctionEj
-__ZN3JSC7CStringD1Ev
-__ZN3WTF7HashMapIPvjNS_7PtrHashIS1_EEN3JSC17JSValueHashTraitsENS_10HashTraitsIjEEE3addERKS1_RKj
-__ZN3WTF6VectorINS_6RefPtrIN3JSC12FuncExprNodeEEELm0EE14shrinkCapacityEm
-__ZN3JSC14ExpressionNodeD2Ev
-__ZThn12_N3JSC11ProgramNodeD0Ev
-__ZThn12_N3JSC12FuncExprNodeD0Ev
-__ZThn12_N3JSC16FunctionBodyNodeD0Ev
-__ZN3JSC8JITStubs16cti_op_new_arrayEPvz
-__ZN3WTF6VectorIN3JSC17StructureStubInfoELm0EE15reserveCapacityEm
-__ZN3JSC17BytecodeGenerator10emitOpcodeENS_8OpcodeIDE
-__ZN3JSC23MacroAssemblerX86Common4moveENS_3X8610RegisterIDES2_
-__ZN3JSC8JITStubs15cti_op_new_funcEPvz
-__ZN3JSC8JITStubs21cti_op_resolve_globalEPvz
-__ZN3JSC8JITStubs16cti_op_get_by_idEPvz
-__ZN3JSC8JITStubs31cti_op_construct_NotJSConstructEPvz
-__ZN3JSC8JITStubs16cti_op_put_by_idEPvz
-__ZN3JSC8JITStubs13cti_op_strcatEPvz
-__ZN3JSC8JITStubs19cti_op_resolve_funcEPvz
-__ZN3JSC8JITStubs23cti_vm_dontLazyLinkCallEPvz
-__ZN3JSC8JITStubs22cti_op_call_JSFunctionEPvz
-__ZN3JSC8JITStubs23cti_register_file_checkEPvz
-__ZN3JSC8JITStubs13cti_op_negateEPvz
-__ZN3JSC8JITStubs28cti_op_construct_JSConstructEPvz
-__ZN3JSC23MacroAssemblerX86Common12branchTest32ENS0_9ConditionENS_22AbstractMacroAssemblerINS_12X86AssemblerEE7AddressENS4_5Imm
-__ZN3JSC8JITStubs23cti_op_put_by_val_arrayEPvz
-__ZN3JSC8JITStubs23cti_op_put_by_id_secondEPvz
-__ZN3JSC15AssemblerBuffer14executableCopyEPNS_14ExecutablePoolE
-__ZN3JSC12X86Assembler8sarl_i8rEiNS_3X8610RegisterIDE
-__ZN3JSC12X86Assembler23X86InstructionFormatter9twoByteOpENS0_15TwoByteOpcodeIDEiNS_3X8610RegisterIDEi
-__ZN3JSC8JITStubs10cti_op_mulEPvz
-__ZN3JSC12jsNumberCellEPNS_12JSGlobalDataEd
-__ZN3JSC8JITStubs10cti_op_subEPvz
-__ZN3JSC8JITStubs10cti_op_divEPvz
-__ZN3JSC8JITStubs23cti_op_get_by_id_secondEPvz
-__ZN3JSC8JITStubs19cti_vm_lazyLinkCallEPvz
-__ZN3WTF6VectorIPN3JSC12CallLinkInfoELm0EE14expandCapacityEm
-__ZN3JSC8JITStubs19cti_op_convert_thisEPvz
-__ZN3JSC8JITStubs21cti_op_put_by_id_failEPvz
-__ZN3JSC8JITStubs10cti_op_addEPvz
-__ZN3JSC8JITStubs17cti_timeout_checkEPvz
-__ZN3JSC9jsBooleanEb
-__ZN3JSC9CodeBlock19isKnownNotImmediateEi
-__ZN3JSC12X86Assembler8movsd_mrEiNS_3X8610RegisterIDENS1_13XMMRegisterIDE
-__ZN3JSC8JITStubs25cti_op_call_NotJSFunctionEPvz
-__ZNK3JSC12JSNumberCell8toNumberEPNS_9ExecStateE
-__ZN3JSC8JITStubs26cti_op_get_by_id_self_failEPvz
-__ZN3JSC8JITStubs10cti_op_endEPvz
-__ZThn12_N3JSC12FuncDeclNodeD0Ev
-__ZN3JSC8JITStubs24cti_op_resolve_with_baseEPvz
-__ZN3JSC8JITStubs19cti_op_new_func_expEPvz
-__ZN3JSC8JITStubs22cti_op_push_activationEPvz
-__ZN3JSC8JITStubs17cti_op_get_by_valEPvz
-__ZN3JSC8JITStubs22cti_op_call_arityCheckEPvz
-__ZN3JSC8JITStubs11cti_op_lessEPvz
-__ZN3JSC12JSNumberCell18getPrimitiveNumberEPNS_9ExecStateERdRNS_7JSValueE
-__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDE
-__ZN3JSC8JITStubs27cti_op_get_by_id_proto_listEPvz
-__ZN3JSC8JITStubs12cti_op_jtrueEPvz
-__ZN3JSC8JITStubs10cti_op_modEPvz
-__ZN3JSC8JITStubs10cti_op_neqEPvz
-__ZN3JSC8JITStubs12cti_op_jlessEPvz
-__ZN3JSC8JITStubs24cti_op_get_by_id_genericEPvz
-__ZN3JSC8JITStubs14cti_op_jlesseqEPvz
-__ZN3JSC8JITStubs26cti_op_tear_off_activationEPvz
-__ZN3JSC8JITStubs21cti_op_ret_scopeChainEPvz
-__ZN3JSC8JITStubs19cti_op_to_primitiveEPvz
-__ZNK3JSC12JSNumberCell8toStringEPNS_9ExecStateE
-__ZN3JSC8JITStubs13cti_op_bitandEPvz
-__ZN3JSC8JITStubs13cti_op_lshiftEPvz
-__ZN3JSC8JITStubs13cti_op_bitnotEPvz
-__ZNK3JSC12JSNumberCell9toBooleanEPNS_9ExecStateE
-__ZN3JSC8JITStubs14cti_op_urshiftEPvz
-__ZNK3JSC12JSNumberCell18getTruncatedUInt32ERj
-__ZN3JSC4Yarr14RegexGenerator28generateCharacterClassSingleERNS1_19TermGenerationStateE
-__ZN3WTF15deleteAllValuesIPN3JSC4Yarr18PatternDisjunctionELm4EEEvRKNS_6VectorIT_XT0_EEE
-__ZN3JSC8JITStubs17cti_op_new_regexpEPvz
-__ZN3JSC8JITStubs12cti_op_bitorEPvz
-__ZNK3JSC12JSNumberCell17getTruncatedInt32ERi
-__ZN3JSC8JITStubs13cti_op_rshiftEPvz
-__ZN3JSC8JITStubs13cti_op_bitxorEPvz
-__ZN3WTF7HashSetINS_6RefPtrIN3JSC7UString3RepEEENS2_17IdentifierRepHashENS_10HashTraitsIS5_EEE3addERKS5_
-__ZN3JSC8JITStubs9cti_op_eqEPvz
-__ZN3JSC8JITStubs16cti_op_call_evalEPvz
-__ZN3JSC8JITStubs19cti_op_resolve_skipEPvz
-__ZN3JSC8JITStubs17cti_op_new_objectEPvz
-__ZN3JSC8JITStubs14cti_op_resolveEPvz
-__ZN3JSC8JITStubs17cti_op_put_by_valEPvz
-__ZN3JSC8JITStubs18cti_op_switch_charEPvz
-__ZN3JSC8JITStubs28cti_op_get_by_id_string_failEPvz
-__ZThn12_N3JSC8EvalNodeD0Ev
-__ZN3WTF6VectorIN3JSC7UStringELm16EE14expandCapacityEm
-__ZN3JSC8JITStubs17cti_op_get_pnamesEPvz
-__ZN3JSC8JITStubs17cti_op_next_pnameEPvz
-__ZN3WTF7HashSetIPN3JSC20MarkedArgumentBufferENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEE3addERKS3_
-__ZN3WTF9HashTableIPN3JSC20MarkedArgumentBufferES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E4findI
-__ZN3JSC8JITStubs24cti_op_get_by_val_stringEPvz
-__ZN3JSC4Yarr6ParserINS0_23RegexPatternConstructorEE28CharacterClassParserDelegate25atomBuiltInCharacterClassENS0_23BuiltInChar
-__ZN3JSC12jsNumberCellEPNS_9ExecStateEd
-__ZN3JSC8JITStubs18cti_op_is_functionEPvz
-__ZN3JSC8JITStubs16cti_op_is_objectEPvz
-__ZN3JSC8JITStubs16cti_op_nstricteqEPvz
-__ZN3JSC8JITStubs13cti_op_lesseqEPvz
-__ZNK3JSC12JSNumberCell11toPrimitiveEPNS_9ExecStateENS_22PreferredPrimitiveTypeE
-__ZN3JSC4Yarr14RegexGenerator27generateCharacterClassFixedERNS1_19TermGenerationStateE
+_cti_op_bitnot
+__ZNK3JSC10JSONObject9classInfoEv
+_JSGlobalContextCreate
+_JSGlobalContextCreateInGroup
+__ZN3JSC12JSGlobalData18createContextGroupENS_15ThreadStackTypeE
+__ZN3JSC21createIdentifierTableEv
+__ZN3JSC4Heap29makeUsableFromMultipleThreadsEv
+__ZN3JSC6JSLock6unlockENS_14JSLockBehaviorE
+_JSValueMakeFromJSONString
+_JSValueIsNull
+_JSValueIsUndefined
+_JSValueIsBoolean
+_JSValueIsNumber
+_JSValueIsString
+__ZN3JSC14JSGlobalObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+_JSValueIsInstanceOfConstructor
+__ZN3JSC8JSObject11hasInstanceEPNS_9ExecStateENS_7JSValueES3_
+_JSValueToNumber
+_JSObjectGetPropertyAtIndex
+_JSValueToStringCopy
+__ZN14OpaqueJSString6createERKN3JSC7UStringE
+_JSStringCopyCFString
__ZN3JSC4Heap7destroyEv
+__ZN3JSC4Heap10freeBlocksEv
+__ZN3JSC14JSGlobalObjectD1Ev
+__ZN3JSC14JSGlobalObject25destroyJSGlobalObjectDataEPv
__ZN3JSC12JSGlobalDataD1Ev
__ZN3JSC12JSGlobalDataD2Ev
__ZN3JSC12RegisterFileD1Ev
__ZNK3JSC9HashTable11deleteTableEv
__ZN3JSC5LexerD1Ev
-__ZN3JSC5LexerD2Ev
__ZN3WTF20deleteAllPairSecondsIP24OpaqueJSClassContextDataKNS_7HashMapIP13OpaqueJSClassS2_NS_7PtrHashIS5_EENS_10HashTraitsIS5_E
__ZN3JSC17CommonIdentifiersD2Ev
__ZN3JSC21deleteIdentifierTableEPNS_15IdentifierTableE
+__ZN3JSC15IdentifierTableD2Ev
__ZN3JSC4HeapD1Ev
+__ZN3JSC9JITThunksD1Ev
+__ZN3JSC16NativeExecutableD0Ev
__ZN3JSC12SmallStringsD1Ev
-__ZN3JSCL16mathProtoFuncMinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL17arrayProtoFuncPopEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArray3popEv
-__ZN3JSC11DoWhileNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC11DoWhileNodeD0Ev
-__ZN3JSC3JIT18emit_op_switch_immEPNS_11InstructionE
-__ZN3JSC8JITStubs17cti_op_switch_immEPPv
-__ZN3JSC13UnaryPlusNode14stripUnaryPlusEv
-__ZN3JSC15globalFuncIsNaNEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17NumberConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL21callNumberConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF6VectorIPNS0_IN3JSC10IdentifierELm64EEELm32EE14expandCapacityEm
-__ZN3JSC8JITStubs19cti_op_is_undefinedEPvz
-__ZN3JSC8JITStubs13cti_op_typeofEPvz
-__ZN3JSC8JITStubs33cti_op_create_arguments_no_paramsEPvz
-__ZN3JSC8JITStubs19cti_op_load_varargsEPvz
-__ZN3JSC8JITStubs10cti_op_notEPvz
-__ZN3JSC8JITStubs16cti_op_is_stringEPvz
-__ZN3JSCL24regExpConstructorDollar1EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3WTF6VectorIN3JSC15StringJumpTableELm0EE14expandCapacityEm
-__ZN3JSC8JITStubs20cti_op_switch_stringEPvz
-__ZN3JSC9Arguments3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC8JITStubs18cti_op_to_jsnumberEPvz
-__ZN3JSC8JITStubs19cti_op_loop_if_lessEPvz
-__ZN3JSC9LabelNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC9LabelNodeD0Ev
-__ZNK3JSC7UString5asciiEv
-__ZN3JSC8JITStubs27cti_op_get_by_id_array_failEPvz
-__ZN3JSC12X86Assembler23X86InstructionFormatter9oneByteOpENS0_15OneByteOpcodeIDEiPv
-__ZN3JSC8JITStubs23cti_op_create_argumentsEPvz
-__ZN3JSCL21arrayProtoFuncUnShiftEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs25cti_op_tear_off_argumentsEPvz
-__ZN3JSC7JSArray11sortNumericEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataE
-__ZN3JSC7JSArray17compactForSortingEv
-__ZN3JSCL22compareNumbersForQSortEPKvS1_
-__ZN3JSC8JITStubs15cti_op_post_incEPPv
-__ZN3JSC8JITStubs24cti_op_put_by_id_genericEPvz
-__ZN3JSCL24regExpConstructorDollar2EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar3EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar4EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar5EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar6EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL21stringProtoFuncSubstrEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL23stringProtoFuncFontsizeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL24dateProtoFuncToUTCStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL19stringProtoFuncLinkEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL9dateParseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs21cti_op_loop_if_lesseqEPPv
-__ZN3JSCL16mathProtoFuncExpEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC4Yarr17nonwordcharCreateEv
-__ZN3WTF6VectorIPN3JSC4Yarr18PatternDisjunctionELm4EE14expandCapacityEmPKS4_
-__Z15jsc_pcre_xclassiPKh
-__ZN3JSC18RegExpMatchesArray3putEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC28globalFuncDecodeURIComponentEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs27cti_op_get_by_id_array_failEPPv
-__ZNK3JSC9Arguments9classInfoEv
-__ZN3JSC9Arguments15copyToRegistersEPNS_9ExecStateEPNS_8RegisterEj
-__ZN3JSC19JSStaticScopeObject4markEv
-__ZN3JSC8JITStubs19cti_op_loop_if_lessEPPv
-__ZN3JSC8JITStubs16cti_op_del_by_idEPvz
-__ZN3JSC7JSArray14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC7UString6appendEPKti
-__ZN3JSC8JITStubs17cti_op_push_scopeEPvz
-__ZN3JSC8JITStubs19cti_op_resolve_baseEPvz
-__ZN3JSC8JITStubs16cti_op_pop_scopeEPvz
-__ZN3JSC8JITStubs17cti_op_is_booleanEPvz
-__ZN3JSCL20arrayProtoFuncSpliceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs17cti_op_jmp_scopesEPvz
-__ZN3JSC8JITStubs9cti_op_inEPvz
-__ZN3JSC8JITStubs15cti_op_stricteqEPvz
-__ZN3JSC8JITStubs32cti_op_get_by_id_proto_list_fullEPvz
-__ZN3WTF6VectorIiLm8EE14expandCapacityEm
-__ZN3JSCL21stringProtoFuncSearchEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs12cti_vm_throwEPvz
-__ZN3JSC8JITStubs21cti_op_push_new_scopeEPvz
-__ZN3JSC8JITStubs16cti_op_is_numberEPvz
-__ZN3JSC16JSVariableObject16getPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayE
-__ZNK3JSC8JSString8toObjectEPNS_9ExecStateE
-__ZN3JSC12StringObject16getPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayE
-__ZN3JSC9ExecState11stringTableEPS0_
-__ZN3JSC11JSImmediate8toObjectENS_7JSValueEPNS_9ExecStateE
-__ZN3JSC36constructBooleanFromImmediateBooleanEPNS_9ExecStateENS_7JSValueE
-__ZN3JSC13BooleanObjectD1Ev
+__ZN3JSC14JSGlobalObject17putWithAttributesEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueEj
+__ZN3JSC17ObjectConstructor11getCallDataERNS_8CallDataE
+__ZN3JSCL21callObjectConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC9Arguments14deletePropertyEPNS_9ExecStateEj
+__ZN3JSCL18JSONProtoFuncParseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC7JSArray11fillArgListEPNS_9ExecStateERNS_20MarkedArgumentBufferE
__ZN3JSCL17arrayProtoFuncMapEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArrayC2EN3WTF10PassRefPtrINS_9StructureEEEj
-__ZN3JSC8JITStubs17cti_op_del_by_valEPvz
-__ZN3JSC8JITStubs27cti_op_get_by_id_proto_failEPvz
-__ZN3JSC10JSFunction12callerGetterEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZNK3JSC11Interpreter14retrieveCallerEPNS_9ExecStateEPNS_16InternalFunctionE
-__ZN3JSC18globalFuncIsFiniteEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC6JSCell18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZNK3JSC12JSNumberCell8toObjectEPNS_9ExecStateE
-__ZN3JSC15constructNumberEPNS_9ExecStateENS_7JSValueE
-__ZN3JSC12NumberObject11getJSNumberEv
-__ZN3JSCL7dateNowEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC12NumberObjectD1Ev
-__ZN3JSC8JSObject18getPrimitiveNumberEPNS_9ExecStateERdRNS_7JSValueE
-__ZN3JSCL22numberProtoFuncValueOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC13JSNotAnObject18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC19JSStaticScopeObject18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC16InternalFunction4nameEPNS_12JSGlobalDataE
-__ZN3JSCL18arrayProtoFuncSomeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JSString18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC12JSNumberCell11getJSNumberEv
-__ZN3JSC23createNotAFunctionErrorEPNS_9ExecStateENS_7JSValueEjPNS_9CodeBlockE
-__ZN3JSC17PrefixBracketNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC17PrefixBracketNodeD0Ev
-__ZN3JSC17RegExpConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL21callRegExpConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC7JSArray4sortEPNS_9ExecStateE
-__ZN3JSCL27dateProtoFuncSetUTCFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL24dateProtoFuncSetUTCHoursEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL23setNewValueFromTimeArgsEPNS_9ExecStateENS_7JSValueERKNS_7ArgListEib
-__ZN3JSC8JITStubs17cti_op_switch_immEPvz
-__ZN3JSC12RegExpObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSCL24setRegExpObjectLastIndexEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueE
-__ZN3JSCL28regExpConstructorLeftContextEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC18RegExpMatchesArray14deletePropertyEPNS_9ExecStateEj
-__ZN3JSC18RegExpMatchesArray3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC10JSFunction12lengthGetterEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZNK3JSC12NumberObject9classInfoEv
-__ZN3JSC8JITStubs12cti_op_throwEPvz
-__ZN3JSCL19isNonASCIIIdentPartEi
-__ZN3JSCL27dateProtoFuncToLocaleStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16formatLocaleDateEPNS_9ExecStateEPNS_12DateInstanceEdNS_20LocaleDateTimeFormatERKNS_7ArgListE
+__ZN3JSC12StringObjectC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEE
+__ZN3JSC23MacroAssemblerX86Common9urshift32ENS_12X86Registers10RegisterIDES2_
+_cti_op_urshift
+__ZN3JSC15ObjectPrototype3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSCL21stringProtoFuncConcatEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL24regExpConstructorDollar2EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL24regExpConstructorDollar3EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL24regExpConstructorDollar4EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC20MarkedArgumentBuffer9markListsERNS_9MarkStackERN3WTF7HashSetIPS0_NS3_7PtrHashIS5_EENS3_10HashTraitsIS5_EEEE
+__ZN3JSCL22numberProtoFuncToFixedEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL16integerPartNoExpEd
+__ZN3JSC10makeStringINS_7UStringES1_PKcS1_EES1_T_T0_T1_T2_
+__ZN3JSC13tryMakeStringINS_7UStringES1_PKcS1_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_T2_
+__ZN3JSCL26numberProtoFuncToPrecisionEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL12charSequenceEci
+__ZN3JSC13tryMakeStringINS_7UStringEPKcS1_S1_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_T2_
+__ZN3JSCL24booleanProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC12JSActivation12toThisObjectEPNS_9ExecStateE
+_cti_op_div
+__ZN3JSC17BytecodeGenerator21emitComplexJumpScopesEPNS_5LabelEPNS_18ControlFlowContextES4_
+__ZN7WebCore10StringImpl7replaceEjjPS0_
+__ZN3JSC12RegExpObject11getCallDataERNS_8CallDataE
+__ZN3JSC12StringObject19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZN3JSC9ExecState11stringTableEPS0_
+__ZNK3JSC7JSValue16synthesizeObjectEPNS_9ExecStateE
+__ZN3JSC13JSNotAnObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSCL35objectProtoFuncPropertyIsEnumerableEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC8JSObject20propertyIsEnumerableEPNS_9ExecStateERKNS_10IdentifierE
+__ZN3JSC7JSArray24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC18PropertyDescriptor13setDescriptorENS_7JSValueEj
+__ZNK3JSC18PropertyDescriptor10enumerableEv
+__ZN3JSCL22JSONProtoFuncStringifyEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC11StringifierC2EPNS_9ExecStateENS_7JSValueES3_
+__ZN3JSC11Stringifier9stringifyENS_7JSValueE
+__ZN3JSC11Stringifier22appendStringifiedValueERNS_13StringBuilderENS_7JSValueEPNS_8JSObjectERKNS_27PropertyNameForFunctionCallE
+__ZNK3JSC6JSCell9getStringEPNS_9ExecStateERNS_7UStringE
+__ZN3JSC11Stringifier6Holder18appendNextPropertyERS0_RNS_13StringBuilderE
+__ZN3JSC11Stringifier18appendQuotedStringERNS_13StringBuilderERKNS_7UStringE
+__ZN3JSC11StringifierD2Ev
+__ZN3JSC5Lexer19copyCodeWithoutBOMsEv
__ZN3JSCL21dateProtoFuncSetHoursEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL23dateProtoFuncSetMinutesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL23dateProtoFuncSetSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL28dateProtoFuncSetMilliSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC12JSNumberCell12toThisObjectEPNS_9ExecStateE
-__ZN3JSC16ErrorConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL20callErrorConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17PrototypeFunctionC1EPNS_9ExecStateEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectES6_RKNS_7ArgListEE
-__ZN3JSC17PrototypeFunctionC2EPNS_9ExecStateEiRKNS_10IdentifierEPFNS_7JSValueES2_PNS_8JSObjectES6_RKNS_7ArgListEE
-__ZN3JSC17PrototypeFunction11getCallDataERNS_8CallDataE
-__ZN3JSC17PrototypeFunctionD1Ev
-__ZN3JSCL24booleanProtoFuncToStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17BytecodeGenerator18emitJumpSubroutineEPNS_10RegisterIDEPNS_5LabelE
-__ZN3JSC3JIT11emit_op_jsrEPNS_11InstructionE
-__ZN3WTF6VectorIN3JSC3JIT7JSRInfoELm0EE14expandCapacityEm
-__ZN3JSC3JIT12emit_op_sretEPNS_11InstructionE
-__ZN3JSC6Parser7reparseINS_8EvalNodeEEEN3WTF10PassRefPtrIT_EEPNS_12JSGlobalDataEPS5_
-__ZN3JSC8EvalNode6createEPNS_12JSGlobalDataEPNS_14SourceElementsEPN3WTF6VectorISt4pairINS_10IdentifierEjELm0EEEPNS6_IPNS_12Func
-__ZN3JSC8EvalNode31bytecodeForExceptionInfoReparseEPNS_14ScopeChainNodeEPNS_9CodeBlockE
-__ZN3JSC20FixedVMPoolAllocator17coalesceFreeSpaceEv
-__ZN3WTF6VectorIPN3JSC13FreeListEntryELm0EE15reserveCapacityEm
-__ZN3JSCL35reverseSortFreeListEntriesByPointerEPKvS1_
-__ZN3JSC14globalFuncEvalEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL21functionProtoFuncCallEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL22functionProtoFuncApplyEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC9Arguments11fillArgListEPNS_9ExecStateERNS_20MarkedArgumentBufferE
-__ZNK3JSC7JSValue12toThisObjectEPNS_9ExecStateE
-__ZN3JSC8VoidNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC8VoidNodeD0Ev
-__ZN3JSC16InternalFunctionC2EPNS_12JSGlobalDataEN3WTF10PassRefPtrINS_9StructureEEERKNS_10IdentifierE
-__ZN3JSC20MarkedArgumentBuffer9markListsERN3WTF7HashSetIPS0_NS1_7PtrHashIS3_EENS1_10HashTraitsIS3_EEEE
-__ZN3JSC7CStringaSERKS0_
-__ZNK3JSC19JSStaticScopeObject14isDynamicScopeEv
-__ZN3JSCL33reverseSortCommonSizedAllocationsEPKvS1_
-__ZN3JSCL20arrayProtoFuncFilterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC17NumberConstructor16getConstructDataERNS_13ConstructDataE
-__ZN3JSCL30constructWithNumberConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
-__ZN3JSC17BytecodeGenerator18emitUnexpectedLoadEPNS_10RegisterIDEb
-__ZN3JSC8JITStubs12cti_op_throwEPPv
-__ZN3JSC6JSCell9getObjectEv
-__ZN3JSCL21arrayProtoFuncReverseEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC8JSObject16isVariableObjectEv
-__ZN3JSC18EmptyStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSCL27compareByStringPairForQSortEPKvS1_
+__ZN3JSC17FunctionPrototype11getCallDataERNS_8CallDataE
+__ZN3JSCL28dateProtoFuncGetMilliSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC10makeStringINS_7UStringES1_EES1_T_T0_
+__ZN3JSC13tryMakeStringINS_7UStringES1_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_
+__ZN3JSC23MacroAssemblerX86Common8branch16ENS0_9ConditionENS_22AbstractMacroAssemblerINS_12X86AssemblerEE9BaseIndexENS4_5Imm32E
__Z22jsc_pcre_ucp_othercasej
-__ZN3JSCL35objectProtoFuncPropertyIsEnumerableEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3WTF7HashMapIjN3JSC7JSValueENS_7IntHashIjEENS_10HashTraitsIjEENS5_IS2_EEE3setERKjRKS2_
-__ZN3WTF9HashTableIjSt4pairIjN3JSC7JSValueEENS_18PairFirstExtractorIS4_EENS_7IntHashIjEENS_14PairHashTraitsINS_10HashTraitsIjEE
-__ZN3JSC12RegisterFile21releaseExcessCapacityEv
-__ZN3JSCL20isNonASCIIIdentStartEi
-__ZN3JSC17BytecodeGenerator14emitPutByIndexEPNS_10RegisterIDEjS2_
-__ZN3JSC3JIT20emit_op_put_by_indexEPNS_11InstructionE
-__ZN3JSC8JITStubs19cti_op_put_by_indexEPPv
-__ZN3JSCL25numberConstructorMaxValueEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL28numberConstructorPosInfinityEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL28numberConstructorNegInfinityEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC18BooleanConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL22callBooleanConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL17mathProtoFuncATanEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JITStubs17cti_op_jmp_scopesEPPv
-__ZNK3JSC8JSObject11hasPropertyEPNS_9ExecStateEj
-__ZN3JSCL17mathProtoFuncASinEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC11Interpreter7executeEPNS_8EvalNodeEPNS_9ExecStateEPNS_8JSObjectEPNS_14ScopeChainNodeEPNS_7JSValueE
-_JSContextGetGlobalObject
-__ZN3JSC4Heap14registerThreadEv
-__ZN3JSC6JSLockC1EPNS_9ExecStateE
-_JSStringCreateWithUTF8CString
-__ZN3WTF7Unicode18convertUTF8ToUTF16EPPKcS2_PPtS4_b
-_JSClassCreate
-__ZN13OpaqueJSClass6createEPK17JSClassDefinition
-__ZN13OpaqueJSClassC2EPK17JSClassDefinitionPS_
-__ZN3JSC7UString3Rep14createFromUTF8EPKc
-__ZN3WTF7HashMapINS_6RefPtrIN3JSC7UString3RepEEEP19StaticFunctionEntryNS_7StrHashIS5_EENS_10HashTraitsIS5_EENSA_IS7_EEE3addERKS
-__ZN3WTF9HashTableINS_6RefPtrIN3JSC7UString3RepEEESt4pairIS5_P19StaticFunctionEntryENS_18PairFirstExtractorIS9_EENS_7StrHashIS5
-__ZN3WTF7HashMapINS_6RefPtrIN3JSC7UString3RepEEEP16StaticValueEntryNS_7StrHashIS5_EENS_10HashTraitsIS5_EENSA_IS7_EEE3addERKS5_R
-_JSClassRetain
-_JSObjectMake
-__ZN3JSC16JSCallbackObjectINS_8JSObjectEE4initEPNS_9ExecStateE
-__ZN13OpaqueJSClass9prototypeEPN3JSC9ExecStateE
-__ZN13OpaqueJSClass11contextDataEPN3JSC9ExecStateE
-__ZN3WTF9HashTableIP13OpaqueJSClassSt4pairIS2_P24OpaqueJSClassContextDataENS_18PairFirstExtractorIS6_EENS_7PtrHashIS2_EENS_14Pa
-__ZN24OpaqueJSClassContextDataC2EP13OpaqueJSClass
-__ZN3JSC7UString3Rep13createCopyingEPKti
-_JSObjectSetProperty
-__ZNK14OpaqueJSString10identifierEPN3JSC12JSGlobalDataE
-__ZN3JSC14JSGlobalObject17putWithAttributesEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueEj
-_JSStringRelease
-__ZN3JSC16JSCallbackObjectINS_8JSObjectEE18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
-__ZN3JSC16JSCallbackObjectINS_8JSObjectEE20staticFunctionGetterEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC18JSCallbackFunctionC1EPNS_9ExecStateEPFPK13OpaqueJSValuePK15OpaqueJSContextPS3_S9_mPKS5_PS5_ERKNS_10IdentifierE
-__ZN3JSC18JSCallbackFunction11getCallDataERNS_8CallDataE
-__ZN3JSC18JSCallbackFunction4callEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC6JSLock12DropAllLocksC1EPNS_9ExecStateE
-_JSObjectGetPrivate
-__ZNK3JSC16JSCallbackObjectINS_8JSObjectEE9classInfoEv
-_JSValueMakeUndefined
-__ZN3JSC16JSCallbackObjectINS_8JSObjectEE17staticValueGetterEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN14OpaqueJSString6createERKN3JSC7UStringE
-_JSStringCreateWithCharacters
-_JSValueMakeString
-__ZNK14OpaqueJSString7ustringEv
-__ZN3JSC7UStringC1EPtib
-__ZN3JSC16JSCallbackObjectINS_8JSObjectEED1Ev
-_JSClassRelease
-__ZL25clearReferenceToPrototypeP13OpaqueJSValue
-_JSObjectGetProperty
-_JSValueToObject
-__ZN3JSCL22dateProtoFuncGetUTCDayEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL24dateProtoFuncGetUTCMonthEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC8JSString18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
+__ZN3JSCL24dateProtoFuncSetUTCHoursEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL26dateProtoFuncSetUTCMinutesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL26dateProtoFuncSetUTCSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL23dateProtoFuncGetUTCDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL27dateProtoFuncGetUTCFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZNK3JSC7UString8toUInt32EPb
+__ZN3JSCL23dateProtoFuncSetUTCDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL24dateProtoFuncGetUTCHoursEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL26dateProtoFuncGetUTCMinutesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL26dateProtoFuncGetUTCSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL7dateUTCEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC12RegExpObject11getCallDataERNS_8CallDataE
-__ZN3JSC9Arguments14deletePropertyEPNS_9ExecStateEj
-_JSValueMakeBoolean
-_JSValueToNumber
-_JSStringCreateWithCFString
-__ZN3WTF13tryFastCallocEmm
-_JSValueMakeNumber
-__ZN3JSC18JSCallbackFunctionD1Ev
-_JSValueToStringCopy
-_JSStringCopyCFString
-__ZN3JSC18ConstStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC13ConstDeclNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC13ConstDeclNode14emitCodeSingleERNS_17BytecodeGeneratorE
-__ZN3JSC13ConstDeclNodeD0Ev
-__ZN3JSC18ConstStatementNodeD0Ev
+__ZN3JSCL24dateProtoFuncSetFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF6VectorIiLm32EE15reserveCapacityEm
+__ZN3JSCL22regExpProtoFuncCompileEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC27PropertyNameForFunctionCall5valueEPNS_9ExecStateE
+_cti_op_new_error
+__ZN7WebCore6String6numberEj
+__ZN3JSCL27dateProtoFuncGetUTCFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL24dateProtoFuncGetUTCMonthEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL31dateProtoFuncToLocaleDateStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL16formatLocaleDateEPNS_9ExecStateEPNS_12DateInstanceEdNS_20LocaleDateTimeFormatERKNS_7ArgListE
+__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeEPKc
+__ZN3JSCL23throwStackOverflowErrorEPNS_9ExecStateEPNS_12JSGlobalDataENS_16ReturnAddressPtrERS4_
+__ZN3JSC24createStackOverflowErrorEPNS_9ExecStateE
+__ZN3JSC12RegisterFile21releaseExcessCapacityEv
+__ZN3JSC12StringObject18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
+__ZN3JSC19FunctionConstructor11getCallDataERNS_8CallDataE
+__ZN7WebCore6String8fromUTF8EPKc
+__ZN3JSCL27dateProtoFuncToLocaleStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSC18BooleanConstructor16getConstructDataERNS_13ConstructDataE
__ZN3JSCL31constructWithBooleanConstructorEPNS_9ExecStateEPNS_8JSObjectERKNS_7ArgListE
__ZN3JSC16constructBooleanEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSCL31dateProtoFuncGetUTCMillisecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL28dateProtoFuncGetMilliSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC7JSArray11sortNumericEPNS_9ExecStateENS_7JSValueENS_8CallTypeERKNS_8CallDataE
+__ZN3JSCL22compareNumbersForQSortEPKvS1_
+__ZN3WTF6VectorIjLm16EE15reserveCapacityEm
+__ZN3JSC8JSString16getIndexSlowCaseEPNS_9ExecStateEj
+__ZN3JSCL25numberConstructorNaNValueEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC6JSCell3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
+__ZN3JSC4Yarr15nondigitsCreateEv
+__ZN3JSCL7dateUTCEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC7ArgList2atEm
+__ZN3JSCL16mathProtoFuncTanEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC8JSObject16isVariableObjectEv
+__ZN7WebCore6String6insertERKS0_j
+__ZN7WebCore6String6insertEPKtjj
+__ZN7WebCore10StringImpl37createStrippingNullCharactersSlowCaseEPKtj
__ZN3JSCL31dateProtoFuncToLocaleTimeStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL21regExpObjectLastIndexEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
+__ZN3WTF7CString11mutableDataEv
+__ZN3WTF7CString18copyBufferIfNeededEv
+__ZN3JSCL18arrayProtoFuncSomeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSC21DebuggerStatementNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC21DebuggerStatementNodeD0Ev
-__ZN3JSC4Yarr12RegexPattern21newlineCharacterClassEv
-__ZN3JSC17ObjectConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL23dateProtoFuncSetUTCDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL26stringFromCharCodeSlowCaseEPNS_9ExecStateERKNS_7ArgListE
-__ZN3JSCL21callObjectConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL27objectProtoFuncDefineGetterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JSObject12defineGetterEPNS_9ExecStateERKNS_10IdentifierEPS0_
-__ZN3JSC12GetterSetter4markEv
-__ZN3JSC12GetterSetterD1Ev
-__ZN3JSCL22regExpProtoFuncCompileEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL31dateProtoFuncGetUTCMillisecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF15ThreadConditionD1Ev
+__ZN3JSC18RegExpMatchesArray19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZN3WTF6VectorIPNS0_IN3JSC10IdentifierELm64EEELm32EE14expandCapacityEm
+__ZN3JSC15DateConstructor11getCallDataERNS_8CallDataE
+__ZN3JSCL8callDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL22dateProtoFuncGetUTCDayEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL25arrayProtoFuncLastIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL19arrayProtoFuncEveryEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL23callFunctionConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL19stringProtoFuncLinkEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL25dateProtoFuncToDateStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC10throwErrorEPNS_9ExecStateEPNS_8JSObjectE
+__ZN3JSCL27dateProtoFuncSetUTCFullYearEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL24dateProtoFuncSetUTCMonthEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL31dateProtoFuncSetUTCMillisecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN7WebCore6String6numberEx
__ZNK3JSC17NumberConstructor9classInfoEv
__ZNK3JSC17RegExpConstructor9classInfoEv
-__ZN3JSCL31dateProtoFuncToLocaleDateStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3WTF9HashTableINS_6RefPtrIN7WebCore10StringImplEEESt4pairIS4_P19StaticFunctionEntryENS_18PairFirstExtractorIS8_EENS2_10Stri
+__ZN3WTF9HashTableINS_6RefPtrIN7WebCore10StringImplEEESt4pairIS4_P16StaticValueEntryENS_18PairFirstExtractorIS8_EENS2_10StringH
+__ZN3JSC16JSCallbackObjectINS_8JSObjectEE18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
+__ZN3JSC16JSCallbackObjectINS_8JSObjectEE20staticFunctionGetterEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC18JSCallbackFunctionC1EPNS_9ExecStateEPFPK13OpaqueJSValuePK15OpaqueJSContextPS3_S9_mPKS5_PS5_ERKNS_10IdentifierE
+__ZN3JSC18JSCallbackFunction11getCallDataERNS_8CallDataE
+__ZN3JSC18JSCallbackFunction4callEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC6JSLock12DropAllLocksC1EPNS_9ExecStateE
+_JSValueMakeUndefined
+__ZN3JSC16JSCallbackObjectINS_8JSObjectEE17staticValueGetterEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+_JSValueMakeString
+__ZN3JSC18JSCallbackFunctionD1Ev
+_JSValueMakeBoolean
+_JSStringCreateWithCharacters
+_JSValueMakeNumber
+__ZN3JSC4Heap6isBusyEv
+__ZN7WebCore10StringImpl7replaceEtPS0_
__ZNK3JSC8JSObject14isGlobalObjectEv
-_JSValueToBoolean
-__ZN3JSC8JITStubs13cti_op_lshiftEPPv
-__ZN3JSC8JITStubs13cti_op_bitnotEPPv
-__ZN3JSC6JSCell3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC19FunctionConstructor11getCallDataERNS_8CallDataE
+__ZNK7WebCore6String5splitEtbRN3WTF6VectorIS0_Lm0EEE
+__ZN7WebCore6String6appendEt
+__ZNK3JSC7UString8toUInt32EPb
__ZN3WTF9ByteArray6createEm
-__ZNK3JSC6JSCell9getStringERNS_7UStringE
-__ZN3JSC3JIT12emit_op_loopEPNS_11InstructionE
-__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeE
-__ZN3JSC11JSByteArrayC1EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS3_9ByteArrayEPKNS_9ClassInfoE
-__ZN3JSC11JSByteArrayC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEEPNS3_9ByteArrayEPKNS_9ClassInfoE
+__ZN3JSC11JSByteArrayC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS3_9ByteArrayEPKNS_9ClassInfoE
+__ZN3JSC11JSByteArrayC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEPNS3_9ByteArrayEPKNS_9ClassInfoE
__ZN3JSC11JSByteArray18getOwnPropertySlotEPNS_9ExecStateERKNS_10IdentifierERNS_12PropertySlotE
__ZN3JSC11JSByteArray3putEPNS_9ExecStateEjNS_7JSValueE
__ZN3JSC11JSByteArray3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
__ZN3JSC11JSByteArray18getOwnPropertySlotEPNS_9ExecStateEjRNS_12PropertySlotE
-__ZN3JSC8JITStubs28cti_op_get_by_val_byte_arrayEPPv
-__ZN3JSC8JITStubs28cti_op_put_by_val_byte_arrayEPPv
-__ZL30makeGetterOrSetterPropertyNodePvRKN3JSC10IdentifierES3_PNS0_13ParameterNodeEPNS0_16FunctionBodyNodeERKNS0_10SourceCodeE
-__ZN3JSC17BytecodeGenerator13emitPutGetterEPNS_10RegisterIDERKNS_10IdentifierES2_
-__ZN3JSC17BytecodeGenerator13emitPutSetterEPNS_10RegisterIDERKNS_10IdentifierES2_
-__ZN3JSC3JIT18emit_op_put_getterEPNS_11InstructionE
-__ZN3JSC3JIT18emit_op_put_setterEPNS_11InstructionE
-__ZN3JSC8JITStubs17cti_op_put_getterEPPv
-__ZN3JSC8JITStubs17cti_op_put_setterEPPv
-__ZN3JSC8JSObject12defineSetterEPNS_9ExecStateERKNS_10IdentifierEPS0_
-__ZNK3JSC12GetterSetter14isGetterSetterEv
-__ZNK3JSC6JSCell14isGetterSetterEv
-__ZN3JSCL29regExpConstructorRightContextEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSC5Lexer19copyCodeWithoutBOMsEv
-__ZN3JSC13JSNotAnObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC6JSCell16getConstructDataERNS_13ConstructDataE
-__ZN3JSC26createNotAConstructorErrorEPNS_9ExecStateENS_7JSValueEjPNS_9CodeBlockE
-__ZN3JSC15isStrWhiteSpaceEt
-__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeEPKc
+_cti_op_get_by_val_byte_array
+_cti_op_put_by_val_byte_array
+_JSStringGetUTF8CString
+__ZN3JSC16JSVariableObject19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
__ZNK3JSC22NativeErrorConstructor9classInfoEv
__ZNK3JSC16JSCallbackObjectINS_8JSObjectEE9classNameEv
-__ZN3JSC4Heap11objectCountEv
-__ZNK3JSC12SmallStrings5countEv
-__ZN3JSC14JSGlobalObject12defineGetterEPNS_9ExecStateERKNS_10IdentifierEPNS_8JSObjectE
-__ZN3JSCL27objectProtoFuncLookupGetterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JSObject12lookupGetterEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSCL27objectProtoFuncDefineSetterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC14JSGlobalObject12defineSetterEPNS_9ExecStateERKNS_10IdentifierEPNS_8JSObjectE
+__ZN3JSC6JSCell16getConstructDataERNS_13ConstructDataE
+__ZN3JSC26createNotAConstructorErrorEPNS_9ExecStateENS_7JSValueEjPNS_9CodeBlockE
+__ZNK3JSC4Heap11objectCountEv
+__ZN3JSC14JSGlobalObject12defineGetterEPNS_9ExecStateERKNS_10IdentifierEPNS_8JSObjectEj
+__ZN3JSC14JSGlobalObject12defineSetterEPNS_9ExecStateERKNS_10IdentifierEPNS_8JSObjectEj
__ZN3JSC9Structure22getterSetterTransitionEPS0_
-__ZN3JSC8JSObject22fillGetterPropertySlotERNS_12PropertySlotEPNS_7JSValueE
-__ZN3JSC12PropertySlot14functionGetterEPNS_9ExecStateERKNS_10IdentifierERKS0_
__ZN3JSCL28objectProtoFuncIsPrototypeOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC12StringObjectC2EPNS_9ExecStateEN3WTF10PassRefPtrINS_9StructureEEERKNS_7UStringE
-__ZNK3JSC7UString6is8BitEv
-__ZN3JSC8JSObject15unwrappedObjectEv
-__ZN3JSC22NativeErrorConstructor11getCallDataERNS_8CallDataE
__ZN3JSC16JSCallbackObjectINS_8JSObjectEE11getCallDataERNS_8CallDataE
-__ZN3JSC17BytecodeGenerator21emitComplexJumpScopesEPNS_5LabelEPNS_18ControlFlowContextES4_
-__ZN3JSC23ThrowableExpressionData14emitThrowErrorERNS_17BytecodeGeneratorENS_9ErrorTypeEPKc
-__ZN3JSC17BytecodeGenerator12emitNewErrorEPNS_10RegisterIDENS_9ErrorTypeENS_7JSValueE
-__ZN3JSC3JIT17emit_op_new_errorEPNS_11InstructionE
-__ZN3JSC23MacroAssemblerX86Common8branch16ENS0_9ConditionENS_22AbstractMacroAssemblerINS_12X86AssemblerEE9BaseIndexENS4_5Imm32E
-_JSStringRetain
-__ZN3JSCL19arrayProtoFuncEveryEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL23arrayConstructorIsArrayEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL21objectConstructorKeysEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC12DateInstanceC1EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEd
+__ZN3JSC12DateInstanceC2EPNS_9ExecStateEN3WTF17NonNullPassRefPtrINS_9StructureEEEd
+__ZN3JSCL24dateProtoFuncToISOStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC15createTypeErrorEPNS_9ExecStateEPKc
+__ZNK3JSC11JSByteArray9classInfoEv
+__ZN3JSC11JSByteArray19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZN3JSC22NativeErrorConstructor11getCallDataERNS_8CallDataE
+__ZN3JSCL36objectConstructorGetOwnPropertyNamesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL41objectConstructorGetOwnPropertyDescriptorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC16JSVariableObject14symbolTableGetERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC8JSObject24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZNK3JSC18PropertyDescriptor20isAccessorDescriptorEv
+__ZNK3JSC18PropertyDescriptor8writableEv
+__ZNK3JSC18PropertyDescriptor12configurableEv
+__ZN3JSC12JSGlobalData6createENS_15ThreadStackTypeE
+__ZN3JSC12JSGlobalData10ClientDataD2Ev
+__ZN3WTF14ThreadSpecificINS_13WTFThreadDataEE7destroyEPv
+__ZN3WTF13WTFThreadDataD1Ev
+__ZN7WebCore17AtomicStringTable7destroyEPS0_
+__ZN3JSC15IdentifierTableD1Ev
+__ZN3WTF9dayInYearEdi
+__ZN3WTF18monthFromDayInYearEib
+__ZN3WTF23dayInMonthFromDayInYearEib
+__ZN3JSC6Walker4walkENS_7JSValueE
+__ZN3WTF9HashTableIPN7WebCore10StringImplES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_EC2ERKSA_
+__ZN3WTF6VectorIPN3JSC7JSArrayELm16EE14expandCapacityEm
+__ZN3WTF6VectorIjLm16EE14expandCapacityEm
+__ZN3WTF6VectorIN3JSC11WalkerStateELm16EE14expandCapacityEm
+__ZL30makeGetterOrSetterPropertyNodePN3JSC12JSGlobalDataERKNS_10IdentifierES4_PNS_13ParameterNodeEPNS_16FunctionBodyNodeERKNS_1
+__ZN3JSC17BytecodeGenerator13emitPutGetterEPNS_10RegisterIDERKNS_10IdentifierES2_
+__ZN3JSC3JIT18emit_op_put_getterEPNS_11InstructionE
+_cti_op_put_getter
+__ZN3JSCL23booleanProtoFuncValueOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL19dateProtoFuncToJSONEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC9ExecState9dateTableEPS0_
+__ZN3JSC10Identifier4fromEPNS_9ExecStateEd
+__ZN3JSC13StringBuilder6appendEt
+__ZN3JSC17BytecodeGenerator13emitPutSetterEPNS_10RegisterIDERKNS_10IdentifierES2_
+__ZN3JSC3JIT18emit_op_put_setterEPNS_11InstructionE
+_cti_op_put_setter
+__ZN3JSCL23objectConstructorCreateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL16definePropertiesEPNS_9ExecStateEPNS_8JSObjectES3_
+__ZN3JSCL20toPropertyDescriptorEPNS_9ExecStateENS_7JSValueERNS_18PropertyDescriptorE
+__ZN3JSC18PropertyDescriptor13setEnumerableEb
+__ZN3WTF6VectorIN3JSC18PropertyDescriptorELm0EE14expandCapacityEm
+__ZNK3JSC18PropertyDescriptor16isDataDescriptorEv
+__ZN3JSC8JSObject17defineOwnPropertyEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorEb
+__ZN3JSCL13putDescriptorEPNS_9ExecStateEPNS_8JSObjectERKNS_10IdentifierERNS_18PropertyDescriptorEjNS_7JSValueE
+__ZNK3JSC18PropertyDescriptor19isGenericDescriptorEv
+__ZN3JSCL31objectConstructorGetPrototypeOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC18PropertyDescriptor9setGetterENS_7JSValueE
+__ZNK3JSC18PropertyDescriptor6getterEv
+__ZNK3JSC18PropertyDescriptor6setterEv
+__ZN3JSC18PropertyDescriptor15setConfigurableEb
+__ZN3JSC18PropertyDescriptor11setWritableEb
+__ZN3JSC18PropertyDescriptor9setSetterENS_7JSValueE
+__ZN3JSCL33objectConstructorDefinePropertiesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZNK3JSC18PropertyDescriptor7equalToEPNS_9ExecStateERKS0_
+__ZNK3JSC18PropertyDescriptor15attributesEqualERKS0_
+__ZNK3JSC18PropertyDescriptor22attributesWithOverrideERKS0_
+__ZN3JSCL31objectConstructorDefinePropertyEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC9ExecState11regExpTableEPS0_
+__ZN3JSC9Arguments19getOwnPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayENS_15EnumerationModeE
+__ZN3JSC9ExecState11numberTableEPS0_
+__ZN3JSC9ExecState22regExpConstructorTableEPS0_
+__ZNK3JSC15RegExpPrototype9classInfoEv
+__ZNK3JSC10MathObject9classInfoEv
+__ZN3JSC9ExecState9mathTableEPS0_
+__ZN3JSC9ExecState9jsonTableEPS0_
__ZN3JSCL20arrayProtoFuncReduceEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL25arrayProtoFuncReduceRightEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL28arrayProtoFuncToLocaleStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL25arrayProtoFuncLastIndexOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC15AssignErrorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC8JITStubs16cti_op_new_errorEPPv
-__ZN3JSC15AssignErrorNodeD0Ev
-__ZN3JSC17BytecodeGenerator18emitUnexpectedLoadEPNS_10RegisterIDEd
__ZN3JSC19JSStaticScopeObject3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZN3JSC9ExecState9dateTableEPS0_
-__ZNK3JSC15RegExpPrototype9classInfoEv
-__ZN3JSC12StringObject14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSCL25dateProtoFuncToDateStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL25dateProtoFuncToTimeStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL25numberConstructorNaNValueEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL31dateProtoFuncSetUTCMillisecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL26dateProtoFuncSetUTCSecondsEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL26dateProtoFuncSetUTCMinutesEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL24dateProtoFuncSetUTCMonthEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL23throwStackOverflowErrorEPNS_9ExecStateEPNS_12JSGlobalDataEPvRS4_
-__ZN3JSC24createStackOverflowErrorEPNS_9ExecStateE
+__ZN3JSC7JSValueC1EPNS_9ExecStateEd
__ZN3JSC15DeleteValueNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC15DeleteValueNodeD0Ev
__ZN3JSC16PostfixErrorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
+__ZN3JSC23ThrowableExpressionData14emitThrowErrorERNS_17BytecodeGeneratorENS_9ErrorTypeEPKcRKNS_7UStringE
+__ZN3JSC10makeStringINS_7UStringES1_S1_EES1_T_T0_T1_
+__ZN3JSC13tryMakeStringINS_7UStringES1_S1_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_
__ZN3JSC15PrefixErrorNode12emitBytecodeERNS_17BytecodeGeneratorEPNS_10RegisterIDE
-__ZN3JSC16PostfixErrorNodeD0Ev
-__ZN3JSC15PrefixErrorNodeD0Ev
__ZN3JSC23createInvalidParamErrorEPNS_9ExecStateEPKcNS_7JSValueEjPNS_9CodeBlockE
+__ZN3JSC10makeStringIPKcS2_S2_EENS_7UStringET_T0_T1_
+__ZN3JSC13tryMakeStringIPKcS2_S2_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_T1_
__ZNK3JSC15DotAccessorNode17isDotAccessorNodeEv
__ZNK3JSC14ExpressionNode17isDotAccessorNodeEv
__ZN3JSC13JSNotAnObject3putEPNS_9ExecStateEjNS_7JSValueE
-__ZN3JSC4Heap24setGCProtectNeedsLockingEv
-__ZN3JSCL23callFunctionConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC14ArrayPrototype24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC13DatePrototype24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC15StringPrototype24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC12StringObject24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC8JSString27getStringPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC17NumberConstructor24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC17RegExpConstructor24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSCL22regExpConstructorInputEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSC12RegExpObject24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC10MathObject24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC18RegExpMatchesArray24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC10JSFunction24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
+__ZN3JSC11JSByteArray24getOwnPropertyDescriptorEPNS_9ExecStateERKNS_10IdentifierERNS_18PropertyDescriptorE
__ZNK3JSC16JSCallbackObjectINS_8JSObjectEE8toStringEPNS_9ExecStateE
-__ZN3JSC8JITStubs17cti_op_instanceofEPPv
+__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeERKNS_7UStringEilS5_
__ZN3JSC17BytecodeGenerator35emitThrowExpressionTooDeepExceptionEv
-__ZN3JSCL25numberConstructorMinValueEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL17mathProtoFuncACosEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL25numberConstructorMinValueEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
__ZN3JSCL18mathProtoFuncATan2EPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL16mathProtoFuncTanEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSCL28numberProtoFuncToExponentialEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL26numberProtoFuncToPrecisionEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSCL12charSequenceEci
+__ZN3JSC16parseIntOverflowEPKcii
__ZN3JSCL29objectProtoFuncToLocaleStringEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC6JSCell14toThisJSStringEPNS_9ExecStateE
-__ZNK3JSC6JSCell12toThisStringEPNS_9ExecStateE
-__ZN3JSCL27objectProtoFuncLookupSetterEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC8JSObject12lookupSetterEPNS_9ExecStateERKNS_10IdentifierE
-__ZN3JSC9ExecState22regExpConstructorTableEPS0_
-__ZN3JSCL24regExpConstructorDollar7EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar8EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL24regExpConstructorDollar9EPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL22regExpConstructorInputEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
+__ZN3JSCL24regExpConstructorDollar5EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL24regExpConstructorDollar6EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL24regExpConstructorDollar7EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL24regExpConstructorDollar8EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL24regExpConstructorDollar9EPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
__ZN3JSCL25setRegExpConstructorInputEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueE
-__ZN3JSCL26regExpConstructorLastMatchEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL26regExpConstructorLastParenEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL26regExpConstructorMultilineEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
+__ZN3JSCL26regExpConstructorLastMatchEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZN3JSCL26regExpConstructorLastParenEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+__ZNK3JSC17RegExpConstructor12getLastParenEPNS_9ExecStateE
+__ZN3JSCL26regExpConstructorMultilineEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
__ZN3JSCL29setRegExpConstructorMultilineEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueE
-__ZN3JSC4Yarr15nondigitsCreateEv
+__ZNK3JSC17RegExpConstructor5inputEv
+__ZN3JSC10makeStringIPKcS2_EENS_7UStringET_T0_
+__ZN3JSC13tryMakeStringIPKcS2_EEN3WTF10PassRefPtrIN7WebCore10StringImplEEET_T0_
+__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeERKNS_7UStringE
__ZNK3JSC19JSStaticScopeObject12toThisObjectEPNS_9ExecStateE
+__ZN3JSCL23stringProtoFuncTrimLeftEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL24stringProtoFuncTrimRightEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3JSC12JSActivation18getArgumentsGetterEv
-__ZN3JSC12JSActivation15argumentsGetterEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-__ZN3JSCL23booleanProtoFuncValueOfEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC12JSActivation15argumentsGetterEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
__ZN3JSCL28stringProtoFuncLocaleCompareEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3WTF8Collator11userDefaultEv
__ZNK3WTF8Collator7collateEPKtmS2_m
__ZNK3WTF8Collator14createCollatorEv
__ZN3WTF8CollatorD1Ev
__ZN3WTF8Collator15releaseCollatorEv
-__ZNK3JSC10MathObject9classInfoEv
-__ZN3JSC9ExecState9mathTableEPS0_
+__ZN3JSC9Arguments14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
+__ZN3JSCL26callNativeErrorConstructorEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSCL21callFunctionPrototypeEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
+__ZN3JSC6JSCell11getJSNumberEv
+__ZN3JSCL16callRegExpObjectEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
__ZN3WTF6VectorIN3JSC20FunctionRegisterInfoELm0EE14expandCapacityEm
__ZN3JSC3JIT25emit_op_profile_will_callEPNS_11InstructionE
__ZN3JSC3JIT24emit_op_profile_did_callEPNS_11InstructionE
__ZN3JSC7Profile6createERKNS_7UStringEj
__ZN3JSC7ProfileC2ERKNS_7UStringEj
__ZN3JSC11ProfileNodeC1ERKNS_14CallIdentifierEPS0_S4_
-__ZN3JSC33getCurrentUTCTimeWithMicrosecondsEv
__ZN3JSC16ProfileGenerator24addParentForConsoleStartEPNS_9ExecStateE
-__ZN3JSC8Profiler20createCallIdentifierEPNS_12JSGlobalDataENS_7JSValueERKNS_7UStringEi
-__ZN3JSC16InternalFunction21calculatedDisplayNameEPNS_12JSGlobalDataE
+__ZN3JSC8Profiler20createCallIdentifierEPNS_9ExecStateENS_7JSValueERKNS_7UStringEi
+__ZN3JSC16InternalFunction21calculatedDisplayNameEPNS_9ExecStateE
+__ZN3JSC16InternalFunction11displayNameEPNS_9ExecStateE
__ZN3JSC11ProfileNode10insertNodeEN3WTF10PassRefPtrIS0_EE
__ZN3WTF6VectorINS_6RefPtrIN3JSC11ProfileNodeEEELm0EE14expandCapacityEm
-__ZN3WTF6VectorINS_6RefPtrIN3JSC16ProfileGeneratorEEELm0EE14expandCapacityEm
-__ZN3JSC8JITStubs23cti_op_profile_did_callEPPv
+__ZN3WTF6VectorINS_6RefPtrIN3JSC16ProfileGeneratorEEELm0EE15reserveCapacityEm
+_cti_op_profile_did_call
__ZN3JSC8Profiler10didExecuteEPNS_9ExecStateENS_7JSValueE
__ZN3JSC16ProfileGenerator10didExecuteERKNS_14CallIdentifierE
__ZN3JSC11ProfileNode10didExecuteEv
-__ZN3JSC8JITStubs24cti_op_profile_will_callEPPv
+_cti_op_profile_will_call
__ZN3JSC8Profiler11willExecuteEPNS_9ExecStateENS_7JSValueE
__ZN3JSC16ProfileGenerator11willExecuteERKNS_14CallIdentifierE
__ZN3JSC11ProfileNode11willExecuteERKNS_14CallIdentifierE
__ZN3JSC7Profile7forEachEMNS_11ProfileNodeEFvvE
__ZNK3JSC11ProfileNode25traverseNextNodePostOrderEv
__ZN3JSC11ProfileNode13stopProfilingEv
-__ZN3JSCeqERKNS_7UStringEPKc
__ZN3JSC11ProfileNode11removeChildEPS0_
__ZN3JSC11ProfileNode8addChildEN3WTF10PassRefPtrIS0_EE
-_JSValueIsObjectOfClass
-_JSObjectCallAsConstructor
-__ZN3JSC9constructEPNS_9ExecStateENS_7JSValueENS_13ConstructTypeERKNS_13ConstructDataERKNS_7ArgListE
-_JSObjectCallAsFunction
-__ZN3JSC4Heap14primaryHeapEndEv
+__ZN3JSC8Debugger23recompileAllJSFunctionsEPNS_12JSGlobalDataE
__ZN3JSC4Heap16primaryHeapBeginEv
+__ZN3JSC4Heap14primaryHeapEndEv
__ZNK3JSC18JSCallbackFunction9classInfoEv
+__ZN3WTF7HashSetIPN3JSC18FunctionExecutableENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEE3addERKS3_
+__ZN3WTF9HashTableIPN3JSC18FunctionExecutableES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6expandE
+__ZN3JSC18FunctionExecutable9recompileEPNS_9ExecStateE
__ZN3JSC8Profiler11willExecuteEPNS_9ExecStateERKNS_7UStringEi
__ZN3JSC8Profiler10didExecuteEPNS_9ExecStateERKNS_7UStringEi
__ZNK3JSC16ProfileGenerator5titleEv
+__ZN3JSC9CodeBlock33functionRegisterForBytecodeOffsetEjRi
__ZN3JSC7ProfileD0Ev
__ZN3WTF10RefCountedIN3JSC11ProfileNodeEE5derefEv
-__ZN3JSC4Yarr14RegexGenerator33generatePatternCharacterNonGreedyERNS1_19TermGenerationStateE
-__ZN3JSC35createInterruptedExecutionExceptionEPNS_12JSGlobalDataE
-__ZNK3JSC25InterruptedExecutionError19isWatchdogExceptionEv
-__ZN3JSC25InterruptedExecutionErrorD1Ev
-__ZN3JSC12JSGlobalData10ClientDataD2Ev
-__ZN3JSC18RegExpMatchesArray16getPropertyNamesEPNS_9ExecStateERNS_17PropertyNameArrayE
+__ZN3JSC34createTerminatedExecutionExceptionEPNS_12JSGlobalDataE
+__ZNK3JSC24TerminatedExecutionError13exceptionTypeEv
+__ZN3JSC24TerminatedExecutionErrorD1Ev
__ZN3WTF8CollatorC1EPKc
__ZN3WTF8Collator18setOrderLowerFirstEb
+_JSValueIsEqual
+__ZN3JSC7JSValue13equalSlowCaseEPNS_9ExecStateES0_S0_
+__ZNK3JSC14JSGlobalObject17supportsProfilingEv
+__ZNK7WebCore6String12toUIntStrictEPbi
+__ZN7WebCore10StringImpl12toUIntStrictEPbi
__ZN3WTF12randomNumberEv
+__ZNK7WebCore6String8toUInt64EPb
+__ZN7WebCore10StringImpl8toUInt64EPb
+__ZN7WebCore18charactersToUInt64EPKtmPb
+_JSStringGetMaximumUTF8CStringSize
__ZN3JSC16JSCallbackObjectINS_8JSObjectEE3putEPNS_9ExecStateERKNS_10IdentifierENS_7JSValueERNS_15PutPropertySlotE
-__ZNK3JSC6JSCell9getStringEv
-__ZNK3JSC12DateInstance7getTimeERdRi
-__ZN3JSC10throwErrorEPNS_9ExecStateENS_9ErrorTypeERKNS_7UStringE
-_JSGlobalContextCreate
-_JSGlobalContextCreateInGroup
-__ZN3JSC4Heap29makeUsableFromMultipleThreadsEv
-_JSGlobalContextRetain
-__ZN3JSC6JSLock6unlockEb
-_JSEvaluateScript
-__ZNK3JSC14JSGlobalObject17supportsProfilingEv
-_JSGlobalContextRelease
-__ZN3JSC14JSGlobalObjectD1Ev
-__ZN3JSC14JSGlobalObject18JSGlobalObjectDataD0Ev
-__ZN3JSC17FunctionPrototype11getCallDataERNS_8CallDataE
-__ZN3JSC15DateConstructor11getCallDataERNS_8CallDataE
-__ZN3JSCL8callDateEPNS_9ExecStateEPNS_8JSObjectENS_7JSValueERKNS_7ArgListE
-__ZN3JSC13JSNotAnObject4markEv
-_JSObjectIsFunction
+__ZN7WebCore6String6numberEm
__ZN3JSC4Heap17globalObjectCountEv
__ZN3JSC4Heap20protectedObjectCountEv
__ZN3JSC4Heap25protectedObjectTypeCountsEv
__ZN3WTF9HashTableIPKcSt4pairIS2_jENS_18PairFirstExtractorIS4_EENS_7PtrHashIS2_EENS_14PairHashTraitsINS_10HashTraitsIS2_EENSA_I
+__ZNK3JSC6JSCell17isAPIValueWrapperEv
+__ZNK3JSC6JSCell22isPropertyNameIteratorEv
+__ZN3JSC4Heap16objectTypeCountsEv
+__ZNK3JSC22JSPropertyNameIterator22isPropertyNameIteratorEv
__ZN3WTF20fastMallocStatisticsEv
__ZNK3JSC4Heap10statisticsEv
__ZN3WTF27releaseFastMallocFreeMemoryEv
-__ZN3JSC10JSFunction16getConstructDataERNS_13ConstructDataE
-__ZN3JSC10JSFunction9constructEPNS_9ExecStateERKNS_7ArgListE
+_JSStringIsEqualToUTF8CString
+__ZN3JSC16JSCallbackObjectINS_8JSObjectEE14callbackGetterEPNS_9ExecStateENS_7JSValueERKNS_10IdentifierE
+_JSObjectSetPrivate
__ZN3JSC8Debugger6attachEPNS_14JSGlobalObjectE
-__ZN3WTF7HashSetIPN3JSC14JSGlobalObjectENS_7PtrHashIS3_EENS_10HashTraitsIS3_EEE3addERKS3_
__ZN3WTF9HashTableIPN3JSC14JSGlobalObjectES3_NS_17IdentityExtractorIS3_EENS_7PtrHashIS3_EENS_10HashTraitsIS3_EES9_E6rehashEi
+__ZN3WTF9HashTableIPN3JSC14SourceProviderESt4pairIS3_PNS1_9ExecStateEENS_18PairFirstExtractorIS7_EENS_7PtrHashIS3_EENS_14PairHa
+__ZN3JSC7UString4fromEl
__ZN3JSC3JIT13emit_op_debugEPNS_11InstructionE
-__ZN3JSC8JITStubs12cti_op_debugEPPv
+_cti_op_debug
__ZN3JSC11Interpreter5debugEPNS_9ExecStateENS_11DebugHookIDEii
+__ZNK3JSC17DebuggerCallFrame4typeEv
+__ZNK3JSC17DebuggerCallFrame22calculatedFunctionNameEv
+__ZNK3JSC12JSActivation18isActivationObjectEv
+__ZNK3JSC17DebuggerCallFrame10thisObjectEv
+__ZN3WTF28setMainThreadCallbacksPausedEb
+__ZNK3JSC17DebuggerCallFrame8evaluateERKNS_7UStringERNS_7JSValueE
__ZN3JSC8Debugger6detachEPNS_14JSGlobalObjectE
-__ZN3JSC9CodeBlock33functionRegisterForBytecodeOffsetEjRi
-_JSStringIsEqualToUTF8CString
-__ZN3JSC16JSCallbackObjectINS_8JSObjectEE14callbackGetterEPNS_9ExecStateERKNS_10IdentifierERKNS_12PropertySlotE
-_JSObjectSetPrivate
-__ZN3JSC7UString3Rep11computeHashEPKci
-__ZN3JSC16JSCallbackObjectINS_8JSObjectEE14deletePropertyEPNS_9ExecStateERKNS_10IdentifierE
-_JSGarbageCollect
-__ZN3JSC4Heap6isBusyEv
-__ZN3JSCL18styleFromArgStringERKNS_7UStringEl
+_JSValueIsObjectOfClass
# JavaScriptCore - Qt4 build info
VPATH += $$PWD
-
+CONFIG(debug, debug|release) {
+ # Output in JavaScriptCore/<config>
+ JAVASCRIPTCORE_DESTDIR = debug
+    # Use a config-specific target to prevent parallel-build file clashes on Mac
+ JAVASCRIPTCORE_TARGET = jscored
+} else {
+ JAVASCRIPTCORE_DESTDIR = release
+ JAVASCRIPTCORE_TARGET = jscore
+}
CONFIG(standalone_package) {
isEmpty(JSC_GENERATED_SOURCES_DIR):JSC_GENERATED_SOURCES_DIR = $$PWD/generated
} else {
isEmpty(JSC_GENERATED_SOURCES_DIR):JSC_GENERATED_SOURCES_DIR = generated
}
-CONFIG(debug, debug|release) {
- OBJECTS_DIR = obj/debug
-} else { # Release
- OBJECTS_DIR = obj/release
-}
+CONFIG(standalone_package): DEFINES *= NDEBUG
symbian: {
# Need to guarantee this comes before system includes of /epoc32/include
MMP_RULES += "USERINCLUDE ../JavaScriptCore/profiler"
+ LIBS += -lhal
+ # For hal.h
+ INCLUDEPATH *= $$MW_LAYER_SYSTEMINCLUDE
}
INCLUDEPATH = \
$$PWD/pcre \
$$PWD/profiler \
$$PWD/runtime \
- $$PWD/wrec \
$$PWD/wtf \
+ $$PWD/wtf/symbian \
$$PWD/wtf/unicode \
$$PWD/yarr \
$$PWD/API \
$$JSC_GENERATED_SOURCES_DIR \
$$INCLUDEPATH
+win32-*: DEFINES += _HAS_TR1=0
+
DEFINES += BUILDING_QT__ BUILDING_JavaScriptCore BUILDING_WTF
-win32-* {
- LIBS += -lwinmm
-}
contains(JAVASCRIPTCORE_JIT,yes) {
DEFINES+=ENABLE_JIT=1
DEFINES+=ENABLE_YARR_JIT=1
DEFINES+=ENABLE_YARR=0
}
-# Rules when JIT enabled (not disabled)
-!contains(DEFINES, ENABLE_JIT=0) {
- linux*-g++*:greaterThan(QT_GCC_MAJOR_VERSION,3):greaterThan(QT_GCC_MINOR_VERSION,0) {
- QMAKE_CXXFLAGS += -fno-stack-protector
- QMAKE_CFLAGS += -fno-stack-protector
- }
-}
-
wince* {
INCLUDEPATH += $$QT_SOURCE_TREE/src/3rdparty/ce-compat
- SOURCES += $$QT_SOURCE_TREE/src/3rdparty/ce-compat/ce_time.c
DEFINES += WINCEBASIC
+
+ INCLUDEPATH += $$PWD/../JavaScriptCore/os-wince
+ INCLUDEPATH += $$PWD/../JavaScriptCore/os-win32
}
-include(pcre/pcre.pri)
-SOURCES += \
- API/JSBase.cpp \
- API/JSCallbackConstructor.cpp \
- API/JSCallbackFunction.cpp \
- API/JSCallbackObject.cpp \
- API/JSClassRef.cpp \
- API/JSContextRef.cpp \
- API/JSObjectRef.cpp \
- API/JSStringRef.cpp \
- API/JSValueRef.cpp \
- API/OpaqueJSString.cpp \
- assembler/ARMAssembler.cpp \
- assembler/MacroAssemblerARM.cpp \
- bytecode/CodeBlock.cpp \
- bytecode/JumpTable.cpp \
- bytecode/Opcode.cpp \
- bytecode/SamplingTool.cpp \
- bytecode/StructureStubInfo.cpp \
- bytecompiler/BytecodeGenerator.cpp \
- bytecompiler/NodesCodegen.cpp \
- debugger/DebuggerActivation.cpp \
- debugger/DebuggerCallFrame.cpp \
- debugger/Debugger.cpp \
- interpreter/CallFrame.cpp \
- interpreter/Interpreter.cpp \
- interpreter/RegisterFile.cpp \
- jit/ExecutableAllocatorPosix.cpp \
- jit/ExecutableAllocatorSymbian.cpp \
- jit/ExecutableAllocatorWin.cpp \
- jit/ExecutableAllocator.cpp \
- jit/JITArithmetic.cpp \
- jit/JITCall.cpp \
- jit/JIT.cpp \
- jit/JITOpcodes.cpp \
- jit/JITPropertyAccess.cpp \
- jit/JITStubs.cpp \
- parser/Lexer.cpp \
- parser/Nodes.cpp \
- parser/ParserArena.cpp \
- parser/Parser.cpp \
- profiler/Profile.cpp \
- profiler/ProfileGenerator.cpp \
- profiler/ProfileNode.cpp \
- profiler/Profiler.cpp \
- runtime/ArgList.cpp \
- runtime/Arguments.cpp \
- runtime/ArrayConstructor.cpp \
- runtime/ArrayPrototype.cpp \
- runtime/BooleanConstructor.cpp \
- runtime/BooleanObject.cpp \
- runtime/BooleanPrototype.cpp \
- runtime/CallData.cpp \
- runtime/Collector.cpp \
- runtime/CommonIdentifiers.cpp \
- runtime/Completion.cpp \
- runtime/ConstructData.cpp \
- runtime/DateConstructor.cpp \
- runtime/DateConversion.cpp \
- runtime/DateInstance.cpp \
- runtime/DatePrototype.cpp \
- runtime/ErrorConstructor.cpp \
- runtime/Error.cpp \
- runtime/ErrorInstance.cpp \
- runtime/ErrorPrototype.cpp \
- runtime/ExceptionHelpers.cpp \
- runtime/Executable.cpp \
- runtime/FunctionConstructor.cpp \
- runtime/FunctionPrototype.cpp \
- runtime/GetterSetter.cpp \
- runtime/GlobalEvalFunction.cpp \
- runtime/Identifier.cpp \
- runtime/InitializeThreading.cpp \
- runtime/InternalFunction.cpp \
- runtime/JSActivation.cpp \
- runtime/JSAPIValueWrapper.cpp \
- runtime/JSArray.cpp \
- runtime/JSByteArray.cpp \
- runtime/JSCell.cpp \
- runtime/JSFunction.cpp \
- runtime/JSGlobalData.cpp \
- runtime/JSGlobalObject.cpp \
- runtime/JSGlobalObjectFunctions.cpp \
- runtime/JSImmediate.cpp \
- runtime/JSLock.cpp \
- runtime/JSNotAnObject.cpp \
- runtime/JSNumberCell.cpp \
- runtime/JSObject.cpp \
- runtime/JSONObject.cpp \
- runtime/JSPropertyNameIterator.cpp \
- runtime/JSStaticScopeObject.cpp \
- runtime/JSString.cpp \
- runtime/JSValue.cpp \
- runtime/JSVariableObject.cpp \
- runtime/JSWrapperObject.cpp \
- runtime/LiteralParser.cpp \
- runtime/Lookup.cpp \
- runtime/MarkStackPosix.cpp \
- runtime/MarkStackSymbian.cpp \
- runtime/MarkStackWin.cpp \
- runtime/MarkStack.cpp \
- runtime/MathObject.cpp \
- runtime/NativeErrorConstructor.cpp \
- runtime/NativeErrorPrototype.cpp \
- runtime/NumberConstructor.cpp \
- runtime/NumberObject.cpp \
- runtime/NumberPrototype.cpp \
- runtime/ObjectConstructor.cpp \
- runtime/ObjectPrototype.cpp \
- runtime/Operations.cpp \
- runtime/PropertyDescriptor.cpp \
- runtime/PropertyNameArray.cpp \
- runtime/PropertySlot.cpp \
- runtime/PrototypeFunction.cpp \
- runtime/RegExpConstructor.cpp \
- runtime/RegExp.cpp \
- runtime/RegExpObject.cpp \
- runtime/RegExpPrototype.cpp \
- runtime/ScopeChain.cpp \
- runtime/SmallStrings.cpp \
- runtime/StringConstructor.cpp \
- runtime/StringObject.cpp \
- runtime/StringPrototype.cpp \
- runtime/StructureChain.cpp \
- runtime/Structure.cpp \
- runtime/TimeoutChecker.cpp \
- runtime/UString.cpp \
- runtime/UStringImpl.cpp \
- wtf/Assertions.cpp \
- wtf/ByteArray.cpp \
- wtf/CurrentTime.cpp \
- wtf/DateMath.cpp \
- wtf/dtoa.cpp \
- wtf/FastMalloc.cpp \
- wtf/HashTable.cpp \
- wtf/MainThread.cpp \
- wtf/qt/MainThreadQt.cpp \
- wtf/qt/ThreadingQt.cpp \
- wtf/RandomNumber.cpp \
- wtf/RefCountedLeakCounter.cpp \
- wtf/ThreadingNone.cpp \
- wtf/Threading.cpp \
- wtf/TypeTraits.cpp \
- wtf/unicode/CollatorDefault.cpp \
- wtf/unicode/icu/CollatorICU.cpp \
- wtf/unicode/UTF8.cpp \
- yarr/RegexCompiler.cpp \
- yarr/RegexInterpreter.cpp \
- yarr/RegexJIT.cpp
+defineTest(addJavaScriptCoreLib) {
+ # Argument is the relative path to JavaScriptCore.pro's qmake output
+ pathToJavaScriptCoreOutput = $$ARGS/$$JAVASCRIPTCORE_DESTDIR
-# Generated files, simply list them for JavaScriptCore
-SOURCES += \
- $${JSC_GENERATED_SOURCES_DIR}/Grammar.cpp
+ win32-msvc*|wince* {
+ LIBS += -L$$pathToJavaScriptCoreOutput
+ LIBS += -l$$JAVASCRIPTCORE_TARGET
+ POST_TARGETDEPS += $${pathToJavaScriptCoreOutput}$${QMAKE_DIR_SEP}$${JAVASCRIPTCORE_TARGET}.lib
+ } else:symbian {
+ LIBS += -l$${JAVASCRIPTCORE_TARGET}.lib
+        # The default symbian build system does not use library paths at all. However, when building with
+        # qmake's symbian makespec (which uses Makefiles) the library path is needed.
+ QMAKE_LIBDIR += $$pathToJavaScriptCoreOutput
+ POST_TARGETDEPS += $${pathToJavaScriptCoreOutput}$${QMAKE_DIR_SEP}$${JAVASCRIPTCORE_TARGET}.lib
+ } else {
+        # Make sure jscore will be early in the list of libraries to work around a bug in MinGW
+ # that can't resolve symbols from QtCore if libjscore comes after.
+ QMAKE_LIBDIR = $$pathToJavaScriptCoreOutput $$QMAKE_LIBDIR
+ LIBS += -l$$JAVASCRIPTCORE_TARGET
+ POST_TARGETDEPS += $${pathToJavaScriptCoreOutput}$${QMAKE_DIR_SEP}lib$${JAVASCRIPTCORE_TARGET}.a
+ }
-!contains(DEFINES, USE_SYSTEM_MALLOC) {
- SOURCES += wtf/TCSystemAlloc.cpp
-}
+ win32-* {
+ LIBS += -lwinmm
+ }
+
+ # The following line is to prevent qmake from adding jscore to libQtWebKit's prl dependencies.
+    # The compromise we have to accept by disabling explicitlib is to drop support for linking QtWebKit and QtScript
+    # statically into applications (which isn't done often because, among other things, of licensing obstacles).
+ CONFIG -= explicitlib
+ export(QMAKE_LIBDIR)
+ export(LIBS)
+ export(POST_TARGETDEPS)
+ export(CONFIG)
+
+ return(true)
+}
# JavaScriptCore - qmake build info
CONFIG += building-libs
include($$PWD/../WebKit.pri)
+include(JavaScriptCore.pri)
TEMPLATE = lib
CONFIG += staticlib
-TARGET = JavaScriptCore
+# Don't use JavaScriptCore as the target name: for msvc, qmake would create a JavaScriptCore.vcproj,
+# which clashes with the existing JavaScriptCore directory
+TARGET = $$JAVASCRIPTCORE_TARGET
+DESTDIR = $$JAVASCRIPTCORE_DESTDIR
+QT += core
+QT -= gui
CONFIG += depend_includepath
contains(QT_CONFIG, embedded):CONFIG += embedded
CONFIG(QTDIR_build) {
- GENERATED_SOURCES_DIR = $$PWD/generated
- OLDDESTDIR = $$DESTDIR
- include($$QT_SOURCE_TREE/src/qbase.pri)
- INSTALLS =
- DESTDIR = $$OLDDESTDIR
- DEFINES *= NDEBUG
-}
-
-isEmpty(GENERATED_SOURCES_DIR):GENERATED_SOURCES_DIR = tmp
-GENERATED_SOURCES_DIR_SLASH = $${GENERATED_SOURCES_DIR}$${QMAKE_DIR_SEP}
-
-INCLUDEPATH += $$GENERATED_SOURCES_DIR
-
-!CONFIG(QTDIR_build) {
+ # Make sure we compile both debug and release on mac when inside Qt.
+ # This line was extracted from qbase.pri instead of including the whole file
+ win32|mac:!macx-xcode:CONFIG += debug_and_release
+} else {
CONFIG(debug, debug|release) {
OBJECTS_DIR = obj/debug
} else { # Release
OBJECTS_DIR = obj/release
}
+ # Make sure that build_all follows the build_all config in WebCore
+ mac:contains(QT_CONFIG, qt_framework):!CONFIG(webkit_no_framework):!build_pass:CONFIG += build_all
}
-CONFIG(release):!CONFIG(QTDIR_build) {
- contains(QT_CONFIG, reduce_exports):CONFIG += hide_symbols
- unix:contains(QT_CONFIG, reduce_relocations):CONFIG += bsymbolic_functions
-}
-
-linux-*: DEFINES += HAVE_STDINT_H
-freebsd-*: DEFINES += HAVE_PTHREAD_NP_H
-
-DEFINES += BUILD_WEBKIT
+# WebCore adds these configs only when in a standalone build.
+# qbase.pri takes care of that when in a QTDIR_build.
+# Here we add the configs for both cases since we don't include qbase.pri.
+contains(QT_CONFIG, reduce_exports):CONFIG += hide_symbols
+unix:contains(QT_CONFIG, reduce_relocations):CONFIG += bsymbolic_functions
-win32-*: DEFINES += _HAS_TR1=0
+CONFIG(QTDIR_build) {
+ # Remove the following 2 lines if you want debug information in JavaScriptCore
+ CONFIG -= separate_debug_info
+ CONFIG += no_debug_info
+}
# Pick up 3rdparty libraries from INCLUDE/LIB just like with MSVC
win32-g++ {
QMAKE_LIBDIR_POST += $$split(TMPPATH,";")
}
-DEFINES += WTF_CHANGES=1
+*-g++*:QMAKE_CXXFLAGS_RELEASE -= -O2
+*-g++*:QMAKE_CXXFLAGS_RELEASE += -O3
-include(JavaScriptCore.pri)
+# Rules when JIT enabled (not disabled)
+!contains(DEFINES, ENABLE_JIT=0) {
+ linux*-g++*:greaterThan(QT_GCC_MAJOR_VERSION,3):greaterThan(QT_GCC_MINOR_VERSION,0) {
+ QMAKE_CXXFLAGS += -fno-stack-protector
+ QMAKE_CFLAGS += -fno-stack-protector
+ }
+}
-QMAKE_EXTRA_TARGETS += generated_files
+wince* {
+ SOURCES += $$QT_SOURCE_TREE/src/3rdparty/ce-compat/ce_time.c
+}
-*-g++*:QMAKE_CXXFLAGS_RELEASE -= -O2
-*-g++*:QMAKE_CXXFLAGS_RELEASE += -O3
+include(pcre/pcre.pri)
+
+SOURCES += \
+ API/JSBase.cpp \
+ API/JSCallbackConstructor.cpp \
+ API/JSCallbackFunction.cpp \
+ API/JSCallbackObject.cpp \
+ API/JSClassRef.cpp \
+ API/JSContextRef.cpp \
+ API/JSObjectRef.cpp \
+ API/JSStringRef.cpp \
+ API/JSValueRef.cpp \
+ API/OpaqueJSString.cpp \
+ assembler/ARMAssembler.cpp \
+ assembler/MacroAssemblerARM.cpp \
+ bytecode/CodeBlock.cpp \
+ bytecode/JumpTable.cpp \
+ bytecode/Opcode.cpp \
+ bytecode/SamplingTool.cpp \
+ bytecode/StructureStubInfo.cpp \
+ bytecompiler/BytecodeGenerator.cpp \
+ bytecompiler/NodesCodegen.cpp \
+ debugger/DebuggerActivation.cpp \
+ debugger/DebuggerCallFrame.cpp \
+ debugger/Debugger.cpp \
+ interpreter/CallFrame.cpp \
+ interpreter/Interpreter.cpp \
+ interpreter/RegisterFile.cpp \
+ jit/ExecutableAllocatorFixedVMPool.cpp \
+ jit/ExecutableAllocatorPosix.cpp \
+ jit/ExecutableAllocatorSymbian.cpp \
+ jit/ExecutableAllocatorWin.cpp \
+ jit/ExecutableAllocator.cpp \
+ jit/JITArithmetic.cpp \
+ jit/JITArithmetic32_64.cpp \
+ jit/JITCall.cpp \
+ jit/JIT.cpp \
+ jit/JITOpcodes.cpp \
+ jit/JITOpcodes32_64.cpp \
+ jit/JITPropertyAccess.cpp \
+ jit/JITPropertyAccess32_64.cpp \
+ jit/JITStubs.cpp \
+ jit/ThunkGenerators.cpp \
+ parser/Lexer.cpp \
+ parser/Nodes.cpp \
+ parser/ParserArena.cpp \
+ parser/Parser.cpp \
+ profiler/Profile.cpp \
+ profiler/ProfileGenerator.cpp \
+ profiler/ProfileNode.cpp \
+ profiler/Profiler.cpp \
+ runtime/ArgList.cpp \
+ runtime/Arguments.cpp \
+ runtime/ArrayConstructor.cpp \
+ runtime/ArrayPrototype.cpp \
+ runtime/BooleanConstructor.cpp \
+ runtime/BooleanObject.cpp \
+ runtime/BooleanPrototype.cpp \
+ runtime/CallData.cpp \
+ runtime/Collector.cpp \
+ runtime/CommonIdentifiers.cpp \
+ runtime/Completion.cpp \
+ runtime/ConstructData.cpp \
+ runtime/DateConstructor.cpp \
+ runtime/DateConversion.cpp \
+ runtime/DateInstance.cpp \
+ runtime/DatePrototype.cpp \
+ runtime/ErrorConstructor.cpp \
+ runtime/Error.cpp \
+ runtime/ErrorInstance.cpp \
+ runtime/ErrorPrototype.cpp \
+ runtime/ExceptionHelpers.cpp \
+ runtime/Executable.cpp \
+ runtime/FunctionConstructor.cpp \
+ runtime/FunctionPrototype.cpp \
+ runtime/GetterSetter.cpp \
+ runtime/GlobalEvalFunction.cpp \
+ runtime/Identifier.cpp \
+ runtime/InitializeThreading.cpp \
+ runtime/InternalFunction.cpp \
+ runtime/JSActivation.cpp \
+ runtime/JSAPIValueWrapper.cpp \
+ runtime/JSArray.cpp \
+ runtime/JSByteArray.cpp \
+ runtime/JSCell.cpp \
+ runtime/JSFunction.cpp \
+ runtime/JSGlobalData.cpp \
+ runtime/JSGlobalObject.cpp \
+ runtime/JSGlobalObjectFunctions.cpp \
+ runtime/JSImmediate.cpp \
+ runtime/JSLock.cpp \
+ runtime/JSNotAnObject.cpp \
+ runtime/JSNumberCell.cpp \
+ runtime/JSObject.cpp \
+ runtime/JSONObject.cpp \
+ runtime/JSPropertyNameIterator.cpp \
+ runtime/JSStaticScopeObject.cpp \
+ runtime/JSString.cpp \
+ runtime/JSValue.cpp \
+ runtime/JSVariableObject.cpp \
+ runtime/JSWrapperObject.cpp \
+ runtime/LiteralParser.cpp \
+ runtime/Lookup.cpp \
+ runtime/MarkStackPosix.cpp \
+ runtime/MarkStackSymbian.cpp \
+ runtime/MarkStackWin.cpp \
+ runtime/MarkStack.cpp \
+ runtime/MathObject.cpp \
+ runtime/NativeErrorConstructor.cpp \
+ runtime/NativeErrorPrototype.cpp \
+ runtime/NumberConstructor.cpp \
+ runtime/NumberObject.cpp \
+ runtime/NumberPrototype.cpp \
+ runtime/ObjectConstructor.cpp \
+ runtime/ObjectPrototype.cpp \
+ runtime/Operations.cpp \
+ runtime/PropertyDescriptor.cpp \
+ runtime/PropertyNameArray.cpp \
+ runtime/PropertySlot.cpp \
+ runtime/PrototypeFunction.cpp \
+ runtime/RegExpConstructor.cpp \
+ runtime/RegExp.cpp \
+ runtime/RegExpObject.cpp \
+ runtime/RegExpPrototype.cpp \
+ runtime/RegExpCache.cpp \
+ runtime/RopeImpl.cpp \
+ runtime/ScopeChain.cpp \
+ runtime/SmallStrings.cpp \
+ runtime/StringConstructor.cpp \
+ runtime/StringObject.cpp \
+ runtime/StringPrototype.cpp \
+ runtime/StructureChain.cpp \
+ runtime/Structure.cpp \
+ runtime/TimeoutChecker.cpp \
+ runtime/UString.cpp \
+ wtf/Assertions.cpp \
+ wtf/ByteArray.cpp \
+ wtf/CurrentTime.cpp \
+ wtf/DateMath.cpp \
+ wtf/dtoa.cpp \
+ wtf/FastMalloc.cpp \
+ wtf/HashTable.cpp \
+ wtf/MD5.cpp \
+ wtf/MainThread.cpp \
+ wtf/qt/MainThreadQt.cpp \
+ wtf/qt/StringQt.cpp \
+ wtf/qt/ThreadingQt.cpp \
+ wtf/RandomNumber.cpp \
+ wtf/RefCountedLeakCounter.cpp \
+ wtf/symbian/BlockAllocatorSymbian.cpp \
+ wtf/ThreadingNone.cpp \
+ wtf/Threading.cpp \
+ wtf/TypeTraits.cpp \
+ wtf/WTFThreadData.cpp \
+ wtf/text/AtomicString.cpp \
+ wtf/text/CString.cpp \
+ wtf/text/StringImpl.cpp \
+ wtf/text/StringStatics.cpp \
+ wtf/text/WTFString.cpp \
+ wtf/unicode/CollatorDefault.cpp \
+ wtf/unicode/icu/CollatorICU.cpp \
+ wtf/unicode/UTF8.cpp \
+ yarr/RegexCompiler.cpp \
+ yarr/RegexInterpreter.cpp \
+ yarr/RegexJIT.cpp
+
+# Generated files, simply list them for JavaScriptCore
+SOURCES += \
+ $${JSC_GENERATED_SOURCES_DIR}/Grammar.cpp
+
+!contains(DEFINES, USE_SYSTEM_MALLOC) {
+ SOURCES += wtf/TCSystemAlloc.cpp
+}
+
+# Disable C++0x mode in JSC for those who enabled it in their Qt's mkspec
+*-g++*:QMAKE_CXXFLAGS -= -std=c++0x -std=gnu++0x
// Memory load/store helpers
-void ARMAssembler::dataTransfer32(bool isLoad, RegisterID srcDst, RegisterID base, int32_t offset)
+void ARMAssembler::dataTransfer32(bool isLoad, RegisterID srcDst, RegisterID base, int32_t offset, bool bytes)
{
+ ARMWord transferFlag = bytes ? DT_BYTE : 0;
if (offset >= 0) {
if (offset <= 0xfff)
- dtr_u(isLoad, srcDst, base, offset);
+ dtr_u(isLoad, srcDst, base, offset | transferFlag);
else if (offset <= 0xfffff) {
add_r(ARMRegisters::S0, base, OP2_IMM | (offset >> 12) | (10 << 8));
- dtr_u(isLoad, srcDst, ARMRegisters::S0, offset & 0xfff);
+ dtr_u(isLoad, srcDst, ARMRegisters::S0, (offset & 0xfff) | transferFlag);
} else {
ARMWord reg = getImm(offset, ARMRegisters::S0);
- dtr_ur(isLoad, srcDst, base, reg);
+ dtr_ur(isLoad, srcDst, base, reg | transferFlag);
}
} else {
offset = -offset;
if (offset <= 0xfff)
- dtr_d(isLoad, srcDst, base, offset);
+ dtr_d(isLoad, srcDst, base, offset | transferFlag);
else if (offset <= 0xfffff) {
sub_r(ARMRegisters::S0, base, OP2_IMM | (offset >> 12) | (10 << 8));
- dtr_d(isLoad, srcDst, ARMRegisters::S0, offset & 0xfff);
+ dtr_d(isLoad, srcDst, ARMRegisters::S0, (offset & 0xfff) | transferFlag);
} else {
ARMWord reg = getImm(offset, ARMRegisters::S0);
- dtr_dr(isLoad, srcDst, base, reg);
+ dtr_dr(isLoad, srcDst, base, reg | transferFlag);
}
}
}
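The splitting above keeps any offset down to at most a couple of extra instructions before the transfer. A minimal standalone sketch of the same decision, with illustrative names that are not part of the assembler: offsets that fit in 12 bits are encoded directly in the LDR/STR immediate, offsets up to 20 bits add the top byte into the scratch register S0 first, and anything wider materializes the full constant.

// Sketch only: classifyOffset/TransferKind are illustrative, not JSC types.
#include <cstdint>
#include <cstdio>

enum class TransferKind { DirectImm12, AddHighThenImm12, MaterializeConstant };

static TransferKind classifyOffset(int32_t offset)
{
    uint32_t magnitude = offset < 0 ? -offset : offset;
    if (magnitude <= 0xfff)
        return TransferKind::DirectImm12;      // fits the 12-bit LDR/STR immediate
    if (magnitude <= 0xfffff)
        return TransferKind::AddHighThenImm12; // add/sub the top byte into S0, use the low 12 bits
    return TransferKind::MaterializeConstant;  // load the whole offset into S0, use the register form
}

int main()
{
    printf("%d %d %d\n",
           static_cast<int>(classifyOffset(0x7f0)),       // 0: direct
           static_cast<int>(classifyOffset(0x12345)),     // 1: split
           static_cast<int>(classifyOffset(-0x1234567))); // 2: materialize
    return 0;
}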
int pos = (*iter) & (~0x1);
ARMWord* ldrAddr = reinterpret_cast<ARMWord*>(data + pos);
ARMWord* addr = getLdrImmAddress(ldrAddr);
- if (*addr != 0xffffffff) {
+ if (*addr != InvalidBranchTarget) {
if (!(*iter & 1)) {
int diff = reinterpret_cast<ARMWord*>(data + *addr) - (ldrAddr + DefaultPrefetching);
/*
- * Copyright (C) 2009 University of Szeged
+ * Copyright (C) 2009, 2010 University of Szeged
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
#ifndef ARMAssembler_h
#define ARMAssembler_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL)
#include "AssemblerBufferWithConstantPool.h"
FSUBD = 0x0e300b40,
FMULD = 0x0e200b00,
FCMPD = 0x0eb40b40,
+ FSQRTD = 0x0eb10bc0,
DTR = 0x05000000,
LDRH = 0x00100090,
STRH = 0x00000090,
FDTR = 0x0d000b00,
B = 0x0a000000,
BL = 0x0b000000,
+#if WTF_ARM_ARCH_AT_LEAST(5) || defined(__ARM_ARCH_4T__)
+ BX = 0x012fff10,
+#endif
FMSR = 0x0e000a10,
FMRS = 0x0e100a10,
FSITOD = 0x0eb80bc0,
#if WTF_ARM_ARCH_AT_LEAST(5)
CLZ = 0x016f0f10,
BKPT = 0xe120070,
+ BLX = 0x012fff30,
#endif
#if WTF_ARM_ARCH_AT_LEAST(7)
MOVW = 0x03000000,
SET_CC = (1 << 20),
OP2_OFSREG = (1 << 25),
DT_UP = (1 << 23),
+ DT_BYTE = (1 << 22),
DT_WB = (1 << 21),
// This flag is included in LDR and STR
DT_PRE = (1 << 24),
};
static const ARMWord INVALID_IMM = 0xf0000000;
+ static const ARMWord InvalidBranchTarget = 0xffffffff;
static const int DefaultPrefetching = 2;
class JmpSrc {
emitInst(static_cast<ARMWord>(cc) | FCMPD, dd, 0, dm);
}
+ void fsqrtd_r(int dd, int dm, Condition cc = AL)
+ {
+ emitInst(static_cast<ARMWord>(cc) | FSQRTD, dd, 0, dm);
+ }
+
void ldr_imm(int rd, ARMWord imm, Condition cc = AL)
{
m_buffer.putIntWithConstantInt(static_cast<ARMWord>(cc) | DTR | DT_LOAD | DT_UP | RN(ARMRegisters::pc) | RD(rd), imm, true);
#endif
}
+ void bx(int rm, Condition cc = AL)
+ {
+#if WTF_ARM_ARCH_AT_LEAST(5) || defined(__ARM_ARCH_4T__)
+ emitInst(static_cast<ARMWord>(cc) | BX, 0, 0, RM(rm));
+#else
+ mov_r(ARMRegisters::pc, RM(rm), cc);
+#endif
+ }
+
+ JmpSrc blx(int rm, Condition cc = AL)
+ {
+#if WTF_ARM_ARCH_AT_LEAST(5)
+ int s = m_buffer.uncheckedSize();
+ emitInst(static_cast<ARMWord>(cc) | BLX, 0, 0, RM(rm));
+#else
+ ASSERT(rm != 14);
+ ensureSpace(2 * sizeof(ARMWord), 0);
+ mov_r(ARMRegisters::lr, ARMRegisters::pc, cc);
+ int s = m_buffer.uncheckedSize();
+ bx(rm, cc);
+#endif
+ return JmpSrc(s);
+ }
+
static ARMWord lsl(int reg, ARMWord value)
{
ASSERT(reg <= ARMRegisters::pc);
return label();
}
- JmpSrc jmp(Condition cc = AL, int useConstantPool = 0)
+ JmpSrc loadBranchTarget(int rd, Condition cc = AL, int useConstantPool = 0)
{
ensureSpace(sizeof(ARMWord), sizeof(ARMWord));
int s = m_buffer.uncheckedSize();
- ldr_un_imm(ARMRegisters::pc, 0xffffffff, cc);
+ ldr_un_imm(rd, InvalidBranchTarget, cc);
m_jumps.append(s | (useConstantPool & 0x1));
return JmpSrc(s);
}
+ JmpSrc jmp(Condition cc = AL, int useConstantPool = 0)
+ {
+ return loadBranchTarget(ARMRegisters::pc, cc, useConstantPool);
+ }
+
void* executableCopy(ExecutablePool* allocator);
// Patching helpers
static ARMWord* getLdrImmAddress(ARMWord* insn)
{
+#if WTF_ARM_ARCH_AT_LEAST(5)
+ // Check for call
+ if ((*insn & 0x0f7f0000) != 0x051f0000) {
+ // Must be BLX
+ ASSERT((*insn & 0x012fff30) == 0x012fff30);
+ insn--;
+ }
+#endif
// Must be an ldr ..., [pc +/- imm]
ASSERT((*insn & 0x0f7f0000) == 0x051f0000);
// Memory load/store helpers
- void dataTransfer32(bool isLoad, RegisterID srcDst, RegisterID base, int32_t offset);
+ void dataTransfer32(bool isLoad, RegisterID srcDst, RegisterID base, int32_t offset, bool bytes = false);
void baseIndexTransfer32(bool isLoad, RegisterID srcDst, RegisterID base, RegisterID index, int scale, int32_t offset);
void doubleTransfer(bool isLoad, FPRegisterID srcDst, RegisterID base, int32_t offset);
/*
* Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2010 University of Szeged
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#ifndef ARMAssembler_h
#define ARMAssembler_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER) && CPU(ARM_THUMB2)
#include "AssemblerBuffer.h"
r15, pc = r15,
} RegisterID;
- // s0 == d0 == q0
- // s4 == d2 == q1
- // etc
typedef enum {
- s0 = 0,
- s1 = 1,
- s2 = 2,
- s3 = 3,
- s4 = 4,
- s5 = 5,
- s6 = 6,
- s7 = 7,
- s8 = 8,
- s9 = 9,
- s10 = 10,
- s11 = 11,
- s12 = 12,
- s13 = 13,
- s14 = 14,
- s15 = 15,
- s16 = 16,
- s17 = 17,
- s18 = 18,
- s19 = 19,
- s20 = 20,
- s21 = 21,
- s22 = 22,
- s23 = 23,
- s24 = 24,
- s25 = 25,
- s26 = 26,
- s27 = 27,
- s28 = 28,
- s29 = 29,
- s30 = 30,
- s31 = 31,
- d0 = 0 << 1,
- d1 = 1 << 1,
- d2 = 2 << 1,
- d3 = 3 << 1,
- d4 = 4 << 1,
- d5 = 5 << 1,
- d6 = 6 << 1,
- d7 = 7 << 1,
- d8 = 8 << 1,
- d9 = 9 << 1,
- d10 = 10 << 1,
- d11 = 11 << 1,
- d12 = 12 << 1,
- d13 = 13 << 1,
- d14 = 14 << 1,
- d15 = 15 << 1,
- d16 = 16 << 1,
- d17 = 17 << 1,
- d18 = 18 << 1,
- d19 = 19 << 1,
- d20 = 20 << 1,
- d21 = 21 << 1,
- d22 = 22 << 1,
- d23 = 23 << 1,
- d24 = 24 << 1,
- d25 = 25 << 1,
- d26 = 26 << 1,
- d27 = 27 << 1,
- d28 = 28 << 1,
- d29 = 29 << 1,
- d30 = 30 << 1,
- d31 = 31 << 1,
- q0 = 0 << 2,
- q1 = 1 << 2,
- q2 = 2 << 2,
- q3 = 3 << 2,
- q4 = 4 << 2,
- q5 = 5 << 2,
- q6 = 6 << 2,
- q7 = 7 << 2,
- q8 = 8 << 2,
- q9 = 9 << 2,
- q10 = 10 << 2,
- q11 = 11 << 2,
- q12 = 12 << 2,
- q13 = 13 << 2,
- q14 = 14 << 2,
- q15 = 15 << 2,
- q16 = 16 << 2,
- q17 = 17 << 2,
- q18 = 18 << 2,
- q19 = 19 << 2,
- q20 = 20 << 2,
- q21 = 21 << 2,
- q22 = 22 << 2,
- q23 = 23 << 2,
- q24 = 24 << 2,
- q25 = 25 << 2,
- q26 = 26 << 2,
- q27 = 27 << 2,
- q28 = 28 << 2,
- q29 = 29 << 2,
- q30 = 30 << 2,
- q31 = 31 << 2,
- } FPRegisterID;
+ s0,
+ s1,
+ s2,
+ s3,
+ s4,
+ s5,
+ s6,
+ s7,
+ s8,
+ s9,
+ s10,
+ s11,
+ s12,
+ s13,
+ s14,
+ s15,
+ s16,
+ s17,
+ s18,
+ s19,
+ s20,
+ s21,
+ s22,
+ s23,
+ s24,
+ s25,
+ s26,
+ s27,
+ s28,
+ s29,
+ s30,
+ s31,
+ } FPSingleRegisterID;
+
+ typedef enum {
+ d0,
+ d1,
+ d2,
+ d3,
+ d4,
+ d5,
+ d6,
+ d7,
+ d8,
+ d9,
+ d10,
+ d11,
+ d12,
+ d13,
+ d14,
+ d15,
+ d16,
+ d17,
+ d18,
+ d19,
+ d20,
+ d21,
+ d22,
+ d23,
+ d24,
+ d25,
+ d26,
+ d27,
+ d28,
+ d29,
+ d30,
+ d31,
+ } FPDoubleRegisterID;
+
+ typedef enum {
+ q0,
+ q1,
+ q2,
+ q3,
+ q4,
+ q5,
+ q6,
+ q7,
+ q8,
+ q9,
+ q10,
+ q11,
+ q12,
+ q13,
+ q14,
+ q15,
+ q16,
+ q17,
+ q18,
+ q19,
+ q20,
+ q21,
+ q22,
+ q23,
+ q24,
+ q25,
+ q26,
+ q27,
+ q28,
+ q29,
+ q30,
+ q31,
+ } FPQuadRegisterID;
+
+ inline FPSingleRegisterID asSingle(FPDoubleRegisterID reg)
+ {
+ ASSERT(reg < d16);
+ return (FPSingleRegisterID)(reg << 1);
+ }
+
+ inline FPDoubleRegisterID asDouble(FPSingleRegisterID reg)
+ {
+ ASSERT(!(reg & 1));
+ return (FPDoubleRegisterID)(reg >> 1);
+ }
}
class ARMv7Assembler;
ThumbImmediateValue m_value;
};
+class VFPImmediate {
+public:
+ VFPImmediate(double d)
+ : m_value(-1)
+ {
+ union {
+ uint64_t i;
+ double d;
+ } u;
+
+ u.d = d;
+
+ int sign = static_cast<int>(u.i >> 63);
+ int exponent = static_cast<int>(u.i >> 52) & 0x7ff;
+ uint64_t mantissa = u.i & 0x000fffffffffffffull;
+
+ if ((exponent >= 0x3fc) && (exponent <= 0x403) && !(mantissa & 0x0000ffffffffffffull))
+ m_value = (sign << 7) | ((exponent & 7) << 4) | (int)(mantissa >> 48);
+ }
+
+ bool isValid()
+ {
+ return m_value != -1;
+ }
+
+ uint8_t value()
+ {
+ return (uint8_t)m_value;
+ }
+
+private:
+ int m_value;
+};
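A worked check of the encoding above, as a standalone sketch that mirrors the same bit tests (encodeVFPImmediate is an illustrative helper, not JSC code): a double qualifies only when the exponent sits in the eight-value window around the bias and no mantissa bits below the top four are set, so 1.0 encodes to 0x70 and 1.5 to 0x78, while 3.1 is rejected.

// Sketch of the VFP immediate test above; returns -1 when the value is not encodable.
#include <cstdint>
#include <cstdio>
#include <cstring>

static int encodeVFPImmediate(double d)
{
    uint64_t bits;
    memcpy(&bits, &d, sizeof(bits));

    int sign = static_cast<int>(bits >> 63);
    int exponent = static_cast<int>(bits >> 52) & 0x7ff;
    uint64_t mantissa = bits & 0x000fffffffffffffull;

    if (exponent < 0x3fc || exponent > 0x403 || (mantissa & 0x0000ffffffffffffull))
        return -1;
    return (sign << 7) | ((exponent & 7) << 4) | static_cast<int>(mantissa >> 48);
}

int main()
{
    printf("1.0 -> 0x%02x\n", encodeVFPImmediate(1.0)); // 0x70
    printf("1.5 -> 0x%02x\n", encodeVFPImmediate(1.5)); // 0x78
    printf("3.1 -> %d\n", encodeVFPImmediate(3.1));     // -1, not encodable
    return 0;
}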
typedef enum {
SRType_LSL,
};
-/*
-Some features of the Thumb instruction set are deprecated in ARMv7. Deprecated features affecting
-instructions supported by ARMv7-M are as follows:
-• use of the PC as <Rd> or <Rm> in a 16-bit ADD (SP plus register) instruction
-• use of the SP as <Rm> in a 16-bit ADD (SP plus register) instruction
-• use of the SP as <Rm> in a 16-bit CMP (register) instruction
-• use of MOV (register) instructions in which <Rd> is the SP or PC and <Rm> is also the SP or PC.
-• use of <Rn> as the lowest-numbered register in the register list of a 16-bit STM instruction with base
-register writeback
-*/
-
class ARMv7Assembler {
public:
~ARMv7Assembler()
}
typedef ARMRegisters::RegisterID RegisterID;
- typedef ARMRegisters::FPRegisterID FPRegisterID;
+ typedef ARMRegisters::FPSingleRegisterID FPSingleRegisterID;
+ typedef ARMRegisters::FPDoubleRegisterID FPDoubleRegisterID;
+ typedef ARMRegisters::FPQuadRegisterID FPQuadRegisterID;
// (HS, LO, HI, LS) -> (AE, B, A, BE)
// (VS, VC) -> (O, NO)
return (reg == ARMRegisters::sp) || (reg == ARMRegisters::pc);
}
- bool isSingleRegister(FPRegisterID reg)
- {
- // Check that the high bit isn't set (q16+), and that the low bit isn't (s1, s3, etc).
- return !(reg & ~31);
- }
-
- bool isDoubleRegister(FPRegisterID reg)
- {
- // Check that the high bit isn't set (q16+), and that the low bit isn't (s1, s3, etc).
- return !(reg & ~(31 << 1));
- }
-
- bool isQuadRegister(FPRegisterID reg)
- {
- return !(reg & ~(31 << 2));
- }
-
- uint32_t singleRegisterNum(FPRegisterID reg)
- {
- ASSERT(isSingleRegister(reg));
- return reg;
- }
-
- uint32_t doubleRegisterNum(FPRegisterID reg)
- {
- ASSERT(isDoubleRegister(reg));
- return reg >> 1;
- }
-
- uint32_t quadRegisterNum(FPRegisterID reg)
- {
- ASSERT(isQuadRegister(reg));
- return reg >> 2;
- }
-
- uint32_t singleRegisterMask(FPRegisterID rd, int highBitsShift, int lowBitShift)
+ uint32_t singleRegisterMask(FPSingleRegisterID rdNum, int highBitsShift, int lowBitShift)
{
- uint32_t rdNum = singleRegisterNum(rd);
uint32_t rdMask = (rdNum >> 1) << highBitsShift;
if (rdNum & 1)
rdMask |= 1 << lowBitShift;
return rdMask;
}
- uint32_t doubleRegisterMask(FPRegisterID rd, int highBitShift, int lowBitsShift)
+ uint32_t doubleRegisterMask(FPDoubleRegisterID rdNum, int highBitShift, int lowBitsShift)
{
- uint32_t rdNum = doubleRegisterNum(rd);
uint32_t rdMask = (rdNum & 0xf) << lowBitsShift;
if (rdNum & 16)
rdMask |= 1 << highBitShift;
typedef enum {
OP_ADD_reg_T1 = 0x1800,
- OP_ADD_S_reg_T1 = 0x1800,
OP_SUB_reg_T1 = 0x1A00,
- OP_SUB_S_reg_T1 = 0x1A00,
OP_ADD_imm_T1 = 0x1C00,
- OP_ADD_S_imm_T1 = 0x1C00,
OP_SUB_imm_T1 = 0x1E00,
- OP_SUB_S_imm_T1 = 0x1E00,
OP_MOV_imm_T1 = 0x2000,
OP_CMP_imm_T1 = 0x2800,
OP_ADD_imm_T2 = 0x3000,
- OP_ADD_S_imm_T2 = 0x3000,
OP_SUB_imm_T2 = 0x3800,
- OP_SUB_S_imm_T2 = 0x3800,
OP_AND_reg_T1 = 0x4000,
OP_EOR_reg_T1 = 0x4040,
OP_TST_reg_T1 = 0x4200,
+ OP_RSB_imm_T1 = 0x4240,
OP_CMP_reg_T1 = 0x4280,
OP_ORR_reg_T1 = 0x4300,
OP_MVN_reg_T1 = 0x43C0,
OP_MOV_reg_T1 = 0x4600,
OP_BLX = 0x4700,
OP_BX = 0x4700,
- OP_LDRH_reg_T1 = 0x5A00,
OP_STR_reg_T1 = 0x5000,
OP_LDR_reg_T1 = 0x5800,
+ OP_LDRH_reg_T1 = 0x5A00,
+ OP_LDRB_reg_T1 = 0x5C00,
OP_STR_imm_T1 = 0x6000,
OP_LDR_imm_T1 = 0x6800,
+ OP_LDRB_imm_T1 = 0x7800,
OP_LDRH_imm_T1 = 0x8800,
OP_STR_imm_T2 = 0x9000,
OP_LDR_imm_T2 = 0x9800,
OP_AND_reg_T2 = 0xEA00,
OP_TST_reg_T2 = 0xEA10,
OP_ORR_reg_T2 = 0xEA40,
+ OP_ORR_S_reg_T2 = 0xEA50,
OP_ASR_imm_T1 = 0xEA4F,
OP_LSL_imm_T1 = 0xEA4F,
OP_LSR_imm_T1 = 0xEA4F,
OP_SUB_reg_T2 = 0xEBA0,
OP_SUB_S_reg_T2 = 0xEBB0,
OP_CMP_reg_T2 = 0xEBB0,
+ OP_VSTR = 0xED00,
+ OP_VLDR = 0xED10,
+ OP_VMOV_StoC = 0xEE00,
+ OP_VMOV_CtoS = 0xEE10,
+ OP_VMUL_T2 = 0xEE20,
+ OP_VADD_T2 = 0xEE30,
+ OP_VSUB_T2 = 0xEE30,
+ OP_VDIV = 0xEE80,
+ OP_VCMP_T1 = 0xEEB0,
+ OP_VCVT_FPIVFP = 0xEEB0,
+ OP_VMOV_IMM_T2 = 0xEEB0,
+ OP_VMRS = 0xEEB0,
OP_B_T4a = 0xF000,
OP_AND_imm_T1 = 0xF000,
OP_TST_imm = 0xF010,
OP_SUB_imm_T3 = 0xF1A0,
OP_SUB_S_imm_T3 = 0xF1B0,
OP_CMP_imm_T2 = 0xF1B0,
+ OP_RSB_imm_T2 = 0xF1C0,
OP_ADD_imm_T4 = 0xF200,
OP_MOV_imm_T3 = 0xF240,
OP_SUB_imm_T4 = 0xF2A0,
OP_MOVT = 0xF2C0,
OP_NOP_T2a = 0xF3AF,
+ OP_LDRB_imm_T3 = 0xF810,
+ OP_LDRB_reg_T2 = 0xF810,
OP_LDRH_reg_T2 = 0xF830,
OP_LDRH_imm_T3 = 0xF830,
OP_STR_imm_T4 = 0xF840,
OP_STR_reg_T2 = 0xF840,
OP_LDR_imm_T4 = 0xF850,
OP_LDR_reg_T2 = 0xF850,
+ OP_LDRB_imm_T2 = 0xF890,
OP_LDRH_imm_T2 = 0xF8B0,
OP_STR_imm_T3 = 0xF8C0,
OP_LDR_imm_T3 = 0xF8D0,
} OpcodeID1;
typedef enum {
- OP_B_T4b = 0x9000,
+ OP_VADD_T2b = 0x0A00,
+ OP_VDIVb = 0x0A00,
+ OP_VLDRb = 0x0A00,
+ OP_VMOV_IMM_T2b = 0x0A00,
+ OP_VMUL_T2b = 0x0A00,
+ OP_VSTRb = 0x0A00,
+ OP_VMOV_CtoSb = 0x0A10,
+ OP_VMOV_StoCb = 0x0A10,
+ OP_VMRSb = 0x0A10,
+ OP_VCMP_T1b = 0x0A40,
+ OP_VCVT_FPIVFPb = 0x0A40,
+ OP_VSUB_T2b = 0x0A40,
OP_NOP_T2b = 0x8000,
+ OP_B_T4b = 0x9000,
} OpcodeID2;
struct FourFours {
if (!((rd | rn) & 8)) {
if (imm.isUInt3()) {
- m_formatter.oneWordOp7Reg3Reg3Reg3(OP_ADD_S_imm_T1, (RegisterID)imm.getUInt3(), rn, rd);
+ m_formatter.oneWordOp7Reg3Reg3Reg3(OP_ADD_imm_T1, (RegisterID)imm.getUInt3(), rn, rd);
return;
} else if ((rd == rn) && imm.isUInt8()) {
- m_formatter.oneWordOp5Reg3Imm8(OP_ADD_S_imm_T2, rd, imm.getUInt8());
+ m_formatter.oneWordOp5Reg3Imm8(OP_ADD_imm_T2, rd, imm.getUInt8());
return;
}
}
void add_S(RegisterID rd, RegisterID rn, RegisterID rm)
{
if (!((rd | rn | rm) & 8))
- m_formatter.oneWordOp7Reg3Reg3Reg3(OP_ADD_S_reg_T1, rm, rn, rd);
+ m_formatter.oneWordOp7Reg3Reg3Reg3(OP_ADD_reg_T1, rm, rn, rd);
else
add_S(rd, rn, rm, ShiftTypeAndAmount());
}
m_formatter.twoWordOp12Reg4FourFours(OP_LDRH_reg_T2, rn, FourFours(rt, 0, shift, rm));
}
+ void ldrb(RegisterID rt, RegisterID rn, ARMThumbImmediate imm)
+ {
+ ASSERT(rn != ARMRegisters::pc); // LDR (literal)
+ ASSERT(imm.isUInt12());
+
+ if (!((rt | rn) & 8) && imm.isUInt5())
+ m_formatter.oneWordOp5Imm5Reg3Reg3(OP_LDRB_imm_T1, imm.getUInt5(), rn, rt);
+ else
+ m_formatter.twoWordOp12Reg4Reg4Imm12(OP_LDRB_imm_T2, rn, rt, imm.getUInt12());
+ }
+
+ void ldrb(RegisterID rt, RegisterID rn, int offset, bool index, bool wback)
+ {
+ ASSERT(rt != ARMRegisters::pc);
+ ASSERT(rn != ARMRegisters::pc);
+ ASSERT(index || wback);
+ ASSERT(!wback | (rt != rn));
+
+ bool add = true;
+ if (offset < 0) {
+ add = false;
+ offset = -offset;
+ }
+
+ ASSERT(!(offset & ~0xff));
+
+ offset |= (wback << 8);
+ offset |= (add << 9);
+ offset |= (index << 10);
+ offset |= (1 << 11);
+
+ m_formatter.twoWordOp12Reg4Reg4Imm12(OP_LDRB_imm_T3, rn, rt, offset);
+ }
+
+ void ldrb(RegisterID rt, RegisterID rn, RegisterID rm, unsigned shift = 0)
+ {
+ ASSERT(rn != ARMRegisters::pc); // LDR (literal)
+ ASSERT(!BadReg(rm));
+ ASSERT(shift <= 3);
+
+ if (!shift && !((rt | rn | rm) & 8))
+ m_formatter.oneWordOp7Reg3Reg3Reg3(OP_LDRB_reg_T1, rm, rn, rt);
+ else
+ m_formatter.twoWordOp12Reg4FourFours(OP_LDRB_reg_T2, rn, FourFours(rt, 0, shift, rm));
+ }
+
void lsl(RegisterID rd, RegisterID rm, int32_t shiftAmount)
{
ASSERT(!BadReg(rd));
mvn(rd, rm, ShiftTypeAndAmount());
}
+ void neg(RegisterID rd, RegisterID rm)
+ {
+ ARMThumbImmediate zero = ARMThumbImmediate::makeUInt12(0);
+ sub(rd, zero, rm);
+ }
+
void orr(RegisterID rd, RegisterID rn, ARMThumbImmediate imm)
{
ASSERT(!BadReg(rd));
orr(rd, rn, rm, ShiftTypeAndAmount());
}
+ void orr_S(RegisterID rd, RegisterID rn, RegisterID rm, ShiftTypeAndAmount shift)
+ {
+ ASSERT(!BadReg(rd));
+ ASSERT(!BadReg(rn));
+ ASSERT(!BadReg(rm));
+ m_formatter.twoWordOp12Reg4FourFours(OP_ORR_S_reg_T2, rn, FourFours(shift.hi4(), rd, shift.lo4(), rm));
+ }
+
+ void orr_S(RegisterID rd, RegisterID rn, RegisterID rm)
+ {
+ if ((rd == rn) && !((rd | rm) & 8))
+ m_formatter.oneWordOp10Reg3Reg3(OP_ORR_reg_T1, rm, rd);
+ else if ((rd == rm) && !((rd | rn) & 8))
+ m_formatter.oneWordOp10Reg3Reg3(OP_ORR_reg_T1, rn, rd);
+ else
+ orr_S(rd, rn, rm, ShiftTypeAndAmount());
+ }
+
void ror(RegisterID rd, RegisterID rm, int32_t shiftAmount)
{
ASSERT(!BadReg(rd));
}
}
+ void sub(RegisterID rd, ARMThumbImmediate imm, RegisterID rn)
+ {
+ ASSERT(rd != ARMRegisters::pc);
+ ASSERT(rn != ARMRegisters::pc);
+ ASSERT(imm.isValid());
+ ASSERT(imm.isUInt12());
+
+ if (!((rd | rn) & 8) && !imm.getUInt12())
+ m_formatter.oneWordOp10Reg3Reg3(OP_RSB_imm_T1, rn, rd);
+ else
+ m_formatter.twoWordOp5i6Imm4Reg4EncodedImm(OP_RSB_imm_T2, rn, rd, imm);
+ }
+
void sub(RegisterID rd, RegisterID rn, RegisterID rm, ShiftTypeAndAmount shift)
{
ASSERT((rd != ARMRegisters::sp) || (rn == ARMRegisters::sp));
return;
} else if (!((rd | rn) & 8)) {
if (imm.isUInt3()) {
- m_formatter.oneWordOp7Reg3Reg3Reg3(OP_SUB_S_imm_T1, (RegisterID)imm.getUInt3(), rn, rd);
+ m_formatter.oneWordOp7Reg3Reg3Reg3(OP_SUB_imm_T1, (RegisterID)imm.getUInt3(), rn, rd);
return;
} else if ((rd == rn) && imm.isUInt8()) {
- m_formatter.oneWordOp5Reg3Imm8(OP_SUB_S_imm_T2, rd, imm.getUInt8());
+ m_formatter.oneWordOp5Reg3Imm8(OP_SUB_imm_T2, rd, imm.getUInt8());
return;
}
}
void sub_S(RegisterID rd, RegisterID rn, RegisterID rm)
{
if (!((rd | rn | rm) & 8))
- m_formatter.oneWordOp7Reg3Reg3Reg3(OP_SUB_S_reg_T1, rm, rn, rd);
+ m_formatter.oneWordOp7Reg3Reg3Reg3(OP_SUB_reg_T1, rm, rn, rd);
else
sub_S(rd, rn, rm, ShiftTypeAndAmount());
}
m_formatter.oneWordOp10Reg3Reg3(OP_TST_reg_T1, rm, rn);
}
- void vadd_F64(FPRegisterID rd, FPRegisterID rn, FPRegisterID rm)
+ void vadd_F64(FPDoubleRegisterID rd, FPDoubleRegisterID rn, FPDoubleRegisterID rm)
{
- m_formatter.vfpOp(0x0b00ee30 | doubleRegisterMask(rd, 6, 28) | doubleRegisterMask(rn, 23, 0) | doubleRegisterMask(rm, 21, 16));
+ m_formatter.vfpOp(OP_VADD_T2, OP_VADD_T2b, true, rn, rd, rm);
}
- void vcmp_F64(FPRegisterID rd, FPRegisterID rm)
+ void vcmp_F64(FPDoubleRegisterID rd, FPDoubleRegisterID rm)
{
- m_formatter.vfpOp(0x0bc0eeb4 | doubleRegisterMask(rd, 6, 28) | doubleRegisterMask(rm, 21, 16));
+ m_formatter.vfpOp(OP_VCMP_T1, OP_VCMP_T1b, true, VFPOperand(4), rd, rm);
}
- void vcvt_F64_S32(FPRegisterID fd, FPRegisterID sm)
+ void vcvt_F64_S32(FPDoubleRegisterID rd, FPSingleRegisterID rm)
{
- m_formatter.vfpOp(0x0bc0eeb8 | doubleRegisterMask(fd, 6, 28) | singleRegisterMask(sm, 16, 21));
+ // boolean values are 64bit (toInt, unsigned, roundZero)
+ m_formatter.vfpOp(OP_VCVT_FPIVFP, OP_VCVT_FPIVFPb, true, vcvtOp(false, false, false), rd, rm);
}
- void vcvt_S32_F64(FPRegisterID sd, FPRegisterID fm)
+ void vcvtr_S32_F64(FPSingleRegisterID rd, FPDoubleRegisterID rm)
{
- m_formatter.vfpOp(0x0bc0eebd | singleRegisterMask(sd, 28, 6) | doubleRegisterMask(fm, 21, 16));
+ // boolean values are 64bit (toInt, unsigned, roundZero)
+ m_formatter.vfpOp(OP_VCVT_FPIVFP, OP_VCVT_FPIVFPb, true, vcvtOp(true, false, true), rd, rm);
}
- void vldr(FPRegisterID rd, RegisterID rn, int32_t imm)
+ void vdiv_F64(FPDoubleRegisterID rd, FPDoubleRegisterID rn, FPDoubleRegisterID rm)
{
- vmem(rd, rn, imm, true);
+ m_formatter.vfpOp(OP_VDIV, OP_VDIVb, true, rn, rd, rm);
}
- void vmov(RegisterID rd, FPRegisterID sn)
+ void vldr(FPDoubleRegisterID rd, RegisterID rn, int32_t imm)
{
- m_formatter.vfpOp(0x0a10ee10 | (rd << 28) | singleRegisterMask(sn, 0, 23));
+ m_formatter.vfpMemOp(OP_VLDR, OP_VLDRb, true, rn, rd, imm);
}
- void vmov(FPRegisterID sn, RegisterID rd)
+ void vmov_F64_0(FPDoubleRegisterID rd)
{
- m_formatter.vfpOp(0x0a10ee00 | (rd << 28) | singleRegisterMask(sn, 0, 23));
+ m_formatter.vfpOp(OP_VMOV_IMM_T2, OP_VMOV_IMM_T2b, true, VFPOperand(0), rd, VFPOperand(0));
+ }
+
+ void vmov(RegisterID rd, FPSingleRegisterID rn)
+ {
+ ASSERT(!BadReg(rd));
+ m_formatter.vfpOp(OP_VMOV_CtoS, OP_VMOV_CtoSb, false, rn, rd, VFPOperand(0));
}
- // move FPSCR flags to APSR.
- void vmrs_APSR_nzcv_FPSCR()
+ void vmov(FPSingleRegisterID rd, RegisterID rn)
{
- m_formatter.vfpOp(0xfa10eef1);
+ ASSERT(!BadReg(rn));
+ m_formatter.vfpOp(OP_VMOV_StoC, OP_VMOV_StoCb, false, rd, rn, VFPOperand(0));
}
- void vmul_F64(FPRegisterID rd, FPRegisterID rn, FPRegisterID rm)
+ void vmrs(RegisterID reg = ARMRegisters::pc)
{
- m_formatter.vfpOp(0x0b00ee20 | doubleRegisterMask(rd, 6, 28) | doubleRegisterMask(rn, 23, 0) | doubleRegisterMask(rm, 21, 16));
+ ASSERT(reg != ARMRegisters::sp);
+ m_formatter.vfpOp(OP_VMRS, OP_VMRSb, false, VFPOperand(1), VFPOperand(0x10 | reg), VFPOperand(0));
}
- void vstr(FPRegisterID rd, RegisterID rn, int32_t imm)
+ void vmul_F64(FPDoubleRegisterID rd, FPDoubleRegisterID rn, FPDoubleRegisterID rm)
{
- vmem(rd, rn, imm, false);
+ m_formatter.vfpOp(OP_VMUL_T2, OP_VMUL_T2b, true, rn, rd, rm);
}
- void vsub_F64(FPRegisterID rd, FPRegisterID rn, FPRegisterID rm)
+ void vstr(FPDoubleRegisterID rd, RegisterID rn, int32_t imm)
{
- m_formatter.vfpOp(0x0b40ee30 | doubleRegisterMask(rd, 6, 28) | doubleRegisterMask(rn, 23, 0) | doubleRegisterMask(rm, 21, 16));
+ m_formatter.vfpMemOp(OP_VSTR, OP_VSTRb, true, rn, rd, imm);
}
+ void vsub_F64(FPDoubleRegisterID rd, FPDoubleRegisterID rn, FPDoubleRegisterID rm)
+ {
+ m_formatter.vfpOp(OP_VSUB_T2, OP_VSUB_T2b, true, rn, rd, rm);
+ }
JmpDst label()
{
static void repatchLoadPtrToLEA(void* where)
{
ASSERT(!(reinterpret_cast<intptr_t>(where) & 1));
-
uint16_t* loadOp = reinterpret_cast<uint16_t*>(where) + 4;
- ASSERT((*loadOp & 0xfff0) == OP_LDR_reg_T2);
- *loadOp = OP_ADD_reg_T3 | (*loadOp & 0xf);
- ExecutableAllocator::cacheFlush(loadOp, sizeof(uint16_t));
+ ASSERT((loadOp[0] & 0xfff0) == OP_LDR_reg_T2);
+ ASSERT((loadOp[1] & 0x0ff0) == 0);
+ int rn = loadOp[0] & 0xf;
+ int rt = loadOp[1] >> 12;
+ int rm = loadOp[1] & 0xf;
+
+ loadOp[0] = OP_ADD_reg_T3 | rn;
+ loadOp[1] = rt << 8 | rm;
+ ExecutableAllocator::cacheFlush(loadOp, sizeof(uint32_t));
}
private:
+ // VFP operations commonly take one or more 5-bit operands, typically representing a
+ // floating point register number. This will commonly be encoded in the instruction
+ // in two parts, with one single bit field, and one 4-bit field. In the case of
+ // double precision operands the high bit of the register number will be encoded
+ // separately, and for single precision operands the high bit of the register number
+ // will be encoded individually.
+ // VFPOperand encapsulates a 5-bit VFP operand, with bits 0..3 containing the 4-bit
+ // field to be encoded together in the instruction (the low 4-bits of a double
+ // register number, or the high 4-bits of a single register number), and bit 4
+ // contains the bit value to be encoded individually.
+ struct VFPOperand {
+ explicit VFPOperand(uint32_t value)
+ : m_value(value)
+ {
+ ASSERT(!(m_value & ~0x1f));
+ }
- // Arm vfp addresses can be offset by a 9-bit ones-comp immediate, left shifted by 2.
- // (i.e. +/-(0..255) 32-bit words)
- void vmem(FPRegisterID rd, RegisterID rn, int32_t imm, bool isLoad)
- {
- bool up;
- uint32_t offset;
- if (imm < 0) {
- offset = -imm;
- up = false;
- } else {
- offset = imm;
- up = true;
+ VFPOperand(FPDoubleRegisterID reg)
+ : m_value(reg)
+ {
}
- // offset is effectively leftshifted by 2 already (the bottom two bits are zero, and not
- // reperesented in the instruction. Left shift by 14, to mov it into position 0x00AA0000.
- ASSERT((offset & ~(0xff << 2)) == 0);
- offset <<= 14;
+ VFPOperand(RegisterID reg)
+ : m_value(reg)
+ {
+ }
- m_formatter.vfpOp(0x0b00ed00 | offset | (up << 7) | (isLoad << 4) | doubleRegisterMask(rd, 6, 28) | rn);
+ VFPOperand(FPSingleRegisterID reg)
+ : m_value(((reg & 1) << 4) | (reg >> 1)) // rotate the lowest bit of 'reg' to the top.
+ {
+ }
+
+ uint32_t bits1()
+ {
+ return m_value >> 4;
+ }
+
+ uint32_t bits4()
+ {
+ return m_value & 0xf;
+ }
+
+ uint32_t m_value;
+ };
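A short standalone sketch of the split described in the comment above (splitDouble/splitSingle are illustrative helpers, not JSC code): a double register keeps its low four bits in the 4-bit field with bit 4 carried separately, whereas a single register has its low bit rotated out into the separate position.

// Sketch only; mirrors the VFPOperand constructors above.
#include <cstdint>
#include <cstdio>

struct SplitOperand { uint32_t bits1; uint32_t bits4; };

static SplitOperand splitDouble(uint32_t dReg) { return { dReg >> 4, dReg & 0xf }; }

static SplitOperand splitSingle(uint32_t sReg)
{
    uint32_t v = ((sReg & 1) << 4) | (sReg >> 1); // rotate the low bit to the top
    return { v >> 4, v & 0xf };
}

int main()
{
    SplitOperand d17 = splitDouble(17); // bits1 = 1, bits4 = 1
    SplitOperand s5 = splitSingle(5);   // bits1 = 1, bits4 = 2
    printf("d17: %u %u   s5: %u %u\n", d17.bits1, d17.bits4, s5.bits1, s5.bits4);
    return 0;
}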
+
+ VFPOperand vcvtOp(bool toInteger, bool isUnsigned, bool isRoundZero)
+ {
+ // Cannot specify rounding when converting to float.
+ ASSERT(toInteger || !isRoundZero);
+
+ uint32_t op = 0x8;
+ if (toInteger) {
+ // opc2 indicates both toInteger & isUnsigned.
+ op |= isUnsigned ? 0x4 : 0x5;
+ // 'op' field in instruction is isRoundZero
+ if (isRoundZero)
+ op |= 0x10;
+ } else {
+ // 'op' field in instruction is isUnsigned
+ if (!isUnsigned)
+ op |= 0x10;
+ }
+ return VFPOperand(op);
}
static void setInt32(void* code, uint32_t value)
|| (isNOP_T1(instruction - 5) && isNOP_T2(instruction - 4) && isB(instruction - 2)) );
intptr_t relative = reinterpret_cast<intptr_t>(target) - (reinterpret_cast<intptr_t>(instruction));
- if (((relative << 7) >> 7) == relative) {
+
+ // From Cortex-A8 errata:
+ // If the 32-bit Thumb-2 branch instruction spans two 4KiB regions and
+ // the target of the branch falls within the first region it is
+ // possible for the processor to incorrectly determine the branch
+ // instruction, and it is also possible in some cases for the processor
+ // to enter a deadlock state.
+    // The instruction spans two pages if it ends at an address ending in 0x002
+ bool spansTwo4K = ((reinterpret_cast<intptr_t>(instruction) & 0xfff) == 0x002);
+ // The target is in the first page if the jump branch back by [3..0x1002] bytes
+ bool targetInFirstPage = (relative >= -0x1002) && (relative < -2);
+ bool wouldTriggerA8Errata = spansTwo4K && targetInFirstPage;
+
+ if (((relative << 7) >> 7) == relative && !wouldTriggerA8Errata) {
// ARM encoding for the top two bits below the sign bit is 'peculiar'.
if (relative >= 0)
relative ^= 0xC00000;
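Restating the guard above as a standalone predicate (a sketch with illustrative parameter names, not the assembler's own interface): the short branch encoding is only used when the displacement fits its range and the errata pattern, ending two bytes into a 4KiB page while jumping back into the page before it, is not matched.

// Sketch of the Cortex-A8 errata condition checked above.
#include <cstdint>
#include <cstdio>

static bool wouldTriggerA8Errata(intptr_t instructionEnd, intptr_t relative)
{
    // The 32-bit Thumb-2 branch spans two 4KiB regions when it ends at ...0x002.
    bool spansTwo4K = (instructionEnd & 0xfff) == 0x002;
    // The target is in the first region when the branch goes back by [3..0x1002] bytes.
    bool targetInFirstPage = relative >= -0x1002 && relative < -2;
    return spansTwo4K && targetInFirstPage;
}

int main()
{
    printf("%d\n", wouldTriggerA8Errata(0x4002, -0x800)); // 1: backward branch across the page split
    printf("%d\n", wouldTriggerA8Errata(0x4010, -0x800)); // 0: instruction does not span two pages
    return 0;
}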
m_buffer.putShort((reg2 << 12) | imm);
}
- void vfpOp(int32_t op)
+ // Formats up instructions of the pattern:
+ // 111111111B11aaaa:bbbb222SA2C2cccc
+ // Where 1s in the pattern come from op1, 2s in the pattern come from op2, S is the provided size bit.
+ // Operands provide 5 bit values of the form Aaaaa, Bbbbb, Ccccc.
+ void vfpOp(OpcodeID1 op1, OpcodeID2 op2, bool size, VFPOperand a, VFPOperand b, VFPOperand c)
{
- m_buffer.putInt(op);
+ ASSERT(!(op1 & 0x004f));
+ ASSERT(!(op2 & 0xf1af));
+ m_buffer.putShort(op1 | b.bits1() << 6 | a.bits4());
+ m_buffer.putShort(op2 | b.bits4() << 12 | size << 8 | a.bits1() << 7 | c.bits1() << 5 | c.bits4());
}
+ // Arm vfp addresses can be offset by a 9-bit ones-comp immediate, left shifted by 2.
+ // (i.e. +/-(0..255) 32-bit words)
+ void vfpMemOp(OpcodeID1 op1, OpcodeID2 op2, bool size, RegisterID rn, VFPOperand rd, int32_t imm)
+ {
+ bool up = true;
+ if (imm < 0) {
+ imm = -imm;
+ up = false;
+ }
+
+ uint32_t offset = imm;
+ ASSERT(!(offset & ~0x3fc));
+ offset >>= 2;
+
+ m_buffer.putShort(op1 | (up << 7) | rd.bits1() << 6 | rn);
+ m_buffer.putShort(op2 | rd.bits4() << 12 | size << 8 | offset);
+ }
// Administrative methods:
#ifndef AbstractMacroAssembler_h
#define AbstractMacroAssembler_h
-#include <wtf/Platform.h>
-
#include <MacroAssemblerCodeRef.h>
#include <CodeLocation.h>
#include <wtf/Noncopyable.h>
class Jump;
typedef typename AssemblerType::RegisterID RegisterID;
- typedef typename AssemblerType::FPRegisterID FPRegisterID;
typedef typename AssemblerType::JmpSrc JmpSrc;
typedef typename AssemblerType::JmpDst JmpDst;
int32_t offset;
};
+ struct ExtendedAddress {
+ explicit ExtendedAddress(RegisterID base, intptr_t offset = 0)
+ : base(base)
+ , offset(offset)
+ {
+ }
+
+ RegisterID base;
+ intptr_t offset;
+ };
+
// ImplicitAddress:
//
// This class is used for explicit 'load' and 'store' operations
// in a class requiring explicit construction in order to differentiate
// from pointers used as absolute addresses to memory operations
struct ImmPtr {
- explicit ImmPtr(void* value)
+ explicit ImmPtr(const void* value)
: m_value(value)
{
}
return reinterpret_cast<intptr_t>(m_value);
}
- void* m_value;
+ const void* m_value;
};
// Imm32:
struct Imm32 {
explicit Imm32(int32_t value)
: m_value(value)
-#if CPU(ARM)
+#if CPU(ARM) || CPU(MIPS)
, m_isPointer(false)
#endif
{
#if !CPU(X86_64)
explicit Imm32(ImmPtr ptr)
: m_value(ptr.asIntptr())
-#if CPU(ARM)
+#if CPU(ARM) || CPU(MIPS)
, m_isPointer(true)
#endif
{
#endif
int32_t m_value;
-#if CPU(ARM)
+#if CPU(ARM) || CPU(MIPS)
// We rely on being able to regenerate code to recover exception handling
// information. Since ARMv7 supports 16-bit immediates there is a danger
// that if pointer values change the layout of the generated code will change.
// To avoid this problem, always generate pointers (and thus Imm32s constructed
// from ImmPtrs) with a code sequence that is able to represent any pointer
// value - don't use a more compact form in these cases.
+ // Same for MIPS.
bool m_isPointer;
#endif
};
#ifndef AssemblerBuffer_h
#define AssemblerBuffer_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER)
#include "stdint.h"
#ifndef AssemblerBufferWithConstantPool_h
#define AssemblerBufferWithConstantPool_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER)
#include "AssemblerBuffer.h"
#ifndef CodeLocation_h
#define CodeLocation_h
-#include <wtf/Platform.h>
#include <MacroAssemblerCodeRef.h>
#ifndef LinkBuffer_h
#define LinkBuffer_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER)
#include <MacroAssembler.h>
--- /dev/null
+/*
+ * Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2009 University of Szeged
+ * All rights reserved.
+ * Copyright (C) 2010 MIPS Technologies, Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY MIPS TECHNOLOGIES, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL MIPS TECHNOLOGIES, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef MIPSAssembler_h
+#define MIPSAssembler_h
+
+#if ENABLE(ASSEMBLER) && CPU(MIPS)
+
+#include "AssemblerBuffer.h"
+#include <wtf/Assertions.h>
+#include <wtf/SegmentedVector.h>
+
+namespace JSC {
+
+typedef uint32_t MIPSWord;
+
+namespace MIPSRegisters {
+typedef enum {
+ r0 = 0,
+ r1,
+ r2,
+ r3,
+ r4,
+ r5,
+ r6,
+ r7,
+ r8,
+ r9,
+ r10,
+ r11,
+ r12,
+ r13,
+ r14,
+ r15,
+ r16,
+ r17,
+ r18,
+ r19,
+ r20,
+ r21,
+ r22,
+ r23,
+ r24,
+ r25,
+ r26,
+ r27,
+ r28,
+ r29,
+ r30,
+ r31,
+ zero = r0,
+ at = r1,
+ v0 = r2,
+ v1 = r3,
+ a0 = r4,
+ a1 = r5,
+ a2 = r6,
+ a3 = r7,
+ t0 = r8,
+ t1 = r9,
+ t2 = r10,
+ t3 = r11,
+ t4 = r12,
+ t5 = r13,
+ t6 = r14,
+ t7 = r15,
+ s0 = r16,
+ s1 = r17,
+ s2 = r18,
+ s3 = r19,
+ s4 = r20,
+ s5 = r21,
+ s6 = r22,
+ s7 = r23,
+ t8 = r24,
+ t9 = r25,
+ k0 = r26,
+ k1 = r27,
+ gp = r28,
+ sp = r29,
+ fp = r30,
+ ra = r31
+} RegisterID;
+
+typedef enum {
+ f0,
+ f1,
+ f2,
+ f3,
+ f4,
+ f5,
+ f6,
+ f7,
+ f8,
+ f9,
+ f10,
+ f11,
+ f12,
+ f13,
+ f14,
+ f15,
+ f16,
+ f17,
+ f18,
+ f19,
+ f20,
+ f21,
+ f22,
+ f23,
+ f24,
+ f25,
+ f26,
+ f27,
+ f28,
+ f29,
+ f30,
+ f31
+} FPRegisterID;
+
+} // namespace MIPSRegisters
+
+class MIPSAssembler {
+public:
+ typedef MIPSRegisters::RegisterID RegisterID;
+ typedef MIPSRegisters::FPRegisterID FPRegisterID;
+ typedef SegmentedVector<int, 64> Jumps;
+
+ MIPSAssembler()
+ {
+ }
+
+ // MIPS instruction opcode field position
+ enum {
+ OP_SH_RD = 11,
+ OP_SH_RT = 16,
+ OP_SH_RS = 21,
+ OP_SH_SHAMT = 6,
+ OP_SH_CODE = 16,
+ OP_SH_FD = 6,
+ OP_SH_FS = 11,
+ OP_SH_FT = 16
+ };
+
+ class JmpSrc {
+ friend class MIPSAssembler;
+ public:
+ JmpSrc()
+ : m_offset(-1)
+ {
+ }
+
+ private:
+ JmpSrc(int offset)
+ : m_offset(offset)
+ {
+ }
+
+ int m_offset;
+ };
+
+ class JmpDst {
+ friend class MIPSAssembler;
+ public:
+ JmpDst()
+ : m_offset(-1)
+ , m_used(false)
+ {
+ }
+
+ bool isUsed() const { return m_used; }
+ void used() { m_used = true; }
+ private:
+ JmpDst(int offset)
+ : m_offset(offset)
+ , m_used(false)
+ {
+ ASSERT(m_offset == offset);
+ }
+
+ int m_offset : 31;
+ int m_used : 1;
+ };
+
+ void emitInst(MIPSWord op)
+ {
+ void* oldBase = m_buffer.data();
+
+ m_buffer.putInt(op);
+
+ void* newBase = m_buffer.data();
+ if (oldBase != newBase)
+ relocateJumps(oldBase, newBase);
+ }
+
+ void nop()
+ {
+ emitInst(0x00000000);
+ }
+
+ /* Need to insert one load data delay nop for mips1. */
+ void loadDelayNop()
+ {
+#if WTF_MIPS_ISA(1)
+ nop();
+#endif
+ }
+
+ /* Need to insert one coprocessor access delay nop for mips1. */
+ void copDelayNop()
+ {
+#if WTF_MIPS_ISA(1)
+ nop();
+#endif
+ }
+
+ void move(RegisterID rd, RegisterID rs)
+ {
+ /* addu */
+ emitInst(0x00000021 | (rd << OP_SH_RD) | (rs << OP_SH_RS));
+ }
+
+ /* Set an immediate value to a register. This may generate 1 or 2
+ instructions. */
+ void li(RegisterID dest, int imm)
+ {
+ if (imm >= -32768 && imm <= 32767)
+ addiu(dest, MIPSRegisters::zero, imm);
+ else if (imm >= 0 && imm < 65536)
+ ori(dest, MIPSRegisters::zero, imm);
+ else {
+ lui(dest, imm >> 16);
+ if (imm & 0xffff)
+ ori(dest, dest, imm);
+ }
+ }
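A worked example of the selection above (a standalone sketch with an illustrative helper, not JSC code): values that fit a signed or unsigned 16-bit immediate need a single addiu or ori, and wider values need lui for the upper halfword plus, when the lower halfword is non-zero, an ori.

// Sketch of the instruction-count rule used by li() above.
#include <cstdio>

static int instructionsForImmediate(int imm)
{
    if (imm >= -32768 && imm <= 32767)
        return 1;                  // addiu dest, zero, imm
    if (imm >= 0 && imm < 65536)
        return 1;                  // ori dest, zero, imm
    return (imm & 0xffff) ? 2 : 1; // lui dest, imm >> 16, then ori dest, dest, imm if needed
}

int main()
{
    printf("%d %d %d\n",
           instructionsForImmediate(-5),          // 1: addiu
           instructionsForImmediate(0xabcd),      // 1: ori
           instructionsForImmediate(0x12345678)); // 2: lui + ori
    return 0;
}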
+
+ void lui(RegisterID rt, int imm)
+ {
+ emitInst(0x3c000000 | (rt << OP_SH_RT) | (imm & 0xffff));
+ }
+
+ void addiu(RegisterID rt, RegisterID rs, int imm)
+ {
+ emitInst(0x24000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (imm & 0xffff));
+ }
+
+ void addu(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x00000021 | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+ }
+
+ void subu(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x00000023 | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+ }
+
+ void mult(RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x00000018 | (rs << OP_SH_RS) | (rt << OP_SH_RT));
+ }
+
+ void mfhi(RegisterID rd)
+ {
+ emitInst(0x00000010 | (rd << OP_SH_RD));
+ }
+
+ void mflo(RegisterID rd)
+ {
+ emitInst(0x00000012 | (rd << OP_SH_RD));
+ }
+
+ void mul(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+#if WTF_MIPS_ISA_AT_LEAST(32)
+ emitInst(0x70000002 | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+#else
+ mult(rs, rt);
+ mflo(rd);
+#endif
+ }
+
+ void andInsn(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x00000024 | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+ }
+
+ void andi(RegisterID rt, RegisterID rs, int imm)
+ {
+ emitInst(0x30000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (imm & 0xffff));
+ }
+
+ void nor(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x00000027 | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+ }
+
+ void orInsn(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x00000025 | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+ }
+
+ void ori(RegisterID rt, RegisterID rs, int imm)
+ {
+ emitInst(0x34000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (imm & 0xffff));
+ }
+
+ void xorInsn(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x00000026 | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+ }
+
+ void xori(RegisterID rt, RegisterID rs, int imm)
+ {
+ emitInst(0x38000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (imm & 0xffff));
+ }
+
+ void slt(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x0000002a | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+ }
+
+ void sltu(RegisterID rd, RegisterID rs, RegisterID rt)
+ {
+ emitInst(0x0000002b | (rd << OP_SH_RD) | (rs << OP_SH_RS)
+ | (rt << OP_SH_RT));
+ }
+
+ void sltiu(RegisterID rt, RegisterID rs, int imm)
+ {
+ emitInst(0x2c000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (imm & 0xffff));
+ }
+
+ void sll(RegisterID rd, RegisterID rt, int shamt)
+ {
+ emitInst(0x00000000 | (rd << OP_SH_RD) | (rt << OP_SH_RT)
+ | ((shamt & 0x1f) << OP_SH_SHAMT));
+ }
+
+ void sllv(RegisterID rd, RegisterID rt, int rs)
+ {
+ emitInst(0x00000004 | (rd << OP_SH_RD) | (rt << OP_SH_RT)
+ | (rs << OP_SH_RS));
+ }
+
+ void sra(RegisterID rd, RegisterID rt, int shamt)
+ {
+ emitInst(0x00000003 | (rd << OP_SH_RD) | (rt << OP_SH_RT)
+ | ((shamt & 0x1f) << OP_SH_SHAMT));
+ }
+
+ void srav(RegisterID rd, RegisterID rt, RegisterID rs)
+ {
+ emitInst(0x00000007 | (rd << OP_SH_RD) | (rt << OP_SH_RT)
+ | (rs << OP_SH_RS));
+ }
+
+ void lbu(RegisterID rt, RegisterID rs, int offset)
+ {
+ emitInst(0x90000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ loadDelayNop();
+ }
+
+ void lw(RegisterID rt, RegisterID rs, int offset)
+ {
+ emitInst(0x8c000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ loadDelayNop();
+ }
+
+ void lwl(RegisterID rt, RegisterID rs, int offset)
+ {
+ emitInst(0x88000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ loadDelayNop();
+ }
+
+ void lwr(RegisterID rt, RegisterID rs, int offset)
+ {
+ emitInst(0x98000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ loadDelayNop();
+ }
+
+ void lhu(RegisterID rt, RegisterID rs, int offset)
+ {
+ emitInst(0x94000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ loadDelayNop();
+ }
+
+ void sw(RegisterID rt, RegisterID rs, int offset)
+ {
+ emitInst(0xac000000 | (rt << OP_SH_RT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ }
+
+ void jr(RegisterID rs)
+ {
+ emitInst(0x00000008 | (rs << OP_SH_RS));
+ }
+
+ void jalr(RegisterID rs)
+ {
+ emitInst(0x0000f809 | (rs << OP_SH_RS));
+ }
+
+ void jal()
+ {
+ emitInst(0x0c000000);
+ }
+
+ void bkpt()
+ {
+ int value = 512; /* BRK_BUG */
+ emitInst(0x0000000d | ((value & 0x3ff) << OP_SH_CODE));
+ }
+
+ void bgez(RegisterID rs, int imm)
+ {
+ emitInst(0x04010000 | (rs << OP_SH_RS) | (imm & 0xffff));
+ }
+
+ void bltz(RegisterID rs, int imm)
+ {
+ emitInst(0x04000000 | (rs << OP_SH_RS) | (imm & 0xffff));
+ }
+
+ void beq(RegisterID rs, RegisterID rt, int imm)
+ {
+ emitInst(0x10000000 | (rs << OP_SH_RS) | (rt << OP_SH_RT) | (imm & 0xffff));
+ }
+
+ void bne(RegisterID rs, RegisterID rt, int imm)
+ {
+ emitInst(0x14000000 | (rs << OP_SH_RS) | (rt << OP_SH_RT) | (imm & 0xffff));
+ }
+
+ void bc1t()
+ {
+ emitInst(0x45010000);
+ }
+
+ void bc1f()
+ {
+ emitInst(0x45000000);
+ }
+
+ JmpSrc newJmpSrc()
+ {
+ return JmpSrc(m_buffer.size());
+ }
+
+ void appendJump()
+ {
+ m_jumps.append(m_buffer.size());
+ }
+
+ void addd(FPRegisterID fd, FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200000 | (fd << OP_SH_FD) | (fs << OP_SH_FS)
+ | (ft << OP_SH_FT));
+ }
+
+ void subd(FPRegisterID fd, FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200001 | (fd << OP_SH_FD) | (fs << OP_SH_FS)
+ | (ft << OP_SH_FT));
+ }
+
+ void muld(FPRegisterID fd, FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200002 | (fd << OP_SH_FD) | (fs << OP_SH_FS)
+ | (ft << OP_SH_FT));
+ }
+
+ void lwc1(FPRegisterID ft, RegisterID rs, int offset)
+ {
+ emitInst(0xc4000000 | (ft << OP_SH_FT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ copDelayNop();
+ }
+
+ void ldc1(FPRegisterID ft, RegisterID rs, int offset)
+ {
+ emitInst(0xd4000000 | (ft << OP_SH_FT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ }
+
+ void swc1(FPRegisterID ft, RegisterID rs, int offset)
+ {
+ emitInst(0xe4000000 | (ft << OP_SH_FT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ }
+
+ void sdc1(FPRegisterID ft, RegisterID rs, int offset)
+ {
+ emitInst(0xf4000000 | (ft << OP_SH_FT) | (rs << OP_SH_RS)
+ | (offset & 0xffff));
+ }
+
+ void mtc1(RegisterID rt, FPRegisterID fs)
+ {
+ emitInst(0x44800000 | (fs << OP_SH_FS) | (rt << OP_SH_RT));
+ copDelayNop();
+ }
+
+ void mfc1(RegisterID rt, FPRegisterID fs)
+ {
+ emitInst(0x44000000 | (fs << OP_SH_FS) | (rt << OP_SH_RT));
+ copDelayNop();
+ }
+
+ void truncwd(FPRegisterID fd, FPRegisterID fs)
+ {
+ emitInst(0x4620000d | (fd << OP_SH_FD) | (fs << OP_SH_FS));
+ }
+
+ void cvtdw(FPRegisterID fd, FPRegisterID fs)
+ {
+ emitInst(0x46800021 | (fd << OP_SH_FD) | (fs << OP_SH_FS));
+ }
+
+ void ceqd(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200032 | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void cngtd(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x4620003f | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void cnged(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x4620003d | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void cltd(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x4620003c | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void cled(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x4620003e | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void cueqd(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200033 | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void coled(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200036 | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void coltd(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200034 | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void culed(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200037 | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ void cultd(FPRegisterID fs, FPRegisterID ft)
+ {
+ emitInst(0x46200035 | (fs << OP_SH_FS) | (ft << OP_SH_FT));
+ copDelayNop();
+ }
+
+ // General helpers
+
+ JmpDst label()
+ {
+ return JmpDst(m_buffer.size());
+ }
+
+ JmpDst align(int alignment)
+ {
+ while (!m_buffer.isAligned(alignment))
+ bkpt();
+
+ return label();
+ }
+
+ static void* getRelocatedAddress(void* code, JmpSrc jump)
+ {
+ ASSERT(jump.m_offset != -1);
+ void* b = reinterpret_cast<void*>((reinterpret_cast<intptr_t>(code)) + jump.m_offset);
+ return b;
+ }
+
+ static void* getRelocatedAddress(void* code, JmpDst label)
+ {
+ void* b = reinterpret_cast<void*>((reinterpret_cast<intptr_t>(code)) + label.m_offset);
+ return b;
+ }
+
+ static int getDifferenceBetweenLabels(JmpDst from, JmpDst to)
+ {
+ return to.m_offset - from.m_offset;
+ }
+
+ static int getDifferenceBetweenLabels(JmpDst from, JmpSrc to)
+ {
+ return to.m_offset - from.m_offset;
+ }
+
+ static int getDifferenceBetweenLabels(JmpSrc from, JmpDst to)
+ {
+ return to.m_offset - from.m_offset;
+ }
+
+ // Assembler admin methods:
+
+ size_t size() const
+ {
+ return m_buffer.size();
+ }
+
+ void* executableCopy(ExecutablePool* allocator)
+ {
+ void *result = m_buffer.executableCopy(allocator);
+ if (!result)
+ return 0;
+
+ relocateJumps(m_buffer.data(), result);
+ return result;
+ }
+
+ static unsigned getCallReturnOffset(JmpSrc call)
+ {
+ // The return address is after a call and a delay slot instruction
+ return call.m_offset;
+ }
+
+ // Linking & patching:
+ //
+ // 'link' and 'patch' methods are for use on unprotected code - such as the code
+ // within the AssemblerBuffer, and code being patched by the patch buffer. Once
+ // code has been finalized it is (platform support permitting) within a non-
+ // writable region of memory; to modify the code in an execute-only executable
+ // pool the 'repatch' and 'relink' methods should be used.
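+ //
+ // A minimal sketch of the intended flow (illustrative only; the concrete
+ // call sites live in the MacroAssembler/LinkBuffer layers, and 'pool',
+ // 'jumpLocation' and 'newTarget' are placeholder names):
+ //
+ //     JmpSrc src = ...;                  // recorded when a branch was emitted
+ //     linkJump(src, label());            // patch it while the buffer is writable
+ //     void* code = executableCopy(pool); // finalize into executable memory
+ //     ...
+ //     MIPSAssembler::relinkJump(jumpLocation, newTarget); // on finalized code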
+
+ void linkJump(JmpSrc from, JmpDst to)
+ {
+ ASSERT(to.m_offset != -1);
+ ASSERT(from.m_offset != -1);
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(reinterpret_cast<intptr_t>(m_buffer.data()) + from.m_offset);
+ MIPSWord* toPos = reinterpret_cast<MIPSWord*>(reinterpret_cast<intptr_t>(m_buffer.data()) + to.m_offset);
+
+ ASSERT(!(*(insn - 1)) && !(*(insn - 2)) && !(*(insn - 3)) && !(*(insn - 5)));
+ insn = insn - 6;
+ linkWithOffset(insn, toPos);
+ }
+
+ static void linkJump(void* code, JmpSrc from, void* to)
+ {
+ ASSERT(from.m_offset != -1);
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(reinterpret_cast<intptr_t>(code) + from.m_offset);
+
+ ASSERT(!(*(insn - 1)) && !(*(insn - 2)) && !(*(insn - 3)) && !(*(insn - 5)));
+ insn = insn - 6;
+ linkWithOffset(insn, to);
+ }
+
+ static void linkCall(void* code, JmpSrc from, void* to)
+ {
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(reinterpret_cast<intptr_t>(code) + from.m_offset);
+ linkCallInternal(insn, to);
+ }
+
+ static void linkPointer(void* code, JmpDst from, void* to)
+ {
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(reinterpret_cast<intptr_t>(code) + from.m_offset);
+ ASSERT((*insn & 0xffe00000) == 0x3c000000); // lui
+ *insn = (*insn & 0xffff0000) | ((reinterpret_cast<intptr_t>(to) >> 16) & 0xffff);
+ insn++;
+ ASSERT((*insn & 0xfc000000) == 0x34000000); // ori
+ *insn = (*insn & 0xffff0000) | (reinterpret_cast<intptr_t>(to) & 0xffff);
+ }
+
+ static void relinkJump(void* from, void* to)
+ {
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(from);
+
+ ASSERT(!(*(insn - 1)) && !(*(insn - 5)));
+ insn = insn - 6;
+ int flushSize = linkWithOffset(insn, to);
+
+ ExecutableAllocator::cacheFlush(insn, flushSize);
+ }
+
+ static void relinkCall(void* from, void* to)
+ {
+ void* start;
+ int size = linkCallInternal(from, to);
+ if (size == sizeof(MIPSWord))
+ start = reinterpret_cast<void*>(reinterpret_cast<intptr_t>(from) - 2 * sizeof(MIPSWord));
+ else
+ start = reinterpret_cast<void*>(reinterpret_cast<intptr_t>(from) - 4 * sizeof(MIPSWord));
+
+ ExecutableAllocator::cacheFlush(start, size);
+ }
+
+ static void repatchInt32(void* from, int32_t to)
+ {
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(from);
+ ASSERT((*insn & 0xffe00000) == 0x3c000000); // lui
+ *insn = (*insn & 0xffff0000) | ((to >> 16) & 0xffff);
+ insn++;
+ ASSERT((*insn & 0xfc000000) == 0x34000000); // ori
+ *insn = (*insn & 0xffff0000) | (to & 0xffff);
+ insn--;
+ ExecutableAllocator::cacheFlush(insn, 2 * sizeof(MIPSWord));
+ }
+
+ static void repatchPointer(void* from, void* to)
+ {
+ repatchInt32(from, reinterpret_cast<int32_t>(to));
+ }
+
+ static void repatchLoadPtrToLEA(void* from)
+ {
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(from);
+ insn = insn + 3;
+ ASSERT((*insn & 0xfc000000) == 0x8c000000); // lw
+ /* lw -> addiu */
+ *insn = 0x24000000 | (*insn & 0x03ffffff);
+
+ ExecutableAllocator::cacheFlush(insn, sizeof(MIPSWord));
+ }
+
+private:
+
+ /* Update each recorded jump in the buffer at newBase. */
+ void relocateJumps(void* oldBase, void* newBase)
+ {
+ // Check each jump
+ for (Jumps::Iterator iter = m_jumps.begin(); iter != m_jumps.end(); ++iter) {
+ int pos = *iter;
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(reinterpret_cast<intptr_t>(newBase) + pos);
+ insn = insn + 2;
+ // Need to make sure we have 5 valid instructions after pos
+ if ((unsigned int)pos >= m_buffer.size() - 5 * sizeof(MIPSWord))
+ continue;
+
+ if ((*insn & 0xfc000000) == 0x08000000) { // j
+ int offset = *insn & 0x03ffffff;
+ int oldInsnAddress = (int)insn - (int)newBase + (int)oldBase;
+ int topFourBits = (oldInsnAddress + 4) >> 28;
+ int oldTargetAddress = (topFourBits << 28) | (offset << 2);
+ int newTargetAddress = oldTargetAddress - (int)oldBase + (int)newBase;
+ int newInsnAddress = (int)insn;
+ if (((newInsnAddress + 4) >> 28) == (newTargetAddress >> 28))
+ *insn = 0x08000000 | ((newTargetAddress >> 2) & 0x3ffffff);
+ else {
+ /* lui */
+ *insn = 0x3c000000 | (MIPSRegisters::t9 << OP_SH_RT) | ((newTargetAddress >> 16) & 0xffff);
+ /* ori */
+ *(insn + 1) = 0x34000000 | (MIPSRegisters::t9 << OP_SH_RT) | (MIPSRegisters::t9 << OP_SH_RS) | (newTargetAddress & 0xffff);
+ /* jr */
+ *(insn + 2) = 0x00000008 | (MIPSRegisters::t9 << OP_SH_RS);
+ }
+ } else if ((*insn & 0xffe00000) == 0x3c000000) { // lui
+ int high = (*insn & 0xffff) << 16;
+ int low = *(insn + 1) & 0xffff;
+ int oldTargetAddress = high | low;
+ int newTargetAddress = oldTargetAddress - (int)oldBase + (int)newBase;
+ /* lui */
+ *insn = 0x3c000000 | (MIPSRegisters::t9 << OP_SH_RT) | ((newTargetAddress >> 16) & 0xffff);
+ /* ori */
+ *(insn + 1) = 0x34000000 | (MIPSRegisters::t9 << OP_SH_RT) | (MIPSRegisters::t9 << OP_SH_RS) | (newTargetAddress & 0xffff);
+ }
+ }
+ }
+
+ static int linkWithOffset(MIPSWord* insn, void* to)
+ {
+ ASSERT((*insn & 0xfc000000) == 0x10000000 // beq
+ || (*insn & 0xfc000000) == 0x14000000 // bne
+ || (*insn & 0xffff0000) == 0x45010000 // bc1t
+ || (*insn & 0xffff0000) == 0x45000000); // bc1f
+ intptr_t diff = (reinterpret_cast<intptr_t>(to)
+ - reinterpret_cast<intptr_t>(insn) - 4) >> 2;
+
+ if (diff < -32768 || diff > 32767 || *(insn + 2) != 0x10000003) {
+ /*
+ Convert the sequence:
+ beq $2, $3, target
+ nop
+ b 1f
+ nop
+ nop
+ nop
+ 1:
+
+ to the new sequence if possible:
+ bne $2, $3, 1f
+ nop
+ j target
+ nop
+ nop
+ nop
+ 1:
+
+ OR to the new sequence:
+ bne $2, $3, 1f
+ nop
+ lui $25, target >> 16
+ ori $25, $25, target & 0xffff
+ jr $25
+ nop
+ 1:
+
+ Note: beq/bne/bc1t/bc1f are converted to bne/beq/bc1f/bc1t.
+ */
+
+ if (*(insn + 2) == 0x10000003) {
+ if ((*insn & 0xfc000000) == 0x10000000) // beq
+ *insn = (*insn & 0x03ff0000) | 0x14000005; // bne
+ else if ((*insn & 0xfc000000) == 0x14000000) // bne
+ *insn = (*insn & 0x03ff0000) | 0x10000005; // beq
+ else if ((*insn & 0xffff0000) == 0x45010000) // bc1t
+ *insn = 0x45000005; // bc1f
+ else if ((*insn & 0xffff0000) == 0x45000000) // bc1f
+ *insn = 0x45010005; // bc1t
+ else
+ ASSERT(0);
+ }
+
+ insn = insn + 2;
+ if ((reinterpret_cast<intptr_t>(insn) + 4) >> 28
+ == reinterpret_cast<intptr_t>(to) >> 28) {
+ *insn = 0x08000000 | ((reinterpret_cast<intptr_t>(to) >> 2) & 0x3ffffff);
+ *(insn + 1) = 0;
+ return 4 * sizeof(MIPSWord);
+ }
+
+ intptr_t newTargetAddress = reinterpret_cast<intptr_t>(to);
+ /* lui */
+ *insn = 0x3c000000 | (MIPSRegisters::t9 << OP_SH_RT) | ((newTargetAddress >> 16) & 0xffff);
+ /* ori */
+ *(insn + 1) = 0x34000000 | (MIPSRegisters::t9 << OP_SH_RT) | (MIPSRegisters::t9 << OP_SH_RS) | (newTargetAddress & 0xffff);
+ /* jr */
+ *(insn + 2) = 0x00000008 | (MIPSRegisters::t9 << OP_SH_RS);
+ return 5 * sizeof(MIPSWord);
+ }
+
+ *insn = (*insn & 0xffff0000) | (diff & 0xffff);
+ return sizeof(MIPSWord);
+ }
+
+ static int linkCallInternal(void* from, void* to)
+ {
+ MIPSWord* insn = reinterpret_cast<MIPSWord*>(from);
+ insn = insn - 4;
+
+ if ((*(insn + 2) & 0xfc000000) == 0x0c000000) { // jal
+ if ((reinterpret_cast<intptr_t>(from) - 4) >> 28
+ == reinterpret_cast<intptr_t>(to) >> 28) {
+ *(insn + 2) = 0x0c000000 | ((reinterpret_cast<intptr_t>(to) >> 2) & 0x3ffffff);
+ return sizeof(MIPSWord);
+ }
+
+ /* lui $25, (to >> 16) & 0xffff */
+ *insn = 0x3c000000 | (MIPSRegisters::t9 << OP_SH_RT) | ((reinterpret_cast<intptr_t>(to) >> 16) & 0xffff);
+ /* ori $25, $25, to & 0xffff */
+ *(insn + 1) = 0x34000000 | (MIPSRegisters::t9 << OP_SH_RT) | (MIPSRegisters::t9 << OP_SH_RS) | (reinterpret_cast<intptr_t>(to) & 0xffff);
+ /* jalr $25 */
+ *(insn + 2) = 0x0000f809 | (MIPSRegisters::t9 << OP_SH_RS);
+ return 3 * sizeof(MIPSWord);
+ }
+
+ ASSERT((*insn & 0xffe00000) == 0x3c000000); // lui
+ ASSERT((*(insn + 1) & 0xfc000000) == 0x34000000); // ori
+
+ /* lui */
+ *insn = (*insn & 0xffff0000) | ((reinterpret_cast<intptr_t>(to) >> 16) & 0xffff);
+ /* ori */
+ *(insn + 1) = (*(insn + 1) & 0xffff0000) | (reinterpret_cast<intptr_t>(to) & 0xffff);
+ return 2 * sizeof(MIPSWord);
+ }
+
+ AssemblerBuffer m_buffer;
+ Jumps m_jumps;
+};
+
+} // namespace JSC
+
+#endif // ENABLE(ASSEMBLER) && CPU(MIPS)
+
+#endif // MIPSAssembler_h
#ifndef MacroAssembler_h
#define MacroAssembler_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER)
#if CPU(ARM_THUMB2)
#include "MacroAssemblerARM.h"
namespace JSC { typedef MacroAssemblerARM MacroAssemblerBase; };
+#elif CPU(MIPS)
+#include "MacroAssemblerMIPS.h"
+namespace JSC {
+typedef MacroAssemblerMIPS MacroAssemblerBase;
+};
+
#elif CPU(X86)
#include "MacroAssemblerX86.h"
namespace JSC { typedef MacroAssemblerX86 MacroAssemblerBase; };
{
return branchSub32(cond, imm, dest);
}
+ using MacroAssemblerBase::branchTest8;
+ Jump branchTest8(Condition cond, ExtendedAddress address, Imm32 mask = Imm32(-1))
+ {
+ return MacroAssemblerBase::branchTest8(cond, Address(address.base, address.offset), mask);
+ }
#endif
};
/*
* Copyright (C) 2008 Apple Inc.
- * Copyright (C) 2009 University of Szeged
+ * Copyright (C) 2009, 2010 University of Szeged
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
#ifndef MacroAssemblerARM_h
#define MacroAssemblerARM_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER) && CPU(ARM_TRADITIONAL)
#include "ARMAssembler.h"
static const int DoubleConditionBitSpecial = 0x10;
COMPILE_ASSERT(!(DoubleConditionBitSpecial & DoubleConditionMask), DoubleConditionBitSpecial_should_not_interfere_with_ARMAssembler_Condition_codes);
public:
+ typedef ARMRegisters::FPRegisterID FPRegisterID;
+
enum Condition {
Equal = ARMAssembler::EQ,
NotEqual = ARMAssembler::NE,
{
m_assembler.movs_r(dest, m_assembler.asr(dest, imm.m_value & 0x1f));
}
+
+ void urshift32(RegisterID shift_amount, RegisterID dest)
+ {
+ ARMWord w = ARMAssembler::getOp2(0x1f);
+ ASSERT(w != ARMAssembler::INVALID_IMM);
+ m_assembler.and_r(ARMRegisters::S0, shift_amount, w);
+
+ m_assembler.movs_r(dest, m_assembler.lsr_r(dest, ARMRegisters::S0));
+ }
+
+ void urshift32(Imm32 imm, RegisterID dest)
+ {
+ m_assembler.movs_r(dest, m_assembler.lsr(dest, imm.m_value & 0x1f));
+ }
void sub32(RegisterID src, RegisterID dest)
{
m_assembler.eors_r(dest, dest, m_assembler.getImm(imm.m_value, ARMRegisters::S0));
}
+ void load8(ImplicitAddress address, RegisterID dest)
+ {
+ m_assembler.dataTransfer32(true, dest, address.base, address.offset, true);
+ }
+
void load32(ImplicitAddress address, RegisterID dest)
{
m_assembler.dataTransfer32(true, dest, address.base, address.offset);
else
m_assembler.ldrh_d(dest, ARMRegisters::S0, ARMAssembler::getOp2Byte(-address.offset));
}
+
+ void load16(ImplicitAddress address, RegisterID dest)
+ {
+ if (address.offset >= 0)
+ m_assembler.ldrh_u(dest, address.base, ARMAssembler::getOp2Byte(address.offset));
+ else
+ m_assembler.ldrh_d(dest, address.base, ARMAssembler::getOp2Byte(-address.offset));
+ }
DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
{
move(src, dest);
}
+ Jump branch8(Condition cond, Address left, Imm32 right)
+ {
+ load8(left, ARMRegisters::S1);
+ return branch32(cond, ARMRegisters::S1, right);
+ }
+
Jump branch32(Condition cond, RegisterID left, RegisterID right, int useConstantPool = 0)
{
m_assembler.cmp_r(left, right);
return m_assembler.jmp(ARMCondition(cond));
}
+ Jump branchTest8(Condition cond, Address address, Imm32 mask = Imm32(-1))
+ {
+ load8(address, ARMRegisters::S1);
+ return branchTest32(cond, ARMRegisters::S1, mask);
+ }
+
Jump branchTest32(Condition cond, RegisterID reg, RegisterID mask)
{
ASSERT((cond == Zero) || (cond == NonZero));
void jump(RegisterID target)
{
- move(target, ARMRegisters::pc);
+ m_assembler.bx(target);
}
void jump(Address address)
return Jump(m_assembler.jmp(ARMCondition(cond)));
}
+ Jump branchNeg32(Condition cond, RegisterID srcDest)
+ {
+ ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
+ neg32(srcDest);
+ return Jump(m_assembler.jmp(ARMCondition(cond)));
+ }
+
Jump branchOr32(Condition cond, RegisterID src, RegisterID dest)
{
ASSERT((cond == Signed) || (cond == Zero) || (cond == NonZero));
Call nearCall()
{
+#if WTF_ARM_ARCH_AT_LEAST(5)
+ ensureSpace(2 * sizeof(ARMWord), sizeof(ARMWord));
+ m_assembler.loadBranchTarget(ARMRegisters::S1, ARMAssembler::AL, true);
+ return Call(m_assembler.blx(ARMRegisters::S1), Call::LinkableNear);
+#else
prepareCall();
return Call(m_assembler.jmp(ARMAssembler::AL, true), Call::LinkableNear);
+#endif
}
Call call(RegisterID target)
{
- prepareCall();
- move(ARMRegisters::pc, target);
+ m_assembler.blx(target);
JmpSrc jmpSrc;
return Call(jmpSrc, Call::None);
}
void ret()
{
- m_assembler.mov_r(ARMRegisters::pc, linkRegister);
+ m_assembler.bx(linkRegister);
}
void set32(Condition cond, RegisterID left, RegisterID right, RegisterID dest)
Call call()
{
+#if WTF_ARM_ARCH_AT_LEAST(5)
+ ensureSpace(2 * sizeof(ARMWord), sizeof(ARMWord));
+ m_assembler.loadBranchTarget(ARMRegisters::S1, ARMAssembler::AL, true);
+ return Call(m_assembler.blx(ARMRegisters::S1), Call::Linkable);
+#else
prepareCall();
return Call(m_assembler.jmp(ARMAssembler::AL, true), Call::Linkable);
+#endif
}
Call tailRecursiveCall()
return false;
}
+ bool supportsFloatingPointSqrt() const
+ {
+ return s_isVFPPresent;
+ }
+
void loadDouble(ImplicitAddress address, FPRegisterID dest)
{
m_assembler.doubleTransfer(true, dest, address.base, address.offset);
}
- void loadDouble(void* address, FPRegisterID dest)
+ void loadDouble(const void* address, FPRegisterID dest)
{
m_assembler.ldr_un_imm(ARMRegisters::S0, (ARMWord)address);
m_assembler.fdtr_u(true, dest, ARMRegisters::S0, 0);
mulDouble(ARMRegisters::SD0, dest);
}
+ void sqrtDouble(FPRegisterID src, FPRegisterID dest)
+ {
+ m_assembler.fsqrtd_r(dest, src);
+ }
+
void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
{
m_assembler.fmsr_r(dest, src);
void prepareCall()
{
+#if WTF_ARM_ARCH_VERSION < 5
ensureSpace(2 * sizeof(ARMWord), sizeof(ARMWord));
m_assembler.mov_r(linkRegister, ARMRegisters::pc);
+#endif
}
void call32(RegisterID base, int32_t offset)
{
+#if WTF_ARM_ARCH_AT_LEAST(5)
+ int targetReg = ARMRegisters::S1;
+#else
+ int targetReg = ARMRegisters::pc;
+#endif
+ int tmpReg = ARMRegisters::S1;
+
if (base == ARMRegisters::sp)
offset += 4;
if (offset >= 0) {
if (offset <= 0xfff) {
prepareCall();
- m_assembler.dtr_u(true, ARMRegisters::pc, base, offset);
+ m_assembler.dtr_u(true, targetReg, base, offset);
} else if (offset <= 0xfffff) {
- m_assembler.add_r(ARMRegisters::S0, base, ARMAssembler::OP2_IMM | (offset >> 12) | (10 << 8));
+ m_assembler.add_r(tmpReg, base, ARMAssembler::OP2_IMM | (offset >> 12) | (10 << 8));
prepareCall();
- m_assembler.dtr_u(true, ARMRegisters::pc, ARMRegisters::S0, offset & 0xfff);
+ m_assembler.dtr_u(true, targetReg, tmpReg, offset & 0xfff);
} else {
- ARMWord reg = m_assembler.getImm(offset, ARMRegisters::S0);
+ ARMWord reg = m_assembler.getImm(offset, tmpReg);
prepareCall();
- m_assembler.dtr_ur(true, ARMRegisters::pc, base, reg);
+ m_assembler.dtr_ur(true, targetReg, base, reg);
}
} else {
offset = -offset;
if (offset <= 0xfff) {
prepareCall();
- m_assembler.dtr_d(true, ARMRegisters::pc, base, offset);
+ m_assembler.dtr_d(true, targetReg, base, offset);
} else if (offset <= 0xfffff) {
- m_assembler.sub_r(ARMRegisters::S0, base, ARMAssembler::OP2_IMM | (offset >> 12) | (10 << 8));
+ m_assembler.sub_r(tmpReg, base, ARMAssembler::OP2_IMM | (offset >> 12) | (10 << 8));
prepareCall();
- m_assembler.dtr_d(true, ARMRegisters::pc, ARMRegisters::S0, offset & 0xfff);
+ m_assembler.dtr_d(true, targetReg, tmpReg, offset & 0xfff);
} else {
- ARMWord reg = m_assembler.getImm(offset, ARMRegisters::S0);
+ ARMWord reg = m_assembler.getImm(offset, tmpReg);
prepareCall();
- m_assembler.dtr_dr(true, ARMRegisters::pc, base, reg);
+ m_assembler.dtr_dr(true, targetReg, base, reg);
}
}
+#if WTF_ARM_ARCH_AT_LEAST(5)
+ m_assembler.blx(targetReg);
+#endif
}
private:
/*
* Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2010 University of Szeged
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#ifndef MacroAssemblerARMv7_h
#define MacroAssemblerARMv7_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER)
#include "ARMv7Assembler.h"
// FIXME: switch dataTempRegister & addressTempRegister, or possibly use r7?
// - dTR is likely used more than aTR, and we'll get better instruction
// encoding if it's in the low 8 registers.
- static const ARMRegisters::RegisterID dataTempRegister = ARMRegisters::ip;
+ static const RegisterID dataTempRegister = ARMRegisters::ip;
static const RegisterID addressTempRegister = ARMRegisters::r3;
- static const FPRegisterID fpTempRegister = ARMRegisters::d7;
+ static const ARMRegisters::FPDoubleRegisterID fpTempRegister = ARMRegisters::d7;
+ inline ARMRegisters::FPSingleRegisterID fpTempRegisterAsSingle() { return ARMRegisters::asSingle(fpTempRegister); }
+
+public:
struct ArmAddress {
enum AddressType {
HasOffset,
};
public:
+ typedef ARMRegisters::FPDoubleRegisterID FPRegisterID;
static const Scale ScalePtr = TimesFour;
m_assembler.smull(dest, dataTempRegister, src, dataTempRegister);
}
+ void neg32(RegisterID srcDest)
+ {
+ m_assembler.neg(srcDest, srcDest);
+ }
+
void not32(RegisterID srcDest)
{
m_assembler.mvn(srcDest, srcDest);
{
m_assembler.asr(dest, dest, imm.m_value & 0x1f);
}
+
+ void urshift32(RegisterID shift_amount, RegisterID dest)
+ {
+ // Clamp the shift to the range 0..31
+ ARMThumbImmediate armImm = ARMThumbImmediate::makeEncodedImm(0x1f);
+ ASSERT(armImm.isValid());
+ m_assembler.ARM_and(dataTempRegister, shift_amount, armImm);
+
+ m_assembler.lsr(dest, dest, dataTempRegister);
+ }
+
+ void urshift32(Imm32 imm, RegisterID dest)
+ {
+ m_assembler.lsr(dest, dest, imm.m_value & 0x1f);
+ }
void sub32(RegisterID src, RegisterID dest)
{
}
}
+ void load8(ArmAddress address, RegisterID dest)
+ {
+ if (address.type == ArmAddress::HasIndex)
+ m_assembler.ldrb(dest, address.base, address.u.index, address.u.scale);
+ else if (address.u.offset >= 0) {
+ ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.u.offset);
+ ASSERT(armImm.isValid());
+ m_assembler.ldrb(dest, address.base, armImm);
+ } else {
+ ASSERT(address.u.offset >= -255);
+ m_assembler.ldrb(dest, address.base, address.u.offset, true, false);
+ }
+ }
+
void store32(RegisterID src, ArmAddress address)
{
if (address.type == ArmAddress::HasIndex)
m_assembler.ldr(dest, addressTempRegister, ARMThumbImmediate::makeUInt16(0));
}
+ void load8(ImplicitAddress address, RegisterID dest)
+ {
+ load8(setupArmAddress(address), dest);
+ }
+
DataLabel32 load32WithAddressOffsetPatch(Address address, RegisterID dest)
{
DataLabel32 label = moveWithPatch(Imm32(address.offset), dataTempRegister);
{
m_assembler.ldrh(dest, makeBaseIndexBase(address), address.index, address.scale);
}
+
+ void load16(ImplicitAddress address, RegisterID dest)
+ {
+ ARMThumbImmediate armImm = ARMThumbImmediate::makeUInt12(address.offset);
+ if (armImm.isValid())
+ m_assembler.ldrh(dest, address.base, armImm);
+ else {
+ move(Imm32(address.offset), dataTempRegister);
+ m_assembler.ldrh(dest, address.base, dataTempRegister);
+ }
+ }
DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
{
// In short, FIXME:.
bool supportsFloatingPointTruncate() const { return false; }
+ bool supportsFloatingPointSqrt() const
+ {
+ return false;
+ }
+
void loadDouble(ImplicitAddress address, FPRegisterID dest)
{
RegisterID base = address.base;
m_assembler.vldr(dest, base, offset);
}
+ void loadDouble(const void* address, FPRegisterID dest)
+ {
+ move(ImmPtr(address), addressTempRegister);
+ m_assembler.vldr(dest, addressTempRegister, 0);
+ }
+
void storeDouble(FPRegisterID src, ImplicitAddress address)
{
RegisterID base = address.base;
addDouble(fpTempRegister, dest);
}
+ void divDouble(FPRegisterID src, FPRegisterID dest)
+ {
+ m_assembler.vdiv_F64(dest, dest, src);
+ }
+
void subDouble(FPRegisterID src, FPRegisterID dest)
{
m_assembler.vsub_F64(dest, dest, src);
mulDouble(fpTempRegister, dest);
}
+ void sqrtDouble(FPRegisterID, FPRegisterID)
+ {
+ ASSERT_NOT_REACHED();
+ }
+
void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
{
- m_assembler.vmov(fpTempRegister, src);
- m_assembler.vcvt_F64_S32(dest, fpTempRegister);
+ m_assembler.vmov(fpTempRegisterAsSingle(), src);
+ m_assembler.vcvt_F64_S32(dest, fpTempRegisterAsSingle());
+ }
+
+ void convertInt32ToDouble(Address address, FPRegisterID dest)
+ {
+ // FIXME: load directly into the fpr!
+ load32(address, dataTempRegister);
+ m_assembler.vmov(fpTempRegisterAsSingle(), dataTempRegister);
+ m_assembler.vcvt_F64_S32(dest, fpTempRegisterAsSingle());
+ }
+
+ void convertInt32ToDouble(AbsoluteAddress address, FPRegisterID dest)
+ {
+ // FIXME: load directly into the fpr!
+ load32(address.m_ptr, dataTempRegister);
+ m_assembler.vmov(fpTempRegisterAsSingle(), dataTempRegister);
+ m_assembler.vcvt_F64_S32(dest, fpTempRegisterAsSingle());
}
Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
{
m_assembler.vcmp_F64(left, right);
- m_assembler.vmrs_APSR_nzcv_FPSCR();
+ m_assembler.vmrs();
if (cond == DoubleNotEqual) {
// ConditionNE jumps if NotEqual *or* unordered - force the unordered cases not to jump.
return jump();
}
+ // Convert 'src' to an integer, placing the result in 'dest'.
+ // If the result is not representable as a 32 bit value, branch.
+ // May also branch for some values that are representable in 32 bits
+ // (specifically, in this case, 0).
+ void branchConvertDoubleToInt32(FPRegisterID src, RegisterID dest, JumpList& failureCases, FPRegisterID)
+ {
+ m_assembler.vcvtr_S32_F64(fpTempRegisterAsSingle(), src);
+ m_assembler.vmov(dest, fpTempRegisterAsSingle());
+
+ // Convert the integer result back to float & compare to the original value - if not equal or unordered (NaN) then jump.
+ m_assembler.vcvt_F64_S32(fpTempRegister, fpTempRegisterAsSingle());
+ failureCases.append(branchDouble(DoubleNotEqualOrUnordered, src, fpTempRegister));
+
+ // If the result is zero, it might have been -0.0, and the double comparison won't catch this!
+ failureCases.append(branchTest32(Zero, dest));
+ }
+
+ void zeroDouble(FPRegisterID dest)
+ {
+ m_assembler.vmov_F64_0(dest);
+ }
// Stack manipulation operations:
//
return branch32(cond, addressTempRegister, Imm32(right.m_value << 16));
}
+ Jump branch8(Condition cond, RegisterID left, Imm32 right)
+ {
+ compare32(left, right);
+ return Jump(makeBranch(cond));
+ }
+
+ Jump branch8(Condition cond, Address left, Imm32 right)
+ {
+ // use addressTempRegister in case the branch8 we call uses dataTempRegister. :-/
+ load8(left, addressTempRegister);
+ return branch8(cond, addressTempRegister, right);
+ }
+
Jump branchTest32(Condition cond, RegisterID reg, RegisterID mask)
{
ASSERT((cond == Zero) || (cond == NonZero));
return branchTest32(cond, addressTempRegister, mask);
}
+ Jump branchTest8(Condition cond, RegisterID reg, Imm32 mask = Imm32(-1))
+ {
+ ASSERT((cond == Zero) || (cond == NonZero));
+ test32(reg, mask);
+ return Jump(makeBranch(cond));
+ }
+
+ Jump branchTest8(Condition cond, Address address, Imm32 mask = Imm32(-1))
+ {
+ ASSERT((cond == Zero) || (cond == NonZero));
+ // use addressTempRegister in case the branchTest8 we call uses dataTempRegister. :-/
+ load8(address, addressTempRegister);
+ return branchTest8(cond, addressTempRegister, mask);
+ }
+
Jump jump()
{
return Jump(makeJump());
return branch32(NotEqual, addressTempRegister, dataTempRegister);
}
+ Jump branchOr32(Condition cond, RegisterID src, RegisterID dest)
+ {
+ ASSERT((cond == Signed) || (cond == Zero) || (cond == NonZero));
+ m_assembler.orr_S(dest, dest, src);
+ return Jump(makeBranch(cond));
+ }
+
Jump branchSub32(Condition cond, RegisterID src, RegisterID dest)
{
ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(0));
}
+ void set32(Condition cond, Address left, RegisterID right, RegisterID dest)
+ {
+ load32(left, dataTempRegister);
+ set32(cond, dataTempRegister, right, dest);
+ }
+
void set32(Condition cond, RegisterID left, Imm32 right, RegisterID dest)
{
compare32(left, right);
m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(0));
}
+ void set8(Condition cond, RegisterID left, RegisterID right, RegisterID dest)
+ {
+ set32(cond, left, right, dest);
+ }
+
+ void set8(Condition cond, Address left, RegisterID right, RegisterID dest)
+ {
+ set32(cond, left, right, dest);
+ }
+
+ void set8(Condition cond, RegisterID left, Imm32 right, RegisterID dest)
+ {
+ set32(cond, left, right, dest);
+ }
+
// FIXME:
// The mask should be optional... perhaps the argument order should be
// dest-src, operations always have a dest? ... possibly not true, considering
m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(0));
}
+ void setTest8(Condition cond, Address address, Imm32 mask, RegisterID dest)
+ {
+ load8(address, dataTempRegister);
+ test32(dataTempRegister, mask);
+ m_assembler.it(armV7Condition(cond), false);
+ m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(1));
+ m_assembler.mov(dest, ARMThumbImmediate::makeUInt16(0));
+ }
DataLabel32 moveWithPatch(Imm32 imm, RegisterID dst)
{
#ifndef MacroAssemblerCodeRef_h
#define MacroAssemblerCodeRef_h
-#include <wtf/Platform.h>
-
#include "ExecutableAllocator.h"
#include "PassRefPtr.h"
#include "RefPtr.h"
--- /dev/null
+/*
+ * Copyright (C) 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2010 MIPS Technologies, Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY MIPS TECHNOLOGIES, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL MIPS TECHNOLOGIES, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef MacroAssemblerMIPS_h
+#define MacroAssemblerMIPS_h
+
+#if ENABLE(ASSEMBLER) && CPU(MIPS)
+
+#include "AbstractMacroAssembler.h"
+#include "MIPSAssembler.h"
+
+namespace JSC {
+
+class MacroAssemblerMIPS : public AbstractMacroAssembler<MIPSAssembler> {
+public:
+
+ MacroAssemblerMIPS()
+ : m_fixedWidth(false)
+ {
+ }
+
+ static const Scale ScalePtr = TimesFour;
+
+ // For storing immediate number
+ static const RegisterID immTempRegister = MIPSRegisters::t0;
+ // For storing data loaded from the memory
+ static const RegisterID dataTempRegister = MIPSRegisters::t1;
+ // For storing address base
+ static const RegisterID addrTempRegister = MIPSRegisters::t2;
+ // For storing compare result
+ static const RegisterID cmpTempRegister = MIPSRegisters::t3;
+
+ // FP temp register
+ static const FPRegisterID fpTempRegister = MIPSRegisters::f16;
+
+ enum Condition {
+ Equal,
+ NotEqual,
+ Above,
+ AboveOrEqual,
+ Below,
+ BelowOrEqual,
+ GreaterThan,
+ GreaterThanOrEqual,
+ LessThan,
+ LessThanOrEqual,
+ Overflow,
+ Signed,
+ Zero,
+ NonZero
+ };
+
+ enum DoubleCondition {
+ DoubleEqual,
+ DoubleNotEqual,
+ DoubleGreaterThan,
+ DoubleGreaterThanOrEqual,
+ DoubleLessThan,
+ DoubleLessThanOrEqual,
+ DoubleEqualOrUnordered,
+ DoubleNotEqualOrUnordered,
+ DoubleGreaterThanOrUnordered,
+ DoubleGreaterThanOrEqualOrUnordered,
+ DoubleLessThanOrUnordered,
+ DoubleLessThanOrEqualOrUnordered
+ };
+
+ static const RegisterID stackPointerRegister = MIPSRegisters::sp;
+ static const RegisterID returnAddressRegister = MIPSRegisters::ra;
+
+ // Integer arithmetic operations:
+ //
+ // Operations are typically two operand - operation(source, srcDst)
+ // For many operations the source may be an Imm32, the srcDst operand
+ // may often be a memory location (explicitly described using an Address
+ // object).
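+ //
+ // For example (an illustrative sketch, assuming 'masm' is a
+ // MacroAssemblerMIPS and 'dest' a RegisterID):
+ //
+ //     masm.add32(Imm32(8), dest);       // emits: addiu dest, dest, 8
+ //     masm.add32(Imm32(0x12345), dest); // emits: li   immTemp, 0x12345
+ //                                       //        addu dest, dest, immTemp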
+
+ void add32(RegisterID src, RegisterID dest)
+ {
+ m_assembler.addu(dest, dest, src);
+ }
+
+ void add32(Imm32 imm, RegisterID dest)
+ {
+ add32(imm, dest, dest);
+ }
+
+ void add32(Imm32 imm, RegisterID src, RegisterID dest)
+ {
+ if (!imm.m_isPointer && imm.m_value >= -32768 && imm.m_value <= 32767
+ && !m_fixedWidth) {
+ /*
+ addiu dest, src, imm
+ */
+ m_assembler.addiu(dest, src, imm.m_value);
+ } else {
+ /*
+ li immTemp, imm
+ addu dest, src, immTemp
+ */
+ move(imm, immTempRegister);
+ m_assembler.addu(dest, src, immTempRegister);
+ }
+ }
+
+ void add32(Imm32 imm, Address address)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth) {
+ /*
+ lw dataTemp, offset(base)
+ li immTemp, imm
+ addu dataTemp, dataTemp, immTemp
+ sw dataTemp, offset(base)
+ */
+ m_assembler.lw(dataTempRegister, address.base, address.offset);
+ if (!imm.m_isPointer
+ && imm.m_value >= -32768 && imm.m_value <= 32767
+ && !m_fixedWidth)
+ m_assembler.addiu(dataTempRegister, dataTempRegister,
+ imm.m_value);
+ else {
+ move(imm, immTempRegister);
+ m_assembler.addu(dataTempRegister, dataTempRegister,
+ immTempRegister);
+ }
+ m_assembler.sw(dataTempRegister, address.base, address.offset);
+ } else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ lw dataTemp, (offset & 0xffff)(addrTemp)
+ li immTemp, imm
+ addu dataTemp, dataTemp, immTemp
+ sw dataTemp, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lw(dataTempRegister, addrTempRegister, address.offset);
+
+ if (imm.m_value >= -32768 && imm.m_value <= 32767 && !m_fixedWidth)
+ m_assembler.addiu(dataTempRegister, dataTempRegister,
+ imm.m_value);
+ else {
+ move(imm, immTempRegister);
+ m_assembler.addu(dataTempRegister, dataTempRegister,
+ immTempRegister);
+ }
+ m_assembler.sw(dataTempRegister, addrTempRegister, address.offset);
+ }
+ }
+
+ void add32(Address src, RegisterID dest)
+ {
+ load32(src, dataTempRegister);
+ add32(dataTempRegister, dest);
+ }
+
+ void add32(RegisterID src, Address dest)
+ {
+ if (dest.offset >= -32768 && dest.offset <= 32767 && !m_fixedWidth) {
+ /*
+ lw dataTemp, offset(base)
+ addu dataTemp, dataTemp, src
+ sw dataTemp, offset(base)
+ */
+ m_assembler.lw(dataTempRegister, dest.base, dest.offset);
+ m_assembler.addu(dataTempRegister, dataTempRegister, src);
+ m_assembler.sw(dataTempRegister, dest.base, dest.offset);
+ } else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ lw dataTemp, (offset & 0xffff)(addrTemp)
+ addu dataTemp, dataTemp, src
+ sw dataTemp, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (dest.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, dest.base);
+ m_assembler.lw(dataTempRegister, addrTempRegister, dest.offset);
+ m_assembler.addu(dataTempRegister, dataTempRegister, src);
+ m_assembler.sw(dataTempRegister, addrTempRegister, dest.offset);
+ }
+ }
+
+ void add32(Imm32 imm, AbsoluteAddress address)
+ {
+ /*
+ li addrTemp, address
+ li immTemp, imm
+ lw dataTemp, 0(addrTemp)
+ addu dataTemp, dataTemp, immTemp
+ sw dataTemp, 0(addrTemp)
+ */
+ move(ImmPtr(address.m_ptr), addrTempRegister);
+ m_assembler.lw(dataTempRegister, addrTempRegister, 0);
+ if (!imm.m_isPointer && imm.m_value >= -32768 && imm.m_value <= 32767
+ && !m_fixedWidth)
+ m_assembler.addiu(dataTempRegister, dataTempRegister, imm.m_value);
+ else {
+ move(imm, immTempRegister);
+ m_assembler.addu(dataTempRegister, dataTempRegister, immTempRegister);
+ }
+ m_assembler.sw(dataTempRegister, addrTempRegister, 0);
+ }
+
+ void and32(RegisterID src, RegisterID dest)
+ {
+ m_assembler.andInsn(dest, dest, src);
+ }
+
+ void and32(Imm32 imm, RegisterID dest)
+ {
+ if (!imm.m_isPointer && !imm.m_value && !m_fixedWidth)
+ move(MIPSRegisters::zero, dest);
+ else if (!imm.m_isPointer && imm.m_value > 0 && imm.m_value < 65535
+ && !m_fixedWidth)
+ m_assembler.andi(dest, dest, imm.m_value);
+ else {
+ /*
+ li immTemp, imm
+ and dest, dest, immTemp
+ */
+ move(imm, immTempRegister);
+ m_assembler.andInsn(dest, dest, immTempRegister);
+ }
+ }
+
+ void lshift32(Imm32 imm, RegisterID dest)
+ {
+ m_assembler.sll(dest, dest, imm.m_value);
+ }
+
+ void lshift32(RegisterID shiftAmount, RegisterID dest)
+ {
+ m_assembler.sllv(dest, dest, shiftAmount);
+ }
+
+ void mul32(RegisterID src, RegisterID dest)
+ {
+ m_assembler.mul(dest, dest, src);
+ }
+
+ void mul32(Imm32 imm, RegisterID src, RegisterID dest)
+ {
+ if (!imm.m_isPointer && !imm.m_value && !m_fixedWidth)
+ move(MIPSRegisters::zero, dest);
+ else if (!imm.m_isPointer && imm.m_value == 1 && !m_fixedWidth)
+ move(src, dest);
+ else {
+ /*
+ li dataTemp, imm
+ mul dest, src, dataTemp
+ */
+ move(imm, dataTempRegister);
+ m_assembler.mul(dest, src, dataTempRegister);
+ }
+ }
+
+ void not32(RegisterID srcDest)
+ {
+ m_assembler.nor(srcDest, srcDest, MIPSRegisters::zero);
+ }
+
+ void or32(RegisterID src, RegisterID dest)
+ {
+ m_assembler.orInsn(dest, dest, src);
+ }
+
+ void or32(Imm32 imm, RegisterID dest)
+ {
+ if (!imm.m_isPointer && !imm.m_value && !m_fixedWidth)
+ return;
+
+ if (!imm.m_isPointer && imm.m_value > 0 && imm.m_value < 65535
+ && !m_fixedWidth) {
+ m_assembler.ori(dest, dest, imm.m_value);
+ return;
+ }
+
+ /*
+ li dataTemp, imm
+ or dest, dest, dataTemp
+ */
+ move(imm, dataTempRegister);
+ m_assembler.orInsn(dest, dest, dataTempRegister);
+ }
+
+ void rshift32(RegisterID shiftAmount, RegisterID dest)
+ {
+ m_assembler.srav(dest, dest, shiftAmount);
+ }
+
+ void rshift32(Imm32 imm, RegisterID dest)
+ {
+ m_assembler.sra(dest, dest, imm.m_value);
+ }
+
+ void sub32(RegisterID src, RegisterID dest)
+ {
+ m_assembler.subu(dest, dest, src);
+ }
+
+ void sub32(Imm32 imm, RegisterID dest)
+ {
+ if (!imm.m_isPointer && imm.m_value >= -32767 && imm.m_value <= 32768
+ && !m_fixedWidth) {
+ /*
+ addiu dest, src, imm
+ */
+ m_assembler.addiu(dest, dest, -imm.m_value);
+ } else {
+ /*
+ li immTemp, imm
+ subu dest, src, immTemp
+ */
+ move(imm, immTempRegister);
+ m_assembler.subu(dest, dest, immTempRegister);
+ }
+ }
+
+ void sub32(Imm32 imm, Address address)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth) {
+ /*
+ lw dataTemp, offset(base)
+ li immTemp, imm
+ subu dataTemp, dataTemp, immTemp
+ sw dataTemp, offset(base)
+ */
+ m_assembler.lw(dataTempRegister, address.base, address.offset);
+ if (!imm.m_isPointer
+ && imm.m_value >= -32767 && imm.m_value <= 32768
+ && !m_fixedWidth)
+ m_assembler.addiu(dataTempRegister, dataTempRegister,
+ -imm.m_value);
+ else {
+ move(imm, immTempRegister);
+ m_assembler.subu(dataTempRegister, dataTempRegister,
+ immTempRegister);
+ }
+ m_assembler.sw(dataTempRegister, address.base, address.offset);
+ } else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ lw dataTemp, (offset & 0xffff)(addrTemp)
+ li immTemp, imm
+ subu dataTemp, dataTemp, immTemp
+ sw dataTemp, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lw(dataTempRegister, addrTempRegister, address.offset);
+
+ if (!imm.m_isPointer
+ && imm.m_value >= -32767 && imm.m_value <= 32768
+ && !m_fixedWidth)
+ m_assembler.addiu(dataTempRegister, dataTempRegister,
+ -imm.m_value);
+ else {
+ move(imm, immTempRegister);
+ m_assembler.subu(dataTempRegister, dataTempRegister,
+ immTempRegister);
+ }
+ m_assembler.sw(dataTempRegister, addrTempRegister, address.offset);
+ }
+ }
+
+ void sub32(Address src, RegisterID dest)
+ {
+ load32(src, dataTempRegister);
+ sub32(dataTempRegister, dest);
+ }
+
+ void sub32(Imm32 imm, AbsoluteAddress address)
+ {
+ /*
+ li addrTemp, address
+ li immTemp, imm
+ lw dataTemp, 0(addrTemp)
+ subu dataTemp, dataTemp, immTemp
+ sw dataTemp, 0(addrTemp)
+ */
+ move(ImmPtr(address.m_ptr), addrTempRegister);
+ m_assembler.lw(dataTempRegister, addrTempRegister, 0);
+
+ if (!imm.m_isPointer && imm.m_value >= -32767 && imm.m_value <= 32768
+ && !m_fixedWidth) {
+ m_assembler.addiu(dataTempRegister, dataTempRegister,
+ -imm.m_value);
+ } else {
+ move(imm, immTempRegister);
+ m_assembler.subu(dataTempRegister, dataTempRegister, immTempRegister);
+ }
+ m_assembler.sw(dataTempRegister, addrTempRegister, 0);
+ }
+
+ void xor32(RegisterID src, RegisterID dest)
+ {
+ m_assembler.xorInsn(dest, dest, src);
+ }
+
+ void xor32(Imm32 imm, RegisterID dest)
+ {
+ /*
+ li immTemp, imm
+ xor dest, dest, immTemp
+ */
+ move(imm, immTempRegister);
+ m_assembler.xorInsn(dest, dest, immTempRegister);
+ }
+
+ // Memory access operations:
+ //
+ // Loads are of the form load(address, destination) and stores of the form
+ // store(source, address). The source for a store may be an Imm32. Address
+ // operand objects to loads and stores will be implicitly constructed if a
+ // register is passed.
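+ //
+ // For example (illustrative, with 'masm', 'base' and 'dest' as placeholders):
+ //
+ //     masm.load32(Address(base, 4), dest); // emits: lw dest, 4(base)
+ //
+ // An offset outside the signed 16-bit range is first folded into
+ // addrTempRegister with lui/addu, as the expansions noted in each method
+ // below show.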
+
+ /* Need to use zero-extended load byte for load8. */
+ void load8(ImplicitAddress address, RegisterID dest)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth)
+ m_assembler.lbu(dest, address.base, address.offset);
+ else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ lbu dest, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lbu(dest, addrTempRegister, address.offset);
+ }
+ }
+
+ void load32(ImplicitAddress address, RegisterID dest)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth)
+ m_assembler.lw(dest, address.base, address.offset);
+ else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ lw dest, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lw(dest, addrTempRegister, address.offset);
+ }
+ }
+
+ void load32(BaseIndex address, RegisterID dest)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth) {
+ /*
+ sll addrTemp, address.index, address.scale
+ addu addrTemp, addrTemp, address.base
+ lw dest, address.offset(addrTemp)
+ */
+ m_assembler.sll(addrTempRegister, address.index, address.scale);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lw(dest, addrTempRegister, address.offset);
+ } else {
+ /*
+ sll addrTemp, address.index, address.scale
+ addu addrTemp, addrTemp, address.base
+ lui immTemp, (address.offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, immTemp
+ lw dest, (address.offset & 0xffff)(addrTemp)
+ */
+ m_assembler.sll(addrTempRegister, address.index, address.scale);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lui(immTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister,
+ immTempRegister);
+ m_assembler.lw(dest, addrTempRegister, address.offset);
+ }
+ }
+
+ void load32WithUnalignedHalfWords(BaseIndex address, RegisterID dest)
+ {
+ if (address.offset >= -32768 && address.offset <= 32764
+ && !m_fixedWidth) {
+ /*
+ sll addrTemp, address.index, address.scale
+ addu addrTemp, addrTemp, address.base
+ (Big-Endian)
+ lwl dest, address.offset(addrTemp)
+ lwr dest, address.offset+3(addrTemp)
+ (Little-Endian)
+ lwl dest, address.offset+3(addrTemp)
+ lwr dest, address.offset(addrTemp)
+ */
+ m_assembler.sll(addrTempRegister, address.index, address.scale);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+#if CPU(BIG_ENDIAN)
+ m_assembler.lwl(dest, addrTempRegister, address.offset);
+ m_assembler.lwr(dest, addrTempRegister, address.offset + 3);
+#else
+ m_assembler.lwl(dest, addrTempRegister, address.offset + 3);
+ m_assembler.lwr(dest, addrTempRegister, address.offset);
+
+#endif
+ } else {
+ /*
+ sll addrTemp, address.index, address.scale
+ addu addrTemp, addrTemp, address.base
+ lui immTemp, address.offset >> 16
+ ori immTemp, immTemp, address.offset & 0xffff
+ addu addrTemp, addrTemp, immTemp
+ (Big-Endian)
+ lwl dest, 0(addrTemp)
+ lwr dest, 3(addrTemp)
+ (Little-Endian)
+ lwl dest, 3(addrTemp)
+ lwr dest, 0(addrTemp)
+ */
+ m_assembler.sll(addrTempRegister, address.index, address.scale);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lui(immTempRegister, address.offset >> 16);
+ m_assembler.ori(immTempRegister, immTempRegister, address.offset);
+ m_assembler.addu(addrTempRegister, addrTempRegister,
+ immTempRegister);
+#if CPU(BIG_ENDIAN)
+ m_assembler.lwl(dest, addrTempRegister, 0);
+ m_assembler.lwr(dest, addrTempRegister, 3);
+#else
+ m_assembler.lwl(dest, addrTempRegister, 3);
+ m_assembler.lwr(dest, addrTempRegister, 0);
+#endif
+ }
+ }
+
+ void load32(void* address, RegisterID dest)
+ {
+ /*
+ li addrTemp, address
+ lw dest, 0(addrTemp)
+ */
+ move(ImmPtr(address), addrTempRegister);
+ m_assembler.lw(dest, addrTempRegister, 0);
+ }
+
+ DataLabel32 load32WithAddressOffsetPatch(Address address, RegisterID dest)
+ {
+ m_fixedWidth = true;
+ /*
+ lui addrTemp, address.offset >> 16
+ ori addrTemp, addrTemp, address.offset & 0xffff
+ addu addrTemp, addrTemp, address.base
+ lw dest, 0(addrTemp)
+ */
+ DataLabel32 dataLabel(this);
+ move(Imm32(address.offset), addrTempRegister);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lw(dest, addrTempRegister, 0);
+ m_fixedWidth = false;
+ return dataLabel;
+ }
+
+ Label loadPtrWithPatchToLEA(Address address, RegisterID dest)
+ {
+ m_fixedWidth = true;
+ /*
+ lui addrTemp, address.offset >> 16
+ ori addrTemp, addrTemp, address.offset & 0xffff
+ addu addrTemp, addrTemp, address.base
+ lw dest, 0(addrTemp)
+ */
+ Label label(this);
+ move(Imm32(address.offset), addrTempRegister);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lw(dest, addrTempRegister, 0);
+ m_fixedWidth = false;
+ return label;
+ }
+
+ Label loadPtrWithAddressOffsetPatch(Address address, RegisterID dest)
+ {
+ return loadPtrWithPatchToLEA(address, dest);
+ }
+
+ /* Need to use zero-extended load half-word for load16. */
+ void load16(BaseIndex address, RegisterID dest)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth) {
+ /*
+ sll addrTemp, address.index, address.scale
+ addu addrTemp, addrTemp, address.base
+ lhu dest, address.offset(addrTemp)
+ */
+ m_assembler.sll(addrTempRegister, address.index, address.scale);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lhu(dest, addrTempRegister, address.offset);
+ } else {
+ /*
+ sll addrTemp, address.index, address.scale
+ addu addrTemp, addrTemp, address.base
+ lui immTemp, (address.offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, immTemp
+ lhu dest, (address.offset & 0xffff)(addrTemp)
+ */
+ m_assembler.sll(addrTempRegister, address.index, address.scale);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lui(immTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister,
+ immTempRegister);
+ m_assembler.lhu(dest, addrTempRegister, address.offset);
+ }
+ }
+
+ DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
+ {
+ m_fixedWidth = true;
+ /*
+ lui addrTemp, address.offset >> 16
+ ori addrTemp, addrTemp, address.offset & 0xffff
+ addu addrTemp, addrTemp, address.base
+ sw src, 0(addrTemp)
+ */
+ DataLabel32 dataLabel(this);
+ move(Imm32(address.offset), addrTempRegister);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.sw(src, addrTempRegister, 0);
+ m_fixedWidth = false;
+ return dataLabel;
+ }
+
+ void store32(RegisterID src, ImplicitAddress address)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth)
+ m_assembler.sw(src, address.base, address.offset);
+ else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ sw src, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.sw(src, addrTempRegister, address.offset);
+ }
+ }
+
+ void store32(RegisterID src, BaseIndex address)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth) {
+ /*
+ sll addrTemp, address.index, address.scale
+ addu addrTemp, addrTemp, address.base
+ sw src, address.offset(addrTemp)
+ */
+ m_assembler.sll(addrTempRegister, address.index, address.scale);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.sw(src, addrTempRegister, address.offset);
+ } else {
+ /*
+ sll addrTemp, address.index, address.scale
+ addu addrTemp, addrTemp, address.base
+ lui immTemp, (address.offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, immTemp
+ sw src, (address.offset & 0xffff)(addrTemp)
+ */
+ m_assembler.sll(addrTempRegister, address.index, address.scale);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lui(immTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister,
+ immTempRegister);
+ m_assembler.sw(src, addrTempRegister, address.offset);
+ }
+ }
+
+ void store32(Imm32 imm, ImplicitAddress address)
+ {
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth) {
+ if (!imm.m_isPointer && !imm.m_value)
+ m_assembler.sw(MIPSRegisters::zero, address.base,
+ address.offset);
+ else {
+ move(imm, immTempRegister);
+ m_assembler.sw(immTempRegister, address.base, address.offset);
+ }
+ } else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ sw immTemp, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ if (!imm.m_isPointer && !imm.m_value && !m_fixedWidth)
+ m_assembler.sw(MIPSRegisters::zero, addrTempRegister,
+ address.offset);
+ else {
+ move(imm, immTempRegister);
+ m_assembler.sw(immTempRegister, addrTempRegister,
+ address.offset);
+ }
+ }
+ }
+
+ void store32(RegisterID src, void* address)
+ {
+ /*
+ li addrTemp, address
+ sw src, 0(addrTemp)
+ */
+ move(ImmPtr(address), addrTempRegister);
+ m_assembler.sw(src, addrTempRegister, 0);
+ }
+
+ void store32(Imm32 imm, void* address)
+ {
+ /*
+ li immTemp, imm
+ li addrTemp, address
+ sw src, 0(addrTemp)
+ */
+ if (!imm.m_isPointer && !imm.m_value && !m_fixedWidth) {
+ move(ImmPtr(address), addrTempRegister);
+ m_assembler.sw(MIPSRegisters::zero, addrTempRegister, 0);
+ } else {
+ move(imm, immTempRegister);
+ move(ImmPtr(address), addrTempRegister);
+ m_assembler.sw(immTempRegister, addrTempRegister, 0);
+ }
+ }
+
+ // Floating-point operations:
+
+ bool supportsFloatingPoint() const
+ {
+#if WTF_MIPS_DOUBLE_FLOAT
+ return true;
+#else
+ return false;
+#endif
+ }
+
+ bool supportsFloatingPointTruncate() const
+ {
+#if WTF_MIPS_DOUBLE_FLOAT && WTF_MIPS_ISA_AT_LEAST(2)
+ return true;
+#else
+ return false;
+#endif
+ }
+
+ // Stack manipulation operations:
+ //
+ // The ABI is assumed to provide a stack abstraction to memory,
+ // containing machine word sized units of data. Push and pop
+ // operations add and remove a single register sized unit of data
+ // to or from the stack. Peek and poke operations read or write
+ // values on the stack, without moving the current stack position.
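+ //
+ // For example, push(reg) expands to "addiu sp, sp, -4; sw reg, 0(sp)" and
+ // pop(reg) to "lw reg, 0(sp); addiu sp, sp, 4", i.e. one machine word per
+ // operation.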
+
+ void pop(RegisterID dest)
+ {
+ m_assembler.lw(dest, MIPSRegisters::sp, 0);
+ m_assembler.addiu(MIPSRegisters::sp, MIPSRegisters::sp, 4);
+ }
+
+ void push(RegisterID src)
+ {
+ m_assembler.addiu(MIPSRegisters::sp, MIPSRegisters::sp, -4);
+ m_assembler.sw(src, MIPSRegisters::sp, 0);
+ }
+
+ void push(Address address)
+ {
+ load32(address, dataTempRegister);
+ push(dataTempRegister);
+ }
+
+ void push(Imm32 imm)
+ {
+ move(imm, immTempRegister);
+ push(immTempRegister);
+ }
+
+ // Register move operations:
+ //
+ // Move values in registers.
+
+ void move(Imm32 imm, RegisterID dest)
+ {
+ if (!imm.m_isPointer && !imm.m_value && !m_fixedWidth)
+ move(MIPSRegisters::zero, dest);
+ else if (imm.m_isPointer || m_fixedWidth) {
+ m_assembler.lui(dest, imm.m_value >> 16);
+ m_assembler.ori(dest, dest, imm.m_value);
+ } else
+ m_assembler.li(dest, imm.m_value);
+ }
+
+ void move(RegisterID src, RegisterID dest)
+ {
+ if (src != dest || m_fixedWidth)
+ m_assembler.move(dest, src);
+ }
+
+ void move(ImmPtr imm, RegisterID dest)
+ {
+ move(Imm32(imm), dest);
+ }
+
+ void swap(RegisterID reg1, RegisterID reg2)
+ {
+ move(reg1, immTempRegister);
+ move(reg2, reg1);
+ move(immTempRegister, reg2);
+ }
+
+ void signExtend32ToPtr(RegisterID src, RegisterID dest)
+ {
+ if (src != dest || m_fixedWidth)
+ move(src, dest);
+ }
+
+ void zeroExtend32ToPtr(RegisterID src, RegisterID dest)
+ {
+ if (src != dest || m_fixedWidth)
+ move(src, dest);
+ }
+
+ // Forwards / external control flow operations:
+ //
+ // This set of jump and conditional branch operations returns a Jump
+ // object which may be linked at a later point, allowing forward jumps
+ // or jumps that will require external linkage (after the code has been
+ // relocated).
+ //
+ // For branches, signed <, >, <= and >= are denoted as l, g, le, and ge
+ // respectively; for unsigned comparisons the names b, a, be, and ae are
+ // used (representing the names 'below' and 'above').
+ //
+ // Operands to the comparison are provided in the expected order, e.g.
+ // jle32(reg1, Imm32(5)) will branch if the value held in reg1, when
+ // treated as a signed 32-bit value, is less than or equal to 5.
+ //
+ // jz and jnz test whether the first operand is equal to zero, and take
+ // an optional second operand of a mask under which to perform the test.
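+ //
+ // For example (illustrative), branch32(LessThanOrEqual, reg, Imm32(5))
+ // materialises 5 into immTempRegister, emits "slt cmpTemp, immTemp, reg",
+ // and then branches on cmpTemp being equal to $zero, as laid out in
+ // branch32() below.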
+
+ Jump branch8(Condition cond, Address left, Imm32 right)
+ {
+ // Make sure the immediate value is unsigned 8 bits.
+ ASSERT(!(right.m_value & 0xFFFFFF00));
+ load8(left, dataTempRegister);
+ move(right, immTempRegister);
+ return branch32(cond, dataTempRegister, immTempRegister);
+ }
+
+ Jump branch32(Condition cond, RegisterID left, RegisterID right)
+ {
+ if (cond == Equal || cond == Zero)
+ return branchEqual(left, right);
+ if (cond == NotEqual || cond == NonZero)
+ return branchNotEqual(left, right);
+ if (cond == Above) {
+ m_assembler.sltu(cmpTempRegister, right, left);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == AboveOrEqual) {
+ m_assembler.sltu(cmpTempRegister, left, right);
+ return branchEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == Below) {
+ m_assembler.sltu(cmpTempRegister, left, right);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == BelowOrEqual) {
+ m_assembler.sltu(cmpTempRegister, right, left);
+ return branchEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == GreaterThan) {
+ m_assembler.slt(cmpTempRegister, right, left);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == GreaterThanOrEqual) {
+ m_assembler.slt(cmpTempRegister, left, right);
+ return branchEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == LessThan) {
+ m_assembler.slt(cmpTempRegister, left, right);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == LessThanOrEqual) {
+ m_assembler.slt(cmpTempRegister, right, left);
+ return branchEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == Overflow) {
+ /*
+ xor cmpTemp, left, right
+ bgez cmpTemp, No_overflow # same sign bit -> no overflow
+ nop
+ subu cmpTemp, left, right
+ xor cmpTemp, cmpTemp, left
+ bgez cmpTemp, No_overflow # same sign bit -> no overflow
+ nop
+ b Overflow
+ nop
+ nop
+ nop
+ nop
+ nop
+ No_overflow:
+ */
+ m_assembler.xorInsn(cmpTempRegister, left, right);
+ m_assembler.bgez(cmpTempRegister, 11);
+ m_assembler.nop();
+ m_assembler.subu(cmpTempRegister, left, right);
+ m_assembler.xorInsn(cmpTempRegister, cmpTempRegister, left);
+ m_assembler.bgez(cmpTempRegister, 7);
+ m_assembler.nop();
+ return jump();
+ }
+ if (cond == Signed) {
+ m_assembler.subu(cmpTempRegister, left, right);
+ // Check if the result is negative.
+ m_assembler.slt(cmpTempRegister, cmpTempRegister,
+ MIPSRegisters::zero);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ ASSERT(0);
+
+ return Jump();
+ }
+
+ Jump branch32(Condition cond, RegisterID left, Imm32 right)
+ {
+ move(right, immTempRegister);
+ return branch32(cond, left, immTempRegister);
+ }
+
+ Jump branch32(Condition cond, RegisterID left, Address right)
+ {
+ load32(right, dataTempRegister);
+ return branch32(cond, left, dataTempRegister);
+ }
+
+ Jump branch32(Condition cond, Address left, RegisterID right)
+ {
+ load32(left, dataTempRegister);
+ return branch32(cond, dataTempRegister, right);
+ }
+
+ Jump branch32(Condition cond, Address left, Imm32 right)
+ {
+ load32(left, dataTempRegister);
+ move(right, immTempRegister);
+ return branch32(cond, dataTempRegister, immTempRegister);
+ }
+
+ Jump branch32(Condition cond, BaseIndex left, Imm32 right)
+ {
+ load32(left, dataTempRegister);
+ // The load32() above may clobber immTempRegister, so move() must
+ // come after it.
+ move(right, immTempRegister);
+ return branch32(cond, dataTempRegister, immTempRegister);
+ }
+
+ Jump branch32WithUnalignedHalfWords(Condition cond, BaseIndex left, Imm32 right)
+ {
+ load32WithUnalignedHalfWords(left, dataTempRegister);
+ // The load32WithUnalignedHalfWords() above may clobber
+ // immTempRegister, so move() must come after it.
+ move(right, immTempRegister);
+ return branch32(cond, dataTempRegister, immTempRegister);
+ }
+
+ Jump branch32(Condition cond, AbsoluteAddress left, RegisterID right)
+ {
+ load32(left.m_ptr, dataTempRegister);
+ return branch32(cond, dataTempRegister, right);
+ }
+
+ Jump branch32(Condition cond, AbsoluteAddress left, Imm32 right)
+ {
+ load32(left.m_ptr, dataTempRegister);
+ move(right, immTempRegister);
+ return branch32(cond, dataTempRegister, immTempRegister);
+ }
+
+ Jump branch16(Condition cond, BaseIndex left, RegisterID right)
+ {
+ load16(left, dataTempRegister);
+ return branch32(cond, dataTempRegister, right);
+ }
+
+ Jump branch16(Condition cond, BaseIndex left, Imm32 right)
+ {
+ ASSERT(!(right.m_value & 0xFFFF0000));
+ load16(left, dataTempRegister);
+ // The preceding load16() uses immTempRegister, so the move() into
+ // immTempRegister must come after it.
+ move(right, immTempRegister);
+ return branch32(cond, dataTempRegister, immTempRegister);
+ }
+
+ Jump branchTest32(Condition cond, RegisterID reg, RegisterID mask)
+ {
+ ASSERT((cond == Zero) || (cond == NonZero));
+ m_assembler.andInsn(cmpTempRegister, reg, mask);
+ if (cond == Zero)
+ return branchEqual(cmpTempRegister, MIPSRegisters::zero);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+
+ Jump branchTest32(Condition cond, RegisterID reg, Imm32 mask = Imm32(-1))
+ {
+ ASSERT((cond == Zero) || (cond == NonZero));
+ if (mask.m_value == -1 && !m_fixedWidth) {
+ if (cond == Zero)
+ return branchEqual(reg, MIPSRegisters::zero);
+ return branchNotEqual(reg, MIPSRegisters::zero);
+ }
+ move(mask, immTempRegister);
+ return branchTest32(cond, reg, immTempRegister);
+ }
+
+ Jump branchTest32(Condition cond, Address address, Imm32 mask = Imm32(-1))
+ {
+ load32(address, dataTempRegister);
+ return branchTest32(cond, dataTempRegister, mask);
+ }
+
+ Jump branchTest32(Condition cond, BaseIndex address, Imm32 mask = Imm32(-1))
+ {
+ load32(address, dataTempRegister);
+ return branchTest32(cond, dataTempRegister, mask);
+ }
+
+ Jump branchTest8(Condition cond, Address address, Imm32 mask = Imm32(-1))
+ {
+ load8(address, dataTempRegister);
+ return branchTest32(cond, dataTempRegister, mask);
+ }
+
+ Jump jump()
+ {
+ return branchEqual(MIPSRegisters::zero, MIPSRegisters::zero);
+ }
+
+ void jump(RegisterID target)
+ {
+ m_assembler.jr(target);
+ m_assembler.nop();
+ }
+
+ void jump(Address address)
+ {
+ m_fixedWidth = true;
+ load32(address, MIPSRegisters::t9);
+ m_assembler.jr(MIPSRegisters::t9);
+ m_assembler.nop();
+ m_fixedWidth = false;
+ }
+
+ // Arithmetic control flow operations:
+ //
+ // This set of conditional branch operations branches based
+ // on the result of an arithmetic operation. The operation
+ // is performed as normal, storing the result.
+ //
+ // * jz operations branch if the result is zero.
+ // * jo operations branch if the (signed) arithmetic
+ // operation caused an overflow to occur.
+
+ Jump branchAdd32(Condition cond, RegisterID src, RegisterID dest)
+ {
+ ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
+ if (cond == Overflow) {
+ /*
+ move dest, dataTemp
+ xor cmpTemp, dataTemp, src
+ bltz cmpTemp, No_overflow # diff sign bit -> no overflow
+ addu dest, dataTemp, src
+ xor cmpTemp, dest, dataTemp
+ bgez cmpTemp, No_overflow # same sign bit -> no overflow
+ nop
+ b Overflow
+ nop
+ nop
+ nop
+ nop
+ nop
+ No_overflow:
+ */
+ move(dest, dataTempRegister);
+ m_assembler.xorInsn(cmpTempRegister, dataTempRegister, src);
+ m_assembler.bltz(cmpTempRegister, 10);
+ m_assembler.addu(dest, dataTempRegister, src);
+ m_assembler.xorInsn(cmpTempRegister, dest, dataTempRegister);
+ m_assembler.bgez(cmpTempRegister, 7);
+ m_assembler.nop();
+ return jump();
+ }
+ if (cond == Signed) {
+ add32(src, dest);
+ // Check if dest is negative.
+ m_assembler.slt(cmpTempRegister, dest, MIPSRegisters::zero);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == Zero) {
+ add32(src, dest);
+ return branchEqual(dest, MIPSRegisters::zero);
+ }
+ if (cond == NonZero) {
+ add32(src, dest);
+ return branchNotEqual(dest, MIPSRegisters::zero);
+ }
+ ASSERT(0);
+ return Jump();
+ }
+
+ Jump branchAdd32(Condition cond, Imm32 imm, RegisterID dest)
+ {
+ move(imm, immTempRegister);
+ return branchAdd32(cond, immTempRegister, dest);
+ }
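+
+ // Illustrative usage sketch (editorial addition; regT0, regT1 and the slow
+ // path below are hypothetical, not taken from this patch). A JIT caller
+ // typically hands the returned Jump to a slow path:
+ //     Jump overflow = branchAdd32(Overflow, regT1, regT0);
+ //     // fast path continues here; regT0 holds the sum
+ //     overflow.link(this); // emit the slow-path code at this point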
+
+ Jump branchMul32(Condition cond, RegisterID src, RegisterID dest)
+ {
+ ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
+ if (cond == Overflow) {
+ /*
+ mult src, dest
+ mfhi dataTemp
+ mflo dest
+ sra addrTemp, dest, 31
+ beq dataTemp, addrTemp, No_overflow # all sign bits (bit 63 to bit 31) are the same -> no overflow
+ nop
+ b Overflow
+ nop
+ nop
+ nop
+ nop
+ nop
+ No_overflow:
+ */
+ m_assembler.mult(src, dest);
+ m_assembler.mfhi(dataTempRegister);
+ m_assembler.mflo(dest);
+ m_assembler.sra(addrTempRegister, dest, 31);
+ m_assembler.beq(dataTempRegister, addrTempRegister, 7);
+ m_assembler.nop();
+ return jump();
+ }
+ if (cond == Signed) {
+ mul32(src, dest);
+ // Check if dest is negative.
+ m_assembler.slt(cmpTempRegister, dest, MIPSRegisters::zero);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == Zero) {
+ mul32(src, dest);
+ return branchEqual(dest, MIPSRegisters::zero);
+ }
+ if (cond == NonZero) {
+ mul32(src, dest);
+ return branchNotEqual(dest, MIPSRegisters::zero);
+ }
+ ASSERT(0);
+ return Jump();
+ }
+
+ Jump branchMul32(Condition cond, Imm32 imm, RegisterID src, RegisterID dest)
+ {
+ move(imm, immTempRegister);
+ move(src, dest);
+ return branchMul32(cond, immTempRegister, dest);
+ }
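+
+ // Editorial note on the branchMul32 Overflow check above: mult leaves the full
+ // 64-bit product in hi/lo, and the product fits in 32 bits exactly when hi
+ // equals the sign-extension of lo (sra lo, 31). For example, 0x10000 * 0x10000
+ // leaves hi = 1 and lo = 0, while sra(0, 31) = 0, so the beq falls through and
+ // the Overflow jump is taken.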
+
+ Jump branchSub32(Condition cond, RegisterID src, RegisterID dest)
+ {
+ ASSERT((cond == Overflow) || (cond == Signed) || (cond == Zero) || (cond == NonZero));
+ if (cond == Overflow) {
+ /*
+ move dest, dataTemp
+ xor cmpTemp, dataTemp, src
+ bgez cmpTemp, No_overflow # same sign bit -> no overflow
+ subu dest, dataTemp, src
+ xor cmpTemp, dest, dataTemp
+ bgez cmpTemp, No_overflow # same sign bit -> no overflow
+ nop
+ b Overflow
+ nop
+ nop
+ nop
+ nop
+ nop
+ No_overflow:
+ */
+ move(dest, dataTempRegister);
+ m_assembler.xorInsn(cmpTempRegister, dataTempRegister, src);
+ m_assembler.bgez(cmpTempRegister, 10);
+ m_assembler.subu(dest, dataTempRegister, src);
+ m_assembler.xorInsn(cmpTempRegister, dest, dataTempRegister);
+ m_assembler.bgez(cmpTempRegister, 7);
+ m_assembler.nop();
+ return jump();
+ }
+ if (cond == Signed) {
+ sub32(src, dest);
+ // Check if dest is negative.
+ m_assembler.slt(cmpTempRegister, dest, MIPSRegisters::zero);
+ return branchNotEqual(cmpTempRegister, MIPSRegisters::zero);
+ }
+ if (cond == Zero) {
+ sub32(src, dest);
+ return branchEqual(dest, MIPSRegisters::zero);
+ }
+ if (cond == NonZero) {
+ sub32(src, dest);
+ return branchNotEqual(dest, MIPSRegisters::zero);
+ }
+ ASSERT(0);
+ return Jump();
+ }
+
+ Jump branchSub32(Condition cond, Imm32 imm, RegisterID dest)
+ {
+ move(imm, immTempRegister);
+ return branchSub32(cond, immTempRegister, dest);
+ }
+
+ // Miscellaneous operations:
+
+ void breakpoint()
+ {
+ m_assembler.bkpt();
+ }
+
+ Call nearCall()
+ {
+ /* We need two words for relaxation. */
+ m_assembler.nop();
+ m_assembler.nop();
+ m_assembler.jal();
+ m_assembler.nop();
+ return Call(m_assembler.newJmpSrc(), Call::LinkableNear);
+ }
+
+ Call call()
+ {
+ m_assembler.lui(MIPSRegisters::t9, 0);
+ m_assembler.ori(MIPSRegisters::t9, MIPSRegisters::t9, 0);
+ m_assembler.jalr(MIPSRegisters::t9);
+ m_assembler.nop();
+ return Call(m_assembler.newJmpSrc(), Call::Linkable);
+ }
+
+ Call call(RegisterID target)
+ {
+ m_assembler.jalr(target);
+ m_assembler.nop();
+ return Call(m_assembler.newJmpSrc(), Call::None);
+ }
+
+ Call call(Address address)
+ {
+ m_fixedWidth = true;
+ load32(address, MIPSRegisters::t9);
+ m_assembler.jalr(MIPSRegisters::t9);
+ m_assembler.nop();
+ m_fixedWidth = false;
+ return Call(m_assembler.newJmpSrc(), Call::None);
+ }
+
+ void ret()
+ {
+ m_assembler.jr(MIPSRegisters::ra);
+ m_assembler.nop();
+ }
+
+ void set32(Condition cond, RegisterID left, RegisterID right, RegisterID dest)
+ {
+ if (cond == Equal || cond == Zero) {
+ m_assembler.xorInsn(dest, left, right);
+ m_assembler.sltiu(dest, dest, 1);
+ } else if (cond == NotEqual || cond == NonZero) {
+ m_assembler.xorInsn(dest, left, right);
+ m_assembler.sltu(dest, MIPSRegisters::zero, dest);
+ } else if (cond == Above)
+ m_assembler.sltu(dest, right, left);
+ else if (cond == AboveOrEqual) {
+ m_assembler.sltu(dest, left, right);
+ m_assembler.xori(dest, dest, 1);
+ } else if (cond == Below)
+ m_assembler.sltu(dest, left, right);
+ else if (cond == BelowOrEqual) {
+ m_assembler.sltu(dest, right, left);
+ m_assembler.xori(dest, dest, 1);
+ } else if (cond == GreaterThan)
+ m_assembler.slt(dest, right, left);
+ else if (cond == GreaterThanOrEqual) {
+ m_assembler.slt(dest, left, right);
+ m_assembler.xori(dest, dest, 1);
+ } else if (cond == LessThan)
+ m_assembler.slt(dest, left, right);
+ else if (cond == LessThanOrEqual) {
+ m_assembler.slt(dest, right, left);
+ m_assembler.xori(dest, dest, 1);
+ } else if (cond == Overflow) {
+ /*
+ xor cmpTemp, left, right
+ bgez cmpTemp, Done # same sign bit -> no overflow
+ move dest, 0
+ subu cmpTemp, left, right
+ xor cmpTemp, cmpTemp, left # diff sign bit -> overflow
+ slt dest, cmpTemp, 0
+ Done:
+ */
+ m_assembler.xorInsn(cmpTempRegister, left, right);
+ m_assembler.bgez(cmpTempRegister, 4);
+ m_assembler.move(dest, MIPSRegisters::zero);
+ m_assembler.subu(cmpTempRegister, left, right);
+ m_assembler.xorInsn(cmpTempRegister, cmpTempRegister, left);
+ m_assembler.slt(dest, cmpTempRegister, MIPSRegisters::zero);
+ } else if (cond == Signed) {
+ m_assembler.subu(dest, left, right);
+ // Check if the result is negative.
+ m_assembler.slt(dest, dest, MIPSRegisters::zero);
+ }
+ }
+
+ void set32(Condition cond, RegisterID left, Imm32 right, RegisterID dest)
+ {
+ move(right, immTempRegister);
+ set32(cond, left, immTempRegister, dest);
+ }
+
+ void setTest8(Condition cond, Address address, Imm32 mask, RegisterID dest)
+ {
+ ASSERT((cond == Zero) || (cond == NonZero));
+ load8(address, dataTempRegister);
+ if (mask.m_value == -1 && !m_fixedWidth) {
+ if (cond == Zero)
+ m_assembler.sltiu(dest, dataTempRegister, 1);
+ else
+ m_assembler.sltu(dest, MIPSRegisters::zero, dataTempRegister);
+ } else {
+ move(mask, immTempRegister);
+ m_assembler.andInsn(cmpTempRegister, dataTempRegister,
+ immTempRegister);
+ if (cond == Zero)
+ m_assembler.sltiu(dest, cmpTempRegister, 1);
+ else
+ m_assembler.sltu(dest, MIPSRegisters::zero, cmpTempRegister);
+ }
+ }
+
+ void setTest32(Condition cond, Address address, Imm32 mask, RegisterID dest)
+ {
+ ASSERT((cond == Zero) || (cond == NonZero));
+ load32(address, dataTempRegister);
+ if (mask.m_value == -1 && !m_fixedWidth) {
+ if (cond == Zero)
+ m_assembler.sltiu(dest, dataTempRegister, 1);
+ else
+ m_assembler.sltu(dest, MIPSRegisters::zero, dataTempRegister);
+ } else {
+ move(mask, immTempRegister);
+ m_assembler.andInsn(cmpTempRegister, dataTempRegister,
+ immTempRegister);
+ if (cond == Zero)
+ m_assembler.sltiu(dest, cmpTempRegister, 1);
+ else
+ m_assembler.sltu(dest, MIPSRegisters::zero, cmpTempRegister);
+ }
+ }
+
+ DataLabel32 moveWithPatch(Imm32 imm, RegisterID dest)
+ {
+ m_fixedWidth = true;
+ DataLabel32 label(this);
+ move(imm, dest);
+ m_fixedWidth = false;
+ return label;
+ }
+
+ DataLabelPtr moveWithPatch(ImmPtr initialValue, RegisterID dest)
+ {
+ m_fixedWidth = true;
+ DataLabelPtr label(this);
+ move(initialValue, dest);
+ m_fixedWidth = false;
+ return label;
+ }
+
+ Jump branchPtrWithPatch(Condition cond, RegisterID left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
+ {
+ m_fixedWidth = true;
+ dataLabel = moveWithPatch(initialRightValue, immTempRegister);
+ Jump temp = branch32(cond, left, immTempRegister);
+ m_fixedWidth = false;
+ return temp;
+ }
+
+ Jump branchPtrWithPatch(Condition cond, Address left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
+ {
+ m_fixedWidth = true;
+ load32(left, dataTempRegister);
+ dataLabel = moveWithPatch(initialRightValue, immTempRegister);
+ Jump temp = branch32(cond, dataTempRegister, immTempRegister);
+ m_fixedWidth = false;
+ return temp;
+ }
+
+ DataLabelPtr storePtrWithPatch(ImmPtr initialValue, ImplicitAddress address)
+ {
+ m_fixedWidth = true;
+ DataLabelPtr dataLabel = moveWithPatch(initialValue, dataTempRegister);
+ store32(dataTempRegister, address);
+ m_fixedWidth = false;
+ return dataLabel;
+ }
+
+ DataLabelPtr storePtrWithPatch(ImplicitAddress address)
+ {
+ return storePtrWithPatch(ImmPtr(0), address);
+ }
+
+ Call tailRecursiveCall()
+ {
+ // Like a normal call, but does not update the return address register.
+ m_fixedWidth = true;
+ move(Imm32(0), MIPSRegisters::t9);
+ m_assembler.jr(MIPSRegisters::t9);
+ m_assembler.nop();
+ m_fixedWidth = false;
+ return Call(m_assembler.newJmpSrc(), Call::Linkable);
+ }
+
+ Call makeTailRecursiveCall(Jump oldJump)
+ {
+ oldJump.link(this);
+ return tailRecursiveCall();
+ }
+
+ void loadDouble(ImplicitAddress address, FPRegisterID dest)
+ {
+#if WTF_MIPS_ISA(1)
+ /*
+ li addrTemp, address.offset
+ addu addrTemp, addrTemp, base
+ lwc1 dest, 0(addrTemp)
+ lwc1 dest+1, 4(addrTemp)
+ */
+ move(Imm32(address.offset), addrTempRegister);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.lwc1(dest, addrTempRegister, 0);
+ m_assembler.lwc1(FPRegisterID(dest + 1), addrTempRegister, 4);
+#else
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth) {
+ m_assembler.ldc1(dest, address.base, address.offset);
+ } else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ ldc1 dest, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.ldc1(dest, addrTempRegister, address.offset);
+ }
+#endif
+ }
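+
+ // Editorial note: the MIPS I path above (and the matching one in storeDouble()
+ // below) exists because ldc1/sdc1, the 64-bit FPU loads and stores, only
+ // appeared in MIPS II, so on MIPS I a double is transferred as two 32-bit
+ // lwc1/swc1 accesses to the even/odd register pair.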
+
+ void storeDouble(FPRegisterID src, ImplicitAddress address)
+ {
+#if WTF_MIPS_ISA(1)
+ /*
+ li addrTemp, address.offset
+ addu addrTemp, addrTemp, base
+ swc1 src, 0(addrTemp)
+ swc1 src+1, 4(addrTemp)
+ */
+ move(Imm32(address.offset), addrTempRegister);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.swc1(src, addrTempRegister, 0);
+ m_assembler.swc1(FPRegisterID(src + 1), addrTempRegister, 4);
+#else
+ if (address.offset >= -32768 && address.offset <= 32767
+ && !m_fixedWidth)
+ m_assembler.sdc1(src, address.base, address.offset);
+ else {
+ /*
+ lui addrTemp, (offset + 0x8000) >> 16
+ addu addrTemp, addrTemp, base
+ sdc1 src, (offset & 0xffff)(addrTemp)
+ */
+ m_assembler.lui(addrTempRegister, (address.offset + 0x8000) >> 16);
+ m_assembler.addu(addrTempRegister, addrTempRegister, address.base);
+ m_assembler.sdc1(src, addrTempRegister, address.offset);
+ }
+#endif
+ }
+
+ void addDouble(FPRegisterID src, FPRegisterID dest)
+ {
+ m_assembler.addd(dest, dest, src);
+ }
+
+ void addDouble(Address src, FPRegisterID dest)
+ {
+ loadDouble(src, fpTempRegister);
+ m_assembler.addd(dest, dest, fpTempRegister);
+ }
+
+ void subDouble(FPRegisterID src, FPRegisterID dest)
+ {
+ m_assembler.subd(dest, dest, src);
+ }
+
+ void subDouble(Address src, FPRegisterID dest)
+ {
+ loadDouble(src, fpTempRegister);
+ m_assembler.subd(dest, dest, fpTempRegister);
+ }
+
+ void mulDouble(FPRegisterID src, FPRegisterID dest)
+ {
+ m_assembler.muld(dest, dest, src);
+ }
+
+ void mulDouble(Address src, FPRegisterID dest)
+ {
+ loadDouble(src, fpTempRegister);
+ m_assembler.muld(dest, dest, fpTempRegister);
+ }
+
+ void convertInt32ToDouble(RegisterID src, FPRegisterID dest)
+ {
+ m_assembler.mtc1(src, fpTempRegister);
+ m_assembler.cvtdw(dest, fpTempRegister);
+ }
+
+ void insertRelaxationWords()
+ {
+ /* We need four words for relaxation. */
+ m_assembler.beq(MIPSRegisters::zero, MIPSRegisters::zero, 3); // Jump over the nops.
+ m_assembler.nop();
+ m_assembler.nop();
+ m_assembler.nop();
+ }
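+
+ // Editorial note: the reserved branch-over-nops pattern above gives the
+ // assembler room to "relax" a branch at link time. If the 16-bit branch
+ // offset turns out to be out of range, the short branch can be rewritten in
+ // place as a longer jump sequence using these spare words, without shifting
+ // any surrounding code; this is the relaxation that the nearCall() comment
+ // also refers to.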
+
+ Jump branchTrue()
+ {
+ m_assembler.appendJump();
+ m_assembler.bc1t();
+ m_assembler.nop();
+ insertRelaxationWords();
+ return Jump(m_assembler.newJmpSrc());
+ }
+
+ Jump branchFalse()
+ {
+ m_assembler.appendJump();
+ m_assembler.bc1f();
+ m_assembler.nop();
+ insertRelaxationWords();
+ return Jump(m_assembler.newJmpSrc());
+ }
+
+ Jump branchEqual(RegisterID rs, RegisterID rt)
+ {
+ m_assembler.appendJump();
+ m_assembler.beq(rs, rt, 0);
+ m_assembler.nop();
+ insertRelaxationWords();
+ return Jump(m_assembler.newJmpSrc());
+ }
+
+ Jump branchNotEqual(RegisterID rs, RegisterID rt)
+ {
+ m_assembler.appendJump();
+ m_assembler.bne(rs, rt, 0);
+ m_assembler.nop();
+ insertRelaxationWords();
+ return Jump(m_assembler.newJmpSrc());
+ }
+
+ Jump branchDouble(DoubleCondition cond, FPRegisterID left, FPRegisterID right)
+ {
+ if (cond == DoubleEqual) {
+ m_assembler.ceqd(left, right);
+ return branchTrue();
+ }
+ if (cond == DoubleNotEqual) {
+ m_assembler.ceqd(left, right);
+ return branchFalse(); // false
+ }
+ if (cond == DoubleGreaterThan) {
+ m_assembler.cngtd(left, right);
+ return branchFalse(); // false
+ }
+ if (cond == DoubleGreaterThanOrEqual) {
+ m_assembler.cnged(right, left);
+ return branchFalse(); // false
+ }
+ if (cond == DoubleLessThan) {
+ m_assembler.cltd(left, right);
+ return branchTrue();
+ }
+ if (cond == DoubleLessThanOrEqual) {
+ m_assembler.cled(left, right);
+ return branchTrue();
+ }
+ if (cond == DoubleEqualOrUnordered) {
+ m_assembler.cueqd(left, right);
+ return branchTrue();
+ }
+ if (cond == DoubleGreaterThanOrUnordered) {
+ m_assembler.coled(left, right);
+ return branchFalse(); // false
+ }
+ if (cond == DoubleGreaterThanOrEqualOrUnordered) {
+ m_assembler.coltd(left, right);
+ return branchFalse(); // false
+ }
+ if (cond == DoubleLessThanOrUnordered) {
+ m_assembler.cultd(left, right);
+ return branchTrue();
+ }
+ if (cond == DoubleLessThanOrEqualOrUnordered) {
+ m_assembler.culed(left, right);
+ return branchTrue();
+ }
+ ASSERT(0);
+
+ return Jump();
+ }
+
+ // Truncates 'src' to an integer, and places the resulting value in 'dest'.
+ // If the result is not representable as a 32-bit value, the branch is taken.
+ // May also branch for some values that are representable in 32 bits
+ // (specifically, in this case, INT_MAX, 0x7fffffff).
+ Jump branchTruncateDoubleToInt32(FPRegisterID src, RegisterID dest)
+ {
+ m_assembler.truncwd(fpTempRegister, src);
+ m_assembler.mfc1(dest, fpTempRegister);
+ return branch32(Equal, dest, Imm32(0x7fffffff));
+ }
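+
+ // Editorial note: trunc.w.d produces 0x7fffffff when the operand is NaN or
+ // lies outside the int32 range (with the invalid-operation trap disabled), so
+ // comparing the result against 0x7fffffff catches those failure cases, at the
+ // cost of also branching when the true result really is INT_MAX, as noted above.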
+
+private:
+ // If m_fixedWidth is true, we will generate a fixed number of instructions.
+ // Otherwise, we can emit any number of instructions.
+ bool m_fixedWidth;
+
+ friend class LinkBuffer;
+ friend class RepatchBuffer;
+
+ static void linkCall(void* code, Call call, FunctionPtr function)
+ {
+ MIPSAssembler::linkCall(code, call.m_jmp, function.value());
+ }
+
+ static void repatchCall(CodeLocationCall call, CodeLocationLabel destination)
+ {
+ MIPSAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
+ }
+
+ static void repatchCall(CodeLocationCall call, FunctionPtr destination)
+ {
+ MIPSAssembler::relinkCall(call.dataLocation(), destination.executableAddress());
+ }
+
+};
+
+}
+
+#endif // ENABLE(ASSEMBLER) && CPU(MIPS)
+
+#endif // MacroAssemblerMIPS_h
#ifndef MacroAssemblerX86_h
#define MacroAssemblerX86_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER) && CPU(X86)
#include "MacroAssemblerX86Common.h"
m_assembler.movl_mr(address, dest);
}
- void loadDouble(void* address, FPRegisterID dest)
+ void loadDouble(const void* address, FPRegisterID dest)
{
ASSERT(isSSE2Present());
m_assembler.movsd_mr(address, dest);
bool supportsFloatingPoint() const { return m_isSSE2Present; }
// See comment on MacroAssemblerARMv7::supportsFloatingPointTruncate()
bool supportsFloatingPointTruncate() const { return m_isSSE2Present; }
+ bool supportsFloatingPointSqrt() const { return m_isSSE2Present; }
private:
const bool m_isSSE2Present;
#ifndef MacroAssemblerX86Common_h
#define MacroAssemblerX86Common_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER)
#include "X86Assembler.h"
static const int DoubleConditionBits = DoubleConditionBitInvert | DoubleConditionBitSpecial;
public:
+ typedef X86Assembler::FPRegisterID FPRegisterID;
enum Condition {
Equal = X86Assembler::ConditionE,
{
m_assembler.sarl_i8r(imm.m_value, dest);
}
+
+ void urshift32(RegisterID shift_amount, RegisterID dest)
+ {
+ // On x86 we can only shift by ecx; if asked to shift by another register we'll
+ // need to rejig the shift amount into ecx first, and restore the registers afterwards.
+ if (shift_amount != X86Registers::ecx) {
+ swap(shift_amount, X86Registers::ecx);
+
+ // E.g. transform "shrl %eax, %eax" -> "xchgl %eax, %ecx; shrl %ecx, %ecx; xchgl %eax, %ecx"
+ if (dest == shift_amount)
+ m_assembler.shrl_CLr(X86Registers::ecx);
+ // E.g. transform "shrl %eax, %ecx" -> "xchgl %eax, %ecx; shrl %ecx, %eax; xchgl %eax, %ecx"
+ else if (dest == X86Registers::ecx)
+ m_assembler.shrl_CLr(shift_amount);
+ // E.g. transform "shrl %eax, %ebx" -> "xchgl %eax, %ecx; shrl %ecx, %ebx; xchgl %eax, %ecx"
+ else
+ m_assembler.shrl_CLr(dest);
+
+ swap(shift_amount, X86Registers::ecx);
+ } else
+ m_assembler.shrl_CLr(dest);
+ }
+
+ void urshift32(Imm32 imm, RegisterID dest)
+ {
+ m_assembler.shrl_i8r(imm.m_value, dest);
+ }
void sub32(RegisterID src, RegisterID dest)
{
m_assembler.xorl_mr(src.offset, src.base, dest);
}
+ void sqrtDouble(FPRegisterID src, FPRegisterID dst)
+ {
+ m_assembler.sqrtsd_rr(src, dst);
+ }
// Memory access operations:
//
{
m_assembler.movzwl_mr(address.offset, address.base, address.index, address.scale, dest);
}
+
+ void load16(Address address, RegisterID dest)
+ {
+ m_assembler.movzwl_mr(address.offset, address.base, dest);
+ }
DataLabel32 store32WithAddressOffsetPatch(RegisterID src, Address address)
{
// an optional second operand of a mask under which to perform the test.
public:
+ Jump branch8(Condition cond, Address left, Imm32 right)
+ {
+ m_assembler.cmpb_im(right.m_value, left.offset, left.base);
+ return Jump(m_assembler.jCC(x86Condition(cond)));
+ }
+
Jump branch32(Condition cond, RegisterID left, RegisterID right)
{
m_assembler.cmpl_rr(right, left);
m_assembler.testl_i32m(mask.m_value, address.offset, address.base, address.index, address.scale);
return Jump(m_assembler.jCC(x86Condition(cond)));
}
+
+ Jump branchTest8(Condition cond, Address address, Imm32 mask = Imm32(-1))
+ {
+ ASSERT((cond == Zero) || (cond == NonZero));
+ if (mask.m_value == -1)
+ m_assembler.cmpb_im(0, address.offset, address.base);
+ else
+ m_assembler.testb_im(mask.m_value, address.offset, address.base);
+ return Jump(m_assembler.jCC(x86Condition(cond)));
+ }
+
+ Jump branchTest8(Condition cond, BaseIndex address, Imm32 mask = Imm32(-1))
+ {
+ ASSERT((cond == Zero) || (cond == NonZero));
+ if (mask.m_value == -1)
+ m_assembler.cmpb_im(0, address.offset, address.base, address.index, address.scale);
+ else
+ m_assembler.testb_im(mask.m_value, address.offset, address.base, address.index, address.scale);
+ return Jump(m_assembler.jCC(x86Condition(cond)));
+ }
Jump jump()
{
return Jump(m_assembler.jCC(x86Condition(cond)));
}
+ Jump branchNeg32(Condition cond, RegisterID srcDest)
+ {
+ ASSERT((cond == Overflow) || (cond == Zero) || (cond == NonZero));
+ neg32(srcDest);
+ return Jump(m_assembler.jCC(x86Condition(cond)));
+ }
+
Jump branchOr32(Condition cond, RegisterID src, RegisterID dest)
{
ASSERT((cond == Signed) || (cond == Zero) || (cond == NonZero));
void setTest8(Condition cond, Address address, Imm32 mask, RegisterID dest)
{
if (mask.m_value == -1)
- m_assembler.cmpl_im(0, address.offset, address.base);
+ m_assembler.cmpb_im(0, address.offset, address.base);
else
- m_assembler.testl_i32m(mask.m_value, address.offset, address.base);
+ m_assembler.testb_im(mask.m_value, address.offset, address.base);
m_assembler.setCC_r(x86Condition(cond), dest);
+ m_assembler.movzbl_rr(dest, dest);
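+ // setCC_r writes only the low byte of dest, so zero-extend it to a full 32-bit boolean value.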
}
void setTest32(Condition cond, Address address, Imm32 mask, RegisterID dest)
#ifndef MacroAssemblerX86_64_h
#define MacroAssemblerX86_64_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER) && CPU(X86_64)
#include "MacroAssemblerX86Common.h"
}
}
- void loadDouble(void* address, FPRegisterID dest)
+ void loadDouble(const void* address, FPRegisterID dest)
{
move(ImmPtr(address), scratchRegister);
loadDouble(scratchRegister, dest);
return label;
}
+ using MacroAssemblerX86Common::branchTest8;
+ Jump branchTest8(Condition cond, ExtendedAddress address, Imm32 mask = Imm32(-1))
+ {
+ ImmPtr addr(reinterpret_cast<void*>(address.offset));
+ MacroAssemblerX86Common::move(addr, scratchRegister);
+ return MacroAssemblerX86Common::branchTest8(cond, BaseIndex(scratchRegister, address.base, TimesOne), mask);
+ }
+
Label loadPtrWithPatchToLEA(Address address, RegisterID dest)
{
Label label(this);
bool supportsFloatingPoint() const { return true; }
// See comment on MacroAssemblerARMv7::supportsFloatingPointTruncate()
bool supportsFloatingPointTruncate() const { return true; }
+ bool supportsFloatingPointSqrt() const { return true; }
private:
friend class LinkBuffer;
#ifndef RepatchBuffer_h
#define RepatchBuffer_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER)
#include <MacroAssembler.h>
#ifndef X86Assembler_h
#define X86Assembler_h
-#include <wtf/Platform.h>
-
#if ENABLE(ASSEMBLER) && (CPU(X86) || CPU(X86_64))
#include "AssemblerBuffer.h"
PRE_SSE_66 = 0x66,
OP_PUSH_Iz = 0x68,
OP_IMUL_GvEvIz = 0x69,
+ OP_GROUP1_EbIb = 0x80,
OP_GROUP1_EvIz = 0x81,
OP_GROUP1_EvIb = 0x83,
OP_TEST_EvGv = 0x85,
OP2_MULSD_VsdWsd = 0x59,
OP2_SUBSD_VsdWsd = 0x5C,
OP2_DIVSD_VsdWsd = 0x5E,
+ OP2_SQRTSD_VsdWsd = 0x51,
OP2_XORPD_VpdWpd = 0x57,
OP2_MOVD_VdEd = 0x6E,
OP2_MOVD_EdVd = 0x7E,
GROUP1A_OP_POP = 0,
GROUP2_OP_SHL = 4,
+ GROUP2_OP_SHR = 5,
GROUP2_OP_SAR = 7,
GROUP3_OP_TEST = 0,
{
m_formatter.oneByteOp(OP_GROUP2_EvCL, GROUP2_OP_SAR, dst);
}
+
+ void shrl_i8r(int imm, RegisterID dst)
+ {
+ if (imm == 1)
+ m_formatter.oneByteOp(OP_GROUP2_Ev1, GROUP2_OP_SHR, dst);
+ else {
+ m_formatter.oneByteOp(OP_GROUP2_EvIb, GROUP2_OP_SHR, dst);
+ m_formatter.immediate8(imm);
+ }
+ }
+
+ void shrl_CLr(RegisterID dst)
+ {
+ m_formatter.oneByteOp(OP_GROUP2_EvCL, GROUP2_OP_SHR, dst);
+ }
void shll_i8r(int imm, RegisterID dst)
{
m_formatter.oneByteOp(OP_GROUP1_EvIz, GROUP1_OP_CMP, dst);
m_formatter.immediate32(imm);
}
-
+
void cmpl_im(int imm, int offset, RegisterID base)
{
if (CAN_SIGN_EXTEND_8_32(imm)) {
m_formatter.immediate32(imm);
}
}
+
+ void cmpb_im(int imm, int offset, RegisterID base)
+ {
+ m_formatter.oneByteOp(OP_GROUP1_EbIb, GROUP1_OP_CMP, base, offset);
+ m_formatter.immediate8(imm);
+ }
+
+ void cmpb_im(int imm, int offset, RegisterID base, RegisterID index, int scale)
+ {
+ m_formatter.oneByteOp(OP_GROUP1_EbIb, GROUP1_OP_CMP, base, index, scale, offset);
+ m_formatter.immediate8(imm);
+ }
void cmpl_im(int imm, int offset, RegisterID base, RegisterID index, int scale)
{
m_formatter.oneByteOp(OP_GROUP3_EvIz, GROUP3_OP_TEST, base, offset);
m_formatter.immediate32(imm);
}
+
+ void testb_im(int imm, int offset, RegisterID base)
+ {
+ m_formatter.oneByteOp(OP_GROUP3_EbIb, GROUP3_OP_TEST, base, offset);
+ m_formatter.immediate8(imm);
+ }
+
+ void testb_im(int imm, int offset, RegisterID base, RegisterID index, int scale)
+ {
+ m_formatter.oneByteOp(OP_GROUP3_EbIb, GROUP3_OP_TEST, base, index, scale, offset);
+ m_formatter.immediate8(imm);
+ }
void testl_i32m(int imm, int offset, RegisterID base, RegisterID index, int scale)
{
}
#if !CPU(X86_64)
- void movsd_mr(void* address, XMMRegisterID dst)
+ void movsd_mr(const void* address, XMMRegisterID dst)
{
m_formatter.prefix(PRE_SSE_F2);
m_formatter.twoByteOp(OP2_MOVSD_VsdWsd, (RegisterID)dst, address);
m_formatter.twoByteOp(OP2_XORPD_VpdWpd, (RegisterID)dst, (RegisterID)src);
}
+ void sqrtsd_rr(XMMRegisterID src, XMMRegisterID dst)
+ {
+ m_formatter.prefix(PRE_SSE_F2);
+ m_formatter.twoByteOp(OP2_SQRTSD_VsdWsd, (RegisterID)dst, (RegisterID)src);
+ }
+
// Misc instructions:
void int3()
}
#if !CPU(X86_64)
- void twoByteOp(TwoByteOpcodeID opcode, int reg, void* address)
+ void twoByteOp(TwoByteOpcodeID opcode, int reg, const void* address)
{
m_buffer.ensureSpace(maxInstructionSize);
m_buffer.putByteUnchecked(OP_2BYTE_ESCAPE);
}
#if !CPU(X86_64)
- void memoryModRM(int reg, void* address)
+ void memoryModRM(int reg, const void* address)
{
// noBase + ModRmMemoryNoDisp means noBase + ModRmMemoryDisp32!
putModRm(ModRmMemoryNoDisp, reg, noBase);
static UString escapeQuotes(const UString& str)
{
UString result = str;
- int pos = 0;
- while ((pos = result.find('\"', pos)) >= 0) {
+ unsigned pos = 0;
+ while ((pos = result.find('\"', pos)) != UString::NotFound) {
result = makeString(result.substr(0, pos), "\"\\\"\"", result.substr(pos + 1));
pos += 4;
}
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] %s\t\t %s, %s\n", location, op, registerName(exec, r0).c_str(), registerName(exec, r1).c_str());
+ printf("[%4d] %s\t\t %s, %s\n", location, op, registerName(exec, r0).data(), registerName(exec, r1).data());
}
void CodeBlock::printBinaryOp(ExecState* exec, int location, Vector<Instruction>::const_iterator& it, const char* op) const
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int r2 = (++it)->u.operand;
- printf("[%4d] %s\t\t %s, %s, %s\n", location, op, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), registerName(exec, r2).c_str());
+ printf("[%4d] %s\t\t %s, %s, %s\n", location, op, registerName(exec, r0).data(), registerName(exec, r1).data(), registerName(exec, r2).data());
}
void CodeBlock::printConditionalJump(ExecState* exec, const Vector<Instruction>::const_iterator&, Vector<Instruction>::const_iterator& it, int location, const char* op) const
{
int r0 = (++it)->u.operand;
int offset = (++it)->u.operand;
- printf("[%4d] %s\t\t %s, %d(->%d)\n", location, op, registerName(exec, r0).c_str(), offset, location + offset);
+ printf("[%4d] %s\t\t %s, %d(->%d)\n", location, op, registerName(exec, r0).data(), offset, location + offset);
}
void CodeBlock::printGetByIdOp(ExecState* exec, int location, Vector<Instruction>::const_iterator& it, const char* op) const
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int id0 = (++it)->u.operand;
- printf("[%4d] %s\t %s, %s, %s\n", location, op, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), idName(id0, m_identifiers[id0]).c_str());
+ printf("[%4d] %s\t %s, %s, %s\n", location, op, registerName(exec, r0).data(), registerName(exec, r1).data(), idName(id0, m_identifiers[id0]).data());
it += 4;
}
int r0 = (++it)->u.operand;
int id0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] %s\t %s, %s, %s\n", location, op, registerName(exec, r0).c_str(), idName(id0, m_identifiers[id0]).c_str(), registerName(exec, r1).c_str());
+ printf("[%4d] %s\t %s, %s, %s\n", location, op, registerName(exec, r0).data(), idName(id0, m_identifiers[id0]).data(), registerName(exec, r1).data());
it += 4;
}
#if ENABLE(JIT)
static bool isGlobalResolve(OpcodeID opcodeID)
{
- return opcodeID == op_resolve_global;
+ return opcodeID == op_resolve_global || opcodeID == op_resolve_global_dynamic;
}
static bool isPropertyAccess(OpcodeID opcodeID)
static void printGlobalResolveInfo(const GlobalResolveInfo& resolveInfo, unsigned instructionOffset)
{
- printf(" [%4d] %s: %s\n", instructionOffset, "resolve_global", pointerToSourceString(resolveInfo.structure).UTF8String().c_str());
+ printf(" [%4d] %s: %s\n", instructionOffset, "resolve_global", pointerToSourceString(resolveInfo.structure).UTF8String().data());
}
static void printStructureStubInfo(const StructureStubInfo& stubInfo, unsigned instructionOffset)
{
switch (stubInfo.accessType) {
case access_get_by_id_self:
- printf(" [%4d] %s: %s\n", instructionOffset, "get_by_id_self", pointerToSourceString(stubInfo.u.getByIdSelf.baseObjectStructure).UTF8String().c_str());
+ printf(" [%4d] %s: %s\n", instructionOffset, "get_by_id_self", pointerToSourceString(stubInfo.u.getByIdSelf.baseObjectStructure).UTF8String().data());
return;
case access_get_by_id_proto:
- printf(" [%4d] %s: %s, %s\n", instructionOffset, "get_by_id_proto", pointerToSourceString(stubInfo.u.getByIdProto.baseObjectStructure).UTF8String().c_str(), pointerToSourceString(stubInfo.u.getByIdProto.prototypeStructure).UTF8String().c_str());
+ printf(" [%4d] %s: %s, %s\n", instructionOffset, "get_by_id_proto", pointerToSourceString(stubInfo.u.getByIdProto.baseObjectStructure).UTF8String().data(), pointerToSourceString(stubInfo.u.getByIdProto.prototypeStructure).UTF8String().data());
return;
case access_get_by_id_chain:
- printf(" [%4d] %s: %s, %s\n", instructionOffset, "get_by_id_chain", pointerToSourceString(stubInfo.u.getByIdChain.baseObjectStructure).UTF8String().c_str(), pointerToSourceString(stubInfo.u.getByIdChain.chain).UTF8String().c_str());
+ printf(" [%4d] %s: %s, %s\n", instructionOffset, "get_by_id_chain", pointerToSourceString(stubInfo.u.getByIdChain.baseObjectStructure).UTF8String().data(), pointerToSourceString(stubInfo.u.getByIdChain.chain).UTF8String().data());
return;
case access_get_by_id_self_list:
- printf(" [%4d] %s: %s (%d)\n", instructionOffset, "op_get_by_id_self_list", pointerToSourceString(stubInfo.u.getByIdSelfList.structureList).UTF8String().c_str(), stubInfo.u.getByIdSelfList.listSize);
+ printf(" [%4d] %s: %s (%d)\n", instructionOffset, "op_get_by_id_self_list", pointerToSourceString(stubInfo.u.getByIdSelfList.structureList).UTF8String().data(), stubInfo.u.getByIdSelfList.listSize);
return;
case access_get_by_id_proto_list:
- printf(" [%4d] %s: %s (%d)\n", instructionOffset, "op_get_by_id_proto_list", pointerToSourceString(stubInfo.u.getByIdProtoList.structureList).UTF8String().c_str(), stubInfo.u.getByIdProtoList.listSize);
+ printf(" [%4d] %s: %s (%d)\n", instructionOffset, "op_get_by_id_proto_list", pointerToSourceString(stubInfo.u.getByIdProtoList.structureList).UTF8String().data(), stubInfo.u.getByIdProtoList.listSize);
return;
case access_put_by_id_transition:
- printf(" [%4d] %s: %s, %s, %s\n", instructionOffset, "put_by_id_transition", pointerToSourceString(stubInfo.u.putByIdTransition.previousStructure).UTF8String().c_str(), pointerToSourceString(stubInfo.u.putByIdTransition.structure).UTF8String().c_str(), pointerToSourceString(stubInfo.u.putByIdTransition.chain).UTF8String().c_str());
+ printf(" [%4d] %s: %s, %s, %s\n", instructionOffset, "put_by_id_transition", pointerToSourceString(stubInfo.u.putByIdTransition.previousStructure).UTF8String().data(), pointerToSourceString(stubInfo.u.putByIdTransition.structure).UTF8String().data(), pointerToSourceString(stubInfo.u.putByIdTransition.chain).UTF8String().data());
return;
case access_put_by_id_replace:
- printf(" [%4d] %s: %s\n", instructionOffset, "put_by_id_replace", pointerToSourceString(stubInfo.u.putByIdReplace.baseObjectStructure).UTF8String().c_str());
+ printf(" [%4d] %s: %s\n", instructionOffset, "put_by_id_replace", pointerToSourceString(stubInfo.u.putByIdReplace.baseObjectStructure).UTF8String().data());
return;
case access_get_by_id:
printf(" [%4d] %s\n", instructionOffset, "get_by_id");
void CodeBlock::printStructure(const char* name, const Instruction* vPC, int operand) const
{
unsigned instructionOffset = vPC - m_instructions.begin();
- printf(" [%4d] %s: %s\n", instructionOffset, name, pointerToSourceString(vPC[operand].u.structure).UTF8String().c_str());
+ printf(" [%4d] %s: %s\n", instructionOffset, name, pointerToSourceString(vPC[operand].u.structure).UTF8String().data());
}
void CodeBlock::printStructures(const Instruction* vPC) const
return;
}
if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_proto)) {
- printf(" [%4d] %s: %s, %s\n", instructionOffset, "get_by_id_proto", pointerToSourceString(vPC[4].u.structure).UTF8String().c_str(), pointerToSourceString(vPC[5].u.structure).UTF8String().c_str());
+ printf(" [%4d] %s: %s, %s\n", instructionOffset, "get_by_id_proto", pointerToSourceString(vPC[4].u.structure).UTF8String().data(), pointerToSourceString(vPC[5].u.structure).UTF8String().data());
return;
}
if (vPC[0].u.opcode == interpreter->getOpcode(op_put_by_id_transition)) {
- printf(" [%4d] %s: %s, %s, %s\n", instructionOffset, "put_by_id_transition", pointerToSourceString(vPC[4].u.structure).UTF8String().c_str(), pointerToSourceString(vPC[5].u.structure).UTF8String().c_str(), pointerToSourceString(vPC[6].u.structureChain).UTF8String().c_str());
+ printf(" [%4d] %s: %s, %s, %s\n", instructionOffset, "put_by_id_transition", pointerToSourceString(vPC[4].u.structure).UTF8String().data(), pointerToSourceString(vPC[5].u.structure).UTF8String().data(), pointerToSourceString(vPC[6].u.structureChain).UTF8String().data());
return;
}
if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_chain)) {
- printf(" [%4d] %s: %s, %s\n", instructionOffset, "get_by_id_chain", pointerToSourceString(vPC[4].u.structure).UTF8String().c_str(), pointerToSourceString(vPC[5].u.structureChain).UTF8String().c_str());
+ printf(" [%4d] %s: %s, %s\n", instructionOffset, "get_by_id_chain", pointerToSourceString(vPC[4].u.structure).UTF8String().data(), pointerToSourceString(vPC[5].u.structureChain).UTF8String().data());
return;
}
if (vPC[0].u.opcode == interpreter->getOpcode(op_put_by_id)) {
printStructure("resolve_global", vPC, 4);
return;
}
+ if (vPC[0].u.opcode == interpreter->getOpcode(op_resolve_global_dynamic)) {
+ printStructure("resolve_global_dynamic", vPC, 4);
+ return;
+ }
// These m_instructions don't ref Structures.
ASSERT(vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_generic) || vPC[0].u.opcode == interpreter->getOpcode(op_put_by_id_generic) || vPC[0].u.opcode == interpreter->getOpcode(op_call) || vPC[0].u.opcode == interpreter->getOpcode(op_call_eval) || vPC[0].u.opcode == interpreter->getOpcode(op_construct));
}
case op_enter_with_activation: {
int r0 = (++it)->u.operand;
- printf("[%4d] enter_with_activation %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] enter_with_activation %s\n", location, registerName(exec, r0).data());
break;
}
case op_create_arguments: {
}
case op_convert_this: {
int r0 = (++it)->u.operand;
- printf("[%4d] convert_this %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] convert_this %s\n", location, registerName(exec, r0).data());
break;
}
case op_new_object: {
int r0 = (++it)->u.operand;
- printf("[%4d] new_object\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] new_object\t %s\n", location, registerName(exec, r0).data());
break;
}
case op_new_array: {
int dst = (++it)->u.operand;
int argv = (++it)->u.operand;
int argc = (++it)->u.operand;
- printf("[%4d] new_array\t %s, %s, %d\n", location, registerName(exec, dst).c_str(), registerName(exec, argv).c_str(), argc);
+ printf("[%4d] new_array\t %s, %s, %d\n", location, registerName(exec, dst).data(), registerName(exec, argv).data(), argc);
break;
}
case op_new_regexp: {
int r0 = (++it)->u.operand;
int re0 = (++it)->u.operand;
- printf("[%4d] new_regexp\t %s, %s\n", location, registerName(exec, r0).c_str(), regexpName(re0, regexp(re0)).c_str());
+ printf("[%4d] new_regexp\t %s, %s\n", location, registerName(exec, r0).data(), regexpName(re0, regexp(re0)).data());
break;
}
case op_mov: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] mov\t\t %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str());
+ printf("[%4d] mov\t\t %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data());
break;
}
case op_not: {
}
case op_pre_inc: {
int r0 = (++it)->u.operand;
- printf("[%4d] pre_inc\t\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] pre_inc\t\t %s\n", location, registerName(exec, r0).data());
break;
}
case op_pre_dec: {
int r0 = (++it)->u.operand;
- printf("[%4d] pre_dec\t\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] pre_dec\t\t %s\n", location, registerName(exec, r0).data());
break;
}
case op_post_inc: {
int r1 = (++it)->u.operand;
int r2 = (++it)->u.operand;
int r3 = (++it)->u.operand;
- printf("[%4d] instanceof\t\t %s, %s, %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), registerName(exec, r2).c_str(), registerName(exec, r3).c_str());
+ printf("[%4d] instanceof\t\t %s, %s, %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), registerName(exec, r2).data(), registerName(exec, r3).data());
break;
}
case op_typeof: {
case op_resolve: {
int r0 = (++it)->u.operand;
int id0 = (++it)->u.operand;
- printf("[%4d] resolve\t\t %s, %s\n", location, registerName(exec, r0).c_str(), idName(id0, m_identifiers[id0]).c_str());
+ printf("[%4d] resolve\t\t %s, %s\n", location, registerName(exec, r0).data(), idName(id0, m_identifiers[id0]).data());
break;
}
case op_resolve_skip: {
int r0 = (++it)->u.operand;
int id0 = (++it)->u.operand;
int skipLevels = (++it)->u.operand;
- printf("[%4d] resolve_skip\t %s, %s, %d\n", location, registerName(exec, r0).c_str(), idName(id0, m_identifiers[id0]).c_str(), skipLevels);
+ printf("[%4d] resolve_skip\t %s, %s, %d\n", location, registerName(exec, r0).data(), idName(id0, m_identifiers[id0]).data(), skipLevels);
break;
}
case op_resolve_global: {
int r0 = (++it)->u.operand;
JSValue scope = JSValue((++it)->u.jsCell);
int id0 = (++it)->u.operand;
- printf("[%4d] resolve_global\t %s, %s, %s\n", location, registerName(exec, r0).c_str(), valueToSourceString(exec, scope).ascii(), idName(id0, m_identifiers[id0]).c_str());
+ printf("[%4d] resolve_global\t %s, %s, %s\n", location, registerName(exec, r0).data(), valueToSourceString(exec, scope).ascii(), idName(id0, m_identifiers[id0]).data());
it += 2;
break;
}
+ case op_resolve_global_dynamic: {
+ int r0 = (++it)->u.operand;
+ JSValue scope = JSValue((++it)->u.jsCell);
+ int id0 = (++it)->u.operand;
+ int depth = it[2].u.operand;
+ printf("[%4d] resolve_global_dynamic\t %s, %s, %s, %d\n", location, registerName(exec, r0).data(), valueToSourceString(exec, scope).ascii(), idName(id0, m_identifiers[id0]).data(), depth);
+ it += 3;
+ break;
+ }
case op_get_scoped_var: {
int r0 = (++it)->u.operand;
int index = (++it)->u.operand;
int skipLevels = (++it)->u.operand;
- printf("[%4d] get_scoped_var\t %s, %d, %d\n", location, registerName(exec, r0).c_str(), index, skipLevels);
+ printf("[%4d] get_scoped_var\t %s, %d, %d\n", location, registerName(exec, r0).data(), index, skipLevels);
break;
}
case op_put_scoped_var: {
int index = (++it)->u.operand;
int skipLevels = (++it)->u.operand;
int r0 = (++it)->u.operand;
- printf("[%4d] put_scoped_var\t %d, %d, %s\n", location, index, skipLevels, registerName(exec, r0).c_str());
+ printf("[%4d] put_scoped_var\t %d, %d, %s\n", location, index, skipLevels, registerName(exec, r0).data());
break;
}
case op_get_global_var: {
int r0 = (++it)->u.operand;
JSValue scope = JSValue((++it)->u.jsCell);
int index = (++it)->u.operand;
- printf("[%4d] get_global_var\t %s, %s, %d\n", location, registerName(exec, r0).c_str(), valueToSourceString(exec, scope).ascii(), index);
+ printf("[%4d] get_global_var\t %s, %s, %d\n", location, registerName(exec, r0).data(), valueToSourceString(exec, scope).ascii(), index);
break;
}
case op_put_global_var: {
JSValue scope = JSValue((++it)->u.jsCell);
int index = (++it)->u.operand;
int r0 = (++it)->u.operand;
- printf("[%4d] put_global_var\t %s, %d, %s\n", location, valueToSourceString(exec, scope).ascii(), index, registerName(exec, r0).c_str());
+ printf("[%4d] put_global_var\t %s, %d, %s\n", location, valueToSourceString(exec, scope).ascii(), index, registerName(exec, r0).data());
break;
}
case op_resolve_base: {
int r0 = (++it)->u.operand;
int id0 = (++it)->u.operand;
- printf("[%4d] resolve_base\t %s, %s\n", location, registerName(exec, r0).c_str(), idName(id0, m_identifiers[id0]).c_str());
+ printf("[%4d] resolve_base\t %s, %s\n", location, registerName(exec, r0).data(), idName(id0, m_identifiers[id0]).data());
break;
}
case op_resolve_with_base: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int id0 = (++it)->u.operand;
- printf("[%4d] resolve_with_base %s, %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), idName(id0, m_identifiers[id0]).c_str());
+ printf("[%4d] resolve_with_base %s, %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), idName(id0, m_identifiers[id0]).data());
break;
}
case op_get_by_id: {
printGetByIdOp(exec, location, it, "get_by_id_chain");
break;
}
+ case op_get_by_id_getter_self: {
+ printGetByIdOp(exec, location, it, "get_by_id_getter_self");
+ break;
+ }
+ case op_get_by_id_getter_self_list: {
+ printGetByIdOp(exec, location, it, "get_by_id_getter_self_list");
+ break;
+ }
+ case op_get_by_id_getter_proto: {
+ printGetByIdOp(exec, location, it, "get_by_id_getter_proto");
+ break;
+ }
+ case op_get_by_id_getter_proto_list: {
+ printGetByIdOp(exec, location, it, "get_by_id_getter_proto_list");
+ break;
+ }
+ case op_get_by_id_getter_chain: {
+ printGetByIdOp(exec, location, it, "get_by_id_getter_chain");
+ break;
+ }
+ case op_get_by_id_custom_self: {
+ printGetByIdOp(exec, location, it, "get_by_id_custom_self");
+ break;
+ }
+ case op_get_by_id_custom_self_list: {
+ printGetByIdOp(exec, location, it, "get_by_id_custom_self_list");
+ break;
+ }
+ case op_get_by_id_custom_proto: {
+ printGetByIdOp(exec, location, it, "get_by_id_custom_proto");
+ break;
+ }
+ case op_get_by_id_custom_proto_list: {
+ printGetByIdOp(exec, location, it, "get_by_id_custom_proto_list");
+ break;
+ }
+ case op_get_by_id_custom_chain: {
+ printGetByIdOp(exec, location, it, "get_by_id_custom_chain");
+ break;
+ }
case op_get_by_id_generic: {
printGetByIdOp(exec, location, it, "get_by_id_generic");
break;
int r0 = (++it)->u.operand;
int id0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] put_getter\t %s, %s, %s\n", location, registerName(exec, r0).c_str(), idName(id0, m_identifiers[id0]).c_str(), registerName(exec, r1).c_str());
+ printf("[%4d] put_getter\t %s, %s, %s\n", location, registerName(exec, r0).data(), idName(id0, m_identifiers[id0]).data(), registerName(exec, r1).data());
break;
}
case op_put_setter: {
int r0 = (++it)->u.operand;
int id0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] put_setter\t %s, %s, %s\n", location, registerName(exec, r0).c_str(), idName(id0, m_identifiers[id0]).c_str(), registerName(exec, r1).c_str());
+ printf("[%4d] put_setter\t %s, %s, %s\n", location, registerName(exec, r0).data(), idName(id0, m_identifiers[id0]).data(), registerName(exec, r1).data());
break;
}
case op_method_check: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int id0 = (++it)->u.operand;
- printf("[%4d] del_by_id\t %s, %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), idName(id0, m_identifiers[id0]).c_str());
+ printf("[%4d] del_by_id\t %s, %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), idName(id0, m_identifiers[id0]).data());
break;
}
case op_get_by_val: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int r2 = (++it)->u.operand;
- printf("[%4d] get_by_val\t %s, %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), registerName(exec, r2).c_str());
+ printf("[%4d] get_by_val\t %s, %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), registerName(exec, r2).data());
break;
}
case op_get_by_pname: {
int r3 = (++it)->u.operand;
int r4 = (++it)->u.operand;
int r5 = (++it)->u.operand;
- printf("[%4d] get_by_pname\t %s, %s, %s, %s, %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), registerName(exec, r2).c_str(), registerName(exec, r3).c_str(), registerName(exec, r4).c_str(), registerName(exec, r5).c_str());
+ printf("[%4d] get_by_pname\t %s, %s, %s, %s, %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), registerName(exec, r2).data(), registerName(exec, r3).data(), registerName(exec, r4).data(), registerName(exec, r5).data());
break;
}
case op_put_by_val: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int r2 = (++it)->u.operand;
- printf("[%4d] put_by_val\t %s, %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), registerName(exec, r2).c_str());
+ printf("[%4d] put_by_val\t %s, %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), registerName(exec, r2).data());
break;
}
case op_del_by_val: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int r2 = (++it)->u.operand;
- printf("[%4d] del_by_val\t %s, %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), registerName(exec, r2).c_str());
+ printf("[%4d] del_by_val\t %s, %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), registerName(exec, r2).data());
break;
}
case op_put_by_index: {
int r0 = (++it)->u.operand;
unsigned n0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] put_by_index\t %s, %u, %s\n", location, registerName(exec, r0).c_str(), n0, registerName(exec, r1).c_str());
+ printf("[%4d] put_by_index\t %s, %u, %s\n", location, registerName(exec, r0).data(), n0, registerName(exec, r1).data());
break;
}
case op_jmp: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int offset = (++it)->u.operand;
- printf("[%4d] jneq_ptr\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), offset, location + offset);
+ printf("[%4d] jneq_ptr\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), offset, location + offset);
break;
}
case op_jnless: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int offset = (++it)->u.operand;
- printf("[%4d] jnless\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), offset, location + offset);
+ printf("[%4d] jnless\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), offset, location + offset);
break;
}
case op_jnlesseq: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int offset = (++it)->u.operand;
- printf("[%4d] jnlesseq\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), offset, location + offset);
+ printf("[%4d] jnlesseq\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), offset, location + offset);
break;
}
case op_loop_if_less: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int offset = (++it)->u.operand;
- printf("[%4d] loop_if_less\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), offset, location + offset);
+ printf("[%4d] loop_if_less\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), offset, location + offset);
break;
}
case op_jless: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int offset = (++it)->u.operand;
- printf("[%4d] jless\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), offset, location + offset);
+ printf("[%4d] jless\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), offset, location + offset);
+ break;
+ }
+ case op_jlesseq: {
+ int r0 = (++it)->u.operand;
+ int r1 = (++it)->u.operand;
+ int offset = (++it)->u.operand;
+ printf("[%4d] jlesseq\t\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), offset, location + offset);
break;
}
case op_loop_if_lesseq: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int offset = (++it)->u.operand;
- printf("[%4d] loop_if_lesseq\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), offset, location + offset);
+ printf("[%4d] loop_if_lesseq\t %s, %s, %d(->%d)\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), offset, location + offset);
break;
}
case op_switch_imm: {
int tableIndex = (++it)->u.operand;
int defaultTarget = (++it)->u.operand;
int scrutineeRegister = (++it)->u.operand;
- printf("[%4d] switch_imm\t %d, %d(->%d), %s\n", location, tableIndex, defaultTarget, location + defaultTarget, registerName(exec, scrutineeRegister).c_str());
+ printf("[%4d] switch_imm\t %d, %d(->%d), %s\n", location, tableIndex, defaultTarget, location + defaultTarget, registerName(exec, scrutineeRegister).data());
break;
}
case op_switch_char: {
int tableIndex = (++it)->u.operand;
int defaultTarget = (++it)->u.operand;
int scrutineeRegister = (++it)->u.operand;
- printf("[%4d] switch_char\t %d, %d(->%d), %s\n", location, tableIndex, defaultTarget, location + defaultTarget, registerName(exec, scrutineeRegister).c_str());
+ printf("[%4d] switch_char\t %d, %d(->%d), %s\n", location, tableIndex, defaultTarget, location + defaultTarget, registerName(exec, scrutineeRegister).data());
break;
}
case op_switch_string: {
int tableIndex = (++it)->u.operand;
int defaultTarget = (++it)->u.operand;
int scrutineeRegister = (++it)->u.operand;
- printf("[%4d] switch_string\t %d, %d(->%d), %s\n", location, tableIndex, defaultTarget, location + defaultTarget, registerName(exec, scrutineeRegister).c_str());
+ printf("[%4d] switch_string\t %d, %d(->%d), %s\n", location, tableIndex, defaultTarget, location + defaultTarget, registerName(exec, scrutineeRegister).data());
break;
}
case op_new_func: {
int r0 = (++it)->u.operand;
int f0 = (++it)->u.operand;
- printf("[%4d] new_func\t\t %s, f%d\n", location, registerName(exec, r0).c_str(), f0);
+ printf("[%4d] new_func\t\t %s, f%d\n", location, registerName(exec, r0).data(), f0);
break;
}
case op_new_func_exp: {
int r0 = (++it)->u.operand;
int f0 = (++it)->u.operand;
- printf("[%4d] new_func_exp\t %s, f%d\n", location, registerName(exec, r0).c_str(), f0);
+ printf("[%4d] new_func_exp\t %s, f%d\n", location, registerName(exec, r0).data(), f0);
break;
}
case op_call: {
int func = (++it)->u.operand;
int argCount = (++it)->u.operand;
int registerOffset = (++it)->u.operand;
- printf("[%4d] call\t\t %s, %s, %d, %d\n", location, registerName(exec, dst).c_str(), registerName(exec, func).c_str(), argCount, registerOffset);
+ printf("[%4d] call\t\t %s, %s, %d, %d\n", location, registerName(exec, dst).data(), registerName(exec, func).data(), argCount, registerOffset);
break;
}
case op_call_eval: {
int func = (++it)->u.operand;
int argCount = (++it)->u.operand;
int registerOffset = (++it)->u.operand;
- printf("[%4d] call_eval\t %s, %s, %d, %d\n", location, registerName(exec, dst).c_str(), registerName(exec, func).c_str(), argCount, registerOffset);
+ printf("[%4d] call_eval\t %s, %s, %d, %d\n", location, registerName(exec, dst).data(), registerName(exec, func).data(), argCount, registerOffset);
break;
}
case op_call_varargs: {
int func = (++it)->u.operand;
int argCount = (++it)->u.operand;
int registerOffset = (++it)->u.operand;
- printf("[%4d] call_varargs\t %s, %s, %s, %d\n", location, registerName(exec, dst).c_str(), registerName(exec, func).c_str(), registerName(exec, argCount).c_str(), registerOffset);
+ printf("[%4d] call_varargs\t %s, %s, %s, %d\n", location, registerName(exec, dst).data(), registerName(exec, func).data(), registerName(exec, argCount).data(), registerOffset);
break;
}
case op_load_varargs: {
}
case op_tear_off_activation: {
int r0 = (++it)->u.operand;
- printf("[%4d] tear_off_activation\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] tear_off_activation\t %s\n", location, registerName(exec, r0).data());
break;
}
case op_tear_off_arguments: {
}
case op_ret: {
int r0 = (++it)->u.operand;
- printf("[%4d] ret\t\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] ret\t\t %s\n", location, registerName(exec, r0).data());
break;
}
case op_construct: {
int registerOffset = (++it)->u.operand;
int proto = (++it)->u.operand;
int thisRegister = (++it)->u.operand;
- printf("[%4d] construct\t %s, %s, %d, %d, %s, %s\n", location, registerName(exec, dst).c_str(), registerName(exec, func).c_str(), argCount, registerOffset, registerName(exec, proto).c_str(), registerName(exec, thisRegister).c_str());
+ printf("[%4d] construct\t %s, %s, %d, %d, %s, %s\n", location, registerName(exec, dst).data(), registerName(exec, func).data(), argCount, registerOffset, registerName(exec, proto).data(), registerName(exec, thisRegister).data());
break;
}
case op_construct_verify: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] construct_verify\t %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str());
+ printf("[%4d] construct_verify\t %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data());
break;
}
case op_strcat: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
int count = (++it)->u.operand;
- printf("[%4d] strcat\t\t %s, %s, %d\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), count);
+ printf("[%4d] strcat\t\t %s, %s, %d\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), count);
break;
}
case op_to_primitive: {
int r0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] to_primitive\t %s, %s\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str());
+ printf("[%4d] to_primitive\t %s, %s\n", location, registerName(exec, r0).data(), registerName(exec, r1).data());
break;
}
case op_get_pnames: {
int r2 = it[3].u.operand;
int r3 = it[4].u.operand;
int offset = it[5].u.operand;
- printf("[%4d] get_pnames\t %s, %s, %s, %s, %d(->%d)\n", location, registerName(exec, r0).c_str(), registerName(exec, r1).c_str(), registerName(exec, r2).c_str(), registerName(exec, r3).c_str(), offset, location + offset);
+ printf("[%4d] get_pnames\t %s, %s, %s, %s, %d(->%d)\n", location, registerName(exec, r0).data(), registerName(exec, r1).data(), registerName(exec, r2).data(), registerName(exec, r3).data(), offset, location + offset);
it += OPCODE_LENGTH(op_get_pnames) - 1;
break;
}
int dest = it[1].u.operand;
int iter = it[4].u.operand;
int offset = it[5].u.operand;
- printf("[%4d] next_pname\t %s, %s, %d(->%d)\n", location, registerName(exec, dest).c_str(), registerName(exec, iter).c_str(), offset, location + offset);
+ printf("[%4d] next_pname\t %s, %s, %d(->%d)\n", location, registerName(exec, dest).data(), registerName(exec, iter).data(), offset, location + offset);
it += OPCODE_LENGTH(op_next_pname) - 1;
break;
}
case op_push_scope: {
int r0 = (++it)->u.operand;
- printf("[%4d] push_scope\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] push_scope\t %s\n", location, registerName(exec, r0).data());
break;
}
case op_pop_scope: {
int r0 = (++it)->u.operand;
int id0 = (++it)->u.operand;
int r1 = (++it)->u.operand;
- printf("[%4d] push_new_scope \t%s, %s, %s\n", location, registerName(exec, r0).c_str(), idName(id0, m_identifiers[id0]).c_str(), registerName(exec, r1).c_str());
+ printf("[%4d] push_new_scope \t%s, %s, %s\n", location, registerName(exec, r0).data(), idName(id0, m_identifiers[id0]).data(), registerName(exec, r1).data());
break;
}
case op_jmp_scopes: {
}
case op_catch: {
int r0 = (++it)->u.operand;
- printf("[%4d] catch\t\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] catch\t\t %s\n", location, registerName(exec, r0).data());
break;
}
case op_throw: {
int r0 = (++it)->u.operand;
- printf("[%4d] throw\t\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] throw\t\t %s\n", location, registerName(exec, r0).data());
break;
}
case op_new_error: {
int r0 = (++it)->u.operand;
int errorType = (++it)->u.operand;
int k0 = (++it)->u.operand;
- printf("[%4d] new_error\t %s, %d, %s\n", location, registerName(exec, r0).c_str(), errorType, constantName(exec, k0, getConstant(k0)).c_str());
+ printf("[%4d] new_error\t %s, %d, %s\n", location, registerName(exec, r0).data(), errorType, constantName(exec, k0, getConstant(k0)).data());
break;
}
case op_jsr: {
int retAddrDst = (++it)->u.operand;
int offset = (++it)->u.operand;
- printf("[%4d] jsr\t\t %s, %d(->%d)\n", location, registerName(exec, retAddrDst).c_str(), offset, location + offset);
+ printf("[%4d] jsr\t\t %s, %d(->%d)\n", location, registerName(exec, retAddrDst).data(), offset, location + offset);
break;
}
case op_sret: {
int retAddrSrc = (++it)->u.operand;
- printf("[%4d] sret\t\t %s\n", location, registerName(exec, retAddrSrc).c_str());
+ printf("[%4d] sret\t\t %s\n", location, registerName(exec, retAddrSrc).data());
break;
}
case op_debug: {
}
case op_profile_will_call: {
int function = (++it)->u.operand;
- printf("[%4d] profile_will_call %s\n", location, registerName(exec, function).c_str());
+ printf("[%4d] profile_will_call %s\n", location, registerName(exec, function).data());
break;
}
case op_profile_did_call: {
int function = (++it)->u.operand;
- printf("[%4d] profile_did_call\t %s\n", location, registerName(exec, function).c_str());
+ printf("[%4d] profile_did_call\t %s\n", location, registerName(exec, function).data());
break;
}
case op_end: {
int r0 = (++it)->u.operand;
- printf("[%4d] end\t\t %s\n", location, registerName(exec, r0).c_str());
+ printf("[%4d] end\t\t %s\n", location, registerName(exec, r0).data());
break;
}
}
#endif
, m_needsFullScopeChain(ownerExecutable->needsActivation())
, m_usesEval(ownerExecutable->usesEval())
+ , m_usesArguments(false)
, m_isNumericCompareFunction(false)
, m_codeType(codeType)
, m_source(sourceProvider)
CodeBlock::~CodeBlock()
{
-#if !ENABLE(JIT)
+#if ENABLE(INTERPRETER)
for (size_t size = m_globalResolveInstructions.size(), i = 0; i < size; ++i)
derefStructures(&m_instructions[m_globalResolveInstructions[i]]);
for (size_t size = m_propertyAccessInstructions.size(), i = 0; i < size; ++i)
derefStructures(&m_instructions[m_propertyAccessInstructions[i]]);
-#else
+#endif
+#if ENABLE(JIT)
for (size_t size = m_globalResolveInfos.size(), i = 0; i < size; ++i) {
if (m_globalResolveInfos[i].structure)
m_globalResolveInfos[i].structure->deref();
unlinkCallers();
#endif
-#endif // !ENABLE(JIT)
+#endif // ENABLE(JIT)
#if DUMP_CODE_BLOCK_STATISTICS
liveCodeBlockSet.remove(this);
{
Interpreter* interpreter = m_globalData->interpreter;
- if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_self)) {
+ if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_self) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_getter_self) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_custom_self)) {
vPC[4].u.structure->deref();
return;
}
- if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_proto)) {
+ if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_proto) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_getter_proto) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_custom_proto)) {
vPC[4].u.structure->deref();
vPC[5].u.structure->deref();
return;
}
- if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_chain)) {
+ if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_chain) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_getter_chain) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_custom_chain)) {
vPC[4].u.structure->deref();
vPC[5].u.structureChain->deref();
return;
vPC[4].u.structure->deref();
return;
}
- if (vPC[0].u.opcode == interpreter->getOpcode(op_resolve_global)) {
+ if (vPC[0].u.opcode == interpreter->getOpcode(op_resolve_global) || vPC[0].u.opcode == interpreter->getOpcode(op_resolve_global_dynamic)) {
if(vPC[4].u.structure)
vPC[4].u.structure->deref();
return;
}
if ((vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_proto_list))
- || (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_self_list))) {
+ || (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_self_list))
+ || (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_getter_proto_list))
+ || (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_getter_self_list))
+ || (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_custom_proto_list))
+ || (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_custom_self_list))) {
PolymorphicAccessStructureList* polymorphicStructures = vPC[4].u.polymorphicStructures;
polymorphicStructures->derefStructures(vPC[5].u.operand);
delete polymorphicStructures;
{
Interpreter* interpreter = m_globalData->interpreter;
- if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_self)) {
+ if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_self) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_getter_self) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_custom_self)) {
vPC[4].u.structure->ref();
return;
}
- if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_proto)) {
+ if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_proto) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_getter_proto) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_custom_proto)) {
vPC[4].u.structure->ref();
vPC[5].u.structure->ref();
return;
}
- if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_chain)) {
+ if (vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_chain) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_getter_chain) || vPC[0].u.opcode == interpreter->getOpcode(op_get_by_id_custom_chain)) {
vPC[4].u.structure->ref();
vPC[5].u.structureChain->ref();
return;
}
#endif
-#if !ENABLE(JIT)
+#if ENABLE(INTERPRETER)
bool CodeBlock::hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset)
{
if (m_globalResolveInstructions.isEmpty())
return false;
return true;
}
-#else
+#endif
+#if ENABLE(JIT)
bool CodeBlock::hasGlobalResolveInfoAtBytecodeOffset(unsigned bytecodeOffset)
{
if (m_globalResolveInfos.isEmpty())
{
m_instructions.shrinkToFit();
-#if !ENABLE(JIT)
+#if ENABLE(INTERPRETER)
m_propertyAccessInstructions.shrinkToFit();
m_globalResolveInstructions.shrinkToFit();
-#else
+#endif
+#if ENABLE(JIT)
m_structureStubInfos.shrinkToFit();
m_globalResolveInfos.shrinkToFit();
m_callLinkInfos.shrinkToFit();
#include "JSGlobalObject.h"
#include "JumpTable.h"
#include "Nodes.h"
-#include "PtrAndFlags.h"
#include "RegExp.h"
#include "UString.h"
#include <wtf/FastAllocBase.h>
struct CallLinkInfo {
CallLinkInfo()
: callee(0)
+ , position(0)
+ , hasSeenShouldRepatch(0)
{
}
-
+
unsigned bytecodeIndex;
CodeLocationNearCall callReturnLocation;
CodeLocationDataLabelPtr hotPathBegin;
CodeLocationNearCall hotPathOther;
- PtrAndFlags<CodeBlock, HasSeenShouldRepatch> ownerCodeBlock;
+ CodeBlock* ownerCodeBlock;
CodeBlock* callee;
- unsigned position;
+ unsigned position : 31;
+ unsigned hasSeenShouldRepatch : 1;
void setUnlinked() { callee = 0; }
bool isLinked() { return callee; }
bool seenOnce()
{
- return ownerCodeBlock.isFlagSet(hasSeenShouldRepatch);
+ return hasSeenShouldRepatch;
}
void setSeen()
{
- ownerCodeBlock.setFlag(hasSeenShouldRepatch);
+ hasSeenShouldRepatch = true;
}
};
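// A minimal sketch (illustrative only, not part of the patch) of the packing idiom adopted
// above: instead of borrowing low pointer bits via PtrAndFlags, the boolean lives in a
// one-bit field next to a 31-bit index, so the struct keeps its size while the pointer
// stays untouched. The struct name is hypothetical.
struct PackedFlagSketch {
    void* ownerCodeBlock;              // full-width pointer, no tag bits stolen
    unsigned position : 31;            // 31 bits is ample for the repatch position
    unsigned hasSeenShouldRepatch : 1; // the flag formerly folded into the pointer
};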
struct MethodCallLinkInfo {
MethodCallLinkInfo()
: cachedStructure(0)
+ , cachedPrototypeStructure(0)
{
}
bool seenOnce()
{
- return cachedPrototypeStructure.isFlagSet(hasSeenShouldRepatch);
+ ASSERT(!cachedStructure);
+ return cachedPrototypeStructure;
}
void setSeen()
{
- cachedPrototypeStructure.setFlag(hasSeenShouldRepatch);
+ ASSERT(!cachedStructure && !cachedPrototypeStructure);
+ // We use the values of cachedStructure & cachedPrototypeStructure to indicate the
+ // current state.
+ // - In the initial state, both are null.
+ // - After this transition has been taken once, cachedStructure is
+ // null and cachedPrototypeStructure is set to a non-null value.
+ // - Once the call is linked, both structures are set to non-null values.
+ cachedPrototypeStructure = (Structure*)1;
}
CodeLocationCall callReturnLocation;
CodeLocationDataLabelPtr structureLabel;
Structure* cachedStructure;
- PtrAndFlags<Structure, HasSeenShouldRepatch> cachedPrototypeStructure;
+ Structure* cachedPrototypeStructure;
};
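// A minimal sketch (illustrative, hypothetical names) of the three-state encoding that
// seenOnce()/setSeen() above rely on: both pointers null means the transition has never
// been taken, a sentinel in the prototype slot means it has been taken once, and two
// non-null structures mean the call is fully linked.
struct SentinelStateSketch {
    SentinelStateSketch() : cachedStructure(0), cachedPrototypeStructure(0) { }
    bool seenOnce() const { return !cachedStructure && cachedPrototypeStructure; }
    void setSeen() { cachedPrototypeStructure = reinterpret_cast<void*>(1); } // same trick as (Structure*)1
    bool isLinked() const { return cachedStructure && cachedPrototypeStructure; }
    void* cachedStructure;
    void* cachedPrototypeStructure;
};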
struct FunctionRegisterInfo {
bool functionRegisterForBytecodeOffset(unsigned bytecodeOffset, int& functionRegisterIndex);
#endif
+#if ENABLE(INTERPRETER)
+ unsigned bytecodeOffset(CallFrame*, Instruction* returnAddress)
+ {
+ return static_cast<Instruction*>(returnAddress) - instructions().begin();
+ }
+#endif
void setIsNumericCompareFunction(bool isNumericCompareFunction) { m_isNumericCompareFunction = isNumericCompareFunction; }
bool isNumericCompareFunction() { return m_isNumericCompareFunction; }
unsigned jumpTarget(int index) const { return m_jumpTargets[index]; }
unsigned lastJumpTarget() const { return m_jumpTargets.last(); }
-#if !ENABLE(JIT)
+#if ENABLE(INTERPRETER)
void addPropertyAccessInstruction(unsigned propertyAccessInstruction) { m_propertyAccessInstructions.append(propertyAccessInstruction); }
void addGlobalResolveInstruction(unsigned globalResolveInstruction) { m_globalResolveInstructions.append(globalResolveInstruction); }
bool hasGlobalResolveInstructionAtBytecodeOffset(unsigned bytecodeOffset);
-#else
+#endif
+#if ENABLE(JIT)
size_t numberOfStructureStubInfos() const { return m_structureStubInfos.size(); }
void addStructureStubInfo(const StructureStubInfo& stubInfo) { m_structureStubInfos.append(stubInfo); }
StructureStubInfo& structureStubInfo(int index) { return m_structureStubInfos[index]; }
RefPtr<SourceProvider> m_source;
unsigned m_sourceOffset;
-#if !ENABLE(JIT)
+#if ENABLE(INTERPRETER)
Vector<unsigned> m_propertyAccessInstructions;
Vector<unsigned> m_globalResolveInstructions;
-#else
+#endif
+#if ENABLE(JIT)
Vector<StructureStubInfo> m_structureStubInfos;
Vector<GlobalResolveInfo> m_globalResolveInfos;
Vector<CallLinkInfo> m_callLinkInfos;
#include "UString.h"
#include <wtf/HashMap.h>
#include <wtf/RefPtr.h>
+#include <wtf/text/StringHash.h>
namespace JSC {
bool isEmpty() const { return m_cacheMap.isEmpty(); }
private:
- static const int maxCacheableSourceLength = 256;
+ static const unsigned maxCacheableSourceLength = 256;
static const int maxCacheEntries = 64;
typedef HashMap<RefPtr<UString::Rep>, RefPtr<EvalExecutable> > EvalCacheMap;
#include "MacroAssembler.h"
#include "Opcode.h"
+#include "PropertySlot.h"
#include "Structure.h"
#include <wtf/VectorTraits.h>
struct Instruction {
Instruction(Opcode opcode)
{
-#if !HAVE(COMPUTED_GOTO)
+#if !ENABLE(COMPUTED_GOTO_INTERPRETER)
// We have to initialize one of the pointer members to ensure that
// the entire struct is initialized, when opcode is not a pointer.
u.jsCell = 0;
Instruction(StructureChain* structureChain) { u.structureChain = structureChain; }
Instruction(JSCell* jsCell) { u.jsCell = jsCell; }
Instruction(PolymorphicAccessStructureList* polymorphicStructures) { u.polymorphicStructures = polymorphicStructures; }
+ Instruction(PropertySlot::GetValueFunc getterFunc) { u.getterFunc = getterFunc; }
union {
Opcode opcode;
StructureChain* structureChain;
JSCell* jsCell;
PolymorphicAccessStructureList* polymorphicStructures;
+ PropertySlot::GetValueFunc getterFunc;
} u;
};
#include "config.h"
#include "JumpTable.h"
+#include <wtf/text/StringHash.h>
+
namespace JSC {
int32_t SimpleJumpTable::offsetForValue(int32_t value, int32_t defaultOffset)
macro(op_resolve, 3) \
macro(op_resolve_skip, 4) \
macro(op_resolve_global, 6) \
+ macro(op_resolve_global_dynamic, 7) \
macro(op_get_scoped_var, 4) \
macro(op_put_scoped_var, 4) \
macro(op_get_global_var, 4) \
macro(op_get_by_id_proto, 8) \
macro(op_get_by_id_proto_list, 8) \
macro(op_get_by_id_chain, 8) \
+ macro(op_get_by_id_getter_self, 8) \
+ macro(op_get_by_id_getter_self_list, 8) \
+ macro(op_get_by_id_getter_proto, 8) \
+ macro(op_get_by_id_getter_proto_list, 8) \
+ macro(op_get_by_id_getter_chain, 8) \
+ macro(op_get_by_id_custom_self, 8) \
+ macro(op_get_by_id_custom_self_list, 8) \
+ macro(op_get_by_id_custom_proto, 8) \
+ macro(op_get_by_id_custom_proto_list, 8) \
+ macro(op_get_by_id_custom_chain, 8) \
macro(op_get_by_id_generic, 8) \
macro(op_get_array_length, 8) \
macro(op_get_string_length, 8) \
- macro(op_put_by_id, 8) \
- macro(op_put_by_id_transition, 8) \
- macro(op_put_by_id_replace, 8) \
- macro(op_put_by_id_generic, 8) \
+ macro(op_put_by_id, 9) \
+ macro(op_put_by_id_transition, 9) \
+ macro(op_put_by_id_replace, 9) \
+ macro(op_put_by_id_generic, 9) \
macro(op_del_by_id, 4) \
macro(op_get_by_val, 4) \
macro(op_get_by_pname, 7) \
macro(op_jnless, 4) \
macro(op_jnlesseq, 4) \
macro(op_jless, 4) \
+ macro(op_jlesseq, 4) \
macro(op_jmp_scopes, 3) \
macro(op_loop, 2) \
macro(op_loop_if_true, 3) \
FOR_EACH_OPCODE_ID(VERIFY_OPCODE_ID);
#undef VERIFY_OPCODE_ID
-#if HAVE(COMPUTED_GOTO)
+#if ENABLE(COMPUTED_GOTO_INTERPRETER)
#if COMPILER(RVCT)
typedef void* Opcode;
#else
void SamplingFlags::sample()
{
- uint32_t mask = 1 << 31;
+ uint32_t mask = static_cast<uint32_t>(1 << 31);
unsigned index;
for (index = 0; index < 32; ++index) {
if (blockPercent >= 1) {
//Instruction* code = codeBlock->instructions().begin();
- printf("#%d: %s:%d: %d / %lld (%.3f%%)\n", i + 1, record->m_executable->sourceURL().UTF8String().c_str(), codeBlock->lineNumberForBytecodeOffset(exec, 0), record->m_sampleCount, m_sampleCount, blockPercent);
+ printf("#%d: %s:%d: %d / %lld (%.3f%%)\n", i + 1, record->m_executable->sourceURL().ascii(), codeBlock->lineNumberForBytecodeOffset(exec, 0), record->m_sampleCount, m_sampleCount, blockPercent);
if (i < 10) {
HashMap<unsigned,unsigned> lineCounts;
codeBlock->dump(exec);
instructions().append(target->bind(begin, instructions().size()));
return target;
}
- } else if (m_lastOpcodeID == op_lesseq && !target->isForward()) {
+ } else if (m_lastOpcodeID == op_lesseq) {
int dstIndex;
int src1Index;
int src2Index;
rewindBinaryOp();
size_t begin = instructions().size();
- emitOpcode(op_loop_if_lesseq);
+ emitOpcode(target->isForward() ? op_jlesseq : op_loop_if_lesseq);
instructions().append(src1Index);
instructions().append(src2Index);
instructions().append(target->bind(begin, instructions().size()));
return constantID;
}
-bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, JSObject*& globalObject)
+bool BytecodeGenerator::findScopedProperty(const Identifier& property, int& index, size_t& stackDepth, bool forWriting, bool& requiresDynamicChecks, JSObject*& globalObject)
{
// Cases where we cannot statically optimize the lookup.
if (property == propertyNames().arguments || !canOptimizeNonLocals()) {
}
size_t depth = 0;
-
+ requiresDynamicChecks = false;
ScopeChainIterator iter = m_scopeChain->begin();
ScopeChainIterator end = m_scopeChain->end();
for (; iter != end; ++iter, ++depth) {
globalObject = currentVariableObject;
return true;
}
- if (currentVariableObject->isDynamicScope())
+ bool scopeRequiresDynamicChecks = false;
+ if (currentVariableObject->isDynamicScope(scopeRequiresDynamicChecks))
break;
+ requiresDynamicChecks |= scopeRequiresDynamicChecks;
}
-
// Can't locate the property but we're able to avoid a few lookups.
stackDepth = depth;
index = missingSymbolMarker();
size_t depth = 0;
int index = 0;
JSObject* globalObject = 0;
- if (!findScopedProperty(property, index, depth, false, globalObject) && !globalObject) {
+ bool requiresDynamicChecks = false;
+ if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) && !globalObject) {
// We can't optimise at all :-(
emitOpcode(op_resolve);
instructions().append(dst->index());
#endif
}
- if (index != missingSymbolMarker() && !forceGlobalResolve) {
+ if (index != missingSymbolMarker() && !forceGlobalResolve && !requiresDynamicChecks) {
// Directly index the property lookup across multiple scopes.
return emitGetScopedVar(dst, depth, index, globalObject);
}
#else
m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
- emitOpcode(op_resolve_global);
+ emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
instructions().append(dst->index());
instructions().append(globalObject);
instructions().append(addConstant(property));
instructions().append(0);
instructions().append(0);
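+ // The extra operand appended below is the scope-chain depth that
+ // op_resolve_global_dynamic re-walks at runtime (consumed as vPC[6] in
+ // Interpreter::resolveGlobalDynamic) to check for dynamically added properties.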
+ if (requiresDynamicChecks)
+ instructions().append(depth);
+ return dst;
+ }
+
+ if (requiresDynamicChecks) {
+ // If we get here, we have an eval nested inside a |with|, so just give up.
+ emitOpcode(op_resolve);
+ instructions().append(dst->index());
+ instructions().append(addConstant(property));
return dst;
}
size_t depth = 0;
int index = 0;
JSObject* globalObject = 0;
- findScopedProperty(property, index, depth, false, globalObject);
- if (!globalObject) {
+ bool requiresDynamicChecks = false;
+ findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject);
+ if (!globalObject || requiresDynamicChecks) {
// We can't optimise at all :-(
emitOpcode(op_resolve_base);
instructions().append(dst->index());
size_t depth = 0;
int index = 0;
JSObject* globalObject = 0;
- if (!findScopedProperty(property, index, depth, false, globalObject) || !globalObject) {
+ bool requiresDynamicChecks = false;
+ if (!findScopedProperty(property, index, depth, false, requiresDynamicChecks, globalObject) || !globalObject || requiresDynamicChecks) {
// We can't optimise at all :-(
emitOpcode(op_resolve_with_base);
instructions().append(baseDst->index());
#else
m_codeBlock->addGlobalResolveInstruction(instructions().size());
#endif
- emitOpcode(op_resolve_global);
+ emitOpcode(requiresDynamicChecks ? op_resolve_global_dynamic : op_resolve_global);
instructions().append(propDst->index());
instructions().append(globalObject);
instructions().append(addConstant(property));
instructions().append(0);
instructions().append(0);
+ if (requiresDynamicChecks)
+ instructions().append(depth);
return baseDst;
}
instructions().append(0);
instructions().append(0);
instructions().append(0);
+ instructions().append(0);
+ return value;
+}
+
+RegisterID* BytecodeGenerator::emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value)
+{
+#if ENABLE(JIT)
+ m_codeBlock->addStructureStubInfo(StructureStubInfo(access_put_by_id));
+#else
+ m_codeBlock->addPropertyAccessInstruction(instructions().size());
+#endif
+
+ emitOpcode(op_put_by_id);
+ instructions().append(base->index());
+ instructions().append(addConstant(property));
+ instructions().append(value->index());
+ instructions().append(0);
+ instructions().append(0);
+ instructions().append(0);
+ instructions().append(0);
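+ // Final operand: non-zero appears to flag a direct (own-property) put; __proto__ is the
+ // one property excluded so that assigning it still takes the ordinary put path and sets
+ // the prototype instead of defining an own property.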
+ instructions().append(property != m_globalData->propertyNames->underscoreProto);
return value;
}
void BytecodeGenerator::emitDebugHook(DebugHookID debugHookID, int firstLine, int lastLine)
{
+#if ENABLE(DEBUG_WITH_BREAKPOINT)
+ if (debugHookID != DidReachBreakpoint)
+ return;
+#else
if (!m_shouldEmitDebugHooks)
return;
+#endif
emitOpcode(op_debug);
instructions().append(debugHookID);
instructions().append(firstLine);
UNUSED_PARAM(max);
ASSERT(node->isString());
UString::Rep* clause = static_cast<StringNode*>(node)->value().ustring().rep();
- ASSERT(clause->size() == 1);
+ ASSERT(clause->length() == 1);
- int32_t key = clause->data()[0];
+ int32_t key = clause->characters()[0];
ASSERT(key >= min);
ASSERT(key <= max);
return key - min;
//
// NB: depth does _not_ include the local scope. eg. a depth of 0 refers
// to the scope containing this codeblock.
- bool findScopedProperty(const Identifier&, int& index, size_t& depth, bool forWriting, JSObject*& globalObject);
+ bool findScopedProperty(const Identifier&, int& index, size_t& depth, bool forWriting, bool& includesDynamicScopes, JSObject*& globalObject);
// Returns the register storing "this"
RegisterID* thisRegister() { return &m_thisRegister; }
RegisterID* emitGetById(RegisterID* dst, RegisterID* base, const Identifier& property);
RegisterID* emitPutById(RegisterID* base, const Identifier& property, RegisterID* value);
+ RegisterID* emitDirectPutById(RegisterID* base, const Identifier& property, RegisterID* value);
RegisterID* emitDeleteById(RegisterID* dst, RegisterID* base, const Identifier&);
RegisterID* emitGetByVal(RegisterID* dst, RegisterID* base, RegisterID* property);
RegisterID* emitPutByVal(RegisterID* base, RegisterID* property, RegisterID* value);
void emitOpcode(OpcodeID);
void retrieveLastBinaryOp(int& dstIndex, int& src1Index, int& src2Index);
void retrieveLastUnaryOp(int& dstIndex, int& srcIndex);
- void rewindBinaryOp();
- void rewindUnaryOp();
+ ALWAYS_INLINE void rewindBinaryOp();
+ ALWAYS_INLINE void rewindUnaryOp();
PassRefPtr<Label> emitComplexJumpScopes(Label* target, ControlFlowContext* topScope, ControlFlowContext* bottomScope);
#include "Operations.h"
#include "Parser.h"
#include "PropertyNameArray.h"
+#include "RegExpCache.h"
#include "RegExpObject.h"
#include "SamplingTool.h"
#include <wtf/Assertions.h>
static void substitute(UString& string, const UString& substring)
{
- int position = string.find("%s");
- ASSERT(position != -1);
+ unsigned position = string.find("%s");
+ ASSERT(position != UString::NotFound);
string = makeString(string.substr(0, position), substring, string.substr(position + 2));
}
RegisterID* RegExpNode::emitBytecode(BytecodeGenerator& generator, RegisterID* dst)
{
- RefPtr<RegExp> regExp = RegExp::create(generator.globalData(), m_pattern.ustring(), m_flags.ustring());
+ RefPtr<RegExp> regExp = generator.globalData()->regExpCache()->lookupOrCreate(m_pattern.ustring(), m_flags.ustring());
if (!regExp->isValid())
return emitThrowError(generator, SyntaxError, "Invalid regular expression: %s", regExp->errorMessage());
if (dst == generator.ignoredResult())
switch (p->m_node->m_type) {
case PropertyNode::Constant: {
- generator.emitPutById(newObj.get(), p->m_node->name(), value);
+ generator.emitDirectPutById(newObj.get(), p->m_node->name(), value);
break;
}
case PropertyNode::Getter: {
int index = 0;
size_t depth = 0;
JSObject* globalObject = 0;
- if (generator.findScopedProperty(m_ident, index, depth, false, globalObject) && index != missingSymbolMarker()) {
+ bool requiresDynamicChecks = false;
+ if (generator.findScopedProperty(m_ident, index, depth, false, requiresDynamicChecks, globalObject) && index != missingSymbolMarker() && !requiresDynamicChecks) {
RefPtr<RegisterID> func = generator.emitGetScopedVar(generator.newTemporary(), depth, index, globalObject);
RefPtr<RegisterID> thisRegister = generator.emitLoad(generator.newTemporary(), jsNull());
return generator.emitCall(generator.finalDestination(dst, func.get()), func.get(), thisRegister.get(), m_args, divot(), startOffset(), endOffset());
int index = 0;
size_t depth = 0;
JSObject* globalObject = 0;
- if (generator.findScopedProperty(m_ident, index, depth, true, globalObject) && index != missingSymbolMarker()) {
+ bool requiresDynamicChecks = false;
+ if (generator.findScopedProperty(m_ident, index, depth, true, requiresDynamicChecks, globalObject) && index != missingSymbolMarker() && !requiresDynamicChecks) {
RefPtr<RegisterID> value = generator.emitGetScopedVar(generator.newTemporary(), depth, index, globalObject);
RegisterID* oldValue;
if (dst == generator.ignoredResult()) {
int index = 0;
size_t depth = 0;
JSObject* globalObject = 0;
- if (generator.findScopedProperty(m_ident, index, depth, false, globalObject) && index != missingSymbolMarker()) {
+ bool requiresDynamicChecks = false;
+ if (generator.findScopedProperty(m_ident, index, depth, false, requiresDynamicChecks, globalObject) && index != missingSymbolMarker() && !requiresDynamicChecks) {
RefPtr<RegisterID> propDst = generator.emitGetScopedVar(generator.tempDestination(dst), depth, index, globalObject);
emitPreIncOrDec(generator, propDst.get(), m_operator);
generator.emitPutScopedVar(depth, index, propDst.get(), globalObject);
int index = 0;
size_t depth = 0;
JSObject* globalObject = 0;
- if (generator.findScopedProperty(m_ident, index, depth, true, globalObject) && index != missingSymbolMarker()) {
+ bool requiresDynamicChecks = false;
+ if (generator.findScopedProperty(m_ident, index, depth, true, requiresDynamicChecks, globalObject) && index != missingSymbolMarker() && !requiresDynamicChecks) {
RefPtr<RegisterID> src1 = generator.emitGetScopedVar(generator.tempDestination(dst), depth, index, globalObject);
RegisterID* result = emitReadModifyAssignment(generator, generator.finalDestination(dst, src1.get()), src1.get(), m_right, m_operator, OperandTypes(ResultType::unknownType(), m_right->resultDescriptor()));
generator.emitPutScopedVar(depth, index, result, globalObject);
int index = 0;
size_t depth = 0;
JSObject* globalObject = 0;
- if (generator.findScopedProperty(m_ident, index, depth, true, globalObject) && index != missingSymbolMarker()) {
+ bool requiresDynamicChecks = false;
+ if (generator.findScopedProperty(m_ident, index, depth, true, requiresDynamicChecks, globalObject) && index != missingSymbolMarker() && !requiresDynamicChecks) {
if (dst == generator.ignoredResult())
dst = 0;
RegisterID* value = generator.emitNode(dst, m_right);
}
const UString& value = static_cast<StringNode*>(clauseExpression)->value().ustring();
if (singleCharacterSwitch &= value.size() == 1) {
- int32_t intVal = value.rep()->data()[0];
+ int32_t intVal = value.rep()->characters()[0];
if (intVal < min_num)
min_num = intVal;
if (intVal > max_num)
#include <wtf/Platform.h>
-#if OS(WINDOWS) && !defined(BUILDING_WX__) && !COMPILER(GCC)
+#if !PLATFORM(CHROMIUM) && OS(WINDOWS) && !defined(BUILDING_WX__) && !COMPILER(GCC)
#if defined(BUILDING_JavaScriptCore) || defined(BUILDING_WTF)
#define JS_EXPORTDATA __declspec(dllexport)
#else
#define max max
#define min min
-#if !COMPILER(MSVC7) && !OS(WINCE)
+#if !COMPILER(MSVC7_OR_LOWER) && !OS(WINCE)
// We need to define this before the first #include of stdlib.h or it won't contain rand_s.
#ifndef _CRT_RAND_S
#define _CRT_RAND_S
#include <wtf/DisallowCType.h>
#endif
+#if COMPILER(MSVC)
+#define SKIP_STATIC_CONSTRUCTORS_ON_MSVC 1
+#else
+#define SKIP_STATIC_CONSTRUCTORS_ON_GCC 1
+#endif
+
#if PLATFORM(CHROMIUM)
#if !defined(WTF_USE_V8)
#define WTF_USE_V8 1
print "\nnamespace JSC {\n";
}
my $count = scalar @keys + 1;
+ print "#if ENABLE(JIT)\n";
+ print "#define THUNK_GENERATOR(generator) , generator\n";
+ print "#else\n";
+ print "#define THUNK_GENERATOR(generator)\n";
+ print "#endif\n";
print "\nstatic const struct HashTableValue ${nameEntries}\[$count\] = {\n";
my $i = 0;
foreach my $key (@keys) {
my $firstValue = "";
my $secondValue = "";
+ my $castStr = "";
if ($values[$i]{"type"} eq "Function") {
+ $castStr = "static_cast<NativeFunction>";
$firstValue = $values[$i]{"function"};
$secondValue = $values[$i]{"params"};
} elsif ($values[$i]{"type"} eq "Property") {
+ $castStr = "static_cast<PropertySlot::GetValueFunc>";
$firstValue = $values[$i]{"get"};
$secondValue = $values[$i]{"put"};
} elsif ($values[$i]{"type"} eq "Lexer") {
$firstValue = $values[$i]{"value"};
$secondValue = "0";
}
- print " { \"$key\", $attrs[$i], (intptr_t)$firstValue, (intptr_t)$secondValue },\n";
+ my $thunkGenerator = "0";
+ if ($key eq "charCodeAt") {
+ $thunkGenerator = "charCodeAtThunkGenerator";
+ }
+ if ($key eq "charAt") {
+ $thunkGenerator = "charAtThunkGenerator";
+ }
+ if ($key eq "sqrt") {
+ $thunkGenerator = "sqrtThunkGenerator";
+ }
+ if ($key eq "pow") {
+ $thunkGenerator = "powThunkGenerator";
+ }
+ print " { \"$key\", $attrs[$i], (intptr_t)" . $castStr . "($firstValue), (intptr_t)$secondValue THUNK_GENERATOR($thunkGenerator) },\n";
$i++;
}
- print " { 0, 0, 0, 0 }\n";
+ print " { 0, 0, 0, 0 THUNK_GENERATOR(0) }\n";
print "};\n\n";
+ print "#undef THUNK_GENERATOR\n";
print "extern JSC_CONST_HASHTABLE HashTable $name =\n";
print " \{ $compactSize, $compactHashSizeMask, $nameEntries, 0 \};\n";
print "} // namespace\n";
--- /dev/null
+#! /usr/bin/perl -w
+#
+# Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies)
+# Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Library General Public
+# License as published by the Free Software Foundation; either
+# version 2 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Library General Public License for more details.
+#
+# You should have received a copy of the GNU Library General Public License
+# along with this library; see the file COPYING.LIB. If not, write to
+# the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+# Boston, MA 02110-1301, USA.
+
+use strict;
+use File::Basename;
+use Getopt::Long;
+
+my $usage = basename($0) . " --prefix prefix [--offset offset] file";
+
+my $rtype_template = quotemeta("#rtype#");
+my $offset_template = quotemeta("#offset#");
+my $op_template = quotemeta("#op#");
+
+my $prefix;
+my $offset = 32;
+my $file;
+
+my $getOptionsResult = GetOptions(
+ 'prefix=s' => \$prefix,
+ 'offset=i' => \$offset
+);
+
+$file = $ARGV[0];
+
+die "$usage\n" unless ($prefix and $file);
+
+my $stub_template = "";
+my $stub = "";
+
+my $rtype = "";
+my $op = "";
+
+print STDERR "Creating JIT stubs for $file \n";
+open(IN, $file) or die "No such file $file";
+
+while ( $_ = <IN> ) {
+ if ( /^$prefix\((.*)\)/ ) {
+ $stub_template .= $1 . "\n";
+ }
+ if ( /^DEFINE_STUB_FUNCTION\((.*), (.*)\)/ ) {
+ $stub = $stub_template;
+ $rtype = quotemeta($1);
+ $op = quotemeta($2);
+ $stub =~ s/$offset_template/$offset/g;
+ $stub =~ s/$rtype_template/$rtype/g;
+ $stub =~ s/$op_template/$op/g;
+ $stub =~ s/\\\*/\*/g;
+ print $stub;
+ }
+}
+
+close(IN);
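+# Example invocation (illustrative; file names depend on the port's build system): the input
+# is the JIT stubs source that defines the DEFINE_STUB_FUNCTION and prefix macros, and the
+# generated stubs are written to stdout.
+#
+#   perl <this script> --prefix RVCT --offset 32 JITStubs.cpp > GeneratedJITStubs.h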
--- /dev/null
+# Copyright (C) 2010 Apple Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+# 1. Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# 2. Redistributions in binary form must reproduce the above copyright
+# notice, this list of conditions and the following disclaimer in the
+# documentation and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+types = {
+ "wordchar": { "UseTable" : True, "data": ['_', ('0','9'), ('A', 'Z'), ('a','z')]},
+ "nonwordchar": { "UseTable" : True, "Inverse": "wordchar", "data": ['`', (0, ord('0') - 1), (ord('9') + 1, ord('A') - 1), (ord('Z') + 1, ord('_') - 1), (ord('z') + 1, 0xffff)]},
+ "newline": { "UseTable" : False, "data": ['\n', '\r', 0x2028, 0x2029]},
+ "spaces": { "UseTable" : True, "data": [' ', ('\t', '\r'), 0xa0, 0x1680, 0x180e, 0x2028, 0x2029, 0x202f, 0x205f, 0x3000, (0x2000, 0x200a)]},
+ "nonspaces": { "UseTable" : True, "Inverse": "spaces", "data": [(0, ord('\t') - 1), (ord('\r') + 1, ord(' ') - 1), (ord(' ') + 1, 0x009f), (0x00a1, 0x167f), (0x1681, 0x180d), (0x180f, 0x1fff), (0x200b, 0x2027), (0x202a, 0x202e), (0x2030, 0x205e), (0x2060, 0x2fff), (0x3001, 0xffff)]},
+ "digits": { "UseTable" : False, "data": [('0', '9')]},
+ "nondigits": { "UseTable" : False, "Inverse": "digits", "data": [(0, ord('0') - 1), (ord('9') + 1, 0xffff)] }
+}
+entriesPerLine = 50
+arrays = "";
+functions = "";
+
+for name, classes in types.items():
+ ranges = [];
+ size = 0;
+ for _class in classes["data"]:
+ if type(_class) == str:
+ ranges.append((ord(_class), ord(_class)))
+ elif type(_class) == int:
+ ranges.append((_class, _class))
+ else:
+ (min, max) = _class;
+ if type(min) == str:
+ min = ord(min)
+ if type(max) == str:
+ max = ord(max)
+ if max > 0x7f and min <= 0x7f:
+ ranges.append((min, 0x7f))
+ min = 0x80
+ ranges.append((min,max))
+ ranges.sort();
+
+ if classes["UseTable"] and (not "Inverse" in classes):
+ array = ("static const char _%sData[65536] = {\n" % name);
+ i = 0
+ for (min,max) in ranges:
+ while i < min:
+ i = i + 1
+ array += ('0,')
+ if (i % entriesPerLine == 0) and (i != 0):
+ array += ('\n')
+ while i <= max:
+ i = i + 1
+ if (i == 65536):
+ array += ("1")
+ else:
+ array += ('1,')
+ if (i % entriesPerLine == 0) and (i != 0):
+ array += ('\n')
+ while i < 0xffff:
+ array += ("0,")
+ i = i + 1;
+ if (i % entriesPerLine == 0) and (i != 0):
+ array += ('\n')
+ if i == 0xffff:
+ array += ("0")
+ array += ("\n};\n\n");
+ arrays += array
+
+ # Generate createFunction:
+ function = "";
+ function += ("CharacterClass* %sCreate()\n" % name)
+ function += ("{\n")
+ if classes["UseTable"]:
+ if "Inverse" in classes:
+ function += (" CharacterClass* characterClass = new CharacterClass(CharacterClassTable::create(_%sData, true));\n" % (classes["Inverse"]))
+ else:
+ function += (" CharacterClass* characterClass = new CharacterClass(CharacterClassTable::create(_%sData, false));\n" % (name))
+ else:
+ function += (" CharacterClass* characterClass = new CharacterClass(0);\n")
+ for (min, max) in ranges:
+ if (min == max):
+ if (min > 127):
+ function += (" characterClass->m_matchesUnicode.append(0x%04x);\n" % min)
+ else:
+ function += (" characterClass->m_matches.append(0x%02x);\n" % min)
+ continue
+ if (min > 127) or (max > 127):
+ function += (" characterClass->m_rangesUnicode.append(CharacterRange(0x%04x, 0x%04x));\n" % (min, max))
+ else:
+ function += (" characterClass->m_ranges.append(CharacterRange(0x%02x, 0x%02x));\n" % (min, max))
+ function += (" return characterClass;\n")
+ function += ("}\n\n")
+ functions += function
+
+print(arrays)
+print(functions)
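+# As a concrete example of the output, the "digits" entry above (UseTable False, single
+# range '0'-'9') expands to a create function of roughly this shape:
+#
+#   CharacterClass* digitsCreate()
+#   {
+#       CharacterClass* characterClass = new CharacterClass(0);
+#       characterClass->m_ranges.append(CharacterRange(0x30, 0x39));
+#       return characterClass;
+#   }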
+
+++ /dev/null
-#! /usr/bin/perl -w
-#
-# Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies)
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Library General Public
-# License as published by the Free Software Foundation; either
-# version 2 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Library General Public License for more details.
-#
-# You should have received a copy of the GNU Library General Public License
-# along with this library; see the file COPYING.LIB. If not, write to
-# the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
-# Boston, MA 02110-1301, USA.
-
-use strict;
-
-my $file = $ARGV[0];
-shift;
-
-my $stub_template = "";
-my $stub = "";
-
-my $rtype = "";
-my $op = "";
-
-my $rtype_template = quotemeta("#rtype#");
-my $op_template = quotemeta("#op#");
-
-print STDERR "Creating RVCT stubs for $file \n";
-open(IN, $file) or die "No such file $file";
-
-while ( $_ = <IN> ) {
- if ( /^RVCT\((.*)\)/ ) {
- $stub_template .= $1 . "\n";
- }
- if ( /^DEFINE_STUB_FUNCTION\((.*), (.*)\)/ ) {
- $stub = $stub_template;
- $rtype = quotemeta($1);
- $op = quotemeta($2);
- $stub =~ s/$rtype_template/$rtype/g;
- $stub =~ s/$op_template/$op/g;
- $stub =~ s/\\\*/\*/g;
- print $stub;
- }
-}
-
-close(IN);
// JavaScript in the inspector.
SourceProviderMap::const_iterator end = sourceProviders.end();
for (SourceProviderMap::const_iterator iter = sourceProviders.begin(); iter != end; ++iter)
- sourceParsed(iter->second, SourceCode(iter->first), -1, 0);
+ sourceParsed(iter->second, SourceCode(iter->first), -1, UString());
}
JSValue evaluateInGlobalCallFrame(const UString& script, JSValue& exception, JSGlobalObject* globalObject)
UString DebuggerCallFrame::calculatedFunctionName() const
{
if (!m_callFrame->codeBlock())
- return 0;
+ return UString();
if (!m_callFrame->callee())
return UString();
JSFunction* function = asFunction(m_callFrame->callee());
if (!function)
- return 0;
+ return UString();
return function->calculatedDisplayName(m_callFrame);
}
CallFrame* callerFrame() const { return this[RegisterFile::CallerFrame].callFrame(); }
Arguments* optionalCalleeArguments() const { return this[RegisterFile::OptionalCalleeArguments].arguments(); }
- Instruction* returnPC() const { return this[RegisterFile::ReturnPC].vPC(); }
+#if ENABLE(JIT)
+ ReturnAddressPtr returnPC() const { return ReturnAddressPtr(this[RegisterFile::ReturnPC].vPC()); }
+#endif
+#if ENABLE(INTERPRETER)
+ Instruction* returnVPC() const { return this[RegisterFile::ReturnPC].vPC(); }
+#endif
void setCalleeArguments(JSValue arguments) { static_cast<Register*>(this)[RegisterFile::OptionalCalleeArguments] = arguments; }
void setCallerFrame(CallFrame* callerFrame) { static_cast<Register*>(this)[RegisterFile::CallerFrame] = callerFrame; }
#include "DebuggerCallFrame.h"
#include "EvalCodeCache.h"
#include "ExceptionHelpers.h"
+#include "GetterSetter.h"
#include "GlobalEvalFunction.h"
#include "JSActivation.h"
#include "JSArray.h"
#include "JIT.h"
#endif
+#define WTF_USE_GCC_COMPUTED_GOTO_WORKAROUND (ENABLE(COMPUTED_GOTO_INTERPRETER) && !defined(__llvm__))
+
using namespace std;
namespace JSC {
-
-static ALWAYS_INLINE unsigned bytecodeOffsetForPC(CallFrame* callFrame, CodeBlock* codeBlock, void* pc)
-{
+
#if ENABLE(JIT)
- return codeBlock->getBytecodeIndex(callFrame, ReturnAddressPtr(pc));
-#else
- UNUSED_PARAM(callFrame);
- return static_cast<Instruction*>(pc) - codeBlock->instructions().begin();
+ static ALWAYS_INLINE unsigned bytecodeOffsetForPC(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr pc)
+ {
+ return codeBlock->getBytecodeIndex(callFrame, ReturnAddressPtr(pc));
+ }
+#endif
+#if ENABLE(INTERPRETER)
+ static ALWAYS_INLINE unsigned bytecodeOffsetForPC(CallFrame* callFrame, CodeBlock* codeBlock, Instruction* pc)
+ {
+ UNUSED_PARAM(callFrame);
+ return pc - codeBlock->instructions().begin();
+ }
#endif
-}
// Returns the depth of the scope chain within a given call frame.
static int depth(CodeBlock* codeBlock, ScopeChain& sc)
return sc.localDepth();
}
-#if USE(INTERPRETER)
+#if ENABLE(INTERPRETER)
+static NEVER_INLINE JSValue concatenateStrings(ExecState* exec, Register* strings, unsigned count)
+{
+ return jsString(exec, strings, count);
+}
+
NEVER_INLINE bool Interpreter::resolve(CallFrame* callFrame, Instruction* vPC, JSValue& exceptionValue)
{
int dst = vPC[1].u.operand;
PropertySlot slot(globalObject);
if (globalObject->getPropertySlot(callFrame, ident, slot)) {
JSValue result = slot.getValue(callFrame, ident);
- if (slot.isCacheable() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
+ if (slot.isCacheableValue() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
if (vPC[4].u.structure)
vPC[4].u.structure->deref();
globalObject->structure()->ref();
return false;
}
+NEVER_INLINE bool Interpreter::resolveGlobalDynamic(CallFrame* callFrame, Instruction* vPC, JSValue& exceptionValue)
+{
+ int dst = vPC[1].u.operand;
+ JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(vPC[2].u.jsCell);
+ ASSERT(globalObject->isGlobalObject());
+ int property = vPC[3].u.operand;
+ Structure* structure = vPC[4].u.structure;
+ int offset = vPC[5].u.operand;
+ CodeBlock* codeBlock = callFrame->codeBlock();
+ int skip = vPC[6].u.operand + codeBlock->needsFullScopeChain();
+
+ ScopeChainNode* scopeChain = callFrame->scopeChain();
+ ScopeChainIterator iter = scopeChain->begin();
+ ScopeChainIterator end = scopeChain->end();
+ ASSERT(iter != end);
+ while (skip--) {
+ JSObject* o = *iter;
+ if (o->hasCustomProperties()) {
+ Identifier& ident = codeBlock->identifier(property);
+ do {
+ PropertySlot slot(o);
+ if (o->getPropertySlot(callFrame, ident, slot)) {
+ JSValue result = slot.getValue(callFrame, ident);
+ exceptionValue = callFrame->globalData().exception;
+ if (exceptionValue)
+ return false;
+ callFrame->r(dst) = JSValue(result);
+ return true;
+ }
+ if (iter == end)
+ break;
+ o = *iter;
+ ++iter;
+ } while (true);
+ exceptionValue = createUndefinedVariableError(callFrame, ident, vPC - codeBlock->instructions().begin(), codeBlock);
+ return false;
+ }
+ ++iter;
+ }
+
+ if (structure == globalObject->structure()) {
+ callFrame->r(dst) = JSValue(globalObject->getDirectOffset(offset));
+ return true;
+ }
+
+ Identifier& ident = codeBlock->identifier(property);
+ PropertySlot slot(globalObject);
+ if (globalObject->getPropertySlot(callFrame, ident, slot)) {
+ JSValue result = slot.getValue(callFrame, ident);
+ if (slot.isCacheableValue() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
+ if (vPC[4].u.structure)
+ vPC[4].u.structure->deref();
+ globalObject->structure()->ref();
+ vPC[4] = globalObject->structure();
+ vPC[5] = slot.cachedOffset();
+ callFrame->r(dst) = JSValue(result);
+ return true;
+ }
+
+ exceptionValue = callFrame->globalData().exception;
+ if (exceptionValue)
+ return false;
+ callFrame->r(dst) = JSValue(result);
+ return true;
+ }
+
+ exceptionValue = createUndefinedVariableError(callFrame, ident, vPC - codeBlock->instructions().begin(), codeBlock);
+ return false;
+}
+
NEVER_INLINE void Interpreter::resolveBase(CallFrame* callFrame, Instruction* vPC)
{
int dst = vPC[1].u.operand;
return false;
}
-#endif // USE(INTERPRETER)
+#endif // ENABLE(INTERPRETER)
ALWAYS_INLINE CallFrame* Interpreter::slideRegisterWindowForCall(CodeBlock* newCodeBlock, RegisterFile* registerFile, CallFrame* callFrame, size_t registerOffset, int argc)
{
return CallFrame::create(r);
}
-#if USE(INTERPRETER)
+#if ENABLE(INTERPRETER)
static NEVER_INLINE bool isInvalidParamForIn(CallFrame* callFrame, CodeBlock* codeBlock, const Instruction* vPC, JSValue value, JSValue& exceptionData)
{
if (value.isObject())
return program;
UString programSource = asString(program)->value(callFrame);
+ if (callFrame->hadException())
+ return JSValue();
LiteralParser preparser(callFrame, programSource, LiteralParser::NonStrictJSON);
if (JSValue parsedObject = preparser.tryLiteralParse())
: m_sampleEntryDepth(0)
, m_reentryDepth(0)
{
-#if HAVE(COMPUTED_GOTO)
+#if ENABLE(COMPUTED_GOTO_INTERPRETER)
privateExecute(InitializeAndReturn, 0, 0, 0);
for (int i = 0; i < numOpcodeIDs; ++i)
m_opcodeIDTable.add(m_opcodeTable[i], static_cast<OpcodeID>(i));
-#endif // HAVE(COMPUTED_GOTO)
+#endif // ENABLE(COMPUTED_GOTO_INTERPRETER)
#if ENABLE(OPCODE_SAMPLING)
enableSampler();
bool Interpreter::isOpcode(Opcode opcode)
{
-#if HAVE(COMPUTED_GOTO)
+#if ENABLE(COMPUTED_GOTO_INTERPRETER)
return opcode != HashTraits<Opcode>::emptyValue()
&& !HashTraits<Opcode>::isDeletedValue(opcode)
&& m_opcodeIDTable.contains(opcode);
if (oldCodeBlock->needsFullScopeChain())
scopeChain->deref();
- void* returnPC = callFrame->returnPC();
- callFrame = callFrame->callerFrame();
- if (callFrame->hasHostCallFrameFlag())
+ ExecState* callerFrame = callFrame->callerFrame();
+ if (callerFrame->hasHostCallFrameFlag())
return false;
- codeBlock = callFrame->codeBlock();
- bytecodeOffset = bytecodeOffsetForPC(callFrame, codeBlock, returnPC);
+ codeBlock = callerFrame->codeBlock();
+#if ENABLE(JIT)
+#if ENABLE(INTERPRETER)
+ if (callerFrame->globalData().canUseJIT())
+#endif
+ bytecodeOffset = bytecodeOffsetForPC(callerFrame, codeBlock, callFrame->returnPC());
+#if ENABLE(INTERPRETER)
+ else
+ bytecodeOffset = bytecodeOffsetForPC(callerFrame, codeBlock, callFrame->returnVPC());
+#endif
+#else
+ bytecodeOffset = codeBlock->bytecodeOffset(callerFrame, callFrame->returnVPC());
+#endif
+ callFrame = callerFrame;
return true;
}
exception->putWithAttributes(callFrame, Identifier(callFrame, "sourceId"), jsNumber(callFrame, codeBlock->ownerExecutable()->sourceID()), ReadOnly | DontDelete);
exception->putWithAttributes(callFrame, Identifier(callFrame, "sourceURL"), jsOwnedString(callFrame, codeBlock->ownerExecutable()->sourceURL()), ReadOnly | DontDelete);
}
-
- if (exception->isWatchdogException()) {
+
+ ComplType exceptionType = exception->exceptionType();
+ if (exceptionType == Interrupted || exceptionType == Terminated) {
while (unwindCallFrame(callFrame, exceptionValue, bytecodeOffset, codeBlock)) {
// Don't need handler checks or anything, we just want to unroll all the JS callframes possible.
}
// the profiler manually that the call instruction has returned, since
// we'll never reach the relevant op_profile_did_call.
if (Profiler* profiler = *Profiler::enabledProfilerReference()) {
-#if !ENABLE(JIT)
- if (isCallBytecode(codeBlock->instructions()[bytecodeOffset].u.opcode))
- profiler->didExecute(callFrame, callFrame->r(codeBlock->instructions()[bytecodeOffset + 2].u.operand).jsValue());
- else if (codeBlock->instructions().size() > (bytecodeOffset + 8) && codeBlock->instructions()[bytecodeOffset + 8].u.opcode == getOpcode(op_construct))
- profiler->didExecute(callFrame, callFrame->r(codeBlock->instructions()[bytecodeOffset + 10].u.operand).jsValue());
-#else
- int functionRegisterIndex;
- if (codeBlock->functionRegisterForBytecodeOffset(bytecodeOffset, functionRegisterIndex))
- profiler->didExecute(callFrame, callFrame->r(functionRegisterIndex).jsValue());
+#if ENABLE(INTERPRETER)
+ if (!callFrame->globalData().canUseJIT()) {
+ // FIXME: Why 8? Work out what this magic value is, and replace the constant with something more helpful.
+ if (isCallBytecode(codeBlock->instructions()[bytecodeOffset].u.opcode))
+ profiler->didExecute(callFrame, callFrame->r(codeBlock->instructions()[bytecodeOffset + 1].u.operand).jsValue());
+ else if (codeBlock->instructions().size() > (bytecodeOffset + 8) && codeBlock->instructions()[bytecodeOffset + 8].u.opcode == getOpcode(op_construct))
+ profiler->didExecute(callFrame, callFrame->r(codeBlock->instructions()[bytecodeOffset + 9].u.operand).jsValue());
+ }
+#if ENABLE(JIT)
+ else
+#endif
+#endif
+#if ENABLE(JIT)
+ {
+ int functionRegisterIndex;
+ if (codeBlock->functionRegisterForBytecodeOffset(bytecodeOffset, functionRegisterIndex))
+ profiler->didExecute(callFrame, callFrame->r(functionRegisterIndex).jsValue());
+ }
#endif
}
{
ASSERT(!scopeChain->globalData->exception);
- if (m_reentryDepth >= MaxSecondaryThreadReentryDepth) {
- if (!isMainThread() || m_reentryDepth >= MaxMainThreadReentryDepth) {
+ if (m_reentryDepth >= MaxSmallThreadReentryDepth) {
+ if (m_reentryDepth >= callFrame->globalData().maxReentryDepth) {
*exception = createStackOverflowError(callFrame);
return jsNull();
}
{
SamplingTool::CallRecord callRecord(m_sampler.get());
- m_reentryDepth++;
+ m_reentryDepth++;
#if ENABLE(JIT)
- result = program->jitCode(newCallFrame, scopeChain).execute(&m_registerFile, newCallFrame, scopeChain->globalData, exception);
-#else
- result = privateExecute(Normal, &m_registerFile, newCallFrame, exception);
+#if ENABLE(INTERPRETER)
+ if (callFrame->globalData().canUseJIT())
+#endif
+ result = program->jitCode(newCallFrame, scopeChain).execute(&m_registerFile, newCallFrame, scopeChain->globalData, exception);
+#if ENABLE(INTERPRETER)
+ else
+#endif
#endif
+#if ENABLE(INTERPRETER)
+ result = privateExecute(Normal, &m_registerFile, newCallFrame, exception);
+#endif
+
m_reentryDepth--;
}
{
ASSERT(!scopeChain->globalData->exception);
- if (m_reentryDepth >= MaxSecondaryThreadReentryDepth) {
- if (!isMainThread() || m_reentryDepth >= MaxMainThreadReentryDepth) {
+ if (m_reentryDepth >= MaxSmallThreadReentryDepth) {
+ if (m_reentryDepth >= callFrame->globalData().maxReentryDepth) {
*exception = createStackOverflowError(callFrame);
return jsNull();
}
m_reentryDepth++;
#if ENABLE(JIT)
- result = functionExecutable->jitCode(newCallFrame, scopeChain).execute(&m_registerFile, newCallFrame, scopeChain->globalData, exception);
-#else
+#if ENABLE(INTERPRETER)
+ if (scopeChain->globalData->canUseJIT())
+#endif
+ result = functionExecutable->jitCode(newCallFrame, scopeChain).execute(&m_registerFile, newCallFrame, scopeChain->globalData, exception);
+#if ENABLE(INTERPRETER)
+ else
+#endif
+#endif
+#if ENABLE(INTERPRETER)
result = privateExecute(Normal, &m_registerFile, newCallFrame, exception);
#endif
m_reentryDepth--;
{
ASSERT(!scopeChain->globalData->exception);
- if (m_reentryDepth >= MaxSecondaryThreadReentryDepth) {
- if (!isMainThread() || m_reentryDepth >= MaxMainThreadReentryDepth) {
+ if (m_reentryDepth >= MaxSmallThreadReentryDepth) {
+ if (m_reentryDepth >= callFrame->globalData().maxReentryDepth) {
*exception = createStackOverflowError(callFrame);
return CallFrameClosure();
}
return CallFrameClosure();
}
// a 0 codeBlock indicates a built-in caller
- newCallFrame->init(codeBlock, 0, scopeChain, callFrame->addHostCallFrameFlag(), 0, argc, function);
+ newCallFrame->init(codeBlock, 0, scopeChain, callFrame->addHostCallFrameFlag(), 0, argc, function);
#if ENABLE(JIT)
- FunctionExecutable->jitCode(newCallFrame, scopeChain);
+#if ENABLE(INTERPRETER)
+ if (callFrame->globalData().canUseJIT())
+#endif
+ FunctionExecutable->jitCode(newCallFrame, scopeChain);
#endif
-
CallFrameClosure result = { callFrame, newCallFrame, function, FunctionExecutable, scopeChain->globalData, oldEnd, scopeChain, codeBlock->m_numParameters, argc };
return result;
}
{
SamplingTool::CallRecord callRecord(m_sampler.get());
- m_reentryDepth++;
+ m_reentryDepth++;
#if ENABLE(JIT)
- result = closure.functionExecutable->generatedJITCode().execute(&m_registerFile, closure.newCallFrame, closure.globalData, exception);
-#else
- result = privateExecute(Normal, &m_registerFile, closure.newCallFrame, exception);
+#if ENABLE(INTERPRETER)
+ if (closure.newCallFrame->globalData().canUseJIT())
+#endif
+ result = closure.functionExecutable->generatedJITCode().execute(&m_registerFile, closure.newCallFrame, closure.globalData, exception);
+#if ENABLE(INTERPRETER)
+ else
+#endif
+#endif
+#if ENABLE(INTERPRETER)
+ result = privateExecute(Normal, &m_registerFile, closure.newCallFrame, exception);
#endif
m_reentryDepth--;
}
{
ASSERT(!scopeChain->globalData->exception);
- if (m_reentryDepth >= MaxSecondaryThreadReentryDepth) {
- if (!isMainThread() || m_reentryDepth >= MaxMainThreadReentryDepth) {
+ if (m_reentryDepth >= MaxSmallThreadReentryDepth) {
+ if (m_reentryDepth >= callFrame->globalData().maxReentryDepth) {
*exception = createStackOverflowError(callFrame);
return jsNull();
}
}
}
- { // Scope for BatchedTransitionOptimizer
-
+ unsigned numVariables = codeBlock->numVariables();
+ int numFunctions = codeBlock->numberOfFunctionDecls();
+ if (numVariables || numFunctions) {
+ // Scope for BatchedTransitionOptimizer
BatchedTransitionOptimizer optimizer(variableObject);
- unsigned numVariables = codeBlock->numVariables();
for (unsigned i = 0; i < numVariables; ++i) {
const Identifier& ident = codeBlock->variable(i);
if (!variableObject->hasProperty(callFrame, ident)) {
}
}
- int numFunctions = codeBlock->numberOfFunctionDecls();
for (int i = 0; i < numFunctions; ++i) {
FunctionExecutable* function = codeBlock->functionDecl(i);
PutPropertySlot slot;
variableObject->put(callFrame, function->name(), function->make(callFrame, scopeChain), slot);
}
-
}
Register* oldEnd = m_registerFile.end();
SamplingTool::CallRecord callRecord(m_sampler.get());
m_reentryDepth++;
+
#if ENABLE(JIT)
- result = eval->jitCode(newCallFrame, scopeChain).execute(&m_registerFile, newCallFrame, scopeChain->globalData, exception);
-#else
- result = privateExecute(Normal, &m_registerFile, newCallFrame, exception);
+#if ENABLE(INTERPRETER)
+ if (callFrame->globalData().canUseJIT())
+#endif
+ result = eval->jitCode(newCallFrame, scopeChain).execute(&m_registerFile, newCallFrame, scopeChain->globalData, exception);
+#if ENABLE(INTERPRETER)
+ else
+#endif
+#endif
+#if ENABLE(INTERPRETER)
+ result = privateExecute(Normal, &m_registerFile, newCallFrame, exception);
#endif
m_reentryDepth--;
}
}
}
-#if USE(INTERPRETER)
+#if ENABLE(INTERPRETER)
NEVER_INLINE ScopeChainNode* Interpreter::createExceptionScope(CallFrame* callFrame, const Instruction* vPC)
{
int dst = vPC[1].u.operand;
// Cache hit: Specialize instruction and ref Structures.
if (slot.slotBase() == baseValue) {
- vPC[0] = getOpcode(op_get_by_id_self);
- vPC[5] = slot.cachedOffset();
+ switch (slot.cachedPropertyType()) {
+ case PropertySlot::Getter:
+ vPC[0] = getOpcode(op_get_by_id_getter_self);
+ vPC[5] = slot.cachedOffset();
+ break;
+ case PropertySlot::Custom:
+ vPC[0] = getOpcode(op_get_by_id_custom_self);
+ vPC[5] = slot.customGetter();
+ break;
+ default:
+ vPC[0] = getOpcode(op_get_by_id_self);
+ vPC[5] = slot.cachedOffset();
+ break;
+ }
codeBlock->refStructures(vPC);
return;
}
ASSERT(!baseObject->structure()->isUncacheableDictionary());
-
- vPC[0] = getOpcode(op_get_by_id_proto);
+
+ switch (slot.cachedPropertyType()) {
+ case PropertySlot::Getter:
+ vPC[0] = getOpcode(op_get_by_id_getter_proto);
+ vPC[6] = offset;
+ break;
+ case PropertySlot::Custom:
+ vPC[0] = getOpcode(op_get_by_id_custom_proto);
+ vPC[6] = slot.customGetter();
+ break;
+ default:
+ vPC[0] = getOpcode(op_get_by_id_proto);
+ vPC[6] = offset;
+ break;
+ }
vPC[5] = baseObject->structure();
- vPC[6] = offset;
codeBlock->refStructures(vPC);
return;
return;
}
- vPC[0] = getOpcode(op_get_by_id_chain);
+
+ switch (slot.cachedPropertyType()) {
+ case PropertySlot::Getter:
+ vPC[0] = getOpcode(op_get_by_id_getter_chain);
+ vPC[7] = offset;
+ break;
+ case PropertySlot::Custom:
+ vPC[0] = getOpcode(op_get_by_id_custom_chain);
+ vPC[7] = slot.customGetter();
+ break;
+ default:
+ vPC[0] = getOpcode(op_get_by_id_chain);
+ vPC[7] = offset;
+ break;
+ }
vPC[4] = structure;
vPC[5] = structure->prototypeChain(callFrame);
vPC[6] = count;
- vPC[7] = offset;
codeBlock->refStructures(vPC);
}
vPC[4] = 0;
}
-#endif // USE(INTERPRETER)
+#endif // ENABLE(INTERPRETER)
JSValue Interpreter::privateExecute(ExecutionFlag flag, RegisterFile* registerFile, CallFrame* callFrame, JSValue* exception)
{
// One-time initialization of our address tables. We have to put this code
// here because our labels are only in scope inside this function.
if (UNLIKELY(flag == InitializeAndReturn)) {
- #if HAVE(COMPUTED_GOTO)
+ #if ENABLE(COMPUTED_GOTO_INTERPRETER)
#define LIST_OPCODE_LABEL(id, length) &&id,
static Opcode labels[] = { FOR_EACH_OPCODE_ID(LIST_OPCODE_LABEL) };
for (size_t i = 0; i < sizeof(labels) / sizeof(Opcode); ++i)
m_opcodeTable[i] = labels[i];
#undef LIST_OPCODE_LABEL
- #endif // HAVE(COMPUTED_GOTO)
+ #endif // ENABLE(COMPUTED_GOTO_INTERPRETER)
return JSValue();
}
-
+
#if ENABLE(JIT)
+#if ENABLE(INTERPRETER)
// Mixing Interpreter + JIT is not supported.
- ASSERT_NOT_REACHED();
+ if (callFrame->globalData().canUseJIT())
+#endif
+ ASSERT_NOT_REACHED();
#endif
-#if !USE(INTERPRETER)
+
+#if !ENABLE(INTERPRETER)
UNUSED_PARAM(registerFile);
UNUSED_PARAM(callFrame);
UNUSED_PARAM(exception);
#define CHECK_FOR_TIMEOUT() \
if (!--tickCount) { \
- if (globalData->timeoutChecker.didTimeOut(callFrame)) { \
+ if (globalData->terminator.shouldTerminate() || globalData->timeoutChecker.didTimeOut(callFrame)) { \
exceptionValue = jsNull(); \
goto vm_throw; \
} \
#define SAMPLE(codeBlock, vPC)
#endif
-#if HAVE(COMPUTED_GOTO)
+#if ENABLE(COMPUTED_GOTO_INTERPRETER)
#define NEXT_INSTRUCTION() SAMPLE(callFrame->codeBlock(), vPC); goto *vPC->u.opcode
#if ENABLE(OPCODE_STATS)
#define DEFINE_OPCODE(opcode) opcode: OpcodeStats::recordInstruction(opcode);
int dst = vPC[1].u.operand;
JSValue src1 = callFrame->r(vPC[2].u.operand).jsValue();
JSValue src2 = callFrame->r(vPC[3].u.operand).jsValue();
- callFrame->r(dst) = jsBoolean(JSValue::strictEqual(callFrame, src1, src2));
+ bool result = JSValue::strictEqual(callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION();
+ callFrame->r(dst) = jsBoolean(result);
vPC += OPCODE_LENGTH(op_stricteq);
NEXT_INSTRUCTION();
int dst = vPC[1].u.operand;
JSValue src1 = callFrame->r(vPC[2].u.operand).jsValue();
JSValue src2 = callFrame->r(vPC[3].u.operand).jsValue();
- callFrame->r(dst) = jsBoolean(!JSValue::strictEqual(callFrame, src1, src2));
+ bool result = !JSValue::strictEqual(callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION();
+ callFrame->r(dst) = jsBoolean(result);
vPC += OPCODE_LENGTH(op_nstricteq);
NEXT_INSTRUCTION();
*/
int dst = vPC[1].u.operand;
JSValue src = callFrame->r(vPC[2].u.operand).jsValue();
- if (src.isInt32() && src.asInt32())
+ if (src.isInt32() && (src.asInt32() & 0x7fffffff)) // non-zero and no overflow
callFrame->r(dst) = jsNumber(callFrame, -src.asInt32());
else {
JSValue result = jsNumber(callFrame, -src.toNumber(callFrame));
NEXT_INSTRUCTION();
}
+ DEFINE_OPCODE(op_resolve_global_dynamic) {
+ /* resolve_global_dynamic dst(r) globalObject(c) property(id) structure(sID) offset(n) depth(n)
+
+ Performs a dynamic property lookup for the given property on the provided
+ global object. If the structure matches the Structure of the global object, perform
+ a fast lookup using the cached offset; otherwise fall back to a full resolve and
+ cache the new structure and offset.
+
+ This walks through n levels of the scope chain to verify that none of those levels
+ in the scope chain include dynamically added properties.
+ */
+ if (UNLIKELY(!resolveGlobalDynamic(callFrame, vPC, exceptionValue)))
+ goto vm_throw;
+
+ vPC += OPCODE_LENGTH(op_resolve_global_dynamic);
+
+ NEXT_INSTRUCTION();
+ }
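Editor's note: a hedged sketch of the fast/slow split the comment above describes, using stand-in types rather than the JSC implementation (which lives in resolveGlobalDynamic()):

// Sketch only: on a structure match the cached offset is trusted; otherwise
// a full resolve runs and is expected to refresh the cache.
struct ResolveCacheSketch { const void* structure; size_t offset; };

static bool cachedResolve(const void* currentStructure, ResolveCacheSketch& cache,
                          bool (*fullResolve)(ResolveCacheSketch&))
{
    if (currentStructure == cache.structure)
        return true;            // fast path: use cache.offset
    return fullResolve(cache);  // slow path: re-resolve and update the cache
}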
DEFINE_OPCODE(op_get_global_var) {
/* get_global_var dst(r) globalObject(c) index(n)
scope->registerAt(index) = JSValue(callFrame->r(value).jsValue());
vPC += OPCODE_LENGTH(op_put_global_var);
NEXT_INSTRUCTION();
- }
+ }
DEFINE_OPCODE(op_get_scoped_var) {
/* get_scoped_var dst(r) index(n) skip(n)
++iter;
ASSERT(iter != end);
}
-
ASSERT((*iter)->isVariableObject());
JSVariableObject* scope = static_cast<JSVariableObject*>(*iter);
callFrame->r(dst) = scope->registerAt(index);
uncacheGetByID(callFrame->codeBlock(), vPC);
NEXT_INSTRUCTION();
}
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ goto *(&&skip_id_getter_proto);
+#endif
+ DEFINE_OPCODE(op_get_by_id_getter_proto) {
+ /* op_get_by_id_getter_proto dst(r) base(r) property(id) structure(sID) prototypeStructure(sID) offset(n) nop(n)
+
+ Cached property access: Attempts to get a cached getter property from the
+ value base's prototype. If the cache misses, op_get_by_id_getter_proto
+ reverts to op_get_by_id.
+ */
+ int base = vPC[2].u.operand;
+ JSValue baseValue = callFrame->r(base).jsValue();
+
+ if (LIKELY(baseValue.isCell())) {
+ JSCell* baseCell = asCell(baseValue);
+ Structure* structure = vPC[4].u.structure;
+
+ if (LIKELY(baseCell->structure() == structure)) {
+ ASSERT(structure->prototypeForLookup(callFrame).isObject());
+ JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
+ Structure* prototypeStructure = vPC[5].u.structure;
+
+ if (LIKELY(protoObject->structure() == prototypeStructure)) {
+ int dst = vPC[1].u.operand;
+ int offset = vPC[6].u.operand;
+ if (GetterSetter* getterSetter = asGetterSetter(protoObject->getDirectOffset(offset).asCell())) {
+ JSObject* getter = getterSetter->getter();
+ CallData callData;
+ CallType callType = getter->getCallData(callData);
+ JSValue result = call(callFrame, getter, callType, callData, asObject(baseCell), ArgList());
+ CHECK_FOR_EXCEPTION();
+ callFrame->r(dst) = result;
+ } else
+ callFrame->r(dst) = jsUndefined();
+ vPC += OPCODE_LENGTH(op_get_by_id_getter_proto);
+ NEXT_INSTRUCTION();
+ }
+ }
+ }
+ uncacheGetByID(callFrame->codeBlock(), vPC);
+ NEXT_INSTRUCTION();
+ }
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ skip_id_getter_proto:
+#endif
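Editor's note: for readers less familiar with accessor caching, a small sketch of the call-through behaviour above; a hit invokes the stored getter with the original base as the receiver, and a missing getter falls back to an undefined result (assumed types, not JSC's GetterSetter):

// Sketch only: the cached accessor is invoked against the original base;
// a null getter mirrors the jsUndefined() fallback in the opcode above.
struct AccessorCacheSketch {
    int (*getter)(void* receiver);   // may be null

    int read(void* base) const
    {
        if (!getter)
            return 0;                // stand-in for jsUndefined()
        return getter(base);         // call the cached getter on the base
    }
};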
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ goto *(&&skip_id_custom_proto);
+#endif
+ DEFINE_OPCODE(op_get_by_id_custom_proto) {
+ /* op_get_by_id_custom_proto dst(r) base(r) property(id) structure(sID) prototypeStructure(sID) offset(n) nop(n)
+
+ Cached property access: Attempts to use a cached named property getter
+ from the value base's prototype. If the cache misses, op_get_by_id_custom_proto
+ reverts to op_get_by_id.
+ */
+ int base = vPC[2].u.operand;
+ JSValue baseValue = callFrame->r(base).jsValue();
+
+ if (LIKELY(baseValue.isCell())) {
+ JSCell* baseCell = asCell(baseValue);
+ Structure* structure = vPC[4].u.structure;
+
+ if (LIKELY(baseCell->structure() == structure)) {
+ ASSERT(structure->prototypeForLookup(callFrame).isObject());
+ JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
+ Structure* prototypeStructure = vPC[5].u.structure;
+
+ if (LIKELY(protoObject->structure() == prototypeStructure)) {
+ int dst = vPC[1].u.operand;
+ int property = vPC[3].u.operand;
+ Identifier& ident = callFrame->codeBlock()->identifier(property);
+
+ PropertySlot::GetValueFunc getter = vPC[6].u.getterFunc;
+ JSValue result = getter(callFrame, protoObject, ident);
+ CHECK_FOR_EXCEPTION();
+ callFrame->r(dst) = result;
+ vPC += OPCODE_LENGTH(op_get_by_id_custom_proto);
+ NEXT_INSTRUCTION();
+ }
+ }
+ }
+ uncacheGetByID(callFrame->codeBlock(), vPC);
+ NEXT_INSTRUCTION();
+ }
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ skip_id_custom_proto:
+#endif
DEFINE_OPCODE(op_get_by_id_self_list) {
// Polymorphic self access caching currently only supported when JITting.
ASSERT_NOT_REACHED();
vPC += OPCODE_LENGTH(op_get_by_id_proto_list);
NEXT_INSTRUCTION();
}
+ DEFINE_OPCODE(op_get_by_id_getter_self_list) {
+ // Polymorphic self access caching currently only supported when JITting.
+ ASSERT_NOT_REACHED();
+ // This case of the switch must not be empty, else (op_get_by_id_self_list == op_get_by_id_chain)!
+ vPC += OPCODE_LENGTH(op_get_by_id_self_list);
+ NEXT_INSTRUCTION();
+ }
+ DEFINE_OPCODE(op_get_by_id_getter_proto_list) {
+ // Polymorphic prototype access caching currently only supported when JITting.
+ ASSERT_NOT_REACHED();
+ // This case of the switch must not be empty, else (op_get_by_id_proto_list == op_get_by_id_chain)!
+ vPC += OPCODE_LENGTH(op_get_by_id_proto_list);
+ NEXT_INSTRUCTION();
+ }
+ DEFINE_OPCODE(op_get_by_id_custom_self_list) {
+ // Polymorphic self access caching currently only supported when JITting.
+ ASSERT_NOT_REACHED();
+ // This case of the switch must not be empty, else (op_get_by_id_self_list == op_get_by_id_chain)!
+ vPC += OPCODE_LENGTH(op_get_by_id_custom_self_list);
+ NEXT_INSTRUCTION();
+ }
+ DEFINE_OPCODE(op_get_by_id_custom_proto_list) {
+ // Polymorphic prototype access caching currently only supported when JITting.
+ ASSERT_NOT_REACHED();
+ // This case of the switch must not be empty, else (op_get_by_id_proto_list == op_get_by_id_chain)!
+ vPC += OPCODE_LENGTH(op_get_by_id_proto_list);
+ NEXT_INSTRUCTION();
+ }
DEFINE_OPCODE(op_get_by_id_chain) {
/* op_get_by_id_chain dst(r) base(r) property(id) structure(sID) structureChain(chain) count(n) offset(n)
uncacheGetByID(callFrame->codeBlock(), vPC);
NEXT_INSTRUCTION();
}
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ goto *(&&skip_id_getter_self);
+#endif
+ DEFINE_OPCODE(op_get_by_id_getter_self) {
+ /* op_get_by_id_getter_self dst(r) base(r) property(id) structure(sID) offset(n) nop(n) nop(n)
+
+ Cached property access: Attempts to get a cached getter property from the
+ value base. If the cache misses, op_get_by_id_getter_self reverts to
+ op_get_by_id.
+ */
+ int base = vPC[2].u.operand;
+ JSValue baseValue = callFrame->r(base).jsValue();
+
+ if (LIKELY(baseValue.isCell())) {
+ JSCell* baseCell = asCell(baseValue);
+ Structure* structure = vPC[4].u.structure;
+
+ if (LIKELY(baseCell->structure() == structure)) {
+ ASSERT(baseCell->isObject());
+ JSObject* baseObject = asObject(baseCell);
+ int dst = vPC[1].u.operand;
+ int offset = vPC[5].u.operand;
+
+ if (GetterSetter* getterSetter = asGetterSetter(baseObject->getDirectOffset(offset).asCell())) {
+ JSObject* getter = getterSetter->getter();
+ CallData callData;
+ CallType callType = getter->getCallData(callData);
+ JSValue result = call(callFrame, getter, callType, callData, baseObject, ArgList());
+ CHECK_FOR_EXCEPTION();
+ callFrame->r(dst) = result;
+ } else
+ callFrame->r(dst) = jsUndefined();
+
+ vPC += OPCODE_LENGTH(op_get_by_id_getter_self);
+ NEXT_INSTRUCTION();
+ }
+ }
+ uncacheGetByID(callFrame->codeBlock(), vPC);
+ NEXT_INSTRUCTION();
+ }
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ skip_id_getter_self:
+#endif
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ goto *(&&skip_id_custom_self);
+#endif
+ DEFINE_OPCODE(op_get_by_id_custom_self) {
+ /* op_get_by_id_custom_self dst(r) base(r) property(id) structure(sID) offset(n) nop(n) nop(n)
+
+ Cached property access: Attempts to use a cached named property getter
+ from the value base. If the cache misses, op_get_by_id_custom_self reverts to
+ op_get_by_id.
+ */
+ int base = vPC[2].u.operand;
+ JSValue baseValue = callFrame->r(base).jsValue();
+
+ if (LIKELY(baseValue.isCell())) {
+ JSCell* baseCell = asCell(baseValue);
+ Structure* structure = vPC[4].u.structure;
+
+ if (LIKELY(baseCell->structure() == structure)) {
+ ASSERT(baseCell->isObject());
+ int dst = vPC[1].u.operand;
+ int property = vPC[3].u.operand;
+ Identifier& ident = callFrame->codeBlock()->identifier(property);
+
+ PropertySlot::GetValueFunc getter = vPC[5].u.getterFunc;
+ JSValue result = getter(callFrame, baseValue, ident);
+ CHECK_FOR_EXCEPTION();
+ callFrame->r(dst) = result;
+ vPC += OPCODE_LENGTH(op_get_by_id_custom_self);
+ NEXT_INSTRUCTION();
+ }
+ }
+ uncacheGetByID(callFrame->codeBlock(), vPC);
+ NEXT_INSTRUCTION();
+ }
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+skip_id_custom_self:
+#endif
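Editor's note: the "custom" variants cache a native GetValueFunc in the instruction stream, so a hit is just an indirect call. A rough sketch with hypothetical types:

// Sketch only: a structure match turns the access into one indirect call
// to the cached native getter; a mismatch reverts to the generic path.
typedef int (*NativeGetterSketch)(const char* propertyName);

struct CustomSelfCacheSketch {
    const void* expectedStructure;
    NativeGetterSketch getter;

    bool tryGet(const void* structure, const char* name, int& out) const
    {
        if (structure != expectedStructure)
            return false;        // miss: caller uncaches and retries generically
        out = getter(name);      // hit: direct native call
        return true;
    }
};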
DEFINE_OPCODE(op_get_by_id_generic) {
/* op_get_by_id_generic dst(r) base(r) property(id) nop(sID) nop(n) nop(n) nop(n)
vPC += OPCODE_LENGTH(op_get_by_id_generic);
NEXT_INSTRUCTION();
}
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ goto *(&&skip_id_getter_chain);
+#endif
+ DEFINE_OPCODE(op_get_by_id_getter_chain) {
+ /* op_get_by_id_getter_chain dst(r) base(r) property(id) structure(sID) structureChain(chain) count(n) offset(n)
+
+ Cached property access: Attempts to get a cached property from the
+ value base's prototype chain. If the cache misses, op_get_by_id_getter_chain
+ reverts to op_get_by_id.
+ */
+ int base = vPC[2].u.operand;
+ JSValue baseValue = callFrame->r(base).jsValue();
+
+ if (LIKELY(baseValue.isCell())) {
+ JSCell* baseCell = asCell(baseValue);
+ Structure* structure = vPC[4].u.structure;
+
+ if (LIKELY(baseCell->structure() == structure)) {
+ RefPtr<Structure>* it = vPC[5].u.structureChain->head();
+ size_t count = vPC[6].u.operand;
+ RefPtr<Structure>* end = it + count;
+
+ while (true) {
+ JSObject* baseObject = asObject(baseCell->structure()->prototypeForLookup(callFrame));
+
+ if (UNLIKELY(baseObject->structure() != (*it).get()))
+ break;
+
+ if (++it == end) {
+ int dst = vPC[1].u.operand;
+ int offset = vPC[7].u.operand;
+ if (GetterSetter* getterSetter = asGetterSetter(baseObject->getDirectOffset(offset).asCell())) {
+ JSObject* getter = getterSetter->getter();
+ CallData callData;
+ CallType callType = getter->getCallData(callData);
+ JSValue result = call(callFrame, getter, callType, callData, baseValue, ArgList());
+ CHECK_FOR_EXCEPTION();
+ callFrame->r(dst) = result;
+ } else
+ callFrame->r(dst) = jsUndefined();
+ vPC += OPCODE_LENGTH(op_get_by_id_getter_chain);
+ NEXT_INSTRUCTION();
+ }
+
+ // Update baseCell, so that next time around the loop we'll pick up the prototype's prototype.
+ baseCell = baseObject;
+ }
+ }
+ }
+ uncacheGetByID(callFrame->codeBlock(), vPC);
+ NEXT_INSTRUCTION();
+ }
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ skip_id_getter_chain:
+#endif
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ goto *(&&skip_id_custom_chain);
+#endif
+ DEFINE_OPCODE(op_get_by_id_custom_chain) {
+ /* op_get_by_id_custom_chain dst(r) base(r) property(id) structure(sID) structureChain(chain) count(n) offset(n)
+
+ Cached property access: Attempts to use a cached named property getter on the
+ value base's prototype chain. If the cache misses, op_get_by_id_custom_chain
+ reverts to op_get_by_id.
+ */
+ int base = vPC[2].u.operand;
+ JSValue baseValue = callFrame->r(base).jsValue();
+
+ if (LIKELY(baseValue.isCell())) {
+ JSCell* baseCell = asCell(baseValue);
+ Structure* structure = vPC[4].u.structure;
+
+ if (LIKELY(baseCell->structure() == structure)) {
+ RefPtr<Structure>* it = vPC[5].u.structureChain->head();
+ size_t count = vPC[6].u.operand;
+ RefPtr<Structure>* end = it + count;
+
+ while (true) {
+ JSObject* baseObject = asObject(baseCell->structure()->prototypeForLookup(callFrame));
+
+ if (UNLIKELY(baseObject->structure() != (*it).get()))
+ break;
+
+ if (++it == end) {
+ int dst = vPC[1].u.operand;
+ int property = vPC[3].u.operand;
+ Identifier& ident = callFrame->codeBlock()->identifier(property);
+
+ PropertySlot::GetValueFunc getter = vPC[7].u.getterFunc;
+ JSValue result = getter(callFrame, baseObject, ident);
+ CHECK_FOR_EXCEPTION();
+ callFrame->r(dst) = result;
+ vPC += OPCODE_LENGTH(op_get_by_id_custom_chain);
+ NEXT_INSTRUCTION();
+ }
+
+ // Update baseCell, so that next time around the loop we'll pick up the prototype's prototype.
+ baseCell = baseObject;
+ }
+ }
+ }
+ uncacheGetByID(callFrame->codeBlock(), vPC);
+ NEXT_INSTRUCTION();
+ }
+#if USE(GCC_COMPUTED_GOTO_WORKAROUND)
+ skip_id_custom_chain:
+#endif
DEFINE_OPCODE(op_get_array_length) {
/* op_get_array_length dst(r) base(r) property(id) nop(sID) nop(n) nop(n) nop(n)
NEXT_INSTRUCTION();
}
DEFINE_OPCODE(op_put_by_id) {
- /* put_by_id base(r) property(id) value(r) nop(n) nop(n) nop(n) nop(n)
+ /* put_by_id base(r) property(id) value(r) nop(n) nop(n) nop(n) nop(n) direct(b)
Generic property access: Sets the property named by identifier
property, belonging to register base, to register value.
Unlike many opcodes, this one does not write any output to
the register file.
+
+ The "direct" flag should only be set this put_by_id is to initialize
+ an object literal.
*/
int base = vPC[1].u.operand;
int property = vPC[2].u.operand;
int value = vPC[3].u.operand;
+ int direct = vPC[8].u.operand;
CodeBlock* codeBlock = callFrame->codeBlock();
JSValue baseValue = callFrame->r(base).jsValue();
Identifier& ident = codeBlock->identifier(property);
PutPropertySlot slot;
- baseValue.put(callFrame, ident, callFrame->r(value).jsValue(), slot);
+ if (direct) {
+ baseValue.putDirect(callFrame, ident, callFrame->r(value).jsValue(), slot);
+ ASSERT(slot.base() == baseValue);
+ } else
+ baseValue.put(callFrame, ident, callFrame->r(value).jsValue(), slot);
CHECK_FOR_EXCEPTION();
tryCachePutByID(callFrame, codeBlock, vPC, baseValue, slot);
NEXT_INSTRUCTION();
}
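Editor's note: as a rough illustration of what the direct flag changes (a sketch with a toy object model, not JSC's JSObject API): an ordinary put may be intercepted by the prototype chain, while a direct put always defines an own property, which is what an object-literal initializer needs.

#include <map>
#include <set>
#include <string>

// Sketch only: put() consults the chain for blocking properties before
// writing; putDirect() unconditionally defines an own property.
struct SketchObject {
    std::map<std::string, int> own;
    std::set<std::string> readOnly;   // stand-in for setters/read-only slots
    SketchObject* prototype = nullptr;

    bool put(const std::string& name, int value)
    {
        for (SketchObject* o = this; o; o = o->prototype) {
            if (o->readOnly.count(name))
                return false;         // write intercepted by the chain
        }
        own[name] = value;
        return true;
    }

    void putDirect(const std::string& name, int value)
    {
        own[name] = value;            // object-literal initialization
    }
};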
DEFINE_OPCODE(op_put_by_id_transition) {
- /* op_put_by_id_transition base(r) property(id) value(r) oldStructure(sID) newStructure(sID) structureChain(chain) offset(n)
+ /* op_put_by_id_transition base(r) property(id) value(r) oldStructure(sID) newStructure(sID) structureChain(chain) offset(n) direct(b)
Cached property access: Attempts to set a new property with a cached transition
property named by identifier property, belonging to register base,
if (LIKELY(baseCell->structure() == oldStructure)) {
ASSERT(baseCell->isObject());
JSObject* baseObject = asObject(baseCell);
-
- RefPtr<Structure>* it = vPC[6].u.structureChain->head();
-
- JSValue proto = baseObject->structure()->prototypeForLookup(callFrame);
- while (!proto.isNull()) {
- if (UNLIKELY(asObject(proto)->structure() != (*it).get())) {
- uncachePutByID(callFrame->codeBlock(), vPC);
- NEXT_INSTRUCTION();
+ int direct = vPC[8].u.operand;
+
+ if (!direct) {
+ RefPtr<Structure>* it = vPC[6].u.structureChain->head();
+
+ JSValue proto = baseObject->structure()->prototypeForLookup(callFrame);
+ while (!proto.isNull()) {
+ if (UNLIKELY(asObject(proto)->structure() != (*it).get())) {
+ uncachePutByID(callFrame->codeBlock(), vPC);
+ NEXT_INSTRUCTION();
+ }
+ ++it;
+ proto = asObject(proto)->structure()->prototypeForLookup(callFrame);
}
- ++it;
- proto = asObject(proto)->structure()->prototypeForLookup(callFrame);
}
-
baseObject->transitionTo(newStructure);
int value = vPC[3].u.operand;
NEXT_INSTRUCTION();
}
DEFINE_OPCODE(op_put_by_id_replace) {
- /* op_put_by_id_replace base(r) property(id) value(r) structure(sID) offset(n) nop(n) nop(n)
+ /* op_put_by_id_replace base(r) property(id) value(r) structure(sID) offset(n) nop(n) nop(n) direct(b)
Cached property access: Attempts to set a pre-existing, cached
property named by identifier property, belonging to register base,
NEXT_INSTRUCTION();
}
DEFINE_OPCODE(op_put_by_id_generic) {
- /* op_put_by_id_generic base(r) property(id) value(r) nop(n) nop(n) nop(n) nop(n)
+ /* op_put_by_id_generic base(r) property(id) value(r) nop(n) nop(n) nop(n) nop(n) direct(b)
Generic property access: Sets the property named by identifier
property, belonging to register base, to register value.
int base = vPC[1].u.operand;
int property = vPC[2].u.operand;
int value = vPC[3].u.operand;
+ int direct = vPC[8].u.operand;
JSValue baseValue = callFrame->r(base).jsValue();
Identifier& ident = callFrame->codeBlock()->identifier(property);
PutPropertySlot slot;
- baseValue.put(callFrame, ident, callFrame->r(value).jsValue(), slot);
+ if (direct) {
+ baseValue.putDirect(callFrame, ident, callFrame->r(value).jsValue(), slot);
+ ASSERT(slot.base() == baseValue);
+ } else
+ baseValue.put(callFrame, ident, callFrame->r(value).jsValue(), slot);
CHECK_FOR_EXCEPTION();
vPC += OPCODE_LENGTH(op_put_by_id_generic);
vPC += OPCODE_LENGTH(op_jnlesseq);
NEXT_INSTRUCTION();
}
+ DEFINE_OPCODE(op_jlesseq) {
+ /* jlesseq src1(r) src2(r) target(offset)
+
+ Checks whether register src1 is less than or equal to
+ register src2, as with the ECMAScript '<=' operator,
+ and then jumps to offset target from the current instruction,
+ if and only if the result of the comparison is true.
+ */
+ JSValue src1 = callFrame->r(vPC[1].u.operand).jsValue();
+ JSValue src2 = callFrame->r(vPC[2].u.operand).jsValue();
+ int target = vPC[3].u.operand;
+
+ bool result = jsLessEq(callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION();
+
+ if (result) {
+ vPC += target;
+ NEXT_INSTRUCTION();
+ }
+
+ vPC += OPCODE_LENGTH(op_jlesseq);
+ NEXT_INSTRUCTION();
+ }
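Editor's note: the branch shape above (compare, check for an exception, then jump or fall through) recurs throughout the interpreter. A compact sketch of that control flow, with plain ints standing in for the instruction stream:

// Sketch only: nullptr stands in for the goto vm_throw path.
static const int* jumpIfLessEq(const int* vPC, int opcodeLength, int target,
                               bool result, bool hadException)
{
    if (hadException)
        return nullptr;                         // exception: unwind instead
    return result ? vPC + target                // taken: relative jump
                  : vPC + opcodeLength;         // not taken: next instruction
}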
DEFINE_OPCODE(op_switch_imm) {
/* switch_imm tableIndex(n) defaultOffset(offset) scrutinee(r)
vPC += defaultOffset;
else {
UString::Rep* value = asString(scrutinee)->value(callFrame).rep();
- if (value->size() != 1)
+ if (value->length() != 1)
vPC += defaultOffset;
else
- vPC += callFrame->codeBlock()->characterSwitchJumpTable(tableIndex).offsetForValue(value->data()[0], defaultOffset);
+ vPC += callFrame->codeBlock()->characterSwitchJumpTable(tableIndex).offsetForValue(value->characters()[0], defaultOffset);
}
NEXT_INSTRUCTION();
}
CHECK_FOR_EXCEPTION();
}
} else {
- if (!arguments.isObject()) {
- exceptionValue = createInvalidParamError(callFrame, "Function.prototype.apply", arguments, vPC - callFrame->codeBlock()->instructions().begin(), callFrame->codeBlock());
- goto vm_throw;
- }
+ exceptionValue = createInvalidParamError(callFrame, "Function.prototype.apply", arguments, vPC - callFrame->codeBlock()->instructions().begin(), callFrame->codeBlock());
+ goto vm_throw;
}
}
CHECK_FOR_EXCEPTION();
JSValue returnValue = callFrame->r(result).jsValue();
- vPC = callFrame->returnPC();
+ vPC = callFrame->returnVPC();
int dst = callFrame->returnValueRegister();
callFrame = callFrame->callerFrame();
callFrame->init(newCodeBlock, vPC + 7, callDataScopeChain, previousCallFrame, dst, argCount, asFunction(v));
vPC = newCodeBlock->instructions().begin();
-
#if ENABLE(OPCODE_STATS)
OpcodeStats::resetLastInstruction();
#endif
int src = vPC[2].u.operand;
int count = vPC[3].u.operand;
- callFrame->r(dst) = jsString(callFrame, &callFrame->registers()[src], count);
+ callFrame->r(dst) = concatenateStrings(callFrame, &callFrame->registers()[src], count);
CHECK_FOR_EXCEPTION();
vPC += OPCODE_LENGTH(op_strcat);
vPC += target;
NEXT_INSTRUCTION();
}
-#if HAVE(COMPUTED_GOTO)
+#if ENABLE(COMPUTED_GOTO_INTERPRETER)
// Appease GCC
goto *(&&skip_new_scope);
#endif
vPC += OPCODE_LENGTH(op_push_new_scope);
NEXT_INSTRUCTION();
}
-#if HAVE(COMPUTED_GOTO)
+#if ENABLE(COMPUTED_GOTO_INTERPRETER)
skip_new_scope:
#endif
DEFINE_OPCODE(op_catch) {
NEXT_INSTRUCTION();
}
}
-#if !HAVE(COMPUTED_GOTO)
+#if !ENABLE(COMPUTED_GOTO_INTERPRETER)
} // iterator loop ends
#endif
-#endif // USE(INTERPRETER)
#undef NEXT_INSTRUCTION
#undef DEFINE_OPCODE
#undef CHECK_FOR_EXCEPTION
#undef CHECK_FOR_TIMEOUT
+#endif // ENABLE(INTERPRETER)
}
JSValue Interpreter::retrieveArguments(CallFrame* callFrame, JSFunction* function) const
CodeBlock* callerCodeBlock = callerFrame->codeBlock();
if (!callerCodeBlock)
return;
-
- unsigned bytecodeOffset = bytecodeOffsetForPC(callerFrame, callerCodeBlock, callFrame->returnPC());
+ unsigned bytecodeOffset = 0;
+#if ENABLE(INTERPRETER)
+ if (!callerFrame->globalData().canUseJIT())
+ bytecodeOffset = bytecodeOffsetForPC(callerFrame, callerCodeBlock, callFrame->returnVPC());
+#if ENABLE(JIT)
+ else
+ bytecodeOffset = bytecodeOffsetForPC(callerFrame, callerCodeBlock, callFrame->returnPC());
+#endif
+#else
+ bytecodeOffset = bytecodeOffsetForPC(callerFrame, callerCodeBlock, callFrame->returnPC());
+#endif
lineNumber = callerCodeBlock->lineNumberForBytecodeOffset(callerFrame, bytecodeOffset - 1);
sourceID = callerCodeBlock->ownerExecutable()->sourceID();
sourceURL = callerCodeBlock->ownerExecutable()->sourceURL();
// We use a smaller reentrancy limit on iPhone because of the high amount of
// stack space required on the web thread.
- enum { MaxMainThreadReentryDepth = 100, MaxSecondaryThreadReentryDepth = 32 };
+ enum { MaxLargeThreadReentryDepth = 100, MaxSmallThreadReentryDepth = 32 };
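Editor's note: a hypothetical sketch of how such limits might be chosen per thread; the 512KB threshold is an assumption for illustration, not a value taken from this code.

// Sketch only: small-stack threads get the small re-entry ceiling.
static unsigned reentryLimitFor(size_t availableStackBytes)
{
    const unsigned maxLargeThreadReentryDepth = 100;
    const unsigned maxSmallThreadReentryDepth = 32;
    return availableStackBytes >= 512 * 1024 ? maxLargeThreadReentryDepth
                                             : maxSmallThreadReentryDepth;
}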
class Interpreter : public FastAllocBase {
friend class JIT;
Opcode getOpcode(OpcodeID id)
{
- #if HAVE(COMPUTED_GOTO)
+ #if ENABLE(COMPUTED_GOTO_INTERPRETER)
return m_opcodeTable[id];
#else
return id;
OpcodeID getOpcodeID(Opcode opcode)
{
- #if HAVE(COMPUTED_GOTO)
+ #if ENABLE(COMPUTED_GOTO_INTERPRETER)
ASSERT(isOpcode(opcode));
return m_opcodeIDTable.get(opcode);
#else
JSValue execute(EvalExecutable*, CallFrame*, JSObject* thisObject, int globalRegisterOffset, ScopeChainNode*, JSValue* exception);
-#if USE(INTERPRETER)
+#if ENABLE(INTERPRETER)
NEVER_INLINE bool resolve(CallFrame*, Instruction*, JSValue& exceptionValue);
NEVER_INLINE bool resolveSkip(CallFrame*, Instruction*, JSValue& exceptionValue);
NEVER_INLINE bool resolveGlobal(CallFrame*, Instruction*, JSValue& exceptionValue);
+ NEVER_INLINE bool resolveGlobalDynamic(CallFrame*, Instruction*, JSValue& exceptionValue);
NEVER_INLINE void resolveBase(CallFrame*, Instruction* vPC);
NEVER_INLINE bool resolveBaseAndProperty(CallFrame*, Instruction*, JSValue& exceptionValue);
NEVER_INLINE ScopeChainNode* createExceptionScope(CallFrame*, const Instruction* vPC);
void uncacheGetByID(CodeBlock*, Instruction* vPC);
void tryCachePutByID(CallFrame*, CodeBlock*, Instruction*, JSValue baseValue, const PutPropertySlot&);
void uncachePutByID(CodeBlock*, Instruction* vPC);
-#endif
+#endif // ENABLE(INTERPRETER)
NEVER_INLINE bool unwindCallFrame(CallFrame*&, JSValue, unsigned& bytecodeOffset, CodeBlock*&);
RegisterFile m_registerFile;
-#if HAVE(COMPUTED_GOTO)
+#if ENABLE(COMPUTED_GOTO_INTERPRETER)
Opcode m_opcodeTable[numOpcodeIDs]; // Maps OpcodeID => Opcode for compiling
HashMap<Opcode, OpcodeID> m_opcodeIDTable; // Maps Opcode => OpcodeID for decompiling
#endif
#include "config.h"
#include "RegisterFile.h"
+#include "JSGlobalObject.h"
+
namespace JSC {
RegisterFile::~RegisterFile()
m_maxUsed = m_start;
}
+void RegisterFile::setGlobalObject(JSGlobalObject* globalObject)
+{
+ m_globalObject = globalObject;
+}
+
+bool RegisterFile::clearGlobalObject(JSGlobalObject* globalObject)
+{
+ return m_globalObject.clear(globalObject);
+}
+
+JSGlobalObject* RegisterFile::globalObject()
+{
+ return m_globalObject.get();
+}
+
} // namespace JSC
#include "Collector.h"
#include "ExecutableAllocator.h"
#include "Register.h"
+#include "WeakGCPtr.h"
#include <stdio.h>
#include <wtf/Noncopyable.h>
#include <wtf/VMTags.h>
Register* end() const { return m_end; }
size_t size() const { return m_end - m_start; }
- void setGlobalObject(JSGlobalObject* globalObject) { m_globalObject = globalObject; }
- JSGlobalObject* globalObject() { return m_globalObject; }
+ void setGlobalObject(JSGlobalObject*);
+ bool clearGlobalObject(JSGlobalObject*);
+ JSGlobalObject* globalObject();
bool grow(Register* newEnd);
void shrink(Register* newEnd);
Register* m_commitEnd;
#endif
- JSGlobalObject* m_globalObject; // The global object whose vars are currently stored in the register file.
+ WeakGCPtr<JSGlobalObject> m_globalObject; // The global object whose vars are currently stored in the register file.
};
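Editor's note: a minimal sketch of what a weak, collector-clearable pointer buys here (an assumed shape, not WTF's WeakGCPtr): clear() succeeds only when the handle still points at the object being collected, which is how clearGlobalObject() can report whether this register file was the one holding it.

// Sketch only: a weak handle the collector can null out.
template<typename T>
class WeakHandleSketch {
public:
    void set(T* p) { m_ptr = p; }
    T* get() const { return m_ptr; }
    bool clear(T* expected)
    {
        if (m_ptr != expected)
            return false;   // a different (or no) object is registered
        m_ptr = nullptr;
        return true;        // this handle did reference the collected object
    }
private:
    T* m_ptr = nullptr;
};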
// FIXME: Add a generic getpagesize() to WTF, then move this function to WTF as well.
, m_end(0)
, m_max(0)
, m_buffer(0)
- , m_globalObject(0)
{
// Verify that our values will play nice with mmap and VirtualAlloc.
ASSERT(isPageAligned(maxGlobals));
size_t ExecutableAllocator::pageSize = 0;
+#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
+void ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSeting setting)
+{
+ if (!pageSize)
+ intializePageSize();
+
+ // Calculate the start of the page containing this region,
+ // and account for this extra memory within size.
+ intptr_t startPtr = reinterpret_cast<intptr_t>(start);
+ intptr_t pageStartPtr = startPtr & ~(pageSize - 1);
+ void* pageStart = reinterpret_cast<void*>(pageStartPtr);
+ size += (startPtr - pageStartPtr);
+
+ // Round size up
+ size += (pageSize - 1);
+ size &= ~(pageSize - 1);
+
+ mprotect(pageStart, size, (setting == Writable) ? PROTECTION_FLAGS_RW : PROTECTION_FLAGS_RX);
+}
+#endif
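Editor's note: a self-contained check of the page rounding above, assuming a 4096-byte page size:

#include <cassert>
#include <cstdint>

int main()
{
    const std::uintptr_t pageSize = 4096;
    std::uintptr_t startPtr = 0x10010, size = 0x20;
    std::uintptr_t pageStartPtr = startPtr & ~(pageSize - 1);
    size += startPtr - pageStartPtr;                 // cover the leading slack
    size = (size + pageSize - 1) & ~(pageSize - 1);  // round up to whole pages
    assert(pageStartPtr == 0x10000 && size == pageSize);
    return 0;
}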
+
+#if CPU(ARM_TRADITIONAL) && OS(LINUX) && COMPILER(RVCT)
+__asm void ExecutableAllocator::cacheFlush(void* code, size_t size)
+{
+ ARM
+ push {r7}
+ add r1, r1, r0
+ mov r7, #0xf0000
+ add r7, r7, #0x2
+ mov r2, #0x0
+ svc #0x0
+ pop {r7}
+ bx lr
+}
+#endif
+
}
#endif // HAVE(ASSEMBLER)
#include <e32std.h>
#endif
+#if CPU(MIPS) && OS(LINUX)
+#include <sys/cachectl.h>
+#endif
+
#if OS(WINCE)
// From pkfuncs.h (private header file from the Platform Builder)
#define CACHE_SYNC_ALL 0x07F
{
if (!pageSize)
intializePageSize();
- m_smallAllocationPool = ExecutablePool::create(JIT_ALLOCATOR_LARGE_ALLOC_SIZE);
+ if (isValid())
+ m_smallAllocationPool = ExecutablePool::create(JIT_ALLOCATOR_LARGE_ALLOC_SIZE);
+#if !ENABLE(INTERPRETER)
+ else
+ CRASH();
+#endif
}
+ bool isValid() const;
+
PassRefPtr<ExecutablePool> poolForSize(size_t n)
{
// Try to fit in the existing small allocator
+ ASSERT(m_smallAllocationPool);
if (n < m_smallAllocationPool->available())
return m_smallAllocationPool;
static void cacheFlush(void*, size_t)
{
}
+#elif CPU(MIPS)
+ static void cacheFlush(void* code, size_t size)
+ {
+#if COMPILER(GCC) && (GCC_VERSION >= 40300)
+#if WTF_MIPS_ISA_REV(2) && (GCC_VERSION < 40403)
+ int lineSize;
+ asm("rdhwr %0, $1" : "=r" (lineSize));
+ //
+ // Modify "start" and "end" to avoid GCC 4.3.0-4.4.2 bug in
+ // mips_expand_synci_loop that may execute synci one more time.
+ // "start" points to the fisrt byte of the cache line.
+ // "end" points to the last byte of the line before the last cache line.
+ // Because size is always a multiple of 4, this is safe to set
+ // "end" to the last byte.
+ //
+ intptr_t start = reinterpret_cast<intptr_t>(code) & (-lineSize);
+ intptr_t end = ((reinterpret_cast<intptr_t>(code) + size - 1) & (-lineSize)) - 1;
+ __builtin___clear_cache(reinterpret_cast<char*>(start), reinterpret_cast<char*>(end));
+#else
+ intptr_t end = reinterpret_cast<intptr_t>(code) + size;
+ __builtin___clear_cache(reinterpret_cast<char*>(code), reinterpret_cast<char*>(end));
+#endif
+#else
+ _flush_cache(reinterpret_cast<char*>(code), size, BCACHE);
+#endif
+ }
#elif CPU(ARM_THUMB2) && OS(IPHONE_OS)
static void cacheFlush(void* code, size_t size)
{
{
User::IMB_Range(code, static_cast<char*>(code) + size);
}
-#elif CPU(ARM_TRADITIONAL) && OS(LINUX)
+#elif CPU(ARM_TRADITIONAL) && OS(LINUX) && COMPILER(RVCT)
+ static __asm void cacheFlush(void* code, size_t size);
+#elif CPU(ARM_TRADITIONAL) && OS(LINUX) && COMPILER(GCC)
static void cacheFlush(void* code, size_t size)
{
asm volatile (
#include "ExecutableAllocator.h"
-#include <errno.h>
+#if ENABLE(EXECUTABLE_ALLOCATOR_FIXED)
-#if ENABLE(ASSEMBLER) && OS(DARWIN) && CPU(X86_64)
+#include <errno.h>
#include "TCSpinLock.h"
-#include <mach/mach_init.h>
-#include <mach/vm_map.h>
#include <sys/mman.h>
#include <unistd.h>
#include <wtf/AVLTree.h>
#include <wtf/VMTags.h>
+ #define MMAP_FLAGS (MAP_PRIVATE | MAP_ANON | MAP_JIT)
+
using namespace WTF;
namespace JSC {
-#define TWO_GB (2u * 1024u * 1024u * 1024u)
-#define SIXTEEN_MB (16u * 1024u * 1024u)
+#if CPU(X86_64)
+ // These limits are suitable on 64-bit platforms (particularly x86-64, where we require all jumps to have a 2Gb max range).
+ #define VM_POOL_SIZE (2u * 1024u * 1024u * 1024u) // 2Gb
+ #define COALESCE_LIMIT (16u * 1024u * 1024u) // 16Mb
+#else
+ // These limits are hopefully sensible on embedded platforms.
+ #define VM_POOL_SIZE (32u * 1024u * 1024u) // 32Mb
+ #define COALESCE_LIMIT (4u * 1024u * 1024u) // 4Mb
+#endif
+
+// ASLR currently only works on darwin (due to arc4random) & 64-bit (due to address space size).
+#define VM_POOL_ASLR (OS(DARWIN) && CPU(X86_64))
// FreeListEntry describes a free chunk of memory, stored in the freeList.
struct FreeListEntry {
{
while (madvise(position, size, MADV_FREE_REUSE) == -1 && errno == EAGAIN) { }
}
+#elif HAVE(MADV_FREE)
+ void release(void* position, size_t size)
+ {
+ while (madvise(position, size, MADV_FREE) == -1 && errno == EAGAIN) { }
+ }
+
+ void reuse(void*, size_t) {}
#elif HAVE(MADV_DONTNEED)
void release(void* position, size_t size)
{
// for now instead of 2^26 bits of ASLR let's stick with 25 bits of randomization plus
// 2^24, which should put us somewhere in the middle of userspace (in the address range
// 0x200000000000 .. 0x5fffffffffff).
- intptr_t randomLocation = arc4random() & ((1 << 25) - 1);
+ intptr_t randomLocation = 0;
+#if VM_POOL_ASLR
+ randomLocation = arc4random() & ((1 << 25) - 1);
randomLocation += (1 << 24);
randomLocation <<= 21;
- m_base = mmap(reinterpret_cast<void*>(randomLocation), m_totalHeapSize, INITIAL_PROTECTION_FLAGS, MAP_PRIVATE | MAP_ANON, VM_TAG_FOR_EXECUTABLEALLOCATOR_MEMORY, 0);
- if (!m_base)
- CRASH();
+#endif
+ m_base = mmap(reinterpret_cast<void*>(randomLocation), m_totalHeapSize, INITIAL_PROTECTION_FLAGS, MMAP_FLAGS, VM_TAG_FOR_EXECUTABLEALLOCATOR_MEMORY, 0);
- // For simplicity, we keep all memory in m_freeList in a 'released' state.
- // This means that we can simply reuse all memory when allocating, without
- // worrying about it's previous state, and also makes coalescing m_freeList
- // simpler since we need not worry about the possibility of coalescing released
- // chunks with non-released ones.
- release(m_base, m_totalHeapSize);
- m_freeList.insert(new FreeListEntry(m_base, m_totalHeapSize));
+ if (m_base == MAP_FAILED) {
+#if ENABLE(INTERPRETER)
+ m_base = 0;
+#else
+ CRASH();
+#endif
+ } else {
+ // For simplicity, we keep all memory in m_freeList in a 'released' state.
+ // This means that we can simply reuse all memory when allocating, without
+ // worrying about its previous state, and also makes coalescing m_freeList
+ // simpler since we need not worry about the possibility of coalescing released
+ // chunks with non-released ones.
+ release(m_base, m_totalHeapSize);
+ m_freeList.insert(new FreeListEntry(m_base, m_totalHeapSize));
+ }
}
void* alloc(size_t size)
{
+#if ENABLE(INTERPRETER)
+ if (!m_base)
+ return 0;
+#else
+ ASSERT(m_base);
+#endif
void* result;
// Freed allocations of the common size are not stored back into the main
void free(void* pointer, size_t size)
{
+ ASSERT(m_base);
// Call release to report to the operating system that this
// memory is no longer in use, and need not be paged out.
ASSERT(isWithinVMPool(pointer, size));
// 16MB of allocations have been freed, sweep m_freeList
// coalescing any neighboring fragments.
m_countFreedSinceLastCoalesce += size;
- if (m_countFreedSinceLastCoalesce >= SIXTEEN_MB) {
+ if (m_countFreedSinceLastCoalesce >= COALESCE_LIMIT) {
m_countFreedSinceLastCoalesce = 0;
coalesceFreeSpace();
}
}
+ bool isValid() const { return !!m_base; }
+
private:
#ifndef NDEBUG
static FixedVMPoolAllocator* allocator = 0;
static SpinLock spinlock = SPINLOCK_INITIALIZER;
+bool ExecutableAllocator::isValid() const
+{
+ SpinLockHolder lock_holder(&spinlock);
+ if (!allocator)
+ allocator = new FixedVMPoolAllocator(JIT_ALLOCATOR_LARGE_ALLOC_SIZE, VM_POOL_SIZE);
+ return allocator->isValid();
+}
+
ExecutablePool::Allocation ExecutablePool::systemAlloc(size_t size)
{
- SpinLockHolder lock_holder(&spinlock);
+ SpinLockHolder lock_holder(&spinlock);
if (!allocator)
- allocator = new FixedVMPoolAllocator(JIT_ALLOCATOR_LARGE_ALLOC_SIZE, TWO_GB);
+ allocator = new FixedVMPoolAllocator(JIT_ALLOCATOR_LARGE_ALLOC_SIZE, VM_POOL_SIZE);
ExecutablePool::Allocation alloc = {reinterpret_cast<char*>(allocator->alloc(size)), size};
return alloc;
}
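Editor's note: both isValid() and systemAlloc() lazily create the shared allocator while holding the spinlock; a sketch of the same pattern with std::mutex standing in for the spinlock and a hypothetical pool type:

#include <mutex>

// Sketch only: the first caller constructs the shared pool under the lock.
struct PoolSketch { bool valid = true; };

static PoolSketch* sharedPool()
{
    static std::mutex lock;
    static PoolSketch* pool = nullptr;
    std::lock_guard<std::mutex> holder(lock);
    if (!pool)
        pool = new PoolSketch;
    return pool;
}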
void ExecutablePool::systemRelease(const ExecutablePool::Allocation& allocation)
{
- SpinLockHolder lock_holder(&spinlock);
+ SpinLockHolder lock_holder(&spinlock);
ASSERT(allocator);
allocator->free(allocation.pages, allocation.size);
#include "ExecutableAllocator.h"
-#if ENABLE(ASSEMBLER) && OS(UNIX) && !OS(SYMBIAN)
+#if ENABLE(EXECUTABLE_ALLOCATOR_DEMAND) && !OS(WINDOWS) && !OS(SYMBIAN)
#include <sys/mman.h>
#include <unistd.h>
namespace JSC {
-#if !(OS(DARWIN) && CPU(X86_64))
-
void ExecutableAllocator::intializePageSize()
{
ExecutableAllocator::pageSize = getpagesize();
ASSERT_UNUSED(result, !result);
}
-#endif // !(OS(DARWIN) && CPU(X86_64))
-
-#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
-void ExecutableAllocator::reprotectRegion(void* start, size_t size, ProtectionSeting setting)
+bool ExecutableAllocator::isValid() const
{
- if (!pageSize)
- intializePageSize();
-
- // Calculate the start of the page containing this region,
- // and account for this extra memory within size.
- intptr_t startPtr = reinterpret_cast<intptr_t>(start);
- intptr_t pageStartPtr = startPtr & ~(pageSize - 1);
- void* pageStart = reinterpret_cast<void*>(pageStartPtr);
- size += (startPtr - pageStartPtr);
-
- // Round size up
- size += (pageSize - 1);
- size &= ~(pageSize - 1);
+ return true;
+}
- mprotect(pageStart, size, (setting == Writable) ? PROTECTION_FLAGS_RW : PROTECTION_FLAGS_RX);
+#if CPU(ARM_TRADITIONAL) && OS(LINUX) && COMPILER(RVCT)
+__asm void ExecutableAllocator::cacheFlush(void* code, size_t size)
+{
+ ARM
+ push {r7}
+ add r1, r1, r0
+ mov r7, #0xf0000
+ add r7, r7, #0x2
+ mov r2, #0x0
+ svc #0x0
+ pop {r7}
+ bx lr
}
#endif
}
-#endif // HAVE(ASSEMBLER)
+#endif
#include "ExecutableAllocator.h"
-#if ENABLE(ASSEMBLER) && OS(SYMBIAN)
+#if ENABLE(EXECUTABLE_ALLOCATOR_DEMAND) && OS(SYMBIAN)
#include <e32hal.h>
#include <e32std.h>
#include "ExecutableAllocator.h"
-#if ENABLE(ASSEMBLER) && OS(WINDOWS)
+#if ENABLE(EXECUTABLE_ALLOCATOR_DEMAND) && OS(WINDOWS)
#include "windows.h"
VirtualFree(alloc.pages, 0, MEM_RELEASE);
}
+bool ExecutableAllocator::isValid() const
+{
+ return true;
+}
+
#if ENABLE(ASSEMBLER_WX_EXCLUSIVE)
#error "ASSEMBLER_WX_EXCLUSIVE not yet suported on this platform."
#endif
DEFINE_BINARY_OP(op_in)
DEFINE_BINARY_OP(op_less)
DEFINE_BINARY_OP(op_lesseq)
- DEFINE_BINARY_OP(op_urshift)
DEFINE_UNARY_OP(op_is_boolean)
DEFINE_UNARY_OP(op_is_function)
DEFINE_UNARY_OP(op_is_number)
DEFINE_OP(op_jneq_ptr)
DEFINE_OP(op_jnless)
DEFINE_OP(op_jless)
+ DEFINE_OP(op_jlesseq)
DEFINE_OP(op_jnlesseq)
DEFINE_OP(op_jsr)
DEFINE_OP(op_jtrue)
DEFINE_OP(op_resolve)
DEFINE_OP(op_resolve_base)
DEFINE_OP(op_resolve_global)
+ DEFINE_OP(op_resolve_global_dynamic)
DEFINE_OP(op_resolve_skip)
DEFINE_OP(op_resolve_with_base)
DEFINE_OP(op_ret)
DEFINE_OP(op_rshift)
+ DEFINE_OP(op_urshift)
DEFINE_OP(op_sret)
DEFINE_OP(op_strcat)
DEFINE_OP(op_stricteq)
case op_get_by_id_proto_list:
case op_get_by_id_self:
case op_get_by_id_self_list:
+ case op_get_by_id_getter_chain:
+ case op_get_by_id_getter_proto:
+ case op_get_by_id_getter_proto_list:
+ case op_get_by_id_getter_self:
+ case op_get_by_id_getter_self_list:
+ case op_get_by_id_custom_chain:
+ case op_get_by_id_custom_proto:
+ case op_get_by_id_custom_proto_list:
+ case op_get_by_id_custom_self:
+ case op_get_by_id_custom_self_list:
case op_get_string_length:
case op_put_by_id_generic:
case op_put_by_id_replace:
Instruction* instructionsBegin = m_codeBlock->instructions().begin();
m_propertyAccessInstructionIndex = 0;
-#if USE(JSVALUE32_64)
m_globalResolveInfoIndex = 0;
-#endif
m_callLinkInfoIndex = 0;
for (Vector<SlowCaseEntry>::iterator iter = m_slowCases.begin(); iter != m_slowCases.end();) {
DEFINE_SLOWCASE_OP(op_jfalse)
DEFINE_SLOWCASE_OP(op_jnless)
DEFINE_SLOWCASE_OP(op_jless)
+ DEFINE_SLOWCASE_OP(op_jlesseq)
DEFINE_SLOWCASE_OP(op_jnlesseq)
DEFINE_SLOWCASE_OP(op_jtrue)
DEFINE_SLOWCASE_OP(op_loop_if_less)
DEFINE_SLOWCASE_OP(op_pre_inc)
DEFINE_SLOWCASE_OP(op_put_by_id)
DEFINE_SLOWCASE_OP(op_put_by_val)
-#if USE(JSVALUE32_64)
DEFINE_SLOWCASE_OP(op_resolve_global)
-#endif
+ DEFINE_SLOWCASE_OP(op_resolve_global_dynamic)
DEFINE_SLOWCASE_OP(op_rshift)
+ DEFINE_SLOWCASE_OP(op_urshift)
DEFINE_SLOWCASE_OP(op_stricteq)
DEFINE_SLOWCASE_OP(op_sub)
DEFINE_SLOWCASE_OP(op_to_jsnumber)
// When the JSFunction is deleted the pointer embedded in the instruction stream will no longer be valid
// (and, if a new JSFunction happened to be constructed at the same location, we could get a false positive
// match). Reset the check so it no longer matches.
- RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock.get());
+ RepatchBuffer repatchBuffer(callLinkInfo->ownerCodeBlock);
#if USE(JSVALUE32_64)
repatchBuffer.repatch(callLinkInfo->hotPathBegin, 0);
#else
}
// patch the call so we do not continue to try to link.
- repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs.ctiVirtualCall());
+ repatchBuffer.relink(callLinkInfo->callReturnLocation, globalData->jitStubs->ctiVirtualCall());
}
#endif // ENABLE(JIT_OPTIMIZE_CALL)
#ifndef JIT_h
#define JIT_h
-#include <wtf/Platform.h>
-
#if ENABLE(JIT)
// We've run into some problems where changing the size of the class JIT leads to
#include "CodeBlock.h"
#include "Interpreter.h"
-#include "JITCode.h"
-#include "JITStubs.h"
+#include "JSInterfaceJIT.h"
#include "Opcode.h"
-#include "RegisterFile.h"
-#include "MacroAssembler.h"
#include "Profiler.h"
#include <bytecode/SamplingTool.h>
-#include <wtf/AlwaysInline.h>
-#include <wtf/Vector.h>
namespace JSC {
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, MacroAssemblerCodePtr newCalleeFunction);
void ctiPatchCallByReturnAddress(CodeBlock* codeblock, ReturnAddressPtr returnAddress, FunctionPtr newCalleeFunction);
- class JIT : private MacroAssembler {
+ class JIT : private JSInterfaceJIT {
friend class JITStubCall;
using MacroAssembler::Jump;
using MacroAssembler::JumpList;
using MacroAssembler::Label;
- // NOTES:
- //
- // regT0 has two special meanings. The return value from a stub
- // call will always be in regT0, and by default (unless
- // a register is specified) emitPutVirtualRegister() will store
- // the value from regT0.
- //
- // regT3 is required to be callee-preserved.
- //
- // tempRegister2 is has no such dependencies. It is important that
- // on x86/x86-64 it is ecx for performance reasons, since the
- // MacroAssembler will need to plant register swaps if it is not -
- // however the code will still function correctly.
-#if CPU(X86_64)
- static const RegisterID returnValueRegister = X86Registers::eax;
- static const RegisterID cachedResultRegister = X86Registers::eax;
- static const RegisterID firstArgumentRegister = X86Registers::edi;
-
- static const RegisterID timeoutCheckRegister = X86Registers::r12;
- static const RegisterID callFrameRegister = X86Registers::r13;
- static const RegisterID tagTypeNumberRegister = X86Registers::r14;
- static const RegisterID tagMaskRegister = X86Registers::r15;
-
- static const RegisterID regT0 = X86Registers::eax;
- static const RegisterID regT1 = X86Registers::edx;
- static const RegisterID regT2 = X86Registers::ecx;
- static const RegisterID regT3 = X86Registers::ebx;
-
- static const FPRegisterID fpRegT0 = X86Registers::xmm0;
- static const FPRegisterID fpRegT1 = X86Registers::xmm1;
- static const FPRegisterID fpRegT2 = X86Registers::xmm2;
-#elif CPU(X86)
- static const RegisterID returnValueRegister = X86Registers::eax;
- static const RegisterID cachedResultRegister = X86Registers::eax;
- // On x86 we always use fastcall conventions = but on
- // OS X if might make more sense to just use regparm.
- static const RegisterID firstArgumentRegister = X86Registers::ecx;
-
- static const RegisterID timeoutCheckRegister = X86Registers::esi;
- static const RegisterID callFrameRegister = X86Registers::edi;
-
- static const RegisterID regT0 = X86Registers::eax;
- static const RegisterID regT1 = X86Registers::edx;
- static const RegisterID regT2 = X86Registers::ecx;
- static const RegisterID regT3 = X86Registers::ebx;
-
- static const FPRegisterID fpRegT0 = X86Registers::xmm0;
- static const FPRegisterID fpRegT1 = X86Registers::xmm1;
- static const FPRegisterID fpRegT2 = X86Registers::xmm2;
-#elif CPU(ARM_THUMB2)
- static const RegisterID returnValueRegister = ARMRegisters::r0;
- static const RegisterID cachedResultRegister = ARMRegisters::r0;
- static const RegisterID firstArgumentRegister = ARMRegisters::r0;
-
- static const RegisterID regT0 = ARMRegisters::r0;
- static const RegisterID regT1 = ARMRegisters::r1;
- static const RegisterID regT2 = ARMRegisters::r2;
- static const RegisterID regT3 = ARMRegisters::r4;
-
- static const RegisterID callFrameRegister = ARMRegisters::r5;
- static const RegisterID timeoutCheckRegister = ARMRegisters::r6;
-
- static const FPRegisterID fpRegT0 = ARMRegisters::d0;
- static const FPRegisterID fpRegT1 = ARMRegisters::d1;
- static const FPRegisterID fpRegT2 = ARMRegisters::d2;
-#elif CPU(ARM_TRADITIONAL)
- static const RegisterID returnValueRegister = ARMRegisters::r0;
- static const RegisterID cachedResultRegister = ARMRegisters::r0;
- static const RegisterID firstArgumentRegister = ARMRegisters::r0;
-
- static const RegisterID timeoutCheckRegister = ARMRegisters::r5;
- static const RegisterID callFrameRegister = ARMRegisters::r4;
-
- static const RegisterID regT0 = ARMRegisters::r0;
- static const RegisterID regT1 = ARMRegisters::r1;
- static const RegisterID regT2 = ARMRegisters::r2;
- // Callee preserved
- static const RegisterID regT3 = ARMRegisters::r7;
-
- static const RegisterID regS0 = ARMRegisters::S0;
- // Callee preserved
- static const RegisterID regS1 = ARMRegisters::S1;
-
- static const RegisterID regStackPtr = ARMRegisters::sp;
- static const RegisterID regLink = ARMRegisters::lr;
-
- static const FPRegisterID fpRegT0 = ARMRegisters::d0;
- static const FPRegisterID fpRegT1 = ARMRegisters::d1;
- static const FPRegisterID fpRegT2 = ARMRegisters::d2;
-#else
- #error "JIT not supported on this platform."
-#endif
-
static const int patchGetByIdDefaultStructure = -1;
// Magic number - initial offset cannot be representable as a signed 8bit value, or the X86Assembler
// will compress the displacement, and we may not be able to fit a patched offset.
return JIT(globalData, codeBlock).privateCompile();
}
- static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress)
+ static void compileGetByIdProto(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
JIT jit(globalData, codeBlock);
- jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, cachedOffset, returnAddress, callFrame);
+ jit.privateCompileGetByIdProto(stubInfo, structure, prototypeStructure, ident, slot, cachedOffset, returnAddress, callFrame);
}
- static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
+ static void compileGetByIdSelfList(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
JIT jit(globalData, codeBlock);
- jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, cachedOffset);
+ jit.privateCompileGetByIdSelfList(stubInfo, polymorphicStructures, currentIndex, structure, ident, slot, cachedOffset);
}
- static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset)
+ static void compileGetByIdProtoList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
JIT jit(globalData, codeBlock);
- jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, cachedOffset, callFrame);
+ jit.privateCompileGetByIdProtoList(stubInfo, prototypeStructureList, currentIndex, structure, prototypeStructure, ident, slot, cachedOffset, callFrame);
}
- static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset)
+ static void compileGetByIdChainList(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructureList, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
JIT jit(globalData, codeBlock);
- jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, cachedOffset, callFrame);
+ jit.privateCompileGetByIdChainList(stubInfo, prototypeStructureList, currentIndex, structure, chain, count, ident, slot, cachedOffset, callFrame);
}
- static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress)
+ static void compileGetByIdChain(JSGlobalData* globalData, CallFrame* callFrame, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress)
{
JIT jit(globalData, codeBlock);
- jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, cachedOffset, returnAddress, callFrame);
+ jit.privateCompileGetByIdChain(stubInfo, structure, chain, count, ident, slot, cachedOffset, returnAddress, callFrame);
}
- static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
+ static void compilePutByIdTransition(JSGlobalData* globalData, CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
JIT jit(globalData, codeBlock);
- jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress);
+ jit.privateCompilePutByIdTransition(stubInfo, oldStructure, newStructure, cachedOffset, chain, returnAddress, direct);
}
- static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
+ static void compileCTIMachineTrampolines(JSGlobalData* globalData, RefPtr<ExecutablePool>* executablePool, TrampolineStructure *trampolines)
{
+ if (!globalData->canUseJIT())
+ return;
JIT jit(globalData);
- jit.privateCompileCTIMachineTrampolines(executablePool, globalData, ctiStringLengthTrampoline, ctiVirtualCallLink, ctiVirtualCall, ctiNativeCallThunk);
+ jit.privateCompileCTIMachineTrampolines(executablePool, globalData, trampolines);
}
static void patchGetByIdSelf(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
- static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress);
+ static void patchPutByIdReplace(CodeBlock* codeblock, StructureStubInfo*, Structure*, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct);
static void patchMethodCallProto(CodeBlock* codeblock, MethodCallLinkInfo&, JSFunction*, Structure*, JSObject*, ReturnAddressPtr);
static void compilePatchGetArrayLength(JSGlobalData* globalData, CodeBlock* codeBlock, ReturnAddressPtr returnAddress)
void privateCompileLinkPass();
void privateCompileSlowCases();
JITCode privateCompile();
- void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
- void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, size_t cachedOffset);
- void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame);
- void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame);
- void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
- void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress);
-
- void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk);
+ void privateCompileGetByIdProto(StructureStubInfo*, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
+ void privateCompileGetByIdSelfList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, const Identifier&, const PropertySlot&, size_t cachedOffset);
+ void privateCompileGetByIdProtoList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, Structure* prototypeStructure, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
+ void privateCompileGetByIdChainList(StructureStubInfo*, PolymorphicAccessStructureList*, int, Structure*, StructureChain* chain, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, CallFrame* callFrame);
+ void privateCompileGetByIdChain(StructureStubInfo*, Structure*, StructureChain*, size_t count, const Identifier&, const PropertySlot&, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame);
+ void privateCompilePutByIdTransition(StructureStubInfo*, Structure*, Structure*, size_t cachedOffset, StructureChain*, ReturnAddressPtr returnAddress, bool direct);
+
+ void privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* data, TrampolineStructure *trampolines);
void privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress);
void addSlowCase(Jump);
void emitLoadDouble(unsigned index, FPRegisterID value);
void emitLoadInt32ToDouble(unsigned index, FPRegisterID value);
- Address addressFor(unsigned index, RegisterID base = callFrameRegister);
-
void testPrototype(Structure*, JumpList& failureCases);
#if USE(JSVALUE32_64)
- Address tagFor(unsigned index, RegisterID base = callFrameRegister);
- Address payloadFor(unsigned index, RegisterID base = callFrameRegister);
-
bool getOperandConstantImmediateInt(unsigned op1, unsigned op2, unsigned& op, int32_t& constant);
void emitLoadTag(unsigned index, RegisterID tag);
static const int patchOffsetMethodCheckProtoStruct = 20;
static const int patchOffsetMethodCheckPutFunction = 32;
+ // sequenceOpCall
+ static const int sequenceOpCallInstructionSpace = 12;
+ static const int sequenceOpCallConstantSpace = 2;
+ // sequenceMethodCheck
+ static const int sequenceMethodCheckInstructionSpace = 40;
+ static const int sequenceMethodCheckConstantSpace = 6;
+ // sequenceGetByIdHotPath
+ static const int sequenceGetByIdHotPathInstructionSpace = 36;
+ static const int sequenceGetByIdHotPathConstantSpace = 4;
+ // sequenceGetByIdSlowCase
+ static const int sequenceGetByIdSlowCaseInstructionSpace = 40;
+ static const int sequenceGetByIdSlowCaseConstantSpace = 2;
+ // sequencePutById
+ static const int sequencePutByIdInstructionSpace = 36;
+ static const int sequencePutByIdConstantSpace = 4;
+#elif CPU(ARM_THUMB2)
+ // These architecture-specific values are used to enable patching - see comment on op_put_by_id.
+ static const int patchOffsetPutByIdStructure = 10;
+ static const int patchOffsetPutByIdExternalLoad = 26;
+ static const int patchLengthPutByIdExternalLoad = 12;
+ static const int patchOffsetPutByIdPropertyMapOffset1 = 46;
+ static const int patchOffsetPutByIdPropertyMapOffset2 = 58;
+ // These architecture-specific values are used to enable patching - see comment on op_get_by_id.
+ static const int patchOffsetGetByIdStructure = 10;
+ static const int patchOffsetGetByIdBranchToSlowCase = 26;
+ static const int patchOffsetGetByIdExternalLoad = 26;
+ static const int patchLengthGetByIdExternalLoad = 12;
+ static const int patchOffsetGetByIdPropertyMapOffset1 = 46;
+ static const int patchOffsetGetByIdPropertyMapOffset2 = 58;
+ static const int patchOffsetGetByIdPutResult = 62;
+#if ENABLE(OPCODE_SAMPLING)
+ #error "OPCODE_SAMPLING is not yet supported"
+#else
+ static const int patchOffsetGetByIdSlowCaseCall = 30;
+#endif
+ static const int patchOffsetOpCallCompareToJump = 16;
+
+ static const int patchOffsetMethodCheckProtoObj = 24;
+ static const int patchOffsetMethodCheckProtoStruct = 34;
+ static const int patchOffsetMethodCheckPutFunction = 58;
+
// sequenceOpCall
static const int sequenceOpCallInstructionSpace = 12;
static const int sequenceOpCallConstantSpace = 2;
void emitJumpSlowCaseIfNotJSCell(RegisterID);
void emitJumpSlowCaseIfNotJSCell(RegisterID, int VReg);
#if USE(JSVALUE64)
- JIT::Jump emitJumpIfImmediateNumber(RegisterID);
- JIT::Jump emitJumpIfNotImmediateNumber(RegisterID);
#else
JIT::Jump emitJumpIfImmediateNumber(RegisterID reg)
{
Jump emitFastArithDeTagImmediateJumpIfZero(RegisterID);
#endif
void emitFastArithReTagImmediate(RegisterID src, RegisterID dest);
- void emitFastArithImmToInt(RegisterID);
void emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest);
void emitTagAsBoolImmediate(RegisterID reg);
// sequencePutById
static const int sequencePutByIdInstructionSpace = 28;
static const int sequencePutByIdConstantSpace = 3;
+#elif CPU(MIPS)
+#if WTF_MIPS_ISA(1)
+ static const int patchOffsetPutByIdStructure = 16;
+ static const int patchOffsetPutByIdExternalLoad = 48;
+ static const int patchLengthPutByIdExternalLoad = 20;
+ static const int patchOffsetPutByIdPropertyMapOffset = 68;
+ static const int patchOffsetGetByIdStructure = 16;
+ static const int patchOffsetGetByIdBranchToSlowCase = 48;
+ static const int patchOffsetGetByIdExternalLoad = 48;
+ static const int patchLengthGetByIdExternalLoad = 20;
+ static const int patchOffsetGetByIdPropertyMapOffset = 68;
+ static const int patchOffsetGetByIdPutResult = 88;
+#if ENABLE(OPCODE_SAMPLING)
+ #error "OPCODE_SAMPLING is not yet supported"
+#else
+ static const int patchOffsetGetByIdSlowCaseCall = 40;
+#endif
+ static const int patchOffsetOpCallCompareToJump = 32;
+ static const int patchOffsetMethodCheckProtoObj = 32;
+ static const int patchOffsetMethodCheckProtoStruct = 56;
+ static const int patchOffsetMethodCheckPutFunction = 88;
+#else // WTF_MIPS_ISA(1)
+ static const int patchOffsetPutByIdStructure = 12;
+ static const int patchOffsetPutByIdExternalLoad = 44;
+ static const int patchLengthPutByIdExternalLoad = 16;
+ static const int patchOffsetPutByIdPropertyMapOffset = 60;
+ static const int patchOffsetGetByIdStructure = 12;
+ static const int patchOffsetGetByIdBranchToSlowCase = 44;
+ static const int patchOffsetGetByIdExternalLoad = 44;
+ static const int patchLengthGetByIdExternalLoad = 16;
+ static const int patchOffsetGetByIdPropertyMapOffset = 60;
+ static const int patchOffsetGetByIdPutResult = 76;
+#if ENABLE(OPCODE_SAMPLING)
+ #error "OPCODE_SAMPLING is not yet supported"
+#else
+ static const int patchOffsetGetByIdSlowCaseCall = 40;
+#endif
+ static const int patchOffsetOpCallCompareToJump = 32;
+ static const int patchOffsetMethodCheckProtoObj = 32;
+ static const int patchOffsetMethodCheckProtoStruct = 52;
+ static const int patchOffsetMethodCheckPutFunction = 84;
+#endif
#endif
#endif // USE(JSVALUE32_64)
void emit_op_jneq_ptr(Instruction*);
void emit_op_jnless(Instruction*);
void emit_op_jless(Instruction*);
+ void emit_op_jlesseq(Instruction*, bool invert = false);
void emit_op_jnlesseq(Instruction*);
void emit_op_jsr(Instruction*);
void emit_op_jtrue(Instruction*);
void emit_op_put_setter(Instruction*);
void emit_op_resolve(Instruction*);
void emit_op_resolve_base(Instruction*);
- void emit_op_resolve_global(Instruction*);
+ void emit_op_resolve_global(Instruction*, bool dynamic = false);
+ void emit_op_resolve_global_dynamic(Instruction*);
void emit_op_resolve_skip(Instruction*);
void emit_op_resolve_with_base(Instruction*);
void emit_op_ret(Instruction*);
void emit_op_to_jsnumber(Instruction*);
void emit_op_to_primitive(Instruction*);
void emit_op_unexpected_load(Instruction*);
+ void emit_op_urshift(Instruction*);
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ void softModulo();
+#endif
void emitSlow_op_add(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_bitand(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jfalse(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jnless(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jless(Instruction*, Vector<SlowCaseEntry>::iterator&);
+ void emitSlow_op_jlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&, bool invert = false);
void emitSlow_op_jnlesseq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_jtrue(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_loop_if_less(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_resolve_global(Instruction*, Vector<SlowCaseEntry>::iterator&);
+ void emitSlow_op_resolve_global_dynamic(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_rshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_stricteq(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_sub(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_to_jsnumber(Instruction*, Vector<SlowCaseEntry>::iterator&);
void emitSlow_op_to_primitive(Instruction*, Vector<SlowCaseEntry>::iterator&);
+ void emitSlow_op_urshift(Instruction*, Vector<SlowCaseEntry>::iterator&);
+
+
+ void emitRightShift(Instruction*, bool isUnsigned);
+ void emitRightShiftSlowCase(Instruction*, Vector<SlowCaseEntry>::iterator&, bool isUnsigned);
/* These functions are deprecated: Please use JITStubCall instead. */
void emitPutJITStubArg(RegisterID src, unsigned argumentNumber);
JSValue getConstantOperand(unsigned src);
bool isOperandConstantImmediateInt(unsigned src);
+ bool isOperandConstantImmediateChar(unsigned src);
Jump getSlowCase(Vector<SlowCaseEntry>::iterator& iter)
{
void restoreReturnAddressBeforeReturn(RegisterID);
void restoreReturnAddressBeforeReturn(Address);
+ // Loads the character value of a single character string into dst.
+ void emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures);
+
void emitTimeoutCheck();
#ifndef NDEBUG
void printBytecodeOperandTypes(unsigned src1, unsigned src2);
int m_uninterruptedConstantSequenceBegin;
#endif
#endif
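+ // Builds a shared stub that op_get_by_val can use to fetch single characters
+ // from a JSString without taking the full C++ slow path.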
+ static PassRefPtr<NativeExecutable> stringGetByValStubGenerator(JSGlobalData* globalData, ExecutablePool* pool);
} JIT_CLASS_ALIGNMENT;
inline void JIT::emit_op_loop(Instruction* currentInstruction)
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "CodeBlock.h"
#include "JITInlineMethods.h"
#include "JITStubCall.h"
+#include "JITStubs.h"
#include "JSArray.h"
#include "JSFunction.h"
#include "Interpreter.h"
namespace JSC {
-#if USE(JSVALUE32_64)
-
-void JIT::emit_op_negate(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src = currentInstruction[2].u.operand;
-
- emitLoad(src, regT1, regT0);
-
- Jump srcNotInt = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
- addSlowCase(branch32(Equal, regT0, Imm32(0)));
-
- neg32(regT0);
- emitStoreInt32(dst, regT0, (dst == src));
-
- Jump end = jump();
-
- srcNotInt.link(this);
- addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
-
- xor32(Imm32(1 << 31), regT1);
- store32(regT1, tagFor(dst));
- if (dst != src)
- store32(regT0, payloadFor(dst));
-
- end.link(this);
-}
-
-void JIT::emitSlow_op_negate(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
-
- linkSlowCase(iter); // 0 check
- linkSlowCase(iter); // double check
-
- JITStubCall stubCall(this, cti_op_negate);
- stubCall.addArgument(regT1, regT0);
- stubCall.call(dst);
-}
-
-void JIT::emit_op_jnless(Instruction* currentInstruction)
-{
- unsigned op1 = currentInstruction[1].u.operand;
- unsigned op2 = currentInstruction[2].u.operand;
- unsigned target = currentInstruction[3].u.operand;
-
- JumpList notInt32Op1;
- JumpList notInt32Op2;
-
- // Int32 less.
- if (isOperandConstantImmediateInt(op1)) {
- emitLoad(op2, regT3, regT2);
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- addJump(branch32(LessThanOrEqual, regT2, Imm32(getConstantOperand(op1).asInt32())), target);
- } else if (isOperandConstantImmediateInt(op2)) {
- emitLoad(op1, regT1, regT0);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
- } else {
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- addJump(branch32(GreaterThanOrEqual, regT0, regT2), target);
- }
-
- if (!supportsFloatingPoint()) {
- addSlowCase(notInt32Op1);
- addSlowCase(notInt32Op2);
- return;
- }
- Jump end = jump();
-
- // Double less.
- emitBinaryDoubleOp(op_jnless, target, op1, op2, OperandTypes(), notInt32Op1, notInt32Op2, !isOperandConstantImmediateInt(op1), isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2));
- end.link(this);
-}
-
-void JIT::emitSlow_op_jnless(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned op1 = currentInstruction[1].u.operand;
- unsigned op2 = currentInstruction[2].u.operand;
- unsigned target = currentInstruction[3].u.operand;
-
- if (!supportsFloatingPoint()) {
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
- } else {
- if (!isOperandConstantImmediateInt(op1)) {
- linkSlowCase(iter); // double check
- linkSlowCase(iter); // int32 check
- }
- if (isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // double check
- }
-
- JITStubCall stubCall(this, cti_op_jless);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call();
- emitJumpSlowToHot(branchTest32(Zero, regT0), target);
-}
-
-void JIT::emit_op_jless(Instruction* currentInstruction)
-{
- unsigned op1 = currentInstruction[1].u.operand;
- unsigned op2 = currentInstruction[2].u.operand;
- unsigned target = currentInstruction[3].u.operand;
-
- JumpList notInt32Op1;
- JumpList notInt32Op2;
-
- // Int32 less.
- if (isOperandConstantImmediateInt(op1)) {
- emitLoad(op2, regT3, regT2);
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- addJump(branch32(GreaterThan, regT2, Imm32(getConstantOperand(op1).asInt32())), target);
- } else if (isOperandConstantImmediateInt(op2)) {
- emitLoad(op1, regT1, regT0);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addJump(branch32(LessThan, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
- } else {
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- addJump(branch32(LessThan, regT0, regT2), target);
- }
-
- if (!supportsFloatingPoint()) {
- addSlowCase(notInt32Op1);
- addSlowCase(notInt32Op2);
- return;
- }
- Jump end = jump();
-
- // Double less.
- emitBinaryDoubleOp(op_jless, target, op1, op2, OperandTypes(), notInt32Op1, notInt32Op2, !isOperandConstantImmediateInt(op1), isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2));
- end.link(this);
-}
-
-void JIT::emitSlow_op_jless(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned op1 = currentInstruction[1].u.operand;
- unsigned op2 = currentInstruction[2].u.operand;
- unsigned target = currentInstruction[3].u.operand;
-
- if (!supportsFloatingPoint()) {
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
- } else {
- if (!isOperandConstantImmediateInt(op1)) {
- linkSlowCase(iter); // double check
- linkSlowCase(iter); // int32 check
- }
- if (isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // double check
- }
-
- JITStubCall stubCall(this, cti_op_jless);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call();
- emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
-}
-
-void JIT::emit_op_jnlesseq(Instruction* currentInstruction)
-{
- unsigned op1 = currentInstruction[1].u.operand;
- unsigned op2 = currentInstruction[2].u.operand;
- unsigned target = currentInstruction[3].u.operand;
-
- JumpList notInt32Op1;
- JumpList notInt32Op2;
-
- // Int32 less.
- if (isOperandConstantImmediateInt(op1)) {
- emitLoad(op2, regT3, regT2);
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- addJump(branch32(LessThan, regT2, Imm32(getConstantOperand(op1).asInt32())), target);
- } else if (isOperandConstantImmediateInt(op2)) {
- emitLoad(op1, regT1, regT0);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addJump(branch32(GreaterThan, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
- } else {
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- addJump(branch32(GreaterThan, regT0, regT2), target);
- }
-
- if (!supportsFloatingPoint()) {
- addSlowCase(notInt32Op1);
- addSlowCase(notInt32Op2);
- return;
- }
- Jump end = jump();
-
- // Double less.
- emitBinaryDoubleOp(op_jnlesseq, target, op1, op2, OperandTypes(), notInt32Op1, notInt32Op2, !isOperandConstantImmediateInt(op1), isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2));
- end.link(this);
-}
-
-void JIT::emitSlow_op_jnlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned op1 = currentInstruction[1].u.operand;
- unsigned op2 = currentInstruction[2].u.operand;
- unsigned target = currentInstruction[3].u.operand;
-
- if (!supportsFloatingPoint()) {
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
- } else {
- if (!isOperandConstantImmediateInt(op1)) {
- linkSlowCase(iter); // double check
- linkSlowCase(iter); // int32 check
- }
- if (isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // double check
- }
-
- JITStubCall stubCall(this, cti_op_jlesseq);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call();
- emitJumpSlowToHot(branchTest32(Zero, regT0), target);
-}
-
-// LeftShift (<<)
-
-void JIT::emit_op_lshift(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (isOperandConstantImmediateInt(op2)) {
- emitLoad(op1, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- lshift32(Imm32(getConstantOperand(op2).asInt32()), regT0);
- emitStoreInt32(dst, regT0, dst == op1);
- return;
- }
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- if (!isOperandConstantImmediateInt(op1))
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- lshift32(regT2, regT0);
- emitStoreInt32(dst, regT0, dst == op1 || dst == op2);
-}
-
-void JIT::emitSlow_op_lshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
-
- JITStubCall stubCall(this, cti_op_lshift);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-// RightShift (>>)
-
-void JIT::emit_op_rshift(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (isOperandConstantImmediateInt(op2)) {
- emitLoad(op1, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- rshift32(Imm32(getConstantOperand(op2).asInt32()), regT0);
- emitStoreInt32(dst, regT0, dst == op1);
- return;
- }
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- if (!isOperandConstantImmediateInt(op1))
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- rshift32(regT2, regT0);
- emitStoreInt32(dst, regT0, dst == op1 || dst == op2);
-}
-
-void JIT::emitSlow_op_rshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
-
- JITStubCall stubCall(this, cti_op_rshift);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-// BitAnd (&)
-
-void JIT::emit_op_bitand(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- unsigned op;
- int32_t constant;
- if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
- emitLoad(op, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- and32(Imm32(constant), regT0);
- emitStoreInt32(dst, regT0, (op == dst));
- return;
- }
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- and32(regT2, regT0);
- emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
-}
-
-void JIT::emitSlow_op_bitand(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
-
- JITStubCall stubCall(this, cti_op_bitand);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-// BitOr (|)
-
-void JIT::emit_op_bitor(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- unsigned op;
- int32_t constant;
- if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
- emitLoad(op, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- or32(Imm32(constant), regT0);
- emitStoreInt32(dst, regT0, (op == dst));
- return;
- }
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- or32(regT2, regT0);
- emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
-}
-
-void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
-
- JITStubCall stubCall(this, cti_op_bitor);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-// BitXor (^)
-
-void JIT::emit_op_bitxor(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- unsigned op;
- int32_t constant;
- if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
- emitLoad(op, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- xor32(Imm32(constant), regT0);
- emitStoreInt32(dst, regT0, (op == dst));
- return;
- }
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- xor32(regT2, regT0);
- emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
-}
-
-void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
-
- JITStubCall stubCall(this, cti_op_bitxor);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-// BitNot (~)
-
-void JIT::emit_op_bitnot(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src = currentInstruction[2].u.operand;
-
- emitLoad(src, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
-
- not32(regT0);
- emitStoreInt32(dst, regT0, (dst == src));
-}
-
-void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
-
- linkSlowCase(iter); // int32 check
-
- JITStubCall stubCall(this, cti_op_bitnot);
- stubCall.addArgument(regT1, regT0);
- stubCall.call(dst);
-}
-
-// PostInc (i++)
-
-void JIT::emit_op_post_inc(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned srcDst = currentInstruction[2].u.operand;
-
- emitLoad(srcDst, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
-
- if (dst == srcDst) // x = x++ is a noop for ints.
- return;
-
- emitStoreInt32(dst, regT0);
-
- addSlowCase(branchAdd32(Overflow, Imm32(1), regT0));
- emitStoreInt32(srcDst, regT0, true);
-}
-
-void JIT::emitSlow_op_post_inc(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned srcDst = currentInstruction[2].u.operand;
-
- linkSlowCase(iter); // int32 check
- if (dst != srcDst)
- linkSlowCase(iter); // overflow check
-
- JITStubCall stubCall(this, cti_op_post_inc);
- stubCall.addArgument(srcDst);
- stubCall.addArgument(Imm32(srcDst));
- stubCall.call(dst);
-}
-
-// PostDec (i--)
-
-void JIT::emit_op_post_dec(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned srcDst = currentInstruction[2].u.operand;
-
- emitLoad(srcDst, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
-
- if (dst == srcDst) // x = x-- is a noop for ints.
- return;
-
- emitStoreInt32(dst, regT0);
-
- addSlowCase(branchSub32(Overflow, Imm32(1), regT0));
- emitStoreInt32(srcDst, regT0, true);
-}
-
-void JIT::emitSlow_op_post_dec(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned srcDst = currentInstruction[2].u.operand;
-
- linkSlowCase(iter); // int32 check
- if (dst != srcDst)
- linkSlowCase(iter); // overflow check
-
- JITStubCall stubCall(this, cti_op_post_dec);
- stubCall.addArgument(srcDst);
- stubCall.addArgument(Imm32(srcDst));
- stubCall.call(dst);
-}
-
-// PreInc (++i)
-
-void JIT::emit_op_pre_inc(Instruction* currentInstruction)
-{
- unsigned srcDst = currentInstruction[1].u.operand;
-
- emitLoad(srcDst, regT1, regT0);
-
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addSlowCase(branchAdd32(Overflow, Imm32(1), regT0));
- emitStoreInt32(srcDst, regT0, true);
-}
-
-void JIT::emitSlow_op_pre_inc(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned srcDst = currentInstruction[1].u.operand;
-
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // overflow check
-
- JITStubCall stubCall(this, cti_op_pre_inc);
- stubCall.addArgument(srcDst);
- stubCall.call(srcDst);
-}
-
-// PreDec (--i)
-
-void JIT::emit_op_pre_dec(Instruction* currentInstruction)
-{
- unsigned srcDst = currentInstruction[1].u.operand;
-
- emitLoad(srcDst, regT1, regT0);
-
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addSlowCase(branchSub32(Overflow, Imm32(1), regT0));
- emitStoreInt32(srcDst, regT0, true);
-}
-
-void JIT::emitSlow_op_pre_dec(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned srcDst = currentInstruction[1].u.operand;
-
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // overflow check
-
- JITStubCall stubCall(this, cti_op_pre_dec);
- stubCall.addArgument(srcDst);
- stubCall.call(srcDst);
-}
-
-// Addition (+)
-
-void JIT::emit_op_add(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
- OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
-
- if (!types.first().mightBeNumber() || !types.second().mightBeNumber()) {
- JITStubCall stubCall(this, cti_op_add);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
- return;
- }
-
- JumpList notInt32Op1;
- JumpList notInt32Op2;
-
- unsigned op;
- int32_t constant;
- if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
- emitAdd32Constant(dst, op, constant, op == op1 ? types.first() : types.second());
- return;
- }
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
-
- // Int32 case.
- addSlowCase(branchAdd32(Overflow, regT2, regT0));
- emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
-
- if (!supportsFloatingPoint()) {
- addSlowCase(notInt32Op1);
- addSlowCase(notInt32Op2);
- return;
- }
- Jump end = jump();
-
- // Double case.
- emitBinaryDoubleOp(op_add, dst, op1, op2, types, notInt32Op1, notInt32Op2);
- end.link(this);
-}
-
-void JIT::emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType)
-{
- // Int32 case.
- emitLoad(op, regT1, regT0);
- Jump notInt32 = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
- addSlowCase(branchAdd32(Overflow, Imm32(constant), regT0));
- emitStoreInt32(dst, regT0, (op == dst));
-
- // Double case.
- if (!supportsFloatingPoint()) {
- addSlowCase(notInt32);
- return;
- }
- Jump end = jump();
-
- notInt32.link(this);
- if (!opType.definitelyIsNumber())
- addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
- move(Imm32(constant), regT2);
- convertInt32ToDouble(regT2, fpRegT0);
- emitLoadDouble(op, fpRegT1);
- addDouble(fpRegT1, fpRegT0);
- emitStoreDouble(dst, fpRegT0);
-
- end.link(this);
-}
-
-void JIT::emitSlow_op_add(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
- OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
-
- if (!types.first().mightBeNumber() || !types.second().mightBeNumber())
- return;
-
- unsigned op;
- int32_t constant;
- if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
- linkSlowCase(iter); // overflow check
-
- if (!supportsFloatingPoint())
- linkSlowCase(iter); // non-sse case
- else {
- ResultType opType = op == op1 ? types.first() : types.second();
- if (!opType.definitelyIsNumber())
- linkSlowCase(iter); // double check
- }
- } else {
- linkSlowCase(iter); // overflow check
-
- if (!supportsFloatingPoint()) {
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
- } else {
- if (!types.first().definitelyIsNumber())
- linkSlowCase(iter); // double check
-
- if (!types.second().definitelyIsNumber()) {
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // double check
- }
- }
- }
-
- JITStubCall stubCall(this, cti_op_add);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-// Subtraction (-)
-
-void JIT::emit_op_sub(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
- OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
-
- JumpList notInt32Op1;
- JumpList notInt32Op2;
-
- if (isOperandConstantImmediateInt(op2)) {
- emitSub32Constant(dst, op1, getConstantOperand(op2).asInt32(), types.first());
- return;
- }
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
-
- // Int32 case.
- addSlowCase(branchSub32(Overflow, regT2, regT0));
- emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
-
- if (!supportsFloatingPoint()) {
- addSlowCase(notInt32Op1);
- addSlowCase(notInt32Op2);
- return;
- }
- Jump end = jump();
-
- // Double case.
- emitBinaryDoubleOp(op_sub, dst, op1, op2, types, notInt32Op1, notInt32Op2);
- end.link(this);
-}
-
-void JIT::emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType)
-{
- // Int32 case.
- emitLoad(op, regT1, regT0);
- Jump notInt32 = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
- addSlowCase(branchSub32(Overflow, Imm32(constant), regT0));
- emitStoreInt32(dst, regT0, (op == dst));
-
- // Double case.
- if (!supportsFloatingPoint()) {
- addSlowCase(notInt32);
- return;
- }
- Jump end = jump();
-
- notInt32.link(this);
- if (!opType.definitelyIsNumber())
- addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
- move(Imm32(constant), regT2);
- convertInt32ToDouble(regT2, fpRegT0);
- emitLoadDouble(op, fpRegT1);
- subDouble(fpRegT0, fpRegT1);
- emitStoreDouble(dst, fpRegT1);
-
- end.link(this);
-}
-
-void JIT::emitSlow_op_sub(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
- OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
-
- if (isOperandConstantImmediateInt(op2)) {
- linkSlowCase(iter); // overflow check
-
- if (!supportsFloatingPoint() || !types.first().definitelyIsNumber())
- linkSlowCase(iter); // int32 or double check
- } else {
- linkSlowCase(iter); // overflow check
-
- if (!supportsFloatingPoint()) {
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
- } else {
- if (!types.first().definitelyIsNumber())
- linkSlowCase(iter); // double check
-
- if (!types.second().definitelyIsNumber()) {
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // double check
- }
- }
- }
-
- JITStubCall stubCall(this, cti_op_sub);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-void JIT::emitBinaryDoubleOp(OpcodeID opcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes types, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters, bool op2IsInRegisters)
-{
- JumpList end;
-
- if (!notInt32Op1.empty()) {
- // Double case 1: Op1 is not int32; Op2 is unknown.
- notInt32Op1.link(this);
-
- ASSERT(op1IsInRegisters);
-
- // Verify Op1 is double.
- if (!types.first().definitelyIsNumber())
- addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
-
- if (!op2IsInRegisters)
- emitLoad(op2, regT3, regT2);
-
- Jump doubleOp2 = branch32(Below, regT3, Imm32(JSValue::LowestTag));
-
- if (!types.second().definitelyIsNumber())
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
-
- convertInt32ToDouble(regT2, fpRegT0);
- Jump doTheMath = jump();
-
- // Load Op2 as double into double register.
- doubleOp2.link(this);
- emitLoadDouble(op2, fpRegT0);
-
- // Do the math.
- doTheMath.link(this);
- switch (opcodeID) {
- case op_mul:
- emitLoadDouble(op1, fpRegT2);
- mulDouble(fpRegT2, fpRegT0);
- emitStoreDouble(dst, fpRegT0);
- break;
- case op_add:
- emitLoadDouble(op1, fpRegT2);
- addDouble(fpRegT2, fpRegT0);
- emitStoreDouble(dst, fpRegT0);
- break;
- case op_sub:
- emitLoadDouble(op1, fpRegT1);
- subDouble(fpRegT0, fpRegT1);
- emitStoreDouble(dst, fpRegT1);
- break;
- case op_div:
- emitLoadDouble(op1, fpRegT1);
- divDouble(fpRegT0, fpRegT1);
- emitStoreDouble(dst, fpRegT1);
- break;
- case op_jnless:
- emitLoadDouble(op1, fpRegT2);
- addJump(branchDouble(DoubleLessThanOrEqualOrUnordered, fpRegT0, fpRegT2), dst);
- break;
- case op_jless:
- emitLoadDouble(op1, fpRegT2);
- addJump(branchDouble(DoubleLessThan, fpRegT2, fpRegT0), dst);
- break;
- case op_jnlesseq:
- emitLoadDouble(op1, fpRegT2);
- addJump(branchDouble(DoubleLessThanOrUnordered, fpRegT0, fpRegT2), dst);
- break;
- default:
- ASSERT_NOT_REACHED();
- }
-
- if (!notInt32Op2.empty())
- end.append(jump());
- }
-
- if (!notInt32Op2.empty()) {
- // Double case 2: Op1 is int32; Op2 is not int32.
- notInt32Op2.link(this);
-
- ASSERT(op2IsInRegisters);
-
- if (!op1IsInRegisters)
- emitLoadPayload(op1, regT0);
-
- convertInt32ToDouble(regT0, fpRegT0);
-
- // Verify op2 is double.
- if (!types.second().definitelyIsNumber())
- addSlowCase(branch32(Above, regT3, Imm32(JSValue::LowestTag)));
-
- // Do the math.
- switch (opcodeID) {
- case op_mul:
- emitLoadDouble(op2, fpRegT2);
- mulDouble(fpRegT2, fpRegT0);
- emitStoreDouble(dst, fpRegT0);
- break;
- case op_add:
- emitLoadDouble(op2, fpRegT2);
- addDouble(fpRegT2, fpRegT0);
- emitStoreDouble(dst, fpRegT0);
- break;
- case op_sub:
- emitLoadDouble(op2, fpRegT2);
- subDouble(fpRegT2, fpRegT0);
- emitStoreDouble(dst, fpRegT0);
- break;
- case op_div:
- emitLoadDouble(op2, fpRegT2);
- divDouble(fpRegT2, fpRegT0);
- emitStoreDouble(dst, fpRegT0);
- break;
- case op_jnless:
- emitLoadDouble(op2, fpRegT1);
- addJump(branchDouble(DoubleLessThanOrEqualOrUnordered, fpRegT1, fpRegT0), dst);
- break;
- case op_jless:
- emitLoadDouble(op2, fpRegT1);
- addJump(branchDouble(DoubleLessThan, fpRegT0, fpRegT1), dst);
- break;
- case op_jnlesseq:
- emitLoadDouble(op2, fpRegT1);
- addJump(branchDouble(DoubleLessThanOrUnordered, fpRegT1, fpRegT0), dst);
- break;
- default:
- ASSERT_NOT_REACHED();
- }
- }
-
- end.link(this);
-}
-
-// Multiplication (*)
-
-void JIT::emit_op_mul(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
- OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
-
- JumpList notInt32Op1;
- JumpList notInt32Op2;
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
-
- // Int32 case.
- move(regT0, regT3);
- addSlowCase(branchMul32(Overflow, regT2, regT0));
- addSlowCase(branchTest32(Zero, regT0));
- emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
-
- if (!supportsFloatingPoint()) {
- addSlowCase(notInt32Op1);
- addSlowCase(notInt32Op2);
- return;
- }
- Jump end = jump();
-
- // Double case.
- emitBinaryDoubleOp(op_mul, dst, op1, op2, types, notInt32Op1, notInt32Op2);
- end.link(this);
-}
-
-void JIT::emitSlow_op_mul(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
- OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
-
- Jump overflow = getSlowCase(iter); // overflow check
- linkSlowCase(iter); // zero result check
-
- Jump negZero = branchOr32(Signed, regT2, regT3);
- emitStoreInt32(dst, Imm32(0), (op1 == dst || op2 == dst));
-
- emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_mul));
-
- negZero.link(this);
- overflow.link(this);
-
- if (!supportsFloatingPoint()) {
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
- }
-
- if (supportsFloatingPoint()) {
- if (!types.first().definitelyIsNumber())
- linkSlowCase(iter); // double check
-
- if (!types.second().definitelyIsNumber()) {
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // double check
- }
- }
-
- Label jitStubCall(this);
- JITStubCall stubCall(this, cti_op_mul);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-// Division (/)
-
-void JIT::emit_op_div(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
- OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
-
- if (!supportsFloatingPoint()) {
- addSlowCase(jump());
- return;
- }
-
- // Int32 divide.
- JumpList notInt32Op1;
- JumpList notInt32Op2;
-
- JumpList end;
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
-
- notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
-
- convertInt32ToDouble(regT0, fpRegT0);
- convertInt32ToDouble(regT2, fpRegT1);
- divDouble(fpRegT1, fpRegT0);
-
- JumpList doubleResult;
- branchConvertDoubleToInt32(fpRegT0, regT0, doubleResult, fpRegT1);
-
- // Int32 result.
- emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
- end.append(jump());
-
- // Double result.
- doubleResult.link(this);
- emitStoreDouble(dst, fpRegT0);
- end.append(jump());
-
- // Double divide.
- emitBinaryDoubleOp(op_div, dst, op1, op2, types, notInt32Op1, notInt32Op2);
- end.link(this);
-}
-
-void JIT::emitSlow_op_div(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
- OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
-
- if (!supportsFloatingPoint())
- linkSlowCase(iter);
- else {
- if (!types.first().definitelyIsNumber())
- linkSlowCase(iter); // double check
-
- if (!types.second().definitelyIsNumber()) {
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // double check
- }
- }
-
- JITStubCall stubCall(this, cti_op_div);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-// Mod (%)
-
-/* ------------------------------ BEGIN: OP_MOD ------------------------------ */
-
-#if CPU(X86) || CPU(X86_64)
-
-void JIT::emit_op_mod(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (isOperandConstantImmediateInt(op2) && getConstantOperand(op2).asInt32() != 0) {
- emitLoad(op1, X86Registers::edx, X86Registers::eax);
- move(Imm32(getConstantOperand(op2).asInt32()), X86Registers::ecx);
- addSlowCase(branch32(NotEqual, X86Registers::edx, Imm32(JSValue::Int32Tag)));
- if (getConstantOperand(op2).asInt32() == -1)
- addSlowCase(branch32(Equal, X86Registers::eax, Imm32(0x80000000))); // -2147483648 / -1 => EXC_ARITHMETIC
- } else {
- emitLoad2(op1, X86Registers::edx, X86Registers::eax, op2, X86Registers::ebx, X86Registers::ecx);
- addSlowCase(branch32(NotEqual, X86Registers::edx, Imm32(JSValue::Int32Tag)));
- addSlowCase(branch32(NotEqual, X86Registers::ebx, Imm32(JSValue::Int32Tag)));
-
- addSlowCase(branch32(Equal, X86Registers::eax, Imm32(0x80000000))); // -2147483648 / -1 => EXC_ARITHMETIC
- addSlowCase(branch32(Equal, X86Registers::ecx, Imm32(0))); // divide by 0
- }
-
- move(X86Registers::eax, X86Registers::ebx); // Save dividend payload, in case of 0.
- m_assembler.cdq();
- m_assembler.idivl_r(X86Registers::ecx);
-
- // If the remainder is zero and the dividend is negative, the result is -0.
- Jump storeResult1 = branchTest32(NonZero, X86Registers::edx);
- Jump storeResult2 = branchTest32(Zero, X86Registers::ebx, Imm32(0x80000000)); // not negative
- emitStore(dst, jsNumber(m_globalData, -0.0));
- Jump end = jump();
-
- storeResult1.link(this);
- storeResult2.link(this);
- emitStoreInt32(dst, X86Registers::edx, (op1 == dst || op2 == dst));
- end.link(this);
-}
-
-void JIT::emitSlow_op_mod(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- if (isOperandConstantImmediateInt(op2) && getConstantOperand(op2).asInt32() != 0) {
- linkSlowCase(iter); // int32 check
- if (getConstantOperand(op2).asInt32() == -1)
- linkSlowCase(iter); // 0x80000000 check
- } else {
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // 0 check
- linkSlowCase(iter); // 0x80000000 check
- }
-
- JITStubCall stubCall(this, cti_op_mod);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-#else // CPU(X86) || CPU(X86_64)
-
-void JIT::emit_op_mod(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_mod);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call(dst);
-}
-
-void JIT::emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&)
-{
-}
-
-#endif // CPU(X86) || CPU(X86_64)
-
-/* ------------------------------ END: OP_MOD ------------------------------ */
-
-#else // USE(JSVALUE32_64)
+#if !USE(JSVALUE32_64)
void JIT::emit_op_lshift(Instruction* currentInstruction)
{
stubCall.call(result);
}
+void JIT::emit_op_urshift(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ // Slow case of urshift makes assumptions about what registers hold the
+ // shift arguments, so any changes here must be mirrored there as well.
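+ // (The fast path leaves op1 in regT0 and, when op2 is not constant, op2 in regT1.)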
+ if (isOperandConstantImmediateInt(op2)) {
+ emitGetVirtualRegister(op1, regT0);
+ emitJumpSlowCaseIfNotImmediateInteger(regT0);
+ emitFastArithImmToInt(regT0);
+ int shift = getConstantOperand(op2).asInt32();
+ if (shift)
+ urshift32(Imm32(shift & 0x1f), regT0);
+ // unsigned shift < 0 or shift = k*2^32 may result in (essentially)
+ // a toUint conversion, which can produce a value we cannot represent
+ // as an immediate int.
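+ // For example, in JS (-1) >>> 0 evaluates to 4294967295, which is too large
+ // for a signed immediate int, hence the sign check below.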
+ if (shift < 0 || !(shift & 31))
+ addSlowCase(branch32(LessThan, regT0, Imm32(0)));
+#if USE(JSVALUE32)
+ addSlowCase(branchAdd32(Overflow, regT0, regT0));
+ signExtend32ToPtr(regT0, regT0);
+#endif
+ emitFastArithReTagImmediate(regT0, regT0);
+ emitPutVirtualRegister(dst, regT0);
+ return;
+ }
+ emitGetVirtualRegisters(op1, regT0, op2, regT1);
+ if (!isOperandConstantImmediateInt(op1))
+ emitJumpSlowCaseIfNotImmediateInteger(regT0);
+ emitJumpSlowCaseIfNotImmediateInteger(regT1);
+ emitFastArithImmToInt(regT0);
+ emitFastArithImmToInt(regT1);
+ urshift32(regT1, regT0);
+ addSlowCase(branch32(LessThan, regT0, Imm32(0)));
+#if USE(JSVALUE32)
+ addSlowCase(branchAdd32(Overflow, regT0, regT0));
+ signExtend32ToPtr(regT0, regT0);
+#endif
+ emitFastArithReTagImmediate(regT0, regT0);
+ emitPutVirtualRegister(dst, regT0);
+}
+
+void JIT::emitSlow_op_urshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ if (isOperandConstantImmediateInt(op2)) {
+ int shift = getConstantOperand(op2).asInt32();
+ // op1 = regT0
+ linkSlowCase(iter); // int32 check
+#if USE(JSVALUE64)
+ if (supportsFloatingPointTruncate()) {
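+ // op1 was not an immediate int; if it is a double that can be truncated to an
+ // int32, perform the shift on the truncated value before falling back to the stub.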
+ JumpList failures;
+ failures.append(emitJumpIfNotImmediateNumber(regT0)); // op1 is not a double
+ addPtr(tagTypeNumberRegister, regT0);
+ movePtrToDouble(regT0, fpRegT0);
+ failures.append(branchTruncateDoubleToInt32(fpRegT0, regT0));
+ if (shift)
+ urshift32(Imm32(shift & 0x1f), regT0);
+ if (shift < 0 || !(shift & 31))
+ failures.append(branch32(LessThan, regT0, Imm32(0)));
+ emitFastArithReTagImmediate(regT0, regT0);
+ emitPutVirtualRegister(dst, regT0);
+ emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_rshift));
+ failures.link(this);
+ }
+#endif // JSVALUE64
+ if (shift < 0 || !(shift & 31))
+ linkSlowCase(iter); // failed to box in hot path
+#if USE(JSVALUE32)
+ linkSlowCase(iter); // Couldn't box result
+#endif
+ } else {
+ // op1 = regT0
+ // op2 = regT1
+ if (!isOperandConstantImmediateInt(op1)) {
+ linkSlowCase(iter); // int32 check -- op1 is not an int
+#if USE(JSVALUE64)
+ if (supportsFloatingPointTruncate()) {
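+ // Same double-truncation retry as the constant-shift case above, except that
+ // op2 must also prove to be an immediate int.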
+ JumpList failures;
+ failures.append(emitJumpIfNotImmediateNumber(regT0)); // op1 is not a double
+ addPtr(tagTypeNumberRegister, regT0);
+ movePtrToDouble(regT0, fpRegT0);
+ failures.append(branchTruncateDoubleToInt32(fpRegT0, regT0));
+ failures.append(emitJumpIfNotImmediateInteger(regT1)); // op2 is not an int
+ emitFastArithImmToInt(regT1);
+ urshift32(regT1, regT0);
+ failures.append(branch32(LessThan, regT0, Imm32(0)));
+ emitFastArithReTagImmediate(regT0, regT0);
+ emitPutVirtualRegister(dst, regT0);
+ emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_rshift));
+ failures.link(this);
+ }
+#endif
+ }
+
+ linkSlowCase(iter); // int32 check - op2 is not an int
+ linkSlowCase(iter); // Can't represent unsigned result as an immediate
+#if USE(JSVALUE32)
+ linkSlowCase(iter); // Couldn't box result
+#endif
+ }
+
+ JITStubCall stubCall(this, cti_op_urshift);
+ stubCall.addArgument(op1, regT0);
+ stubCall.addArgument(op2, regT1);
+ stubCall.call(dst);
+}
+
void JIT::emit_op_jnless(Instruction* currentInstruction)
{
unsigned op1 = currentInstruction[1].u.operand;
// - constant int immediate to int immediate
// - int immediate to int immediate
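+ // Additionally, when one operand is a constant single-character string we
+ // compare the other operand's character code directly; emitLoadCharacterString
+ // bails to the slow case for anything that is not a one-character string.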
+ if (isOperandConstantImmediateChar(op1)) {
+ emitGetVirtualRegister(op2, regT0);
+ addSlowCase(emitJumpIfNotJSCell(regT0));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(LessThanOrEqual, regT0, Imm32(asString(getConstantOperand(op1))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateChar(op2)) {
+ emitGetVirtualRegister(op1, regT0);
+ addSlowCase(emitJumpIfNotJSCell(regT0));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(GreaterThanOrEqual, regT0, Imm32(asString(getConstantOperand(op2))->tryGetValue()[0])), target);
+ return;
+ }
if (isOperandConstantImmediateInt(op2)) {
emitGetVirtualRegister(op1, regT0);
emitJumpSlowCaseIfNotImmediateInteger(regT0);
// - floating-point number to constant int immediate
// - constant int immediate to floating-point number
// - floating-point number to floating-point number.
+ if (isOperandConstantImmediateChar(op1) || isOperandConstantImmediateChar(op2)) {
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ JITStubCall stubCall(this, cti_op_jless);
+ stubCall.addArgument(op1, regT0);
+ stubCall.addArgument(op2, regT1);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(Zero, regT0), target);
+ return;
+ }
if (isOperandConstantImmediateInt(op2)) {
linkSlowCase(iter);
Jump fail2 = checkStructure(regT0, m_globalData->numberStructure.get());
loadDouble(Address(regT0, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT0);
#endif
-
+
int32_t op2imm = getConstantOperand(op2).asInt32();;
-
+
move(Imm32(op2imm), regT1);
convertInt32ToDouble(regT1, fpRegT1);
Jump fail1;
if (!m_codeBlock->isKnownNotImmediate(op2))
fail1 = emitJumpIfNotJSCell(regT1);
-
+
Jump fail2 = checkStructure(regT1, m_globalData->numberStructure.get());
loadDouble(Address(regT1, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT1);
#endif
-
+
int32_t op1imm = getConstantOperand(op1).asInt32();;
-
+
move(Imm32(op1imm), regT0);
convertInt32ToDouble(regT0, fpRegT0);
// - constant int immediate to int immediate
// - int immediate to int immediate
+ if (isOperandConstantImmediateChar(op1)) {
+ emitGetVirtualRegister(op2, regT0);
+ addSlowCase(emitJumpIfNotJSCell(regT0));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(GreaterThan, regT0, Imm32(asString(getConstantOperand(op1))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateChar(op2)) {
+ emitGetVirtualRegister(op1, regT0);
+ addSlowCase(emitJumpIfNotJSCell(regT0));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(LessThan, regT0, Imm32(asString(getConstantOperand(op2))->tryGetValue()[0])), target);
+ return;
+ }
if (isOperandConstantImmediateInt(op2)) {
emitGetVirtualRegister(op1, regT0);
emitJumpSlowCaseIfNotImmediateInteger(regT0);
// - floating-point number to constant int immediate
// - constant int immediate to floating-point number
// - floating-point number to floating-point number.
+ if (isOperandConstantImmediateChar(op1) || isOperandConstantImmediateChar(op2)) {
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ JITStubCall stubCall(this, cti_op_jless);
+ stubCall.addArgument(op1, regT0);
+ stubCall.addArgument(op2, regT1);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
+ return;
+ }
if (isOperandConstantImmediateInt(op2)) {
linkSlowCase(iter);
Jump fail2 = checkStructure(regT0, m_globalData->numberStructure.get());
loadDouble(Address(regT0, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT0);
#endif
-
+
int32_t op2imm = getConstantOperand(op2).asInt32();
-
+
move(Imm32(op2imm), regT1);
convertInt32ToDouble(regT1, fpRegT1);
Jump fail1;
if (!m_codeBlock->isKnownNotImmediate(op2))
fail1 = emitJumpIfNotJSCell(regT1);
-
+
Jump fail2 = checkStructure(regT1, m_globalData->numberStructure.get());
loadDouble(Address(regT1, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT1);
#endif
-
+
int32_t op1imm = getConstantOperand(op1).asInt32();
-
+
move(Imm32(op1imm), regT0);
convertInt32ToDouble(regT0, fpRegT0);
}
}
-void JIT::emit_op_jnlesseq(Instruction* currentInstruction)
+void JIT::emit_op_jlesseq(Instruction* currentInstruction, bool invert)
{
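+ // Shared implementation for op_jlesseq and op_jnlesseq: when 'invert' is set
+ // the branch conditions below are flipped, so we jump when !(op1 <= op2).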
unsigned op1 = currentInstruction[1].u.operand;
unsigned op2 = currentInstruction[2].u.operand;
// - constant int immediate to int immediate
// - int immediate to int immediate
+ if (isOperandConstantImmediateChar(op1)) {
+ emitGetVirtualRegister(op2, regT0);
+ addSlowCase(emitJumpIfNotJSCell(regT0));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(invert ? LessThan : GreaterThanOrEqual, regT0, Imm32(asString(getConstantOperand(op1))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateChar(op2)) {
+ emitGetVirtualRegister(op1, regT0);
+ addSlowCase(emitJumpIfNotJSCell(regT0));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(invert ? GreaterThan : LessThanOrEqual, regT0, Imm32(asString(getConstantOperand(op2))->tryGetValue()[0])), target);
+ return;
+ }
if (isOperandConstantImmediateInt(op2)) {
emitGetVirtualRegister(op1, regT0);
emitJumpSlowCaseIfNotImmediateInteger(regT0);
#else
int32_t op2imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op2)));
#endif
- addJump(branch32(GreaterThan, regT0, Imm32(op2imm)), target);
+ addJump(branch32(invert ? GreaterThan : LessThanOrEqual, regT0, Imm32(op2imm)), target);
} else if (isOperandConstantImmediateInt(op1)) {
emitGetVirtualRegister(op2, regT1);
emitJumpSlowCaseIfNotImmediateInteger(regT1);
#else
int32_t op1imm = static_cast<int32_t>(JSImmediate::rawValue(getConstantOperand(op1)));
#endif
- addJump(branch32(LessThan, regT1, Imm32(op1imm)), target);
+ addJump(branch32(invert ? LessThan : GreaterThanOrEqual, regT1, Imm32(op1imm)), target);
} else {
emitGetVirtualRegisters(op1, regT0, op2, regT1);
emitJumpSlowCaseIfNotImmediateInteger(regT0);
emitJumpSlowCaseIfNotImmediateInteger(regT1);
- addJump(branch32(GreaterThan, regT0, regT1), target);
+ addJump(branch32(invert ? GreaterThan : LessThanOrEqual, regT0, regT1), target);
}
}
-void JIT::emitSlow_op_jnlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+void JIT::emitSlow_op_jlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter, bool invert)
{
unsigned op1 = currentInstruction[1].u.operand;
unsigned op2 = currentInstruction[2].u.operand;
// - constant int immediate to floating-point number
// - floating-point number to floating-point number.
+ if (isOperandConstantImmediateChar(op1) || isOperandConstantImmediateChar(op2)) {
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ JITStubCall stubCall(this, cti_op_jlesseq);
+ stubCall.addArgument(op1, regT0);
+ stubCall.addArgument(op2, regT1);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, regT0), target);
+ return;
+ }
+
if (isOperandConstantImmediateInt(op2)) {
linkSlowCase(iter);
Jump fail2 = checkStructure(regT0, m_globalData->numberStructure.get());
loadDouble(Address(regT0, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT0);
#endif
-
+
int32_t op2imm = getConstantOperand(op2).asInt32();;
-
+
move(Imm32(op2imm), regT1);
convertInt32ToDouble(regT1, fpRegT1);
- emitJumpSlowToHot(branchDouble(DoubleLessThanOrUnordered, fpRegT1, fpRegT0), target);
+ emitJumpSlowToHot(branchDouble(invert ? DoubleLessThanOrUnordered : DoubleGreaterThanOrEqual, fpRegT1, fpRegT0), target);
emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnlesseq));
stubCall.addArgument(regT0);
stubCall.addArgument(op2, regT2);
stubCall.call();
- emitJumpSlowToHot(branchTest32(Zero, regT0), target);
+ emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, regT0), target);
} else if (isOperandConstantImmediateInt(op1)) {
linkSlowCase(iter);
Jump fail1;
if (!m_codeBlock->isKnownNotImmediate(op2))
fail1 = emitJumpIfNotJSCell(regT1);
-
+
Jump fail2 = checkStructure(regT1, m_globalData->numberStructure.get());
loadDouble(Address(regT1, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT1);
#endif
-
+
int32_t op1imm = getConstantOperand(op1).asInt32();;
-
+
move(Imm32(op1imm), regT0);
convertInt32ToDouble(regT0, fpRegT0);
- emitJumpSlowToHot(branchDouble(DoubleLessThanOrUnordered, fpRegT1, fpRegT0), target);
+ emitJumpSlowToHot(branchDouble(invert ? DoubleLessThanOrUnordered : DoubleGreaterThanOrEqual, fpRegT1, fpRegT0), target);
emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnlesseq));
stubCall.addArgument(op1, regT2);
stubCall.addArgument(regT1);
stubCall.call();
- emitJumpSlowToHot(branchTest32(Zero, regT0), target);
+ emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, regT0), target);
} else {
linkSlowCase(iter);
loadDouble(Address(regT1, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT1);
#endif
- emitJumpSlowToHot(branchDouble(DoubleLessThanOrUnordered, fpRegT1, fpRegT0), target);
+ emitJumpSlowToHot(branchDouble(invert ? DoubleLessThanOrUnordered : DoubleGreaterThanOrEqual, fpRegT1, fpRegT0), target);
emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_jnlesseq));
stubCall.addArgument(regT0);
stubCall.addArgument(regT1);
stubCall.call();
- emitJumpSlowToHot(branchTest32(Zero, regT0), target);
+ emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, regT0), target);
}
}
+void JIT::emit_op_jnlesseq(Instruction* currentInstruction)
+{
+ emit_op_jlesseq(currentInstruction, true);
+}
+
+void JIT::emitSlow_op_jnlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ emitSlow_op_jlesseq(currentInstruction, iter, true);
+}
+
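A quick way to read the new invert parameter, sketched here as plain C++ rather than anything in the patch: op_jnlesseq becomes op_jlesseq with the branch sense flipped, and on the double path the inverted branch also has to fire for unordered (NaN) operands.

    #include <cstdio>

    // Sketch only: the semantics the invert flag has to preserve.
    static bool jumpTaken(double op1, double op2, bool invert)
    {
        bool lessEq = op1 <= op2;          // false when either operand is NaN
        return invert ? !lessEq : lessEq;  // so jnlesseq jumps on NaN
    }

    int main()
    {
        std::printf("%d %d %d\n",
                    jumpTaken(1, 2, false),          // jlesseq taken: 1
                    jumpTaken(1, 2, true),           // jnlesseq not taken: 0
                    jumpTaken(0.0 / 0.0, 1, true));  // NaN: jnlesseq taken: 1
        return 0;
    }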
void JIT::emit_op_bitand(Instruction* currentInstruction)
{
unsigned result = currentInstruction[1].u.operand;
unsigned op1 = currentInstruction[2].u.operand;
unsigned op2 = currentInstruction[3].u.operand;
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ emitGetVirtualRegisters(op1, regT0, op2, regT2);
+ emitJumpSlowCaseIfNotImmediateInteger(regT0);
+ emitJumpSlowCaseIfNotImmediateInteger(regT2);
+
+ addSlowCase(branch32(Equal, regT2, Imm32(1)));
+
+ emitNakedCall(m_globalData->jitStubs.ctiSoftModulo());
+
+ emitPutVirtualRegister(result, regT0);
+#else
JITStubCall stubCall(this, cti_op_mod);
stubCall.addArgument(op1, regT2);
stubCall.addArgument(op2, regT2);
stubCall.call(result);
+#endif
}
-void JIT::emitSlow_op_mod(Instruction*, Vector<SlowCaseEntry>::iterator&)
+void JIT::emitSlow_op_mod(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ unsigned result = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ JITStubCall stubCall(this, cti_op_mod);
+ stubCall.addArgument(op1, regT2);
+ stubCall.addArgument(op2, regT2);
+ stubCall.call(result);
+#else
ASSERT_NOT_REACHED();
+#endif
}
#endif // CPU(X86) || CPU(X86_64)
{
// We assume that subtracting TagTypeNumber is equivalent to adding DoubleEncodeOffset.
COMPILE_ASSERT(((JSImmediate::TagTypeNumber + JSImmediate::DoubleEncodeOffset) == 0), TagTypeNumber_PLUS_DoubleEncodeOffset_EQUALS_0);
-
+
Jump notImm1;
Jump notImm2;
if (op1HasImmediateIntFastCase) {
movePtrToDouble(regT0, fpRegT0);
skipDoubleLoad.link(this);
}
-
+
if (isOperandConstantImmediateDouble(op2)) {
emitGetVirtualRegister(op2, regT1);
addPtr(tagTypeNumberRegister, regT1);
op1imm.link(this);
emitFastArithImmToInt(regT0);
convertInt32ToDouble(regT0, fpRegT0);
- // (1c)
+ // (1c)
loadedDouble.link(this);
if (opcodeID == op_add)
addDouble(Address(regT1, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT0);
op2imm.link(this);
emitFastArithImmToInt(regT1);
convertInt32ToDouble(regT1, fpRegT1);
- // (1c)
+ // (1c)
loadedDouble.link(this);
loadDouble(Address(regT0, OBJECT_OFFSETOF(JSNumberCell, m_value)), fpRegT0);
if (opcodeID == op_add)
Jump op2NonZero = branchTest32(NonZero, regT1);
op1Zero.link(this);
// if either input is zero, add the two together, and check if the result is < 0.
- // If it is, we have a problem (N < 0), (N * 0) == -0, not representatble as a JSImmediate.
+ // If it is, we have a problem: for N < 0, (N * 0) == -0, which is not representable as a JSImmediate.
move(regT0, regT2);
addSlowCase(branchAdd32(Signed, regT1, regT2));
// Skip the above check if neither input is zero
/* ------------------------------ END: OP_ADD, OP_SUB, OP_MUL ------------------------------ */
-#endif // USE(JSVALUE32_64)
+#endif // !USE(JSVALUE32_64)
} // namespace JSC
--- /dev/null
+/*
+* Copyright (C) 2008 Apple Inc. All rights reserved.
+*
+* Redistribution and use in source and binary forms, with or without
+* modification, are permitted provided that the following conditions
+* are met:
+* 1. Redistributions of source code must retain the above copyright
+* notice, this list of conditions and the following disclaimer.
+* 2. Redistributions in binary form must reproduce the above copyright
+* notice, this list of conditions and the following disclaimer in the
+* documentation and/or other materials provided with the distribution.
+*
+* THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include "config.h"
+#include "JIT.h"
+
+#if ENABLE(JIT)
+
+#include "CodeBlock.h"
+#include "JITInlineMethods.h"
+#include "JITStubCall.h"
+#include "JITStubs.h"
+#include "JSArray.h"
+#include "JSFunction.h"
+#include "Interpreter.h"
+#include "ResultType.h"
+#include "SamplingTool.h"
+
+#ifndef NDEBUG
+#include <stdio.h>
+#endif
+
+using namespace std;
+
+namespace JSC {
+
+#if USE(JSVALUE32_64)
+
+void JIT::emit_op_negate(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src = currentInstruction[2].u.operand;
+
+ emitLoad(src, regT1, regT0);
+
+ Jump srcNotInt = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
+ addSlowCase(branchTest32(Zero, regT0, Imm32(0x7fffffff)));
+ neg32(regT0);
+ emitStoreInt32(dst, regT0, (dst == src));
+
+ Jump end = jump();
+
+ srcNotInt.link(this);
+ addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
+
+ xor32(Imm32(1 << 31), regT1);
+ store32(regT1, tagFor(dst));
+ if (dst != src)
+ store32(regT0, payloadFor(dst));
+
+ end.link(this);
+}
+
+void JIT::emitSlow_op_negate(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+
+ linkSlowCase(iter); // 0x7fffffff check
+ linkSlowCase(iter); // double check
+
+ JITStubCall stubCall(this, cti_op_negate);
+ stubCall.addArgument(regT1, regT0);
+ stubCall.call(dst);
+}
+
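A minimal sketch (not part of the patch) of why the fast path above rejects payloads 0 and 0x80000000, and of what flipping bit 31 of the tag word does on the double path:

    #include <cstdint>
    #include <cstdio>
    #include <cstring>

    int main()
    {
        // Negating integer 0 must yield -0.0, which only a double can carry.
        std::printf("1 / -0.0 = %f\n", 1.0 / -0.0);

        // Negating INT32_MIN overflows int32, so it cannot stay an immediate.
        std::printf("-INT32_MIN = %lld\n", -static_cast<long long>(INT32_MIN));

        // The non-int path negates a double by flipping its sign bit, which in
        // the 32-bit value representation lives in the tag (high) word.
        double d = 3.5;
        uint64_t bits;
        std::memcpy(&bits, &d, sizeof d);
        bits ^= 1ull << 63;
        std::memcpy(&d, &bits, sizeof d);
        std::printf("negated: %f\n", d);  // -3.500000
        return 0;
    }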
+void JIT::emit_op_jnless(Instruction* currentInstruction)
+{
+ unsigned op1 = currentInstruction[1].u.operand;
+ unsigned op2 = currentInstruction[2].u.operand;
+ unsigned target = currentInstruction[3].u.operand;
+
+ JumpList notInt32Op1;
+ JumpList notInt32Op2;
+
+ // Character less.
+ if (isOperandConstantImmediateChar(op1)) {
+ emitLoad(op2, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(LessThanOrEqual, regT0, Imm32(asString(getConstantOperand(op1))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateChar(op2)) {
+ emitLoad(op1, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(GreaterThanOrEqual, regT0, Imm32(asString(getConstantOperand(op2))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateInt(op1)) {
+ // Int32 less.
+ emitLoad(op2, regT3, regT2);
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(LessThanOrEqual, regT2, Imm32(getConstantOperand(op1).asInt32())), target);
+ } else if (isOperandConstantImmediateInt(op2)) {
+ emitLoad(op1, regT1, regT0);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
+ } else {
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(GreaterThanOrEqual, regT0, regT2), target);
+ }
+
+ if (!supportsFloatingPoint()) {
+ addSlowCase(notInt32Op1);
+ addSlowCase(notInt32Op2);
+ return;
+ }
+ Jump end = jump();
+
+ // Double less.
+ emitBinaryDoubleOp(op_jnless, target, op1, op2, OperandTypes(), notInt32Op1, notInt32Op2, !isOperandConstantImmediateInt(op1), isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2));
+ end.link(this);
+}
+
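For reference, a hedged sketch of the constant-character fast path above: when one operand is a known one-character string, a comparison such as x < "b" reduces to a single 16-bit compare against x's first code unit, and anything that is not a plain one-character string falls back to the slow case.

    #include <cstdio>
    #include <string>

    // Sketch only; the real fast path falls through to the slow case (and a
    // stub call) instead of returning false for a non one-character string.
    static bool jnlessJumpTaken(const std::u16string& op1, char16_t constantOp2)
    {
        if (op1.size() != 1)
            return false;               // JIT: slow case, not a decision
        return !(op1[0] < constantOp2); // jnless jumps when "less" fails
    }

    int main()
    {
        std::printf("%d %d\n",
                    jnlessJumpTaken(u"a", u'b'),  // 'a' < 'b' holds -> 0
                    jnlessJumpTaken(u"c", u'b')); // 'c' < 'b' fails -> 1
        return 0;
    }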
+void JIT::emitSlow_op_jnless(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned op1 = currentInstruction[1].u.operand;
+ unsigned op2 = currentInstruction[2].u.operand;
+ unsigned target = currentInstruction[3].u.operand;
+
+ if (isOperandConstantImmediateChar(op1) || isOperandConstantImmediateChar(op2)) {
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ } else {
+ if (!supportsFloatingPoint()) {
+ if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+ } else {
+ if (!isOperandConstantImmediateInt(op1)) {
+ linkSlowCase(iter); // double check
+ linkSlowCase(iter); // int32 check
+ }
+ if (isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // double check
+ }
+ }
+
+ JITStubCall stubCall(this, cti_op_jless);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(Zero, regT0), target);
+}
+
+void JIT::emit_op_jless(Instruction* currentInstruction)
+{
+ unsigned op1 = currentInstruction[1].u.operand;
+ unsigned op2 = currentInstruction[2].u.operand;
+ unsigned target = currentInstruction[3].u.operand;
+
+ JumpList notInt32Op1;
+ JumpList notInt32Op2;
+
+ // Character less.
+ if (isOperandConstantImmediateChar(op1)) {
+ emitLoad(op2, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(GreaterThan, regT0, Imm32(asString(getConstantOperand(op1))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateChar(op2)) {
+ emitLoad(op1, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(LessThan, regT0, Imm32(asString(getConstantOperand(op2))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateInt(op1)) {
+ emitLoad(op2, regT3, regT2);
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(GreaterThan, regT2, Imm32(getConstantOperand(op1).asInt32())), target);
+ } else if (isOperandConstantImmediateInt(op2)) {
+ emitLoad(op1, regT1, regT0);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(LessThan, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
+ } else {
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(LessThan, regT0, regT2), target);
+ }
+
+ if (!supportsFloatingPoint()) {
+ addSlowCase(notInt32Op1);
+ addSlowCase(notInt32Op2);
+ return;
+ }
+ Jump end = jump();
+
+ // Double less.
+ emitBinaryDoubleOp(op_jless, target, op1, op2, OperandTypes(), notInt32Op1, notInt32Op2, !isOperandConstantImmediateInt(op1), isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2));
+ end.link(this);
+}
+
+void JIT::emitSlow_op_jless(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned op1 = currentInstruction[1].u.operand;
+ unsigned op2 = currentInstruction[2].u.operand;
+ unsigned target = currentInstruction[3].u.operand;
+
+ if (isOperandConstantImmediateChar(op1) || isOperandConstantImmediateChar(op2)) {
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ } else {
+ if (!supportsFloatingPoint()) {
+ if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+ } else {
+ if (!isOperandConstantImmediateInt(op1)) {
+ linkSlowCase(iter); // double check
+ linkSlowCase(iter); // int32 check
+ }
+ if (isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // double check
+ }
+ }
+ JITStubCall stubCall(this, cti_op_jless);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
+}
+
+void JIT::emit_op_jlesseq(Instruction* currentInstruction, bool invert)
+{
+ unsigned op1 = currentInstruction[1].u.operand;
+ unsigned op2 = currentInstruction[2].u.operand;
+ unsigned target = currentInstruction[3].u.operand;
+
+ JumpList notInt32Op1;
+ JumpList notInt32Op2;
+
+ // Character less.
+ if (isOperandConstantImmediateChar(op1)) {
+ emitLoad(op2, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(invert ? LessThan : GreaterThanOrEqual, regT0, Imm32(asString(getConstantOperand(op1))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateChar(op2)) {
+ emitLoad(op1, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+ JumpList failures;
+ emitLoadCharacterString(regT0, regT0, failures);
+ addSlowCase(failures);
+ addJump(branch32(invert ? GreaterThan : LessThanOrEqual, regT0, Imm32(asString(getConstantOperand(op2))->tryGetValue()[0])), target);
+ return;
+ }
+ if (isOperandConstantImmediateInt(op1)) {
+ emitLoad(op2, regT3, regT2);
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(invert ? LessThan : GreaterThanOrEqual, regT2, Imm32(getConstantOperand(op1).asInt32())), target);
+ } else if (isOperandConstantImmediateInt(op2)) {
+ emitLoad(op1, regT1, regT0);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(invert ? GreaterThan : LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
+ } else {
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(invert ? GreaterThan : LessThanOrEqual, regT0, regT2), target);
+ }
+
+ if (!supportsFloatingPoint()) {
+ addSlowCase(notInt32Op1);
+ addSlowCase(notInt32Op2);
+ return;
+ }
+ Jump end = jump();
+
+ // Double less.
+ emitBinaryDoubleOp(invert ? op_jnlesseq : op_jlesseq, target, op1, op2, OperandTypes(), notInt32Op1, notInt32Op2, !isOperandConstantImmediateInt(op1), isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2));
+ end.link(this);
+}
+
+void JIT::emitSlow_op_jlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter, bool invert)
+{
+ unsigned op1 = currentInstruction[1].u.operand;
+ unsigned op2 = currentInstruction[2].u.operand;
+ unsigned target = currentInstruction[3].u.operand;
+
+ if (isOperandConstantImmediateChar(op1) || isOperandConstantImmediateChar(op2)) {
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ } else {
+ if (!supportsFloatingPoint()) {
+ if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+ } else {
+ if (!isOperandConstantImmediateInt(op1)) {
+ linkSlowCase(iter); // double check
+ linkSlowCase(iter); // int32 check
+ }
+ if (isOperandConstantImmediateInt(op1) || !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // double check
+ }
+ }
+
+ JITStubCall stubCall(this, cti_op_jlesseq);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(invert ? Zero : NonZero, regT0), target);
+}
+
+void JIT::emit_op_jnlesseq(Instruction* currentInstruction)
+{
+ emit_op_jlesseq(currentInstruction, true);
+}
+
+void JIT::emitSlow_op_jnlesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ emitSlow_op_jlesseq(currentInstruction, iter, true);
+}
+
+// LeftShift (<<)
+
+void JIT::emit_op_lshift(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ if (isOperandConstantImmediateInt(op2)) {
+ emitLoad(op1, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ lshift32(Imm32(getConstantOperand(op2).asInt32()), regT0);
+ emitStoreInt32(dst, regT0, dst == op1);
+ return;
+ }
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ if (!isOperandConstantImmediateInt(op1))
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ lshift32(regT2, regT0);
+ emitStoreInt32(dst, regT0, dst == op1 || dst == op2);
+}
+
+void JIT::emitSlow_op_lshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+
+ JITStubCall stubCall(this, cti_op_lshift);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
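One detail worth keeping in mind when reading the lshift fast path, shown as a standalone sketch rather than the JIT's code: ECMAScript masks the shift count to its low five bits, and on x86 the shift instructions apply the same masking, so handing the count to lshift32 preserves the language semantics.

    #include <cstdint>
    #include <cstdio>

    static int32_t jsLeftShift(int32_t lhs, uint32_t count)
    {
        // ECMAScript: only the low five bits of the count are used.
        return static_cast<int32_t>(static_cast<uint32_t>(lhs) << (count & 31));
    }

    int main()
    {
        std::printf("%d %d\n",
                    jsLeftShift(1, 33),  // 1 << 33 === 2 in JS
                    jsLeftShift(1, 31)); // -2147483648 (sign bit set)
        return 0;
    }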
+// RightShift (>>) and UnsignedRightShift (>>>) helper
+
+void JIT::emitRightShift(Instruction* currentInstruction, bool isUnsigned)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ // Slow case of rshift makes assumptions about what registers hold the
+ // shift arguments, so any changes must be updated there as well.
+ if (isOperandConstantImmediateInt(op2)) {
+ emitLoad(op1, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ int shift = getConstantOperand(op2).asInt32();
+ if (isUnsigned) {
+ if (shift)
+ urshift32(Imm32(shift & 0x1f), regT0);
+ // unsigned shift < 0 or shift = k*2^32 is (essentially) a toUint
+ // conversion, which can produce a value too large to represent as an
+ // immediate int, so take the slow case if the result looks negative.
+ if (shift < 0 || !(shift & 31))
+ addSlowCase(branch32(LessThan, regT0, Imm32(0)));
+ } else if (shift) { // signed right shift by zero is simply toInt conversion
+ rshift32(Imm32(shift & 0x1f), regT0);
+ }
+ emitStoreInt32(dst, regT0, dst == op1);
+ return;
+ }
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ if (!isOperandConstantImmediateInt(op1))
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ if (isUnsigned) {
+ urshift32(regT2, regT0);
+ addSlowCase(branch32(LessThan, regT0, Imm32(0)));
+ } else
+ rshift32(regT2, regT0);
+ emitStoreInt32(dst, regT0, dst == op1 || dst == op2);
+}
+
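A small sketch of the corner case the extra LessThan slow-case check above guards: an unsigned right shift by 0 (mod 32) is effectively a ToUint32 conversion, and the result can exceed INT32_MAX, so it no longer fits in an int32 immediate.

    #include <cstdint>
    #include <cstdio>

    static uint32_t jsUnsignedRightShift(int32_t lhs, uint32_t count)
    {
        return static_cast<uint32_t>(lhs) >> (count & 31);
    }

    int main()
    {
        uint32_t r = jsUnsignedRightShift(-1, 0); // (-1) >>> 0 === 4294967295
        std::printf("%u fits in int32: %d\n", r, r <= static_cast<uint32_t>(INT32_MAX));
        return 0;
    }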
+void JIT::emitRightShiftSlowCase(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter, bool isUnsigned)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ if (isOperandConstantImmediateInt(op2)) {
+ int shift = getConstantOperand(op2).asInt32();
+ // op1 = regT1:regT0
+ linkSlowCase(iter); // int32 check
+ if (supportsFloatingPointTruncate()) {
+ JumpList failures;
+ failures.append(branch32(AboveOrEqual, regT1, Imm32(JSValue::LowestTag)));
+ emitLoadDouble(op1, fpRegT0);
+ failures.append(branchTruncateDoubleToInt32(fpRegT0, regT0));
+ if (isUnsigned) {
+ if (shift)
+ urshift32(Imm32(shift & 0x1f), regT0);
+ if (shift < 0 || !(shift & 31))
+ failures.append(branch32(LessThan, regT0, Imm32(0)));
+ } else if (shift)
+ rshift32(Imm32(shift & 0x1f), regT0);
+ emitStoreInt32(dst, regT0, false);
+ emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_rshift));
+ failures.link(this);
+ }
+ if (isUnsigned && (shift < 0 || !(shift & 31)))
+ linkSlowCase(iter); // failed to box in hot path
+ } else {
+ // op1 = regT1:regT0
+ // op2 = regT3:regT2
+ if (!isOperandConstantImmediateInt(op1)) {
+ linkSlowCase(iter); // int32 check -- op1 is not an int
+ if (supportsFloatingPointTruncate()) {
+ Jump notDouble = branch32(Above, regT1, Imm32(JSValue::LowestTag)); // op1 is not a double
+ emitLoadDouble(op1, fpRegT0);
+ Jump notInt = branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)); // op2 is not an int
+ Jump cantTruncate = branchTruncateDoubleToInt32(fpRegT0, regT0);
+ if (isUnsigned)
+ urshift32(regT2, regT0);
+ else
+ rshift32(regT2, regT0);
+ emitStoreInt32(dst, regT0, false);
+ emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_rshift));
+ notDouble.link(this);
+ notInt.link(this);
+ cantTruncate.link(this);
+ }
+ }
+
+ linkSlowCase(iter); // int32 check - op2 is not an int
+ if (isUnsigned)
+ linkSlowCase(iter); // Can't represent unsigned result as an immediate
+ }
+
+ JITStubCall stubCall(this, isUnsigned ? cti_op_urshift : cti_op_rshift);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
+// RightShift (>>)
+
+void JIT::emit_op_rshift(Instruction* currentInstruction)
+{
+ emitRightShift(currentInstruction, false);
+}
+
+void JIT::emitSlow_op_rshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ emitRightShiftSlowCase(currentInstruction, iter, false);
+}
+
+// UnsignedRightShift (>>>)
+
+void JIT::emit_op_urshift(Instruction* currentInstruction)
+{
+ emitRightShift(currentInstruction, true);
+}
+
+void JIT::emitSlow_op_urshift(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ emitRightShiftSlowCase(currentInstruction, iter, true);
+}
+
+// BitAnd (&)
+
+void JIT::emit_op_bitand(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ unsigned op;
+ int32_t constant;
+ if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
+ emitLoad(op, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ and32(Imm32(constant), regT0);
+ emitStoreInt32(dst, regT0, (op == dst));
+ return;
+ }
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ and32(regT2, regT0);
+ emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
+}
+
+void JIT::emitSlow_op_bitand(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+
+ JITStubCall stubCall(this, cti_op_bitand);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
+// BitOr (|)
+
+void JIT::emit_op_bitor(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ unsigned op;
+ int32_t constant;
+ if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
+ emitLoad(op, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ or32(Imm32(constant), regT0);
+ emitStoreInt32(dst, regT0, (op == dst));
+ return;
+ }
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ or32(regT2, regT0);
+ emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
+}
+
+void JIT::emitSlow_op_bitor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+
+ JITStubCall stubCall(this, cti_op_bitor);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
+// BitXor (^)
+
+void JIT::emit_op_bitxor(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ unsigned op;
+ int32_t constant;
+ if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
+ emitLoad(op, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ xor32(Imm32(constant), regT0);
+ emitStoreInt32(dst, regT0, (op == dst));
+ return;
+ }
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ xor32(regT2, regT0);
+ emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
+}
+
+void JIT::emitSlow_op_bitxor(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+
+ JITStubCall stubCall(this, cti_op_bitxor);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
+// BitNot (~)
+
+void JIT::emit_op_bitnot(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src = currentInstruction[2].u.operand;
+
+ emitLoad(src, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+
+ not32(regT0);
+ emitStoreInt32(dst, regT0, (dst == src));
+}
+
+void JIT::emitSlow_op_bitnot(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+
+ linkSlowCase(iter); // int32 check
+
+ JITStubCall stubCall(this, cti_op_bitnot);
+ stubCall.addArgument(regT1, regT0);
+ stubCall.call(dst);
+}
+
+// PostInc (i++)
+
+void JIT::emit_op_post_inc(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned srcDst = currentInstruction[2].u.operand;
+
+ emitLoad(srcDst, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+
+ if (dst == srcDst) // x = x++ is a noop for ints.
+ return;
+
+ emitStoreInt32(dst, regT0);
+
+ addSlowCase(branchAdd32(Overflow, Imm32(1), regT0));
+ emitStoreInt32(srcDst, regT0, true);
+}
+
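The dst == srcDst early return above corresponds to the statement x = x++, which leaves an int32 x unchanged; a throwaway sketch of that evaluation order, not taken from the patch:

    #include <cstdio>

    // post_inc writes the old value to dst and old + 1 to srcDst; when the two
    // alias, the assignment of the old value wins, so nothing needs storing.
    static void postInc(int& dst, int& srcDst)
    {
        int oldValue = srcDst;
        srcDst = oldValue + 1;
        dst = oldValue;
    }

    int main()
    {
        int x = 5;
        postInc(x, x);           // models "x = x++"
        std::printf("%d\n", x);  // still 5
        return 0;
    }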
+void JIT::emitSlow_op_post_inc(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned srcDst = currentInstruction[2].u.operand;
+
+ linkSlowCase(iter); // int32 check
+ if (dst != srcDst)
+ linkSlowCase(iter); // overflow check
+
+ JITStubCall stubCall(this, cti_op_post_inc);
+ stubCall.addArgument(srcDst);
+ stubCall.addArgument(Imm32(srcDst));
+ stubCall.call(dst);
+}
+
+// PostDec (i--)
+
+void JIT::emit_op_post_dec(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned srcDst = currentInstruction[2].u.operand;
+
+ emitLoad(srcDst, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+
+ if (dst == srcDst) // x = x-- is a noop for ints.
+ return;
+
+ emitStoreInt32(dst, regT0);
+
+ addSlowCase(branchSub32(Overflow, Imm32(1), regT0));
+ emitStoreInt32(srcDst, regT0, true);
+}
+
+void JIT::emitSlow_op_post_dec(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned srcDst = currentInstruction[2].u.operand;
+
+ linkSlowCase(iter); // int32 check
+ if (dst != srcDst)
+ linkSlowCase(iter); // overflow check
+
+ JITStubCall stubCall(this, cti_op_post_dec);
+ stubCall.addArgument(srcDst);
+ stubCall.addArgument(Imm32(srcDst));
+ stubCall.call(dst);
+}
+
+// PreInc (++i)
+
+void JIT::emit_op_pre_inc(Instruction* currentInstruction)
+{
+ unsigned srcDst = currentInstruction[1].u.operand;
+
+ emitLoad(srcDst, regT1, regT0);
+
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branchAdd32(Overflow, Imm32(1), regT0));
+ emitStoreInt32(srcDst, regT0, true);
+}
+
+void JIT::emitSlow_op_pre_inc(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned srcDst = currentInstruction[1].u.operand;
+
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // overflow check
+
+ JITStubCall stubCall(this, cti_op_pre_inc);
+ stubCall.addArgument(srcDst);
+ stubCall.call(srcDst);
+}
+
+// PreDec (--i)
+
+void JIT::emit_op_pre_dec(Instruction* currentInstruction)
+{
+ unsigned srcDst = currentInstruction[1].u.operand;
+
+ emitLoad(srcDst, regT1, regT0);
+
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branchSub32(Overflow, Imm32(1), regT0));
+ emitStoreInt32(srcDst, regT0, true);
+}
+
+void JIT::emitSlow_op_pre_dec(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned srcDst = currentInstruction[1].u.operand;
+
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // overflow check
+
+ JITStubCall stubCall(this, cti_op_pre_dec);
+ stubCall.addArgument(srcDst);
+ stubCall.call(srcDst);
+}
+
+// Addition (+)
+
+void JIT::emit_op_add(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
+
+ if (!types.first().mightBeNumber() || !types.second().mightBeNumber()) {
+ JITStubCall stubCall(this, cti_op_add);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+ return;
+ }
+
+ JumpList notInt32Op1;
+ JumpList notInt32Op2;
+
+ unsigned op;
+ int32_t constant;
+ if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
+ emitAdd32Constant(dst, op, constant, op == op1 ? types.first() : types.second());
+ return;
+ }
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+
+ // Int32 case.
+ addSlowCase(branchAdd32(Overflow, regT2, regT0));
+ emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
+
+ if (!supportsFloatingPoint()) {
+ addSlowCase(notInt32Op1);
+ addSlowCase(notInt32Op2);
+ return;
+ }
+ Jump end = jump();
+
+ // Double case.
+ emitBinaryDoubleOp(op_add, dst, op1, op2, types, notInt32Op1, notInt32Op2);
+ end.link(this);
+}
+
+void JIT::emitAdd32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType)
+{
+ // Int32 case.
+ emitLoad(op, regT1, regT0);
+ Jump notInt32 = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
+ addSlowCase(branchAdd32(Overflow, Imm32(constant), regT0));
+ emitStoreInt32(dst, regT0, (op == dst));
+
+ // Double case.
+ if (!supportsFloatingPoint()) {
+ addSlowCase(notInt32);
+ return;
+ }
+ Jump end = jump();
+
+ notInt32.link(this);
+ if (!opType.definitelyIsNumber())
+ addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
+ move(Imm32(constant), regT2);
+ convertInt32ToDouble(regT2, fpRegT0);
+ emitLoadDouble(op, fpRegT1);
+ addDouble(fpRegT1, fpRegT0);
+ emitStoreDouble(dst, fpRegT0);
+
+ end.link(this);
+}
+
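A compact illustration, assuming the GCC/Clang __builtin_add_overflow intrinsic, of the invariant behind the overflow slow cases in emit_op_add and emitAdd32Constant: JavaScript addition never wraps, so an int32 overflow has to be redone in double arithmetic (inline here; via the slow path or cti_op_add in the JIT).

    #include <cstdint>
    #include <cstdio>

    static double jsAddInt32(int32_t a, int32_t b)
    {
        int32_t sum;
        if (!__builtin_add_overflow(a, b, &sum))
            return sum;  // still representable as an int32 immediate
        return static_cast<double>(a) + static_cast<double>(b);  // promote
    }

    int main()
    {
        std::printf("%.1f\n", jsAddInt32(INT32_MAX, 1));  // 2147483648.0
        return 0;
    }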
+void JIT::emitSlow_op_add(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
+
+ if (!types.first().mightBeNumber() || !types.second().mightBeNumber())
+ return;
+
+ unsigned op;
+ int32_t constant;
+ if (getOperandConstantImmediateInt(op1, op2, op, constant)) {
+ linkSlowCase(iter); // overflow check
+
+ if (!supportsFloatingPoint())
+ linkSlowCase(iter); // non-sse case
+ else {
+ ResultType opType = op == op1 ? types.first() : types.second();
+ if (!opType.definitelyIsNumber())
+ linkSlowCase(iter); // double check
+ }
+ } else {
+ linkSlowCase(iter); // overflow check
+
+ if (!supportsFloatingPoint()) {
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+ } else {
+ if (!types.first().definitelyIsNumber())
+ linkSlowCase(iter); // double check
+
+ if (!types.second().definitelyIsNumber()) {
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // double check
+ }
+ }
+ }
+
+ JITStubCall stubCall(this, cti_op_add);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
+// Subtraction (-)
+
+void JIT::emit_op_sub(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
+
+ JumpList notInt32Op1;
+ JumpList notInt32Op2;
+
+ if (isOperandConstantImmediateInt(op2)) {
+ emitSub32Constant(dst, op1, getConstantOperand(op2).asInt32(), types.first());
+ return;
+ }
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+
+ // Int32 case.
+ addSlowCase(branchSub32(Overflow, regT2, regT0));
+ emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
+
+ if (!supportsFloatingPoint()) {
+ addSlowCase(notInt32Op1);
+ addSlowCase(notInt32Op2);
+ return;
+ }
+ Jump end = jump();
+
+ // Double case.
+ emitBinaryDoubleOp(op_sub, dst, op1, op2, types, notInt32Op1, notInt32Op2);
+ end.link(this);
+}
+
+void JIT::emitSub32Constant(unsigned dst, unsigned op, int32_t constant, ResultType opType)
+{
+ // Int32 case.
+ emitLoad(op, regT1, regT0);
+ Jump notInt32 = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
+ addSlowCase(branchSub32(Overflow, Imm32(constant), regT0));
+ emitStoreInt32(dst, regT0, (op == dst));
+
+ // Double case.
+ if (!supportsFloatingPoint()) {
+ addSlowCase(notInt32);
+ return;
+ }
+ Jump end = jump();
+
+ notInt32.link(this);
+ if (!opType.definitelyIsNumber())
+ addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
+ move(Imm32(constant), regT2);
+ convertInt32ToDouble(regT2, fpRegT0);
+ emitLoadDouble(op, fpRegT1);
+ subDouble(fpRegT0, fpRegT1);
+ emitStoreDouble(dst, fpRegT1);
+
+ end.link(this);
+}
+
+void JIT::emitSlow_op_sub(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
+
+ if (isOperandConstantImmediateInt(op2)) {
+ linkSlowCase(iter); // overflow check
+
+ if (!supportsFloatingPoint() || !types.first().definitelyIsNumber())
+ linkSlowCase(iter); // int32 or double check
+ } else {
+ linkSlowCase(iter); // overflow check
+
+ if (!supportsFloatingPoint()) {
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+ } else {
+ if (!types.first().definitelyIsNumber())
+ linkSlowCase(iter); // double check
+
+ if (!types.second().definitelyIsNumber()) {
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // double check
+ }
+ }
+ }
+
+ JITStubCall stubCall(this, cti_op_sub);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
+void JIT::emitBinaryDoubleOp(OpcodeID opcodeID, unsigned dst, unsigned op1, unsigned op2, OperandTypes types, JumpList& notInt32Op1, JumpList& notInt32Op2, bool op1IsInRegisters, bool op2IsInRegisters)
+{
+ JumpList end;
+
+ if (!notInt32Op1.empty()) {
+ // Double case 1: Op1 is not int32; Op2 is unknown.
+ notInt32Op1.link(this);
+
+ ASSERT(op1IsInRegisters);
+
+ // Verify Op1 is double.
+ if (!types.first().definitelyIsNumber())
+ addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
+
+ if (!op2IsInRegisters)
+ emitLoad(op2, regT3, regT2);
+
+ Jump doubleOp2 = branch32(Below, regT3, Imm32(JSValue::LowestTag));
+
+ if (!types.second().definitelyIsNumber())
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+
+ convertInt32ToDouble(regT2, fpRegT0);
+ Jump doTheMath = jump();
+
+ // Load Op2 as double into double register.
+ doubleOp2.link(this);
+ emitLoadDouble(op2, fpRegT0);
+
+ // Do the math.
+ doTheMath.link(this);
+ switch (opcodeID) {
+ case op_mul:
+ emitLoadDouble(op1, fpRegT2);
+ mulDouble(fpRegT2, fpRegT0);
+ emitStoreDouble(dst, fpRegT0);
+ break;
+ case op_add:
+ emitLoadDouble(op1, fpRegT2);
+ addDouble(fpRegT2, fpRegT0);
+ emitStoreDouble(dst, fpRegT0);
+ break;
+ case op_sub:
+ emitLoadDouble(op1, fpRegT1);
+ subDouble(fpRegT0, fpRegT1);
+ emitStoreDouble(dst, fpRegT1);
+ break;
+ case op_div:
+ emitLoadDouble(op1, fpRegT1);
+ divDouble(fpRegT0, fpRegT1);
+ emitStoreDouble(dst, fpRegT1);
+ break;
+ case op_jnless:
+ emitLoadDouble(op1, fpRegT2);
+ addJump(branchDouble(DoubleLessThanOrEqualOrUnordered, fpRegT0, fpRegT2), dst);
+ break;
+ case op_jless:
+ emitLoadDouble(op1, fpRegT2);
+ addJump(branchDouble(DoubleLessThan, fpRegT2, fpRegT0), dst);
+ break;
+ case op_jlesseq:
+ emitLoadDouble(op1, fpRegT2);
+ addJump(branchDouble(DoubleLessThanOrEqual, fpRegT2, fpRegT0), dst);
+ break;
+ case op_jnlesseq:
+ emitLoadDouble(op1, fpRegT2);
+ addJump(branchDouble(DoubleLessThanOrUnordered, fpRegT0, fpRegT2), dst);
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ }
+
+ if (!notInt32Op2.empty())
+ end.append(jump());
+ }
+
+ if (!notInt32Op2.empty()) {
+ // Double case 2: Op1 is int32; Op2 is not int32.
+ notInt32Op2.link(this);
+
+ ASSERT(op2IsInRegisters);
+
+ if (!op1IsInRegisters)
+ emitLoadPayload(op1, regT0);
+
+ convertInt32ToDouble(regT0, fpRegT0);
+
+ // Verify op2 is double.
+ if (!types.second().definitelyIsNumber())
+ addSlowCase(branch32(Above, regT3, Imm32(JSValue::LowestTag)));
+
+ // Do the math.
+ switch (opcodeID) {
+ case op_mul:
+ emitLoadDouble(op2, fpRegT2);
+ mulDouble(fpRegT2, fpRegT0);
+ emitStoreDouble(dst, fpRegT0);
+ break;
+ case op_add:
+ emitLoadDouble(op2, fpRegT2);
+ addDouble(fpRegT2, fpRegT0);
+ emitStoreDouble(dst, fpRegT0);
+ break;
+ case op_sub:
+ emitLoadDouble(op2, fpRegT2);
+ subDouble(fpRegT2, fpRegT0);
+ emitStoreDouble(dst, fpRegT0);
+ break;
+ case op_div:
+ emitLoadDouble(op2, fpRegT2);
+ divDouble(fpRegT2, fpRegT0);
+ emitStoreDouble(dst, fpRegT0);
+ break;
+ case op_jnless:
+ emitLoadDouble(op2, fpRegT1);
+ addJump(branchDouble(DoubleLessThanOrEqualOrUnordered, fpRegT1, fpRegT0), dst);
+ break;
+ case op_jless:
+ emitLoadDouble(op2, fpRegT1);
+ addJump(branchDouble(DoubleLessThan, fpRegT0, fpRegT1), dst);
+ break;
+ case op_jnlesseq:
+ emitLoadDouble(op2, fpRegT1);
+ addJump(branchDouble(DoubleLessThanOrUnordered, fpRegT1, fpRegT0), dst);
+ break;
+ case op_jlesseq:
+ emitLoadDouble(op2, fpRegT1);
+ addJump(branchDouble(DoubleLessThanOrEqual, fpRegT0, fpRegT1), dst);
+ break;
+ default:
+ ASSERT_NOT_REACHED();
+ }
+ }
+
+ end.link(this);
+}
+
+// Multiplication (*)
+
+void JIT::emit_op_mul(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
+
+ JumpList notInt32Op1;
+ JumpList notInt32Op2;
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+
+ // Int32 case.
+ move(regT0, regT3);
+ addSlowCase(branchMul32(Overflow, regT2, regT0));
+ addSlowCase(branchTest32(Zero, regT0));
+ emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
+
+ if (!supportsFloatingPoint()) {
+ addSlowCase(notInt32Op1);
+ addSlowCase(notInt32Op2);
+ return;
+ }
+ Jump end = jump();
+
+ // Double case.
+ emitBinaryDoubleOp(op_mul, dst, op1, op2, types, notInt32Op1, notInt32Op2);
+ end.link(this);
+}
+
+void JIT::emitSlow_op_mul(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
+
+ Jump overflow = getSlowCase(iter); // overflow check
+ linkSlowCase(iter); // zero result check
+
+ Jump negZero = branchOr32(Signed, regT2, regT3);
+ emitStoreInt32(dst, Imm32(0), (op1 == dst || op2 == dst));
+
+ emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_mul));
+
+ negZero.link(this);
+ overflow.link(this);
+
+ if (!supportsFloatingPoint()) {
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+ }
+
+ if (supportsFloatingPoint()) {
+ if (!types.first().definitelyIsNumber())
+ linkSlowCase(iter); // double check
+
+ if (!types.second().definitelyIsNumber()) {
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // double check
+ }
+ }
+
+ Label jitStubCall(this);
+ JITStubCall stubCall(this, cti_op_mul);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
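The branchOr32(Signed, regT2, regT3) check in the multiply slow case exists because a zero int32 product must become -0 when either factor was negative; a minimal sketch of that rule, separate from the patch:

    #include <cstdio>

    // Called only for the case where the int32 product is zero.
    static double jsZeroProduct(int a, int b)
    {
        if (a < 0 || b < 0)
            return -0.0;  // e.g. -5 * 0 === -0 in JS; not an int32 immediate
        return 0.0;
    }

    int main()
    {
        std::printf("%f %f\n",
                    1.0 / jsZeroProduct(-5, 0),  // -inf, proving the -0
                    1.0 / jsZeroProduct(5, 0));  // +inf
        return 0;
    }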
+// Division (/)
+
+void JIT::emit_op_div(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
+
+ if (!supportsFloatingPoint()) {
+ addSlowCase(jump());
+ return;
+ }
+
+ // Int32 divide.
+ JumpList notInt32Op1;
+ JumpList notInt32Op2;
+
+ JumpList end;
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+
+ notInt32Op1.append(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ notInt32Op2.append(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+
+ convertInt32ToDouble(regT0, fpRegT0);
+ convertInt32ToDouble(regT2, fpRegT1);
+ divDouble(fpRegT1, fpRegT0);
+
+ JumpList doubleResult;
+ branchConvertDoubleToInt32(fpRegT0, regT0, doubleResult, fpRegT1);
+
+ // Int32 result.
+ emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
+ end.append(jump());
+
+ // Double result.
+ doubleResult.link(this);
+ emitStoreDouble(dst, fpRegT0);
+ end.append(jump());
+
+ // Double divide.
+ emitBinaryDoubleOp(op_div, dst, op1, op2, types, notInt32Op1, notInt32Op2);
+ end.link(this);
+}
+
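As a rough model (not the JIT's exact check) of what branchConvertDoubleToInt32 decides above: op_div performs the division in double arithmetic and stores an int32 only when the quotient round-trips exactly and is not negative zero.

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    static bool quotientFitsInt32(double d)
    {
        if (!(d >= INT32_MIN && d <= INT32_MAX))
            return false;  // NaN, infinity or out of int32 range
        if (static_cast<double>(static_cast<int32_t>(d)) != d)
            return false;  // fractional part would be lost
        return !(d == 0 && std::signbit(d));  // -0.0 must stay a double
    }

    int main()
    {
        std::printf("%d %d %d\n",
                    quotientFitsInt32(6.0 / 2.0),   // 1
                    quotientFitsInt32(1.0 / 2.0),   // 0
                    quotientFitsInt32(0.0 / -3.0)); // 0
        return 0;
    }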
+void JIT::emitSlow_op_div(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ OperandTypes types = OperandTypes::fromInt(currentInstruction[4].u.operand);
+
+ if (!supportsFloatingPoint())
+ linkSlowCase(iter);
+ else {
+ if (!types.first().definitelyIsNumber())
+ linkSlowCase(iter); // double check
+
+ if (!types.second().definitelyIsNumber()) {
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // double check
+ }
+ }
+
+ JITStubCall stubCall(this, cti_op_div);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
+// Mod (%)
+
+/* ------------------------------ BEGIN: OP_MOD ------------------------------ */
+
+#if CPU(X86) || CPU(X86_64)
+
+void JIT::emit_op_mod(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ if (isOperandConstantImmediateInt(op2) && getConstantOperand(op2).asInt32() != 0) {
+ emitLoad(op1, X86Registers::edx, X86Registers::eax);
+ move(Imm32(getConstantOperand(op2).asInt32()), X86Registers::ecx);
+ addSlowCase(branch32(NotEqual, X86Registers::edx, Imm32(JSValue::Int32Tag)));
+ if (getConstantOperand(op2).asInt32() == -1)
+ addSlowCase(branch32(Equal, X86Registers::eax, Imm32(0x80000000))); // -2147483648 / -1 => EXC_ARITHMETIC
+ } else {
+ emitLoad2(op1, X86Registers::edx, X86Registers::eax, op2, X86Registers::ebx, X86Registers::ecx);
+ addSlowCase(branch32(NotEqual, X86Registers::edx, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branch32(NotEqual, X86Registers::ebx, Imm32(JSValue::Int32Tag)));
+
+ addSlowCase(branch32(Equal, X86Registers::eax, Imm32(0x80000000))); // -2147483648 / -1 => EXC_ARITHMETIC
+ addSlowCase(branch32(Equal, X86Registers::ecx, Imm32(0))); // divide by 0
+ }
+
+ move(X86Registers::eax, X86Registers::ebx); // Save dividend payload, in case of 0.
+ m_assembler.cdq();
+ m_assembler.idivl_r(X86Registers::ecx);
+
+ // If the remainder is zero and the dividend is negative, the result is -0.
+ Jump storeResult1 = branchTest32(NonZero, X86Registers::edx);
+ Jump storeResult2 = branchTest32(Zero, X86Registers::ebx, Imm32(0x80000000)); // not negative
+ emitStore(dst, jsNumber(m_globalData, -0.0));
+ Jump end = jump();
+
+ storeResult1.link(this);
+ storeResult2.link(this);
+ emitStoreInt32(dst, X86Registers::edx, (op1 == dst || op2 == dst));
+ end.link(this);
+}
+
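Two hazards drive the slow cases around the idiv above, and a short sketch of the required results (independent of the patch) makes them concrete: INT32_MIN % -1 traps in hardware because the quotient overflows, and a zero remainder with a negative dividend must come out as -0 rather than +0.

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    static double jsMod(int32_t dividend, int32_t divisor)
    {
        if (divisor == 0 || (dividend == INT32_MIN && divisor == -1))
            return std::fmod(static_cast<double>(dividend),
                             static_cast<double>(divisor));  // NaN or -0
        int32_t remainder = dividend % divisor;
        if (!remainder && dividend < 0)
            return -0.0;  // e.g. -4 % 2 === -0 in JS
        return remainder;
    }

    int main()
    {
        std::printf("%f %f\n",
                    1.0 / jsMod(-4, 2),  // -inf: the remainder really is -0
                    jsMod(7, 0));        // nan
        return 0;
    }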
+void JIT::emitSlow_op_mod(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ if (isOperandConstantImmediateInt(op2) && getConstantOperand(op2).asInt32() != 0) {
+ linkSlowCase(iter); // int32 check
+ if (getConstantOperand(op2).asInt32() == -1)
+ linkSlowCase(iter); // 0x80000000 check
+ } else {
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // 0 check
+ linkSlowCase(iter); // 0x80000000 check
+ }
+
+ JITStubCall stubCall(this, cti_op_mod);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+}
+
+#else // CPU(X86) || CPU(X86_64)
+
+void JIT::emit_op_mod(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+
+ addSlowCase(branch32(Equal, regT2, Imm32(0)));
+
+ emitNakedCall(m_globalData->jitStubs.ctiSoftModulo());
+
+ emitStoreInt32(dst, regT0, (op1 == dst || op2 == dst));
+#else
+ JITStubCall stubCall(this, cti_op_mod);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(dst);
+#endif
+}
+
+void JIT::emitSlow_op_mod(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ unsigned result = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+ JITStubCall stubCall(this, cti_op_mod);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call(result);
+#else
+ UNUSED_PARAM(currentInstruction);
+ UNUSED_PARAM(iter);
+ ASSERT_NOT_REACHED();
+#endif
+}
+
+#endif // CPU(X86) || CPU(X86_64)
+
+/* ------------------------------ END: OP_MOD ------------------------------ */
+
+#endif // USE(JSVALUE32_64)
+
+}
+
+#endif // ENABLE(JIT)
move(regT2, regT1); // argCount
- emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());
+ emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());
emitStore(dst, regT1, regT0);
emitLoad(dst, regT1, regT0);
addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
+ addSlowCase(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
}
void JIT::emitSlow_op_construct_verify(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
addPtr(Imm32(registerOffset * static_cast<int>(sizeof(Register))), callFrameRegister);
move(Imm32(argCount), regT1);
- m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs.ctiVirtualCallLink());
+ m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs->ctiVirtualCallLink());
// Put the return value in dst.
emitStore(dst, regT1, regT0);
addPtr(callFrameRegister, regT3);
storePtr(callFrameRegister, regT3);
addPtr(regT2, callFrameRegister);
- emitNakedCall(m_globalData->jitStubs.ctiVirtualCall());
+ emitNakedCall(m_globalData->jitStubs->ctiVirtualCall());
// Put the return value in dst. In the interpreter, op_ret does this.
emitPutVirtualRegister(dst);
move(regT0, regT2);
- m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs.ctiVirtualCallLink());
+ m_callStructureStubCompilationInfo[callLinkInfoIndex].callReturnLocation = emitNakedCall(m_globalData->jitStubs->ctiVirtualCallLink());
// Put the return value in dst.
emitPutVirtualRegister(dst);
#ifndef JITCode_h
#define JITCode_h
-#include <wtf/Platform.h>
-
#if ENABLE(JIT)
#include "CallFrame.h"
#ifndef JITInlineMethods_h
#define JITInlineMethods_h
-#include <wtf/Platform.h>
#if ENABLE(JIT)
#endif
}
+ALWAYS_INLINE void JIT::emitLoadCharacterString(RegisterID src, RegisterID dst, JumpList& failures)
+{
+ failures.append(branchPtr(NotEqual, Address(src), ImmPtr(m_globalData->jsStringVPtr)));
+ failures.append(branchTest32(NonZero, Address(src, OBJECT_OFFSETOF(JSString, m_fiberCount))));
+ failures.append(branch32(NotEqual, MacroAssembler::Address(src, ThunkHelpers::jsStringLengthOffset()), Imm32(1)));
+ loadPtr(MacroAssembler::Address(src, ThunkHelpers::jsStringValueOffset()), dst);
+ loadPtr(MacroAssembler::Address(dst, ThunkHelpers::stringImplDataOffset()), dst);
+ load16(MacroAssembler::Address(dst, 0), dst);
+}
+
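A rough, purely illustrative C++ rendering of what emitLoadCharacterString accepts, with made-up struct names standing in for the real layout (the generated code additionally verifies the JSString vptr and reads its offsets through ThunkHelpers):

    struct StringImplSketch { const char16_t* data; };                    // hypothetical
    struct JSStringSketch { unsigned fiberCount; unsigned length;         // hypothetical
                            StringImplSketch* value; };

    // Succeed only for a non-rope (fiberCount == 0), single-character string,
    // then load its one UTF-16 code unit; everything else is a slow case.
    static bool tryLoadCharacter(const JSStringSketch* s, char16_t& out)
    {
        if (!s || s->fiberCount || s->length != 1)
            return false;
        out = s->value->data[0];
        return true;
    }

    int main()
    {
        char16_t ch = 0;
        StringImplSketch impl = { u"a" };
        JSStringSketch str = { 0, 1, &impl };
        return tryLoadCharacter(&str, &ch) && ch == u'a' ? 0 : 1;
    }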
ALWAYS_INLINE void JIT::emitGetFromCallFrameHeader32(RegisterFile::CallFrameHeaderEntry entry, RegisterID to, RegisterID from)
{
load32(Address(from, entry * sizeof(Register)), to);
loadPtr(address, linkRegister);
}
+#elif CPU(MIPS)
+
+ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
+{
+ move(returnAddressRegister, reg);
+}
+
+ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(RegisterID reg)
+{
+ move(reg, returnAddressRegister);
+}
+
+ALWAYS_INLINE void JIT::restoreReturnAddressBeforeReturn(Address address)
+{
+ loadPtr(address, returnAddressRegister);
+}
+
#else // CPU(X86) || CPU(X86_64)
ALWAYS_INLINE void JIT::preserveReturnAddressAfterCall(RegisterID reg)
#endif
#endif
-inline JIT::Address JIT::addressFor(unsigned index, RegisterID base)
+ALWAYS_INLINE bool JIT::isOperandConstantImmediateChar(unsigned src)
{
- return Address(base, (index * sizeof(Register)));
+ return m_codeBlock->isConstantRegisterIndex(src) && getConstantOperand(src).isString() && asString(getConstantOperand(src).asCell())->length() == 1;
}
#if USE(JSVALUE32_64)
-inline JIT::Address JIT::tagFor(unsigned index, RegisterID base)
-{
- return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
-}
-
-inline JIT::Address JIT::payloadFor(unsigned index, RegisterID base)
-{
- return Address(base, (index * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
-}
-
inline void JIT::emitLoadTag(unsigned index, RegisterID tag)
{
RegisterID mappedTag;
inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex)
{
- if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
- addSlowCase(branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::CellTag)));
+ if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
+ if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
+ addSlowCase(jump());
+ else
+ addSlowCase(emitJumpIfNotJSCell(virtualRegisterIndex));
+ }
}
inline void JIT::emitJumpSlowCaseIfNotJSCell(unsigned virtualRegisterIndex, RegisterID tag)
{
- if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex))
- addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
+ if (!m_codeBlock->isKnownNotImmediate(virtualRegisterIndex)) {
+ if (m_codeBlock->isConstantRegisterIndex(virtualRegisterIndex))
+ addSlowCase(jump());
+ else
+ addSlowCase(branch32(NotEqual, tag, Imm32(JSValue::CellTag)));
+ }
}
inline void JIT::linkSlowCaseIfNotJSCell(Vector<SlowCaseEntry>::iterator& iter, unsigned virtualRegisterIndex)
}
#if USE(JSVALUE64)
-ALWAYS_INLINE JIT::Jump JIT::emitJumpIfImmediateNumber(RegisterID reg)
-{
- return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
-}
-ALWAYS_INLINE JIT::Jump JIT::emitJumpIfNotImmediateNumber(RegisterID reg)
-{
- return branchTestPtr(Zero, reg, tagTypeNumberRegister);
-}
inline void JIT::emitLoadDouble(unsigned index, FPRegisterID value)
{
#endif
}
-ALWAYS_INLINE void JIT::emitFastArithImmToInt(RegisterID reg)
-{
-#if USE(JSVALUE64)
- UNUSED_PARAM(reg);
-#else
- rshift32(Imm32(JSImmediate::IntegerPayloadShift), reg);
-#endif
-}
-
// operand is int32_t, must have been zero-extended if register is 64-bit.
ALWAYS_INLINE void JIT::emitFastArithIntToImmNoCheck(RegisterID src, RegisterID dest)
{
/*
* Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
namespace JSC {
-#if USE(JSVALUE32_64)
-
-void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
-{
-#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
- // (1) This function provides fast property access for string length
- Label stringLengthBegin = align();
-
- // regT0 holds payload, regT1 holds tag
-
- Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
- Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
-
- // Checks out okay! - get the length from the Ustring.
- load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_stringLength)), regT2);
-
- Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
- move(regT2, regT0);
- move(Imm32(JSValue::Int32Tag), regT1);
-
- ret();
-#endif
-
- // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
-
-#if ENABLE(JIT_OPTIMIZE_CALL)
- // VirtualCallLink Trampoline
- // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
- Label virtualCallLinkBegin = align();
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
-
- Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
-
- Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
- preserveReturnAddressAfterCall(regT3);
- restoreArgumentReference();
- Call callJSFunction2 = call();
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
- emitGetJITStubArg(2, regT1); // argCount
- restoreReturnAddressBeforeReturn(regT3);
- hasCodeBlock2.link(this);
-
- // Check argCount matches callee arity.
- Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
- preserveReturnAddressAfterCall(regT3);
- emitPutJITStubArg(regT3, 1); // return address
- restoreArgumentReference();
- Call callArityCheck2 = call();
- move(regT1, callFrameRegister);
- emitGetJITStubArg(2, regT1); // argCount
- restoreReturnAddressBeforeReturn(regT3);
- arityCheckOkay2.link(this);
-
- isNativeFunc2.link(this);
-
- compileOpCallInitializeCallFrame();
-
- preserveReturnAddressAfterCall(regT3);
- emitPutJITStubArg(regT3, 1); // return address
- restoreArgumentReference();
- Call callLazyLinkCall = call();
- restoreReturnAddressBeforeReturn(regT3);
- jump(regT0);
-#endif // ENABLE(JIT_OPTIMIZE_CALL)
-
- // VirtualCall Trampoline
- // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
- Label virtualCallBegin = align();
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
-
- Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
-
- Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
- preserveReturnAddressAfterCall(regT3);
- restoreArgumentReference();
- Call callJSFunction1 = call();
- emitGetJITStubArg(2, regT1); // argCount
- restoreReturnAddressBeforeReturn(regT3);
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
- hasCodeBlock3.link(this);
-
- // Check argCount matches callee arity.
- Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
- preserveReturnAddressAfterCall(regT3);
- emitPutJITStubArg(regT3, 1); // return address
- restoreArgumentReference();
- Call callArityCheck1 = call();
- move(regT1, callFrameRegister);
- emitGetJITStubArg(2, regT1); // argCount
- restoreReturnAddressBeforeReturn(regT3);
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
- arityCheckOkay3.link(this);
-
- isNativeFunc3.link(this);
-
- compileOpCallInitializeCallFrame();
- loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
- jump(regT0);
-
-#if CPU(X86) || CPU(ARM_TRADITIONAL)
- Label nativeCallThunk = align();
- preserveReturnAddressAfterCall(regT0);
- emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
-
- // Load caller frame's scope chain into this callframe so that whatever we call can
- // get to its global data.
- emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
- emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
- emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
-
-#if CPU(X86)
- emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
-
- /* We have two structs that we use to describe the stackframe we set up for our
- * call to native code. NativeCallFrameStructure describes the how we set up the stack
- * in advance of the call. NativeFunctionCalleeSignature describes the callframe
- * as the native code expects it. We do this as we are using the fastcall calling
- * convention which results in the callee popping its arguments off the stack, but
- * not the rest of the callframe so we need a nice way to ensure we increment the
- * stack pointer by the right amount after the call.
- */
-
-#if COMPILER(MSVC) || OS(LINUX)
-#if COMPILER(MSVC)
-#pragma pack(push)
-#pragma pack(4)
-#endif // COMPILER(MSVC)
- struct NativeCallFrameStructure {
- // CallFrame* callFrame; // passed in EDX
- JSObject* callee;
- JSValue thisValue;
- ArgList* argPointer;
- ArgList args;
- JSValue result;
- };
- struct NativeFunctionCalleeSignature {
- JSObject* callee;
- JSValue thisValue;
- ArgList* argPointer;
- };
-#if COMPILER(MSVC)
-#pragma pack(pop)
-#endif // COMPILER(MSVC)
-#else
- struct NativeCallFrameStructure {
- // CallFrame* callFrame; // passed in ECX
- // JSObject* callee; // passed in EDX
- JSValue thisValue;
- ArgList* argPointer;
- ArgList args;
- };
- struct NativeFunctionCalleeSignature {
- JSValue thisValue;
- ArgList* argPointer;
- };
-#endif
-
- const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
- // Allocate system stack frame
- subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
-
- // Set up arguments
- subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
-
- // push argcount
- storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
-
- // Calculate the start of the callframe header, and store in regT1
- addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
-
- // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
- mul32(Imm32(sizeof(Register)), regT0, regT0);
- subPtr(regT0, regT1);
- storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
-
- // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
- addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
- storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
-
- // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
- loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
- loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
- storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
- storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
-
-#if COMPILER(MSVC) || OS(LINUX)
- // ArgList is passed by reference so is stackPointerRegister + 4 * sizeof(Register)
- addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);
-
- // Plant callee
- emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
- storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
-
- // Plant callframe
- move(callFrameRegister, X86Registers::edx);
-
- call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
-
- // JSValue is a non-POD type, so eax points to it
- emitLoad(0, regT1, regT0, X86Registers::eax);
-#else
- emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
- move(callFrameRegister, X86Registers::ecx); // callFrame
- call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
-#endif
-
- // We've put a few temporaries on the stack in addition to the actual arguments
- // so pull them off now
- addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
-
-#elif CPU(ARM_TRADITIONAL)
- emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
-
- // Allocate stack space for our arglist
- COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0 && sizeof(JSValue) == 8 && sizeof(Register) == 8, ArgList_should_by_8byte_aligned);
- subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
-
- // Set up arguments
- subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
-
- // Push argcount
- storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
-
- // Calculate the start of the callframe header, and store in regT1
- move(callFrameRegister, regT1);
- sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);
-
- // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
- mul32(Imm32(sizeof(Register)), regT0, regT0);
- subPtr(regT0, regT1);
-
- // push pointer to arguments
- storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
-
- // Argument passing method:
- // r0 - points to return value
- // r1 - callFrame
- // r2 - callee
- // stack: this(JSValue) and a pointer to ArgList
-
- move(stackPointerRegister, regT3);
- subPtr(Imm32(8), stackPointerRegister);
- move(stackPointerRegister, regT0);
- subPtr(Imm32(8 + 4 + 4 /* padding */), stackPointerRegister);
-
- // Setup arg4:
- storePtr(regT3, Address(stackPointerRegister, 8));
-
- // Setup arg3
- // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
- load32(Address(regT1, -(int32_t)sizeof(void*) * 2), regT3);
- storePtr(regT3, Address(stackPointerRegister, 0));
- load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
- storePtr(regT3, Address(stackPointerRegister, 4));
-
- // Setup arg2:
- emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);
-
- // Setup arg1:
- move(callFrameRegister, regT1);
-
- call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));
-
- // Load return value
- load32(Address(stackPointerRegister, 16), regT0);
- load32(Address(stackPointerRegister, 20), regT1);
-
- addPtr(Imm32(sizeof(ArgList) + 16 + 8), stackPointerRegister);
-#endif
-
- // Check for an exception
- move(ImmPtr(&globalData->exception), regT2);
- Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));
-
- // Grab the return address.
- emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);
-
- // Restore our caller's "r".
- emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
-
- // Return.
- restoreReturnAddressBeforeReturn(regT3);
- ret();
-
- // Handle an exception
- sawException.link(this);
- // Grab the return address.
- emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
- move(ImmPtr(&globalData->exceptionLocation), regT2);
- storePtr(regT1, regT2);
- move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
- emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
- poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
- restoreReturnAddressBeforeReturn(regT2);
- ret();
-
-#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
-#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
-#else
- breakpoint();
-#endif
-
-#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
- Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
- Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
- Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
-#endif
-
- // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
- LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
-
-#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
- patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
- patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
- patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
-#endif
- patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
- patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
-#if ENABLE(JIT_OPTIMIZE_CALL)
- patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
- patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
- patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
-#endif
-
- CodeRef finalCode = patchBuffer.finalizeCode();
- *executablePool = finalCode.m_executablePool;
-
- *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
- *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
-#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
- *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
-#else
- UNUSED_PARAM(ctiStringLengthTrampoline);
-#endif
-#if ENABLE(JIT_OPTIMIZE_CALL)
- *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
-#else
- UNUSED_PARAM(ctiVirtualCallLink);
-#endif
-}
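// Editorial note (sketch, not part of the patch): strip away the register
// shuffling and the native-call thunk above reduces to one C++ call made with the
// current frame's data -- build an ArgList of { argCount - 1, pointer to the first
// argument }, fetch the function pointer stored at JSFunction::m_data, and invoke
// roughly
//     result = native(callFrame, callee, thisValue, argList);
// The per-ABI branches only decide which of those travel in registers versus stack
// slots, and (for the MSVC/Linux x86 path) where the hidden pointer for the
// non-POD JSValue result points. The signature is inferred from the argument
// setup here, not quoted from the JSC headers.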
-
-void JIT::emit_op_mov(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src = currentInstruction[2].u.operand;
-
- if (m_codeBlock->isConstantRegisterIndex(src))
- emitStore(dst, getConstantOperand(src));
- else {
- emitLoad(src, regT1, regT0);
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
- }
-}
-
-void JIT::emit_op_end(Instruction* currentInstruction)
-{
- if (m_codeBlock->needsFullScopeChain())
- JITStubCall(this, cti_op_end).call();
- ASSERT(returnValueRegister != callFrameRegister);
- emitLoad(currentInstruction[1].u.operand, regT1, regT0);
- restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
- ret();
-}
-
-void JIT::emit_op_jmp(Instruction* currentInstruction)
-{
- unsigned target = currentInstruction[1].u.operand;
- addJump(jump(), target);
-}
-
-void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
-{
- unsigned op1 = currentInstruction[1].u.operand;
- unsigned op2 = currentInstruction[2].u.operand;
- unsigned target = currentInstruction[3].u.operand;
-
- emitTimeoutCheck();
-
- if (isOperandConstantImmediateInt(op1)) {
- emitLoad(op2, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
- return;
- }
-
- if (isOperandConstantImmediateInt(op2)) {
- emitLoad(op1, regT1, regT0);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
- return;
- }
-
- emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- addJump(branch32(LessThanOrEqual, regT0, regT2), target);
-}
-
-void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned op1 = currentInstruction[1].u.operand;
- unsigned op2 = currentInstruction[2].u.operand;
- unsigned target = currentInstruction[3].u.operand;
-
- if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
- linkSlowCase(iter); // int32 check
- linkSlowCase(iter); // int32 check
-
- JITStubCall stubCall(this, cti_op_loop_if_lesseq);
- stubCall.addArgument(op1);
- stubCall.addArgument(op2);
- stubCall.call();
- emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
-}
-
-void JIT::emit_op_new_object(Instruction* currentInstruction)
-{
- JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_instanceof(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned value = currentInstruction[2].u.operand;
- unsigned baseVal = currentInstruction[3].u.operand;
- unsigned proto = currentInstruction[4].u.operand;
-
- // Load the operands into registers.
- // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
- emitLoadPayload(value, regT2);
- emitLoadPayload(baseVal, regT0);
- emitLoadPayload(proto, regT1);
-
- // Check that value, baseVal, and proto are cells.
- emitJumpSlowCaseIfNotJSCell(value);
- emitJumpSlowCaseIfNotJSCell(baseVal);
- emitJumpSlowCaseIfNotJSCell(proto);
-
- // Check that baseVal 'ImplementsDefaultHasInstance'.
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
- addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));
-
- // Optimistically load the result true, and start looping.
- // Initially, regT1 still contains proto and regT2 still contains value.
- // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
- move(Imm32(JSValue::TrueTag), regT0);
- Label loop(this);
-
- // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
- // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
- Jump isInstance = branchPtr(Equal, regT2, regT1);
- branchTest32(NonZero, regT2).linkTo(loop, this);
-
- // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
- move(Imm32(JSValue::FalseTag), regT0);
-
- // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
- isInstance.link(this);
- emitStoreBool(dst, regT0);
-}
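// Editorial note: the loop emitted above is the usual [[HasInstance]] prototype
// walk. A minimal standalone C++ sketch of the same control flow (hypothetical
// ProtoCell type used for illustration -- it is not JSC's JSCell/Structure):
struct ProtoCell { const ProtoCell* prototype; };

static inline bool instanceOfWalk(const ProtoCell* value, const ProtoCell* proto)
{
    // Mirror the emitted code: optimistically assume true, then follow value's
    // prototype chain; succeed on pointer equality with proto, fail on null.
    for (const ProtoCell* cell = value->prototype; cell; cell = cell->prototype) {
        if (cell == proto)
            return true; // the "isInstance" early exit
    }
    return false; // dropped off the end of the chain
}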
-
-void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned value = currentInstruction[2].u.operand;
- unsigned baseVal = currentInstruction[3].u.operand;
- unsigned proto = currentInstruction[4].u.operand;
-
- linkSlowCaseIfNotJSCell(iter, value);
- linkSlowCaseIfNotJSCell(iter, baseVal);
- linkSlowCaseIfNotJSCell(iter, proto);
- linkSlowCase(iter);
-
- JITStubCall stubCall(this, cti_op_instanceof);
- stubCall.addArgument(value);
- stubCall.addArgument(baseVal);
- stubCall.addArgument(proto);
- stubCall.call(dst);
-}
-
-void JIT::emit_op_new_func(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_new_func);
- stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_get_global_var(Instruction* currentInstruction)
-{
- int dst = currentInstruction[1].u.operand;
- JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
- ASSERT(globalObject->isGlobalObject());
- int index = currentInstruction[3].u.operand;
-
- loadPtr(&globalObject->d()->registers, regT2);
-
- emitLoad(index, regT1, regT0, regT2);
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
-}
-
-void JIT::emit_op_put_global_var(Instruction* currentInstruction)
-{
- JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
- ASSERT(globalObject->isGlobalObject());
- int index = currentInstruction[2].u.operand;
- int value = currentInstruction[3].u.operand;
-
- emitLoad(value, regT1, regT0);
-
- loadPtr(&globalObject->d()->registers, regT2);
- emitStore(index, regT1, regT0, regT2);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
-}
-
-void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
-{
- int dst = currentInstruction[1].u.operand;
- int index = currentInstruction[2].u.operand;
- int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
-
- emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
- while (skip--)
- loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
-
- loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);
-
- emitLoad(index, regT1, regT0, regT2);
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
-}
-
-void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
-{
- int index = currentInstruction[1].u.operand;
- int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
- int value = currentInstruction[3].u.operand;
-
- emitLoad(value, regT1, regT0);
-
- emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
- while (skip--)
- loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
-
- loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);
-
- emitStore(index, regT1, regT0, regT2);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
-}
-
-void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_tear_off_activation);
- stubCall.addArgument(currentInstruction[1].u.operand);
- stubCall.call();
-}
-
-void JIT::emit_op_tear_off_arguments(Instruction*)
-{
- JITStubCall(this, cti_op_tear_off_arguments).call();
-}
-
-void JIT::emit_op_new_array(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_new_array);
- stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
- stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_resolve(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_resolve);
- stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_to_primitive(Instruction* currentInstruction)
-{
- int dst = currentInstruction[1].u.operand;
- int src = currentInstruction[2].u.operand;
-
- emitLoad(src, regT1, regT0);
-
- Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
- addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
- isImm.link(this);
-
- if (dst != src)
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
-}
-
-void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- int dst = currentInstruction[1].u.operand;
-
- linkSlowCase(iter);
-
- JITStubCall stubCall(this, cti_op_to_primitive);
- stubCall.addArgument(regT1, regT0);
- stubCall.call(dst);
-}
-
-void JIT::emit_op_strcat(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_strcat);
- stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
- stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_resolve_base(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_resolve_base);
- stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_resolve_skip);
- stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
- stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_resolve_global(Instruction* currentInstruction)
-{
- // FIXME: Optimize to use patching instead of so many memory accesses.
-
- unsigned dst = currentInstruction[1].u.operand;
- void* globalObject = currentInstruction[2].u.jsCell;
-
- unsigned currentIndex = m_globalResolveInfoIndex++;
- void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
- void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
-
- // Verify structure.
- move(ImmPtr(globalObject), regT0);
- loadPtr(structureAddress, regT1);
- addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));
-
- // Load property.
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
- load32(offsetAddr, regT3);
- load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
- load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_resolve_global), dst, regT1, regT0);
-}
-
-void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- void* globalObject = currentInstruction[2].u.jsCell;
- Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
-
- unsigned currentIndex = m_globalResolveInfoIndex++;
-
- linkSlowCase(iter);
- JITStubCall stubCall(this, cti_op_resolve_global);
- stubCall.addArgument(ImmPtr(globalObject));
- stubCall.addArgument(ImmPtr(ident));
- stubCall.addArgument(Imm32(currentIndex));
- stubCall.call(dst);
-}
-
-void JIT::emit_op_not(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src = currentInstruction[2].u.operand;
-
- emitLoadTag(src, regT0);
-
- xor32(Imm32(JSValue::FalseTag), regT0);
- addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
- xor32(Imm32(JSValue::TrueTag), regT0);
-
- emitStoreBool(dst, regT0, (dst == src));
-}
-
-void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src = currentInstruction[2].u.operand;
-
- linkSlowCase(iter);
-
- JITStubCall stubCall(this, cti_op_not);
- stubCall.addArgument(src);
- stubCall.call(dst);
-}
-
-void JIT::emit_op_jfalse(Instruction* currentInstruction)
-{
- unsigned cond = currentInstruction[1].u.operand;
- unsigned target = currentInstruction[2].u.operand;
-
- emitLoad(cond, regT1, regT0);
-
- Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
- addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target);
-
- Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
- Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
- addJump(jump(), target);
-
- if (supportsFloatingPoint()) {
- isNotInteger.link(this);
-
- addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
-
- zeroDouble(fpRegT0);
- emitLoadDouble(cond, fpRegT1);
- addJump(branchDouble(DoubleEqualOrUnordered, fpRegT0, fpRegT1), target);
- } else
- addSlowCase(isNotInteger);
-
- isTrue.link(this);
- isTrue2.link(this);
-}
-
-void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned cond = currentInstruction[1].u.operand;
- unsigned target = currentInstruction[2].u.operand;
-
- linkSlowCase(iter);
- JITStubCall stubCall(this, cti_op_jtrue);
- stubCall.addArgument(cond);
- stubCall.call();
- emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
-}
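// Editorial note on the jfalse fast paths above: with the 32_64 encoding the tag
// word alone settles most cases -- FalseTag takes the jump, TrueTag falls through,
// an Int32Tag value jumps only when its payload is zero, and (when the FPU is
// usable) any tag at or below LowestTag is reloaded as a double and compared with
// 0.0 using DoubleEqualOrUnordered, so 0.0, -0.0 and NaN all jump. Every other tag
// (cells and the like) defers to cti_op_jtrue, whose result the slow path inverts.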
-
-void JIT::emit_op_jtrue(Instruction* currentInstruction)
-{
- unsigned cond = currentInstruction[1].u.operand;
- unsigned target = currentInstruction[2].u.operand;
-
- emitLoad(cond, regT1, regT0);
-
- Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
- addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);
-
- Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
- Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
- addJump(jump(), target);
-
- if (supportsFloatingPoint()) {
- isNotInteger.link(this);
-
- addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
-
- zeroDouble(fpRegT0);
- emitLoadDouble(cond, fpRegT1);
- addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target);
- } else
- addSlowCase(isNotInteger);
-
- isFalse.link(this);
- isFalse2.link(this);
-}
-
-void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned cond = currentInstruction[1].u.operand;
- unsigned target = currentInstruction[2].u.operand;
-
- linkSlowCase(iter);
- JITStubCall stubCall(this, cti_op_jtrue);
- stubCall.addArgument(cond);
- stubCall.call();
- emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
-}
-
-void JIT::emit_op_jeq_null(Instruction* currentInstruction)
-{
- unsigned src = currentInstruction[1].u.operand;
- unsigned target = currentInstruction[2].u.operand;
-
- emitLoad(src, regT1, regT0);
-
- Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
-
- // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
-
- Jump wasNotImmediate = jump();
-
- // Now handle the immediate cases - undefined & null
- isImmediate.link(this);
-
- set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
- set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
- or32(regT2, regT1);
-
- addJump(branchTest32(NonZero, regT1), target);
-
- wasNotImmediate.link(this);
-}
-
-void JIT::emit_op_jneq_null(Instruction* currentInstruction)
-{
- unsigned src = currentInstruction[1].u.operand;
- unsigned target = currentInstruction[2].u.operand;
-
- emitLoad(src, regT1, regT0);
-
- Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
-
- // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
-
- Jump wasNotImmediate = jump();
-
- // Now handle the immediate cases - undefined & null
- isImmediate.link(this);
-
- set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
- set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
- or32(regT2, regT1);
-
- addJump(branchTest32(Zero, regT1), target);
-
- wasNotImmediate.link(this);
-}
-
-void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
-{
- unsigned src = currentInstruction[1].u.operand;
- JSCell* ptr = currentInstruction[2].u.jsCell;
- unsigned target = currentInstruction[3].u.operand;
-
- emitLoad(src, regT1, regT0);
- addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
- addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
-}
-
-void JIT::emit_op_jsr(Instruction* currentInstruction)
-{
- int retAddrDst = currentInstruction[1].u.operand;
- int target = currentInstruction[2].u.operand;
- DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
- addJump(jump(), target);
- m_jsrSites.append(JSRInfo(storeLocation, label()));
-}
-
-void JIT::emit_op_sret(Instruction* currentInstruction)
-{
- jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
-}
-
-void JIT::emit_op_eq(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src1 = currentInstruction[2].u.operand;
- unsigned src2 = currentInstruction[3].u.operand;
-
- emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
- addSlowCase(branch32(NotEqual, regT1, regT3));
- addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
- addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));
-
- set8(Equal, regT0, regT2, regT0);
- or32(Imm32(JSValue::FalseTag), regT0);
-
- emitStoreBool(dst, regT0);
-}
-
-void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned op1 = currentInstruction[2].u.operand;
- unsigned op2 = currentInstruction[3].u.operand;
-
- JumpList storeResult;
- JumpList genericCase;
-
- genericCase.append(getSlowCase(iter)); // tags not equal
-
- linkSlowCase(iter); // tags equal and JSCell
- genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
- genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));
-
- // String case.
- JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
- stubCallEqStrings.addArgument(regT0);
- stubCallEqStrings.addArgument(regT2);
- stubCallEqStrings.call();
- storeResult.append(jump());
-
- // Generic case.
- genericCase.append(getSlowCase(iter)); // doubles
- genericCase.link(this);
- JITStubCall stubCallEq(this, cti_op_eq);
- stubCallEq.addArgument(op1);
- stubCallEq.addArgument(op2);
- stubCallEq.call(regT0);
-
- storeResult.link(this);
- or32(Imm32(JSValue::FalseTag), regT0);
- emitStoreBool(dst, regT0);
-}
-
-void JIT::emit_op_neq(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src1 = currentInstruction[2].u.operand;
- unsigned src2 = currentInstruction[3].u.operand;
-
- emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
- addSlowCase(branch32(NotEqual, regT1, regT3));
- addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
- addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));
-
- set8(NotEqual, regT0, regT2, regT0);
- or32(Imm32(JSValue::FalseTag), regT0);
-
- emitStoreBool(dst, regT0);
-}
-
-void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
-
- JumpList storeResult;
- JumpList genericCase;
-
- genericCase.append(getSlowCase(iter)); // tags not equal
-
- linkSlowCase(iter); // tags equal and JSCell
- genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
- genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));
-
- // String case.
- JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
- stubCallEqStrings.addArgument(regT0);
- stubCallEqStrings.addArgument(regT2);
- stubCallEqStrings.call(regT0);
- storeResult.append(jump());
-
- // Generic case.
- genericCase.append(getSlowCase(iter)); // doubles
- genericCase.link(this);
- JITStubCall stubCallEq(this, cti_op_eq);
- stubCallEq.addArgument(regT1, regT0);
- stubCallEq.addArgument(regT3, regT2);
- stubCallEq.call(regT0);
-
- storeResult.link(this);
- xor32(Imm32(0x1), regT0);
- or32(Imm32(JSValue::FalseTag), regT0);
- emitStoreBool(dst, regT0);
-}
-
-void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src1 = currentInstruction[2].u.operand;
- unsigned src2 = currentInstruction[3].u.operand;
-
- emitLoadTag(src1, regT0);
- emitLoadTag(src2, regT1);
-
- // Jump to a slow case if either operand is double, or if both operands are
- // cells and/or Int32s.
- move(regT0, regT2);
- and32(regT1, regT2);
- addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
- addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));
-
- if (type == OpStrictEq)
- set8(Equal, regT0, regT1, regT0);
- else
- set8(NotEqual, regT0, regT1, regT0);
-
- or32(Imm32(JSValue::FalseTag), regT0);
-
- emitStoreBool(dst, regT0);
-}
-
-void JIT::emit_op_stricteq(Instruction* currentInstruction)
-{
- compileOpStrictEq(currentInstruction, OpStrictEq);
-}
-
-void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src1 = currentInstruction[2].u.operand;
- unsigned src2 = currentInstruction[3].u.operand;
-
- linkSlowCase(iter);
- linkSlowCase(iter);
-
- JITStubCall stubCall(this, cti_op_stricteq);
- stubCall.addArgument(src1);
- stubCall.addArgument(src2);
- stubCall.call(dst);
-}
-
-void JIT::emit_op_nstricteq(Instruction* currentInstruction)
-{
- compileOpStrictEq(currentInstruction, OpNStrictEq);
-}
-
-void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src1 = currentInstruction[2].u.operand;
- unsigned src2 = currentInstruction[3].u.operand;
-
- linkSlowCase(iter);
- linkSlowCase(iter);
-
- JITStubCall stubCall(this, cti_op_nstricteq);
- stubCall.addArgument(src1);
- stubCall.addArgument(src2);
- stubCall.call(dst);
-}
-
-void JIT::emit_op_eq_null(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src = currentInstruction[2].u.operand;
-
- emitLoad(src, regT1, regT0);
- Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
-
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
- setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
-
- Jump wasNotImmediate = jump();
-
- isImmediate.link(this);
-
- set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
- set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
- or32(regT2, regT1);
-
- wasNotImmediate.link(this);
-
- or32(Imm32(JSValue::FalseTag), regT1);
-
- emitStoreBool(dst, regT1);
-}
-
-void JIT::emit_op_neq_null(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned src = currentInstruction[2].u.operand;
-
- emitLoad(src, regT1, regT0);
- Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
-
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
- setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
-
- Jump wasNotImmediate = jump();
-
- isImmediate.link(this);
-
- set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
- set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
- and32(regT2, regT1);
-
- wasNotImmediate.link(this);
-
- or32(Imm32(JSValue::FalseTag), regT1);
-
- emitStoreBool(dst, regT1);
-}
-
-void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_resolve_with_base);
- stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
- stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
- stubCall.call(currentInstruction[2].u.operand);
-}
-
-void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_new_func_exp);
- stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_new_regexp(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_new_regexp);
- stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_throw(Instruction* currentInstruction)
-{
- unsigned exception = currentInstruction[1].u.operand;
- JITStubCall stubCall(this, cti_op_throw);
- stubCall.addArgument(exception);
- stubCall.call();
-
-#ifndef NDEBUG
- // cti_op_throw always changes it's return address,
- // this point in the code should never be reached.
- breakpoint();
-#endif
-}
-
-void JIT::emit_op_get_pnames(Instruction* currentInstruction)
-{
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
- int i = currentInstruction[3].u.operand;
- int size = currentInstruction[4].u.operand;
- int breakTarget = currentInstruction[5].u.operand;
-
- JumpList isNotObject;
-
- emitLoad(base, regT1, regT0);
- if (!m_codeBlock->isKnownNotImmediate(base))
- isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
- if (base != m_codeBlock->thisRegister()) {
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- isNotObject.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
- }
-
- // We could inline the case where you have a valid cache, but
- // this call doesn't seem to be hot.
- Label isObject(this);
- JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
- getPnamesStubCall.addArgument(regT0);
- getPnamesStubCall.call(dst);
- load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
- store32(Imm32(0), addressFor(i));
- store32(regT3, addressFor(size));
- Jump end = jump();
-
- isNotObject.link(this);
- addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
- addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
- JITStubCall toObjectStubCall(this, cti_to_object);
- toObjectStubCall.addArgument(regT1, regT0);
- toObjectStubCall.call(base);
- jump().linkTo(isObject, this);
-
- end.link(this);
-}
-
-void JIT::emit_op_next_pname(Instruction* currentInstruction)
-{
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
- int i = currentInstruction[3].u.operand;
- int size = currentInstruction[4].u.operand;
- int it = currentInstruction[5].u.operand;
- int target = currentInstruction[6].u.operand;
-
- JumpList callHasProperty;
-
- Label begin(this);
- load32(addressFor(i), regT0);
- Jump end = branch32(Equal, regT0, addressFor(size));
-
- // Grab key @ i
- loadPtr(addressFor(it), regT1);
- loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
- load32(BaseIndex(regT2, regT0, TimesEight), regT2);
- store32(Imm32(JSValue::CellTag), tagFor(dst));
- store32(regT2, payloadFor(dst));
-
- // Increment i
- add32(Imm32(1), regT0);
- store32(regT0, addressFor(i));
-
- // Verify that i is valid:
- loadPtr(addressFor(base), regT0);
-
- // Test base's structure
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
-
- // Test base's prototype chain
- loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
- loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
- addJump(branchTestPtr(Zero, Address(regT3)), target);
-
- Label checkPrototype(this);
- callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
- loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
- addPtr(Imm32(sizeof(Structure*)), regT3);
- branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
-
- // Continue loop.
- addJump(jump(), target);
-
- // Slow case: Ask the object if i is valid.
- callHasProperty.link(this);
- loadPtr(addressFor(dst), regT1);
- JITStubCall stubCall(this, cti_has_property);
- stubCall.addArgument(regT0);
- stubCall.addArgument(regT1);
- stubCall.call();
-
- // Test for valid key.
- addJump(branchTest32(NonZero, regT0), target);
- jump().linkTo(begin, this);
-
- // End of loop.
- end.link(this);
-}
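// Editorial note: op_next_pname above is the cached for..in fast path -- pull
// m_jsStrings[i] out of the JSPropertyNameIterator as the next key, bump i, then
// prove the key is still live by checking that the base object's Structure and
// every Structure recorded in the cached prototype chain are unchanged (stopping
// early if a prototype is null). Any mismatch falls back to cti_has_property,
// which re-queries the object and either keeps the key or loops to fetch the next
// one.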
-
-void JIT::emit_op_push_scope(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_push_scope);
- stubCall.addArgument(currentInstruction[1].u.operand);
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_pop_scope(Instruction*)
-{
- JITStubCall(this, cti_op_pop_scope).call();
-}
-
-void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
-{
- int dst = currentInstruction[1].u.operand;
- int src = currentInstruction[2].u.operand;
-
- emitLoad(src, regT1, regT0);
-
- Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
- addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
- isInt32.link(this);
-
- if (src != dst)
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
-}
-
-void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- int dst = currentInstruction[1].u.operand;
-
- linkSlowCase(iter);
-
- JITStubCall stubCall(this, cti_op_to_jsnumber);
- stubCall.addArgument(regT1, regT0);
- stubCall.call(dst);
-}
-
-void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_push_new_scope);
- stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
- stubCall.addArgument(currentInstruction[3].u.operand);
- stubCall.call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_catch(Instruction* currentInstruction)
-{
- unsigned exception = currentInstruction[1].u.operand;
-
- // This opcode only executes after a return from cti_op_throw.
-
- // cti_op_throw may have taken us to a call frame further up the stack; reload
- // the call frame pointer to adjust.
- peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof (void*));
-
- // Now store the exception returned by cti_op_throw.
- emitStore(exception, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
-}
-
-void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_jmp_scopes);
- stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
- stubCall.call();
- addJump(jump(), currentInstruction[2].u.operand);
-}
-
-void JIT::emit_op_switch_imm(Instruction* currentInstruction)
-{
- unsigned tableIndex = currentInstruction[1].u.operand;
- unsigned defaultOffset = currentInstruction[2].u.operand;
- unsigned scrutinee = currentInstruction[3].u.operand;
-
- // create jump table for switch destinations, track this switch statement.
- SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
- m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
- jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
-
- JITStubCall stubCall(this, cti_op_switch_imm);
- stubCall.addArgument(scrutinee);
- stubCall.addArgument(Imm32(tableIndex));
- stubCall.call();
- jump(regT0);
-}
-
-void JIT::emit_op_switch_char(Instruction* currentInstruction)
-{
- unsigned tableIndex = currentInstruction[1].u.operand;
- unsigned defaultOffset = currentInstruction[2].u.operand;
- unsigned scrutinee = currentInstruction[3].u.operand;
-
- // create jump table for switch destinations, track this switch statement.
- SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
- m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
- jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
-
- JITStubCall stubCall(this, cti_op_switch_char);
- stubCall.addArgument(scrutinee);
- stubCall.addArgument(Imm32(tableIndex));
- stubCall.call();
- jump(regT0);
-}
-
-void JIT::emit_op_switch_string(Instruction* currentInstruction)
-{
- unsigned tableIndex = currentInstruction[1].u.operand;
- unsigned defaultOffset = currentInstruction[2].u.operand;
- unsigned scrutinee = currentInstruction[3].u.operand;
-
- // create jump table for switch destinations, track this switch statement.
- StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
- m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
-
- JITStubCall stubCall(this, cti_op_switch_string);
- stubCall.addArgument(scrutinee);
- stubCall.addArgument(Imm32(tableIndex));
- stubCall.call();
- jump(regT0);
-}
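// Editorial note: the three switch flavours above share one shape -- append a
// SwitchRecord so the bytecode branch offsets can be resolved to machine-code
// offsets when the function is linked, grow ctiOffsets to match branchOffsets
// (for the immediate and character tables), then let the cti_op_switch_* stub
// choose the destination at run time and jump through the pointer it returns in
// regT0.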
-
-void JIT::emit_op_new_error(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned type = currentInstruction[2].u.operand;
- unsigned message = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_new_error);
- stubCall.addArgument(Imm32(type));
- stubCall.addArgument(m_codeBlock->getConstant(message));
- stubCall.addArgument(Imm32(m_bytecodeIndex));
- stubCall.call(dst);
-}
-
-void JIT::emit_op_debug(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_debug);
- stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
- stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
- stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
- stubCall.call();
-}
-
-
-void JIT::emit_op_enter(Instruction*)
-{
- // Even though JIT code doesn't use them, we initialize our constant
- // registers to zap stale pointers, to avoid unnecessarily prolonging
- // object lifetime and increasing GC pressure.
- for (int i = 0; i < m_codeBlock->m_numVars; ++i)
- emitStore(i, jsUndefined());
-}
-
-void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
-{
- emit_op_enter(currentInstruction);
-
- JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
-}
-
-void JIT::emit_op_create_arguments(Instruction*)
-{
- Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));
-
- // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
- if (m_codeBlock->m_numParameters == 1)
- JITStubCall(this, cti_op_create_arguments_no_params).call();
- else
- JITStubCall(this, cti_op_create_arguments).call();
-
- argsCreated.link(this);
-}
-
-void JIT::emit_op_init_arguments(Instruction*)
-{
- emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
-}
-
-void JIT::emit_op_convert_this(Instruction* currentInstruction)
-{
- unsigned thisRegister = currentInstruction[1].u.operand;
-
- emitLoad(thisRegister, regT1, regT0);
-
- addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
-
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- addSlowCase(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
-
- map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
-}
-
-void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned thisRegister = currentInstruction[1].u.operand;
-
- linkSlowCase(iter);
- linkSlowCase(iter);
-
- JITStubCall stubCall(this, cti_op_convert_this);
- stubCall.addArgument(regT1, regT0);
- stubCall.call(thisRegister);
-}
-
-void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
-{
- peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
- Jump noProfiler = branchTestPtr(Zero, Address(regT2));
-
- JITStubCall stubCall(this, cti_op_profile_will_call);
- stubCall.addArgument(currentInstruction[1].u.operand);
- stubCall.call();
- noProfiler.link(this);
-}
-
-void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
-{
- peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof (void*));
- Jump noProfiler = branchTestPtr(Zero, Address(regT2));
-
- JITStubCall stubCall(this, cti_op_profile_did_call);
- stubCall.addArgument(currentInstruction[1].u.operand);
- stubCall.call();
- noProfiler.link(this);
-}
-
-#else // USE(JSVALUE32_64)
+#if !USE(JSVALUE32_64)
#define RECORD_JUMP_TARGET(targetOffset) \
do { m_labels[m_bytecodeIndex + (targetOffset)].used(); } while (false)
-void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, CodePtr* ctiStringLengthTrampoline, CodePtr* ctiVirtualCallLink, CodePtr* ctiVirtualCall, CodePtr* ctiNativeCallThunk)
+void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure* trampolines)
{
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ Label softModBegin = align();
+ softModulo();
+#endif
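// Editorial note: the new JIT_OPTIMIZE_MOD block emits a single shared softModulo()
// routine at the head of the trampoline pool; its entry point is published further
// down as trampolines->ctiSoftModulo, presumably so integer op_mod sites can branch
// to it instead of taking a stub call. softModulo() itself is defined elsewhere in
// the patch.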
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
// (2) The second function provides fast property access for string length
Label stringLengthBegin = align();
Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
// Checks out okay! - get the length from the Ustring.
- load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_stringLength)), regT0);
+ load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT0);
Jump string_failureCases3 = branch32(Above, regT0, Imm32(JSImmediate::maxImmediateInt));
// push pointer to arguments
storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
- // Setup arg3: regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
+ // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
+
+#if OS(WINCE)
+ // Setup arg3:
+ loadPtr(Address(regT1, -(int32_t)sizeof(Register)), ARMRegisters::r3);
+
+ // Setup arg2:
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);
+
+ // Setup arg1:
+ move(callFrameRegister, regT1);
+
+ // Setup arg0:
+ move(stackPointerRegister, regT0);
+ subPtr(Imm32(sizeof(Register)), stackPointerRegister);
+ storePtr(regT0, Address(stackPointerRegister));
+
+ call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));
+
+ loadPtr(Address(regT0), regT0);
+
+ addPtr(Imm32(sizeof(Register) + sizeof(ArgList)), stackPointerRegister);
+#else // OS(WINCE)
+ // Setup arg3:
loadPtr(Address(regT1, -(int32_t)sizeof(Register)), regT2);
// Setup arg2:
call(Address(regT1, OBJECT_OFFSETOF(JSFunction, m_data)));
addPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
+#endif // OS(WINCE)
+
+#elif CPU(MIPS)
+ emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
+
+ // Allocate stack space for our arglist
+ COMPILE_ASSERT(!(sizeof(ArgList) & 0x7), ArgList_should_be_8byte_aligned);
+ subPtr(Imm32(sizeof(ArgList) + 24), stackPointerRegister);
+
+ // Set up arguments
+ subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
+
+ // Push argcount to 24 + offset($sp)
+ storePtr(regT0, Address(stackPointerRegister, 24 + OBJECT_OFFSETOF(ArgList, m_argCount)));
+
+ // Calculate the start of the callframe header, and store in regT1
+ move(callFrameRegister, regT1);
+ sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);
+
+ // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
+ mul32(Imm32(sizeof(Register)), regT0, regT0);
+ subPtr(regT0, regT1);
+
+ // push pointer to arguments to 24 + offset($sp)
+ storePtr(regT1, Address(stackPointerRegister, 24 + OBJECT_OFFSETOF(ArgList, m_args)));
+
+ // Setup arg3: regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
+ loadPtr(Address(regT1, -(int32_t)sizeof(Register)), MIPSRegisters::a3);
+
+ // Setup arg2:
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, MIPSRegisters::a2);
+
+ // Setup arg1:
+ move(callFrameRegister, MIPSRegisters::a1);
+
+ // Setup arg4: ArgList is passed by reference. At 16($sp), store ($sp + 24)
+ addPtr(Imm32(24), stackPointerRegister, regT2);
+ storePtr(regT2, Address(stackPointerRegister, 16));
+
+ // Setup arg0 as 20($sp) to hold the returned structure.
+ ASSERT(sizeof(JSValue) == 4);
+ addPtr(Imm32(20), stackPointerRegister, MIPSRegisters::a0);
+
+ // Call
+ call(Address(MIPSRegisters::a2, OBJECT_OFFSETOF(JSFunction, m_data)));
+
+ // Get returned value from 0($v0) which is the same as 20($sp)
+ loadPtr(Address(returnValueRegister, 0), returnValueRegister);
+
+ // Restore stack space
+ addPtr(Imm32(sizeof(ArgList) + 24), stackPointerRegister);
#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
CodeRef finalCode = patchBuffer.finalizeCode();
*executablePool = finalCode.m_executablePool;
- *ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
- *ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
- *ctiNativeCallThunk = trampolineAt(finalCode, nativeCallThunk);
+ trampolines->ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
+ trampolines->ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
+ trampolines->ctiNativeCallThunk = adoptRef(new NativeExecutable(JITCode(JITCode::HostFunction(trampolineAt(finalCode, nativeCallThunk)))));
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ trampolines->ctiSoftModulo = trampolineAt(finalCode, softModBegin);
+#endif
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
- *ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
+ trampolines->ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
-#else
- UNUSED_PARAM(ctiStringLengthTrampoline);
#endif
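// Editorial sketch: the assignments above imply roughly this shape for the new
// out-parameter. Field types are inferred from what gets stored into them (CodePtr
// results from trampolineAt(), a RefPtr<NativeExecutable> from adoptRef()); the
// authoritative declaration lives in JIT.h and may differ.
struct TrampolineStructure {
    CodePtr ctiStringLengthTrampoline;
    CodePtr ctiVirtualCallLink;
    CodePtr ctiVirtualCall;
    RefPtr<NativeExecutable> ctiNativeCallThunk;
    CodePtr ctiSoftModulo;
};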
// Check that baseVal 'ImplementsDefaultHasInstance'.
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
- addSlowCase(branchTest32(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));
+ addSlowCase(branchTest8(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));
// Optimistically load the result true, and start looping.
// Initially, regT1 still contains proto and regT2 still contains value.
emitJumpSlowCaseIfNotJSCell(regT0);
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo) + OBJECT_OFFSETOF(TypeInfo, m_type)), Imm32(ObjectType)));
+ addSlowCase(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
}
stubCall.call(currentInstruction[1].u.operand);
}
-void JIT::emit_op_resolve_global(Instruction* currentInstruction)
+void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool)
{
// Fast case
void* globalObject = currentInstruction[2].u.jsCell;
- Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
-
unsigned currentIndex = m_globalResolveInfoIndex++;
void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
// Check Structure of global object
move(ImmPtr(globalObject), regT0);
loadPtr(structureAddress, regT1);
- Jump noMatch = branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))); // Structures don't match
+ addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)))); // Structures don't match
// Load cached property
// Assume that the global object always uses external storage.
load32(offsetAddr, regT1);
loadPtr(BaseIndex(regT0, regT1, ScalePtr), regT0);
emitPutVirtualRegister(currentInstruction[1].u.operand);
- Jump end = jump();
+}
- // Slow case
- noMatch.link(this);
+void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ void* globalObject = currentInstruction[2].u.jsCell;
+ Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
+
+ unsigned currentIndex = m_globalResolveInfoIndex++;
+
+ linkSlowCase(iter);
JITStubCall stubCall(this, cti_op_resolve_global);
stubCall.addArgument(ImmPtr(globalObject));
stubCall.addArgument(ImmPtr(ident));
stubCall.addArgument(Imm32(currentIndex));
- stubCall.call(currentInstruction[1].u.operand);
- end.link(this);
+ stubCall.call(dst);
}
void JIT::emit_op_not(Instruction* currentInstruction)
isNonZero.link(this);
RECORD_JUMP_TARGET(target);
-};
+}
+
void JIT::emit_op_jeq_null(Instruction* currentInstruction)
{
unsigned src = currentInstruction[1].u.operand;
// First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- addJump(branchTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
+ addJump(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
Jump wasNotImmediate = jump();
// Now handle the immediate cases - undefined & null
// First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- addJump(branchTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
+ addJump(branchTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
Jump wasNotImmediate = jump();
// Now handle the immediate cases - undefined & null
emitPutVirtualRegister(currentInstruction[1].u.operand);
}
-void JIT::emit_op_new_regexp(Instruction* currentInstruction)
-{
- JITStubCall stubCall(this, cti_op_new_regexp);
- stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
- stubCall.call(currentInstruction[1].u.operand);
-}
-
void JIT::emit_op_bitor(Instruction* currentInstruction)
{
emitGetVirtualRegisters(currentInstruction[2].u.operand, regT0, currentInstruction[3].u.operand, regT1);
isNotObject.append(emitJumpIfNotJSCell(regT0));
if (base != m_codeBlock->thisRegister()) {
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- isNotObject.append(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
+ isNotObject.append(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
}
// We could inline the case where you have a valid cache, but
emitJumpSlowCaseIfNotJSCell(regT0, srcVReg);
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- addSlowCase(branch32(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
+ addSlowCase(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(NumberType)));
wasImmediate.link(this);
void JIT::emit_op_debug(Instruction* currentInstruction)
{
+#if ENABLE(DEBUG_WITH_BREAKPOINT)
+ UNUSED_PARAM(currentInstruction);
+ breakpoint();
+#else
JITStubCall stubCall(this, cti_op_debug);
stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
stubCall.call();
+#endif
}
void JIT::emit_op_eq_null(Instruction* currentInstruction)
Jump isImmediate = emitJumpIfNotJSCell(regT0);
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- setTest32(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);
+ setTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);
Jump wasNotImmediate = jump();
Jump isImmediate = emitJumpIfNotJSCell(regT0);
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
- setTest32(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);
+ setTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT0);
Jump wasNotImmediate = jump();
emitJumpSlowCaseIfNotJSCell(regT0);
loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
- addSlowCase(branchTest32(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
+ addSlowCase(branchTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
}
stubCall.call(currentInstruction[1].u.operand);
}
-void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned base = currentInstruction[2].u.operand;
- unsigned property = currentInstruction[3].u.operand;
-
- linkSlowCase(iter); // property int32 check
- linkSlowCaseIfNotJSCell(iter, base); // base cell check
- linkSlowCase(iter); // base array check
- linkSlowCase(iter); // vector length check
- linkSlowCase(iter); // empty value
-
- JITStubCall stubCall(this, cti_op_get_by_val);
- stubCall.addArgument(base, regT2);
- stubCall.addArgument(property, regT2);
- stubCall.call(dst);
-}
-
void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
{
unsigned op2 = currentInstruction[2].u.operand;
stubCall.call(currentInstruction[1].u.operand);
}
-#endif // USE(JSVALUE32_64)
+#endif // !USE(JSVALUE32_64)
+
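+// op_resolve_global_dynamic is op_resolve_global for code whose scope chain could, at run
+// time, acquire a declaration that shadows the global (for example via eval). The fast path
+// checks that every skipped scope node still has the plain activation structure (so it cannot
+// have gained such a property) before doing the cached global lookup; any mismatch falls back
+// to a full cti_op_resolve in the slow case.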
+void JIT::emit_op_resolve_global_dynamic(Instruction* currentInstruction)
+{
+ int skip = currentInstruction[6].u.operand + m_codeBlock->needsFullScopeChain();
+
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT0);
+ while (skip--) {
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, object)), regT1);
+ addSlowCase(checkStructure(regT1, m_globalData->activationStructure.get()));
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(ScopeChainNode, next)), regT0);
+ }
+ emit_op_resolve_global(currentInstruction, true);
+}
+
+void JIT::emitSlow_op_resolve_global_dynamic(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ void* globalObject = currentInstruction[2].u.jsCell;
+ Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
+ int skip = currentInstruction[6].u.operand + m_codeBlock->needsFullScopeChain();
+ while (skip--)
+ linkSlowCase(iter);
+ JITStubCall resolveStubCall(this, cti_op_resolve);
+ resolveStubCall.addArgument(ImmPtr(ident));
+ resolveStubCall.call(dst);
+ emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_resolve_global_dynamic));
+
+ unsigned currentIndex = m_globalResolveInfoIndex++;
+
+ linkSlowCase(iter); // We managed to skip all the nodes in the scope chain, but the cache missed.
+ JITStubCall stubCall(this, cti_op_resolve_global);
+ stubCall.addArgument(ImmPtr(globalObject));
+ stubCall.addArgument(ImmPtr(ident));
+ stubCall.addArgument(Imm32(currentIndex));
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_new_regexp(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_new_regexp);
+ stubCall.addArgument(ImmPtr(m_codeBlock->regexp(currentInstruction[2].u.operand)));
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+// For both JSValue32_64 and JSValue32
+#if ENABLE(JIT_OPTIMIZE_MOD)
+#if CPU(ARM_TRADITIONAL)
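+// softModulo() computes the remainder for op_mod without a hardware divide instruction.
+// A rough C sketch of the core (operand signs are handled separately around this; clz()
+// stands in for the ARM CLZ instruction):
+//
+// uint32_t umod(uint32_t a, uint32_t b) { // reached only when a >= b > 0
+// if (!(b & (b - 1))) return a & (b - 1); // power-of-two divisor: just mask
+// for (int i = clz(b) - clz(a); i >= 0; --i)
+// if (a >= (b << i)) a -= (b << i); // shift-and-subtract
+// return a;
+// }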
+void JIT::softModulo()
+{
+ push(regS0);
+ push(regS1);
+ push(regT1);
+ push(regT3);
+#if USE(JSVALUE32_64)
+ m_assembler.mov_r(regT3, regT2);
+ m_assembler.mov_r(regT2, regT0);
+#else
+ m_assembler.mov_r(regT3, m_assembler.asr(regT2, 1));
+ m_assembler.mov_r(regT2, m_assembler.asr(regT0, 1));
+#endif
+ m_assembler.mov_r(regT1, ARMAssembler::getOp2(0));
+
+ m_assembler.teq_r(regT3, ARMAssembler::getOp2(0));
+ m_assembler.rsb_r(regT3, regT3, ARMAssembler::getOp2(0), ARMAssembler::MI);
+ m_assembler.eor_r(regT1, regT1, ARMAssembler::getOp2(1), ARMAssembler::MI);
+
+ m_assembler.teq_r(regT2, ARMAssembler::getOp2(0));
+ m_assembler.rsb_r(regT2, regT2, ARMAssembler::getOp2(0), ARMAssembler::MI);
+ m_assembler.eor_r(regT1, regT1, ARMAssembler::getOp2(2), ARMAssembler::MI);
+
+ Jump exitBranch = branch32(LessThan, regT2, regT3);
+
+ m_assembler.sub_r(regS1, regT3, ARMAssembler::getOp2(1));
+ m_assembler.tst_r(regS1, regT3);
+ m_assembler.and_r(regT2, regT2, regS1, ARMAssembler::EQ);
+ m_assembler.and_r(regT0, regS1, regT3);
+ Jump exitBranch2 = branchTest32(Zero, regT0);
+
+ m_assembler.clz_r(regS1, regT2);
+ m_assembler.clz_r(regS0, regT3);
+ m_assembler.sub_r(regS0, regS0, regS1);
+
+ m_assembler.rsbs_r(regS0, regS0, ARMAssembler::getOp2(31));
+
+ m_assembler.mov_r(regS0, m_assembler.lsl(regS0, 1), ARMAssembler::NE);
+
+ m_assembler.add_r(ARMRegisters::pc, ARMRegisters::pc, m_assembler.lsl(regS0, 2), ARMAssembler::NE);
+ m_assembler.mov_r(regT0, regT0);
+
+ for (int i = 31; i > 0; --i) {
+ m_assembler.cmp_r(regT2, m_assembler.lsl(regT3, i));
+ m_assembler.sub_r(regT2, regT2, m_assembler.lsl(regT3, i), ARMAssembler::CS);
+ }
+ m_assembler.cmp_r(regT2, regT3);
+ m_assembler.sub_r(regT2, regT2, regT3, ARMAssembler::CS);
+
+ exitBranch.link(this);
+ exitBranch2.link(this);
+
+ m_assembler.teq_r(regT1, ARMAssembler::getOp2(0));
+ m_assembler.rsb_r(regT2, regT2, ARMAssembler::getOp2(0), ARMAssembler::GT);
+
+#if USE(JSVALUE32_64)
+ m_assembler.mov_r(regT0, regT2);
+#else
+ m_assembler.mov_r(regT0, m_assembler.lsl(regT2, 1));
+ m_assembler.eor_r(regT0, regT0, ARMAssembler::getOp2(1));
+#endif
+ pop(regT3);
+ pop(regT1);
+ pop(regS1);
+ pop(regS0);
+ ret();
+}
+#else
+#error "JIT_OPTIMIZE_MOD not yet supported on this platform."
+#endif // CPU(ARM_TRADITIONAL)
+#endif
} // namespace JSC
#endif // ENABLE(JIT)
--- /dev/null
+/*
+ * Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2010 Patrick Gansterer <paroga@paroga.com>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "JIT.h"
+
+#if ENABLE(JIT) && USE(JSVALUE32_64)
+
+#include "JITInlineMethods.h"
+#include "JITStubCall.h"
+#include "JSArray.h"
+#include "JSCell.h"
+#include "JSFunction.h"
+#include "JSPropertyNameIterator.h"
+#include "LinkBuffer.h"
+
+namespace JSC {
+
+void JIT::privateCompileCTIMachineTrampolines(RefPtr<ExecutablePool>* executablePool, JSGlobalData* globalData, TrampolineStructure *trampolines)
+{
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ Label softModBegin = align();
+ softModulo();
+#endif
+#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
+ // (1) This function provides fast property access for string length
+ Label stringLengthBegin = align();
+
+ // regT0 holds payload, regT1 holds tag
+
+ Jump string_failureCases1 = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
+ Jump string_failureCases2 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
+
+ // Checks out okay! - get the length from the JSString.
+ load32(Address(regT0, OBJECT_OFFSETOF(JSString, m_length)), regT2);
+
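+ // A length above INT_MAX cannot be represented as an Int32 payload, so such strings fall
+ // back to the generic get_by_id stub.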
+ Jump string_failureCases3 = branch32(Above, regT2, Imm32(INT_MAX));
+ move(regT2, regT0);
+ move(Imm32(JSValue::Int32Tag), regT1);
+
+ ret();
+#endif
+
+ // (2) Trampolines for the slow cases of op_call / op_call_eval / op_construct.
+
+#if ENABLE(JIT_OPTIMIZE_CALL)
+ // VirtualCallLink Trampoline
+ // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
+ Label virtualCallLinkBegin = align();
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
+
+ Jump isNativeFunc2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
+
+ Jump hasCodeBlock2 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
+ preserveReturnAddressAfterCall(regT3);
+ restoreArgumentReference();
+ Call callJSFunction2 = call();
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
+ emitGetJITStubArg(2, regT1); // argCount
+ restoreReturnAddressBeforeReturn(regT3);
+ hasCodeBlock2.link(this);
+
+ // Check argCount matches callee arity.
+ Jump arityCheckOkay2 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
+ preserveReturnAddressAfterCall(regT3);
+ emitPutJITStubArg(regT3, 1); // return address
+ restoreArgumentReference();
+ Call callArityCheck2 = call();
+ move(regT1, callFrameRegister);
+ emitGetJITStubArg(2, regT1); // argCount
+ restoreReturnAddressBeforeReturn(regT3);
+ arityCheckOkay2.link(this);
+
+ isNativeFunc2.link(this);
+
+ compileOpCallInitializeCallFrame();
+
+ preserveReturnAddressAfterCall(regT3);
+ emitPutJITStubArg(regT3, 1); // return address
+ restoreArgumentReference();
+ Call callLazyLinkCall = call();
+ restoreReturnAddressBeforeReturn(regT3);
+ jump(regT0);
+#endif // ENABLE(JIT_OPTIMIZE_CALL)
+
+ // VirtualCall Trampoline
+ // regT0 holds callee, regT1 holds argCount. regT2 will hold the FunctionExecutable.
+ Label virtualCallBegin = align();
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
+
+ Jump isNativeFunc3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
+
+ Jump hasCodeBlock3 = branch32(GreaterThan, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), Imm32(0));
+ preserveReturnAddressAfterCall(regT3);
+ restoreArgumentReference();
+ Call callJSFunction1 = call();
+ emitGetJITStubArg(2, regT1); // argCount
+ restoreReturnAddressBeforeReturn(regT3);
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
+ hasCodeBlock3.link(this);
+
+ // Check argCount matches callee arity.
+ Jump arityCheckOkay3 = branch32(Equal, Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_numParameters)), regT1);
+ preserveReturnAddressAfterCall(regT3);
+ emitPutJITStubArg(regT3, 1); // return address
+ restoreArgumentReference();
+ Call callArityCheck1 = call();
+ move(regT1, callFrameRegister);
+ emitGetJITStubArg(2, regT1); // argCount
+ restoreReturnAddressBeforeReturn(regT3);
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSFunction, m_executable)), regT2);
+ arityCheckOkay3.link(this);
+
+ isNativeFunc3.link(this);
+
+ compileOpCallInitializeCallFrame();
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(FunctionExecutable, m_jitCode)), regT0);
+ jump(regT0);
+
+#if CPU(X86) || CPU(ARM_TRADITIONAL)
+ Label nativeCallThunk = align();
+ preserveReturnAddressAfterCall(regT0);
+ emitPutToCallFrameHeader(regT0, RegisterFile::ReturnPC); // Push return address
+
+ // Load caller frame's scope chain into this callframe so that whatever we call can
+ // get to its global data.
+ emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, regT1);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT1, regT1);
+ emitPutToCallFrameHeader(regT1, RegisterFile::ScopeChain);
+
+#if CPU(X86)
+ emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
+
+ /* We have two structs that we use to describe the stackframe we set up for our
+ * call to native code. NativeCallFrameStructure describes how we set up the stack
+ * in advance of the call. NativeFunctionCalleeSignature describes the callframe
+ * as the native code expects it. We do this because we are using the fastcall calling
+ * convention, which results in the callee popping its arguments off the stack but
+ * not the rest of the callframe, so we need a reliable way to ensure we increment the
+ * stack pointer by the right amount after the call.
+ */
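+
+ // Sketch of the frame built below (offsets come from OBJECT_OFFSETOF; the callee and
+ // result slots exist only in the MSVC/Linux variant of the struct):
+ // esp + offsetof(callee) : JSObject* callee
+ // esp + offsetof(thisValue) : JSValue this (payload, tag)
+ // esp + offsetof(argPointer) : ArgList*, pointing at the args field just below
+ // esp + offsetof(args) : ArgList { m_args -> first argument Register, m_argCount }
+ // esp + offsetof(result) : JSValue return slot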
+
+#if COMPILER(MSVC) || OS(LINUX)
+#if COMPILER(MSVC)
+#pragma pack(push)
+#pragma pack(4)
+#endif // COMPILER(MSVC)
+ struct NativeCallFrameStructure {
+ // CallFrame* callFrame; // passed in EDX
+ JSObject* callee;
+ JSValue thisValue;
+ ArgList* argPointer;
+ ArgList args;
+ JSValue result;
+ };
+ struct NativeFunctionCalleeSignature {
+ JSObject* callee;
+ JSValue thisValue;
+ ArgList* argPointer;
+ };
+#if COMPILER(MSVC)
+#pragma pack(pop)
+#endif // COMPILER(MSVC)
+#else
+ struct NativeCallFrameStructure {
+ // CallFrame* callFrame; // passed in ECX
+ // JSObject* callee; // passed in EDX
+ JSValue thisValue;
+ ArgList* argPointer;
+ ArgList args;
+ };
+ struct NativeFunctionCalleeSignature {
+ JSValue thisValue;
+ ArgList* argPointer;
+ };
+#endif
+
+ const int NativeCallFrameSize = (sizeof(NativeCallFrameStructure) + 15) & ~15;
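+ // (Rounds the frame up to a 16-byte multiple, e.g. (36 + 15) & ~15 == 48, so the system
+ // stack stays 16-byte aligned across the call.)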
+ // Allocate system stack frame
+ subPtr(Imm32(NativeCallFrameSize), stackPointerRegister);
+
+ // Set up arguments
+ subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
+
+ // push argcount
+ storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_argCount)));
+
+ // Calculate the start of the callframe header, and store in regT1
+ addPtr(Imm32(-RegisterFile::CallFrameHeaderSize * (int)sizeof(Register)), callFrameRegister, regT1);
+
+ // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT0)
+ mul32(Imm32(sizeof(Register)), regT0, regT0);
+ subPtr(regT0, regT1);
+ storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, args) + OBJECT_OFFSETOF(ArgList, m_args)));
+
+ // ArgList is passed by reference, so plant a pointer to the embedded args member.
+ addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, args)), stackPointerRegister, regT0);
+ storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, argPointer)));
+
+ // regT1 currently points to the first argument, regT1 - sizeof(Register) points to 'this'
+ loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
+ loadPtr(Address(regT1, -(int)sizeof(Register) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), regT3);
+ storePtr(regT2, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)));
+ storePtr(regT3, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, thisValue) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)));
+
+#if COMPILER(MSVC) || OS(LINUX)
+ // JSValue is returned through a hidden pointer, so plant the address of the result slot in ecx.
+ addPtr(Imm32(OBJECT_OFFSETOF(NativeCallFrameStructure, result)), stackPointerRegister, X86Registers::ecx);
+
+ // Plant callee
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::eax);
+ storePtr(X86Registers::eax, Address(stackPointerRegister, OBJECT_OFFSETOF(NativeCallFrameStructure, callee)));
+
+ // Plant callframe
+ move(callFrameRegister, X86Registers::edx);
+
+ call(Address(X86Registers::eax, OBJECT_OFFSETOF(JSFunction, m_data)));
+
+ // JSValue is a non-POD type, so eax points to it
+ emitLoad(0, regT1, regT0, X86Registers::eax);
+#else
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, X86Registers::edx); // callee
+ move(callFrameRegister, X86Registers::ecx); // callFrame
+ call(Address(X86Registers::edx, OBJECT_OFFSETOF(JSFunction, m_data)));
+#endif
+
+ // We've put a few temporaries on the stack in addition to the actual arguments
+ // so pull them off now
+ addPtr(Imm32(NativeCallFrameSize - sizeof(NativeFunctionCalleeSignature)), stackPointerRegister);
+
+#elif CPU(ARM_TRADITIONAL)
+ emitGetFromCallFrameHeader32(RegisterFile::ArgumentCount, regT0);
+
+ // Allocate stack space for our arglist
+ COMPILE_ASSERT((sizeof(ArgList) & 0x7) == 0 && sizeof(JSValue) == 8 && sizeof(Register) == 8, ArgList_should_by_8byte_aligned);
+ subPtr(Imm32(sizeof(ArgList)), stackPointerRegister);
+
+ // Set up arguments
+ subPtr(Imm32(1), regT0); // Don't include 'this' in argcount
+
+ // Push argcount
+ storePtr(regT0, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_argCount)));
+
+ // Calculate the start of the callframe header, and store in regT1
+ move(callFrameRegister, regT1);
+ sub32(Imm32(RegisterFile::CallFrameHeaderSize * (int32_t)sizeof(Register)), regT1);
+
+ // Calculate start of arguments as callframe header - sizeof(Register) * argcount (regT1)
+ mul32(Imm32(sizeof(Register)), regT0, regT0);
+ subPtr(regT0, regT1);
+
+ // push pointer to arguments
+ storePtr(regT1, Address(stackPointerRegister, OBJECT_OFFSETOF(ArgList, m_args)));
+
+ // Argument passing method:
+ // r0 - points to return value
+ // r1 - callFrame
+ // r2 - callee
+ // stack: this(JSValue) and a pointer to ArgList
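+ //
+ // In C terms the call below is roughly
+ // JSValue result = nativeEntry(callFrame, callee, thisValue, args);
+ // where nativeEntry is the host function pointer loaded from JSFunction::m_data and the
+ // JSValue result is written through the pointer passed in r0 (it is returned indirectly).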
+
+#if OS(WINCE)
+ // Setup arg4:
+ push(stackPointerRegister);
+
+ // Setup arg3:
+ // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
+ load32(Address(regT1, -(int32_t)sizeof(void*) * 2), ARMRegisters::r3);
+ push(ARMRegisters::r3);
+ load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
+ storePtr(regT3, Address(stackPointerRegister));
+
+ // Setup arg2:
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);
+
+ // Setup arg1:
+ move(callFrameRegister, regT1);
+
+ // Setup arg0:
+ move(stackPointerRegister, regT0);
+
+ call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));
+
+ load32(Address(stackPointerRegister, 0), regT0);
+ load32(Address(stackPointerRegister, 4), regT1);
+
+ addPtr(Imm32(sizeof(ArgList) + 8), stackPointerRegister);
+#else // OS(WINCE)
+ move(stackPointerRegister, regT3);
+ subPtr(Imm32(8), stackPointerRegister);
+ move(stackPointerRegister, regT0);
+ subPtr(Imm32(8 + 4 + 4 /* padding */), stackPointerRegister);
+
+ // Setup arg4:
+ storePtr(regT3, Address(stackPointerRegister, 8));
+
+ // Setup arg3:
+ // regT1 currently points to the first argument, regT1-sizeof(Register) points to 'this'
+ load32(Address(regT1, -(int32_t)sizeof(void*) * 2), regT3);
+ storePtr(regT3, Address(stackPointerRegister, 0));
+ load32(Address(regT1, -(int32_t)sizeof(void*)), regT3);
+ storePtr(regT3, Address(stackPointerRegister, 4));
+
+ // Setup arg2:
+ emitGetFromCallFrameHeaderPtr(RegisterFile::Callee, regT2);
+
+ // Setup arg1:
+ move(callFrameRegister, regT1);
+
+ call(Address(regT2, OBJECT_OFFSETOF(JSFunction, m_data)));
+
+ // Load return value
+ load32(Address(stackPointerRegister, 16), regT0);
+ load32(Address(stackPointerRegister, 20), regT1);
+
+ addPtr(Imm32(sizeof(ArgList) + 16 + 8), stackPointerRegister);
+#endif // OS(WINCE)
+
+#endif
+
+ // Check for an exception
+ move(ImmPtr(&globalData->exception), regT2);
+ Jump sawException = branch32(NotEqual, tagFor(0, regT2), Imm32(JSValue::EmptyValueTag));
+
+ // Grab the return address.
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT3);
+
+ // Restore our caller's "r".
+ emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
+
+ // Return.
+ restoreReturnAddressBeforeReturn(regT3);
+ ret();
+
+ // Handle an exception
+ sawException.link(this);
+ // Grab the return address.
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ReturnPC, regT1);
+ move(ImmPtr(&globalData->exceptionLocation), regT2);
+ storePtr(regT1, regT2);
+ move(ImmPtr(FunctionPtr(ctiVMThrowTrampoline).value()), regT2);
+ emitGetFromCallFrameHeaderPtr(RegisterFile::CallerFrame, callFrameRegister);
+ poke(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
+ restoreReturnAddressBeforeReturn(regT2);
+ ret();
+
+#elif ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
+#error "JIT_OPTIMIZE_NATIVE_CALL not yet supported on this platform."
+#else
+ UNUSED_PARAM(globalData);
+ breakpoint();
+#endif
+
+#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
+ Call string_failureCases1Call = makeTailRecursiveCall(string_failureCases1);
+ Call string_failureCases2Call = makeTailRecursiveCall(string_failureCases2);
+ Call string_failureCases3Call = makeTailRecursiveCall(string_failureCases3);
+#endif
+
+ // All trampolines constructed! copy the code, link up calls, and set the pointers on the Machine object.
+ LinkBuffer patchBuffer(this, m_globalData->executableAllocator.poolForSize(m_assembler.size()));
+
+#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
+ patchBuffer.link(string_failureCases1Call, FunctionPtr(cti_op_get_by_id_string_fail));
+ patchBuffer.link(string_failureCases2Call, FunctionPtr(cti_op_get_by_id_string_fail));
+ patchBuffer.link(string_failureCases3Call, FunctionPtr(cti_op_get_by_id_string_fail));
+#endif
+ patchBuffer.link(callArityCheck1, FunctionPtr(cti_op_call_arityCheck));
+ patchBuffer.link(callJSFunction1, FunctionPtr(cti_op_call_JSFunction));
+#if ENABLE(JIT_OPTIMIZE_CALL)
+ patchBuffer.link(callArityCheck2, FunctionPtr(cti_op_call_arityCheck));
+ patchBuffer.link(callJSFunction2, FunctionPtr(cti_op_call_JSFunction));
+ patchBuffer.link(callLazyLinkCall, FunctionPtr(cti_vm_lazyLinkCall));
+#endif
+
+ CodeRef finalCode = patchBuffer.finalizeCode();
+ *executablePool = finalCode.m_executablePool;
+
+ trampolines->ctiVirtualCall = trampolineAt(finalCode, virtualCallBegin);
+#if ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
+ trampolines->ctiNativeCallThunk = adoptRef(new NativeExecutable(JITCode(JITCode::HostFunction(trampolineAt(finalCode, nativeCallThunk)))));
+#endif
+#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
+ trampolines->ctiStringLengthTrampoline = trampolineAt(finalCode, stringLengthBegin);
+#endif
+#if ENABLE(JIT_OPTIMIZE_CALL)
+ trampolines->ctiVirtualCallLink = trampolineAt(finalCode, virtualCallLinkBegin);
+#endif
+#if ENABLE(JIT_OPTIMIZE_MOD)
+ trampolines->ctiSoftModulo = trampolineAt(finalCode, softModBegin);
+#endif
+}
+
+void JIT::emit_op_mov(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src = currentInstruction[2].u.operand;
+
+ if (m_codeBlock->isConstantRegisterIndex(src))
+ emitStore(dst, getConstantOperand(src));
+ else {
+ emitLoad(src, regT1, regT0);
+ emitStore(dst, regT1, regT0);
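+ // Record that dst is now live in regT1/regT0 so the next opcode can reuse the pair
+ // instead of reloading it from the register file.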
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_mov), dst, regT1, regT0);
+ }
+}
+
+void JIT::emit_op_end(Instruction* currentInstruction)
+{
+ if (m_codeBlock->needsFullScopeChain())
+ JITStubCall(this, cti_op_end).call();
+ ASSERT(returnValueRegister != callFrameRegister);
+ emitLoad(currentInstruction[1].u.operand, regT1, regT0);
+ restoreReturnAddressBeforeReturn(Address(callFrameRegister, RegisterFile::ReturnPC * static_cast<int>(sizeof(Register))));
+ ret();
+}
+
+void JIT::emit_op_jmp(Instruction* currentInstruction)
+{
+ unsigned target = currentInstruction[1].u.operand;
+ addJump(jump(), target);
+}
+
+void JIT::emit_op_loop_if_lesseq(Instruction* currentInstruction)
+{
+ unsigned op1 = currentInstruction[1].u.operand;
+ unsigned op2 = currentInstruction[2].u.operand;
+ unsigned target = currentInstruction[3].u.operand;
+
+ emitTimeoutCheck();
+
+ if (isOperandConstantImmediateInt(op1)) {
+ emitLoad(op2, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(GreaterThanOrEqual, regT0, Imm32(getConstantOperand(op1).asInt32())), target);
+ return;
+ }
+
+ if (isOperandConstantImmediateInt(op2)) {
+ emitLoad(op1, regT1, regT0);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(LessThanOrEqual, regT0, Imm32(getConstantOperand(op2).asInt32())), target);
+ return;
+ }
+
+ emitLoad2(op1, regT1, regT0, op2, regT3, regT2);
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag)));
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ addJump(branch32(LessThanOrEqual, regT0, regT2), target);
+}
+
+void JIT::emitSlow_op_loop_if_lesseq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned op1 = currentInstruction[1].u.operand;
+ unsigned op2 = currentInstruction[2].u.operand;
+ unsigned target = currentInstruction[3].u.operand;
+
+ if (!isOperandConstantImmediateInt(op1) && !isOperandConstantImmediateInt(op2))
+ linkSlowCase(iter); // int32 check
+ linkSlowCase(iter); // int32 check
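+ // (One linkSlowCase per addSlowCase in the fast path above: two int32 tag checks in the
+ // general case, only one when either operand is a constant int.)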
+
+ JITStubCall stubCall(this, cti_op_loop_if_lesseq);
+ stubCall.addArgument(op1);
+ stubCall.addArgument(op2);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
+}
+
+void JIT::emit_op_new_object(Instruction* currentInstruction)
+{
+ JITStubCall(this, cti_op_new_object).call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_instanceof(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned value = currentInstruction[2].u.operand;
+ unsigned baseVal = currentInstruction[3].u.operand;
+ unsigned proto = currentInstruction[4].u.operand;
+
+ // Load the operands into registers.
+ // We use regT0 for baseVal since we will be done with this first, and we can then use it for the result.
+ emitLoadPayload(value, regT2);
+ emitLoadPayload(baseVal, regT0);
+ emitLoadPayload(proto, regT1);
+
+ // Check that value, baseVal, and proto are cells.
+ emitJumpSlowCaseIfNotJSCell(value);
+ emitJumpSlowCaseIfNotJSCell(baseVal);
+ emitJumpSlowCaseIfNotJSCell(proto);
+
+ // Check that baseVal 'ImplementsDefaultHasInstance'.
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
+ addSlowCase(branchTest8(Zero, Address(regT0, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(ImplementsDefaultHasInstance)));
+
+ // Optimistically load the result true, and start looping.
+ // Initially, regT1 still contains proto and regT2 still contains value.
+ // As we loop regT2 will be updated with its prototype, recursively walking the prototype chain.
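+ // In effect: cell = value; do { cell = prototypeOf(cell); } while (cell && cell != proto);
+ // result = (cell == proto); where prototypeOf() reads the payload of Structure::m_prototype.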
+ move(Imm32(JSValue::TrueTag), regT0);
+ Label loop(this);
+
+ // Load the prototype of the cell in regT2. If this is equal to regT1 - WIN!
+ // Otherwise, check if we've hit null - if we have then drop out of the loop, if not go again.
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
+ load32(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
+ Jump isInstance = branchPtr(Equal, regT2, regT1);
+ branchTest32(NonZero, regT2).linkTo(loop, this);
+
+ // We get here either by dropping out of the loop, or if value was not an Object. Result is false.
+ move(Imm32(JSValue::FalseTag), regT0);
+
+ // isInstance jumps right down to here, to skip setting the result to false (it has already set true).
+ isInstance.link(this);
+ emitStoreBool(dst, regT0);
+}
+
+void JIT::emitSlow_op_instanceof(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned value = currentInstruction[2].u.operand;
+ unsigned baseVal = currentInstruction[3].u.operand;
+ unsigned proto = currentInstruction[4].u.operand;
+
+ linkSlowCaseIfNotJSCell(iter, value);
+ linkSlowCaseIfNotJSCell(iter, baseVal);
+ linkSlowCaseIfNotJSCell(iter, proto);
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_instanceof);
+ stubCall.addArgument(value);
+ stubCall.addArgument(baseVal);
+ stubCall.addArgument(proto);
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_new_func(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_new_func);
+ stubCall.addArgument(ImmPtr(m_codeBlock->functionDecl(currentInstruction[2].u.operand)));
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
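+// Global variables live in the global object's register array (d()->registers); the two
+// opcodes below just load that base pointer and then index it like a register file.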
+void JIT::emit_op_get_global_var(Instruction* currentInstruction)
+{
+ int dst = currentInstruction[1].u.operand;
+ JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[2].u.jsCell);
+ ASSERT(globalObject->isGlobalObject());
+ int index = currentInstruction[3].u.operand;
+
+ loadPtr(&globalObject->d()->registers, regT2);
+
+ emitLoad(index, regT1, regT0, regT2);
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_get_global_var), dst, regT1, regT0);
+}
+
+void JIT::emit_op_put_global_var(Instruction* currentInstruction)
+{
+ JSGlobalObject* globalObject = static_cast<JSGlobalObject*>(currentInstruction[1].u.jsCell);
+ ASSERT(globalObject->isGlobalObject());
+ int index = currentInstruction[2].u.operand;
+ int value = currentInstruction[3].u.operand;
+
+ emitLoad(value, regT1, regT0);
+
+ loadPtr(&globalObject->d()->registers, regT2);
+ emitStore(index, regT1, regT0, regT2);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_put_global_var), value, regT1, regT0);
+}
+
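+// For the scoped-variable opcodes, 'skip' is the number of ScopeChainNodes to step over
+// (plus one when the code block needs a full scope chain); the variable itself lives in the
+// scope object's register array, reached via object -> d -> registers.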
+void JIT::emit_op_get_scoped_var(Instruction* currentInstruction)
+{
+ int dst = currentInstruction[1].u.operand;
+ int index = currentInstruction[2].u.operand;
+ int skip = currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain();
+
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
+ while (skip--)
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
+
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);
+
+ emitLoad(index, regT1, regT0, regT2);
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_get_scoped_var), dst, regT1, regT0);
+}
+
+void JIT::emit_op_put_scoped_var(Instruction* currentInstruction)
+{
+ int index = currentInstruction[1].u.operand;
+ int skip = currentInstruction[2].u.operand + m_codeBlock->needsFullScopeChain();
+ int value = currentInstruction[3].u.operand;
+
+ emitLoad(value, regT1, regT0);
+
+ emitGetFromCallFrameHeaderPtr(RegisterFile::ScopeChain, regT2);
+ while (skip--)
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, next)), regT2);
+
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(ScopeChainNode, object)), regT2);
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject, d)), regT2);
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(JSVariableObject::JSVariableObjectData, registers)), regT2);
+
+ emitStore(index, regT1, regT0, regT2);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_put_scoped_var), value, regT1, regT0);
+}
+
+void JIT::emit_op_tear_off_activation(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_tear_off_activation);
+ stubCall.addArgument(currentInstruction[1].u.operand);
+ stubCall.call();
+}
+
+void JIT::emit_op_tear_off_arguments(Instruction*)
+{
+ JITStubCall(this, cti_op_tear_off_arguments).call();
+}
+
+void JIT::emit_op_new_array(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_new_array);
+ stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
+ stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_resolve(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_resolve);
+ stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_to_primitive(Instruction* currentInstruction)
+{
+ int dst = currentInstruction[1].u.operand;
+ int src = currentInstruction[2].u.operand;
+
+ emitLoad(src, regT1, regT0);
+
+ Jump isImm = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
+ addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
+ isImm.link(this);
+
+ if (dst != src)
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_to_primitive), dst, regT1, regT0);
+}
+
+void JIT::emitSlow_op_to_primitive(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ int dst = currentInstruction[1].u.operand;
+
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_to_primitive);
+ stubCall.addArgument(regT1, regT0);
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_strcat(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_strcat);
+ stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
+ stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_resolve_base(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_resolve_base);
+ stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_resolve_skip(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_resolve_skip);
+ stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
+ stubCall.addArgument(Imm32(currentInstruction[3].u.operand + m_codeBlock->needsFullScopeChain()));
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_resolve_global(Instruction* currentInstruction, bool dynamic)
+{
+ // FIXME: Optimize to use patching instead of so many memory accesses.
+
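+ // The fast path is, in effect:
+ // GlobalResolveInfo& info = codeBlock->globalResolveInfo(i);
+ // if (globalObject->structure == info.structure)
+ // result = globalObject->externalStorage[info.offset]; // cache hit
+ // with a miss refilling the cache through cti_op_resolve_global in the slow case.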
+ unsigned dst = currentInstruction[1].u.operand;
+ void* globalObject = currentInstruction[2].u.jsCell;
+
+ unsigned currentIndex = m_globalResolveInfoIndex++;
+ void* structureAddress = &(m_codeBlock->globalResolveInfo(currentIndex).structure);
+ void* offsetAddr = &(m_codeBlock->globalResolveInfo(currentIndex).offset);
+
+ // Verify structure.
+ move(ImmPtr(globalObject), regT0);
+ loadPtr(structureAddress, regT1);
+ addSlowCase(branchPtr(NotEqual, regT1, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure))));
+
+ // Load property.
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSGlobalObject, m_externalStorage)), regT2);
+ load32(offsetAddr, regT3);
+ load32(BaseIndex(regT2, regT3, TimesEight), regT0); // payload
+ load32(BaseIndex(regT2, regT3, TimesEight, 4), regT1); // tag
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + (dynamic ? OPCODE_LENGTH(op_resolve_global_dynamic) : OPCODE_LENGTH(op_resolve_global)), dst, regT1, regT0);
+}
+
+void JIT::emitSlow_op_resolve_global(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ void* globalObject = currentInstruction[2].u.jsCell;
+ Identifier* ident = &m_codeBlock->identifier(currentInstruction[3].u.operand);
+
+ unsigned currentIndex = m_globalResolveInfoIndex++;
+
+ linkSlowCase(iter);
+ JITStubCall stubCall(this, cti_op_resolve_global);
+ stubCall.addArgument(ImmPtr(globalObject));
+ stubCall.addArgument(ImmPtr(ident));
+ stubCall.addArgument(Imm32(currentIndex));
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_not(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src = currentInstruction[2].u.operand;
+
+ emitLoadTag(src, regT0);
+
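+ // Booleans live entirely in the tag, and TrueTag and FalseTag differ only in bit 0:
+ // xor FalseTag leaves 0 or 1 for a boolean (any other bit means "not a boolean" and takes
+ // the slow case); xor TrueTag then produces the opposite boolean's tag.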
+ xor32(Imm32(JSValue::FalseTag), regT0);
+ addSlowCase(branchTest32(NonZero, regT0, Imm32(~1)));
+ xor32(Imm32(JSValue::TrueTag), regT0);
+
+ emitStoreBool(dst, regT0, (dst == src));
+}
+
+void JIT::emitSlow_op_not(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src = currentInstruction[2].u.operand;
+
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_not);
+ stubCall.addArgument(src);
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_jfalse(Instruction* currentInstruction)
+{
+ unsigned cond = currentInstruction[1].u.operand;
+ unsigned target = currentInstruction[2].u.operand;
+
+ emitLoad(cond, regT1, regT0);
+
+ Jump isTrue = branch32(Equal, regT1, Imm32(JSValue::TrueTag));
+ addJump(branch32(Equal, regT1, Imm32(JSValue::FalseTag)), target);
+
+ Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
+ Jump isTrue2 = branch32(NotEqual, regT0, Imm32(0));
+ addJump(jump(), target);
+
+ if (supportsFloatingPoint()) {
+ isNotInteger.link(this);
+
+ addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
+
+ zeroDouble(fpRegT0);
+ emitLoadDouble(cond, fpRegT1);
+ addJump(branchDouble(DoubleEqualOrUnordered, fpRegT0, fpRegT1), target);
+ } else
+ addSlowCase(isNotInteger);
+
+ isTrue.link(this);
+ isTrue2.link(this);
+}
+
+void JIT::emitSlow_op_jfalse(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned cond = currentInstruction[1].u.operand;
+ unsigned target = currentInstruction[2].u.operand;
+
+ linkSlowCase(iter);
+ JITStubCall stubCall(this, cti_op_jtrue);
+ stubCall.addArgument(cond);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(Zero, regT0), target); // Inverted.
+}
+
+void JIT::emit_op_jtrue(Instruction* currentInstruction)
+{
+ unsigned cond = currentInstruction[1].u.operand;
+ unsigned target = currentInstruction[2].u.operand;
+
+ emitLoad(cond, regT1, regT0);
+
+ Jump isFalse = branch32(Equal, regT1, Imm32(JSValue::FalseTag));
+ addJump(branch32(Equal, regT1, Imm32(JSValue::TrueTag)), target);
+
+ Jump isNotInteger = branch32(NotEqual, regT1, Imm32(JSValue::Int32Tag));
+ Jump isFalse2 = branch32(Equal, regT0, Imm32(0));
+ addJump(jump(), target);
+
+ if (supportsFloatingPoint()) {
+ isNotInteger.link(this);
+
+ addSlowCase(branch32(Above, regT1, Imm32(JSValue::LowestTag)));
+
+ zeroDouble(fpRegT0);
+ emitLoadDouble(cond, fpRegT1);
+ addJump(branchDouble(DoubleNotEqual, fpRegT0, fpRegT1), target);
+ } else
+ addSlowCase(isNotInteger);
+
+ isFalse.link(this);
+ isFalse2.link(this);
+}
+
+void JIT::emitSlow_op_jtrue(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned cond = currentInstruction[1].u.operand;
+ unsigned target = currentInstruction[2].u.operand;
+
+ linkSlowCase(iter);
+ JITStubCall stubCall(this, cti_op_jtrue);
+ stubCall.addArgument(cond);
+ stubCall.call();
+ emitJumpSlowToHot(branchTest32(NonZero, regT0), target);
+}
+
+void JIT::emit_op_jeq_null(Instruction* currentInstruction)
+{
+ unsigned src = currentInstruction[1].u.operand;
+ unsigned target = currentInstruction[2].u.operand;
+
+ emitLoad(src, regT1, regT0);
+
+ Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
+
+ // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
+ addJump(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
+
+ Jump wasNotImmediate = jump();
+
+ // Now handle the immediate cases - undefined & null
+ isImmediate.link(this);
+
+ set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
+ set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
+ or32(regT2, regT1);
+
+ addJump(branchTest32(NonZero, regT1), target);
+
+ wasNotImmediate.link(this);
+}
+
+void JIT::emit_op_jneq_null(Instruction* currentInstruction)
+{
+ unsigned src = currentInstruction[1].u.operand;
+ unsigned target = currentInstruction[2].u.operand;
+
+ emitLoad(src, regT1, regT0);
+
+ Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
+
+ // First, handle JSCell cases - check MasqueradesAsUndefined bit on the structure.
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
+ addJump(branchTest8(Zero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined)), target);
+
+ Jump wasNotImmediate = jump();
+
+ // Now handle the immediate cases - undefined & null
+ isImmediate.link(this);
+
+ set32(Equal, regT1, Imm32(JSValue::NullTag), regT2);
+ set32(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
+ or32(regT2, regT1);
+
+ addJump(branchTest32(Zero, regT1), target);
+
+ wasNotImmediate.link(this);
+}
+
+void JIT::emit_op_jneq_ptr(Instruction* currentInstruction)
+{
+ unsigned src = currentInstruction[1].u.operand;
+ JSCell* ptr = currentInstruction[2].u.jsCell;
+ unsigned target = currentInstruction[3].u.operand;
+
+ emitLoad(src, regT1, regT0);
+ addJump(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)), target);
+ addJump(branchPtr(NotEqual, regT0, ImmPtr(ptr)), target);
+}
+
+void JIT::emit_op_jsr(Instruction* currentInstruction)
+{
+ int retAddrDst = currentInstruction[1].u.operand;
+ int target = currentInstruction[2].u.operand;
+ DataLabelPtr storeLocation = storePtrWithPatch(ImmPtr(0), Address(callFrameRegister, sizeof(Register) * retAddrDst));
+ addJump(jump(), target);
+ m_jsrSites.append(JSRInfo(storeLocation, label()));
+}
+
+void JIT::emit_op_sret(Instruction* currentInstruction)
+{
+ jump(Address(callFrameRegister, sizeof(Register) * currentInstruction[1].u.operand));
+}
+
+void JIT::emit_op_eq(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src1 = currentInstruction[2].u.operand;
+ unsigned src2 = currentInstruction[3].u.operand;
+
+ emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
+ addSlowCase(branch32(NotEqual, regT1, regT3));
+ addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
+ addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));
+
+ set8(Equal, regT0, regT2, regT0);
+ or32(Imm32(JSValue::FalseTag), regT0);
+
+ emitStoreBool(dst, regT0);
+}
+
+void JIT::emitSlow_op_eq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned op1 = currentInstruction[2].u.operand;
+ unsigned op2 = currentInstruction[3].u.operand;
+
+ JumpList storeResult;
+ JumpList genericCase;
+
+ genericCase.append(getSlowCase(iter)); // tags not equal
+
+ linkSlowCase(iter); // tags equal and JSCell
+ genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
+ genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));
+
+ // String case.
+ JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
+ stubCallEqStrings.addArgument(regT0);
+ stubCallEqStrings.addArgument(regT2);
+ stubCallEqStrings.call();
+ storeResult.append(jump());
+
+ // Generic case.
+ genericCase.append(getSlowCase(iter)); // doubles
+ genericCase.link(this);
+ JITStubCall stubCallEq(this, cti_op_eq);
+ stubCallEq.addArgument(op1);
+ stubCallEq.addArgument(op2);
+ stubCallEq.call(regT0);
+
+ storeResult.link(this);
+ or32(Imm32(JSValue::FalseTag), regT0);
+ emitStoreBool(dst, regT0);
+}
+
+void JIT::emit_op_neq(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src1 = currentInstruction[2].u.operand;
+ unsigned src2 = currentInstruction[3].u.operand;
+
+ emitLoad2(src1, regT1, regT0, src2, regT3, regT2);
+ addSlowCase(branch32(NotEqual, regT1, regT3));
+ addSlowCase(branch32(Equal, regT1, Imm32(JSValue::CellTag)));
+ addSlowCase(branch32(Below, regT1, Imm32(JSValue::LowestTag)));
+
+ set8(NotEqual, regT0, regT2, regT0);
+ or32(Imm32(JSValue::FalseTag), regT0);
+
+ emitStoreBool(dst, regT0);
+}
+
+void JIT::emitSlow_op_neq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+
+ JumpList storeResult;
+ JumpList genericCase;
+
+ genericCase.append(getSlowCase(iter)); // tags not equal
+
+ linkSlowCase(iter); // tags equal and JSCell
+ genericCase.append(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr)));
+ genericCase.append(branchPtr(NotEqual, Address(regT2), ImmPtr(m_globalData->jsStringVPtr)));
+
+ // String case.
+ JITStubCall stubCallEqStrings(this, cti_op_eq_strings);
+ stubCallEqStrings.addArgument(regT0);
+ stubCallEqStrings.addArgument(regT2);
+ stubCallEqStrings.call(regT0);
+ storeResult.append(jump());
+
+ // Generic case.
+ genericCase.append(getSlowCase(iter)); // doubles
+ genericCase.link(this);
+ JITStubCall stubCallEq(this, cti_op_eq);
+ stubCallEq.addArgument(regT1, regT0);
+ stubCallEq.addArgument(regT3, regT2);
+ stubCallEq.call(regT0);
+
+ storeResult.link(this);
+ xor32(Imm32(0x1), regT0);
+ or32(Imm32(JSValue::FalseTag), regT0);
+ emitStoreBool(dst, regT0);
+}
+
+void JIT::compileOpStrictEq(Instruction* currentInstruction, CompileOpStrictEqType type)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src1 = currentInstruction[2].u.operand;
+ unsigned src2 = currentInstruction[3].u.operand;
+
+ emitLoadTag(src1, regT0);
+ emitLoadTag(src2, regT1);
+
+ // Jump to a slow case if either operand is double, or if both operands are
+ // cells and/or Int32s.
+ move(regT0, regT2);
+ and32(regT1, regT2);
+ addSlowCase(branch32(Below, regT2, Imm32(JSValue::LowestTag)));
+ addSlowCase(branch32(AboveOrEqual, regT2, Imm32(JSValue::CellTag)));
+
+ if (type == OpStrictEq)
+ set8(Equal, regT0, regT1, regT0);
+ else
+ set8(NotEqual, regT0, regT1, regT0);
+
+ or32(Imm32(JSValue::FalseTag), regT0);
+
+ emitStoreBool(dst, regT0);
+}
+
+void JIT::emit_op_stricteq(Instruction* currentInstruction)
+{
+ compileOpStrictEq(currentInstruction, OpStrictEq);
+}
+
+void JIT::emitSlow_op_stricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src1 = currentInstruction[2].u.operand;
+ unsigned src2 = currentInstruction[3].u.operand;
+
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_stricteq);
+ stubCall.addArgument(src1);
+ stubCall.addArgument(src2);
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_nstricteq(Instruction* currentInstruction)
+{
+ compileOpStrictEq(currentInstruction, OpNStrictEq);
+}
+
+void JIT::emitSlow_op_nstricteq(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src1 = currentInstruction[2].u.operand;
+ unsigned src2 = currentInstruction[3].u.operand;
+
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_nstricteq);
+ stubCall.addArgument(src1);
+ stubCall.addArgument(src2);
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_eq_null(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src = currentInstruction[2].u.operand;
+
+ emitLoad(src, regT1, regT0);
+ Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
+
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
+ setTest8(NonZero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
+
+ Jump wasNotImmediate = jump();
+
+ isImmediate.link(this);
+
+ set8(Equal, regT1, Imm32(JSValue::NullTag), regT2);
+ set8(Equal, regT1, Imm32(JSValue::UndefinedTag), regT1);
+ or32(regT2, regT1);
+
+ wasNotImmediate.link(this);
+
+ or32(Imm32(JSValue::FalseTag), regT1);
+
+ emitStoreBool(dst, regT1);
+}
+
+void JIT::emit_op_neq_null(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned src = currentInstruction[2].u.operand;
+
+ emitLoad(src, regT1, regT0);
+ Jump isImmediate = branch32(NotEqual, regT1, Imm32(JSValue::CellTag));
+
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT1);
+ setTest8(Zero, Address(regT1, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(MasqueradesAsUndefined), regT1);
+
+ Jump wasNotImmediate = jump();
+
+ isImmediate.link(this);
+
+ set8(NotEqual, regT1, Imm32(JSValue::NullTag), regT2);
+ set8(NotEqual, regT1, Imm32(JSValue::UndefinedTag), regT1);
+ and32(regT2, regT1);
+
+ wasNotImmediate.link(this);
+
+ or32(Imm32(JSValue::FalseTag), regT1);
+
+ emitStoreBool(dst, regT1);
+}
+
+void JIT::emit_op_resolve_with_base(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_resolve_with_base);
+ stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[3].u.operand)));
+ stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
+ stubCall.call(currentInstruction[2].u.operand);
+}
+
+void JIT::emit_op_new_func_exp(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_new_func_exp);
+ stubCall.addArgument(ImmPtr(m_codeBlock->functionExpr(currentInstruction[2].u.operand)));
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_throw(Instruction* currentInstruction)
+{
+ unsigned exception = currentInstruction[1].u.operand;
+ JITStubCall stubCall(this, cti_op_throw);
+ stubCall.addArgument(exception);
+ stubCall.call();
+
+#ifndef NDEBUG
+ // cti_op_throw always changes its return address,
+ // so this point in the code should never be reached.
+ breakpoint();
+#endif
+}
+
+void JIT::emit_op_get_pnames(Instruction* currentInstruction)
+{
+ int dst = currentInstruction[1].u.operand;
+ int base = currentInstruction[2].u.operand;
+ int i = currentInstruction[3].u.operand;
+ int size = currentInstruction[4].u.operand;
+ int breakTarget = currentInstruction[5].u.operand;
+
+ JumpList isNotObject;
+
+ emitLoad(base, regT1, regT0);
+ if (!m_codeBlock->isKnownNotImmediate(base))
+ isNotObject.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+ if (base != m_codeBlock->thisRegister()) {
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
+ isNotObject.append(branch8(NotEqual, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_type)), Imm32(ObjectType)));
+ }
+
+ // We could inline the case where you have a valid cache, but
+ // this call doesn't seem to be hot.
+ Label isObject(this);
+ JITStubCall getPnamesStubCall(this, cti_op_get_pnames);
+ getPnamesStubCall.addArgument(regT0);
+ getPnamesStubCall.call(dst);
+ load32(Address(regT0, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStringsSize)), regT3);
+ store32(Imm32(0), addressFor(i));
+ store32(regT3, addressFor(size));
+ Jump end = jump();
+
+ isNotObject.link(this);
+ addJump(branch32(Equal, regT1, Imm32(JSValue::NullTag)), breakTarget);
+ addJump(branch32(Equal, regT1, Imm32(JSValue::UndefinedTag)), breakTarget);
+ JITStubCall toObjectStubCall(this, cti_to_object);
+ toObjectStubCall.addArgument(regT1, regT0);
+ toObjectStubCall.call(base);
+ jump().linkTo(isObject, this);
+
+ end.link(this);
+}
+
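+// Fast path of op_next_pname in outline: fetch key i from the iterator's cached string
+// vector and bump i, then re-check that the base object's structure and every structure on
+// its prototype chain still match the ones cached when the iterator was created; on any
+// mismatch fall back to cti_has_property to validate the key.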
+void JIT::emit_op_next_pname(Instruction* currentInstruction)
+{
+ int dst = currentInstruction[1].u.operand;
+ int base = currentInstruction[2].u.operand;
+ int i = currentInstruction[3].u.operand;
+ int size = currentInstruction[4].u.operand;
+ int it = currentInstruction[5].u.operand;
+ int target = currentInstruction[6].u.operand;
+
+ JumpList callHasProperty;
+
+ Label begin(this);
+ load32(addressFor(i), regT0);
+ Jump end = branch32(Equal, regT0, addressFor(size));
+
+ // Grab key @ i
+ loadPtr(addressFor(it), regT1);
+ loadPtr(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_jsStrings)), regT2);
+ load32(BaseIndex(regT2, regT0, TimesEight), regT2);
+ store32(Imm32(JSValue::CellTag), tagFor(dst));
+ store32(regT2, payloadFor(dst));
+
+ // Increment i
+ add32(Imm32(1), regT0);
+ store32(regT0, addressFor(i));
+
+ // Verify that i is valid:
+ loadPtr(addressFor(base), regT0);
+
+ // Test base's structure
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
+ callHasProperty.append(branchPtr(NotEqual, regT2, Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure)))));
+
+ // Test base's prototype chain
+ loadPtr(Address(Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedPrototypeChain))), regT3);
+ loadPtr(Address(regT3, OBJECT_OFFSETOF(StructureChain, m_vector)), regT3);
+ addJump(branchTestPtr(Zero, Address(regT3)), target);
+
+ Label checkPrototype(this);
+ callHasProperty.append(branch32(Equal, Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.tag)), Imm32(JSValue::NullTag)));
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(Structure, m_prototype) + OBJECT_OFFSETOF(JSValue, u.asBits.payload)), regT2);
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
+ callHasProperty.append(branchPtr(NotEqual, regT2, Address(regT3)));
+ addPtr(Imm32(sizeof(Structure*)), regT3);
+ branchTestPtr(NonZero, Address(regT3)).linkTo(checkPrototype, this);
+
+ // Continue loop.
+ addJump(jump(), target);
+
+ // Slow case: Ask the object if i is valid.
+ callHasProperty.link(this);
+ loadPtr(addressFor(dst), regT1);
+ JITStubCall stubCall(this, cti_has_property);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(regT1);
+ stubCall.call();
+
+ // Test for valid key.
+ addJump(branchTest32(NonZero, regT0), target);
+ jump().linkTo(begin, this);
+
+ // End of loop.
+ end.link(this);
+}
+
+void JIT::emit_op_push_scope(Instruction* currentInstruction)
+{
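+ // No inline fast path: defer entirely to the runtime stub and write the resulting scope object back into the same virtual register.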
+ JITStubCall stubCall(this, cti_op_push_scope);
+ stubCall.addArgument(currentInstruction[1].u.operand);
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_pop_scope(Instruction*)
+{
+ JITStubCall(this, cti_op_pop_scope).call();
+}
+
+void JIT::emit_op_to_jsnumber(Instruction* currentInstruction)
+{
+ int dst = currentInstruction[1].u.operand;
+ int src = currentInstruction[2].u.operand;
+
+ emitLoad(src, regT1, regT0);
+
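+ // Int32 and double tags are already numeric and pass through unchanged; any other tag (cell, boolean, null, undefined) takes the slow case.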
+ Jump isInt32 = branch32(Equal, regT1, Imm32(JSValue::Int32Tag));
+ addSlowCase(branch32(AboveOrEqual, regT1, Imm32(JSValue::EmptyValueTag)));
+ isInt32.link(this);
+
+ if (src != dst)
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_to_jsnumber), dst, regT1, regT0);
+}
+
+void JIT::emitSlow_op_to_jsnumber(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ int dst = currentInstruction[1].u.operand;
+
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_to_jsnumber);
+ stubCall.addArgument(regT1, regT0);
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_push_new_scope(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_push_new_scope);
+ stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(currentInstruction[2].u.operand)));
+ stubCall.addArgument(currentInstruction[3].u.operand);
+ stubCall.call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_catch(Instruction* currentInstruction)
+{
+ unsigned exception = currentInstruction[1].u.operand;
+
+ // This opcode only executes after a return from cti_op_throw.
+
+ // cti_op_throw may have taken us to a call frame further up the stack; reload
+ // the call frame pointer to adjust.
+ peek(callFrameRegister, OBJECT_OFFSETOF(struct JITStackFrame, callFrame) / sizeof(void*));
+
+ // Now store the exception returned by cti_op_throw.
+ emitStore(exception, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_catch), exception, regT1, regT0);
+}
+
+void JIT::emit_op_jmp_scopes(Instruction* currentInstruction)
+{
+ JITStubCall stubCall(this, cti_op_jmp_scopes);
+ stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
+ stubCall.call();
+ addJump(jump(), currentInstruction[2].u.operand);
+}
+
+void JIT::emit_op_switch_imm(Instruction* currentInstruction)
+{
+ unsigned tableIndex = currentInstruction[1].u.operand;
+ unsigned defaultOffset = currentInstruction[2].u.operand;
+ unsigned scrutinee = currentInstruction[3].u.operand;
+
+ // Create a jump table for the switch destinations and record this switch statement for later linking.
+ SimpleJumpTable* jumpTable = &m_codeBlock->immediateSwitchJumpTable(tableIndex);
+ m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Immediate));
+ jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
+
+ JITStubCall stubCall(this, cti_op_switch_imm);
+ stubCall.addArgument(scrutinee);
+ stubCall.addArgument(Imm32(tableIndex));
+ stubCall.call();
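+ // The stub returns the machine-code address of the matching case (or the default), so jump to it indirectly.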
+ jump(regT0);
+}
+
+void JIT::emit_op_switch_char(Instruction* currentInstruction)
+{
+ unsigned tableIndex = currentInstruction[1].u.operand;
+ unsigned defaultOffset = currentInstruction[2].u.operand;
+ unsigned scrutinee = currentInstruction[3].u.operand;
+
+ // Create a jump table for the switch destinations and record this switch statement for later linking.
+ SimpleJumpTable* jumpTable = &m_codeBlock->characterSwitchJumpTable(tableIndex);
+ m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset, SwitchRecord::Character));
+ jumpTable->ctiOffsets.grow(jumpTable->branchOffsets.size());
+
+ JITStubCall stubCall(this, cti_op_switch_char);
+ stubCall.addArgument(scrutinee);
+ stubCall.addArgument(Imm32(tableIndex));
+ stubCall.call();
+ jump(regT0);
+}
+
+void JIT::emit_op_switch_string(Instruction* currentInstruction)
+{
+ unsigned tableIndex = currentInstruction[1].u.operand;
+ unsigned defaultOffset = currentInstruction[2].u.operand;
+ unsigned scrutinee = currentInstruction[3].u.operand;
+
+ // Create a jump table for the switch destinations and record this switch statement for later linking.
+ StringJumpTable* jumpTable = &m_codeBlock->stringSwitchJumpTable(tableIndex);
+ m_switches.append(SwitchRecord(jumpTable, m_bytecodeIndex, defaultOffset));
+
+ JITStubCall stubCall(this, cti_op_switch_string);
+ stubCall.addArgument(scrutinee);
+ stubCall.addArgument(Imm32(tableIndex));
+ stubCall.call();
+ jump(regT0);
+}
+
+void JIT::emit_op_new_error(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned type = currentInstruction[2].u.operand;
+ unsigned message = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_new_error);
+ stubCall.addArgument(Imm32(type));
+ stubCall.addArgument(m_codeBlock->getConstant(message));
+ stubCall.addArgument(Imm32(m_bytecodeIndex));
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_debug(Instruction* currentInstruction)
+{
+#if ENABLE(DEBUG_WITH_BREAKPOINT)
+ UNUSED_PARAM(currentInstruction);
+ breakpoint();
+#else
+ JITStubCall stubCall(this, cti_op_debug);
+ stubCall.addArgument(Imm32(currentInstruction[1].u.operand));
+ stubCall.addArgument(Imm32(currentInstruction[2].u.operand));
+ stubCall.addArgument(Imm32(currentInstruction[3].u.operand));
+ stubCall.call();
+#endif
+}
+
+void JIT::emit_op_enter(Instruction*)
+{
+ // Even though JIT code doesn't use them, we initialize our constant
+ // registers to zap stale pointers, to avoid unnecessarily prolonging
+ // object lifetime and increasing GC pressure.
+ for (int i = 0; i < m_codeBlock->m_numVars; ++i)
+ emitStore(i, jsUndefined());
+}
+
+void JIT::emit_op_enter_with_activation(Instruction* currentInstruction)
+{
+ emit_op_enter(currentInstruction);
+
+ JITStubCall(this, cti_op_push_activation).call(currentInstruction[1].u.operand);
+}
+
+void JIT::emit_op_create_arguments(Instruction*)
+{
+ Jump argsCreated = branch32(NotEqual, tagFor(RegisterFile::ArgumentsRegister, callFrameRegister), Imm32(JSValue::EmptyValueTag));
+
+ // If we get here the arguments pointer is a null cell - i.e. arguments need lazy creation.
+ if (m_codeBlock->m_numParameters == 1)
+ JITStubCall(this, cti_op_create_arguments_no_params).call();
+ else
+ JITStubCall(this, cti_op_create_arguments).call();
+
+ argsCreated.link(this);
+}
+
+void JIT::emit_op_init_arguments(Instruction*)
+{
+ emitStore(RegisterFile::ArgumentsRegister, JSValue(), callFrameRegister);
+}
+
+void JIT::emit_op_convert_this(Instruction* currentInstruction)
+{
+ unsigned thisRegister = currentInstruction[1].u.operand;
+
+ emitLoad(thisRegister, regT1, regT0);
+
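+ // Non-cells and cells whose Structure is flagged NeedsThisConversion take the slow case, which calls cti_op_convert_this.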
+ addSlowCase(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), regT2);
+ addSlowCase(branchTest8(NonZero, Address(regT2, OBJECT_OFFSETOF(Structure, m_typeInfo.m_flags)), Imm32(NeedsThisConversion)));
+
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_convert_this), thisRegister, regT1, regT0);
+}
+
+void JIT::emitSlow_op_convert_this(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned thisRegister = currentInstruction[1].u.operand;
+
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_convert_this);
+ stubCall.addArgument(regT1, regT0);
+ stubCall.call(thisRegister);
+}
+
+void JIT::emit_op_profile_will_call(Instruction* currentInstruction)
+{
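+ // enabledProfilerReference points at the active Profiler*, so skip the stub call entirely when no profiler is installed.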
+ peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
+ Jump noProfiler = branchTestPtr(Zero, Address(regT2));
+
+ JITStubCall stubCall(this, cti_op_profile_will_call);
+ stubCall.addArgument(currentInstruction[1].u.operand);
+ stubCall.call();
+ noProfiler.link(this);
+}
+
+void JIT::emit_op_profile_did_call(Instruction* currentInstruction)
+{
+ peek(regT2, OBJECT_OFFSETOF(JITStackFrame, enabledProfilerReference) / sizeof(void*));
+ Jump noProfiler = branchTestPtr(Zero, Address(regT2));
+
+ JITStubCall stubCall(this, cti_op_profile_did_call);
+ stubCall.addArgument(currentInstruction[1].u.operand);
+ stubCall.call();
+ noProfiler.link(this);
+}
+
+}
+
+#endif // ENABLE(JIT) && USE(JSVALUE32_64)
*/
#include "config.h"
-#include "JIT.h"
-
-#if ENABLE(JIT)
-
-#include "CodeBlock.h"
-#include "JITInlineMethods.h"
-#include "JITStubCall.h"
-#include "JSArray.h"
-#include "JSFunction.h"
-#include "JSPropertyNameIterator.h"
-#include "Interpreter.h"
-#include "LinkBuffer.h"
-#include "RepatchBuffer.h"
-#include "ResultType.h"
-#include "SamplingTool.h"
-
-#ifndef NDEBUG
-#include <stdio.h>
-#endif
-
-using namespace std;
-
-namespace JSC {
-
-#if USE(JSVALUE32_64)
-
-void JIT::emit_op_put_by_index(Instruction* currentInstruction)
-{
- unsigned base = currentInstruction[1].u.operand;
- unsigned property = currentInstruction[2].u.operand;
- unsigned value = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_put_by_index);
- stubCall.addArgument(base);
- stubCall.addArgument(Imm32(property));
- stubCall.addArgument(value);
- stubCall.call();
-}
-
-void JIT::emit_op_put_getter(Instruction* currentInstruction)
-{
- unsigned base = currentInstruction[1].u.operand;
- unsigned property = currentInstruction[2].u.operand;
- unsigned function = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_put_getter);
- stubCall.addArgument(base);
- stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
- stubCall.addArgument(function);
- stubCall.call();
-}
-
-void JIT::emit_op_put_setter(Instruction* currentInstruction)
-{
- unsigned base = currentInstruction[1].u.operand;
- unsigned property = currentInstruction[2].u.operand;
- unsigned function = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_put_setter);
- stubCall.addArgument(base);
- stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
- stubCall.addArgument(function);
- stubCall.call();
-}
-
-void JIT::emit_op_del_by_id(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned base = currentInstruction[2].u.operand;
- unsigned property = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_del_by_id);
- stubCall.addArgument(base);
- stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
- stubCall.call(dst);
-}
-
-
-#if !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
-
-/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
-
-// Treat these as nops - the call will be handed as a regular get_by_id/op_call pair.
-void JIT::emit_op_method_check(Instruction*) {}
-void JIT::emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&) { ASSERT_NOT_REACHED(); }
-#if ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
-#error "JIT_OPTIMIZE_METHOD_CALLS requires JIT_OPTIMIZE_PROPERTY_ACCESS"
-#endif
-
-void JIT::emit_op_get_by_val(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned base = currentInstruction[2].u.operand;
- unsigned property = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_get_by_val);
- stubCall.addArgument(base);
- stubCall.addArgument(property);
- stubCall.call(dst);
-}
-
-void JIT::emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&)
-{
- ASSERT_NOT_REACHED();
-}
-
-void JIT::emit_op_put_by_val(Instruction* currentInstruction)
-{
- unsigned base = currentInstruction[1].u.operand;
- unsigned property = currentInstruction[2].u.operand;
- unsigned value = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_put_by_val);
- stubCall.addArgument(base);
- stubCall.addArgument(property);
- stubCall.addArgument(value);
- stubCall.call();
-}
-
-void JIT::emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&)
-{
- ASSERT_NOT_REACHED();
-}
-
-void JIT::emit_op_get_by_id(Instruction* currentInstruction)
-{
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
- int ident = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_get_by_id_generic);
- stubCall.addArgument(base);
- stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
- stubCall.call(dst);
-
- m_propertyAccessInstructionIndex++;
-}
-
-void JIT::emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&)
-{
- m_propertyAccessInstructionIndex++;
- ASSERT_NOT_REACHED();
-}
-
-void JIT::emit_op_put_by_id(Instruction* currentInstruction)
-{
- int base = currentInstruction[1].u.operand;
- int ident = currentInstruction[2].u.operand;
- int value = currentInstruction[3].u.operand;
-
- JITStubCall stubCall(this, cti_op_put_by_id_generic);
- stubCall.addArgument(base);
- stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
- stubCall.addArgument(value);
- stubCall.call();
-
- m_propertyAccessInstructionIndex++;
-}
-
-void JIT::emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&)
-{
- m_propertyAccessInstructionIndex++;
- ASSERT_NOT_REACHED();
-}
-
-#else // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
-
-/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
-
-#if ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
-
-void JIT::emit_op_method_check(Instruction* currentInstruction)
-{
- // Assert that the following instruction is a get_by_id.
- ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);
-
- currentInstruction += OPCODE_LENGTH(op_method_check);
-
- // Do the method check - check the object & its prototype's structure inline (this is the common case).
- m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_propertyAccessInstructionIndex));
- MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
-
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
-
- emitLoad(base, regT1, regT0);
- emitJumpSlowCaseIfNotJSCell(base, regT1);
-
- BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
-
- Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), info.structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
- DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(ImmPtr(0), regT2);
- Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), protoStructureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
-
- // This will be relinked to load the function without doing a load.
- DataLabelPtr putFunction = moveWithPatch(ImmPtr(0), regT0);
-
- END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
-
- move(Imm32(JSValue::CellTag), regT1);
- Jump match = jump();
-
- ASSERT(differenceBetween(info.structureToCompare, protoObj) == patchOffsetMethodCheckProtoObj);
- ASSERT(differenceBetween(info.structureToCompare, protoStructureToCompare) == patchOffsetMethodCheckProtoStruct);
- ASSERT(differenceBetween(info.structureToCompare, putFunction) == patchOffsetMethodCheckPutFunction);
-
- // Link the failure cases here.
- structureCheck.link(this);
- protoStructureCheck.link(this);
-
- // Do a regular(ish) get_by_id (the slow case will be link to
- // cti_op_get_by_id_method_check instead of cti_op_get_by_id.
- compileGetByIdHotPath();
-
- match.link(this);
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_method_check), dst, regT1, regT0);
-
- // We've already generated the following get_by_id, so make sure it's skipped over.
- m_bytecodeIndex += OPCODE_LENGTH(op_get_by_id);
-}
-
-void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- currentInstruction += OPCODE_LENGTH(op_method_check);
-
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
- int ident = currentInstruction[3].u.operand;
-
- compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
-
- // We've already generated the following get_by_id, so make sure it's skipped over.
- m_bytecodeIndex += OPCODE_LENGTH(op_get_by_id);
-}
-
-#else //!ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
-
-// Treat these as nops - the call will be handed as a regular get_by_id/op_call pair.
-void JIT::emit_op_method_check(Instruction*) {}
-void JIT::emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&) { ASSERT_NOT_REACHED(); }
-
-#endif
-
-void JIT::emit_op_get_by_val(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned base = currentInstruction[2].u.operand;
- unsigned property = currentInstruction[3].u.operand;
-
- emitLoad2(base, regT1, regT0, property, regT3, regT2);
-
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- emitJumpSlowCaseIfNotJSCell(base, regT1);
- addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr)));
-
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT3);
- addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, OBJECT_OFFSETOF(JSArray, m_vectorLength))));
-
- load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), regT1); // tag
- load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0); // payload
- addSlowCase(branch32(Equal, regT1, Imm32(JSValue::EmptyValueTag)));
-
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
-}
-
-void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned base = currentInstruction[2].u.operand;
- unsigned property = currentInstruction[3].u.operand;
-
- linkSlowCase(iter); // property int32 check
- linkSlowCaseIfNotJSCell(iter, base); // base cell check
- linkSlowCase(iter); // base array check
- linkSlowCase(iter); // vector length check
- linkSlowCase(iter); // empty value
-
- JITStubCall stubCall(this, cti_op_get_by_val);
- stubCall.addArgument(base);
- stubCall.addArgument(property);
- stubCall.call(dst);
-}
-
-void JIT::emit_op_put_by_val(Instruction* currentInstruction)
-{
- unsigned base = currentInstruction[1].u.operand;
- unsigned property = currentInstruction[2].u.operand;
- unsigned value = currentInstruction[3].u.operand;
-
- emitLoad2(base, regT1, regT0, property, regT3, regT2);
-
- addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
- emitJumpSlowCaseIfNotJSCell(base, regT1);
- addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr)));
- addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, OBJECT_OFFSETOF(JSArray, m_vectorLength))));
-
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT3);
-
- Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), Imm32(JSValue::EmptyValueTag));
-
- Label storeResult(this);
- emitLoad(value, regT1, regT0);
- store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))); // payload
- store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4)); // tag
- Jump end = jump();
-
- empty.link(this);
- add32(Imm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
- branch32(Below, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);
-
- add32(Imm32(1), regT2, regT0);
- store32(regT0, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)));
- jump().linkTo(storeResult, this);
-
- end.link(this);
-}
-
-void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned base = currentInstruction[1].u.operand;
- unsigned property = currentInstruction[2].u.operand;
- unsigned value = currentInstruction[3].u.operand;
-
- linkSlowCase(iter); // property int32 check
- linkSlowCaseIfNotJSCell(iter, base); // base cell check
- linkSlowCase(iter); // base not array check
- linkSlowCase(iter); // in vector check
-
- JITStubCall stubPutByValCall(this, cti_op_put_by_val);
- stubPutByValCall.addArgument(base);
- stubPutByValCall.addArgument(property);
- stubPutByValCall.addArgument(value);
- stubPutByValCall.call();
-}
-
-void JIT::emit_op_get_by_id(Instruction* currentInstruction)
-{
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
-
- emitLoad(base, regT1, regT0);
- emitJumpSlowCaseIfNotJSCell(base, regT1);
- compileGetByIdHotPath();
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
-}
-
-void JIT::compileGetByIdHotPath()
-{
- // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
- // Additionally, for get_by_id we need patch the offset of the branch to the slow case (we patch this to jump
- // to array-length / prototype access tranpolines, and finally we also the the property-map access offset as a label
- // to jump back to if one of these trampolies finds a match.
-
- BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
-
- Label hotPathBegin(this);
- m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;
- m_propertyAccessInstructionIndex++;
-
- DataLabelPtr structureToCompare;
- Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
- addSlowCase(structureCheck);
- ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetGetByIdStructure);
- ASSERT(differenceBetween(hotPathBegin, structureCheck) == patchOffsetGetByIdBranchToSlowCase);
-
- Label externalLoad = loadPtrWithPatchToLEA(Address(regT0, OBJECT_OFFSETOF(JSObject, m_externalStorage)), regT2);
- Label externalLoadComplete(this);
- ASSERT(differenceBetween(hotPathBegin, externalLoad) == patchOffsetGetByIdExternalLoad);
- ASSERT(differenceBetween(externalLoad, externalLoadComplete) == patchLengthGetByIdExternalLoad);
-
- DataLabel32 displacementLabel1 = loadPtrWithAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
- ASSERT(differenceBetween(hotPathBegin, displacementLabel1) == patchOffsetGetByIdPropertyMapOffset1);
- DataLabel32 displacementLabel2 = loadPtrWithAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
- ASSERT(differenceBetween(hotPathBegin, displacementLabel2) == patchOffsetGetByIdPropertyMapOffset2);
-
- Label putResult(this);
- ASSERT(differenceBetween(hotPathBegin, putResult) == patchOffsetGetByIdPutResult);
-
- END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
-}
-
-void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- int dst = currentInstruction[1].u.operand;
- int base = currentInstruction[2].u.operand;
- int ident = currentInstruction[3].u.operand;
-
- compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
-}
-
-void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
-{
- // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
- // so that we only need track one pointer into the slow case code - we track a pointer to the location
- // of the call (which we can use to look up the patch information), but should a array-length or
- // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
- // the distance from the call to the head of the slow case.
- linkSlowCaseIfNotJSCell(iter, base);
- linkSlowCase(iter);
-
- BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
-
-#ifndef NDEBUG
- Label coldPathBegin(this);
-#endif
- JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
- stubCall.addArgument(regT1, regT0);
- stubCall.addArgument(ImmPtr(ident));
- Call call = stubCall.call(dst);
-
- END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
-
- ASSERT(differenceBetween(coldPathBegin, call) == patchOffsetGetByIdSlowCaseCall);
-
- // Track the location of the call; this will be used to recover patch information.
- m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].callReturnLocation = call;
- m_propertyAccessInstructionIndex++;
-}
-
-void JIT::emit_op_put_by_id(Instruction* currentInstruction)
-{
- // In order to be able to patch both the Structure, and the object offset, we store one pointer,
- // to just after the arguments have been loaded into registers 'hotPathBegin', and we generate code
- // such that the Structure & offset are always at the same distance from this.
-
- int base = currentInstruction[1].u.operand;
- int value = currentInstruction[3].u.operand;
-
- emitLoad2(base, regT1, regT0, value, regT3, regT2);
-
- emitJumpSlowCaseIfNotJSCell(base, regT1);
-
- BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
-
- Label hotPathBegin(this);
- m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;
- m_propertyAccessInstructionIndex++;
-
- // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
- DataLabelPtr structureToCompare;
- addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
- ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetPutByIdStructure);
-
- // Plant a load from a bogus ofset in the object's property map; we will patch this later, if it is to be used.
- Label externalLoad = loadPtrWithPatchToLEA(Address(regT0, OBJECT_OFFSETOF(JSObject, m_externalStorage)), regT0);
- Label externalLoadComplete(this);
- ASSERT(differenceBetween(hotPathBegin, externalLoad) == patchOffsetPutByIdExternalLoad);
- ASSERT(differenceBetween(externalLoad, externalLoadComplete) == patchLengthPutByIdExternalLoad);
-
- DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT0, patchGetByIdDefaultOffset)); // payload
- DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT0, patchGetByIdDefaultOffset)); // tag
-
- END_UNINTERRUPTED_SEQUENCE(sequencePutById);
-
- ASSERT(differenceBetween(hotPathBegin, displacementLabel1) == patchOffsetPutByIdPropertyMapOffset1);
- ASSERT(differenceBetween(hotPathBegin, displacementLabel2) == patchOffsetPutByIdPropertyMapOffset2);
-}
-
-void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- int base = currentInstruction[1].u.operand;
- int ident = currentInstruction[2].u.operand;
-
- linkSlowCaseIfNotJSCell(iter, base);
- linkSlowCase(iter);
-
- JITStubCall stubCall(this, cti_op_put_by_id);
- stubCall.addArgument(regT1, regT0);
- stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
- stubCall.addArgument(regT3, regT2);
- Call call = stubCall.call();
-
- // Track the location of the call; this will be used to recover patch information.
- m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].callReturnLocation = call;
- m_propertyAccessInstructionIndex++;
-}
-
-// Compile a store into an object's property storage. May overwrite base.
-void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset)
-{
- int offset = cachedOffset;
- if (structure->isUsingInlineStorage())
- offset += OBJECT_OFFSETOF(JSObject, m_inlineStorage) / sizeof(Register);
- else
- loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
- emitStore(offset, valueTag, valuePayload, base);
-}
-
-// Compile a load from an object's property storage. May overwrite base.
-void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset)
-{
- int offset = cachedOffset;
- if (structure->isUsingInlineStorage())
- offset += OBJECT_OFFSETOF(JSObject, m_inlineStorage) / sizeof(Register);
- else
- loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
- emitLoad(offset, resultTag, resultPayload, base);
-}
-
-void JIT::compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
-{
- if (base->isUsingInlineStorage()) {
- load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]), resultPayload);
- load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]) + 4, resultTag);
- return;
- }
-
- size_t offset = cachedOffset * sizeof(JSValue);
-
- PropertyStorage* protoPropertyStorage = &base->m_externalStorage;
- loadPtr(static_cast<void*>(protoPropertyStorage), temp);
- load32(Address(temp, offset), resultPayload);
- load32(Address(temp, offset + 4), resultTag);
-}
-
-void JIT::testPrototype(Structure* structure, JumpList& failureCases)
-{
- if (structure->m_prototype.isNull())
- return;
-
- failureCases.append(branchPtr(NotEqual, AbsoluteAddress(&asCell(structure->m_prototype)->m_structure), ImmPtr(asCell(structure->m_prototype)->m_structure)));
-}
-
-void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
-{
- // It is assumed that regT0 contains the basePayload and regT1 contains the baseTag. The value can be found on the stack.
-
- JumpList failureCases;
- failureCases.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
- failureCases.append(branchPtr(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(oldStructure)));
- testPrototype(oldStructure, failureCases);
-
- // Verify that nothing in the prototype chain has a setter for this property.
- for (RefPtr<Structure>* it = chain->head(); *it; ++it)
- testPrototype(it->get(), failureCases);
-
- // Reallocate property storage if needed.
- Call callTarget;
- bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
- if (willNeedStorageRealloc) {
- // This trampoline was called to like a JIT stub; before we can can call again we need to
- // remove the return address from the stack, to prevent the stack from becoming misaligned.
- preserveReturnAddressAfterCall(regT3);
-
- JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
- stubCall.skipArgument(); // base
- stubCall.skipArgument(); // ident
- stubCall.skipArgument(); // value
- stubCall.addArgument(Imm32(oldStructure->propertyStorageCapacity()));
- stubCall.addArgument(Imm32(newStructure->propertyStorageCapacity()));
- stubCall.call(regT0);
-
- restoreReturnAddressBeforeReturn(regT3);
- }
-
- sub32(Imm32(1), AbsoluteAddress(oldStructure->addressOfCount()));
- add32(Imm32(1), AbsoluteAddress(newStructure->addressOfCount()));
- storePtr(ImmPtr(newStructure), Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)));
-
- load32(Address(stackPointerRegister, offsetof(struct JITStackFrame, args[2]) + sizeof(void*)), regT3);
- load32(Address(stackPointerRegister, offsetof(struct JITStackFrame, args[2]) + sizeof(void*) + 4), regT2);
-
- // Write the value
- compilePutDirectOffset(regT0, regT2, regT3, newStructure, cachedOffset);
-
- ret();
-
- ASSERT(!failureCases.empty());
- failureCases.link(this);
- restoreArgumentReferenceForTrampoline();
- Call failureCall = tailRecursiveCall();
-
- LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
-
- patchBuffer.link(failureCall, FunctionPtr(cti_op_put_by_id_fail));
-
- if (willNeedStorageRealloc) {
- ASSERT(m_calls.size() == 1);
- patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
- }
-
- CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
- stubInfo->stubRoutine = entryLabel;
- RepatchBuffer repatchBuffer(m_codeBlock);
- repatchBuffer.relinkCallerToTrampoline(returnAddress, entryLabel);
-}
-
-void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
-{
- RepatchBuffer repatchBuffer(codeBlock);
-
- // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
- // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
- repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
-
- int offset = sizeof(JSValue) * cachedOffset;
-
- // If we're patching to use inline storage, convert the initial load to a lea; this avoids the extra load
- // and makes the subsequent load's offset automatically correct
- if (structure->isUsingInlineStorage())
- repatchBuffer.repatchLoadPtrToLEA(stubInfo->hotPathBegin.instructionAtOffset(patchOffsetGetByIdExternalLoad));
-
- // Patch the offset into the propoerty map to load from, then patch the Structure to look for.
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset1), offset); // payload
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset2), offset + 4); // tag
-}
-
-void JIT::patchMethodCallProto(CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, JSFunction* callee, Structure* structure, JSObject* proto, ReturnAddressPtr returnAddress)
-{
- RepatchBuffer repatchBuffer(codeBlock);
-
- ASSERT(!methodCallLinkInfo.cachedStructure);
- methodCallLinkInfo.cachedStructure = structure;
- structure->ref();
-
- Structure* prototypeStructure = proto->structure();
- ASSERT(!methodCallLinkInfo.cachedPrototypeStructure);
- methodCallLinkInfo.cachedPrototypeStructure = prototypeStructure;
- prototypeStructure->ref();
-
- repatchBuffer.repatch(methodCallLinkInfo.structureLabel, structure);
- repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoObj), proto);
- repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoStruct), prototypeStructure);
- repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckPutFunction), callee);
-
- repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id));
-}
-
-void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
-{
- RepatchBuffer repatchBuffer(codeBlock);
-
- // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
- // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
- repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_put_by_id_generic));
-
- int offset = sizeof(JSValue) * cachedOffset;
-
- // If we're patching to use inline storage, convert the initial load to a lea; this avoids the extra load
- // and makes the subsequent load's offset automatically correct
- if (structure->isUsingInlineStorage())
- repatchBuffer.repatchLoadPtrToLEA(stubInfo->hotPathBegin.instructionAtOffset(patchOffsetPutByIdExternalLoad));
-
- // Patch the offset into the propoerty map to load from, then patch the Structure to look for.
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), structure);
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), offset); // payload
- repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), offset + 4); // tag
-}
-
-void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
-{
- StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
-
- // regT0 holds a JSCell*
-
- // Check for array
- Jump failureCases1 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr));
-
- // Checks out okay! - get the length from the storage
- loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT2);
- load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
-
- Jump failureCases2 = branch32(Above, regT2, Imm32(INT_MAX));
- move(regT2, regT0);
- move(Imm32(JSValue::Int32Tag), regT1);
- Jump success = jump();
-
- LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
-
- // Use the patch information to link the failure cases back to the original slow case routine.
- CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
- patchBuffer.link(failureCases1, slowCaseBegin);
- patchBuffer.link(failureCases2, slowCaseBegin);
-
- // On success return back to the hot patch code, at a point it will perform the store to dest for us.
- patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
-
- // Track the stub we have created so that it will be deleted later.
- CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
- stubInfo->stubRoutine = entryLabel;
-
- // Finally patch the jump to slow case back in the hot path to jump here instead.
- CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer(m_codeBlock);
- repatchBuffer.relink(jumpLocation, entryLabel);
-
- // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
- repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
-}
-
-void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
-{
- // regT0 holds a JSCell*
-
- // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
- // referencing the prototype object - let's speculatively load it's table nice and early!)
- JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
-
- Jump failureCases1 = checkStructure(regT0, structure);
-
- // Check the prototype object's Structure had not changed.
- Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if CPU(X86_64)
- move(ImmPtr(prototypeStructure), regT3);
- Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
-#else
- Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
-#endif
-
- // Checks out okay! - getDirectOffset
- compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
-
- Jump success = jump();
-
- LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
-
- // Use the patch information to link the failure cases back to the original slow case routine.
- CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
- patchBuffer.link(failureCases1, slowCaseBegin);
- patchBuffer.link(failureCases2, slowCaseBegin);
-
- // On success return back to the hot patch code, at a point it will perform the store to dest for us.
- patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
-
- // Track the stub we have created so that it will be deleted later.
- CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
- stubInfo->stubRoutine = entryLabel;
-
- // Finally patch the jump to slow case back in the hot path to jump here instead.
- CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer(m_codeBlock);
- repatchBuffer.relink(jumpLocation, entryLabel);
-
- // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
- repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
-}
-
-
-void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
-{
- // regT0 holds a JSCell*
-
- Jump failureCase = checkStructure(regT0, structure);
- compileGetDirectOffset(regT0, regT1, regT0, structure, cachedOffset);
- Jump success = jump();
-
- LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
-
- // Use the patch information to link the failure cases back to the original slow case routine.
- CodeLocationLabel lastProtoBegin = polymorphicStructures->list[currentIndex - 1].stubRoutine;
- if (!lastProtoBegin)
- lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
-
- patchBuffer.link(failureCase, lastProtoBegin);
-
- // On success return back to the hot patch code, at a point it will perform the store to dest for us.
- patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
-
- CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
- structure->ref();
- polymorphicStructures->list[currentIndex].set(entryLabel, structure);
+#if !USE(JSVALUE32_64)
- // Finally patch the jump to slow case back in the hot path to jump here instead.
- CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer(m_codeBlock);
- repatchBuffer.relink(jumpLocation, entryLabel);
-}
+#include "JIT.h"
-void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame)
-{
- // regT0 holds a JSCell*
-
- // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
- // referencing the prototype object - let's speculatively load it's table nice and early!)
- JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
+#if ENABLE(JIT)
- // Check eax is an object of the right Structure.
- Jump failureCases1 = checkStructure(regT0, structure);
+#include "CodeBlock.h"
+#include "GetterSetter.h"
+#include "JITInlineMethods.h"
+#include "JITStubCall.h"
+#include "JSArray.h"
+#include "JSFunction.h"
+#include "JSPropertyNameIterator.h"
+#include "Interpreter.h"
+#include "LinkBuffer.h"
+#include "RepatchBuffer.h"
+#include "ResultType.h"
+#include "SamplingTool.h"
- // Check the prototype object's Structure had not changed.
- Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if CPU(X86_64)
- move(ImmPtr(prototypeStructure), regT3);
- Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
-#else
- Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
+#ifndef NDEBUG
+#include <stdio.h>
#endif
- compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
-
- Jump success = jump();
-
- LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
-
- // Use the patch information to link the failure cases back to the original slow case routine.
- CodeLocationLabel lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
- patchBuffer.link(failureCases1, lastProtoBegin);
- patchBuffer.link(failureCases2, lastProtoBegin);
-
- // On success return back to the hot patch code, at a point it will perform the store to dest for us.
- patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
-
- CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
-
- structure->ref();
- prototypeStructure->ref();
- prototypeStructures->list[currentIndex].set(entryLabel, structure, prototypeStructure);
+using namespace std;
- // Finally patch the jump to slow case back in the hot path to jump here instead.
- CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer(m_codeBlock);
- repatchBuffer.relink(jumpLocation, entryLabel);
-}
+namespace JSC {
-void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame)
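+// Shared thunk for get_by_val on string bases: fetches the character at the requested index and returns the cached single-character JSString, or 0 if the generic path must handle the access.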
+PassRefPtr<NativeExecutable> JIT::stringGetByValStubGenerator(JSGlobalData* globalData, ExecutablePool* pool)
{
- // regT0 holds a JSCell*
-
- ASSERT(count);
-
- JumpList bucketsOfFail;
-
- // Check eax is an object of the right Structure.
- bucketsOfFail.append(checkStructure(regT0, structure));
-
- Structure* currStructure = structure;
- RefPtr<Structure>* chainEntries = chain->head();
- JSObject* protoObject = 0;
- for (unsigned i = 0; i < count; ++i) {
- protoObject = asObject(currStructure->prototypeForLookup(callFrame));
- currStructure = chainEntries[i].get();
-
- // Check the prototype object's Structure had not changed.
- Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if CPU(X86_64)
- move(ImmPtr(currStructure), regT3);
- bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
+ JSInterfaceJIT jit;
+ JumpList failures;
+ failures.append(jit.branchPtr(NotEqual, Address(regT0), ImmPtr(globalData->jsStringVPtr)));
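+ // Rope strings (non-zero m_fiberCount) have no contiguous character buffer, so they take the generic path.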
+ failures.append(jit.branchTest32(NonZero, Address(regT0, OBJECT_OFFSETOF(JSString, m_fiberCount))));
+#if USE(JSVALUE64)
+ jit.zeroExtend32ToPtr(regT1, regT1);
#else
- bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
+ jit.emitFastArithImmToInt(regT1);
#endif
- }
- ASSERT(protoObject);
-
- compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
- Jump success = jump();
-
- LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
-
- // Use the patch information to link the failure cases back to the original slow case routine.
- CodeLocationLabel lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
-
- patchBuffer.link(bucketsOfFail, lastProtoBegin);
-
- // On success return back to the hot patch code, at a point it will perform the store to dest for us.
- patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
-
- CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
-
- // Track the stub we have created so that it will be deleted later.
- structure->ref();
- chain->ref();
- prototypeStructures->list[currentIndex].set(entryLabel, structure, chain);
- // Finally patch the jump to slow case back in the hot path to jump here instead.
- CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer(m_codeBlock);
- repatchBuffer.relink(jumpLocation, entryLabel);
-}
-
-void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
-{
- // regT0 holds a JSCell*
+ // Load the string length into regT2, and start the process of loading the data pointer into regT0
+ jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT2);
+ jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
+ jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
- ASSERT(count);
+ // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
+ failures.append(jit.branch32(AboveOrEqual, regT1, regT2));
- JumpList bucketsOfFail;
-
- // Check eax is an object of the right Structure.
- bucketsOfFail.append(checkStructure(regT0, structure));
-
- Structure* currStructure = structure;
- RefPtr<Structure>* chainEntries = chain->head();
- JSObject* protoObject = 0;
- for (unsigned i = 0; i < count; ++i) {
- protoObject = asObject(currStructure->prototypeForLookup(callFrame));
- currStructure = chainEntries[i].get();
-
- // Check the prototype object's Structure had not changed.
- Structure** prototypeStructureAddress = &(protoObject->m_structure);
-#if CPU(X86_64)
- move(ImmPtr(currStructure), regT3);
- bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
-#else
- bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
-#endif
- }
- ASSERT(protoObject);
-
- compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
- Jump success = jump();
-
- LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
-
- // Use the patch information to link the failure cases back to the original slow case routine.
- patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
-
- // On success return back to the hot patch code, at a point it will perform the store to dest for us.
- patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
-
- // Track the stub we have created so that it will be deleted later.
- CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
- stubInfo->stubRoutine = entryLabel;
-
- // Finally patch the jump to slow case back in the hot path to jump here instead.
- CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
- RepatchBuffer repatchBuffer(m_codeBlock);
- repatchBuffer.relink(jumpLocation, entryLabel);
-
- // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
- repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
-}
-
-/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
-
-#endif // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
-
-void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID structure, RegisterID offset)
-{
- ASSERT(sizeof(((Structure*)0)->m_propertyStorageCapacity) == sizeof(int32_t));
- ASSERT(sizeof(JSObject::inlineStorageCapacity) == sizeof(int32_t));
- ASSERT(sizeof(JSValue) == 8);
-
- Jump notUsingInlineStorage = branch32(NotEqual, Address(structure, OBJECT_OFFSETOF(Structure, m_propertyStorageCapacity)), Imm32(JSObject::inlineStorageCapacity));
- loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage)+OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
- loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage)+OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
- Jump finishedLoad = jump();
- notUsingInlineStorage.link(this);
- loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
- loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
- loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
- finishedLoad.link(this);
-}
-
-void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned base = currentInstruction[2].u.operand;
- unsigned property = currentInstruction[3].u.operand;
- unsigned expected = currentInstruction[4].u.operand;
- unsigned iter = currentInstruction[5].u.operand;
- unsigned i = currentInstruction[6].u.operand;
-
- emitLoad2(property, regT1, regT0, base, regT3, regT2);
- emitJumpSlowCaseIfNotJSCell(property, regT1);
- addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
- // Property registers are now available as the property is known
- emitJumpSlowCaseIfNotJSCell(base, regT3);
- emitLoadPayload(iter, regT1);
+ // Load the character
+ jit.load16(BaseIndex(regT0, regT1, TimesTwo, 0), regT0);
- // Test base's structure
- loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
- addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
- load32(addressFor(i), regT3);
- sub32(Imm32(1), regT3);
- addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
- compileGetDirectOffset(regT2, regT1, regT0, regT0, regT3);
-
- emitStore(dst, regT1, regT0);
- map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
-}
-
-void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
-{
- unsigned dst = currentInstruction[1].u.operand;
- unsigned base = currentInstruction[2].u.operand;
- unsigned property = currentInstruction[3].u.operand;
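+ // Only characters below 0x100 are cached as single-character strings; anything larger bails out, otherwise the JSString is fetched from the shared small-strings table.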
+ failures.append(jit.branch32(AboveOrEqual, regT0, Imm32(0x100)));
+ jit.move(ImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
+ jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
+ jit.ret();
- linkSlowCaseIfNotJSCell(iter, property);
- linkSlowCase(iter);
- linkSlowCaseIfNotJSCell(iter, base);
- linkSlowCase(iter);
- linkSlowCase(iter);
+ failures.link(&jit);
+ jit.move(Imm32(0), regT0);
+ jit.ret();
- JITStubCall stubCall(this, cti_op_get_by_val);
- stubCall.addArgument(base);
- stubCall.addArgument(property);
- stubCall.call(dst);
+ LinkBuffer patchBuffer(&jit, pool);
+ return adoptRef(new NativeExecutable(patchBuffer.finalizeCode()));
}
-#else // USE(JSVALUE32_64)
-
void JIT::emit_op_get_by_val(Instruction* currentInstruction)
{
unsigned dst = currentInstruction[1].u.operand;
emitPutVirtualRegister(dst);
}
+void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+
+ linkSlowCase(iter); // property int32 check
+ linkSlowCaseIfNotJSCell(iter, base); // base cell check
+ Jump nonCell = jump();
+ linkSlowCase(iter); // base array check
+ Jump notString = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
+ emitNakedCall(m_globalData->getThunk(stringGetByValStubGenerator)->generatedJITCode().addressForCall());
+ Jump failed = branchTestPtr(Zero, regT0);
+ emitPutVirtualRegister(dst, regT0);
+ emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
+ failed.link(this);
+ notString.link(this);
+ nonCell.link(this);
+
+ linkSlowCase(iter); // vector length check
+ linkSlowCase(iter); // empty value
+
+ JITStubCall stubCall(this, cti_op_get_by_val);
+ stubCall.addArgument(base, regT2);
+ stubCall.addArgument(property, regT2);
+ stubCall.call(dst);
+}
+
void JIT::compileGetDirectOffset(RegisterID base, RegisterID result, RegisterID structure, RegisterID offset, RegisterID scratch)
{
ASSERT(sizeof(((Structure*)0)->m_propertyStorageCapacity) == sizeof(int32_t));
unsigned baseVReg = currentInstruction[1].u.operand;
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
unsigned valueVReg = currentInstruction[3].u.operand;
+ unsigned direct = currentInstruction[8].u.operand;
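+ // Operand 8 carries the direct flag: direct puts store straight onto the base object and use the _direct variants of the put_by_id stubs.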
emitGetVirtualRegisters(baseVReg, regT0, valueVReg, regT1);
- JITStubCall stubCall(this, cti_op_put_by_id_generic);
+ JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic);
stubCall.addArgument(regT0);
stubCall.addArgument(ImmPtr(ident));
stubCall.addArgument(regT1);
{
unsigned baseVReg = currentInstruction[1].u.operand;
Identifier* ident = &(m_codeBlock->identifier(currentInstruction[2].u.operand));
+ unsigned direct = currentInstruction[8].u.operand;
unsigned propertyAccessInstructionIndex = m_propertyAccessInstructionIndex++;
linkSlowCaseIfNotJSCell(iter, baseVReg);
linkSlowCase(iter);
- JITStubCall stubCall(this, cti_op_put_by_id);
+ JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
stubCall.addArgument(regT0);
stubCall.addArgument(ImmPtr(ident));
stubCall.addArgument(regT1);
failureCases.append(branchPtr(NotEqual, Address(regT2), regT3));
}
-void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress)
+void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
{
JumpList failureCases;
// Check eax is an object of the right Structure.
testPrototype(oldStructure, failureCases);
// ecx = baseObject->m_structure
- for (RefPtr<Structure>* it = chain->head(); *it; ++it)
- testPrototype(it->get(), failureCases);
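+ // Direct puts define the property on the base object itself, so there is no need to check the prototype chain for setters.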
+ if (!direct) {
+ for (RefPtr<Structure>* it = chain->head(); *it; ++it)
+ testPrototype(it->get(), failureCases);
+ }
Call callTarget;
LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
- patchBuffer.link(failureCall, FunctionPtr(cti_op_put_by_id_fail));
+ patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
if (willNeedStorageRealloc) {
ASSERT(m_calls.size() == 1);
structure->ref();
Structure* prototypeStructure = proto->structure();
- ASSERT(!methodCallLinkInfo.cachedPrototypeStructure);
methodCallLinkInfo.cachedPrototypeStructure = prototypeStructure;
prototypeStructure->ref();
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id));
}
-void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
+void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
{
RepatchBuffer repatchBuffer(codeBlock);
// We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
// Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
- repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
int offset = sizeof(JSValue) * cachedOffset;
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
}
-void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
+void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
// The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
// referencing the prototype object - let's speculatively load its table nice and early!)
Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
#endif
- // Checks out okay! - getDirectOffset
- compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
-
+ bool needsStubLink = false;
+
+ // Checks out okay!
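+ // For cached getters and custom accessors we cannot simply load a property slot; instead we emit a
+ // call back into the runtime (cti_op_get_by_id_getter_stub / cti_op_get_by_id_custom_stub). Setting
+ // needsStubLink makes the LinkBuffer pass below link those recorded calls explicitly.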
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ compileGetDirectOffset(protoObject, regT1, regT1, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(ImmPtr(protoObject));
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
Jump success = jump();
-
LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
// Use the patch information to link the failure cases back to the original slow case routine.
// On success return back to the hot patch code, at a point it will perform the store to dest for us.
patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
// Track the stub we have created so that it will be deleted later.
CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
stubInfo->stubRoutine = entryLabel;
repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
}
-void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, size_t cachedOffset)
+void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
{
Jump failureCase = checkStructure(regT0, structure);
- compileGetDirectOffset(regT0, regT0, structure, cachedOffset);
+ bool needsStubLink = false;
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ if (!structure->isUsingInlineStorage()) {
+ move(regT0, regT1);
+ compileGetDirectOffset(regT1, regT1, structure, cachedOffset);
+ } else
+ compileGetDirectOffset(regT0, regT1, structure, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(regT0, regT0, structure, cachedOffset);
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
+
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel lastProtoBegin = polymorphicStructures->list[currentIndex - 1].stubRoutine;
if (!lastProtoBegin)
repatchBuffer.relink(jumpLocation, entryLabel);
}
-void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, size_t cachedOffset, CallFrame* callFrame)
+void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
// The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
// referencing the prototype object - let's speculatively load its table nice and early!)
Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
#endif
- // Checks out okay! - getDirectOffset
- compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
+ // Checks out okay!
+ bool needsStubLink = false;
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ compileGetDirectOffset(protoObject, regT1, regT1, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(ImmPtr(protoObject));
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
+
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
patchBuffer.link(failureCases1, lastProtoBegin);
repatchBuffer.relink(jumpLocation, entryLabel);
}
-void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, CallFrame* callFrame)
+void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
{
ASSERT(count);
-
JumpList bucketsOfFail;
// Check eax is an object of the right Structure.
#endif
}
ASSERT(protoObject);
-
- compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
+
+ bool needsStubLink = false;
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ compileGetDirectOffset(protoObject, regT1, regT1, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(ImmPtr(protoObject));
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
// Use the patch information to link the failure cases back to the original slow case routine.
CodeLocationLabel lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
repatchBuffer.relink(jumpLocation, entryLabel);
}
-void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
+void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
{
ASSERT(count);
-
+
JumpList bucketsOfFail;
// Check eax is an object of the right Structure.
}
ASSERT(protoObject);
- compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
+ bool needsStubLink = false;
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ compileGetDirectOffset(protoObject, regT1, regT1, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(ImmPtr(protoObject));
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(protoObject, regT1, regT0, cachedOffset);
Jump success = jump();
LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
+
// Use the patch information to link the failure cases back to the original slow case routine.
patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
#endif // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
-#endif // USE(JSVALUE32_64)
-
} // namespace JSC
#endif // ENABLE(JIT)
+
+#endif // !USE(JSVALUE32_64)
--- /dev/null
+/*
+ * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+
+#if USE(JSVALUE32_64)
+
+#include "JIT.h"
+
+#if ENABLE(JIT)
+
+#include "CodeBlock.h"
+#include "JITInlineMethods.h"
+#include "JITStubCall.h"
+#include "JSArray.h"
+#include "JSFunction.h"
+#include "JSPropertyNameIterator.h"
+#include "Interpreter.h"
+#include "LinkBuffer.h"
+#include "RepatchBuffer.h"
+#include "ResultType.h"
+#include "SamplingTool.h"
+
+#ifndef NDEBUG
+#include <stdio.h>
+#endif
+
+using namespace std;
+
+namespace JSC {
+
+void JIT::emit_op_put_by_index(Instruction* currentInstruction)
+{
+ unsigned base = currentInstruction[1].u.operand;
+ unsigned property = currentInstruction[2].u.operand;
+ unsigned value = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_put_by_index);
+ stubCall.addArgument(base);
+ stubCall.addArgument(Imm32(property));
+ stubCall.addArgument(value);
+ stubCall.call();
+}
+
+void JIT::emit_op_put_getter(Instruction* currentInstruction)
+{
+ unsigned base = currentInstruction[1].u.operand;
+ unsigned property = currentInstruction[2].u.operand;
+ unsigned function = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_put_getter);
+ stubCall.addArgument(base);
+ stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
+ stubCall.addArgument(function);
+ stubCall.call();
+}
+
+void JIT::emit_op_put_setter(Instruction* currentInstruction)
+{
+ unsigned base = currentInstruction[1].u.operand;
+ unsigned property = currentInstruction[2].u.operand;
+ unsigned function = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_put_setter);
+ stubCall.addArgument(base);
+ stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
+ stubCall.addArgument(function);
+ stubCall.call();
+}
+
+void JIT::emit_op_del_by_id(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_del_by_id);
+ stubCall.addArgument(base);
+ stubCall.addArgument(ImmPtr(&m_codeBlock->identifier(property)));
+ stubCall.call(dst);
+}
+
+
+#if !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
+
+/* ------------------------------ BEGIN: !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
+
+// Treat these as nops - the call will be handled as a regular get_by_id/op_call pair.
+void JIT::emit_op_method_check(Instruction*) {}
+void JIT::emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&) { ASSERT_NOT_REACHED(); }
+#if ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
+#error "JIT_OPTIMIZE_METHOD_CALLS requires JIT_OPTIMIZE_PROPERTY_ACCESS"
+#endif
+
+void JIT::emit_op_get_by_val(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_get_by_val);
+ stubCall.addArgument(base);
+ stubCall.addArgument(property);
+ stubCall.call(dst);
+}
+
+void JIT::emitSlow_op_get_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&)
+{
+ ASSERT_NOT_REACHED();
+}
+
+void JIT::emit_op_put_by_val(Instruction* currentInstruction)
+{
+ unsigned base = currentInstruction[1].u.operand;
+ unsigned property = currentInstruction[2].u.operand;
+ unsigned value = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_put_by_val);
+ stubCall.addArgument(base);
+ stubCall.addArgument(property);
+ stubCall.addArgument(value);
+ stubCall.call();
+}
+
+void JIT::emitSlow_op_put_by_val(Instruction*, Vector<SlowCaseEntry>::iterator&)
+{
+ ASSERT_NOT_REACHED();
+}
+
+void JIT::emit_op_get_by_id(Instruction* currentInstruction)
+{
+ int dst = currentInstruction[1].u.operand;
+ int base = currentInstruction[2].u.operand;
+ int ident = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_get_by_id_generic);
+ stubCall.addArgument(base);
+ stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
+ stubCall.call(dst);
+
+ m_propertyAccessInstructionIndex++;
+}
+
+void JIT::emitSlow_op_get_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&)
+{
+ m_propertyAccessInstructionIndex++;
+ ASSERT_NOT_REACHED();
+}
+
+void JIT::emit_op_put_by_id(Instruction* currentInstruction)
+{
+ int base = currentInstruction[1].u.operand;
+ int ident = currentInstruction[2].u.operand;
+ int value = currentInstruction[3].u.operand;
+
+ JITStubCall stubCall(this, cti_op_put_by_id_generic);
+ stubCall.addArgument(base);
+ stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
+ stubCall.addArgument(value);
+ stubCall.call();
+
+ m_propertyAccessInstructionIndex++;
+}
+
+void JIT::emitSlow_op_put_by_id(Instruction*, Vector<SlowCaseEntry>::iterator&)
+{
+ m_propertyAccessInstructionIndex++;
+ ASSERT_NOT_REACHED();
+}
+
+#else // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
+
+/* ------------------------------ BEGIN: ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
+
+#if ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
+
+void JIT::emit_op_method_check(Instruction* currentInstruction)
+{
+ // Assert that the following instruction is a get_by_id.
+ ASSERT(m_interpreter->getOpcodeID((currentInstruction + OPCODE_LENGTH(op_method_check))->u.opcode) == op_get_by_id);
+
+ currentInstruction += OPCODE_LENGTH(op_method_check);
+
+ // Do the method check - check the object & its prototype's structure inline (this is the common case).
+ m_methodCallCompilationInfo.append(MethodCallCompilationInfo(m_propertyAccessInstructionIndex));
+ MethodCallCompilationInfo& info = m_methodCallCompilationInfo.last();
+
+ int dst = currentInstruction[1].u.operand;
+ int base = currentInstruction[2].u.operand;
+
+ emitLoad(base, regT1, regT0);
+ emitJumpSlowCaseIfNotJSCell(base, regT1);
+
+ BEGIN_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
+
+ Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), info.structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
+ DataLabelPtr protoStructureToCompare, protoObj = moveWithPatch(ImmPtr(0), regT2);
+ Jump protoStructureCheck = branchPtrWithPatch(NotEqual, Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), protoStructureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
+
+ // This move is later repatched with the cached callee as an immediate, so the function is materialized without a memory load.
+ DataLabelPtr putFunction = moveWithPatch(ImmPtr(0), regT0);
+
+ END_UNINTERRUPTED_SEQUENCE(sequenceMethodCheck);
+
+ move(Imm32(JSValue::CellTag), regT1);
+ Jump match = jump();
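+ // On a cache hit the patched immediate above has left the callee in regT0 (tagged as a cell via regT1),
+ // and we jump straight to the store below, skipping the inline get_by_id entirely.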
+
+ ASSERT(differenceBetween(info.structureToCompare, protoObj) == patchOffsetMethodCheckProtoObj);
+ ASSERT(differenceBetween(info.structureToCompare, protoStructureToCompare) == patchOffsetMethodCheckProtoStruct);
+ ASSERT(differenceBetween(info.structureToCompare, putFunction) == patchOffsetMethodCheckPutFunction);
+
+ // Link the failure cases here.
+ structureCheck.link(this);
+ protoStructureCheck.link(this);
+
+ // Do a regular(ish) get_by_id (the slow case will be linked to
+ // cti_op_get_by_id_method_check instead of cti_op_get_by_id).
+ compileGetByIdHotPath();
+
+ match.link(this);
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_method_check), dst, regT1, regT0);
+
+ // We've already generated the following get_by_id, so make sure it's skipped over.
+ m_bytecodeIndex += OPCODE_LENGTH(op_get_by_id);
+}
+
+void JIT::emitSlow_op_method_check(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ currentInstruction += OPCODE_LENGTH(op_method_check);
+
+ int dst = currentInstruction[1].u.operand;
+ int base = currentInstruction[2].u.operand;
+ int ident = currentInstruction[3].u.operand;
+
+ compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter, true);
+
+ // We've already generated the following get_by_id, so make sure it's skipped over.
+ m_bytecodeIndex += OPCODE_LENGTH(op_get_by_id);
+}
+
+#else // !ENABLE(JIT_OPTIMIZE_METHOD_CALLS)
+
+// Treat these as nops - the call will be handled as a regular get_by_id/op_call pair.
+void JIT::emit_op_method_check(Instruction*) {}
+void JIT::emitSlow_op_method_check(Instruction*, Vector<SlowCaseEntry>::iterator&) { ASSERT_NOT_REACHED(); }
+
+#endif
+
+PassRefPtr<NativeExecutable> JIT::stringGetByValStubGenerator(JSGlobalData* globalData, ExecutablePool* pool)
+{
+ JSInterfaceJIT jit;
+ JumpList failures;
+ failures.append(jit.branchPtr(NotEqual, Address(regT0), ImmPtr(globalData->jsStringVPtr)));
+ failures.append(jit.branchTest32(NonZero, Address(regT0, OBJECT_OFFSETOF(JSString, m_fiberCount))));
+
+ // Load string length to regT1, and start the process of loading the data pointer into regT0
+ jit.load32(Address(regT0, ThunkHelpers::jsStringLengthOffset()), regT1);
+ jit.loadPtr(Address(regT0, ThunkHelpers::jsStringValueOffset()), regT0);
+ jit.loadPtr(Address(regT0, ThunkHelpers::stringImplDataOffset()), regT0);
+
+ // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
+ failures.append(jit.branch32(AboveOrEqual, regT2, regT1));
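+ // (e.g. an index of -1 reads as 0xFFFFFFFF unsigned, so this single branch rejects it
+ // along with genuinely out-of-range indices)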
+
+ // Load the character
+ jit.load16(BaseIndex(regT0, regT2, TimesTwo, 0), regT0);
+
+ failures.append(jit.branch32(AboveOrEqual, regT0, Imm32(0x100)));
+ jit.move(ImmPtr(globalData->smallStrings.singleCharacterStrings()), regT1);
+ jit.loadPtr(BaseIndex(regT1, regT0, ScalePtr, 0), regT0);
+ jit.move(Imm32(JSValue::CellTag), regT1); // We null check regT0 on return so this is safe
+ jit.ret();
+
+ failures.link(&jit);
+ jit.move(Imm32(0), regT0);
+ jit.ret();
+
+ LinkBuffer patchBuffer(&jit, pool);
+ return adoptRef(new NativeExecutable(patchBuffer.finalizeCode()));
+}
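+
+// A rough, illustrative C-level sketch of what the thunk above does (the helper names here are
+// descriptive only, not real JSC functions):
+//
+//     JSString* stringCharAtThunk(JSGlobalData* globalData, JSString* base, uint32_t index)
+//     {
+//         if (base->vptr != jsStringVPtr || base->isRope())   // vptr + fiber-count checks
+//             return 0;                                       // 0 -> caller falls back to cti_op_get_by_val
+//         if (index >= base->length())                        // unsigned compare also rejects negative indices
+//             return 0;
+//         UChar c = base->characters()[index];
+//         if (c >= 0x100)                                     // only Latin-1 characters have cached cells
+//             return 0;
+//         return globalData->smallStrings.singleCharacterStrings()[c];
+//     }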
+
+void JIT::emit_op_get_by_val(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+
+ emitLoad2(base, regT1, regT0, property, regT3, regT2);
+
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ emitJumpSlowCaseIfNotJSCell(base, regT1);
+ addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr)));
+
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT3);
+ addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, OBJECT_OFFSETOF(JSArray, m_vectorLength))));
+
+ load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), regT1); // tag
+ load32(BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0])), regT0); // payload
+ addSlowCase(branch32(Equal, regT1, Imm32(JSValue::EmptyValueTag)));
+
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_val), dst, regT1, regT0);
+}
+
+void JIT::emitSlow_op_get_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+
+ linkSlowCase(iter); // property int32 check
+ linkSlowCaseIfNotJSCell(iter, base); // base cell check
+
+ Jump nonCell = jump();
+ linkSlowCase(iter); // base array check
+ Jump notString = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsStringVPtr));
+ emitNakedCall(m_globalData->getThunk(stringGetByValStubGenerator)->generatedJITCode().addressForCall());
+ Jump failed = branchTestPtr(Zero, regT0);
+ emitStore(dst, regT1, regT0);
+ emitJumpSlowToHot(jump(), OPCODE_LENGTH(op_get_by_val));
+ failed.link(this);
+ notString.link(this);
+ nonCell.link(this);
+
+ linkSlowCase(iter); // vector length check
+ linkSlowCase(iter); // empty value
+
+ JITStubCall stubCall(this, cti_op_get_by_val);
+ stubCall.addArgument(base);
+ stubCall.addArgument(property);
+ stubCall.call(dst);
+}
+
+void JIT::emit_op_put_by_val(Instruction* currentInstruction)
+{
+ unsigned base = currentInstruction[1].u.operand;
+ unsigned property = currentInstruction[2].u.operand;
+ unsigned value = currentInstruction[3].u.operand;
+
+ emitLoad2(base, regT1, regT0, property, regT3, regT2);
+
+ addSlowCase(branch32(NotEqual, regT3, Imm32(JSValue::Int32Tag)));
+ emitJumpSlowCaseIfNotJSCell(base, regT1);
+ addSlowCase(branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr)));
+ addSlowCase(branch32(AboveOrEqual, regT2, Address(regT0, OBJECT_OFFSETOF(JSArray, m_vectorLength))));
+
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT3);
+
+ Jump empty = branch32(Equal, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4), Imm32(JSValue::EmptyValueTag));
+
+ Label storeResult(this);
+ emitLoad(value, regT1, regT0);
+ store32(regT0, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]))); // payload
+ store32(regT1, BaseIndex(regT3, regT2, TimesEight, OBJECT_OFFSETOF(ArrayStorage, m_vector[0]) + 4)); // tag
+ Jump end = jump();
+
+ empty.link(this);
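+ // Writing into a hole: bump the in-vector value count and, if the index is at or beyond m_length,
+ // grow m_length to index + 1, then loop back to the store above.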
+ add32(Imm32(1), Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_numValuesInVector)));
+ branch32(Below, regT2, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length))).linkTo(storeResult, this);
+
+ add32(Imm32(1), regT2, regT0);
+ store32(regT0, Address(regT3, OBJECT_OFFSETOF(ArrayStorage, m_length)));
+ jump().linkTo(storeResult, this);
+
+ end.link(this);
+}
+
+void JIT::emitSlow_op_put_by_val(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned base = currentInstruction[1].u.operand;
+ unsigned property = currentInstruction[2].u.operand;
+ unsigned value = currentInstruction[3].u.operand;
+
+ linkSlowCase(iter); // property int32 check
+ linkSlowCaseIfNotJSCell(iter, base); // base cell check
+ linkSlowCase(iter); // base not array check
+ linkSlowCase(iter); // in vector check
+
+ JITStubCall stubPutByValCall(this, cti_op_put_by_val);
+ stubPutByValCall.addArgument(base);
+ stubPutByValCall.addArgument(property);
+ stubPutByValCall.addArgument(value);
+ stubPutByValCall.call();
+}
+
+void JIT::emit_op_get_by_id(Instruction* currentInstruction)
+{
+ int dst = currentInstruction[1].u.operand;
+ int base = currentInstruction[2].u.operand;
+
+ emitLoad(base, regT1, regT0);
+ emitJumpSlowCaseIfNotJSCell(base, regT1);
+ compileGetByIdHotPath();
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_id), dst, regT1, regT0);
+}
+
+void JIT::compileGetByIdHotPath()
+{
+ // As for put_by_id, get_by_id requires the offset of the Structure and the offset of the access to be patched.
+ // Additionally, for get_by_id we need to patch the offset of the branch to the slow case (we patch this to jump
+ // to array-length / prototype access trampolines), and finally we also use the property-map access offset as a label
+ // to jump back to if one of these trampolines finds a match.
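+ //
+ // Conceptually (illustrative only), the fully patched fast path behaves like:
+ //
+ //     if (base->m_structure != cachedStructure)      // immediate patched over structureToCompare
+ //         goto stubOrSlowCase;                       // branch later relinked to a generated stub
+ //     payload = storage[cachedOffset];               // offsets patched over displacementLabel1/2
+ //     tag     = storage[cachedOffset + 4];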
+
+ BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
+
+ Label hotPathBegin(this);
+ m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;
+ m_propertyAccessInstructionIndex++;
+
+ DataLabelPtr structureToCompare;
+ Jump structureCheck = branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure)));
+ addSlowCase(structureCheck);
+ ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetGetByIdStructure);
+ ASSERT(differenceBetween(hotPathBegin, structureCheck) == patchOffsetGetByIdBranchToSlowCase);
+
+ Label externalLoad = loadPtrWithPatchToLEA(Address(regT0, OBJECT_OFFSETOF(JSObject, m_externalStorage)), regT2);
+ Label externalLoadComplete(this);
+ ASSERT(differenceBetween(hotPathBegin, externalLoad) == patchOffsetGetByIdExternalLoad);
+ ASSERT(differenceBetween(externalLoad, externalLoadComplete) == patchLengthGetByIdExternalLoad);
+
+ DataLabel32 displacementLabel1 = loadPtrWithAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT0); // payload
+ ASSERT(differenceBetween(hotPathBegin, displacementLabel1) == patchOffsetGetByIdPropertyMapOffset1);
+ DataLabel32 displacementLabel2 = loadPtrWithAddressOffsetPatch(Address(regT2, patchGetByIdDefaultOffset), regT1); // tag
+ ASSERT(differenceBetween(hotPathBegin, displacementLabel2) == patchOffsetGetByIdPropertyMapOffset2);
+
+ Label putResult(this);
+ ASSERT(differenceBetween(hotPathBegin, putResult) == patchOffsetGetByIdPutResult);
+
+ END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdHotPath);
+}
+
+void JIT::emitSlow_op_get_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ int dst = currentInstruction[1].u.operand;
+ int base = currentInstruction[2].u.operand;
+ int ident = currentInstruction[3].u.operand;
+
+ compileGetByIdSlowCase(dst, base, &(m_codeBlock->identifier(ident)), iter);
+}
+
+void JIT::compileGetByIdSlowCase(int dst, int base, Identifier* ident, Vector<SlowCaseEntry>::iterator& iter, bool isMethodCheck)
+{
+ // As for the hot path of get_by_id, above, we ensure that we can use an architecture specific offset
+ // so that we only need to track one pointer into the slow case code - we track a pointer to the location
+ // of the call (which we can use to look up the patch information), but should an array-length or
+ // prototype access trampoline fail we want to bail out back to here. To do so we can subtract back
+ // the distance from the call to the head of the slow case.
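+ // (concretely: slowCaseBegin == callReturnLocation - patchOffsetGetByIdSlowCaseCall, which is how the
+ // stub compilers below recover the slow-case entry point when linking their failure cases)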
+ linkSlowCaseIfNotJSCell(iter, base);
+ linkSlowCase(iter);
+
+ BEGIN_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
+
+#ifndef NDEBUG
+ Label coldPathBegin(this);
+#endif
+ JITStubCall stubCall(this, isMethodCheck ? cti_op_get_by_id_method_check : cti_op_get_by_id);
+ stubCall.addArgument(regT1, regT0);
+ stubCall.addArgument(ImmPtr(ident));
+ Call call = stubCall.call(dst);
+
+ END_UNINTERRUPTED_SEQUENCE(sequenceGetByIdSlowCase);
+
+ ASSERT(differenceBetween(coldPathBegin, call) == patchOffsetGetByIdSlowCaseCall);
+
+ // Track the location of the call; this will be used to recover patch information.
+ m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].callReturnLocation = call;
+ m_propertyAccessInstructionIndex++;
+}
+
+void JIT::emit_op_put_by_id(Instruction* currentInstruction)
+{
+ // In order to be able to patch both the Structure and the object offset, we store one pointer,
+ // 'hotPathBegin', to just after the point where the arguments have been loaded into registers, and we
+ // generate code such that the Structure & offset are always at the same distance from it.
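+ // (the ASSERT(differenceBetween(hotPathBegin, ...)) checks below verify these fixed distances, e.g.
+ // structureToCompare is always at hotPathBegin + patchOffsetPutByIdStructure)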
+
+ int base = currentInstruction[1].u.operand;
+ int value = currentInstruction[3].u.operand;
+
+ emitLoad2(base, regT1, regT0, value, regT3, regT2);
+
+ emitJumpSlowCaseIfNotJSCell(base, regT1);
+
+ BEGIN_UNINTERRUPTED_SEQUENCE(sequencePutById);
+
+ Label hotPathBegin(this);
+ m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].hotPathBegin = hotPathBegin;
+ m_propertyAccessInstructionIndex++;
+
+ // It is important that the following instruction plants a 32bit immediate, in order that it can be patched over.
+ DataLabelPtr structureToCompare;
+ addSlowCase(branchPtrWithPatch(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), structureToCompare, ImmPtr(reinterpret_cast<void*>(patchGetByIdDefaultStructure))));
+ ASSERT(differenceBetween(hotPathBegin, structureToCompare) == patchOffsetPutByIdStructure);
+
+ // Plant a load from a bogus offset in the object's property map; we will patch this later, if it is to be used.
+ Label externalLoad = loadPtrWithPatchToLEA(Address(regT0, OBJECT_OFFSETOF(JSObject, m_externalStorage)), regT0);
+ Label externalLoadComplete(this);
+ ASSERT(differenceBetween(hotPathBegin, externalLoad) == patchOffsetPutByIdExternalLoad);
+ ASSERT(differenceBetween(externalLoad, externalLoadComplete) == patchLengthPutByIdExternalLoad);
+
+ DataLabel32 displacementLabel1 = storePtrWithAddressOffsetPatch(regT2, Address(regT0, patchGetByIdDefaultOffset)); // payload
+ DataLabel32 displacementLabel2 = storePtrWithAddressOffsetPatch(regT3, Address(regT0, patchGetByIdDefaultOffset)); // tag
+
+ END_UNINTERRUPTED_SEQUENCE(sequencePutById);
+
+ ASSERT(differenceBetween(hotPathBegin, displacementLabel1) == patchOffsetPutByIdPropertyMapOffset1);
+ ASSERT(differenceBetween(hotPathBegin, displacementLabel2) == patchOffsetPutByIdPropertyMapOffset2);
+}
+
+void JIT::emitSlow_op_put_by_id(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ int base = currentInstruction[1].u.operand;
+ int ident = currentInstruction[2].u.operand;
+ int direct = currentInstruction[8].u.operand;
+
+ linkSlowCaseIfNotJSCell(iter, base);
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, direct ? cti_op_put_by_id_direct : cti_op_put_by_id);
+ stubCall.addArgument(regT1, regT0);
+ stubCall.addArgument(ImmPtr(&(m_codeBlock->identifier(ident))));
+ stubCall.addArgument(regT3, regT2);
+ Call call = stubCall.call();
+
+ // Track the location of the call; this will be used to recover patch information.
+ m_propertyAccessCompilationInfo[m_propertyAccessInstructionIndex].callReturnLocation = call;
+ m_propertyAccessInstructionIndex++;
+}
+
+// Compile a store into an object's property storage. May overwrite base.
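+// With inline storage the slot lives directly inside the JSObject (m_inlineStorage), so the cached offset
+// is simply rebased onto the object; otherwise the external storage pointer is loaded first, clobbering 'base'.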
+void JIT::compilePutDirectOffset(RegisterID base, RegisterID valueTag, RegisterID valuePayload, Structure* structure, size_t cachedOffset)
+{
+ int offset = cachedOffset;
+ if (structure->isUsingInlineStorage())
+ offset += OBJECT_OFFSETOF(JSObject, m_inlineStorage) / sizeof(Register);
+ else
+ loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
+ emitStore(offset, valueTag, valuePayload, base);
+}
+
+// Compile a load from an object's property storage. May overwrite base.
+void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, Structure* structure, size_t cachedOffset)
+{
+ int offset = cachedOffset;
+ if (structure->isUsingInlineStorage())
+ offset += OBJECT_OFFSETOF(JSObject, m_inlineStorage) / sizeof(Register);
+ else
+ loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
+ emitLoad(offset, resultTag, resultPayload, base);
+}
+
+void JIT::compileGetDirectOffset(JSObject* base, RegisterID temp, RegisterID resultTag, RegisterID resultPayload, size_t cachedOffset)
+{
+ if (base->isUsingInlineStorage()) {
+ load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]), resultPayload);
+ load32(reinterpret_cast<char*>(&base->m_inlineStorage[cachedOffset]) + 4, resultTag);
+ return;
+ }
+
+ size_t offset = cachedOffset * sizeof(JSValue);
+
+ PropertyStorage* protoPropertyStorage = &base->m_externalStorage;
+ loadPtr(static_cast<void*>(protoPropertyStorage), temp);
+ load32(Address(temp, offset), resultPayload);
+ load32(Address(temp, offset + 4), resultTag);
+}
+
+void JIT::testPrototype(Structure* structure, JumpList& failureCases)
+{
+ if (structure->m_prototype.isNull())
+ return;
+
+ failureCases.append(branchPtr(NotEqual, AbsoluteAddress(&asCell(structure->m_prototype)->m_structure), ImmPtr(asCell(structure->m_prototype)->m_structure)));
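+ // i.e. bail out if this prototype's Structure is no longer the one it had when the stub was compiled;
+ // any shape change along the chain sends execution back to the slow case.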
+}
+
+void JIT::privateCompilePutByIdTransition(StructureStubInfo* stubInfo, Structure* oldStructure, Structure* newStructure, size_t cachedOffset, StructureChain* chain, ReturnAddressPtr returnAddress, bool direct)
+{
+ // It is assumed that regT0 contains the basePayload and regT1 contains the baseTag. The value can be found on the stack.
+
+ JumpList failureCases;
+ failureCases.append(branch32(NotEqual, regT1, Imm32(JSValue::CellTag)));
+ failureCases.append(branchPtr(NotEqual, Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)), ImmPtr(oldStructure)));
+ testPrototype(oldStructure, failureCases);
+
+ if (!direct) {
+ // Verify that nothing in the prototype chain has a setter for this property.
+ for (RefPtr<Structure>* it = chain->head(); *it; ++it)
+ testPrototype(it->get(), failureCases);
+ }
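+ // (for a direct put the property is defined straight on the base object - putDirect semantics - so
+ // setters further down the prototype chain cannot intercept it and the chain walk can be skipped)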
+
+ // Reallocate property storage if needed.
+ Call callTarget;
+ bool willNeedStorageRealloc = oldStructure->propertyStorageCapacity() != newStructure->propertyStorageCapacity();
+ if (willNeedStorageRealloc) {
+ // This trampoline was called like a JIT stub; before we can call again we need to
+ // remove the return address from the stack, to prevent the stack from becoming misaligned.
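+ // (the return address is stashed in regT3 across the realloc stub call and put back by
+ // restoreReturnAddressBeforeReturn before we fall through to the store and ret below)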
+ preserveReturnAddressAfterCall(regT3);
+
+ JITStubCall stubCall(this, cti_op_put_by_id_transition_realloc);
+ stubCall.skipArgument(); // base
+ stubCall.skipArgument(); // ident
+ stubCall.skipArgument(); // value
+ stubCall.addArgument(Imm32(oldStructure->propertyStorageCapacity()));
+ stubCall.addArgument(Imm32(newStructure->propertyStorageCapacity()));
+ stubCall.call(regT0);
+
+ restoreReturnAddressBeforeReturn(regT3);
+ }
+
+ sub32(Imm32(1), AbsoluteAddress(oldStructure->addressOfCount()));
+ add32(Imm32(1), AbsoluteAddress(newStructure->addressOfCount()));
+ storePtr(ImmPtr(newStructure), Address(regT0, OBJECT_OFFSETOF(JSCell, m_structure)));
+
+ load32(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, args[2]) + sizeof(void*)), regT3);
+ load32(Address(stackPointerRegister, OBJECT_OFFSETOF(struct JITStackFrame, args[2]) + sizeof(void*) + 4), regT2);
+
+ // Write the value
+ compilePutDirectOffset(regT0, regT2, regT3, newStructure, cachedOffset);
+
+ ret();
+
+ ASSERT(!failureCases.empty());
+ failureCases.link(this);
+ restoreArgumentReferenceForTrampoline();
+ Call failureCall = tailRecursiveCall();
+
+ LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+
+ patchBuffer.link(failureCall, FunctionPtr(direct ? cti_op_put_by_id_direct_fail : cti_op_put_by_id_fail));
+
+ if (willNeedStorageRealloc) {
+ ASSERT(m_calls.size() == 1);
+ patchBuffer.link(m_calls[0].from, FunctionPtr(cti_op_put_by_id_transition_realloc));
+ }
+
+ CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
+ RepatchBuffer repatchBuffer(m_codeBlock);
+ repatchBuffer.relinkCallerToTrampoline(returnAddress, entryLabel);
+}
+
+void JIT::patchGetByIdSelf(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress)
+{
+ RepatchBuffer repatchBuffer(codeBlock);
+
+ // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
+ // Should probably go to JITStubs::cti_op_get_by_id_fail, but that doesn't do anything interesting right now.
+ repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
+
+ int offset = sizeof(JSValue) * cachedOffset;
+
+ // If we're patching to use inline storage, convert the initial load to a lea; this avoids the extra load
+ // and makes the subsequent load's offset automatically correct
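+ // (with inline storage the slots live at a fixed offset inside the JSObject itself, so the patched
+ // displacements below can index the object directly instead of going through m_externalStorage)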
+ if (structure->isUsingInlineStorage())
+ repatchBuffer.repatchLoadPtrToLEA(stubInfo->hotPathBegin.instructionAtOffset(patchOffsetGetByIdExternalLoad));
+
+ // Patch the offset into the property map to load from, then patch the Structure to look for.
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetGetByIdStructure), structure);
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset1), offset); // payload
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetGetByIdPropertyMapOffset2), offset + 4); // tag
+}
+
+void JIT::patchMethodCallProto(CodeBlock* codeBlock, MethodCallLinkInfo& methodCallLinkInfo, JSFunction* callee, Structure* structure, JSObject* proto, ReturnAddressPtr returnAddress)
+{
+ RepatchBuffer repatchBuffer(codeBlock);
+
+ ASSERT(!methodCallLinkInfo.cachedStructure);
+ methodCallLinkInfo.cachedStructure = structure;
+ structure->ref();
+
+ Structure* prototypeStructure = proto->structure();
+ methodCallLinkInfo.cachedPrototypeStructure = prototypeStructure;
+ prototypeStructure->ref();
+
+ repatchBuffer.repatch(methodCallLinkInfo.structureLabel, structure);
+ repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoObj), proto);
+ repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckProtoStruct), prototypeStructure);
+ repatchBuffer.repatch(methodCallLinkInfo.structureLabel.dataLabelPtrAtOffset(patchOffsetMethodCheckPutFunction), callee);
+
+ repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id));
+}
+
+void JIT::patchPutByIdReplace(CodeBlock* codeBlock, StructureStubInfo* stubInfo, Structure* structure, size_t cachedOffset, ReturnAddressPtr returnAddress, bool direct)
+{
+ RepatchBuffer repatchBuffer(codeBlock);
+
+ // We don't want to patch more than once - in future go to cti_op_put_by_id_generic.
+ // Should probably go to cti_op_put_by_id_fail, but that doesn't do anything interesting right now.
+ repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
+
+ int offset = sizeof(JSValue) * cachedOffset;
+
+ // If we're patching to use inline storage, convert the initial load to a lea; this avoids the extra load
+ // and makes the subsequent load's offset automatically correct
+ if (structure->isUsingInlineStorage())
+ repatchBuffer.repatchLoadPtrToLEA(stubInfo->hotPathBegin.instructionAtOffset(patchOffsetPutByIdExternalLoad));
+
+ // Patch the offset into the property map to load from, then patch the Structure to look for.
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabelPtrAtOffset(patchOffsetPutByIdStructure), structure);
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset1), offset); // payload
+ repatchBuffer.repatch(stubInfo->hotPathBegin.dataLabel32AtOffset(patchOffsetPutByIdPropertyMapOffset2), offset + 4); // tag
+}
+
+void JIT::privateCompilePatchGetArrayLength(ReturnAddressPtr returnAddress)
+{
+ StructureStubInfo* stubInfo = &m_codeBlock->getStubInfo(returnAddress);
+
+ // regT0 holds a JSCell*
+
+ // Check for array
+ Jump failureCases1 = branchPtr(NotEqual, Address(regT0), ImmPtr(m_globalData->jsArrayVPtr));
+
+ // Checks out okay! - get the length from the storage
+ loadPtr(Address(regT0, OBJECT_OFFSETOF(JSArray, m_storage)), regT2);
+ load32(Address(regT2, OBJECT_OFFSETOF(ArrayStorage, m_length)), regT2);
+
+ Jump failureCases2 = branch32(Above, regT2, Imm32(INT_MAX));
+ move(regT2, regT0);
+ move(Imm32(JSValue::Int32Tag), regT1);
+ Jump success = jump();
+
+ LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+
+ // Use the patch information to link the failure cases back to the original slow case routine.
+ CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
+ patchBuffer.link(failureCases1, slowCaseBegin);
+ patchBuffer.link(failureCases2, slowCaseBegin);
+
+ // On success return back to the hot patch code, at a point it will perform the store to dest for us.
+ patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
+
+ // Track the stub we have created so that it will be deleted later.
+ CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
+
+ // Finally patch the jump to slow case back in the hot path to jump here instead.
+ CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
+ RepatchBuffer repatchBuffer(m_codeBlock);
+ repatchBuffer.relink(jumpLocation, entryLabel);
+
+ // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
+ repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_array_fail));
+}
+
+void JIT::privateCompileGetByIdProto(StructureStubInfo* stubInfo, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
+{
+ // regT0 holds a JSCell*
+
+ // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
+ // referencing the prototype object - let's speculatively load its table nice and early!)
+ JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
+
+ Jump failureCases1 = checkStructure(regT0, structure);
+
+ // Check the prototype object's Structure had not changed.
+ Structure** prototypeStructureAddress = &(protoObject->m_structure);
+#if CPU(X86_64)
+ move(ImmPtr(prototypeStructure), regT3);
+ Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
+#else
+ Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
+#endif
+ bool needsStubLink = false;
+ // Checks out okay!
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ compileGetDirectOffset(protoObject, regT2, regT2, regT1, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(ImmPtr(protoObject));
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
+
+ Jump success = jump();
+
+ LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+
+ // Use the patch information to link the failure cases back to the original slow case routine.
+ CodeLocationLabel slowCaseBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
+ patchBuffer.link(failureCases1, slowCaseBegin);
+ patchBuffer.link(failureCases2, slowCaseBegin);
+
+ // On success return back to the hot patch code, at a point it will perform the store to dest for us.
+ patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
+
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
+
+ // Track the stub we have created so that it will be deleted later.
+ CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
+
+ // Finally patch the jump to slow case back in the hot path to jump here instead.
+ CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
+ RepatchBuffer repatchBuffer(m_codeBlock);
+ repatchBuffer.relink(jumpLocation, entryLabel);
+
+ // We don't want to patch more than once - in future go to cti_op_get_by_id_generic.
+ repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
+}
+
+
+void JIT::privateCompileGetByIdSelfList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* polymorphicStructures, int currentIndex, Structure* structure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset)
+{
+ // regT0 holds a JSCell*
+ Jump failureCase = checkStructure(regT0, structure);
+ bool needsStubLink = false;
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ if (!structure->isUsingInlineStorage()) {
+ move(regT0, regT1);
+ compileGetDirectOffset(regT1, regT2, regT1, structure, cachedOffset);
+ } else
+ compileGetDirectOffset(regT0, regT2, regT1, structure, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(regT0, regT1, regT0, structure, cachedOffset);
+
+ Jump success = jump();
+
+ LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
+ // Use the patch information to link the failure cases back to the original slow case routine.
+ CodeLocationLabel lastProtoBegin = polymorphicStructures->list[currentIndex - 1].stubRoutine;
+ if (!lastProtoBegin)
+ lastProtoBegin = stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall);
+
+ patchBuffer.link(failureCase, lastProtoBegin);
+
+ // On success return back to the hot patch code, at a point it will perform the store to dest for us.
+ patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
+
+ CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+
+ structure->ref();
+ polymorphicStructures->list[currentIndex].set(entryLabel, structure);
+
+ // Finally patch the jump to slow case back in the hot path to jump here instead.
+ CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
+ RepatchBuffer repatchBuffer(m_codeBlock);
+ repatchBuffer.relink(jumpLocation, entryLabel);
+}
+
+void JIT::privateCompileGetByIdProtoList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, Structure* prototypeStructure, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
+{
+ // regT0 holds a JSCell*
+
+ // The prototype object definitely exists (if this stub exists the CodeBlock is referencing a Structure that is
+ // referencing the prototype object - let's speculatively load its table nice and early!)
+ JSObject* protoObject = asObject(structure->prototypeForLookup(callFrame));
+
+ // Check eax is an object of the right Structure.
+ Jump failureCases1 = checkStructure(regT0, structure);
+
+ // Check the prototype object's Structure had not changed.
+ Structure** prototypeStructureAddress = &(protoObject->m_structure);
+#if CPU(X86_64)
+ move(ImmPtr(prototypeStructure), regT3);
+ Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3);
+#else
+ Jump failureCases2 = branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(prototypeStructure));
+#endif
+
+ bool needsStubLink = false;
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ compileGetDirectOffset(protoObject, regT2, regT2, regT1, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(ImmPtr(protoObject));
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
+
+ Jump success = jump();
+
+ LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
+ // Use the patch information to link the failure cases back to the original slow case routine.
+ CodeLocationLabel lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
+ patchBuffer.link(failureCases1, lastProtoBegin);
+ patchBuffer.link(failureCases2, lastProtoBegin);
+
+ // On success return back to the hot patch code, at a point it will perform the store to dest for us.
+ patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
+
+ CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+
+ structure->ref();
+ prototypeStructure->ref();
+ prototypeStructures->list[currentIndex].set(entryLabel, structure, prototypeStructure);
+
+ // Finally patch the jump to slow case back in the hot path to jump here instead.
+ CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
+ RepatchBuffer repatchBuffer(m_codeBlock);
+ repatchBuffer.relink(jumpLocation, entryLabel);
+}
+
+void JIT::privateCompileGetByIdChainList(StructureStubInfo* stubInfo, PolymorphicAccessStructureList* prototypeStructures, int currentIndex, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, CallFrame* callFrame)
+{
+ // regT0 holds a JSCell*
+ ASSERT(count);
+
+ JumpList bucketsOfFail;
+
+ // Check eax is an object of the right Structure.
+ bucketsOfFail.append(checkStructure(regT0, structure));
+
+ Structure* currStructure = structure;
+ RefPtr<Structure>* chainEntries = chain->head();
+ JSObject* protoObject = 0;
+ for (unsigned i = 0; i < count; ++i) {
+ protoObject = asObject(currStructure->prototypeForLookup(callFrame));
+ currStructure = chainEntries[i].get();
+
+ // Check the prototype object's Structure had not changed.
+ Structure** prototypeStructureAddress = &(protoObject->m_structure);
+#if CPU(X86_64)
+ move(ImmPtr(currStructure), regT3);
+ bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
+#else
+ bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
+#endif
+ }
+ ASSERT(protoObject);
+
+ bool needsStubLink = false;
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ compileGetDirectOffset(protoObject, regT2, regT2, regT1, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(ImmPtr(protoObject));
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
+
+ Jump success = jump();
+
+ LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
+ // Use the patch information to link the failure cases back to the original slow case routine.
+ CodeLocationLabel lastProtoBegin = prototypeStructures->list[currentIndex - 1].stubRoutine;
+
+ patchBuffer.link(bucketsOfFail, lastProtoBegin);
+
+ // On success, return back to the hot patch code, at a point where it will perform the store to dest for us.
+ patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
+
+ CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+
+ // Track the stub we have created so that it will be deleted later.
+ structure->ref();
+ chain->ref();
+ prototypeStructures->list[currentIndex].set(entryLabel, structure, chain);
+
+ // Finally patch the jump to slow case back in the hot path to jump here instead.
+ CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
+ RepatchBuffer repatchBuffer(m_codeBlock);
+ repatchBuffer.relink(jumpLocation, entryLabel);
+}
+
+void JIT::privateCompileGetByIdChain(StructureStubInfo* stubInfo, Structure* structure, StructureChain* chain, size_t count, const Identifier& ident, const PropertySlot& slot, size_t cachedOffset, ReturnAddressPtr returnAddress, CallFrame* callFrame)
+{
+ // regT0 holds a JSCell*
+ ASSERT(count);
+
+ JumpList bucketsOfFail;
+
+ // Check that regT0 holds an object with the right Structure.
+ bucketsOfFail.append(checkStructure(regT0, structure));
+
+ Structure* currStructure = structure;
+ RefPtr<Structure>* chainEntries = chain->head();
+ JSObject* protoObject = 0;
+ for (unsigned i = 0; i < count; ++i) {
+ protoObject = asObject(currStructure->prototypeForLookup(callFrame));
+ currStructure = chainEntries[i].get();
+
+ // Check that the prototype object's Structure has not changed.
+ Structure** prototypeStructureAddress = &(protoObject->m_structure);
+#if CPU(X86_64)
+ move(ImmPtr(currStructure), regT3);
+ bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), regT3));
+#else
+ bucketsOfFail.append(branchPtr(NotEqual, AbsoluteAddress(prototypeStructureAddress), ImmPtr(currStructure)));
+#endif
+ }
+ ASSERT(protoObject);
+
+ bool needsStubLink = false;
+ if (slot.cachedPropertyType() == PropertySlot::Getter) {
+ needsStubLink = true;
+ compileGetDirectOffset(protoObject, regT2, regT2, regT1, cachedOffset);
+ JITStubCall stubCall(this, cti_op_get_by_id_getter_stub);
+ stubCall.addArgument(regT1);
+ stubCall.addArgument(regT0);
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else if (slot.cachedPropertyType() == PropertySlot::Custom) {
+ needsStubLink = true;
+ JITStubCall stubCall(this, cti_op_get_by_id_custom_stub);
+ stubCall.addArgument(ImmPtr(protoObject));
+ stubCall.addArgument(ImmPtr(FunctionPtr(slot.customGetter()).executableAddress()));
+ stubCall.addArgument(ImmPtr(const_cast<Identifier*>(&ident)));
+ stubCall.addArgument(ImmPtr(stubInfo->callReturnLocation.executableAddress()));
+ stubCall.call();
+ } else
+ compileGetDirectOffset(protoObject, regT2, regT1, regT0, cachedOffset);
+ Jump success = jump();
+
+ LinkBuffer patchBuffer(this, m_codeBlock->executablePool());
+ if (needsStubLink) {
+ for (Vector<CallRecord>::iterator iter = m_calls.begin(); iter != m_calls.end(); ++iter) {
+ if (iter->to)
+ patchBuffer.link(iter->from, FunctionPtr(iter->to));
+ }
+ }
+ // Use the patch information to link the failure cases back to the original slow case routine.
+ patchBuffer.link(bucketsOfFail, stubInfo->callReturnLocation.labelAtOffset(-patchOffsetGetByIdSlowCaseCall));
+
+ // On success, return back to the hot patch code, at a point where it will perform the store to dest for us.
+ patchBuffer.link(success, stubInfo->hotPathBegin.labelAtOffset(patchOffsetGetByIdPutResult));
+
+ // Track the stub we have created so that it will be deleted later.
+ CodeLocationLabel entryLabel = patchBuffer.finalizeCodeAddendum();
+ stubInfo->stubRoutine = entryLabel;
+
+ // Finally patch the jump to slow case back in the hot path to jump here instead.
+ CodeLocationJump jumpLocation = stubInfo->hotPathBegin.jumpAtOffset(patchOffsetGetByIdBranchToSlowCase);
+ RepatchBuffer repatchBuffer(m_codeBlock);
+ repatchBuffer.relink(jumpLocation, entryLabel);
+
+ // We don't want to patch more than once - in future go to cti_op_get_by_id_proto_list.
+ repatchBuffer.relinkCallerToFunction(returnAddress, FunctionPtr(cti_op_get_by_id_proto_list));
+}
+
+/* ------------------------------ END: !ENABLE / ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS) ------------------------------ */
+
+#endif // !ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
+
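+// Loads the JSValue at property index 'offset' into the (resultTag, resultPayload)
+// pair. The Structure's m_propertyStorageCapacity determines whether the value
+// lives in the object's inline storage or in its external property storage.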
+void JIT::compileGetDirectOffset(RegisterID base, RegisterID resultTag, RegisterID resultPayload, RegisterID structure, RegisterID offset)
+{
+ ASSERT(sizeof(((Structure*)0)->m_propertyStorageCapacity) == sizeof(int32_t));
+ ASSERT(sizeof(JSObject::inlineStorageCapacity) == sizeof(int32_t));
+ ASSERT(sizeof(JSValue) == 8);
+
+ Jump notUsingInlineStorage = branch32(NotEqual, Address(structure, OBJECT_OFFSETOF(Structure, m_propertyStorageCapacity)), Imm32(JSObject::inlineStorageCapacity));
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage)+OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSObject, m_inlineStorage)+OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
+ Jump finishedLoad = jump();
+ notUsingInlineStorage.link(this);
+ loadPtr(Address(base, OBJECT_OFFSETOF(JSObject, m_externalStorage)), base);
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.payload)), resultPayload);
+ loadPtr(BaseIndex(base, offset, TimesEight, OBJECT_OFFSETOF(JSValue, u.asBits.tag)), resultTag);
+ finishedLoad.link(this);
+}
+
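+// Fast path for op_get_by_pname: only taken when the property cell matches the
+// expected value and the base's Structure still matches the iterator's cached
+// Structure; the value is then loaded directly from the cached slot index.
+// Everything else falls through to the slow case, which does a generic get_by_val.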
+void JIT::emit_op_get_by_pname(Instruction* currentInstruction)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+ unsigned expected = currentInstruction[4].u.operand;
+ unsigned iter = currentInstruction[5].u.operand;
+ unsigned i = currentInstruction[6].u.operand;
+
+ emitLoad2(property, regT1, regT0, base, regT3, regT2);
+ emitJumpSlowCaseIfNotJSCell(property, regT1);
+ addSlowCase(branchPtr(NotEqual, regT0, payloadFor(expected)));
+ // Property registers are now available as the property is known
+ emitJumpSlowCaseIfNotJSCell(base, regT3);
+ emitLoadPayload(iter, regT1);
+
+ // Test base's structure
+ loadPtr(Address(regT2, OBJECT_OFFSETOF(JSCell, m_structure)), regT0);
+ addSlowCase(branchPtr(NotEqual, regT0, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_cachedStructure))));
+ load32(addressFor(i), regT3);
+ sub32(Imm32(1), regT3);
+ addSlowCase(branch32(AboveOrEqual, regT3, Address(regT1, OBJECT_OFFSETOF(JSPropertyNameIterator, m_numCacheableSlots))));
+ compileGetDirectOffset(regT2, regT1, regT0, regT0, regT3);
+
+ emitStore(dst, regT1, regT0);
+ map(m_bytecodeIndex + OPCODE_LENGTH(op_get_by_pname), dst, regT1, regT0);
+}
+
+void JIT::emitSlow_op_get_by_pname(Instruction* currentInstruction, Vector<SlowCaseEntry>::iterator& iter)
+{
+ unsigned dst = currentInstruction[1].u.operand;
+ unsigned base = currentInstruction[2].u.operand;
+ unsigned property = currentInstruction[3].u.operand;
+
+ linkSlowCaseIfNotJSCell(iter, property);
+ linkSlowCase(iter);
+ linkSlowCaseIfNotJSCell(iter, base);
+ linkSlowCase(iter);
+ linkSlowCase(iter);
+
+ JITStubCall stubCall(this, cti_op_get_by_val);
+ stubCall.addArgument(base);
+ stubCall.addArgument(property);
+ stubCall.call(dst);
+}
+
+} // namespace JSC
+
+#endif // ENABLE(JIT)
+
+#endif // ENABLE(JSVALUE32_64)
+
m_jit->poke(argument, m_stackIndex);
m_stackIndex += stackIndexStep;
}
-
+
+#if USE(JSVALUE32_64)
void addArgument(const JSValue& value)
{
m_jit->poke(JIT::Imm32(value.payload()), m_stackIndex);
m_jit->poke(JIT::Imm32(value.tag()), m_stackIndex + 1);
m_stackIndex += stackIndexStep;
}
+#endif
void addArgument(JIT::RegisterID tag, JIT::RegisterID payload)
{
#include "Collector.h"
#include "Debugger.h"
#include "ExceptionHelpers.h"
+#include "GetterSetter.h"
#include "GlobalEvalFunction.h"
#include "JIT.h"
#include "JSActivation.h"
#error "JIT_STUB_ARGUMENT_VA_LIST not supported on ARMv7."
#endif
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-HIDE_SYMBOL(ctiTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiTrampoline) "\n"
-SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "sub sp, sp, #0x3c" "\n"
- "str lr, [sp, #0x20]" "\n"
- "str r4, [sp, #0x24]" "\n"
- "str r5, [sp, #0x28]" "\n"
- "str r6, [sp, #0x2c]" "\n"
- "str r1, [sp, #0x30]" "\n"
- "str r2, [sp, #0x34]" "\n"
- "str r3, [sp, #0x38]" "\n"
- "cpy r5, r2" "\n"
- "mov r6, #512" "\n"
- "blx r0" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
-
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiVMThrowTrampoline) "\n"
-SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "cpy r0, sp" "\n"
- "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
-
-asm volatile (
-".text" "\n"
-".align 2" "\n"
-".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiOpThrowNotCaught) "\n"
-SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
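+// Offsets into the ARMv7 (Thumb-2) JITStackFrame. The trampolines and thunk glue
+// below address the frame with these constants; JITThunks::JITThunks() ASSERTs
+// that they stay in sync with the actual struct layout.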
+#define THUNK_RETURN_ADDRESS_OFFSET 0x3C
+#define PRESERVED_RETURN_ADDRESS_OFFSET 0x40
+#define PRESERVED_R4_OFFSET 0x44
+#define PRESERVED_R5_OFFSET 0x48
+#define PRESERVED_R6_OFFSET 0x4C
+#define REGISTER_FILE_OFFSET 0x50
+#define CALLFRAME_OFFSET 0x54
+#define EXCEPTION_OFFSET 0x58
+#define ENABLE_PROFILER_REFERENCE_OFFSET 0x60
#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
-asm volatile (
-".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "stmdb sp!, {r1-r3}" "\n"
- "stmdb sp!, {r4-r8, lr}" "\n"
- "sub sp, sp, #68" "\n"
- "mov r4, r2" "\n"
- "mov r5, #512" "\n"
- // r0 contains the code
- "mov lr, pc" "\n"
- "mov pc, r0" "\n"
- "add sp, sp, #68" "\n"
- "ldmia sp!, {r4-r8, lr}" "\n"
- "add sp, sp, #12" "\n"
- "mov pc, lr" "\n"
-);
-
-asm volatile (
-".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "mov r0, sp" "\n"
- "bl " SYMBOL_STRING(cti_vm_throw) "\n"
-
-// Both has the same return sequence
-".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "add sp, sp, #68" "\n"
- "ldmia sp!, {r4-r8, lr}" "\n"
- "add sp, sp, #12" "\n"
- "mov pc, lr" "\n"
-);
+#define THUNK_RETURN_ADDRESS_OFFSET 64
+#define PRESERVEDR4_OFFSET 68
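+// On traditional ARM only the thunk return address and preserved r4 slots are
+// addressed symbolically; JITThunks::JITThunks() ASSERTs these two offsets
+// against the JITStackFrame layout.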
#elif COMPILER(MSVC) && CPU(X86)
#error "JIT_STUB_ARGUMENT_VA_LIST not supported on ARMv7."
#endif
-asm volatile (
+#define THUNK_RETURN_ADDRESS_OFFSET 0x1C
+#define PRESERVED_RETURN_ADDRESS_OFFSET 0x20
+#define PRESERVED_R4_OFFSET 0x24
+#define PRESERVED_R5_OFFSET 0x28
+#define PRESERVED_R6_OFFSET 0x2C
+#define REGISTER_FILE_OFFSET 0x30
+#define CALLFRAME_OFFSET 0x34
+#define EXCEPTION_OFFSET 0x38
+#define ENABLE_PROFILER_REFERENCE_OFFSET 0x40
+
+#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
+
+#define THUNK_RETURN_ADDRESS_OFFSET 32
+#define PRESERVEDR4_OFFSET 36
+
+#elif CPU(MIPS)
+
+#if USE(JIT_STUB_ARGUMENT_VA_LIST)
+#error "JIT_STUB_ARGUMENT_VA_LIST not supported on MIPS."
+#endif
+
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-HIDE_SYMBOL(ctiTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiTrampoline) "\n"
+".ent " SYMBOL_STRING(ctiTrampoline) "\n"
SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "sub sp, sp, #0x40" "\n"
- "str lr, [sp, #0x20]" "\n"
- "str r4, [sp, #0x24]" "\n"
- "str r5, [sp, #0x28]" "\n"
- "str r6, [sp, #0x2c]" "\n"
- "str r1, [sp, #0x30]" "\n"
- "str r2, [sp, #0x34]" "\n"
- "str r3, [sp, #0x38]" "\n"
- "cpy r5, r2" "\n"
- "mov r6, #512" "\n"
- "blx r0" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x40" "\n"
- "bx lr" "\n"
+ "addiu $29,$29,-72" "\n"
+ "sw $31,44($29)" "\n"
+ "sw $18,40($29)" "\n"
+ "sw $17,36($29)" "\n"
+ "sw $16,32($29)" "\n"
+#if WTF_MIPS_PIC
+ "sw $28,28($29)" "\n"
+#endif
+ "move $16,$6 # set callFrameRegister" "\n"
+ "li $17,512 # set timeoutCheckRegister" "\n"
+ "move $25,$4 # move executableAddress to t9" "\n"
+ "sw $5,52($29) # store registerFile to current stack" "\n"
+ "sw $6,56($29) # store callFrame to current stack" "\n"
+ "sw $7,60($29) # store exception to current stack" "\n"
+ "lw $8,88($29) # load enableProfilerReference from previous stack" "\n"
+ "lw $9,92($29) # load globalData from previous stack" "\n"
+ "sw $8,64($29) # store enableProfilerReference to current stack" "\n"
+ "jalr $25" "\n"
+ "sw $9,68($29) # store globalData to current stack" "\n"
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiTrampoline) "\n"
);
-asm volatile (
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiVMThrowTrampoline) "\n"
+".ent " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "cpy r0, sp" "\n"
- "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x40" "\n"
- "bx lr" "\n"
+#if WTF_MIPS_PIC
+ "lw $28,28($29)" "\n"
+".set macro" "\n"
+ "la $25," SYMBOL_STRING(cti_vm_throw) "\n"
+".set nomacro" "\n"
+ "bal " SYMBOL_STRING(cti_vm_throw) "\n"
+ "move $4,$29" "\n"
+#else
+ "jal " SYMBOL_STRING(cti_vm_throw) "\n"
+ "move $4,$29" "\n"
+#endif
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
);
-asm volatile (
+asm volatile(
".text" "\n"
".align 2" "\n"
+".set noreorder" "\n"
+".set nomacro" "\n"
+".set nomips16" "\n"
".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
-".thumb" "\n"
-".thumb_func " THUMB_FUNC_PARAM(ctiOpThrowNotCaught) "\n"
+".ent " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "ldr r6, [sp, #0x2c]" "\n"
- "ldr r5, [sp, #0x28]" "\n"
- "ldr r4, [sp, #0x24]" "\n"
- "ldr lr, [sp, #0x20]" "\n"
- "add sp, sp, #0x3c" "\n"
- "bx lr" "\n"
-);
-
-#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
-
-asm volatile (
-".text\n"
-".globl " SYMBOL_STRING(ctiTrampoline) "\n"
-HIDE_SYMBOL(ctiTrampoline) "\n"
-SYMBOL_STRING(ctiTrampoline) ":" "\n"
- "stmdb sp!, {r1-r3}" "\n"
- "stmdb sp!, {r4-r8, lr}" "\n"
- "sub sp, sp, #36" "\n"
- "mov r4, r2" "\n"
- "mov r5, #512" "\n"
- "mov lr, pc" "\n"
- "mov pc, r0" "\n"
- "add sp, sp, #36" "\n"
- "ldmia sp!, {r4-r8, lr}" "\n"
- "add sp, sp, #12" "\n"
- "mov pc, lr" "\n"
-);
-
-asm volatile (
-".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
-HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
-SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
- "mov r0, sp" "\n"
- "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
-
-// Both has the same return sequence
-".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
-HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
-SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
- "add sp, sp, #36" "\n"
- "ldmia sp!, {r4-r8, lr}" "\n"
- "add sp, sp, #12" "\n"
- "mov pc, lr" "\n"
+ "lw $16,32($29)" "\n"
+ "lw $17,36($29)" "\n"
+ "lw $18,40($29)" "\n"
+ "lw $31,44($29)" "\n"
+ "jr $31" "\n"
+ "addiu $29,$29,72" "\n"
+".set reorder" "\n"
+".set macro" "\n"
+".end " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
);
#elif COMPILER(RVCT) && CPU(ARM_TRADITIONAL)
+#define THUNK_RETURN_ADDRESS_OFFSET 32
+#define PRESERVEDR4_OFFSET 36
+
__asm EncodedJSValue ctiTrampoline(void*, RegisterFile*, CallFrame*, JSValue*, Profiler**, JSGlobalData*)
{
ARM
#endif // USE(JSVALUE32_64)
+#if COMPILER(GCC) && CPU(ARM_THUMB2)
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiTrampoline) "\n"
+SYMBOL_STRING(ctiTrampoline) ":" "\n"
+ "sub sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "str lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "str r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "str r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "str r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "str r1, [sp, #" STRINGIZE_VALUE_OF(REGISTER_FILE_OFFSET) "]" "\n"
+ "str r2, [sp, #" STRINGIZE_VALUE_OF(CALLFRAME_OFFSET) "]" "\n"
+ "str r3, [sp, #" STRINGIZE_VALUE_OF(EXCEPTION_OFFSET) "]" "\n"
+ "cpy r5, r2" "\n"
+ "mov r6, #512" "\n"
+ "blx r0" "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiVMThrowTrampoline) "\n"
+SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
+ "cpy r0, sp" "\n"
+ "bl " SYMBOL_STRING_RELOCATION(cti_vm_throw) "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+asm volatile(
+".text" "\n"
+".align 2" "\n"
+".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
+".thumb" "\n"
+".thumb_func " THUMB_FUNC_PARAM(ctiOpThrowNotCaught) "\n"
+SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
+ "ldr r6, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R6_OFFSET) "]" "\n"
+ "ldr r5, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R5_OFFSET) "]" "\n"
+ "ldr r4, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_R4_OFFSET) "]" "\n"
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(PRESERVED_RETURN_ADDRESS_OFFSET) "]" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(ENABLE_PROFILER_REFERENCE_OFFSET) "\n"
+ "bx lr" "\n"
+);
+
+#elif COMPILER(GCC) && CPU(ARM_TRADITIONAL)
+
+asm volatile(
+".globl " SYMBOL_STRING(ctiTrampoline) "\n"
+HIDE_SYMBOL(ctiTrampoline) "\n"
+SYMBOL_STRING(ctiTrampoline) ":" "\n"
+ "stmdb sp!, {r1-r3}" "\n"
+ "stmdb sp!, {r4-r8, lr}" "\n"
+ "sub sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "mov r4, r2" "\n"
+ "mov r5, #512" "\n"
+ // r0 contains the code
+ "mov lr, pc" "\n"
+ "mov pc, r0" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "ldmia sp!, {r4-r8, lr}" "\n"
+ "add sp, sp, #12" "\n"
+ "mov pc, lr" "\n"
+);
+
+asm volatile(
+".globl " SYMBOL_STRING(ctiVMThrowTrampoline) "\n"
+HIDE_SYMBOL(ctiVMThrowTrampoline) "\n"
+SYMBOL_STRING(ctiVMThrowTrampoline) ":" "\n"
+ "mov r0, sp" "\n"
+ "bl " SYMBOL_STRING(cti_vm_throw) "\n"
+
+// Both share the same return sequence
+".globl " SYMBOL_STRING(ctiOpThrowNotCaught) "\n"
+HIDE_SYMBOL(ctiOpThrowNotCaught) "\n"
+SYMBOL_STRING(ctiOpThrowNotCaught) ":" "\n"
+ "add sp, sp, #" STRINGIZE_VALUE_OF(PRESERVEDR4_OFFSET) "\n"
+ "ldmia sp!, {r4-r8, lr}" "\n"
+ "add sp, sp, #12" "\n"
+ "mov pc, lr" "\n"
+);
+
+#endif
+
#if ENABLE(OPCODE_SAMPLING)
#define CTI_SAMPLER stackFrame.globalData->interpreter->sampler()
#else
JITThunks::JITThunks(JSGlobalData* globalData)
{
- JIT::compileCTIMachineTrampolines(globalData, &m_executablePool, &m_ctiStringLengthTrampoline, &m_ctiVirtualCallLink, &m_ctiVirtualCall, &m_ctiNativeCallThunk);
+ JIT::compileCTIMachineTrampolines(globalData, &m_executablePool, &m_trampolineStructure);
#if CPU(ARM_THUMB2)
// Unfortunately, the ARM compiler does not like the use of offsetof on JITStackFrame (since it contains non-POD types),
// and the OBJECT_OFFSETOF macro does not appear constant enough for it to be happy with its use in COMPILE_ASSERT
// macros.
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == 0x20);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == 0x24);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR5) == 0x28);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR6) == 0x2c);
-
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == 0x30);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == 0x34);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == 0x38);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == PRESERVED_RETURN_ADDRESS_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == PRESERVED_R4_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR5) == PRESERVED_R5_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR6) == PRESERVED_R6_OFFSET);
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == REGISTER_FILE_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == CALLFRAME_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == EXCEPTION_OFFSET);
// The fifth argument is the first item already on the stack.
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == 0x40);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == ENABLE_PROFILER_REFERENCE_OFFSET);
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
+
+#elif CPU(ARM_TRADITIONAL)
+
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedR4) == PRESERVEDR4_OFFSET);
+
+#elif CPU(MIPS)
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedGP) == 28);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS0) == 32);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS1) == 36);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedS2) == 40);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, preservedReturnAddress) == 44);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == 48);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, registerFile) == 52);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, callFrame) == 56);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, exception) == 60);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, enabledProfilerReference) == 64);
+ ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, globalData) == 68);
- ASSERT(OBJECT_OFFSETOF(struct JITStackFrame, thunkReturnAddress) == 0x1C);
#endif
}
+JITThunks::~JITThunks()
+{
+}
+
#if ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
-NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo)
+NEVER_INLINE void JITThunks::tryCachePutByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot& slot, StructureStubInfo* stubInfo, bool direct)
{
// The interpreter checks for recursion here; I do not believe this can occur in CTI.
// Uncacheable: give up.
if (!slot.isCacheable()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
Structure* structure = baseCell->structure();
if (structure->isUncacheableDictionary()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
// If baseCell != base, then baseCell must be a proxy for another object.
if (baseCell != slot.base()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
// Structure transition, cache transition info
if (slot.type() == PutPropertySlot::NewProperty) {
if (structure->isDictionary()) {
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_put_by_id_generic));
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(direct ? cti_op_put_by_id_direct_generic : cti_op_put_by_id_generic));
return;
}
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
stubInfo->initPutByIdTransition(structure->previousID(), structure, prototypeChain);
- JIT::compilePutByIdTransition(callFrame->scopeChain()->globalData, codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress);
+ JIT::compilePutByIdTransition(callFrame->scopeChain()->globalData, codeBlock, stubInfo, structure->previousID(), structure, slot.cachedOffset(), prototypeChain, returnAddress, direct);
return;
}
stubInfo->initPutByIdReplace(structure);
- JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
+ JIT::patchPutByIdReplace(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress, direct);
}
NEVER_INLINE void JITThunks::tryCacheGetByID(CallFrame* callFrame, CodeBlock* codeBlock, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot& slot, StructureStubInfo* stubInfo)
if (isJSString(globalData, baseValue) && propertyName == callFrame->propertyNames().length) {
// The tradeoff of compiling a patched inline string length access routine does not seem
// to pay off, so we currently only do this for arrays.
- ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->jitStubs.ctiStringLengthTrampoline());
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, globalData->jitStubs->ctiStringLengthTrampoline());
return;
}
if (slot.slotBase() == baseValue) {
// set this up, so derefStructures can do its job.
stubInfo->initGetByIdSelf(structure);
-
- JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
+ if (slot.cachedPropertyType() != PropertySlot::Value)
+ ctiPatchCallByReturnAddress(codeBlock, returnAddress, FunctionPtr(cti_op_get_by_id_self_fail));
+ else
+ JIT::patchGetByIdSelf(codeBlock, stubInfo, structure, slot.cachedOffset(), returnAddress);
return;
}
ASSERT(!structure->isDictionary());
ASSERT(!slotBaseObject->structure()->isDictionary());
- JIT::compileGetByIdProto(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), offset, returnAddress);
+ JIT::compileGetByIdProto(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, slotBaseObject->structure(), propertyName, slot, offset, returnAddress);
return;
}
StructureChain* prototypeChain = structure->prototypeChain(callFrame);
stubInfo->initGetByIdChain(structure, prototypeChain);
- JIT::compileGetByIdChain(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, prototypeChain, count, offset, returnAddress);
+ JIT::compileGetByIdChain(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, structure, prototypeChain, count, propertyName, slot, offset, returnAddress);
}
#endif // ENABLE(JIT_OPTIMIZE_PROPERTY_ACCESS)
".thumb" "\n" \
".thumb_func " THUMB_FUNC_PARAM(cti_##op) "\n" \
SYMBOL_STRING(cti_##op) ":" "\n" \
- "str lr, [sp, #0x1c]" "\n" \
+ "str lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
"bl " SYMBOL_STRING(JITStubThunked_##op) "\n" \
- "ldr lr, [sp, #0x1c]" "\n" \
+ "ldr lr, [sp, #" STRINGIZE_VALUE_OF(THUNK_RETURN_ADDRESS_OFFSET) "]" "\n" \
"bx lr" "\n" \
); \
rtype JITStubThunked_##op(STUB_ARGS_DECLARATION) \
-#elif CPU(ARM_TRADITIONAL) && COMPILER(GCC)
+#elif CPU(MIPS)
+#if WTF_MIPS_PIC
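+// In PIC builds the stub glue reloads $gp from the frame and loads the callee
+// address into $25 (t9) before the call, as MIPS PIC callees conventionally
+// derive their own $gp from t9; the non-PIC variant below can jal directly.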
+#define DEFINE_STUB_FUNCTION(rtype, op) \
+ extern "C" { \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
+ }; \
+ asm volatile( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".set noreorder" "\n" \
+ ".set nomacro" "\n" \
+ ".set nomips16" "\n" \
+ ".globl " SYMBOL_STRING(cti_##op) "\n" \
+ ".ent " SYMBOL_STRING(cti_##op) "\n" \
+ SYMBOL_STRING(cti_##op) ":" "\n" \
+ "lw $28,28($29)" "\n" \
+ "sw $31,48($29)" "\n" \
+ ".set macro" "\n" \
+ "la $25," SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ ".set nomacro" "\n" \
+ "bal " SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ "nop" "\n" \
+ "lw $31,48($29)" "\n" \
+ "jr $31" "\n" \
+ "nop" "\n" \
+ ".set reorder" "\n" \
+ ".set macro" "\n" \
+ ".end " SYMBOL_STRING(cti_##op) "\n" \
+ ); \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
+
+#else // WTF_MIPS_PIC
+#define DEFINE_STUB_FUNCTION(rtype, op) \
+ extern "C" { \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION); \
+ }; \
+ asm volatile( \
+ ".text" "\n" \
+ ".align 2" "\n" \
+ ".set noreorder" "\n" \
+ ".set nomacro" "\n" \
+ ".set nomips16" "\n" \
+ ".globl " SYMBOL_STRING(cti_##op) "\n" \
+ ".ent " SYMBOL_STRING(cti_##op) "\n" \
+ SYMBOL_STRING(cti_##op) ":" "\n" \
+ "sw $31,48($29)" "\n" \
+ "jal " SYMBOL_STRING(JITStubThunked_##op) "\n" \
+ "nop" "\n" \
+ "lw $31,48($29)" "\n" \
+ "jr $31" "\n" \
+ "nop" "\n" \
+ ".set reorder" "\n" \
+ ".set macro" "\n" \
+ ".end " SYMBOL_STRING(cti_##op) "\n" \
+ ); \
+ rtype JITStubThunked_##op(STUB_ARGS_DECLARATION)
-#if USE(JSVALUE32_64)
-#define THUNK_RETURN_ADDRESS_OFFSET 64
-#else
-#define THUNK_RETURN_ADDRESS_OFFSET 32
#endif
-COMPILE_ASSERT(offsetof(struct JITStackFrame, thunkReturnAddress) == THUNK_RETURN_ADDRESS_OFFSET, JITStackFrame_thunkReturnAddress_offset_mismatch);
+#elif CPU(ARM_TRADITIONAL) && COMPILER(GCC)
#define DEFINE_STUB_FUNCTION(rtype, op) \
extern "C" { \
RVCT({)
RVCT( ARM)
RVCT( IMPORT JITStubThunked_#op#)
-RVCT( str lr, [sp, #32])
+RVCT( str lr, [sp, ##offset#])
RVCT( bl JITStubThunked_#op#)
-RVCT( ldr lr, [sp, #32])
+RVCT( ldr lr, [sp, ##offset#])
RVCT( bx lr)
RVCT(})
RVCT()
DEFINE_STUB_FUNCTION(int, timeout_check)
{
STUB_INIT_STACK_FRAME(stackFrame);
-
+
JSGlobalData* globalData = stackFrame.globalData;
TimeoutChecker& timeoutChecker = globalData->timeoutChecker;
- if (timeoutChecker.didTimeOut(stackFrame.callFrame)) {
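+ // A pending termination request is checked first and takes precedence over an ordinary timeout.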
+ if (globalData->terminator.shouldTerminate()) {
+ globalData->exception = createTerminatedExecutionException(globalData);
+ VM_THROW_EXCEPTION_AT_END();
+ } else if (timeoutChecker.didTimeOut(stackFrame.callFrame)) {
globalData->exception = createInterruptedExecutionException(globalData);
VM_THROW_EXCEPTION_AT_END();
}
-
+
return timeoutChecker.ticksUntilNextCheck();
}
CHECK_FOR_EXCEPTION_AT_END();
}
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_generic)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+
+ PutPropertySlot slot;
+ stackFrame.args[0].jsValue().putDirect(stackFrame.callFrame, stackFrame.args[1].identifier(), stackFrame.args[2].jsValue(), slot);
+ CHECK_FOR_EXCEPTION_AT_END();
+}
+
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_generic)
{
STUB_INIT_STACK_FRAME(stackFrame);
STUB_INIT_STACK_FRAME(stackFrame);
CallFrame* callFrame = stackFrame.callFrame;
Identifier& ident = stackFrame.args[1].identifier();
-
+
PutPropertySlot slot;
stackFrame.args[0].jsValue().put(callFrame, ident, stackFrame.args[2].jsValue(), slot);
-
+
CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
if (!stubInfo->seenOnce())
stubInfo->setSeen();
else
- JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo);
+ JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, false);
+
+ CHECK_FOR_EXCEPTION_AT_END();
+}
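+// Direct (own-property) variant of op_put_by_id: stores with putDirect() and
+// passes direct = true to tryCachePutByID so the direct transition/replace
+// stubs are compiled instead of the ordinary put path.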
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
+ Identifier& ident = stackFrame.args[1].identifier();
+
+ PutPropertySlot slot;
+ stackFrame.args[0].jsValue().putDirect(callFrame, ident, stackFrame.args[2].jsValue(), slot);
+
+ CodeBlock* codeBlock = stackFrame.callFrame->codeBlock();
+ StructureStubInfo* stubInfo = &codeBlock->getStubInfo(STUB_RETURN_ADDRESS);
+ if (!stubInfo->seenOnce())
+ stubInfo->setSeen();
+ else
+ JITThunks::tryCachePutByID(callFrame, codeBlock, STUB_RETURN_ADDRESS, stackFrame.args[0].jsValue(), slot, stubInfo, true);
+
CHECK_FOR_EXCEPTION_AT_END();
}
CHECK_FOR_EXCEPTION_AT_END();
}
+DEFINE_STUB_FUNCTION(void, op_put_by_id_direct_fail)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+
+ CallFrame* callFrame = stackFrame.callFrame;
+ Identifier& ident = stackFrame.args[1].identifier();
+
+ PutPropertySlot slot;
+ stackFrame.args[0].jsValue().putDirect(callFrame, ident, stackFrame.args[2].jsValue(), slot);
+
+ CHECK_FOR_EXCEPTION_AT_END();
+}
+
DEFINE_STUB_FUNCTION(JSObject*, op_put_by_id_transition_realloc)
{
STUB_INIT_STACK_FRAME(stackFrame);
// If we successfully got something, then the base from which it is being accessed must
// be an object. (Assertion to ensure asObject() call below is safe, which comes after
// an isCacheable() check.)
- ASSERT(!slot.isCacheable() || slot.slotBase().isObject());
+ ASSERT(!slot.isCacheableValue() || slot.slotBase().isObject());
// Check that:
// * We're dealing with a JSCell,
JSCell* specific;
JSObject* slotBaseObject;
if (baseValue.isCell()
- && slot.isCacheable()
+ && slot.isCacheableValue()
&& !(structure = asCell(baseValue)->structure())->isUncacheableDictionary()
&& (slotBaseObject = asObject(slot.slotBase()))->getPropertySpecificValue(callFrame, ident, specific)
&& specific
if (stubInfo->accessType == access_get_by_id_self) {
ASSERT(!stubInfo->stubRoutine);
polymorphicStructureList = new PolymorphicAccessStructureList(CodeLocationLabel(), stubInfo->u.getByIdSelf.baseObjectStructure);
- stubInfo->initGetByIdSelfList(polymorphicStructureList, 2);
+ stubInfo->initGetByIdSelfList(polymorphicStructureList, 1);
} else {
polymorphicStructureList = stubInfo->u.getByIdSelfList.structureList;
listIndex = stubInfo->u.getByIdSelfList.listSize;
- stubInfo->u.getByIdSelfList.listSize++;
}
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ stubInfo->u.getByIdSelfList.listSize++;
+ JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, polymorphicStructureList, listIndex, asCell(baseValue)->structure(), ident, slot, slot.cachedOffset());
- JIT::compileGetByIdSelfList(callFrame->scopeChain()->globalData, codeBlock, stubInfo, polymorphicStructureList, listIndex, asCell(baseValue)->structure(), slot.cachedOffset());
-
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
+ }
} else
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_generic));
return JSValue::encode(result);
case access_get_by_id_proto_list:
prototypeStructureList = stubInfo->u.getByIdProtoList.structureList;
listIndex = stubInfo->u.getByIdProtoList.listSize;
- stubInfo->u.getByIdProtoList.listSize++;
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE)
+ stubInfo->u.getByIdProtoList.listSize++;
break;
default:
ASSERT_NOT_REACHED();
}
- ASSERT(listIndex < POLYMORPHIC_LIST_CACHE_SIZE);
+ ASSERT(listIndex <= POLYMORPHIC_LIST_CACHE_SIZE);
return prototypeStructureList;
}
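+// Called from the generated property-access stubs when the cached property is
+// a getter: invoke the getter function with the original base object as the
+// this value, propagating any exception back through the throw trampoline.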
+DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_getter_stub)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
+ GetterSetter* getterSetter = asGetterSetter(stackFrame.args[0].jsObject());
+ if (!getterSetter->getter())
+ return JSValue::encode(jsUndefined());
+ JSObject* getter = asObject(getterSetter->getter());
+ CallData callData;
+ CallType callType = getter->getCallData(callData);
+ JSValue result = call(callFrame, getter, callType, callData, stackFrame.args[1].jsObject(), ArgList());
+ if (callFrame->hadException())
+ returnToThrowTrampoline(&callFrame->globalData(), stackFrame.args[2].returnAddress(), STUB_RETURN_ADDRESS);
+
+ return JSValue::encode(result);
+}
+
+DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_custom_stub)
+{
+ STUB_INIT_STACK_FRAME(stackFrame);
+ CallFrame* callFrame = stackFrame.callFrame;
+ JSObject* slotBase = stackFrame.args[0].jsObject();
+ PropertySlot::GetValueFunc getter = reinterpret_cast<PropertySlot::GetValueFunc>(stackFrame.args[1].asPointer);
+ const Identifier& ident = stackFrame.args[2].identifier();
+ JSValue result = getter(callFrame, slotBase, ident);
+ if (callFrame->hadException())
+ returnToThrowTrampoline(&callFrame->globalData(), stackFrame.args[3].returnAddress(), STUB_RETURN_ADDRESS);
+
+ return JSValue::encode(result);
+}
+
DEFINE_STUB_FUNCTION(EncodedJSValue, op_get_by_id_proto_list)
{
STUB_INIT_STACK_FRAME(stackFrame);
int listIndex;
PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(stubInfo, listIndex);
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), propertyName, slot, offset);
- JIT::compileGetByIdProtoList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, slotBaseObject->structure(), offset);
-
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ }
} else if (size_t count = normalizePrototypeChain(callFrame, baseValue, slot.slotBase(), propertyName, offset)) {
ASSERT(!asCell(baseValue)->structure()->isDictionary());
int listIndex;
PolymorphicAccessStructureList* prototypeStructureList = getPolymorphicAccessStructureListSlot(stubInfo, listIndex);
+
+ if (listIndex < POLYMORPHIC_LIST_CACHE_SIZE) {
+ StructureChain* protoChain = structure->prototypeChain(callFrame);
+ JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, propertyName, slot, offset);
- StructureChain* protoChain = structure->prototypeChain(callFrame);
- JIT::compileGetByIdChainList(callFrame->scopeChain()->globalData, callFrame, codeBlock, stubInfo, prototypeStructureList, listIndex, structure, protoChain, count, offset);
-
- if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
- ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ if (listIndex == (POLYMORPHIC_LIST_CACHE_SIZE - 1))
+ ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_list_full));
+ }
} else
ctiPatchCallByReturnAddress(codeBlock, STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_id_proto_fail));
if (argCount > newCodeBlock->m_numParameters) {
size_t numParameters = newCodeBlock->m_numParameters;
Register* r = callFrame->registers() + numParameters;
+ Register* newEnd = r + newCodeBlock->m_numCalleeRegisters;
+ if (!stackFrame.registerFile->grow(newEnd)) {
+ // Rewind to the previous call frame because op_call already optimistically
+ // moved the call frame forward.
+ stackFrame.callFrame = oldCallFrame;
+ throwStackOverflowError(oldCallFrame, stackFrame.globalData, stackFrame.args[1].returnAddress(), STUB_RETURN_ADDRESS);
+ RETURN_POINTER_PAIR(0, 0);
+ }
Register* argv = r - RegisterFile::CallFrameHeaderSize - numParameters - argCount;
for (size_t i = 0; i < numParameters; ++i)
callFrame->setCallerFrame(oldCallFrame);
}
+ ASSERT((void*)callFrame <= stackFrame.registerFile->end());
RETURN_POINTER_PAIR(callee, callFrame);
}
JSValue baseValue = stackFrame.args[0].jsValue();
JSValue subscript = stackFrame.args[1].jsValue();
- JSValue result;
+ if (LIKELY(baseValue.isCell() && subscript.isString())) {
+ Identifier propertyName(callFrame, asString(subscript)->value(callFrame));
+ PropertySlot slot(asCell(baseValue));
+ // JSString::value may have thrown, but we shouldn't find a property with a null identifier,
+ // so we should miss this case and wind up in the CHECK_FOR_EXCEPTION_AT_END, below.
+ if (asCell(baseValue)->fastGetOwnPropertySlot(callFrame, propertyName, slot)) {
+ JSValue result = slot.getValue(callFrame, propertyName);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
+ }
+ }
- if (LIKELY(subscript.isUInt32())) {
+ if (subscript.isUInt32()) {
uint32_t i = subscript.asUInt32();
- if (isJSArray(globalData, baseValue)) {
- JSArray* jsArray = asArray(baseValue);
- if (jsArray->canGetIndex(i))
- result = jsArray->getIndex(i);
- else
- result = jsArray->JSArray::get(callFrame, i);
- } else if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i)) {
- // All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
+ if (isJSString(globalData, baseValue) && asString(baseValue)->canGetIndex(i)) {
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_string));
- result = asString(baseValue)->getIndex(callFrame, i);
- } else if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
+ JSValue result = asString(baseValue)->getIndex(callFrame, i);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
+ }
+ if (isJSByteArray(globalData, baseValue) && asByteArray(baseValue)->canAccessIndex(i)) {
// All fast byte array accesses are safe from exceptions so return immediately to avoid exception checks.
ctiPatchCallByReturnAddress(callFrame->codeBlock(), STUB_RETURN_ADDRESS, FunctionPtr(cti_op_get_by_val_byte_array));
return JSValue::encode(asByteArray(baseValue)->getIndex(callFrame, i));
- } else
- result = baseValue.get(callFrame, i);
- } else {
- Identifier property(callFrame, subscript.toString(callFrame));
- result = baseValue.get(callFrame, property);
+ }
+ JSValue result = baseValue.get(callFrame, i);
+ CHECK_FOR_EXCEPTION();
+ return JSValue::encode(result);
}
-
+
+ Identifier property(callFrame, subscript.toString(callFrame));
+ JSValue result = baseValue.get(callFrame, property);
CHECK_FOR_EXCEPTION_AT_END();
return JSValue::encode(result);
}
PropertySlot slot(globalObject);
if (globalObject->getPropertySlot(callFrame, ident, slot)) {
JSValue result = slot.getValue(callFrame, ident);
- if (slot.isCacheable() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
+ if (slot.isCacheableValue() && !globalObject->structure()->isUncacheableDictionary() && slot.slotBase() == globalObject) {
GlobalResolveInfo& globalResolveInfo = callFrame->codeBlock()->globalResolveInfo(globalResolveInfoIndex);
if (globalResolveInfo.structure)
globalResolveInfo.structure->deref();
#endif // USE(JSVALUE32_64)
}
-#if USE(JSVALUE32_64)
-
DEFINE_STUB_FUNCTION(int, op_eq_strings)
{
+#if USE(JSVALUE32_64)
STUB_INIT_STACK_FRAME(stackFrame);
JSString* string1 = stackFrame.args[0].jsString();
ASSERT(string1->isString());
ASSERT(string2->isString());
return string1->value(stackFrame.callFrame) == string2->value(stackFrame.callFrame);
-}
-
+#else
+ UNUSED_PARAM(args);
+ ASSERT_NOT_REACHED();
+ return 0;
#endif
+}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_lshift)
{
JSObject* base = stackFrame.args[0].jsObject();
JSString* property = stackFrame.args[1].jsString();
- return base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
+ int result = base->hasProperty(stackFrame.callFrame, Identifier(stackFrame.callFrame, property->value(stackFrame.callFrame)));
+ CHECK_FOR_EXCEPTION_AT_END();
+ return result;
}
DEFINE_STUB_FUNCTION(JSObject*, op_push_scope)
JSValue src1 = stackFrame.args[0].jsValue();
JSValue src2 = stackFrame.args[1].jsValue();
- return JSValue::encode(jsBoolean(JSValue::strictEqual(stackFrame.callFrame, src1, src2)));
+ bool result = JSValue::strictEqual(stackFrame.callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION_AT_END();
+ return JSValue::encode(jsBoolean(result));
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_to_primitive)
JSValue src1 = stackFrame.args[0].jsValue();
JSValue src2 = stackFrame.args[1].jsValue();
- return JSValue::encode(jsBoolean(!JSValue::strictEqual(stackFrame.callFrame, src1, src2)));
+ bool result = !JSValue::strictEqual(stackFrame.callFrame, src1, src2);
+ CHECK_FOR_EXCEPTION_AT_END();
+ return JSValue::encode(jsBoolean(result));
}
DEFINE_STUB_FUNCTION(EncodedJSValue, op_to_jsnumber)
if (scrutinee.isString()) {
UString::Rep* value = asString(scrutinee)->value(callFrame).rep();
- if (value->size() == 1)
- result = codeBlock->characterSwitchJumpTable(tableIndex).ctiForValue(value->data()[0]).executableAddress();
+ if (value->length() == 1)
+ result = codeBlock->characterSwitchJumpTable(tableIndex).ctiForValue(value->characters()[0]).executableAddress();
}
+ CHECK_FOR_EXCEPTION_AT_END();
return result;
}
result = codeBlock->stringSwitchJumpTable(tableIndex).ctiForValue(value).executableAddress();
}
+ CHECK_FOR_EXCEPTION_AT_END();
return result;
}
return JSValue::encode(stackFrame.args[0].jsValue().toObject(callFrame));
}
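+// Lazily creates the specialized thunk for 'generator' and caches it in
+// m_thunkMap, so each ThunkGenerator is run at most once per JITThunks instance.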
+NativeExecutable* JITThunks::specializedThunk(JSGlobalData* globalData, ThunkGenerator generator)
+{
+ std::pair<ThunkMap::iterator, bool> entry = m_thunkMap.add(generator, 0);
+ if (!entry.second)
+ return entry.first->second.get();
+ entry.first->second = generator(globalData, m_executablePool.get());
+ return entry.first->second.get();
+}
+
} // namespace JSC
#endif // ENABLE(JIT)
#ifndef JITStubs_h
#define JITStubs_h
-#include <wtf/Platform.h>
-
#include "MacroAssemblerCodeRef.h"
#include "Register.h"
+#include "ThunkGenerators.h"
+#include <wtf/HashMap.h>
#if ENABLE(JIT)
class FunctionExecutable;
class Identifier;
class JSGlobalData;
- class JSGlobalData;
+ class JSGlobalObject;
class JSObject;
class JSPropertyNameIterator;
class JSValue;
class JSValueEncodedAsPointer;
+ class NativeExecutable;
class Profiler;
class PropertySlot;
class PutPropertySlot;
class RegisterFile;
- class JSGlobalObject;
class RegExp;
union JITStubArg {
JSString* jsString() { return static_cast<JSString*>(asPointer); }
ReturnAddressPtr returnAddress() { return ReturnAddressPtr(asPointer); }
};
+
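+ // Bundles the trampolines produced by JIT::compileCTIMachineTrampolines() so
+ // JITThunks can hold and expose them through a single member.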
+ struct TrampolineStructure {
+ MacroAssemblerCodePtr ctiStringLengthTrampoline;
+ MacroAssemblerCodePtr ctiVirtualCallLink;
+ MacroAssemblerCodePtr ctiVirtualCall;
+ RefPtr<NativeExecutable> ctiNativeCallThunk;
+ MacroAssemblerCodePtr ctiSoftModulo;
+ };
#if CPU(X86_64)
struct JITStackFrame {
ReturnAddressPtr* returnAddressSlot() { return reinterpret_cast<ReturnAddressPtr*>(this) - 1; }
};
#elif CPU(X86)
-#if COMPILER(MSVC)
+#if COMPILER(MSVC) || (OS(WINDOWS) && COMPILER(GCC))
#pragma pack(push)
#pragma pack(4)
-#endif // COMPILER(MSVC)
+#endif // COMPILER(MSVC) || (OS(WINDOWS) && COMPILER(GCC))
struct JITStackFrame {
void* reserved; // Unused
JITStubArg args[6];
// When JIT code makes a call, it pushes its return address just below the rest of the stack.
ReturnAddressPtr* returnAddressSlot() { return reinterpret_cast<ReturnAddressPtr*>(this) - 1; }
};
-#if COMPILER(MSVC)
+#if COMPILER(MSVC) || (OS(WINDOWS) && COMPILER(GCC))
#pragma pack(pop)
-#endif // COMPILER(MSVC)
+#endif // COMPILER(MSVC) || (OS(WINDOWS) && COMPILER(GCC))
#elif CPU(ARM_THUMB2)
struct JITStackFrame {
void* reserved; // Unused
JSGlobalData* globalData;
// When JIT code makes a call, it pushes its return address just below the rest of the stack.
+ ReturnAddressPtr* returnAddressSlot() { return &thunkReturnAddress; }
+ };
+#elif CPU(MIPS)
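+ // This layout must match the hard-coded offsets used by the MIPS trampolines
+ // and stub glue (e.g. 28 for preservedGP, 48 for thunkReturnAddress); it is
+ // cross-checked by the ASSERTs in JITThunks::JITThunks().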
+ struct JITStackFrame {
+ void* reserved; // Unused
+ JITStubArg args[6];
+
+ void* preservedGP; // store GP when using PIC code
+ void* preservedS0;
+ void* preservedS1;
+ void* preservedS2;
+ void* preservedReturnAddress;
+
+ ReturnAddressPtr thunkReturnAddress;
+
+ // These arguments are passed in a1..a3 (a0 contained the entry code pointer, which is not preserved)
+ RegisterFile* registerFile;
+ CallFrame* callFrame;
+ JSValue* exception;
+
+ // These arguments are passed on the stack.
+ Profiler** enabledProfilerReference;
+ JSGlobalData* globalData;
+
ReturnAddressPtr* returnAddressSlot() { return &thunkReturnAddress; }
};
#else
class JITThunks {
public:
JITThunks(JSGlobalData*);
+ ~JITThunks();
static void tryCacheGetByID(CallFrame*, CodeBlock*, ReturnAddressPtr returnAddress, JSValue baseValue, const Identifier& propertyName, const PropertySlot&, StructureStubInfo* stubInfo);
- static void tryCachePutByID(CallFrame*, CodeBlock*, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot&, StructureStubInfo* stubInfo);
+ static void tryCachePutByID(CallFrame*, CodeBlock*, ReturnAddressPtr returnAddress, JSValue baseValue, const PutPropertySlot&, StructureStubInfo* stubInfo, bool direct);
- MacroAssemblerCodePtr ctiStringLengthTrampoline() { return m_ctiStringLengthTrampoline; }
- MacroAssemblerCodePtr ctiVirtualCallLink() { return m_ctiVirtualCallLink; }
- MacroAssemblerCodePtr ctiVirtualCall() { return m_ctiVirtualCall; }
- MacroAssemblerCodePtr ctiNativeCallThunk() { return m_ctiNativeCallThunk; }
+ MacroAssemblerCodePtr ctiStringLengthTrampoline() { return m_trampolineStructure.ctiStringLengthTrampoline; }
+ MacroAssemblerCodePtr ctiVirtualCallLink() { return m_trampolineStructure.ctiVirtualCallLink; }
+ MacroAssemblerCodePtr ctiVirtualCall() { return m_trampolineStructure.ctiVirtualCall; }
+ NativeExecutable* ctiNativeCallThunk() { return m_trampolineStructure.ctiNativeCallThunk.get(); }
+ MacroAssemblerCodePtr ctiSoftModulo() { return m_trampolineStructure.ctiSoftModulo; }
+ NativeExecutable* specializedThunk(JSGlobalData* globalData, ThunkGenerator generator);
private:
+ typedef HashMap<ThunkGenerator, RefPtr<NativeExecutable> > ThunkMap;
+ ThunkMap m_thunkMap;
RefPtr<ExecutablePool> m_executablePool;
- MacroAssemblerCodePtr m_ctiStringLengthTrampoline;
- MacroAssemblerCodePtr m_ctiVirtualCallLink;
- MacroAssemblerCodePtr m_ctiVirtualCall;
- MacroAssemblerCodePtr m_ctiNativeCallThunk;
+ TrampolineStructure m_trampolineStructure;
};
extern "C" {
EncodedJSValue JIT_STUB cti_op_get_by_id_array_fail(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_get_by_id_generic(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_get_by_id_method_check(STUB_ARGS_DECLARATION);
+ EncodedJSValue JIT_STUB cti_op_get_by_id_getter_stub(STUB_ARGS_DECLARATION);
+ EncodedJSValue JIT_STUB cti_op_get_by_id_custom_stub(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_get_by_id_proto_fail(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_get_by_id_proto_list(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_get_by_id_proto_list_full(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_resolve(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_resolve_base(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_resolve_global(STUB_ARGS_DECLARATION);
+ EncodedJSValue JIT_STUB cti_op_resolve_global_dynamic(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_resolve_skip(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_resolve_with_base(STUB_ARGS_DECLARATION);
EncodedJSValue JIT_STUB cti_op_rshift(STUB_ARGS_DECLARATION);
JSPropertyNameIterator* JIT_STUB cti_op_get_pnames(STUB_ARGS_DECLARATION);
VoidPtrPair JIT_STUB cti_op_call_arityCheck(STUB_ARGS_DECLARATION);
int JIT_STUB cti_op_eq(STUB_ARGS_DECLARATION);
-#if USE(JSVALUE32_64)
int JIT_STUB cti_op_eq_strings(STUB_ARGS_DECLARATION);
-#endif
int JIT_STUB cti_op_jless(STUB_ARGS_DECLARATION);
int JIT_STUB cti_op_jlesseq(STUB_ARGS_DECLARATION);
int JIT_STUB cti_op_jtrue(STUB_ARGS_DECLARATION);
void JIT_STUB cti_op_put_by_id(STUB_ARGS_DECLARATION);
void JIT_STUB cti_op_put_by_id_fail(STUB_ARGS_DECLARATION);
void JIT_STUB cti_op_put_by_id_generic(STUB_ARGS_DECLARATION);
+ void JIT_STUB cti_op_put_by_id_direct(STUB_ARGS_DECLARATION);
+ void JIT_STUB cti_op_put_by_id_direct_fail(STUB_ARGS_DECLARATION);
+ void JIT_STUB cti_op_put_by_id_direct_generic(STUB_ARGS_DECLARATION);
void JIT_STUB cti_op_put_by_index(STUB_ARGS_DECLARATION);
void JIT_STUB cti_op_put_by_val(STUB_ARGS_DECLARATION);
void JIT_STUB cti_op_put_by_val_byte_array(STUB_ARGS_DECLARATION);
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef JSInterfaceJIT_h
+#define JSInterfaceJIT_h
+
+#include "JITCode.h"
+#include "JITStubs.h"
+#include "JSImmediate.h"
+#include "MacroAssembler.h"
+#include "RegisterFile.h"
+#include <wtf/AlwaysInline.h>
+#include <wtf/Vector.h>
+
+namespace JSC {
+ class JSInterfaceJIT : public MacroAssembler {
+ public:
+ // NOTES:
+ //
+ // regT0 has two special meanings. The return value from a stub
+ // call will always be in regT0, and by default (unless
+ // a register is specified) emitPutVirtualRegister() will store
+ // the value from regT0.
+ //
+ // regT3 is required to be callee-preserved.
+ //
+ // tempRegister2 has no such dependencies. It is important that
+ // on x86/x86-64 it is ecx for performance reasons, since the
+ // MacroAssembler will need to plant register swaps if it is not -
+ // however the code will still function correctly.
+#if CPU(X86_64)
+ static const RegisterID returnValueRegister = X86Registers::eax;
+ static const RegisterID cachedResultRegister = X86Registers::eax;
+ static const RegisterID firstArgumentRegister = X86Registers::edi;
+
+ static const RegisterID timeoutCheckRegister = X86Registers::r12;
+ static const RegisterID callFrameRegister = X86Registers::r13;
+ static const RegisterID tagTypeNumberRegister = X86Registers::r14;
+ static const RegisterID tagMaskRegister = X86Registers::r15;
+
+ static const RegisterID regT0 = X86Registers::eax;
+ static const RegisterID regT1 = X86Registers::edx;
+ static const RegisterID regT2 = X86Registers::ecx;
+ static const RegisterID regT3 = X86Registers::ebx;
+
+ static const FPRegisterID fpRegT0 = X86Registers::xmm0;
+ static const FPRegisterID fpRegT1 = X86Registers::xmm1;
+ static const FPRegisterID fpRegT2 = X86Registers::xmm2;
+ static const FPRegisterID fpRegT3 = X86Registers::xmm3;
+#elif CPU(X86)
+ static const RegisterID returnValueRegister = X86Registers::eax;
+ static const RegisterID cachedResultRegister = X86Registers::eax;
+ // On x86 we always use fastcall conventions, but on
+ // OS X it might make more sense to just use regparm.
+ static const RegisterID firstArgumentRegister = X86Registers::ecx;
+
+ static const RegisterID timeoutCheckRegister = X86Registers::esi;
+ static const RegisterID callFrameRegister = X86Registers::edi;
+
+ static const RegisterID regT0 = X86Registers::eax;
+ static const RegisterID regT1 = X86Registers::edx;
+ static const RegisterID regT2 = X86Registers::ecx;
+ static const RegisterID regT3 = X86Registers::ebx;
+
+ static const FPRegisterID fpRegT0 = X86Registers::xmm0;
+ static const FPRegisterID fpRegT1 = X86Registers::xmm1;
+ static const FPRegisterID fpRegT2 = X86Registers::xmm2;
+ static const FPRegisterID fpRegT3 = X86Registers::xmm3;
+#elif CPU(ARM_THUMB2)
+ static const RegisterID returnValueRegister = ARMRegisters::r0;
+ static const RegisterID cachedResultRegister = ARMRegisters::r0;
+ static const RegisterID firstArgumentRegister = ARMRegisters::r0;
+
+ static const RegisterID regT0 = ARMRegisters::r0;
+ static const RegisterID regT1 = ARMRegisters::r1;
+ static const RegisterID regT2 = ARMRegisters::r2;
+ static const RegisterID regT3 = ARMRegisters::r4;
+
+ static const RegisterID callFrameRegister = ARMRegisters::r5;
+ static const RegisterID timeoutCheckRegister = ARMRegisters::r6;
+
+ static const FPRegisterID fpRegT0 = ARMRegisters::d0;
+ static const FPRegisterID fpRegT1 = ARMRegisters::d1;
+ static const FPRegisterID fpRegT2 = ARMRegisters::d2;
+ static const FPRegisterID fpRegT3 = ARMRegisters::d3;
+#elif CPU(ARM_TRADITIONAL)
+ static const RegisterID returnValueRegister = ARMRegisters::r0;
+ static const RegisterID cachedResultRegister = ARMRegisters::r0;
+ static const RegisterID firstArgumentRegister = ARMRegisters::r0;
+
+ static const RegisterID timeoutCheckRegister = ARMRegisters::r5;
+ static const RegisterID callFrameRegister = ARMRegisters::r4;
+
+ static const RegisterID regT0 = ARMRegisters::r0;
+ static const RegisterID regT1 = ARMRegisters::r1;
+ static const RegisterID regT2 = ARMRegisters::r2;
+ // Callee preserved
+ static const RegisterID regT3 = ARMRegisters::r7;
+
+ static const RegisterID regS0 = ARMRegisters::S0;
+ // Callee preserved
+ static const RegisterID regS1 = ARMRegisters::S1;
+
+ static const RegisterID regStackPtr = ARMRegisters::sp;
+ static const RegisterID regLink = ARMRegisters::lr;
+
+ static const FPRegisterID fpRegT0 = ARMRegisters::d0;
+ static const FPRegisterID fpRegT1 = ARMRegisters::d1;
+ static const FPRegisterID fpRegT2 = ARMRegisters::d2;
+ static const FPRegisterID fpRegT3 = ARMRegisters::d3;
+#elif CPU(MIPS)
+ static const RegisterID returnValueRegister = MIPSRegisters::v0;
+ static const RegisterID cachedResultRegister = MIPSRegisters::v0;
+ static const RegisterID firstArgumentRegister = MIPSRegisters::a0;
+
+ // regT0 must be v0 for returning a 32-bit value.
+ static const RegisterID regT0 = MIPSRegisters::v0;
+
+ // regT1 must be v1 for returning a pair of 32-bit values.
+ static const RegisterID regT1 = MIPSRegisters::v1;
+
+ static const RegisterID regT2 = MIPSRegisters::t4;
+
+ // regT3 must be saved in the callee, so use an S register.
+ static const RegisterID regT3 = MIPSRegisters::s2;
+
+ static const RegisterID callFrameRegister = MIPSRegisters::s0;
+ static const RegisterID timeoutCheckRegister = MIPSRegisters::s1;
+
+ static const FPRegisterID fpRegT0 = MIPSRegisters::f4;
+ static const FPRegisterID fpRegT1 = MIPSRegisters::f6;
+ static const FPRegisterID fpRegT2 = MIPSRegisters::f8;
+ static const FPRegisterID fpRegT3 = MIPSRegisters::f10;
+#else
+#error "JIT not supported on this platform."
+#endif
+
+ inline Jump emitLoadJSCell(unsigned virtualRegisterIndex, RegisterID payload);
+ inline Jump emitLoadInt32(unsigned virtualRegisterIndex, RegisterID dst);
+ inline Jump emitLoadDouble(unsigned virtualRegisterIndex, FPRegisterID dst, RegisterID scratch);
+
+#if USE(JSVALUE32_64)
+ inline Jump emitJumpIfNotJSCell(unsigned virtualRegisterIndex);
+ inline Address tagFor(unsigned index, RegisterID base = callFrameRegister);
+#endif
+
+#if USE(JSVALUE32) || USE(JSVALUE64)
+ Jump emitJumpIfImmediateNumber(RegisterID reg);
+ Jump emitJumpIfNotImmediateNumber(RegisterID reg);
+ void emitFastArithImmToInt(RegisterID reg);
+#endif
+
+ inline Address payloadFor(unsigned index, RegisterID base = callFrameRegister);
+ inline Address addressFor(unsigned index, RegisterID base = callFrameRegister);
+ };
+
+ struct ThunkHelpers {
+ static unsigned stringImplDataOffset() { return WebCore::StringImpl::dataOffset(); }
+ static unsigned jsStringLengthOffset() { return OBJECT_OFFSETOF(JSString, m_length); }
+ static unsigned jsStringValueOffset() { return OBJECT_OFFSETOF(JSString, m_value); }
+ };
+
+#if USE(JSVALUE32_64)
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadJSCell(unsigned virtualRegisterIndex, RegisterID payload)
+ {
+ loadPtr(payloadFor(virtualRegisterIndex), payload);
+ return emitJumpIfNotJSCell(virtualRegisterIndex);
+ }
+
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitJumpIfNotJSCell(unsigned virtualRegisterIndex)
+ {
+ ASSERT(static_cast<int>(virtualRegisterIndex) < FirstConstantRegisterIndex);
+ return branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::CellTag));
+ }
+
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadInt32(unsigned virtualRegisterIndex, RegisterID dst)
+ {
+ ASSERT(static_cast<int>(virtualRegisterIndex) < FirstConstantRegisterIndex);
+ loadPtr(payloadFor(virtualRegisterIndex), dst);
+ return branch32(NotEqual, tagFor(virtualRegisterIndex), Imm32(JSValue::Int32Tag));
+ }
+
+ inline JSInterfaceJIT::Address JSInterfaceJIT::tagFor(unsigned virtualRegisterIndex, RegisterID base)
+ {
+ ASSERT(static_cast<int>(virtualRegisterIndex) < FirstConstantRegisterIndex);
+ return Address(base, (virtualRegisterIndex * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.tag));
+ }
+
+ inline JSInterfaceJIT::Address JSInterfaceJIT::payloadFor(unsigned virtualRegisterIndex, RegisterID base)
+ {
+ ASSERT(static_cast<int>(virtualRegisterIndex) < FirstConstantRegisterIndex);
+ return Address(base, (virtualRegisterIndex * sizeof(Register)) + OBJECT_OFFSETOF(JSValue, u.asBits.payload));
+ }
+
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadDouble(unsigned virtualRegisterIndex, FPRegisterID dst, RegisterID scratch)
+ {
+ ASSERT(static_cast<int>(virtualRegisterIndex) < FirstConstantRegisterIndex);
+ loadPtr(tagFor(virtualRegisterIndex), scratch);
+ Jump isDouble = branch32(Below, scratch, Imm32(JSValue::LowestTag));
+ Jump notInt = branch32(NotEqual, scratch, Imm32(JSValue::Int32Tag));
+ loadPtr(payloadFor(virtualRegisterIndex), scratch);
+ convertInt32ToDouble(scratch, dst);
+ Jump done = jump();
+ isDouble.link(this);
+ loadDouble(addressFor(virtualRegisterIndex), dst);
+ done.link(this);
+ return notInt;
+ }
+#endif
+
+#if USE(JSVALUE64)
+ ALWAYS_INLINE JSInterfaceJIT::Jump JSInterfaceJIT::emitJumpIfImmediateNumber(RegisterID reg)
+ {
+ return branchTestPtr(NonZero, reg, tagTypeNumberRegister);
+ }
+ ALWAYS_INLINE JSInterfaceJIT::Jump JSInterfaceJIT::emitJumpIfNotImmediateNumber(RegisterID reg)
+ {
+ return branchTestPtr(Zero, reg, tagTypeNumberRegister);
+ }
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadJSCell(unsigned virtualRegisterIndex, RegisterID dst)
+ {
+ loadPtr(addressFor(virtualRegisterIndex), dst);
+ return branchTestPtr(NonZero, dst, tagMaskRegister);
+ }
+
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadInt32(unsigned virtualRegisterIndex, RegisterID dst)
+ {
+ loadPtr(addressFor(virtualRegisterIndex), dst);
+ Jump result = branchPtr(Below, dst, tagTypeNumberRegister);
+ zeroExtend32ToPtr(dst, dst);
+ return result;
+ }
+
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadDouble(unsigned virtualRegisterIndex, FPRegisterID dst, RegisterID scratch)
+ {
+ loadPtr(addressFor(virtualRegisterIndex), scratch);
+ Jump notNumber = emitJumpIfNotImmediateNumber(scratch);
+ Jump notInt = branchPtr(Below, scratch, tagTypeNumberRegister);
+ convertInt32ToDouble(scratch, dst);
+ Jump done = jump();
+ notInt.link(this);
+ addPtr(tagTypeNumberRegister, scratch);
+ movePtrToDouble(scratch, dst);
+ done.link(this);
+ return notNumber;
+ }
+
+ ALWAYS_INLINE void JSInterfaceJIT::emitFastArithImmToInt(RegisterID)
+ {
+ }
+
+#endif
+
+#if USE(JSVALUE32)
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadJSCell(unsigned virtualRegisterIndex, RegisterID dst)
+ {
+ loadPtr(addressFor(virtualRegisterIndex), dst);
+ return branchTest32(NonZero, dst, Imm32(JSImmediate::TagMask));
+ }
+
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadInt32(unsigned virtualRegisterIndex, RegisterID dst)
+ {
+ loadPtr(addressFor(virtualRegisterIndex), dst);
+ Jump result = branchTest32(Zero, dst, Imm32(JSImmediate::TagTypeNumber));
+ rshift32(Imm32(JSImmediate::IntegerPayloadShift), dst);
+ return result;
+ }
+
+ inline JSInterfaceJIT::Jump JSInterfaceJIT::emitLoadDouble(unsigned, FPRegisterID, RegisterID)
+ {
+ ASSERT_NOT_REACHED();
+ return jump();
+ }
+
+ ALWAYS_INLINE void JSInterfaceJIT::emitFastArithImmToInt(RegisterID reg)
+ {
+ rshift32(Imm32(JSImmediate::IntegerPayloadShift), reg);
+ }
+
+#endif
+
+#if !USE(JSVALUE32_64)
+ inline JSInterfaceJIT::Address JSInterfaceJIT::payloadFor(unsigned virtualRegisterIndex, RegisterID base)
+ {
+ ASSERT(static_cast<int>(virtualRegisterIndex) < FirstConstantRegisterIndex);
+ return addressFor(virtualRegisterIndex, base);
+ }
+#endif
+
+ inline JSInterfaceJIT::Address JSInterfaceJIT::addressFor(unsigned virtualRegisterIndex, RegisterID base)
+ {
+ ASSERT(static_cast<int>(virtualRegisterIndex) < FirstConstantRegisterIndex);
+ return Address(base, (virtualRegisterIndex * sizeof(Register)));
+ }
+
+}
+
+#endif // JSInterfaceJIT_h
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef SpecializedThunkJIT_h
+#define SpecializedThunkJIT_h
+
+#if ENABLE(JIT)
+
+#include "Executable.h"
+#include "JSInterfaceJIT.h"
+#include "LinkBuffer.h"
+
+namespace JSC {
+
+ class SpecializedThunkJIT : public JSInterfaceJIT {
+ public:
+ static const int ThisArgument = -1;
+ SpecializedThunkJIT(int expectedArgCount, JSGlobalData* globalData, ExecutablePool* pool)
+ : m_expectedArgCount(expectedArgCount)
+ , m_globalData(globalData)
+ , m_pool(pool)
+ {
+ // Check that we have the expected number of arguments
+ m_failures.append(branch32(NotEqual, Address(callFrameRegister, RegisterFile::ArgumentCount * (int)sizeof(Register)), Imm32(expectedArgCount + 1)));
+ }
+
+ void loadDoubleArgument(int argument, FPRegisterID dst, RegisterID scratch)
+ {
+ unsigned src = argumentToVirtualRegister(argument);
+ m_failures.append(emitLoadDouble(src, dst, scratch));
+ }
+
+ void loadCellArgument(int argument, RegisterID dst)
+ {
+ unsigned src = argumentToVirtualRegister(argument);
+ m_failures.append(emitLoadJSCell(src, dst));
+ }
+
+ void loadJSStringArgument(int argument, RegisterID dst)
+ {
+ loadCellArgument(argument, dst);
+ m_failures.append(branchPtr(NotEqual, Address(dst, 0), ImmPtr(m_globalData->jsStringVPtr)));
+ m_failures.append(branchTest32(NonZero, Address(dst, OBJECT_OFFSETOF(JSString, m_fiberCount))));
+ }
+
+ void loadInt32Argument(int argument, RegisterID dst, Jump& failTarget)
+ {
+ unsigned src = argumentToVirtualRegister(argument);
+ failTarget = emitLoadInt32(src, dst);
+ }
+
+ void loadInt32Argument(int argument, RegisterID dst)
+ {
+ Jump conversionFailed;
+ loadInt32Argument(argument, dst, conversionFailed);
+ m_failures.append(conversionFailed);
+ }
+
+ void appendFailure(const Jump& failure)
+ {
+ m_failures.append(failure);
+ }
+
+ void returnJSValue(RegisterID src)
+ {
+ if (src != regT0)
+ move(src, regT0);
+ loadPtr(Address(callFrameRegister, RegisterFile::CallerFrame * (int)sizeof(Register)), callFrameRegister);
+ ret();
+ }
+
+ void returnDouble(FPRegisterID src)
+ {
+#if USE(JSVALUE64)
+ moveDoubleToPtr(src, regT0);
+ subPtr(tagTypeNumberRegister, regT0);
+#elif USE(JSVALUE32_64)
+ storeDouble(src, Address(stackPointerRegister, -(int)sizeof(double)));
+ loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(JSValue, u.asBits.tag) - sizeof(double)), regT1);
+ loadPtr(Address(stackPointerRegister, OBJECT_OFFSETOF(JSValue, u.asBits.payload) - sizeof(double)), regT0);
+#else
+ UNUSED_PARAM(src);
+ ASSERT_NOT_REACHED();
+ m_failures.append(jump());
+#endif
+ loadPtr(Address(callFrameRegister, RegisterFile::CallerFrame * (int)sizeof(Register)), callFrameRegister);
+ ret();
+ }
+
+ void returnInt32(RegisterID src)
+ {
+ if (src != regT0)
+ move(src, regT0);
+ tagReturnAsInt32();
+ loadPtr(Address(callFrameRegister, RegisterFile::CallerFrame * (int)sizeof(Register)), callFrameRegister);
+ ret();
+ }
+
+ void returnJSCell(RegisterID src)
+ {
+ if (src != regT0)
+ move(src, regT0);
+ tagReturnAsJSCell();
+ loadPtr(Address(callFrameRegister, RegisterFile::CallerFrame * (int)sizeof(Register)), callFrameRegister);
+ ret();
+ }
+
+ PassRefPtr<NativeExecutable> finalize()
+ {
+ LinkBuffer patchBuffer(this, m_pool.get());
+ patchBuffer.link(m_failures, CodeLocationLabel(m_globalData->jitStubs->ctiNativeCallThunk()->generatedJITCode().addressForCall()));
+ return adoptRef(new NativeExecutable(patchBuffer.finalizeCode()));
+ }
+
+ private:
+ int argumentToVirtualRegister(unsigned argument)
+ {
+ return -static_cast<int>(RegisterFile::CallFrameHeaderSize + (m_expectedArgCount - argument));
+ }
+
+ void tagReturnAsInt32()
+ {
+#if USE(JSVALUE64)
+ orPtr(tagTypeNumberRegister, regT0);
+#elif USE(JSVALUE32_64)
+ move(Imm32(JSValue::Int32Tag), regT1);
+#else
+ signExtend32ToPtr(regT0, regT0);
+ // If we can't tag the result, give up and jump to the slow case
+ m_failures.append(branchAddPtr(Overflow, regT0, regT0));
+ addPtr(Imm32(JSImmediate::TagTypeNumber), regT0);
+#endif
+ }
+
+ void tagReturnAsJSCell()
+ {
+#if USE(JSVALUE32_64)
+ move(Imm32(JSValue::CellTag), regT1);
+#endif
+ }
+
+ int m_expectedArgCount;
+ JSGlobalData* m_globalData;
+ RefPtr<ExecutablePool> m_pool;
+ MacroAssembler::JumpList m_failures;
+ };
+
+}
+
+#endif // ENABLE(JIT)
+
+#endif // SpecializedThunkJIT_h
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "ThunkGenerators.h"
+
+#include "CodeBlock.h"
+#include "SpecializedThunkJIT.h"
+#include <wtf/text/StringImpl.h>
+
+#if ENABLE(JIT)
+
+namespace JSC {
+
+static void stringCharLoad(SpecializedThunkJIT& jit)
+{
+ // load string
+ jit.loadJSStringArgument(SpecializedThunkJIT::ThisArgument, SpecializedThunkJIT::regT0);
+ // regT0 now contains this, and is a non-rope JSString*
+
+ // Load string length to regT2, and start the process of loading the data pointer into regT0
+ jit.load32(MacroAssembler::Address(SpecializedThunkJIT::regT0, ThunkHelpers::jsStringLengthOffset()), SpecializedThunkJIT::regT2);
+ jit.loadPtr(MacroAssembler::Address(SpecializedThunkJIT::regT0, ThunkHelpers::jsStringValueOffset()), SpecializedThunkJIT::regT0);
+ jit.loadPtr(MacroAssembler::Address(SpecializedThunkJIT::regT0, ThunkHelpers::stringImplDataOffset()), SpecializedThunkJIT::regT0);
+
+ // load index
+ jit.loadInt32Argument(0, SpecializedThunkJIT::regT1); // regT1 contains the index
+
+ // Do an unsigned compare to simultaneously filter negative indices as well as indices that are too large
+ jit.appendFailure(jit.branch32(MacroAssembler::AboveOrEqual, SpecializedThunkJIT::regT1, SpecializedThunkJIT::regT2));
+
+ // Load the character
+ jit.load16(MacroAssembler::BaseIndex(SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT1, MacroAssembler::TimesTwo, 0), SpecializedThunkJIT::regT0);
+}
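Editorial note, for illustration only and not part of the patch: stringCharLoad() above emits the shared fast path used by the charCodeAt/charAt thunks. It loads the flat string's length and character data, then bounds-checks the index with a single unsigned compare. A minimal C++ sketch of the same logic, with hypothetical names:

#include <stdint.h>

// Editor's sketch (not part of the patch). 'chars'/'length' stand in for the StringImpl
// data and JSString length loaded into regT0/regT2; returning false corresponds to
// taking one of the m_failures jumps (the slow case).
static bool charCodeAtFastPath(const uint16_t* chars, unsigned length, int index, uint16_t& result)
{
    // One unsigned compare filters both negative indices and indices >= length.
    if (static_cast<unsigned>(index) >= length)
        return false;
    result = chars[index]; // 16-bit load, like load16(BaseIndex(regT0, regT1, TimesTwo))
    return true;
}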
+
+static void charToString(SpecializedThunkJIT& jit, JSGlobalData* globalData, MacroAssembler::RegisterID src, MacroAssembler::RegisterID dst, MacroAssembler::RegisterID scratch)
+{
+ jit.appendFailure(jit.branch32(MacroAssembler::AboveOrEqual, src, MacroAssembler::Imm32(0x100)));
+ jit.move(MacroAssembler::ImmPtr(globalData->smallStrings.singleCharacterStrings()), scratch);
+ jit.loadPtr(MacroAssembler::BaseIndex(scratch, src, MacroAssembler::ScalePtr, 0), dst);
+ jit.appendFailure(jit.branchTestPtr(MacroAssembler::Zero, dst));
+}
+
+PassRefPtr<NativeExecutable> charCodeAtThunkGenerator(JSGlobalData* globalData, ExecutablePool* pool)
+{
+ SpecializedThunkJIT jit(1, globalData, pool);
+ stringCharLoad(jit);
+ jit.returnInt32(SpecializedThunkJIT::regT0);
+ return jit.finalize();
+}
+
+PassRefPtr<NativeExecutable> charAtThunkGenerator(JSGlobalData* globalData, ExecutablePool* pool)
+{
+ SpecializedThunkJIT jit(1, globalData, pool);
+ stringCharLoad(jit);
+ charToString(jit, globalData, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT1);
+ jit.returnJSCell(SpecializedThunkJIT::regT0);
+ return jit.finalize();
+}
+
+PassRefPtr<NativeExecutable> fromCharCodeThunkGenerator(JSGlobalData* globalData, ExecutablePool* pool)
+{
+ SpecializedThunkJIT jit(1, globalData, pool);
+ // load char code
+ jit.loadInt32Argument(0, SpecializedThunkJIT::regT0);
+ charToString(jit, globalData, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT0, SpecializedThunkJIT::regT1);
+ jit.returnJSCell(SpecializedThunkJIT::regT0);
+ return jit.finalize();
+}
+
+PassRefPtr<NativeExecutable> sqrtThunkGenerator(JSGlobalData* globalData, ExecutablePool* pool)
+{
+#if USE(JSVALUE64) || USE(JSVALUE32_64)
+ SpecializedThunkJIT jit(1, globalData, pool);
+ if (!jit.supportsFloatingPointSqrt())
+ return globalData->jitStubs->ctiNativeCallThunk();
+
+ jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
+ jit.sqrtDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT0);
+ jit.returnDouble(SpecializedThunkJIT::fpRegT0);
+ return jit.finalize();
+#else
+ UNUSED_PARAM(pool);
+ return globalData->jitStubs->ctiNativeCallThunk();
+#endif
+}
+
+static const double oneConstant = 1.0;
+static const double negativeHalfConstant = -0.5;
+
+PassRefPtr<NativeExecutable> powThunkGenerator(JSGlobalData* globalData, ExecutablePool* pool)
+{
+#if USE(JSVALUE64) || USE(JSVALUE32_64)
+ SpecializedThunkJIT jit(2, globalData, pool);
+ if (!jit.supportsFloatingPoint())
+ return globalData->jitStubs->ctiNativeCallThunk();
+
+ jit.loadDouble(&oneConstant, SpecializedThunkJIT::fpRegT1);
+ jit.loadDoubleArgument(0, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::regT0);
+ MacroAssembler::Jump nonIntExponent;
+ jit.loadInt32Argument(1, SpecializedThunkJIT::regT0, nonIntExponent);
+ jit.appendFailure(jit.branch32(MacroAssembler::LessThan, SpecializedThunkJIT::regT0, MacroAssembler::Imm32(0)));
+
+ MacroAssembler::Jump exponentIsZero = jit.branchTest32(MacroAssembler::Zero, SpecializedThunkJIT::regT0);
+ MacroAssembler::Label startLoop(jit.label());
+
+ MacroAssembler::Jump exponentIsEven = jit.branchTest32(MacroAssembler::Zero, SpecializedThunkJIT::regT0, MacroAssembler::Imm32(1));
+ jit.mulDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT1);
+ exponentIsEven.link(&jit);
+ jit.mulDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT0);
+ jit.rshift32(MacroAssembler::Imm32(1), SpecializedThunkJIT::regT0);
+ jit.branchTest32(MacroAssembler::NonZero, SpecializedThunkJIT::regT0).linkTo(startLoop, &jit);
+
+ exponentIsZero.link(&jit);
+ jit.returnDouble(SpecializedThunkJIT::fpRegT1);
+
+ if (jit.supportsFloatingPointSqrt()) {
+ nonIntExponent.link(&jit);
+ jit.loadDouble(&negativeHalfConstant, SpecializedThunkJIT::fpRegT3);
+ jit.loadDoubleArgument(1, SpecializedThunkJIT::fpRegT2, SpecializedThunkJIT::regT0);
+ jit.appendFailure(jit.branchDouble(MacroAssembler::DoubleLessThanOrEqual, SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT1));
+ jit.appendFailure(jit.branchDouble(MacroAssembler::DoubleNotEqualOrUnordered, SpecializedThunkJIT::fpRegT2, SpecializedThunkJIT::fpRegT3));
+ jit.sqrtDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT0);
+ jit.divDouble(SpecializedThunkJIT::fpRegT0, SpecializedThunkJIT::fpRegT1);
+ jit.returnDouble(SpecializedThunkJIT::fpRegT1);
+ } else
+ jit.appendFailure(nonIntExponent);
+
+ return jit.finalize();
+#else
+ UNUSED_PARAM(pool);
+ return globalData->jitStubs->ctiNativeCallThunk();
+#endif
+}
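Editorial note, for illustration only and not part of the patch: the integer-exponent loop emitted above is exponentiation by squaring, with fpRegT1 accumulating the result and regT0 holding the remaining exponent bits. A hedged C++ sketch of the same algorithm:

// Editor's sketch (not part of the patch): the square-and-multiply loop the thunk
// emits for pow(base, exponent) with a non-negative integer exponent.
static double powIntegerExponent(double base, unsigned exponent)
{
    double result = 1.0;      // fpRegT1 starts at oneConstant
    while (exponent) {
        if (exponent & 1)     // the multiply skipped on the "exponentIsEven" branch
            result *= base;
        base *= base;         // mulDouble(fpRegT0, fpRegT0)
        exponent >>= 1;       // rshift32(Imm32(1), regT0)
    }
    return result;
}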
+
+}
+
+#endif // ENABLE(JIT)
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
+ * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
+ * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+ * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+ * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
+ * THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef ThunkGenerators_h
+#define ThunkGenerators_h
+
+#if ENABLE(JIT)
+#include <wtf/PassRefPtr.h>
+
+namespace JSC {
+ class ExecutablePool;
+ class JSGlobalData;
+ class NativeExecutable;
+
+ typedef PassRefPtr<NativeExecutable> (*ThunkGenerator)(JSGlobalData*, ExecutablePool*);
+ PassRefPtr<NativeExecutable> charCodeAtThunkGenerator(JSGlobalData*, ExecutablePool*);
+ PassRefPtr<NativeExecutable> charAtThunkGenerator(JSGlobalData*, ExecutablePool*);
+ PassRefPtr<NativeExecutable> fromCharCodeThunkGenerator(JSGlobalData*, ExecutablePool*);
+ PassRefPtr<NativeExecutable> sqrtThunkGenerator(JSGlobalData*, ExecutablePool*);
+ PassRefPtr<NativeExecutable> powThunkGenerator(JSGlobalData*, ExecutablePool*);
+}
+#endif
+
+#endif // ThunkGenerator_h
static JSValue JSC_HOST_CALL functionLoad(ExecState*, JSObject*, JSValue, const ArgList&);
static JSValue JSC_HOST_CALL functionCheckSyntax(ExecState*, JSObject*, JSValue, const ArgList&);
static JSValue JSC_HOST_CALL functionReadline(ExecState*, JSObject*, JSValue, const ArgList&);
-static NO_RETURN JSValue JSC_HOST_CALL functionQuit(ExecState*, JSObject*, JSValue, const ArgList&);
+static NO_RETURN_WITH_VALUE JSValue JSC_HOST_CALL functionQuit(ExecState*, JSObject*, JSValue, const ArgList&);
#if ENABLE(SAMPLING_FLAGS)
static JSValue JSC_HOST_CALL functionSetSamplingFlags(ExecState*, JSObject*, JSValue, const ArgList&);
if (i)
putchar(' ');
- printf("%s", args.at(i).toString(exec).UTF8String().c_str());
+ printf("%s", args.at(i).toString(exec).UTF8String().data());
}
putchar('\n');
JSValue JSC_HOST_CALL functionDebug(ExecState* exec, JSObject*, JSValue, const ArgList& args)
{
- fprintf(stderr, "--> %s\n", args.at(0).toString(exec).UTF8String().c_str());
+ fprintf(stderr, "--> %s\n", args.at(0).toString(exec).UTF8String().data());
return jsUndefined();
}
// We can't use destructors in the following code because it uses Windows
// Structured Exception Handling
int res = 0;
- JSGlobalData* globalData = JSGlobalData::create().releaseRef();
+ JSGlobalData* globalData = JSGlobalData::create(ThreadStackTypeLarge).releaseRef();
TRY
res = jscmain(argc, argv, globalData);
EXCEPT(res = 3)
if (completion.complType() == Throw)
printf("Exception: %s\n", completion.value().toString(globalObject->globalExec()).ascii());
else
- printf("%s\n", completion.value().toString(globalObject->globalExec()).UTF8String().c_str());
+ printf("%s\n", completion.value().toString(globalObject->globalExec()).UTF8String().data());
globalObject->globalExec()->clearException();
}
static bool fillBufferWithContentsOfFile(const UString& fileName, Vector<char>& buffer)
{
- FILE* f = fopen(fileName.UTF8String().c_str(), "r");
+ FILE* f = fopen(fileName.UTF8String().data(), "r");
if (!f) {
- fprintf(stderr, "Could not open file: %s\n", fileName.UTF8String().c_str());
+ fprintf(stderr, "Could not open file: %s\n", fileName.UTF8String().data());
return false;
}
win32-*: CONFIG += console
win32-msvc*: CONFIG += exceptions_off stl_off
+isEmpty(OUTPUT_DIR): OUTPUT_DIR = ..
include($$PWD/../WebKit.pri)
CONFIG += link_pkgconfig
QMAKE_RPATHDIR += $$OUTPUT_DIR/lib
-isEmpty(OUTPUT_DIR):OUTPUT_DIR=$$PWD/..
CONFIG(debug, debug|release) {
OBJECTS_DIR = obj/debug
} else { # Release
}
OBJECTS_DIR_WTR = $$OBJECTS_DIR$${QMAKE_DIR_SEP}
include($$PWD/JavaScriptCore.pri)
-
-*-g++*:QMAKE_CXXFLAGS_RELEASE -= -O2
-*-g++*:QMAKE_CXXFLAGS_RELEASE += -O3
+addJavaScriptCoreLib(.)
symbian {
TARGET.CAPABILITY = ReadUserData WriteUserData NetworkServices
export SRCROOT=$PWD
export WebCore=$PWD
export CREATE_HASH_TABLE="$SRCROOT/create_hash_table"
+export CREATE_REGEXP_TABLES="$SRCROOT/create_regex_tables"
mkdir -p DerivedSources/JavaScriptCore
cd DerivedSources/JavaScriptCore
CASSERT(sizeof(uint16_t) == 2, uint16_t_is_two_bytes)
CASSERT(sizeof(int32_t) == 4, int32_t_is_four_bytes)
CASSERT(sizeof(uint32_t) == 4, uint32_t_is_four_bytes)
-CASSERT(sizeof(int64_t) == 8, int64_t_is_four_bytes)
-CASSERT(sizeof(uint64_t) == 8, uint64_t_is_four_bytes)
+CASSERT(sizeof(int64_t) == 8, int64_t_is_eight_bytes)
+CASSERT(sizeof(uint64_t) == 8, uint64_t_is_eight_bytes)
#endif
static ExpressionNode* makeNegateNode(JSGlobalData* globalData, ExpressionNode* n)
{
if (n->isNumber()) {
- NumberNode* number = static_cast<NumberNode*>(n);
-
- if (number->value() > 0.0) {
- number->setValue(-number->value());
- return number;
- }
+ NumberNode* numberNode = static_cast<NumberNode*>(n);
+ numberNode->setValue(-numberNode->value());
+ return numberNode;
}
return new (globalData) NegateNode(globalData, n);
, m_globalData(globalData)
, m_keywordTable(JSC::mainTable)
{
- m_buffer8.reserveInitialCapacity(initialReadBufferCapacity);
- m_buffer16.reserveInitialCapacity(initialReadBufferCapacity);
}
Lexer::~Lexer()
m_error = false;
m_atLineStart = true;
+ m_buffer8.reserveInitialCapacity(initialReadBufferCapacity);
+ m_buffer16.reserveInitialCapacity((m_codeEnd - m_code) / 2);
+
// ECMA-262 calls for stripping all Cf characters, but we only strip BOM characters.
// See <https://bugs.webkit.org/show_bug.cgi?id=4931> for details.
if (source.provider()->hasBOMs()) {
m_codeWithoutBOMs.clear();
Vector<char> newBuffer8;
- newBuffer8.reserveInitialCapacity(initialReadBufferCapacity);
m_buffer8.swap(newBuffer8);
Vector<UChar> newBuffer16;
- newBuffer16.reserveInitialCapacity(initialReadBufferCapacity);
m_buffer16.swap(newBuffer16);
m_isReparsing = false;
using ParserArenaRefCounted::operator new;
- void adoptData(std::auto_ptr<ScopeNodeData> data)
- {
- ASSERT(!data->m_arena.contains(this));
- ASSERT(!m_data);
- m_data.adopt(data);
- }
ScopeNodeData* data() const { return m_data.get(); }
void destroyData() { m_data.clear(); }
#include "Lexer.h"
#include <wtf/HashSet.h>
#include <wtf/Vector.h>
-#include <memory>
-
-using std::auto_ptr;
#ifndef yyparse
extern int jscyyparse(void*);
errMsg = &defaultErrMsg;
*errLine = -1;
- *errMsg = 0;
+ *errMsg = UString();
Lexer& lexer = *globalData->lexer;
lexer.setCode(*m_source, m_arena);
length = md.endSubject - p;
while (length-- > 0) {
int c;
- if (isprint(c = *(p++)))
+ if (isASCIIPrintable(c = *(p++)))
printf("%c", c);
else if (c < 256)
printf("\\x%02x", c);
CallIdentifier(const UString& name, const UString& url, int lineNumber)
: m_name(name)
- , m_url(url)
+ , m_url(!url.isNull() ? url : "")
, m_lineNumber(lineNumber)
{
}
#ifndef NDEBUG
operator const char*() const { return c_str(); }
- const char* c_str() const { return m_name.UTF8String().c_str(); }
+ const char* c_str() const { return m_name.UTF8String().data(); }
#endif
};
{
// FIXME: When multi-threading is supported this will be a vector and calls
// into the profiler will need to know which thread it is executing on.
- m_head = ProfileNode::create(CallIdentifier("Thread_1", 0, 0), 0, 0);
+ m_head = ProfileNode::create(CallIdentifier("Thread_1", UString(), 0), 0, 0);
}
Profile::~Profile()
std::sort(sortedFunctions.begin(), sortedFunctions.end(), functionNameCountPairComparator);
for (NameCountPairVector::iterator it = sortedFunctions.begin(); it != sortedFunctions.end(); ++it)
- printf(" %-12d%s\n", (*it).second, UString((*it).first).UTF8String().c_str());
+ printf(" %-12d%s\n", (*it).second, UString((*it).first).UTF8String().data());
printf("\nSort by top of stack, same collapsed (when >= 5):\n");
}
if (JAVASCRIPTCORE_PROFILE_WILL_EXECUTE_ENABLED()) {
CString name = callIdentifier.m_name.UTF8String();
CString url = callIdentifier.m_url.UTF8String();
- JAVASCRIPTCORE_PROFILE_WILL_EXECUTE(m_profileGroup, const_cast<char*>(name.c_str()), const_cast<char*>(url.c_str()), callIdentifier.m_lineNumber);
+ JAVASCRIPTCORE_PROFILE_WILL_EXECUTE(m_profileGroup, const_cast<char*>(name.data()), const_cast<char*>(url.data()), callIdentifier.m_lineNumber);
}
if (!m_originatingGlobalExec)
if (JAVASCRIPTCORE_PROFILE_DID_EXECUTE_ENABLED()) {
CString name = callIdentifier.m_name.UTF8String();
CString url = callIdentifier.m_url.UTF8String();
- JAVASCRIPTCORE_PROFILE_DID_EXECUTE(m_profileGroup, const_cast<char*>(name.c_str()), const_cast<char*>(url.c_str()), callIdentifier.m_lineNumber);
+ JAVASCRIPTCORE_PROFILE_DID_EXECUTE(m_profileGroup, const_cast<char*>(name.data()), const_cast<char*>(url.data()), callIdentifier.m_lineNumber);
}
if (!m_originatingGlobalExec)
m_currentNode = m_currentNode->parent();
if (double headSelfTime = m_head->selfTime()) {
- RefPtr<ProfileNode> idleNode = ProfileNode::create(CallIdentifier(NonJSExecution, 0, 0), m_head.get(), m_head.get());
+ RefPtr<ProfileNode> idleNode = ProfileNode::create(CallIdentifier(NonJSExecution, UString(), 0), m_head.get(), m_head.get());
idleNode->setTotalTime(headSelfTime);
idleNode->setSelfTime(headSelfTime);
#include "Profiler.h"
#include <stdio.h>
#include <wtf/DateMath.h>
+#include <wtf/text/StringHash.h>
#if OS(WINDOWS)
#include <windows.h>
printf(" ");
printf("Function Name %s %d SelfTime %.3fms/%.3f%% TotalTime %.3fms/%.3f%% VSelf %.3fms VTotal %.3fms Visible %s Next Sibling %s\n",
- functionName().UTF8String().c_str(),
+ functionName().UTF8String().data(),
m_numberOfCalls, m_actualSelfTime, selfPercent(), m_actualTotalTime, totalPercent(),
m_visibleSelfTime, m_visibleTotalTime,
(m_visible ? "True" : "False"),
- m_nextSibling ? m_nextSibling->functionName().UTF8String().c_str() : "");
+ m_nextSibling ? m_nextSibling->functionName().UTF8String().data() : "");
++indentLevel;
printf(" ");
// Print function names
- const char* name = functionName().UTF8String().c_str();
+ const char* name = functionName().UTF8String().data();
double sampleCount = m_actualTotalTime * 1000;
if (indentLevel) {
for (int i = 0; i < indentLevel; ++i)
while (indentLevel--)
printf(" ");
- printf("%.0f %s\n", sampleCount - sumOfChildrensCount, functionName().UTF8String().c_str());
+ printf("%.0f %s\n", sampleCount - sumOfChildrensCount, functionName().UTF8String().data());
}
return m_actualTotalTime;
OBJECTS_DIR = obj/release
}
+isEmpty(OUTPUT_DIR): OUTPUT_DIR = ../../..
include($$PWD/../../../WebKit.pri)
+
include($$PWD/../../JavaScriptCore.pri)
+addJavaScriptCoreLib(../..)
INCLUDEPATH += $$PWD/../../API
SOURCES += $$PWD/qscriptengine.cpp \
$$PWD/qscriptengine_p.cpp \
$$PWD/qscriptvalue.cpp \
+ $$PWD/qscriptstring.cpp \
+ $$PWD/qscriptprogram.cpp \
+ $$PWD/qscriptsyntaxcheckresult.cpp \
HEADERS += $$PWD/qtscriptglobal.h \
$$PWD/qscriptengine.h \
$$PWD/qscriptvalue.h \
$$PWD/qscriptvalue_p.h \
$$PWD/qscriptconverter_p.h \
+ $$PWD/qscriptstring.h \
+ $$PWD/qscriptstring_p.h \
+ $$PWD/qscriptprogram.h \
+ $$PWD/qscriptprogram_p.h \
+ $$PWD/qscriptsyntaxcheckresult.h \
!static: DEFINES += QT_MAKEDLL
DESTDIR = $$OUTPUT_DIR/lib
-
#define qscriptconverter_p_h
#include <JavaScriptCore/JavaScript.h>
+#include <QtCore/qnumeric.h>
#include <QtCore/qstring.h>
+#include <QtCore/qvarlengtharray.h>
+
+extern char *qdtoa(double d, int mode, int ndigits, int *decpt, int *sign, char **rve, char **digits_str);
/*
\internal
*/
class QScriptConverter {
public:
+ static quint32 toArrayIndex(const JSStringRef jsstring)
+ {
+ // FIXME: this function should be exported by the JSC C API.
+ QString qstring = toString(jsstring);
+
+ bool ok;
+ quint32 idx = qstring.toUInt(&ok);
+ if (!ok || toString(idx) != qstring)
+ idx = 0xffffffff;
+
+ return idx;
+ }
+
static QString toString(const JSStringRef str)
{
return QString(reinterpret_cast<const QChar*>(JSStringGetCharactersPtr(str)), JSStringGetLength(str));
{
return JSStringCreateWithUTF8CString(str);
}
+ static QString toString(double value)
+ {
+ // FIXME: this should be easier. The ideal fix is to create
+ // a new function in the JSC C API that covers this functionality.
+
+ if (qIsNaN(value))
+ return QString::fromLatin1("NaN");
+ if (qIsInf(value))
+ return QString::fromLatin1(value < 0 ? "-Infinity" : "Infinity");
+ if (!value)
+ return QString::fromLatin1("0");
+
+ QVarLengthArray<char, 25> buf;
+ int decpt;
+ int sign;
+ char* result = 0;
+ char* endresult;
+ (void)qdtoa(value, 0, 0, &decpt, &sign, &endresult, &result);
+
+ if (!result)
+ return QString();
+
+ int resultLen = endresult - result;
+ if (decpt <= 0 && decpt > -6) {
+ buf.resize(-decpt + 2 + sign);
+ qMemSet(buf.data(), '0', -decpt + 2 + sign);
+ if (sign) // fix the sign.
+ buf[0] = '-';
+ buf[sign + 1] = '.';
+ buf.append(result, resultLen);
+ } else {
+ if (sign)
+ buf.append('-');
+ int length = buf.size() - sign + resultLen;
+ if (decpt <= 21 && decpt > 0) {
+ if (length <= decpt) {
+ const char* zeros = "0000000000000000000000000";
+ buf.append(result, resultLen);
+ buf.append(zeros, decpt - length);
+ } else {
+ buf.append(result, decpt);
+ buf.append('.');
+ buf.append(result + decpt, resultLen - decpt);
+ }
+ } else if (result[0] >= '0' && result[0] <= '9') {
+ if (length > 1) {
+ buf.append(result, 1);
+ buf.append('.');
+ buf.append(result + 1, resultLen - 1);
+ } else
+ buf.append(result, resultLen);
+ buf.append('e');
+ buf.append(decpt >= 0 ? '+' : '-');
+ int e = qAbs(decpt - 1);
+ if (e >= 100)
+ buf.append('0' + e / 100);
+ if (e >= 10)
+ buf.append('0' + (e % 100) / 10);
+ buf.append('0' + e % 10);
+ }
+ }
+ free(result);
+ buf.append(0);
+ return QString::fromLatin1(buf.constData());
+ }
};
#endif // qscriptconverter_p_h
#include "qscriptengine.h"
#include "qscriptengine_p.h"
+#include "qscriptprogram_p.h"
+#include "qscriptsyntaxcheckresult_p.h"
#include "qscriptvalue_p.h"
/*!
{
}
+/*!
+ Checks the syntax of the given \a program. Returns a
+ QScriptSyntaxCheckResult object that contains the result of the check.
+*/
+QScriptSyntaxCheckResult QScriptEngine::checkSyntax(const QString &program)
+{
+ // FIXME: This is not optimal.
+ // The JSC C API needs a context to perform a syntax check, so a QScriptEnginePrivate has to be
+ // created. Because this function is static, a new QScriptEnginePrivate is created for each
+ // call; the "static" cannot be removed for compatibility reasons, at least up to Qt 5.
+ // QScriptSyntaxCheckResultPrivate takes ownership of the newly created engine and keeps it
+ // alive as long as it is needed for lazy evaluation of the
+ // QScriptSyntaxCheckResultPrivate's properties.
+ QScriptEnginePrivate* engine = new QScriptEnginePrivate(/* q_ptr */ 0);
+ return QScriptSyntaxCheckResultPrivate::get(engine->checkSyntax(program));
+}
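Editorial note, for illustration only and not part of the patch: a usage sketch of the static syntax check. The QScriptSyntaxCheckResult accessors shown (state(), errorLineNumber()) are assumed to match the public Qt Script API and are not confirmed by this patch.

// Editor's sketch (not part of the patch); accessor names are assumptions.
QScriptSyntaxCheckResult check = QScriptEngine::checkSyntax("if (broken {");
if (check.state() == QScriptSyntaxCheckResult::Error)
    qWarning("Syntax error on line %d", check.errorLineNumber());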
+
/*!
Evaluates \a program, using \a lineNumber as the base line number,
and returns the result of the evaluation.
return QScriptValuePrivate::get(d_ptr->evaluate(program, fileName, lineNumber));
}
+QScriptValue QScriptEngine::evaluate(const QScriptProgram& program)
+{
+ return QScriptValuePrivate::get(d_ptr->evaluate(QScriptProgramPrivate::get(program)));
+}
+
/*!
Runs the garbage collector.
when the QScriptEngine decides that it's wise to do so (i.e. when a certain number of new objects
have been created). However, you can call this function to explicitly request that garbage
collection should be performed as soon as possible.
+
+ \sa reportAdditionalMemoryCost()
*/
void QScriptEngine::collectGarbage()
{
d_ptr->collectGarbage();
}
+
+/*!
+ Reports an additional memory cost of \a cost bytes to the
+ garbage collector.
+
+ This function can be called to indicate that a JavaScript object has
+ memory associated with it that isn't managed by Qt Script itself.
+ Reporting the additional cost makes it more likely that the garbage
+ collector will be triggered.
+
+ Note that if the additional memory is shared with objects outside
+ the scripting environment, the cost should not be reported, since
+ collecting the JavaScript object would not cause the memory to be
+ freed anyway.
+
+ Negative \a cost values are ignored, i.e. this function can't be
+ used to report that the additional memory has been deallocated.
+
+ \sa collectGarbage()
+*/
+void QScriptEngine::reportAdditionalMemoryCost(int cost)
+{
+ d_ptr->reportAdditionalMemoryCost(cost);
+}
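Editorial note, for illustration only and not part of the patch: a minimal usage sketch of reportAdditionalMemoryCost(), assuming a script-visible wrapper that pins a large native buffer.

// Editor's sketch (not part of the patch).
QScriptEngine engine;
// A script-owned wrapper keeps ~16 MB of native memory alive, so nudge the collector.
engine.reportAdditionalMemoryCost(16 * 1024 * 1024);
engine.collectGarbage(); // collection can still be requested explicitly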
+
+/*!
+ Returns a handle that represents the given string, \a str.
+
+ QScriptString can be used to quickly look up properties, and
+ compare property names, of script objects.
+
+ \sa QScriptValue::property()
+*/
+QScriptString QScriptEngine::toStringHandle(const QString& str)
+{
+ return QScriptStringPrivate::get(d_ptr->toStringHandle(str));
+}
+
+/*!
+ Returns a QScriptValue of the primitive type Null.
+
+ \sa undefinedValue()
+*/
+QScriptValue QScriptEngine::nullValue()
+{
+ return QScriptValue(this, QScriptValue::NullValue);
+}
+
+/*!
+ Returns a QScriptValue of the primitive type Undefined.
+
+ \sa nullValue()
+*/
+QScriptValue QScriptEngine::undefinedValue()
+{
+ return QScriptValue(this, QScriptValue::UndefinedValue);
+}
+
+/*!
+ Returns this engine's Global Object.
+
+ By default, the Global Object contains the built-in objects that are
+ part of \l{ECMA-262}, such as Math, Date and String. Additionally,
+ you can set properties of the Global Object to make your own
+ extensions available to all script code. Non-local variables in
+ script code will be created as properties of the Global Object, as
+ well as local variables in global code.
+*/
+QScriptValue QScriptEngine::globalObject() const
+{
+ return QScriptValuePrivate::get(d_ptr->globalObject());
+}
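Editorial note, for illustration only and not part of the patch: a sketch of extending the Global Object. The QScriptValue::setProperty() call and the QScriptValue(engine, value) constructor are assumed from the public Qt Script API and are not part of this patch.

// Editor's sketch (not part of the patch); the QScriptValue API shown is assumed.
QScriptEngine engine;
QScriptValue global = engine.globalObject();
global.setProperty("answer", QScriptValue(&engine, 42));
QScriptValue result = engine.evaluate("answer + 1"); // evaluates to 43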
#ifndef qscriptengine_h
#define qscriptengine_h
+#include "qscriptprogram.h"
+#include "qscriptstring.h"
+#include "qscriptsyntaxcheckresult.h"
#include <QtCore/qobject.h>
#include <QtCore/qshareddata.h>
#include <QtCore/qstring.h>
QScriptEngine();
~QScriptEngine();
+ static QScriptSyntaxCheckResult checkSyntax(const QString& program);
QScriptValue evaluate(const QString& program, const QString& fileName = QString(), int lineNumber = 1);
+ QScriptValue evaluate(const QScriptProgram& program);
+
void collectGarbage();
+ void reportAdditionalMemoryCost(int cost);
+
+ QScriptString toStringHandle(const QString& str);
+ QScriptValue nullValue();
+ QScriptValue undefinedValue();
+ QScriptValue globalObject() const;
private:
friend class QScriptEnginePrivate;
#include "qscriptengine_p.h"
+#include "qscriptprogram_p.h"
#include "qscriptvalue_p.h"
/*!
JSGlobalContextRelease(m_context);
}
+QScriptSyntaxCheckResultPrivate* QScriptEnginePrivate::checkSyntax(const QString& program)
+{
+ JSValueRef exception;
+ JSStringRef source = QScriptConverter::toString(program);
+ bool syntaxIsCorrect = JSCheckScriptSyntax(m_context, source, /* url */ 0, /* starting line */ 1, &exception);
+ JSStringRelease(source);
+ if (syntaxIsCorrect) {
+ return new QScriptSyntaxCheckResultPrivate(this);
+ }
+ JSValueProtect(m_context, exception);
+ return new QScriptSyntaxCheckResultPrivate(this, const_cast<JSObjectRef>(exception));
+}
+
/*!
Evaluates program and returns the result of the evaluation.
\internal
{
JSStringRef script = QScriptConverter::toString(program);
JSStringRef file = QScriptConverter::toString(fileName);
- JSValueRef exception;
- JSValueRef result = JSEvaluateScript(m_context, script, /* Global Object */ 0, file, lineNumber, &exception);
- if (!result)
- return new QScriptValuePrivate(this, exception); // returns an exception
- return new QScriptValuePrivate(this, result);
+ QScriptValuePrivate* result = new QScriptValuePrivate(this, evaluate(script, file, lineNumber));
+ JSStringRelease(script);
+ JSStringRelease(file);
+ return result;
+}
+
+/*!
+ Evaluates program and returns the result of the evaluation.
+ \internal
+*/
+QScriptValuePrivate* QScriptEnginePrivate::evaluate(const QScriptProgramPrivate* program)
+{
+ if (program->isNull())
+ return new QScriptValuePrivate;
+ return new QScriptValuePrivate(this, evaluate(program->program(), program->file(), program->line()));
+}
+
+QScriptValuePrivate* QScriptEnginePrivate::globalObject() const
+{
+ JSObjectRef globalObject = JSContextGetGlobalObject(context());
+ return new QScriptValuePrivate(this, globalObject, globalObject);
}
#include "qscriptconverter_p.h"
#include "qscriptengine.h"
+#include "qscriptstring_p.h"
+#include "qscriptsyntaxcheckresult_p.h"
#include "qscriptvalue.h"
#include <JavaScriptCore/JavaScript.h>
+#include <JSBasePrivate.h>
#include <QtCore/qshareddata.h>
#include <QtCore/qstring.h>
class QScriptEngine;
+class QScriptSyntaxCheckResultPrivate;
class QScriptEnginePrivate : public QSharedData {
public:
- static QScriptEnginePtr get(const QScriptEngine* q) { Q_ASSERT(q); return q->d_ptr; }
+ static QScriptEnginePrivate* get(const QScriptEngine* q) { Q_ASSERT(q); return q->d_ptr.data(); }
static QScriptEngine* get(const QScriptEnginePrivate* d) { Q_ASSERT(d); return d->q_ptr; }
QScriptEnginePrivate(const QScriptEngine*);
~QScriptEnginePrivate();
+ QScriptSyntaxCheckResultPrivate* checkSyntax(const QString& program);
QScriptValuePrivate* evaluate(const QString& program, const QString& fileName, int lineNumber);
+ QScriptValuePrivate* evaluate(const QScriptProgramPrivate* program);
+ inline JSValueRef evaluate(JSStringRef program, JSStringRef fileName, int lineNumber);
+
inline void collectGarbage();
+ inline void reportAdditionalMemoryCost(int cost);
inline JSValueRef makeJSValue(double number) const;
inline JSValueRef makeJSValue(int number) const;
inline JSValueRef makeJSValue(bool number) const;
inline JSValueRef makeJSValue(QScriptValue::SpecialValue value) const;
+ QScriptValuePrivate* globalObject() const;
+
+ inline QScriptStringPrivate* toStringHandle(const QString& str) const;
+
inline JSGlobalContextRef context() const;
private:
QScriptEngine* q_ptr;
JSGlobalContextRef m_context;
};
+
+/*!
+ Evaluates the given JavaScript program and returns the result of the evaluation.
+ \attention This function doesn't take ownership of the parameters.
+ \internal
+*/
+JSValueRef QScriptEnginePrivate::evaluate(JSStringRef program, JSStringRef fileName, int lineNumber)
+{
+ JSValueRef exception;
+ JSValueRef result = JSEvaluateScript(m_context, program, /* Global Object */ 0, fileName, lineNumber, &exception);
+ if (!result)
+ return exception; // returns an exception
+ return result;
+}
+
void QScriptEnginePrivate::collectGarbage()
{
JSGarbageCollect(m_context);
}
+void QScriptEnginePrivate::reportAdditionalMemoryCost(int cost)
+{
+ if (cost > 0)
+ JSReportExtraMemoryCost(m_context, cost);
+}
+
JSValueRef QScriptEnginePrivate::makeJSValue(double number) const
{
return JSValueMakeNumber(m_context, number);
JSValueRef QScriptEnginePrivate::makeJSValue(const QString& string) const
{
- return JSValueMakeString(m_context, QScriptConverter::toString(string));
+ JSStringRef tmp = QScriptConverter::toString(string);
+ JSValueRef result = JSValueMakeString(m_context, tmp);
+ JSStringRelease(tmp);
+ return result;
}
JSValueRef QScriptEnginePrivate::makeJSValue(bool value) const
return JSValueMakeUndefined(m_context);
}
+QScriptStringPrivate* QScriptEnginePrivate::toStringHandle(const QString& str) const
+{
+ return new QScriptStringPrivate(str);
+}
+
JSGlobalContextRef QScriptEnginePrivate::context() const
{
return m_context;
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#include "config.h"
+
+#include "qscriptprogram.h"
+
+#include "qscriptprogram_p.h"
+
+/*!
+ \internal
+
+ \class QScriptProgram
+
+ \brief The QScriptProgram class encapsulates a Qt Script program.
+
+ \ingroup script
+
+ QScriptProgram retains the compiled representation of the script if
+ possible. Thus, QScriptProgram can be used to evaluate the same
+ script multiple times more efficiently.
+
+ \code
+ QScriptEngine engine;
+ QScriptProgram program("1 + 2");
+ QScriptValue result = engine.evaluate(program);
+ \endcode
+*/
+
+/*!
+ Constructs a null QScriptProgram.
+*/
+QScriptProgram::QScriptProgram()
+ : d_ptr(new QScriptProgramPrivate)
+{}
+
+/*!
+ Constructs a new QScriptProgram with the given \a sourceCode, \a
+ fileName and \a firstLineNumber.
+*/
+QScriptProgram::QScriptProgram(const QString& sourceCode,
+ const QString fileName,
+ int firstLineNumber)
+ : d_ptr(new QScriptProgramPrivate(sourceCode, fileName, firstLineNumber))
+{}
+
+/*!
+ Destroys this QScriptProgram.
+*/
+QScriptProgram::~QScriptProgram()
+{}
+
+/*!
+ Constructs a new QScriptProgram that is a copy of \a other.
+*/
+QScriptProgram::QScriptProgram(const QScriptProgram& other)
+{
+ d_ptr = other.d_ptr;
+}
+
+/*!
+ Assigns the \a other value to this QScriptProgram.
+*/
+QScriptProgram& QScriptProgram::operator=(const QScriptProgram& other)
+{
+ d_ptr = other.d_ptr;
+ return *this;
+}
+
+/*!
+ Returns true if this QScriptProgram is null; otherwise
+ returns false.
+*/
+bool QScriptProgram::isNull() const
+{
+ return d_ptr->isNull();
+}
+
+/*!
+ Returns the source code of this program.
+*/
+QString QScriptProgram::sourceCode() const
+{
+ return d_ptr->sourceCode();
+}
+
+/*!
+ Returns the filename associated with this program.
+*/
+QString QScriptProgram::fileName() const
+{
+ return d_ptr->fileName();
+}
+
+/*!
+ Returns the line number associated with this program.
+*/
+int QScriptProgram::firstLineNumber() const
+{
+ return d_ptr->firstLineNumber();
+}
+
+/*!
+ Returns true if this QScriptProgram is equal to \a other;
+ otherwise returns false.
+*/
+bool QScriptProgram::operator==(const QScriptProgram& other) const
+{
+ return d_ptr == other.d_ptr || *d_ptr == *other.d_ptr;
+}
+
+/*!
+ Returns true if this QScriptProgram is not equal to \a other;
+ otherwise returns false.
+*/
+bool QScriptProgram::operator!=(const QScriptProgram& other) const
+{
+ return d_ptr != other.d_ptr && *d_ptr != *other.d_ptr;
+}
+
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#ifndef qscriptprogram_h
+#define qscriptprogram_h
+
+#include "qtscriptglobal.h"
+#include <QtCore/qshareddata.h>
+#include <QtCore/qstring.h>
+
+class QScriptProgramPrivate;
+class Q_JAVASCRIPT_EXPORT QScriptProgram {
+public:
+ QScriptProgram();
+ QScriptProgram(const QString& sourceCode,
+ const QString fileName = QString(),
+ int firstLineNumber = 1);
+ QScriptProgram(const QScriptProgram& other);
+ ~QScriptProgram();
+
+ QScriptProgram& operator=(const QScriptProgram& other);
+
+ bool isNull() const;
+
+ QString sourceCode() const;
+ QString fileName() const;
+ int firstLineNumber() const;
+
+ bool operator==(const QScriptProgram& other) const;
+ bool operator!=(const QScriptProgram& other) const;
+
+private:
+ QExplicitlySharedDataPointer<QScriptProgramPrivate> d_ptr;
+ Q_DECLARE_PRIVATE(QScriptProgram)
+};
+
+#endif // qscriptprogram_h
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#ifndef qscriptprogram_p_h
+#define qscriptprogram_p_h
+
+#include "qscriptconverter_p.h"
+#include "qscriptprogram.h"
+#include <JavaScriptCore/JavaScript.h>
+#include <QtCore/qshareddata.h>
+#include <QtCore/qstring.h>
+
+/*
+ FIXME: QScriptProgramPrivate could potentially be much faster. The current implementation saves
+ CPU time only by avoiding the QString -> JSStringRef conversion. Ideally, the JSC C API would
+ provide a function offering "parse once, execute multiple times" functionality.
+*/
+
+class QScriptProgramPrivate : public QSharedData {
+public:
+ inline static QScriptProgramPrivate* get(const QScriptProgram& program);
+ inline QScriptProgramPrivate();
+ inline QScriptProgramPrivate(const QString& sourceCode,
+ const QString fileName,
+ int firstLineNumber);
+
+ inline ~QScriptProgramPrivate();
+
+ inline bool isNull() const;
+
+ inline QString sourceCode() const;
+ inline QString fileName() const;
+ inline int firstLineNumber() const;
+
+ inline bool operator==(const QScriptProgramPrivate& other) const;
+ inline bool operator!=(const QScriptProgramPrivate& other) const;
+
+ inline JSStringRef program() const;
+ inline JSStringRef file() const;
+ inline int line() const;
+private:
+ JSStringRef m_program;
+ JSStringRef m_fileName;
+ int m_line;
+};
+
+QScriptProgramPrivate* QScriptProgramPrivate::get(const QScriptProgram& program)
+{
+ return const_cast<QScriptProgramPrivate*>(program.d_ptr.constData());
+}
+
+QScriptProgramPrivate::QScriptProgramPrivate()
+ : m_program(0)
+ , m_fileName(0)
+ , m_line(-1)
+{}
+
+QScriptProgramPrivate::QScriptProgramPrivate(const QString& sourceCode,
+ const QString fileName,
+ int firstLineNumber)
+ : m_program(QScriptConverter::toString(sourceCode))
+ , m_fileName(QScriptConverter::toString(fileName))
+ , m_line(firstLineNumber)
+{}
+
+QScriptProgramPrivate::~QScriptProgramPrivate()
+{
+ if (!isNull()) {
+ JSStringRelease(m_program);
+ JSStringRelease(m_fileName);
+ }
+}
+
+bool QScriptProgramPrivate::isNull() const
+{
+ return !m_program;
+}
+
+QString QScriptProgramPrivate::sourceCode() const
+{
+ return QScriptConverter::toString(m_program);
+}
+
+QString QScriptProgramPrivate::fileName() const
+{
+ return QScriptConverter::toString(m_fileName);
+}
+
+int QScriptProgramPrivate::firstLineNumber() const
+{
+ return m_line;
+}
+
+bool QScriptProgramPrivate::operator==(const QScriptProgramPrivate& other) const
+{
+ return m_line == other.m_line
+ && JSStringIsEqual(m_fileName, other.m_fileName)
+ && JSStringIsEqual(m_program, other.m_program);
+}
+
+bool QScriptProgramPrivate::operator!=(const QScriptProgramPrivate& other) const
+{
+ return m_line != other.m_line
+ || !JSStringIsEqual(m_fileName, other.m_fileName)
+ || !JSStringIsEqual(m_program, other.m_program);
+}
+
+JSStringRef QScriptProgramPrivate::program() const { return m_program; }
+JSStringRef QScriptProgramPrivate::file() const { return m_fileName; }
+int QScriptProgramPrivate::line() const { return m_line; }
+
+#endif // qscriptprogram_p_h
--- /dev/null
+/*
+ Copyright (C) 2009 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#include "config.h"
+
+#include "qscriptstring.h"
+
+#include "qscriptstring_p.h"
+#include <QtCore/qhash.h>
+
+/*!
+ Constructs an invalid QScriptString.
+*/
+QScriptString::QScriptString()
+ : d_ptr(new QScriptStringPrivate())
+{
+}
+
+/*!
+    Constructs a QScriptString from an internal representation.
+    \internal
+*/
+QScriptString::QScriptString(QScriptStringPrivate* d)
+ : d_ptr(d)
+{
+}
+
+/*!
+ Constructs a new QScriptString that is a copy of \a other.
+*/
+QScriptString::QScriptString(const QScriptString& other)
+{
+ d_ptr = other.d_ptr;
+}
+
+/*!
+ Destroys this QScriptString.
+*/
+QScriptString::~QScriptString()
+{
+}
+
+/*!
+    Assigns \a other to this QScriptString, and returns a reference to this QScriptString.
+*/
+QScriptString& QScriptString::operator=(const QScriptString& other)
+{
+ d_ptr = other.d_ptr;
+ return *this;
+}
+
+/*!
+ Returns true if this QScriptString is valid; otherwise
+ returns false.
+*/
+bool QScriptString::isValid() const
+{
+ return d_ptr->isValid();
+}
+
+/*!
+ Returns true if this QScriptString is equal to \a other;
+ otherwise returns false.
+*/
+bool QScriptString::operator==(const QScriptString& other) const
+{
+ return d_ptr == other.d_ptr || *d_ptr == *(other.d_ptr);
+}
+
+/*!
+ Returns true if this QScriptString is not equal to \a other;
+ otherwise returns false.
+*/
+bool QScriptString::operator!=(const QScriptString& other) const
+{
+    return !operator==(other);
+}
+
+/*!
+ Attempts to convert this QScriptString to a QtScript array index,
+ and returns the result.
+
+ If a conversion error occurs, *\a{ok} is set to false; otherwise
+ *\a{ok} is set to true.
+*/
+quint32 QScriptString::toArrayIndex(bool* ok) const
+{
+ return d_ptr->toArrayIndex(ok);
+}
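+
+/*
+    Illustrative results (taken from tst_QScriptString::toArrayIndex_data()):
+        "123"        -> *ok == true,  returns 123
+        "-1"         -> *ok == false, returns 0xffffffff
+        "4294967295" -> *ok == false, returns 0xffffffff (2^32 - 1 is not a valid array index)
+*/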
+
+/*!
+ Returns the string that this QScriptString represents, or a
+ null string if this QScriptString is not valid.
+
+ \sa isValid()
+*/
+QString QScriptString::toString() const
+{
+ return d_ptr->toString();
+}
+
+/*!
+ Returns the string that this QScriptString represents, or a
+ null string if this QScriptString is not valid.
+
+ \sa toString()
+*/
+QScriptString::operator QString() const
+{
+ return d_ptr->toString();
+}
+
+uint qHash(const QScriptString& key)
+{
+ return qHash(QScriptStringPrivate::get(key)->id());
+}
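+
+/*
+    qHash() makes QScriptString usable as a key in Qt's hash-based containers,
+    as exercised by tst_QScriptString::hash():
+
+        QScriptEngine engine;
+        QHash<QScriptString, int> stringToInt;
+        stringToInt.insert(engine.toStringHandle("foo"), 123);
+*/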
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#ifndef qscriptstring_h
+#define qscriptstring_h
+
+#include "qtscriptglobal.h"
+#include <QtCore/qshareddata.h>
+#include <QtCore/qstring.h>
+
+class QScriptStringPrivate;
+typedef QExplicitlySharedDataPointer<QScriptStringPrivate> QScriptStringPtr;
+
+class Q_JAVASCRIPT_EXPORT QScriptString {
+public:
+ QScriptString();
+ QScriptString(const QScriptString& other);
+ ~QScriptString();
+
+ QScriptString& operator=(const QScriptString& other);
+
+ bool isValid() const;
+
+ bool operator==(const QScriptString& other) const;
+ bool operator!=(const QScriptString& other) const;
+
+ quint32 toArrayIndex(bool* ok = 0) const;
+
+ QString toString() const;
+ operator QString() const;
+
+private:
+ QScriptString(QScriptStringPrivate* d);
+
+ QScriptStringPtr d_ptr;
+
+ friend class QScriptStringPrivate;
+};
+
+uint qHash(const QScriptString& key);
+
+#endif // qscriptstring_h
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#ifndef qscriptstring_p_h
+#define qscriptstring_p_h
+
+#include "qscriptconverter_p.h"
+#include "qscriptstring.h"
+#include <JavaScriptCore/JavaScript.h>
+#include <QtCore/qnumeric.h>
+#include <QtCore/qshareddata.h>
+
+class QScriptStringPrivate : public QSharedData {
+public:
+ inline QScriptStringPrivate();
+ inline QScriptStringPrivate(const QString& qtstring);
+ inline ~QScriptStringPrivate();
+
+ static inline QScriptString get(QScriptStringPrivate* d);
+ static inline QScriptStringPtr get(const QScriptString& p);
+
+ inline bool isValid() const;
+
+ inline bool operator==(const QScriptStringPrivate& other) const;
+ inline bool operator!=(const QScriptStringPrivate& other) const;
+
+ inline quint32 toArrayIndex(bool* ok = 0) const;
+
+ inline QString toString() const;
+
+ inline quint64 id() const;
+
+private:
+ JSStringRef m_string;
+};
+
+
+QScriptStringPrivate::QScriptStringPrivate()
+ : m_string(0)
+{}
+
+QScriptStringPrivate::QScriptStringPrivate(const QString& qtstring)
+ : m_string(QScriptConverter::toString(qtstring))
+{}
+
+QScriptStringPrivate::~QScriptStringPrivate()
+{
+ if (isValid())
+ JSStringRelease(m_string);
+}
+
+QScriptString QScriptStringPrivate::get(QScriptStringPrivate* d)
+{
+ Q_ASSERT(d);
+ return QScriptString(d);
+}
+
+QScriptStringPtr QScriptStringPrivate::get(const QScriptString& p)
+{
+ return p.d_ptr;
+}
+
+bool QScriptStringPrivate::isValid() const
+{
+ return m_string;
+}
+
+bool QScriptStringPrivate::operator==(const QScriptStringPrivate& other) const
+{
+ return isValid() && other.isValid() && JSStringIsEqual(m_string, other.m_string);
+}
+
+bool QScriptStringPrivate::operator!=(const QScriptStringPrivate& other) const
+{
+ return isValid() && other.isValid() && !JSStringIsEqual(m_string, other.m_string);
+}
+
+quint32 QScriptStringPrivate::toArrayIndex(bool* ok) const
+{
+ quint32 idx = QScriptConverter::toArrayIndex(m_string);
+ if (ok)
+ *ok = (idx != 0xffffffff);
+ return idx;
+}
+
+QString QScriptStringPrivate::toString() const
+{
+ return QScriptConverter::toString(m_string);
+}
+
+quint64 QScriptStringPrivate::id() const
+{
+    return reinterpret_cast<quintptr>(m_string);
+}
+
+#endif // qscriptstring_p_h
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#include "config.h"
+
+#include "qscriptsyntaxcheckresult.h"
+#include "qscriptsyntaxcheckresult_p.h"
+
+/*!
+ \class QScriptSyntaxCheckResult
+
+ \brief The QScriptSyntaxCheckResult class provides the result of a script syntax check.
+
+ \ingroup script
+ \mainclass
+
+ QScriptSyntaxCheckResult is returned by QScriptEngine::checkSyntax() to
+ provide information about the syntactical (in)correctness of a script.
+*/
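+
+/*
+    A short usage sketch, based on tst_QScriptEngine::checkSyntax():
+
+        QScriptSyntaxCheckResult result = QScriptEngine::checkSyntax("\nif else");
+        if (result.state() == QScriptSyntaxCheckResult::Error)
+            qWarning("%s at line %d", qPrintable(result.errorMessage()), result.errorLineNumber());
+*/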
+
+/*!
+ \enum QScriptSyntaxCheckResult::State
+
+ This enum specifies the state of a syntax check.
+
+ \value Error The program contains a syntax error.
+ \value Intermediate The program is incomplete.
+ \value Valid The program is a syntactically correct Qt Script program.
+*/
+
+/*!
+ Constructs a new QScriptSyntaxCheckResult from the \a other result.
+*/
+QScriptSyntaxCheckResult::QScriptSyntaxCheckResult(const QScriptSyntaxCheckResult& other)
+ : d_ptr(other.d_ptr)
+{}
+
+/*!
+ Constructs a new QScriptSyntaxCheckResult from an internal representation.
+ \internal
+*/
+QScriptSyntaxCheckResult::QScriptSyntaxCheckResult(QScriptSyntaxCheckResultPrivate* d)
+ : d_ptr(d)
+{}
+
+/*!
+ Destroys this QScriptSyntaxCheckResult.
+*/
+QScriptSyntaxCheckResult::~QScriptSyntaxCheckResult()
+{}
+
+/*!
+ Assigns the \a other result to this QScriptSyntaxCheckResult, and returns a
+ reference to this QScriptSyntaxCheckResult.
+*/
+QScriptSyntaxCheckResult& QScriptSyntaxCheckResult::operator=(const QScriptSyntaxCheckResult& other)
+{
+ d_ptr = other.d_ptr;
+ return *this;
+}
+
+/*!
+ Returns the state of this QScriptSyntaxCheckResult.
+*/
+QScriptSyntaxCheckResult::State QScriptSyntaxCheckResult::state() const
+{
+ return d_ptr->state();
+}
+
+/*!
+ Returns the error line number of this QScriptSyntaxCheckResult, or -1 if
+ there is no error.
+
+ \sa state(), errorMessage()
+*/
+int QScriptSyntaxCheckResult::errorLineNumber() const
+{
+ return d_ptr->errorLineNumber();
+}
+
+/*!
+ Returns the error column number of this QScriptSyntaxCheckResult, or -1 if
+ there is no error.
+
+ \sa state(), errorLineNumber()
+*/
+int QScriptSyntaxCheckResult::errorColumnNumber() const
+{
+ return d_ptr->errorColumnNumber();
+}
+
+/*!
+ Returns the error message of this QScriptSyntaxCheckResult, or an empty
+ string if there is no error.
+
+ \sa state(), errorLineNumber()
+*/
+QString QScriptSyntaxCheckResult::errorMessage() const
+{
+ return d_ptr->errorMessage();
+}
+
+QScriptSyntaxCheckResultPrivate::~QScriptSyntaxCheckResultPrivate()
+{
+ if (m_exception)
+ JSValueUnprotect(m_engine->context(), m_exception);
+}
+
+QString QScriptSyntaxCheckResultPrivate::errorMessage() const
+{
+ if (!m_exception)
+ return QString();
+
+ JSStringRef tmp = JSValueToStringCopy(m_engine->context(), m_exception, /* exception */ 0);
+ QString message = QScriptConverter::toString(tmp);
+ JSStringRelease(tmp);
+ return message;
+}
+
+int QScriptSyntaxCheckResultPrivate::errorLineNumber() const
+{
+ if (!m_exception)
+ return -1;
+    // m_exception is an Error instance, so it has a "line" attribute.
+ JSStringRef lineAttrName = QScriptConverter::toString("line");
+ JSValueRef line = JSObjectGetProperty(m_engine->context(),
+ m_exception,
+ lineAttrName,
+ /* exceptions */0);
+ JSStringRelease(lineAttrName);
+ return JSValueToNumber(m_engine->context(), line, /* exceptions */0);
+}
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#ifndef qscriptsyntaxcheckresult_h
+#define qscriptsyntaxcheckresult_h
+
+#include "qtscriptglobal.h"
+#include <QtCore/qshareddata.h>
+
+class QScriptSyntaxCheckResultPrivate;
+class Q_JAVASCRIPT_EXPORT QScriptSyntaxCheckResult {
+public:
+ enum State {
+ Error,
+ Intermediate,
+ Valid
+ };
+
+ QScriptSyntaxCheckResult(const QScriptSyntaxCheckResult& other);
+ ~QScriptSyntaxCheckResult();
+ QScriptSyntaxCheckResult& operator=(const QScriptSyntaxCheckResult& other);
+
+ State state() const;
+ int errorLineNumber() const;
+ int errorColumnNumber() const;
+ QString errorMessage() const;
+
+private:
+ QScriptSyntaxCheckResult(QScriptSyntaxCheckResultPrivate* d);
+ QExplicitlySharedDataPointer<QScriptSyntaxCheckResultPrivate> d_ptr;
+
+ friend class QScriptSyntaxCheckResultPrivate;
+};
+#endif // qscriptsyntaxcheckresult_h
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#ifndef qscriptsyntaxcheckresult_p_h
+#define qscriptsyntaxcheckresult_p_h
+
+#include "qscriptconverter_p.h"
+#include "qscriptengine_p.h"
+#include "qscriptsyntaxcheckresult.h"
+#include <JavaScriptCore/JavaScript.h>
+#include <QtCore/qshareddata.h>
+
+class QScriptSyntaxCheckResultPrivate : public QSharedData {
+public:
+ static inline QScriptSyntaxCheckResult get(QScriptSyntaxCheckResultPrivate* p);
+ inline QScriptSyntaxCheckResultPrivate(const QScriptEnginePrivate* engine);
+ inline QScriptSyntaxCheckResultPrivate(const QScriptEnginePrivate* engine, JSObjectRef value);
+ ~QScriptSyntaxCheckResultPrivate();
+
+ inline QScriptSyntaxCheckResult::State state() const;
+ int errorLineNumber() const;
+ inline int errorColumnNumber() const;
+ QString errorMessage() const;
+private:
+ JSObjectRef m_exception;
+ QScriptEnginePtr m_engine;
+};
+
+QScriptSyntaxCheckResult QScriptSyntaxCheckResultPrivate::get(QScriptSyntaxCheckResultPrivate* p)
+{
+ return QScriptSyntaxCheckResult(p);
+}
+
+QScriptSyntaxCheckResultPrivate::QScriptSyntaxCheckResultPrivate(const QScriptEnginePrivate* engine)
+ : m_exception(0)
+ , m_engine(const_cast<QScriptEnginePrivate*>(engine))
+{}
+
+QScriptSyntaxCheckResultPrivate::QScriptSyntaxCheckResultPrivate(const QScriptEnginePrivate* engine, JSObjectRef value)
+ : m_exception(value)
+ , m_engine(const_cast<QScriptEnginePrivate*>(engine))
+{}
+
+QScriptSyntaxCheckResult::State QScriptSyntaxCheckResultPrivate::state() const
+{
+ // FIXME This function doesn't return QScriptSyntaxCheckResult::Intermediate
+ return m_exception ? QScriptSyntaxCheckResult::Error : QScriptSyntaxCheckResult::Valid;
+}
+
+int QScriptSyntaxCheckResultPrivate::errorColumnNumber() const
+{
+ // FIXME JSC C API doesn't expose the error column number.
+ return m_exception ? 1 : -1;
+}
+
+
+#endif // qscriptsyntaxcheckresult_p_h
registers it with the script \a engine.
*/
QScriptValue::QScriptValue(QScriptEngine* engine, bool value)
- : d_ptr(new QScriptValuePrivate(engine, value))
{
+ if (engine)
+ d_ptr = new QScriptValuePrivate(QScriptEnginePrivate::get(engine), value);
+ else
+ d_ptr = new QScriptValuePrivate(value);
}
/*!
registers it with the script \a engine.
*/
QScriptValue::QScriptValue(QScriptEngine* engine, int value)
- : d_ptr(new QScriptValuePrivate(engine, value))
{
+ if (engine)
+ d_ptr = new QScriptValuePrivate(QScriptEnginePrivate::get(engine), value);
+ else
+ d_ptr = new QScriptValuePrivate(value);
}
/*!
registers it with the script \a engine.
*/
QScriptValue::QScriptValue(QScriptEngine* engine, uint value)
- : d_ptr(new QScriptValuePrivate(engine, value))
{
+ if (engine)
+ d_ptr = new QScriptValuePrivate(QScriptEnginePrivate::get(engine), value);
+ else
+ d_ptr = new QScriptValuePrivate(value);
}
/*!
registers it with the script \a engine.
*/
QScriptValue::QScriptValue(QScriptEngine* engine, qsreal value)
- : d_ptr(new QScriptValuePrivate(engine, value))
{
+ if (engine)
+ d_ptr = new QScriptValuePrivate(QScriptEnginePrivate::get(engine), value);
+ else
+ d_ptr = new QScriptValuePrivate(value);
}
/*!
registers it with the script \a engine.
*/
QScriptValue::QScriptValue(QScriptEngine* engine, const QString& value)
- : d_ptr(new QScriptValuePrivate(engine, value))
{
+ if (engine)
+ d_ptr = new QScriptValuePrivate(QScriptEnginePrivate::get(engine), value);
+ else
+ d_ptr = new QScriptValuePrivate(value);
}
/*!
registers it with the script \a engine.
*/
QScriptValue::QScriptValue(QScriptEngine* engine, const char* value)
- : d_ptr(new QScriptValuePrivate(engine, QString::fromUtf8(value)))
{
+ if (engine)
+ d_ptr = new QScriptValuePrivate(QScriptEnginePrivate::get(engine), QString::fromUtf8(value));
+ else
+ d_ptr = new QScriptValuePrivate(QString::fromUtf8(value));
}
/*!
registers it with the script \a engine.
*/
QScriptValue::QScriptValue(QScriptEngine* engine, SpecialValue value)
- : d_ptr(new QScriptValuePrivate(engine, value))
{
+ if (engine)
+ d_ptr = new QScriptValuePrivate(QScriptEnginePrivate::get(engine), value);
+ else
+ d_ptr = new QScriptValuePrivate(value);
}
/*!
#include "qscriptengine_p.h"
#include "qscriptvalue.h"
#include <JavaScriptCore/JavaScript.h>
+#include <JavaScriptCore/JSRetainPtr.h>
+#include <QtCore/qmath.h>
+#include <QtCore/qnumeric.h>
#include <QtCore/qshareddata.h>
#include <QtCore/qvarlengtharray.h>
Implementation of QScriptValue.
    The implementation is based on a state machine. The state names are defined in
- QScriptValuePrivate::States. Each method should check for the current state and then perform a
+ QScriptValuePrivate::State. Each method should check for the current state and then perform a
correct action.
- States:
+ State:
Invalid -> QSVP is invalid, no assumptions should be made about class members (apart from m_value).
CString -> QSVP is created from QString or const char* and no JSC engine has been associated yet.
Current value is kept in m_string,
is kept in m_number (cast of QScriptValue::SpecialValue)
    JSValue -> QSVP is associated with an engine, but there is no information about the real type;
    this state has a really short life cycle. Normally it is created as a function call result.
- JSNative -> QSVP is associated with engine, and it is sure that it isn't a JavaScript object.
+    JSPrimitive -> QSVP is associated with an engine, and the value is known not to be a JavaScript object.
    JSObject -> QSVP is associated with an engine, and the value is known to be a JavaScript object.
    Each state keeps all necessary information to invoke all methods; if not, it should be changed to
inline QScriptValuePrivate(qsreal number);
inline QScriptValuePrivate(QScriptValue::SpecialValue value);
- inline QScriptValuePrivate(const QScriptEngine* engine, bool value);
- inline QScriptValuePrivate(const QScriptEngine* engine, int value);
- inline QScriptValuePrivate(const QScriptEngine* engine, uint value);
- inline QScriptValuePrivate(const QScriptEngine* engine, qsreal value);
- inline QScriptValuePrivate(const QScriptEngine* engine, const QString& value);
- inline QScriptValuePrivate(const QScriptEngine* engine, QScriptValue::SpecialValue value);
+ inline QScriptValuePrivate(const QScriptEnginePrivate* engine, bool value);
+ inline QScriptValuePrivate(const QScriptEnginePrivate* engine, int value);
+ inline QScriptValuePrivate(const QScriptEnginePrivate* engine, uint value);
+ inline QScriptValuePrivate(const QScriptEnginePrivate* engine, qsreal value);
+ inline QScriptValuePrivate(const QScriptEnginePrivate* engine, const QString& value);
+ inline QScriptValuePrivate(const QScriptEnginePrivate* engine, QScriptValue::SpecialValue value);
inline QScriptValuePrivate(const QScriptEnginePrivate* engine, JSValueRef value);
inline QScriptValuePrivate(const QScriptEnginePrivate* engine, JSValueRef value, JSObjectRef object);
private:
// Please, update class documentation when you change the enum.
- enum States {
+ enum State {
Invalid = 0,
CString = 0x1000,
CNumber,
CBool,
CSpecial,
JSValue = 0x2000, // JS values are equal or higher then this value.
- JSNative,
+ JSPrimitive,
JSObject
} m_state;
QScriptEnginePtr m_engine;
inline void setValue(JSValueRef);
inline bool inherits(const char*);
+ inline State refinedJSValue();
inline bool isJSBased() const;
inline bool isNumberBased() const;
{
}
-QScriptValuePrivate::QScriptValuePrivate(const QScriptEngine* engine, bool value)
- : m_state(JSNative)
+QScriptValuePrivate::QScriptValuePrivate(const QScriptEnginePrivate* engine, bool value)
+ : m_state(JSPrimitive)
+ , m_engine(const_cast<QScriptEnginePrivate*>(engine))
+ , m_value(engine->makeJSValue(value))
{
- if (!engine) {
- // slower path reinitialization
- m_state = CBool;
- m_number = value;
- m_value = 0;
- } else {
- m_engine = QScriptEnginePrivate::get(engine);
- m_value = m_engine->makeJSValue(value);
- JSValueProtect(context(), m_value);
- }
+ Q_ASSERT(engine);
+ JSValueProtect(context(), m_value);
}
-QScriptValuePrivate::QScriptValuePrivate(const QScriptEngine* engine, int value)
- : m_state(JSNative)
+QScriptValuePrivate::QScriptValuePrivate(const QScriptEnginePrivate* engine, int value)
+ : m_state(JSPrimitive)
+ , m_engine(const_cast<QScriptEnginePrivate*>(engine))
+ , m_value(m_engine->makeJSValue(value))
{
- if (!engine) {
- // slower path reinitialization
- m_state = CNumber;
- m_number = value;
- m_value = 0;
- } else {
- m_engine = QScriptEnginePrivate::get(engine);
- m_value = m_engine->makeJSValue(value);
- JSValueProtect(context(), m_value);
- }
+ Q_ASSERT(engine);
+ JSValueProtect(context(), m_value);
}
-QScriptValuePrivate::QScriptValuePrivate(const QScriptEngine* engine, uint value)
- : m_state(JSNative)
+QScriptValuePrivate::QScriptValuePrivate(const QScriptEnginePrivate* engine, uint value)
+ : m_state(JSPrimitive)
+ , m_engine(const_cast<QScriptEnginePrivate*>(engine))
+ , m_value(m_engine->makeJSValue(value))
{
- if (!engine) {
- // slower path reinitialization
- m_state = CNumber;
- m_number = value;
- m_value = 0;
- } else {
- m_engine = QScriptEnginePrivate::get(engine);
- m_value = m_engine->makeJSValue(value);
- JSValueProtect(context(), m_value);
- }
+ Q_ASSERT(engine);
+ JSValueProtect(context(), m_value);
}
-QScriptValuePrivate::QScriptValuePrivate(const QScriptEngine* engine, qsreal value)
- : m_state(JSNative)
+QScriptValuePrivate::QScriptValuePrivate(const QScriptEnginePrivate* engine, qsreal value)
+ : m_state(JSPrimitive)
+ , m_engine(const_cast<QScriptEnginePrivate*>(engine))
+ , m_value(m_engine->makeJSValue(value))
{
- if (!engine) {
- // slower path reinitialization
- m_state = CNumber;
- m_number = value;
- m_value = 0;
- } else {
- m_engine = QScriptEnginePrivate::get(engine);
- m_value = m_engine->makeJSValue(value);
- JSValueProtect(context(), m_value);
- }
+ Q_ASSERT(engine);
+ JSValueProtect(context(), m_value);
}
-QScriptValuePrivate::QScriptValuePrivate(const QScriptEngine* engine, const QString& value)
- : m_state(JSNative)
+QScriptValuePrivate::QScriptValuePrivate(const QScriptEnginePrivate* engine, const QString& value)
+ : m_state(JSPrimitive)
+ , m_engine(const_cast<QScriptEnginePrivate*>(engine))
+ , m_value(m_engine->makeJSValue(value))
{
- if (!engine) {
- // slower path reinitialization
- m_state = CString;
- m_string = value;
- m_value = 0;
- } else {
- m_engine = QScriptEnginePrivate::get(engine);
- m_value = m_engine->makeJSValue(value);
- JSValueProtect(context(), m_value);
- }
+ Q_ASSERT(engine);
+ JSValueProtect(context(), m_value);
}
-QScriptValuePrivate::QScriptValuePrivate(const QScriptEngine* engine, QScriptValue::SpecialValue value)
- : m_state(JSNative)
+QScriptValuePrivate::QScriptValuePrivate(const QScriptEnginePrivate* engine, QScriptValue::SpecialValue value)
+ : m_state(JSPrimitive)
+ , m_engine(const_cast<QScriptEnginePrivate*>(engine))
+ , m_value(m_engine->makeJSValue(value))
{
- if (!engine) {
- // slower path reinitialization
- m_state = CSpecial;
- m_number = value;
- m_value = 0;
- } else {
- m_engine = QScriptEnginePrivate::get(engine);
- m_value = m_engine->makeJSValue(value);
- JSValueProtect(context(), m_value);
- }
+ Q_ASSERT(engine);
+ JSValueProtect(context(), m_value);
}
QScriptValuePrivate::QScriptValuePrivate(const QScriptEnginePrivate* engine, JSValueRef value)
, m_value(value)
{
Q_ASSERT(engine);
+ Q_ASSERT(value);
JSValueProtect(context(), m_value);
}
, m_object(object)
{
Q_ASSERT(engine);
+ Q_ASSERT(value);
+ Q_ASSERT(object);
JSValueProtect(context(), m_value);
}
case CBool:
return true;
case JSValue:
- if (isObject())
+ if (refinedJSValue() != JSPrimitive)
return false;
// Fall-through.
- case JSNative:
+ case JSPrimitive:
return JSValueIsBoolean(context(), value());
default:
return false;
{
switch (m_state) {
case CNumber:
- return m_number;
+ return true;
case JSValue:
- if (isObject())
+ if (refinedJSValue() != JSPrimitive)
return false;
// Fall-through.
- case JSNative:
+ case JSPrimitive:
return JSValueIsNumber(context(), value());
default:
return false;
case CSpecial:
return m_number == static_cast<int>(QScriptValue::NullValue);
case JSValue:
- if (isObject())
+ if (refinedJSValue() != JSPrimitive)
return false;
// Fall-through.
- case JSNative:
+ case JSPrimitive:
return JSValueIsNull(context(), value());
default:
return false;
case CString:
return true;
case JSValue:
- if (isObject())
+ if (refinedJSValue() != JSPrimitive)
return false;
// Fall-through.
- case JSNative:
+ case JSPrimitive:
return JSValueIsString(context(), value());
default:
return false;
case CSpecial:
return m_number == static_cast<int>(QScriptValue::UndefinedValue);
case JSValue:
- if (isObject())
+ if (refinedJSValue() != JSPrimitive)
return false;
// Fall-through.
- case JSNative:
+ case JSPrimitive:
return JSValueIsUndefined(context(), value());
default:
return false;
{
switch (m_state) {
case JSValue:
- if (!isObject())
+ if (refinedJSValue() != JSObject)
return false;
// Fall-through.
case JSObject:
bool QScriptValuePrivate::isObject()
{
switch (m_state) {
- case JSObject:
- return true;
case JSValue:
- m_object = JSValueToObject(context(), value(), /* exception */ 0);
- if (!m_object)
- return false;
- m_state = JSObject;
+ return refinedJSValue() == JSObject;
+ case JSObject:
return true;
+
default:
return false;
}
{
switch (m_state) {
case JSValue:
- m_object = JSValueToObject(context(), value(), /* exception */ 0);
- if (!m_object)
+ if (refinedJSValue() != JSObject)
return false;
- m_state = JSObject;
// Fall-through.
case JSObject:
return JSObjectIsFunction(context(), object());
case CString:
return m_string;
case CNumber:
- return QString::number(m_number);
+ return QScriptConverter::toString(m_number);
case CSpecial:
return m_number == QScriptValue::NullValue ? QString::fromLatin1("null") : QString::fromLatin1("undefined");
case JSValue:
- case JSNative:
+ case JSPrimitive:
case JSObject:
- return QScriptConverter::toString(JSValueToStringCopy(context(), value(), /* exception */ 0));
+ JSRetainPtr<JSStringRef> ptr(Adopt, JSValueToStringCopy(context(), value(), /* exception */ 0));
+ return QScriptConverter::toString(ptr.get());
}
Q_ASSERT_X(false, "toString()", "Not all states are included in the previous switch statement.");
qsreal QScriptValuePrivate::toNumber() const
{
- // TODO Check it.
switch (m_state) {
case JSValue:
- case JSNative:
+ case JSPrimitive:
case JSObject:
return JSValueToNumber(context(), value(), /* exception */ 0);
case CNumber:
- case CBool:
return m_number;
+ case CBool:
+ return m_number ? 1 : 0;
case Invalid:
+ return 0;
case CSpecial:
- return false;
+ return m_number == QScriptValue::NullValue ? 0 : qQNaN();
case CString:
- return m_string.isEmpty();
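+        // Conversion chain (illustrative): "1.5" -> 1.5 via toDouble(); "0x11" -> 17 via
+        // toInt() with automatic base detection; "Infinity" -> infinity; any other
+        // non-empty string -> NaN; an empty string -> 0.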
+ bool ok;
+ qsreal result = m_string.toDouble(&ok);
+ if (ok)
+ return result;
+ result = m_string.toInt(&ok, 0); // Try other bases.
+ if (ok)
+ return result;
+        if (m_string == "Infinity")
+            return qInf();
+        if (m_string == "-Infinity")
+            return -qInf();
+ return m_string.length() ? qQNaN() : 0;
}
Q_ASSERT_X(false, "toNumber()", "Not all states are included in the previous switch statement.");
{
switch (m_state) {
case JSValue:
- case JSNative:
- case JSObject:
+ case JSPrimitive:
return JSValueToBoolean(context(), value());
+ case JSObject:
+ return true;
case CNumber:
+ return !(qIsNaN(m_number) || !m_number);
case CBool:
return m_number;
case Invalid:
case CSpecial:
return false;
case CString:
- return m_string.isEmpty();
+ return m_string.length();
}
Q_ASSERT_X(false, "toBool()", "Not all states are included in the previous switch statement.");
qsreal QScriptValuePrivate::toInteger() const
{
- // TODO it is not true implementation!
- return toNumber();
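+    // ECMA-262 ToInteger: NaN becomes 0, +/-Infinity is preserved, and any other
+    // number keeps its sign and loses its fractional part (e.g. 4.7 -> 4, -4.7 -> -4).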
+ qsreal result = toNumber();
+ if (qIsNaN(result))
+ return 0;
+ if (qIsInf(result))
+ return result;
+ return (result > 0) ? qFloor(result) : -1 * qFloor(-result);
}
qint32 QScriptValuePrivate::toInt32() const
{
- // TODO it is not true implementation!
- return toNumber();
+ qsreal result = toInteger();
+    // Strictly the check should be (result == 0 || qIsInf(result) || qIsNaN(result)), but
+    // the NaN and zero cases are already covered by the toInteger() call above.
+ if (qIsInf(result))
+ return 0;
+ return result;
}
quint32 QScriptValuePrivate::toUInt32() const
{
- // TODO it is not true implementation!
- return toNumber();
+ qsreal result = toInteger();
+    // Strictly the check should be (result == 0 || qIsInf(result) || qIsNaN(result)), but
+    // the NaN and zero cases are already covered by the toInteger() call above.
+ if (qIsInf(result))
+ return 0;
+ return result;
}
quint16 QScriptValuePrivate::toUInt16() const
{
- // TODO it is not true implementation!
- return toNumber();
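+    // The implicit qint32 -> quint16 conversion matches ECMA-262 ToUint16 for values
+    // in the qint32 range (e.g. 65537 -> 1, -1 -> 65535).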
+ return toInt32();
}
return false;
}
m_engine = engine;
- m_state = JSNative;
+ m_state = JSPrimitive;
setValue(value);
return true;
}
{
switch (m_state) {
case JSValue:
- m_object = JSValueToObject(context(), value(), /* exception */ 0);
- if (!object()) {
- m_state = JSValue;
+ if (refinedJSValue() != JSObject)
return new QScriptValuePrivate;
- }
- m_state = JSObject;
// Fall-through.
case JSObject:
{
{
Q_ASSERT(isJSBased());
JSObjectRef globalObject = JSContextGetGlobalObject(context());
- JSValueRef error = JSObjectGetProperty(context(), globalObject, QScriptConverter::toString(name), 0);
+ JSStringRef errorAttrName = QScriptConverter::toString(name);
+ JSValueRef error = JSObjectGetProperty(context(), globalObject, errorAttrName, /* exception */ 0);
+ JSStringRelease(errorAttrName);
return JSValueIsInstanceOfConstructor(context(), value(), JSValueToObject(context(), error, /* exception */ 0), /* exception */ 0);
}
+/*!
+ \internal
+    Refines the state of this QScriptValuePrivate from JSValue to either JSPrimitive or JSObject and returns the new state.
+*/
+QScriptValuePrivate::State QScriptValuePrivate::refinedJSValue()
+{
+ Q_ASSERT(m_state == JSValue);
+ if (!JSValueIsObject(context(), value())) {
+ m_state = JSPrimitive;
+ } else {
+ m_state = JSObject;
+        // The value is known to be an object here, so it can be cast with const_cast
+        // instead of going through the JSC C API (JSValueToObject(context(), value(), /* exception */ 0)).
+ m_object = const_cast<JSObjectRef>(m_value);
+ }
+ return m_state;
+}
+
/*!
\internal
    Returns true if the QSV has an engine associated with it.
#include <QtCore/qglobal.h>
-#if defined(Q_OS_WIN) || defined(Q_OS_SYMBIAN)
-# if defined(QT_NODLL)
-# elif defined(QT_MAKEDLL) /* create a Qt DLL library */
-# if defined(QT_BUILD_JAVASCRIPT_LIB)
-# define Q_JAVASCRIPT_EXPORT Q_DECL_EXPORT
-# else
-# define Q_JAVASCRIPT_EXPORT Q_DECL_IMPORT
-# endif
-# elif defined(QT_DLL) /* use a Qt DLL library */
-# define Q_JAVASCRIPT_EXPORT
+#if defined(QT_MAKEDLL) /* create a Qt DLL library */
+# if defined(QT_BUILD_JAVASCRIPT_LIB)
+# define Q_JAVASCRIPT_EXPORT Q_DECL_EXPORT
+# else
+# define Q_JAVASCRIPT_EXPORT Q_DECL_IMPORT
# endif
+#elif defined(QT_DLL) /* use a Qt DLL library */
+# define Q_JAVASCRIPT_EXPORT
#endif
-#if defined(QT_SHARED)
-# define Q_JAVASCRIPT_EXPORT Q_DECL_EXPORT
-#else
-# define Q_JAVASCRIPT_EXPORT
+#if !defined(Q_JAVASCRIPT_EXPORT)
+# if defined(QT_SHARED)
+# define Q_JAVASCRIPT_EXPORT Q_DECL_EXPORT
+# else
+# define Q_JAVASCRIPT_EXPORT
+# endif
#endif
#endif
TEMPLATE = app
TARGET = tst_qscriptengine
QT += testlib
+isEmpty(OUTPUT_DIR): OUTPUT_DIR = ../../../..
include(../tests.pri)
SOURCES += tst_qscriptengine.cpp
*/
#include "qscriptengine.h"
+#include "qscriptprogram.h"
+#include "qscriptsyntaxcheckresult.h"
#include "qscriptvalue.h"
#include <QtTest/qtest.h>
void cleanup() {}
private slots:
+ void globalObject();
void evaluate();
void collectGarbage();
+ void reportAdditionalMemoryCost();
+ void nullValue();
+ void undefinedValue();
+ void evaluateProgram();
+ void checkSyntax_data();
+ void checkSyntax();
};
/* Evaluating a script that throw an unhandled exception should return an invalid value. */
QVERIFY2(engine.evaluate("ping").isValid(), "Script throwing an unhandled exception should return an exception value");
}
+void tst_QScriptEngine::globalObject()
+{
+ QScriptEngine engine;
+ QScriptValue global = engine.globalObject();
+ QScriptValue self = engine.evaluate("this");
+ QVERIFY(global.isObject());
+ QVERIFY(engine.globalObject().equals(engine.evaluate("this")));
+ QEXPECT_FAIL("", "strictlyEquals is broken - bug 36600 in bugs.webkit.org", Continue);
+ QVERIFY(engine.globalObject().strictlyEquals(self));
+}
+
/* Test garbage collection, at least try to not crash. */
void tst_QScriptEngine::collectGarbage()
{
engine.collectGarbage();
QCOMPARE(foo.call().toString(), QString::fromAscii("pong"));
}
+
+void tst_QScriptEngine::reportAdditionalMemoryCost()
+{
+ // There isn't any easy way to test the responsiveness of the GC;
+ // just try to call the function a few times with various sizes.
+ QScriptEngine eng;
+ for (int i = 0; i < 100; ++i) {
+ eng.reportAdditionalMemoryCost(0);
+ eng.reportAdditionalMemoryCost(10);
+ eng.reportAdditionalMemoryCost(1000);
+ eng.reportAdditionalMemoryCost(10000);
+ eng.reportAdditionalMemoryCost(100000);
+ eng.reportAdditionalMemoryCost(1000000);
+ eng.reportAdditionalMemoryCost(10000000);
+ eng.reportAdditionalMemoryCost(-1);
+ eng.reportAdditionalMemoryCost(-1000);
+ QScriptValue obj = eng.evaluate("new Object");
+ eng.collectGarbage();
+ }
+}
+
+void tst_QScriptEngine::nullValue()
+{
+ QScriptEngine engine;
+ QScriptValue value = engine.nullValue();
+ QVERIFY(value.isValid());
+ QVERIFY(value.isNull());
+}
+
+void tst_QScriptEngine::undefinedValue()
+{
+ QScriptEngine engine;
+ QScriptValue value = engine.undefinedValue();
+ QVERIFY(value.isValid());
+ QVERIFY(value.isUndefined());
+}
+
+void tst_QScriptEngine::evaluateProgram()
+{
+ QScriptEngine eng;
+ {
+ QString code("1 + 2");
+ QString fileName("hello.js");
+ int lineNumber = 123;
+ QScriptProgram program(code, fileName, lineNumber);
+ QVERIFY(!program.isNull());
+ QCOMPARE(program.sourceCode(), code);
+ QCOMPARE(program.fileName(), fileName);
+ QCOMPARE(program.firstLineNumber(), lineNumber);
+
+ QScriptValue expected = eng.evaluate(code);
+ for (int x = 0; x < 10; ++x) {
+ QScriptValue ret = eng.evaluate(program);
+ QVERIFY(ret.equals(expected));
+ }
+
+ // operator=
+ QScriptProgram sameProgram = program;
+ QVERIFY(sameProgram == program);
+ QVERIFY(eng.evaluate(sameProgram).equals(expected));
+
+ // copy constructor
+ QScriptProgram sameProgram2(program);
+ QVERIFY(sameProgram2 == program);
+ QVERIFY(eng.evaluate(sameProgram2).equals(expected));
+
+ QScriptProgram differentProgram("2 + 3");
+ QVERIFY(differentProgram != program);
+ QVERIFY(!eng.evaluate(differentProgram).equals(expected));
+ }
+
+    // Program that accesses a variable in the scope
+ {
+ QScriptProgram program("a");
+ QVERIFY(!program.isNull());
+ {
+ QScriptValue ret = eng.evaluate(program);
+ QVERIFY(ret.isError());
+ QCOMPARE(ret.toString(), QString::fromLatin1("ReferenceError: Can't find variable: a"));
+ }
+ {
+ QScriptValue ret = eng.evaluate(program);
+ QVERIFY(ret.isError());
+ }
+ eng.evaluate("a = 456");
+ {
+ QScriptValue ret = eng.evaluate(program);
+ QVERIFY(!ret.isError());
+ QCOMPARE(ret.toNumber(), 456.0);
+ }
+ }
+
+    // Program that creates a closure
+ {
+ QScriptProgram program("(function() { var count = 0; return function() { return count++; }; })");
+ QVERIFY(!program.isNull());
+ QScriptValue createCounter = eng.evaluate(program);
+ QVERIFY(createCounter.isFunction());
+ QScriptValue counter = createCounter.call();
+ QVERIFY(counter.isFunction());
+ {
+ QScriptValue ret = counter.call();
+ QVERIFY(ret.isNumber());
+ }
+ QScriptValue counter2 = createCounter.call();
+ QVERIFY(counter2.isFunction());
+ QVERIFY(!counter2.equals(counter));
+ {
+ QScriptValue ret = counter2.call();
+ QVERIFY(ret.isNumber());
+ }
+ }
+
+ // Same program run in different engines
+ {
+ QString code("1 + 2");
+ QScriptProgram program(code);
+ QVERIFY(!program.isNull());
+ double expected = eng.evaluate(program).toNumber();
+ for (int x = 0; x < 2; ++x) {
+ QScriptEngine eng2;
+ for (int y = 0; y < 2; ++y) {
+ double ret = eng2.evaluate(program).toNumber();
+ QCOMPARE(ret, expected);
+ }
+ }
+ }
+
+ // No program
+ {
+ QScriptProgram program;
+ QVERIFY(program.isNull());
+ QScriptValue ret = eng.evaluate(program);
+ QVERIFY(!ret.isValid());
+ }
+}
+
+void tst_QScriptEngine::checkSyntax_data()
+{
+ QTest::addColumn<QString>("code");
+ QTest::addColumn<int>("expectedState");
+ QTest::addColumn<int>("errorLineNumber");
+ QTest::addColumn<int>("errorColumnNumber");
+ QTest::addColumn<QString>("errorMessage");
+
+ QTest::newRow("0")
+ << QString("0") << int(QScriptSyntaxCheckResult::Valid)
+ << -1 << -1 << "";
+ QTest::newRow("if (")
+ << QString("if (\n") << int(QScriptSyntaxCheckResult::Intermediate)
+ << 1 << 4 << "";
+ QTest::newRow("if else")
+ << QString("\nif else") << int(QScriptSyntaxCheckResult::Error)
+ << 2 << 4 << "SyntaxError: Parse error";
+ QTest::newRow("{if}")
+ << QString("{\n{\nif\n}\n") << int(QScriptSyntaxCheckResult::Error)
+ << 4 << 1 << "SyntaxError: Parse error";
+ QTest::newRow("foo[")
+ << QString("foo[") << int(QScriptSyntaxCheckResult::Error)
+ << 1 << 4 << "SyntaxError: Parse error";
+ QTest::newRow("foo['bar']")
+ << QString("foo['bar']") << int(QScriptSyntaxCheckResult::Valid)
+ << -1 << -1 << "";
+
+ QTest::newRow("/*")
+ << QString("/*") << int(QScriptSyntaxCheckResult::Intermediate)
+ << 1 << 1 << "Unclosed comment at end of file";
+ QTest::newRow("/*\nMy comment")
+ << QString("/*\nMy comment") << int(QScriptSyntaxCheckResult::Intermediate)
+ << 1 << 1 << "Unclosed comment at end of file";
+ QTest::newRow("/*\nMy comment */\nfoo = 10")
+ << QString("/*\nMy comment */\nfoo = 10") << int(QScriptSyntaxCheckResult::Valid)
+ << -1 << -1 << "";
+ QTest::newRow("foo = 10 /*")
+ << QString("foo = 10 /*") << int(QScriptSyntaxCheckResult::Intermediate)
+ << -1 << -1 << "";
+ QTest::newRow("foo = 10; /*")
+ << QString("foo = 10; /*") << int(QScriptSyntaxCheckResult::Intermediate)
+ << 1 << 11 << "Expected `end of file'";
+ QTest::newRow("foo = 10 /* My comment */")
+ << QString("foo = 10 /* My comment */") << int(QScriptSyntaxCheckResult::Valid)
+ << -1 << -1 << "";
+
+ QTest::newRow("/=/")
+ << QString("/=/") << int(QScriptSyntaxCheckResult::Valid) << -1 << -1 << "";
+ QTest::newRow("/=/g")
+ << QString("/=/g") << int(QScriptSyntaxCheckResult::Valid) << -1 << -1 << "";
+ QTest::newRow("/a/")
+ << QString("/a/") << int(QScriptSyntaxCheckResult::Valid) << -1 << -1 << "";
+ QTest::newRow("/a/g")
+ << QString("/a/g") << int(QScriptSyntaxCheckResult::Valid) << -1 << -1 << "";
+}
+
+void tst_QScriptEngine::checkSyntax()
+{
+ QFETCH(QString, code);
+ QFETCH(int, expectedState);
+ QFETCH(int, errorLineNumber);
+ QFETCH(int, errorColumnNumber);
+ QFETCH(QString, errorMessage);
+
+ QScriptSyntaxCheckResult result = QScriptEngine::checkSyntax(code);
+
+ // assignment
+ {
+ QScriptSyntaxCheckResult copy = result;
+ QCOMPARE(copy.state(), result.state());
+ QCOMPARE(copy.errorLineNumber(), result.errorLineNumber());
+ QCOMPARE(copy.errorColumnNumber(), result.errorColumnNumber());
+ QCOMPARE(copy.errorMessage(), result.errorMessage());
+ }
+ {
+ QScriptSyntaxCheckResult copy(result);
+ QCOMPARE(copy.state(), result.state());
+ QCOMPARE(copy.errorLineNumber(), result.errorLineNumber());
+ QCOMPARE(copy.errorColumnNumber(), result.errorColumnNumber());
+ QCOMPARE(copy.errorMessage(), result.errorMessage());
+ }
+
+ if (expectedState == QScriptSyntaxCheckResult::Intermediate)
+ QEXPECT_FAIL("", "QScriptSyntaxCheckResult::state() doesn't return the Intermediate state", Abort);
+ QCOMPARE(result.state(), QScriptSyntaxCheckResult::State(expectedState));
+ QCOMPARE(result.errorLineNumber(), errorLineNumber);
+ if (expectedState != QScriptSyntaxCheckResult::Valid && errorColumnNumber != 1)
+ QEXPECT_FAIL("", "QScriptSyntaxCheckResult::errorColumnNumber() doesn't return correct value", Continue);
+ QCOMPARE(result.errorColumnNumber(), errorColumnNumber);
+ QCOMPARE(result.errorMessage(), errorMessage);
+}
+
+
QTEST_MAIN(tst_QScriptEngine)
#include "tst_qscriptengine.moc"
--- /dev/null
+TEMPLATE = app
+TARGET = tst_qscriptstring
+QT += testlib
+include(../tests.pri)
+
+SOURCES += tst_qscriptstring.cpp
+
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#ifndef tst_qscriptstring_h
+#define tst_qscriptstring_h
+
+#include "qscriptengine.h"
+#include "qscriptstring.h"
+#include <QtCore/qhash.h>
+#include <QtTest/QtTest>
+
+class tst_QScriptString : public QObject {
+ Q_OBJECT
+
+public:
+ tst_QScriptString();
+ virtual ~tst_QScriptString();
+
+private slots:
+ void test();
+ void hash();
+ void toArrayIndex_data();
+ void toArrayIndex();
+};
+
+tst_QScriptString::tst_QScriptString()
+{
+}
+
+tst_QScriptString::~tst_QScriptString()
+{
+}
+
+void tst_QScriptString::test()
+{
+ QScriptEngine eng;
+ {
+ QScriptString str;
+ QVERIFY(!str.isValid());
+ QVERIFY(str == str);
+ QVERIFY(!(str != str));
+ QVERIFY(str.toString().isNull());
+
+ QScriptString str1(str);
+ QVERIFY(!str1.isValid());
+
+ QScriptString str2 = str;
+ QVERIFY(!str2.isValid());
+
+ QCOMPARE(str.toArrayIndex(), quint32(0xffffffff));
+ }
+ for (int x = 0; x < 2; ++x) {
+ QString ciao = QString::fromLatin1("ciao");
+ QScriptString str = eng.toStringHandle(ciao);
+ QVERIFY(str.isValid());
+ QVERIFY(str == str);
+ QVERIFY(!(str != str));
+ QCOMPARE(str.toString(), ciao);
+
+ QScriptString str1(str);
+ QCOMPARE(str, str1);
+
+ QScriptString str2 = str;
+ QCOMPARE(str, str2);
+
+ QScriptString str3 = eng.toStringHandle(ciao);
+ QVERIFY(str3.isValid());
+ QCOMPARE(str, str3);
+
+ eng.collectGarbage();
+
+ QVERIFY(str.isValid());
+ QCOMPARE(str.toString(), ciao);
+ QVERIFY(str1.isValid());
+ QCOMPARE(str1.toString(), ciao);
+ QVERIFY(str2.isValid());
+ QCOMPARE(str2.toString(), ciao);
+ QVERIFY(str3.isValid());
+ QCOMPARE(str3.toString(), ciao);
+ }
+ {
+ QScriptEngine* eng2 = new QScriptEngine;
+ QString one = QString::fromLatin1("one");
+ QString two = QString::fromLatin1("two");
+ QScriptString oneInterned = eng2->toStringHandle(one);
+ QCOMPARE(oneInterned.toString(), one);
+ QScriptString twoInterned = eng2->toStringHandle(two);
+ QCOMPARE(twoInterned.toString(), two);
+ QVERIFY(oneInterned != twoInterned);
+ QVERIFY(!(oneInterned == twoInterned));
+
+ delete eng2;
+ }
+}
+
+void tst_QScriptString::hash()
+{
+ QScriptEngine engine;
+ QHash<QScriptString, int> stringToInt;
+ QScriptString foo = engine.toStringHandle("foo");
+
+ QScriptString bar = engine.toStringHandle("bar");
+ QVERIFY(!stringToInt.contains(foo));
+ for (int i = 0; i < 1000000; ++i)
+ stringToInt.insert(foo, 123);
+ QCOMPARE(stringToInt.value(foo), 123);
+ QVERIFY(!stringToInt.contains(bar));
+ stringToInt.insert(bar, 456);
+ QCOMPARE(stringToInt.value(bar), 456);
+ QCOMPARE(stringToInt.value(foo), 123);
+}
+
+void tst_QScriptString::toArrayIndex_data()
+{
+ QTest::addColumn<QString>("input");
+ QTest::addColumn<bool>("expectSuccess");
+ QTest::addColumn<quint32>("expectedIndex");
+ QTest::newRow("foo") << QString::fromLatin1("foo") << false << quint32(0xffffffff);
+ QTest::newRow("empty") << QString::fromLatin1("") << false << quint32(0xffffffff);
+ QTest::newRow("0") << QString::fromLatin1("0") << true << quint32(0);
+ QTest::newRow("00") << QString::fromLatin1("00") << false << quint32(0xffffffff);
+ QTest::newRow("1") << QString::fromLatin1("1") << true << quint32(1);
+ QTest::newRow("123") << QString::fromLatin1("123") << true << quint32(123);
+ QTest::newRow("-1") << QString::fromLatin1("-1") << false << quint32(0xffffffff);
+ QTest::newRow("0a") << QString::fromLatin1("0a") << false << quint32(0xffffffff);
+ QTest::newRow("0x1") << QString::fromLatin1("0x1") << false << quint32(0xffffffff);
+ QTest::newRow("01") << QString::fromLatin1("01") << false << quint32(0xffffffff);
+ QTest::newRow("101a") << QString::fromLatin1("101a") << false << quint32(0xffffffff);
+ QTest::newRow("4294967294") << QString::fromLatin1("4294967294") << true << quint32(0xfffffffe);
+ QTest::newRow("4294967295") << QString::fromLatin1("4294967295") << false << quint32(0xffffffff);
+ QTest::newRow("11111111111") << QString::fromLatin1("11111111111") << false << quint32(0xffffffff);
+ QTest::newRow("0.0") << QString::fromLatin1("0.0") << false << quint32(0xffffffff);
+ QTest::newRow("1.0") << QString::fromLatin1("1.0") << false << quint32(0xffffffff);
+ QTest::newRow("1.5") << QString::fromLatin1("1.5") << false << quint32(0xffffffff);
+ QTest::newRow("1.") << QString::fromLatin1("1.") << false << quint32(0xffffffff);
+ QTest::newRow(".1") << QString::fromLatin1(".1") << false << quint32(0xffffffff);
+ QTest::newRow("1e0") << QString::fromLatin1("1e0") << false << quint32(0xffffffff);
+}
+
+void tst_QScriptString::toArrayIndex()
+{
+ QFETCH(QString, input);
+ QFETCH(bool, expectSuccess);
+ QFETCH(quint32, expectedIndex);
+ QScriptEngine engine;
+ for (int x = 0; x < 2; ++x) {
+ bool isArrayIndex;
+ bool* ptr = (!x) ? &isArrayIndex : (bool*)0;
+ quint32 result = engine.toStringHandle(input).toArrayIndex(ptr);
+ if (!x)
+ QCOMPARE(isArrayIndex, expectSuccess);
+ QCOMPARE(result, expectedIndex);
+ }
+}
+
+QTEST_MAIN(tst_QScriptString)
+#include "tst_qscriptstring.moc"
+
+#endif // tst_qscriptstring_h
TEMPLATE = app
TARGET = tst_qscriptvalue
QT += testlib
+isEmpty(OUTPUT_DIR): OUTPUT_DIR = ../../../..
include(../tests.pri)
-SOURCES += tst_qscriptvalue.cpp
+SOURCES += \
+ tst_qscriptvalue.cpp \
+ tst_qscriptvalue_generated.cpp
+HEADERS += \
+ tst_qscriptvalue.h
Boston, MA 02110-1301, USA.
*/
-#include "qscriptengine.h"
-#include "qscriptvalue.h"
-#include <QtTest/qtest.h>
-
-Q_DECLARE_METATYPE(QScriptValue*);
-Q_DECLARE_METATYPE(QScriptValue);
-
-class tst_QScriptValue : public QObject {
- Q_OBJECT
-
-public:
- tst_QScriptValue() {}
- virtual ~tst_QScriptValue() {}
-
-private slots:
- void toString_data();
- void toString();
- void copyConstructor_data();
- void copyConstructor();
- void assignOperator_data();
- void assignOperator();
- void dataSharing();
- void constructors_data();
- void constructors();
- void call();
-
- // copied from Qt's QtScript.
- void ctor();
-};
+#include "tst_qscriptvalue.h"
+#include <QtCore/qnumeric.h>
+
+tst_QScriptValue::tst_QScriptValue()
+ : engine(0)
+{
+}
+
+tst_QScriptValue::~tst_QScriptValue()
+{
+ delete engine;
+}
+
+void tst_QScriptValue::dataHelper(InitDataFunction init, DefineDataFunction define)
+{
+ QTest::addColumn<QString>("__expression__");
+ (this->*init)();
+ QHash<QString, QScriptValue>::const_iterator it;
+ for (it = m_values.constBegin(); it != m_values.constEnd(); ++it) {
+ m_currentExpression = it.key();
+ (this->*define)(it.key().toLatin1());
+ }
+ m_currentExpression = QString();
+}
+
+QTestData& tst_QScriptValue::newRow(const char* tag)
+{
+ return QTest::newRow(tag) << m_currentExpression;
+}
+
+void tst_QScriptValue::testHelper(TestFunction fun)
+{
+ QFETCH(QString, __expression__);
+ QScriptValue value = m_values.value(__expression__);
+ (this->*fun)(__expression__.toLatin1(), value);
+}
+
void tst_QScriptValue::ctor()
{
{
QScriptValue v;
QCOMPARE(v.isValid(), false);
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
{
QScriptValue v(&eng, QScriptValue::UndefinedValue);
QCOMPARE(v.isValid(), true);
QCOMPARE(v.isUndefined(), true);
QCOMPARE(v.isObject(), false);
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
{
QScriptValue v(QScriptValue::NullValue);
QCOMPARE(v.isValid(), true);
QCOMPARE(v.isNull(), true);
QCOMPARE(v.isObject(), false);
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
{
QScriptValue v(false);
QCOMPARE(v.isBool(), true);
QCOMPARE(v.isObject(), false);
QCOMPARE(v.toBoolean(), false);
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
{
QScriptValue v(int(1));
QCOMPARE(v.isNumber(), true);
QCOMPARE(v.isObject(), false);
QCOMPARE(v.toNumber(), 1.0);
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
{
QScriptValue v(uint(1));
QCOMPARE(v.isNumber(), true);
QCOMPARE(v.isObject(), false);
QCOMPARE(v.toNumber(), 1.0);
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
{
QScriptValue v(1.0);
QCOMPARE(v.isNumber(), true);
QCOMPARE(v.isObject(), false);
QCOMPARE(v.toNumber(), 1.0);
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
{
QScriptValue v("ciao");
QCOMPARE(v.isString(), true);
QCOMPARE(v.isObject(), false);
QCOMPARE(v.toString(), QLatin1String("ciao"));
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
{
QScriptValue v(QString("ciao"));
QCOMPARE(v.isString(), true);
QCOMPARE(v.isObject(), false);
QCOMPARE(v.toString(), QLatin1String("ciao"));
- QCOMPARE(v.engine(), (QScriptEngine *)0);
+ QCOMPARE(v.engine(), (QScriptEngine*)0);
}
// copy constructor, operator=
{
QScriptValue v(1.0);
QScriptValue v2(v);
QCOMPARE(v2.strictlyEquals(v), true);
- QCOMPARE(v2.engine(), (QScriptEngine *)0);
+ QCOMPARE(v2.engine(), (QScriptEngine*)0);
QScriptValue v3(v);
QCOMPARE(v3.strictlyEquals(v), true);
QCOMPARE(v3.strictlyEquals(v2), true);
- QCOMPARE(v3.engine(), (QScriptEngine *)0);
+ QCOMPARE(v3.engine(), (QScriptEngine*)0);
QScriptValue v4(2.0);
QCOMPARE(v4.strictlyEquals(v), false);
QVERIFY(QScriptValue(0, QString("ciao")).isString());
}
-void tst_QScriptValue::toString_data()
+void tst_QScriptValue::toStringSimple_data()
{
QTest::addColumn<QString>("code");
QTest::addColumn<QString>("result");
}
/* Test conversion to string from different JSC types */
-void tst_QScriptValue::toString()
+void tst_QScriptValue::toStringSimple()
{
QFETCH(QString, code);
QFETCH(QString, result);
QVERIFY(incr.call().isValid()); // Exception.
}
+
QTEST_MAIN(tst_QScriptValue)
-#include "tst_qscriptvalue.moc"
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#ifndef tst_qscriptvalue_h
+#define tst_qscriptvalue_h
+
+#include "qscriptengine.h"
+#include "qscriptvalue.h"
+#include <QtCore/qnumeric.h>
+#include <QtTest/qtest.h>
+
+Q_DECLARE_METATYPE(QScriptValue*);
+Q_DECLARE_METATYPE(QScriptValue);
+
+class tst_QScriptValue : public QObject {
+ Q_OBJECT
+
+public:
+ tst_QScriptValue();
+ virtual ~tst_QScriptValue();
+
+private slots:
+ void toStringSimple_data();
+ void toStringSimple();
+ void copyConstructor_data();
+ void copyConstructor();
+ void assignOperator_data();
+ void assignOperator();
+ void dataSharing();
+ void constructors_data();
+ void constructors();
+ void call();
+ void ctor();
+
+ // Generated test functions.
+ void isBool_data();
+ void isBool();
+
+ void isBoolean_data();
+ void isBoolean();
+
+ void isNumber_data();
+ void isNumber();
+
+ void isFunction_data();
+ void isFunction();
+
+ void isNull_data();
+ void isNull();
+
+ void isObject_data();
+ void isObject();
+
+ void isString_data();
+ void isString();
+
+ void isUndefined_data();
+ void isUndefined();
+
+ void isValid_data();
+ void isValid();
+
+ void toString_data();
+ void toString();
+
+ void toNumber_data();
+ void toNumber();
+
+ void toBool_data();
+ void toBool();
+
+ void toBoolean_data();
+ void toBoolean();
+
+ void toInteger_data();
+ void toInteger();
+
+ void toInt32_data();
+ void toInt32();
+
+ void toUInt32_data();
+ void toUInt32();
+
+ void toUInt16_data();
+ void toUInt16();
+
+private:
+ typedef void (tst_QScriptValue::*InitDataFunction)();
+ typedef void (tst_QScriptValue::*DefineDataFunction)(const char*);
+ void dataHelper(InitDataFunction init, DefineDataFunction define);
+ QTestData& newRow(const char* tag);
+
+ typedef void (tst_QScriptValue::*TestFunction)(const char*, const QScriptValue&);
+ void testHelper(TestFunction fun);
+
+ // Generated functions
+
+ void initScriptValues();
+
+ void isBool_initData();
+ void isBool_makeData(const char* expr);
+ void isBool_test(const char* expr, const QScriptValue& value);
+
+ void isBoolean_initData();
+ void isBoolean_makeData(const char* expr);
+ void isBoolean_test(const char* expr, const QScriptValue& value);
+
+ void isNumber_initData();
+ void isNumber_makeData(const char* expr);
+ void isNumber_test(const char* expr, const QScriptValue&);
+
+ void isFunction_initData();
+ void isFunction_makeData(const char* expr);
+ void isFunction_test(const char* expr, const QScriptValue& value);
+
+ void isNull_initData();
+ void isNull_makeData(const char* expr);
+ void isNull_test(const char* expr, const QScriptValue& value);
+
+ void isObject_initData();
+ void isObject_makeData(const char* expr);
+ void isObject_test(const char* expr, const QScriptValue& value);
+
+ void isString_initData();
+ void isString_makeData(const char* expr);
+ void isString_test(const char* expr, const QScriptValue& value);
+
+ void isUndefined_initData();
+ void isUndefined_makeData(const char* expr);
+ void isUndefined_test(const char* expr, const QScriptValue& value);
+
+ void isValid_initData();
+ void isValid_makeData(const char* expr);
+ void isValid_test(const char* expr, const QScriptValue& value);
+
+ void toString_initData();
+ void toString_makeData(const char*);
+ void toString_test(const char*, const QScriptValue&);
+
+ void toNumber_initData();
+ void toNumber_makeData(const char*);
+ void toNumber_test(const char*, const QScriptValue&);
+
+ void toBool_initData();
+ void toBool_makeData(const char*);
+ void toBool_test(const char*, const QScriptValue&);
+
+ void toBoolean_initData();
+ void toBoolean_makeData(const char*);
+ void toBoolean_test(const char*, const QScriptValue&);
+
+ void toInteger_initData();
+ void toInteger_makeData(const char*);
+ void toInteger_test(const char*, const QScriptValue&);
+
+ void toInt32_initData();
+ void toInt32_makeData(const char*);
+ void toInt32_test(const char*, const QScriptValue&);
+
+ void toUInt32_initData();
+ void toUInt32_makeData(const char*);
+ void toUInt32_test(const char*, const QScriptValue&);
+
+ void toUInt16_initData();
+ void toUInt16_makeData(const char*);
+ void toUInt16_test(const char*, const QScriptValue&);
+
+private:
+ QScriptEngine* engine;
+ QHash<QString, QScriptValue> m_values;
+ QString m_currentExpression;
+};
+
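+// Glue for the generated tests: DEFINE_TEST_FUNCTION(name) implements the
+// name_data() slot in terms of name_initData()/name_makeData() and the name()
+// slot in terms of name_test(), so each generated check only has to supply
+// those three helpers.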
+#define DEFINE_TEST_FUNCTION(name) \
+void tst_QScriptValue::name##_data() { dataHelper(&tst_QScriptValue::name##_initData, &tst_QScriptValue::name##_makeData); } \
+void tst_QScriptValue::name() { testHelper(&tst_QScriptValue::name##_test); }
+
+
+
+#endif // tst_qscriptvalue_h
--- /dev/null
+/*
+ Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Library General Public
+ License as published by the Free Software Foundation; either
+ version 2 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Library General Public License for more details.
+
+ You should have received a copy of the GNU Library General Public License
+ along with this library; see the file COPYING.LIB. If not, write to
+ the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ Boston, MA 02110-1301, USA.
+*/
+
+#include "tst_qscriptvalue.h"
+
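+// DEFINE_TEST_VALUE stores each value under the stringified form of the
+// expression that produced it (#expr); the generated *_makeData() tables look
+// values up by that exact string, so the two must stay in sync character for
+// character.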
+#define DEFINE_TEST_VALUE(expr) m_values.insert(QString::fromLatin1(#expr), expr)
+
+void tst_QScriptValue::initScriptValues()
+{
+ m_values.clear();
+ if (engine)
+ delete engine;
+ engine = new QScriptEngine;
+ DEFINE_TEST_VALUE(QScriptValue());
+ DEFINE_TEST_VALUE(QScriptValue(QScriptValue::UndefinedValue));
+ DEFINE_TEST_VALUE(QScriptValue(QScriptValue::NullValue));
+ DEFINE_TEST_VALUE(QScriptValue(true));
+ DEFINE_TEST_VALUE(QScriptValue(false));
+ DEFINE_TEST_VALUE(QScriptValue(int(122)));
+ DEFINE_TEST_VALUE(QScriptValue(uint(124)));
+ DEFINE_TEST_VALUE(QScriptValue(0));
+ DEFINE_TEST_VALUE(QScriptValue(0.0));
+ DEFINE_TEST_VALUE(QScriptValue(123.0));
+ DEFINE_TEST_VALUE(QScriptValue(6.37e-8));
+ DEFINE_TEST_VALUE(QScriptValue(-6.37e-8));
+ DEFINE_TEST_VALUE(QScriptValue(0x43211234));
+ DEFINE_TEST_VALUE(QScriptValue(0x10000));
+ DEFINE_TEST_VALUE(QScriptValue(0x10001));
+ DEFINE_TEST_VALUE(QScriptValue(qSNaN()));
+ DEFINE_TEST_VALUE(QScriptValue(qQNaN()));
+ DEFINE_TEST_VALUE(QScriptValue(qInf()));
+ DEFINE_TEST_VALUE(QScriptValue(-qInf()));
+ DEFINE_TEST_VALUE(QScriptValue("NaN"));
+ DEFINE_TEST_VALUE(QScriptValue("Infinity"));
+ DEFINE_TEST_VALUE(QScriptValue("-Infinity"));
+ DEFINE_TEST_VALUE(QScriptValue("ciao"));
+ DEFINE_TEST_VALUE(QScriptValue(QString::fromLatin1("ciao")));
+ DEFINE_TEST_VALUE(QScriptValue(QString("")));
+ DEFINE_TEST_VALUE(QScriptValue(QString()));
+ DEFINE_TEST_VALUE(QScriptValue(QString("0")));
+ DEFINE_TEST_VALUE(QScriptValue(QString("123")));
+ DEFINE_TEST_VALUE(QScriptValue(QString("12.4")));
+ DEFINE_TEST_VALUE(QScriptValue(0, QScriptValue::UndefinedValue));
+ DEFINE_TEST_VALUE(QScriptValue(0, QScriptValue::NullValue));
+ DEFINE_TEST_VALUE(QScriptValue(0, true));
+ DEFINE_TEST_VALUE(QScriptValue(0, false));
+ DEFINE_TEST_VALUE(QScriptValue(0, int(122)));
+ DEFINE_TEST_VALUE(QScriptValue(0, uint(124)));
+ DEFINE_TEST_VALUE(QScriptValue(0, 0));
+ DEFINE_TEST_VALUE(QScriptValue(0, 0.0));
+ DEFINE_TEST_VALUE(QScriptValue(0, 123.0));
+ DEFINE_TEST_VALUE(QScriptValue(0, 6.37e-8));
+ DEFINE_TEST_VALUE(QScriptValue(0, -6.37e-8));
+ DEFINE_TEST_VALUE(QScriptValue(0, 0x43211234));
+ DEFINE_TEST_VALUE(QScriptValue(0, 0x10000));
+ DEFINE_TEST_VALUE(QScriptValue(0, 0x10001));
+ DEFINE_TEST_VALUE(QScriptValue(0, qSNaN()));
+ DEFINE_TEST_VALUE(QScriptValue(0, qQNaN()));
+ DEFINE_TEST_VALUE(QScriptValue(0, qInf()));
+ DEFINE_TEST_VALUE(QScriptValue(0, -qInf()));
+ DEFINE_TEST_VALUE(QScriptValue(0, "NaN"));
+ DEFINE_TEST_VALUE(QScriptValue(0, "Infinity"));
+ DEFINE_TEST_VALUE(QScriptValue(0, "-Infinity"));
+ DEFINE_TEST_VALUE(QScriptValue(0, "ciao"));
+ DEFINE_TEST_VALUE(QScriptValue(0, QString::fromLatin1("ciao")));
+ DEFINE_TEST_VALUE(QScriptValue(0, QString("")));
+ DEFINE_TEST_VALUE(QScriptValue(0, QString()));
+ DEFINE_TEST_VALUE(QScriptValue(0, QString("0")));
+ DEFINE_TEST_VALUE(QScriptValue(0, QString("123")));
+ DEFINE_TEST_VALUE(QScriptValue(0, QString("12.3")));
+ DEFINE_TEST_VALUE(QScriptValue(engine, QScriptValue::UndefinedValue));
+ DEFINE_TEST_VALUE(QScriptValue(engine, QScriptValue::NullValue));
+ DEFINE_TEST_VALUE(QScriptValue(engine, true));
+ DEFINE_TEST_VALUE(QScriptValue(engine, false));
+ DEFINE_TEST_VALUE(QScriptValue(engine, int(122)));
+ DEFINE_TEST_VALUE(QScriptValue(engine, uint(124)));
+ DEFINE_TEST_VALUE(QScriptValue(engine, 0));
+ DEFINE_TEST_VALUE(QScriptValue(engine, 0.0));
+ DEFINE_TEST_VALUE(QScriptValue(engine, 123.0));
+ DEFINE_TEST_VALUE(QScriptValue(engine, 6.37e-8));
+ DEFINE_TEST_VALUE(QScriptValue(engine, -6.37e-8));
+ DEFINE_TEST_VALUE(QScriptValue(engine, 0x43211234));
+ DEFINE_TEST_VALUE(QScriptValue(engine, 0x10000));
+ DEFINE_TEST_VALUE(QScriptValue(engine, 0x10001));
+ DEFINE_TEST_VALUE(QScriptValue(engine, qSNaN()));
+ DEFINE_TEST_VALUE(QScriptValue(engine, qQNaN()));
+ DEFINE_TEST_VALUE(QScriptValue(engine, qInf()));
+ DEFINE_TEST_VALUE(QScriptValue(engine, -qInf()));
+ DEFINE_TEST_VALUE(QScriptValue(engine, "NaN"));
+ DEFINE_TEST_VALUE(QScriptValue(engine, "Infinity"));
+ DEFINE_TEST_VALUE(QScriptValue(engine, "-Infinity"));
+ DEFINE_TEST_VALUE(QScriptValue(engine, "ciao"));
+ DEFINE_TEST_VALUE(QScriptValue(engine, QString::fromLatin1("ciao")));
+ DEFINE_TEST_VALUE(QScriptValue(engine, QString("")));
+ DEFINE_TEST_VALUE(QScriptValue(engine, QString()));
+ DEFINE_TEST_VALUE(QScriptValue(engine, QString("0")));
+ DEFINE_TEST_VALUE(QScriptValue(engine, QString("123")));
+ DEFINE_TEST_VALUE(QScriptValue(engine, QString("1.23")));
+ DEFINE_TEST_VALUE(engine->evaluate("[]"));
+ DEFINE_TEST_VALUE(engine->evaluate("{}"));
+ DEFINE_TEST_VALUE(engine->evaluate("Object.prototype"));
+ DEFINE_TEST_VALUE(engine->evaluate("Date.prototype"));
+ DEFINE_TEST_VALUE(engine->evaluate("Array.prototype"));
+ DEFINE_TEST_VALUE(engine->evaluate("Function.prototype"));
+ DEFINE_TEST_VALUE(engine->evaluate("Error.prototype"));
+ DEFINE_TEST_VALUE(engine->evaluate("Object"));
+ DEFINE_TEST_VALUE(engine->evaluate("Array"));
+ DEFINE_TEST_VALUE(engine->evaluate("Number"));
+ DEFINE_TEST_VALUE(engine->evaluate("Function"));
+ DEFINE_TEST_VALUE(engine->evaluate("(function() { return 1; })"));
+ DEFINE_TEST_VALUE(engine->evaluate("(function() { return 'ciao'; })"));
+ DEFINE_TEST_VALUE(engine->evaluate("(function() { throw new Error('foo'); })"));
+ DEFINE_TEST_VALUE(engine->evaluate("/foo/"));
+ DEFINE_TEST_VALUE(engine->evaluate("new Object()"));
+ DEFINE_TEST_VALUE(engine->evaluate("new Array()"));
+ DEFINE_TEST_VALUE(engine->evaluate("new Error()"));
+ DEFINE_TEST_VALUE(engine->evaluate("a = new Object(); a.foo = 22; a.foo"));
+ DEFINE_TEST_VALUE(engine->evaluate("Undefined"));
+ DEFINE_TEST_VALUE(engine->evaluate("Null"));
+ DEFINE_TEST_VALUE(engine->evaluate("True"));
+ DEFINE_TEST_VALUE(engine->evaluate("False"));
+ DEFINE_TEST_VALUE(engine->evaluate("undefined"));
+ DEFINE_TEST_VALUE(engine->evaluate("null"));
+ DEFINE_TEST_VALUE(engine->evaluate("true"));
+ DEFINE_TEST_VALUE(engine->evaluate("false"));
+ DEFINE_TEST_VALUE(engine->evaluate("122"));
+ DEFINE_TEST_VALUE(engine->evaluate("124"));
+ DEFINE_TEST_VALUE(engine->evaluate("0"));
+ DEFINE_TEST_VALUE(engine->evaluate("0.0"));
+ DEFINE_TEST_VALUE(engine->evaluate("123.0"));
+ DEFINE_TEST_VALUE(engine->evaluate("6.37e-8"));
+ DEFINE_TEST_VALUE(engine->evaluate("-6.37e-8"));
+ DEFINE_TEST_VALUE(engine->evaluate("0x43211234"));
+ DEFINE_TEST_VALUE(engine->evaluate("0x10000"));
+ DEFINE_TEST_VALUE(engine->evaluate("0x10001"));
+ DEFINE_TEST_VALUE(engine->evaluate("NaN"));
+ DEFINE_TEST_VALUE(engine->evaluate("Infinity"));
+ DEFINE_TEST_VALUE(engine->evaluate("-Infinity"));
+ DEFINE_TEST_VALUE(engine->evaluate("'ciao'"));
+ DEFINE_TEST_VALUE(engine->evaluate("''"));
+ DEFINE_TEST_VALUE(engine->evaluate("'0'"));
+ DEFINE_TEST_VALUE(engine->evaluate("'123'"));
+ DEFINE_TEST_VALUE(engine->evaluate("'12.4'"));
+ DEFINE_TEST_VALUE(engine->nullValue());
+ DEFINE_TEST_VALUE(engine->undefinedValue());
+}
+
+
+void tst_QScriptValue::isValid_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
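+// Each is*_makeData() function lists the expressions for which the predicate
+// is expected to return true; every other row defaults to false via
+// QSet::contains().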
+void tst_QScriptValue::isValid_makeData(const char* expr)
+{
+ static QSet<QString> isValid;
+ if (isValid.isEmpty()) {
+ isValid << "QScriptValue(QScriptValue::UndefinedValue)"
+ << "QScriptValue(QScriptValue::NullValue)"
+ << "QScriptValue(true)"
+ << "QScriptValue(false)"
+ << "QScriptValue(int(122))"
+ << "QScriptValue(uint(124))"
+ << "QScriptValue(0)"
+ << "QScriptValue(0.0)"
+ << "QScriptValue(123.0)"
+ << "QScriptValue(6.37e-8)"
+ << "QScriptValue(-6.37e-8)"
+ << "QScriptValue(0x43211234)"
+ << "QScriptValue(0x10000)"
+ << "QScriptValue(0x10001)"
+ << "QScriptValue(qSNaN())"
+ << "QScriptValue(qQNaN())"
+ << "QScriptValue(qInf())"
+ << "QScriptValue(-qInf())"
+ << "QScriptValue(\"NaN\")"
+ << "QScriptValue(\"Infinity\")"
+ << "QScriptValue(\"-Infinity\")"
+ << "QScriptValue(\"ciao\")"
+ << "QScriptValue(QString::fromLatin1(\"ciao\"))"
+ << "QScriptValue(QString(\"\"))"
+ << "QScriptValue(QString())"
+ << "QScriptValue(QString(\"0\"))"
+ << "QScriptValue(QString(\"123\"))"
+ << "QScriptValue(QString(\"12.4\"))"
+ << "QScriptValue(0, QScriptValue::UndefinedValue)"
+ << "QScriptValue(0, QScriptValue::NullValue)"
+ << "QScriptValue(0, true)"
+ << "QScriptValue(0, false)"
+ << "QScriptValue(0, int(122))"
+ << "QScriptValue(0, uint(124))"
+ << "QScriptValue(0, 0)"
+ << "QScriptValue(0, 0.0)"
+ << "QScriptValue(0, 123.0)"
+ << "QScriptValue(0, 6.37e-8)"
+ << "QScriptValue(0, -6.37e-8)"
+ << "QScriptValue(0, 0x43211234)"
+ << "QScriptValue(0, 0x10000)"
+ << "QScriptValue(0, 0x10001)"
+ << "QScriptValue(0, qSNaN())"
+ << "QScriptValue(0, qQNaN())"
+ << "QScriptValue(0, qInf())"
+ << "QScriptValue(0, -qInf())"
+ << "QScriptValue(0, \"NaN\")"
+ << "QScriptValue(0, \"Infinity\")"
+ << "QScriptValue(0, \"-Infinity\")"
+ << "QScriptValue(0, \"ciao\")"
+ << "QScriptValue(0, QString::fromLatin1(\"ciao\"))"
+ << "QScriptValue(0, QString(\"\"))"
+ << "QScriptValue(0, QString())"
+ << "QScriptValue(0, QString(\"0\"))"
+ << "QScriptValue(0, QString(\"123\"))"
+ << "QScriptValue(0, QString(\"12.3\"))"
+ << "QScriptValue(engine, QScriptValue::UndefinedValue)"
+ << "QScriptValue(engine, QScriptValue::NullValue)"
+ << "QScriptValue(engine, true)"
+ << "QScriptValue(engine, false)"
+ << "QScriptValue(engine, int(122))"
+ << "QScriptValue(engine, uint(124))"
+ << "QScriptValue(engine, 0)"
+ << "QScriptValue(engine, 0.0)"
+ << "QScriptValue(engine, 123.0)"
+ << "QScriptValue(engine, 6.37e-8)"
+ << "QScriptValue(engine, -6.37e-8)"
+ << "QScriptValue(engine, 0x43211234)"
+ << "QScriptValue(engine, 0x10000)"
+ << "QScriptValue(engine, 0x10001)"
+ << "QScriptValue(engine, qSNaN())"
+ << "QScriptValue(engine, qQNaN())"
+ << "QScriptValue(engine, qInf())"
+ << "QScriptValue(engine, -qInf())"
+ << "QScriptValue(engine, \"NaN\")"
+ << "QScriptValue(engine, \"Infinity\")"
+ << "QScriptValue(engine, \"-Infinity\")"
+ << "QScriptValue(engine, \"ciao\")"
+ << "QScriptValue(engine, QString::fromLatin1(\"ciao\"))"
+ << "QScriptValue(engine, QString(\"\"))"
+ << "QScriptValue(engine, QString())"
+ << "QScriptValue(engine, QString(\"0\"))"
+ << "QScriptValue(engine, QString(\"123\"))"
+ << "QScriptValue(engine, QString(\"1.23\"))"
+ << "engine->evaluate(\"[]\")"
+ << "engine->evaluate(\"{}\")"
+ << "engine->evaluate(\"Object.prototype\")"
+ << "engine->evaluate(\"Date.prototype\")"
+ << "engine->evaluate(\"Array.prototype\")"
+ << "engine->evaluate(\"Function.prototype\")"
+ << "engine->evaluate(\"Error.prototype\")"
+ << "engine->evaluate(\"Object\")"
+ << "engine->evaluate(\"Array\")"
+ << "engine->evaluate(\"Number\")"
+ << "engine->evaluate(\"Function\")"
+ << "engine->evaluate(\"(function() { return 1; })\")"
+ << "engine->evaluate(\"(function() { return 'ciao'; })\")"
+ << "engine->evaluate(\"(function() { throw new Error('foo'); })\")"
+ << "engine->evaluate(\"/foo/\")"
+ << "engine->evaluate(\"new Object()\")"
+ << "engine->evaluate(\"new Array()\")"
+ << "engine->evaluate(\"new Error()\")"
+ << "engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")"
+ << "engine->evaluate(\"Undefined\")"
+ << "engine->evaluate(\"Null\")"
+ << "engine->evaluate(\"True\")"
+ << "engine->evaluate(\"False\")"
+ << "engine->evaluate(\"undefined\")"
+ << "engine->evaluate(\"null\")"
+ << "engine->evaluate(\"true\")"
+ << "engine->evaluate(\"false\")"
+ << "engine->evaluate(\"122\")"
+ << "engine->evaluate(\"124\")"
+ << "engine->evaluate(\"0\")"
+ << "engine->evaluate(\"0.0\")"
+ << "engine->evaluate(\"123.0\")"
+ << "engine->evaluate(\"6.37e-8\")"
+ << "engine->evaluate(\"-6.37e-8\")"
+ << "engine->evaluate(\"0x43211234\")"
+ << "engine->evaluate(\"0x10000\")"
+ << "engine->evaluate(\"0x10001\")"
+ << "engine->evaluate(\"NaN\")"
+ << "engine->evaluate(\"Infinity\")"
+ << "engine->evaluate(\"-Infinity\")"
+ << "engine->evaluate(\"'ciao'\")"
+ << "engine->evaluate(\"''\")"
+ << "engine->evaluate(\"'0'\")"
+ << "engine->evaluate(\"'123'\")"
+ << "engine->evaluate(\"'12.4'\")"
+ << "engine->nullValue()"
+ << "engine->undefinedValue()";
+ }
+ newRow(expr) << isValid.contains(expr);
+}
+
+void tst_QScriptValue::isValid_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isValid(), expected);
+ QCOMPARE(value.isValid(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isValid)
+
+
+void tst_QScriptValue::isBool_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::isBool_makeData(const char* expr)
+{
+ static QSet<QString> isBool;
+ if (isBool.isEmpty()) {
+ isBool << "QScriptValue(true)"
+ << "QScriptValue(false)"
+ << "QScriptValue(0, true)"
+ << "QScriptValue(0, false)"
+ << "QScriptValue(engine, true)"
+ << "QScriptValue(engine, false)"
+ << "engine->evaluate(\"true\")"
+ << "engine->evaluate(\"false\")";
+ }
+ newRow(expr) << isBool.contains(expr);
+}
+
+void tst_QScriptValue::isBool_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isBool(), expected);
+ QCOMPARE(value.isBool(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isBool)
+
+
+void tst_QScriptValue::isBoolean_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::isBoolean_makeData(const char* expr)
+{
+ static QSet<QString> isBoolean;
+ if (isBoolean.isEmpty()) {
+ isBoolean << "QScriptValue(true)"
+ << "QScriptValue(false)"
+ << "QScriptValue(0, true)"
+ << "QScriptValue(0, false)"
+ << "QScriptValue(engine, true)"
+ << "QScriptValue(engine, false)"
+ << "engine->evaluate(\"true\")"
+ << "engine->evaluate(\"false\")";
+ }
+ newRow(expr) << isBoolean.contains(expr);
+}
+
+void tst_QScriptValue::isBoolean_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isBoolean(), expected);
+ QCOMPARE(value.isBoolean(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isBoolean)
+
+
+void tst_QScriptValue::isNumber_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::isNumber_makeData(const char* expr)
+{
+ static QSet<QString> isNumber;
+ if (isNumber.isEmpty()) {
+ isNumber << "QScriptValue(int(122))"
+ << "QScriptValue(uint(124))"
+ << "QScriptValue(0)"
+ << "QScriptValue(0.0)"
+ << "QScriptValue(123.0)"
+ << "QScriptValue(6.37e-8)"
+ << "QScriptValue(-6.37e-8)"
+ << "QScriptValue(0x43211234)"
+ << "QScriptValue(0x10000)"
+ << "QScriptValue(0x10001)"
+ << "QScriptValue(qSNaN())"
+ << "QScriptValue(qQNaN())"
+ << "QScriptValue(qInf())"
+ << "QScriptValue(-qInf())"
+ << "QScriptValue(0, int(122))"
+ << "QScriptValue(0, uint(124))"
+ << "QScriptValue(0, 0)"
+ << "QScriptValue(0, 0.0)"
+ << "QScriptValue(0, 123.0)"
+ << "QScriptValue(0, 6.37e-8)"
+ << "QScriptValue(0, -6.37e-8)"
+ << "QScriptValue(0, 0x43211234)"
+ << "QScriptValue(0, 0x10000)"
+ << "QScriptValue(0, 0x10001)"
+ << "QScriptValue(0, qSNaN())"
+ << "QScriptValue(0, qQNaN())"
+ << "QScriptValue(0, qInf())"
+ << "QScriptValue(0, -qInf())"
+ << "QScriptValue(engine, int(122))"
+ << "QScriptValue(engine, uint(124))"
+ << "QScriptValue(engine, 0)"
+ << "QScriptValue(engine, 0.0)"
+ << "QScriptValue(engine, 123.0)"
+ << "QScriptValue(engine, 6.37e-8)"
+ << "QScriptValue(engine, -6.37e-8)"
+ << "QScriptValue(engine, 0x43211234)"
+ << "QScriptValue(engine, 0x10000)"
+ << "QScriptValue(engine, 0x10001)"
+ << "QScriptValue(engine, qSNaN())"
+ << "QScriptValue(engine, qQNaN())"
+ << "QScriptValue(engine, qInf())"
+ << "QScriptValue(engine, -qInf())"
+ << "engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")"
+ << "engine->evaluate(\"122\")"
+ << "engine->evaluate(\"124\")"
+ << "engine->evaluate(\"0\")"
+ << "engine->evaluate(\"0.0\")"
+ << "engine->evaluate(\"123.0\")"
+ << "engine->evaluate(\"6.37e-8\")"
+ << "engine->evaluate(\"-6.37e-8\")"
+ << "engine->evaluate(\"0x43211234\")"
+ << "engine->evaluate(\"0x10000\")"
+ << "engine->evaluate(\"0x10001\")"
+ << "engine->evaluate(\"NaN\")"
+ << "engine->evaluate(\"Infinity\")"
+ << "engine->evaluate(\"-Infinity\")";
+ }
+ newRow(expr) << isNumber.contains(expr);
+}
+
+void tst_QScriptValue::isNumber_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isNumber(), expected);
+ QCOMPARE(value.isNumber(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isNumber)
+
+
+void tst_QScriptValue::isFunction_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::isFunction_makeData(const char* expr)
+{
+ static QSet<QString> isFunction;
+ if (isFunction.isEmpty()) {
+ isFunction << "engine->evaluate(\"Function.prototype\")"
+ << "engine->evaluate(\"Object\")"
+ << "engine->evaluate(\"Array\")"
+ << "engine->evaluate(\"Number\")"
+ << "engine->evaluate(\"Function\")"
+ << "engine->evaluate(\"(function() { return 1; })\")"
+ << "engine->evaluate(\"(function() { return 'ciao'; })\")"
+ << "engine->evaluate(\"(function() { throw new Error('foo'); })\")"
+ << "engine->evaluate(\"/foo/\")";
+ }
+ newRow(expr) << isFunction.contains(expr);
+}
+
+void tst_QScriptValue::isFunction_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isFunction(), expected);
+ QCOMPARE(value.isFunction(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isFunction)
+
+
+void tst_QScriptValue::isNull_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::isNull_makeData(const char* expr)
+{
+ static QSet<QString> isNull;
+ if (isNull.isEmpty()) {
+ isNull << "QScriptValue(QScriptValue::NullValue)"
+ << "QScriptValue(0, QScriptValue::NullValue)"
+ << "QScriptValue(engine, QScriptValue::NullValue)"
+ << "engine->evaluate(\"null\")"
+ << "engine->nullValue()";
+ }
+ newRow(expr) << isNull.contains(expr);
+}
+
+void tst_QScriptValue::isNull_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isNull(), expected);
+ QCOMPARE(value.isNull(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isNull)
+
+
+void tst_QScriptValue::isString_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::isString_makeData(const char* expr)
+{
+ static QSet<QString> isString;
+ if (isString.isEmpty()) {
+ isString << "QScriptValue(\"NaN\")"
+ << "QScriptValue(\"Infinity\")"
+ << "QScriptValue(\"-Infinity\")"
+ << "QScriptValue(\"ciao\")"
+ << "QScriptValue(QString::fromLatin1(\"ciao\"))"
+ << "QScriptValue(QString(\"\"))"
+ << "QScriptValue(QString())"
+ << "QScriptValue(QString(\"0\"))"
+ << "QScriptValue(QString(\"123\"))"
+ << "QScriptValue(QString(\"12.4\"))"
+ << "QScriptValue(0, \"NaN\")"
+ << "QScriptValue(0, \"Infinity\")"
+ << "QScriptValue(0, \"-Infinity\")"
+ << "QScriptValue(0, \"ciao\")"
+ << "QScriptValue(0, QString::fromLatin1(\"ciao\"))"
+ << "QScriptValue(0, QString(\"\"))"
+ << "QScriptValue(0, QString())"
+ << "QScriptValue(0, QString(\"0\"))"
+ << "QScriptValue(0, QString(\"123\"))"
+ << "QScriptValue(0, QString(\"12.3\"))"
+ << "QScriptValue(engine, \"NaN\")"
+ << "QScriptValue(engine, \"Infinity\")"
+ << "QScriptValue(engine, \"-Infinity\")"
+ << "QScriptValue(engine, \"ciao\")"
+ << "QScriptValue(engine, QString::fromLatin1(\"ciao\"))"
+ << "QScriptValue(engine, QString(\"\"))"
+ << "QScriptValue(engine, QString())"
+ << "QScriptValue(engine, QString(\"0\"))"
+ << "QScriptValue(engine, QString(\"123\"))"
+ << "QScriptValue(engine, QString(\"1.23\"))"
+ << "engine->evaluate(\"'ciao'\")"
+ << "engine->evaluate(\"''\")"
+ << "engine->evaluate(\"'0'\")"
+ << "engine->evaluate(\"'123'\")"
+ << "engine->evaluate(\"'12.4'\")";
+ }
+ newRow(expr) << isString.contains(expr);
+}
+
+void tst_QScriptValue::isString_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isString(), expected);
+ QCOMPARE(value.isString(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isString)
+
+
+void tst_QScriptValue::isUndefined_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::isUndefined_makeData(const char* expr)
+{
+ static QSet<QString> isUndefined;
+ if (isUndefined.isEmpty()) {
+ isUndefined << "QScriptValue(QScriptValue::UndefinedValue)"
+ << "QScriptValue(0, QScriptValue::UndefinedValue)"
+ << "QScriptValue(engine, QScriptValue::UndefinedValue)"
+ << "engine->evaluate(\"{}\")"
+ << "engine->evaluate(\"undefined\")"
+ << "engine->undefinedValue()";
+ }
+ newRow(expr) << isUndefined.contains(expr);
+}
+
+void tst_QScriptValue::isUndefined_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isUndefined(), expected);
+ QCOMPARE(value.isUndefined(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isUndefined)
+
+
+void tst_QScriptValue::isObject_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::isObject_makeData(const char* expr)
+{
+ static QSet<QString> isObject;
+ if (isObject.isEmpty()) {
+ isObject << "engine->evaluate(\"[]\")"
+ << "engine->evaluate(\"Object.prototype\")"
+ << "engine->evaluate(\"Date.prototype\")"
+ << "engine->evaluate(\"Array.prototype\")"
+ << "engine->evaluate(\"Function.prototype\")"
+ << "engine->evaluate(\"Error.prototype\")"
+ << "engine->evaluate(\"Object\")"
+ << "engine->evaluate(\"Array\")"
+ << "engine->evaluate(\"Number\")"
+ << "engine->evaluate(\"Function\")"
+ << "engine->evaluate(\"(function() { return 1; })\")"
+ << "engine->evaluate(\"(function() { return 'ciao'; })\")"
+ << "engine->evaluate(\"(function() { throw new Error('foo'); })\")"
+ << "engine->evaluate(\"/foo/\")"
+ << "engine->evaluate(\"new Object()\")"
+ << "engine->evaluate(\"new Array()\")"
+ << "engine->evaluate(\"new Error()\")"
+ << "engine->evaluate(\"Undefined\")"
+ << "engine->evaluate(\"Null\")"
+ << "engine->evaluate(\"True\")"
+ << "engine->evaluate(\"False\")";
+ }
+ newRow(expr) << isObject.contains(expr);
+}
+
+void tst_QScriptValue::isObject_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.isObject(), expected);
+ QCOMPARE(value.isObject(), expected);
+}
+
+DEFINE_TEST_FUNCTION(isObject)
+
+
+void tst_QScriptValue::toString_initData()
+{
+ QTest::addColumn<QString>("expected");
+ initScriptValues();
+}
+
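+// The to*_makeData() functions map each expression to its expected conversion
+// result; QHash::value() yields a default-constructed value for any expression
+// that is not listed.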
+void tst_QScriptValue::toString_makeData(const char* expr)
+{
+ static QHash<QString, QString> toString;
+ if (toString.isEmpty()) {
+ toString.insert("QScriptValue()", "");
+ toString.insert("QScriptValue(QScriptValue::UndefinedValue)", "undefined");
+ toString.insert("QScriptValue(QScriptValue::NullValue)", "null");
+ toString.insert("QScriptValue(true)", "true");
+ toString.insert("QScriptValue(false)", "false");
+ toString.insert("QScriptValue(int(122))", "122");
+ toString.insert("QScriptValue(uint(124))", "124");
+ toString.insert("QScriptValue(0)", "0");
+ toString.insert("QScriptValue(0.0)", "0");
+ toString.insert("QScriptValue(123.0)", "123");
+ toString.insert("QScriptValue(6.37e-8)", "6.37e-8");
+ toString.insert("QScriptValue(-6.37e-8)", "-6.37e-8");
+ toString.insert("QScriptValue(0x43211234)", "1126240820");
+ toString.insert("QScriptValue(0x10000)", "65536");
+ toString.insert("QScriptValue(0x10001)", "65537");
+ toString.insert("QScriptValue(qSNaN())", "NaN");
+ toString.insert("QScriptValue(qQNaN())", "NaN");
+ toString.insert("QScriptValue(qInf())", "Infinity");
+ toString.insert("QScriptValue(-qInf())", "-Infinity");
+ toString.insert("QScriptValue(\"NaN\")", "NaN");
+ toString.insert("QScriptValue(\"Infinity\")", "Infinity");
+ toString.insert("QScriptValue(\"-Infinity\")", "-Infinity");
+ toString.insert("QScriptValue(\"ciao\")", "ciao");
+ toString.insert("QScriptValue(QString::fromLatin1(\"ciao\"))", "ciao");
+ toString.insert("QScriptValue(QString(\"\"))", "");
+ toString.insert("QScriptValue(QString())", "");
+ toString.insert("QScriptValue(QString(\"0\"))", "0");
+ toString.insert("QScriptValue(QString(\"123\"))", "123");
+ toString.insert("QScriptValue(QString(\"12.4\"))", "12.4");
+ toString.insert("QScriptValue(0, QScriptValue::UndefinedValue)", "undefined");
+ toString.insert("QScriptValue(0, QScriptValue::NullValue)", "null");
+ toString.insert("QScriptValue(0, true)", "true");
+ toString.insert("QScriptValue(0, false)", "false");
+ toString.insert("QScriptValue(0, int(122))", "122");
+ toString.insert("QScriptValue(0, uint(124))", "124");
+ toString.insert("QScriptValue(0, 0)", "0");
+ toString.insert("QScriptValue(0, 0.0)", "0");
+ toString.insert("QScriptValue(0, 123.0)", "123");
+ toString.insert("QScriptValue(0, 6.37e-8)", "6.37e-8");
+ toString.insert("QScriptValue(0, -6.37e-8)", "-6.37e-8");
+ toString.insert("QScriptValue(0, 0x43211234)", "1126240820");
+ toString.insert("QScriptValue(0, 0x10000)", "65536");
+ toString.insert("QScriptValue(0, 0x10001)", "65537");
+ toString.insert("QScriptValue(0, qSNaN())", "NaN");
+ toString.insert("QScriptValue(0, qQNaN())", "NaN");
+ toString.insert("QScriptValue(0, qInf())", "Infinity");
+ toString.insert("QScriptValue(0, -qInf())", "-Infinity");
+ toString.insert("QScriptValue(0, \"NaN\")", "NaN");
+ toString.insert("QScriptValue(0, \"Infinity\")", "Infinity");
+ toString.insert("QScriptValue(0, \"-Infinity\")", "-Infinity");
+ toString.insert("QScriptValue(0, \"ciao\")", "ciao");
+ toString.insert("QScriptValue(0, QString::fromLatin1(\"ciao\"))", "ciao");
+ toString.insert("QScriptValue(0, QString(\"\"))", "");
+ toString.insert("QScriptValue(0, QString())", "");
+ toString.insert("QScriptValue(0, QString(\"0\"))", "0");
+ toString.insert("QScriptValue(0, QString(\"123\"))", "123");
+ toString.insert("QScriptValue(0, QString(\"12.3\"))", "12.3");
+ toString.insert("QScriptValue(engine, QScriptValue::UndefinedValue)", "undefined");
+ toString.insert("QScriptValue(engine, QScriptValue::NullValue)", "null");
+ toString.insert("QScriptValue(engine, true)", "true");
+ toString.insert("QScriptValue(engine, false)", "false");
+ toString.insert("QScriptValue(engine, int(122))", "122");
+ toString.insert("QScriptValue(engine, uint(124))", "124");
+ toString.insert("QScriptValue(engine, 0)", "0");
+ toString.insert("QScriptValue(engine, 0.0)", "0");
+ toString.insert("QScriptValue(engine, 123.0)", "123");
+ toString.insert("QScriptValue(engine, 6.37e-8)", "6.37e-8");
+ toString.insert("QScriptValue(engine, -6.37e-8)", "-6.37e-8");
+ toString.insert("QScriptValue(engine, 0x43211234)", "1126240820");
+ toString.insert("QScriptValue(engine, 0x10000)", "65536");
+ toString.insert("QScriptValue(engine, 0x10001)", "65537");
+ toString.insert("QScriptValue(engine, qSNaN())", "NaN");
+ toString.insert("QScriptValue(engine, qQNaN())", "NaN");
+ toString.insert("QScriptValue(engine, qInf())", "Infinity");
+ toString.insert("QScriptValue(engine, -qInf())", "-Infinity");
+ toString.insert("QScriptValue(engine, \"NaN\")", "NaN");
+ toString.insert("QScriptValue(engine, \"Infinity\")", "Infinity");
+ toString.insert("QScriptValue(engine, \"-Infinity\")", "-Infinity");
+ toString.insert("QScriptValue(engine, \"ciao\")", "ciao");
+ toString.insert("QScriptValue(engine, QString::fromLatin1(\"ciao\"))", "ciao");
+ toString.insert("QScriptValue(engine, QString(\"\"))", "");
+ toString.insert("QScriptValue(engine, QString())", "");
+ toString.insert("QScriptValue(engine, QString(\"0\"))", "0");
+ toString.insert("QScriptValue(engine, QString(\"123\"))", "123");
+ toString.insert("QScriptValue(engine, QString(\"1.23\"))", "1.23");
+ toString.insert("engine->evaluate(\"[]\")", "");
+ toString.insert("engine->evaluate(\"{}\")", "undefined");
+ toString.insert("engine->evaluate(\"Object.prototype\")", "[object Object]");
+ toString.insert("engine->evaluate(\"Date.prototype\")", "Invalid Date");
+ toString.insert("engine->evaluate(\"Array.prototype\")", "");
+ toString.insert("engine->evaluate(\"Function.prototype\")", "function () {\n [native code]\n}");
+ toString.insert("engine->evaluate(\"Error.prototype\")", "Error: Unknown error");
+ toString.insert("engine->evaluate(\"Object\")", "function Object() {\n [native code]\n}");
+ toString.insert("engine->evaluate(\"Array\")", "function Array() {\n [native code]\n}");
+ toString.insert("engine->evaluate(\"Number\")", "function Number() {\n [native code]\n}");
+ toString.insert("engine->evaluate(\"Function\")", "function Function() {\n [native code]\n}");
+ toString.insert("engine->evaluate(\"(function() { return 1; })\")", "function () { return 1; }");
+ toString.insert("engine->evaluate(\"(function() { return 'ciao'; })\")", "function () { return 'ciao'; }");
+ toString.insert("engine->evaluate(\"(function() { throw new Error('foo'); })\")", "function () { throw new Error('foo'); }");
+ toString.insert("engine->evaluate(\"/foo/\")", "/foo/");
+ toString.insert("engine->evaluate(\"new Object()\")", "[object Object]");
+ toString.insert("engine->evaluate(\"new Array()\")", "");
+ toString.insert("engine->evaluate(\"new Error()\")", "Error: Unknown error");
+ toString.insert("engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")", "22");
+ toString.insert("engine->evaluate(\"Undefined\")", "ReferenceError: Can't find variable: Undefined");
+ toString.insert("engine->evaluate(\"Null\")", "ReferenceError: Can't find variable: Null");
+ toString.insert("engine->evaluate(\"True\")", "ReferenceError: Can't find variable: True");
+ toString.insert("engine->evaluate(\"False\")", "ReferenceError: Can't find variable: False");
+ toString.insert("engine->evaluate(\"undefined\")", "undefined");
+ toString.insert("engine->evaluate(\"null\")", "null");
+ toString.insert("engine->evaluate(\"true\")", "true");
+ toString.insert("engine->evaluate(\"false\")", "false");
+ toString.insert("engine->evaluate(\"122\")", "122");
+ toString.insert("engine->evaluate(\"124\")", "124");
+ toString.insert("engine->evaluate(\"0\")", "0");
+ toString.insert("engine->evaluate(\"0.0\")", "0");
+ toString.insert("engine->evaluate(\"123.0\")", "123");
+ toString.insert("engine->evaluate(\"6.37e-8\")", "6.37e-8");
+ toString.insert("engine->evaluate(\"-6.37e-8\")", "-6.37e-8");
+ toString.insert("engine->evaluate(\"0x43211234\")", "1126240820");
+ toString.insert("engine->evaluate(\"0x10000\")", "65536");
+ toString.insert("engine->evaluate(\"0x10001\")", "65537");
+ toString.insert("engine->evaluate(\"NaN\")", "NaN");
+ toString.insert("engine->evaluate(\"Infinity\")", "Infinity");
+ toString.insert("engine->evaluate(\"-Infinity\")", "-Infinity");
+ toString.insert("engine->evaluate(\"'ciao'\")", "ciao");
+ toString.insert("engine->evaluate(\"''\")", "");
+ toString.insert("engine->evaluate(\"'0'\")", "0");
+ toString.insert("engine->evaluate(\"'123'\")", "123");
+ toString.insert("engine->evaluate(\"'12.4'\")", "12.4");
+ toString.insert("engine->nullValue()", "null");
+ toString.insert("engine->undefinedValue()", "undefined");
+ }
+ newRow(expr) << toString.value(expr);
+}
+
+void tst_QScriptValue::toString_test(const char*, const QScriptValue& value)
+{
+ QFETCH(QString, expected);
+ QCOMPARE(value.toString(), expected);
+ QCOMPARE(value.toString(), expected);
+}
+
+DEFINE_TEST_FUNCTION(toString)
+
+
+void tst_QScriptValue::toNumber_initData()
+{
+ QTest::addColumn<qsreal>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::toNumber_makeData(const char* expr)
+{
+ static QHash<QString, qsreal> toNumber;
+ if (toNumber.isEmpty()) {
+ toNumber.insert("QScriptValue()", 0);
+ toNumber.insert("QScriptValue(QScriptValue::UndefinedValue)", qQNaN());
+ toNumber.insert("QScriptValue(QScriptValue::NullValue)", 0);
+ toNumber.insert("QScriptValue(true)", 1);
+ toNumber.insert("QScriptValue(false)", 0);
+ toNumber.insert("QScriptValue(int(122))", 122);
+ toNumber.insert("QScriptValue(uint(124))", 124);
+ toNumber.insert("QScriptValue(0)", 0);
+ toNumber.insert("QScriptValue(0.0)", 0);
+ toNumber.insert("QScriptValue(123.0)", 123);
+ toNumber.insert("QScriptValue(6.37e-8)", 6.369999999999999e-08);
+ toNumber.insert("QScriptValue(-6.37e-8)", -6.369999999999999e-08);
+ toNumber.insert("QScriptValue(0x43211234)", 1126240820);
+ toNumber.insert("QScriptValue(0x10000)", 65536);
+ toNumber.insert("QScriptValue(0x10001)", 65537);
+ toNumber.insert("QScriptValue(qSNaN())", qQNaN());
+ toNumber.insert("QScriptValue(qQNaN())", qQNaN());
+ toNumber.insert("QScriptValue(qInf())", qInf());
+ toNumber.insert("QScriptValue(-qInf())", qInf());
+ toNumber.insert("QScriptValue(\"NaN\")", qQNaN());
+ toNumber.insert("QScriptValue(\"Infinity\")", qInf());
+ toNumber.insert("QScriptValue(\"-Infinity\")", qInf());
+ toNumber.insert("QScriptValue(\"ciao\")", qQNaN());
+ toNumber.insert("QScriptValue(QString::fromLatin1(\"ciao\"))", qQNaN());
+ toNumber.insert("QScriptValue(QString(\"\"))", 0);
+ toNumber.insert("QScriptValue(QString())", 0);
+ toNumber.insert("QScriptValue(QString(\"0\"))", 0);
+ toNumber.insert("QScriptValue(QString(\"123\"))", 123);
+ toNumber.insert("QScriptValue(QString(\"12.4\"))", 12.4);
+ toNumber.insert("QScriptValue(0, QScriptValue::UndefinedValue)", qQNaN());
+ toNumber.insert("QScriptValue(0, QScriptValue::NullValue)", 0);
+ toNumber.insert("QScriptValue(0, true)", 1);
+ toNumber.insert("QScriptValue(0, false)", 0);
+ toNumber.insert("QScriptValue(0, int(122))", 122);
+ toNumber.insert("QScriptValue(0, uint(124))", 124);
+ toNumber.insert("QScriptValue(0, 0)", 0);
+ toNumber.insert("QScriptValue(0, 0.0)", 0);
+ toNumber.insert("QScriptValue(0, 123.0)", 123);
+ toNumber.insert("QScriptValue(0, 6.37e-8)", 6.369999999999999e-08);
+ toNumber.insert("QScriptValue(0, -6.37e-8)", -6.369999999999999e-08);
+ toNumber.insert("QScriptValue(0, 0x43211234)", 1126240820);
+ toNumber.insert("QScriptValue(0, 0x10000)", 65536);
+ toNumber.insert("QScriptValue(0, 0x10001)", 65537);
+ toNumber.insert("QScriptValue(0, qSNaN())", qQNaN());
+ toNumber.insert("QScriptValue(0, qQNaN())", qQNaN());
+ toNumber.insert("QScriptValue(0, qInf())", qInf());
+ toNumber.insert("QScriptValue(0, -qInf())", qInf());
+ toNumber.insert("QScriptValue(0, \"NaN\")", qQNaN());
+ toNumber.insert("QScriptValue(0, \"Infinity\")", qInf());
+ toNumber.insert("QScriptValue(0, \"-Infinity\")", qInf());
+ toNumber.insert("QScriptValue(0, \"ciao\")", qQNaN());
+ toNumber.insert("QScriptValue(0, QString::fromLatin1(\"ciao\"))", qQNaN());
+ toNumber.insert("QScriptValue(0, QString(\"\"))", 0);
+ toNumber.insert("QScriptValue(0, QString())", 0);
+ toNumber.insert("QScriptValue(0, QString(\"0\"))", 0);
+ toNumber.insert("QScriptValue(0, QString(\"123\"))", 123);
+ toNumber.insert("QScriptValue(0, QString(\"12.3\"))", 12.3);
+ toNumber.insert("QScriptValue(engine, QScriptValue::UndefinedValue)", qQNaN());
+ toNumber.insert("QScriptValue(engine, QScriptValue::NullValue)", 0);
+ toNumber.insert("QScriptValue(engine, true)", 1);
+ toNumber.insert("QScriptValue(engine, false)", 0);
+ toNumber.insert("QScriptValue(engine, int(122))", 122);
+ toNumber.insert("QScriptValue(engine, uint(124))", 124);
+ toNumber.insert("QScriptValue(engine, 0)", 0);
+ toNumber.insert("QScriptValue(engine, 0.0)", 0);
+ toNumber.insert("QScriptValue(engine, 123.0)", 123);
+ toNumber.insert("QScriptValue(engine, 6.37e-8)", 6.369999999999999e-08);
+ toNumber.insert("QScriptValue(engine, -6.37e-8)", -6.369999999999999e-08);
+ toNumber.insert("QScriptValue(engine, 0x43211234)", 1126240820);
+ toNumber.insert("QScriptValue(engine, 0x10000)", 65536);
+ toNumber.insert("QScriptValue(engine, 0x10001)", 65537);
+ toNumber.insert("QScriptValue(engine, qSNaN())", qQNaN());
+ toNumber.insert("QScriptValue(engine, qQNaN())", qQNaN());
+ toNumber.insert("QScriptValue(engine, qInf())", qInf());
+ toNumber.insert("QScriptValue(engine, -qInf())", qInf());
+ toNumber.insert("QScriptValue(engine, \"NaN\")", qQNaN());
+ toNumber.insert("QScriptValue(engine, \"Infinity\")", qInf());
+ toNumber.insert("QScriptValue(engine, \"-Infinity\")", qInf());
+ toNumber.insert("QScriptValue(engine, \"ciao\")", qQNaN());
+ toNumber.insert("QScriptValue(engine, QString::fromLatin1(\"ciao\"))", qQNaN());
+ toNumber.insert("QScriptValue(engine, QString(\"\"))", 0);
+ toNumber.insert("QScriptValue(engine, QString())", 0);
+ toNumber.insert("QScriptValue(engine, QString(\"0\"))", 0);
+ toNumber.insert("QScriptValue(engine, QString(\"123\"))", 123);
+ toNumber.insert("QScriptValue(engine, QString(\"1.23\"))", 1.23);
+ toNumber.insert("engine->evaluate(\"[]\")", 0);
+ toNumber.insert("engine->evaluate(\"{}\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Object.prototype\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Date.prototype\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Array.prototype\")", 0);
+ toNumber.insert("engine->evaluate(\"Function.prototype\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Error.prototype\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Object\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Array\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Number\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Function\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"(function() { return 1; })\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"(function() { return 'ciao'; })\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"(function() { throw new Error('foo'); })\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"/foo/\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"new Object()\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"new Array()\")", 0);
+ toNumber.insert("engine->evaluate(\"new Error()\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")", 22);
+ toNumber.insert("engine->evaluate(\"Undefined\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Null\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"True\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"False\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"undefined\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"null\")", 0);
+ toNumber.insert("engine->evaluate(\"true\")", 1);
+ toNumber.insert("engine->evaluate(\"false\")", 0);
+ toNumber.insert("engine->evaluate(\"122\")", 122);
+ toNumber.insert("engine->evaluate(\"124\")", 124);
+ toNumber.insert("engine->evaluate(\"0\")", 0);
+ toNumber.insert("engine->evaluate(\"0.0\")", 0);
+ toNumber.insert("engine->evaluate(\"123.0\")", 123);
+ toNumber.insert("engine->evaluate(\"6.37e-8\")", 6.369999999999999e-08);
+ toNumber.insert("engine->evaluate(\"-6.37e-8\")", -6.369999999999999e-08);
+ toNumber.insert("engine->evaluate(\"0x43211234\")", 1126240820);
+ toNumber.insert("engine->evaluate(\"0x10000\")", 65536);
+ toNumber.insert("engine->evaluate(\"0x10001\")", 65537);
+ toNumber.insert("engine->evaluate(\"NaN\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"Infinity\")", qInf());
+ toNumber.insert("engine->evaluate(\"-Infinity\")", qInf());
+ toNumber.insert("engine->evaluate(\"'ciao'\")", qQNaN());
+ toNumber.insert("engine->evaluate(\"''\")", 0);
+ toNumber.insert("engine->evaluate(\"'0'\")", 0);
+ toNumber.insert("engine->evaluate(\"'123'\")", 123);
+ toNumber.insert("engine->evaluate(\"'12.4'\")", 12.4);
+ toNumber.insert("engine->nullValue()", 0);
+ toNumber.insert("engine->undefinedValue()", qQNaN());
+ }
+ newRow(expr) << toNumber.value(expr);
+}
+
+void tst_QScriptValue::toNumber_test(const char*, const QScriptValue& value)
+{
+ QFETCH(qsreal, expected);
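+    // NaN never compares equal to itself, and the expected data deliberately
+    // ignores the sign of an infinity, so those cases are verified with
+    // qIsNaN()/qIsInf() instead of QCOMPARE().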
+ if (qIsNaN(expected)) {
+ QVERIFY(qIsNaN(value.toNumber()));
+ return;
+ }
+ if (qIsInf(expected)) {
+ QVERIFY(qIsInf(value.toNumber()));
+ QVERIFY(qIsInf(value.toNumber()));
+ return;
+ }
+ QCOMPARE(value.toNumber(), expected);
+ QCOMPARE(value.toNumber(), expected);
+}
+
+DEFINE_TEST_FUNCTION(toNumber)
+
+
+void tst_QScriptValue::toBool_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::toBool_makeData(const char* expr)
+{
+ static QHash<QString, bool> toBool;
+ if (toBool.isEmpty()) {
+ toBool.insert("QScriptValue()", false);
+ toBool.insert("QScriptValue(QScriptValue::UndefinedValue)", false);
+ toBool.insert("QScriptValue(QScriptValue::NullValue)", false);
+ toBool.insert("QScriptValue(true)", true);
+ toBool.insert("QScriptValue(false)", false);
+ toBool.insert("QScriptValue(int(122))", true);
+ toBool.insert("QScriptValue(uint(124))", true);
+ toBool.insert("QScriptValue(0)", false);
+ toBool.insert("QScriptValue(0.0)", false);
+ toBool.insert("QScriptValue(123.0)", true);
+ toBool.insert("QScriptValue(6.37e-8)", true);
+ toBool.insert("QScriptValue(-6.37e-8)", true);
+ toBool.insert("QScriptValue(0x43211234)", true);
+ toBool.insert("QScriptValue(0x10000)", true);
+ toBool.insert("QScriptValue(0x10001)", true);
+ toBool.insert("QScriptValue(qSNaN())", false);
+ toBool.insert("QScriptValue(qQNaN())", false);
+ toBool.insert("QScriptValue(qInf())", true);
+ toBool.insert("QScriptValue(-qInf())", true);
+ toBool.insert("QScriptValue(\"NaN\")", true);
+ toBool.insert("QScriptValue(\"Infinity\")", true);
+ toBool.insert("QScriptValue(\"-Infinity\")", true);
+ toBool.insert("QScriptValue(\"ciao\")", true);
+ toBool.insert("QScriptValue(QString::fromLatin1(\"ciao\"))", true);
+ toBool.insert("QScriptValue(QString(\"\"))", false);
+ toBool.insert("QScriptValue(QString())", false);
+ toBool.insert("QScriptValue(QString(\"0\"))", true);
+ toBool.insert("QScriptValue(QString(\"123\"))", true);
+ toBool.insert("QScriptValue(QString(\"12.4\"))", true);
+ toBool.insert("QScriptValue(0, QScriptValue::UndefinedValue)", false);
+ toBool.insert("QScriptValue(0, QScriptValue::NullValue)", false);
+ toBool.insert("QScriptValue(0, true)", true);
+ toBool.insert("QScriptValue(0, false)", false);
+ toBool.insert("QScriptValue(0, int(122))", true);
+ toBool.insert("QScriptValue(0, uint(124))", true);
+ toBool.insert("QScriptValue(0, 0)", false);
+ toBool.insert("QScriptValue(0, 0.0)", false);
+ toBool.insert("QScriptValue(0, 123.0)", true);
+ toBool.insert("QScriptValue(0, 6.37e-8)", true);
+ toBool.insert("QScriptValue(0, -6.37e-8)", true);
+ toBool.insert("QScriptValue(0, 0x43211234)", true);
+ toBool.insert("QScriptValue(0, 0x10000)", true);
+ toBool.insert("QScriptValue(0, 0x10001)", true);
+ toBool.insert("QScriptValue(0, qSNaN())", false);
+ toBool.insert("QScriptValue(0, qQNaN())", false);
+ toBool.insert("QScriptValue(0, qInf())", true);
+ toBool.insert("QScriptValue(0, -qInf())", true);
+ toBool.insert("QScriptValue(0, \"NaN\")", true);
+ toBool.insert("QScriptValue(0, \"Infinity\")", true);
+ toBool.insert("QScriptValue(0, \"-Infinity\")", true);
+ toBool.insert("QScriptValue(0, \"ciao\")", true);
+ toBool.insert("QScriptValue(0, QString::fromLatin1(\"ciao\"))", true);
+ toBool.insert("QScriptValue(0, QString(\"\"))", false);
+ toBool.insert("QScriptValue(0, QString())", false);
+ toBool.insert("QScriptValue(0, QString(\"0\"))", true);
+ toBool.insert("QScriptValue(0, QString(\"123\"))", true);
+ toBool.insert("QScriptValue(0, QString(\"12.3\"))", true);
+ toBool.insert("QScriptValue(engine, QScriptValue::UndefinedValue)", false);
+ toBool.insert("QScriptValue(engine, QScriptValue::NullValue)", false);
+ toBool.insert("QScriptValue(engine, true)", true);
+ toBool.insert("QScriptValue(engine, false)", false);
+ toBool.insert("QScriptValue(engine, int(122))", true);
+ toBool.insert("QScriptValue(engine, uint(124))", true);
+ toBool.insert("QScriptValue(engine, 0)", false);
+ toBool.insert("QScriptValue(engine, 0.0)", false);
+ toBool.insert("QScriptValue(engine, 123.0)", true);
+ toBool.insert("QScriptValue(engine, 6.37e-8)", true);
+ toBool.insert("QScriptValue(engine, -6.37e-8)", true);
+ toBool.insert("QScriptValue(engine, 0x43211234)", true);
+ toBool.insert("QScriptValue(engine, 0x10000)", true);
+ toBool.insert("QScriptValue(engine, 0x10001)", true);
+ toBool.insert("QScriptValue(engine, qSNaN())", false);
+ toBool.insert("QScriptValue(engine, qQNaN())", false);
+ toBool.insert("QScriptValue(engine, qInf())", true);
+ toBool.insert("QScriptValue(engine, -qInf())", true);
+ toBool.insert("QScriptValue(engine, \"NaN\")", true);
+ toBool.insert("QScriptValue(engine, \"Infinity\")", true);
+ toBool.insert("QScriptValue(engine, \"-Infinity\")", true);
+ toBool.insert("QScriptValue(engine, \"ciao\")", true);
+ toBool.insert("QScriptValue(engine, QString::fromLatin1(\"ciao\"))", true);
+ toBool.insert("QScriptValue(engine, QString(\"\"))", false);
+ toBool.insert("QScriptValue(engine, QString())", false);
+ toBool.insert("QScriptValue(engine, QString(\"0\"))", true);
+ toBool.insert("QScriptValue(engine, QString(\"123\"))", true);
+ toBool.insert("QScriptValue(engine, QString(\"1.23\"))", true);
+ toBool.insert("engine->evaluate(\"[]\")", true);
+ toBool.insert("engine->evaluate(\"{}\")", false);
+ toBool.insert("engine->evaluate(\"Object.prototype\")", true);
+ toBool.insert("engine->evaluate(\"Date.prototype\")", true);
+ toBool.insert("engine->evaluate(\"Array.prototype\")", true);
+ toBool.insert("engine->evaluate(\"Function.prototype\")", true);
+ toBool.insert("engine->evaluate(\"Error.prototype\")", true);
+ toBool.insert("engine->evaluate(\"Object\")", true);
+ toBool.insert("engine->evaluate(\"Array\")", true);
+ toBool.insert("engine->evaluate(\"Number\")", true);
+ toBool.insert("engine->evaluate(\"Function\")", true);
+ toBool.insert("engine->evaluate(\"(function() { return 1; })\")", true);
+ toBool.insert("engine->evaluate(\"(function() { return 'ciao'; })\")", true);
+ toBool.insert("engine->evaluate(\"(function() { throw new Error('foo'); })\")", true);
+ toBool.insert("engine->evaluate(\"/foo/\")", true);
+ toBool.insert("engine->evaluate(\"new Object()\")", true);
+ toBool.insert("engine->evaluate(\"new Array()\")", true);
+ toBool.insert("engine->evaluate(\"new Error()\")", true);
+ toBool.insert("engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")", true);
+ toBool.insert("engine->evaluate(\"Undefined\")", true);
+ toBool.insert("engine->evaluate(\"Null\")", true);
+ toBool.insert("engine->evaluate(\"True\")", true);
+ toBool.insert("engine->evaluate(\"False\")", true);
+ toBool.insert("engine->evaluate(\"undefined\")", false);
+ toBool.insert("engine->evaluate(\"null\")", false);
+ toBool.insert("engine->evaluate(\"true\")", true);
+ toBool.insert("engine->evaluate(\"false\")", false);
+ toBool.insert("engine->evaluate(\"122\")", true);
+ toBool.insert("engine->evaluate(\"124\")", true);
+ toBool.insert("engine->evaluate(\"0\")", false);
+ toBool.insert("engine->evaluate(\"0.0\")", false);
+ toBool.insert("engine->evaluate(\"123.0\")", true);
+ toBool.insert("engine->evaluate(\"6.37e-8\")", true);
+ toBool.insert("engine->evaluate(\"-6.37e-8\")", true);
+ toBool.insert("engine->evaluate(\"0x43211234\")", true);
+ toBool.insert("engine->evaluate(\"0x10000\")", true);
+ toBool.insert("engine->evaluate(\"0x10001\")", true);
+ toBool.insert("engine->evaluate(\"NaN\")", false);
+ toBool.insert("engine->evaluate(\"Infinity\")", true);
+ toBool.insert("engine->evaluate(\"-Infinity\")", true);
+ toBool.insert("engine->evaluate(\"'ciao'\")", true);
+ toBool.insert("engine->evaluate(\"''\")", false);
+ toBool.insert("engine->evaluate(\"'0'\")", true);
+ toBool.insert("engine->evaluate(\"'123'\")", true);
+ toBool.insert("engine->evaluate(\"'12.4'\")", true);
+ toBool.insert("engine->nullValue()", false);
+ toBool.insert("engine->undefinedValue()", false);
+ }
+ newRow(expr) << toBool.value(expr);
+}
+
+void tst_QScriptValue::toBool_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.toBool(), expected);
+ QCOMPARE(value.toBool(), expected);
+}
+
+DEFINE_TEST_FUNCTION(toBool)
+
+
+void tst_QScriptValue::toBoolean_initData()
+{
+ QTest::addColumn<bool>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::toBoolean_makeData(const char* expr)
+{
+ static QHash<QString, bool> toBoolean;
+ if (toBoolean.isEmpty()) {
+ toBoolean.insert("QScriptValue()", false);
+ toBoolean.insert("QScriptValue(QScriptValue::UndefinedValue)", false);
+ toBoolean.insert("QScriptValue(QScriptValue::NullValue)", false);
+ toBoolean.insert("QScriptValue(true)", true);
+ toBoolean.insert("QScriptValue(false)", false);
+ toBoolean.insert("QScriptValue(int(122))", true);
+ toBoolean.insert("QScriptValue(uint(124))", true);
+ toBoolean.insert("QScriptValue(0)", false);
+ toBoolean.insert("QScriptValue(0.0)", false);
+ toBoolean.insert("QScriptValue(123.0)", true);
+ toBoolean.insert("QScriptValue(6.37e-8)", true);
+ toBoolean.insert("QScriptValue(-6.37e-8)", true);
+ toBoolean.insert("QScriptValue(0x43211234)", true);
+ toBoolean.insert("QScriptValue(0x10000)", true);
+ toBoolean.insert("QScriptValue(0x10001)", true);
+ toBoolean.insert("QScriptValue(qSNaN())", false);
+ toBoolean.insert("QScriptValue(qQNaN())", false);
+ toBoolean.insert("QScriptValue(qInf())", true);
+ toBoolean.insert("QScriptValue(-qInf())", true);
+ toBoolean.insert("QScriptValue(\"NaN\")", true);
+ toBoolean.insert("QScriptValue(\"Infinity\")", true);
+ toBoolean.insert("QScriptValue(\"-Infinity\")", true);
+ toBoolean.insert("QScriptValue(\"ciao\")", true);
+ toBoolean.insert("QScriptValue(QString::fromLatin1(\"ciao\"))", true);
+ toBoolean.insert("QScriptValue(QString(\"\"))", false);
+ toBoolean.insert("QScriptValue(QString())", false);
+ toBoolean.insert("QScriptValue(QString(\"0\"))", true);
+ toBoolean.insert("QScriptValue(QString(\"123\"))", true);
+ toBoolean.insert("QScriptValue(QString(\"12.4\"))", true);
+ toBoolean.insert("QScriptValue(0, QScriptValue::UndefinedValue)", false);
+ toBoolean.insert("QScriptValue(0, QScriptValue::NullValue)", false);
+ toBoolean.insert("QScriptValue(0, true)", true);
+ toBoolean.insert("QScriptValue(0, false)", false);
+ toBoolean.insert("QScriptValue(0, int(122))", true);
+ toBoolean.insert("QScriptValue(0, uint(124))", true);
+ toBoolean.insert("QScriptValue(0, 0)", false);
+ toBoolean.insert("QScriptValue(0, 0.0)", false);
+ toBoolean.insert("QScriptValue(0, 123.0)", true);
+ toBoolean.insert("QScriptValue(0, 6.37e-8)", true);
+ toBoolean.insert("QScriptValue(0, -6.37e-8)", true);
+ toBoolean.insert("QScriptValue(0, 0x43211234)", true);
+ toBoolean.insert("QScriptValue(0, 0x10000)", true);
+ toBoolean.insert("QScriptValue(0, 0x10001)", true);
+ toBoolean.insert("QScriptValue(0, qSNaN())", false);
+ toBoolean.insert("QScriptValue(0, qQNaN())", false);
+ toBoolean.insert("QScriptValue(0, qInf())", true);
+ toBoolean.insert("QScriptValue(0, -qInf())", true);
+ toBoolean.insert("QScriptValue(0, \"NaN\")", true);
+ toBoolean.insert("QScriptValue(0, \"Infinity\")", true);
+ toBoolean.insert("QScriptValue(0, \"-Infinity\")", true);
+ toBoolean.insert("QScriptValue(0, \"ciao\")", true);
+ toBoolean.insert("QScriptValue(0, QString::fromLatin1(\"ciao\"))", true);
+ toBoolean.insert("QScriptValue(0, QString(\"\"))", false);
+ toBoolean.insert("QScriptValue(0, QString())", false);
+ toBoolean.insert("QScriptValue(0, QString(\"0\"))", true);
+ toBoolean.insert("QScriptValue(0, QString(\"123\"))", true);
+ toBoolean.insert("QScriptValue(0, QString(\"12.3\"))", true);
+ toBoolean.insert("QScriptValue(engine, QScriptValue::UndefinedValue)", false);
+ toBoolean.insert("QScriptValue(engine, QScriptValue::NullValue)", false);
+ toBoolean.insert("QScriptValue(engine, true)", true);
+ toBoolean.insert("QScriptValue(engine, false)", false);
+ toBoolean.insert("QScriptValue(engine, int(122))", true);
+ toBoolean.insert("QScriptValue(engine, uint(124))", true);
+ toBoolean.insert("QScriptValue(engine, 0)", false);
+ toBoolean.insert("QScriptValue(engine, 0.0)", false);
+ toBoolean.insert("QScriptValue(engine, 123.0)", true);
+ toBoolean.insert("QScriptValue(engine, 6.37e-8)", true);
+ toBoolean.insert("QScriptValue(engine, -6.37e-8)", true);
+ toBoolean.insert("QScriptValue(engine, 0x43211234)", true);
+ toBoolean.insert("QScriptValue(engine, 0x10000)", true);
+ toBoolean.insert("QScriptValue(engine, 0x10001)", true);
+ toBoolean.insert("QScriptValue(engine, qSNaN())", false);
+ toBoolean.insert("QScriptValue(engine, qQNaN())", false);
+ toBoolean.insert("QScriptValue(engine, qInf())", true);
+ toBoolean.insert("QScriptValue(engine, -qInf())", true);
+ toBoolean.insert("QScriptValue(engine, \"NaN\")", true);
+ toBoolean.insert("QScriptValue(engine, \"Infinity\")", true);
+ toBoolean.insert("QScriptValue(engine, \"-Infinity\")", true);
+ toBoolean.insert("QScriptValue(engine, \"ciao\")", true);
+ toBoolean.insert("QScriptValue(engine, QString::fromLatin1(\"ciao\"))", true);
+ toBoolean.insert("QScriptValue(engine, QString(\"\"))", false);
+ toBoolean.insert("QScriptValue(engine, QString())", false);
+ toBoolean.insert("QScriptValue(engine, QString(\"0\"))", true);
+ toBoolean.insert("QScriptValue(engine, QString(\"123\"))", true);
+ toBoolean.insert("QScriptValue(engine, QString(\"1.23\"))", true);
+ toBoolean.insert("engine->evaluate(\"[]\")", true);
+ toBoolean.insert("engine->evaluate(\"{}\")", false);
+ toBoolean.insert("engine->evaluate(\"Object.prototype\")", true);
+ toBoolean.insert("engine->evaluate(\"Date.prototype\")", true);
+ toBoolean.insert("engine->evaluate(\"Array.prototype\")", true);
+ toBoolean.insert("engine->evaluate(\"Function.prototype\")", true);
+ toBoolean.insert("engine->evaluate(\"Error.prototype\")", true);
+ toBoolean.insert("engine->evaluate(\"Object\")", true);
+ toBoolean.insert("engine->evaluate(\"Array\")", true);
+ toBoolean.insert("engine->evaluate(\"Number\")", true);
+ toBoolean.insert("engine->evaluate(\"Function\")", true);
+ toBoolean.insert("engine->evaluate(\"(function() { return 1; })\")", true);
+ toBoolean.insert("engine->evaluate(\"(function() { return 'ciao'; })\")", true);
+ toBoolean.insert("engine->evaluate(\"(function() { throw new Error('foo'); })\")", true);
+ toBoolean.insert("engine->evaluate(\"/foo/\")", true);
+ toBoolean.insert("engine->evaluate(\"new Object()\")", true);
+ toBoolean.insert("engine->evaluate(\"new Array()\")", true);
+ toBoolean.insert("engine->evaluate(\"new Error()\")", true);
+ toBoolean.insert("engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")", true);
+ toBoolean.insert("engine->evaluate(\"Undefined\")", true);
+ toBoolean.insert("engine->evaluate(\"Null\")", true);
+ toBoolean.insert("engine->evaluate(\"True\")", true);
+ toBoolean.insert("engine->evaluate(\"False\")", true);
+ toBoolean.insert("engine->evaluate(\"undefined\")", false);
+ toBoolean.insert("engine->evaluate(\"null\")", false);
+ toBoolean.insert("engine->evaluate(\"true\")", true);
+ toBoolean.insert("engine->evaluate(\"false\")", false);
+ toBoolean.insert("engine->evaluate(\"122\")", true);
+ toBoolean.insert("engine->evaluate(\"124\")", true);
+ toBoolean.insert("engine->evaluate(\"0\")", false);
+ toBoolean.insert("engine->evaluate(\"0.0\")", false);
+ toBoolean.insert("engine->evaluate(\"123.0\")", true);
+ toBoolean.insert("engine->evaluate(\"6.37e-8\")", true);
+ toBoolean.insert("engine->evaluate(\"-6.37e-8\")", true);
+ toBoolean.insert("engine->evaluate(\"0x43211234\")", true);
+ toBoolean.insert("engine->evaluate(\"0x10000\")", true);
+ toBoolean.insert("engine->evaluate(\"0x10001\")", true);
+ toBoolean.insert("engine->evaluate(\"NaN\")", false);
+ toBoolean.insert("engine->evaluate(\"Infinity\")", true);
+ toBoolean.insert("engine->evaluate(\"-Infinity\")", true);
+ toBoolean.insert("engine->evaluate(\"'ciao'\")", true);
+ toBoolean.insert("engine->evaluate(\"''\")", false);
+ toBoolean.insert("engine->evaluate(\"'0'\")", true);
+ toBoolean.insert("engine->evaluate(\"'123'\")", true);
+ toBoolean.insert("engine->evaluate(\"'12.4'\")", true);
+ toBoolean.insert("engine->nullValue()", false);
+ toBoolean.insert("engine->undefinedValue()", false);
+ }
+ newRow(expr) << toBoolean.value(expr);
+}
+
+void tst_QScriptValue::toBoolean_test(const char*, const QScriptValue& value)
+{
+ QFETCH(bool, expected);
+ QCOMPARE(value.toBoolean(), expected);
+ QCOMPARE(value.toBoolean(), expected);
+}
+
+DEFINE_TEST_FUNCTION(toBoolean)
+
+
+void tst_QScriptValue::toInteger_initData()
+{
+ QTest::addColumn<qsreal>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::toInteger_makeData(const char* expr)
+{
+ static QHash<QString, qsreal> toInteger;
+ if (toInteger.isEmpty()) {
+ toInteger.insert("QScriptValue()", 0);
+ toInteger.insert("QScriptValue(QScriptValue::UndefinedValue)", 0);
+ toInteger.insert("QScriptValue(QScriptValue::NullValue)", 0);
+ toInteger.insert("QScriptValue(true)", 1);
+ toInteger.insert("QScriptValue(false)", 0);
+ toInteger.insert("QScriptValue(int(122))", 122);
+ toInteger.insert("QScriptValue(uint(124))", 124);
+ toInteger.insert("QScriptValue(0)", 0);
+ toInteger.insert("QScriptValue(0.0)", 0);
+ toInteger.insert("QScriptValue(123.0)", 123);
+ toInteger.insert("QScriptValue(6.37e-8)", 0);
+ toInteger.insert("QScriptValue(-6.37e-8)", 0);
+ toInteger.insert("QScriptValue(0x43211234)", 1126240820);
+ toInteger.insert("QScriptValue(0x10000)", 65536);
+ toInteger.insert("QScriptValue(0x10001)", 65537);
+ toInteger.insert("QScriptValue(qSNaN())", 0);
+ toInteger.insert("QScriptValue(qQNaN())", 0);
+ toInteger.insert("QScriptValue(qInf())", qInf());
+ toInteger.insert("QScriptValue(-qInf())", qInf());
+ toInteger.insert("QScriptValue(\"NaN\")", 0);
+ toInteger.insert("QScriptValue(\"Infinity\")", qInf());
+ toInteger.insert("QScriptValue(\"-Infinity\")", qInf());
+ toInteger.insert("QScriptValue(\"ciao\")", 0);
+ toInteger.insert("QScriptValue(QString::fromLatin1(\"ciao\"))", 0);
+ toInteger.insert("QScriptValue(QString(\"\"))", 0);
+ toInteger.insert("QScriptValue(QString())", 0);
+ toInteger.insert("QScriptValue(QString(\"0\"))", 0);
+ toInteger.insert("QScriptValue(QString(\"123\"))", 123);
+ toInteger.insert("QScriptValue(QString(\"12.4\"))", 12);
+ toInteger.insert("QScriptValue(0, QScriptValue::UndefinedValue)", 0);
+ toInteger.insert("QScriptValue(0, QScriptValue::NullValue)", 0);
+ toInteger.insert("QScriptValue(0, true)", 1);
+ toInteger.insert("QScriptValue(0, false)", 0);
+ toInteger.insert("QScriptValue(0, int(122))", 122);
+ toInteger.insert("QScriptValue(0, uint(124))", 124);
+ toInteger.insert("QScriptValue(0, 0)", 0);
+ toInteger.insert("QScriptValue(0, 0.0)", 0);
+ toInteger.insert("QScriptValue(0, 123.0)", 123);
+ toInteger.insert("QScriptValue(0, 6.37e-8)", 0);
+ toInteger.insert("QScriptValue(0, -6.37e-8)", 0);
+ toInteger.insert("QScriptValue(0, 0x43211234)", 1126240820);
+ toInteger.insert("QScriptValue(0, 0x10000)", 65536);
+ toInteger.insert("QScriptValue(0, 0x10001)", 65537);
+ toInteger.insert("QScriptValue(0, qSNaN())", 0);
+ toInteger.insert("QScriptValue(0, qQNaN())", 0);
+ toInteger.insert("QScriptValue(0, qInf())", qInf());
+ toInteger.insert("QScriptValue(0, -qInf())", qInf());
+ toInteger.insert("QScriptValue(0, \"NaN\")", 0);
+ toInteger.insert("QScriptValue(0, \"Infinity\")", qInf());
+ toInteger.insert("QScriptValue(0, \"-Infinity\")", qInf());
+ toInteger.insert("QScriptValue(0, \"ciao\")", 0);
+ toInteger.insert("QScriptValue(0, QString::fromLatin1(\"ciao\"))", 0);
+ toInteger.insert("QScriptValue(0, QString(\"\"))", 0);
+ toInteger.insert("QScriptValue(0, QString())", 0);
+ toInteger.insert("QScriptValue(0, QString(\"0\"))", 0);
+ toInteger.insert("QScriptValue(0, QString(\"123\"))", 123);
+ toInteger.insert("QScriptValue(0, QString(\"12.3\"))", 12);
+ toInteger.insert("QScriptValue(engine, QScriptValue::UndefinedValue)", 0);
+ toInteger.insert("QScriptValue(engine, QScriptValue::NullValue)", 0);
+ toInteger.insert("QScriptValue(engine, true)", 1);
+ toInteger.insert("QScriptValue(engine, false)", 0);
+ toInteger.insert("QScriptValue(engine, int(122))", 122);
+ toInteger.insert("QScriptValue(engine, uint(124))", 124);
+ toInteger.insert("QScriptValue(engine, 0)", 0);
+ toInteger.insert("QScriptValue(engine, 0.0)", 0);
+ toInteger.insert("QScriptValue(engine, 123.0)", 123);
+ toInteger.insert("QScriptValue(engine, 6.37e-8)", 0);
+ toInteger.insert("QScriptValue(engine, -6.37e-8)", 0);
+ toInteger.insert("QScriptValue(engine, 0x43211234)", 1126240820);
+ toInteger.insert("QScriptValue(engine, 0x10000)", 65536);
+ toInteger.insert("QScriptValue(engine, 0x10001)", 65537);
+ toInteger.insert("QScriptValue(engine, qSNaN())", 0);
+ toInteger.insert("QScriptValue(engine, qQNaN())", 0);
+ toInteger.insert("QScriptValue(engine, qInf())", qInf());
+ toInteger.insert("QScriptValue(engine, -qInf())", qInf());
+ toInteger.insert("QScriptValue(engine, \"NaN\")", 0);
+ toInteger.insert("QScriptValue(engine, \"Infinity\")", qInf());
+ toInteger.insert("QScriptValue(engine, \"-Infinity\")", qInf());
+ toInteger.insert("QScriptValue(engine, \"ciao\")", 0);
+ toInteger.insert("QScriptValue(engine, QString::fromLatin1(\"ciao\"))", 0);
+ toInteger.insert("QScriptValue(engine, QString(\"\"))", 0);
+ toInteger.insert("QScriptValue(engine, QString())", 0);
+ toInteger.insert("QScriptValue(engine, QString(\"0\"))", 0);
+ toInteger.insert("QScriptValue(engine, QString(\"123\"))", 123);
+ toInteger.insert("QScriptValue(engine, QString(\"1.23\"))", 1);
+ toInteger.insert("engine->evaluate(\"[]\")", 0);
+ toInteger.insert("engine->evaluate(\"{}\")", 0);
+ toInteger.insert("engine->evaluate(\"Object.prototype\")", 0);
+ toInteger.insert("engine->evaluate(\"Date.prototype\")", 0);
+ toInteger.insert("engine->evaluate(\"Array.prototype\")", 0);
+ toInteger.insert("engine->evaluate(\"Function.prototype\")", 0);
+ toInteger.insert("engine->evaluate(\"Error.prototype\")", 0);
+ toInteger.insert("engine->evaluate(\"Object\")", 0);
+ toInteger.insert("engine->evaluate(\"Array\")", 0);
+ toInteger.insert("engine->evaluate(\"Number\")", 0);
+ toInteger.insert("engine->evaluate(\"Function\")", 0);
+ toInteger.insert("engine->evaluate(\"(function() { return 1; })\")", 0);
+ toInteger.insert("engine->evaluate(\"(function() { return 'ciao'; })\")", 0);
+ toInteger.insert("engine->evaluate(\"(function() { throw new Error('foo'); })\")", 0);
+ toInteger.insert("engine->evaluate(\"/foo/\")", 0);
+ toInteger.insert("engine->evaluate(\"new Object()\")", 0);
+ toInteger.insert("engine->evaluate(\"new Array()\")", 0);
+ toInteger.insert("engine->evaluate(\"new Error()\")", 0);
+ toInteger.insert("engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")", 22);
+ toInteger.insert("engine->evaluate(\"Undefined\")", 0);
+ toInteger.insert("engine->evaluate(\"Null\")", 0);
+ toInteger.insert("engine->evaluate(\"True\")", 0);
+ toInteger.insert("engine->evaluate(\"False\")", 0);
+ toInteger.insert("engine->evaluate(\"undefined\")", 0);
+ toInteger.insert("engine->evaluate(\"null\")", 0);
+ toInteger.insert("engine->evaluate(\"true\")", 1);
+ toInteger.insert("engine->evaluate(\"false\")", 0);
+ toInteger.insert("engine->evaluate(\"122\")", 122);
+ toInteger.insert("engine->evaluate(\"124\")", 124);
+ toInteger.insert("engine->evaluate(\"0\")", 0);
+ toInteger.insert("engine->evaluate(\"0.0\")", 0);
+ toInteger.insert("engine->evaluate(\"123.0\")", 123);
+ toInteger.insert("engine->evaluate(\"6.37e-8\")", 0);
+ toInteger.insert("engine->evaluate(\"-6.37e-8\")", 0);
+ toInteger.insert("engine->evaluate(\"0x43211234\")", 1126240820);
+ toInteger.insert("engine->evaluate(\"0x10000\")", 65536);
+ toInteger.insert("engine->evaluate(\"0x10001\")", 65537);
+ toInteger.insert("engine->evaluate(\"NaN\")", 0);
+ toInteger.insert("engine->evaluate(\"Infinity\")", qInf());
+ toInteger.insert("engine->evaluate(\"-Infinity\")", qInf());
+ toInteger.insert("engine->evaluate(\"'ciao'\")", 0);
+ toInteger.insert("engine->evaluate(\"''\")", 0);
+ toInteger.insert("engine->evaluate(\"'0'\")", 0);
+ toInteger.insert("engine->evaluate(\"'123'\")", 123);
+ toInteger.insert("engine->evaluate(\"'12.4'\")", 12);
+ toInteger.insert("engine->nullValue()", 0);
+ toInteger.insert("engine->undefinedValue()", 0);
+ }
+ newRow(expr) << toInteger.value(expr);
+}
+
+void tst_QScriptValue::toInteger_test(const char*, const QScriptValue& value)
+{
+ QFETCH(qsreal, expected);
+ if (qIsInf(expected)) {
+ QVERIFY(qIsInf(value.toInteger()));
+ QVERIFY(qIsInf(value.toInteger()));
+ return;
+ }
+ QCOMPARE(value.toInteger(), expected);
+ QCOMPARE(value.toInteger(), expected);
+}
+
+DEFINE_TEST_FUNCTION(toInteger)
+
+
+void tst_QScriptValue::toInt32_initData()
+{
+ QTest::addColumn<qint32>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::toInt32_makeData(const char* expr)
+{
+ static QHash<QString, qint32> toInt32;
+ if (toInt32.isEmpty()) {
+ toInt32.insert("QScriptValue()", 0);
+ toInt32.insert("QScriptValue(QScriptValue::UndefinedValue)", 0);
+ toInt32.insert("QScriptValue(QScriptValue::NullValue)", 0);
+ toInt32.insert("QScriptValue(true)", 1);
+ toInt32.insert("QScriptValue(false)", 0);
+ toInt32.insert("QScriptValue(int(122))", 122);
+ toInt32.insert("QScriptValue(uint(124))", 124);
+ toInt32.insert("QScriptValue(0)", 0);
+ toInt32.insert("QScriptValue(0.0)", 0);
+ toInt32.insert("QScriptValue(123.0)", 123);
+ toInt32.insert("QScriptValue(6.37e-8)", 0);
+ toInt32.insert("QScriptValue(-6.37e-8)", 0);
+ toInt32.insert("QScriptValue(0x43211234)", 1126240820);
+ toInt32.insert("QScriptValue(0x10000)", 65536);
+ toInt32.insert("QScriptValue(0x10001)", 65537);
+ toInt32.insert("QScriptValue(qSNaN())", 0);
+ toInt32.insert("QScriptValue(qQNaN())", 0);
+ toInt32.insert("QScriptValue(qInf())", 0);
+ toInt32.insert("QScriptValue(-qInf())", 0);
+ toInt32.insert("QScriptValue(\"NaN\")", 0);
+ toInt32.insert("QScriptValue(\"Infinity\")", 0);
+ toInt32.insert("QScriptValue(\"-Infinity\")", 0);
+ toInt32.insert("QScriptValue(\"ciao\")", 0);
+ toInt32.insert("QScriptValue(QString::fromLatin1(\"ciao\"))", 0);
+ toInt32.insert("QScriptValue(QString(\"\"))", 0);
+ toInt32.insert("QScriptValue(QString())", 0);
+ toInt32.insert("QScriptValue(QString(\"0\"))", 0);
+ toInt32.insert("QScriptValue(QString(\"123\"))", 123);
+ toInt32.insert("QScriptValue(QString(\"12.4\"))", 12);
+ toInt32.insert("QScriptValue(0, QScriptValue::UndefinedValue)", 0);
+ toInt32.insert("QScriptValue(0, QScriptValue::NullValue)", 0);
+ toInt32.insert("QScriptValue(0, true)", 1);
+ toInt32.insert("QScriptValue(0, false)", 0);
+ toInt32.insert("QScriptValue(0, int(122))", 122);
+ toInt32.insert("QScriptValue(0, uint(124))", 124);
+ toInt32.insert("QScriptValue(0, 0)", 0);
+ toInt32.insert("QScriptValue(0, 0.0)", 0);
+ toInt32.insert("QScriptValue(0, 123.0)", 123);
+ toInt32.insert("QScriptValue(0, 6.37e-8)", 0);
+ toInt32.insert("QScriptValue(0, -6.37e-8)", 0);
+ toInt32.insert("QScriptValue(0, 0x43211234)", 1126240820);
+ toInt32.insert("QScriptValue(0, 0x10000)", 65536);
+ toInt32.insert("QScriptValue(0, 0x10001)", 65537);
+ toInt32.insert("QScriptValue(0, qSNaN())", 0);
+ toInt32.insert("QScriptValue(0, qQNaN())", 0);
+ toInt32.insert("QScriptValue(0, qInf())", 0);
+ toInt32.insert("QScriptValue(0, -qInf())", 0);
+ toInt32.insert("QScriptValue(0, \"NaN\")", 0);
+ toInt32.insert("QScriptValue(0, \"Infinity\")", 0);
+ toInt32.insert("QScriptValue(0, \"-Infinity\")", 0);
+ toInt32.insert("QScriptValue(0, \"ciao\")", 0);
+ toInt32.insert("QScriptValue(0, QString::fromLatin1(\"ciao\"))", 0);
+ toInt32.insert("QScriptValue(0, QString(\"\"))", 0);
+ toInt32.insert("QScriptValue(0, QString())", 0);
+ toInt32.insert("QScriptValue(0, QString(\"0\"))", 0);
+ toInt32.insert("QScriptValue(0, QString(\"123\"))", 123);
+ toInt32.insert("QScriptValue(0, QString(\"12.3\"))", 12);
+ toInt32.insert("QScriptValue(engine, QScriptValue::UndefinedValue)", 0);
+ toInt32.insert("QScriptValue(engine, QScriptValue::NullValue)", 0);
+ toInt32.insert("QScriptValue(engine, true)", 1);
+ toInt32.insert("QScriptValue(engine, false)", 0);
+ toInt32.insert("QScriptValue(engine, int(122))", 122);
+ toInt32.insert("QScriptValue(engine, uint(124))", 124);
+ toInt32.insert("QScriptValue(engine, 0)", 0);
+ toInt32.insert("QScriptValue(engine, 0.0)", 0);
+ toInt32.insert("QScriptValue(engine, 123.0)", 123);
+ toInt32.insert("QScriptValue(engine, 6.37e-8)", 0);
+ toInt32.insert("QScriptValue(engine, -6.37e-8)", 0);
+ toInt32.insert("QScriptValue(engine, 0x43211234)", 1126240820);
+ toInt32.insert("QScriptValue(engine, 0x10000)", 65536);
+ toInt32.insert("QScriptValue(engine, 0x10001)", 65537);
+ toInt32.insert("QScriptValue(engine, qSNaN())", 0);
+ toInt32.insert("QScriptValue(engine, qQNaN())", 0);
+ toInt32.insert("QScriptValue(engine, qInf())", 0);
+ toInt32.insert("QScriptValue(engine, -qInf())", 0);
+ toInt32.insert("QScriptValue(engine, \"NaN\")", 0);
+ toInt32.insert("QScriptValue(engine, \"Infinity\")", 0);
+ toInt32.insert("QScriptValue(engine, \"-Infinity\")", 0);
+ toInt32.insert("QScriptValue(engine, \"ciao\")", 0);
+ toInt32.insert("QScriptValue(engine, QString::fromLatin1(\"ciao\"))", 0);
+ toInt32.insert("QScriptValue(engine, QString(\"\"))", 0);
+ toInt32.insert("QScriptValue(engine, QString())", 0);
+ toInt32.insert("QScriptValue(engine, QString(\"0\"))", 0);
+ toInt32.insert("QScriptValue(engine, QString(\"123\"))", 123);
+ toInt32.insert("QScriptValue(engine, QString(\"1.23\"))", 1);
+ toInt32.insert("engine->evaluate(\"[]\")", 0);
+ toInt32.insert("engine->evaluate(\"{}\")", 0);
+ toInt32.insert("engine->evaluate(\"Object.prototype\")", 0);
+ toInt32.insert("engine->evaluate(\"Date.prototype\")", 0);
+ toInt32.insert("engine->evaluate(\"Array.prototype\")", 0);
+ toInt32.insert("engine->evaluate(\"Function.prototype\")", 0);
+ toInt32.insert("engine->evaluate(\"Error.prototype\")", 0);
+ toInt32.insert("engine->evaluate(\"Object\")", 0);
+ toInt32.insert("engine->evaluate(\"Array\")", 0);
+ toInt32.insert("engine->evaluate(\"Number\")", 0);
+ toInt32.insert("engine->evaluate(\"Function\")", 0);
+ toInt32.insert("engine->evaluate(\"(function() { return 1; })\")", 0);
+ toInt32.insert("engine->evaluate(\"(function() { return 'ciao'; })\")", 0);
+ toInt32.insert("engine->evaluate(\"(function() { throw new Error('foo'); })\")", 0);
+ toInt32.insert("engine->evaluate(\"/foo/\")", 0);
+ toInt32.insert("engine->evaluate(\"new Object()\")", 0);
+ toInt32.insert("engine->evaluate(\"new Array()\")", 0);
+ toInt32.insert("engine->evaluate(\"new Error()\")", 0);
+ toInt32.insert("engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")", 22);
+ toInt32.insert("engine->evaluate(\"Undefined\")", 0);
+ toInt32.insert("engine->evaluate(\"Null\")", 0);
+ toInt32.insert("engine->evaluate(\"True\")", 0);
+ toInt32.insert("engine->evaluate(\"False\")", 0);
+ toInt32.insert("engine->evaluate(\"undefined\")", 0);
+ toInt32.insert("engine->evaluate(\"null\")", 0);
+ toInt32.insert("engine->evaluate(\"true\")", 1);
+ toInt32.insert("engine->evaluate(\"false\")", 0);
+ toInt32.insert("engine->evaluate(\"122\")", 122);
+ toInt32.insert("engine->evaluate(\"124\")", 124);
+ toInt32.insert("engine->evaluate(\"0\")", 0);
+ toInt32.insert("engine->evaluate(\"0.0\")", 0);
+ toInt32.insert("engine->evaluate(\"123.0\")", 123);
+ toInt32.insert("engine->evaluate(\"6.37e-8\")", 0);
+ toInt32.insert("engine->evaluate(\"-6.37e-8\")", 0);
+ toInt32.insert("engine->evaluate(\"0x43211234\")", 1126240820);
+ toInt32.insert("engine->evaluate(\"0x10000\")", 65536);
+ toInt32.insert("engine->evaluate(\"0x10001\")", 65537);
+ toInt32.insert("engine->evaluate(\"NaN\")", 0);
+ toInt32.insert("engine->evaluate(\"Infinity\")", 0);
+ toInt32.insert("engine->evaluate(\"-Infinity\")", 0);
+ toInt32.insert("engine->evaluate(\"'ciao'\")", 0);
+ toInt32.insert("engine->evaluate(\"''\")", 0);
+ toInt32.insert("engine->evaluate(\"'0'\")", 0);
+ toInt32.insert("engine->evaluate(\"'123'\")", 123);
+ toInt32.insert("engine->evaluate(\"'12.4'\")", 12);
+ toInt32.insert("engine->nullValue()", 0);
+ toInt32.insert("engine->undefinedValue()", 0);
+ }
+ newRow(expr) << toInt32.value(expr);
+}
+
+void tst_QScriptValue::toInt32_test(const char*, const QScriptValue& value)
+{
+ QFETCH(qint32, expected);
+ QCOMPARE(value.toInt32(), expected);
+ QCOMPARE(value.toInt32(), expected);
+}
+
+DEFINE_TEST_FUNCTION(toInt32)
+
+
+void tst_QScriptValue::toUInt32_initData()
+{
+ QTest::addColumn<quint32>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::toUInt32_makeData(const char* expr)
+{
+ static QHash<QString, quint32> toUInt32;
+ if (toUInt32.isEmpty()) {
+ toUInt32.insert("QScriptValue()", 0);
+ toUInt32.insert("QScriptValue(QScriptValue::UndefinedValue)", 0);
+ toUInt32.insert("QScriptValue(QScriptValue::NullValue)", 0);
+ toUInt32.insert("QScriptValue(true)", 1);
+ toUInt32.insert("QScriptValue(false)", 0);
+ toUInt32.insert("QScriptValue(int(122))", 122);
+ toUInt32.insert("QScriptValue(uint(124))", 124);
+ toUInt32.insert("QScriptValue(0)", 0);
+ toUInt32.insert("QScriptValue(0.0)", 0);
+ toUInt32.insert("QScriptValue(123.0)", 123);
+ toUInt32.insert("QScriptValue(6.37e-8)", 0);
+ toUInt32.insert("QScriptValue(-6.37e-8)", 0);
+ toUInt32.insert("QScriptValue(0x43211234)", 1126240820);
+ toUInt32.insert("QScriptValue(0x10000)", 65536);
+ toUInt32.insert("QScriptValue(0x10001)", 65537);
+ toUInt32.insert("QScriptValue(qSNaN())", 0);
+ toUInt32.insert("QScriptValue(qQNaN())", 0);
+ toUInt32.insert("QScriptValue(qInf())", 0);
+ toUInt32.insert("QScriptValue(-qInf())", 0);
+ toUInt32.insert("QScriptValue(\"NaN\")", 0);
+ toUInt32.insert("QScriptValue(\"Infinity\")", 0);
+ toUInt32.insert("QScriptValue(\"-Infinity\")", 0);
+ toUInt32.insert("QScriptValue(\"ciao\")", 0);
+ toUInt32.insert("QScriptValue(QString::fromLatin1(\"ciao\"))", 0);
+ toUInt32.insert("QScriptValue(QString(\"\"))", 0);
+ toUInt32.insert("QScriptValue(QString())", 0);
+ toUInt32.insert("QScriptValue(QString(\"0\"))", 0);
+ toUInt32.insert("QScriptValue(QString(\"123\"))", 123);
+ toUInt32.insert("QScriptValue(QString(\"12.4\"))", 12);
+ toUInt32.insert("QScriptValue(0, QScriptValue::UndefinedValue)", 0);
+ toUInt32.insert("QScriptValue(0, QScriptValue::NullValue)", 0);
+ toUInt32.insert("QScriptValue(0, true)", 1);
+ toUInt32.insert("QScriptValue(0, false)", 0);
+ toUInt32.insert("QScriptValue(0, int(122))", 122);
+ toUInt32.insert("QScriptValue(0, uint(124))", 124);
+ toUInt32.insert("QScriptValue(0, 0)", 0);
+ toUInt32.insert("QScriptValue(0, 0.0)", 0);
+ toUInt32.insert("QScriptValue(0, 123.0)", 123);
+ toUInt32.insert("QScriptValue(0, 6.37e-8)", 0);
+ toUInt32.insert("QScriptValue(0, -6.37e-8)", 0);
+ toUInt32.insert("QScriptValue(0, 0x43211234)", 1126240820);
+ toUInt32.insert("QScriptValue(0, 0x10000)", 65536);
+ toUInt32.insert("QScriptValue(0, 0x10001)", 65537);
+ toUInt32.insert("QScriptValue(0, qSNaN())", 0);
+ toUInt32.insert("QScriptValue(0, qQNaN())", 0);
+ toUInt32.insert("QScriptValue(0, qInf())", 0);
+ toUInt32.insert("QScriptValue(0, -qInf())", 0);
+ toUInt32.insert("QScriptValue(0, \"NaN\")", 0);
+ toUInt32.insert("QScriptValue(0, \"Infinity\")", 0);
+ toUInt32.insert("QScriptValue(0, \"-Infinity\")", 0);
+ toUInt32.insert("QScriptValue(0, \"ciao\")", 0);
+ toUInt32.insert("QScriptValue(0, QString::fromLatin1(\"ciao\"))", 0);
+ toUInt32.insert("QScriptValue(0, QString(\"\"))", 0);
+ toUInt32.insert("QScriptValue(0, QString())", 0);
+ toUInt32.insert("QScriptValue(0, QString(\"0\"))", 0);
+ toUInt32.insert("QScriptValue(0, QString(\"123\"))", 123);
+ toUInt32.insert("QScriptValue(0, QString(\"12.3\"))", 12);
+ toUInt32.insert("QScriptValue(engine, QScriptValue::UndefinedValue)", 0);
+ toUInt32.insert("QScriptValue(engine, QScriptValue::NullValue)", 0);
+ toUInt32.insert("QScriptValue(engine, true)", 1);
+ toUInt32.insert("QScriptValue(engine, false)", 0);
+ toUInt32.insert("QScriptValue(engine, int(122))", 122);
+ toUInt32.insert("QScriptValue(engine, uint(124))", 124);
+ toUInt32.insert("QScriptValue(engine, 0)", 0);
+ toUInt32.insert("QScriptValue(engine, 0.0)", 0);
+ toUInt32.insert("QScriptValue(engine, 123.0)", 123);
+ toUInt32.insert("QScriptValue(engine, 6.37e-8)", 0);
+ toUInt32.insert("QScriptValue(engine, -6.37e-8)", 0);
+ toUInt32.insert("QScriptValue(engine, 0x43211234)", 1126240820);
+ toUInt32.insert("QScriptValue(engine, 0x10000)", 65536);
+ toUInt32.insert("QScriptValue(engine, 0x10001)", 65537);
+ toUInt32.insert("QScriptValue(engine, qSNaN())", 0);
+ toUInt32.insert("QScriptValue(engine, qQNaN())", 0);
+ toUInt32.insert("QScriptValue(engine, qInf())", 0);
+ toUInt32.insert("QScriptValue(engine, -qInf())", 0);
+ toUInt32.insert("QScriptValue(engine, \"NaN\")", 0);
+ toUInt32.insert("QScriptValue(engine, \"Infinity\")", 0);
+ toUInt32.insert("QScriptValue(engine, \"-Infinity\")", 0);
+ toUInt32.insert("QScriptValue(engine, \"ciao\")", 0);
+ toUInt32.insert("QScriptValue(engine, QString::fromLatin1(\"ciao\"))", 0);
+ toUInt32.insert("QScriptValue(engine, QString(\"\"))", 0);
+ toUInt32.insert("QScriptValue(engine, QString())", 0);
+ toUInt32.insert("QScriptValue(engine, QString(\"0\"))", 0);
+ toUInt32.insert("QScriptValue(engine, QString(\"123\"))", 123);
+ toUInt32.insert("QScriptValue(engine, QString(\"1.23\"))", 1);
+ toUInt32.insert("engine->evaluate(\"[]\")", 0);
+ toUInt32.insert("engine->evaluate(\"{}\")", 0);
+ toUInt32.insert("engine->evaluate(\"Object.prototype\")", 0);
+ toUInt32.insert("engine->evaluate(\"Date.prototype\")", 0);
+ toUInt32.insert("engine->evaluate(\"Array.prototype\")", 0);
+ toUInt32.insert("engine->evaluate(\"Function.prototype\")", 0);
+ toUInt32.insert("engine->evaluate(\"Error.prototype\")", 0);
+ toUInt32.insert("engine->evaluate(\"Object\")", 0);
+ toUInt32.insert("engine->evaluate(\"Array\")", 0);
+ toUInt32.insert("engine->evaluate(\"Number\")", 0);
+ toUInt32.insert("engine->evaluate(\"Function\")", 0);
+ toUInt32.insert("engine->evaluate(\"(function() { return 1; })\")", 0);
+ toUInt32.insert("engine->evaluate(\"(function() { return 'ciao'; })\")", 0);
+ toUInt32.insert("engine->evaluate(\"(function() { throw new Error('foo'); })\")", 0);
+ toUInt32.insert("engine->evaluate(\"/foo/\")", 0);
+ toUInt32.insert("engine->evaluate(\"new Object()\")", 0);
+ toUInt32.insert("engine->evaluate(\"new Array()\")", 0);
+ toUInt32.insert("engine->evaluate(\"new Error()\")", 0);
+ toUInt32.insert("engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")", 22);
+ toUInt32.insert("engine->evaluate(\"Undefined\")", 0);
+ toUInt32.insert("engine->evaluate(\"Null\")", 0);
+ toUInt32.insert("engine->evaluate(\"True\")", 0);
+ toUInt32.insert("engine->evaluate(\"False\")", 0);
+ toUInt32.insert("engine->evaluate(\"undefined\")", 0);
+ toUInt32.insert("engine->evaluate(\"null\")", 0);
+ toUInt32.insert("engine->evaluate(\"true\")", 1);
+ toUInt32.insert("engine->evaluate(\"false\")", 0);
+ toUInt32.insert("engine->evaluate(\"122\")", 122);
+ toUInt32.insert("engine->evaluate(\"124\")", 124);
+ toUInt32.insert("engine->evaluate(\"0\")", 0);
+ toUInt32.insert("engine->evaluate(\"0.0\")", 0);
+ toUInt32.insert("engine->evaluate(\"123.0\")", 123);
+ toUInt32.insert("engine->evaluate(\"6.37e-8\")", 0);
+ toUInt32.insert("engine->evaluate(\"-6.37e-8\")", 0);
+ toUInt32.insert("engine->evaluate(\"0x43211234\")", 1126240820);
+ toUInt32.insert("engine->evaluate(\"0x10000\")", 65536);
+ toUInt32.insert("engine->evaluate(\"0x10001\")", 65537);
+ toUInt32.insert("engine->evaluate(\"NaN\")", 0);
+ toUInt32.insert("engine->evaluate(\"Infinity\")", 0);
+ toUInt32.insert("engine->evaluate(\"-Infinity\")", 0);
+ toUInt32.insert("engine->evaluate(\"'ciao'\")", 0);
+ toUInt32.insert("engine->evaluate(\"''\")", 0);
+ toUInt32.insert("engine->evaluate(\"'0'\")", 0);
+ toUInt32.insert("engine->evaluate(\"'123'\")", 123);
+ toUInt32.insert("engine->evaluate(\"'12.4'\")", 12);
+ toUInt32.insert("engine->nullValue()", 0);
+ toUInt32.insert("engine->undefinedValue()", 0);
+ }
+ newRow(expr) << toUInt32.value(expr);
+}
+
+void tst_QScriptValue::toUInt32_test(const char*, const QScriptValue& value)
+{
+ QFETCH(quint32, expected);
+ QCOMPARE(value.toUInt32(), expected);
+ QCOMPARE(value.toUInt32(), expected);
+}
+
+DEFINE_TEST_FUNCTION(toUInt32)
+
+
+void tst_QScriptValue::toUInt16_initData()
+{
+ QTest::addColumn<quint16>("expected");
+ initScriptValues();
+}
+
+void tst_QScriptValue::toUInt16_makeData(const char* expr)
+{
+ static QHash<QString, quint16> toUInt16;
+ if (toUInt16.isEmpty()) {
+ toUInt16.insert("QScriptValue()", 0);
+ toUInt16.insert("QScriptValue(QScriptValue::UndefinedValue)", 0);
+ toUInt16.insert("QScriptValue(QScriptValue::NullValue)", 0);
+ toUInt16.insert("QScriptValue(true)", 1);
+ toUInt16.insert("QScriptValue(false)", 0);
+ toUInt16.insert("QScriptValue(int(122))", 122);
+ toUInt16.insert("QScriptValue(uint(124))", 124);
+ toUInt16.insert("QScriptValue(0)", 0);
+ toUInt16.insert("QScriptValue(0.0)", 0);
+ toUInt16.insert("QScriptValue(123.0)", 123);
+ toUInt16.insert("QScriptValue(6.37e-8)", 0);
+ toUInt16.insert("QScriptValue(-6.37e-8)", 0);
+ toUInt16.insert("QScriptValue(0x43211234)", 4660);
+ toUInt16.insert("QScriptValue(0x10000)", 0);
+ toUInt16.insert("QScriptValue(0x10001)", 1);
+ toUInt16.insert("QScriptValue(qSNaN())", 0);
+ toUInt16.insert("QScriptValue(qQNaN())", 0);
+ toUInt16.insert("QScriptValue(qInf())", 0);
+ toUInt16.insert("QScriptValue(-qInf())", 0);
+ toUInt16.insert("QScriptValue(\"NaN\")", 0);
+ toUInt16.insert("QScriptValue(\"Infinity\")", 0);
+ toUInt16.insert("QScriptValue(\"-Infinity\")", 0);
+ toUInt16.insert("QScriptValue(\"ciao\")", 0);
+ toUInt16.insert("QScriptValue(QString::fromLatin1(\"ciao\"))", 0);
+ toUInt16.insert("QScriptValue(QString(\"\"))", 0);
+ toUInt16.insert("QScriptValue(QString())", 0);
+ toUInt16.insert("QScriptValue(QString(\"0\"))", 0);
+ toUInt16.insert("QScriptValue(QString(\"123\"))", 123);
+ toUInt16.insert("QScriptValue(QString(\"12.4\"))", 12);
+ toUInt16.insert("QScriptValue(0, QScriptValue::UndefinedValue)", 0);
+ toUInt16.insert("QScriptValue(0, QScriptValue::NullValue)", 0);
+ toUInt16.insert("QScriptValue(0, true)", 1);
+ toUInt16.insert("QScriptValue(0, false)", 0);
+ toUInt16.insert("QScriptValue(0, int(122))", 122);
+ toUInt16.insert("QScriptValue(0, uint(124))", 124);
+ toUInt16.insert("QScriptValue(0, 0)", 0);
+ toUInt16.insert("QScriptValue(0, 0.0)", 0);
+ toUInt16.insert("QScriptValue(0, 123.0)", 123);
+ toUInt16.insert("QScriptValue(0, 6.37e-8)", 0);
+ toUInt16.insert("QScriptValue(0, -6.37e-8)", 0);
+ toUInt16.insert("QScriptValue(0, 0x43211234)", 4660);
+ toUInt16.insert("QScriptValue(0, 0x10000)", 0);
+ toUInt16.insert("QScriptValue(0, 0x10001)", 1);
+ toUInt16.insert("QScriptValue(0, qSNaN())", 0);
+ toUInt16.insert("QScriptValue(0, qQNaN())", 0);
+ toUInt16.insert("QScriptValue(0, qInf())", 0);
+ toUInt16.insert("QScriptValue(0, -qInf())", 0);
+ toUInt16.insert("QScriptValue(0, \"NaN\")", 0);
+ toUInt16.insert("QScriptValue(0, \"Infinity\")", 0);
+ toUInt16.insert("QScriptValue(0, \"-Infinity\")", 0);
+ toUInt16.insert("QScriptValue(0, \"ciao\")", 0);
+ toUInt16.insert("QScriptValue(0, QString::fromLatin1(\"ciao\"))", 0);
+ toUInt16.insert("QScriptValue(0, QString(\"\"))", 0);
+ toUInt16.insert("QScriptValue(0, QString())", 0);
+ toUInt16.insert("QScriptValue(0, QString(\"0\"))", 0);
+ toUInt16.insert("QScriptValue(0, QString(\"123\"))", 123);
+ toUInt16.insert("QScriptValue(0, QString(\"12.3\"))", 12);
+ toUInt16.insert("QScriptValue(engine, QScriptValue::UndefinedValue)", 0);
+ toUInt16.insert("QScriptValue(engine, QScriptValue::NullValue)", 0);
+ toUInt16.insert("QScriptValue(engine, true)", 1);
+ toUInt16.insert("QScriptValue(engine, false)", 0);
+ toUInt16.insert("QScriptValue(engine, int(122))", 122);
+ toUInt16.insert("QScriptValue(engine, uint(124))", 124);
+ toUInt16.insert("QScriptValue(engine, 0)", 0);
+ toUInt16.insert("QScriptValue(engine, 0.0)", 0);
+ toUInt16.insert("QScriptValue(engine, 123.0)", 123);
+ toUInt16.insert("QScriptValue(engine, 6.37e-8)", 0);
+ toUInt16.insert("QScriptValue(engine, -6.37e-8)", 0);
+ toUInt16.insert("QScriptValue(engine, 0x43211234)", 4660);
+ toUInt16.insert("QScriptValue(engine, 0x10000)", 0);
+ toUInt16.insert("QScriptValue(engine, 0x10001)", 1);
+ toUInt16.insert("QScriptValue(engine, qSNaN())", 0);
+ toUInt16.insert("QScriptValue(engine, qQNaN())", 0);
+ toUInt16.insert("QScriptValue(engine, qInf())", 0);
+ toUInt16.insert("QScriptValue(engine, -qInf())", 0);
+ toUInt16.insert("QScriptValue(engine, \"NaN\")", 0);
+ toUInt16.insert("QScriptValue(engine, \"Infinity\")", 0);
+ toUInt16.insert("QScriptValue(engine, \"-Infinity\")", 0);
+ toUInt16.insert("QScriptValue(engine, \"ciao\")", 0);
+ toUInt16.insert("QScriptValue(engine, QString::fromLatin1(\"ciao\"))", 0);
+ toUInt16.insert("QScriptValue(engine, QString(\"\"))", 0);
+ toUInt16.insert("QScriptValue(engine, QString())", 0);
+ toUInt16.insert("QScriptValue(engine, QString(\"0\"))", 0);
+ toUInt16.insert("QScriptValue(engine, QString(\"123\"))", 123);
+ toUInt16.insert("QScriptValue(engine, QString(\"1.23\"))", 1);
+ toUInt16.insert("engine->evaluate(\"[]\")", 0);
+ toUInt16.insert("engine->evaluate(\"{}\")", 0);
+ toUInt16.insert("engine->evaluate(\"Object.prototype\")", 0);
+ toUInt16.insert("engine->evaluate(\"Date.prototype\")", 0);
+ toUInt16.insert("engine->evaluate(\"Array.prototype\")", 0);
+ toUInt16.insert("engine->evaluate(\"Function.prototype\")", 0);
+ toUInt16.insert("engine->evaluate(\"Error.prototype\")", 0);
+ toUInt16.insert("engine->evaluate(\"Object\")", 0);
+ toUInt16.insert("engine->evaluate(\"Array\")", 0);
+ toUInt16.insert("engine->evaluate(\"Number\")", 0);
+ toUInt16.insert("engine->evaluate(\"Function\")", 0);
+ toUInt16.insert("engine->evaluate(\"(function() { return 1; })\")", 0);
+ toUInt16.insert("engine->evaluate(\"(function() { return 'ciao'; })\")", 0);
+ toUInt16.insert("engine->evaluate(\"(function() { throw new Error('foo'); })\")", 0);
+ toUInt16.insert("engine->evaluate(\"/foo/\")", 0);
+ toUInt16.insert("engine->evaluate(\"new Object()\")", 0);
+ toUInt16.insert("engine->evaluate(\"new Array()\")", 0);
+ toUInt16.insert("engine->evaluate(\"new Error()\")", 0);
+ toUInt16.insert("engine->evaluate(\"a = new Object(); a.foo = 22; a.foo\")", 22);
+ toUInt16.insert("engine->evaluate(\"Undefined\")", 0);
+ toUInt16.insert("engine->evaluate(\"Null\")", 0);
+ toUInt16.insert("engine->evaluate(\"True\")", 0);
+ toUInt16.insert("engine->evaluate(\"False\")", 0);
+ toUInt16.insert("engine->evaluate(\"undefined\")", 0);
+ toUInt16.insert("engine->evaluate(\"null\")", 0);
+ toUInt16.insert("engine->evaluate(\"true\")", 1);
+ toUInt16.insert("engine->evaluate(\"false\")", 0);
+ toUInt16.insert("engine->evaluate(\"122\")", 122);
+ toUInt16.insert("engine->evaluate(\"124\")", 124);
+ toUInt16.insert("engine->evaluate(\"0\")", 0);
+ toUInt16.insert("engine->evaluate(\"0.0\")", 0);
+ toUInt16.insert("engine->evaluate(\"123.0\")", 123);
+ toUInt16.insert("engine->evaluate(\"6.37e-8\")", 0);
+ toUInt16.insert("engine->evaluate(\"-6.37e-8\")", 0);
+ toUInt16.insert("engine->evaluate(\"0x43211234\")", 4660);
+ toUInt16.insert("engine->evaluate(\"0x10000\")", 0);
+ toUInt16.insert("engine->evaluate(\"0x10001\")", 1);
+ toUInt16.insert("engine->evaluate(\"NaN\")", 0);
+ toUInt16.insert("engine->evaluate(\"Infinity\")", 0);
+ toUInt16.insert("engine->evaluate(\"-Infinity\")", 0);
+ toUInt16.insert("engine->evaluate(\"'ciao'\")", 0);
+ toUInt16.insert("engine->evaluate(\"''\")", 0);
+ toUInt16.insert("engine->evaluate(\"'0'\")", 0);
+ toUInt16.insert("engine->evaluate(\"'123'\")", 123);
+ toUInt16.insert("engine->evaluate(\"'12.4'\")", 12);
+ toUInt16.insert("engine->nullValue()", 0);
+ toUInt16.insert("engine->undefinedValue()", 0);
+ }
+ newRow(expr) << toUInt16.value(expr);
+}
+
+void tst_QScriptValue::toUInt16_test(const char*, const QScriptValue& value)
+{
+ QFETCH(quint16, expected);
+ QCOMPARE(value.toUInt16(), expected);
+ QCOMPARE(value.toUInt16(), expected);
+}
+
+DEFINE_TEST_FUNCTION(toUInt16)
-isEmpty(OUTPUT_DIR) {
- CONFIG(debug, debug|release) {
- OUTPUT_DIR=$$PWD/WebKitBuild/Debug
- } else { # Release
- OUTPUT_DIR=$$PWD/WebKitBuild/Release
- }
-}
-
-
QMAKE_RPATHDIR = $$OUTPUT_DIR/lib $$QMAKE_RPATHDIR
QMAKE_LIBDIR = $$OUTPUT_DIR/lib $$QMAKE_LIBDIR
mac:!static:contains(QT_CONFIG, qt_framework):!CONFIG(webkit_no_framework) {
TEMPLATE = subdirs
SUBDIRS = qscriptengine \
- qscriptvalue
+ qscriptvalue \
+ qscriptstring
#include "config.h"
#include "ArrayPrototype.h"
-#include "CodeBlock.h"
#include "CachedCall.h"
+#include "CodeBlock.h"
#include "Interpreter.h"
#include "JIT.h"
-#include "ObjectPrototype.h"
+#include "JSStringBuilder.h"
#include "Lookup.h"
+#include "ObjectPrototype.h"
#include "Operations.h"
#include <algorithm>
#include <wtf/Assertions.h>
#if ENABLE(JIT)
// If the JIT is enabled then we need to preserve the invariant that every
// function with a CodeBlock also has JIT code.
- callData.js.functionExecutable->jitCode(exec, callData.js.scopeChain);
- CodeBlock& codeBlock = callData.js.functionExecutable->generatedBytecode();
+ CodeBlock* codeBlock = 0;
+#if ENABLE(INTERPRETER)
+ if (!exec->globalData().canUseJIT())
+ codeBlock = &callData.js.functionExecutable->bytecode(exec, callData.js.scopeChain);
+ else
+#endif
+ {
+ callData.js.functionExecutable->jitCode(exec, callData.js.scopeChain);
+ codeBlock = &callData.js.functionExecutable->generatedBytecode();
+ }
#else
- CodeBlock& codeBlock = callData.js.functionExecutable->bytecode(exec, callData.js.scopeChain);
+ CodeBlock* codeBlock = &callData.js.functionExecutable->bytecode(exec, callData.js.scopeChain);
#endif
- return codeBlock.isNumericCompareFunction();
+ return codeBlock->isNumericCompareFunction();
}
// ------------------------------ ArrayPrototype ----------------------------
JSArray* thisObj = asArray(thisValue);
HashSet<JSObject*>& arrayVisitedElements = exec->globalData().arrayVisitedElements;
- if (arrayVisitedElements.size() >= MaxSecondaryThreadReentryDepth) {
- if (!isMainThread() || arrayVisitedElements.size() >= MaxMainThreadReentryDepth)
- return throwError(exec, RangeError, "Maximum call stack size exceeded.");
+ if (arrayVisitedElements.size() >= MaxSmallThreadReentryDepth) {
+ if (arrayVisitedElements.size() >= exec->globalData().maxReentryDepth)
+ return throwError(exec, RangeError, "Maximum call stack size exceeded.");
}
bool alreadyVisited = !arrayVisitedElements.add(thisObj).second;
totalSize += str.size();
if (!strBuffer.data()) {
- JSObject* error = Error::create(exec, GeneralError, "Out of memory");
- exec->setException(error);
+ throwOutOfMemoryError(exec);
}
if (exec->hadException())
Vector<UChar> buffer;
buffer.reserveCapacity(totalSize);
if (!buffer.data())
- return throwError(exec, GeneralError, "Out of memory");
+ return throwOutOfMemoryError(exec);
for (unsigned i = 0; i < length; i++) {
if (i)
buffer.append(',');
if (RefPtr<UString::Rep> rep = strBuffer[i])
- buffer.append(rep->data(), rep->size());
+ buffer.append(rep->characters(), rep->length());
}
ASSERT(buffer.size() == totalSize);
return jsString(exec, UString::adopt(buffer));
JSObject* thisObj = asArray(thisValue);
HashSet<JSObject*>& arrayVisitedElements = exec->globalData().arrayVisitedElements;
- if (arrayVisitedElements.size() >= MaxSecondaryThreadReentryDepth) {
- if (!isMainThread() || arrayVisitedElements.size() >= MaxMainThreadReentryDepth)
- return throwError(exec, RangeError, "Maximum call stack size exceeded.");
+ if (arrayVisitedElements.size() >= MaxSmallThreadReentryDepth) {
+ if (arrayVisitedElements.size() >= exec->globalData().maxReentryDepth)
+ return throwError(exec, RangeError, "Maximum call stack size exceeded.");
}
bool alreadyVisited = !arrayVisitedElements.add(thisObj).second;
if (alreadyVisited)
return jsEmptyString(exec); // return an empty string, avoiding infinite recursion.
- Vector<UChar, 256> strBuffer;
+ JSStringBuilder strBuffer;
unsigned length = thisObj->get(exec, exec->propertyNames().length).toUInt32(exec);
for (unsigned k = 0; k < length; k++) {
if (k >= 1)
strBuffer.append(',');
- if (!strBuffer.data()) {
- JSObject* error = Error::create(exec, GeneralError, "Out of memory");
- exec->setException(error);
- break;
- }
JSValue element = thisObj->get(exec, k);
- if (element.isUndefinedOrNull())
- continue;
-
- JSObject* o = element.toObject(exec);
- JSValue conversionFunction = o->get(exec, exec->propertyNames().toLocaleString);
- UString str;
- CallData callData;
- CallType callType = conversionFunction.getCallData(callData);
- if (callType != CallTypeNone)
- str = call(exec, conversionFunction, callType, callData, element, exec->emptyList()).toString(exec);
- else
- str = element.toString(exec);
- strBuffer.append(str.data(), str.size());
-
- if (!strBuffer.data()) {
- JSObject* error = Error::create(exec, GeneralError, "Out of memory");
- exec->setException(error);
+ if (!element.isUndefinedOrNull()) {
+ JSObject* o = element.toObject(exec);
+ JSValue conversionFunction = o->get(exec, exec->propertyNames().toLocaleString);
+ UString str;
+ CallData callData;
+ CallType callType = conversionFunction.getCallData(callData);
+ if (callType != CallTypeNone)
+ str = call(exec, conversionFunction, callType, callData, element, exec->emptyList()).toString(exec);
+ else
+ str = element.toString(exec);
+ strBuffer.append(str);
}
-
- if (exec->hadException())
- break;
}
arrayVisitedElements.remove(thisObj);
- return jsString(exec, UString(strBuffer.data(), strBuffer.data() ? strBuffer.size() : 0));
+ return strBuffer.build(exec);
}
JSValue JSC_HOST_CALL arrayProtoFuncJoin(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
JSObject* thisObj = thisValue.toThisObject(exec);
HashSet<JSObject*>& arrayVisitedElements = exec->globalData().arrayVisitedElements;
- if (arrayVisitedElements.size() >= MaxSecondaryThreadReentryDepth) {
- if (!isMainThread() || arrayVisitedElements.size() >= MaxMainThreadReentryDepth)
- return throwError(exec, RangeError, "Maximum call stack size exceeded.");
+ if (arrayVisitedElements.size() >= MaxSmallThreadReentryDepth) {
+ if (arrayVisitedElements.size() >= exec->globalData().maxReentryDepth)
+ return throwError(exec, RangeError, "Maximum call stack size exceeded.");
}
bool alreadyVisited = !arrayVisitedElements.add(thisObj).second;
if (alreadyVisited)
return jsEmptyString(exec); // return an empty string, avoiding infinite recursion.
- Vector<UChar, 256> strBuffer;
+ JSStringBuilder strBuffer;
- UChar comma = ',';
- UString separator = args.at(0).isUndefined() ? UString(&comma, 1) : args.at(0).toString(exec);
+ UString separator;
+ if (!args.at(0).isUndefined())
+ separator = args.at(0).toString(exec);
unsigned length = thisObj->get(exec, exec->propertyNames().length).toUInt32(exec);
- for (unsigned k = 0; k < length; k++) {
- if (k >= 1)
- strBuffer.append(separator.data(), separator.size());
- if (!strBuffer.data()) {
- JSObject* error = Error::create(exec, GeneralError, "Out of memory");
- exec->setException(error);
- break;
+ unsigned k = 0;
+ if (isJSArray(&exec->globalData(), thisObj)) {
+ JSArray* array = asArray(thisObj);
+ for (; k < length; k++) {
+ if (!array->canGetIndex(k))
+ break;
+ if (k >= 1) {
+ if (separator.isNull())
+ strBuffer.append(',');
+ else
+ strBuffer.append(separator);
+ }
+ JSValue element = array->getIndex(k);
+ if (!element.isUndefinedOrNull())
+ strBuffer.append(element.toString(exec));
}
-
- JSValue element = thisObj->get(exec, k);
- if (element.isUndefinedOrNull())
- continue;
-
- UString str = element.toString(exec);
- strBuffer.append(str.data(), str.size());
-
- if (!strBuffer.data()) {
- JSObject* error = Error::create(exec, GeneralError, "Out of memory");
- exec->setException(error);
+ }
+ for (; k < length; k++) {
+ if (k >= 1) {
+ if (separator.isNull())
+ strBuffer.append(',');
+ else
+ strBuffer.append(separator);
}
- if (exec->hadException())
- break;
+ JSValue element = thisObj->get(exec, k);
+ if (!element.isUndefinedOrNull())
+ strBuffer.append(element.toString(exec));
}
arrayVisitedElements.remove(thisObj);
- return jsString(exec, UString(strBuffer.data(), strBuffer.data() ? strBuffer.size() : 0));
+ return strBuffer.build(exec);
}
JSValue JSC_HOST_CALL arrayProtoFuncConcat(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
// 15.4.4.12
JSArray* resObj = constructEmptyArray(exec);
JSValue result = resObj;
- unsigned length = thisObj->get(exec, exec->propertyNames().length).toUInt32(exec);
+
+ // FIXME: Firefox returns an empty array.
if (!args.size())
return jsUndefined();
- int begin = args.at(0).toUInt32(exec);
- if (begin < 0)
- begin = std::max<int>(begin + length, 0);
- else
- begin = std::min<int>(begin, length);
+
+ unsigned length = thisObj->get(exec, exec->propertyNames().length).toUInt32(exec);
+ double relativeBegin = args.at(0).toInteger(exec);
+ unsigned begin;
+ if (relativeBegin < 0) {
+ relativeBegin += length;
+ begin = (relativeBegin < 0) ? 0 : static_cast<unsigned>(relativeBegin);
+ } else
+ begin = std::min<unsigned>(static_cast<unsigned>(relativeBegin), length);
unsigned deleteCount;
if (args.size() > 1)
for (unsigned k = length; k > length - deleteCount + additionalArgs; --k)
thisObj->deleteProperty(exec, k - 1);
} else {
- for (unsigned k = length - deleteCount; (int)k > begin; --k) {
+ for (unsigned k = length - deleteCount; k > begin; --k) {
if (JSValue obj = getProperty(exec, thisObj, k + deleteCount - 1))
thisObj->put(exec, k + additionalArgs - 1, obj);
else
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef CachedTranscendentalFunction_h
+#define CachedTranscendentalFunction_h
+
+#include "JSValue.h"
+
+namespace JSC {
+
+extern const double NaN;
+
+typedef double (*TranscendentalFunctionPtr)(double);
+
+// CachedTranscendentalFunction provides a generic mechanism to cache results
+// for pure functions with the signature "double func(double)" that map NaN
+// to NaN.
+template<TranscendentalFunctionPtr originalFunction>
+class CachedTranscendentalFunction {
+ struct CacheEntry {
+ double operand;
+ double result;
+ };
+
+public:
+ CachedTranscendentalFunction()
+ : m_cache(0)
+ {
+ }
+
+ ~CachedTranscendentalFunction()
+ {
+ if (m_cache)
+ fastFree(m_cache);
+ }
+
+ JSValue operator() (ExecState* exec, double operand)
+ {
+ if (UNLIKELY(!m_cache))
+ initialize();
+ CacheEntry* entry = &m_cache[hash(operand)];
+
+ if (entry->operand == operand)
+ return jsDoubleNumber(exec, entry->result);
+ double result = originalFunction(operand);
+ entry->operand = operand;
+ entry->result = result;
+ return jsDoubleNumber(exec, result);
+ }
+
+private:
+ void initialize()
+ {
+ // Lazily allocate the table, populate with NaN->NaN mapping.
+ m_cache = static_cast<CacheEntry*>(fastMalloc(s_cacheSize * sizeof(CacheEntry)));
+ for (unsigned x = 0; x < s_cacheSize; ++x) {
+ m_cache[x].operand = NaN;
+ m_cache[x].result = NaN;
+ }
+ }
+
+ static unsigned hash(double d)
+ {
+ union doubleAndUInt64 {
+ double d;
+ uint32_t is[2];
+ } u;
+ u.d = d;
+
+ unsigned x = u.is[0] ^ u.is[1];
+ x = (x >> 20) ^ (x >> 8);
+ return x & (s_cacheSize - 1);
+ }
+
+ static const unsigned s_cacheSize = 0x1000;
+ CacheEntry* m_cache;
+};
+
+}
+
+#endif // CachedTranscendentalFunction_h
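A minimal standalone sketch of the direct-mapped caching idea in CachedTranscendentalFunction above, written in plain C++ with illustrative names (TranscendentalCache, kCacheSize, mySin) rather than JSC types: it folds the operand's 64-bit pattern into a table index much as hash() does, and pre-populates the table with NaN so NaN operands never hit the cache and always recompute.

#include <cmath>
#include <cstdint>
#include <cstring>
#include <vector>

// Direct-mapped cache for a pure double -> double function. Sized eagerly in
// the constructor for brevity, unlike the lazy initialize() above.
template <double (*Function)(double)>
class TranscendentalCache {
public:
    TranscendentalCache()
        : m_entries(kCacheSize)
    {
        // NaN never compares equal to anything, so NaN operands always take
        // the recompute path -- mirroring the NaN->NaN pre-population above.
        for (Entry& entry : m_entries)
            entry.operand = entry.result = std::nan("");
    }

    double operator()(double operand)
    {
        Entry& entry = m_entries[hash(operand)];
        if (entry.operand == operand)
            return entry.result;
        entry.operand = operand;
        entry.result = Function(operand);
        return entry.result;
    }

private:
    struct Entry {
        double operand;
        double result;
    };

    // Fold the operand's bit pattern into a table index, as hash() does above.
    static unsigned hash(double d)
    {
        uint64_t bits;
        std::memcpy(&bits, &d, sizeof(bits));
        unsigned x = static_cast<unsigned>(bits) ^ static_cast<unsigned>(bits >> 32);
        x = (x >> 20) ^ (x >> 8);
        return x & (kCacheSize - 1);
    }

    static const unsigned kCacheSize = 0x1000;
    std::vector<Entry> m_entries;
};

// Usage sketch:
//   double mySin(double x) { return std::sin(x); }
//   TranscendentalCache<mySin> cachedSin;
//   double y = cachedSin(1.25); // a second call with 1.25 returns the cached result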
#include <mach/thread_act.h>
#include <mach/vm_map.h>
-#elif OS(SYMBIAN)
-#include <e32std.h>
-#include <e32cmn.h>
-#include <unistd.h>
-
#elif OS(WINDOWS)
#include <windows.h>
// a PIC branch in Mach-O binaries, see <rdar://problem/5971391>.
#define MIN_ARRAY_SIZE (static_cast<size_t>(14))
-#if OS(SYMBIAN)
-const size_t MAX_NUM_BLOCKS = 256; // Max size of collector heap set to 16 MB
-static RHeap* userChunk = 0;
-#endif
-
#if ENABLE(JSC_MULTIPLE_THREADS)
#if OS(DARWIN)
#if ENABLE(JSC_MULTIPLE_THREADS)
, m_registeredThreads(0)
, m_currentThreadRegistrar(0)
+#endif
+#if OS(SYMBIAN)
+ , m_blockallocator(JSCCOLLECTOR_VIRTUALMEM_RESERVATION, BLOCK_SIZE)
#endif
, m_globalData(globalData)
{
ASSERT(globalData);
-
-#if OS(SYMBIAN)
- // Symbian OpenC supports mmap but currently not the MAP_ANON flag.
- // Using fastMalloc() does not properly align blocks on 64k boundaries
- // and previous implementation was flawed/incomplete.
- // UserHeap::ChunkHeap allows allocation of continuous memory and specification
- // of alignment value for (symbian) cells within that heap.
- //
- // Clarification and mapping of terminology:
- // RHeap (created by UserHeap::ChunkHeap below) is continuos memory chunk,
- // which can dynamically grow up to 8 MB,
- // that holds all CollectorBlocks of this session (static).
- // Each symbian cell within RHeap maps to a 64kb aligned CollectorBlock.
- // JSCell objects are maintained as usual within CollectorBlocks.
- if (!userChunk) {
- userChunk = UserHeap::ChunkHeap(0, 0, MAX_NUM_BLOCKS * BLOCK_SIZE, BLOCK_SIZE, BLOCK_SIZE);
- if (!userChunk)
- CRASH();
- }
-#endif // OS(SYMBIAN)
-
memset(&m_heap, 0, sizeof(CollectorHeap));
allocateBlock();
}
t = next;
}
#endif
-
+#if OS(SYMBIAN)
+ m_blockallocator.destroy();
+#endif
m_globalData = 0;
}
vm_address_t address = 0;
vm_map(current_task(), &address, BLOCK_SIZE, BLOCK_OFFSET_MASK, VM_FLAGS_ANYWHERE | VM_TAG_FOR_COLLECTOR_MEMORY, MEMORY_OBJECT_NULL, 0, FALSE, VM_PROT_DEFAULT, VM_PROT_DEFAULT, VM_INHERIT_DEFAULT);
#elif OS(SYMBIAN)
- // Allocate a 64 kb aligned CollectorBlock
- unsigned char* mask = reinterpret_cast<unsigned char*>(userChunk->Alloc(BLOCK_SIZE));
- if (!mask)
+ void* address = m_blockallocator.alloc();
+ if (!address)
CRASH();
- uintptr_t address = reinterpret_cast<uintptr_t>(mask);
#elif OS(WINCE)
void* address = VirtualAlloc(NULL, BLOCK_SIZE, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
#elif OS(WINDOWS)
-#if COMPILER(MINGW)
+#if COMPILER(MINGW) && !COMPILER(MINGW64)
void* address = __mingw_aligned_malloc(BLOCK_SIZE, BLOCK_SIZE);
#else
void* address = _aligned_malloc(BLOCK_SIZE, BLOCK_SIZE);
#if OS(DARWIN)
vm_deallocate(current_task(), reinterpret_cast<vm_address_t>(block), BLOCK_SIZE);
#elif OS(SYMBIAN)
- userChunk->Free(reinterpret_cast<TAny*>(block));
+ m_blockallocator.free(reinterpret_cast<void*>(block));
#elif OS(WINCE)
VirtualFree(block, 0, MEM_RELEASE);
#elif OS(WINDOWS)
-#if COMPILER(MINGW)
+#if COMPILER(MINGW) && !COMPILER(MINGW64)
__mingw_aligned_free(block);
#else
_aligned_free(block);
}
#if OS(WINCE)
-void* g_stackBase = 0;
+JS_EXPORTDATA void* g_stackBase = 0;
inline bool isPageWritable(void* page)
{
MOV pTib, EAX
}
return static_cast<void*>(pTib->StackBase);
-#elif OS(WINDOWS) && CPU(X86_64) && COMPILER(MSVC)
- // FIXME: why only for MSVC?
- PNT_TIB64 pTib = reinterpret_cast<PNT_TIB64>(NtCurrentTeb());
- return reinterpret_cast<void*>(pTib->StackBase);
#elif OS(WINDOWS) && CPU(X86) && COMPILER(GCC)
// offset 0x18 from the FS segment register gives a pointer to
// the thread information block for the current thread
: "=r" (pTib)
);
return static_cast<void*>(pTib->StackBase);
+#elif OS(WINDOWS) && CPU(X86_64)
+ PNT_TIB64 pTib = reinterpret_cast<PNT_TIB64>(NtCurrentTeb());
+ return reinterpret_cast<void*>(pTib->StackBase);
#elif OS(QNX)
+ AtomicallyInitializedStatic(Mutex&, mutex = *new Mutex);
+ MutexLocker locker(mutex);
return currentThreadStackBaseQNX();
#elif OS(SOLARIS)
stack_t s;
pthread_stackseg_np(thread, &stack);
return stack.ss_sp;
#elif OS(SYMBIAN)
- static void* stackBase = 0;
- if (stackBase == 0) {
- TThreadStackInfo info;
- RThread thread;
- thread.StackInfo(info);
- stackBase = (void*)info.iBase;
- }
- return (void*)stackBase;
+ TThreadStackInfo info;
+ RThread thread;
+ thread.StackInfo(info);
+ return (void*)info.iBase;
#elif OS(HAIKU)
thread_info threadInfo;
get_thread_info(find_thread(NULL), &threadInfo);
return threadInfo.stack_end;
#elif OS(UNIX)
+ AtomicallyInitializedStatic(Mutex&, mutex = *new Mutex);
+ MutexLocker locker(mutex);
static void* stackBase = 0;
static size_t stackSize = 0;
static pthread_t stackThread;
}
return static_cast<char*>(stackBase) + stackSize;
#elif OS(WINCE)
+ AtomicallyInitializedStatic(Mutex&, mutex = *new Mutex);
+ MutexLocker locker(mutex);
if (g_stackBase)
return g_stackBase;
else {
void Heap::registerThread()
{
- ASSERT(!m_globalData->mainThreadOnly || isMainThread() || pthread_main_np());
+ ASSERT(!m_globalData->exclusiveThread || m_globalData->exclusiveThread == currentThread());
if (!m_currentThreadRegistrar || pthread_getspecific(m_currentThreadRegistrar))
return;
void Heap::protect(JSValue k)
{
ASSERT(k);
- ASSERT(JSLock::currentThreadIsHoldingLock() || !m_globalData->isSharedInstance);
+ ASSERT(JSLock::currentThreadIsHoldingLock() || !m_globalData->isSharedInstance());
if (!k.isCell())
return;
m_protectedValues.add(k.asCell());
}
-void Heap::unprotect(JSValue k)
+bool Heap::unprotect(JSValue k)
{
ASSERT(k);
- ASSERT(JSLock::currentThreadIsHoldingLock() || !m_globalData->isSharedInstance);
+ ASSERT(JSLock::currentThreadIsHoldingLock() || !m_globalData->isSharedInstance());
if (!k.isCell())
- return;
+ return false;
- m_protectedValues.remove(k.asCell());
+ return m_protectedValues.remove(k.asCell());
}
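The bool return added to Heap::unprotect above reports whether the cell has left the protected-value table entirely; because protect and unprotect calls are counted, only the call that drops the last count reports true (see the header comment on unprotect later in this patch). A standalone analogue of that counted-set behaviour, assuming nothing beyond standard C++ -- the class and member names here are illustrative, not JSC's:

#include <unordered_map>

// Counted protect set: add() bumps a per-cell count, remove() reports true
// only when the final count is dropped and the cell leaves the set.
class ProtectCountSetSketch {
public:
    void add(void* cell) { ++m_counts[cell]; }

    bool remove(void* cell)
    {
        std::unordered_map<void*, unsigned>::iterator it = m_counts.find(cell);
        if (it == m_counts.end())
            return false;  // was never protected
        if (--it->second > 0)
            return false;  // still protected by other callers
        m_counts.erase(it);
        return true;       // last protection removed
    }

private:
    std::unordered_map<void*, unsigned> m_counts;
};

// A caller that balances N add() calls with N remove() calls sees true only
// from the final remove() -- the point at which the cell is no longer kept
// alive by explicit protection, though other heap or stack references may
// still reach it, as the header comment below notes.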
void Heap::markProtectedObjects(MarkStack& markStack)
void Heap::markRoots()
{
#ifndef NDEBUG
- if (m_globalData->isSharedInstance) {
+ if (m_globalData->isSharedInstance()) {
ASSERT(JSLock::lockCount() > 0);
ASSERT(JSLock::currentThreadIsHoldingLock());
}
return "number";
#endif
if (cell->isGetterSetter())
- return "gettersetter";
+ return "Getter-Setter";
if (cell->isAPIValueWrapper())
- return "value wrapper";
+ return "API wrapper";
if (cell->isPropertyNameIterator())
- return "for-in iterator";
- ASSERT(cell->isObject());
+ return "For-in iterator";
+ if (!cell->isObject())
+ return "[empty cell]";
const ClassInfo* info = cell->classInfo();
return info ? info->className : "Object";
}
return counts;
}
+HashCountedSet<const char*>* Heap::objectTypeCounts()
+{
+ HashCountedSet<const char*>* counts = new HashCountedSet<const char*>;
+
+ LiveObjectIterator it = primaryHeapBegin();
+ LiveObjectIterator heapEnd = primaryHeapEnd();
+ for ( ; it != heapEnd; ++it)
+ counts->add(typeName(*it));
+
+ return counts;
+}
+
bool Heap::isBusy()
{
return m_heap.operationInProgress != NoOperation;
#include <pthread.h>
#endif
+#if OS(SYMBIAN)
+#include <wtf/symbian/BlockAllocatorSymbian.h>
+#endif
+
#define ASSERT_CLASS_FITS_IN_CELL(class) COMPILE_ASSERT(sizeof(class) <= CELL_SIZE, class_fits_in_cell)
namespace JSC {
Statistics statistics() const;
void protect(JSValue);
- void unprotect(JSValue);
+ // Returns true if the value is no longer protected by any protect pointers
+ // (though it may still be alive due to heap/stack references).
+ bool unprotect(JSValue);
static Heap* heap(JSValue); // 0 for immediate values
static Heap* heap(JSCell*);
size_t protectedObjectCount();
size_t protectedGlobalObjectCount();
HashCountedSet<const char*>* protectedObjectTypeCounts();
+ HashCountedSet<const char*>* objectTypeCounts();
void registerThread(); // Only needs to be called by clients that can use the same heap from multiple threads.
pthread_key_t m_currentThreadRegistrar;
#endif
+#if OS(SYMBIAN)
+ // Allocates collector blocks with correct alignment
+ WTF::AlignedBlockAllocator m_blockallocator;
+#endif
+
JSGlobalData* m_globalData;
};
protected:
CollectorHeapIterator(CollectorHeap&, size_t startBlock, size_t startCell);
- void advance(size_t cellsPerBlock);
+ void advance(size_t max);
CollectorHeap& m_heap;
size_t m_block;
return reinterpret_cast<JSCell*>(m_heap.blocks[m_block]->cells + m_cell);
}
- inline void CollectorHeapIterator::advance(size_t cellsPerBlock)
+ // Iterators advance up to the next-to-last -- and not the last -- cell in a
+ // block, since the last cell is a dummy sentinel.
+ inline void CollectorHeapIterator::advance(size_t max)
{
++m_cell;
- if (m_cell == cellsPerBlock) {
+ if (m_cell == max) {
m_cell = 0;
++m_block;
}
inline LiveObjectIterator& LiveObjectIterator::operator++()
{
- if (m_block < m_heap.nextBlock || m_cell < m_heap.nextCell) {
- advance(HeapConstants::cellsPerBlock);
+ advance(HeapConstants::cellsPerBlock - 1);
+ if (m_block < m_heap.nextBlock || (m_block == m_heap.nextBlock && m_cell < m_heap.nextCell))
return *this;
- }
- do {
- advance(HeapConstants::cellsPerBlock);
- } while (m_block < m_heap.usedBlocks && !m_heap.blocks[m_block]->marked.get(m_cell));
+ while (m_block < m_heap.usedBlocks && !m_heap.blocks[m_block]->marked.get(m_cell))
+ advance(HeapConstants::cellsPerBlock - 1);
return *this;
}
inline DeadObjectIterator& DeadObjectIterator::operator++()
{
do {
- advance(HeapConstants::cellsPerBlock);
+ advance(HeapConstants::cellsPerBlock - 1);
ASSERT(m_block > m_heap.nextBlock || (m_block == m_heap.nextBlock && m_cell >= m_heap.nextCell));
} while (m_block < m_heap.usedBlocks && m_heap.blocks[m_block]->marked.get(m_cell));
return *this;
inline ObjectIterator& ObjectIterator::operator++()
{
- advance(HeapConstants::cellsPerBlock);
+ advance(HeapConstants::cellsPerBlock - 1);
return *this;
}
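The advance(max) change above makes each iterator roll over to the next block one cell before the end, treating the last cell of every block as a sentinel, per the comment added above. A small self-contained sketch of that traversal pattern, with block and iterator names that are illustrative rather than the collector's real types:

#include <cstddef>
#include <vector>

struct SketchBlock {
    static const std::size_t cellsPerBlock = 8;
    int cells[cellsPerBlock];
};

// Walks every cell except the last one in each block, mirroring
// advance(cellsPerBlock - 1) above.
class BlockIterator {
public:
    explicit BlockIterator(std::vector<SketchBlock>& blocks)
        : m_blocks(blocks), m_block(0), m_cell(0)
    {
    }

    bool atEnd() const { return m_block >= m_blocks.size(); }
    int& operator*() { return m_blocks[m_block].cells[m_cell]; }

    BlockIterator& operator++()
    {
        advance(SketchBlock::cellsPerBlock - 1); // skip the sentinel cell
        return *this;
    }

private:
    void advance(std::size_t max)
    {
        ++m_cell;
        if (m_cell == max) {
            m_cell = 0;
            ++m_block;
        }
    }

    std::vector<SketchBlock>& m_blocks;
    std::size_t m_block;
    std::size_t m_cell;
};

// Usage sketch: zero every non-sentinel cell.
//   std::vector<SketchBlock> blocks(2);
//   for (BlockIterator it(blocks); !it.atEnd(); ++it)
//       *it = 0;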
#include "Interpreter.h"
#include "Parser.h"
#include "Debugger.h"
+#include "WTFThreadData.h"
#include <stdio.h>
namespace JSC {
Completion checkSyntax(ExecState* exec, const SourceCode& source)
{
JSLock lock(exec);
- ASSERT(exec->globalData().identifierTable == currentIdentifierTable());
+ ASSERT(exec->globalData().identifierTable == wtfThreadData().currentIdentifierTable());
RefPtr<ProgramExecutable> program = ProgramExecutable::create(exec, source);
JSObject* error = program->checkSyntax(exec);
Completion evaluate(ExecState* exec, ScopeChain& scopeChain, const SourceCode& source, JSValue thisValue)
{
JSLock lock(exec);
- ASSERT(exec->globalData().identifierTable == currentIdentifierTable());
+ ASSERT(exec->globalData().identifierTable == wtfThreadData().currentIdentifierTable());
RefPtr<ProgramExecutable> program = ProgramExecutable::create(exec, source);
JSObject* error = program->compile(exec, scopeChain.node());
JSValue result = exec->interpreter()->execute(program.get(), exec, scopeChain.node(), thisObj, &exception);
if (exception) {
- if (exception.isObject() && asObject(exception)->isWatchdogException())
- return Completion(Interrupted, exception);
- return Completion(Throw, exception);
+ ComplType exceptionType = Throw;
+ if (exception.isObject())
+ exceptionType = asObject(exception)->exceptionType();
+ return Completion(exceptionType, exception);
}
return Completion(Normal, result);
}
class ScopeChain;
class SourceCode;
- enum ComplType { Normal, Break, Continue, ReturnValue, Throw, Interrupted };
+ enum ComplType { Normal, Break, Continue, ReturnValue, Throw, Interrupted, Terminated };
/*
* Completion objects are used to convey the return status and value
#include "JSFunction.h"
#include "JSGlobalObject.h"
#include "JSString.h"
+#include "JSStringBuilder.h"
#include "ObjectPrototype.h"
#include "PrototypeFunction.h"
#include <math.h>
DateConversionBuffer time;
formatDate(ts, date);
formatTime(ts, time);
- return jsNontrivialString(exec, makeString(date, " ", time));
+ return jsMakeNontrivialString(exec, date, " ", time);
}
CallType DateConstructor::getCallData(CallData& callData)
t.minute = args.at(4).toInt32(exec);
t.second = args.at(5).toInt32(exec);
double ms = (n >= 7) ? args.at(6).toNumber(exec) : 0;
- return jsNumber(exec, gregorianDateTimeToMS(exec, t, ms, true));
+ return jsNumber(exec, timeClip(gregorianDateTimeToMS(exec, t, ms, true)));
}
} // namespace JSC
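
The extra timeClip() call matches the ECMAScript TimeClip operation: out-of-range or non-finite millisecond values become NaN instead of being stored in the Date. A rough standalone sketch of that rule, illustrative only and not the engine's implementation:

#include <cmath>
#include <limits>

// TimeClip as specified by ECMA-262: times more than 8.64e15 ms from the epoch
// are not representable as a Date and collapse to NaN; everything else is
// truncated toward zero to an integral count of milliseconds.
double timeClipSketch(double milliseconds)
{
    if (!std::isfinite(milliseconds) || std::fabs(milliseconds) > 8.64e15)
        return std::numeric_limits<double>::quiet_NaN();
    return std::trunc(milliseconds);
}
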
{
if (date == exec->globalData().cachedDateString)
return exec->globalData().cachedDateStringValue;
- double value = parseDateFromNullTerminatedCharacters(exec, date.UTF8String().c_str());
+ double value = parseDateFromNullTerminatedCharacters(exec, date.UTF8String().data());
exec->globalData().cachedDateString = date;
exec->globalData().cachedDateStringValue = value;
return value;
#include "DatePrototype.h"
#include "DateConversion.h"
+#include "DateInstance.h"
#include "Error.h"
#include "JSString.h"
+#include "JSStringBuilder.h"
+#include "Lookup.h"
#include "ObjectPrototype.h"
-#include "DateInstance.h"
#if !PLATFORM(MAC) && HAVE(LANGINFO_H)
#include <langinfo.h>
DateConversionBuffer time;
formatDate(*gregorianDateTime, date);
formatTime(*gregorianDateTime, time);
- return jsNontrivialString(exec, makeString(date, " ", time));
+ return jsMakeNontrivialString(exec, date, " ", time);
}
JSValue JSC_HOST_CALL dateProtoFuncToUTCString(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
DateConversionBuffer time;
formatDateUTCVariant(*gregorianDateTime, date);
formatTimeUTC(*gregorianDateTime, time);
- return jsNontrivialString(exec, makeString(date, " ", time));
+ return jsMakeNontrivialString(exec, date, " ", time);
}
JSValue JSC_HOST_CALL dateProtoFuncToISOString(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
DateConversionBuffer time;
formatDateUTCVariant(*gregorianDateTime, date);
formatTimeUTC(*gregorianDateTime, time);
- return jsNontrivialString(exec, makeString(date, " ", time));
+ return jsMakeNontrivialString(exec, date, " ", time);
}
JSValue JSC_HOST_CALL dateProtoFuncGetMonth(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
JSObject* Error::create(ExecState* exec, ErrorType type, const char* message)
{
- return create(exec, type, message, -1, -1, NULL);
+ return create(exec, type, message, -1, -1, UString());
}
JSObject* throwError(ExecState* exec, JSObject* error)
JSObject* throwError(ExecState* exec, ErrorType type)
{
- JSObject* error = Error::create(exec, type, UString(), -1, -1, NULL);
+ JSObject* error = Error::create(exec, type, UString(), -1, -1, UString());
exec->setException(error);
return error;
}
JSObject* throwError(ExecState* exec, ErrorType type, const UString& message)
{
- JSObject* error = Error::create(exec, type, message, -1, -1, NULL);
+ JSObject* error = Error::create(exec, type, message, -1, -1, UString());
exec->setException(error);
return error;
}
JSObject* throwError(ExecState* exec, ErrorType type, const char* message)
{
- JSObject* error = Error::create(exec, type, message, -1, -1, NULL);
+ JSObject* error = Error::create(exec, type, message, -1, -1, UString());
exec->setException(error);
return error;
}
#include "JSFunction.h"
#include "JSString.h"
+#include "JSStringBuilder.h"
#include "ObjectPrototype.h"
#include "PrototypeFunction.h"
#include "UString.h"
if (!name.isUndefined()) {
if (!message.isUndefined())
- return jsNontrivialString(exec, makeString(name.toString(exec), ": ", message.toString(exec)));
+ return jsMakeNontrivialString(exec, name.toString(exec), ": ", message.toString(exec));
return jsNontrivialString(exec, name.toString(exec));
}
if (!message.isUndefined())
- return jsNontrivialString(exec, makeString("Error: ", message.toString(exec)));
+ return jsMakeNontrivialString(exec, "Error: ", message.toString(exec));
return jsNontrivialString(exec, "Error");
}
{
}
- virtual bool isWatchdogException() const { return true; }
+ virtual ComplType exceptionType() const { return Interrupted; }
virtual UString toString(ExecState*) const { return "JavaScript execution exceeded timeout."; }
};
return new (globalData) InterruptedExecutionError(globalData);
}
+class TerminatedExecutionError : public JSObject {
+public:
+ TerminatedExecutionError(JSGlobalData* globalData)
+ : JSObject(globalData->terminatedExecutionErrorStructure)
+ {
+ }
+
+ virtual ComplType exceptionType() const { return Terminated; }
+
+ virtual UString toString(ExecState*) const { return "JavaScript execution terminated."; }
+};
+
+JSValue createTerminatedExecutionException(JSGlobalData* globalData)
+{
+ return new (globalData) TerminatedExecutionError(globalData);
+}
+
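
The isWatchdogException() boolean is replaced by a virtual exceptionType(), so each special error object reports the completion kind it should produce (Interrupted for timeouts, Terminated for the new terminated-execution error), and evaluate() above builds the Completion straight from it. A minimal standalone sketch of that dispatch, mirroring the enum above but with hypothetical class names:

#include <cstdio>

enum ComplType { Normal, Break, Continue, ReturnValue, Throw, Interrupted, Terminated };

// Ordinary exception objects complete with Throw; watchdog-style errors
// override exceptionType() so the caller can return the matching completion.
struct ExceptionObject {
    virtual ~ExceptionObject() {}
    virtual ComplType exceptionType() const { return Throw; }
};

struct InterruptedError : ExceptionObject {
    ComplType exceptionType() const override { return Interrupted; }
};

struct TerminatedError : ExceptionObject {
    ComplType exceptionType() const override { return Terminated; }
};

int main()
{
    TerminatedError error;
    ExceptionObject& exception = error;
    std::printf("completion type: %d\n", exception.exceptionType()); // prints 6 (Terminated)
    return 0;
}
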
static JSValue createError(ExecState* exec, ErrorType e, const char* msg)
{
- return Error::create(exec, e, msg, -1, -1, 0);
+ return Error::create(exec, e, msg, -1, -1, UString());
}
JSValue createStackOverflowError(ExecState* exec)
return exception;
}
+JSValue throwOutOfMemoryError(ExecState* exec)
+{
+ return throwError(exec, GeneralError, "Out of memory");
+}
+
} // namespace JSC
struct Instruction;
JSValue createInterruptedExecutionException(JSGlobalData*);
+ JSValue createTerminatedExecutionException(JSGlobalData*);
JSValue createStackOverflowError(ExecState*);
JSValue createTypeError(ExecState*, const char* message);
JSValue createUndefinedVariableError(ExecState*, const Identifier&, unsigned bytecodeOffset, CodeBlock*);
JSObject* createNotAConstructorError(ExecState*, JSValue, unsigned bytecodeOffset, CodeBlock*);
JSValue createNotAFunctionError(ExecState*, JSValue, unsigned bytecodeOffset, CodeBlock*);
JSObject* createNotAnObjectError(ExecState*, JSNotAnObjectErrorStub*, unsigned bytecodeOffset, CodeBlock*);
+ JSValue throwOutOfMemoryError(ExecState*);
} // namespace JSC
void EvalExecutable::generateJITCode(ExecState* exec, ScopeChainNode* scopeChainNode)
{
+#if ENABLE(INTERPRETER)
+ ASSERT(scopeChainNode->globalData->canUseJIT());
+#endif
CodeBlock* codeBlock = &bytecode(exec, scopeChainNode);
m_jitCode = JIT::compile(scopeChainNode->globalData, codeBlock);
void ProgramExecutable::generateJITCode(ExecState* exec, ScopeChainNode* scopeChainNode)
{
+#if ENABLE(INTERPRETER)
+ ASSERT(scopeChainNode->globalData->canUseJIT());
+#endif
CodeBlock* codeBlock = &bytecode(exec, scopeChainNode);
m_jitCode = JIT::compile(scopeChainNode->globalData, codeBlock);
void FunctionExecutable::generateJITCode(ExecState* exec, ScopeChainNode* scopeChainNode)
{
+#if ENABLE(INTERPRETER)
+ ASSERT(scopeChainNode->globalData->canUseJIT());
+#endif
CodeBlock* codeBlock = &bytecode(exec, scopeChainNode);
m_jitCode = JIT::compile(scopeChainNode->globalData, codeBlock);
ASSERT(newCodeBlock->instructionCount() == codeBlock->instructionCount());
#if ENABLE(JIT)
- JITCode newJITCode = JIT::compile(globalData, newCodeBlock.get());
- ASSERT(newJITCode.size() == generatedJITCode().size());
+#if ENABLE(INTERPRETER)
+ if (globalData->canUseJIT())
+#endif
+ {
+ JITCode newJITCode = JIT::compile(globalData, newCodeBlock.get());
+ ASSERT(newJITCode.size() == generatedJITCode().size());
+ }
#endif
globalData->functionCodeBlockBeingReparsed = 0;
ASSERT(newCodeBlock->instructionCount() == codeBlock->instructionCount());
#if ENABLE(JIT)
- JITCode newJITCode = JIT::compile(globalData, newCodeBlock.get());
- ASSERT(newJITCode.size() == generatedJITCode().size());
+#if ENABLE(INTERPRETER)
+ if (globalData->canUseJIT())
+#endif
+ {
+ JITCode newJITCode = JIT::compile(globalData, newCodeBlock.get());
+ ASSERT(newJITCode.size() == generatedJITCode().size());
+ }
#endif
return newCodeBlock->extractExceptionInfo();
builder.append(", ");
builder.append(parameters[pos].ustring());
}
- return builder.release();
+ return builder.build();
}
};
#if ENABLE(JIT)
class NativeExecutable : public ExecutableBase {
public:
- NativeExecutable(ExecState* exec)
+ NativeExecutable(JITCode thunk)
: ExecutableBase(NUM_PARAMETERS_IS_HOST)
{
- m_jitCode = JITCode(JITCode::HostFunction(exec->globalData().jitStubs.ctiNativeCallThunk()));
+ m_jitCode = thunk;
}
~NativeExecutable();
const Identifier& name() { return m_name; }
size_t parameterCount() const { return m_parameters->size(); }
- size_t variableCount() const { return m_numVariables; }
+ unsigned variableCount() const { return m_numVariables; }
UString paramString() const;
void recompile(ExecState*);
private:
FunctionExecutable(JSGlobalData* globalData, const Identifier& name, const SourceCode& source, bool forceUsesArguments, FunctionParameters* parameters, int firstLine, int lastLine)
: ScriptExecutable(globalData, source)
+ , m_numVariables(0)
, m_forceUsesArguments(forceUsesArguments)
, m_parameters(parameters)
, m_codeBlock(0)
, m_name(name)
- , m_numVariables(0)
{
m_firstLine = firstLine;
m_lastLine = lastLine;
FunctionExecutable(ExecState* exec, const Identifier& name, const SourceCode& source, bool forceUsesArguments, FunctionParameters* parameters, int firstLine, int lastLine)
: ScriptExecutable(exec, source)
+ , m_numVariables(0)
, m_forceUsesArguments(forceUsesArguments)
, m_parameters(parameters)
, m_codeBlock(0)
, m_name(name)
- , m_numVariables(0)
{
m_firstLine = firstLine;
m_lastLine = lastLine;
void compile(ExecState*, ScopeChainNode*);
- bool m_forceUsesArguments;
+ unsigned m_numVariables : 31;
+ bool m_forceUsesArguments : 1;
+
RefPtr<FunctionParameters> m_parameters;
CodeBlock* m_codeBlock;
Identifier m_name;
- size_t m_numVariables;
#if ENABLE(JIT)
public:
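
The member changes above pack m_numVariables into a 31-bit field next to m_forceUsesArguments instead of a full size_t, which is also why m_numVariables moved to the front of the initializer lists (members are initialized in declaration order). A small standalone sketch of the space difference; the exact layout is compiler-dependent, and the sketch uses two unsigned bit-fields rather than the unsigned/bool pair above:

#include <cstdio>

// Two 31+1 bit-fields typically share one 32-bit word, whereas a size_t count
// plus a separate bool takes at least two words once padding is counted.
struct PackedCounts {
    unsigned numVariables : 31;
    unsigned forceUsesArguments : 1;
};

struct SeparateCounts {
    bool forceUsesArguments;
    size_t numVariables;
};

int main()
{
    std::printf("packed: %zu bytes, separate: %zu bytes\n",
                sizeof(PackedCounts), sizeof(SeparateCounts));
    return 0;
}
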
builder.append(") { ");
builder.append(args.at(args.size() - 1).toString(exec));
builder.append("\n})");
- program = builder.release();
+ program = builder.build();
}
int errLine;
#include "JSArray.h"
#include "JSFunction.h"
#include "JSString.h"
+#include "JSStringBuilder.h"
#include "Interpreter.h"
#include "Lexer.h"
#include "PrototypeFunction.h"
FunctionExecutable* executable = function->jsExecutable();
UString sourceString = executable->source().toString();
insertSemicolonIfNeeded(sourceString);
- return jsString(exec, makeString("function ", function->name(exec), "(", executable->paramString(), ") ", sourceString));
+ return jsMakeNontrivialString(exec, "function ", function->name(exec), "(", executable->paramString(), ") ", sourceString);
}
}
if (thisValue.inherits(&InternalFunction::info)) {
InternalFunction* function = asInternalFunction(thisValue);
- return jsString(exec, makeString("function ", function->name(exec), "() {\n [native code]\n}"));
+ return jsMakeNontrivialString(exec, "function ", function->name(exec), "() {\n [native code]\n}");
}
return throwError(exec, TypeError);
// This is an internal value object which stores getter and setter functions
// for a property.
class GetterSetter : public JSCell {
+ friend class JIT;
public:
GetterSetter(ExecState* exec)
: JSCell(exec->globalData().getterSetterStructure.get())
#include "Identifier.h"
#include "CallFrame.h"
+#include "NumericStrings.h"
#include <new> // for placement new
#include <string.h> // for strlen
#include <wtf/Assertions.h>
#include <wtf/FastMalloc.h>
#include <wtf/HashSet.h>
+#include <wtf/WTFThreadData.h>
+#include <wtf/text/StringHash.h>
using WTF::ThreadSpecific;
namespace JSC {
-typedef HashMap<const char*, RefPtr<UString::Rep>, PtrHash<const char*> > LiteralIdentifierTable;
-
-class IdentifierTable : public FastAllocBase {
-public:
- ~IdentifierTable()
- {
- HashSet<UString::Rep*>::iterator end = m_table.end();
- for (HashSet<UString::Rep*>::iterator iter = m_table.begin(); iter != end; ++iter)
- (*iter)->setIsIdentifier(false);
- }
-
- std::pair<HashSet<UString::Rep*>::iterator, bool> add(UString::Rep* value)
- {
- std::pair<HashSet<UString::Rep*>::iterator, bool> result = m_table.add(value);
- (*result.first)->setIsIdentifier(true);
- return result;
- }
-
- template<typename U, typename V>
- std::pair<HashSet<UString::Rep*>::iterator, bool> add(U value)
- {
- std::pair<HashSet<UString::Rep*>::iterator, bool> result = m_table.add<U, V>(value);
- (*result.first)->setIsIdentifier(true);
- return result;
- }
-
- void remove(UString::Rep* r) { m_table.remove(r); }
-
- LiteralIdentifierTable& literalTable() { return m_literalTable; }
-
-private:
- HashSet<UString::Rep*> m_table;
- LiteralIdentifierTable m_literalTable;
-};
+IdentifierTable::~IdentifierTable()
+{
+ HashSet<StringImpl*>::iterator end = m_table.end();
+ for (HashSet<StringImpl*>::iterator iter = m_table.begin(); iter != end; ++iter)
+ (*iter)->setIsIdentifier(false);
+}
+std::pair<HashSet<StringImpl*>::iterator, bool> IdentifierTable::add(StringImpl* value)
+{
+ std::pair<HashSet<StringImpl*>::iterator, bool> result = m_table.add(value);
+ (*result.first)->setIsIdentifier(true);
+ return result;
+}
+template<typename U, typename V>
+std::pair<HashSet<StringImpl*>::iterator, bool> IdentifierTable::add(U value)
+{
+ std::pair<HashSet<StringImpl*>::iterator, bool> result = m_table.add<U, V>(value);
+ (*result.first)->setIsIdentifier(true);
+ return result;
+}
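
IdentifierTable keeps a set of string reps and flips an is-identifier bit on each entry it holds, clearing those bits again when the table is destroyed. A stripped-down standalone analogue of that bookkeeping, with StringImpl replaced by a hypothetical struct:

#include <string>
#include <unordered_set>

// Hypothetical stand-in for StringImpl: shared string data plus a flag saying
// whether some identifier table currently holds it.
struct Impl {
    std::string characters;
    bool isIdentifier = false;
};

struct InternTable {
    std::unordered_set<Impl*> table;

    // Entries can outlive the table, so clear their flags on destruction,
    // matching IdentifierTable::~IdentifierTable() above.
    ~InternTable()
    {
        for (Impl* impl : table)
            impl->isIdentifier = false;
    }

    // Insert (or find) the entry and mark it, matching IdentifierTable::add().
    Impl* add(Impl* value)
    {
        std::pair<std::unordered_set<Impl*>::iterator, bool> result = table.insert(value);
        (*result.first)->isIdentifier = true;
        return *result.first;
    }
};

int main()
{
    Impl hello;
    hello.characters = "hello";
    InternTable table;
    table.add(&hello);   // hello.isIdentifier is now true
    return 0;
}
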
IdentifierTable* createIdentifierTable()
{
bool Identifier::equal(const UString::Rep* r, const char* s)
{
- int length = r->size();
- const UChar* d = r->data();
+ int length = r->length();
+ const UChar* d = r->characters();
for (int i = 0; i != length; ++i)
if (d[i] != (unsigned char)s[i])
return false;
return s[length] == 0;
}
-bool Identifier::equal(const UString::Rep* r, const UChar* s, int length)
+bool Identifier::equal(const UString::Rep* r, const UChar* s, unsigned length)
{
- if (r->size() != length)
+ if (r->length() != length)
return false;
- const UChar* d = r->data();
- for (int i = 0; i != length; ++i)
+ const UChar* d = r->characters();
+ for (unsigned i = 0; i != length; ++i)
if (d[i] != s[i])
return false;
return true;
}
-struct CStringTranslator {
+struct IdentifierCStringTranslator {
static unsigned hash(const char* c)
{
return UString::Rep::computeHash(c);
PassRefPtr<UString::Rep> Identifier::add(JSGlobalData* globalData, const char* c)
{
- if (!c) {
- UString::Rep::null().hash();
- return &UString::Rep::null();
- }
- if (!c[0]) {
- UString::Rep::empty().hash();
- return &UString::Rep::empty();
- }
+ if (!c)
+ return UString::null().rep();
+ if (!c[0])
+ return UString::Rep::empty();
if (!c[1])
return add(globalData, globalData->smallStrings.singleCharacterStringRep(static_cast<unsigned char>(c[0])));
if (iter != literalIdentifierTable.end())
return iter->second;
- pair<HashSet<UString::Rep*>::iterator, bool> addResult = identifierTable.add<const char*, CStringTranslator>(c);
+ pair<HashSet<UString::Rep*>::iterator, bool> addResult = identifierTable.add<const char*, IdentifierCStringTranslator>(c);
// If the string is newly-translated, then we need to adopt it.
// The boolean in the pair tells us if that is so.
unsigned int length;
};
-struct UCharBufferTranslator {
+struct IdentifierUCharBufferTranslator {
static unsigned hash(const UCharBuffer& buf)
{
return UString::Rep::computeHash(buf.s, buf.length);
if (c <= 0xFF)
return add(globalData, globalData->smallStrings.singleCharacterStringRep(c));
}
- if (!length) {
- UString::Rep::empty().hash();
- return &UString::Rep::empty();
- }
+ if (!length)
+ return UString::Rep::empty();
UCharBuffer buf = {s, length};
- pair<HashSet<UString::Rep*>::iterator, bool> addResult = globalData->identifierTable->add<UCharBuffer, UCharBufferTranslator>(buf);
+ pair<HashSet<UString::Rep*>::iterator, bool> addResult = globalData->identifierTable->add<UCharBuffer, IdentifierUCharBufferTranslator>(buf);
// If the string is newly-translated, then we need to adopt it.
// The boolean in the pair tells us if that is so.
PassRefPtr<UString::Rep> Identifier::addSlowCase(JSGlobalData* globalData, UString::Rep* r)
{
ASSERT(!r->isIdentifier());
- if (r->size() == 1) {
- UChar c = r->data()[0];
+ // The empty & null strings are static singletons, and static strings are handled
+ // in ::add() in the header, so we should never get here with a zero length string.
+ ASSERT(r->length());
+
+ if (r->length() == 1) {
+ UChar c = r->characters()[0];
if (c <= 0xFF)
r = globalData->smallStrings.singleCharacterStringRep(c);
- if (r->isIdentifier()) {
-#ifndef NDEBUG
- checkSameIdentifierTable(globalData, r);
-#endif
+ if (r->isIdentifier())
return r;
- }
- }
- if (!r->size()) {
- UString::Rep::empty().hash();
- return &UString::Rep::empty();
}
+
return *globalData->identifierTable->add(r).first;
}
return addSlowCase(&exec->globalData(), r);
}
-void Identifier::remove(UString::Rep* r)
+Identifier Identifier::from(ExecState* exec, unsigned value)
{
- currentIdentifierTable()->remove(r);
+ return Identifier(exec, exec->globalData().numericStrings.add(value));
}
-#ifndef NDEBUG
-
-void Identifier::checkSameIdentifierTable(ExecState* exec, UString::Rep*)
+Identifier Identifier::from(ExecState* exec, int value)
{
- ASSERT_UNUSED(exec, exec->globalData().identifierTable == currentIdentifierTable());
+ return Identifier(exec, exec->globalData().numericStrings.add(value));
}
-void Identifier::checkSameIdentifierTable(JSGlobalData* globalData, UString::Rep*)
+Identifier Identifier::from(ExecState* exec, double value)
{
- ASSERT_UNUSED(globalData, globalData->identifierTable == currentIdentifierTable());
+ return Identifier(exec, exec->globalData().numericStrings.add(value));
}
-#else
-
-void Identifier::checkSameIdentifierTable(ExecState*, UString::Rep*)
-{
-}
+#ifndef NDEBUG
-void Identifier::checkSameIdentifierTable(JSGlobalData*, UString::Rep*)
+void Identifier::checkCurrentIdentifierTable(JSGlobalData* globalData)
{
+ // Check that the identifier table accessible through the thread-specific data
+ // matches the globalData's identifier table.
+ ASSERT_UNUSED(globalData, globalData->identifierTable == wtfThreadData().currentIdentifierTable());
}
-#endif
-
-ThreadSpecific<ThreadIdentifierTableData>* g_identifierTableSpecific = 0;
-ThreadIdentifierTableData* g_identifierTableMain = 0;
-
-#if ENABLE(JSC_MULTIPLE_THREADS)
-
-pthread_once_t createIdentifierTableSpecificOnce = PTHREAD_ONCE_INIT;
-static void createIdentifierTableSpecificCallback()
+void Identifier::checkCurrentIdentifierTable(ExecState* exec)
{
- ASSERT(!g_identifierTableSpecific);
- g_identifierTableSpecific = new ThreadSpecific<ThreadIdentifierTableData>();
- ASSERT(!g_identifierTableMain);
- g_identifierTableMain = new ThreadIdentifierTableData();
-}
-void createIdentifierTableSpecific()
-{
- pthread_once(&createIdentifierTableSpecificOnce, createIdentifierTableSpecificCallback);
- ASSERT(g_identifierTableSpecific);
- ASSERT(g_identifierTableMain);
+ checkCurrentIdentifierTable(&exec->globalData());
}
-#else
+#else
-void createIdentifierTableSpecific()
-{
- ASSERT(!g_identifierTableSpecific);
- g_identifierTableSpecific = new ThreadSpecific<ThreadIdentifierTableData>();
- ASSERT(!g_identifierTableMain);
- g_identifierTableMain = new ThreadIdentifierTableData();
-}
+// These only exist so that our exports are the same for debug and release builds.
+// This would be an ASSERT_NOT_REACHED(), but we're in NDEBUG only code here!
+void Identifier::checkCurrentIdentifierTable(JSGlobalData*) { CRASH(); }
+void Identifier::checkCurrentIdentifierTable(ExecState*) { CRASH(); }
#endif
const char* ascii() const { return _ustring.ascii(); }
- static Identifier from(ExecState* exec, unsigned y) { return Identifier(exec, UString::from(y)); }
- static Identifier from(ExecState* exec, int y) { return Identifier(exec, UString::from(y)); }
- static Identifier from(ExecState* exec, double y) { return Identifier(exec, UString::from(y)); }
+ static Identifier from(ExecState* exec, unsigned y);
+ static Identifier from(ExecState* exec, int y);
+ static Identifier from(ExecState* exec, double y);
bool isNull() const { return _ustring.isNull(); }
bool isEmpty() const { return _ustring.isEmpty(); }
friend bool operator==(const Identifier&, const char*);
friend bool operator!=(const Identifier&, const char*);
- static void remove(UString::Rep*);
-
static bool equal(const UString::Rep*, const char*);
- static bool equal(const UString::Rep*, const UChar*, int length);
- static bool equal(const UString::Rep* a, const UString::Rep* b) { return JSC::equal(a, b); }
+ static bool equal(const UString::Rep*, const UChar*, unsigned length);
+ static bool equal(const UString::Rep* a, const UString::Rep* b) { return ::equal(a, b); }
static PassRefPtr<UString::Rep> add(ExecState*, const char*); // Only to be used with string literals.
static PassRefPtr<UString::Rep> add(JSGlobalData*, const char*); // Only to be used with string literals.
static PassRefPtr<UString::Rep> add(ExecState* exec, UString::Rep* r)
{
- if (r->isIdentifier()) {
#ifndef NDEBUG
- checkSameIdentifierTable(exec, r);
+ checkCurrentIdentifierTable(exec);
#endif
+ if (r->isIdentifier())
return r;
- }
return addSlowCase(exec, r);
}
static PassRefPtr<UString::Rep> add(JSGlobalData* globalData, UString::Rep* r)
{
- if (r->isIdentifier()) {
#ifndef NDEBUG
- checkSameIdentifierTable(globalData, r);
+ checkCurrentIdentifierTable(globalData);
#endif
+ if (r->isIdentifier())
return r;
- }
return addSlowCase(globalData, r);
}
static PassRefPtr<UString::Rep> addSlowCase(ExecState*, UString::Rep* r);
static PassRefPtr<UString::Rep> addSlowCase(JSGlobalData*, UString::Rep* r);
- static void checkSameIdentifierTable(ExecState*, UString::Rep*);
- static void checkSameIdentifierTable(JSGlobalData*, UString::Rep*);
+ static void checkCurrentIdentifierTable(ExecState*);
+ static void checkCurrentIdentifierTable(JSGlobalData*);
};
inline bool operator==(const Identifier& a, const Identifier& b)
IdentifierTable* createIdentifierTable();
void deleteIdentifierTable(IdentifierTable*);
- struct ThreadIdentifierTableData {
- ThreadIdentifierTableData()
- : defaultIdentifierTable(0)
- , currentIdentifierTable(0)
- {
- }
-
- IdentifierTable* defaultIdentifierTable;
- IdentifierTable* currentIdentifierTable;
- };
-
- extern WTF::ThreadSpecific<ThreadIdentifierTableData>* g_identifierTableSpecific;
- extern ThreadIdentifierTableData* g_identifierTableMain;
- void createIdentifierTableSpecific();
-
- inline IdentifierTable* defaultIdentifierTable()
- {
- if (isMainThread() || pthread_main_np()) {
- if (!g_identifierTableMain)
- createIdentifierTableSpecific();
- return g_identifierTableMain->defaultIdentifierTable;
- }
- if (!g_identifierTableSpecific)
- createIdentifierTableSpecific();
- ThreadIdentifierTableData& data = **g_identifierTableSpecific;
-
- return data.defaultIdentifierTable;
- }
-
- inline void setDefaultIdentifierTable(IdentifierTable* identifierTable)
- {
- if (isMainThread() || pthread_main_np()) {
- if (!g_identifierTableMain)
- createIdentifierTableSpecific();
- g_identifierTableMain->defaultIdentifierTable = identifierTable;
- return;
- }
- if (!g_identifierTableSpecific)
- createIdentifierTableSpecific();
- ThreadIdentifierTableData& data = **g_identifierTableSpecific;
-
- data.defaultIdentifierTable = identifierTable;
- }
-
- inline IdentifierTable* currentIdentifierTable()
- {
- if (isMainThread() || pthread_main_np()) {
- if (!g_identifierTableMain)
- createIdentifierTableSpecific();
- return g_identifierTableMain->currentIdentifierTable;
- }
- if (!g_identifierTableSpecific)
- createIdentifierTableSpecific();
- ThreadIdentifierTableData& data = **g_identifierTableSpecific;
-
- return data.currentIdentifierTable;
- }
-
- inline IdentifierTable* setCurrentIdentifierTable(IdentifierTable* identifierTable)
- {
- if (isMainThread() || pthread_main_np()) {
- if (!g_identifierTableMain)
- createIdentifierTableSpecific();
- IdentifierTable* oldIdentifierTable = g_identifierTableMain->currentIdentifierTable;
- g_identifierTableMain->currentIdentifierTable = identifierTable;
- return oldIdentifierTable;
- }
- if (!g_identifierTableSpecific)
- createIdentifierTableSpecific();
- ThreadIdentifierTableData& data = **g_identifierTableSpecific;
-
- IdentifierTable* oldIdentifierTable = data.currentIdentifierTable;
- data.currentIdentifierTable = identifierTable;
- return oldIdentifierTable;
- }
-
- inline void resetCurrentIdentifierTable()
- {
- if (isMainThread() || pthread_main_np()) {
- if (!g_identifierTableMain)
- createIdentifierTableSpecific();
- g_identifierTableMain->currentIdentifierTable = g_identifierTableMain->defaultIdentifierTable;
- return;
- }
- if (!g_identifierTableSpecific)
- createIdentifierTableSpecific();
- ThreadIdentifierTableData& data = **g_identifierTableSpecific;
-
- data.currentIdentifierTable = data.defaultIdentifierTable;
- }
-
} // namespace JSC
#endif // Identifier_h
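
All of the hand-rolled ThreadSpecific / pthread_once plumbing removed above is what wtfThreadData() now stands in for: one per-thread object owning the default and current identifier tables. A compact sketch of that shape using thread_local; the names are hypothetical and this is not WTF's actual API:

struct IdentifierTable;   // opaque here; only pointers are stored

// One instance per thread, holding the thread's default string-uniquing table
// and whichever table is currently active (swapped by API entry/exit shims).
struct ThreadDataSketch {
    IdentifierTable* defaultIdentifierTable = nullptr;
    IdentifierTable* currentIdentifierTable = nullptr;

    IdentifierTable* setCurrentIdentifierTable(IdentifierTable* table)
    {
        IdentifierTable* previous = currentIdentifierTable;
        currentIdentifierTable = table;
        return previous;   // callers restore this when the shim goes out of scope
    }

    void resetCurrentIdentifierTable()
    {
        currentIdentifierTable = defaultIdentifierTable;
    }
};

inline ThreadDataSketch& threadDataSketch()
{
    static thread_local ThreadDataSketch data;   // constructed lazily, once per thread
    return data;
}
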
#include "UString.h"
#include <wtf/DateMath.h>
#include <wtf/Threading.h>
+#include <wtf/WTFThreadData.h>
using namespace WTF;
static void initializeThreadingOnce()
{
WTF::initializeThreading();
+ wtfThreadData();
initializeUString();
JSGlobalData::storeVPtrs();
#if ENABLE(JSC_MULTIPLE_THREADS)
InternalFunction::InternalFunction(JSGlobalData* globalData, NonNullPassRefPtr<Structure> structure, const Identifier& name)
: JSObject(structure)
{
- putDirect(globalData->propertyNames->name, jsString(globalData, name.ustring()), DontDelete | ReadOnly | DontEnum);
+ putDirect(globalData->propertyNames->name, jsString(globalData, name.isNull() ? "" : name.ustring()), DontDelete | ReadOnly | DontEnum);
}
const UString& InternalFunction::name(ExecState* exec)
{
- return asString(getDirect(exec->globalData().propertyNames->name))->value(exec);
+ return asString(getDirect(exec->globalData().propertyNames->name))->tryGetValue();
}
const UString InternalFunction::displayName(ExecState* exec)
JSValue displayName = getDirect(exec->globalData().propertyNames->displayName);
if (displayName && isJSString(&exec->globalData(), displayName))
- return asString(displayName)->value(exec);
+ return asString(displayName)->tryGetValue();
return UString::null();
}
#ifndef JSAPIValueWrapper_h
#define JSAPIValueWrapper_h
-#include <wtf/Platform.h>
-
#include "JSCell.h"
#include "CallFrame.h"
return exec->globalThisValue();
}
-bool JSActivation::isDynamicScope() const
+bool JSActivation::isDynamicScope(bool& requiresDynamicChecks) const
{
- return d()->functionExecutable->usesEval();
+ requiresDynamicChecks = d()->functionExecutable->usesEval();
+ return false;
}
-JSValue JSActivation::argumentsGetter(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue JSActivation::argumentsGetter(ExecState* exec, JSValue slotBase, const Identifier&)
{
- JSActivation* activation = asActivation(slot.slotBase());
+ JSActivation* activation = asActivation(slotBase);
if (activation->d()->functionExecutable->usesArguments()) {
PropertySlot slot;
#include "CodeBlock.h"
#include "JSVariableObject.h"
-#include "RegisterFile.h"
#include "SymbolTable.h"
#include "Nodes.h"
virtual void markChildren(MarkStack&);
- virtual bool isDynamicScope() const;
+ virtual bool isDynamicScope(bool& requiresDynamicChecks) const;
virtual bool isActivationObject() const { return true; }
RefPtr<FunctionExecutable> functionExecutable;
};
- static JSValue argumentsGetter(ExecState*, const Identifier&, const PropertySlot&);
+ static JSValue argumentsGetter(ExecState*, JSValue, const Identifier&);
NEVER_INLINE PropertySlot::GetValueFunc getArgumentsGetter();
JSActivationData* d() const { return static_cast<JSActivationData*>(JSVariableObject::d); }
m_vectorLength = initialCapacity;
m_storage->m_numValuesInVector = 0;
m_storage->m_sparseValueMap = 0;
- m_storage->lazyCreationData = 0;
+ m_storage->subclassData = 0;
m_storage->reportedMapCapacity = 0;
JSValue* vector = m_storage->m_vector;
m_vectorLength = initialCapacity;
m_storage->m_numValuesInVector = initialCapacity;
m_storage->m_sparseValueMap = 0;
- m_storage->lazyCreationData = 0;
+ m_storage->subclassData = 0;
m_storage->reportedMapCapacity = 0;
size_t i = 0;
return numDefined;
}
-void* JSArray::lazyCreationData()
+void* JSArray::subclassData() const
{
- return m_storage->lazyCreationData;
+ return m_storage->subclassData;
}
-void JSArray::setLazyCreationData(void* d)
+void JSArray::setSubclassData(void* d)
{
- m_storage->lazyCreationData = d;
+ m_storage->subclassData = d;
}
#if CHECK_ARRAY_CONSISTENCY
unsigned m_length;
unsigned m_numValuesInVector;
SparseArrayValueMap* m_sparseValueMap;
- void* lazyCreationData; // A JSArray subclass can use this to fill the vector lazily.
+ void* subclassData; // A JSArray subclass can use this to fill the vector lazily.
size_t reportedMapCapacity;
JSValue m_vector[1];
};
virtual void getOwnPropertyNames(ExecState*, PropertyNameArray&, EnumerationMode mode = ExcludeDontEnumProperties);
virtual void markChildren(MarkStack&);
- void* lazyCreationData();
- void setLazyCreationData(void*);
+ void* subclassData() const;
+ void setSubclassData(void*);
private:
virtual const ClassInfo* classInfo() const { return &info; }
return toObject(exec);
}
-UString JSCell::toThisString(ExecState* exec) const
-{
- return toThisObject(exec)->toString(exec);
-}
-
-JSString* JSCell::toThisJSString(ExecState* exec)
-{
- return jsString(exec, toThisString(exec));
-}
-
const ClassInfo* JSCell::classInfo() const
{
return 0;
virtual bool deleteProperty(ExecState*, unsigned propertyName);
virtual JSObject* toThisObject(ExecState*) const;
- virtual UString toThisString(ExecState*) const;
- virtual JSString* toThisJSString(ExecState*);
virtual JSValue getJSNumber();
void* vptr() { return *reinterpret_cast<void**>(this); }
void setVPtr(void* vptr) { *reinterpret_cast<void**>(this) = vptr; }
+ // FIXME: Rename getOwnPropertySlot to virtualGetOwnPropertySlot, and
+ // fastGetOwnPropertySlot to getOwnPropertySlot. Callers should always
+ // call this function, not its slower virtual counterpart. (For integer
+ // property names, we want a similar interface with appropriate optimizations.)
+ bool fastGetOwnPropertySlot(ExecState*, const Identifier& propertyName, PropertySlot&);
+
protected:
static const unsigned AnonymousSlotCount = 0;
private:
// Base implementation; for non-object classes implements getPropertySlot.
- bool fastGetOwnPropertySlot(ExecState*, const Identifier& propertyName, PropertySlot&);
virtual bool getOwnPropertySlot(ExecState*, const Identifier& propertyName, PropertySlot&);
virtual bool getOwnPropertySlot(ExecState*, unsigned propertyName, PropertySlot&);
return asCell()->structure()->typeInfo().needsThisConversion();
}
- inline UString JSValue::toThisString(ExecState* exec) const
- {
- return isCell() ? asCell()->toThisString(exec) : toString(exec);
- }
-
inline JSValue JSValue::getJSNumber()
{
if (isInt32() || isDouble())
{
}
+JSFunction::JSFunction(ExecState* exec, NonNullPassRefPtr<Structure> structure, int length, const Identifier& name, NativeExecutable* thunk, NativeFunction func)
+ : Base(&exec->globalData(), structure, name)
+#if ENABLE(JIT)
+ , m_executable(thunk)
+#endif
+{
+#if ENABLE(JIT)
+ setNativeFunction(func);
+ putDirect(exec->propertyNames().length, jsNumber(exec, length), DontDelete | ReadOnly | DontEnum);
+#else
+ UNUSED_PARAM(thunk);
+ UNUSED_PARAM(length);
+ UNUSED_PARAM(func);
+ ASSERT_NOT_REACHED();
+#endif
+}
+
JSFunction::JSFunction(ExecState* exec, NonNullPassRefPtr<Structure> structure, int length, const Identifier& name, NativeFunction func)
: Base(&exec->globalData(), structure, name)
#if ENABLE(JIT)
- , m_executable(adoptRef(new NativeExecutable(exec)))
+ , m_executable(exec->globalData().jitStubs->ctiNativeCallThunk())
#endif
{
#if ENABLE(JIT)
return exec->interpreter()->execute(jsExecutable(), exec, this, thisValue.toThisObject(exec), args, scopeChain().node(), exec->exceptionSlot());
}
-JSValue JSFunction::argumentsGetter(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue JSFunction::argumentsGetter(ExecState* exec, JSValue slotBase, const Identifier&)
{
- JSFunction* thisObj = asFunction(slot.slotBase());
+ JSFunction* thisObj = asFunction(slotBase);
ASSERT(!thisObj->isHostFunction());
return exec->interpreter()->retrieveArguments(exec, thisObj);
}
-JSValue JSFunction::callerGetter(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue JSFunction::callerGetter(ExecState* exec, JSValue slotBase, const Identifier&)
{
- JSFunction* thisObj = asFunction(slot.slotBase());
+ JSFunction* thisObj = asFunction(slotBase);
ASSERT(!thisObj->isHostFunction());
return exec->interpreter()->retrieveCaller(exec, thisObj);
}
-JSValue JSFunction::lengthGetter(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue JSFunction::lengthGetter(ExecState* exec, JSValue slotBase, const Identifier&)
{
- JSFunction* thisObj = asFunction(slot.slotBase());
+ JSFunction* thisObj = asFunction(slotBase);
ASSERT(!thisObj->isHostFunction());
return jsNumber(exec, thisObj->jsExecutable()->parameterCount());
}
}
if (propertyName == exec->propertyNames().arguments) {
- slot.setCustom(this, argumentsGetter);
+ slot.setCacheableCustom(this, argumentsGetter);
return true;
}
if (propertyName == exec->propertyNames().length) {
- slot.setCustom(this, lengthGetter);
+ slot.setCacheableCustom(this, lengthGetter);
return true;
}
if (propertyName == exec->propertyNames().caller) {
- slot.setCustom(this, callerGetter);
+ slot.setCacheableCustom(this, callerGetter);
return true;
}
class FunctionPrototype;
class JSActivation;
class JSGlobalObject;
+ class NativeExecutable;
class JSFunction : public InternalFunction {
friend class JIT;
public:
JSFunction(ExecState*, NonNullPassRefPtr<Structure>, int length, const Identifier&, NativeFunction);
+ JSFunction(ExecState*, NonNullPassRefPtr<Structure>, int length, const Identifier&, NativeExecutable*, NativeFunction);
JSFunction(ExecState*, NonNullPassRefPtr<FunctionExecutable>, ScopeChainNode*);
virtual ~JSFunction();
virtual const ClassInfo* classInfo() const { return &info; }
- static JSValue argumentsGetter(ExecState*, const Identifier&, const PropertySlot&);
- static JSValue callerGetter(ExecState*, const Identifier&, const PropertySlot&);
- static JSValue lengthGetter(ExecState*, const Identifier&, const PropertySlot&);
+ static JSValue argumentsGetter(ExecState*, JSValue, const Identifier&);
+ static JSValue callerGetter(ExecState*, JSValue, const Identifier&);
+ static JSValue lengthGetter(ExecState*, JSValue, const Identifier&);
RefPtr<ExecutableBase> m_executable;
ScopeChain& scopeChain()
#include "Lookup.h"
#include "Nodes.h"
#include "Parser.h"
+#include "RegExpCache.h"
+#include <wtf/WTFThreadData.h>
#if ENABLE(JSC_MULTIPLE_THREADS)
#include <wtf/Threading.h>
#if PLATFORM(MAC)
#include "ProfilerServer.h"
+#include <CoreFoundation/CoreFoundation.h>
#endif
using namespace WTF;
jsFunction->~JSCell();
}
-JSGlobalData::JSGlobalData(bool isShared)
- : isSharedInstance(isShared)
+JSGlobalData::JSGlobalData(GlobalDataType globalDataType, ThreadStackType threadStackType)
+ : globalDataType(globalDataType)
, clientData(0)
, arrayTable(fastNew<HashTable>(JSC::arrayTable))
, dateTable(fastNew<HashTable>(JSC::dateTable))
, stringTable(fastNew<HashTable>(JSC::stringTable))
, activationStructure(JSActivation::createStructure(jsNull()))
, interruptedExecutionErrorStructure(JSObject::createStructure(jsNull()))
+ , terminatedExecutionErrorStructure(JSObject::createStructure(jsNull()))
, staticScopeStructure(JSStaticScopeObject::createStructure(jsNull()))
, stringStructure(JSString::createStructure(jsNull()))
, notAnObjectErrorStubStructure(JSNotAnObjectErrorStub::createStructure(jsNull()))
#if USE(JSVALUE32)
, numberStructure(JSNumberCell::createStructure(jsNull()))
#endif
- , identifierTable(createIdentifierTable())
+ , identifierTable(globalDataType == Default ? wtfThreadData().currentIdentifierTable() : createIdentifierTable())
, propertyNames(new CommonIdentifiers(this))
, emptyList(new MarkedArgumentBuffer)
, lexer(new Lexer(this))
, parser(new Parser)
, interpreter(new Interpreter)
-#if ENABLE(JIT)
- , jitStubs(this)
-#endif
, heap(this)
, initializingLazyNumericCompareFunction(false)
, head(0)
, firstStringifierToMark(0)
, markStack(jsArrayVPtr)
, cachedUTCOffset(NaN)
- , weakRandom(static_cast<int>(currentTime()))
+ , maxReentryDepth(threadStackType == ThreadStackTypeSmall ? MaxSmallThreadReentryDepth : MaxLargeThreadReentryDepth)
+ , m_regExpCache(new RegExpCache(this))
#ifndef NDEBUG
- , mainThreadOnly(false)
+ , exclusiveThread(0)
#endif
{
#if PLATFORM(MAC)
startProfilerServerIfNeeded();
#endif
+#if ENABLE(JIT) && ENABLE(INTERPRETER)
+#if PLATFORM(CF)
+ CFStringRef canUseJITKey = CFStringCreateWithCString(0, "JavaScriptCoreUseJIT", kCFStringEncodingMacRoman);
+ CFBooleanRef canUseJIT = (CFBooleanRef)CFPreferencesCopyAppValue(canUseJITKey, kCFPreferencesCurrentApplication);
+ if (canUseJIT) {
+ m_canUseJIT = kCFBooleanTrue == canUseJIT;
+ CFRelease(canUseJIT);
+ } else
+ m_canUseJIT = !getenv("JavaScriptCoreUseJIT");
+ CFRelease(canUseJITKey);
+#elif OS(UNIX)
+ m_canUseJIT = !getenv("JavaScriptCoreUseJIT");
+#else
+ m_canUseJIT = true;
+#endif
+#endif
+#if ENABLE(JIT)
+#if ENABLE(INTERPRETER)
+ if (m_canUseJIT)
+ m_canUseJIT = executableAllocator.isValid();
+#endif
+ jitStubs.set(new JITThunks(this));
+#endif
}
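
The block above settles m_canUseJIT once at construction: a preference or environment variable can opt out, and even when allowed the JIT is only used if executable memory could actually be allocated. A hedged standalone sketch of that decision; the variable name here is hypothetical, not the one JavaScriptCore reads:

#include <cstdlib>

// Combine an opt-out switch with a capability probe; both must pass for the
// JIT to be considered usable on this configuration.
bool computeCanUseJIT(bool executableAllocatorIsValid)
{
    bool enabledByUser = !std::getenv("MYLIB_DISABLE_JIT");   // hypothetical opt-out variable
    return enabledByUser && executableAllocatorIsValid;
}
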
JSGlobalData::~JSGlobalData()
delete emptyList;
delete propertyNames;
- deleteIdentifierTable(identifierTable);
+ if (globalDataType != Default)
+ deleteIdentifierTable(identifierTable);
delete clientData;
+ delete m_regExpCache;
}
-PassRefPtr<JSGlobalData> JSGlobalData::createNonDefault()
+PassRefPtr<JSGlobalData> JSGlobalData::createContextGroup(ThreadStackType type)
{
- return adoptRef(new JSGlobalData(false));
+ return adoptRef(new JSGlobalData(APIContextGroup, type));
}
-PassRefPtr<JSGlobalData> JSGlobalData::create()
+PassRefPtr<JSGlobalData> JSGlobalData::create(ThreadStackType type)
{
- JSGlobalData* globalData = new JSGlobalData(false);
- setDefaultIdentifierTable(globalData->identifierTable);
- setCurrentIdentifierTable(globalData->identifierTable);
- return adoptRef(globalData);
+ return adoptRef(new JSGlobalData(Default, type));
}
-PassRefPtr<JSGlobalData> JSGlobalData::createLeaked()
+PassRefPtr<JSGlobalData> JSGlobalData::createLeaked(ThreadStackType type)
{
Structure::startIgnoringLeaks();
- RefPtr<JSGlobalData> data = create();
+ RefPtr<JSGlobalData> data = create(type);
Structure::stopIgnoringLeaks();
return data.release();
}
{
JSGlobalData*& instance = sharedInstanceInternal();
if (!instance) {
- instance = new JSGlobalData(true);
+ instance = new JSGlobalData(APIShared, ThreadStackTypeSmall);
#if ENABLE(JSC_MULTIPLE_THREADS)
instance->makeUsableFromMultipleThreads();
#endif
#ifndef JSGlobalData_h
#define JSGlobalData_h
+#include "CachedTranscendentalFunction.h"
#include "Collector.h"
#include "DateInstanceCache.h"
#include "ExecutableAllocator.h"
#include "MarkStack.h"
#include "NumericStrings.h"
#include "SmallStrings.h"
+#include "Terminator.h"
#include "TimeoutChecker.h"
#include "WeakRandom.h"
#include <wtf/Forward.h>
class JSObject;
class Lexer;
class Parser;
+ class RegExpCache;
class Stringifier;
class Structure;
class UString;
double increment;
};
+ enum ThreadStackType {
+ ThreadStackTypeLarge,
+ ThreadStackTypeSmall
+ };
+
class JSGlobalData : public RefCounted<JSGlobalData> {
public:
+ // WebCore has a one-to-one mapping of threads to JSGlobalDatas;
+ // either create() or createLeaked() should only be called once per
+ // thread, and the result is the 'default' JSGlobalData (it uses the
+ // thread's default string uniquing table from wtfThreadData).
+ // API contexts created through the new context-group-aware interface
+ // get APIContextGroup objects, which require less locking of JSC
+ // than the old singleton APIShared JSGlobalData created for use by
+ // the original API.
+ enum GlobalDataType { Default, APIContextGroup, APIShared };
+
struct ClientData {
virtual ~ClientData() = 0;
};
+ bool isSharedInstance() { return globalDataType == APIShared; }
static bool sharedInstanceExists();
static JSGlobalData& sharedInstance();
- static PassRefPtr<JSGlobalData> create();
- static PassRefPtr<JSGlobalData> createLeaked();
- static PassRefPtr<JSGlobalData> createNonDefault();
+ static PassRefPtr<JSGlobalData> create(ThreadStackType);
+ static PassRefPtr<JSGlobalData> createLeaked(ThreadStackType);
+ static PassRefPtr<JSGlobalData> createContextGroup(ThreadStackType);
~JSGlobalData();
#if ENABLE(JSC_MULTIPLE_THREADS)
void makeUsableFromMultipleThreads() { heap.makeUsableFromMultipleThreads(); }
#endif
- bool isSharedInstance;
+ GlobalDataType globalDataType;
ClientData* clientData;
const HashTable* arrayTable;
RefPtr<Structure> activationStructure;
RefPtr<Structure> interruptedExecutionErrorStructure;
+ RefPtr<Structure> terminatedExecutionErrorStructure;
RefPtr<Structure> staticScopeStructure;
RefPtr<Structure> stringStructure;
RefPtr<Structure> notAnObjectErrorStubStructure;
ExecutableAllocator executableAllocator;
#endif
+#if ENABLE(JIT)
+#if ENABLE(INTERPRETER)
+ bool canUseJIT() { return m_canUseJIT; }
+#endif
+#else
+ bool canUseJIT() { return false; }
+#endif
Lexer* lexer;
Parser* parser;
Interpreter* interpreter;
#if ENABLE(JIT)
- JITThunks jitStubs;
+ OwnPtr<JITThunks> jitStubs;
+ NativeExecutable* getThunk(ThunkGenerator generator)
+ {
+ return jitStubs->specializedThunk(this, generator);
+ }
#endif
TimeoutChecker timeoutChecker;
+ Terminator terminator;
Heap heap;
JSValue exception;
UString cachedDateString;
double cachedDateStringValue;
-
- WeakRandom weakRandom;
+
+ int maxReentryDepth;
+
+ RegExpCache* m_regExpCache;
#ifndef NDEBUG
- bool mainThreadOnly;
+ ThreadIdentifier exclusiveThread;
#endif
+ CachedTranscendentalFunction<sin> cachedSin;
+
void resetDateCache();
void startSampling();
void stopSampling();
void dumpSampleData(ExecState* exec);
+ RegExpCache* regExpCache() { return m_regExpCache; }
private:
- JSGlobalData(bool isShared);
+ JSGlobalData(GlobalDataType, ThreadStackType);
static JSGlobalData*& sharedInstanceInternal();
void createNativeThunk();
+#if ENABLE(JIT) && ENABLE(INTERPRETER)
+ bool m_canUseJIT;
+#endif
};
-
+
} // namespace JSC
#endif // JSGlobalData_h
(*it)->clearGlobalObject();
RegisterFile& registerFile = globalData()->interpreter->registerFile();
- if (registerFile.globalObject() == this) {
- registerFile.setGlobalObject(0);
+ if (registerFile.clearGlobalObject(this))
registerFile.setNumGlobals(0);
- }
d()->destructor(d());
}
ErrorPrototype* errorPrototype = new (exec) ErrorPrototype(exec, ErrorPrototype::createStructure(d()->objectPrototype), d()->prototypeFunctionStructure.get());
d()->errorStructure = ErrorInstance::createStructure(errorPrototype);
- RefPtr<Structure> nativeErrorPrototypeStructure = NativeErrorPrototype::createStructure(errorPrototype);
-
- NativeErrorPrototype* evalErrorPrototype = new (exec) NativeErrorPrototype(exec, nativeErrorPrototypeStructure, "EvalError", "EvalError");
- NativeErrorPrototype* rangeErrorPrototype = new (exec) NativeErrorPrototype(exec, nativeErrorPrototypeStructure, "RangeError", "RangeError");
- NativeErrorPrototype* referenceErrorPrototype = new (exec) NativeErrorPrototype(exec, nativeErrorPrototypeStructure, "ReferenceError", "ReferenceError");
- NativeErrorPrototype* syntaxErrorPrototype = new (exec) NativeErrorPrototype(exec, nativeErrorPrototypeStructure, "SyntaxError", "SyntaxError");
- NativeErrorPrototype* typeErrorPrototype = new (exec) NativeErrorPrototype(exec, nativeErrorPrototypeStructure, "TypeError", "TypeError");
- NativeErrorPrototype* URIErrorPrototype = new (exec) NativeErrorPrototype(exec, nativeErrorPrototypeStructure, "URIError", "URIError");
-
// Constructors
JSCell* objectConstructor = new (exec) ObjectConstructor(exec, ObjectConstructor::createStructure(d()->functionPrototype), d()->objectPrototype, d()->prototypeFunctionStructure.get());
d()->errorConstructor = new (exec) ErrorConstructor(exec, ErrorConstructor::createStructure(d()->functionPrototype), errorPrototype);
+ RefPtr<Structure> nativeErrorPrototypeStructure = NativeErrorPrototype::createStructure(errorPrototype);
RefPtr<Structure> nativeErrorStructure = NativeErrorConstructor::createStructure(d()->functionPrototype);
- d()->evalErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, evalErrorPrototype);
- d()->rangeErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, rangeErrorPrototype);
- d()->referenceErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, referenceErrorPrototype);
- d()->syntaxErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, syntaxErrorPrototype);
- d()->typeErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, typeErrorPrototype);
- d()->URIErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, URIErrorPrototype);
+ d()->evalErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, nativeErrorPrototypeStructure, "EvalError");
+ d()->rangeErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, nativeErrorPrototypeStructure, "RangeError");
+ d()->referenceErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, nativeErrorPrototypeStructure, "ReferenceError");
+ d()->syntaxErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, nativeErrorPrototypeStructure, "SyntaxError");
+ d()->typeErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, nativeErrorPrototypeStructure, "TypeError");
+ d()->URIErrorConstructor = new (exec) NativeErrorConstructor(exec, nativeErrorStructure, nativeErrorPrototypeStructure, "URIError");
d()->objectPrototype->putDirectFunctionWithoutTransition(exec->propertyNames().constructor, objectConstructor, DontEnum);
d()->functionPrototype->putDirectFunctionWithoutTransition(exec->propertyNames().constructor, functionConstructor, DontEnum);
d()->regExpPrototype->putDirectFunctionWithoutTransition(exec->propertyNames().constructor, d()->regExpConstructor, DontEnum);
errorPrototype->putDirectFunctionWithoutTransition(exec->propertyNames().constructor, d()->errorConstructor, DontEnum);
- evalErrorPrototype->putDirect(exec->propertyNames().constructor, d()->evalErrorConstructor, DontEnum);
- rangeErrorPrototype->putDirect(exec->propertyNames().constructor, d()->rangeErrorConstructor, DontEnum);
- referenceErrorPrototype->putDirect(exec->propertyNames().constructor, d()->referenceErrorConstructor, DontEnum);
- syntaxErrorPrototype->putDirect(exec->propertyNames().constructor, d()->syntaxErrorConstructor, DontEnum);
- typeErrorPrototype->putDirect(exec->propertyNames().constructor, d()->typeErrorConstructor, DontEnum);
- URIErrorPrototype->putDirect(exec->propertyNames().constructor, d()->URIErrorConstructor, DontEnum);
-
// Set global constructors
// FIXME: These properties could be handled by a static hash table.
// Set global values.
GlobalPropertyInfo staticGlobals[] = {
GlobalPropertyInfo(Identifier(exec, "Math"), new (exec) MathObject(exec, MathObject::createStructure(d()->objectPrototype)), DontEnum | DontDelete),
- GlobalPropertyInfo(Identifier(exec, "NaN"), jsNaN(exec), DontEnum | DontDelete),
- GlobalPropertyInfo(Identifier(exec, "Infinity"), jsNumber(exec, Inf), DontEnum | DontDelete),
- GlobalPropertyInfo(Identifier(exec, "undefined"), jsUndefined(), DontEnum | DontDelete),
+ GlobalPropertyInfo(Identifier(exec, "NaN"), jsNaN(exec), DontEnum | DontDelete | ReadOnly),
+ GlobalPropertyInfo(Identifier(exec, "Infinity"), jsNumber(exec, Inf), DontEnum | DontDelete | ReadOnly),
+ GlobalPropertyInfo(Identifier(exec, "undefined"), jsUndefined(), DontEnum | DontDelete | ReadOnly),
GlobalPropertyInfo(Identifier(exec, "JSON"), new (exec) JSONObject(JSONObject::createStructure(d()->objectPrototype)), DontEnum | DontDelete)
};
return CallFrame::create(d()->globalCallFrame + RegisterFile::CallFrameHeaderSize);
}
-bool JSGlobalObject::isDynamicScope() const
+bool JSGlobalObject::isDynamicScope(bool&) const
{
return true;
}
#include "JSArray.h"
#include "JSGlobalData.h"
#include "JSVariableObject.h"
+#include "JSWeakObjectMapRefInternal.h"
#include "NativeFunctionWrapper.h"
#include "NumberPrototype.h"
#include "StringPrototype.h"
#include <wtf/HashSet.h>
#include <wtf/OwnPtr.h>
+#include <wtf/RandomNumber.h>
namespace JSC {
class JSGlobalObject : public JSVariableObject {
protected:
using JSVariableObject::JSVariableObjectData;
+ typedef HashSet<RefPtr<OpaqueJSWeakObjectMap> > WeakMapSet;
struct JSGlobalObjectData : public JSVariableObjectData {
// We use an explicit destructor function pointer instead of a
, datePrototype(0)
, regExpPrototype(0)
, methodCallDummy(0)
+ , weakRandom(static_cast<unsigned>(randomNumber() * (std::numeric_limits<unsigned>::max() + 1.0)))
{
}
RefPtr<JSGlobalData> globalData;
HashSet<GlobalCodeBlock*> codeBlocks;
+ WeakMapSet weakMaps;
+ WeakRandom weakRandom;
};
public:
virtual bool allowsAccessFrom(const JSGlobalObject*) const { return true; }
- virtual bool isDynamicScope() const;
+ virtual bool isDynamicScope(bool& requiresDynamicChecks) const;
HashSet<GlobalCodeBlock*>& codeBlocks() { return d()->codeBlocks; }
return Structure::create(prototype, TypeInfo(ObjectType, StructureFlags), AnonymousSlotCount);
}
+ void registerWeakMap(OpaqueJSWeakObjectMap* map)
+ {
+ d()->weakMaps.add(map);
+ }
+
+ void deregisterWeakMap(OpaqueJSWeakObjectMap* map)
+ {
+ d()->weakMaps.remove(map);
+ }
+
+ double weakRandomNumber() { return d()->weakRandom.get(); }
protected:
static const unsigned StructureFlags = OverridesGetOwnPropertySlot | OverridesMarkChildren | OverridesGetPropertyNames | JSVariableObject::StructureFlags;
#include "Interpreter.h"
#include "JSGlobalObject.h"
#include "JSString.h"
+#include "JSStringBuilder.h"
#include "Lexer.h"
#include "LiteralParser.h"
#include "Nodes.h"
{
UString str = args.at(0).toString(exec);
CString cstr = str.UTF8String(true);
- if (!cstr.c_str())
+ if (!cstr.data())
return throwError(exec, URIError, "String contained an illegal UTF-16 sequence.");
- StringBuilder builder;
- const char* p = cstr.c_str();
- for (size_t k = 0; k < cstr.size(); k++, p++) {
+ JSStringBuilder builder;
+ const char* p = cstr.data();
+ for (size_t k = 0; k < cstr.length(); k++, p++) {
char c = *p;
if (c && strchr(doNotEscape, c))
builder.append(c);
else {
char tmp[4];
snprintf(tmp, sizeof(tmp), "%%%02X", static_cast<unsigned char>(c));
- builder.append((const char*)tmp);
+ builder.append(tmp);
}
}
- return jsString(exec, builder.release());
+ return builder.build(exec);
}
static JSValue decode(ExecState* exec, const ArgList& args, const char* doNotUnescape, bool strict)
{
- StringBuilder builder;
+ JSStringBuilder builder;
UString str = args.at(0).toString(exec);
int k = 0;
int len = str.size();
k++;
builder.append(c);
}
- return jsString(exec, builder.release());
+ return builder.build(exec);
}
bool isStrWhiteSpace(UChar c)
}
if (number >= mantissaOverflowLowerBound) {
+ // FIXME: It is incorrect to use UString::ascii() here because it's not thread-safe.
if (radix == 10)
number = WTF::strtod(s.substr(firstDigitPosition, p - firstDigitPosition).ascii(), 0);
else if (radix == 2 || radix == 4 || radix == 8 || radix == 16 || radix == 32)
if (length - p >= 2 && data[p] == '0' && (data[p + 1] == 'x' || data[p + 1] == 'X'))
return 0;
+ // FIXME: UString::toDouble will ignore leading ASCII spaces, but we need to ignore
+ // other StrWhiteSpaceChar values as well.
return s.toDouble(true /*tolerant*/, false /* NaN for empty string */);
}
"0123456789"
"*+-./@_";
- StringBuilder builder;
- UString s;
+ JSStringBuilder builder;
UString str = args.at(0).toString(exec);
const UChar* c = str.data();
- for (int k = 0; k < str.size(); k++, c++) {
+ for (unsigned k = 0; k < str.size(); k++, c++) {
int u = c[0];
if (u > 255) {
char tmp[7];
snprintf(tmp, sizeof(tmp), "%%u%04X", u);
- s = UString(tmp);
+ builder.append(tmp);
} else if (u != 0 && strchr(do_not_escape, static_cast<char>(u)))
- s = UString(c, 1);
+ builder.append(c, 1);
else {
char tmp[4];
snprintf(tmp, sizeof(tmp), "%%%02X", u);
- s = UString(tmp);
+ builder.append(tmp);
}
- builder.append(s);
}
- return jsString(exec, builder.release());
+ return builder.build(exec);
}
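
The escape() loop above now appends directly into a JSStringBuilder instead of round-tripping through a temporary UString per character; the encoding rule itself is unchanged. A standalone sketch of that rule, for reference only:

#include <cstdio>
#include <cstring>
#include <string>

// Characters in the safe set pass through; other code points at or below 0xFF
// become %XX and anything above 0xFF becomes %uXXXX, as in the loop above.
std::string escapeUnit(unsigned u, const char* doNotEscape)
{
    char tmp[8];
    if (u > 0xFF) {
        std::snprintf(tmp, sizeof(tmp), "%%u%04X", u);
        return tmp;
    }
    if (u && std::strchr(doNotEscape, static_cast<char>(u)))
        return std::string(1, static_cast<char>(u));
    std::snprintf(tmp, sizeof(tmp), "%%%02X", u);
    return tmp;
}

int main()
{
    const char* safe = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789*+-./@_";
    std::printf("%s %s %s\n",
                escapeUnit('A', safe).c_str(),      // A
                escapeUnit(' ', safe).c_str(),      // %20
                escapeUnit(0x20AC, safe).c_str());  // %u20AC
    return 0;
}
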
JSValue JSC_HOST_CALL globalFuncUnescape(ExecState* exec, JSObject*, JSValue, const ArgList& args)
builder.append(*c);
}
- return jsString(exec, builder.release());
+ return jsString(exec, builder.build());
}
#ifndef NDEBUG
JSValue JSC_HOST_CALL globalFuncJSCPrint(ExecState* exec, JSObject*, JSValue, const ArgList& args)
{
- CStringBuffer string;
- args.at(0).toString(exec).getCString(string);
+ CString string = args.at(0).toString(exec).UTF8String();
puts(string.data());
return jsUndefined();
}
#ifndef JSImmediate_h
#define JSImmediate_h
-#include <wtf/Platform.h>
-
#if !USE(JSVALUE32_64)
#include <wtf/Assertions.h>
friend class JIT;
friend class JSValue;
friend class JSFastMath;
+ friend class JSInterfaceJIT;
+ friend class SpecializedThunkJIT;
friend JSValue jsNumber(ExecState* exec, double d);
friend JSValue jsNumber(ExecState*, char i);
friend JSValue jsNumber(ExecState*, unsigned char i);
// This value is 2^48, used to encode doubles such that the encoded value will begin
// with a 16-bit pattern within the range 0x0001..0xFFFE.
static const intptr_t DoubleEncodeOffset = 0x1000000000000ll;
-#else
+#elif USE(JSVALUE32)
static const intptr_t TagTypeNumber = 0x1; // bottom bit set indicates integer, this dominates the following bit
#endif
static const intptr_t TagBitTypeOther = 0x2; // second bit set indicates immediate other than an integer
}
JSLock::JSLock(ExecState* exec)
- : m_lockBehavior(exec->globalData().isSharedInstance ? LockForReal : SilenceAssertionsOnly)
+ : m_lockBehavior(exec->globalData().isSharedInstance() ? LockForReal : SilenceAssertionsOnly)
{
lock(m_lockBehavior);
}
void JSLock::lock(ExecState* exec)
{
- lock(exec->globalData().isSharedInstance ? LockForReal : SilenceAssertionsOnly);
+ lock(exec->globalData().isSharedInstance() ? LockForReal : SilenceAssertionsOnly);
}
void JSLock::unlock(ExecState* exec)
{
- unlock(exec->globalData().isSharedInstance ? LockForReal : SilenceAssertionsOnly);
+ unlock(exec->globalData().isSharedInstance() ? LockForReal : SilenceAssertionsOnly);
}
bool JSLock::currentThreadIsHoldingLock()
static unsigned lockDropDepth = 0;
JSLock::DropAllLocks::DropAllLocks(ExecState* exec)
- : m_lockBehavior(exec->globalData().isSharedInstance ? LockForReal : SilenceAssertionsOnly)
+ : m_lockBehavior(exec->globalData().isSharedInstance() ? LockForReal : SilenceAssertionsOnly)
{
pthread_once(&createJSLockCountOnce, createJSLockCount);
return UString::from(m_value);
}
-UString JSNumberCell::toThisString(ExecState*) const
-{
- return UString::from(m_value);
-}
-
JSObject* JSNumberCell::toObject(ExecState* exec) const
{
return constructNumber(exec, const_cast<JSNumberCell*>(this));
virtual UString toString(ExecState*) const;
virtual JSObject* toObject(ExecState*) const;
- virtual UString toThisString(ExecState*) const;
virtual JSObject* toThisObject(ExecState*) const;
virtual JSValue getJSNumber();
#include "ExceptionHelpers.h"
#include "JSArray.h"
#include "LiteralParser.h"
+#include "Lookup.h"
#include "PropertyNameArray.h"
#include "StringBuilder.h"
#include <wtf/MathExtras.h>
static inline UString gap(ExecState* exec, JSValue space)
{
- const int maxGapLength = 10;
+ const unsigned maxGapLength = 10;
space = unwrapBoxedPrimitive(exec, space);
// If the space value is a number, create a gap string with that number of spaces.
if (m_exec->hadException())
return jsNull();
- return jsString(m_exec, result.release());
+ return jsString(m_exec, result.build());
}
void Stringifier::appendQuotedString(StringBuilder& builder, const UString& value)
inline void Stringifier::indent()
{
// Use a single shared string, m_repeatedGap, so we don't keep allocating new ones as we indent and unindent.
- int newSize = m_indent.size() + m_gap.size();
+ unsigned newSize = m_indent.size() + m_gap.size();
if (newSize > m_repeatedGap.size())
m_repeatedGap = makeString(m_repeatedGap, m_gap);
ASSERT(newSize <= m_repeatedGap.size());
return Stringifier(exec, replacer, space).stringify(value);
}
+UString JSONStringify(ExecState* exec, JSValue value, unsigned indent)
+{
+ JSValue result = Stringifier(exec, jsNull(), jsNumber(exec, indent)).stringify(value);
+ if (result.isUndefinedOrNull())
+ return UString();
+ return result.getString(exec);
+}
+
} // namespace JSC
static const ClassInfo info;
};
+ UString JSONStringify(ExecState* exec, JSValue value, unsigned indent);
+
} // namespace JSC
#endif // JSONObject_h
NEVER_INLINE void JSObject::fillGetterPropertySlot(PropertySlot& slot, JSValue* location)
{
- if (JSObject* getterFunction = asGetterSetter(*location)->getter())
- slot.setGetterSlot(getterFunction);
- else
+ if (JSObject* getterFunction = asGetterSetter(*location)->getter()) {
+ if (!structure()->isDictionary())
+ slot.setCacheableGetterSlot(this, getterFunction, offsetForLocation(location));
+ else
+ slot.setGetterSlot(getterFunction);
+ } else
slot.setUndefined();
}
#include "ArgList.h"
#include "ClassInfo.h"
#include "CommonIdentifiers.h"
+#include "Completion.h"
#include "CallFrame.h"
#include "JSCell.h"
#include "JSNumberCell.h"
#include "ScopeChain.h"
#include "Structure.h"
#include "JSGlobalData.h"
+#include "JSString.h"
#include <wtf/StdLibExtras.h>
namespace JSC {
void putDirect(const Identifier& propertyName, JSValue value, unsigned attr, bool checkReadOnly, PutPropertySlot& slot);
void putDirect(const Identifier& propertyName, JSValue value, unsigned attr = 0);
+ void putDirect(const Identifier& propertyName, JSValue value, PutPropertySlot&);
void putDirectFunction(const Identifier& propertyName, JSCell* value, unsigned attr = 0);
void putDirectFunction(const Identifier& propertyName, JSCell* value, unsigned attr, bool checkReadOnly, PutPropertySlot& slot);
virtual bool isGlobalObject() const { return false; }
virtual bool isVariableObject() const { return false; }
virtual bool isActivationObject() const { return false; }
- virtual bool isWatchdogException() const { return false; }
virtual bool isNotAnObjectErrorStub() const { return false; }
+ virtual ComplType exceptionType() const { return Throw; }
+
void allocatePropertyStorage(size_t oldSize, size_t newSize);
void allocatePropertyStorageInline(size_t oldSize, size_t newSize);
bool isUsingInlineStorage() const { return m_structure->isUsingInlineStorage(); }
JSCell* currentSpecificFunction;
size_t offset = m_structure->get(propertyName, currentAttributes, currentSpecificFunction);
if (offset != WTF::notFound) {
+ // If there is currently a specific function, and there now either isn't,
+ // or the new value is different, then despecify.
if (currentSpecificFunction && (specificFunction != currentSpecificFunction))
m_structure->despecifyDictionaryFunction(propertyName);
if (checkReadOnly && currentAttributes & ReadOnly)
return;
putDirectOffset(offset, value);
- if (!specificFunction && !currentSpecificFunction)
+ // At this point, the object's structure only has a specific value set if one had
+ // previously been set and the new value being specified is the same (otherwise we
+ // would have despecified, above). So, if currentSpecificFunction is not set, or if
+ // the new value is different (or there is no new value), then the slot now has no
+ // specific value, and as such it is cacheable.
+ // If there was previously a value and the new value is the same, then we cannot cache.
+ if (!currentSpecificFunction || (specificFunction != currentSpecificFunction))
slot.setExistingProperty(this, offset);
return;
}
ASSERT(offset < structure->propertyStorageCapacity());
setStructure(structure.release());
putDirectOffset(offset, value);
- // See comment on setNewProperty call below.
+ // This is a new property; transitions with specific values are not currently cacheable,
+ // so leave the slot in an uncacheable state.
if (!specificFunction)
slot.setNewProperty(this, offset);
return;
if (checkReadOnly && currentAttributes & ReadOnly)
return;
- if (currentSpecificFunction && (specificFunction != currentSpecificFunction)) {
+ // There are three possibilities here:
+ // (1) There is an existing specific value set, and we're overwriting with *the same value*.
+ // * Do nothing: no need to despecify, but that means we can't cache (a cached
+ // put could write a different value). Leave the slot in an uncacheable state.
+ // (2) There is a specific value currently set, but we're writing a different value.
+ // * First, we have to despecify. Having done so, this is now a regular slot
+ // with no specific value, so go ahead & cache like normal.
+ // (3) Normal case: there is no specific value set.
+ // * Go ahead & cache like normal.
+ if (currentSpecificFunction) {
+ // case (1) Do the put, then return, leaving the slot uncacheable.
+ if (specificFunction == currentSpecificFunction) {
+ putDirectOffset(offset, value);
+ return;
+ }
+ // case (2) Despecify, fall through to (3).
setStructure(Structure::despecifyFunctionTransition(m_structure, propertyName));
- putDirectOffset(offset, value);
- // Function transitions are not currently cachable, so leave the slot in an uncachable state.
- return;
}
- putDirectOffset(offset, value);
+
+ // case (3) set the slot, do the put, return.
slot.setExistingProperty(this, offset);
+ putDirectOffset(offset, value);
return;
}
ASSERT(offset < structure->propertyStorageCapacity());
setStructure(structure.release());
putDirectOffset(offset, value);
- // Function transitions are not currently cachable, so leave the slot in an uncachable state.
+ // This is a new property; transitions with specific values are not currently cacheable,
+ // so leave the slot in an uncacheable state.
if (!specificFunction)
slot.setNewProperty(this, offset);
}
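// Illustrative sketch (not part of the patch): the caching decision described in the
// comments above, reduced to a standalone function with hypothetical names. 'existing'
// is the function currently specialized on the slot (if any); 'incoming' is the
// function now being written (null for a non-function value).
enum class PutOutcome {
    WriteButDoNotCache,   // case (1): same specific function written again; slot stays specialized
    DespecifyThenCache,   // case (2): a different value written; despecify, then cache as normal
    CacheNormally         // case (3): no specialization on this slot; cache as normal
};

inline PutOutcome classifyPut(const void* existing, const void* incoming)
{
    if (existing) {
        if (incoming == existing)
            return PutOutcome::WriteButDoNotCache;
        return PutOutcome::DespecifyThenCache;
    }
    return PutOutcome::CacheNormally;
}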
putDirectInternal(propertyName, value, attributes, false, slot, 0);
}
+inline void JSObject::putDirect(const Identifier& propertyName, JSValue value, PutPropertySlot& slot)
+{
+ putDirectInternal(propertyName, value, 0, false, slot, 0);
+}
+
inline void JSObject::putDirectFunction(const Identifier& propertyName, JSCell* value, unsigned attributes, bool checkReadOnly, PutPropertySlot& slot)
{
putDirectInternal(propertyName, value, attributes, checkReadOnly, slot, value);
asCell()->put(exec, propertyName, value, slot);
}
+inline void JSValue::putDirect(ExecState*, const Identifier& propertyName, JSValue value, PutPropertySlot& slot)
+{
+ ASSERT(isCell() && isObject());
+ asObject(asCell())->putDirect(propertyName, value, slot);
+}
+
inline void JSValue::put(ExecState* exec, unsigned propertyName, JSValue value)
{
if (UNLIKELY(!isCell())) {
markStack.appendValues(reinterpret_cast<JSValue*>(storage), storageSize);
}
+// --- JSValue inlines ----------------------------
+
+ALWAYS_INLINE UString JSValue::toThisString(ExecState* exec) const
+{
+ return isString() ? static_cast<JSString*>(asCell())->value(exec) : toThisObject(exec)->toString(exec);
+}
+
+inline JSString* JSValue::toThisJSString(ExecState* exec) const
+{
+ return isString() ? static_cast<JSString*>(asCell()) : jsString(exec, toThisObject(exec)->toString(exec));
+}
+
} // namespace JSC
#endif // JSObject_h
JSValue get(ExecState*, JSObject*, size_t i);
size_t size() { return m_jsStringsSize; }
- void setCachedStructure(Structure* structure) { m_cachedStructure = structure; }
- Structure* cachedStructure() { return m_cachedStructure; }
+ void setCachedStructure(Structure* structure)
+ {
+ ASSERT(!m_cachedStructure);
+ ASSERT(structure);
+ m_cachedStructure = structure;
+ }
+ Structure* cachedStructure() { return m_cachedStructure.get(); }
void setCachedPrototypeChain(NonNullPassRefPtr<StructureChain> cachedPrototypeChain) { m_cachedPrototypeChain = cachedPrototypeChain; }
StructureChain* cachedPrototypeChain() { return m_cachedPrototypeChain.get(); }
private:
JSPropertyNameIterator(ExecState*, PropertyNameArrayData* propertyNameArrayData, size_t numCacheableSlot);
- Structure* m_cachedStructure;
+ RefPtr<Structure> m_cachedStructure;
RefPtr<StructureChain> m_cachedPrototypeChain;
uint32_t m_numCacheableSlots;
uint32_t m_jsStringsSize;
ASSERT_NOT_REACHED();
}
-bool JSStaticScopeObject::isDynamicScope() const
+bool JSStaticScopeObject::isDynamicScope(bool&) const
{
return false;
}
}
virtual ~JSStaticScopeObject();
virtual void markChildren(MarkStack&);
- bool isDynamicScope() const;
+ bool isDynamicScope(bool& requiresDynamicChecks) const;
virtual JSObject* toThisObject(ExecState*) const;
virtual bool getOwnPropertySlot(ExecState*, const Identifier&, PropertySlot&);
virtual void put(ExecState*, const Identifier&, JSValue, PutPropertySlot&);
namespace JSC {
-void JSString::Rope::destructNonRecursive()
-{
- Vector<Rope*, 32> workQueue;
- Rope* rope = this;
-
- while (true) {
- unsigned length = rope->ropeLength();
- for (unsigned i = 0; i < length; ++i) {
- Fiber& fiber = rope->fibers(i);
- if (fiber.isString())
- fiber.string()->deref();
- else {
- Rope* nextRope = fiber.rope();
- if (nextRope->hasOneRef())
- workQueue.append(nextRope);
- else
- nextRope->deref();
- }
- }
- if (rope != this)
- fastFree(rope);
-
- if (workQueue.isEmpty())
- return;
-
- rope = workQueue.last();
- workQueue.removeLast();
- }
-}
-
-JSString::Rope::~Rope()
-{
- destructNonRecursive();
-}
-
// Overview: this method converts a JSString from holding a string in rope form
// down to a simple UString representation. It does so by building up the string
// backwards. Since we want to avoid recursion we use a work queue, and since the
// tree structure representing the rope is likely imbalanced, with more nodes down
// the left side (appending to the string is likely more common), resolving
// in this fashion should minimize work queue size. (If we built the queue forwards
-// we would likely have to place all of the constituent UString::Reps into the
+// we would likely have to place all of the constituent UStringImpls into the
// Vector before performing any concatenation, but by working backwards we likely
// only fill the queue with the number of substrings at any given level in a
// rope-of-ropes.)
// Allocate the buffer to hold the final string, position initially points to the end.
UChar* buffer;
- if (PassRefPtr<UStringImpl> newImpl = UStringImpl::tryCreateUninitialized(m_stringLength, buffer))
+ if (PassRefPtr<UStringImpl> newImpl = UStringImpl::tryCreateUninitialized(m_length, buffer))
m_value = newImpl;
else {
- for (unsigned i = 0; i < m_ropeLength; ++i) {
- m_fibers[i].deref();
- m_fibers[i] = static_cast<void*>(0);
+ for (unsigned i = 0; i < m_fiberCount; ++i) {
+ RopeImpl::deref(m_other.m_fibers[i]);
+ m_other.m_fibers[i] = 0;
}
- m_ropeLength = 0;
+ m_fiberCount = 0;
ASSERT(!isRope());
ASSERT(m_value == UString());
- throwOutOfMemoryError(exec);
+ if (exec)
+ throwOutOfMemoryError(exec);
return;
}
- UChar* position = buffer + m_stringLength;
-
- // Start with the current Rope.
- Vector<Rope::Fiber, 32> workQueue;
- Rope::Fiber currentFiber;
- for (unsigned i = 0; i < (m_ropeLength - 1); ++i)
- workQueue.append(m_fibers[i]);
- currentFiber = m_fibers[m_ropeLength - 1];
+ UChar* position = buffer + m_length;
+
+ // Start with the current RopeImpl.
+ Vector<RopeImpl::Fiber, 32> workQueue;
+ RopeImpl::Fiber currentFiber;
+ for (unsigned i = 0; i < (m_fiberCount - 1); ++i)
+ workQueue.append(m_other.m_fibers[i]);
+ currentFiber = m_other.m_fibers[m_fiberCount - 1];
while (true) {
- if (currentFiber.isRope()) {
- Rope* rope = currentFiber.rope();
+ if (RopeImpl::isRope(currentFiber)) {
+ RopeImpl* rope = static_cast<RopeImpl*>(currentFiber);
// Copy the contents of the current rope into the workQueue, with the last item in 'currentFiber'
// (we will be working backwards over the rope).
- unsigned ropeLengthMinusOne = rope->ropeLength() - 1;
- for (unsigned i = 0; i < ropeLengthMinusOne; ++i)
- workQueue.append(rope->fibers(i));
- currentFiber = rope->fibers(ropeLengthMinusOne);
+ unsigned fiberCountMinusOne = rope->fiberCount() - 1;
+ for (unsigned i = 0; i < fiberCountMinusOne; ++i)
+ workQueue.append(rope->fibers()[i]);
+ currentFiber = rope->fibers()[fiberCountMinusOne];
} else {
- UString::Rep* string = currentFiber.string();
- unsigned length = string->size();
+ UStringImpl* string = static_cast<UStringImpl*>(currentFiber);
+ unsigned length = string->length();
position -= length;
- UStringImpl::copyChars(position, string->data(), length);
+ UStringImpl::copyChars(position, string->characters(), length);
// Was this the last item in the work queue?
if (workQueue.isEmpty()) {
// Create a string from the UChar buffer, clear the rope RefPtr.
ASSERT(buffer == position);
- for (unsigned i = 0; i < m_ropeLength; ++i) {
- m_fibers[i].deref();
- m_fibers[i] = static_cast<void*>(0);
+ for (unsigned i = 0; i < m_fiberCount; ++i) {
+ RopeImpl::deref(m_other.m_fibers[i]);
+ m_other.m_fibers[i] = 0;
}
- m_ropeLength = 0;
+ m_fiberCount = 0;
ASSERT(!isRope());
return;
}
}
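// Illustrative sketch (not part of the patch): the iterative, backwards resolution
// described in the overview comment above, using hypothetical standalone types
// (std::string leaves instead of UStringImpl, plain pointers instead of fibers).
// The buffer is filled from the end, and only the not-yet-visited siblings of each
// node are parked on the work queue.
#include <string>
#include <vector>

struct RopeNode {
    std::string leaf;                 // used when children is empty
    std::vector<RopeNode*> children;  // used for interior nodes

    // The real rope stores its length; recomputing it recursively keeps the sketch short.
    size_t length() const
    {
        if (children.empty())
            return leaf.size();
        size_t total = 0;
        for (size_t i = 0; i < children.size(); ++i)
            total += children[i]->length();
        return total;
    }
};

inline std::string resolve(RopeNode* root)
{
    std::string buffer(root->length(), '\0');
    size_t position = buffer.size();      // write cursor, moving backwards

    std::vector<RopeNode*> workQueue;
    RopeNode* current = root;
    while (true) {
        if (!current->children.empty()) {
            // Park all but the last child and keep descending into the last one,
            // so the rightmost substrings are written first (the buffer fills backwards).
            for (size_t i = 0; i + 1 < current->children.size(); ++i)
                workQueue.push_back(current->children[i]);
            current = current->children.back();
        } else {
            position -= current->leaf.size();
            buffer.replace(position, current->leaf.size(), current->leaf);
            if (workQueue.empty())
                return buffer;             // position is now back at the start
            current = workQueue.back();
            workQueue.pop_back();
        }
    }
}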
+JSValue JSString::replaceCharacter(ExecState* exec, UChar character, const UString& replacement)
+{
+ if (!isRope()) {
+ unsigned matchPosition = m_value.find(character);
+ if (matchPosition == UString::NotFound)
+ return JSValue(this);
+ return jsString(exec, m_value.substr(0, matchPosition), replacement, m_value.substr(matchPosition + 1));
+ }
+
+ RopeIterator end;
+
+ // Count total fibers and find matching string.
+ size_t fiberCount = 0;
+ UStringImpl* matchString = 0;
+ int matchPosition = -1;
+ for (RopeIterator it(m_other.m_fibers, m_fiberCount); it != end; ++it) {
+ ++fiberCount;
+ if (matchString)
+ continue;
+
+ UStringImpl* string = *it;
+ matchPosition = string->find(character);
+ if (matchPosition == -1)
+ continue;
+ matchString = string;
+ }
+
+ if (!matchString)
+ return this;
+
+ RopeBuilder builder(replacement.size() ? fiberCount + 2 : fiberCount + 1);
+ if (UNLIKELY(builder.isOutOfMemory()))
+ return throwOutOfMemoryError(exec);
+
+ for (RopeIterator it(m_other.m_fibers, m_fiberCount); it != end; ++it) {
+ UStringImpl* string = *it;
+ if (string != matchString) {
+ builder.append(UString(string));
+ continue;
+ }
+
+ builder.append(UString(string).substr(0, matchPosition));
+ if (replacement.size())
+ builder.append(replacement);
+ builder.append(UString(string).substr(matchPosition + 1));
+ matchString = 0;
+ }
+
+ JSGlobalData* globalData = &exec->globalData();
+ return JSValue(new (globalData) JSString(globalData, builder.release()));
+}
+
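// Illustrative sketch (not part of the patch): the non-rope fast path of
// replaceCharacter above, i.e. replacing the first occurrence of a character by
// stitching together the text before the match, the replacement, and the text after
// the match. Hypothetical std::string version.
#include <string>

inline std::string replaceFirstCharacter(const std::string& text, char character, const std::string& replacement)
{
    size_t matchPosition = text.find(character);
    if (matchPosition == std::string::npos)
        return text;   // no match: hand back the input unchanged
    return text.substr(0, matchPosition) + replacement + text.substr(matchPosition + 1);
}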
+JSString* JSString::getIndexSlowCase(ExecState* exec, unsigned i)
+{
+ ASSERT(isRope());
+ resolveRope(exec);
+ // Return a safe no-value result; this should never be used, since the exception will be thrown.
+ if (exec->exception())
+ return jsString(exec, "");
+ ASSERT(!isRope());
+ ASSERT(i < m_value.size());
+ return jsSingleCharacterSubstring(exec, m_value, i);
+}
+
JSValue JSString::toPrimitive(ExecState*, PreferredPrimitiveType) const
{
return const_cast<JSString*>(this);
bool JSString::toBoolean(ExecState*) const
{
- return m_stringLength;
+ return m_length;
}
double JSString::toNumber(ExecState* exec) const
return value(exec);
}
-UString JSString::toThisString(ExecState* exec) const
-{
- return value(exec);
-}
-
-JSString* JSString::toThisJSString(ExecState*)
-{
- return this;
-}
-
inline StringObject* StringObject::create(ExecState* exec, JSString* string)
{
return new (exec) StringObject(exec->lexicalGlobalObject()->stringObjectStructure(), string);
bool JSString::getStringPropertyDescriptor(ExecState* exec, const Identifier& propertyName, PropertyDescriptor& descriptor)
{
if (propertyName == exec->propertyNames().length) {
- descriptor.setDescriptor(jsNumber(exec, m_stringLength), DontEnum | DontDelete | ReadOnly);
+ descriptor.setDescriptor(jsNumber(exec, m_length), DontEnum | DontDelete | ReadOnly);
return true;
}
bool isStrictUInt32;
unsigned i = propertyName.toStrictUInt32(&isStrictUInt32);
- if (isStrictUInt32 && i < m_stringLength) {
- descriptor.setDescriptor(jsSingleCharacterSubstring(exec, value(exec), i), DontDelete | ReadOnly);
+ if (isStrictUInt32 && i < m_length) {
+ descriptor.setDescriptor(getIndex(exec, i), DontDelete | ReadOnly);
return true;
}
#include "JSNumberCell.h"
#include "PropertyDescriptor.h"
#include "PropertySlot.h"
+#include "RopeImpl.h"
namespace JSC {
JSString* jsSingleCharacterString(JSGlobalData*, UChar);
JSString* jsSingleCharacterString(ExecState*, UChar);
- JSString* jsSingleCharacterSubstring(JSGlobalData*, const UString&, unsigned offset);
JSString* jsSingleCharacterSubstring(ExecState*, const UString&, unsigned offset);
JSString* jsSubstring(JSGlobalData*, const UString&, unsigned offset, unsigned length);
JSString* jsSubstring(ExecState*, const UString&, unsigned offset, unsigned length);
public:
friend class JIT;
friend class JSGlobalData;
+ friend class SpecializedThunkJIT;
+ friend struct ThunkHelpers;
- // A Rope is a string composed of a set of substrings.
- class Rope : public RefCounted<Rope> {
+ class RopeBuilder {
public:
- // A Rope is composed from a set of smaller strings called Fibers.
- // Each Fiber in a rope is either UString::Rep or another Rope.
- class Fiber {
- public:
- Fiber() : m_value(0) {}
- Fiber(UString::Rep* string) : m_value(reinterpret_cast<intptr_t>(string)) {}
- Fiber(Rope* rope) : m_value(reinterpret_cast<intptr_t>(rope) | 1) {}
+ RopeBuilder(unsigned fiberCount)
+ : m_index(0)
+ , m_rope(RopeImpl::tryCreateUninitialized(fiberCount))
+ {
+ }
+
+ bool isOutOfMemory() { return !m_rope; }
+
+ void append(RopeImpl::Fiber& fiber)
+ {
+ ASSERT(m_rope);
+ m_rope->initializeFiber(m_index, fiber);
+ }
+ void append(const UString& string)
+ {
+ ASSERT(m_rope);
+ m_rope->initializeFiber(m_index, string.rep());
+ }
+ void append(JSString* jsString)
+ {
+ if (jsString->isRope()) {
+ for (unsigned i = 0; i < jsString->m_fiberCount; ++i)
+ append(jsString->m_other.m_fibers[i]);
+ } else
+ append(jsString->string());
+ }
+
+ PassRefPtr<RopeImpl> release()
+ {
+ ASSERT(m_index == m_rope->fiberCount());
+ return m_rope.release();
+ }
- Fiber(void* nonFiber) : m_value(reinterpret_cast<intptr_t>(nonFiber)) {}
+ unsigned length() { return m_rope->length(); }
- void deref()
+ private:
+ unsigned m_index;
+ RefPtr<RopeImpl> m_rope;
+ };
+
+ class RopeIterator {
+ public:
+ RopeIterator() { }
+
+ RopeIterator(RopeImpl::Fiber* fibers, size_t fiberCount)
{
- if (isRope())
- rope()->deref();
- else
- string()->deref();
+ ASSERT(fiberCount);
+ m_workQueue.append(WorkItem(fibers, fiberCount));
+ skipRopes();
}
- Fiber& ref()
+ RopeIterator& operator++()
{
- if (isString())
- string()->ref();
- else
- rope()->ref();
+ WorkItem& item = m_workQueue.last();
+ ASSERT(!RopeImpl::isRope(item.fibers[item.i]));
+ if (++item.i == item.fiberCount)
+ m_workQueue.removeLast();
+ skipRopes();
return *this;
}
- unsigned refAndGetLength()
+ UStringImpl* operator*()
{
- if (isString()) {
- UString::Rep* rep = string();
- return rep->ref()->size();
- } else {
- Rope* r = rope();
- r->ref();
- return r->stringLength();
- }
+ WorkItem& item = m_workQueue.last();
+ RopeImpl::Fiber fiber = item.fibers[item.i];
+ ASSERT(!RopeImpl::isRope(fiber));
+ return static_cast<UStringImpl*>(fiber);
}
- bool isRope() { return m_value & 1; }
- Rope* rope() { return reinterpret_cast<Rope*>(m_value & ~1); }
- bool isString() { return !isRope(); }
- UString::Rep* string() { return reinterpret_cast<UString::Rep*>(m_value); }
+ bool operator!=(const RopeIterator& other) const
+ {
+ return m_workQueue != other.m_workQueue;
+ }
- void* nonFiber() { return reinterpret_cast<void*>(m_value); }
private:
- intptr_t m_value;
- };
-
- // Creates a Rope comprising of 'ropeLength' Fibers.
- // The Rope is constructed in an uninitialized state - initialize must be called for each Fiber in the Rope.
- static PassRefPtr<Rope> createOrNull(unsigned ropeLength)
- {
- void* allocation;
- if (tryFastMalloc(sizeof(Rope) + (ropeLength - 1) * sizeof(Fiber)).getValue(allocation))
- return adoptRef(new (allocation) Rope(ropeLength));
- return 0;
- }
+ struct WorkItem {
+ WorkItem(RopeImpl::Fiber* fibers, size_t fiberCount)
+ : fibers(fibers)
+ , fiberCount(fiberCount)
+ , i(0)
+ {
+ }
- ~Rope();
- void destructNonRecursive();
+ bool operator!=(const WorkItem& other) const
+ {
+ return fibers != other.fibers || fiberCount != other.fiberCount || i != other.i;
+ }
- void append(unsigned &index, Fiber& fiber)
- {
- m_fibers[index++] = fiber;
- m_stringLength += fiber.refAndGetLength();
- }
- void append(unsigned &index, const UString& string)
- {
- UString::Rep* rep = string.rep();
- m_fibers[index++] = Fiber(rep);
- m_stringLength += rep->ref()->size();
- }
- void append(unsigned& index, JSString* jsString)
- {
- if (jsString->isRope()) {
- for (unsigned i = 0; i < jsString->m_ropeLength; ++i)
- append(index, jsString->m_fibers[i]);
- } else
- append(index, jsString->string());
- }
+ RopeImpl::Fiber* fibers;
+ size_t fiberCount;
+ size_t i;
+ };
- unsigned ropeLength() { return m_ropeLength; }
- unsigned stringLength() { return m_stringLength; }
- Fiber& fibers(unsigned index) { return m_fibers[index]; }
+ void skipRopes()
+ {
+ if (m_workQueue.isEmpty())
+ return;
+
+ while (1) {
+ WorkItem& item = m_workQueue.last();
+ RopeImpl::Fiber fiber = item.fibers[item.i];
+ if (!RopeImpl::isRope(fiber))
+ break;
+ RopeImpl* rope = static_cast<RopeImpl*>(fiber);
+ if (++item.i == item.fiberCount)
+ m_workQueue.removeLast();
+ m_workQueue.append(WorkItem(rope->fibers(), rope->fiberCount()));
+ }
+ }
- private:
- Rope(unsigned ropeLength) : m_ropeLength(ropeLength), m_stringLength(0) {}
- void* operator new(size_t, void* inPlace) { return inPlace; }
-
- unsigned m_ropeLength;
- unsigned m_stringLength;
- Fiber m_fibers[1];
+ Vector<WorkItem, 16> m_workQueue;
};
ALWAYS_INLINE JSString(JSGlobalData* globalData, const UString& value)
: JSCell(globalData->stringStructure.get())
- , m_stringLength(value.size())
+ , m_length(value.size())
, m_value(value)
- , m_ropeLength(0)
+ , m_fiberCount(0)
{
+ ASSERT(!m_value.isNull());
Heap::heap(this)->reportExtraMemoryCost(value.cost());
}
enum HasOtherOwnerType { HasOtherOwner };
JSString(JSGlobalData* globalData, const UString& value, HasOtherOwnerType)
: JSCell(globalData->stringStructure.get())
- , m_stringLength(value.size())
+ , m_length(value.size())
, m_value(value)
- , m_ropeLength(0)
+ , m_fiberCount(0)
{
+ ASSERT(!m_value.isNull());
}
- JSString(JSGlobalData* globalData, PassRefPtr<UString::Rep> value, HasOtherOwnerType)
+ JSString(JSGlobalData* globalData, PassRefPtr<UStringImpl> value, HasOtherOwnerType)
: JSCell(globalData->stringStructure.get())
- , m_stringLength(value->size())
+ , m_length(value->length())
, m_value(value)
- , m_ropeLength(0)
+ , m_fiberCount(0)
{
+ ASSERT(!m_value.isNull());
}
- JSString(JSGlobalData* globalData, PassRefPtr<JSString::Rope> rope)
+ JSString(JSGlobalData* globalData, PassRefPtr<RopeImpl> rope)
: JSCell(globalData->stringStructure.get())
- , m_stringLength(rope->stringLength())
- , m_ropeLength(1)
+ , m_length(rope->length())
+ , m_fiberCount(1)
{
- m_fibers[0] = rope.releaseRef();
+ m_other.m_fibers[0] = rope.releaseRef();
}
// This constructor constructs a new string by concatenating s1 & s2.
- // This should only be called with ropeLength <= 3.
- JSString(JSGlobalData* globalData, unsigned ropeLength, JSString* s1, JSString* s2)
+ // This should only be called with fiberCount <= 3.
+ JSString(JSGlobalData* globalData, unsigned fiberCount, JSString* s1, JSString* s2)
: JSCell(globalData->stringStructure.get())
- , m_stringLength(s1->length() + s2->length())
- , m_ropeLength(ropeLength)
+ , m_length(s1->length() + s2->length())
+ , m_fiberCount(fiberCount)
{
- ASSERT(ropeLength <= s_maxInternalRopeLength);
+ ASSERT(fiberCount <= s_maxInternalRopeLength);
unsigned index = 0;
appendStringInConstruct(index, s1);
appendStringInConstruct(index, s2);
- ASSERT(ropeLength == index);
+ ASSERT(fiberCount == index);
}
// This constructor constructs a new string by concatenating s1 & s2.
- // This should only be called with ropeLength <= 3.
- JSString(JSGlobalData* globalData, unsigned ropeLength, JSString* s1, const UString& u2)
+ // This should only be called with fiberCount <= 3.
+ JSString(JSGlobalData* globalData, unsigned fiberCount, JSString* s1, const UString& u2)
: JSCell(globalData->stringStructure.get())
- , m_stringLength(s1->length() + u2.size())
- , m_ropeLength(ropeLength)
+ , m_length(s1->length() + u2.size())
+ , m_fiberCount(fiberCount)
{
- ASSERT(ropeLength <= s_maxInternalRopeLength);
+ ASSERT(fiberCount <= s_maxInternalRopeLength);
unsigned index = 0;
appendStringInConstruct(index, s1);
appendStringInConstruct(index, u2);
- ASSERT(ropeLength == index);
+ ASSERT(fiberCount == index);
}
// This constructor constructs a new string by concatenating s1 & s2.
- // This should only be called with ropeLength <= 3.
- JSString(JSGlobalData* globalData, unsigned ropeLength, const UString& u1, JSString* s2)
+ // This should only be called with fiberCount <= 3.
+ JSString(JSGlobalData* globalData, unsigned fiberCount, const UString& u1, JSString* s2)
: JSCell(globalData->stringStructure.get())
- , m_stringLength(u1.size() + s2->length())
- , m_ropeLength(ropeLength)
+ , m_length(u1.size() + s2->length())
+ , m_fiberCount(fiberCount)
{
- ASSERT(ropeLength <= s_maxInternalRopeLength);
+ ASSERT(fiberCount <= s_maxInternalRopeLength);
unsigned index = 0;
appendStringInConstruct(index, u1);
appendStringInConstruct(index, s2);
- ASSERT(ropeLength == index);
+ ASSERT(fiberCount == index);
}
// This constructor constructs a new string by concatenating v1, v2 & v3.
- // This should only be called with ropeLength <= 3 ... which since every
- // value must require a ropeLength of at least one implies that the length
+ // This should only be called with fiberCount <= 3 ... which, since every
+ // value must require a fiberCount of at least one, implies that the fiberCount
// for each value must be exactly 1!
JSString(ExecState* exec, JSValue v1, JSValue v2, JSValue v3)
: JSCell(exec->globalData().stringStructure.get())
- , m_stringLength(0)
- , m_ropeLength(s_maxInternalRopeLength)
+ , m_length(0)
+ , m_fiberCount(s_maxInternalRopeLength)
{
unsigned index = 0;
appendValueInConstructAndIncrementLength(exec, index, v1);
ASSERT(index == s_maxInternalRopeLength);
}
+ // This constructor constructs a new string by concatenating u1 & u2.
+ JSString(JSGlobalData* globalData, const UString& u1, const UString& u2)
+ : JSCell(globalData->stringStructure.get())
+ , m_length(u1.size() + u2.size())
+ , m_fiberCount(2)
+ {
+ unsigned index = 0;
+ appendStringInConstruct(index, u1);
+ appendStringInConstruct(index, u2);
+ ASSERT(index <= s_maxInternalRopeLength);
+ }
+
+ // This constructor constructs a new string by concatenating u1, u2 & u3.
+ JSString(JSGlobalData* globalData, const UString& u1, const UString& u2, const UString& u3)
+ : JSCell(globalData->stringStructure.get())
+ , m_length(u1.size() + u2.size() + u3.size())
+ , m_fiberCount(s_maxInternalRopeLength)
+ {
+ unsigned index = 0;
+ appendStringInConstruct(index, u1);
+ appendStringInConstruct(index, u2);
+ appendStringInConstruct(index, u3);
+ ASSERT(index <= s_maxInternalRopeLength);
+ }
+
JSString(JSGlobalData* globalData, const UString& value, JSStringFinalizerCallback finalizer, void* context)
: JSCell(globalData->stringStructure.get())
- , m_stringLength(value.size())
+ , m_length(value.size())
, m_value(value)
- , m_ropeLength(0)
+ , m_fiberCount(0)
{
+ ASSERT(!m_value.isNull());
// nasty hack because we can't union non-POD types
- m_fibers[0] = reinterpret_cast<void*>(reinterpret_cast<ptrdiff_t>(finalizer));
- m_fibers[1] = context;
+ m_other.m_finalizerCallback = finalizer;
+ m_other.m_finalizerContext = context;
Heap::heap(this)->reportExtraMemoryCost(value.cost());
}
~JSString()
{
ASSERT(vptr() == JSGlobalData::jsStringVPtr);
- for (unsigned i = 0; i < m_ropeLength; ++i)
- m_fibers[i].deref();
+ for (unsigned i = 0; i < m_fiberCount; ++i)
+ RopeImpl::deref(m_other.m_fibers[i]);
- if (!m_ropeLength && m_fibers[0].nonFiber()) {
- JSStringFinalizerCallback finalizer = reinterpret_cast<JSStringFinalizerCallback>(m_fibers[0].nonFiber());
- finalizer(this, m_fibers[1].nonFiber());
- }
+ if (!m_fiberCount && m_other.m_finalizerCallback)
+ m_other.m_finalizerCallback(this, m_other.m_finalizerContext);
}
const UString& value(ExecState* exec) const
resolveRope(exec);
return m_value;
}
- const UString tryGetValue() const
+ const UString& tryGetValue() const
{
if (isRope())
- UString();
+ resolveRope(0);
return m_value;
}
- unsigned length() { return m_stringLength; }
+ unsigned length() { return m_length; }
bool getStringPropertySlot(ExecState*, const Identifier& propertyName, PropertySlot&);
bool getStringPropertySlot(ExecState*, unsigned propertyName, PropertySlot&);
bool getStringPropertyDescriptor(ExecState*, const Identifier& propertyName, PropertyDescriptor&);
- bool canGetIndex(unsigned i) { return i < m_stringLength; }
+ bool canGetIndex(unsigned i) { return i < m_length; }
JSString* getIndex(ExecState*, unsigned);
+ JSString* getIndexSlowCase(ExecState*, unsigned);
+
+ JSValue replaceCharacter(ExecState*, UChar, const UString& replacement);
static PassRefPtr<Structure> createStructure(JSValue proto) { return Structure::create(proto, TypeInfo(StringType, OverridesGetOwnPropertySlot | NeedsThisConversion), AnonymousSlotCount); }
enum VPtrStealingHackType { VPtrStealingHack };
JSString(VPtrStealingHackType)
: JSCell(0)
- , m_ropeLength(0)
+ , m_fiberCount(0)
{
}
void appendStringInConstruct(unsigned& index, const UString& string)
{
- m_fibers[index++] = Rope::Fiber(string.rep()->ref());
+ UStringImpl* impl = string.rep();
+ impl->ref();
+ m_other.m_fibers[index++] = impl;
}
void appendStringInConstruct(unsigned& index, JSString* jsString)
{
if (jsString->isRope()) {
- for (unsigned i = 0; i < jsString->m_ropeLength; ++i)
- m_fibers[index++] = jsString->m_fibers[i].ref();
+ for (unsigned i = 0; i < jsString->m_fiberCount; ++i) {
+ RopeImpl::Fiber fiber = jsString->m_other.m_fibers[i];
+ fiber->ref();
+ m_other.m_fibers[index++] = fiber;
+ }
} else
appendStringInConstruct(index, jsString->string());
}
if (v.isString()) {
ASSERT(asCell(v)->isString());
JSString* s = static_cast<JSString*>(asCell(v));
- ASSERT(s->ropeLength() == 1);
+ ASSERT(s->size() == 1);
appendStringInConstruct(index, s);
- m_stringLength += s->length();
+ m_length += s->length();
} else {
UString u(v.toString(exec));
- m_fibers[index++] = Rope::Fiber(u.rep()->ref());
- m_stringLength += u.size();
+ UStringImpl* impl = u.rep();
+ impl->ref();
+ m_other.m_fibers[index++] = impl;
+ m_length += u.size();
}
}
virtual UString toString(ExecState*) const;
virtual JSObject* toThisObject(ExecState*) const;
- virtual UString toThisString(ExecState*) const;
- virtual JSString* toThisJSString(ExecState*);
// Actually getPropertySlot, not getOwnPropertySlot (see JSCell).
virtual bool getOwnPropertySlot(ExecState*, const Identifier& propertyName, PropertySlot&);
static const unsigned s_maxInternalRopeLength = 3;
- // A string is represented either by a UString or a Rope.
- unsigned m_stringLength;
+ // A string is represented either by a UString or a RopeImpl.
+ unsigned m_length;
mutable UString m_value;
- mutable unsigned m_ropeLength;
- mutable Rope::Fiber m_fibers[s_maxInternalRopeLength];
+ mutable unsigned m_fiberCount;
+ // This structure exists to support a temporary workaround for a GC issue.
+ struct JSStringFinalizerStruct {
+ JSStringFinalizerStruct() : m_finalizerCallback(0) {}
+ union {
+ mutable RopeImpl::Fiber m_fibers[s_maxInternalRopeLength];
+ struct {
+ JSStringFinalizerCallback m_finalizerCallback;
+ void* m_finalizerContext;
+ };
+ };
+ } m_other;
- bool isRope() const { return m_ropeLength; }
+ bool isRope() const { return m_fiberCount; }
UString& string() { ASSERT(!isRope()); return m_value; }
- unsigned ropeLength() { return m_ropeLength ? m_ropeLength : 1; }
+ unsigned size() { return m_fiberCount ? m_fiberCount : 1; }
friend JSValue jsString(ExecState* exec, JSString* s1, JSString* s2);
friend JSValue jsString(ExecState* exec, const UString& u1, JSString* s2);
return fixupVPtr(globalData, new (globalData) JSString(globalData, UString(&c, 1)));
}
- inline JSString* jsSingleCharacterSubstring(JSGlobalData* globalData, const UString& s, unsigned offset)
+ inline JSString* jsSingleCharacterSubstring(ExecState* exec, const UString& s, unsigned offset)
{
+ JSGlobalData* globalData = &exec->globalData();
ASSERT(offset < static_cast<unsigned>(s.size()));
UChar c = s.data()[offset];
if (c <= 0xFF)
return globalData->smallStrings.singleCharacterString(globalData, c);
- return fixupVPtr(globalData, new (globalData) JSString(globalData, UString(UString::Rep::create(s.rep(), offset, 1))));
+ return fixupVPtr(globalData, new (globalData) JSString(globalData, UString(UStringImpl::create(s.rep(), offset, 1))));
}
inline JSString* jsNontrivialString(JSGlobalData* globalData, const char* s)
inline JSString* JSString::getIndex(ExecState* exec, unsigned i)
{
ASSERT(canGetIndex(i));
- return jsSingleCharacterSubstring(&exec->globalData(), value(exec), i);
+ if (isRope())
+ return getIndexSlowCase(exec, i);
+ ASSERT(i < m_value.size());
+ return jsSingleCharacterSubstring(exec, m_value, i);
}
inline JSString* jsString(JSGlobalData* globalData, const UString& s)
if (c <= 0xFF)
return globalData->smallStrings.singleCharacterString(globalData, c);
}
- return fixupVPtr(globalData, new (globalData) JSString(globalData, UString(UString::Rep::create(s.rep(), offset, length)), JSString::HasOtherOwner));
+ return fixupVPtr(globalData, new (globalData) JSString(globalData, UString(UStringImpl::create(s.rep(), offset, length)), JSString::HasOtherOwner));
}
inline JSString* jsOwnedString(JSGlobalData* globalData, const UString& s)
inline JSString* jsEmptyString(ExecState* exec) { return jsEmptyString(&exec->globalData()); }
inline JSString* jsString(ExecState* exec, const UString& s) { return jsString(&exec->globalData(), s); }
inline JSString* jsSingleCharacterString(ExecState* exec, UChar c) { return jsSingleCharacterString(&exec->globalData(), c); }
- inline JSString* jsSingleCharacterSubstring(ExecState* exec, const UString& s, unsigned offset) { return jsSingleCharacterSubstring(&exec->globalData(), s, offset); }
inline JSString* jsSubstring(ExecState* exec, const UString& s, unsigned offset, unsigned length) { return jsSubstring(&exec->globalData(), s, offset, length); }
inline JSString* jsNontrivialString(ExecState* exec, const UString& s) { return jsNontrivialString(&exec->globalData(), s); }
inline JSString* jsNontrivialString(ExecState* exec, const char* s) { return jsNontrivialString(&exec->globalData(), s); }
ALWAYS_INLINE bool JSString::getStringPropertySlot(ExecState* exec, const Identifier& propertyName, PropertySlot& slot)
{
if (propertyName == exec->propertyNames().length) {
- slot.setValue(jsNumber(exec, m_stringLength));
+ slot.setValue(jsNumber(exec, m_length));
return true;
}
bool isStrictUInt32;
unsigned i = propertyName.toStrictUInt32(&isStrictUInt32);
- if (isStrictUInt32 && i < m_stringLength) {
- slot.setValue(jsSingleCharacterSubstring(exec, value(exec), i));
+ if (isStrictUInt32 && i < m_length) {
+ slot.setValue(getIndex(exec, i));
return true;
}
ALWAYS_INLINE bool JSString::getStringPropertySlot(ExecState* exec, unsigned propertyName, PropertySlot& slot)
{
- if (propertyName < m_stringLength) {
- slot.setValue(jsSingleCharacterSubstring(exec, value(exec), propertyName));
+ if (propertyName < m_length) {
+ slot.setValue(getIndex(exec, propertyName));
return true;
}
// --- JSValue inlines ----------------------------
- inline JSString* JSValue::toThisJSString(ExecState* exec)
- {
- return isCell() ? asCell()->toThisJSString(exec) : jsString(exec, toString(exec));
- }
-
inline UString JSValue::toString(ExecState* exec) const
{
if (isString())
--- /dev/null
+/*
+ * Copyright (C) 2009 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef JSStringBuilder_h
+#define JSStringBuilder_h
+
+#include "ExceptionHelpers.h"
+#include "JSString.h"
+#include "Vector.h"
+
+namespace JSC {
+
+class JSStringBuilder {
+public:
+ JSStringBuilder()
+ : m_okay(true)
+ {
+ }
+
+ void append(const UChar u)
+ {
+ m_okay &= buffer.tryAppend(&u, 1);
+ }
+
+ void append(const char* str)
+ {
+ append(str, strlen(str));
+ }
+
+ void append(const char* str, size_t len)
+ {
+ m_okay &= buffer.tryReserveCapacity(buffer.size() + len);
+ for (size_t i = 0; i < len; i++) {
+ UChar u = static_cast<unsigned char>(str[i]);
+ m_okay &= buffer.tryAppend(&u, 1);
+ }
+ }
+
+ void append(const UChar* str, size_t len)
+ {
+ m_okay &= buffer.tryAppend(str, len);
+ }
+
+ void append(const UString& str)
+ {
+ m_okay &= buffer.tryAppend(str.data(), str.size());
+ }
+
+ JSValue build(ExecState* exec)
+ {
+ if (!m_okay)
+ return throwOutOfMemoryError(exec);
+ buffer.shrinkToFit();
+ if (!buffer.data())
+ return throwOutOfMemoryError(exec);
+ return jsString(exec, UString::adopt(buffer));
+ }
+
+protected:
+ Vector<UChar, 64> buffer;
+ bool m_okay;
+};
+
+template<typename StringType1, typename StringType2>
+inline JSValue jsMakeNontrivialString(ExecState* exec, StringType1 string1, StringType2 string2)
+{
+ PassRefPtr<UStringImpl> result = tryMakeString(string1, string2);
+ if (!result)
+ return throwOutOfMemoryError(exec);
+ return jsNontrivialString(exec, result);
+}
+
+template<typename StringType1, typename StringType2, typename StringType3>
+inline JSValue jsMakeNontrivialString(ExecState* exec, StringType1 string1, StringType2 string2, StringType3 string3)
+{
+ PassRefPtr<UStringImpl> result = tryMakeString(string1, string2, string3);
+ if (!result)
+ return throwOutOfMemoryError(exec);
+ return jsNontrivialString(exec, result);
+}
+
+template<typename StringType1, typename StringType2, typename StringType3, typename StringType4>
+inline JSValue jsMakeNontrivialString(ExecState* exec, StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4)
+{
+ PassRefPtr<UStringImpl> result = tryMakeString(string1, string2, string3, string4);
+ if (!result)
+ return throwOutOfMemoryError(exec);
+ return jsNontrivialString(exec, result);
+}
+
+template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5>
+inline JSValue jsMakeNontrivialString(ExecState* exec, StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5)
+{
+ PassRefPtr<UStringImpl> result = tryMakeString(string1, string2, string3, string4, string5);
+ if (!result)
+ return throwOutOfMemoryError(exec);
+ return jsNontrivialString(exec, result);
+}
+
+template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5, typename StringType6>
+inline JSValue jsMakeNontrivialString(ExecState* exec, StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6)
+{
+ PassRefPtr<UStringImpl> result = tryMakeString(string1, string2, string3, string4, string5, string6);
+ if (!result)
+ return throwOutOfMemoryError(exec);
+ return jsNontrivialString(exec, result);
+}
+
+}
+
+#endif
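// Illustrative sketch (not part of the patch): the same "accumulate a success flag,
// report failure once at build time" pattern as JSStringBuilder above, using
// hypothetical standard-library types. Every append records whether it succeeded;
// callers only check the combined result when they ask for the final string.
#include <new>
#include <string>

class FallibleStringBuilder {
public:
    FallibleStringBuilder() : m_okay(true) { }

    void append(const std::string& piece)
    {
        if (!m_okay)
            return;                      // a previous failure poisons the builder
        try {
            m_buffer += piece;           // may throw std::bad_alloc
        } catch (const std::bad_alloc&) {
            m_okay = false;              // remember the failure; report it at build time
        }
    }

    // Returns false if any append failed, mirroring build() above returning an
    // out-of-memory error.
    bool build(std::string& result) const
    {
        if (!m_okay)
            return false;
        result = m_buffer;
        return true;
    }

private:
    std::string m_buffer;
    bool m_okay;
};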
TypeInfo(JSType type, unsigned flags = 0)
: m_type(type)
{
+ ASSERT(flags <= 0xFF);
+ ASSERT(type <= 0xFF);
// ImplementsDefaultHasInstance means (ImplementsHasInstance & !OverridesHasInstance)
if ((flags & (ImplementsHasInstance | OverridesHasInstance)) == ImplementsHasInstance)
m_flags = flags | ImplementsDefaultHasInstance;
m_flags = flags;
}
- JSType type() const { return m_type; }
+ JSType type() const { return (JSType)m_type; }
bool masqueradesAsUndefined() const { return m_flags & MasqueradesAsUndefined; }
bool implementsHasInstance() const { return m_flags & ImplementsHasInstance; }
unsigned flags() const { return m_flags; }
private:
- JSType m_type;
- unsigned m_flags;
+ unsigned char m_type;
+ unsigned char m_flags;
};
}
NEVER_INLINE double nonInlineNaN()
{
+#if OS(SYMBIAN)
+ return nanval();
+#else
return std::numeric_limits<double>::quiet_NaN();
+#endif
}
} // namespace JSC
friend class JIT;
friend class JITStubs;
friend class JITStubCall;
+ friend class JSInterfaceJIT;
+ friend class SpecializedThunkJIT;
public:
static EncodedJSValue encode(JSValue value);
JSValue get(ExecState*, unsigned propertyName) const;
JSValue get(ExecState*, unsigned propertyName, PropertySlot&) const;
void put(ExecState*, const Identifier& propertyName, JSValue, PutPropertySlot&);
+ void putDirect(ExecState*, const Identifier& propertyName, JSValue, PutPropertySlot&);
void put(ExecState*, unsigned propertyName, JSValue);
bool needsThisConversion() const;
JSObject* toThisObject(ExecState*) const;
UString toThisString(ExecState*) const;
- JSString* toThisJSString(ExecState*);
+ JSString* toThisJSString(ExecState*) const;
static bool equal(ExecState* exec, JSValue v1, JSValue v2);
static bool equalSlowCase(ExecState* exec, JSValue v1, JSValue v2);
JSObject* toObjectSlowCase(ExecState*) const;
JSObject* toThisObjectSlowCase(ExecState*) const;
+ JSObject* synthesizePrototype(ExecState*) const;
+ JSObject* synthesizeObject(ExecState*) const;
+
+#if USE(JSVALUE32_64)
enum { Int32Tag = 0xffffffff };
enum { CellTag = 0xfffffffe };
enum { TrueTag = 0xfffffffd };
enum { UndefinedTag = 0xfffffffa };
enum { EmptyValueTag = 0xfffffff9 };
enum { DeletedValueTag = 0xfffffff8 };
-
+
enum { LowestTag = DeletedValueTag };
-
+
uint32_t tag() const;
int32_t payload() const;
- JSObject* synthesizePrototype(ExecState*) const;
- JSObject* synthesizeObject(ExecState*) const;
-
-#if USE(JSVALUE32_64)
union {
EncodedJSValue asEncodedJSValue;
double asDouble;
virtual void getOwnPropertyNames(ExecState*, PropertyNameArray&, EnumerationMode mode = ExcludeDontEnumProperties);
virtual bool isVariableObject() const;
- virtual bool isDynamicScope() const = 0;
+ virtual bool isDynamicScope(bool& requiresDynamicChecks) const = 0;
Register& registerAt(int index) const { return d->registers[index]; }
virtual bool deleteProperty(ExecState*, const Identifier&) { ASSERT_NOT_REACHED(); return false; }
virtual bool deleteProperty(ExecState*, unsigned) { ASSERT_NOT_REACHED(); return false; }
virtual JSObject* toThisObject(ExecState*) const { ASSERT_NOT_REACHED(); return 0; }
- virtual UString toThisString(ExecState*) const { ASSERT_NOT_REACHED(); return ""; }
- virtual JSString* toThisJSString(ExecState*) { ASSERT_NOT_REACHED(); return 0; }
virtual JSValue getJSNumber() { ASSERT_NOT_REACHED(); return jsNull(); }
virtual bool getOwnPropertySlot(ExecState*, const Identifier&, PropertySlot&) { ASSERT_NOT_REACHED(); return false; }
virtual bool getOwnPropertySlot(ExecState*, unsigned, PropertySlot&) { ASSERT_NOT_REACHED(); return false; }
if (m_ptr >= m_end || *m_ptr != '"')
return TokError;
- token.stringToken = builder.release();
+ token.stringToken = builder.build();
token.type = TokString;
token.end = ++m_ptr;
return TokString;
entry = entry->next();
}
- entry->initialize(identifier, values[i].attributes, values[i].value1, values[i].value2);
+ entry->initialize(identifier, values[i].attributes, values[i].value1, values[i].value2
+#if ENABLE(JIT)
+ , values[i].generator
+#endif
+ );
}
table = entries;
}
JSValue* location = thisObj->getDirectLocation(propertyName);
if (!location) {
- InternalFunction* function = new (exec) NativeFunctionWrapper(exec, exec->lexicalGlobalObject()->prototypeFunctionStructure(), entry->functionLength(), propertyName, entry->function());
+ InternalFunction* function;
+#if ENABLE(JIT) && ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
+ if (entry->generator())
+ function = new (exec) NativeFunctionWrapper(exec, exec->lexicalGlobalObject()->prototypeFunctionStructure(), entry->functionLength(), propertyName, exec->globalData().getThunk(entry->generator()), entry->function());
+ else
+#endif
+ function = new (exec) NativeFunctionWrapper(exec, exec->lexicalGlobalObject()->prototypeFunctionStructure(), entry->functionLength(), propertyName, entry->function());
thisObj->putDirectFunction(propertyName, function, entry->attributes());
location = thisObj->getDirectLocation(propertyName);
#endif
namespace JSC {
-
// Hash table generated by the create_hash_table script.
struct HashTableValue {
const char* key; // property name
unsigned char attributes; // JSObject attributes
intptr_t value1;
intptr_t value2;
+#if ENABLE(JIT)
+ ThunkGenerator generator;
+#endif
};
// FIXME: There is no reason this get function can't be simpler.
class HashEntry : public FastAllocBase {
public:
- void initialize(UString::Rep* key, unsigned char attributes, intptr_t v1, intptr_t v2)
+ void initialize(UString::Rep* key, unsigned char attributes, intptr_t v1, intptr_t v2
+#if ENABLE(JIT)
+ , ThunkGenerator generator = 0
+#endif
+ )
{
m_key = key;
m_attributes = attributes;
m_u.store.value1 = v1;
m_u.store.value2 = v2;
+#if ENABLE(JIT)
+ m_u.function.generator = generator;
+#endif
m_next = 0;
}
unsigned char attributes() const { return m_attributes; }
+#if ENABLE(JIT)
+ ThunkGenerator generator() const { ASSERT(m_attributes & Function); return m_u.function.generator; }
+#endif
NativeFunction function() const { ASSERT(m_attributes & Function); return m_u.function.functionValue; }
unsigned char functionLength() const { ASSERT(m_attributes & Function); return static_cast<unsigned char>(m_u.function.length); }
struct {
NativeFunction functionValue;
intptr_t length; // number of arguments for function
+#if ENABLE(JIT)
+ ThunkGenerator generator;
+#endif
} function;
struct {
GetFunction get;
if (entry->attributes() & Function)
setUpStaticFunctionSlot(exec, entry, thisObj, propertyName, slot);
else
- slot.setCustom(thisObj, entry->propertyGetter());
+ slot.setCacheableCustom(thisObj, entry->propertyGetter());
return true;
}
ASSERT(!(entry->attributes() & Function));
- slot.setCustom(thisObj, entry->propertyGetter());
+ slot.setCacheableCustom(thisObj, entry->propertyGetter());
return true;
}
#include "config.h"
#include "MathObject.h"
+#include "Lookup.h"
#include "ObjectPrototype.h"
#include "Operations.h"
#include <time.h>
JSValue JSC_HOST_CALL mathProtoFuncRandom(ExecState* exec, JSObject*, JSValue, const ArgList&)
{
- return jsDoubleNumber(exec, exec->globalData().weakRandom.get());
+ return jsDoubleNumber(exec, exec->lexicalGlobalObject()->weakRandomNumber());
}
JSValue JSC_HOST_CALL mathProtoFuncRound(ExecState* exec, JSObject*, JSValue, const ArgList& args)
{
double arg = args.at(0).toNumber(exec);
- if (signbit(arg) && arg >= -0.5)
- return jsNumber(exec, -0.0);
- return jsNumber(exec, floor(arg + 0.5));
+ double integer = ceil(arg);
+ return jsNumber(exec, integer - (integer - arg > 0.5));
}
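// Illustrative sketch (not part of the patch): why "take the ceiling, then subtract one
// when the gap to the argument exceeds 0.5" gives round-half-up behaviour, including
// negative halves and the sign of zero. Standalone check with hypothetical names.
#include <cassert>
#include <cmath>

inline double roundHalfTowardPositiveInfinity(double arg)
{
    double integer = std::ceil(arg);
    return integer - (integer - arg > 0.5);
}

inline void checkRounding()
{
    assert(roundHalfTowardPositiveInfinity(2.4) == 2.0);
    assert(roundHalfTowardPositiveInfinity(2.5) == 3.0);    // halves round up
    assert(roundHalfTowardPositiveInfinity(-2.5) == -2.0);  // ... even when negative
    assert(roundHalfTowardPositiveInfinity(-2.6) == -3.0);
    assert(std::signbit(roundHalfTowardPositiveInfinity(-0.4)));  // -0.4 rounds to -0.0
}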
JSValue JSC_HOST_CALL mathProtoFuncSin(ExecState* exec, JSObject*, JSValue, const ArgList& args)
{
- return jsDoubleNumber(exec, sin(args.at(0).toNumber(exec)));
+ return exec->globalData().cachedSin(exec, args.at(0).toNumber(exec));
}
JSValue JSC_HOST_CALL mathProtoFuncSqrt(ExecState* exec, JSObject*, JSValue, const ArgList& args)
const ClassInfo NativeErrorConstructor::info = { "Function", &InternalFunction::info, 0, 0 };
-NativeErrorConstructor::NativeErrorConstructor(ExecState* exec, NonNullPassRefPtr<Structure> structure, NativeErrorPrototype* nativeErrorPrototype)
- : InternalFunction(&exec->globalData(), structure, Identifier(exec, nativeErrorPrototype->getDirect(exec->propertyNames().name).getString(exec)))
- , m_errorStructure(ErrorInstance::createStructure(nativeErrorPrototype))
+NativeErrorConstructor::NativeErrorConstructor(ExecState* exec, NonNullPassRefPtr<Structure> structure, NonNullPassRefPtr<Structure> prototypeStructure, const UString& nameAndMessage)
+ : InternalFunction(&exec->globalData(), structure, Identifier(exec, nameAndMessage))
{
+ NativeErrorPrototype* prototype = new (exec) NativeErrorPrototype(exec, prototypeStructure, nameAndMessage, this);
+
putDirect(exec->propertyNames().length, jsNumber(exec, 1), DontDelete | ReadOnly | DontEnum); // ECMA 15.11.7.5
- putDirect(exec->propertyNames().prototype, nativeErrorPrototype, DontDelete | ReadOnly | DontEnum);
+ putDirect(exec->propertyNames().prototype, prototype, DontDelete | ReadOnly | DontEnum);
+ m_errorStructure = ErrorInstance::createStructure(prototype);
}
+
ErrorInstance* NativeErrorConstructor::construct(ExecState* exec, const ArgList& args)
{
ErrorInstance* object = new (exec) ErrorInstance(m_errorStructure);
class NativeErrorConstructor : public InternalFunction {
public:
- NativeErrorConstructor(ExecState*, NonNullPassRefPtr<Structure>, NativeErrorPrototype*);
+ NativeErrorConstructor(ExecState*, NonNullPassRefPtr<Structure> structure, NonNullPassRefPtr<Structure> prototypeStructure, const UString&);
static const ClassInfo info;
#include "ErrorPrototype.h"
#include "JSString.h"
+#include "NativeErrorConstructor.h"
#include "UString.h"
namespace JSC {
ASSERT_CLASS_FITS_IN_CELL(NativeErrorPrototype);
-NativeErrorPrototype::NativeErrorPrototype(ExecState* exec, NonNullPassRefPtr<Structure> structure, const UString& name, const UString& message)
+NativeErrorPrototype::NativeErrorPrototype(ExecState* exec, NonNullPassRefPtr<Structure> structure, const UString& nameAndMessage, NativeErrorConstructor* constructor)
: JSObject(structure)
{
- putDirect(exec->propertyNames().name, jsString(exec, name), 0);
- putDirect(exec->propertyNames().message, jsString(exec, message), 0);
+ putDirect(exec->propertyNames().name, jsString(exec, nameAndMessage), 0);
+ putDirect(exec->propertyNames().message, jsString(exec, nameAndMessage), 0);
+ putDirect(exec->propertyNames().constructor, constructor, DontEnum);
}
} // namespace JSC
#include "JSObject.h"
namespace JSC {
+ class NativeErrorConstructor;
class NativeErrorPrototype : public JSObject {
public:
- NativeErrorPrototype(ExecState*, NonNullPassRefPtr<Structure>, const UString& name, const UString& message);
+ NativeErrorPrototype(ExecState*, NonNullPassRefPtr<Structure>, const UString&, NativeErrorConstructor*);
};
} // namespace JSC
#include "config.h"
#include "NumberConstructor.h"
+#include "Lookup.h"
#include "NumberObject.h"
#include "NumberPrototype.h"
ASSERT_CLASS_FITS_IN_CELL(NumberConstructor);
-static JSValue numberConstructorNaNValue(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue numberConstructorNegInfinity(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue numberConstructorPosInfinity(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue numberConstructorMaxValue(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue numberConstructorMinValue(ExecState*, const Identifier&, const PropertySlot&);
+static JSValue numberConstructorNaNValue(ExecState*, JSValue, const Identifier&);
+static JSValue numberConstructorNegInfinity(ExecState*, JSValue, const Identifier&);
+static JSValue numberConstructorPosInfinity(ExecState*, JSValue, const Identifier&);
+static JSValue numberConstructorMaxValue(ExecState*, JSValue, const Identifier&);
+static JSValue numberConstructorMinValue(ExecState*, JSValue, const Identifier&);
} // namespace JSC
return getStaticValueDescriptor<NumberConstructor, InternalFunction>(exec, ExecState::numberTable(exec), this, propertyName, descriptor);
}
-static JSValue numberConstructorNaNValue(ExecState* exec, const Identifier&, const PropertySlot&)
+static JSValue numberConstructorNaNValue(ExecState* exec, JSValue, const Identifier&)
{
return jsNaN(exec);
}
-static JSValue numberConstructorNegInfinity(ExecState* exec, const Identifier&, const PropertySlot&)
+static JSValue numberConstructorNegInfinity(ExecState* exec, JSValue, const Identifier&)
{
return jsNumber(exec, -Inf);
}
-static JSValue numberConstructorPosInfinity(ExecState* exec, const Identifier&, const PropertySlot&)
+static JSValue numberConstructorPosInfinity(ExecState* exec, JSValue, const Identifier&)
{
return jsNumber(exec, Inf);
}
-static JSValue numberConstructorMaxValue(ExecState* exec, const Identifier&, const PropertySlot&)
+static JSValue numberConstructorMaxValue(ExecState* exec, JSValue, const Identifier&)
{
return jsNumber(exec, 1.7976931348623157E+308);
}
-static JSValue numberConstructorMinValue(ExecState* exec, const Identifier&, const PropertySlot&)
+static JSValue numberConstructorMinValue(ExecState* exec, JSValue, const Identifier&)
{
return jsNumber(exec, 5E-324);
}
#include "Error.h"
#include "JSFunction.h"
#include "JSString.h"
+#include "JSStringBuilder.h"
#include "Operations.h"
#include "PrototypeFunction.h"
#include "StringBuilder.h"
builder.append((const char*)(buf.data()));
}
- return builder.release();
+ return builder.build();
}
static UString charSequence(char c, int count)
if (!v)
return throwError(exec, TypeError);
- double radixAsDouble = args.at(0).toInteger(exec); // nan -> 0
- if (radixAsDouble == 10 || args.at(0).isUndefined())
+ JSValue radixValue = args.at(0);
+ int radix;
+ if (radixValue.isInt32())
+ radix = radixValue.asInt32();
+ else if (radixValue.isUndefined())
+ radix = 10;
+ else
+ radix = static_cast<int>(radixValue.toInteger(exec)); // nan -> 0
+
+ if (radix == 10)
return jsString(exec, v.toString(exec));
- if (radixAsDouble < 2 || radixAsDouble > 36)
+ static const char* const digits = "0123456789abcdefghijklmnopqrstuvwxyz";
+
+ // Fast path for number to character conversion.
+ if (radix == 36) {
+ if (v.isInt32()) {
+ int x = v.asInt32();
+ if (static_cast<unsigned>(x) < 36) { // Exclude negatives
+ JSGlobalData* globalData = &exec->globalData();
+ return globalData->smallStrings.singleCharacterString(globalData, digits[x]);
+ }
+ }
+ }
+
+ if (radix < 2 || radix > 36)
return throwError(exec, RangeError, "toString() radix argument must be between 2 and 36");
- int radix = static_cast<int>(radixAsDouble);
- const char digits[] = "0123456789abcdefghijklmnopqrstuvwxyz";
// INT_MAX results in 1024 characters left of the dot with radix 2
// give the same space on the right side. Safety checks are in place
// unless someone finds a precise rule.
for (int i = 0; i < f + 1 - k; i++)
z.append('0');
z.append(m);
- m = z.release();
+ m = z.build();
k = f + 1;
- ASSERT(k == m.size());
+ ASSERT(k == static_cast<int>(m.size()));
}
int kMinusf = k - f;
- if (kMinusf < m.size())
+ if (kMinusf < static_cast<int>(m.size()))
return jsString(exec, makeString(s, m.substr(0, kMinusf), ".", m.substr(kMinusf)));
return jsString(exec, makeString(s, m.substr(0, kMinusf)));
}
if (m.size() > 1)
m = makeString(m.substr(0, 1), ".", m.substr(1));
if (e >= 0)
- return jsNontrivialString(exec, makeString(s, m, "e+", UString::from(e)));
- return jsNontrivialString(exec, makeString(s, m, "e-", UString::from(-e)));
+ return jsMakeNontrivialString(exec, s, m, "e+", UString::from(e));
+ return jsMakeNontrivialString(exec, s, m, "e-", UString::from(-e));
}
} else {
m = charSequence('0', precision);
if (e == precision - 1)
return jsString(exec, makeString(s, m));
if (e >= 0) {
- if (e + 1 < m.size())
+ if (e + 1 < static_cast<int>(m.size()))
return jsString(exec, makeString(s, m.substr(0, e + 1), ".", m.substr(e + 1)));
return jsString(exec, makeString(s, m));
}
- return jsNontrivialString(exec, makeString(s, "0.", charSequence('0', -(e + 1)), m));
+ return jsMakeNontrivialString(exec, s, "0.", charSequence('0', -(e + 1)), m);
}
} // namespace JSC
UString add(int i)
{
+ if (static_cast<unsigned>(i) < cacheSize)
+ return lookupSmallString(static_cast<unsigned>(i));
CacheEntry<int>& entry = lookup(i);
if (i == entry.key && !entry.value.isNull())
return entry.value;
return entry.value;
}
+ UString add(unsigned i)
+ {
+ if (i < cacheSize)
+ return lookupSmallString(static_cast<unsigned>(i));
+ CacheEntry<unsigned>& entry = lookup(i);
+ if (i == entry.key && !entry.value.isNull())
+ return entry.value;
+ entry.key = i;
+ entry.value = UString::from(i);
+ return entry.value;
+ }
private:
static const size_t cacheSize = 64;
CacheEntry<double>& lookup(double d) { return doubleCache[WTF::FloatHash<double>::hash(d) & (cacheSize - 1)]; }
CacheEntry<int>& lookup(int i) { return intCache[WTF::IntHash<int>::hash(i) & (cacheSize - 1)]; }
+ CacheEntry<unsigned>& lookup(unsigned i) { return unsignedCache[WTF::IntHash<unsigned>::hash(i) & (cacheSize - 1)]; }
+ const UString& lookupSmallString(unsigned i)
+ {
+ ASSERT(i < cacheSize);
+ if (smallIntCache[i].isNull())
+ smallIntCache[i] = UString::from(i);
+ return smallIntCache[i];
+ }
CacheEntry<double> doubleCache[cacheSize];
CacheEntry<int> intCache[cacheSize];
+ CacheEntry<unsigned> unsignedCache[cacheSize];
+ UString smallIntCache[cacheSize];
};
} // namespace JSC
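// Illustrative sketch (not part of the patch): the two-level caching idea used in the
// number-to-string cache above, with hypothetical standard-library types. Small values
// come from a dense array filled at most once per value; everything else goes through a
// fixed-size table of single entries indexed by a (here trivial) hash, where a
// collision simply overwrites the old entry.
#include <string>

class NumberToStringCache {
public:
    std::string add(unsigned i)
    {
        if (i < cacheSize) {
            // Dense cache: computed once per small value, then reused forever.
            if (m_smallStrings[i].empty())
                m_smallStrings[i] = std::to_string(i);
            return m_smallStrings[i];
        }
        // Sparse cache: one slot per bucket; masking stands in for a real hash function.
        Entry& entry = m_entries[i & (cacheSize - 1)];
        if (!entry.value.empty() && entry.key == i)
            return entry.value;
        entry.key = i;
        entry.value = std::to_string(i);
        return entry.value;
    }

private:
    static const unsigned cacheSize = 64;
    struct Entry {
        Entry() : key(0) { }
        unsigned key;
        std::string value;
    };
    std::string m_smallStrings[cacheSize];
    Entry m_entries[cacheSize];
};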
#include "Error.h"
#include "JSFunction.h"
#include "JSString.h"
+#include "JSStringBuilder.h"
#include "PrototypeFunction.h"
namespace JSC {
JSValue JSC_HOST_CALL objectProtoFuncToString(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
- return jsNontrivialString(exec, makeString("[object ", thisValue.toThisObject(exec)->className(), "]"));
+ return jsMakeNontrivialString(exec, "[object ", thisValue.toThisObject(exec)->className(), "]");
}
} // namespace JSC
return strictEqualSlowCaseInline(exec, v1, v2);
}
-NEVER_INLINE JSValue throwOutOfMemoryError(ExecState* exec)
-{
- JSObject* error = Error::create(exec, GeneralError, "Out of memory");
- exec->setException(error);
- return error;
-}
-
NEVER_INLINE JSValue jsAddSlowCase(CallFrame* callFrame, JSValue v1, JSValue v2)
{
// exception for the Date exception in defaultValue()
#ifndef Operations_h
#define Operations_h
+#include "ExceptionHelpers.h"
#include "Interpreter.h"
#include "JSImmediate.h"
#include "JSNumberCell.h"
namespace JSC {
- NEVER_INLINE JSValue throwOutOfMemoryError(ExecState*);
NEVER_INLINE JSValue jsAddSlowCase(CallFrame*, JSValue, JSValue);
JSValue jsTypeStringForValue(CallFrame*, JSValue);
bool jsIsObjectType(JSValue);
ALWAYS_INLINE JSValue jsString(ExecState* exec, JSString* s1, JSString* s2)
{
- if (!s1->length())
+ unsigned length1 = s1->length();
+ if (!length1)
return s2;
- if (!s2->length())
+ unsigned length2 = s2->length();
+ if (!length2)
return s1;
+ if ((length1 + length2) < length1)
+ return throwOutOfMemoryError(exec);
- unsigned ropeLength = s1->ropeLength() + s2->ropeLength();
+ unsigned fiberCount = s1->size() + s2->size();
JSGlobalData* globalData = &exec->globalData();
- if (ropeLength <= JSString::s_maxInternalRopeLength)
- return new (globalData) JSString(globalData, ropeLength, s1, s2);
+ if (fiberCount <= JSString::s_maxInternalRopeLength)
+ return new (globalData) JSString(globalData, fiberCount, s1, s2);
- unsigned index = 0;
- RefPtr<JSString::Rope> rope = JSString::Rope::createOrNull(ropeLength);
- if (UNLIKELY(!rope))
+ JSString::RopeBuilder ropeBuilder(fiberCount);
+ if (UNLIKELY(ropeBuilder.isOutOfMemory()))
return throwOutOfMemoryError(exec);
- rope->append(index, s1);
- rope->append(index, s2);
- ASSERT(index == ropeLength);
- return new (globalData) JSString(globalData, rope.release());
+ ropeBuilder.append(s1);
+ ropeBuilder.append(s2);
+ return new (globalData) JSString(globalData, ropeBuilder.release());
}
ALWAYS_INLINE JSValue jsString(ExecState* exec, const UString& u1, JSString* s2)
{
- unsigned ropeLength = 1 + s2->ropeLength();
+ unsigned length1 = u1.size();
+ if (!length1)
+ return s2;
+ unsigned length2 = s2->length();
+ if (!length2)
+ return jsString(exec, u1);
+ if ((length1 + length2) < length1)
+ return throwOutOfMemoryError(exec);
+
+ unsigned fiberCount = 1 + s2->size();
JSGlobalData* globalData = &exec->globalData();
- if (ropeLength <= JSString::s_maxInternalRopeLength)
- return new (globalData) JSString(globalData, ropeLength, u1, s2);
+ if (fiberCount <= JSString::s_maxInternalRopeLength)
+ return new (globalData) JSString(globalData, fiberCount, u1, s2);
- unsigned index = 0;
- RefPtr<JSString::Rope> rope = JSString::Rope::createOrNull(ropeLength);
- if (UNLIKELY(!rope))
+ JSString::RopeBuilder ropeBuilder(fiberCount);
+ if (UNLIKELY(ropeBuilder.isOutOfMemory()))
return throwOutOfMemoryError(exec);
- rope->append(index, u1);
- rope->append(index, s2);
- ASSERT(index == ropeLength);
- return new (globalData) JSString(globalData, rope.release());
+ ropeBuilder.append(u1);
+ ropeBuilder.append(s2);
+ return new (globalData) JSString(globalData, ropeBuilder.release());
}
ALWAYS_INLINE JSValue jsString(ExecState* exec, JSString* s1, const UString& u2)
{
- unsigned ropeLength = s1->ropeLength() + 1;
+ unsigned length1 = s1->length();
+ if (!length1)
+ return jsString(exec, u2);
+ unsigned length2 = u2.size();
+ if (!length2)
+ return s1;
+ if ((length1 + length2) < length1)
+ return throwOutOfMemoryError(exec);
+
+ unsigned fiberCount = s1->size() + 1;
JSGlobalData* globalData = &exec->globalData();
- if (ropeLength <= JSString::s_maxInternalRopeLength)
- return new (globalData) JSString(globalData, ropeLength, s1, u2);
+ if (fiberCount <= JSString::s_maxInternalRopeLength)
+ return new (globalData) JSString(globalData, fiberCount, s1, u2);
- unsigned index = 0;
- RefPtr<JSString::Rope> rope = JSString::Rope::createOrNull(ropeLength);
- if (UNLIKELY(!rope))
+ JSString::RopeBuilder ropeBuilder(fiberCount);
+ if (UNLIKELY(ropeBuilder.isOutOfMemory()))
return throwOutOfMemoryError(exec);
- rope->append(index, s1);
- rope->append(index, u2);
- ASSERT(index == ropeLength);
- return new (globalData) JSString(globalData, rope.release());
+ ropeBuilder.append(s1);
+ ropeBuilder.append(u2);
+ return new (globalData) JSString(globalData, ropeBuilder.release());
+ }
+
+ ALWAYS_INLINE JSValue jsString(ExecState* exec, const UString& u1, const UString& u2)
+ {
+ unsigned length1 = u1.size();
+ if (!length1)
+ return jsString(exec, u2);
+ unsigned length2 = u2.size();
+ if (!length2)
+ return jsString(exec, u1);
+ if ((length1 + length2) < length1)
+ return throwOutOfMemoryError(exec);
+
+ JSGlobalData* globalData = &exec->globalData();
+ return new (globalData) JSString(globalData, u1, u2);
+ }
+
+ ALWAYS_INLINE JSValue jsString(ExecState* exec, const UString& u1, const UString& u2, const UString& u3)
+ {
+ unsigned length1 = u1.size();
+ unsigned length2 = u2.size();
+ unsigned length3 = u3.size();
+ if (!length1)
+ return jsString(exec, u2, u3);
+ if (!length2)
+ return jsString(exec, u1, u3);
+ if (!length3)
+ return jsString(exec, u1, u2);
+
+ if ((length1 + length2) < length1)
+ return throwOutOfMemoryError(exec);
+ if ((length1 + length2 + length3) < length3)
+ return throwOutOfMemoryError(exec);
+
+ JSGlobalData* globalData = &exec->globalData();
+ return new (globalData) JSString(globalData, u1, u2, u3);
}
ALWAYS_INLINE JSValue jsString(ExecState* exec, Register* strings, unsigned count)
{
ASSERT(count >= 3);
- unsigned ropeLength = 0;
+ unsigned fiberCount = 0;
for (unsigned i = 0; i < count; ++i) {
JSValue v = strings[i].jsValue();
if (LIKELY(v.isString()))
- ropeLength += asString(v)->ropeLength();
+ fiberCount += asString(v)->size();
else
- ++ropeLength;
+ ++fiberCount;
}
JSGlobalData* globalData = &exec->globalData();
- if (ropeLength == 3)
+ if (fiberCount == 3)
return new (globalData) JSString(exec, strings[0].jsValue(), strings[1].jsValue(), strings[2].jsValue());
- RefPtr<JSString::Rope> rope = JSString::Rope::createOrNull(ropeLength);
- if (UNLIKELY(!rope))
+ JSString::RopeBuilder ropeBuilder(fiberCount);
+ if (UNLIKELY(ropeBuilder.isOutOfMemory()))
return throwOutOfMemoryError(exec);
- unsigned index = 0;
+ unsigned length = 0;
+ bool overflow = false;
+
for (unsigned i = 0; i < count; ++i) {
JSValue v = strings[i].jsValue();
if (LIKELY(v.isString()))
- rope->append(index, asString(v));
+ ropeBuilder.append(asString(v));
else
- rope->append(index, v.toString(exec));
+ ropeBuilder.append(v.toString(exec));
+
+ unsigned newLength = ropeBuilder.length();
+ if (newLength < length)
+ overflow = true;
+ length = newLength;
}
- ASSERT(index == ropeLength);
- return new (globalData) JSString(globalData, rope.release());
+ if (overflow)
+ return throwOutOfMemoryError(exec);
+
+ return new (globalData) JSString(globalData, ropeBuilder.release());
}
ALWAYS_INLINE JSValue jsString(ExecState* exec, JSValue thisValue, const ArgList& args)
{
- unsigned ropeLength = 0;
+ unsigned fiberCount = 0;
if (LIKELY(thisValue.isString()))
- ropeLength += asString(thisValue)->ropeLength();
+ fiberCount += asString(thisValue)->size();
else
- ++ropeLength;
+ ++fiberCount;
for (unsigned i = 0; i < args.size(); ++i) {
JSValue v = args.at(i);
if (LIKELY(v.isString()))
- ropeLength += asString(v)->ropeLength();
+ fiberCount += asString(v)->size();
else
- ++ropeLength;
+ ++fiberCount;
}
- RefPtr<JSString::Rope> rope = JSString::Rope::createOrNull(ropeLength);
- if (UNLIKELY(!rope))
+ JSString::RopeBuilder ropeBuilder(fiberCount);
+ if (UNLIKELY(ropeBuilder.isOutOfMemory()))
return throwOutOfMemoryError(exec);
- unsigned index = 0;
if (LIKELY(thisValue.isString()))
- rope->append(index, asString(thisValue));
+ ropeBuilder.append(asString(thisValue));
else
- rope->append(index, thisValue.toString(exec));
+ ropeBuilder.append(thisValue.toString(exec));
+
+ unsigned length = 0;
+ bool overflow = false;
+
for (unsigned i = 0; i < args.size(); ++i) {
JSValue v = args.at(i);
if (LIKELY(v.isString()))
- rope->append(index, asString(v));
+ ropeBuilder.append(asString(v));
else
- rope->append(index, v.toString(exec));
+ ropeBuilder.append(v.toString(exec));
+
+ unsigned newLength = ropeBuilder.length();
+ if (newLength < length)
+ overflow = true;
+ length = newLength;
}
- ASSERT(index == ropeLength);
+
+ if (overflow)
+ return throwOutOfMemoryError(exec);
JSGlobalData* globalData = &exec->globalData();
- return new (globalData) JSString(globalData, rope.release());
+ return new (globalData) JSString(globalData, ropeBuilder.release());
}
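All of the jsString() overloads above guard the concatenated length with the idiom (length1 + length2) < length1, which detects unsigned wraparound without needing a wider integer type. A small self-contained illustration of why that works (the helper name is hypothetical):

#include <cassert>
#include <limits>

// Returns true when a + b does not fit in 'unsigned': unsigned addition wraps
// modulo 2^N, so the wrapped sum is smaller than either operand on overflow.
static bool additionOverflows(unsigned a, unsigned b)
{
    return (a + b) < a;
}

int main()
{
    assert(!additionOverflows(10, 20));
    assert(additionOverflows(std::numeric_limits<unsigned>::max(), 1));
    return 0;
}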
// ECMA 11.9.3
void PropertyNameArray::add(UString::Rep* identifier)
{
- ASSERT(identifier == &UString::Rep::null() || identifier == &UString::Rep::empty() || identifier->isIdentifier());
+ ASSERT(identifier == UString::null().rep() || identifier == UString::Rep::empty() || identifier->isIdentifier());
size_t size = m_data->propertyNameVector().size();
if (size < setThreshold) {
namespace JSC {
-JSValue PropertySlot::functionGetter(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue PropertySlot::functionGetter(ExecState* exec) const
{
// Prevent getter functions from observing execution if an exception is pending.
if (exec->hadException())
return exec->exception();
CallData callData;
- CallType callType = slot.m_data.getterFunc->getCallData(callData);
+ CallType callType = m_data.getterFunc->getCallData(callData);
if (callType == CallTypeHost)
- return callData.native.function(exec, slot.m_data.getterFunc, slot.slotBase(), exec->emptyList());
+ return callData.native.function(exec, m_data.getterFunc, thisValue(), exec->emptyList());
ASSERT(callType == CallTypeJS);
// FIXME: Can this be done more efficiently using the callData?
- return asFunction(slot.m_data.getterFunc)->call(exec, slot.slotBase(), exec->emptyList());
+ return asFunction(m_data.getterFunc)->call(exec, thisValue(), exec->emptyList());
}
} // namespace JSC
#define JSC_VALUE_SLOT_MARKER 0
#define JSC_REGISTER_SLOT_MARKER reinterpret_cast<GetValueFunc>(1)
+#define INDEX_GETTER_MARKER reinterpret_cast<GetValueFunc>(2)
+#define GETTER_FUNCTION_MARKER reinterpret_cast<GetValueFunc>(3)
class PropertySlot {
public:
+ enum CachedPropertyType {
+ Uncacheable,
+ Getter,
+ Custom,
+ Value
+ };
+
PropertySlot()
+ : m_cachedPropertyType(Uncacheable)
{
clearBase();
clearOffset();
explicit PropertySlot(const JSValue base)
: m_slotBase(base)
+ , m_cachedPropertyType(Uncacheable)
{
clearOffset();
clearValue();
}
- typedef JSValue (*GetValueFunc)(ExecState*, const Identifier&, const PropertySlot&);
+ typedef JSValue (*GetValueFunc)(ExecState*, JSValue slotBase, const Identifier&);
+ typedef JSValue (*GetIndexValueFunc)(ExecState*, JSValue slotBase, unsigned);
JSValue getValue(ExecState* exec, const Identifier& propertyName) const
{
return *m_data.valueSlot;
if (m_getValue == JSC_REGISTER_SLOT_MARKER)
return (*m_data.registerSlot).jsValue();
- return m_getValue(exec, propertyName, *this);
+ if (m_getValue == INDEX_GETTER_MARKER)
+ return m_getIndexValue(exec, slotBase(), index());
+ if (m_getValue == GETTER_FUNCTION_MARKER)
+ return functionGetter(exec);
+ return m_getValue(exec, slotBase(), propertyName);
}
JSValue getValue(ExecState* exec, unsigned propertyName) const
return *m_data.valueSlot;
if (m_getValue == JSC_REGISTER_SLOT_MARKER)
return (*m_data.registerSlot).jsValue();
- return m_getValue(exec, Identifier::from(exec, propertyName), *this);
+ if (m_getValue == INDEX_GETTER_MARKER)
+ return m_getIndexValue(exec, m_slotBase, m_data.index);
+ if (m_getValue == GETTER_FUNCTION_MARKER)
+ return functionGetter(exec);
+ return m_getValue(exec, slotBase(), Identifier::from(exec, propertyName));
}
- bool isCacheable() const { return m_offset != WTF::notFound; }
+ CachedPropertyType cachedPropertyType() const { return m_cachedPropertyType; }
+ bool isCacheable() const { return m_cachedPropertyType != Uncacheable; }
+ bool isCacheableValue() const { return m_cachedPropertyType == Value; }
size_t cachedOffset() const
{
ASSERT(isCacheable());
m_slotBase = slotBase;
m_data.valueSlot = valueSlot;
m_offset = offset;
+ m_cachedPropertyType = Value;
}
void setValue(JSValue value)
ASSERT(slotBase);
ASSERT(getValue);
m_getValue = getValue;
+ m_getIndexValue = 0;
m_slotBase = slotBase;
}
-
- void setCustomIndex(JSValue slotBase, unsigned index, GetValueFunc getValue)
+
+ void setCacheableCustom(JSValue slotBase, GetValueFunc getValue)
{
ASSERT(slotBase);
ASSERT(getValue);
m_getValue = getValue;
+ m_getIndexValue = 0;
+ m_slotBase = slotBase;
+ m_cachedPropertyType = Custom;
+ }
+
+ void setCustomIndex(JSValue slotBase, unsigned index, GetIndexValueFunc getIndexValue)
+ {
+ ASSERT(slotBase);
+ ASSERT(getIndexValue);
+ m_getValue = INDEX_GETTER_MARKER;
+ m_getIndexValue = getIndexValue;
m_slotBase = slotBase;
m_data.index = index;
}
-
+
void setGetterSlot(JSObject* getterFunc)
{
ASSERT(getterFunc);
- m_getValue = functionGetter;
+ m_thisValue = m_slotBase;
+ m_getValue = GETTER_FUNCTION_MARKER;
m_data.getterFunc = getterFunc;
}
-
+
+ void setCacheableGetterSlot(JSValue slotBase, JSObject* getterFunc, unsigned offset)
+ {
+ ASSERT(getterFunc);
+ m_getValue = GETTER_FUNCTION_MARKER;
+ m_thisValue = m_slotBase;
+ m_slotBase = slotBase;
+ m_data.getterFunc = getterFunc;
+ m_offset = offset;
+ m_cachedPropertyType = Getter;
+ }
+
void setUndefined()
{
setValue(jsUndefined());
{
// Clear offset even in release builds, in case this PropertySlot has been used before.
// (For other data members, we don't need to clear anything because reuse would meaningfully overwrite them.)
- m_offset = WTF::notFound;
+ m_offset = 0;
+ m_cachedPropertyType = Uncacheable;
}
unsigned index() const { return m_data.index; }
+ JSValue thisValue() const { return m_thisValue; }
+
+ GetValueFunc customGetter() const
+ {
+ ASSERT(m_cachedPropertyType == Custom);
+ return m_getValue;
+ }
private:
- static JSValue functionGetter(ExecState*, const Identifier&, const PropertySlot&);
+ JSValue functionGetter(ExecState*) const;
GetValueFunc m_getValue;
+ GetIndexValueFunc m_getIndexValue;
JSValue m_slotBase;
union {
} m_data;
JSValue m_value;
+ JSValue m_thisValue;
size_t m_offset;
+ CachedPropertyType m_cachedPropertyType;
};
} // namespace JSC
#else
-#if ENABLE(WREC)
-#include "JIT.h"
-#include "WRECGenerator.h"
-#endif
#include <pcre/pcre.h>
#endif
namespace JSC {
-#if ENABLE(WREC)
-using namespace WREC;
-#endif
-
inline RegExp::RegExp(JSGlobalData* globalData, const UString& pattern)
: m_pattern(pattern)
, m_flagBits(0)
, m_constructionError(0)
, m_numSubpatterns(0)
+ , m_lastMatchStart(-1)
{
compile(globalData);
}
, m_flagBits(0)
, m_constructionError(0)
, m_numSubpatterns(0)
+ , m_lastMatchStart(-1)
{
// NOTE: The global flag is handled on a case-by-case basis by functions like
// String::match and RegExpObject::match.
- if (flags.find('g') != -1)
+ if (flags.find('g') != UString::NotFound)
m_flagBits |= Global;
- if (flags.find('i') != -1)
+ if (flags.find('i') != UString::NotFound)
m_flagBits |= IgnoreCase;
- if (flags.find('m') != -1)
+ if (flags.find('m') != UString::NotFound)
m_flagBits |= Multiline;
compile(globalData);
if (ovector)
ovector->clear();
- if (startOffset > s.size() || s.isNull())
+ if (static_cast<unsigned>(startOffset) > s.size() || s.isNull()) {
+ m_lastMatchString = UString();
+ m_lastMatchStart = -1;
+ m_lastOVector.shrink(0);
return -1;
+ }
+
+ // Check whether this match call repeats the previous invocation (same subject
+ // string and start offset); if so, return the prior result.
+ if ((startOffset == m_lastMatchStart) && (s.rep() == m_lastMatchString.rep())) {
+ if (ovector)
+ *ovector = m_lastOVector;
+
+ if (m_lastOVector.isEmpty())
+ return -1;
+
+ return m_lastOVector.at(0);
+ }
#if ENABLE(YARR_JIT)
if (!!m_regExpJITCode) {
if (ovector)
ovector->clear();
}
+
+ m_lastMatchString = s;
+ m_lastMatchStart = startOffset;
+
+ if (ovector)
+ m_lastOVector = *ovector;
+ else
+ m_lastOVector = nonReturnedOvector;
+
return result;
}
+ m_lastMatchString = UString();
+ m_lastMatchStart = -1;
+ m_lastOVector.shrink(0);
+
return -1;
}
#else
-void RegExp::compile(JSGlobalData* globalData)
+void RegExp::compile(JSGlobalData*)
{
m_regExp = 0;
-#if ENABLE(WREC)
- m_wrecFunction = Generator::compileRegExp(globalData, m_pattern, &m_numSubpatterns, &m_constructionError, m_executablePool, ignoreCase(), multiline());
- if (m_wrecFunction || m_constructionError)
- return;
- // Fall through to non-WREC case.
-#else
- UNUSED_PARAM(globalData);
-#endif
-
JSRegExpIgnoreCaseOption ignoreCaseOption = ignoreCase() ? JSRegExpIgnoreCase : JSRegExpDoNotIgnoreCase;
JSRegExpMultilineOption multilineOption = multiline() ? JSRegExpMultiline : JSRegExpSingleLine;
m_regExp = jsRegExpCompile(reinterpret_cast<const UChar*>(m_pattern.data()), m_pattern.size(), ignoreCaseOption, multilineOption, &m_numSubpatterns, &m_constructionError);
if (ovector)
ovector->clear();
- if (startOffset > s.size() || s.isNull())
+ if (static_cast<unsigned>(startOffset) > s.size() || s.isNull())
return -1;
-#if ENABLE(WREC)
- if (m_wrecFunction) {
- int offsetVectorSize = (m_numSubpatterns + 1) * 2;
- int* offsetVector;
- Vector<int, 32> nonReturnedOvector;
- if (ovector) {
- ovector->resize(offsetVectorSize);
- offsetVector = ovector->data();
- } else {
- nonReturnedOvector.resize(offsetVectorSize);
- offsetVector = nonReturnedOvector.data();
- }
- ASSERT(offsetVector);
- for (int j = 0; j < offsetVectorSize; ++j)
- offsetVector[j] = -1;
-
- int result = m_wrecFunction(s.data(), startOffset, s.size(), offsetVector);
-
- if (result < 0) {
-#ifndef NDEBUG
- // TODO: define up a symbol, rather than magic -1
- if (result != -1)
- fprintf(stderr, "jsRegExpExecute failed with result %d\n", result);
-#endif
- if (ovector)
- ovector->clear();
- }
- return result;
- } else
-#endif
if (m_regExp) {
// Set up the offset vector for the result.
// First 2/3 used for result, the last third used by PCRE.
#define RegExp_h
#include "UString.h"
-#include "WREC.h"
#include "ExecutableAllocator.h"
#include <wtf/Forward.h>
#include <wtf/RefCounted.h>
int m_flagBits;
const char* m_constructionError;
unsigned m_numSubpatterns;
+ UString m_lastMatchString;
+ int m_lastMatchStart;
+ Vector<int, 32> m_lastOVector;
#if ENABLE(YARR_JIT)
Yarr::RegexCodeBlock m_regExpJITCode;
#elif ENABLE(YARR)
OwnPtr<Yarr::BytecodePattern> m_regExpBytecode;
#else
-#if ENABLE(WREC)
- WREC::CompiledRegExp m_wrecFunction;
- RefPtr<ExecutablePool> m_executablePool;
-#endif
JSRegExp* m_regExp;
#endif
};
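The new m_lastMatchString/m_lastMatchStart/m_lastOVector members implement a single-entry memo of the most recent match query. A standalone sketch of that pattern with plain std:: types, for illustration only (the real code compares the underlying string rep pointers rather than string values, and clears the memo when the subject or start offset is invalid):

#include <string>
#include <vector>

class LastMatchCache {
public:
    LastMatchCache() : m_lastStart(-1) { }

    // Returns true and copies the stored offsets when the exact same query
    // (same subject text, same start offset) was answered by the last match.
    bool lookup(const std::string& subject, int startOffset, std::vector<int>& ovector) const
    {
        if (startOffset != m_lastStart || subject != m_lastSubject)
            return false;
        ovector = m_lastOVector;
        return true;
    }

    // Called after every match attempt; an empty vector records "no match".
    void store(const std::string& subject, int startOffset, const std::vector<int>& ovector)
    {
        m_lastSubject = subject;
        m_lastStart = startOffset;
        m_lastOVector = ovector;
    }

private:
    std::string m_lastSubject;
    int m_lastStart;
    std::vector<int> m_lastOVector;
};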
--- /dev/null
+/*
+ * Copyright (C) 2010 University of Szeged
+ * Copyright (C) 2010 Renata Hodovan (hodovan@inf.u-szeged.hu)
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+
+#include "RegExpCache.h"
+
+namespace JSC {
+
+PassRefPtr<RegExp> RegExpCache::lookupOrCreate(const UString& patternString, const UString& flags)
+{
+ if (patternString.size() < maxCacheablePatternLength) {
+ pair<HashMap<RegExpKey, RefPtr<RegExp> >::iterator, bool> result = m_cacheMap.add(RegExpKey(flags, patternString), 0);
+ if (!result.second)
+ return result.first->second;
+ }
+ return create(patternString, flags);
+}
+
+PassRefPtr<RegExp> RegExpCache::create(const UString& patternString, const UString& flags)
+{
+ RefPtr<RegExp> regExp;
+
+ if (!flags.isNull())
+ regExp = RegExp::create(m_globalData, patternString, flags);
+ else
+ regExp = RegExp::create(m_globalData, patternString);
+
+ if (patternString.size() >= maxCacheablePatternLength)
+ return regExp;
+
+ ++m_nextKeyToEvict;
+ if (m_nextKeyToEvict == maxCacheableEntries) {
+ m_nextKeyToEvict = 0;
+ m_isFull = true;
+ }
+ if (m_isFull)
+ m_cacheMap.remove(RegExpKey(patternKeyArray[m_nextKeyToEvict].flagsValue, patternKeyArray[m_nextKeyToEvict].pattern));
+
+ RegExpKey key = RegExpKey(flags, patternString);
+ m_cacheMap.set(key, regExp);
+ patternKeyArray[m_nextKeyToEvict].flagsValue = key.flagsValue;
+ patternKeyArray[m_nextKeyToEvict].pattern = patternString.rep();
+ return regExp;
+}
+
+RegExpCache::RegExpCache(JSGlobalData* globalData)
+ : m_globalData(globalData)
+ , m_nextKeyToEvict(-1)
+ , m_isFull(false)
+{
+}
+
+}
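RegExpCache::create() evicts with a round-robin cursor over a fixed-size key array: once the array has wrapped, each insertion first removes whatever key was written at the slot about to be reused. A minimal standalone sketch of that strategy using hypothetical std:: types:

#include <map>
#include <string>

class RoundRobinCache {
public:
    RoundRobinCache() : m_nextToEvict(-1), m_isFull(false) { }

    void insert(const std::string& key, int value)
    {
        ++m_nextToEvict;
        if (m_nextToEvict == maxEntries) {
            m_nextToEvict = 0;
            m_isFull = true; // the key array has wrapped at least once
        }
        if (m_isFull)
            m_map.erase(m_keys[m_nextToEvict]); // free the slot's previous occupant
        m_keys[m_nextToEvict] = key;
        m_map[key] = value;
    }

    const int* find(const std::string& key) const
    {
        std::map<std::string, int>::const_iterator it = m_map.find(key);
        return it == m_map.end() ? 0 : &it->second;
    }

private:
    static const int maxEntries = 4; // RegExpCache uses 256
    std::string m_keys[maxEntries];
    std::map<std::string, int> m_map;
    int m_nextToEvict;
    bool m_isFull;
};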
--- /dev/null
+/*
+ * Copyright (C) 2010 University of Szeged
+ * Copyright (C) 2010 Renata Hodovan (hodovan@inf.u-szeged.hu)
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "RegExp.h"
+#include "RegExpKey.h"
+#include "UString.h"
+
+#ifndef RegExpCache_h
+#define RegExpCache_h
+
+namespace JSC {
+
+class RegExpCache {
+public:
+ PassRefPtr<RegExp> lookupOrCreate(const UString& patternString, const UString& flags);
+ PassRefPtr<RegExp> create(const UString& patternString, const UString& flags);
+ RegExpCache(JSGlobalData* globalData);
+
+private:
+ static const unsigned maxCacheablePatternLength = 256;
+ static const int maxCacheableEntries = 256;
+
+ typedef HashMap<RegExpKey, RefPtr<RegExp> > RegExpCacheMap;
+ RegExpKey patternKeyArray[maxCacheableEntries];
+ RegExpCacheMap m_cacheMap;
+ JSGlobalData* m_globalData;
+ int m_nextKeyToEvict;
+ bool m_isFull;
+};
+
+} // namespace JSC
+
+#endif // RegExpCache_h
#include "JSArray.h"
#include "JSFunction.h"
#include "JSString.h"
+#include "Lookup.h"
#include "ObjectPrototype.h"
#include "RegExpMatchesArray.h"
#include "RegExpObject.h"
#include "RegExpPrototype.h"
#include "RegExp.h"
+#include "RegExpCache.h"
namespace JSC {
-static JSValue regExpConstructorInput(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorMultiline(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorLastMatch(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorLastParen(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorLeftContext(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorRightContext(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar1(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar2(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar3(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar4(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar5(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar6(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar7(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar8(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpConstructorDollar9(ExecState*, const Identifier&, const PropertySlot&);
+static JSValue regExpConstructorInput(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorMultiline(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorLastMatch(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorLastParen(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorLeftContext(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorRightContext(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar1(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar2(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar3(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar4(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar5(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar6(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar7(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar8(ExecState*, JSValue, const Identifier&);
+static JSValue regExpConstructorDollar9(ExecState*, JSValue, const Identifier&);
static void setRegExpConstructorInput(ExecState*, JSObject*, JSValue);
static void setRegExpConstructorMultiline(ExecState*, JSObject*, JSValue);
memcpy(d->lastOvector().data(), data->lastOvector().data(), offsetVectorSize * sizeof(int));
// d->multiline is not needed, and remains uninitialized
- setLazyCreationData(d);
+ setSubclassData(d);
}
RegExpMatchesArray::~RegExpMatchesArray()
{
- delete static_cast<RegExpConstructorPrivate*>(lazyCreationData());
+ delete static_cast<RegExpConstructorPrivate*>(subclassData());
}
void RegExpMatchesArray::fillArrayInstance(ExecState* exec)
{
- RegExpConstructorPrivate* d = static_cast<RegExpConstructorPrivate*>(lazyCreationData());
+ RegExpConstructorPrivate* d = static_cast<RegExpConstructorPrivate*>(subclassData());
ASSERT(d);
unsigned lastNumSubpatterns = d->lastNumSubPatterns;
JSArray::put(exec, exec->propertyNames().input, jsString(exec, d->input), slot);
delete d;
- setLazyCreationData(0);
+ setSubclassData(0);
}
JSObject* RegExpConstructor::arrayOfMatches(ExecState* exec) const
return getStaticValueDescriptor<RegExpConstructor, InternalFunction>(exec, ExecState::regExpConstructorTable(exec), this, propertyName, descriptor);
}
-JSValue regExpConstructorDollar1(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar1(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 1);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 1);
}
-JSValue regExpConstructorDollar2(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar2(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 2);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 2);
}
-JSValue regExpConstructorDollar3(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar3(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 3);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 3);
}
-JSValue regExpConstructorDollar4(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar4(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 4);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 4);
}
-JSValue regExpConstructorDollar5(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar5(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 5);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 5);
}
-JSValue regExpConstructorDollar6(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar6(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 6);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 6);
}
-JSValue regExpConstructorDollar7(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar7(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 7);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 7);
}
-JSValue regExpConstructorDollar8(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar8(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 8);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 8);
}
-JSValue regExpConstructorDollar9(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorDollar9(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 9);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 9);
}
-JSValue regExpConstructorInput(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorInput(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return jsString(exec, asRegExpConstructor(slot.slotBase())->input());
+ return jsString(exec, asRegExpConstructor(slotBase)->input());
}
-JSValue regExpConstructorMultiline(ExecState*, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorMultiline(ExecState*, JSValue slotBase, const Identifier&)
{
- return jsBoolean(asRegExpConstructor(slot.slotBase())->multiline());
+ return jsBoolean(asRegExpConstructor(slotBase)->multiline());
}
-JSValue regExpConstructorLastMatch(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorLastMatch(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getBackref(exec, 0);
+ return asRegExpConstructor(slotBase)->getBackref(exec, 0);
}
-JSValue regExpConstructorLastParen(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorLastParen(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getLastParen(exec);
+ return asRegExpConstructor(slotBase)->getLastParen(exec);
}
-JSValue regExpConstructorLeftContext(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorLeftContext(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getLeftContext(exec);
+ return asRegExpConstructor(slotBase)->getLeftContext(exec);
}
-JSValue regExpConstructorRightContext(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpConstructorRightContext(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return asRegExpConstructor(slot.slotBase())->getRightContext(exec);
+ return asRegExpConstructor(slotBase)->getRightContext(exec);
}
void RegExpConstructor::put(ExecState* exec, const Identifier& propertyName, JSValue value, PutPropertySlot& slot)
UString pattern = arg0.isUndefined() ? UString("") : arg0.toString(exec);
UString flags = arg1.isUndefined() ? UString("") : arg1.toString(exec);
- RefPtr<RegExp> regExp = RegExp::create(&exec->globalData(), pattern, flags);
+ RefPtr<RegExp> regExp = exec->globalData().regExpCache()->lookupOrCreate(pattern, flags);
if (!regExp->isValid())
return throwError(exec, SyntaxError, makeString("Invalid regular expression: ", regExp->errorMessage()));
return new (exec) RegExpObject(exec->lexicalGlobalObject()->regExpStructure(), regExp.release());
--- /dev/null
+/*
+ * Copyright (C) 2010 University of Szeged
+ * Copyright (C) 2010 Renata Hodovan (hodovan@inf.u-szeged.hu)
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY UNIVERSITY OF SZEGED ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UNIVERSITY OF SZEGED OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "UString.h"
+
+#ifndef RegExpKey_h
+#define RegExpKey_h
+
+namespace JSC {
+
+struct RegExpKey {
+ int flagsValue;
+ RefPtr<UString::Rep> pattern;
+
+ RegExpKey()
+ : flagsValue(0)
+ {
+ }
+
+ RegExpKey(int flags)
+ : flagsValue(flags)
+ {
+ }
+
+ RegExpKey(int flags, const UString& pattern)
+ : flagsValue(flags)
+ , pattern(pattern.rep())
+ {
+ }
+
+ RegExpKey(int flags, const PassRefPtr<UString::Rep> pattern)
+ : flagsValue(flags)
+ , pattern(pattern)
+ {
+ }
+
+ RegExpKey(const UString& flags, const UString& pattern)
+ : pattern(pattern.rep())
+ {
+ flagsValue = getFlagsValue(flags);
+ }
+
+ int getFlagsValue(const UString& flags)
+ {
+ flagsValue = 0;
+ if (flags.find('g') != UString::NotFound)
+ flagsValue += 4;
+ if (flags.find('i') != UString::NotFound)
+ flagsValue += 2;
+ if (flags.find('m') != UString::NotFound)
+ flagsValue += 1;
+ return flagsValue;
+ }
+};
+} // namespace JSC
+
+namespace WTF {
+template<typename T> struct DefaultHash;
+template<typename T> struct RegExpHash;
+
+inline bool operator==(const JSC::RegExpKey& a, const JSC::RegExpKey& b)
+{
+ if (a.flagsValue != b.flagsValue)
+ return false;
+ if (!a.pattern)
+ return !b.pattern;
+ if (!b.pattern)
+ return false;
+ return equal(a.pattern.get(), b.pattern.get());
+}
+
+template<> struct RegExpHash<JSC::RegExpKey> {
+ static unsigned hash(const JSC::RegExpKey& key) { return key.pattern->hash(); }
+ static bool equal(const JSC::RegExpKey& a, const JSC::RegExpKey& b) { return a == b; }
+ static const bool safeToCompareToEmptyOrDeleted = false;
+};
+
+template<> struct DefaultHash<JSC::RegExpKey> {
+ typedef RegExpHash<JSC::RegExpKey> Hash;
+};
+
+template<> struct HashTraits<JSC::RegExpKey> : GenericHashTraits<JSC::RegExpKey> {
+ static void constructDeletedValue(JSC::RegExpKey& slot) { slot.flagsValue = -1; }
+ static bool isDeletedValue(const JSC::RegExpKey& value) { return value.flagsValue == -1; }
+};
+} // namespace WTF
+
+#endif // RegExpKey_h
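getFlagsValue() packs the three RegExp flags into a single int with 'g' = 4, 'i' = 2 and 'm' = 1, so for example "gm" encodes as 5 and the -1 used for deleted hash slots can never collide with a real key. A tiny standalone equivalent, shown only for illustration (hypothetical name, std::string in place of UString):

#include <string>

// "gim" -> 7, "gm" -> 5, "" -> 0; the sentinel -1 never matches a real key.
static int packRegExpFlags(const std::string& flags)
{
    int value = 0;
    if (flags.find('g') != std::string::npos)
        value += 4;
    if (flags.find('i') != std::string::npos)
        value += 2;
    if (flags.find('m') != std::string::npos)
        value += 1;
    return value;
}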
private:
virtual bool getOwnPropertySlot(ExecState* exec, const Identifier& propertyName, PropertySlot& slot)
{
- if (lazyCreationData())
+ if (subclassData())
fillArrayInstance(exec);
return JSArray::getOwnPropertySlot(exec, propertyName, slot);
}
virtual bool getOwnPropertySlot(ExecState* exec, unsigned propertyName, PropertySlot& slot)
{
- if (lazyCreationData())
+ if (subclassData())
fillArrayInstance(exec);
return JSArray::getOwnPropertySlot(exec, propertyName, slot);
}
virtual bool getOwnPropertyDescriptor(ExecState* exec, const Identifier& propertyName, PropertyDescriptor& descriptor)
{
- if (lazyCreationData())
+ if (subclassData())
fillArrayInstance(exec);
return JSArray::getOwnPropertyDescriptor(exec, propertyName, descriptor);
}
virtual void put(ExecState* exec, const Identifier& propertyName, JSValue v, PutPropertySlot& slot)
{
- if (lazyCreationData())
+ if (subclassData())
fillArrayInstance(exec);
JSArray::put(exec, propertyName, v, slot);
}
virtual void put(ExecState* exec, unsigned propertyName, JSValue v)
{
- if (lazyCreationData())
+ if (subclassData())
fillArrayInstance(exec);
JSArray::put(exec, propertyName, v);
}
virtual bool deleteProperty(ExecState* exec, const Identifier& propertyName)
{
- if (lazyCreationData())
+ if (subclassData())
fillArrayInstance(exec);
return JSArray::deleteProperty(exec, propertyName);
}
virtual bool deleteProperty(ExecState* exec, unsigned propertyName)
{
- if (lazyCreationData())
+ if (subclassData())
fillArrayInstance(exec);
return JSArray::deleteProperty(exec, propertyName);
}
virtual void getOwnPropertyNames(ExecState* exec, PropertyNameArray& arr, EnumerationMode mode = ExcludeDontEnumProperties)
{
- if (lazyCreationData())
+ if (subclassData())
fillArrayInstance(exec);
JSArray::getOwnPropertyNames(exec, arr, mode);
}
#include "JSArray.h"
#include "JSGlobalObject.h"
#include "JSString.h"
+#include "Lookup.h"
#include "RegExpConstructor.h"
#include "RegExpPrototype.h"
namespace JSC {
-static JSValue regExpObjectGlobal(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpObjectIgnoreCase(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpObjectMultiline(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpObjectSource(ExecState*, const Identifier&, const PropertySlot&);
-static JSValue regExpObjectLastIndex(ExecState*, const Identifier&, const PropertySlot&);
+static JSValue regExpObjectGlobal(ExecState*, JSValue, const Identifier&);
+static JSValue regExpObjectIgnoreCase(ExecState*, JSValue, const Identifier&);
+static JSValue regExpObjectMultiline(ExecState*, JSValue, const Identifier&);
+static JSValue regExpObjectSource(ExecState*, JSValue, const Identifier&);
+static JSValue regExpObjectLastIndex(ExecState*, JSValue, const Identifier&);
static void setRegExpObjectLastIndex(ExecState*, JSObject*, JSValue);
} // namespace JSC
return getStaticValueDescriptor<RegExpObject, JSObject>(exec, ExecState::regExpTable(exec), this, propertyName, descriptor);
}
-JSValue regExpObjectGlobal(ExecState*, const Identifier&, const PropertySlot& slot)
+JSValue regExpObjectGlobal(ExecState*, JSValue slotBase, const Identifier&)
{
- return jsBoolean(asRegExpObject(slot.slotBase())->regExp()->global());
+ return jsBoolean(asRegExpObject(slotBase)->regExp()->global());
}
-JSValue regExpObjectIgnoreCase(ExecState*, const Identifier&, const PropertySlot& slot)
+JSValue regExpObjectIgnoreCase(ExecState*, JSValue slotBase, const Identifier&)
{
- return jsBoolean(asRegExpObject(slot.slotBase())->regExp()->ignoreCase());
+ return jsBoolean(asRegExpObject(slotBase)->regExp()->ignoreCase());
}
-JSValue regExpObjectMultiline(ExecState*, const Identifier&, const PropertySlot& slot)
+JSValue regExpObjectMultiline(ExecState*, JSValue slotBase, const Identifier&)
{
- return jsBoolean(asRegExpObject(slot.slotBase())->regExp()->multiline());
+ return jsBoolean(asRegExpObject(slotBase)->regExp()->multiline());
}
-JSValue regExpObjectSource(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpObjectSource(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return jsString(exec, asRegExpObject(slot.slotBase())->regExp()->pattern());
+ return jsString(exec, asRegExpObject(slotBase)->regExp()->pattern());
}
-JSValue regExpObjectLastIndex(ExecState* exec, const Identifier&, const PropertySlot& slot)
+JSValue regExpObjectLastIndex(ExecState* exec, JSValue slotBase, const Identifier&)
{
- return jsNumber(exec, asRegExpObject(slot.slotBase())->lastIndex());
+ return jsNumber(exec, asRegExpObject(slotBase)->lastIndex());
}
void RegExpObject::put(ExecState* exec, const Identifier& propertyName, JSValue value, PutPropertySlot& slot)
#include "JSFunction.h"
#include "JSObject.h"
#include "JSString.h"
+#include "JSStringBuilder.h"
#include "JSValue.h"
#include "ObjectPrototype.h"
#include "PrototypeFunction.h"
#include "RegExpObject.h"
#include "RegExp.h"
+#include "RegExpCache.h"
namespace JSC {
} else {
UString pattern = args.isEmpty() ? UString("") : arg0.toString(exec);
UString flags = arg1.isUndefined() ? UString("") : arg1.toString(exec);
- regExp = RegExp::create(&exec->globalData(), pattern, flags);
+ regExp = exec->globalData().regExpCache()->lookupOrCreate(pattern, flags);
}
if (!regExp->isValid())
postfix[index] = 'm';
UString source = asRegExpObject(thisValue)->get(exec, exec->propertyNames().source).toString(exec);
// If source is empty, use "/(?:)/" to avoid colliding with comment syntax
- return jsNontrivialString(exec, makeString("/", source.size() ? source : UString("(?:)"), postfix));
+ return jsMakeNontrivialString(exec, "/", source.size() ? source : UString("(?:)"), postfix);
}
} // namespace JSC
--- /dev/null
+/*
+ * Copyright (C) 2009 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "RopeImpl.h"
+
+namespace JSC {
+
+void RopeImpl::derefFibersNonRecursive(Vector<RopeImpl*, 32>& workQueue)
+{
+ unsigned fiberCount = this->fiberCount();
+ for (unsigned i = 0; i < fiberCount; ++i) {
+ Fiber& fiber = m_fibers[i];
+ if (isRope(fiber)) {
+ RopeImpl* nextRope = static_cast<RopeImpl*>(fiber);
+ if (nextRope->hasOneRef())
+ workQueue.append(nextRope);
+ else
+ nextRope->deref();
+ } else
+ static_cast<UStringImpl*>(fiber)->deref();
+ }
+}
+
+void RopeImpl::destructNonRecursive()
+{
+ Vector<RopeImpl*, 32> workQueue;
+
+ derefFibersNonRecursive(workQueue);
+ delete this;
+
+ while (!workQueue.isEmpty()) {
+ RopeImpl* rope = workQueue.last();
+ workQueue.removeLast();
+ rope->derefFibersNonRecursive(workQueue);
+ delete rope;
+ }
+}
+
+} // namespace JSC
--- /dev/null
+/*
+ * Copyright (C) 2009, 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef RopeImpl_h
+#define RopeImpl_h
+
+#include "UStringImpl.h"
+
+namespace JSC {
+
+class RopeImpl : public StringImplBase {
+public:
+ // A RopeImpl is composed of a set of smaller strings called Fibers.
+ // Each Fiber in a rope is either UStringImpl or another RopeImpl.
+ typedef StringImplBase* Fiber;
+
+ // Creates a RopeImpl comprising 'fiberCount' Fibers.
+ // The RopeImpl is constructed in an uninitialized state; initializeFiber() must be called for each Fiber in the RopeImpl.
+ static PassRefPtr<RopeImpl> tryCreateUninitialized(unsigned fiberCount)
+ {
+ void* allocation;
+ if (tryFastMalloc(sizeof(RopeImpl) + (fiberCount - 1) * sizeof(Fiber)).getValue(allocation))
+ return adoptRef(new (allocation) RopeImpl(fiberCount));
+ return 0;
+ }
+
+ static bool isRope(Fiber fiber)
+ {
+ return !fiber->isStringImpl();
+ }
+
+ static void deref(Fiber fiber)
+ {
+ if (isRope(fiber))
+ static_cast<RopeImpl*>(fiber)->deref();
+ else
+ static_cast<UStringImpl*>(fiber)->deref();
+ }
+
+ void initializeFiber(unsigned &index, Fiber fiber)
+ {
+ m_fibers[index++] = fiber;
+ fiber->ref();
+ m_length += fiber->length();
+ }
+
+ unsigned fiberCount() { return m_size; }
+ Fiber* fibers() { return m_fibers; }
+
+ ALWAYS_INLINE void deref()
+ {
+ m_refCountAndFlags -= s_refCountIncrement;
+ if (!(m_refCountAndFlags & s_refCountMask))
+ destructNonRecursive();
+ }
+
+private:
+ RopeImpl(unsigned fiberCount)
+ : StringImplBase(ConstructNonStringImpl)
+ , m_size(fiberCount)
+ {
+ }
+
+ void destructNonRecursive();
+ void derefFibersNonRecursive(Vector<RopeImpl*, 32>& workQueue);
+
+ bool hasOneRef() { return (m_refCountAndFlags & s_refCountMask) == s_refCountIncrement; }
+
+ unsigned m_size;
+ Fiber m_fibers[1];
+};
+
+}
+
+#endif
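A plausible usage sketch for the RopeImpl API declared above, assuming two existing UStringImpl leaves: allocate the rope with a known fiber count, then initialize each slot in order (initializeFiber refs the fiber and accumulates the rope's length). This is illustrative and not code from the patch:

static PassRefPtr<RopeImpl> makeRope(UStringImpl* leaf1, UStringImpl* leaf2)
{
    RefPtr<RopeImpl> rope = RopeImpl::tryCreateUninitialized(2);
    if (!rope)
        return 0; // allocation failed; callers report out-of-memory

    unsigned index = 0;
    rope->initializeFiber(index, leaf1); // refs leaf1, adds its length
    rope->initializeFiber(index, leaf2); // refs leaf2, adds its length
    // index == rope->fiberCount() == 2 at this point
    return rope.release();
}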
public:
SmallStringsStorage();
- UString::Rep* rep(unsigned char character) { return &m_reps[character]; }
+ UString::Rep* rep(unsigned char character) { return m_reps[character].get(); }
private:
- UString::Rep m_reps[numCharactersToStore];
+ RefPtr<UString::Rep> m_reps[numCharactersToStore];
};
SmallStringsStorage::SmallStringsStorage()
RefPtr<UStringImpl> baseString = UStringImpl::createUninitialized(numCharactersToStore, characterBuffer);
for (unsigned i = 0; i < numCharactersToStore; ++i) {
characterBuffer[i] = i;
- new (&m_reps[i]) UString::Rep(&characterBuffer[i], 1, PassRefPtr<UStringImpl>(baseString));
+ m_reps[i] = UStringImpl::create(baseString, i, 1);
}
}
void clear();
unsigned count() const;
-
+#if ENABLE(JIT)
+ JSString** singleCharacterStrings() { return m_singleCharacterStrings; }
+#endif
private:
void createEmptyString(JSGlobalData*);
void createSingleCharacterString(JSGlobalData*, unsigned char);
void append(const char* str)
{
- buffer.append(str, strlen(str));
+ append(str, strlen(str));
}
void append(const char* str, size_t len)
{
- buffer.reserveCapacity(buffer.size() + len);
+ reserveCapacity(buffer.size() + len);
for (size_t i = 0; i < len; i++)
buffer.append(static_cast<unsigned char>(str[i]));
}
}
bool isEmpty() { return buffer.isEmpty(); }
- void reserveCapacity(size_t newCapacity) { buffer.reserveCapacity(newCapacity); }
+ void reserveCapacity(size_t newCapacity)
+ {
+ if (newCapacity < buffer.capacity())
+ return;
+ buffer.reserveCapacity(std::max(newCapacity, buffer.capacity() + buffer.capacity() / 4 + 1));
+ }
void resize(size_t size) { buffer.resize(size); }
size_t size() const { return buffer.size(); }
UChar operator[](size_t i) const { return buffer.at(i); }
- UString release()
+ UString build()
{
buffer.shrinkToFit();
+ ASSERT(buffer.data() || !buffer.size());
return UString::adopt(buffer);
}
-private:
+protected:
Vector<UChar, 64> buffer;
};
putDirectWithoutTransition(exec->propertyNames().prototype, stringPrototype, ReadOnly | DontEnum | DontDelete);
// ECMA 15.5.3.2 fromCharCode()
+#if ENABLE(JIT) && ENABLE(JIT_OPTIMIZE_NATIVE_CALL)
+ putDirectFunctionWithoutTransition(exec, new (exec) NativeFunctionWrapper(exec, prototypeFunctionStructure, 1, exec->propertyNames().fromCharCode, exec->globalData().getThunk(fromCharCodeThunkGenerator), stringFromCharCode), DontEnum);
+#else
putDirectFunctionWithoutTransition(exec, new (exec) NativeFunctionWrapper(exec, prototypeFunctionStructure, 1, exec->propertyNames().fromCharCode, stringFromCharCode), DontEnum);
-
+#endif
// no. of arguments for constructor
putDirectWithoutTransition(exec->propertyNames().length, jsNumber(exec, 1), ReadOnly | DontEnum | DontDelete);
}
#include "JSGlobalObjectFunctions.h"
#include "JSArray.h"
#include "JSFunction.h"
+#include "JSStringBuilder.h"
+#include "Lookup.h"
#include "ObjectPrototype.h"
#include "Operations.h"
#include "PropertyNameArray.h"
+#include "RegExpCache.h"
#include "RegExpConstructor.h"
#include "RegExpObject.h"
#include <wtf/ASCIICType.h>
// ------------------------------ Functions --------------------------
-static NEVER_INLINE UString substituteBackreferencesSlow(const UString& replacement, const UString& source, const int* ovector, RegExp* reg, int i)
+static NEVER_INLINE UString substituteBackreferencesSlow(const UString& replacement, const UString& source, const int* ovector, RegExp* reg, unsigned i)
{
Vector<UChar> substitutedReplacement;
int offset = 0;
i += 1 + advance;
offset = i + 1;
substitutedReplacement.append(source.data() + backrefStart, backrefLength);
- } while ((i = replacement.find('$', i + 1)) != -1);
+ } while ((i = replacement.find('$', i + 1)) != UString::NotFound);
if (replacement.size() - offset)
substitutedReplacement.append(replacement.data() + offset, replacement.size() - offset);
static inline UString substituteBackreferences(const UString& replacement, const UString& source, const int* ovector, RegExp* reg)
{
- int i = replacement.find('$', 0);
- if (UNLIKELY(i != -1))
+ unsigned i = replacement.find('$', 0);
+ if (UNLIKELY(i != UString::NotFound))
return substituteBackreferencesSlow(replacement, source, ovector, reg, i);
return replacement;
}
return Collator::userDefault()->collate(reinterpret_cast<const ::UChar*>(a.data()), a.size(), reinterpret_cast<const ::UChar*>(b.data()), b.size());
}
+struct StringRange {
+public:
+ StringRange(int pos, int len)
+ : position(pos)
+ , length(len)
+ {
+ }
+
+ StringRange()
+ {
+ }
+
+ int position;
+ int length;
+};
+
+JSValue jsSpliceSubstringsWithSeparators(ExecState* exec, JSString* sourceVal, const UString& source, const StringRange* substringRanges, int rangeCount, const UString* separators, int separatorCount);
+JSValue jsSpliceSubstringsWithSeparators(ExecState* exec, JSString* sourceVal, const UString& source, const StringRange* substringRanges, int rangeCount, const UString* separators, int separatorCount)
+{
+ if (rangeCount == 1 && separatorCount == 0) {
+ int sourceSize = source.size();
+ int position = substringRanges[0].position;
+ int length = substringRanges[0].length;
+ if (position <= 0 && length >= sourceSize)
+ return sourceVal;
+ // We could call UString::substr, but this would result in redundant checks
+ return jsString(exec, UStringImpl::create(source.rep(), max(0, position), min(sourceSize, length)));
+ }
+
+ int totalLength = 0;
+ for (int i = 0; i < rangeCount; i++)
+ totalLength += substringRanges[i].length;
+ for (int i = 0; i < separatorCount; i++)
+ totalLength += separators[i].size();
+
+ if (totalLength == 0)
+ return jsString(exec, "");
+
+ UChar* buffer;
+ PassRefPtr<UStringImpl> impl = UStringImpl::tryCreateUninitialized(totalLength, buffer);
+ if (!impl)
+ return throwOutOfMemoryError(exec);
+
+ int maxCount = max(rangeCount, separatorCount);
+ int bufferPos = 0;
+ for (int i = 0; i < maxCount; i++) {
+ if (i < rangeCount) {
+ UStringImpl::copyChars(buffer + bufferPos, source.data() + substringRanges[i].position, substringRanges[i].length);
+ bufferPos += substringRanges[i].length;
+ }
+ if (i < separatorCount) {
+ UStringImpl::copyChars(buffer + bufferPos, separators[i].data(), separators[i].size());
+ bufferPos += separators[i].size();
+ }
+ }
+
+ return jsString(exec, impl);
+}
+
JSValue JSC_HOST_CALL stringProtoFuncReplace(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
{
JSString* sourceVal = thisValue.toThisJSString(exec);
- const UString& source = sourceVal->value(exec);
-
JSValue pattern = args.at(0);
-
JSValue replacement = args.at(1);
+
UString replacementString;
CallData callData;
CallType callType = replacement.getCallData(callData);
replacementString = replacement.toString(exec);
if (pattern.inherits(&RegExpObject::info)) {
+ const UString& source = sourceVal->value(exec);
+ if (exec->hadException())
+ return JSValue();
RegExp* reg = asRegExpObject(pattern)->regExp();
bool global = reg->global();
RegExpConstructor* regExpConstructor = exec->lexicalGlobalObject()->regExpConstructor();
int lastIndex = 0;
- int startPosition = 0;
+ unsigned startPosition = 0;
- Vector<UString::Range, 16> sourceRanges;
+ Vector<StringRange, 16> sourceRanges;
Vector<UString, 16> replacements;
// This is either a loop (if global is set) or a one-way (if not).
if (matchIndex < 0)
break;
- sourceRanges.append(UString::Range(lastIndex, matchIndex - lastIndex));
+ sourceRanges.append(StringRange(lastIndex, matchIndex - lastIndex));
int completeMatchStart = ovector[0];
unsigned i = 0;
if (matchIndex < 0)
break;
- sourceRanges.append(UString::Range(lastIndex, matchIndex - lastIndex));
+ sourceRanges.append(StringRange(lastIndex, matchIndex - lastIndex));
if (callType != CallTypeNone) {
int completeMatchStart = ovector[0];
if (!lastIndex && replacements.isEmpty())
return sourceVal;
- if (lastIndex < source.size())
- sourceRanges.append(UString::Range(lastIndex, source.size() - lastIndex));
+ if (static_cast<unsigned>(lastIndex) < source.size())
+ sourceRanges.append(StringRange(lastIndex, source.size() - lastIndex));
- return jsString(exec, source.spliceSubstringsWithSeparators(sourceRanges.data(), sourceRanges.size(),
- replacements.data(), replacements.size()));
+ return jsSpliceSubstringsWithSeparators(exec, sourceVal, source, sourceRanges.data(), sourceRanges.size(), replacements.data(), replacements.size());
}
// Not a regular expression, so treat the pattern as a string.
UString patternString = pattern.toString(exec);
- int matchPos = source.find(patternString);
+ if (patternString.size() == 1 && callType == CallTypeNone)
+ return sourceVal->replaceCharacter(exec, patternString[0], replacementString);
+
+ const UString& source = sourceVal->value(exec);
+ unsigned matchPos = source.find(patternString);
- if (matchPos == -1)
+ if (matchPos == UString::NotFound)
return sourceVal;
int matchLen = patternString.size();
replacementString = call(exec, replacement, callType, callData, exec->globalThisValue(), args).toString(exec);
}
-
- int ovector[2] = { matchPos, matchPos + matchLen };
- return jsString(exec, source.replaceRange(matchPos, matchLen, substituteBackreferences(replacementString, source, ovector, 0)));
+
+ size_t matchEnd = matchPos + matchLen;
+ int ovector[2] = { matchPos, matchEnd };
+ return jsString(exec, source.substr(0, matchPos), substituteBackreferences(replacementString, source, ovector, 0), source.substr(matchEnd));
}
JSValue JSC_HOST_CALL stringProtoFuncToString(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
pos = static_cast<int>(dpos);
}
- return jsNumber(exec, s.find(u2, pos));
+ unsigned result = s.find(u2, pos);
+ if (result == UString::NotFound)
+ return jsNumber(exec, -1);
+ return jsNumber(exec, result);
}
JSValue JSC_HOST_CALL stringProtoFuncLastIndexOf(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
else if (isnan(dpos))
dpos = len;
#endif
- return jsNumber(exec, s.rfind(u2, static_cast<int>(dpos)));
+
+ unsigned result = s.rfind(u2, static_cast<unsigned>(dpos));
+ if (result == UString::NotFound)
+ return jsNumber(exec, -1);
+ return jsNumber(exec, result);
}
JSValue JSC_HOST_CALL stringProtoFuncMatch(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
* If regexp is not an object whose [[Class]] property is "RegExp", it is
* replaced with the result of the expression new RegExp(regexp).
*/
- reg = RegExp::create(&exec->globalData(), a0.toString(exec));
+ reg = exec->globalData().regExpCache()->lookupOrCreate(a0.toString(exec), UString::null());
}
RegExpConstructor* regExpConstructor = exec->lexicalGlobalObject()->regExpConstructor();
int pos;
* If regexp is not an object whose [[Class]] property is "RegExp", it is
* replaced with the result of the expression new RegExp(regexp).
*/
- reg = RegExp::create(&exec->globalData(), a0.toString(exec));
+ reg = exec->globalData().regExpCache()->lookupOrCreate(a0.toString(exec), UString::null());
}
RegExpConstructor* regExpConstructor = exec->lexicalGlobalObject()->regExpConstructor();
int pos;
JSArray* result = constructEmptyArray(exec);
unsigned i = 0;
- int p0 = 0;
+ unsigned p0 = 0;
unsigned limit = a1.isUndefined() ? 0xFFFFFFFFU : a1.toUInt32(exec);
if (a0.inherits(&RegExpObject::info)) {
RegExp* reg = asRegExpObject(a0)->regExp();
// empty string matched by regexp -> empty array
return result;
}
- int pos = 0;
+ unsigned pos = 0;
while (i != limit && pos < s.size()) {
Vector<int, 32> ovector;
int mpos = reg->match(s, pos, &ovector);
break;
int mlen = ovector[1] - ovector[0];
pos = mpos + (mlen == 0 ? 1 : mlen);
- if (mpos != p0 || mlen) {
+ if (static_cast<unsigned>(mpos) != p0 || mlen) {
result->put(exec, i++, jsSubstring(exec, s, p0, mpos - p0));
p0 = mpos + mlen;
}
while (i != limit && p0 < s.size() - 1)
result->put(exec, i++, jsSingleCharacterSubstring(exec, s, p0++));
} else {
- int pos;
- while (i != limit && (pos = s.find(u2, p0)) >= 0) {
+ unsigned pos;
+
+ while (i != limit && (pos = s.find(u2, p0)) != UString::NotFound) {
result->put(exec, i++, jsSubstring(exec, s, p0, pos - p0));
p0 = pos + u2.size();
}
JSValue JSC_HOST_CALL stringProtoFuncBig(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsNontrivialString(exec, makeString("<big>", s, "</big>"));
+ return jsMakeNontrivialString(exec, "<big>", s, "</big>");
}
JSValue JSC_HOST_CALL stringProtoFuncSmall(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsNontrivialString(exec, makeString("<small>", s, "</small>"));
+ return jsMakeNontrivialString(exec, "<small>", s, "</small>");
}
JSValue JSC_HOST_CALL stringProtoFuncBlink(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsNontrivialString(exec, makeString("<blink>", s, "</blink>"));
+ return jsMakeNontrivialString(exec, "<blink>", s, "</blink>");
}
JSValue JSC_HOST_CALL stringProtoFuncBold(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsNontrivialString(exec, makeString("<b>", s, "</b>"));
+ return jsMakeNontrivialString(exec, "<b>", s, "</b>");
}
JSValue JSC_HOST_CALL stringProtoFuncFixed(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsString(exec, makeString("<tt>", s, "</tt>"));
+ return jsMakeNontrivialString(exec, "<tt>", s, "</tt>");
}
JSValue JSC_HOST_CALL stringProtoFuncItalics(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsNontrivialString(exec, makeString("<i>", s, "</i>"));
+ return jsMakeNontrivialString(exec, "<i>", s, "</i>");
}
JSValue JSC_HOST_CALL stringProtoFuncStrike(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsNontrivialString(exec, makeString("<strike>", s, "</strike>"));
+ return jsMakeNontrivialString(exec, "<strike>", s, "</strike>");
}
JSValue JSC_HOST_CALL stringProtoFuncSub(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsNontrivialString(exec, makeString("<sub>", s, "</sub>"));
+ return jsMakeNontrivialString(exec, "<sub>", s, "</sub>");
}
JSValue JSC_HOST_CALL stringProtoFuncSup(ExecState* exec, JSObject*, JSValue thisValue, const ArgList&)
{
UString s = thisValue.toThisString(exec);
- return jsNontrivialString(exec, makeString("<sup>", s, "</sup>"));
+ return jsMakeNontrivialString(exec, "<sup>", s, "</sup>");
}
JSValue JSC_HOST_CALL stringProtoFuncFontcolor(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
{
UString s = thisValue.toThisString(exec);
JSValue a0 = args.at(0);
- return jsNontrivialString(exec, makeString("<font color=\"", a0.toString(exec), "\">", s, "</font>"));
+ return jsMakeNontrivialString(exec, "<font color=\"", a0.toString(exec), "\">", s, "</font>");
}
JSValue JSC_HOST_CALL stringProtoFuncFontsize(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
return jsNontrivialString(exec, impl);
}
- return jsNontrivialString(exec, makeString("<font size=\"", a0.toString(exec), "\">", s, "</font>"));
+ return jsMakeNontrivialString(exec, "<font size=\"", a0.toString(exec), "\">", s, "</font>");
}
JSValue JSC_HOST_CALL stringProtoFuncAnchor(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
{
UString s = thisValue.toThisString(exec);
JSValue a0 = args.at(0);
- return jsNontrivialString(exec, makeString("<a name=\"", a0.toString(exec), "\">", s, "</a>"));
+ return jsMakeNontrivialString(exec, "<a name=\"", a0.toString(exec), "\">", s, "</a>");
}
JSValue JSC_HOST_CALL stringProtoFuncLink(ExecState* exec, JSObject*, JSValue thisValue, const ArgList& args)
static inline JSValue trimString(ExecState* exec, JSValue thisValue, int trimKind)
{
UString str = thisValue.toThisString(exec);
- int left = 0;
+ unsigned left = 0;
if (trimKind & TrimLeft) {
while (left < str.size() && isTrimWhitespace(str[left]))
left++;
}
- int right = str.size();
+ unsigned right = str.size();
if (trimKind & TrimRight) {
while (right > left && isTrimWhitespace(str[right - 1]))
right--;
static int comparePropertyMapEntryIndices(const void* a, const void* b);
+inline void Structure::setTransitionTable(TransitionTable* table)
+{
+ ASSERT(m_isUsingSingleSlot);
+#ifndef NDEBUG
+ setSingleTransition(0);
+#endif
+ m_isUsingSingleSlot = false;
+ m_transitions.m_table = table;
+ // The single-slot flag was cleared explicitly above; verify we are now using the table.
+ ASSERT(!m_isUsingSingleSlot);
+}
+
+// The contains and get methods accept imprecise matches, so if an unspecialised transition exists
+// for the given key they will consider that transition to be a match. If a specialised transition
+// exists and it matches the provided specificValue, get will return the specific transition.
+inline bool Structure::transitionTableContains(const StructureTransitionTableHash::Key& key, JSCell* specificValue)
+{
+ if (m_isUsingSingleSlot) {
+ Structure* existingTransition = singleTransition();
+ return existingTransition && existingTransition->m_nameInPrevious.get() == key.first
+ && existingTransition->m_attributesInPrevious == key.second
+ && (existingTransition->m_specificValueInPrevious == specificValue || existingTransition->m_specificValueInPrevious == 0);
+ }
+ TransitionTable::iterator find = transitionTable()->find(key);
+ if (find == transitionTable()->end())
+ return false;
+
+ return find->second.first || find->second.second->transitionedFor(specificValue);
+}
+
+inline Structure* Structure::transitionTableGet(const StructureTransitionTableHash::Key& key, JSCell* specificValue) const
+{
+ if (m_isUsingSingleSlot) {
+ Structure* existingTransition = singleTransition();
+ if (existingTransition && existingTransition->m_nameInPrevious.get() == key.first
+ && existingTransition->m_attributesInPrevious == key.second
+ && (existingTransition->m_specificValueInPrevious == specificValue || existingTransition->m_specificValueInPrevious == 0))
+ return existingTransition;
+ return 0;
+ }
+
+ Transition transition = transitionTable()->get(key);
+ if (transition.second && transition.second->transitionedFor(specificValue))
+ return transition.second;
+ return transition.first;
+}
+
+inline bool Structure::transitionTableHasTransition(const StructureTransitionTableHash::Key& key) const
+{
+ if (m_isUsingSingleSlot) {
+ Structure* transition = singleTransition();
+ return transition && transition->m_nameInPrevious == key.first
+ && transition->m_attributesInPrevious == key.second;
+ }
+ return transitionTable()->contains(key);
+}
+
+inline void Structure::transitionTableRemove(const StructureTransitionTableHash::Key& key, JSCell* specificValue)
+{
+ if (m_isUsingSingleSlot) {
+ ASSERT(transitionTableContains(key, specificValue));
+ setSingleTransition(0);
+ return;
+ }
+ TransitionTable::iterator find = transitionTable()->find(key);
+ if (!specificValue)
+ find->second.first = 0;
+ else
+ find->second.second = 0;
+ if (!find->second.first && !find->second.second)
+ transitionTable()->remove(find);
+}
+
+inline void Structure::transitionTableAdd(const StructureTransitionTableHash::Key& key, Structure* structure, JSCell* specificValue)
+{
+ if (m_isUsingSingleSlot) {
+ if (!singleTransition()) {
+ setSingleTransition(structure);
+ return;
+ }
+ Structure* existingTransition = singleTransition();
+ TransitionTable* transitionTable = new TransitionTable;
+ setTransitionTable(transitionTable);
+ if (existingTransition)
+ transitionTableAdd(std::make_pair(existingTransition->m_nameInPrevious.get(), existingTransition->m_attributesInPrevious), existingTransition, existingTransition->m_specificValueInPrevious);
+ }
+ if (!specificValue) {
+ TransitionTable::iterator find = transitionTable()->find(key);
+ if (find == transitionTable()->end())
+ transitionTable()->add(key, Transition(structure, static_cast<Structure*>(0)));
+ else
+ find->second.first = structure;
+ } else {
+ // If we're adding a transition to a specific value, then there cannot be
+ // an existing transition.
+ ASSERT(!transitionTable()->contains(key));
+ transitionTable()->add(key, Transition(static_cast<Structure*>(0), structure));
+ }
+}
+
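// Illustrative sketch (not part of the patch): the methods above keep the common
// "at most one outgoing transition" case in an inline slot and only allocate a
// hash table on the second insertion. The hypothetical class below shows the same
// promotion pattern with an ordinary std::map and generic key/value types; the
// real code additionally packs the slot and the table pointer into a union.
#include <map>

template<typename Key, typename Value>
class SingleSlotOrMapSketch {
public:
    SingleSlotOrMapSketch() : m_map(0), m_singleValue(0), m_usingSingleSlot(true) { }
    ~SingleSlotOrMapSketch() { delete m_map; }

    void add(const Key& key, Value* value)
    {
        if (m_usingSingleSlot) {
            if (!m_singleValue) { // first transition: use the inline slot
                m_singleKey = key;
                m_singleValue = value;
                return;
            }
            // Second transition: promote to a real map and re-insert the inline entry.
            m_map = new std::map<Key, Value*>;
            (*m_map)[m_singleKey] = m_singleValue;
            m_usingSingleSlot = false;
        }
        (*m_map)[key] = value;
    }

    Value* get(const Key& key) const
    {
        if (m_usingSingleSlot)
            return (m_singleValue && m_singleKey == key) ? m_singleValue : 0;
        typename std::map<Key, Value*>::const_iterator it = m_map->find(key);
        return it == m_map->end() ? 0 : it->second;
    }

private:
    std::map<Key, Value*>* m_map; // non-null only after promotion
    Key m_singleKey;
    Value* m_singleValue;
    bool m_usingSingleSlot;
};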
void Structure::dumpStatistics()
{
#if DUMP_STRUCTURE_ID_STATISTICS
, m_attributesInPrevious(0)
, m_specificFunctionThrashCount(0)
, m_anonymousSlotCount(anonymousSlotCount)
+ , m_isUsingSingleSlot(true)
{
+ m_transitions.m_singleTransition = 0;
+
ASSERT(m_prototype);
ASSERT(m_prototype.isObject() || m_prototype.isNull());
{
if (m_previous) {
ASSERT(m_nameInPrevious);
- m_previous->table.remove(make_pair(m_nameInPrevious.get(), m_attributesInPrevious), m_specificValueInPrevious);
+ m_previous->transitionTableRemove(make_pair(m_nameInPrevious.get(), m_attributesInPrevious), m_specificValueInPrevious);
}
-
- if (m_enumerationCache)
- m_enumerationCache->setCachedStructure(0);
+ ASSERT(!m_enumerationCache.hasDeadObject());
if (m_propertyTable) {
unsigned entryCount = m_propertyTable->keyCount + m_propertyTable->deletedSentinelCount;
fastFree(m_propertyTable);
}
+ if (!m_isUsingSingleSlot)
+ delete transitionTable();
+
#ifndef NDEBUG
#if ENABLE(JSC_MULTIPLE_THREADS)
MutexLocker protect(ignoreSetMutex);
ASSERT(!structure->isDictionary());
ASSERT(structure->typeInfo().type() == ObjectType);
- if (Structure* existingTransition = structure->table.get(make_pair(propertyName.ustring().rep(), attributes), specificValue)) {
+ if (Structure* existingTransition = structure->transitionTableGet(make_pair(propertyName.ustring().rep(), attributes), specificValue)) {
ASSERT(existingTransition->m_offset != noOffset);
offset = existingTransition->m_offset + existingTransition->m_anonymousSlotCount;
ASSERT(offset >= structure->m_anonymousSlotCount);
transition->m_offset = offset - structure->m_anonymousSlotCount;
ASSERT(structure->anonymousSlotCount() == transition->anonymousSlotCount());
- structure->table.add(make_pair(propertyName.ustring().rep(), attributes), transition.get(), specificValue);
+ structure->transitionTableAdd(make_pair(propertyName.ustring().rep(), attributes), transition.get(), specificValue);
return transition.release();
}
bool Structure::hasTransition(UString::Rep* rep, unsigned attributes)
{
- return table.hasTransition(make_pair(rep, attributes));
+ return transitionTableHasTransition(make_pair(rep, attributes));
}
size_t Structure::remove(const Identifier& propertyName)
void growPropertyStorageCapacity();
unsigned propertyStorageCapacity() const { return m_propertyStorageCapacity; }
- unsigned propertyStorageSize() const { return m_anonymousSlotCount + (m_propertyTable ? m_propertyTable->keyCount + (m_propertyTable->deletedOffsets ? m_propertyTable->deletedOffsets->size() : 0) : m_offset + 1); }
+ unsigned propertyStorageSize() const { return m_anonymousSlotCount + (m_propertyTable ? m_propertyTable->keyCount + (m_propertyTable->deletedOffsets ? m_propertyTable->deletedOffsets->size() : 0) : static_cast<unsigned>(m_offset + 1)); }
bool isUsingInlineStorage() const;
size_t get(const Identifier& propertyName);
// Since the number of transitions is always the same as m_offset, we keep the size of Structure down by not storing both.
return m_offset == noOffset ? 0 : m_offset + 1;
}
+
+ typedef std::pair<Structure*, Structure*> Transition;
+ typedef HashMap<StructureTransitionTableHash::Key, Transition, StructureTransitionTableHash, StructureTransitionTableHashTraits> TransitionTable;
+
+ inline bool transitionTableContains(const StructureTransitionTableHash::Key& key, JSCell* specificValue);
+ inline void transitionTableRemove(const StructureTransitionTableHash::Key& key, JSCell* specificValue);
+ inline void transitionTableAdd(const StructureTransitionTableHash::Key& key, Structure* structure, JSCell* specificValue);
+ inline bool transitionTableHasTransition(const StructureTransitionTableHash::Key& key) const;
+ inline Structure* transitionTableGet(const StructureTransitionTableHash::Key& key, JSCell* specificValue) const;
+
+ TransitionTable* transitionTable() const { ASSERT(!m_isUsingSingleSlot); return m_transitions.m_table; }
+ inline void setTransitionTable(TransitionTable* table);
+ Structure* singleTransition() const { ASSERT(m_isUsingSingleSlot); return m_transitions.m_singleTransition; }
+ void setSingleTransition(Structure* structure) { ASSERT(m_isUsingSingleSlot); m_transitions.m_singleTransition = structure; }
bool isValid(ExecState*, StructureChain* cachedPrototypeChain) const;
RefPtr<UString::Rep> m_nameInPrevious;
JSCell* m_specificValueInPrevious;
- StructureTransitionTable table;
+ // 'm_isUsingSingleSlot' indicates whether we are using the single transition optimisation.
+ union {
+ TransitionTable* m_table;
+ Structure* m_singleTransition;
+ } m_transitions;
WeakGCPtr<JSPropertyNameIterator> m_enumerationCache;
#endif
unsigned m_specificFunctionThrashCount : 2;
unsigned m_anonymousSlotCount : 5;
- // 5 free bits
+ unsigned m_isUsingSingleSlot : 1;
+ // 4 free bits
};
inline size_t Structure::get(const Identifier& propertyName)
return m_propertyTable->entries()[entryIndex - 1].offset;
}
}
-
- bool StructureTransitionTable::contains(const StructureTransitionTableHash::Key& key, JSCell* specificValue)
- {
- if (usingSingleTransitionSlot()) {
- Structure* existingTransition = singleTransition();
- return existingTransition && existingTransition->m_nameInPrevious.get() == key.first
- && existingTransition->m_attributesInPrevious == key.second
- && (existingTransition->m_specificValueInPrevious == specificValue || existingTransition->m_specificValueInPrevious == 0);
- }
- TransitionTable::iterator find = table()->find(key);
- if (find == table()->end())
- return false;
-
- return find->second.first || find->second.second->transitionedFor(specificValue);
- }
- Structure* StructureTransitionTable::get(const StructureTransitionTableHash::Key& key, JSCell* specificValue) const
- {
- if (usingSingleTransitionSlot()) {
- Structure* existingTransition = singleTransition();
- if (existingTransition && existingTransition->m_nameInPrevious.get() == key.first
- && existingTransition->m_attributesInPrevious == key.second
- && (existingTransition->m_specificValueInPrevious == specificValue || existingTransition->m_specificValueInPrevious == 0))
- return existingTransition;
- return 0;
- }
-
- Transition transition = table()->get(key);
- if (transition.second && transition.second->transitionedFor(specificValue))
- return transition.second;
- return transition.first;
- }
-
- bool StructureTransitionTable::hasTransition(const StructureTransitionTableHash::Key& key) const
- {
- if (usingSingleTransitionSlot()) {
- Structure* transition = singleTransition();
- return transition && transition->m_nameInPrevious == key.first
- && transition->m_attributesInPrevious == key.second;
- }
- return table()->contains(key);
- }
-
- void StructureTransitionTable::reifySingleTransition()
- {
- ASSERT(usingSingleTransitionSlot());
- Structure* existingTransition = singleTransition();
- TransitionTable* transitionTable = new TransitionTable;
- setTransitionTable(transitionTable);
- if (existingTransition)
- add(std::make_pair(existingTransition->m_nameInPrevious.get(), existingTransition->m_attributesInPrevious), existingTransition, existingTransition->m_specificValueInPrevious);
- }
} // namespace JSC
#endif // Structure_h
#include <wtf/HashFunctions.h>
#include <wtf/HashMap.h>
#include <wtf/HashTraits.h>
-#include <wtf/PtrAndFlags.h>
#include <wtf/OwnPtr.h>
#include <wtf/RefPtr.h>
static bool isDeletedValue(const TraitType& value) { return FirstTraits::isDeletedValue(value.first); }
};
- class StructureTransitionTable {
- typedef std::pair<Structure*, Structure*> Transition;
- typedef HashMap<StructureTransitionTableHash::Key, Transition, StructureTransitionTableHash, StructureTransitionTableHashTraits> TransitionTable;
- public:
- StructureTransitionTable() {
- m_transitions.m_singleTransition.set(0);
- m_transitions.m_singleTransition.setFlag(usingSingleSlot);
- }
-
- ~StructureTransitionTable() {
- if (!usingSingleTransitionSlot())
- delete table();
- }
-
- // The contains and get methods accept imprecise matches, so if an unspecialised transition exists
- // for the given key they will consider that transition to be a match. If a specialised transition
- // exists and it matches the provided specificValue, get will return the specific transition.
- inline bool contains(const StructureTransitionTableHash::Key&, JSCell* specificValue);
- inline Structure* get(const StructureTransitionTableHash::Key&, JSCell* specificValue) const;
- inline bool hasTransition(const StructureTransitionTableHash::Key& key) const;
- void remove(const StructureTransitionTableHash::Key& key, JSCell* specificValue)
- {
- if (usingSingleTransitionSlot()) {
- ASSERT(contains(key, specificValue));
- setSingleTransition(0);
- return;
- }
- TransitionTable::iterator find = table()->find(key);
- if (!specificValue)
- find->second.first = 0;
- else
- find->second.second = 0;
- if (!find->second.first && !find->second.second)
- table()->remove(find);
- }
- void add(const StructureTransitionTableHash::Key& key, Structure* structure, JSCell* specificValue)
- {
- if (usingSingleTransitionSlot()) {
- if (!singleTransition()) {
- setSingleTransition(structure);
- return;
- }
- reifySingleTransition();
- }
- if (!specificValue) {
- TransitionTable::iterator find = table()->find(key);
- if (find == table()->end())
- table()->add(key, Transition(structure, 0));
- else
- find->second.first = structure;
- } else {
- // If we're adding a transition to a specific value, then there cannot be
- // an existing transition
- ASSERT(!table()->contains(key));
- table()->add(key, Transition(0, structure));
- }
- }
-
- private:
- TransitionTable* table() const { ASSERT(!usingSingleTransitionSlot()); return m_transitions.m_table; }
- Structure* singleTransition() const {
- ASSERT(usingSingleTransitionSlot());
- return m_transitions.m_singleTransition.get();
- }
- bool usingSingleTransitionSlot() const { return m_transitions.m_singleTransition.isFlagSet(usingSingleSlot); }
- void setSingleTransition(Structure* structure)
- {
- ASSERT(usingSingleTransitionSlot());
- m_transitions.m_singleTransition.set(structure);
- }
-
- void setTransitionTable(TransitionTable* table)
- {
- ASSERT(usingSingleTransitionSlot());
-#ifndef NDEBUG
- setSingleTransition(0);
-#endif
- m_transitions.m_table = table;
- // This implicitly clears the flag that indicates we're using a single transition
- ASSERT(!usingSingleTransitionSlot());
- }
- inline void reifySingleTransition();
-
- enum UsingSingleSlot {
- usingSingleSlot
- };
- // Last bit indicates whether we are using the single transition optimisation
- union {
- TransitionTable* m_table;
- PtrAndFlagsBase<Structure, UsingSingleSlot> m_singleTransition;
- } m_transitions;
- };
-
} // namespace JSC
#endif // StructureTransitionTable_h
--- /dev/null
+/*
+ * Copyright (C) 2010 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Google Inc. ("Google") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef Terminator_h
+#define Terminator_h
+
+namespace JSC {
+
+class Terminator {
+public:
+ Terminator() : m_shouldTerminate(false) { }
+
+ void terminateSoon() { m_shouldTerminate = true; }
+ bool shouldTerminate() const { return m_shouldTerminate; }
+
+private:
+ bool m_shouldTerminate;
+};
+
+} // namespace JSC
+
+#endif // Terminator_h
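// Illustrative sketch (not part of the patch): Terminator is a purely co-operative
// flag. Some long-running loop polls shouldTerminate() and unwinds when another
// component (e.g. a watchdog on a different thread) has called terminateSoon().
// The include path and the work-queue helpers below are hypothetical, defined
// trivially so the sketch is self-contained.
#include "Terminator.h"

static bool workItemsRemain() { return false; }
static void processNextWorkItem() { }

static void runUntilTerminated(JSC::Terminator& terminator)
{
    while (workItemsRemain()) {
        if (terminator.shouldTerminate())
            return; // stop co-operatively instead of being killed
        processNextWorkItem();
    }
}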
GetThreadTimes(GetCurrentThread(), &creationTime, &exitTime, &kernelTime.fileTime, &userTime.fileTime);
return userTime.fileTimeAsLong / 10000 + kernelTime.fileTimeAsLong / 10000;
+#elif OS(SYMBIAN)
+ RThread current;
+ TTimeIntervalMicroSeconds cpuTime;
+
+ TInt err = current.GetCpuTime(cpuTime);
+ ASSERT_WITH_MESSAGE(err == KErrNone, "GetCpuTime failed with %d", err);
+ return cpuTime.Int64() / 1000;
#elif PLATFORM(BREWMP)
// This function returns a continuously and linearly increasing millisecond
// timer from the time the device was powered on.
using namespace std;
namespace JSC {
-
+
extern const double NaN;
extern const double Inf;
-CString::CString(const char* c)
- : m_length(strlen(c))
- , m_data(new char[m_length + 1])
-{
- memcpy(m_data, c, m_length + 1);
-}
-
-CString::CString(const char* c, size_t length)
- : m_length(length)
- , m_data(new char[length + 1])
-{
- memcpy(m_data, c, m_length);
- m_data[m_length] = 0;
-}
-
-CString::CString(const CString& b)
-{
- m_length = b.m_length;
- if (b.m_data) {
- m_data = new char[m_length + 1];
- memcpy(m_data, b.m_data, m_length + 1);
- } else
- m_data = 0;
-}
-
-CString::~CString()
-{
- delete [] m_data;
-}
-
-CString CString::adopt(char* c, size_t length)
-{
- CString s;
- s.m_data = c;
- s.m_length = length;
- return s;
-}
-
-CString& CString::append(const CString& t)
-{
- char* n;
- n = new char[m_length + t.m_length + 1];
- if (m_length)
- memcpy(n, m_data, m_length);
- if (t.m_length)
- memcpy(n + m_length, t.m_data, t.m_length);
- m_length += t.m_length;
- n[m_length] = 0;
-
- delete [] m_data;
- m_data = n;
-
- return *this;
-}
-
-CString& CString::operator=(const char* c)
-{
- if (m_data)
- delete [] m_data;
- m_length = strlen(c);
- m_data = new char[m_length + 1];
- memcpy(m_data, c, m_length + 1);
-
- return *this;
-}
-
-CString& CString::operator=(const CString& str)
-{
- if (this == &str)
- return *this;
-
- if (m_data)
- delete [] m_data;
- m_length = str.m_length;
- if (str.m_data) {
- m_data = new char[m_length + 1];
- memcpy(m_data, str.m_data, m_length + 1);
- } else
- m_data = 0;
-
- return *this;
-}
-
-bool operator==(const CString& c1, const CString& c2)
-{
- size_t len = c1.size();
- return len == c2.size() && (len == 0 || memcmp(c1.c_str(), c2.c_str(), len) == 0);
-}
-
-// These static strings are immutable, except for rc, whose initial value is chosen to
-// reduce the possibility of it becoming zero due to ref/deref not being thread-safe.
-static UChar sharedEmptyChar;
-UStringImpl* UStringImpl::s_null;
-UStringImpl* UStringImpl::s_empty;
-UString* UString::nullUString;
+// The shared null UString is immutable; it simply has no underlying rep.
+UString* UString::s_nullUString;
void initializeUString()
{
- UStringImpl::s_null = new UStringImpl(0, 0, UStringImpl::ConstructStaticString);
- UStringImpl::s_empty = new UStringImpl(&sharedEmptyChar, 0, UStringImpl::ConstructStaticString);
- UString::nullUString = new UString;
-}
-
-static PassRefPtr<UString::Rep> createRep(const char* c)
-{
- if (!c)
- return &UString::Rep::null();
-
- if (!c[0])
- return &UString::Rep::empty();
-
- size_t length = strlen(c);
- UChar* d;
- PassRefPtr<UStringImpl> result = UStringImpl::tryCreateUninitialized(length, d);
- if (!result)
- return &UString::Rep::null();
-
- for (size_t i = 0; i < length; i++)
- d[i] = static_cast<unsigned char>(c[i]); // use unsigned char to zero-extend instead of sign-extend
- return result;
-}
-
-static inline PassRefPtr<UString::Rep> createRep(const char* c, int length)
-{
- if (!c)
- return &UString::Rep::null();
-
- if (!length)
- return &UString::Rep::empty();
-
- UChar* d;
- PassRefPtr<UStringImpl> result = UStringImpl::tryCreateUninitialized(length, d);
- if (!result)
- return &UString::Rep::null();
+ // UStringImpl::empty() does not construct its static string in a threadsafe fashion,
+ // so make sure it has been initialized here, before multiple threads can reach it.
+ UStringImpl::empty();
- for (int i = 0; i < length; i++)
- d[i] = static_cast<unsigned char>(c[i]); // use unsigned char to zero-extend instead of sign-extend
- return result;
+ UString::s_nullUString = new UString;
}
UString::UString(const char* c)
- : m_rep(createRep(c))
+ : m_rep(Rep::create(c))
{
}
-UString::UString(const char* c, int length)
- : m_rep(createRep(c, length))
+UString::UString(const char* c, unsigned length)
+ : m_rep(Rep::create(c, length))
{
}
-UString::UString(const UChar* c, int length)
+UString::UString(const UChar* c, unsigned length)
+ : m_rep(Rep::create(c, length))
{
- if (length == 0)
- m_rep = &Rep::empty();
- else
- m_rep = Rep::create(c, length);
-}
-
-UString UString::createFromUTF8(const char* string)
-{
- if (!string)
- return null();
-
- size_t length = strlen(string);
- Vector<UChar, 1024> buffer(length);
- UChar* p = buffer.data();
- if (conversionOK != convertUTF8ToUTF16(&string, string + length, &p, p + length))
- return null();
-
- return UString(buffer.data(), p - buffer.data());
}
UString UString::from(int i)
UChar buf[1 + sizeof(i) * 3];
UChar* end = buf + sizeof(buf) / sizeof(UChar);
UChar* p = end;
-
+
if (i == 0)
*--p = '0';
else if (i == INT_MIN) {
*--p = '-';
}
- return UString(p, static_cast<int>(end - p));
+ return UString(p, static_cast<unsigned>(end - p));
}
UString UString::from(long long i)
*--p = '0';
else if (i == std::numeric_limits<long long>::min()) {
char minBuf[1 + sizeof(i) * 3];
-#if OS(WINDOWS)
- snprintf(minBuf, sizeof(minBuf) - 1, "%I64d", std::numeric_limits<long long>::min());
-#elif PLATFORM(IPHONE)
snprintf(minBuf, sizeof(minBuf), "%lld", std::numeric_limits<long long>::min());
-#else
- snprintf(minBuf, sizeof(minBuf) - 1, "%lld", std::numeric_limits<long long>::min());
-#endif
return UString(minBuf);
} else {
bool negative = false;
*--p = '-';
}
- return UString(p, static_cast<int>(end - p));
+ return UString(p, static_cast<unsigned>(end - p));
}
-UString UString::from(unsigned int u)
+UString UString::from(unsigned u)
{
UChar buf[sizeof(u) * 3];
UChar* end = buf + sizeof(buf) / sizeof(UChar);
UChar* p = end;
-
+
if (u == 0)
*--p = '0';
else {
u /= 10;
}
}
-
- return UString(p, static_cast<int>(end - p));
+
+ return UString(p, static_cast<unsigned>(end - p));
}
UString UString::from(long l)
*--p = '-';
}
- return UString(p, static_cast<int>(end - p));
+ return UString(p, static_cast<unsigned>(end - p));
}
UString UString::from(double d)
return UString(buffer, length);
}
-UString UString::spliceSubstringsWithSeparators(const Range* substringRanges, int rangeCount, const UString* separators, int separatorCount) const
-{
- m_rep->checkConsistency();
-
- if (rangeCount == 1 && separatorCount == 0) {
- int thisSize = size();
- int position = substringRanges[0].position;
- int length = substringRanges[0].length;
- if (position <= 0 && length >= thisSize)
- return *this;
- return UString::Rep::create(m_rep, max(0, position), min(thisSize, length));
- }
-
- int totalLength = 0;
- for (int i = 0; i < rangeCount; i++)
- totalLength += substringRanges[i].length;
- for (int i = 0; i < separatorCount; i++)
- totalLength += separators[i].size();
-
- if (totalLength == 0)
- return "";
-
- UChar* buffer;
- PassRefPtr<Rep> rep = Rep::tryCreateUninitialized(totalLength, buffer);
- if (!rep)
- return null();
-
- int maxCount = max(rangeCount, separatorCount);
- int bufferPos = 0;
- for (int i = 0; i < maxCount; i++) {
- if (i < rangeCount) {
- UStringImpl::copyChars(buffer + bufferPos, data() + substringRanges[i].position, substringRanges[i].length);
- bufferPos += substringRanges[i].length;
- }
- if (i < separatorCount) {
- UStringImpl::copyChars(buffer + bufferPos, separators[i].data(), separators[i].size());
- bufferPos += separators[i].size();
- }
- }
-
- return rep;
-}
-
-UString UString::replaceRange(int rangeStart, int rangeLength, const UString& replacement) const
-{
- m_rep->checkConsistency();
-
- int replacementLength = replacement.size();
- int totalLength = size() - rangeLength + replacementLength;
- if (totalLength == 0)
- return "";
-
- UChar* buffer;
- PassRefPtr<Rep> rep = Rep::tryCreateUninitialized(totalLength, buffer);
- if (!rep)
- return null();
-
- UStringImpl::copyChars(buffer, data(), rangeStart);
- UStringImpl::copyChars(buffer + rangeStart, replacement.data(), replacementLength);
- int rangeEnd = rangeStart + rangeLength;
- UStringImpl::copyChars(buffer + rangeStart + replacementLength, data() + rangeEnd, size() - rangeEnd);
-
- return rep;
-}
-
-bool UString::getCString(CStringBuffer& buffer) const
-{
- int length = size();
- int neededSize = length + 1;
- buffer.resize(neededSize);
- char* buf = buffer.data();
-
- UChar ored = 0;
- const UChar* p = data();
- char* q = buf;
- const UChar* limit = p + length;
- while (p != limit) {
- UChar c = p[0];
- ored |= c;
- *q = static_cast<char>(c);
- ++p;
- ++q;
- }
- *q = '\0';
-
- return !(ored & 0xFF00);
-}
-
char* UString::ascii() const
{
static char* asciiBuffer = 0;
- int length = size();
- int neededSize = length + 1;
+ unsigned length = size();
+ unsigned neededSize = length + 1;
delete[] asciiBuffer;
asciiBuffer = new char[neededSize];
return asciiBuffer;
}
-UString& UString::operator=(const char* c)
-{
- if (!c) {
- m_rep = &Rep::null();
- return *this;
- }
-
- if (!c[0]) {
- m_rep = &Rep::empty();
- return *this;
- }
-
- int l = static_cast<int>(strlen(c));
- UChar* d = 0;
- m_rep = Rep::tryCreateUninitialized(l, d);
- if (m_rep) {
- for (int i = 0; i < l; i++)
- d[i] = static_cast<unsigned char>(c[i]); // use unsigned char to zero-extend instead of sign-extend
- } else
- makeNull();
-
- return *this;
-}
-
bool UString::is8Bit() const
{
const UChar* u = data();
return true;
}
-UChar UString::operator[](int pos) const
+UChar UString::operator[](unsigned pos) const
{
if (pos >= size())
return '\0';
return NaN;
}
- // FIXME: If tolerateTrailingJunk is true, then we want to tolerate non-8-bit junk
- // after the number, so this is too strict a check.
- CStringBuffer s;
- if (!getCString(s))
+ // FIXME: If tolerateTrailingJunk is true, then we want to tolerate junk
+ // after the number, even if it contains invalid UTF-16 sequences. So we
+ // shouldn't use the UTF8String function, which returns null when it
+ // encounters invalid UTF-16. Further, we have no need to convert the
+ // non-ASCII characters to UTF-8, so the UTF8String function does quite a
+ // bit of unnecessary work here.
+ CString s = UTF8String();
+ if (s.isNull())
return NaN;
const char* c = s.data();
while (isASCIISpace(*c))
c++;
// don't allow anything after - unless tolerant=true
+ // FIXME: If the string contains a U+0000 character, then this check is incorrect.
if (!tolerateTrailingJunk && *c != '\0')
d = NaN;
*ok = false;
// Empty string is not OK.
- int len = m_rep->size();
+ unsigned len = m_rep->length();
if (len == 0)
return 0;
- const UChar* p = m_rep->data();
+ const UChar* p = m_rep->characters();
unsigned short c = p[0];
// If the first digit is 0, only 0 itself is OK.
}
}
-int UString::find(const UString& f, int pos) const
+unsigned UString::find(const UString& f, unsigned pos) const
{
- int fsz = f.size();
-
- if (pos < 0)
- pos = 0;
+ unsigned fsz = f.size();
if (fsz == 1) {
UChar ch = f[0];
const UChar* end = data() + size();
for (const UChar* c = data() + pos; c < end; c++) {
if (*c == ch)
- return static_cast<int>(c - data());
+ return static_cast<unsigned>(c - data());
}
- return -1;
+ return NotFound;
}
- int sz = size();
+ unsigned sz = size();
if (sz < fsz)
- return -1;
+ return NotFound;
if (fsz == 0)
return pos;
const UChar* end = data() + sz - fsz;
- int fsizeminusone = (fsz - 1) * sizeof(UChar);
+ unsigned fsizeminusone = (fsz - 1) * sizeof(UChar);
const UChar* fdata = f.data();
unsigned short fchar = fdata[0];
++fdata;
for (const UChar* c = data() + pos; c <= end; c++) {
if (c[0] == fchar && !memcmp(c + 1, fdata, fsizeminusone))
- return static_cast<int>(c - data());
+ return static_cast<unsigned>(c - data());
}
- return -1;
+ return NotFound;
}
-int UString::find(UChar ch, int pos) const
+unsigned UString::find(UChar ch, unsigned pos) const
{
- if (pos < 0)
- pos = 0;
const UChar* end = data() + size();
for (const UChar* c = data() + pos; c < end; c++) {
if (*c == ch)
- return static_cast<int>(c - data());
+ return static_cast<unsigned>(c - data());
}
-
- return -1;
+
+ return NotFound;
}
-int UString::rfind(const UString& f, int pos) const
+unsigned UString::rfind(const UString& f, unsigned pos) const
{
- int sz = size();
- int fsz = f.size();
+ unsigned sz = size();
+ unsigned fsz = f.size();
if (sz < fsz)
- return -1;
- if (pos < 0)
- pos = 0;
+ return NotFound;
if (pos > sz - fsz)
pos = sz - fsz;
if (fsz == 0)
return pos;
- int fsizeminusone = (fsz - 1) * sizeof(UChar);
+ unsigned fsizeminusone = (fsz - 1) * sizeof(UChar);
const UChar* fdata = f.data();
for (const UChar* c = data() + pos; c >= data(); c--) {
if (*c == *fdata && !memcmp(c + 1, fdata + 1, fsizeminusone))
- return static_cast<int>(c - data());
+ return static_cast<unsigned>(c - data());
}
- return -1;
+ return NotFound;
}
-int UString::rfind(UChar ch, int pos) const
+unsigned UString::rfind(UChar ch, unsigned pos) const
{
if (isEmpty())
- return -1;
+ return NotFound;
if (pos + 1 >= size())
pos = size() - 1;
for (const UChar* c = data() + pos; c >= data(); c--) {
if (*c == ch)
- return static_cast<int>(c - data());
+ return static_cast<unsigned>(c - data());
}
- return -1;
+ return NotFound;
}
-UString UString::substr(int pos, int len) const
+UString UString::substr(unsigned pos, unsigned len) const
{
- int s = size();
+ unsigned s = size();
- if (pos < 0)
- pos = 0;
- else if (pos >= s)
+ if (pos >= s)
pos = s;
- if (len < 0)
- len = s;
- if (pos + len >= s)
- len = s - pos;
+ unsigned limit = s - pos;
+ if (len > limit)
+ len = limit;
if (pos == 0 && len == s)
return *this;
bool operator<(const UString& s1, const UString& s2)
{
- const int l1 = s1.size();
- const int l2 = s2.size();
- const int lmin = l1 < l2 ? l1 : l2;
+ const unsigned l1 = s1.size();
+ const unsigned l2 = s2.size();
+ const unsigned lmin = l1 < l2 ? l1 : l2;
const UChar* c1 = s1.data();
const UChar* c2 = s2.data();
- int l = 0;
+ unsigned l = 0;
while (l < lmin && *c1 == *c2) {
c1++;
c2++;
bool operator>(const UString& s1, const UString& s2)
{
- const int l1 = s1.size();
- const int l2 = s2.size();
- const int lmin = l1 < l2 ? l1 : l2;
+ const unsigned l1 = s1.size();
+ const unsigned l2 = s2.size();
+ const unsigned lmin = l1 < l2 ? l1 : l2;
const UChar* c1 = s1.data();
const UChar* c2 = s2.data();
- int l = 0;
+ unsigned l = 0;
while (l < lmin && *c1 == *c2) {
c1++;
c2++;
int compare(const UString& s1, const UString& s2)
{
- const int l1 = s1.size();
- const int l2 = s2.size();
- const int lmin = l1 < l2 ? l1 : l2;
+ const unsigned l1 = s1.size();
+ const unsigned l2 = s2.size();
+ const unsigned lmin = l1 < l2 ? l1 : l2;
const UChar* c1 = s1.data();
const UChar* c2 = s2.data();
- int l = 0;
+ unsigned l = 0;
while (l < lmin && *c1 == *c2) {
c1++;
c2++;
return (l1 > l2) ? 1 : -1;
}
-bool equal(const UString::Rep* r, const UString::Rep* b)
-{
- int length = r->size();
- if (length != b->size())
- return false;
- const UChar* d = r->data();
- const UChar* s = b->data();
- for (int i = 0; i != length; ++i) {
- if (d[i] != s[i])
- return false;
- }
- return true;
-}
-
CString UString::UTF8String(bool strict) const
{
// Allocate a buffer big enough to hold all the characters.
- const int length = size();
+ const unsigned length = size();
Vector<char, 1024> buffer(length * 3);
// Convert to runs of 8-bit characters.
return CString(buffer.data(), p - buffer.data());
}
-// For use in error handling code paths -- having this not be inlined helps avoid PIC branches to fetch the global on Mac OS X.
-NEVER_INLINE void UString::makeNull()
-{
- m_rep = &Rep::null();
-}
-
-// For use in error handling code paths -- having this not be inlined helps avoid PIC branches to fetch the global on Mac OS X.
-NEVER_INLINE UString::Rep* UString::nullRep()
-{
- return &Rep::null();
-}
-
} // namespace JSC
#include <wtf/CrossThreadRefCounted.h>
#include <wtf/OwnFastMallocPtr.h>
#include <wtf/PassRefPtr.h>
-#include <wtf/PtrAndFlags.h>
#include <wtf/RefPtr.h>
#include <wtf/Vector.h>
+#include <wtf/text/CString.h>
#include <wtf/unicode/Unicode.h>
namespace JSC {
using WTF::PlacementNewAdoptType;
using WTF::PlacementNewAdopt;
- class CString {
- public:
- CString()
- : m_length(0)
- , m_data(0)
- {
- }
-
- CString(const char*);
- CString(const char*, size_t);
- CString(const CString&);
-
- ~CString();
-
- static CString adopt(char*, size_t); // buffer should be allocated with new[].
-
- CString& append(const CString&);
- CString& operator=(const char* c);
- CString& operator=(const CString&);
- CString& operator+=(const CString& c) { return append(c); }
-
- size_t size() const { return m_length; }
- const char* c_str() const { return m_data; }
-
- private:
- size_t m_length;
- char* m_data;
- };
-
- bool operator==(const CString&, const CString&);
-
- typedef Vector<char, 32> CStringBuffer;
-
class UString {
friend class JIT;
typedef UStringImpl Rep;
public:
- // UString constructors passed char*s assume ISO Latin-1 encoding; for UTF8 use 'createFromUTF8', below.
- UString();
+ UString() {}
UString(const char*); // Constructor for null-terminated string.
- UString(const char*, int length);
- UString(const UChar*, int length);
+ UString(const char*, unsigned length);
+ UString(const UChar*, unsigned length);
UString(const Vector<UChar>& buffer);
UString(const UString& s)
{
}
- ~UString()
- {
- }
-
template<size_t inlineCapacity>
static PassRefPtr<UStringImpl> adopt(Vector<UChar, inlineCapacity>& vector)
{
return Rep::adopt(vector);
}
- static UString createFromUTF8(const char*);
-
static UString from(int);
static UString from(long long);
- static UString from(unsigned int);
+ static UString from(unsigned);
static UString from(long);
static UString from(double);
- struct Range {
- public:
- Range(int pos, int len)
- : position(pos)
- , length(len)
- {
- }
-
- Range()
- {
- }
-
- int position;
- int length;
- };
-
- UString spliceSubstringsWithSeparators(const Range* substringRanges, int rangeCount, const UString* separators, int separatorCount) const;
-
- UString replaceRange(int rangeStart, int RangeEnd, const UString& replacement) const;
-
- bool getCString(CStringBuffer&) const;
-
// NOTE: This method should only be used for *debugging* purposes as it
// is neither Unicode safe nor free from side effects nor thread-safe.
char* ascii() const;
*/
CString UTF8String(bool strict = false) const;
- UString& operator=(const char*c);
+ const UChar* data() const
+ {
+ if (!m_rep)
+ return 0;
+ return m_rep->characters();
+ }
- const UChar* data() const { return m_rep->data(); }
+ unsigned size() const
+ {
+ if (!m_rep)
+ return 0;
+ return m_rep->length();
+ }
- bool isNull() const { return m_rep == &Rep::null(); }
- bool isEmpty() const { return !m_rep->size(); }
+ bool isNull() const { return !m_rep; }
+ bool isEmpty() const { return !m_rep || !m_rep->length(); }
bool is8Bit() const;
- int size() const { return m_rep->size(); }
-
- UChar operator[](int pos) const;
+ UChar operator[](unsigned pos) const;
double toDouble(bool tolerateTrailingJunk, bool tolerateEmptyString) const;
double toDouble(bool tolerateTrailingJunk) const;
unsigned toArrayIndex(bool* ok = 0) const;
- int find(const UString& f, int pos = 0) const;
- int find(UChar, int pos = 0) const;
- int rfind(const UString& f, int pos) const;
- int rfind(UChar, int pos) const;
+ static const unsigned NotFound = 0xFFFFFFFFu;
+ unsigned find(const UString& f, unsigned pos = 0) const;
+ unsigned find(UChar, unsigned pos = 0) const;
+ unsigned rfind(const UString& f, unsigned pos) const;
+ unsigned rfind(UChar, unsigned pos) const;
- UString substr(int pos = 0, int len = -1) const;
+ UString substr(unsigned pos = 0, unsigned len = 0xFFFFFFFF) const;
- static const UString& null() { return *nullUString; }
+ static const UString& null() { return *s_nullUString; }
Rep* rep() const { return m_rep.get(); }
- static Rep* nullRep();
UString(PassRefPtr<Rep> r)
: m_rep(r)
{
- ASSERT(m_rep);
}
- size_t cost() const { return m_rep->cost(); }
+ size_t cost() const
+ {
+ if (!m_rep)
+ return 0;
+ return m_rep->cost();
+ }
private:
- void makeNull();
-
RefPtr<Rep> m_rep;
- static UString* nullUString;
+
+ static UString* s_nullUString;
friend void initializeUString();
friend bool operator==(const UString&, const UString&);
ALWAYS_INLINE bool operator==(const UString& s1, const UString& s2)
{
- int size = s1.size();
- switch (size) {
- case 0:
- return !s2.size();
+ UString::Rep* rep1 = s1.rep();
+ UString::Rep* rep2 = s2.rep();
+ unsigned size1 = 0;
+ unsigned size2 = 0;
+
+ if (rep1 == rep2) // If they're the same rep, they're equal.
+ return true;
+
+ if (rep1)
+ size1 = rep1->length();
+
+ if (rep2)
+ size2 = rep2->length();
+
+ if (size1 != size2) // If the lengths are not the same, we're done.
+ return false;
+
+ if (!size1)
+ return true;
+
+ // At this point we know
+ // (a) that the strings are the same length and
+ // (b) that they are greater than zero length.
+ const UChar* d1 = rep1->characters();
+ const UChar* d2 = rep2->characters();
+
+ if (d1 == d2) // Check to see if the data pointers are the same.
+ return true;
+
+ // Do quick checks for sizes 1 and 2.
+ switch (size1) {
case 1:
- return s2.size() == 1 && s1.data()[0] == s2.data()[0];
- case 2: {
- if (s2.size() != 2)
- return false;
- const UChar* d1 = s1.data();
- const UChar* d2 = s2.data();
+ return d1[0] == d2[0];
+ case 2:
return (d1[0] == d2[0]) & (d1[1] == d2[1]);
- }
default:
- return s2.size() == size && memcmp(s1.data(), s2.data(), size * sizeof(UChar)) == 0;
+ return memcmp(d1, d2, size1 * sizeof(UChar)) == 0;
}
}
int compare(const UString&, const UString&);
- inline UString::UString()
- : m_rep(&Rep::null())
- {
- }
-
// Rule from ECMA 15.2 about what an array index is.
// Must exactly match string form of an unsigned integer, and be less than 2^32 - 1.
inline unsigned UString::toArrayIndex(bool* ok) const
// We'd rather not do shared substring append for small strings, since
// this runs too much risk of a tiny initial string holding down a
// huge buffer.
- // FIXME: this should be size_t but that would cause warnings until we
- // fix UString sizes to be size_t instead of int
- static const int minShareSize = Heap::minExtraCost / sizeof(UChar);
+ static const unsigned minShareSize = Heap::minExtraCost / sizeof(UChar);
struct IdentifierRepHash : PtrHash<RefPtr<JSC::UString::Rep> > {
static unsigned hash(const RefPtr<JSC::UString::Rep>& key) { return key->existingHash(); }
unsigned m_length;
};
+ inline void sumWithOverflow(unsigned& total, unsigned addend, bool& overflow)
+ {
+ unsigned oldTotal = total;
+ total = oldTotal + addend;
+ if (total < oldTotal)
+ overflow = true;
+ }
+
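// Illustrative sketch (not part of the patch): sumWithOverflow adds 'addend' into
// 'total' and latches 'overflow' whenever the unsigned addition wraps, which is
// how the try* helpers below refuse to allocate an undersized buffer. A
// hypothetical standalone check, assuming it lives in the same scope as above:
inline bool lengthsFitInUnsigned(unsigned a, unsigned b)
{
    bool overflow = false;
    unsigned total = a;
    sumWithOverflow(total, b, overflow); // e.g. a = 0xFFFFFFF0u, b = 0x20u wraps, so overflow becomes true
    return !overflow;
}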
template<typename StringType1, typename StringType2>
- UString makeString(StringType1 string1, StringType2 string2)
+ PassRefPtr<UStringImpl> tryMakeString(StringType1 string1, StringType2 string2)
{
StringTypeAdapter<StringType1> adapter1(string1);
StringTypeAdapter<StringType2> adapter2(string2);
UChar* buffer;
- unsigned length = adapter1.length() + adapter2.length();
+ bool overflow = false;
+ unsigned length = adapter1.length();
+ sumWithOverflow(length, adapter2.length(), overflow);
+ if (overflow)
+ return 0;
PassRefPtr<UStringImpl> resultImpl = UStringImpl::tryCreateUninitialized(length, buffer);
if (!resultImpl)
- return UString();
+ return 0;
UChar* result = buffer;
adapter1.writeTo(result);
}
template<typename StringType1, typename StringType2, typename StringType3>
- UString makeString(StringType1 string1, StringType2 string2, StringType3 string3)
+ PassRefPtr<UStringImpl> tryMakeString(StringType1 string1, StringType2 string2, StringType3 string3)
{
StringTypeAdapter<StringType1> adapter1(string1);
StringTypeAdapter<StringType2> adapter2(string2);
StringTypeAdapter<StringType3> adapter3(string3);
- UChar* buffer;
- unsigned length = adapter1.length() + adapter2.length() + adapter3.length();
+ UChar* buffer = 0;
+ bool overflow = false;
+ unsigned length = adapter1.length();
+ sumWithOverflow(length, adapter2.length(), overflow);
+ sumWithOverflow(length, adapter3.length(), overflow);
+ if (overflow)
+ return 0;
PassRefPtr<UStringImpl> resultImpl = UStringImpl::tryCreateUninitialized(length, buffer);
if (!resultImpl)
- return UString();
+ return 0;
UChar* result = buffer;
adapter1.writeTo(result);
}
template<typename StringType1, typename StringType2, typename StringType3, typename StringType4>
- UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4)
+ PassRefPtr<UStringImpl> tryMakeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4)
{
StringTypeAdapter<StringType1> adapter1(string1);
StringTypeAdapter<StringType2> adapter2(string2);
StringTypeAdapter<StringType4> adapter4(string4);
UChar* buffer;
- unsigned length = adapter1.length() + adapter2.length() + adapter3.length() + adapter4.length();
+ bool overflow = false;
+ unsigned length = adapter1.length();
+ sumWithOverflow(length, adapter2.length(), overflow);
+ sumWithOverflow(length, adapter3.length(), overflow);
+ sumWithOverflow(length, adapter4.length(), overflow);
+ if (overflow)
+ return 0;
PassRefPtr<UStringImpl> resultImpl = UStringImpl::tryCreateUninitialized(length, buffer);
if (!resultImpl)
- return UString();
+ return 0;
UChar* result = buffer;
adapter1.writeTo(result);
}
template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5>
- UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5)
+ PassRefPtr<UStringImpl> tryMakeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5)
{
StringTypeAdapter<StringType1> adapter1(string1);
StringTypeAdapter<StringType2> adapter2(string2);
StringTypeAdapter<StringType5> adapter5(string5);
UChar* buffer;
- unsigned length = adapter1.length() + adapter2.length() + adapter3.length() + adapter4.length() + adapter5.length();
+ bool overflow = false;
+ unsigned length = adapter1.length();
+ sumWithOverflow(length, adapter2.length(), overflow);
+ sumWithOverflow(length, adapter3.length(), overflow);
+ sumWithOverflow(length, adapter4.length(), overflow);
+ sumWithOverflow(length, adapter5.length(), overflow);
+ if (overflow)
+ return 0;
PassRefPtr<UStringImpl> resultImpl = UStringImpl::tryCreateUninitialized(length, buffer);
if (!resultImpl)
- return UString();
+ return 0;
UChar* result = buffer;
adapter1.writeTo(result);
}
template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5, typename StringType6>
- UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6)
+ PassRefPtr<UStringImpl> tryMakeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6)
{
StringTypeAdapter<StringType1> adapter1(string1);
StringTypeAdapter<StringType2> adapter2(string2);
StringTypeAdapter<StringType6> adapter6(string6);
UChar* buffer;
- unsigned length = adapter1.length() + adapter2.length() + adapter3.length() + adapter4.length() + adapter5.length() + adapter6.length();
+ bool overflow = false;
+ unsigned length = adapter1.length();
+ sumWithOverflow(length, adapter2.length(), overflow);
+ sumWithOverflow(length, adapter3.length(), overflow);
+ sumWithOverflow(length, adapter4.length(), overflow);
+ sumWithOverflow(length, adapter5.length(), overflow);
+ sumWithOverflow(length, adapter6.length(), overflow);
+ if (overflow)
+ return 0;
PassRefPtr<UStringImpl> resultImpl = UStringImpl::tryCreateUninitialized(length, buffer);
if (!resultImpl)
- return UString();
+ return 0;
UChar* result = buffer;
adapter1.writeTo(result);
}
template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5, typename StringType6, typename StringType7>
- UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6, StringType7 string7)
+ PassRefPtr<UStringImpl> tryMakeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6, StringType7 string7)
{
StringTypeAdapter<StringType1> adapter1(string1);
StringTypeAdapter<StringType2> adapter2(string2);
StringTypeAdapter<StringType7> adapter7(string7);
UChar* buffer;
- unsigned length = adapter1.length() + adapter2.length() + adapter3.length() + adapter4.length() + adapter5.length() + adapter6.length() + adapter7.length();
+ bool overflow = false;
+ unsigned length = adapter1.length();
+ sumWithOverflow(length, adapter2.length(), overflow);
+ sumWithOverflow(length, adapter3.length(), overflow);
+ sumWithOverflow(length, adapter4.length(), overflow);
+ sumWithOverflow(length, adapter5.length(), overflow);
+ sumWithOverflow(length, adapter6.length(), overflow);
+ sumWithOverflow(length, adapter7.length(), overflow);
+ if (overflow)
+ return 0;
PassRefPtr<UStringImpl> resultImpl = UStringImpl::tryCreateUninitialized(length, buffer);
if (!resultImpl)
- return UString();
+ return 0;
UChar* result = buffer;
adapter1.writeTo(result);
}
template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5, typename StringType6, typename StringType7, typename StringType8>
- UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6, StringType7 string7, StringType8 string8)
+ PassRefPtr<UStringImpl> tryMakeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6, StringType7 string7, StringType8 string8)
{
StringTypeAdapter<StringType1> adapter1(string1);
StringTypeAdapter<StringType2> adapter2(string2);
StringTypeAdapter<StringType8> adapter8(string8);
UChar* buffer;
- unsigned length = adapter1.length() + adapter2.length() + adapter3.length() + adapter4.length() + adapter5.length() + adapter6.length() + adapter7.length() + adapter8.length();
+ bool overflow = false;
+ unsigned length = adapter1.length();
+ sumWithOverflow(length, adapter2.length(), overflow);
+ sumWithOverflow(length, adapter3.length(), overflow);
+ sumWithOverflow(length, adapter4.length(), overflow);
+ sumWithOverflow(length, adapter5.length(), overflow);
+ sumWithOverflow(length, adapter6.length(), overflow);
+ sumWithOverflow(length, adapter7.length(), overflow);
+ sumWithOverflow(length, adapter8.length(), overflow);
+ if (overflow)
+ return 0;
PassRefPtr<UStringImpl> resultImpl = UStringImpl::tryCreateUninitialized(length, buffer);
if (!resultImpl)
- return UString();
+ return 0;
UChar* result = buffer;
adapter1.writeTo(result);
return resultImpl;
}
+ template<typename StringType1, typename StringType2>
+ UString makeString(StringType1 string1, StringType2 string2)
+ {
+ PassRefPtr<UStringImpl> resultImpl = tryMakeString(string1, string2);
+ if (!resultImpl)
+ CRASH();
+ return resultImpl;
+ }
+
+ template<typename StringType1, typename StringType2, typename StringType3>
+ UString makeString(StringType1 string1, StringType2 string2, StringType3 string3)
+ {
+ PassRefPtr<UStringImpl> resultImpl = tryMakeString(string1, string2, string3);
+ if (!resultImpl)
+ CRASH();
+ return resultImpl;
+ }
+
+ template<typename StringType1, typename StringType2, typename StringType3, typename StringType4>
+ UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4)
+ {
+ PassRefPtr<UStringImpl> resultImpl = tryMakeString(string1, string2, string3, string4);
+ if (!resultImpl)
+ CRASH();
+ return resultImpl;
+ }
+
+ template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5>
+ UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5)
+ {
+ PassRefPtr<UStringImpl> resultImpl = tryMakeString(string1, string2, string3, string4, string5);
+ if (!resultImpl)
+ CRASH();
+ return resultImpl;
+ }
+
+ template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5, typename StringType6>
+ UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6)
+ {
+ PassRefPtr<UStringImpl> resultImpl = tryMakeString(string1, string2, string3, string4, string5, string6);
+ if (!resultImpl)
+ CRASH();
+ return resultImpl;
+ }
+
+ template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5, typename StringType6, typename StringType7>
+ UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6, StringType7 string7)
+ {
+ PassRefPtr<UStringImpl> resultImpl = tryMakeString(string1, string2, string3, string4, string5, string6, string7);
+ if (!resultImpl)
+ CRASH();
+ return resultImpl;
+ }
+
+ template<typename StringType1, typename StringType2, typename StringType3, typename StringType4, typename StringType5, typename StringType6, typename StringType7, typename StringType8>
+ UString makeString(StringType1 string1, StringType2 string2, StringType3 string3, StringType4 string4, StringType5 string5, StringType6 string6, StringType7 string7, StringType8 string8)
+ {
+ PassRefPtr<UStringImpl> resultImpl = tryMakeString(string1, string2, string3, string4, string5, string6, string7, string8);
+ if (!resultImpl)
+ CRASH();
+ return resultImpl;
+ }
+
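// A brief usage sketch of the split above (not part of the patch, and assuming the
// StringTypeAdapter specializations for UString and const char* that these templates
// rely on): tryMakeString() returns 0 if the summed length overflows or allocation
// fails, while makeString() turns the same failure into a CRASH().
static UString buildMessage(const UString& propertyName)
{
    // Fallible path: the caller decides what to do when concatenation fails.
    RefPtr<UStringImpl> impl = tryMakeString("Cannot read property '", propertyName, "'");
    if (!impl)
        return "Out of memory";
    return impl.release();
    // makeString("Cannot read property '", propertyName, "'") would instead CRASH() on failure.
}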
} // namespace JSC
namespace WTF {
template<> struct StrHash<JSC::UString::Rep*> {
static unsigned hash(const JSC::UString::Rep* key) { return key->hash(); }
- static bool equal(const JSC::UString::Rep* a, const JSC::UString::Rep* b) { return JSC::equal(a, b); }
+ static bool equal(const JSC::UString::Rep* a, const JSC::UString::Rep* b) { return ::equal(a, b); }
static const bool safeToCompareToEmptyOrDeleted = false;
};
template<> struct StrHash<RefPtr<JSC::UString::Rep> > : public StrHash<JSC::UString::Rep*> {
using StrHash<JSC::UString::Rep*>::hash;
static unsigned hash(const RefPtr<JSC::UString::Rep>& key) { return key->hash(); }
using StrHash<JSC::UString::Rep*>::equal;
- static bool equal(const RefPtr<JSC::UString::Rep>& a, const RefPtr<JSC::UString::Rep>& b) { return JSC::equal(a.get(), b.get()); }
- static bool equal(const JSC::UString::Rep* a, const RefPtr<JSC::UString::Rep>& b) { return JSC::equal(a, b.get()); }
- static bool equal(const RefPtr<JSC::UString::Rep>& a, const JSC::UString::Rep* b) { return JSC::equal(a.get(), b); }
+ static bool equal(const RefPtr<JSC::UString::Rep>& a, const RefPtr<JSC::UString::Rep>& b) { return ::equal(a.get(), b.get()); }
+ static bool equal(const JSC::UString::Rep* a, const RefPtr<JSC::UString::Rep>& b) { return ::equal(a, b.get()); }
+ static bool equal(const RefPtr<JSC::UString::Rep>& a, const JSC::UString::Rep* b) { return ::equal(a.get(), b); }
static const bool safeToCompareToEmptyOrDeleted = false;
};
- template<> struct DefaultHash<JSC::UString::Rep*> {
- typedef StrHash<JSC::UString::Rep*> Hash;
- };
-
- template<> struct DefaultHash<RefPtr<JSC::UString::Rep> > {
- typedef StrHash<RefPtr<JSC::UString::Rep> > Hash;
-
+ template <> struct VectorTraits<JSC::UString> : SimpleClassVectorTraits
+ {
+ static const bool canInitializeWithMemset = true;
};
-
+
} // namespace WTF
#endif
+++ /dev/null
-/*
- * Copyright (C) 2009 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "UStringImpl.h"
-
-#include "Identifier.h"
-#include "UString.h"
-#include <wtf/unicode/UTF8.h>
-
-using namespace WTF::Unicode;
-using namespace std;
-
-namespace JSC {
-
-SharedUChar* UStringImpl::baseSharedBuffer()
-{
- ASSERT((bufferOwnership() == BufferShared)
- || ((bufferOwnership() == BufferOwned) && !m_dataBuffer.asPtr<void*>()));
-
- if (bufferOwnership() != BufferShared)
- m_dataBuffer = UntypedPtrAndBitfield(SharedUChar::create(new OwnFastMallocPtr<UChar>(m_data)).releaseRef(), BufferShared);
-
- return m_dataBuffer.asPtr<SharedUChar*>();
-}
-
-SharedUChar* UStringImpl::sharedBuffer()
-{
- if (m_length < s_minLengthToShare)
- return 0;
- ASSERT(!isStatic());
-
- UStringImpl* owner = bufferOwnerString();
- if (owner->bufferOwnership() == BufferInternal)
- return 0;
-
- return owner->baseSharedBuffer();
-}
-
-UStringImpl::~UStringImpl()
-{
- ASSERT(!isStatic());
- checkConsistency();
-
- if (isIdentifier())
- Identifier::remove(this);
-
- if (bufferOwnership() != BufferInternal) {
- if (bufferOwnership() == BufferOwned)
- fastFree(m_data);
- else if (bufferOwnership() == BufferSubstring)
- m_dataBuffer.asPtr<UStringImpl*>()->deref();
- else {
- ASSERT(bufferOwnership() == BufferShared);
- m_dataBuffer.asPtr<SharedUChar*>()->deref();
- }
- }
-}
-
-}
/*
- * Copyright (C) 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 1999 Lars Knoll (knoll@kde.org)
+ * Copyright (C) 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2009 Google Inc. All rights reserved.
*
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
*
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef UStringImpl_h
#define UStringImpl_h
-#include <limits>
-#include <wtf/CrossThreadRefCounted.h>
-#include <wtf/OwnFastMallocPtr.h>
-#include <wtf/PossiblyNull.h>
-#include <wtf/StringHashFunctions.h>
-#include <wtf/Vector.h>
-#include <wtf/unicode/Unicode.h>
-
-namespace JSC {
-
-class IdentifierTable;
-
-typedef CrossThreadRefCounted<OwnFastMallocPtr<UChar> > SharedUChar;
-
-class UntypedPtrAndBitfield {
-public:
- UntypedPtrAndBitfield() {}
-
- UntypedPtrAndBitfield(void* ptrValue, uintptr_t bitValue)
- : m_value(reinterpret_cast<uintptr_t>(ptrValue) | bitValue)
-#ifndef NDEBUG
- , m_leaksPtr(ptrValue)
-#endif
- {
- ASSERT(ptrValue == asPtr<void*>());
- ASSERT((*this & ~s_alignmentMask) == bitValue);
- }
-
- template<typename T>
- T asPtr() const { return reinterpret_cast<T>(m_value & s_alignmentMask); }
-
- UntypedPtrAndBitfield& operator&=(uintptr_t bits)
- {
- m_value &= bits | s_alignmentMask;
- return *this;
- }
-
- UntypedPtrAndBitfield& operator|=(uintptr_t bits)
- {
- m_value |= bits & ~s_alignmentMask;
- return *this;
- }
-
- uintptr_t operator&(uintptr_t mask) const
- {
- return m_value & mask & ~s_alignmentMask;
- }
-
-private:
- static const uintptr_t s_alignmentMask = ~static_cast<uintptr_t>(0x7);
- uintptr_t m_value;
-#ifndef NDEBUG
- void* m_leaksPtr; // Only used to allow tools like leaks on OSX to detect that the memory is referenced.
-#endif
-};
-
-class UStringImpl : Noncopyable {
-public:
- template<size_t inlineCapacity>
- static PassRefPtr<UStringImpl> adopt(Vector<UChar, inlineCapacity>& vector)
- {
- if (unsigned length = vector.size())
- return adoptRef(new UStringImpl(vector.releaseBuffer(), length, BufferOwned));
- return &empty();
- }
-
- static PassRefPtr<UStringImpl> create(const UChar* buffer, int length)
- {
- UChar* newBuffer;
- if (PassRefPtr<UStringImpl> impl = tryCreateUninitialized(length, newBuffer)) {
- copyChars(newBuffer, buffer, length);
- return impl;
- }
- return &null();
- }
-
- static PassRefPtr<UStringImpl> create(PassRefPtr<UStringImpl> rep, int offset, int length)
- {
- ASSERT(rep);
- rep->checkConsistency();
- return adoptRef(new UStringImpl(rep->m_data + offset, length, rep->bufferOwnerString()));
- }
-
- static PassRefPtr<UStringImpl> create(PassRefPtr<SharedUChar> sharedBuffer, UChar* buffer, int length)
- {
- return adoptRef(new UStringImpl(buffer, length, sharedBuffer));
- }
-
- static PassRefPtr<UStringImpl> createUninitialized(unsigned length, UChar*& output)
- {
- if (!length) {
- output = 0;
- return &empty();
- }
-
- if (length > ((std::numeric_limits<size_t>::max() - sizeof(UStringImpl)) / sizeof(UChar)))
- CRASH();
- UStringImpl* resultImpl = static_cast<UStringImpl*>(fastMalloc(sizeof(UChar) * length + sizeof(UStringImpl)));
- output = reinterpret_cast<UChar*>(resultImpl + 1);
- return adoptRef(new(resultImpl) UStringImpl(output, length, BufferInternal));
- }
-
- static PassRefPtr<UStringImpl> tryCreateUninitialized(unsigned length, UChar*& output)
- {
- if (!length) {
- output = 0;
- return &empty();
- }
-
- if (length > ((std::numeric_limits<size_t>::max() - sizeof(UStringImpl)) / sizeof(UChar)))
- return 0;
- UStringImpl* resultImpl;
- if (!tryFastMalloc(sizeof(UChar) * length + sizeof(UStringImpl)).getValue(resultImpl))
- return 0;
- output = reinterpret_cast<UChar*>(resultImpl + 1);
- return adoptRef(new(resultImpl) UStringImpl(output, length, BufferInternal));
- }
-
- SharedUChar* sharedBuffer();
- UChar* data() const { return m_data; }
- int size() const { return m_length; }
- size_t cost()
- {
- // For substrings, return the cost of the base string.
- if (bufferOwnership() == BufferSubstring)
- return m_dataBuffer.asPtr<UStringImpl*>()->cost();
-
- if (m_dataBuffer & s_reportedCostBit)
- return 0;
- m_dataBuffer |= s_reportedCostBit;
- return m_length;
- }
- unsigned hash() const { if (!m_hash) m_hash = computeHash(data(), m_length); return m_hash; }
- unsigned existingHash() const { ASSERT(m_hash); return m_hash; } // fast path for Identifiers
- void setHash(unsigned hash) { ASSERT(hash == computeHash(data(), m_length)); m_hash = hash; } // fast path for Identifiers
- bool isIdentifier() const { return m_isIdentifier; }
- void setIsIdentifier(bool isIdentifier) { m_isIdentifier = isIdentifier; }
-
- UStringImpl* ref() { m_refCount += s_refCountIncrement; return this; }
- ALWAYS_INLINE void deref() { if (!(m_refCount -= s_refCountIncrement)) delete this; }
-
- static void copyChars(UChar* destination, const UChar* source, unsigned numCharacters)
- {
- if (numCharacters <= s_copyCharsInlineCutOff) {
- for (unsigned i = 0; i < numCharacters; ++i)
- destination[i] = source[i];
- } else
- memcpy(destination, source, numCharacters * sizeof(UChar));
- }
-
- static unsigned computeHash(const UChar* s, int length) { ASSERT(length >= 0); return WTF::stringHash(s, length); }
- static unsigned computeHash(const char* s, int length) { ASSERT(length >= 0); return WTF::stringHash(s, length); }
- static unsigned computeHash(const char* s) { return WTF::stringHash(s); }
-
- static UStringImpl& null() { return *s_null; }
- static UStringImpl& empty() { return *s_empty; }
-
- ALWAYS_INLINE void checkConsistency() const
- {
- // There is no recursion of substrings.
- ASSERT(bufferOwnerString()->bufferOwnership() != BufferSubstring);
- // Static strings cannot be put in identifier tables, because they are globally shared.
- ASSERT(!isStatic() || !isIdentifier());
- }
-
-private:
- enum BufferOwnership {
- BufferInternal,
- BufferOwned,
- BufferSubstring,
- BufferShared,
- };
-
- // For SmallStringStorage, which allocates an array and uses an in-place new.
- UStringImpl() { }
-
- // Used to construct normal strings with an internal or external buffer.
- UStringImpl(UChar* data, int length, BufferOwnership ownership)
- : m_data(data)
- , m_length(length)
- , m_refCount(s_refCountIncrement)
- , m_hash(0)
- , m_isIdentifier(false)
- , m_dataBuffer(0, ownership)
- {
- ASSERT((ownership == BufferInternal) || (ownership == BufferOwned));
- checkConsistency();
- }
-
- // Used to construct static strings, which have a special refCount that can never hit zero.
- // This means that the static string will never be destroyed, which is important because
- // static strings will be shared across threads & ref-counted in a non-threadsafe manner.
- enum StaticStringConstructType { ConstructStaticString };
- UStringImpl(UChar* data, int length, StaticStringConstructType)
- : m_data(data)
- , m_length(length)
- , m_refCount(s_staticRefCountInitialValue)
- , m_hash(0)
- , m_isIdentifier(false)
- , m_dataBuffer(0, BufferOwned)
- {
- checkConsistency();
- }
-
- // Used to create new strings that are a substring of an existing string.
- UStringImpl(UChar* data, int length, PassRefPtr<UStringImpl> base)
- : m_data(data)
- , m_length(length)
- , m_refCount(s_refCountIncrement)
- , m_hash(0)
- , m_isIdentifier(false)
- , m_dataBuffer(base.releaseRef(), BufferSubstring)
- {
- // Do not use static strings as a base for substrings; UntypedPtrAndBitfield assumes
- // that all pointers will be at least 8-byte aligned, and we cannot guarantee that for
- // UStringImpls that are not heap allocated.
- ASSERT(m_dataBuffer.asPtr<UStringImpl*>()->size());
- ASSERT(!m_dataBuffer.asPtr<UStringImpl*>()->isStatic());
- checkConsistency();
- }
-
- // Used to construct new strings sharing an existing shared buffer.
- UStringImpl(UChar* data, int length, PassRefPtr<SharedUChar> sharedBuffer)
- : m_data(data)
- , m_length(length)
- , m_refCount(s_refCountIncrement)
- , m_hash(0)
- , m_isIdentifier(false)
- , m_dataBuffer(sharedBuffer.releaseRef(), BufferShared)
- {
- checkConsistency();
- }
-
- using Noncopyable::operator new;
- void* operator new(size_t, void* inPlace) { return inPlace; }
-
- ~UStringImpl();
-
- // This number must be at least 2 to avoid sharing the empty and null strings, as well as the single-character strings from SmallStrings.
- static const int s_minLengthToShare = 10;
- static const unsigned s_copyCharsInlineCutOff = 20;
- static const uintptr_t s_bufferOwnershipMask = 3;
- static const uintptr_t s_reportedCostBit = 4;
- // We initialize and increment/decrement the refCount for all normal (non-static) strings by the value 2.
- // We initialize static strings with an odd number (specifically, 1), such that the refCount cannot reach zero.
- static const int s_refCountIncrement = 2;
- static const int s_staticRefCountInitialValue = 1;
-
- UStringImpl* bufferOwnerString() { return (bufferOwnership() == BufferSubstring) ? m_dataBuffer.asPtr<UStringImpl*>() : this; }
- const UStringImpl* bufferOwnerString() const { return (bufferOwnership() == BufferSubstring) ? m_dataBuffer.asPtr<UStringImpl*>() : this; }
- SharedUChar* baseSharedBuffer();
- unsigned bufferOwnership() const { return m_dataBuffer & s_bufferOwnershipMask; }
- bool isStatic() const { return m_refCount & 1; }
-
- // unshared data
- UChar* m_data;
- int m_length;
- unsigned m_refCount;
- mutable unsigned m_hash : 31;
- mutable unsigned m_isIdentifier : 1;
- UntypedPtrAndBitfield m_dataBuffer;
-
- JS_EXPORTDATA static UStringImpl* s_null;
- JS_EXPORTDATA static UStringImpl* s_empty;
-
- friend class JIT;
- friend class SmallStringsStorage;
- friend void initializeUString();
-};
-
-bool equal(const UStringImpl*, const UStringImpl*);
-
-}
+// FIXME: Remove this redundant name!
+#include <wtf/text/StringImpl.h>
+namespace JSC { typedef WebCore::StringImpl UStringImpl; }
#endif
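// A standalone sketch of the two representation tricks used by the UStringImpl code
// removed above, in isolation: BufferOwnership is packed into the low 3 bits of a
// pointer that is assumed to be 8-byte aligned (what UntypedPtrAndBitfield does), and
// static strings carry an odd refCount so that (refCount & 1) identifies them and the
// count can never reach zero. All names below are illustrative, not from the patch.
#include <assert.h>
#include <stdint.h>

enum BufferOwnership { BufferInternal, BufferOwned, BufferSubstring, BufferShared };

class TaggedPtr {
public:
    TaggedPtr(void* ptr, uintptr_t bits)
        : m_value(reinterpret_cast<uintptr_t>(ptr) | bits)
    {
        assert(!(reinterpret_cast<uintptr_t>(ptr) & 0x7)); // pointer must be 8-byte aligned
        assert(bits <= 0x7); // only the three low bits are available for flags
    }
    void* pointer() const { return reinterpret_cast<void*>(m_value & ~static_cast<uintptr_t>(0x7)); }
    uintptr_t bits() const { return m_value & 0x7; }

private:
    uintptr_t m_value;
};

int main()
{
    double* buffer = new double[4]; // heap allocations are 8-byte aligned on the platforms this assumes
    TaggedPtr tagged(buffer, BufferOwned);
    assert(tagged.pointer() == buffer);
    assert(tagged.bits() == BufferOwned);
    delete[] buffer;

    // Refcount parity: normal strings start at 2 and count in steps of 2; static strings
    // start at 1, so the low bit marks them and decrements can never reach zero.
    unsigned normalRefCount = 2;
    unsigned staticRefCount = 1;
    assert(!(normalRefCount & 1));
    assert(staticRefCount & 1);
    return 0;
}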
return m_ptr;
}
- void clear(JSCell* ptr)
+ bool clear(JSCell* ptr)
{
- if (ptr == m_ptr)
+ if (ptr == m_ptr) {
m_ptr = 0;
+ return true;
+ }
+ return false;
}
T& operator*() const { return *get(); }
WeakGCPtr& operator=(T*);
+#if !ASSERT_DISABLED
+ bool hasDeadObject() const { return !!m_ptr; }
+#endif
+
private:
void assign(T* ptr)
{
- if (ptr)
- Heap::markCell(ptr);
+ ASSERT(ptr);
+ Heap::markCell(ptr);
m_ptr = ptr;
}
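// A usage sketch for the new bool return of clear() (the owner and member names here
// are hypothetical, not from the patch): a cache holding a WeakGCPtr can now assert
// that a destruction notification really refers to the cell it had cached.
//
//   void SomeCache::cellDestroyed(JSCell* cell)
//   {
//       bool wasCached = m_cachedCell.clear(cell); // m_cachedCell is a WeakGCPtr<JSCell>
//       ASSERT_UNUSED(wasCached, wasCached);
//   }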
--- /dev/null
+(function () {
+ for (var i = 0; i < 500; ++i) {
+ for (var j = 0; j < 100000; ++j)
+ var a = {};
+ }
+})();
--- /dev/null
+(function () {
+ var a = new Array(100000);
+ for (var i = 0; i < 100000; ++i)
+ a[i] = {};
+
+ for (var i = 0; i < 500; ++i) {
+ for (var j = 0; j < 100000; ++j)
+ var b = {};
+ }
+})();
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "CharacterClass.h"
-
-#if ENABLE(WREC)
-
-using namespace WTF;
-
-namespace JSC { namespace WREC {
-
-const CharacterClass& CharacterClass::newline() {
- static const UChar asciiNewlines[2] = { '\n', '\r' };
- static const UChar unicodeNewlines[2] = { 0x2028, 0x2029 };
- static const CharacterClass charClass = {
- asciiNewlines, 2,
- 0, 0,
- unicodeNewlines, 2,
- 0, 0,
- };
-
- return charClass;
-}
-
-const CharacterClass& CharacterClass::digits() {
- static const CharacterRange asciiDigitsRange[1] = { { '0', '9' } };
- static const CharacterClass charClass = {
- 0, 0,
- asciiDigitsRange, 1,
- 0, 0,
- 0, 0,
- };
-
- return charClass;
-}
-
-const CharacterClass& CharacterClass::spaces() {
- static const UChar asciiSpaces[1] = { ' ' };
- static const CharacterRange asciiSpacesRange[1] = { { '\t', '\r' } };
- static const UChar unicodeSpaces[8] = { 0x00a0, 0x1680, 0x180e, 0x2028, 0x2029, 0x202f, 0x205f, 0x3000 };
- static const CharacterRange unicodeSpacesRange[1] = { { 0x2000, 0x200a } };
- static const CharacterClass charClass = {
- asciiSpaces, 1,
- asciiSpacesRange, 1,
- unicodeSpaces, 8,
- unicodeSpacesRange, 1,
- };
-
- return charClass;
-}
-
-const CharacterClass& CharacterClass::wordchar() {
- static const UChar asciiWordchar[1] = { '_' };
- static const CharacterRange asciiWordcharRange[3] = { { '0', '9' }, { 'A', 'Z' }, { 'a', 'z' } };
- static const CharacterClass charClass = {
- asciiWordchar, 1,
- asciiWordcharRange, 3,
- 0, 0,
- 0, 0,
- };
-
- return charClass;
-}
-
-const CharacterClass& CharacterClass::nondigits() {
- static const CharacterRange asciiNondigitsRange[2] = { { 0, '0' - 1 }, { '9' + 1, 0x7f } };
- static const CharacterRange unicodeNondigitsRange[1] = { { 0x0080, 0xffff } };
- static const CharacterClass charClass = {
- 0, 0,
- asciiNondigitsRange, 2,
- 0, 0,
- unicodeNondigitsRange, 1,
- };
-
- return charClass;
-}
-
-const CharacterClass& CharacterClass::nonspaces() {
- static const CharacterRange asciiNonspacesRange[3] = { { 0, '\t' - 1 }, { '\r' + 1, ' ' - 1 }, { ' ' + 1, 0x7f } };
- static const CharacterRange unicodeNonspacesRange[9] = {
- { 0x0080, 0x009f },
- { 0x00a1, 0x167f },
- { 0x1681, 0x180d },
- { 0x180f, 0x1fff },
- { 0x200b, 0x2027 },
- { 0x202a, 0x202e },
- { 0x2030, 0x205e },
- { 0x2060, 0x2fff },
- { 0x3001, 0xffff }
- };
- static const CharacterClass charClass = {
- 0, 0,
- asciiNonspacesRange, 3,
- 0, 0,
- unicodeNonspacesRange, 9,
- };
-
- return charClass;
-}
-
-const CharacterClass& CharacterClass::nonwordchar() {
- static const UChar asciiNonwordchar[1] = { '`' };
- static const CharacterRange asciiNonwordcharRange[4] = { { 0, '0' - 1 }, { '9' + 1, 'A' - 1 }, { 'Z' + 1, '_' - 1 }, { 'z' + 1, 0x7f } };
- static const CharacterRange unicodeNonwordcharRange[1] = { { 0x0080, 0xffff } };
- static const CharacterClass charClass = {
- asciiNonwordchar, 1,
- asciiNonwordcharRange, 4,
- 0, 0,
- unicodeNonwordcharRange, 1,
- };
-
- return charClass;
-}
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef CharacterClass_h
-#define CharacterClass_h
-
-#include <wtf/Platform.h>
-
-#if ENABLE(WREC)
-
-#include <wtf/unicode/Unicode.h>
-
-namespace JSC { namespace WREC {
-
- struct CharacterRange {
- UChar begin;
- UChar end;
- };
-
- struct CharacterClass {
- static const CharacterClass& newline();
- static const CharacterClass& digits();
- static const CharacterClass& spaces();
- static const CharacterClass& wordchar();
- static const CharacterClass& nondigits();
- static const CharacterClass& nonspaces();
- static const CharacterClass& nonwordchar();
-
- const UChar* matches;
- unsigned numMatches;
-
- const CharacterRange* ranges;
- unsigned numRanges;
-
- const UChar* matchesUnicode;
- unsigned numMatchesUnicode;
-
- const CharacterRange* rangesUnicode;
- unsigned numRangesUnicode;
- };
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
-
-#endif // CharacterClass_h
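// An illustrative helper (hypothetical, not from the patch) showing how the four
// arrays of the CharacterClass struct above are meant to be consulted: singletons and
// ranges from the ASCII tables for ch <= 0x7f, and from the Unicode tables otherwise.
// WREC's generator emits machine code with roughly this structure rather than calling
// a function like this one.
static bool characterClassContains(const JSC::WREC::CharacterClass& charClass, UChar ch)
{
    const bool isASCII = ch <= 0x7f;
    const UChar* matches = isASCII ? charClass.matches : charClass.matchesUnicode;
    unsigned numMatches = isASCII ? charClass.numMatches : charClass.numMatchesUnicode;
    const JSC::WREC::CharacterRange* ranges = isASCII ? charClass.ranges : charClass.rangesUnicode;
    unsigned numRanges = isASCII ? charClass.numRanges : charClass.numRangesUnicode;

    for (unsigned i = 0; i < numMatches; ++i) {
        if (matches[i] == ch)
            return true;
    }
    for (unsigned i = 0; i < numRanges; ++i) {
        if (ranges[i].begin <= ch && ch <= ranges[i].end)
            return true;
    }
    return false;
}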
+++ /dev/null
-/*
- * Copyright (C) 2008, 2009 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "CharacterClassConstructor.h"
-
-#if ENABLE(WREC)
-
-#include "pcre_internal.h"
-#include <wtf/ASCIICType.h>
-
-using namespace WTF;
-
-namespace JSC { namespace WREC {
-
-void CharacterClassConstructor::addSorted(Vector<UChar>& matches, UChar ch)
-{
- unsigned pos = 0;
- unsigned range = matches.size();
-
- // binary chop, find position to insert char.
- while (range) {
- unsigned index = range >> 1;
-
- int val = matches[pos+index] - ch;
- if (!val)
- return;
- else if (val > 0)
- range = index;
- else {
- pos += (index+1);
- range -= (index+1);
- }
- }
-
- if (pos == matches.size())
- matches.append(ch);
- else
- matches.insert(pos, ch);
-}
-
-void CharacterClassConstructor::addSortedRange(Vector<CharacterRange>& ranges, UChar lo, UChar hi)
-{
- unsigned end = ranges.size();
-
- // Simple linear scan - I doubt there are that many ranges anyway...
- // feel free to fix this with something faster (eg binary chop).
- for (unsigned i = 0; i < end; ++i) {
- // Does the new range fall entirely before the current range in the array?
- if (hi < ranges[i].begin) {
- // Optional optimization: concatenate adjacent ranges? - may not be worthwhile.
- if (hi == (ranges[i].begin - 1)) {
- ranges[i].begin = lo;
- return;
- }
- CharacterRange r = {lo, hi};
- ranges.insert(i, r);
- return;
- }
- // Okay, since we didn't hit the last case, the end of the new range is definitely at or after the beginning
- // of the current range. If the new range starts at or before the end of the current range the two overlap;
- // if it starts exactly one past the end of the current range they concatenate, which is just as good.
- if (lo <= (ranges[i].end + 1)) {
- // Found an intersection - merge the new range into this entry in the array.
- ranges[i].begin = std::min(ranges[i].begin, lo);
- ranges[i].end = std::max(ranges[i].end, hi);
-
- // now check if the new range can subsume any subsequent ranges.
- unsigned next = i+1;
- // Each iteration of this loop either removes an entry from the list or breaks out of the loop.
- while (next < ranges.size()) {
- if (ranges[next].begin <= (ranges[i].end + 1)) {
- // the next entry now overlaps / concatenates this one.
- ranges[i].end = std::max(ranges[i].end, ranges[next].end);
- ranges.remove(next);
- } else
- break;
- }
-
- return;
- }
- }
-
- // CharacterRange comes after all existing ranges.
- CharacterRange r = {lo, hi};
- ranges.append(r);
-}
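// A worked example of the merging behaviour above (expected contents only, not code
// from the patch), assuming an initially empty Vector<CharacterRange> ranges:
//
//   addSortedRange(ranges, 'a', 'f');   // ranges == { {'a','f'} }
//   addSortedRange(ranges, 'e', 'k');   // overlaps the existing entry: { {'a','k'} }
//   addSortedRange(ranges, 'l', 'p');   // 'l' is one past 'k', so they concatenate: { {'a','p'} }
//   addSortedRange(ranges, '0', '9');   // falls entirely before 'a': { {'0','9'}, {'a','p'} }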
-
-void CharacterClassConstructor::put(UChar ch)
-{
- // Parsing a regular expression like [a-z], we start in an initial empty state:
- // ((m_charBuffer == -1) && !m_isPendingDash)
- // We buffer the 'a', since it may be (and in this case is) part of a range:
- // ((m_charBuffer != -1) && !m_isPendingDash)
- // Having parsed the hyphen we then record that the dash is also pending:
- // ((m_charBuffer != -1) && m_isPendingDash)
- // The next change will always take us back to the initial state - either because
- // a complete range has been parsed (such as [a-z]), or because a flush is forced,
- // due to an early end in the regexp ([a-]), or a character class escape being added
- // ([a-\s]). The fourth permutation of m_charBuffer and m_isPendingDash is not permitted.
- ASSERT(!((m_charBuffer == -1) && m_isPendingDash));
-
- if (m_charBuffer != -1) {
- if (m_isPendingDash) {
- // EXAMPLE: parsing [-a-c], the 'c' reaches this case - we have buffered a previous character and seen a hyphen, so this is a range.
- UChar lo = m_charBuffer;
- UChar hi = ch;
- // Reset back to the initial state.
- m_charBuffer = -1;
- m_isPendingDash = false;
-
- // This is an error, detected lazily. Do not proceed.
- if (lo > hi) {
- m_isUpsideDown = true;
- return;
- }
-
- if (lo <= 0x7f) {
- char asciiLo = lo;
- char asciiHi = std::min(hi, (UChar)0x7f);
- addSortedRange(m_ranges, lo, asciiHi);
-
- if (m_isCaseInsensitive) {
- if ((asciiLo <= 'Z') && (asciiHi >= 'A'))
- addSortedRange(m_ranges, std::max(asciiLo, 'A')+('a'-'A'), std::min(asciiHi, 'Z')+('a'-'A'));
- if ((asciiLo <= 'z') && (asciiHi >= 'a'))
- addSortedRange(m_ranges, std::max(asciiLo, 'a')+('A'-'a'), std::min(asciiHi, 'z')+('A'-'a'));
- }
- }
- if (hi >= 0x80) {
- UChar unicodeCurr = std::max(lo, (UChar)0x80);
- addSortedRange(m_rangesUnicode, unicodeCurr, hi);
-
- if (m_isCaseInsensitive) {
- // we're going to scan along, updating the start of the range
- while (unicodeCurr <= hi) {
- // Spin forwards over any characters that don't have two cases.
- for (; jsc_pcre_ucp_othercase(unicodeCurr) == -1; ++unicodeCurr) {
- // if this was the last character in the range, we're done.
- if (unicodeCurr == hi)
- return;
- }
- // if we fall through to here, unicodeCurr <= hi & has another case. Get the other case.
- UChar rangeStart = unicodeCurr;
- UChar otherCurr = jsc_pcre_ucp_othercase(unicodeCurr);
-
- // If unicodeCurr is not yet hi, check the next char in the range. If it also has another case,
- // and if its other-case value is one greater than the other-case value for the current last
- // character included in the range, we can include the next char in the range.
- while ((unicodeCurr < hi) && (jsc_pcre_ucp_othercase(unicodeCurr + 1) == (otherCurr + 1))) {
- // increment unicodeCurr; it points to the end of the range.
- // increment otherCurr; due to the check above, the next char's other-case value must be 1 greater than the current other-case value.
- ++unicodeCurr;
- ++otherCurr;
- }
-
- // otherCurr is the last char in the range of other-case chars; subtract the range length to get back to its start.
- addSortedRange(m_rangesUnicode, otherCurr-(unicodeCurr-rangeStart), otherCurr);
-
- // unicodeCurr has been added, move on to the next char.
- ++unicodeCurr;
- }
- }
- }
- } else if (ch == '-')
- // EXAMPLE: parsing [-a-c], the second '-' reaches this case - the hyphen is treated as potentially indicating a range.
- m_isPendingDash = true;
- else {
- // EXAMPLE: Parsing [-a-c], the 'a' reaches this case - we flush the previously buffered char, then buffer the 'a'.
- flush();
- m_charBuffer = ch;
- }
- } else
- // EXAMPLE: Parsing [-a-c], the first hyphen reaches this case - there is no buffered character
- // (the hyphen is not treated as a special character in this case; it gets the same handling as any other char).
- m_charBuffer = ch;
-}
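// A worked trace of the state machine documented above, for the class [-a-c]
// (expected states only, not code from the patch):
//
//   put('-')  // nothing buffered: the hyphen is buffered like any other char
//             //   -> m_charBuffer == '-', !m_isPendingDash
//   put('a')  // buffered char, no pending dash: flush '-' into m_matches, buffer 'a'
//             //   -> m_charBuffer == 'a', !m_isPendingDash
//   put('-')  // buffered char present: record the pending dash
//             //   -> m_charBuffer == 'a', m_isPendingDash
//   put('c')  // buffered char and pending dash: add the range 'a'-'c' to m_ranges and
//             //   return to the initial state -> m_charBuffer == -1, !m_isPendingDash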
-
-// When a character is added to the set we do not immediately add it to the arrays, in case it is actually defining a range.
-// When we have determined the character is not used in specifying a range it is added, in a sorted fashion, to the appropriate
-// array (either ASCII or Unicode).
-// If the pattern is case insensitive we add entries for both cases.
-void CharacterClassConstructor::flush()
-{
- if (m_charBuffer != -1) {
- if (m_charBuffer <= 0x7f) {
- if (m_isCaseInsensitive && isASCIILower(m_charBuffer))
- addSorted(m_matches, toASCIIUpper(m_charBuffer));
- addSorted(m_matches, m_charBuffer);
- if (m_isCaseInsensitive && isASCIIUpper(m_charBuffer))
- addSorted(m_matches, toASCIILower(m_charBuffer));
- } else {
- addSorted(m_matchesUnicode, m_charBuffer);
- if (m_isCaseInsensitive) {
- int other = jsc_pcre_ucp_othercase(m_charBuffer);
- if (other != -1)
- addSorted(m_matchesUnicode, other);
- }
- }
- m_charBuffer = -1;
- }
-
- if (m_isPendingDash) {
- addSorted(m_matches, '-');
- m_isPendingDash = false;
- }
-}
-
-void CharacterClassConstructor::append(const CharacterClass& other)
-{
- // [x-\s] will add 'x', '-', and all Unicode spaces to the new class (same as [x\s-]).
- // We need to check the spec, really, but we think this matches PCRE behaviour.
- flush();
-
- if (other.numMatches) {
- for (size_t i = 0; i < other.numMatches; ++i)
- addSorted(m_matches, other.matches[i]);
- }
- if (other.numRanges) {
- for (size_t i = 0; i < other.numRanges; ++i)
- addSortedRange(m_ranges, other.ranges[i].begin, other.ranges[i].end);
- }
- if (other.numMatchesUnicode) {
- for (size_t i = 0; i < other.numMatchesUnicode; ++i)
- addSorted(m_matchesUnicode, other.matchesUnicode[i]);
- }
- if (other.numRangesUnicode) {
- for (size_t i = 0; i < other.numRangesUnicode; ++i)
- addSortedRange(m_rangesUnicode, other.rangesUnicode[i].begin, other.rangesUnicode[i].end);
- }
-}
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef CharacterClassConstructor_h
-#define CharacterClassConstructor_h
-
-#include <wtf/Platform.h>
-
-#if ENABLE(WREC)
-
-#include "CharacterClass.h"
-#include <wtf/AlwaysInline.h>
-#include <wtf/Vector.h>
-#include <wtf/unicode/Unicode.h>
-
-namespace JSC { namespace WREC {
-
- class CharacterClassConstructor {
- public:
- CharacterClassConstructor(bool isCaseInsensitive)
- : m_charBuffer(-1)
- , m_isPendingDash(false)
- , m_isCaseInsensitive(isCaseInsensitive)
- , m_isUpsideDown(false)
- {
- }
-
- void flush();
-
- // We need to flush prior to an escaped hyphen to prevent it from being treated as indicating
- // a range, e.g. in [a\-c] we flush prior to adding the hyphen so that the class is not treated as
- // [a-c]. However, we do not want to flush if we have already seen a non-escaped hyphen -
- // e.g. [+-\-] should be treated the same as [+--], producing a range that will also match
- // a comma.
- void flushBeforeEscapedHyphen()
- {
- if (!m_isPendingDash)
- flush();
- }
-
- void put(UChar ch);
- void append(const CharacterClass& other);
-
- bool isUpsideDown() { return m_isUpsideDown; }
-
- ALWAYS_INLINE CharacterClass charClass()
- {
- CharacterClass newCharClass = {
- m_matches.begin(), m_matches.size(),
- m_ranges.begin(), m_ranges.size(),
- m_matchesUnicode.begin(), m_matchesUnicode.size(),
- m_rangesUnicode.begin(), m_rangesUnicode.size(),
- };
-
- return newCharClass;
- }
-
- private:
- void addSorted(Vector<UChar>& matches, UChar ch);
- void addSortedRange(Vector<CharacterRange>& ranges, UChar lo, UChar hi);
-
- int m_charBuffer;
- bool m_isPendingDash;
- bool m_isCaseInsensitive;
- bool m_isUpsideDown;
-
- Vector<UChar> m_matches;
- Vector<CharacterRange> m_ranges;
- Vector<UChar> m_matchesUnicode;
- Vector<CharacterRange> m_rangesUnicode;
- };
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
-
-#endif // CharacterClassConstructor_h
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef Escapes_h
-#define Escapes_h
-
-#include <wtf/Platform.h>
-
-#if ENABLE(WREC)
-
-#include <wtf/Assertions.h>
-
-namespace JSC { namespace WREC {
-
- class CharacterClass;
-
- class Escape {
- public:
- enum Type {
- PatternCharacter,
- CharacterClass,
- Backreference,
- WordBoundaryAssertion,
- Error,
- };
-
- Escape(Type type)
- : m_type(type)
- {
- }
-
- Type type() const { return m_type; }
-
- private:
- Type m_type;
-
- protected:
- // Used by subclasses to store data.
- union {
- int i;
- const WREC::CharacterClass* c;
- } m_u;
- bool m_invert;
- };
-
- class PatternCharacterEscape : public Escape {
- public:
- static const PatternCharacterEscape& cast(const Escape& escape)
- {
- ASSERT(escape.type() == PatternCharacter);
- return static_cast<const PatternCharacterEscape&>(escape);
- }
-
- PatternCharacterEscape(int character)
- : Escape(PatternCharacter)
- {
- m_u.i = character;
- }
-
- operator Escape() const { return *this; }
-
- int character() const { return m_u.i; }
- };
-
- class CharacterClassEscape : public Escape {
- public:
- static const CharacterClassEscape& cast(const Escape& escape)
- {
- ASSERT(escape.type() == CharacterClass);
- return static_cast<const CharacterClassEscape&>(escape);
- }
-
- CharacterClassEscape(const WREC::CharacterClass& characterClass, bool invert)
- : Escape(CharacterClass)
- {
- m_u.c = &characterClass;
- m_invert = invert;
- }
-
- operator Escape() { return *this; }
-
- const WREC::CharacterClass& characterClass() const { return *m_u.c; }
- bool invert() const { return m_invert; }
- };
-
- class BackreferenceEscape : public Escape {
- public:
- static const BackreferenceEscape& cast(const Escape& escape)
- {
- ASSERT(escape.type() == Backreference);
- return static_cast<const BackreferenceEscape&>(escape);
- }
-
- BackreferenceEscape(int subpatternId)
- : Escape(Backreference)
- {
- m_u.i = subpatternId;
- }
-
- operator Escape() const { return *this; }
-
- int subpatternId() const { return m_u.i; }
- };
-
- class WordBoundaryAssertionEscape : public Escape {
- public:
- static const WordBoundaryAssertionEscape& cast(const Escape& escape)
- {
- ASSERT(escape.type() == WordBoundaryAssertion);
- return static_cast<const WordBoundaryAssertionEscape&>(escape);
- }
-
- WordBoundaryAssertionEscape(bool invert)
- : Escape(WordBoundaryAssertion)
- {
- m_invert = invert;
- }
-
- operator Escape() const { return *this; }
-
- bool invert() const { return m_invert; }
- };
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
-
-#endif // Escapes_h
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef Quantifier_h
-#define Quantifier_h
-
-#include <wtf/Platform.h>
-
-#if ENABLE(WREC)
-
-#include <wtf/Assertions.h>
-#include <limits.h>
-
-namespace JSC { namespace WREC {
-
- struct Quantifier {
- enum Type {
- None,
- Greedy,
- NonGreedy,
- Error,
- };
-
- Quantifier(Type type = None, unsigned min = 0, unsigned max = Infinity)
- : type(type)
- , min(min)
- , max(max)
- {
- ASSERT(min <= max);
- }
-
- Type type;
-
- unsigned min;
- unsigned max;
-
- static const unsigned Infinity = UINT_MAX;
- };
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
-
-#endif // Quantifier_h
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "WREC.h"
-
-#if ENABLE(WREC)
-
-#include "CharacterClassConstructor.h"
-#include "Interpreter.h"
-#include "JSGlobalObject.h"
-#include "RegisterFile.h"
-#include "WRECFunctors.h"
-#include "WRECParser.h"
-#include "pcre_internal.h"
-
-using namespace WTF;
-
-namespace JSC { namespace WREC {
-
-CompiledRegExp Generator::compileRegExp(JSGlobalData* globalData, const UString& pattern, unsigned* numSubpatterns_ptr, const char** error_ptr, RefPtr<ExecutablePool>& pool, bool ignoreCase, bool multiline)
-{
- if (pattern.size() > MAX_PATTERN_SIZE) {
- *error_ptr = "regular expression too large";
- return 0;
- }
-
- Parser parser(pattern, ignoreCase, multiline);
- Generator& generator = parser.generator();
- MacroAssembler::JumpList failures;
- MacroAssembler::Jump endOfInput;
-
- generator.generateEnter();
- generator.generateSaveIndex();
-
- Label beginPattern(&generator);
- parser.parsePattern(failures);
- generator.generateReturnSuccess();
-
- failures.link(&generator);
- generator.generateIncrementIndex(&endOfInput);
- parser.parsePattern(failures);
- generator.generateReturnSuccess();
-
- failures.link(&generator);
- generator.generateIncrementIndex();
- generator.generateJumpIfNotEndOfInput(beginPattern);
-
- endOfInput.link(&generator);
- generator.generateReturnFailure();
-
- if (parser.error()) {
- *error_ptr = parser.syntaxError(); // NULL in the case of patterns that WREC doesn't support yet.
- return 0;
- }
-
- *numSubpatterns_ptr = parser.numSubpatterns();
- pool = globalData->executableAllocator.poolForSize(generator.size());
- return reinterpret_cast<CompiledRegExp>(generator.copyCode(pool.get()));
-}
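// The code generated above corresponds roughly to the following loop (a sketch, not
// code from the patch; matchPatternAt() stands in for the code emitted by
// parsePattern()): the pattern is attempted at every start position from the incoming
// index up to and including one-past-the-last character, returning the first
// successful start position, or -1 if every attempt fails.
//
//   int matchFrom(unsigned index, unsigned length)
//   {
//       for (; index <= length; ++index) {
//           if (matchPatternAt(index))
//               return index; // generateReturnSuccess() also records begin/end in 'output'
//       }
//       return -1;            // generateReturnFailure()
//   }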
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef WREC_h
-#define WREC_h
-
-#include <wtf/Platform.h>
-
-#if ENABLE(WREC)
-
-#include <wtf/unicode/Unicode.h>
-
-#if COMPILER(GCC) && CPU(X86)
-#define WREC_CALL __attribute__ ((regparm (3)))
-#else
-#define WREC_CALL
-#endif
-
-namespace JSC {
- class Interpreter;
- class UString;
-}
-
-namespace JSC { namespace WREC {
-
- typedef int (*CompiledRegExp)(const UChar* input, unsigned start, unsigned length, int* output) WREC_CALL;
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
-
-#endif // WREC_h
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "WRECFunctors.h"
-
-#if ENABLE(WREC)
-
-#include "WRECGenerator.h"
-
-using namespace WTF;
-
-namespace JSC { namespace WREC {
-
-void GeneratePatternCharacterFunctor::generateAtom(Generator* generator, Generator::JumpList& failures)
-{
- generator->generatePatternCharacter(failures, m_ch);
-}
-
-void GeneratePatternCharacterFunctor::backtrack(Generator* generator)
-{
- generator->generateBacktrack1();
-}
-
-void GenerateCharacterClassFunctor::generateAtom(Generator* generator, Generator::JumpList& failures)
-{
- generator->generateCharacterClass(failures, *m_charClass, m_invert);
-}
-
-void GenerateCharacterClassFunctor::backtrack(Generator* generator)
-{
- generator->generateBacktrack1();
-}
-
-void GenerateBackreferenceFunctor::generateAtom(Generator* generator, Generator::JumpList& failures)
-{
- generator->generateBackreference(failures, m_subpatternId);
-}
-
-void GenerateBackreferenceFunctor::backtrack(Generator* generator)
-{
- generator->generateBacktrackBackreference(m_subpatternId);
-}
-
-void GenerateParenthesesNonGreedyFunctor::generateAtom(Generator* generator, Generator::JumpList& failures)
-{
- generator->generateParenthesesNonGreedy(failures, m_start, m_success, m_fail);
-}
-
-void GenerateParenthesesNonGreedyFunctor::backtrack(Generator*)
-{
- // FIXME: do something about this.
- CRASH();
-}
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include <wtf/Platform.h>
-
-#if ENABLE(WREC)
-
-#include "WRECGenerator.h"
-#include <wtf/unicode/Unicode.h>
-
-namespace JSC { namespace WREC {
-
- struct CharacterClass;
-
- class GenerateAtomFunctor {
- public:
- virtual ~GenerateAtomFunctor() {}
-
- virtual void generateAtom(Generator*, Generator::JumpList&) = 0;
- virtual void backtrack(Generator*) = 0;
- };
-
- class GeneratePatternCharacterFunctor : public GenerateAtomFunctor {
- public:
- GeneratePatternCharacterFunctor(const UChar ch)
- : m_ch(ch)
- {
- }
-
- virtual void generateAtom(Generator*, Generator::JumpList&);
- virtual void backtrack(Generator*);
-
- private:
- const UChar m_ch;
- };
-
- class GenerateCharacterClassFunctor : public GenerateAtomFunctor {
- public:
- GenerateCharacterClassFunctor(const CharacterClass* charClass, bool invert)
- : m_charClass(charClass)
- , m_invert(invert)
- {
- }
-
- virtual void generateAtom(Generator*, Generator::JumpList&);
- virtual void backtrack(Generator*);
-
- private:
- const CharacterClass* m_charClass;
- bool m_invert;
- };
-
- class GenerateBackreferenceFunctor : public GenerateAtomFunctor {
- public:
- GenerateBackreferenceFunctor(unsigned subpatternId)
- : m_subpatternId(subpatternId)
- {
- }
-
- virtual void generateAtom(Generator*, Generator::JumpList&);
- virtual void backtrack(Generator*);
-
- private:
- unsigned m_subpatternId;
- };
-
- class GenerateParenthesesNonGreedyFunctor : public GenerateAtomFunctor {
- public:
- GenerateParenthesesNonGreedyFunctor(Generator::Label start, Generator::Jump success, Generator::Jump fail)
- : m_start(start)
- , m_success(success)
- , m_fail(fail)
- {
- }
-
- virtual void generateAtom(Generator*, Generator::JumpList&);
- virtual void backtrack(Generator*);
-
- private:
- Generator::Label m_start;
- Generator::Jump m_success;
- Generator::Jump m_fail;
- };
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "WREC.h"
-
-#if ENABLE(WREC)
-
-#include "CharacterClassConstructor.h"
-#include "Interpreter.h"
-#include "WRECFunctors.h"
-#include "WRECParser.h"
-#include "pcre_internal.h"
-
-using namespace WTF;
-
-namespace JSC { namespace WREC {
-
-void Generator::generateEnter()
-{
-#if CPU(X86)
- // On x86 edi & esi are callee preserved registers.
- push(X86Registers::edi);
- push(X86Registers::esi);
-
-#if COMPILER(MSVC)
- // Move the arguments into registers.
- peek(input, 3);
- peek(index, 4);
- peek(length, 5);
- peek(output, 6);
-#else
- // On gcc the function is regparm(3), so the input, index, and length registers
- // (eax, edx, and ecx respectively) already contain the appropriate values.
- // Just load the fourth argument (output) into edi
- peek(output, 3);
-#endif
-#endif
-}
-
-void Generator::generateReturnSuccess()
-{
- ASSERT(returnRegister != index);
- ASSERT(returnRegister != output);
-
- // Set return value.
- pop(returnRegister); // match begin
- store32(returnRegister, output);
- store32(index, Address(output, 4)); // match end
-
- // Restore callee save registers.
-#if CPU(X86)
- pop(X86Registers::esi);
- pop(X86Registers::edi);
-#endif
- ret();
-}
-
-void Generator::generateSaveIndex()
-{
- push(index);
-}
-
-void Generator::generateIncrementIndex(Jump* failure)
-{
- peek(index);
- if (failure)
- *failure = branch32(Equal, length, index);
- add32(Imm32(1), index);
- poke(index);
-}
-
-void Generator::generateLoadCharacter(JumpList& failures)
-{
- failures.append(branch32(Equal, length, index));
- load16(BaseIndex(input, index, TimesTwo), character);
-}
-
-// For the sake of end-of-line assertions, we treat one-past-the-end as if it
-// were part of the input string.
-void Generator::generateJumpIfNotEndOfInput(Label target)
-{
- branch32(LessThanOrEqual, index, length, target);
-}
-
-void Generator::generateReturnFailure()
-{
- pop();
- move(Imm32(-1), returnRegister);
-
-#if CPU(X86)
- pop(X86Registers::esi);
- pop(X86Registers::edi);
-#endif
- ret();
-}
-
-void Generator::generateBacktrack1()
-{
- sub32(Imm32(1), index);
-}
-
-void Generator::generateBacktrackBackreference(unsigned subpatternId)
-{
- sub32(Address(output, (2 * subpatternId + 1) * sizeof(int)), index);
- add32(Address(output, (2 * subpatternId) * sizeof(int)), index);
-}
-
-void Generator::generateBackreferenceQuantifier(JumpList& failures, Quantifier::Type quantifierType, unsigned subpatternId, unsigned min, unsigned max)
-{
- GenerateBackreferenceFunctor functor(subpatternId);
-
- load32(Address(output, (2 * subpatternId) * sizeof(int)), character);
- Jump skipIfEmpty = branch32(Equal, Address(output, ((2 * subpatternId) + 1) * sizeof(int)), character);
-
- ASSERT(quantifierType == Quantifier::Greedy || quantifierType == Quantifier::NonGreedy);
- if (quantifierType == Quantifier::Greedy)
- generateGreedyQuantifier(failures, functor, min, max);
- else
- generateNonGreedyQuantifier(failures, functor, min, max);
-
- skipIfEmpty.link(this);
-}
-
-void Generator::generateNonGreedyQuantifier(JumpList& failures, GenerateAtomFunctor& functor, unsigned min, unsigned max)
-{
- JumpList atomFailedList;
- JumpList alternativeFailedList;
-
- // (0) Setup: Save, then init repeatCount.
- push(repeatCount);
- move(Imm32(0), repeatCount);
- Jump start = jump();
-
- // (4) Quantifier failed: No more atom reading possible.
- Label quantifierFailed(this);
- pop(repeatCount);
- failures.append(jump());
-
- // (3) Alternative failed: If we can, read another atom, then fall through to (2) to try again.
- Label alternativeFailed(this);
- pop(index);
- if (max != Quantifier::Infinity)
- branch32(Equal, repeatCount, Imm32(max), quantifierFailed);
-
- // (1) Read an atom.
- if (min)
- start.link(this);
- Label readAtom(this);
- functor.generateAtom(this, atomFailedList);
- atomFailedList.linkTo(quantifierFailed, this);
- add32(Imm32(1), repeatCount);
-
- // (2) Keep reading if we're under the minimum.
- if (min > 1)
- branch32(LessThan, repeatCount, Imm32(min), readAtom);
-
- // (3) Test the rest of the alternative.
- if (!min)
- start.link(this);
- push(index);
- m_parser.parseAlternative(alternativeFailedList);
- alternativeFailedList.linkTo(alternativeFailed, this);
-
- pop();
- pop(repeatCount);
-}
-
-void Generator::generateGreedyQuantifier(JumpList& failures, GenerateAtomFunctor& functor, unsigned min, unsigned max)
-{
- if (!max)
- return;
-
- JumpList doneReadingAtomsList;
- JumpList alternativeFailedList;
-
- // (0) Setup: Save, then init repeatCount.
- push(repeatCount);
- move(Imm32(0), repeatCount);
-
- // (1) Greedily read as many copies of the atom as possible, then jump to (2).
- Label readAtom(this);
- functor.generateAtom(this, doneReadingAtomsList);
- add32(Imm32(1), repeatCount);
- if (max == Quantifier::Infinity)
- jump(readAtom);
- else if (max == 1)
- doneReadingAtomsList.append(jump());
- else {
- branch32(NotEqual, repeatCount, Imm32(max), readAtom);
- doneReadingAtomsList.append(jump());
- }
-
- // (5) Quantifier failed: No more backtracking possible.
- Label quantifierFailed(this);
- pop(repeatCount);
- failures.append(jump());
-
- // (4) Alternative failed: Backtrack, then fall through to (2) to try again.
- Label alternativeFailed(this);
- pop(index);
- functor.backtrack(this);
- sub32(Imm32(1), repeatCount);
-
- // (2) Verify that we have enough atoms.
- doneReadingAtomsList.link(this);
- branch32(LessThan, repeatCount, Imm32(min), quantifierFailed);
-
- // (3) Test the rest of the alternative.
- push(index);
- m_parser.parseAlternative(alternativeFailedList);
- alternativeFailedList.linkTo(alternativeFailed, this);
-
- pop();
- pop(repeatCount);
-}
-
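// Editor's note: a minimal sketch (not WebKit code) of the control flow that the greedy
// quantifier generator above emits, written as a plain C++ matcher for a single literal
// atom. matchRest stands in for "the rest of the alternative"; the non-greedy variant
// above is the mirror image (try the rest first, read one more atom on failure).
#include <cstddef>
#include <string>

typedef bool (*MatchRestFunction)(const std::string& subject, size_t index);

static bool greedyAtom(const std::string& subject, size_t index, char atom,
                       unsigned min, unsigned max, MatchRestFunction matchRest)
{
    unsigned repeatCount = 0;
    // (1) Greedily consume as many copies of the atom as allowed.
    while (repeatCount < max && index < subject.size() && subject[index] == atom) {
        ++index;
        ++repeatCount;
    }
    // (2)-(4) While at least `min` atoms remain, try the rest of the alternative;
    // on failure give one atom back and retry.
    while (repeatCount >= min) {
        if (matchRest(subject, index))
            return true;
        if (!repeatCount)
            break;
        --index;
        --repeatCount;
    }
    return false;
}
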
-void Generator::generatePatternCharacterSequence(JumpList& failures, int* sequence, size_t count)
-{
- for (size_t i = 0; i < count;) {
- if (i < count - 1) {
- if (generatePatternCharacterPair(failures, sequence[i], sequence[i + 1])) {
- i += 2;
- continue;
- }
- }
-
- generatePatternCharacter(failures, sequence[i]);
- ++i;
- }
-}
-
-bool Generator::generatePatternCharacterPair(JumpList& failures, int ch1, int ch2)
-{
- if (m_parser.ignoreCase()) {
- // Non-trivial case folding requires more than one test, so we can't
- // test as a pair with an adjacent character.
- if (!isASCII(ch1) && Unicode::toLower(ch1) != Unicode::toUpper(ch1))
- return false;
- if (!isASCII(ch2) && Unicode::toLower(ch2) != Unicode::toUpper(ch2))
- return false;
- }
-
- // Optimistically consume 2 characters.
- add32(Imm32(2), index);
- failures.append(branch32(GreaterThan, index, length));
-
- // Load the characters we just consumed, offset -2 characters from index.
- load32(BaseIndex(input, index, TimesTwo, -2 * 2), character);
-
- if (m_parser.ignoreCase()) {
- // Convert ASCII alphabet characters to upper case before testing for
- // equality. (ASCII non-alphabet characters don't require upper-casing
- // because they have no uppercase equivalents. Unicode characters don't
- // require upper-casing because we only handle Unicode characters whose
- // upper and lower cases are equal.)
- int ch1Mask = 0;
- if (isASCIIAlpha(ch1)) {
- ch1 |= 32;
- ch1Mask = 32;
- }
-
- int ch2Mask = 0;
- if (isASCIIAlpha(ch2)) {
- ch2 |= 32;
- ch2Mask = 32;
- }
-
- int mask = ch1Mask | (ch2Mask << 16);
- if (mask)
- or32(Imm32(mask), character);
- }
- int pair = ch1 | (ch2 << 16);
-
- failures.append(branch32(NotEqual, character, Imm32(pair)));
- return true;
-}
-
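// Editor's note: an illustrative standalone sketch of the pair test generated above
// (assumptions: little-endian layout, as the x86-only JIT relies on; the function name
// is not WebKit API). Two adjacent UTF-16 code units are compared as one 32-bit word,
// with ASCII letters folded to lower case by OR-ing in 0x20.
#include <cstddef>
#include <cstring>
#include <stdint.h>

static bool matchPairIgnoreCaseASCII(const uint16_t* input, size_t index, uint32_t ch1, uint32_t ch2)
{
    uint32_t word;
    std::memcpy(&word, input + index, sizeof(word)); // consume two code units at once

    uint32_t mask = 0;
    if ((ch1 | 0x20) >= 'a' && (ch1 | 0x20) <= 'z') { ch1 |= 0x20; mask |= 0x20; }        // fold ch1 to lower case
    if ((ch2 | 0x20) >= 'a' && (ch2 | 0x20) <= 'z') { ch2 |= 0x20; mask |= 0x20u << 16; } // fold ch2 to lower case

    return (word | mask) == (ch1 | (ch2 << 16)); // one comparison tests both characters
}
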
-void Generator::generatePatternCharacter(JumpList& failures, int ch)
-{
- generateLoadCharacter(failures);
-
- // used for unicode case insensitive
- bool hasUpper = false;
- Jump isUpper;
-
- // if case insensitive match
- if (m_parser.ignoreCase()) {
- UChar lower, upper;
-
- // check for ascii case sensitive characters
- if (isASCIIAlpha(ch)) {
- or32(Imm32(32), character);
- ch |= 32;
- } else if (!isASCII(ch) && ((lower = Unicode::toLower(ch)) != (upper = Unicode::toUpper(ch)))) {
-            // handle unicode case sensitive characters - branch to success on upper
- isUpper = branch32(Equal, character, Imm32(upper));
- hasUpper = true;
- ch = lower;
- }
- }
-
- // checks for ch, or lower case version of ch, if insensitive
- failures.append(branch32(NotEqual, character, Imm32((unsigned short)ch)));
-
- if (m_parser.ignoreCase() && hasUpper) {
- // for unicode case insensitive matches, branch here if upper matches.
- isUpper.link(this);
- }
-
- // on success consume the char
- add32(Imm32(1), index);
-}
-
-void Generator::generateCharacterClassInvertedRange(JumpList& failures, JumpList& matchDest, const CharacterRange* ranges, unsigned count, unsigned* matchIndex, const UChar* matches, unsigned matchCount)
-{
- do {
- // pick which range we're going to generate
- int which = count >> 1;
- char lo = ranges[which].begin;
- char hi = ranges[which].end;
-
- // check if there are any ranges or matches below lo. If not, just jl to failure -
- // if there is anything else to check, check that first, if it falls through jmp to failure.
- if ((*matchIndex < matchCount) && (matches[*matchIndex] < lo)) {
- Jump loOrAbove = branch32(GreaterThanOrEqual, character, Imm32((unsigned short)lo));
-
- // generate code for all ranges before this one
- if (which)
- generateCharacterClassInvertedRange(failures, matchDest, ranges, which, matchIndex, matches, matchCount);
-
- while ((*matchIndex < matchCount) && (matches[*matchIndex] < lo)) {
- matchDest.append(branch32(Equal, character, Imm32((unsigned short)matches[*matchIndex])));
- ++*matchIndex;
- }
- failures.append(jump());
-
- loOrAbove.link(this);
- } else if (which) {
- Jump loOrAbove = branch32(GreaterThanOrEqual, character, Imm32((unsigned short)lo));
-
- generateCharacterClassInvertedRange(failures, matchDest, ranges, which, matchIndex, matches, matchCount);
- failures.append(jump());
-
- loOrAbove.link(this);
- } else
- failures.append(branch32(LessThan, character, Imm32((unsigned short)lo)));
-
- while ((*matchIndex < matchCount) && (matches[*matchIndex] <= hi))
- ++*matchIndex;
-
- matchDest.append(branch32(LessThanOrEqual, character, Imm32((unsigned short)hi)));
- // fall through to here, the value is above hi.
-
- // shuffle along & loop around if there are any more matches to handle.
- unsigned next = which + 1;
- ranges += next;
- count -= next;
- } while (count);
-}
-
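// Editor's note: an illustrative sketch of the membership test that the branch tree
// above encodes -- a binary search over sorted, non-overlapping ranges. The singleton
// matches, which the real generator interleaves between the ranges, are left out here
// for brevity.
#include <cstddef>

struct SimpleRange { unsigned short begin, end; };

static bool inRanges(unsigned short ch, const SimpleRange* ranges, size_t count)
{
    while (count) {
        size_t which = count >> 1;
        if (ch < ranges[which].begin)
            count = which;              // continue in the lower half
        else if (ch <= ranges[which].end)
            return true;                // inside ranges[which]
        else {
            ranges += which + 1;        // continue in the upper half
            count -= which + 1;
        }
    }
    return false;
}
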
-void Generator::generateCharacterClassInverted(JumpList& matchDest, const CharacterClass& charClass)
-{
- Jump unicodeFail;
- if (charClass.numMatchesUnicode || charClass.numRangesUnicode) {
- Jump isAscii = branch32(LessThanOrEqual, character, Imm32(0x7f));
-
- if (charClass.numMatchesUnicode) {
- for (unsigned i = 0; i < charClass.numMatchesUnicode; ++i) {
- UChar ch = charClass.matchesUnicode[i];
- matchDest.append(branch32(Equal, character, Imm32(ch)));
- }
- }
-
- if (charClass.numRangesUnicode) {
- for (unsigned i = 0; i < charClass.numRangesUnicode; ++i) {
- UChar lo = charClass.rangesUnicode[i].begin;
- UChar hi = charClass.rangesUnicode[i].end;
-
- Jump below = branch32(LessThan, character, Imm32(lo));
- matchDest.append(branch32(LessThanOrEqual, character, Imm32(hi)));
- below.link(this);
- }
- }
-
- unicodeFail = jump();
- isAscii.link(this);
- }
-
- if (charClass.numRanges) {
- unsigned matchIndex = 0;
- JumpList failures;
- generateCharacterClassInvertedRange(failures, matchDest, charClass.ranges, charClass.numRanges, &matchIndex, charClass.matches, charClass.numMatches);
- while (matchIndex < charClass.numMatches)
- matchDest.append(branch32(Equal, character, Imm32((unsigned short)charClass.matches[matchIndex++])));
-
- failures.link(this);
- } else if (charClass.numMatches) {
- // optimization: gather 'a','A' etc back together, can mask & test once.
- Vector<char> matchesAZaz;
-
- for (unsigned i = 0; i < charClass.numMatches; ++i) {
- char ch = charClass.matches[i];
- if (m_parser.ignoreCase()) {
- if (isASCIILower(ch)) {
- matchesAZaz.append(ch);
- continue;
- }
- if (isASCIIUpper(ch))
- continue;
- }
- matchDest.append(branch32(Equal, character, Imm32((unsigned short)ch)));
- }
-
- if (unsigned countAZaz = matchesAZaz.size()) {
- or32(Imm32(32), character);
- for (unsigned i = 0; i < countAZaz; ++i)
- matchDest.append(branch32(Equal, character, Imm32(matchesAZaz[i])));
- }
- }
-
- if (charClass.numMatchesUnicode || charClass.numRangesUnicode)
- unicodeFail.link(this);
-}
-
-void Generator::generateCharacterClass(JumpList& failures, const CharacterClass& charClass, bool invert)
-{
- generateLoadCharacter(failures);
-
- if (invert)
- generateCharacterClassInverted(failures, charClass);
- else {
- JumpList successes;
- generateCharacterClassInverted(successes, charClass);
- failures.append(jump());
- successes.link(this);
- }
-
- add32(Imm32(1), index);
-}
-
-void Generator::generateParenthesesAssertion(JumpList& failures)
-{
- JumpList disjunctionFailed;
-
- push(index);
- m_parser.parseDisjunction(disjunctionFailed);
- Jump success = jump();
-
- disjunctionFailed.link(this);
- pop(index);
- failures.append(jump());
-
- success.link(this);
- pop(index);
-}
-
-void Generator::generateParenthesesInvertedAssertion(JumpList& failures)
-{
- JumpList disjunctionFailed;
-
- push(index);
- m_parser.parseDisjunction(disjunctionFailed);
-
- // If the disjunction succeeded, the inverted assertion failed.
- pop(index);
- failures.append(jump());
-
- // If the disjunction failed, the inverted assertion succeeded.
- disjunctionFailed.link(this);
- pop(index);
-}
-
-void Generator::generateParenthesesNonGreedy(JumpList& failures, Label start, Jump success, Jump fail)
-{
- jump(start);
- success.link(this);
- failures.append(fail);
-}
-
-Generator::Jump Generator::generateParenthesesResetTrampoline(JumpList& newFailures, unsigned subpatternIdBefore, unsigned subpatternIdAfter)
-{
- Jump skip = jump();
- newFailures.link(this);
- for (unsigned i = subpatternIdBefore + 1; i <= subpatternIdAfter; ++i) {
- store32(Imm32(-1), Address(output, (2 * i) * sizeof(int)));
- store32(Imm32(-1), Address(output, (2 * i + 1) * sizeof(int)));
- }
-
- Jump newFailJump = jump();
- skip.link(this);
-
- return newFailJump;
-}
-
-void Generator::generateAssertionBOL(JumpList& failures)
-{
- if (m_parser.multiline()) {
- JumpList previousIsNewline;
-
-        // beginning of input == success
- previousIsNewline.append(branch32(Equal, index, Imm32(0)));
-
- // now check prev char against newline characters.
- load16(BaseIndex(input, index, TimesTwo, -2), character);
- generateCharacterClassInverted(previousIsNewline, CharacterClass::newline());
-
- failures.append(jump());
-
- previousIsNewline.link(this);
- } else
- failures.append(branch32(NotEqual, index, Imm32(0)));
-}
-
-void Generator::generateAssertionEOL(JumpList& failures)
-{
- if (m_parser.multiline()) {
- JumpList nextIsNewline;
-
- generateLoadCharacter(nextIsNewline); // end of input == success
- generateCharacterClassInverted(nextIsNewline, CharacterClass::newline());
- failures.append(jump());
- nextIsNewline.link(this);
- } else {
- failures.append(branch32(NotEqual, length, index));
- }
-}
-
-void Generator::generateAssertionWordBoundary(JumpList& failures, bool invert)
-{
- JumpList wordBoundary;
- JumpList notWordBoundary;
-
- // (1) Check if the previous value was a word char
-
-    // (1.1) check for beginning of input
- Jump atBegin = branch32(Equal, index, Imm32(0));
-    // (1.2) load the last char, and check if it is a word character
- load16(BaseIndex(input, index, TimesTwo, -2), character);
- JumpList previousIsWord;
- generateCharacterClassInverted(previousIsWord, CharacterClass::wordchar());
- // (1.3) if we get here, previous is not a word char
- atBegin.link(this);
-
- // (2) Handle situation where previous was NOT a \w
-
- generateLoadCharacter(notWordBoundary);
- generateCharacterClassInverted(wordBoundary, CharacterClass::wordchar());
-    // (2.1) If we get here, neither char is a word char
- notWordBoundary.append(jump());
-
- // (3) Handle situation where previous was a \w
-
- // (3.0) link success in first match to here
- previousIsWord.link(this);
- generateLoadCharacter(wordBoundary);
- generateCharacterClassInverted(notWordBoundary, CharacterClass::wordchar());
-    // (3.1) If we get here, this is the end of a word within the input.
-
- // (4) Link everything up
-
- if (invert) {
- // handle the fall through case
- wordBoundary.append(jump());
-
- // looking for non word boundaries, so link boundary fails to here.
- notWordBoundary.link(this);
-
- failures.append(wordBoundary);
- } else {
- // looking for word boundaries, so link successes here.
- wordBoundary.link(this);
-
- failures.append(notWordBoundary);
- }
-}
-
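// Editor's note: the generated assertion above reduces to the following boolean test
// (sketch only): a position is a word boundary exactly when the "previous character is
// a word character" and "next character is a word character" tests disagree, with
// positions before the start or past the end of the input counting as non-word.
static bool isWordBoundary(bool previousIsWordChar, bool nextIsWordChar)
{
    return previousIsWordChar != nextIsWordChar;
}
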
-void Generator::generateBackreference(JumpList& failures, unsigned subpatternId)
-{
- push(index);
- push(repeatCount);
-
- // get the start pos of the backref into repeatCount (multipurpose!)
- load32(Address(output, (2 * subpatternId) * sizeof(int)), repeatCount);
-
- Jump skipIncrement = jump();
- Label topOfLoop(this);
-
- add32(Imm32(1), index);
- add32(Imm32(1), repeatCount);
- skipIncrement.link(this);
-
- // check if we're at the end of backref (if we are, success!)
- Jump endOfBackRef = branch32(Equal, Address(output, ((2 * subpatternId) + 1) * sizeof(int)), repeatCount);
-
- load16(BaseIndex(input, repeatCount, MacroAssembler::TimesTwo), character);
-
- // check if we've run out of input (this would be a can o'fail)
- Jump endOfInput = branch32(Equal, length, index);
-
- branch16(Equal, BaseIndex(input, index, TimesTwo), character, topOfLoop);
-
- endOfInput.link(this);
-
- // Failure
- pop(repeatCount);
- pop(index);
- failures.append(jump());
-
- // Success
- endOfBackRef.link(this);
- pop(repeatCount);
- pop();
-}
-
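// Editor's note: a plain C++ sketch of the comparison loop generated above (illustrative
// only). output[2 * id] and output[2 * id + 1] hold the captured group's [begin, end)
// indices; an unmatched capture (-1, -1) matches the empty string, as in the generated
// code. On failure the caller keeps its saved index, just as the JIT restores it by
// popping the saved value from the stack.
#include <cstddef>

static bool matchBackreference(const unsigned short* input, size_t length, size_t* index,
                               const int* output, unsigned subpatternId)
{
    size_t cursor = *index;
    for (int p = output[2 * subpatternId]; p < output[2 * subpatternId + 1]; ++p, ++cursor) {
        if (cursor == length || input[cursor] != input[p])
            return false;
    }
    *index = cursor;
    return true;
}
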
-void Generator::terminateAlternative(JumpList& successes, JumpList& failures)
-{
- successes.append(jump());
-
- failures.link(this);
- peek(index);
-}
-
-void Generator::terminateDisjunction(JumpList& successes)
-{
- successes.link(this);
-}
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef WRECGenerator_h
-#define WRECGenerator_h
-
-#include <wtf/Platform.h>
-
-#if ENABLE(WREC)
-
-#include "Quantifier.h"
-#include "MacroAssembler.h"
-#include <wtf/ASCIICType.h>
-#include <wtf/unicode/Unicode.h>
-#include "WREC.h"
-
-namespace JSC {
-
- class JSGlobalData;
-
- namespace WREC {
-
- class CharacterRange;
- class GenerateAtomFunctor;
- class Parser;
- struct CharacterClass;
-
- class Generator : private MacroAssembler {
- public:
- using MacroAssembler::Jump;
- using MacroAssembler::JumpList;
- using MacroAssembler::Label;
-
- enum ParenthesesType { Capturing, NonCapturing, Assertion, InvertedAssertion, Error };
-
- static CompiledRegExp compileRegExp(JSGlobalData*, const UString& pattern, unsigned* numSubpatterns_ptr, const char** error_ptr, RefPtr<ExecutablePool>& pool, bool ignoreCase = false, bool multiline = false);
-
- Generator(Parser& parser)
- : m_parser(parser)
- {
- }
-
-#if CPU(X86)
- static const RegisterID input = X86Registers::eax;
- static const RegisterID index = X86Registers::edx;
- static const RegisterID length = X86Registers::ecx;
- static const RegisterID output = X86Registers::edi;
-
- static const RegisterID character = X86Registers::esi;
- static const RegisterID repeatCount = X86Registers::ebx; // How many times the current atom repeats in the current match.
-
- static const RegisterID returnRegister = X86Registers::eax;
-#endif
-#if CPU(X86_64)
- static const RegisterID input = X86Registers::edi;
- static const RegisterID index = X86Registers::esi;
- static const RegisterID length = X86Registers::edx;
- static const RegisterID output = X86Registers::ecx;
-
- static const RegisterID character = X86Registers::eax;
- static const RegisterID repeatCount = X86Registers::ebx; // How many times the current atom repeats in the current match.
-
- static const RegisterID returnRegister = X86Registers::eax;
-#endif
-
- void generateEnter();
- void generateSaveIndex();
- void generateIncrementIndex(Jump* failure = 0);
- void generateLoadCharacter(JumpList& failures);
- void generateJumpIfNotEndOfInput(Label);
- void generateReturnSuccess();
- void generateReturnFailure();
-
- void generateGreedyQuantifier(JumpList& failures, GenerateAtomFunctor& functor, unsigned min, unsigned max);
- void generateNonGreedyQuantifier(JumpList& failures, GenerateAtomFunctor& functor, unsigned min, unsigned max);
- void generateBacktrack1();
- void generateBacktrackBackreference(unsigned subpatternId);
- void generateCharacterClass(JumpList& failures, const CharacterClass& charClass, bool invert);
- void generateCharacterClassInverted(JumpList& failures, const CharacterClass& charClass);
- void generateCharacterClassInvertedRange(JumpList& failures, JumpList& matchDest, const CharacterRange* ranges, unsigned count, unsigned* matchIndex, const UChar* matches, unsigned matchCount);
- void generatePatternCharacter(JumpList& failures, int ch);
- void generatePatternCharacterSequence(JumpList& failures, int* sequence, size_t count);
- void generateAssertionWordBoundary(JumpList& failures, bool invert);
- void generateAssertionBOL(JumpList& failures);
- void generateAssertionEOL(JumpList& failures);
- void generateBackreference(JumpList& failures, unsigned subpatternID);
- void generateBackreferenceQuantifier(JumpList& failures, Quantifier::Type quantifierType, unsigned subpatternId, unsigned min, unsigned max);
- void generateParenthesesAssertion(JumpList& failures);
- void generateParenthesesInvertedAssertion(JumpList& failures);
- Jump generateParenthesesResetTrampoline(JumpList& newFailures, unsigned subpatternIdBefore, unsigned subpatternIdAfter);
- void generateParenthesesNonGreedy(JumpList& failures, Label start, Jump success, Jump fail);
-
- void terminateAlternative(JumpList& successes, JumpList& failures);
- void terminateDisjunction(JumpList& successes);
-
- private:
- bool generatePatternCharacterPair(JumpList& failures, int ch1, int ch2);
-
- Parser& m_parser;
- };
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
-
-#endif // WRECGenerator_h
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "WRECParser.h"
-
-#if ENABLE(WREC)
-
-#include "CharacterClassConstructor.h"
-#include "WRECFunctors.h"
-
-using namespace WTF;
-
-namespace JSC { namespace WREC {
-
-// These error messages match the error messages used by PCRE.
-const char* Parser::QuantifierOutOfOrder = "numbers out of order in {} quantifier";
-const char* Parser::QuantifierWithoutAtom = "nothing to repeat";
-const char* Parser::ParenthesesUnmatched = "unmatched parentheses";
-const char* Parser::ParenthesesTypeInvalid = "unrecognized character after (?";
-const char* Parser::ParenthesesNotSupported = ""; // Not a user-visible syntax error -- just signals a syntax that WREC doesn't support yet.
-const char* Parser::CharacterClassUnmatched = "missing terminating ] for character class";
-const char* Parser::CharacterClassOutOfOrder = "range out of order in character class";
-const char* Parser::EscapeUnterminated = "\\ at end of pattern";
-
-class PatternCharacterSequence {
-    typedef Generator::JumpList JumpList;
-
-public:
- PatternCharacterSequence(Generator& generator, JumpList& failures)
- : m_generator(generator)
- , m_failures(failures)
- {
- }
-
- size_t size() { return m_sequence.size(); }
-
- void append(int ch)
- {
- m_sequence.append(ch);
- }
-
- void flush()
- {
- if (!m_sequence.size())
- return;
-
- m_generator.generatePatternCharacterSequence(m_failures, m_sequence.begin(), m_sequence.size());
- m_sequence.clear();
- }
-
- void flush(const Quantifier& quantifier)
- {
- if (!m_sequence.size())
- return;
-
- m_generator.generatePatternCharacterSequence(m_failures, m_sequence.begin(), m_sequence.size() - 1);
-
- switch (quantifier.type) {
- case Quantifier::None:
- case Quantifier::Error:
- ASSERT_NOT_REACHED();
- break;
-
- case Quantifier::Greedy: {
- GeneratePatternCharacterFunctor functor(m_sequence.last());
- m_generator.generateGreedyQuantifier(m_failures, functor, quantifier.min, quantifier.max);
- break;
- }
-
- case Quantifier::NonGreedy: {
- GeneratePatternCharacterFunctor functor(m_sequence.last());
- m_generator.generateNonGreedyQuantifier(m_failures, functor, quantifier.min, quantifier.max);
- break;
- }
- }
-
- m_sequence.clear();
- }
-
-private:
- Generator& m_generator;
- JumpList& m_failures;
- Vector<int, 8> m_sequence;
-};
-
-ALWAYS_INLINE Quantifier Parser::consumeGreedyQuantifier()
-{
- switch (peek()) {
- case '?':
- consume();
- return Quantifier(Quantifier::Greedy, 0, 1);
-
- case '*':
- consume();
- return Quantifier(Quantifier::Greedy, 0);
-
- case '+':
- consume();
- return Quantifier(Quantifier::Greedy, 1);
-
- case '{': {
- SavedState state(*this);
- consume();
-
- // Accept: {n}, {n,}, {n,m}.
- // Reject: {n,m} where n > m.
- // Ignore: Anything else, such as {n, m}.
-
- if (!peekIsDigit()) {
- state.restore();
- return Quantifier();
- }
-
- unsigned min = consumeNumber();
- unsigned max = min;
-
- if (peek() == ',') {
- consume();
- max = peekIsDigit() ? consumeNumber() : Quantifier::Infinity;
- }
-
- if (peek() != '}') {
- state.restore();
- return Quantifier();
- }
- consume();
-
- if (min > max) {
- setError(QuantifierOutOfOrder);
- return Quantifier(Quantifier::Error);
- }
-
- return Quantifier(Quantifier::Greedy, min, max);
- }
-
- default:
- return Quantifier(); // No quantifier.
- }
-}
-
-Quantifier Parser::consumeQuantifier()
-{
- Quantifier q = consumeGreedyQuantifier();
-
- if ((q.type == Quantifier::Greedy) && (peek() == '?')) {
- consume();
- q.type = Quantifier::NonGreedy;
- }
-
- return q;
-}
-
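// Editor's note (illustrative only): examples of what the quantifier parsing above yields --
//   "{3}"    -> Quantifier(Greedy, 3, 3)
//   "{3,}"   -> Quantifier(Greedy, 3, Infinity)
//   "{3,5}"  -> Quantifier(Greedy, 3, 5); a trailing '?' turns Greedy into NonGreedy
//   "{5,3}"  -> error ("numbers out of order in {} quantifier")
//   "{3, 5}" -> no quantifier, so the text is consumed as literal pattern characters
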
-bool Parser::parseCharacterClassQuantifier(JumpList& failures, const CharacterClass& charClass, bool invert)
-{
- Quantifier q = consumeQuantifier();
-
- switch (q.type) {
- case Quantifier::None: {
- m_generator.generateCharacterClass(failures, charClass, invert);
- break;
- }
-
- case Quantifier::Greedy: {
- GenerateCharacterClassFunctor functor(&charClass, invert);
- m_generator.generateGreedyQuantifier(failures, functor, q.min, q.max);
- break;
- }
-
- case Quantifier::NonGreedy: {
- GenerateCharacterClassFunctor functor(&charClass, invert);
- m_generator.generateNonGreedyQuantifier(failures, functor, q.min, q.max);
- break;
- }
-
- case Quantifier::Error:
- return false;
- }
-
- return true;
-}
-
-bool Parser::parseBackreferenceQuantifier(JumpList& failures, unsigned subpatternId)
-{
- Quantifier q = consumeQuantifier();
-
- switch (q.type) {
- case Quantifier::None: {
- m_generator.generateBackreference(failures, subpatternId);
- break;
- }
-
- case Quantifier::Greedy:
- case Quantifier::NonGreedy:
- m_generator.generateBackreferenceQuantifier(failures, q.type, subpatternId, q.min, q.max);
- return true;
-
- case Quantifier::Error:
- return false;
- }
-
- return true;
-}
-
-bool Parser::parseParentheses(JumpList& failures)
-{
- ParenthesesType type = consumeParenthesesType();
-
- // FIXME: WREC originally failed to backtrack correctly in cases such as
- // "c".match(/(.*)c/). Now, most parentheses handling is disabled. For
- // unsupported parentheses, we fall back on PCRE.
-
- switch (type) {
- case Generator::Assertion: {
- m_generator.generateParenthesesAssertion(failures);
-
- if (consume() != ')') {
- setError(ParenthesesUnmatched);
- return false;
- }
-
- Quantifier quantifier = consumeQuantifier();
- if (quantifier.type != Quantifier::None && quantifier.min == 0) {
- setError(ParenthesesNotSupported);
- return false;
- }
-
- return true;
- }
- case Generator::InvertedAssertion: {
- m_generator.generateParenthesesInvertedAssertion(failures);
-
- if (consume() != ')') {
- setError(ParenthesesUnmatched);
- return false;
- }
-
- Quantifier quantifier = consumeQuantifier();
- if (quantifier.type != Quantifier::None && quantifier.min == 0) {
- setError(ParenthesesNotSupported);
- return false;
- }
-
- return true;
- }
- default:
- setError(ParenthesesNotSupported);
- return false;
- }
-}
-
-bool Parser::parseCharacterClass(JumpList& failures)
-{
- bool invert = false;
- if (peek() == '^') {
- consume();
- invert = true;
- }
-
- CharacterClassConstructor constructor(m_ignoreCase);
-
- int ch;
- while ((ch = peek()) != ']') {
- switch (ch) {
- case EndOfPattern:
- setError(CharacterClassUnmatched);
- return false;
-
- case '\\': {
- consume();
- Escape escape = consumeEscape(true);
-
- switch (escape.type()) {
- case Escape::PatternCharacter: {
- int character = PatternCharacterEscape::cast(escape).character();
- if (character == '-')
- constructor.flushBeforeEscapedHyphen();
- constructor.put(character);
- break;
- }
- case Escape::CharacterClass: {
- const CharacterClassEscape& characterClassEscape = CharacterClassEscape::cast(escape);
- ASSERT(!characterClassEscape.invert());
- constructor.append(characterClassEscape.characterClass());
- break;
- }
- case Escape::Error:
- return false;
- case Escape::Backreference:
- case Escape::WordBoundaryAssertion: {
- ASSERT_NOT_REACHED();
- break;
- }
- }
- break;
- }
-
- default:
- consume();
- constructor.put(ch);
- }
- }
- consume();
-
-    // lazily catch reversed ranges ([z-a]) in character classes
- if (constructor.isUpsideDown()) {
- setError(CharacterClassOutOfOrder);
- return false;
- }
-
- constructor.flush();
- CharacterClass charClass = constructor.charClass();
- return parseCharacterClassQuantifier(failures, charClass, invert);
-}
-
-bool Parser::parseNonCharacterEscape(JumpList& failures, const Escape& escape)
-{
- switch (escape.type()) {
- case Escape::PatternCharacter:
- ASSERT_NOT_REACHED();
- return false;
-
- case Escape::CharacterClass:
- return parseCharacterClassQuantifier(failures, CharacterClassEscape::cast(escape).characterClass(), CharacterClassEscape::cast(escape).invert());
-
- case Escape::Backreference:
- return parseBackreferenceQuantifier(failures, BackreferenceEscape::cast(escape).subpatternId());
-
- case Escape::WordBoundaryAssertion:
- m_generator.generateAssertionWordBoundary(failures, WordBoundaryAssertionEscape::cast(escape).invert());
- return true;
-
- case Escape::Error:
- return false;
- }
-
- ASSERT_NOT_REACHED();
- return false;
-}
-
-Escape Parser::consumeEscape(bool inCharacterClass)
-{
- switch (peek()) {
- case EndOfPattern:
- setError(EscapeUnterminated);
- return Escape(Escape::Error);
-
- // Assertions
- case 'b':
- consume();
- if (inCharacterClass)
- return PatternCharacterEscape('\b');
- return WordBoundaryAssertionEscape(false); // do not invert
- case 'B':
- consume();
- if (inCharacterClass)
- return PatternCharacterEscape('B');
- return WordBoundaryAssertionEscape(true); // invert
-
- // CharacterClassEscape
- case 'd':
- consume();
- return CharacterClassEscape(CharacterClass::digits(), false);
- case 's':
- consume();
- return CharacterClassEscape(CharacterClass::spaces(), false);
- case 'w':
- consume();
- return CharacterClassEscape(CharacterClass::wordchar(), false);
- case 'D':
- consume();
- return inCharacterClass
- ? CharacterClassEscape(CharacterClass::nondigits(), false)
- : CharacterClassEscape(CharacterClass::digits(), true);
- case 'S':
- consume();
- return inCharacterClass
- ? CharacterClassEscape(CharacterClass::nonspaces(), false)
- : CharacterClassEscape(CharacterClass::spaces(), true);
- case 'W':
- consume();
- return inCharacterClass
- ? CharacterClassEscape(CharacterClass::nonwordchar(), false)
- : CharacterClassEscape(CharacterClass::wordchar(), true);
-
- // DecimalEscape
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9': {
- if (peekDigit() > m_numSubpatterns || inCharacterClass) {
- // To match Firefox, we parse an invalid backreference in the range [1-7]
- // as an octal escape.
- return peekDigit() > 7 ? PatternCharacterEscape('\\') : PatternCharacterEscape(consumeOctal());
- }
-
- int value = 0;
- do {
- unsigned newValue = value * 10 + peekDigit();
- if (newValue > m_numSubpatterns)
- break;
- value = newValue;
- consume();
- } while (peekIsDigit());
-
- return BackreferenceEscape(value);
- }
-
- // Octal escape
- case '0':
- consume();
- return PatternCharacterEscape(consumeOctal());
-
- // ControlEscape
- case 'f':
- consume();
- return PatternCharacterEscape('\f');
- case 'n':
- consume();
- return PatternCharacterEscape('\n');
- case 'r':
- consume();
- return PatternCharacterEscape('\r');
- case 't':
- consume();
- return PatternCharacterEscape('\t');
- case 'v':
- consume();
- return PatternCharacterEscape('\v');
-
- // ControlLetter
- case 'c': {
- SavedState state(*this);
- consume();
-
- int control = consume();
- // To match Firefox, inside a character class, we also accept numbers
- // and '_' as control characters.
- if ((!inCharacterClass && !isASCIIAlpha(control)) || (!isASCIIAlphanumeric(control) && control != '_')) {
- state.restore();
- return PatternCharacterEscape('\\');
- }
- return PatternCharacterEscape(control & 31);
- }
-
- // HexEscape
- case 'x': {
- consume();
-
- SavedState state(*this);
- int x = consumeHex(2);
- if (x == -1) {
- state.restore();
- return PatternCharacterEscape('x');
- }
- return PatternCharacterEscape(x);
- }
-
- // UnicodeEscape
- case 'u': {
- consume();
-
- SavedState state(*this);
- int x = consumeHex(4);
- if (x == -1) {
- state.restore();
- return PatternCharacterEscape('u');
- }
- return PatternCharacterEscape(x);
- }
-
- // IdentityEscape
- default:
- return PatternCharacterEscape(consume());
- }
-}
-
-void Parser::parseAlternative(JumpList& failures)
-{
- PatternCharacterSequence sequence(m_generator, failures);
-
- while (1) {
- switch (peek()) {
- case EndOfPattern:
- case '|':
- case ')':
- sequence.flush();
- return;
-
- case '*':
- case '+':
- case '?':
- case '{': {
- Quantifier q = consumeQuantifier();
-
- if (q.type == Quantifier::None) {
- sequence.append(consume());
- continue;
- }
-
- if (q.type == Quantifier::Error)
- return;
-
- if (!sequence.size()) {
- setError(QuantifierWithoutAtom);
- return;
- }
-
- sequence.flush(q);
- continue;
- }
-
- case '^':
- consume();
-
- sequence.flush();
- m_generator.generateAssertionBOL(failures);
- continue;
-
- case '$':
- consume();
-
- sequence.flush();
- m_generator.generateAssertionEOL(failures);
- continue;
-
- case '.':
- consume();
-
- sequence.flush();
- if (!parseCharacterClassQuantifier(failures, CharacterClass::newline(), true))
- return;
- continue;
-
- case '[':
- consume();
-
- sequence.flush();
- if (!parseCharacterClass(failures))
- return;
- continue;
-
- case '(':
- consume();
-
- sequence.flush();
- if (!parseParentheses(failures))
- return;
- continue;
-
- case '\\': {
- consume();
-
- Escape escape = consumeEscape(false);
- if (escape.type() == Escape::PatternCharacter) {
- sequence.append(PatternCharacterEscape::cast(escape).character());
- continue;
- }
-
- sequence.flush();
- if (!parseNonCharacterEscape(failures, escape))
- return;
- continue;
- }
-
- default:
- sequence.append(consume());
- continue;
- }
- }
-}
-
-/*
- TOS holds index.
-*/
-void Parser::parseDisjunction(JumpList& failures)
-{
- parseAlternative(failures);
- if (peek() != '|')
- return;
-
- JumpList successes;
- do {
- consume();
- m_generator.terminateAlternative(successes, failures);
- parseAlternative(failures);
- } while (peek() == '|');
-
- m_generator.terminateDisjunction(successes);
-}
-
-Generator::ParenthesesType Parser::consumeParenthesesType()
-{
- if (peek() != '?')
- return Generator::Capturing;
- consume();
-
- switch (consume()) {
- case ':':
- return Generator::NonCapturing;
-
- case '=':
- return Generator::Assertion;
-
- case '!':
- return Generator::InvertedAssertion;
-
- default:
- setError(ParenthesesTypeInvalid);
- return Generator::Error;
- }
-}
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
+++ /dev/null
-/*
- * Copyright (C) 2008 Apple Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef Parser_h
-#define Parser_h
-
-#include <wtf/Platform.h>
-
-#if ENABLE(WREC)
-
-#include "Escapes.h"
-#include "Quantifier.h"
-#include "UString.h"
-#include "WRECGenerator.h"
-#include <wtf/ASCIICType.h>
-
-namespace JSC { namespace WREC {
-
- struct CharacterClass;
-
- class Parser {
- typedef Generator::JumpList JumpList;
- typedef Generator::ParenthesesType ParenthesesType;
-
- friend class SavedState;
-
- public:
- Parser(const UString& pattern, bool ignoreCase, bool multiline)
- : m_generator(*this)
- , m_data(pattern.data())
- , m_size(pattern.size())
- , m_ignoreCase(ignoreCase)
- , m_multiline(multiline)
- {
- reset();
- }
-
- Generator& generator() { return m_generator; }
-
- bool ignoreCase() const { return m_ignoreCase; }
- bool multiline() const { return m_multiline; }
-
- void recordSubpattern() { ++m_numSubpatterns; }
- unsigned numSubpatterns() const { return m_numSubpatterns; }
-
- const char* error() const { return m_error; }
- const char* syntaxError() const { return m_error == ParenthesesNotSupported ? 0 : m_error; }
-
- void parsePattern(JumpList& failures)
- {
- reset();
-
- parseDisjunction(failures);
-
- if (peek() != EndOfPattern)
- setError(ParenthesesUnmatched); // Parsing the pattern should fully consume it.
- }
-
- void parseDisjunction(JumpList& failures);
- void parseAlternative(JumpList& failures);
- bool parseTerm(JumpList& failures);
- bool parseNonCharacterEscape(JumpList& failures, const Escape&);
- bool parseParentheses(JumpList& failures);
- bool parseCharacterClass(JumpList& failures);
- bool parseCharacterClassQuantifier(JumpList& failures, const CharacterClass& charClass, bool invert);
- bool parseBackreferenceQuantifier(JumpList& failures, unsigned subpatternId);
-
- private:
- class SavedState {
- public:
- SavedState(Parser& parser)
- : m_parser(parser)
- , m_index(parser.m_index)
- {
- }
-
- void restore()
- {
- m_parser.m_index = m_index;
- }
-
- private:
- Parser& m_parser;
- unsigned m_index;
- };
-
- void reset()
- {
- m_index = 0;
- m_numSubpatterns = 0;
- m_error = 0;
- }
-
- void setError(const char* error)
- {
- if (m_error)
- return;
- m_error = error;
- }
-
- int peek()
- {
- if (m_index >= m_size)
- return EndOfPattern;
- return m_data[m_index];
- }
-
- int consume()
- {
- if (m_index >= m_size)
- return EndOfPattern;
- return m_data[m_index++];
- }
-
- bool peekIsDigit()
- {
- return WTF::isASCIIDigit(peek());
- }
-
- unsigned peekDigit()
- {
- ASSERT(peekIsDigit());
- return peek() - '0';
- }
-
- unsigned consumeDigit()
- {
- ASSERT(peekIsDigit());
- return consume() - '0';
- }
-
- unsigned consumeNumber()
- {
- int n = consumeDigit();
- while (peekIsDigit()) {
- n *= 10;
- n += consumeDigit();
- }
- return n;
- }
-
- int consumeHex(int count)
- {
- int n = 0;
- while (count--) {
- if (!WTF::isASCIIHexDigit(peek()))
- return -1;
- n = (n << 4) | WTF::toASCIIHexValue(consume());
- }
- return n;
- }
-
- unsigned consumeOctal()
- {
- unsigned n = 0;
- while (n < 32 && WTF::isASCIIOctalDigit(peek()))
- n = n * 8 + consumeDigit();
- return n;
- }
-
- ALWAYS_INLINE Quantifier consumeGreedyQuantifier();
- Quantifier consumeQuantifier();
- Escape consumeEscape(bool inCharacterClass);
- ParenthesesType consumeParenthesesType();
-
- static const int EndOfPattern = -1;
-
- // Error messages.
- static const char* QuantifierOutOfOrder;
- static const char* QuantifierWithoutAtom;
- static const char* ParenthesesUnmatched;
- static const char* ParenthesesTypeInvalid;
- static const char* ParenthesesNotSupported;
- static const char* CharacterClassUnmatched;
- static const char* CharacterClassOutOfOrder;
- static const char* EscapeUnterminated;
-
- Generator m_generator;
- const UChar* m_data;
- unsigned m_size;
- unsigned m_index;
- bool m_ignoreCase;
- bool m_multiline;
- unsigned m_numSubpatterns;
- const char* m_error;
- };
-
-} } // namespace JSC::WREC
-
-#endif // ENABLE(WREC)
-
-#endif // Parser_h
from settings import *
jscore_excludes = ['jsc.cpp', 'ucptable.cpp']
-jscore_excludes.extend(get_excludes(jscore_dir, ['*CF.cpp', '*Symbian.cpp']))
+jscore_excludes.extend(get_excludes(jscore_dir, ['*Brew.cpp', '*CF.cpp', '*Symbian.cpp']))
sources = []
full_dirs = get_dirs_for_features(jscore_dir, features=[build_port], dirs=jscore_dirs)
includes = common_includes + full_dirs
+ if sys.platform.startswith('darwin'):
+ includes.append(os.path.join(jscore_dir, 'icu'))
# 1. A simple program
jscore = bld.new_task_gen(
features = 'cxx cstaticlib',
- includes = '. .. assembler wrec DerivedSources ForwardingHeaders ' + ' '.join(includes),
+ includes = '. .. assembler DerivedSources ForwardingHeaders ' + ' '.join(includes),
source = sources,
target = 'jscore',
uselib = 'WX ICU ' + get_config(),
obj = bld.new_task_gen(
features = 'cxx cprogram',
- includes = '. .. assembler wrec DerivedSources ForwardingHeaders ' + ' '.join(includes),
+ includes = '. .. assembler DerivedSources ForwardingHeaders ' + ' '.join(includes),
source = 'jsc.cpp',
target = 'jsc',
uselib = 'WX ICU ' + get_config(),
#define WTF_ASCIICType_h
#include <wtf/Assertions.h>
-#include <wtf/Platform.h>
// The behavior of many of the functions in the <ctype.h> header is dependent
// on the current locale. But in the WebKit project, all uses of those functions
#ifndef NEVER_INLINE
#if COMPILER(GCC)
#define NEVER_INLINE __attribute__((__noinline__))
+#elif COMPILER(RVCT)
+#define NEVER_INLINE __declspec(noinline)
#else
#define NEVER_INLINE
#endif
#ifndef NO_RETURN
#if COMPILER(GCC)
#define NO_RETURN __attribute((__noreturn__))
+#elif COMPILER(MSVC) || COMPILER(RVCT)
+#define NO_RETURN __declspec(noreturn)
#else
#define NO_RETURN
#endif
#endif
+
+#ifndef NO_RETURN_WITH_VALUE
+#if !COMPILER(MSVC)
+#define NO_RETURN_WITH_VALUE NO_RETURN
+#else
+#define NO_RETURN_WITH_VALUE
+#endif
+#endif
#endif
#ifdef NDEBUG
+/* Disable ASSERT* macros in release mode. */
#define ASSERTIONS_DISABLED_DEFAULT 1
#else
#define ASSERTIONS_DISABLED_DEFAULT 0
#endif
-#if COMPILER(MSVC7) || COMPILER(WINSCW)
+#if COMPILER(MSVC7_OR_LOWER) || COMPILER(WINSCW)
#define HAVE_VARIADIC_MACRO 0
#else
#define HAVE_VARIADIC_MACRO 1
}
#endif
-/* CRASH -- gets us into the debugger or the crash reporter -- signals are ignored by the crash reporter so we must do better */
+/* CRASH() - Raises a fatal error resulting in program termination and triggering either the debugger or the crash reporter.
+ Use CRASH() in response to known, unrecoverable errors like out-of-memory.
+   The macro is enabled in both debug and release builds.
+ To test for unknown errors and verify assumptions, use ASSERT instead, to avoid impacting performance in release builds.
+
+ Signals are ignored by the crash reporter on OS X so we must do better.
+*/
#ifndef CRASH
#if OS(SYMBIAN)
#define CRASH() do { \
#endif
#endif
-/* ASSERT, ASSERT_NOT_REACHED, ASSERT_UNUSED */
+/* ASSERT, ASSERT_NOT_REACHED, ASSERT_UNUSED
+
+ These macros are compiled out of release builds.
+ Expressions inside them are evaluated in debug builds only.
+*/
#if OS(WINCE) && !PLATFORM(TORCHMOBILE)
/* FIXME: We include this here only to avoid a conflict with the ASSERT macro. */
#undef ASSERT
#endif
+#if PLATFORM(BREWMP)
+/* FIXME: We include this here only to avoid a conflict with the COMPILE_ASSERT macro. */
+#include <AEEClassIDs.h>
+
+/* FIXME: Change to use something other than COMPILE_ASSERT to avoid this conflict with the underlying platform */
+#undef COMPILE_ASSERT
+#endif
+
#if ASSERT_DISABLED
#define ASSERT(assertion) ((void)0)
/* ASSERT_WITH_MESSAGE */
-#if COMPILER(MSVC7)
+#if COMPILER(MSVC7_OR_LOWER)
#define ASSERT_WITH_MESSAGE(assertion) ((void)0)
#elif COMPILER(WINSCW)
#define ASSERT_WITH_MESSAGE(assertion, arg...) ((void)0)
/* FATAL */
-#if COMPILER(MSVC7)
+#if COMPILER(MSVC7_OR_LOWER)
#define FATAL() ((void)0)
#elif COMPILER(WINSCW)
#define FATAL(arg...) ((void)0)
/* LOG_ERROR */
-#if COMPILER(MSVC7)
+#if COMPILER(MSVC7_OR_LOWER)
#define LOG_ERROR() ((void)0)
#elif COMPILER(WINSCW)
#define LOG_ERROR(arg...) ((void)0)
/* LOG */
-#if COMPILER(MSVC7)
+#if COMPILER(MSVC7_OR_LOWER)
#define LOG() ((void)0)
#elif COMPILER(WINSCW)
#define LOG(arg...) ((void)0)
/* LOG_VERBOSE */
-#if COMPILER(MSVC7)
+#if COMPILER(MSVC7_OR_LOWER)
#define LOG_VERBOSE(channel) ((void)0)
#elif COMPILER(WINSCW)
#define LOG_VERBOSE(channel, arg...) ((void)0)
--- /dev/null
+/*
+ * Copyright (C) 2007, 2008, 2010 Apple Inc. All rights reserved.
+ * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ *
+ * Note: The implementations of InterlockedIncrement and InterlockedDecrement are based
+ * on atomic_increment and atomic_exchange_and_add from the Boost C++ Library. The license
+ * is virtually identical to the Apple license above but is included here for completeness.
+ *
+ * Boost Software License - Version 1.0 - August 17th, 2003
+ *
+ * Permission is hereby granted, free of charge, to any person or organization
+ * obtaining a copy of the software and accompanying documentation covered by
+ * this license (the "Software") to use, reproduce, display, distribute,
+ * execute, and transmit the Software, and to prepare derivative works of the
+ * Software, and to permit third-parties to whom the Software is furnished to
+ * do so, all subject to the following:
+ *
+ * The copyright notices in the Software and this entire statement, including
+ * the above license grant, this restriction and the following disclaimer,
+ * must be included in all copies of the Software, in whole or in part, and
+ * all derivative works of the Software, unless such copies or derivative
+ * works are solely in the form of machine-executable object code generated by
+ * a source language processor.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+ * FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+ * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ */
+
+#ifndef Atomics_h
+#define Atomics_h
+
+#include "Platform.h"
+
+#if OS(WINDOWS)
+#include <windows.h>
+#elif OS(DARWIN)
+#include <libkern/OSAtomic.h>
+#elif OS(ANDROID)
+#include <cutils/atomic.h>
+#elif COMPILER(GCC) && !OS(SYMBIAN)
+#if (__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ >= 2))
+#include <ext/atomicity.h>
+#else
+#include <bits/atomicity.h>
+#endif
+#endif
+
+namespace WTF {
+
+#if OS(WINDOWS)
+#define WTF_USE_LOCKFREE_THREADSAFESHARED 1
+
+#if COMPILER(MINGW) || COMPILER(MSVC7_OR_LOWER) || OS(WINCE)
+inline int atomicIncrement(int* addend) { return InterlockedIncrement(reinterpret_cast<long*>(addend)); }
+inline int atomicDecrement(int* addend) { return InterlockedDecrement(reinterpret_cast<long*>(addend)); }
+#else
+inline int atomicIncrement(int volatile* addend) { return InterlockedIncrement(reinterpret_cast<long volatile*>(addend)); }
+inline int atomicDecrement(int volatile* addend) { return InterlockedDecrement(reinterpret_cast<long volatile*>(addend)); }
+#endif
+
+#elif OS(DARWIN)
+#define WTF_USE_LOCKFREE_THREADSAFESHARED 1
+
+inline int atomicIncrement(int volatile* addend) { return OSAtomicIncrement32Barrier(const_cast<int*>(addend)); }
+inline int atomicDecrement(int volatile* addend) { return OSAtomicDecrement32Barrier(const_cast<int*>(addend)); }
+
+#elif OS(ANDROID)
+
+inline int atomicIncrement(int volatile* addend) { return android_atomic_inc(addend); }
+inline int atomicDecrement(int volatile* addend) { return android_atomic_dec(addend); }
+
+#elif COMPILER(GCC) && !CPU(SPARC64) && !OS(SYMBIAN) // sizeof(_Atomic_word) != sizeof(int) on sparc64 gcc
+#define WTF_USE_LOCKFREE_THREADSAFESHARED 1
+
+inline int atomicIncrement(int volatile* addend) { return __gnu_cxx::__exchange_and_add(addend, 1) + 1; }
+inline int atomicDecrement(int volatile* addend) { return __gnu_cxx::__exchange_and_add(addend, -1) - 1; }
+
+#endif
+
+} // namespace WTF
+
+#if USE(LOCKFREE_THREADSAFESHARED)
+using WTF::atomicDecrement;
+using WTF::atomicIncrement;
+#endif
+
+#endif // Atomics_h
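// Editor's note: a usage sketch, not part of the patch. A typical consumer of the
// primitives above is a lock-free reference count. The class below is hypothetical,
// the include path is an assumption based on the header guard, and it only applies
// on platforms where atomicIncrement/atomicDecrement are defined above.
#include <wtf/Atomics.h>

class AtomicRefCount {
public:
    AtomicRefCount() : m_count(1) { }
    void ref() { WTF::atomicIncrement(&m_count); }
    bool deref() { return !WTF::atomicDecrement(&m_count); } // true once the last reference is gone
private:
    int m_count; // int* converts to int volatile*, so this works with either overload above
};
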
--- /dev/null
+/*
+ * Copyright (C) 2010 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef Complex_h
+#define Complex_h
+
+#include <complex>
+#include <wtf/MathExtras.h>
+
+namespace WebCore {
+
+typedef std::complex<double> Complex;
+
+inline Complex complexFromMagnitudePhase(double magnitude, double phase)
+{
+ return Complex(magnitude * cos(phase), magnitude * sin(phase));
+}
+
+} // namespace WebCore
+
+#endif // Complex_h
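// Editor's note: a small usage check, not part of the patch. complexFromMagnitudePhase()
// is just the polar-to-rectangular conversion magnitude * (cos(phase) + i * sin(phase));
// for magnitude 1 and phase pi/2 it yields approximately (0, 1).
#include <cmath>
#include <complex>
#include <cstdio>

int main()
{
    const double magnitude = 1.0;
    const double phase = 1.5707963267948966; // pi / 2
    std::complex<double> c(magnitude * std::cos(phase), magnitude * std::sin(phase));
    std::printf("%.3f %+.3fi\n", c.real(), c.imag()); // prints "0.000 +1.000i"
    return 0;
}
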
/*
- * Copyright (C) 2006 Apple Computer, Inc. All rights reserved.
+ * Copyright (C) 2006, 2010 Apple Inc. All rights reserved.
* Copyright (C) 2008 Google Inc. All rights reserved.
* Copyright (C) 2007-2009 Torch Mobile, Inc.
*
#endif
#endif
-#elif PLATFORM(CF)
-#include <CoreFoundation/CFDate.h>
#elif PLATFORM(GTK)
#include <glib.h>
#elif PLATFORM(WX)
#include <wx/datetime.h>
-#else // Posix systems relying on the gettimeofday()
+#elif PLATFORM(BREWMP)
+#include <AEEStdLib.h>
+#else
#include <sys/time.h>
#endif
#endif // USE(QUERY_PERFORMANCE_COUNTER)
-#elif PLATFORM(CF)
-
-double currentTime()
-{
- return CFAbsoluteTimeGetCurrent() + kCFAbsoluteTimeIntervalSince1970;
-}
-
#elif PLATFORM(GTK)
// Note: GTK on Windows will pick up the PLATFORM(WIN) implementation above which provides
return (double)now.GetTicks() + (double)(now.GetMillisecond() / 1000.0);
}
-#else // Other Posix systems rely on the gettimeofday().
+#elif PLATFORM(BREWMP)
+// GETUTCSECONDS returns the number of seconds since 1980/01/06 00:00:00 UTC,
+// and GETTIMEMS returns the number of milliseconds that have elapsed since the last
+// occurrence of 00:00:00 local time.
+// We can combine GETUTCSECONDS and GETTIMEMS to calculate the number of milliseconds
+// since 1970/01/01 00:00:00 UTC.
double currentTime()
{
- struct timeval now;
- struct timezone zone;
+ // diffSeconds is the number of seconds from 1970/01/01 to 1980/01/06
+ const unsigned diffSeconds = 315964800;
+ return static_cast<double>(diffSeconds + GETUTCSECONDS() + ((GETTIMEMS() % 1000) / msPerSecond));
+}
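// Editor's check of the diffSeconds constant above (illustrative only): 1970/01/01 to
// 1980/01/06 is ten years of 365 days plus two leap days (1972, 1976) plus five more
// days in January 1980, i.e. 3657 days, and 3657 * 86400 = 315964800 seconds.
#include <cstdio>

int main()
{
    const long days = 10 * 365 + 2 + 5;      // 3657 days between the two epochs
    std::printf("%ld\n", days * 86400L);     // prints 315964800, matching diffSeconds
    return 0;
}
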
+
+#else
- gettimeofday(&now, &zone);
- return static_cast<double>(now.tv_sec) + (double)(now.tv_usec / 1000000.0);
+double currentTime()
+{
+ struct timeval now;
+ gettimeofday(&now, 0);
+ return now.tv_sec + now.tv_usec / 1000000.0;
}
#endif
inline void getLocalTime(const time_t* localTime, struct tm* localTM)
{
- #if COMPILER(MSVC7) || COMPILER(MINGW) || OS(WINCE)
+ #if COMPILER(MSVC7_OR_LOWER) || COMPILER(MINGW) || OS(WINCE)
*localTM = *localtime(localTime);
#elif COMPILER(MSVC)
localtime_s(localTM, localTime);
} // namespace WTF
using WTF::currentTime;
+using WTF::getLocalTime;
#endif // CurrentTime_h
#if ENABLE(JSC_MULTIPLE_THREADS)
#include <pthread.h>
#endif
+#if USE(PTHREAD_GETSPECIFIC_DIRECT)
+#include <System/pthread_machdep.h>
+#endif
#ifndef NO_TCMALLOC_SAMPLES
#ifdef WTF_CHANGES
#if FORCE_SYSTEM_MALLOC
+#if PLATFORM(BREWMP)
+#include "brew/SystemMallocBrew.h"
+#endif
+
+#if OS(DARWIN)
+#include <malloc/malloc.h>
+#elif COMPILER(MSVC)
+#include <malloc.h>
+#endif
+
namespace WTF {
TryMallocReturnValue tryFastMalloc(size_t n)
void* result = malloc(n);
#endif
- if (!result)
+ if (!result) {
+#if PLATFORM(BREWMP)
+ // The behavior of malloc(0) is implementation defined.
+ // To make sure that fastMalloc never returns 0, retry with fastMalloc(1).
+ if (!n)
+ return fastMalloc(1);
+#endif
CRASH();
+ }
+
return result;
}
void* result = calloc(n_elements, element_size);
#endif
- if (!result)
+ if (!result) {
+#if PLATFORM(BREWMP)
+ // If either n_elements or element_size is 0, the behavior of calloc is implementation defined.
+ // To make sure that fastCalloc never returns 0, retry with fastCalloc(1, 1).
+ if (!n_elements || !element_size)
+ return fastCalloc(1, 1);
+#endif
CRASH();
+ }
+
return result;
}
FastMallocStatistics fastMallocStatistics()
{
- FastMallocStatistics statistics = { 0, 0, 0, 0 };
+ FastMallocStatistics statistics = { 0, 0, 0 };
return statistics;
}
+size_t fastMallocSize(const void* p)
+{
+#if OS(DARWIN)
+ return malloc_size(p);
+#elif COMPILER(MSVC)
+ return _msize(const_cast<void*>(p));
+#else
+ return 1;
+#endif
+}
+
} // namespace WTF
#if OS(DARWIN)
#include <algorithm>
#include <errno.h>
#include <limits>
-#include <new>
#include <pthread.h>
#include <stdarg.h>
#include <stddef.h>
#include <windows.h>
#endif
-#if WTF_CHANGES
+#ifdef WTF_CHANGES
#if OS(DARWIN)
#include "MallocZoneSupport.h"
// call to the function on Mac OS X, and it's used in performance-critical code. So we
// use a function pointer. But that's not necessarily faster on other platforms, and we had
// problems with this technique on Windows, so we'll do this only on Mac OS X.
+#if USE(PTHREAD_GETSPECIFIC_DIRECT)
+#define pthread_getspecific(key) _pthread_getspecific_direct(key)
+#define pthread_setspecific(key, val) _pthread_setspecific_direct(key, (val))
+#else
#if OS(DARWIN)
static void* (*pthread_getspecific_function_pointer)(pthread_key_t) = pthread_getspecific;
#define pthread_getspecific(key) pthread_getspecific_function_pointer(key)
#endif
+#endif
#define DEFINE_VARIABLE(type, name, value, meaning) \
namespace FLAG__namespace_do_not_use_directly_use_DECLARE_##type##_instead { \
#define CHECK_CONDITION ASSERT
#if OS(DARWIN)
-class Span;
+struct Span;
class TCMalloc_Central_FreeListPadded;
class TCMalloc_PageHeap;
class TCMalloc_ThreadCache;
// -------------------------------------------------------------------------
#if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
-// The central page heap collects spans of memory that have been deleted but are still committed until they are released
-// back to the system. We use a background thread to periodically scan the list of free spans and release some back to the
-// system. Every 5 seconds, the background thread wakes up and does the following:
-// - Check if we needed to commit memory in the last 5 seconds. If so, skip this scavenge because it's a sign that we are short
-// of free committed pages and so we should not release them back to the system yet.
-// - Otherwise, go through the list of free spans (from largest to smallest) and release up to a fraction of the free committed pages
-// back to the system.
-// - If the number of free committed pages reaches kMinimumFreeCommittedPageCount, we can stop the scavenging and block the
-// scavenging thread until the number of free committed pages goes above kMinimumFreeCommittedPageCount.
-
-// Background thread wakes up every 5 seconds to scavenge as long as there is memory available to return to the system.
-static const int kScavengeTimerDelayInSeconds = 5;
-
-// Number of free committed pages that we want to keep around.
-static const size_t kMinimumFreeCommittedPageCount = 512;
-
-// During a scavenge, we'll release up to a fraction of the free committed pages.
-#if OS(WINDOWS)
-// We are slightly less aggressive in releasing memory on Windows due to performance reasons.
-static const int kMaxScavengeAmountFactor = 3;
-#else
-static const int kMaxScavengeAmountFactor = 2;
-#endif
+// The page heap maintains a free list for spans that are no longer in use by
+// the central cache or any thread caches. We use a background thread to
+// periodically scan the free list and release a percentage of it back to the OS.
+
+// If free_committed_pages_ exceeds kMinimumFreeCommittedPageCount, the
+// background thread:
+// - wakes up
+// - pauses for kScavengeDelayInSeconds
+// - returns to the OS a percentage of the memory that remained unused during
+// that pause (kScavengePercentage * min_free_committed_pages_since_last_scavenge_)
+// The goal of this strategy is to reduce memory pressure in a timely fashion
+// while avoiding thrashing the OS allocator.
+
+// Time delay before the page heap scavenger will consider returning pages to
+// the OS.
+static const int kScavengeDelayInSeconds = 2;
+
+// Approximate percentage of free committed pages to return to the OS in one
+// scavenge.
+static const float kScavengePercentage = .5f;
+
+// Number of span lists to keep spans in when memory is returned.
+static const int kMinSpanListsWithSpans = 32;
+
+// Number of free committed pages that we want to keep around. This is the number of pages
+// used when there is one span in each of the first kMinSpanListsWithSpans span lists
+// (1 + 2 + ... + 32 = 528 pages).
+static const size_t kMinimumFreeCommittedPageCount = kMinSpanListsWithSpans * ((1.0f+kMinSpanListsWithSpans) / 2.0f);
+
#endif
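To make the scavenging policy above concrete, here is a small standalone sketch of the target computation performed in TCMalloc_PageHeap::scavenge(); the page counts are hypothetical:

#include <algorithm>
#include <cstddef>
#include <cstdio>

int main()
{
    const std::size_t kMinimumFreeCommittedPageCount = 528; // 1 + 2 + ... + 32
    const float kScavengePercentage = .5f;

    std::size_t freeCommittedPages = 3000;       // hypothetical current value
    std::size_t minFreeSinceLastScavenge = 2000; // hypothetical low watermark

    // Release half of the pages that stayed free for the whole interval,
    // but never drop below the minimum we want to keep committed.
    std::size_t pagesToRelease = minFreeSinceLastScavenge * kScavengePercentage;
    std::size_t targetPageCount = std::max(kMinimumFreeCommittedPageCount, freeCommittedPages - pagesToRelease);
    std::printf("scavenge until %zu committed pages remain\n", targetPageCount); // 2000
    return 0;
}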
class TCMalloc_PageHeap {
// Number of pages kept in free lists that are still committed.
Length free_committed_pages_;
- // Number of pages that we committed in the last scavenge wait interval.
- Length pages_committed_since_last_scavenge_;
+ // Minimum number of free committed pages since last scavenge. (Can be 0 if
+ // we've committed new pages since the last scavenge.)
+ Length min_free_committed_pages_since_last_scavenge_;
#endif
bool GrowHeap(Length n);
void initializeScavenger();
ALWAYS_INLINE void signalScavenger();
void scavenge();
- ALWAYS_INLINE bool shouldContinueScavenging() const;
+ ALWAYS_INLINE bool shouldScavenge() const;
#if !HAVE(DISPATCH_H)
- static NO_RETURN void* runScavengerThread(void*);
+ static NO_RETURN_WITH_VALUE void* runScavengerThread(void*);
NO_RETURN void scavengerThread();
- // Keeps track of whether the background thread is actively scavenging memory every kScavengeTimerDelayInSeconds, or
+ // Keeps track of whether the background thread is actively scavenging memory every kScavengeDelayInSeconds, or
// it's blocked waiting for more pages to be deleted.
bool m_scavengeThreadActive;
#if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
free_committed_pages_ = 0;
- pages_committed_since_last_scavenge_ = 0;
+ min_free_committed_pages_since_last_scavenge_ = 0;
#endif // USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
scavenge_counter_ = 0;
ALWAYS_INLINE void TCMalloc_PageHeap::signalScavenger()
{
- if (!m_scavengeThreadActive && shouldContinueScavenging())
+ if (!m_scavengeThreadActive && shouldScavenge())
pthread_cond_signal(&m_scavengeCondition);
}
{
m_scavengeQueue = dispatch_queue_create("com.apple.JavaScriptCore.FastMallocSavenger", NULL);
m_scavengeTimer = dispatch_source_create(DISPATCH_SOURCE_TYPE_TIMER, 0, 0, m_scavengeQueue);
- dispatch_time_t startTime = dispatch_time(DISPATCH_TIME_NOW, kScavengeTimerDelayInSeconds * NSEC_PER_SEC);
- dispatch_source_set_timer(m_scavengeTimer, startTime, kScavengeTimerDelayInSeconds * NSEC_PER_SEC, 1000 * NSEC_PER_USEC);
+ dispatch_time_t startTime = dispatch_time(DISPATCH_TIME_NOW, kScavengeDelayInSeconds * NSEC_PER_SEC);
+ dispatch_source_set_timer(m_scavengeTimer, startTime, kScavengeDelayInSeconds * NSEC_PER_SEC, 1000 * NSEC_PER_USEC);
dispatch_source_set_event_handler(m_scavengeTimer, ^{ periodicScavenge(); });
m_scavengingScheduled = false;
}
ALWAYS_INLINE void TCMalloc_PageHeap::signalScavenger()
{
- if (!m_scavengingScheduled && shouldContinueScavenging()) {
+ if (!m_scavengingScheduled && shouldScavenge()) {
m_scavengingScheduled = true;
dispatch_resume(m_scavengeTimer);
}
#endif
-void TCMalloc_PageHeap::scavenge()
+void TCMalloc_PageHeap::scavenge()
{
- // If we have to commit memory in the last 5 seconds, it means we don't have enough free committed pages
- // for the amount of allocations that we do. So hold off on releasing memory back to the system.
- if (pages_committed_since_last_scavenge_ > 0) {
- pages_committed_since_last_scavenge_ = 0;
- return;
- }
- Length pagesDecommitted = 0;
- for (int i = kMaxPages; i >= 0; i--) {
- SpanList* slist = (static_cast<size_t>(i) == kMaxPages) ? &large_ : &free_[i];
- if (!DLL_IsEmpty(&slist->normal)) {
- // Release the last span on the normal portion of this list
- Span* s = slist->normal.prev;
- // Only decommit up to a fraction of the free committed pages if pages_allocated_since_last_scavenge_ > 0.
- if ((pagesDecommitted + s->length) * kMaxScavengeAmountFactor > free_committed_pages_)
- continue;
- DLL_Remove(s);
- TCMalloc_SystemRelease(reinterpret_cast<void*>(s->start << kPageShift),
- static_cast<size_t>(s->length << kPageShift));
- if (!s->decommitted) {
- pagesDecommitted += s->length;
- s->decommitted = true;
+ size_t pagesToRelease = min_free_committed_pages_since_last_scavenge_ * kScavengePercentage;
+ size_t targetPageCount = std::max<size_t>(kMinimumFreeCommittedPageCount, free_committed_pages_ - pagesToRelease);
+
+ while (free_committed_pages_ > targetPageCount) {
+ for (int i = kMaxPages; i > 0 && free_committed_pages_ >= targetPageCount; i--) {
+ SpanList* slist = (static_cast<size_t>(i) == kMaxPages) ? &large_ : &free_[i];
+ // If the span size is bigger than kMinSpanListsWithSpans pages, return all the spans in the list;
+ // otherwise return only 50% of the list at a time so that spans of size 1 are not the only ones left.
+ size_t numSpansToReturn = (i > kMinSpanListsWithSpans) ? DLL_Length(&slist->normal) : static_cast<size_t>(.5 * DLL_Length(&slist->normal));
+ for (int j = 0; static_cast<size_t>(j) < numSpansToReturn && !DLL_IsEmpty(&slist->normal) && free_committed_pages_ > targetPageCount; j++) {
+ Span* s = slist->normal.prev;
+ DLL_Remove(s);
+ ASSERT(!s->decommitted);
+ if (!s->decommitted) {
+ TCMalloc_SystemRelease(reinterpret_cast<void*>(s->start << kPageShift),
+ static_cast<size_t>(s->length << kPageShift));
+ ASSERT(free_committed_pages_ >= s->length);
+ free_committed_pages_ -= s->length;
+ s->decommitted = true;
+ }
+ DLL_Prepend(&slist->returned, s);
}
- DLL_Prepend(&slist->returned, s);
- // We can stop scavenging if the number of free committed pages left is less than or equal to the minimum number we want to keep around.
- if (free_committed_pages_ <= kMinimumFreeCommittedPageCount + pagesDecommitted)
- break;
}
}
- pages_committed_since_last_scavenge_ = 0;
- ASSERT(free_committed_pages_ >= pagesDecommitted);
- free_committed_pages_ -= pagesDecommitted;
+
+ min_free_committed_pages_since_last_scavenge_ = free_committed_pages_;
}
-ALWAYS_INLINE bool TCMalloc_PageHeap::shouldContinueScavenging() const
+ALWAYS_INLINE bool TCMalloc_PageHeap::shouldScavenge() const
{
return free_committed_pages_ > kMinimumFreeCommittedPageCount;
}
Span* result = ll->next;
Carve(result, n, released);
- if (result->decommitted) {
- TCMalloc_SystemCommit(reinterpret_cast<void*>(result->start << kPageShift), static_cast<size_t>(n << kPageShift));
- result->decommitted = false;
#if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
- pages_committed_since_last_scavenge_ += n;
-#endif
- }
-#if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
- else {
- // The newly allocated memory is from a span that's in the normal span list (already committed). Update the
- // free committed pages count.
- ASSERT(free_committed_pages_ >= n);
- free_committed_pages_ -= n;
- }
+ // The newly allocated memory is from a span that's in the normal span list (already committed). Update the
+ // free committed pages count.
+ ASSERT(free_committed_pages_ >= n);
+ free_committed_pages_ -= n;
+ if (free_committed_pages_ < min_free_committed_pages_since_last_scavenge_)
+ min_free_committed_pages_since_last_scavenge_ = free_committed_pages_;
#endif // USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
ASSERT(Check());
free_pages_ -= n;
if (best != NULL) {
Carve(best, n, from_released);
- if (best->decommitted) {
- TCMalloc_SystemCommit(reinterpret_cast<void*>(best->start << kPageShift), static_cast<size_t>(n << kPageShift));
- best->decommitted = false;
#if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
- pages_committed_since_last_scavenge_ += n;
-#endif
- }
-#if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
- else {
- // The newly allocated memory is from a span that's in the normal span list (already committed). Update the
- // free committed pages count.
- ASSERT(free_committed_pages_ >= n);
- free_committed_pages_ -= n;
- }
+ // The newly allocated memory is from a span that's in the normal span list (already committed). Update the
+ // free committed pages count.
+ ASSERT(free_committed_pages_ >= n);
+ free_committed_pages_ -= n;
+ if (free_committed_pages_ < min_free_committed_pages_since_last_scavenge_)
+ min_free_committed_pages_since_last_scavenge_ = free_committed_pages_;
#endif // USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
ASSERT(Check());
free_pages_ -= n;
return leftover;
}
-static ALWAYS_INLINE void propagateDecommittedState(Span* destination, Span* source)
-{
- destination->decommitted = source->decommitted;
-}
-
inline void TCMalloc_PageHeap::Carve(Span* span, Length n, bool released) {
ASSERT(n > 0);
DLL_Remove(span);
span->free = 0;
Event(span, 'A', n);
+ if (released) {
+ // If the span chosen to carve from is decommitted, commit the entire span at once to avoid committing spans 1 page at a time.
+ ASSERT(span->decommitted);
+ TCMalloc_SystemCommit(reinterpret_cast<void*>(span->start << kPageShift), static_cast<size_t>(span->length << kPageShift));
+ span->decommitted = false;
+#if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
+ free_committed_pages_ += span->length;
+#endif
+ }
+
const int extra = static_cast<int>(span->length - n);
ASSERT(extra >= 0);
if (extra > 0) {
Span* leftover = NewSpan(span->start + n, extra);
leftover->free = 1;
- propagateDecommittedState(leftover, span);
+ leftover->decommitted = false;
Event(leftover, 'S', extra);
RecordSpan(leftover);
// Place leftover span on appropriate free list
SpanList* listpair = (static_cast<size_t>(extra) < kMaxPages) ? &free_[extra] : &large_;
- Span* dst = released ? &listpair->returned : &listpair->normal;
+ Span* dst = &listpair->normal;
DLL_Prepend(dst, leftover);
span->length = n;
// If the merged span is decommitted, that means we decommitted any neighboring spans that were
// committed. Update the free committed pages count.
free_committed_pages_ -= neighboringCommittedSpansLength;
+ if (free_committed_pages_ < min_free_committed_pages_since_last_scavenge_)
+ min_free_committed_pages_since_last_scavenge_ = free_committed_pages_;
} else {
// If the merged span remains committed, add the deleted span's size to the free committed pages count.
free_committed_pages_ += n;
}
ask = actual_size >> kPageShift;
-#if USE_BACKGROUND_THREAD_TO_SCAVENGE_MEMORY
- pages_committed_since_last_scavenge_ += ask;
-#endif
-
uint64_t old_system_bytes = system_bytes_;
system_bytes_ += (ask << kPageShift);
const PageID p = reinterpret_cast<uintptr_t>(ptr) >> kPageShift;
// Total byte size in cache
size_t Size() const { return size_; }
- void* Allocate(size_t size);
+ ALWAYS_INLINE void* Allocate(size_t size);
void Deallocate(void* ptr, size_t size_class);
- void FetchFromCentralCache(size_t cl, size_t allocationSize);
+ ALWAYS_INLINE void FetchFromCentralCache(size_t cl, size_t allocationSize);
void ReleaseToCentralCache(size_t cl, int N);
void Scavenge();
void Print() const;
// REQUIRES: lock_ is held
// Release an object to spans.
// May temporarily release lock_.
- void ReleaseToSpans(void* object);
+ ALWAYS_INLINE void ReleaseToSpans(void* object);
// REQUIRES: lock_ is held
// Populate cache by fetching from the page heap.
// May temporarily release lock_.
- void Populate();
+ ALWAYS_INLINE void Populate();
// REQUIRES: lock is held.
// Tries to make room for a TCEntry. If the cache is full it will try to
// just iterates over the sizeclasses but does so without taking a lock.
// Returns true on success.
// May temporarily lock a "random" size class.
- static bool EvictRandomSizeClass(size_t locked_size_class, bool force);
+ static ALWAYS_INLINE bool EvictRandomSizeClass(size_t locked_size_class, bool force);
// REQUIRES: lock_ is *not* held.
// Tries to shrink the Cache. If force is true it will release objects to
#if PLATFORM(ARM)
static void* pageheap_memory[(sizeof(TCMalloc_PageHeap) + sizeof(void*) - 1) / sizeof(void*)] __attribute__((aligned));
#else
-static void* pageheap_memory[(sizeof(TCMalloc_PageHeap) + sizeof(void*) - 1) / sizeof(void*)];
+static AllocAlignmentInteger pageheap_memory[(sizeof(TCMalloc_PageHeap) + sizeof(AllocAlignmentInteger) - 1) / sizeof(AllocAlignmentInteger)];
#endif
static bool phinited = false;
#endif
while (1) {
- if (!shouldContinueScavenging()) {
+ if (!shouldScavenge()) {
pthread_mutex_lock(&m_scavengeMutex);
m_scavengeThreadActive = false;
- // Block until there are enough freed pages to release back to the system.
+ // Block until there are enough free committed pages to release back to the system.
pthread_cond_wait(&m_scavengeCondition, &m_scavengeMutex);
m_scavengeThreadActive = true;
pthread_mutex_unlock(&m_scavengeMutex);
}
- sleep(kScavengeTimerDelayInSeconds);
+ sleep(kScavengeDelayInSeconds);
{
SpinLockHolder h(&pageheap_lock);
pageheap->scavenge();
pageheap->scavenge();
}
- if (!shouldContinueScavenging()) {
+ if (!shouldScavenge()) {
m_scavengingScheduled = false;
dispatch_suspend(m_scavengeTimer);
}
// Therefore, we use TSD keys only after tsd_inited is set to true.
// Until then, we use a slow path to get the heap object.
static bool tsd_inited = false;
+#if USE(PTHREAD_GETSPECIFIC_DIRECT)
+static pthread_key_t heap_key = __PTK_FRAMEWORK_JAVASCRIPTCORE_KEY0;
+#else
static pthread_key_t heap_key;
+#endif
#if COMPILER(MSVC)
DWORD tlsIndex = TLS_OUT_OF_INDEXES;
#endif
void TCMalloc_ThreadCache::InitTSD() {
ASSERT(!tsd_inited);
+#if USE(PTHREAD_GETSPECIFIC_DIRECT)
+ pthread_key_init_np(heap_key, DestroyThreadCache);
+#else
pthread_key_create(&heap_key, DestroyThreadCache);
+#endif
#if COMPILER(MSVC)
tlsIndex = TlsAlloc();
#endif
#define do_malloc do_malloc<crashOnFailure>
template <bool crashOnFailure>
-void* malloc(size_t);
+ALWAYS_INLINE void* malloc(size_t);
void* fastMalloc(size_t size)
{
extern "C"
#else
template <bool crashOnFailure>
-void* calloc(size_t, size_t);
+ALWAYS_INLINE void* calloc(size_t, size_t);
void* fastCalloc(size_t n, size_t elem_size)
{
extern "C"
#else
template <bool crashOnFailure>
-void* realloc(void*, size_t);
+ALWAYS_INLINE void* realloc(void*, size_t);
void* fastRealloc(void* old_ptr, size_t new_size)
{
}
}
+#if ENABLE(GLOBAL_FASTMALLOC_NEW)
+
void* operator new(size_t size) {
void* p = cpp_alloc(size, false);
// We keep this next instruction out of cpp_alloc for a reason: when
do_free(p);
}
+#endif
+
extern "C" void* memalign(size_t align, size_t size) __THROW {
void* result = do_memalign(align, size);
MallocHook::InvokeNewHook(result, size);
#endif
-#if defined(WTF_CHANGES) && OS(DARWIN)
+#ifdef WTF_CHANGES
+void releaseFastMallocFreeMemory()
+{
+ // Flush free pages in the current thread cache back to the page heap.
+ // Low watermark mechanism in Scavenge() prevents full return on the first pass.
+ // The second pass flushes everything.
+ if (TCMalloc_ThreadCache* threadCache = TCMalloc_ThreadCache::GetCacheIfPresent()) {
+ threadCache->Scavenge();
+ threadCache->Scavenge();
+ }
+
+ SpinLockHolder h(&pageheap_lock);
+ pageheap->ReleaseFreePages();
+}
+
+FastMallocStatistics fastMallocStatistics()
+{
+ FastMallocStatistics statistics;
+
+ SpinLockHolder lockHolder(&pageheap_lock);
+ statistics.reservedVMBytes = static_cast<size_t>(pageheap->SystemBytes());
+ statistics.committedVMBytes = statistics.reservedVMBytes - pageheap->ReturnedBytes();
+
+ statistics.freeListBytes = 0;
+ for (unsigned cl = 0; cl < kNumClasses; ++cl) {
+ const int length = central_cache[cl].length();
+ const int tc_length = central_cache[cl].tc_length();
+
+ statistics.freeListBytes += ByteSizeForClass(cl) * (length + tc_length);
+ }
+ for (TCMalloc_ThreadCache* threadCache = thread_heaps; threadCache ; threadCache = threadCache->next_)
+ statistics.freeListBytes += threadCache->Size();
+
+ return statistics;
+}
+
+size_t fastMallocSize(const void* ptr)
+{
+ const PageID p = reinterpret_cast<uintptr_t>(ptr) >> kPageShift;
+ Span* span = pageheap->GetDescriptorEnsureSafe(p);
+
+ if (!span || span->free)
+ return 0;
+
+ for (void* free = span->objects; free != NULL; free = *((void**) free)) {
+ if (ptr == free)
+ return 0;
+ }
+
+ if (size_t cl = span->sizeclass)
+ return ByteSizeForClass(cl);
+
+ return span->length << kPageShift;
+}
+
+#if OS(DARWIN)
class FreeObjectFinder {
const RemoteMemoryReader& m_reader;
malloc_introspection_t jscore_fastmalloc_introspection = { &FastMallocZone::enumerate, &FastMallocZone::goodSize, &FastMallocZone::check, &FastMallocZone::print,
&FastMallocZone::log, &FastMallocZone::forceLock, &FastMallocZone::forceUnlock, &FastMallocZone::statistics
-#if !defined(BUILDING_ON_TIGER) && !defined(BUILDING_ON_LEOPARD) || PLATFORM(IPHONE)
, 0 // zone_locked will not be called on the zone unless it advertises itself as version five or higher.
-#endif
-#if !defined(BUILDING_ON_TIGER) && !defined(BUILDING_ON_LEOPARD) && !defined(BUILDING_ON_SNOW_LEOPARD) && !OS(IPHONE_OS)
- , 0, 0, 0, 0 // These members will not be used unless the zone advertises itself as version seven or higher.
-#endif
};
}
static FastMallocZone zone(pageheap, &thread_heaps, static_cast<TCMalloc_Central_FreeListPadded*>(central_cache), &span_allocator, &threadheap_allocator);
}
-#endif
-
-#if WTF_CHANGES
-void releaseFastMallocFreeMemory()
-{
- // Flush free pages in the current thread cache back to the page heap.
- // Low watermark mechanism in Scavenge() prevents full return on the first pass.
- // The second pass flushes everything.
- if (TCMalloc_ThreadCache* threadCache = TCMalloc_ThreadCache::GetCacheIfPresent()) {
- threadCache->Scavenge();
- threadCache->Scavenge();
- }
-
- SpinLockHolder h(&pageheap_lock);
- pageheap->ReleaseFreePages();
-}
-
-FastMallocStatistics fastMallocStatistics()
-{
- FastMallocStatistics statistics;
- {
- SpinLockHolder lockHolder(&pageheap_lock);
- statistics.heapSize = static_cast<size_t>(pageheap->SystemBytes());
- statistics.freeSizeInHeap = static_cast<size_t>(pageheap->FreeBytes());
- statistics.returnedSize = pageheap->ReturnedBytes();
- statistics.freeSizeInCaches = 0;
- for (TCMalloc_ThreadCache* threadCache = thread_heaps; threadCache ; threadCache = threadCache->next_)
- statistics.freeSizeInCaches += threadCache->Size();
- }
- for (unsigned cl = 0; cl < kNumClasses; ++cl) {
- const int length = central_cache[cl].length();
- const int tc_length = central_cache[cl].tc_length();
- statistics.freeSizeInCaches += ByteSizeForClass(cl) * (length + tc_length);
- }
- return statistics;
-}
+#endif // OS(DARWIN)
} // namespace WTF
-#endif
+#endif // WTF_CHANGES
#endif // FORCE_SYSTEM_MALLOC
void* fastCalloc(size_t numElements, size_t elementSize);
void* fastRealloc(void*, size_t);
char* fastStrDup(const char*);
+ size_t fastMallocSize(const void*);
struct TryMallocReturnValue {
TryMallocReturnValue(void* data)
void releaseFastMallocFreeMemory();
struct FastMallocStatistics {
- size_t heapSize;
- size_t freeSizeInHeap;
- size_t freeSizeInCaches;
- size_t returnedSize;
+ size_t reservedVMBytes;
+ size_t committedVMBytes;
+ size_t freeListBytes;
};
FastMallocStatistics fastMallocStatistics();
} // namespace WTF
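For reference, a hypothetical snapshot of the renamed statistics fields (the include path is assumed to be the usual <wtf/FastMalloc.h>):

#include <cstdio>
#include <wtf/FastMalloc.h>

static void logFastMallocStatistics()
{
    WTF::FastMallocStatistics stats = WTF::fastMallocStatistics();
    std::printf("reserved VM:  %zu bytes\n", stats.reservedVMBytes);
    std::printf("committed VM: %zu bytes\n", stats.committedVMBytes);
    std::printf("free lists:   %zu bytes\n", stats.freeListBytes);
}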
-using WTF::fastMalloc;
-using WTF::fastZeroedMalloc;
using WTF::fastCalloc;
+using WTF::fastFree;
+using WTF::fastMalloc;
+using WTF::fastMallocSize;
using WTF::fastRealloc;
-using WTF::tryFastMalloc;
-using WTF::tryFastZeroedMalloc;
+using WTF::fastStrDup;
+using WTF::fastZeroedMalloc;
using WTF::tryFastCalloc;
+using WTF::tryFastMalloc;
using WTF::tryFastRealloc;
-using WTF::fastFree;
-using WTF::fastStrDup;
+using WTF::tryFastZeroedMalloc;
#ifndef NDEBUG
using WTF::fastMallocForbid;
// debug-only code to make sure we don't use the system malloc via the default operator
// new by accident.
-// We musn't customize the global operator new and delete for the Qt port.
-#if !PLATFORM(QT)
+#if ENABLE(GLOBAL_FASTMALLOC_NEW)
#if COMPILER(MSVC)
#pragma warning(push)
int capacity() const;
bool isEmpty() const;
- // iterators iterate over pairs of values and counts
+ // Iterators iterate over pairs of values and counts.
iterator begin();
iterator end();
const_iterator begin() const;
bool contains(const ValueType&) const;
unsigned count(const ValueType&) const;
- // increases the count if an equal value is already present
- // the return value is a pair of an interator to the new value's location,
- // and a bool that is true if an new entry was added
+ // Increases the count if an equal value is already present.
+ // The return value is a pair of an iterator to the new value's
+ // location, and a bool that is true if a new entry was added.
std::pair<iterator, bool> add(const ValueType&);
- // reduces the count of the value, and removes it if count
- // goes down to zero
- void remove(const ValueType&);
- void remove(iterator);
+ // Reduces the count of the value, and removes it if the count
+ // goes down to zero; returns true if the value is removed.
+ bool remove(const ValueType&);
+ bool remove(iterator);
- // removes the value, regardless of its count
+ // Removes the value, regardless of its count.
void removeAll(iterator);
void removeAll(const ValueType&);
- // clears the whole set
+ // Clears the whole set.
void clear();
private:
}
template<typename Value, typename HashFunctions, typename Traits>
- inline void HashCountedSet<Value, HashFunctions, Traits>::remove(const ValueType& value)
+ inline bool HashCountedSet<Value, HashFunctions, Traits>::remove(const ValueType& value)
{
- remove(find(value));
+ return remove(find(value));
}
template<typename Value, typename HashFunctions, typename Traits>
- inline void HashCountedSet<Value, HashFunctions, Traits>::remove(iterator it)
+ inline bool HashCountedSet<Value, HashFunctions, Traits>::remove(iterator it)
{
if (it == end())
- return;
+ return false;
unsigned oldVal = it->second;
- ASSERT(oldVal != 0);
+ ASSERT(oldVal);
unsigned newVal = oldVal - 1;
- if (newVal == 0)
- m_impl.remove(it);
- else
+ if (newVal) {
it->second = newVal;
+ return false;
+ }
+
+ m_impl.remove(it);
+ return true;
}
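A short illustrative sketch of the new return value (this assumes the WTF headers are on the include path; the values are arbitrary):

#include <wtf/HashCountedSet.h>

static void hashCountedSetExample()
{
    WTF::HashCountedSet<int> set;
    set.add(1);                   // count for 1 becomes 1
    set.add(1);                   // count for 1 becomes 2
    bool removed = set.remove(1); // count drops to 1; returns false
    removed = set.remove(1);      // count reaches 0; entry removed; returns true
    (void)removed;
}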
template<typename Value, typename HashFunctions, typename Traits>
static unsigned hash(const T& key) { return Translator::hash(key); }
static bool equal(const KeyType& a, const T& b) { return Translator::equal(a, b); }
- static void translate(ValueType& location, const T& key, const MappedType&, unsigned hashCode)
+ static void translate(ValueType& location, const T& key, const MappedType& mapped, unsigned hashCode)
{
Translator::translate(location.first, key, hashCode);
+ location.second = mapped;
}
};
// and an append that moves the element to the end even if already present,
// but unclear yet if these are needed.
- template<typename Value, typename HashFunctions> class ListHashSet;
+ template<typename Value, size_t inlineCapacity, typename HashFunctions> class ListHashSet;
template<typename T> struct IdentityExtractor;
- template<typename Value, typename HashFunctions>
- void deleteAllValues(const ListHashSet<Value, HashFunctions>&);
+ template<typename Value, size_t inlineCapacity, typename HashFunctions>
+ void deleteAllValues(const ListHashSet<Value, inlineCapacity, HashFunctions>&);
- template<typename ValueArg, typename HashArg> class ListHashSetIterator;
- template<typename ValueArg, typename HashArg> class ListHashSetConstIterator;
+ template<typename ValueArg, size_t inlineCapacity, typename HashArg> class ListHashSetIterator;
+ template<typename ValueArg, size_t inlineCapacity, typename HashArg> class ListHashSetConstIterator;
- template<typename ValueArg> struct ListHashSetNode;
- template<typename ValueArg> struct ListHashSetNodeAllocator;
- template<typename ValueArg, typename HashArg> struct ListHashSetNodeHashFunctions;
+ template<typename ValueArg, size_t inlineCapacity> struct ListHashSetNode;
+ template<typename ValueArg, size_t inlineCapacity> struct ListHashSetNodeAllocator;
+ template<typename ValueArg, size_t inlineCapacity, typename HashArg> struct ListHashSetNodeHashFunctions;
- template<typename ValueArg, typename HashArg = typename DefaultHash<ValueArg>::Hash> class ListHashSet : public FastAllocBase {
+ template<typename ValueArg, size_t inlineCapacity = 256, typename HashArg = typename DefaultHash<ValueArg>::Hash> class ListHashSet : public FastAllocBase {
private:
- typedef ListHashSetNode<ValueArg> Node;
- typedef ListHashSetNodeAllocator<ValueArg> NodeAllocator;
+ typedef ListHashSetNode<ValueArg, inlineCapacity> Node;
+ typedef ListHashSetNodeAllocator<ValueArg, inlineCapacity> NodeAllocator;
typedef HashTraits<Node*> NodeTraits;
- typedef ListHashSetNodeHashFunctions<ValueArg, HashArg> NodeHash;
+ typedef ListHashSetNodeHashFunctions<ValueArg, inlineCapacity, HashArg> NodeHash;
typedef HashTable<Node*, Node*, IdentityExtractor<Node*>, NodeHash, NodeTraits, NodeTraits> ImplType;
typedef HashTableIterator<Node*, Node*, IdentityExtractor<Node*>, NodeHash, NodeTraits, NodeTraits> ImplTypeIterator;
public:
typedef ValueArg ValueType;
- typedef ListHashSetIterator<ValueType, HashArg> iterator;
- typedef ListHashSetConstIterator<ValueType, HashArg> const_iterator;
+ typedef ListHashSetIterator<ValueType, inlineCapacity, HashArg> iterator;
+ typedef ListHashSetConstIterator<ValueType, inlineCapacity, HashArg> const_iterator;
- friend class ListHashSetConstIterator<ValueType, HashArg>;
+ friend class ListHashSetConstIterator<ValueType, inlineCapacity, HashArg>;
ListHashSet();
ListHashSet(const ListHashSet&);
OwnPtr<NodeAllocator> m_allocator;
};
- template<typename ValueArg> struct ListHashSetNodeAllocator {
- typedef ListHashSetNode<ValueArg> Node;
- typedef ListHashSetNodeAllocator<ValueArg> NodeAllocator;
+ template<typename ValueArg, size_t inlineCapacity> struct ListHashSetNodeAllocator {
+ typedef ListHashSetNode<ValueArg, inlineCapacity> Node;
+ typedef ListHashSetNodeAllocator<ValueArg, inlineCapacity> NodeAllocator;
ListHashSetNodeAllocator()
: m_freeList(pool())
Node* m_freeList;
bool m_isDoneWithInitialFreeList;
- static const size_t m_poolSize = 256;
+ static const size_t m_poolSize = inlineCapacity;
union {
char pool[sizeof(Node) * m_poolSize];
double forAlignment;
} m_pool;
};
- template<typename ValueArg> struct ListHashSetNode {
- typedef ListHashSetNodeAllocator<ValueArg> NodeAllocator;
+ template<typename ValueArg, size_t inlineCapacity> struct ListHashSetNode {
+ typedef ListHashSetNodeAllocator<ValueArg, inlineCapacity> NodeAllocator;
ListHashSetNode(ValueArg value)
: m_value(value)
#endif
};
- template<typename ValueArg, typename HashArg> struct ListHashSetNodeHashFunctions {
- typedef ListHashSetNode<ValueArg> Node;
+ template<typename ValueArg, size_t inlineCapacity, typename HashArg> struct ListHashSetNodeHashFunctions {
+ typedef ListHashSetNode<ValueArg, inlineCapacity> Node;
static unsigned hash(Node* const& key) { return HashArg::hash(key->m_value); }
static bool equal(Node* const& a, Node* const& b) { return HashArg::equal(a->m_value, b->m_value); }
static const bool safeToCompareToEmptyOrDeleted = false;
};
- template<typename ValueArg, typename HashArg> class ListHashSetIterator {
+ template<typename ValueArg, size_t inlineCapacity, typename HashArg> class ListHashSetIterator {
private:
- typedef ListHashSet<ValueArg, HashArg> ListHashSetType;
- typedef ListHashSetIterator<ValueArg, HashArg> iterator;
- typedef ListHashSetConstIterator<ValueArg, HashArg> const_iterator;
- typedef ListHashSetNode<ValueArg> Node;
+ typedef ListHashSet<ValueArg, inlineCapacity, HashArg> ListHashSetType;
+ typedef ListHashSetIterator<ValueArg, inlineCapacity, HashArg> iterator;
+ typedef ListHashSetConstIterator<ValueArg, inlineCapacity, HashArg> const_iterator;
+ typedef ListHashSetNode<ValueArg, inlineCapacity> Node;
typedef ValueArg ValueType;
typedef ValueType& ReferenceType;
typedef ValueType* PointerType;
- friend class ListHashSet<ValueArg, HashArg>;
+ friend class ListHashSet<ValueArg, inlineCapacity, HashArg>;
ListHashSetIterator(const ListHashSetType* set, Node* position) : m_iterator(set, position) { }
const_iterator m_iterator;
};
- template<typename ValueArg, typename HashArg> class ListHashSetConstIterator {
+ template<typename ValueArg, size_t inlineCapacity, typename HashArg> class ListHashSetConstIterator {
private:
- typedef ListHashSet<ValueArg, HashArg> ListHashSetType;
- typedef ListHashSetIterator<ValueArg, HashArg> iterator;
- typedef ListHashSetConstIterator<ValueArg, HashArg> const_iterator;
- typedef ListHashSetNode<ValueArg> Node;
+ typedef ListHashSet<ValueArg, inlineCapacity, HashArg> ListHashSetType;
+ typedef ListHashSetIterator<ValueArg, inlineCapacity, HashArg> iterator;
+ typedef ListHashSetConstIterator<ValueArg, inlineCapacity, HashArg> const_iterator;
+ typedef ListHashSetNode<ValueArg, inlineCapacity> Node;
typedef ValueArg ValueType;
typedef const ValueType& ReferenceType;
typedef const ValueType* PointerType;
- friend class ListHashSet<ValueArg, HashArg>;
- friend class ListHashSetIterator<ValueArg, HashArg>;
+ friend class ListHashSet<ValueArg, inlineCapacity, HashArg>;
+ friend class ListHashSetIterator<ValueArg, inlineCapacity, HashArg>;
ListHashSetConstIterator(const ListHashSetType* set, Node* position)
: m_set(set)
};
- template<typename ValueType, typename HashFunctions>
+ template<typename ValueType, size_t inlineCapacity, typename HashFunctions>
struct ListHashSetTranslator {
private:
- typedef ListHashSetNode<ValueType> Node;
- typedef ListHashSetNodeAllocator<ValueType> NodeAllocator;
+ typedef ListHashSetNode<ValueType, inlineCapacity> Node;
+ typedef ListHashSetNodeAllocator<ValueType, inlineCapacity> NodeAllocator;
public:
static unsigned hash(const ValueType& key) { return HashFunctions::hash(key); }
static bool equal(Node* const& a, const ValueType& b) { return HashFunctions::equal(a->m_value, b); }
}
};
- template<typename T, typename U>
- inline ListHashSet<T, U>::ListHashSet()
+ template<typename T, size_t inlineCapacity, typename U>
+ inline ListHashSet<T, inlineCapacity, U>::ListHashSet()
: m_head(0)
, m_tail(0)
, m_allocator(new NodeAllocator)
{
}
- template<typename T, typename U>
- inline ListHashSet<T, U>::ListHashSet(const ListHashSet& other)
+ template<typename T, size_t inlineCapacity, typename U>
+ inline ListHashSet<T, inlineCapacity, U>::ListHashSet(const ListHashSet& other)
: m_head(0)
, m_tail(0)
, m_allocator(new NodeAllocator)
add(*it);
}
- template<typename T, typename U>
- inline ListHashSet<T, U>& ListHashSet<T, U>::operator=(const ListHashSet& other)
+ template<typename T, size_t inlineCapacity, typename U>
+ inline ListHashSet<T, inlineCapacity, U>& ListHashSet<T, inlineCapacity, U>::operator=(const ListHashSet& other)
{
ListHashSet tmp(other);
swap(tmp);
return *this;
}
- template<typename T, typename U>
- inline void ListHashSet<T, U>::swap(ListHashSet& other)
+ template<typename T, size_t inlineCapacity, typename U>
+ inline void ListHashSet<T, inlineCapacity, U>::swap(ListHashSet& other)
{
m_impl.swap(other.m_impl);
std::swap(m_head, other.m_head);
m_allocator.swap(other.m_allocator);
}
- template<typename T, typename U>
- inline ListHashSet<T, U>::~ListHashSet()
+ template<typename T, size_t inlineCapacity, typename U>
+ inline ListHashSet<T, inlineCapacity, U>::~ListHashSet()
{
deleteAllNodes();
}
- template<typename T, typename U>
- inline int ListHashSet<T, U>::size() const
+ template<typename T, size_t inlineCapacity, typename U>
+ inline int ListHashSet<T, inlineCapacity, U>::size() const
{
return m_impl.size();
}
- template<typename T, typename U>
- inline int ListHashSet<T, U>::capacity() const
+ template<typename T, size_t inlineCapacity, typename U>
+ inline int ListHashSet<T, inlineCapacity, U>::capacity() const
{
return m_impl.capacity();
}
- template<typename T, typename U>
- inline bool ListHashSet<T, U>::isEmpty() const
+ template<typename T, size_t inlineCapacity, typename U>
+ inline bool ListHashSet<T, inlineCapacity, U>::isEmpty() const
{
return m_impl.isEmpty();
}
- template<typename T, typename U>
- inline typename ListHashSet<T, U>::iterator ListHashSet<T, U>::begin()
+ template<typename T, size_t inlineCapacity, typename U>
+ inline typename ListHashSet<T, inlineCapacity, U>::iterator ListHashSet<T, inlineCapacity, U>::begin()
{
return makeIterator(m_head);
}
- template<typename T, typename U>
- inline typename ListHashSet<T, U>::iterator ListHashSet<T, U>::end()
+ template<typename T, size_t inlineCapacity, typename U>
+ inline typename ListHashSet<T, inlineCapacity, U>::iterator ListHashSet<T, inlineCapacity, U>::end()
{
return makeIterator(0);
}
- template<typename T, typename U>
- inline typename ListHashSet<T, U>::const_iterator ListHashSet<T, U>::begin() const
+ template<typename T, size_t inlineCapacity, typename U>
+ inline typename ListHashSet<T, inlineCapacity, U>::const_iterator ListHashSet<T, inlineCapacity, U>::begin() const
{
return makeConstIterator(m_head);
}
- template<typename T, typename U>
- inline typename ListHashSet<T, U>::const_iterator ListHashSet<T, U>::end() const
+ template<typename T, size_t inlineCapacity, typename U>
+ inline typename ListHashSet<T, inlineCapacity, U>::const_iterator ListHashSet<T, inlineCapacity, U>::end() const
{
return makeConstIterator(0);
}
- template<typename T, typename U>
- inline typename ListHashSet<T, U>::iterator ListHashSet<T, U>::find(const ValueType& value)
+ template<typename T, size_t inlineCapacity, typename U>
+ inline typename ListHashSet<T, inlineCapacity, U>::iterator ListHashSet<T, inlineCapacity, U>::find(const ValueType& value)
{
- typedef ListHashSetTranslator<ValueType, HashFunctions> Translator;
+ typedef ListHashSetTranslator<ValueType, inlineCapacity, HashFunctions> Translator;
ImplTypeIterator it = m_impl.template find<ValueType, Translator>(value);
if (it == m_impl.end())
return end();
return makeIterator(*it);
}
- template<typename T, typename U>
- inline typename ListHashSet<T, U>::const_iterator ListHashSet<T, U>::find(const ValueType& value) const
+ template<typename T, size_t inlineCapacity, typename U>
+ inline typename ListHashSet<T, inlineCapacity, U>::const_iterator ListHashSet<T, inlineCapacity, U>::find(const ValueType& value) const
{
- typedef ListHashSetTranslator<ValueType, HashFunctions> Translator;
+ typedef ListHashSetTranslator<ValueType, inlineCapacity, HashFunctions> Translator;
ImplTypeConstIterator it = m_impl.template find<ValueType, Translator>(value);
if (it == m_impl.end())
return end();
return makeConstIterator(*it);
}
- template<typename T, typename U>
- inline bool ListHashSet<T, U>::contains(const ValueType& value) const
+ template<typename T, size_t inlineCapacity, typename U>
+ inline bool ListHashSet<T, inlineCapacity, U>::contains(const ValueType& value) const
{
- typedef ListHashSetTranslator<ValueType, HashFunctions> Translator;
+ typedef ListHashSetTranslator<ValueType, inlineCapacity, HashFunctions> Translator;
return m_impl.template contains<ValueType, Translator>(value);
}
- template<typename T, typename U>
- pair<typename ListHashSet<T, U>::iterator, bool> ListHashSet<T, U>::add(const ValueType &value)
+ template<typename T, size_t inlineCapacity, typename U>
+ pair<typename ListHashSet<T, inlineCapacity, U>::iterator, bool> ListHashSet<T, inlineCapacity, U>::add(const ValueType &value)
{
- typedef ListHashSetTranslator<ValueType, HashFunctions> Translator;
+ typedef ListHashSetTranslator<ValueType, inlineCapacity, HashFunctions> Translator;
pair<typename ImplType::iterator, bool> result = m_impl.template add<ValueType, NodeAllocator*, Translator>(value, m_allocator.get());
if (result.second)
appendNode(*result.first);
return std::make_pair(makeIterator(*result.first), result.second);
}
- template<typename T, typename U>
- pair<typename ListHashSet<T, U>::iterator, bool> ListHashSet<T, U>::insertBefore(iterator it, const ValueType& newValue)
+ template<typename T, size_t inlineCapacity, typename U>
+ pair<typename ListHashSet<T, inlineCapacity, U>::iterator, bool> ListHashSet<T, inlineCapacity, U>::insertBefore(iterator it, const ValueType& newValue)
{
- typedef ListHashSetTranslator<ValueType, HashFunctions> Translator;
+ typedef ListHashSetTranslator<ValueType, inlineCapacity, HashFunctions> Translator;
pair<typename ImplType::iterator, bool> result = m_impl.template add<ValueType, NodeAllocator*, Translator>(newValue, m_allocator.get());
if (result.second)
insertNodeBefore(it.node(), *result.first);
}
- template<typename T, typename U>
- pair<typename ListHashSet<T, U>::iterator, bool> ListHashSet<T, U>::insertBefore(const ValueType& beforeValue, const ValueType& newValue)
+ template<typename T, size_t inlineCapacity, typename U>
+ pair<typename ListHashSet<T, inlineCapacity, U>::iterator, bool> ListHashSet<T, inlineCapacity, U>::insertBefore(const ValueType& beforeValue, const ValueType& newValue)
{
return insertBefore(find(beforeValue), newValue);
}
- template<typename T, typename U>
- inline void ListHashSet<T, U>::remove(iterator it)
+ template<typename T, size_t inlineCapacity, typename U>
+ inline void ListHashSet<T, inlineCapacity, U>::remove(iterator it)
{
if (it == end())
return;
unlinkAndDelete(it.node());
}
- template<typename T, typename U>
- inline void ListHashSet<T, U>::remove(const ValueType& value)
+ template<typename T, size_t inlineCapacity, typename U>
+ inline void ListHashSet<T, inlineCapacity, U>::remove(const ValueType& value)
{
remove(find(value));
}
- template<typename T, typename U>
- inline void ListHashSet<T, U>::clear()
+ template<typename T, size_t inlineCapacity, typename U>
+ inline void ListHashSet<T, inlineCapacity, U>::clear()
{
deleteAllNodes();
m_impl.clear();
m_tail = 0;
}
- template<typename T, typename U>
- void ListHashSet<T, U>::unlinkAndDelete(Node* node)
+ template<typename T, size_t inlineCapacity, typename U>
+ void ListHashSet<T, inlineCapacity, U>::unlinkAndDelete(Node* node)
{
if (!node->m_prev) {
ASSERT(node == m_head);
node->destroy(m_allocator.get());
}
- template<typename T, typename U>
- void ListHashSet<T, U>::appendNode(Node* node)
+ template<typename T, size_t inlineCapacity, typename U>
+ void ListHashSet<T, inlineCapacity, U>::appendNode(Node* node)
{
node->m_prev = m_tail;
node->m_next = 0;
m_tail = node;
}
- template<typename T, typename U>
- void ListHashSet<T, U>::insertNodeBefore(Node* beforeNode, Node* newNode)
+ template<typename T, size_t inlineCapacity, typename U>
+ void ListHashSet<T, inlineCapacity, U>::insertNodeBefore(Node* beforeNode, Node* newNode)
{
if (!beforeNode)
return appendNode(newNode);
m_head = newNode;
}
- template<typename T, typename U>
- void ListHashSet<T, U>::deleteAllNodes()
+ template<typename T, size_t inlineCapacity, typename U>
+ void ListHashSet<T, inlineCapacity, U>::deleteAllNodes()
{
if (!m_head)
return;
node->destroy(m_allocator.get());
}
- template<typename T, typename U>
- inline ListHashSetIterator<T, U> ListHashSet<T, U>::makeIterator(Node* position)
+ template<typename T, size_t inlineCapacity, typename U>
+ inline ListHashSetIterator<T, inlineCapacity, U> ListHashSet<T, inlineCapacity, U>::makeIterator(Node* position)
{
- return ListHashSetIterator<T, U>(this, position);
+ return ListHashSetIterator<T, inlineCapacity, U>(this, position);
}
- template<typename T, typename U>
- inline ListHashSetConstIterator<T, U> ListHashSet<T, U>::makeConstIterator(Node* position) const
+ template<typename T, size_t inlineCapacity, typename U>
+ inline ListHashSetConstIterator<T, inlineCapacity, U> ListHashSet<T, inlineCapacity, U>::makeConstIterator(Node* position) const
{
- return ListHashSetConstIterator<T, U>(this, position);
+ return ListHashSetConstIterator<T, inlineCapacity, U>(this, position);
}
template<bool, typename ValueType, typename HashTableType>
delete (*it)->m_value;
}
- template<typename T, typename U>
- inline void deleteAllValues(const ListHashSet<T, U>& collection)
+ template<typename T, size_t inlineCapacity, typename U>
+ inline void deleteAllValues(const ListHashSet<T, inlineCapacity, U>& collection)
{
- deleteAllValues<true, typename ListHashSet<T, U>::ValueType>(collection.m_impl);
+ deleteAllValues<true, typename ListHashSet<T, inlineCapacity, U>::ValueType>(collection.m_impl);
}
} // namespace WTF
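A hypothetical usage sketch of the new inlineCapacity template parameter (the second argument now sizes the inline node pool, defaulting to 256):

#include <wtf/ListHashSet.h>

static void listHashSetExample()
{
    WTF::ListHashSet<int> defaultPool;   // inline pool of 256 nodes
    WTF::ListHashSet<int, 16> smallPool; // inline pool of 16 nodes
    defaultPool.add(1);
    smallPool.add(2);
}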
--- /dev/null
+// The original file was copied from sqlite, and was in the public domain.
+// Modifications Copyright 2006 Google Inc. All Rights Reserved
+/*
+ * Copyright (C) 2010 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+/*
+ * This code implements the MD5 message-digest algorithm.
+ * The algorithm is due to Ron Rivest. This code was
+ * written by Colin Plumb in 1993, no copyright is claimed.
+ * This code is in the public domain; do with it what you wish.
+ *
+ * Equivalent code is available from RSA Data Security, Inc.
+ * This code has been tested against that, and is equivalent,
+ * except that you don't need to include two pages of legalese
+ * with every copy.
+ *
+ * To compute the message digest of a chunk of bytes, construct an
+ * MD5 instance, call addBytes as needed on buffers full of bytes,
+ * and then call checksum, which will fill a supplied 16-byte array
+ * with the digest.
+ */
+
+#include "config.h"
+#include "MD5.h"
+
+#include "Assertions.h"
+#ifndef NDEBUG
+#include "StringExtras.h"
+#include "text/CString.h"
+#endif
+
+namespace WTF {
+
+#ifdef NDEBUG
+static inline void testMD5() { }
+#else
+// MD5 test case.
+static bool isTestMD5Done;
+
+static void expectMD5(CString input, CString expected)
+{
+ MD5 md5;
+ md5.addBytes(reinterpret_cast<const uint8_t*>(input.data()), input.length());
+ Vector<uint8_t, 16> digest = md5.checksum();
+ char* buf = 0;
+ CString actual = CString::newUninitialized(32, buf);
+ for (size_t i = 0; i < 16; i++) {
+ snprintf(buf, 3, "%02x", digest.at(i));
+ buf += 2;
+ }
+ ASSERT_WITH_MESSAGE(actual == expected, "input:%s[%d] actual:%s expected:%s", input.data(), input.length(), actual.data(), expected.data());
+}
+
+static void testMD5()
+{
+ if (isTestMD5Done)
+ return;
+ isTestMD5Done = true;
+
+ // MD5 Test suite from http://www.ietf.org/rfc/rfc1321.txt
+ expectMD5("", "d41d8cd98f00b204e9800998ecf8427e");
+ expectMD5("a", "0cc175b9c0f1b6a831c399e269772661");
+ expectMD5("abc", "900150983cd24fb0d6963f7d28e17f72");
+ expectMD5("message digest", "f96b697d7cb7938d525a2f31aaf161d0");
+ expectMD5("abcdefghijklmnopqrstuvwxyz", "c3fcd3d76192e4007dfb496cca67e13b");
+ expectMD5("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789", "d174ab98d277d9f5a5611c2c9f419d9f");
+ expectMD5("12345678901234567890123456789012345678901234567890123456789012345678901234567890", "57edf4a22be3c955ac49da2e2107b67a");
+}
+#endif
+
+// Note: this code is harmless on little-endian machines.
+
+static void reverseBytes(uint8_t* buf, unsigned longs)
+{
+ ASSERT(longs > 0);
+ do {
+ uint32_t t = static_cast<uint32_t>(buf[3] << 8 | buf[2]) << 16 | buf[1] << 8 | buf[0];
+ ASSERT_WITH_MESSAGE(!(reinterpret_cast<uintptr_t>(buf) % sizeof(t)), "alignment error of buf");
+ *reinterpret_cast<uint32_t *>(buf) = t;
+ buf += 4;
+ } while (--longs);
+}
+
+// The four core functions.
+// F1 is originally defined as (x & y | ~x & z), but optimized somewhat: 4 bit ops -> 3 bit ops.
+#define F1(x, y, z) (z ^ (x & (y ^ z)))
+#define F2(x, y, z) F1(z, x, y)
+#define F3(x, y, z) (x ^ y ^ z)
+#define F4(x, y, z) (y ^ (x | ~z))
+
+// This is the central step in the MD5 algorithm.
+#define MD5STEP(f, w, x, y, z, data, s) \
+ (w += f(x, y, z) + data, w = w << s | w >> (32 - s), w += x)
+
+static void MD5Transform(uint32_t buf[4], const uint32_t in[16])
+{
+ uint32_t a = buf[0];
+ uint32_t b = buf[1];
+ uint32_t c = buf[2];
+ uint32_t d = buf[3];
+
+ MD5STEP(F1, a, b, c, d, in[ 0]+0xd76aa478, 7);
+ MD5STEP(F1, d, a, b, c, in[ 1]+0xe8c7b756, 12);
+ MD5STEP(F1, c, d, a, b, in[ 2]+0x242070db, 17);
+ MD5STEP(F1, b, c, d, a, in[ 3]+0xc1bdceee, 22);
+ MD5STEP(F1, a, b, c, d, in[ 4]+0xf57c0faf, 7);
+ MD5STEP(F1, d, a, b, c, in[ 5]+0x4787c62a, 12);
+ MD5STEP(F1, c, d, a, b, in[ 6]+0xa8304613, 17);
+ MD5STEP(F1, b, c, d, a, in[ 7]+0xfd469501, 22);
+ MD5STEP(F1, a, b, c, d, in[ 8]+0x698098d8, 7);
+ MD5STEP(F1, d, a, b, c, in[ 9]+0x8b44f7af, 12);
+ MD5STEP(F1, c, d, a, b, in[10]+0xffff5bb1, 17);
+ MD5STEP(F1, b, c, d, a, in[11]+0x895cd7be, 22);
+ MD5STEP(F1, a, b, c, d, in[12]+0x6b901122, 7);
+ MD5STEP(F1, d, a, b, c, in[13]+0xfd987193, 12);
+ MD5STEP(F1, c, d, a, b, in[14]+0xa679438e, 17);
+ MD5STEP(F1, b, c, d, a, in[15]+0x49b40821, 22);
+
+ MD5STEP(F2, a, b, c, d, in[ 1]+0xf61e2562, 5);
+ MD5STEP(F2, d, a, b, c, in[ 6]+0xc040b340, 9);
+ MD5STEP(F2, c, d, a, b, in[11]+0x265e5a51, 14);
+ MD5STEP(F2, b, c, d, a, in[ 0]+0xe9b6c7aa, 20);
+ MD5STEP(F2, a, b, c, d, in[ 5]+0xd62f105d, 5);
+ MD5STEP(F2, d, a, b, c, in[10]+0x02441453, 9);
+ MD5STEP(F2, c, d, a, b, in[15]+0xd8a1e681, 14);
+ MD5STEP(F2, b, c, d, a, in[ 4]+0xe7d3fbc8, 20);
+ MD5STEP(F2, a, b, c, d, in[ 9]+0x21e1cde6, 5);
+ MD5STEP(F2, d, a, b, c, in[14]+0xc33707d6, 9);
+ MD5STEP(F2, c, d, a, b, in[ 3]+0xf4d50d87, 14);
+ MD5STEP(F2, b, c, d, a, in[ 8]+0x455a14ed, 20);
+ MD5STEP(F2, a, b, c, d, in[13]+0xa9e3e905, 5);
+ MD5STEP(F2, d, a, b, c, in[ 2]+0xfcefa3f8, 9);
+ MD5STEP(F2, c, d, a, b, in[ 7]+0x676f02d9, 14);
+ MD5STEP(F2, b, c, d, a, in[12]+0x8d2a4c8a, 20);
+
+ MD5STEP(F3, a, b, c, d, in[ 5]+0xfffa3942, 4);
+ MD5STEP(F3, d, a, b, c, in[ 8]+0x8771f681, 11);
+ MD5STEP(F3, c, d, a, b, in[11]+0x6d9d6122, 16);
+ MD5STEP(F3, b, c, d, a, in[14]+0xfde5380c, 23);
+ MD5STEP(F3, a, b, c, d, in[ 1]+0xa4beea44, 4);
+ MD5STEP(F3, d, a, b, c, in[ 4]+0x4bdecfa9, 11);
+ MD5STEP(F3, c, d, a, b, in[ 7]+0xf6bb4b60, 16);
+ MD5STEP(F3, b, c, d, a, in[10]+0xbebfbc70, 23);
+ MD5STEP(F3, a, b, c, d, in[13]+0x289b7ec6, 4);
+ MD5STEP(F3, d, a, b, c, in[ 0]+0xeaa127fa, 11);
+ MD5STEP(F3, c, d, a, b, in[ 3]+0xd4ef3085, 16);
+ MD5STEP(F3, b, c, d, a, in[ 6]+0x04881d05, 23);
+ MD5STEP(F3, a, b, c, d, in[ 9]+0xd9d4d039, 4);
+ MD5STEP(F3, d, a, b, c, in[12]+0xe6db99e5, 11);
+ MD5STEP(F3, c, d, a, b, in[15]+0x1fa27cf8, 16);
+ MD5STEP(F3, b, c, d, a, in[ 2]+0xc4ac5665, 23);
+
+ MD5STEP(F4, a, b, c, d, in[ 0]+0xf4292244, 6);
+ MD5STEP(F4, d, a, b, c, in[ 7]+0x432aff97, 10);
+ MD5STEP(F4, c, d, a, b, in[14]+0xab9423a7, 15);
+ MD5STEP(F4, b, c, d, a, in[ 5]+0xfc93a039, 21);
+ MD5STEP(F4, a, b, c, d, in[12]+0x655b59c3, 6);
+ MD5STEP(F4, d, a, b, c, in[ 3]+0x8f0ccc92, 10);
+ MD5STEP(F4, c, d, a, b, in[10]+0xffeff47d, 15);
+ MD5STEP(F4, b, c, d, a, in[ 1]+0x85845dd1, 21);
+ MD5STEP(F4, a, b, c, d, in[ 8]+0x6fa87e4f, 6);
+ MD5STEP(F4, d, a, b, c, in[15]+0xfe2ce6e0, 10);
+ MD5STEP(F4, c, d, a, b, in[ 6]+0xa3014314, 15);
+ MD5STEP(F4, b, c, d, a, in[13]+0x4e0811a1, 21);
+ MD5STEP(F4, a, b, c, d, in[ 4]+0xf7537e82, 6);
+ MD5STEP(F4, d, a, b, c, in[11]+0xbd3af235, 10);
+ MD5STEP(F4, c, d, a, b, in[ 2]+0x2ad7d2bb, 15);
+ MD5STEP(F4, b, c, d, a, in[ 9]+0xeb86d391, 21);
+
+ buf[0] += a;
+ buf[1] += b;
+ buf[2] += c;
+ buf[3] += d;
+}
+
+MD5::MD5()
+{
+ testMD5();
+ m_buf[0] = 0x67452301;
+ m_buf[1] = 0xefcdab89;
+ m_buf[2] = 0x98badcfe;
+ m_buf[3] = 0x10325476;
+ m_bits[0] = 0;
+ m_bits[1] = 0;
+ memset(m_in, 0, sizeof(m_in));
+ ASSERT_WITH_MESSAGE(!(reinterpret_cast<uintptr_t>(m_in) % sizeof(uint32_t)), "alignment error of m_in");
+}
+
+void MD5::addBytes(const uint8_t* input, size_t length)
+{
+ const uint8_t* buf = input;
+
+ // Update bitcount
+ uint32_t t = m_bits[0];
+ m_bits[0] = t + (length << 3);
+ if (m_bits[0] < t)
+ m_bits[1]++; // Carry from low to high
+ m_bits[1] += length >> 29;
+
+ t = (t >> 3) & 0x3f; // Bytes already buffered in m_in
+
+ // Handle any leading odd-sized chunks
+
+ if (t) {
+ uint8_t* p = m_in + t;
+
+ t = 64 - t;
+ if (length < t) {
+ memcpy(p, buf, length);
+ return;
+ }
+ memcpy(p, buf, t);
+ reverseBytes(m_in, 16);
+ MD5Transform(m_buf, reinterpret_cast<uint32_t*>(m_in)); // m_in is 4-byte aligned.
+ buf += t;
+ length -= t;
+ }
+
+ // Process data in 64-byte chunks
+
+ while (length >= 64) {
+ memcpy(m_in, buf, 64);
+ reverseBytes(m_in, 16);
+ MD5Transform(m_buf, reinterpret_cast<uint32_t*>(m_in)); // m_in is 4-byte aligned.
+ buf += 64;
+ length -= 64;
+ }
+
+ // Handle any remaining bytes of data.
+ memcpy(m_in, buf, length);
+}
+
+Vector<uint8_t, 16> MD5::checksum()
+{
+ // Compute number of bytes mod 64
+ unsigned count = (m_bits[0] >> 3) & 0x3F;
+
+ // Set the first char of padding to 0x80. This is safe since there is
+ // always at least one byte free
+ uint8_t* p = m_in + count;
+ *p++ = 0x80;
+
+ // Bytes of padding needed to make 64 bytes
+ count = 64 - 1 - count;
+
+ // Pad out to 56 mod 64
+ if (count < 8) {
+ // Two lots of padding: Pad the first block to 64 bytes
+ memset(p, 0, count);
+ reverseBytes(m_in, 16);
+ MD5Transform(m_buf, reinterpret_cast<uint32_t *>(m_in)); // m_in is 4-byte aligned.
+
+ // Now fill the next block with 56 bytes
+ memset(m_in, 0, 56);
+ } else {
+ // Pad block to 56 bytes
+ memset(p, 0, count - 8);
+ }
+ reverseBytes(m_in, 14);
+
+ // Append length in bits and transform
+ // m_in is 4-byte aligned.
+ (reinterpret_cast<uint32_t*>(m_in))[14] = m_bits[0];
+ (reinterpret_cast<uint32_t*>(m_in))[15] = m_bits[1];
+
+ MD5Transform(m_buf, reinterpret_cast<uint32_t*>(m_in));
+ reverseBytes(reinterpret_cast<uint8_t*>(m_buf), 4);
+ Vector<uint8_t, 16> digest;
+ digest.append(reinterpret_cast<uint8_t*>(m_buf), 16);
+
+ // In case it's sensitive
+ memset(m_buf, 0, sizeof(m_buf));
+ memset(m_bits, 0, sizeof(m_bits));
+ memset(m_in, 0, sizeof(m_in));
+ return digest;
+}
+
+} // namespace WTF
--- /dev/null
+/*
+ * Copyright (C) 2010 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ *
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following disclaimer
+ * in the documentation and/or other materials provided with the
+ * distribution.
+ * * Neither the name of Google Inc. nor the names of its
+ * contributors may be used to endorse or promote products derived from
+ * this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+ * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+ * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+ * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+ * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+ * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef WTF_MD5_h
+#define WTF_MD5_h
+
+#include <wtf/Vector.h>
+
+namespace WTF {
+
+class MD5 {
+public:
+ MD5();
+
+ void addBytes(const Vector<uint8_t>& input)
+ {
+ addBytes(input.data(), input.size());
+ }
+ void addBytes(const uint8_t* input, size_t length);
+
+ // checksum has a side effect of resetting the state of the object.
+ Vector<uint8_t, 16> checksum();
+
+private:
+ uint32_t m_buf[4];
+ uint32_t m_bits[2];
+ uint8_t m_in[64];
+};
+
+} // namespace WTF
+
+using WTF::MD5;
+
+#endif // WTF_MD5_h
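A minimal usage sketch of the class declared above; the input bytes and the hex printing are illustrative, not part of the patch:

#include <stdint.h>
#include <stdio.h>
#include <wtf/MD5.h>

static void printDigest()
{
    MD5 md5;
    const uint8_t input[] = { 'h', 'e', 'l', 'l', 'o' };
    md5.addBytes(input, sizeof(input));

    // checksum() finalizes the digest and resets the object's internal state.
    Vector<uint8_t, 16> digest = md5.checksum();
    for (size_t i = 0; i < digest.size(); ++i)
        printf("%02x", digest[i]);
    printf("\n");
}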
#include "config.h"
#include "MainThread.h"
-#include "StdLibExtras.h"
#include "CurrentTime.h"
#include "Deque.h"
+#include "StdLibExtras.h"
#include "Threading.h"
+#if PLATFORM(CHROMIUM)
+#error Chromium uses a different main thread implementation
+#endif
+
namespace WTF {
struct FunctionWithContext {
, syncFlag(syncFlag)
{
}
+ bool operator == (const FunctionWithContext& o)
+ {
+ return function == o.function
+ && context == o.context
+ && syncFlag == o.syncFlag;
+ }
};
+class FunctionWithContextFinder {
+public:
+ FunctionWithContextFinder(const FunctionWithContext& m) : m(m) {}
+ bool operator() (FunctionWithContext& o) { return o == m; }
+ FunctionWithContext m;
+};
+
+
typedef Deque<FunctionWithContext> FunctionQueue;
static bool callbacksPaused; // This global variable is only accessed from the main thread.
+#if !PLATFORM(MAC) && !PLATFORM(QT)
+static ThreadIdentifier mainThreadIdentifier;
+#endif
-Mutex& mainThreadFunctionQueueMutex()
+static Mutex& mainThreadFunctionQueueMutex()
{
DEFINE_STATIC_LOCAL(Mutex, staticMutex, ());
return staticMutex;
return staticFunctionQueue;
}
+
+#if !PLATFORM(MAC)
+
void initializeMainThread()
+{
+ static bool initializedMainThread;
+ if (initializedMainThread)
+ return;
+ initializedMainThread = true;
+
+#if !PLATFORM(QT)
+ mainThreadIdentifier = currentThread();
+#endif
+
+ mainThreadFunctionQueueMutex();
+ initializeMainThreadPlatform();
+}
+
+#else
+
+static pthread_once_t initializeMainThreadKeyOnce = PTHREAD_ONCE_INIT;
+
+static void initializeMainThreadOnce()
{
mainThreadFunctionQueueMutex();
initializeMainThreadPlatform();
}
+void initializeMainThread()
+{
+ pthread_once(&initializeMainThreadKeyOnce, initializeMainThreadOnce);
+}
+
+static void initializeMainThreadToProcessMainThreadOnce()
+{
+ mainThreadFunctionQueueMutex();
+ initializeMainThreadToProcessMainThreadPlatform();
+}
+
+void initializeMainThreadToProcessMainThread()
+{
+ pthread_once(&initializeMainThreadKeyOnce, initializeMainThreadToProcessMainThreadOnce);
+}
+#endif
+
// A 0.1 second delay in the UI is the approximate threshold at which it becomes noticeable. Use a limit that's half of that.
static const double maxRunLoopSuspensionTime = 0.05;
syncFlag.wait(functionQueueMutex);
}
+void cancelCallOnMainThread(MainThreadFunction* function, void* context)
+{
+ ASSERT(function);
+
+ MutexLocker locker(mainThreadFunctionQueueMutex());
+
+ FunctionWithContextFinder pred(FunctionWithContext(function, context));
+
+ while (true) {
+        // We must redefine 'i' each pass, because the iterator's operator=
+ // requires 'this' to be valid, and remove() invalidates all iterators
+ FunctionQueue::iterator i(functionQueue().findIf(pred));
+ if (i == functionQueue().end())
+ break;
+ functionQueue().remove(i);
+ }
+}
+
void setMainThreadCallbacksPaused(bool paused)
{
ASSERT(isMainThread());
scheduleDispatchFunctionsOnMainThread();
}
+#if !PLATFORM(MAC) && !PLATFORM(QT)
+bool isMainThread()
+{
+ return currentThread() == mainThreadIdentifier;
+}
+#endif
+
} // namespace WTF
/*
- * Copyright (C) 2007, 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2007, 2008, 2010 Apple Inc. All rights reserved.
* Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
*
* Redistribution and use in source and binary forms, with or without
#ifndef MainThread_h
#define MainThread_h
-namespace WTF {
+#include <stdint.h>
-class Mutex;
+namespace WTF {
+typedef uint32_t ThreadIdentifier;
typedef void MainThreadFunction(void*);
-void callOnMainThread(MainThreadFunction*, void* context);
+// Must be called from the main thread.
+void initializeMainThread();
-// Blocks the thread until the call finishes on the main thread. Misusing this can easily cause deadlocks.
+void callOnMainThread(MainThreadFunction*, void* context);
void callOnMainThreadAndWait(MainThreadFunction*, void* context);
+void cancelCallOnMainThread(MainThreadFunction*, void* context);
void setMainThreadCallbacksPaused(bool paused);
-// Must be called from the main thread (Darwin is an exception to this rule).
-void initializeMainThread();
+bool isMainThread();
+bool isWebThread();
-// These functions are internal to the callOnMainThread implementation.
+// NOTE: these functions are internal to the callOnMainThread implementation.
void initializeMainThreadPlatform();
void scheduleDispatchFunctionsOnMainThread();
-Mutex& mainThreadFunctionQueueMutex();
void dispatchFunctionsFromMainThread();
+#if PLATFORM(MAC)
+// This version of initializeMainThread sets up the main thread as corresponding
+// to the process's main thread, and not necessarily the thread that calls this
+// function. It should only be used as a legacy aid for Mac WebKit.
+void initializeMainThreadToProcessMainThread();
+void initializeMainThreadToProcessMainThreadPlatform();
+#endif
+
} // namespace WTF
using WTF::callOnMainThread;
using WTF::callOnMainThreadAndWait;
+using WTF::cancelCallOnMainThread;
using WTF::setMainThreadCallbacksPaused;
-
+using WTF::isMainThread;
#endif // MainThread_h
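As a rough usage sketch of the API declared above (Updater and its members are hypothetical names):

#include <wtf/Assertions.h>
#include <wtf/MainThread.h>

class Updater {
public:
    // initializeMainThread() must already have been called on the main thread.
    void scheduleUpdate() { callOnMainThread(performUpdate, this); }

    // Call before destroying the Updater so a queued callback cannot run
    // with a dangling context pointer.
    void cancelUpdate() { cancelCallOnMainThread(performUpdate, this); }

private:
    static void performUpdate(void* context)
    {
        ASSERT(isMainThread());
        static_cast<Updater*>(context)->update();
    }
    void update() { /* main-thread-only work */ }
};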
/*
- * Copyright (C) 2006, 2007, 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2006, 2007, 2008, 2009, 2010 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
#ifndef WTF_MathExtras_h
#define WTF_MathExtras_h
+#include <cmath>
#include <float.h>
-#include <math.h>
#include <stdlib.h>
#if OS(SOLARIS)
#if COMPILER(MSVC) || COMPILER(RVCT)
-inline long long llround(double num) { return static_cast<long long>(num > 0 ? num + 0.5 : ceil(num - 0.5)); }
-inline long long llroundf(float num) { return static_cast<long long>(num > 0 ? num + 0.5f : ceil(num - 0.5f)); }
-inline long lround(double num) { return static_cast<long>(num > 0 ? num + 0.5 : ceil(num - 0.5)); }
-inline long lroundf(float num) { return static_cast<long>(num > 0 ? num + 0.5f : ceilf(num - 0.5f)); }
-inline double round(double num) { return num > 0 ? floor(num + 0.5) : ceil(num - 0.5); }
-inline float roundf(float num) { return num > 0 ? floorf(num + 0.5f) : ceilf(num - 0.5f); }
+// We must not do 'num + 0.5' or 'num - 0.5' because they can cause precision loss.
+static double round(double num)
+{
+ double integer = ceil(num);
+ if (num > 0)
+ return integer - num > 0.5 ? integer - 1.0 : integer;
+ return integer - num >= 0.5 ? integer - 1.0 : integer;
+}
+static float roundf(float num)
+{
+ float integer = ceilf(num);
+ if (num > 0)
+ return integer - num > 0.5f ? integer - 1.0f : integer;
+ return integer - num >= 0.5f ? integer - 1.0f : integer;
+}
+inline long long llround(double num) { return static_cast<long long>(round(num)); }
+inline long long llroundf(float num) { return static_cast<long long>(roundf(num)); }
+inline long lround(double num) { return static_cast<long>(round(num)); }
+inline long lroundf(float num) { return static_cast<long>(roundf(num)); }
inline double trunc(double num) { return num > 0 ? floor(num) : ceil(num); }
#endif
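The precision-loss warning above can be checked in isolation; a small standalone sketch, where naiveRound simply restates the round() expression being removed above:

#include <assert.h>
#include <cmath>

static double naiveRound(double num) { return num > 0 ? std::floor(num + 0.5) : std::ceil(num - 0.5); }

int main()
{
    // 2^52 + 1 is exactly representable as a double, but 2^52 + 1.5 is not:
    // the addition rounds up to 2^52 + 2, so the naive expression overshoots by one.
    double x = 4503599627370497.0; // 2^52 + 1
    assert(naiveRound(x) == 4503599627370498.0);
    // The ceil()-based round() above instead computes ceil(x) - x (exactly 0 here)
    // and returns x unchanged.
    return 0;
}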
#if COMPILER(MSVC)
+// The 64-bit version of abs() is already defined in the stdlib.h that ships with VC10.
+#if COMPILER(MSVC9_OR_LOWER)
+inline long long abs(long long num) { return _abs64(num); }
+#endif
inline bool isinf(double num) { return !_finite(num) && !_isnan(num); }
inline bool isnan(double num) { return !!_isnan(num); }
inline float rad2grad(float r) { return r * 200.0f / piFloat; }
inline float grad2rad(float g) { return g * piFloat / 200.0f; }
+#if !COMPILER(MSVC) && !COMPILER(WINSCW) && !(COMPILER(RVCT) && OS(SYMBIAN))
+using std::isfinite;
+using std::isinf;
+using std::isnan;
+using std::signbit;
+#endif
+
#endif // #ifndef WTF_MathExtras_h
~OwnFastMallocPtr()
{
- fastFree(m_ptr);
+ fastFree(const_cast<void*>(static_cast<const void*>(const_cast<const T*>(m_ptr))));
}
T* get() const { return m_ptr; }
typedef ValueType* PtrType;
explicit OwnPtr(PtrType ptr = 0) : m_ptr(ptr) { }
- OwnPtr(std::auto_ptr<ValueType> autoPtr) : m_ptr(autoPtr.release()) { }
// See comment in PassOwnPtr.h for why this takes a const reference.
template <typename U> OwnPtr(const PassOwnPtr<U>& o);
// FIXME: This should be renamed to adopt.
void set(PtrType ptr) { ASSERT(!ptr || m_ptr != ptr); deleteOwnedPtr(m_ptr); m_ptr = ptr; }
- void adopt(std::auto_ptr<ValueType> autoPtr) { ASSERT(!autoPtr.get() || m_ptr != autoPtr.get()); deleteOwnedPtr(m_ptr); m_ptr = autoPtr.release(); }
-
void clear() { deleteOwnedPtr(m_ptr); m_ptr = 0; }
ValueType& operator*() const { ASSERT(m_ptr); return *m_ptr; }
/*
* Copyright (C) 2009 Apple Inc. All rights reserved.
* Copyright (C) 2009 Torch Mobile, Inc.
+ * Copyright (C) 2010 Company 100 Inc.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
typedef struct HRGN__* HRGN;
#endif
+#if PLATFORM(BREWMP)
+// Forward declarations at this point avoid the need to pull BREW headers
+// into WTF headers.
+typedef struct _IFileMgr IFileMgr;
+typedef struct _IFile IFile;
+typedef struct IBitmap IBitmap;
+#endif
+
namespace WTF {
template <typename T> inline void deleteOwnedPtr(T* ptr)
void deleteOwnedPtr(HRGN);
#endif
+#if PLATFORM(BREWMP)
+ void deleteOwnedPtr(IFileMgr*);
+ void deleteOwnedPtr(IFile*);
+ void deleteOwnedPtr(IBitmap*);
+#endif
+
} // namespace WTF
#endif // WTF_OwnPtrCommon_h
+++ /dev/null
-/*
- * Copyright (C) 2007 Apple Inc. All rights reserved.
- * Copyright (C) 2008, 2009 Torch Mobile, Inc.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
- * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
- * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
- * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
- * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
- * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
- * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
- * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#include "config.h"
-#include "OwnPtr.h"
-
-#include <windows.h>
-
-namespace WTF {
-
-void deleteOwnedPtr(HBITMAP ptr)
-{
- if (ptr)
- DeleteObject(ptr);
-}
-
-void deleteOwnedPtr(HBRUSH ptr)
-{
- if (ptr)
- DeleteObject(ptr);
-}
-
-void deleteOwnedPtr(HDC ptr)
-{
- if (ptr)
- DeleteDC(ptr);
-}
-
-void deleteOwnedPtr(HFONT ptr)
-{
- if (ptr)
- DeleteObject(ptr);
-}
-
-void deleteOwnedPtr(HPALETTE ptr)
-{
- if (ptr)
- DeleteObject(ptr);
-}
-
-void deleteOwnedPtr(HPEN ptr)
-{
- if (ptr)
- DeleteObject(ptr);
-}
-
-void deleteOwnedPtr(HRGN ptr)
-{
- if (ptr)
- DeleteObject(ptr);
-}
-
-}
template<typename T> class RefPtr;
template<typename T> class PassRefPtr;
template <typename T> PassRefPtr<T> adoptRef(T*);
-
- // Remove inline for winscw compiler to prevent the compiler agressively resolving
- // T::deref(), which will fail compiling when PassRefPtr<T> is used as class member
- // or function arguments before T is defined.
+
+
+    // Remove the inline keyword for the WINSCW compiler to keep it from aggressively resolving
+    // T::ref() and T::deref(), which fails to compile when PassRefPtr<T> is used as
+    // a class member or function argument before T is defined.
+
+ // [Qt]r57240 broke Qt build (might be a gcc bug)
+ // FIXME! See: https://bugs.webkit.org/show_bug.cgi?id=37253
+ template<typename T>
+#if !COMPILER(WINSCW)
+#if !PLATFORM(QT)
+ ALWAYS_INLINE
+#else
+ inline
+#endif
+#endif
+ void refIfNotNull(T* ptr)
+ {
+ if (LIKELY(ptr != 0))
+ ptr->ref();
+ }
+
+ // [Qt]r57240 broke Qt build (might be a gcc bug)
+ // FIXME! See: https://bugs.webkit.org/show_bug.cgi?id=37253
template<typename T>
#if !COMPILER(WINSCW)
- inline
+#if !PLATFORM(QT)
+ ALWAYS_INLINE
+#else
+ inline
+#endif
#endif
void derefIfNotNull(T* ptr)
{
- if (UNLIKELY(ptr != 0))
+ if (LIKELY(ptr != 0))
ptr->deref();
}
template<typename T> class PassRefPtr {
public:
PassRefPtr() : m_ptr(0) {}
- PassRefPtr(T* ptr) : m_ptr(ptr) { if (ptr) ptr->ref(); }
+ PassRefPtr(T* ptr) : m_ptr(ptr) { refIfNotNull(ptr); }
// It somewhat breaks the type system to allow transfer of ownership out of
// a const PassRefPtr. However, it makes it much easier to work with PassRefPtr
// temporaries, and we don't really have a need to use real const PassRefPtrs
PassRefPtr(const PassRefPtr& o) : m_ptr(o.releaseRef()) {}
template <typename U> PassRefPtr(const PassRefPtr<U>& o) : m_ptr(o.releaseRef()) { }
- ALWAYS_INLINE ~PassRefPtr() { derefIfNotNull<T>(m_ptr); }
+ ALWAYS_INLINE ~PassRefPtr() { derefIfNotNull(m_ptr); }
template <class U>
- PassRefPtr(const RefPtr<U>& o) : m_ptr(o.get()) { if (T* ptr = m_ptr) ptr->ref(); }
+ PassRefPtr(const RefPtr<U>& o) : m_ptr(o.get()) { T* ptr = m_ptr; refIfNotNull(ptr); }
T* get() const { return m_ptr; }
- void clear() { if (T* ptr = m_ptr) ptr->deref(); m_ptr = 0; }
+ void clear() { T* ptr = m_ptr; derefIfNotNull(ptr); m_ptr = 0; }
T* releaseRef() const { T* tmp = m_ptr; m_ptr = 0; return tmp; }
T& operator*() const { return *m_ptr; }
template <typename T> template <typename U> inline PassRefPtr<T>& PassRefPtr<T>::operator=(const RefPtr<U>& o)
{
T* optr = o.get();
- if (optr)
- optr->ref();
+ refIfNotNull(optr);
T* ptr = m_ptr;
m_ptr = optr;
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
template <typename T> inline PassRefPtr<T>& PassRefPtr<T>::operator=(T* optr)
{
- if (optr)
- optr->ref();
+ refIfNotNull(optr);
T* ptr = m_ptr;
m_ptr = optr;
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
{
T* ptr = m_ptr;
m_ptr = ref.releaseRef();
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
{
T* ptr = m_ptr;
m_ptr = ref.releaseRef();
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
/* ==== COMPILER() - the compiler being used to build the project ==== */
/* COMPILER(MSVC) Microsoft Visual C++ */
-/* COMPILER(MSVC7) Microsoft Visual C++ v7 or lower*/
+/* COMPILER(MSVC7_OR_LOWER) Microsoft Visual C++ 2003 or lower*/
+/* COMPILER(MSVC9_OR_LOWER) Microsoft Visual C++ 2008 or lower*/
#if defined(_MSC_VER)
#define WTF_COMPILER_MSVC 1
#if _MSC_VER < 1400
-#define WTF_COMPILER_MSVC7 1
+#define WTF_COMPILER_MSVC7_OR_LOWER 1
+#elif _MSC_VER < 1600
+#define WTF_COMPILER_MSVC9_OR_LOWER 1
#endif
#endif
#endif
/* COMPILER(MINGW) - MinGW GCC */
-#if defined(MINGW) || defined(__MINGW32__)
+/* COMPILER(MINGW64) - mingw-w64 GCC - only used as an additional check to exclude mingw.org-specific functions */
+#if defined(__MINGW32__)
#define WTF_COMPILER_MINGW 1
-#endif
+#include <_mingw.h> /* private MinGW header */
+ #if defined(__MINGW64_VERSION_MAJOR) /* best way to check for mingw-w64 vs mingw.org */
+ #define WTF_COMPILER_MINGW64 1
+ #endif /* __MINGW64_VERSION_MAJOR */
+#endif /* __MINGW32__ */
/* COMPILER(WINSCW) - CodeWarrior for Symbian emulator */
#if defined(__WINSCW__)
#define WTF_COMPILER_WINSCW 1
+/* We are cross-compiling; this is not really Windows. */
+#undef WIN32
+#undef _WIN32
#endif
/* CPU(IA64) - Itanium / IA-64 */
#if defined(__ia64__)
#define WTF_CPU_IA64 1
+/* 32-bit mode on Itanium */
+#if !defined(__LP64__)
+#define WTF_CPU_IA64_32 1
+#endif
+#endif
+
+/* CPU(MIPS) - MIPS 32-bit */
+/* Note: Only O32 ABI is tested, so we enable it for O32 ABI for now. */
+#if (defined(mips) || defined(__mips__)) \
+ && defined(_ABIO32)
+#define WTF_CPU_MIPS 1
+#if defined(__MIPSEB__)
+#define WTF_CPU_BIG_ENDIAN 1
#endif
+#define WTF_MIPS_PIC (defined __PIC__)
+#define WTF_MIPS_ARCH __mips
+#define WTF_MIPS_ISA(v) (defined WTF_MIPS_ARCH && WTF_MIPS_ARCH == v)
+#define WTF_MIPS_ISA_AT_LEAST(v) (defined WTF_MIPS_ARCH && WTF_MIPS_ARCH >= v)
+#define WTF_MIPS_ARCH_REV __mips_isa_rev
+#define WTF_MIPS_ISA_REV(v) (defined WTF_MIPS_ARCH_REV && WTF_MIPS_ARCH_REV == v)
+#define WTF_MIPS_DOUBLE_FLOAT (defined __mips_hard_float && !defined __mips_single_float)
+#endif /* MIPS */
/* CPU(PPC) - PowerPC 32-bit */
#if defined(__ppc__) \
/* CPU(SPARC) - any SPARC, true for CPU(SPARC32) and CPU(SPARC64) */
#if CPU(SPARC32) || CPU(SPARC64)
-#define WTF_CPU_SPARC
+#define WTF_CPU_SPARC 1
#endif
/* CPU(X86) - i386 / x86 32-bit */
/* CPU(ARM) - ARM, any version*/
#if defined(arm) \
- || defined(__arm__)
+ || defined(__arm__) \
+ || defined(ARM) \
+ || defined(_ARM_)
#define WTF_CPU_ARM 1
#if defined(__ARMEB__)
#elif !defined(__ARM_EABI__) \
&& !defined(__EABI__) \
&& !defined(__VFP_FP__) \
+ && !defined(_WIN32_WCE) \
&& !defined(ANDROID)
#define WTF_CPU_MIDDLE_ENDIAN 1
/* OS(SYMBIAN) - Symbian */
#if defined (__SYMBIAN32__)
-/* we are cross-compiling, it is not really windows */
-#undef WTF_OS_WINDOWS
-#undef WTF_PLATFORM_WIN
#define WTF_OS_SYMBIAN 1
#endif
#define WTF_PLATFORM_GTK 1
#elif defined(BUILDING_HAIKU__)
#define WTF_PLATFORM_HAIKU 1
+#elif defined(BUILDING_BREWMP__)
+#define WTF_PLATFORM_BREWMP 1
+#if defined(AEE_SIMULATOR)
+#define WTF_PLATFORM_BREWMP_SIMULATOR 1
+#else
+#define WTF_PLATFORM_BREWMP_SIMULATOR 0
+#endif
+#undef WTF_OS_WINDOWS
+#undef WTF_PLATFORM_WIN
#elif OS(DARWIN)
#define WTF_PLATFORM_MAC 1
#elif OS(WINDOWS)
*/
#if OS(WINCE) && PLATFORM(QT)
# include <QtGlobal>
-# undef WTF_PLATFORM_BIG_ENDIAN
-# undef WTF_PLATFORM_MIDDLE_ENDIAN
-# if Q_BYTE_ORDER == Q_BIG_EDIAN
-# define WTF_PLATFORM_BIG_ENDIAN 1
+# undef WTF_CPU_BIG_ENDIAN
+# undef WTF_CPU_MIDDLE_ENDIAN
+# if Q_BYTE_ORDER == Q_BIG_ENDIAN
+# define WTF_CPU_BIG_ENDIAN 1
# endif
# include <ce_time.h>
#if PLATFORM(QT)
#define WTF_USE_QT4_UNICODE 1
+#if !defined(ENABLE_WIDGETS_10_SUPPORT)
+#define ENABLE_WIDGETS_10_SUPPORT 1
+#endif
#elif OS(WINCE)
#define WTF_USE_WINCE_UNICODE 1
#elif PLATFORM(GTK)
#endif
+
#if PLATFORM(CHROMIUM) && OS(DARWIN)
#define WTF_PLATFORM_CF 1
#define WTF_USE_PTHREADS 1
#define HAVE_PTHREAD_RWLOCK 1
+#define WTF_USE_CARBON_SECURE_INPUT_MODE 1
#endif
#define DONT_FINALIZE_ON_MAIN_THREAD 1
#define WTF_PLATFORM_CF 1
#endif
+#if OS(DARWIN) && !defined(BUILDING_ON_TIGER) && !PLATFORM(GTK) && !PLATFORM(QT)
+#define ENABLE_PURGEABLE_MEMORY 1
+#endif
+
#define ENABLE_CONTEXT_MENUS 0
#define ENABLE_DRAG_SUPPORT 0
#define ENABLE_FTPDIR 1
#define ENABLE_GEOLOCATION 1
+#define ENABLE_GEOLOCATION_PERMISSION_CACHE 1
#define ENABLE_ICONDATABASE 0
-#define ENABLE_INSPECTOR 0
-#define ENABLE_JIT 0
-#define ENABLE_MAC_JAVA_BRIDGE 0
+#define ENABLE_JAVA_BRIDGE 0
#define ENABLE_NETSCAPE_PLUGIN_API 0
#define ENABLE_ORIENTATION_EVENTS 1
#define ENABLE_RANGETYPE_AS_TEXT 1
#define ENABLE_REPAINT_THROTTLING 1
#define ENABLE_RESPECT_EXIF_ORIENTATION 1
-#define ENABLE_YARR 0
-#define ENABLE_YARR_JIT 0
#define HAVE_PTHREAD_RWLOCK 1
#define HAVE_READLINE 1
#define HAVE_RUNLOOP_TIMER 0
#define WTF_USE_PTHREADS 1
#define WTF_USE_WEB_THREAD 1
+#undef ENABLE_PURGEABLE_MEMORY
+
+#if defined(WTF_ARM_ARCH_VERSION) && WTF_ARM_ARCH_VERSION == 6
+#define ENABLE_INSPECTOR 0
+#define ENABLE_PURGEABLE_MEMORY 0
+#else
+#define ENABLE_INSPECTOR 1
+#define ENABLE_PURGEABLE_MEMORY 1
+#endif
+
+#define WTF_USE_PTHREAD_GETSPECIFIC_DIRECT 1
+
+#define ENABLE_JIT 0
+#define ENABLE_YARR 0
+#define ENABLE_YARR_JIT 0
+#ifdef __llvm__
+#define WTF_USE_JSVALUE32_64 1
+#else
+#define WTF_USE_JSVALUE32 1
+#endif
+
+#undef ENABLE_3D_CANVAS
+#if defined(WTF_ARM_ARCH_VERSION) && WTF_ARM_ARCH_VERSION == 6
+#define ENABLE_3D_CANVAS 0
+#else
+#define ENABLE_3D_CANVAS 1
+#endif
+
+
#if PLATFORM(ANDROID)
#define WTF_USE_PTHREADS 1
#define WTF_PLATFORM_SGL 1
#define USE_SYSTEM_MALLOC 1
-#define ENABLE_MAC_JAVA_BRIDGE 1
+#define ENABLE_JAVA_BRIDGE 1
#define LOG_DISABLED 1
/* Prevents Webkit from drawing the caret in textfields and textareas
This prevents unnecessary invals. */
#if PLATFORM(WX)
#define ENABLE_ASSEMBLER 1
+#define ENABLE_GLOBAL_FASTMALLOC_NEW 0
#if OS(DARWIN)
#define WTF_PLATFORM_CF 1
+#ifndef BUILDING_ON_TIGER
+#define WTF_USE_CORE_TEXT 1
+#else
+#define WTF_USE_ATSUI 1
+#endif
#endif
#endif
#define ENABLE_NETSCAPE_PLUGIN_API 0
#endif
+#if PLATFORM(BREWMP)
+#define USE_SYSTEM_MALLOC 1
+#endif
+
#if !defined(HAVE_ACCESSIBILITY)
#define HAVE_ACCESSIBILITY 1
#endif /* !defined(HAVE_ACCESSIBILITY) */
#if !OS(WINDOWS) && !OS(SOLARIS) && !OS(QNX) \
&& !OS(SYMBIAN) && !OS(HAIKU) && !OS(RVCT) \
- && !OS(ANDROID)
+ && !OS(ANDROID) && !PLATFORM(BREWMP)
#define HAVE_TM_GMTOFF 1
#define HAVE_TM_ZONE 1
#define HAVE_TIMEGM 1
#define HAVE_SYS_PARAM_H 1
#endif
+#elif PLATFORM(BREWMP)
+
+#define HAVE_ERRNO_H 1
+
#elif OS(QNX)
#define HAVE_ERRNO_H 1
/* ENABLE macro defaults */
+#if PLATFORM(QT)
+// We mustn't customize the global operator new and delete for the Qt port.
+#define ENABLE_GLOBAL_FASTMALLOC_NEW 0
+#endif
+
/* fastMalloc match validation allows for runtime verification that
new is matched by delete, fastMalloc is matched by fastFree, etc. */
#if !defined(ENABLE_FAST_MALLOC_MATCH_VALIDATION)
#define ENABLE_DASHBOARD_SUPPORT 0
#endif
+#if !defined(ENABLE_WIDGETS_10_SUPPORT)
+#define ENABLE_WIDGETS_10_SUPPORT 0
+#endif
+
#if !defined(ENABLE_GEOLOCATION_PERMISSION_CACHE)
#define ENABLE_GEOLOCATION_PERMISSION_CACHE 0
#endif
#define ENABLE_INSPECTOR 1
#endif
-#if !defined(ENABLE_MAC_JAVA_BRIDGE)
-#define ENABLE_MAC_JAVA_BRIDGE 0
+#if !defined(ENABLE_JAVA_BRIDGE)
+#define ENABLE_JAVA_BRIDGE 0
#endif
#if !defined(ENABLE_NETSCAPE_PLUGIN_API)
#define ENABLE_NETSCAPE_PLUGIN_API 1
#endif
+#if !defined(ENABLE_PURGEABLE_MEMORY)
+#define ENABLE_PURGEABLE_MEMORY 0
+#endif
+
#if !defined(WTF_USE_PLUGIN_HOST_PROCESS)
#define WTF_USE_PLUGIN_HOST_PROCESS 0
#endif
#define ENABLE_RESPECT_EXIF_ORIENTATION 0
#endif
+#if !defined(WTF_USE_PTHREAD_GETSPECIFIC_DIRECT)
+#define WTF_USE_PTHREAD_GETSPECIFIC_DIRECT 0
+#endif
+
#if !defined(ENABLE_OPCODE_STATS)
#define ENABLE_OPCODE_STATS 0
#endif
+#if !defined(ENABLE_GLOBAL_FASTMALLOC_NEW)
+#define ENABLE_GLOBAL_FASTMALLOC_NEW 1
+#endif
+
+#define ENABLE_DEBUG_WITH_BREAKPOINT 0
#define ENABLE_SAMPLING_COUNTERS 0
#define ENABLE_SAMPLING_FLAGS 0
#define ENABLE_OPCODE_SAMPLING 0
#define ENABLE_ON_FIRST_TEXTAREA_FOCUS_SELECT_ALL 0
#endif
+#if !defined(ENABLE_CONTENTEDITABLE)
+#define ENABLE_CONTENTEDITABLE 0
+#endif
+
#if !defined(WTF_USE_JSVALUE64) && !defined(WTF_USE_JSVALUE32) && !defined(WTF_USE_JSVALUE32_64)
-#if (CPU(X86_64) && (OS(UNIX) || OS(WINDOWS))) || CPU(IA64) || CPU(ALPHA)
+#if (CPU(X86_64) && (OS(UNIX) || OS(WINDOWS))) \
+ || (CPU(IA64) && !CPU(IA64_32)) \
+ || CPU(ALPHA) \
+ || CPU(SPARC64)
#define WTF_USE_JSVALUE64 1
-#elif CPU(ARM) && !PLATFORM(IPHONE) || CPU(PPC64) || (PLATFORM(IPHONE) && defined(WTF_ARM_ARCH_VERSION) && WTF_ARM_ARCH_VERSION == 6 && !defined(__llvm__))
+#elif CPU(ARM_TRADITIONAL) && !PLATFORM(IPHONE) || CPU(PPC64) || CPU(MIPS) || (PLATFORM(IPHONE) && defined(WTF_ARM_ARCH_VERSION) && WTF_ARM_ARCH_VERSION == 6 && !defined(__llvm__))
/* FIXME: <rdar://problem/7478149> gcc-4.2 compiler bug with USE(JSVALUE32_64) and armv6 target */
#define WTF_USE_JSVALUE32 1
#elif OS(WINDOWS) && COMPILER(MINGW)
#define WTF_USE_JIT_STUB_ARGUMENT_VA_LIST 1
#elif CPU(ARM_THUMB2) && PLATFORM(IPHONE)
#define ENABLE_JIT 1
+/* The JIT is tested & working on Android */
+#elif CPU(ARM_THUMB2) && PLATFORM(ANDROID) && ENABLE(ANDROID_JSC_JIT)
+ #define ENABLE_JIT 1
/* The JIT is tested & working on x86 Windows */
#elif CPU(X86) && PLATFORM(WIN)
#define ENABLE_JIT 1
#endif
-#if PLATFORM(QT)
+#if PLATFORM(QT) || PLATFORM(WX)
#if CPU(X86_64) && OS(DARWIN)
#define ENABLE_JIT 1
#elif CPU(X86) && OS(DARWIN)
#elif CPU(X86) && OS(WINDOWS) && COMPILER(MINGW) && GCC_VERSION >= 40100
#define ENABLE_JIT 1
#define WTF_USE_JIT_STUB_ARGUMENT_VA_LIST 1
+#elif CPU(X86_64) && OS(WINDOWS) && COMPILER(MINGW64) && GCC_VERSION >= 40100
+ #define ENABLE_JIT 1
#elif CPU(X86) && OS(WINDOWS) && COMPILER(MSVC)
#define ENABLE_JIT 1
#define WTF_USE_JIT_STUB_ARGUMENT_REGISTER 1
#define ENABLE_JIT 1
#elif CPU(ARM_TRADITIONAL) && OS(LINUX)
#define ENABLE_JIT 1
+#elif CPU(ARM_TRADITIONAL) && OS(SYMBIAN) && COMPILER(RVCT)
+ #define ENABLE_JIT 1
+#elif CPU(MIPS) && OS(LINUX)
+ #define ENABLE_JIT 1
+ #define WTF_USE_JIT_STUB_ARGUMENT_VA_LIST 0
#endif
#endif /* PLATFORM(QT) */
#endif /* !defined(ENABLE_JIT) */
+#if 1 || !ENABLE(JIT)
+#define ENABLE_INTERPRETER 1
+#endif
+
+#if !(ENABLE(JIT) || ENABLE(INTERPRETER))
+#error You have to have at least one execution model enabled to build JSC
+#endif
+
+/* CPU architecture specific optimizations */
+#if CPU(ARM_TRADITIONAL)
+#if ENABLE(JIT) && !defined(ENABLE_JIT_OPTIMIZE_MOD) && WTF_ARM_ARCH_AT_LEAST(5)
+#define ENABLE_JIT_OPTIMIZE_MOD 1
+#endif
+#endif
+
#if ENABLE(JIT)
#ifndef ENABLE_JIT_OPTIMIZE_CALL
#define ENABLE_JIT_OPTIMIZE_CALL 1
#endif
#ifndef ENABLE_JIT_OPTIMIZE_NATIVE_CALL
-#define ENABLE_JIT_OPTIMIZE_NATIVE_CALL 1
+#define ENABLE_JIT_OPTIMIZE_NATIVE_CALL 0
#endif
#ifndef ENABLE_JIT_OPTIMIZE_PROPERTY_ACCESS
#define ENABLE_JIT_OPTIMIZE_PROPERTY_ACCESS 1
#ifndef ENABLE_JIT_OPTIMIZE_METHOD_CALLS
#define ENABLE_JIT_OPTIMIZE_METHOD_CALLS 1
#endif
+#ifndef ENABLE_JIT_OPTIMIZE_MOD
+#define ENABLE_JIT_OPTIMIZE_MOD 0
+#endif
#endif
#if CPU(X86) && COMPILER(MSVC)
#define JSC_HOST_CALL
#endif
-#if COMPILER(GCC) && !ENABLE(JIT)
+#if COMPILER(GCC)
#define HAVE_COMPUTED_GOTO 1
#endif
-#if ENABLE(JIT) && defined(COVERAGE)
- #define WTF_USE_INTERPRETER 0
-#else
- #define WTF_USE_INTERPRETER 1
+#if HAVE(COMPUTED_GOTO) && ENABLE(INTERPRETER)
+#define ENABLE_COMPUTED_GOTO_INTERPRETER 1
#endif
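For background, a toy sketch of the GCC labels-as-values ("computed goto") construct that HAVE(COMPUTED_GOTO) gates; this is a standalone illustration, not JSC's interpreter:

#if defined(__GNUC__)
static int runToyProgram()
{
    // Each opcode's handler address is stored in a table; dispatch is a
    // single indirect goto rather than a switch.
    static void* opcodeTable[] = { &&op_increment, &&op_halt };
    int program[] = { 0, 0, 1 }; // increment, increment, halt
    int pc = 0;
    int accumulator = 0;

    goto *opcodeTable[program[pc]];
op_increment:
    ++accumulator;
    goto *opcodeTable[program[++pc]];
op_halt:
    return accumulator; // returns 2
}
#endif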
/* Yet Another Regex Runtime. */
#if (CPU(X86) && PLATFORM(MAC)) \
|| (CPU(X86_64) && PLATFORM(MAC)) \
|| (CPU(ARM_THUMB2) && PLATFORM(IPHONE)) \
- || (CPU(X86) && PLATFORM(WIN))
+ || (CPU(ARM_THUMB2) && PLATFORM(ANDROID) && ENABLE(ANDROID_JSC_JIT)) \
+ || (CPU(X86) && PLATFORM(WIN)) \
+ || (CPU(X86) && PLATFORM(WX))
#define ENABLE_YARR 1
#define ENABLE_YARR_JIT 1
#endif
#if PLATFORM(QT)
#if (CPU(X86) && OS(WINDOWS) && COMPILER(MINGW) && GCC_VERSION >= 40100) \
+ || (CPU(X86_64) && OS(WINDOWS) && COMPILER(MINGW64) && GCC_VERSION >= 40100) \
|| (CPU(X86) && OS(WINDOWS) && COMPILER(MSVC)) \
|| (CPU(X86) && OS(LINUX) && GCC_VERSION >= 40100) \
|| (CPU(X86_64) && OS(LINUX) && GCC_VERSION >= 40100) \
- || (CPU(ARM_TRADITIONAL) && OS(LINUX))
+ || (CPU(ARM_TRADITIONAL) && OS(LINUX)) \
+ || (CPU(ARM_TRADITIONAL) && OS(SYMBIAN) && COMPILER(RVCT)) \
+ || (CPU(MIPS) && OS(LINUX))
#define ENABLE_YARR 1
#define ENABLE_YARR_JIT 1
#endif
#if ENABLE(JIT) || ENABLE(YARR_JIT)
#define ENABLE_ASSEMBLER 1
#endif
-/* Setting this flag prevents the assembler from using RWX memory; this may improve
- security but currectly comes at a significant performance cost. */
-#define ENABLE_ASSEMBLER_WX_EXCLUSIVE 1
+
+/* Pick which allocator to use; we only need an executable allocator if the assembler is compiled in.
+   On x86-64 we use a single fixed mmap; on other platforms we mmap on demand. */
+#if ENABLE(ASSEMBLER)
+#define ENABLE_EXECUTABLE_ALLOCATOR_FIXED 1
+#endif
#if !defined(ENABLE_PAN_SCROLLING) && OS(WINDOWS)
#define ENABLE_PAN_SCROLLING 1
#define WTF_USE_FONT_FAST_PATH 1
#endif
-/* Accelerated compositing */
#if PLATFORM(MAC)
+/* Complex text framework */
+#if !defined(BUILDING_ON_TIGER) && !defined(BUILDING_ON_LEOPARD)
+#define WTF_USE_ATSUI 0
+#define WTF_USE_CORE_TEXT 1
+#else
+#define WTF_USE_ATSUI 1
+#define WTF_USE_CORE_TEXT 0
+#endif
+
+/* Accelerated compositing */
#if !defined(BUILDING_ON_TIGER)
#define WTF_USE_ACCELERATED_COMPOSITING 1
#endif
#endif
#endif
+#if (PLATFORM(MAC) && !defined(BUILDING_ON_TIGER) && !defined(BUILDING_ON_LEOPARD)) || PLATFORM(IPHONE)
+#define WTF_USE_PROTECTION_SPACE_AUTH_CALLBACK 1
+#endif
+
#if COMPILER(GCC)
#define WARN_UNUSED_RETURN __attribute__ ((warn_unused_result))
#else
+++ /dev/null
-/*
- * Copyright (C) 2009 Google Inc. All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions are
- * met:
- *
- * * Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * * Redistributions in binary form must reproduce the above
- * copyright notice, this list of conditions and the following disclaimer
- * in the documentation and/or other materials provided with the
- * distribution.
- * * Neither the name of Google Inc. nor the names of its
- * contributors may be used to endorse or promote products derived from
- * this software without specific prior written permission.
- *
- * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
- */
-
-#ifndef PtrAndFlags_h
-#define PtrAndFlags_h
-
-#include <wtf/Assertions.h>
-
-namespace WTF {
- template<class T, typename FlagEnum> class PtrAndFlagsBase {
- public:
- bool isFlagSet(FlagEnum flagNumber) const { ASSERT(flagNumber < 2); return m_ptrAndFlags & (1 << flagNumber); }
- void setFlag(FlagEnum flagNumber) { ASSERT(flagNumber < 2); m_ptrAndFlags |= (1 << flagNumber);}
- void clearFlag(FlagEnum flagNumber) { ASSERT(flagNumber < 2); m_ptrAndFlags &= ~(1 << flagNumber);}
- T* get() const { return reinterpret_cast<T*>(m_ptrAndFlags & ~3); }
- void set(T* ptr)
- {
- ASSERT(!(reinterpret_cast<intptr_t>(ptr) & 3));
- m_ptrAndFlags = reinterpret_cast<intptr_t>(ptr) | (m_ptrAndFlags & 3);
-#ifndef NDEBUG
- m_leaksPtr = ptr;
-#endif
- }
-
- bool operator!() const { return !get(); }
- T* operator->() const { return reinterpret_cast<T*>(m_ptrAndFlags & ~3); }
-
- protected:
- intptr_t m_ptrAndFlags;
-#ifndef NDEBUG
- void* m_leaksPtr; // Only used to allow tools like leaks on OSX to detect that the memory is referenced.
-#endif
- };
-
- template<class T, typename FlagEnum> class PtrAndFlags : public PtrAndFlagsBase<T, FlagEnum> {
- public:
- PtrAndFlags()
- {
- PtrAndFlagsBase<T, FlagEnum>::m_ptrAndFlags = 0;
- }
- PtrAndFlags(T* ptr)
- {
- PtrAndFlagsBase<T, FlagEnum>::m_ptrAndFlags = 0;
- PtrAndFlagsBase<T, FlagEnum>::set(ptr);
- }
- };
-} // namespace WTF
-
-using WTF::PtrAndFlagsBase;
-using WTF::PtrAndFlags;
-
-#endif // PtrAndFlags_h
}
+using WTF::randomNumber;
+
#endif
*
*/
+// RefPtr and PassRefPtr are documented at http://webkit.org/coding/RefPtr.html
+
#ifndef WTF_RefPtr_h
#define WTF_RefPtr_h
#include <algorithm>
#include "AlwaysInline.h"
#include "FastAllocBase.h"
+#include "PassRefPtr.h"
namespace WTF {
template <typename T> class RefPtr : public FastAllocBase {
public:
RefPtr() : m_ptr(0) { }
- RefPtr(T* ptr) : m_ptr(ptr) { if (ptr) ptr->ref(); }
- RefPtr(const RefPtr& o) : m_ptr(o.m_ptr) { if (T* ptr = m_ptr) ptr->ref(); }
+ RefPtr(T* ptr) : m_ptr(ptr) { refIfNotNull(ptr); }
+ RefPtr(const RefPtr& o) : m_ptr(o.m_ptr) { T* ptr = m_ptr; refIfNotNull(ptr); }
// see comment in PassRefPtr.h for why this takes const reference
template <typename U> RefPtr(const PassRefPtr<U>&);
template <typename U> RefPtr(const NonNullPassRefPtr<U>&);
RefPtr(HashTableDeletedValueType) : m_ptr(hashTableDeletedValue()) { }
bool isHashTableDeletedValue() const { return m_ptr == hashTableDeletedValue(); }
- ~RefPtr() { if (T* ptr = m_ptr) ptr->deref(); }
+ ~RefPtr() { derefIfNotNull(m_ptr); }
- template <typename U> RefPtr(const RefPtr<U>& o) : m_ptr(o.get()) { if (T* ptr = m_ptr) ptr->ref(); }
+ template <typename U> RefPtr(const RefPtr<U>& o) : m_ptr(o.get()) { T* ptr = m_ptr; refIfNotNull(ptr); }
T* get() const { return m_ptr; }
- void clear() { if (T* ptr = m_ptr) ptr->deref(); m_ptr = 0; }
+ void clear() { derefIfNotNull(m_ptr); m_ptr = 0; }
PassRefPtr<T> release() { PassRefPtr<T> tmp = adoptRef(m_ptr); m_ptr = 0; return tmp; }
T& operator*() const { return *m_ptr; }
bool operator!() const { return !m_ptr; }
// This conversion operator allows implicit conversion to bool but not to other integer types.
-#if COMPILER(WINSCW)
- operator bool() const { return m_ptr; }
-#else
- typedef T* RefPtr::*UnspecifiedBoolType;
+ typedef T* (RefPtr::*UnspecifiedBoolType);
operator UnspecifiedBoolType() const { return m_ptr ? &RefPtr::m_ptr : 0; }
-#endif
RefPtr& operator=(const RefPtr&);
RefPtr& operator=(T*);
void swap(RefPtr&);
- private:
static T* hashTableDeletedValue() { return reinterpret_cast<T*>(-1); }
+ private:
T* m_ptr;
};
template <typename T> inline RefPtr<T>& RefPtr<T>::operator=(const RefPtr<T>& o)
{
T* optr = o.get();
- if (optr)
- optr->ref();
+ refIfNotNull(optr);
T* ptr = m_ptr;
m_ptr = optr;
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
template <typename T> template <typename U> inline RefPtr<T>& RefPtr<T>::operator=(const RefPtr<U>& o)
{
T* optr = o.get();
- if (optr)
- optr->ref();
+ refIfNotNull(optr);
T* ptr = m_ptr;
m_ptr = optr;
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
template <typename T> inline RefPtr<T>& RefPtr<T>::operator=(T* optr)
{
- if (optr)
- optr->ref();
+ refIfNotNull(optr);
T* ptr = m_ptr;
m_ptr = optr;
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
{
T* ptr = m_ptr;
m_ptr = o.releaseRef();
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
{
T* ptr = m_ptr;
m_ptr = o.releaseRef();
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
{
T* ptr = m_ptr;
m_ptr = o.releaseRef();
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
{
T* ptr = m_ptr;
m_ptr = o.releaseRef();
- if (ptr)
- ptr->deref();
+ derefIfNotNull(ptr);
return *this;
}
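A rough sketch of the RefPtr/PassRefPtr idiom that the refIfNotNull()/derefIfNotNull() helpers above back (Widget is a hypothetical class):

#include <wtf/PassRefPtr.h>
#include <wtf/RefCounted.h>
#include <wtf/RefPtr.h>

class Widget : public RefCounted<Widget> {
public:
    static PassRefPtr<Widget> create() { return adoptRef(new Widget); }
private:
    Widget() { }
};

static void useWidget()
{
    RefPtr<Widget> widget = Widget::create(); // adopts the initial reference
    RefPtr<Widget> alias = widget;            // refIfNotNull() raises the count to 2
    alias.clear();                            // derefIfNotNull() drops it back to 1
}                                             // widget's destructor releases the last reference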
--- /dev/null
+/*
+ * Copyright (C) 2006 Apple Computer, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef StaticConstructors_h
+#define StaticConstructors_h
+
+// We need to avoid having static constructors. We achieve this
+// with two separate methods for GCC and MSVC. Both methods prevent the static
+// initializers from being registered and called on program startup. On GCC, we
+// declare the global objects with a different type that can be POD default
+// initialized by the linker/loader. On MSVC we use a special compiler feature
+// to have the CRT ignore our static initializers. The constructors will never
+// be called and the objects will be left uninitialized.
+//
+// With both of these approaches, we must define and explicitly call an init
+// routine that uses placement new to create the objects and overwrite the
+// uninitialized placeholders.
+//
+// This is not completely portable, but is what we have for now without
+// changing how a lot of code accesses these global objects.
+
+#ifdef SKIP_STATIC_CONSTRUCTORS_ON_MSVC
+// - Assume that all includes of this header want ALL of their static
+// initializers ignored. This is currently the case. This means that if
+// a .cc includes this header (or it somehow gets included), all static
+// initializers after the include will not be executed.
+// - We do this with a pragma, so that all of the static initializer pointers
+// go into our own section, and the CRT won't call them. Eventually it would
+// be nice if the section was discarded, because we don't want the pointers.
+// See: http://msdn.microsoft.com/en-us/library/7977wcck(VS.80).aspx
+#pragma warning(disable:4075)
+#pragma init_seg(".unwantedstaticinits")
+#endif
+
+#ifndef SKIP_STATIC_CONSTRUCTORS_ON_GCC
+    // Define a global in the normal way.
+#if COMPILER(MSVC7_OR_LOWER)
+#define DEFINE_GLOBAL(type, name) \
+ const type name;
+#elif COMPILER(WINSCW)
+#define DEFINE_GLOBAL(type, name, arg...) \
+ const type name;
+#else
+#define DEFINE_GLOBAL(type, name, ...) \
+ const type name;
+#endif
+
+#else
+// Define a correctly-sized array of pointers to avoid static initialization.
+// Use an array of pointers instead of an array of char in case there is some alignment issue.
+#if COMPILER(MSVC7_OR_LOWER)
+#define DEFINE_GLOBAL(type, name) \
+ void * name[(sizeof(type) + sizeof(void *) - 1) / sizeof(void *)];
+#elif COMPILER(WINSCW)
+#define DEFINE_GLOBAL(type, name, arg...) \
+ void * name[(sizeof(type) + sizeof(void *) - 1) / sizeof(void *)];
+#else
+#define DEFINE_GLOBAL(type, name, ...) \
+ void * name[(sizeof(type) + sizeof(void *) - 1) / sizeof(void *)];
+#endif
+#endif
+
+#endif // StaticConstructors_h
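A sketch of how these macros are typically paired with an explicit initialization routine; SharedName, sharedEmptyName, and initSharedNames are hypothetical names, and the (void*) cast accounts for the GCC path defining the global as raw pointer-sized storage:

#include "config.h"
#include <new>
#include <wtf/StaticConstructors.h>

// A tiny stand-in for a real global class (hypothetical).
class SharedName {
public:
    SharedName() : m_name("") { }
    explicit SharedName(const char* name) : m_name(name) { }
    const char* name() const { return m_name; }
private:
    const char* m_name;
};

// In a header this would be: extern const SharedName sharedEmptyName;
// In the .cpp, reserve the storage without running a static constructor...
DEFINE_GLOBAL(SharedName, sharedEmptyName)

// ...then construct it exactly once, early at startup, with placement new.
void initSharedNames()
{
    new ((void*)&sharedEmptyName) SharedName("");
}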
#ifndef WTF_StdLibExtras_h
#define WTF_StdLibExtras_h
-#include <wtf/Platform.h>
#include <wtf/Assertions.h>
// Use these to declare and define a static local variable (static T;) so that
return result;
}
-#if COMPILER(MSVC7) || OS(WINCE)
+#if COMPILER(MSVC7_OR_LOWER) || OS(WINCE)
inline int vsnprintf(char* buffer, size_t count, const char* format, va_list args)
{
--- /dev/null
+/*
+ * Copyright (C) 2007, 2008, 2010 Apple Inc. All rights reserved.
+ * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ *
+ * Note: The implementations of InterlockedIncrement and InterlockedDecrement are based
+ * on atomic_increment and atomic_exchange_and_add from the Boost C++ Library. The license
+ * is virtually identical to the Apple license above but is included here for completeness.
+ *
+ * Boost Software License - Version 1.0 - August 17th, 2003
+ *
+ * Permission is hereby granted, free of charge, to any person or organization
+ * obtaining a copy of the software and accompanying documentation covered by
+ * this license (the "Software") to use, reproduce, display, distribute,
+ * execute, and transmit the Software, and to prepare derivative works of the
+ * Software, and to permit third-parties to whom the Software is furnished to
+ * do so, all subject to the following:
+ *
+ * The copyright notices in the Software and this entire statement, including
+ * the above license grant, this restriction and the following disclaimer,
+ * must be included in all copies of the Software, in whole or in part, and
+ * all derivative works of the Software, unless such copies or derivative
+ * works are solely in the form of machine-executable object code generated by
+ * a source language processor.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
+ * SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
+ * FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
+ * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ */
+
+#ifndef ThreadSafeShared_h
+#define ThreadSafeShared_h
+
+#include "Platform.h"
+
+#include <wtf/Atomics.h>
+#include <wtf/Noncopyable.h>
+#include <wtf/ThreadingPrimitives.h>
+
+namespace WTF {
+
+class ThreadSafeSharedBase : public Noncopyable {
+public:
+ ThreadSafeSharedBase(int initialRefCount = 1)
+ : m_refCount(initialRefCount)
+ {
+ }
+
+ void ref()
+ {
+#if USE(LOCKFREE_THREADSAFESHARED)
+ atomicIncrement(&m_refCount);
+#else
+ MutexLocker locker(m_mutex);
+ ++m_refCount;
+#endif
+ }
+
+ bool hasOneRef()
+ {
+ return refCount() == 1;
+ }
+
+ int refCount() const
+ {
+#if !USE(LOCKFREE_THREADSAFESHARED)
+ MutexLocker locker(m_mutex);
+#endif
+ return static_cast<int const volatile &>(m_refCount);
+ }
+
+protected:
+ // Returns whether the pointer should be freed or not.
+ bool derefBase()
+ {
+#if USE(LOCKFREE_THREADSAFESHARED)
+ if (atomicDecrement(&m_refCount) <= 0)
+ return true;
+#else
+ int refCount;
+ {
+ MutexLocker locker(m_mutex);
+ --m_refCount;
+ refCount = m_refCount;
+ }
+ if (refCount <= 0)
+ return true;
+#endif
+ return false;
+ }
+
+private:
+ template<class T>
+ friend class CrossThreadRefCounted;
+
+ int m_refCount;
+#if !USE(LOCKFREE_THREADSAFESHARED)
+ mutable Mutex m_mutex;
+#endif
+};
+
+template<class T> class ThreadSafeShared : public ThreadSafeSharedBase {
+public:
+ ThreadSafeShared(int initialRefCount = 1)
+ : ThreadSafeSharedBase(initialRefCount)
+ {
+ }
+
+ void deref()
+ {
+ if (derefBase())
+ delete static_cast<T*>(this);
+ }
+};
+
+} // namespace WTF
+
+using WTF::ThreadSafeShared;
+
+#endif // ThreadSafeShared_h
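A brief usage sketch of the class template above (PixelBuffer is a hypothetical class):

#include <wtf/PassRefPtr.h>
#include <wtf/RefPtr.h>
#include <wtf/ThreadSafeShared.h>
#include <wtf/Vector.h>

class PixelBuffer : public ThreadSafeShared<PixelBuffer> {
public:
    static PassRefPtr<PixelBuffer> create(size_t size)
    {
        return adoptRef(new PixelBuffer(size)); // starts with a ref count of 1
    }
    Vector<unsigned char>& data() { return m_data; }

private:
    explicit PixelBuffer(size_t size) : m_data(size) { }
    Vector<unsigned char> m_data;
};

// RefPtr<PixelBuffer> copies held on different threads can ref()/deref() the
// same object concurrently; the thread performing the final deref() deletes it.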
#elif OS(WINDOWS)
+// TLS_OUT_OF_INDEXES is not defined on WinCE.
+#ifndef TLS_OUT_OF_INDEXES
+#define TLS_OUT_OF_INDEXES 0xffffffff
+#endif
+
// The maximum number of TLS keys that can be created. For simplification, we assume that:
// 1) Once an instance of ThreadSpecific<> is created, it will not be destroyed until the program exits.
// 2) We do not need to hold many instances of ThreadSpecific<> data. This fixed number should be more than enough.
inline ThreadSpecific<T>::ThreadSpecific()
: m_index(-1)
{
- DWORD tls_key = TlsAlloc();
- if (tls_key == TLS_OUT_OF_INDEXES)
+ DWORD tlsKey = TlsAlloc();
+ if (tlsKey == TLS_OUT_OF_INDEXES)
CRASH();
m_index = InterlockedIncrement(&tlsKeyCount()) - 1;
if (m_index >= kMaxTlsKeySize)
CRASH();
- tlsKeys()[m_index] = tls_key;
+ tlsKeys()[m_index] = tlsKey;
}
template<typename T>
/*
- * Copyright (C) 2007, 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2007, 2008, 2010 Apple Inc. All rights reserved.
* Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
*
* Redistribution and use in source and binary forms, with or without
#include "Platform.h"
-#if OS(WINCE)
-#include <windows.h>
-#endif
-
+#include <stdint.h>
#include <wtf/Assertions.h>
+#include <wtf/Atomics.h>
#include <wtf/Locker.h>
+#include <wtf/MainThread.h>
#include <wtf/Noncopyable.h>
-
-#if OS(WINDOWS) && !OS(WINCE)
-#include <windows.h>
-#elif OS(DARWIN)
-#include <libkern/OSAtomic.h>
-#elif OS(ANDROID)
-#include <cutils/atomic.h>
-#elif COMPILER(GCC)
-#if (__GNUC__ > 4) || ((__GNUC__ == 4) && (__GNUC_MINOR__ >= 2))
-#include <ext/atomicity.h>
-#else
-#include <bits/atomicity.h>
-#endif
-#endif
-
-#if USE(PTHREADS)
-#include <pthread.h>
-#elif PLATFORM(GTK)
-#include <wtf/gtk/GOwnPtr.h>
-typedef struct _GMutex GMutex;
-typedef struct _GCond GCond;
-#endif
-
-#if PLATFORM(QT)
-#include <qglobal.h>
-QT_BEGIN_NAMESPACE
-class QMutex;
-class QWaitCondition;
-QT_END_NAMESPACE
-#endif
-
-#include <stdint.h>
+#include <wtf/ThreadSafeShared.h>
+#include <wtf/ThreadingPrimitives.h>
// For portability, we do not use thread-safe statics natively supported by some compilers (e.g. gcc).
#define AtomicallyInitializedStatic(T, name) \
typedef uint32_t ThreadIdentifier;
typedef void* (*ThreadFunction)(void* argument);
+// This function must be called from the main thread. It is safe to call it repeatedly.
+// Darwin is an exception to this rule: it is OK to call it from any thread; the only
+// requirement is that the calls are not reentrant.
+void initializeThreading();
+
// Returns 0 if thread creation failed.
// The thread name must be a literal since on some platforms it's passed in to the thread.
ThreadIdentifier createThread(ThreadFunction, void*, const char* threadName);
void initializeCurrentThreadInternal(const char* threadName);
ThreadIdentifier currentThread();
-bool isMainThread();
int waitForThreadCompletion(ThreadIdentifier, void**);
void detachThread(ThreadIdentifier);
-#if USE(PTHREADS)
-typedef pthread_mutex_t PlatformMutex;
-#if HAVE(PTHREAD_RWLOCK)
-typedef pthread_rwlock_t PlatformReadWriteLock;
-#else
-typedef void* PlatformReadWriteLock;
-#endif
-typedef pthread_cond_t PlatformCondition;
-#elif PLATFORM(GTK)
-typedef GOwnPtr<GMutex> PlatformMutex;
-typedef void* PlatformReadWriteLock; // FIXME: Implement.
-typedef GOwnPtr<GCond> PlatformCondition;
-#elif PLATFORM(QT)
-typedef QT_PREPEND_NAMESPACE(QMutex)* PlatformMutex;
-typedef void* PlatformReadWriteLock; // FIXME: Implement.
-typedef QT_PREPEND_NAMESPACE(QWaitCondition)* PlatformCondition;
-#elif OS(WINDOWS)
-struct PlatformMutex {
- CRITICAL_SECTION m_internalMutex;
- size_t m_recursionCount;
-};
-typedef void* PlatformReadWriteLock; // FIXME: Implement.
-struct PlatformCondition {
- size_t m_waitersGone;
- size_t m_waitersBlocked;
- size_t m_waitersToUnblock;
- HANDLE m_blockLock;
- HANDLE m_blockQueue;
- HANDLE m_unblockLock;
-
- bool timedWait(PlatformMutex&, DWORD durationMilliseconds);
- void signal(bool unblockAll);
-};
-#else
-typedef void* PlatformMutex;
-typedef void* PlatformReadWriteLock;
-typedef void* PlatformCondition;
-#endif
-
-class Mutex : public Noncopyable {
-public:
- Mutex();
- ~Mutex();
-
- void lock();
- bool tryLock();
- void unlock();
-
-public:
- PlatformMutex& impl() { return m_mutex; }
-private:
- PlatformMutex m_mutex;
-};
-
-typedef Locker<Mutex> MutexLocker;
-
-class ReadWriteLock : public Noncopyable {
-public:
- ReadWriteLock();
- ~ReadWriteLock();
-
- void readLock();
- bool tryReadLock();
-
- void writeLock();
- bool tryWriteLock();
-
- void unlock();
-
-private:
- PlatformReadWriteLock m_readWriteLock;
-};
-
-class ThreadCondition : public Noncopyable {
-public:
- ThreadCondition();
- ~ThreadCondition();
-
- void wait(Mutex& mutex);
- // Returns true if the condition was signaled before absoluteTime, false if the absoluteTime was reached or is in the past.
- // The absoluteTime is in seconds, starting on January 1, 1970. The time is assumed to use the same time zone as WTF::currentTime().
- bool timedWait(Mutex&, double absoluteTime);
- void signal();
- void broadcast();
-
-private:
- PlatformCondition m_condition;
-};
-
-#if OS(WINDOWS)
-#define WTF_USE_LOCKFREE_THREADSAFESHARED 1
-
-#if COMPILER(MINGW) || COMPILER(MSVC7) || OS(WINCE)
-inline int atomicIncrement(int* addend) { return InterlockedIncrement(reinterpret_cast<long*>(addend)); }
-inline int atomicDecrement(int* addend) { return InterlockedDecrement(reinterpret_cast<long*>(addend)); }
-#else
-inline int atomicIncrement(int volatile* addend) { return InterlockedIncrement(reinterpret_cast<long volatile*>(addend)); }
-inline int atomicDecrement(int volatile* addend) { return InterlockedDecrement(reinterpret_cast<long volatile*>(addend)); }
-#endif
-
-#elif OS(DARWIN)
-#define WTF_USE_LOCKFREE_THREADSAFESHARED 1
-
-inline int atomicIncrement(int volatile* addend) { return OSAtomicIncrement32Barrier(const_cast<int*>(addend)); }
-inline int atomicDecrement(int volatile* addend) { return OSAtomicDecrement32Barrier(const_cast<int*>(addend)); }
-
-#elif OS(ANDROID)
-
-inline int atomicIncrement(int volatile* addend) { return android_atomic_inc(addend); }
-inline int atomicDecrement(int volatile* addend) { return android_atomic_dec(addend); }
-
-#elif COMPILER(GCC) && !CPU(SPARC64) // sizeof(_Atomic_word) != sizeof(int) on sparc64 gcc
-#define WTF_USE_LOCKFREE_THREADSAFESHARED 1
-
-inline int atomicIncrement(int volatile* addend) { return __gnu_cxx::__exchange_and_add(addend, 1) + 1; }
-inline int atomicDecrement(int volatile* addend) { return __gnu_cxx::__exchange_and_add(addend, -1) - 1; }
-
-#endif
-
-class ThreadSafeSharedBase : public Noncopyable {
-public:
- ThreadSafeSharedBase(int initialRefCount = 1)
- : m_refCount(initialRefCount)
- {
- }
-
- void ref()
- {
-#if USE(LOCKFREE_THREADSAFESHARED)
- atomicIncrement(&m_refCount);
-#else
- MutexLocker locker(m_mutex);
- ++m_refCount;
-#endif
- }
-
- bool hasOneRef()
- {
- return refCount() == 1;
- }
-
- int refCount() const
- {
-#if !USE(LOCKFREE_THREADSAFESHARED)
- MutexLocker locker(m_mutex);
-#endif
- return static_cast<int const volatile &>(m_refCount);
- }
-
-protected:
- // Returns whether the pointer should be freed or not.
- bool derefBase()
- {
-#if USE(LOCKFREE_THREADSAFESHARED)
- if (atomicDecrement(&m_refCount) <= 0)
- return true;
-#else
- int refCount;
- {
- MutexLocker locker(m_mutex);
- --m_refCount;
- refCount = m_refCount;
- }
- if (refCount <= 0)
- return true;
-#endif
- return false;
- }
-
-private:
- template<class T>
- friend class CrossThreadRefCounted;
-
- int m_refCount;
-#if !USE(LOCKFREE_THREADSAFESHARED)
- mutable Mutex m_mutex;
-#endif
-};
-
-template<class T> class ThreadSafeShared : public ThreadSafeSharedBase {
-public:
- ThreadSafeShared(int initialRefCount = 1)
- : ThreadSafeSharedBase(initialRefCount)
- {
- }
-
- void deref()
- {
- if (derefBase())
- delete static_cast<T*>(this);
- }
-};
-
-// This function must be called from the main thread. It is safe to call it repeatedly.
-// Darwin is an exception to this rule: it is OK to call it from any thread, the only requirement is that the calls are not reentrant.
-void initializeThreading();
void lockAtomicallyInitializedStaticMutex();
void unlockAtomicallyInitializedStaticMutex();
} // namespace WTF
-using WTF::Mutex;
-using WTF::MutexLocker;
-using WTF::ThreadCondition;
using WTF::ThreadIdentifier;
-using WTF::ThreadSafeShared;
-
-#if USE(LOCKFREE_THREADSAFESHARED)
-using WTF::atomicDecrement;
-using WTF::atomicIncrement;
-#endif
-
using WTF::createThread;
using WTF::currentThread;
-using WTF::isMainThread;
using WTF::detachThread;
using WTF::waitForThreadCompletion;
--- /dev/null
+/*
+ * Copyright (C) 2007, 2008, 2010 Apple Inc. All rights reserved.
+ * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ThreadingPrimitives_h
+#define ThreadingPrimitives_h
+
+#include "Platform.h"
+
+#include <wtf/Assertions.h>
+#include <wtf/Locker.h>
+#include <wtf/Noncopyable.h>
+
+#if OS(WINDOWS)
+#include <windows.h>
+#endif
+
+#if USE(PTHREADS)
+#include <pthread.h>
+#elif PLATFORM(GTK)
+#include "GOwnPtr.h"
+typedef struct _GMutex GMutex;
+typedef struct _GCond GCond;
+#endif
+
+#if PLATFORM(QT)
+#include <qglobal.h>
+QT_BEGIN_NAMESPACE
+class QMutex;
+class QWaitCondition;
+QT_END_NAMESPACE
+#endif
+
+namespace WTF {
+
+#if USE(PTHREADS)
+typedef pthread_mutex_t PlatformMutex;
+#if HAVE(PTHREAD_RWLOCK)
+typedef pthread_rwlock_t PlatformReadWriteLock;
+#else
+typedef void* PlatformReadWriteLock;
+#endif
+typedef pthread_cond_t PlatformCondition;
+#elif PLATFORM(GTK)
+typedef GOwnPtr<GMutex> PlatformMutex;
+typedef void* PlatformReadWriteLock; // FIXME: Implement.
+typedef GOwnPtr<GCond> PlatformCondition;
+#elif PLATFORM(QT)
+typedef QT_PREPEND_NAMESPACE(QMutex)* PlatformMutex;
+typedef void* PlatformReadWriteLock; // FIXME: Implement.
+typedef QT_PREPEND_NAMESPACE(QWaitCondition)* PlatformCondition;
+#elif OS(WINDOWS)
+struct PlatformMutex {
+ CRITICAL_SECTION m_internalMutex;
+ size_t m_recursionCount;
+};
+typedef void* PlatformReadWriteLock; // FIXME: Implement.
+struct PlatformCondition {
+ size_t m_waitersGone;
+ size_t m_waitersBlocked;
+ size_t m_waitersToUnblock;
+ HANDLE m_blockLock;
+ HANDLE m_blockQueue;
+ HANDLE m_unblockLock;
+
+ bool timedWait(PlatformMutex&, DWORD durationMilliseconds);
+ void signal(bool unblockAll);
+};
+#else
+typedef void* PlatformMutex;
+typedef void* PlatformReadWriteLock;
+typedef void* PlatformCondition;
+#endif
+
+class Mutex : public Noncopyable {
+public:
+ Mutex();
+ ~Mutex();
+
+ void lock();
+ bool tryLock();
+ void unlock();
+
+public:
+ PlatformMutex& impl() { return m_mutex; }
+private:
+ PlatformMutex m_mutex;
+};
+
+typedef Locker<Mutex> MutexLocker;
+
+class ReadWriteLock : public Noncopyable {
+public:
+ ReadWriteLock();
+ ~ReadWriteLock();
+
+ void readLock();
+ bool tryReadLock();
+
+ void writeLock();
+ bool tryWriteLock();
+
+ void unlock();
+
+private:
+ PlatformReadWriteLock m_readWriteLock;
+};
+
+class ThreadCondition : public Noncopyable {
+public:
+ ThreadCondition();
+ ~ThreadCondition();
+
+ void wait(Mutex& mutex);
+ // Returns true if the condition was signaled before absoluteTime, false if the absoluteTime was reached or is in the past.
+ // The absoluteTime is in seconds, starting on January 1, 1970. The time is assumed to use the same time zone as WTF::currentTime().
+ bool timedWait(Mutex&, double absoluteTime);
+ void signal();
+ void broadcast();
+
+private:
+ PlatformCondition m_condition;
+};
+
+} // namespace WTF
+
+using WTF::Mutex;
+using WTF::MutexLocker;
+using WTF::ThreadCondition;
+
+#endif // ThreadingPrimitives_h
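A minimal sketch of the primitives declared above used as a producer/consumer handoff (the queue accessors and names are illustrative):

#include <wtf/ThreadingPrimitives.h>
#include <wtf/Vector.h>

static Mutex& queueMutex() { static Mutex mutex; return mutex; }
static ThreadCondition& queueCondition() { static ThreadCondition condition; return condition; }
static Vector<int>& workQueue() { static Vector<int> queue; return queue; }

static void produce(int value)
{
    MutexLocker locker(queueMutex());
    workQueue().append(value);
    queueCondition().signal();
}

static int consume()
{
    MutexLocker locker(queueMutex());
    while (workQueue().isEmpty())
        queueCondition().wait(queueMutex()); // releases the mutex while blocked
    int value = workQueue().last();
    workQueue().removeLast();
    return value;
}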
static Mutex* atomicallyInitializedStaticMutex;
-#if !OS(DARWIN) || PLATFORM(CHROMIUM) || USE(WEB_THREAD)
-static pthread_t mainThread; // The thread that was the first to call initializeThreading(), which must be the main thread.
-#endif
-
void clearPthreadHandleForIdentifier(ThreadIdentifier);
static Mutex& threadMapMutex()
void initializeThreading()
{
- if (!atomicallyInitializedStaticMutex) {
- atomicallyInitializedStaticMutex = new Mutex;
- threadMapMutex();
- initializeRandomNumberGenerator();
-#if !OS(DARWIN) || PLATFORM(CHROMIUM) || USE(WEB_THREAD)
- mainThread = pthread_self();
-#endif
- initializeMainThread();
- }
+ if (atomicallyInitializedStaticMutex)
+ return;
+
+ atomicallyInitializedStaticMutex = new Mutex;
+ threadMapMutex();
+ initializeRandomNumberGenerator();
}
void lockAtomicallyInitializedStaticMutex()
return id;
}
-bool isMainThread()
-{
-#if OS(DARWIN) && !PLATFORM(CHROMIUM) && !USE(WEB_THREAD)
- return pthread_main_np();
-#else
- return pthread_equal(pthread_self(), mainThread);
-#endif
-}
-
Mutex::Mutex()
{
pthread_mutex_init(&m_mutex, NULL);
atomicallyInitializedStaticMutex->unlock();
}
-static ThreadIdentifier mainThreadIdentifier;
-
static Mutex& threadMapMutex()
{
static Mutex mutex;
void initializeThreading()
{
- if (!atomicallyInitializedStaticMutex) {
- atomicallyInitializedStaticMutex = new Mutex;
- threadMapMutex();
- initializeRandomNumberGenerator();
- initializeMainThread();
- mainThreadIdentifier = currentThread();
- initializeCurrentThreadInternal("Main Thread");
- }
+ if (atomicallyInitializedStaticMutex)
+ return;
+
+ atomicallyInitializedStaticMutex = new Mutex;
+ threadMapMutex();
+ initializeRandomNumberGenerator();
}
static HashMap<DWORD, HANDLE>& threadMap()
return static_cast<ThreadIdentifier>(GetCurrentThreadId());
}
-bool isMainThread()
-{
- return currentThread() == mainThreadIdentifier;
-}
-
Mutex::Mutex()
{
m_mutex.m_recursionCount = 0;
#ifndef VMTags_h
#define VMTags_h
-#include <wtf/Platform.h>
-
// On Mac OS X, the VM subsystem allows tagging memory requested from mmap and vm_map
// in order to aid tools that inspect system memory use.
-#if OS(DARWIN) && !defined(BUILDING_ON_TIGER)
+#if OS(DARWIN)
#include <mach/vm_statistics.h>
+#if !defined(TARGETING_TIGER)
+
#if defined(VM_MEMORY_TCMALLOC)
#define VM_TAG_FOR_TCMALLOC_MEMORY VM_MAKE_TAG(VM_MEMORY_TCMALLOC)
#else
#define VM_TAG_FOR_TCMALLOC_MEMORY VM_MAKE_TAG(53)
#endif // defined(VM_MEMORY_TCMALLOC)
-#if defined(VM_MEMORY_JAVASCRIPT_CORE) && defined(VM_MEMORY_JAVASCRIPT_JIT_REGISTER_FILE) && defined(VM_MEMORY_JAVASCRIPT_JIT_EXECUTABLE_ALLOCATOR) && defined(VM_MEMORY_JAVASCRIPT_JIT_EXECUTABLE_ALLOCATOR)
-#define VM_TAG_FOR_COLLECTOR_MEMORY VM_MAKE_TAG(VM_MEMORY_JAVASCRIPT_CORE)
-#define VM_TAG_FOR_REGISTERFILE_MEMORY VM_MAKE_TAG(VM_MEMORY_JAVASCRIPT_JIT_REGISTER_FILE)
+#if defined(VM_MEMORY_JAVASCRIPT_JIT_EXECUTABLE_ALLOCATOR)
#define VM_TAG_FOR_EXECUTABLEALLOCATOR_MEMORY VM_MAKE_TAG(VM_MEMORY_JAVASCRIPT_JIT_EXECUTABLE_ALLOCATOR)
#else
-#define VM_TAG_FOR_COLLECTOR_MEMORY VM_MAKE_TAG(63)
#define VM_TAG_FOR_EXECUTABLEALLOCATOR_MEMORY VM_MAKE_TAG(64)
+#endif // defined(VM_MEMORY_JAVASCRIPT_JIT_EXECUTABLE_ALLOCATOR)
+
+#if defined(VM_MEMORY_JAVASCRIPT_JIT_REGISTER_FILE)
+#define VM_TAG_FOR_REGISTERFILE_MEMORY VM_MAKE_TAG(VM_MEMORY_JAVASCRIPT_JIT_REGISTER_FILE)
+#else
#define VM_TAG_FOR_REGISTERFILE_MEMORY VM_MAKE_TAG(65)
-#endif // defined(VM_MEMORY_JAVASCRIPT_CORE) && defined(VM_MEMORY_JAVASCRIPT_JIT_REGISTER_FILE) && defined(VM_MEMORY_JAVASCRIPT_JIT_EXECUTABLE_ALLOCATOR) && defined(VM_MEMORY_JAVASCRIPT_JIT_EXECUTABLE_ALLOCATOR)
+#endif // defined(VM_MEMORY_JAVASCRIPT_JIT_REGISTER_FILE)
+
+#else // !defined(TARGETING_TIGER)
+
+// mmap on Tiger fails with tags that work on Leopard, so fall
+// back to Tiger-compatible tags (that also work on Leopard)
+// when targeting Tiger.
+#define VM_TAG_FOR_TCMALLOC_MEMORY -1
+#define VM_TAG_FOR_EXECUTABLEALLOCATOR_MEMORY -1
+#define VM_TAG_FOR_REGISTERFILE_MEMORY -1
+
+#endif // !defined(TARGETING_TIGER)
+
+// Tags for vm_map and vm_allocate work on both Tiger and Leopard.
+
+#if defined(VM_MEMORY_JAVASCRIPT_CORE)
+#define VM_TAG_FOR_COLLECTOR_MEMORY VM_MAKE_TAG(VM_MEMORY_JAVASCRIPT_CORE)
+#else
+#define VM_TAG_FOR_COLLECTOR_MEMORY VM_MAKE_TAG(63)
+#endif // defined(VM_MEMORY_JAVASCRIPT_CORE)
#if defined(VM_MEMORY_WEBCORE_PURGEABLE_BUFFERS)
#define VM_TAG_FOR_WEBCORE_PURGEABLE_MEMORY VM_MAKE_TAG(VM_MEMORY_WEBCORE_PURGEABLE_BUFFERS)
#else
#define VM_TAG_FOR_WEBCORE_PURGEABLE_MEMORY VM_MAKE_TAG(69)
#endif // defined(VM_MEMORY_WEBCORE_PURGEABLE_BUFFERS)
-#else // OS(DARWIN) && !defined(BUILDING_ON_TIGER)
+#else // OS(DARWIN)
#define VM_TAG_FOR_TCMALLOC_MEMORY -1
#define VM_TAG_FOR_COLLECTOR_MEMORY -1
#define VM_TAG_FOR_REGISTERFILE_MEMORY -1
#define VM_TAG_FOR_WEBCORE_PURGEABLE_MEMORY -1
-#endif // OS(DARWIN) && !defined(BUILDING_ON_TIGER)
+#endif // OS(DARWIN)
#endif // VMTags_h
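
For illustration, this is roughly how an allocation site might apply one of these tags on Darwin, where the tag is passed in place of the file descriptor for an anonymous mmap. The helper name and usage below are hypothetical, not code from the patch.

#if OS(DARWIN)
#include <sys/mman.h>

static void* allocateTaggedRegisterFilePages(size_t bytes)
{
    // With MAP_ANON, Darwin interprets the fd argument as a VM tag; on builds
    // where the tag is defined as -1 this degenerates to a plain anonymous mapping.
    void* base = mmap(0, bytes, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANON,
        VM_TAG_FOR_REGISTERFILE_MEMORY, 0);
    return base == MAP_FAILED ? 0 : base;
}
#endif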
#ifndef ValueCheck_h
#define ValueCheck_h
-// For malloc_size and _msize.
-#if OS(DARWIN)
-#include <malloc/malloc.h>
-#elif COMPILER(MSVC)
-#include <malloc.h>
-#endif
+#include <wtf/FastMalloc.h>
namespace WTF {
{
if (!p)
return;
-#if (defined(USE_SYSTEM_MALLOC) && USE_SYSTEM_MALLOC) || !defined(NDEBUG)
-#if OS(DARWIN)
- ASSERT(malloc_size(p));
-#elif COMPILER(MSVC)
- ASSERT(_msize(const_cast<P*>(p)));
-#endif
-#endif
+ ASSERT(fastMallocSize(p));
ValueCheck<P>::checkConsistency(*p);
}
};
m_buffer = static_cast<T*>(fastMalloc(newCapacity * sizeof(T)));
}
+ bool tryAllocateBuffer(size_t newCapacity)
+ {
+ if (newCapacity > std::numeric_limits<size_t>::max() / sizeof(T))
+ return false;
+
+ T* newBuffer;
+ if (tryFastMalloc(newCapacity * sizeof(T)).getValue(newBuffer)) {
+ m_capacity = newCapacity;
+ m_buffer = newBuffer;
+ return true;
+ }
+ return false;
+ }
+
void deallocateBuffer(T* bufferToDeallocate)
{
if (m_buffer == bufferToDeallocate) {
void restoreInlineBufferIfNeeded() { }
using Base::allocateBuffer;
+ using Base::tryAllocateBuffer;
using Base::deallocateBuffer;
using Base::buffer;
}
}
+ bool tryAllocateBuffer(size_t newCapacity)
+ {
+ if (newCapacity > inlineCapacity)
+ return Base::tryAllocateBuffer(newCapacity);
+ m_buffer = inlineBuffer();
+ m_capacity = inlineCapacity;
+ return true;
+ }
+
void deallocateBuffer(T* bufferToDeallocate)
{
if (bufferToDeallocate == inlineBuffer())
void grow(size_t size);
void resize(size_t size);
void reserveCapacity(size_t newCapacity);
+ bool tryReserveCapacity(size_t newCapacity);
void reserveInitialCapacity(size_t initialCapacity);
void shrinkCapacity(size_t newCapacity);
void shrinkToFit() { shrinkCapacity(size()); }
template<typename U> void append(const U&);
template<typename U> void uncheckedAppend(const U& val);
template<size_t otherCapacity> void append(const Vector<T, otherCapacity>&);
+ template<typename U> bool tryAppend(const U*, size_t);
template<typename U> void insert(size_t position, const U*, size_t);
template<typename U> void insert(size_t position, const U&);
private:
void expandCapacity(size_t newMinCapacity);
const T* expandCapacity(size_t newMinCapacity, const T*);
+ bool tryExpandCapacity(size_t newMinCapacity);
+ const T* tryExpandCapacity(size_t newMinCapacity, const T*);
template<typename U> U* expandCapacity(size_t newMinCapacity, U*);
size_t m_size;
return begin() + index;
}
+ template<typename T, size_t inlineCapacity>
+ bool Vector<T, inlineCapacity>::tryExpandCapacity(size_t newMinCapacity)
+ {
+ return tryReserveCapacity(max(newMinCapacity, max(static_cast<size_t>(16), capacity() + capacity() / 4 + 1)));
+ }
+
+ template<typename T, size_t inlineCapacity>
+ const T* Vector<T, inlineCapacity>::tryExpandCapacity(size_t newMinCapacity, const T* ptr)
+ {
+ if (ptr < begin() || ptr >= end()) {
+ if (!tryExpandCapacity(newMinCapacity))
+ return 0;
+ return ptr;
+ }
+ size_t index = ptr - begin();
+ if (!tryExpandCapacity(newMinCapacity))
+ return 0;
+ return begin() + index;
+ }
+
template<typename T, size_t inlineCapacity> template<typename U>
inline U* Vector<T, inlineCapacity>::expandCapacity(size_t newMinCapacity, U* ptr)
{
m_buffer.deallocateBuffer(oldBuffer);
}
+ template<typename T, size_t inlineCapacity>
+ bool Vector<T, inlineCapacity>::tryReserveCapacity(size_t newCapacity)
+ {
+ if (newCapacity <= capacity())
+ return true;
+ T* oldBuffer = begin();
+ T* oldEnd = end();
+ if (!m_buffer.tryAllocateBuffer(newCapacity))
+ return false;
+ ASSERT(begin());
+ TypeOperations::move(oldBuffer, oldEnd, begin());
+ m_buffer.deallocateBuffer(oldBuffer);
+ return true;
+ }
+
template<typename T, size_t inlineCapacity>
inline void Vector<T, inlineCapacity>::reserveInitialCapacity(size_t initialCapacity)
{
m_size = newSize;
}
+ template<typename T, size_t inlineCapacity> template<typename U>
+ bool Vector<T, inlineCapacity>::tryAppend(const U* data, size_t dataSize)
+ {
+ size_t newSize = m_size + dataSize;
+ if (newSize > capacity()) {
+ data = tryExpandCapacity(newSize, data);
+ if (!data)
+ return false;
+ ASSERT(begin());
+ }
+ if (newSize < m_size)
+ return false;
+ T* dest = end();
+ for (size_t i = 0; i < dataSize; ++i)
+ new (&dest[i]) T(data[i]);
+ m_size = newSize;
+ return true;
+ }
+
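A brief illustration of how the new try* variants differ from append() and reserveCapacity(), which crash on allocation failure; the function name below is hypothetical.

static bool appendBytes(Vector<char>& destination, const char* bytes, size_t length)
{
    // tryReserveCapacity() leaves the vector untouched and returns false if the
    // allocation fails, instead of aborting the way reserveCapacity() does.
    if (!destination.tryReserveCapacity(destination.size() + length))
        return false;
    // With capacity already reserved this cannot fail, but the return value is
    // checked anyway to show the intended calling pattern.
    return destination.tryAppend(bytes, length);
}
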
template<typename T, size_t inlineCapacity> template<typename U>
ALWAYS_INLINE void Vector<T, inlineCapacity>::append(const U& val)
{
return;
}
-#if COMPILER(MSVC7)
+#if COMPILER(MSVC7_OR_LOWER)
// FIXME: MSVC7 generates compilation errors when trying to assign
// a pointer to a Vector of its base class (i.e. can't downcast). So far
// I've been unable to determine any logical reason for this, so I can
--- /dev/null
+/*
+ * Copyright (C) 2010 Google Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef Vector3_h
+#define Vector3_h
+
+#include <math.h>
+
+namespace WebCore {
+
+class Vector3 {
+public:
+ Vector3()
+ : m_x(0.0)
+ , m_y(0.0)
+ , m_z(0.0)
+ {
+ }
+
+ Vector3(double x, double y, double z)
+ : m_x(x)
+ , m_y(y)
+ , m_z(z)
+ {
+ }
+
+ Vector3(const float p[3])
+ : m_x(p[0])
+ , m_y(p[1])
+ , m_z(p[2])
+ {
+ }
+
+ Vector3(const double p[3])
+ : m_x(p[0])
+ , m_y(p[1])
+ , m_z(p[2])
+ {
+ }
+
+ double abs() const
+ {
+ return sqrt(m_x * m_x + m_y * m_y + m_z * m_z);
+ }
+
+ bool isZero() const
+ {
+ return !m_x && !m_y && !m_z;
+ }
+
+ void normalize()
+ {
+ double absValue = abs();
+ if (!absValue)
+ return;
+
+ double k = 1.0 / absValue;
+ m_x *= k;
+ m_y *= k;
+ m_z *= k;
+ }
+
+ double x() const { return m_x; }
+ double y() const { return m_y; }
+ double z() const { return m_z; }
+
+private:
+ double m_x;
+ double m_y;
+ double m_z;
+};
+
+inline Vector3 operator+(const Vector3& v1, const Vector3& v2)
+{
+ return Vector3(v1.x() + v2.x(), v1.y() + v2.y(), v1.z() + v2.z());
+}
+
+inline Vector3 operator-(const Vector3& v1, const Vector3& v2)
+{
+ return Vector3(v1.x() - v2.x(), v1.y() - v2.y(), v1.z() - v2.z());
+}
+
+inline Vector3 operator*(double k, const Vector3& v)
+{
+ return Vector3(k * v.x(), k * v.y(), k * v.z());
+}
+
+inline Vector3 operator*(const Vector3& v, double k)
+{
+ return Vector3(k * v.x(), k * v.y(), k * v.z());
+}
+
+inline double dot(const Vector3& v1, const Vector3& v2)
+{
+ return v1.x() * v2.x() + v1.y() * v2.y() + v1.z() * v2.z();
+}
+
+inline Vector3 cross(const Vector3& v1, const Vector3& v2)
+{
+ double x3 = v1.y() * v2.z() - v1.z() * v2.y();
+ double y3 = v1.z() * v2.x() - v1.x() * v2.z();
+ double z3 = v1.x() * v2.y() - v1.y() * v2.x();
+ return Vector3(x3, y3, z3);
+}
+
+inline double distance(const Vector3& v1, const Vector3& v2)
+{
+ return (v1 - v2).abs();
+}
+
+} // WebCore
+
+#endif // Vector3_h
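
A short usage sketch of the operations above: computing a unit surface normal from three points. It assumes Vector3.h is included at the call site; the function name is illustrative.

static WebCore::Vector3 unitNormal(const WebCore::Vector3& a, const WebCore::Vector3& b, const WebCore::Vector3& c)
{
    // cross() and the subtraction operator are found by argument-dependent lookup.
    WebCore::Vector3 normal = cross(b - a, c - a);
    normal.normalize(); // leaves the zero vector untouched if the points are collinear
    return normal;
}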
template<typename P>
struct VectorTraits<OwnPtr<P> > : SimpleClassVectorTraits { };
- template<typename P>
- struct VectorTraits<std::auto_ptr<P> > : SimpleClassVectorTraits { };
-
template<typename First, typename Second>
struct VectorTraits<pair<First, Second> >
{
--- /dev/null
+/*
+ * Copyright (C) 2008, 2010 Apple Inc. All Rights Reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#include "config.h"
+#include "WTFThreadData.h"
+
+namespace WTF {
+
+#if WTFTHREADDATA_MULTITHREADED
+ThreadSpecific<WTFThreadData>* WTFThreadData::staticData;
+#else
+WTFThreadData* WTFThreadData::staticData;
+#endif
+
+WTFThreadData::WTFThreadData()
+ : m_atomicStringTable(0)
+ , m_atomicStringTableDestructor(0)
+{
+ static JSC::IdentifierTable* sharedIdentifierTable = new JSC::IdentifierTable();
+ if (pthread_main_np() || isWebThread())
+ m_defaultIdentifierTable = sharedIdentifierTable;
+ else
+ m_defaultIdentifierTable = new JSC::IdentifierTable();
+
+ m_currentIdentifierTable = m_defaultIdentifierTable;
+}
+
+WTFThreadData::~WTFThreadData()
+{
+ if (m_atomicStringTableDestructor)
+ m_atomicStringTableDestructor(m_atomicStringTable);
+ delete m_defaultIdentifierTable;
+}
+
+} // namespace WTF
--- /dev/null
+/*
+ * Copyright (C) 2008 Apple Inc. All Rights Reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef WTFThreadData_h
+#define WTFThreadData_h
+
+#include <wtf/HashMap.h>
+#include <wtf/HashSet.h>
+#include <wtf/Noncopyable.h>
+#include <wtf/text/StringHash.h>
+
+// This was ENABLE(WORKERS) in WebCore, but that macro is not defined when compiling JSC.
+// The check was not correct anyway; see this earlier comment:
+// // FIXME: Workers are not necessarily the only feature that makes per-thread global data necessary.
+// // We need to check for e.g. database objects manipulating strings on secondary threads.
+// Always enabling this is safe, and is the better option until we can come up
+// with a more precise define.
+#define WTFTHREADDATA_MULTITHREADED 1
+
+#if WTFTHREADDATA_MULTITHREADED
+#include <wtf/ThreadSpecific.h>
+#include <wtf/Threading.h>
+#endif
+
+// FIXME: This is a temporary layering violation while we move more string code to WTF.
+namespace WebCore {
+class AtomicStringTable;
+class StringImpl;
+}
+using WebCore::StringImpl;
+
+typedef void (*AtomicStringTableDestructor)(WebCore::AtomicStringTable*);
+
+#if USE(JSC)
+// FIXME: This is a temporary layering violation while we move more string code to WTF.
+namespace JSC {
+
+typedef HashMap<const char*, RefPtr<StringImpl>, PtrHash<const char*> > LiteralIdentifierTable;
+
+class IdentifierTable : public FastAllocBase {
+public:
+ ~IdentifierTable();
+
+ std::pair<HashSet<StringImpl*>::iterator, bool> add(StringImpl* value);
+ template<typename U, typename V>
+ std::pair<HashSet<StringImpl*>::iterator, bool> add(U value);
+
+ void remove(StringImpl* r) { m_table.remove(r); }
+
+ LiteralIdentifierTable& literalTable() { return m_literalTable; }
+
+private:
+ HashSet<StringImpl*> m_table;
+ LiteralIdentifierTable m_literalTable;
+};
+
+}
+#endif
+
+namespace WTF {
+
+class WTFThreadData : public Noncopyable {
+public:
+ WTFThreadData();
+ ~WTFThreadData();
+
+ WebCore::AtomicStringTable* atomicStringTable()
+ {
+ return m_atomicStringTable;
+ }
+
+#if USE(JSC)
+
+ JSC::IdentifierTable* currentIdentifierTable()
+ {
+ return m_currentIdentifierTable;
+ }
+
+ JSC::IdentifierTable* setCurrentIdentifierTable(JSC::IdentifierTable* identifierTable)
+ {
+ JSC::IdentifierTable* oldIdentifierTable = m_currentIdentifierTable;
+ m_currentIdentifierTable = identifierTable;
+ return oldIdentifierTable;
+ }
+
+ void resetCurrentIdentifierTable()
+ {
+ m_currentIdentifierTable = m_defaultIdentifierTable;
+ }
+#endif
+
+private:
+ WebCore::AtomicStringTable* m_atomicStringTable;
+ AtomicStringTableDestructor m_atomicStringTableDestructor;
+
+#if USE(JSC)
+ JSC::IdentifierTable* m_defaultIdentifierTable;
+ JSC::IdentifierTable* m_currentIdentifierTable;
+#endif
+
+#if WTFTHREADDATA_MULTITHREADED
+ static JS_EXPORTDATA ThreadSpecific<WTFThreadData>* staticData;
+#else
+ static JS_EXPORTDATA WTFThreadData* staticData;
+#endif
+ friend WTFThreadData& wtfThreadData();
+ friend class WebCore::AtomicStringTable;
+};
+
+inline WTFThreadData& wtfThreadData()
+{
+#if WTFTHREADDATA_MULTITHREADED
+    // In WebCore, WTFThreadData is used on the main thread before it could
+    // possibly be used on secondary ones, so there is no need for
+    // synchronization here.
+    // In JavaScriptCore, wtfThreadData() is first called from
+    // initializeThreading(), so the initial call is made inside a
+    // pthread_once-protected context.
+ if (!WTFThreadData::staticData)
+ WTFThreadData::staticData = new ThreadSpecific<WTFThreadData>;
+ return **WTFThreadData::staticData;
+#else
+ if (!WTFThreadData::staticData) {
+ WTFThreadData::staticData = static_cast<WTFThreadData*>(fastMalloc(sizeof(WTFThreadData)));
+ // WTFThreadData constructor indirectly uses staticData, so we need to set up the memory before invoking it.
+ new (WTFThreadData::staticData) WTFThreadData;
+ }
+ return *WTFThreadData::staticData;
+#endif
+}
+
+} // namespace WTF
+
+using WTF::WTFThreadData;
+using WTF::wtfThreadData;
+
+#endif // WTFThreadData_h
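
A sketch of the save/restore idiom the setter above is designed for in a USE(JSC) build: setCurrentIdentifierTable() returns the previously active table so a caller can restore it on scope exit. The class name is hypothetical.

#if USE(JSC)
class CurrentIdentifierTableScope {
public:
    explicit CurrentIdentifierTableScope(JSC::IdentifierTable* table)
        : m_previousTable(wtfThreadData().setCurrentIdentifierTable(table))
    {
    }

    ~CurrentIdentifierTableScope()
    {
        // Restore whichever table was active when the scope was entered.
        wtfThreadData().setCurrentIdentifierTable(m_previousTable);
    }

private:
    JSC::IdentifierTable* m_previousTable;
};
#endif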
--- /dev/null
+/*
+ * Copyright (C) 2009 Company 100, Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "MainThread.h"
+
+namespace WTF {
+
+void initializeMainThreadPlatform()
+{
+ // not implemented
+}
+
+void scheduleDispatchFunctionsOnMainThread()
+{
+ // not implemented
+}
+
+} // namespace WTF
+
--- /dev/null
+/*
+ * Copyright (C) 2010 Company 100 Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "OwnPtr.h"
+
+#include <AEEBitmap.h>
+#include <AEEFile.h>
+#include <AEEStdLib.h>
+
+namespace WTF {
+
+void deleteOwnedPtr(IFileMgr* ptr)
+{
+ if (ptr)
+ IFILEMGR_Release(ptr);
+}
+
+void deleteOwnedPtr(IFile* ptr)
+{
+ if (ptr)
+ IFILE_Release(ptr);
+}
+
+void deleteOwnedPtr(IBitmap* ptr)
+{
+ if (ptr)
+ IBitmap_Release(ptr);
+}
+
+}
--- /dev/null
+/*
+ * Copyright (C) 2010 Company 100 Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef ShellBrew_h
+#define ShellBrew_h
+
+#include <AEEAppGen.h>
+#include <AEEStdLib.h>
+
+#include <wtf/Assertions.h>
+#include <wtf/PassOwnPtr.h>
+
+namespace WTF {
+
+template <typename T>
+static inline PassOwnPtr<T> createInstance(AEECLSID cls)
+{
+ T* instance = 0;
+
+ IShell* shell = reinterpret_cast<AEEApplet*>(GETAPPINSTANCE())->m_pIShell;
+ ISHELL_CreateInstance(shell, cls, reinterpret_cast<void**>(&instance));
+ ASSERT(instance);
+
+ return instance;
+}
+
+} // namespace WTF
+
+using WTF::createInstance;
+
+#endif // ShellBrew_h
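
A possible call site for the helper above, assuming the standard BREW class ID AEECLSID_FILEMGR and the OwnPtr overloads added in OwnPtrBrew.cpp; the function is illustrative only.

#include <AEEFile.h>
#include <wtf/OwnPtr.h>

static void exampleUseOfFileManager()
{
    // createInstance() wraps ISHELL_CreateInstance() and transfers ownership;
    // OwnPtr releases the interface through the deleteOwnedPtr() overloads in
    // OwnPtrBrew.cpp when it goes out of scope.
    OwnPtr<IFileMgr> fileManager = createInstance<IFileMgr>(AEECLSID_FILEMGR);
    if (!fileManager)
        return;
    // ... use the IFileMgr interface through fileManager.get() ...
}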
--- /dev/null
+/*
+ * Copyright (C) 2010 Company 100, Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef SystemMallocBrew_h
+#define SystemMallocBrew_h
+
+#include <AEEStdLib.h>
+
+static inline void* mallocBrew(size_t n)
+{
+ // By default, memory allocated using MALLOC() is initialized
+ // to zero. This behavior can be disabled by performing a bitwise
+ // OR of the flag ALLOC_NO_ZMEM with the dwSize parameter.
+ return MALLOC(n | ALLOC_NO_ZMEM);
+}
+
+static inline void* callocBrew(size_t numElements, size_t elementSize)
+{
+ return MALLOC(numElements * elementSize);
+}
+
+static inline void freeBrew(void* p)
+{
+ return FREE(p);
+}
+
+static inline void* reallocBrew(void* p, size_t n)
+{
+ return REALLOC(p, n | ALLOC_NO_ZMEM);
+}
+
+// Use the MALLOC macro instead of the standard malloc function.
+// Although RVCT provides malloc, we can't use it in BREW
+// because the loader does not initialize the base address properly.
+#define malloc(n) mallocBrew(n)
+#define calloc(n, s) callocBrew(n, s)
+#define realloc(p, n) reallocBrew(p, n)
+#define free(p) freeBrew(p)
+
+#endif // SystemMallocBrew_h
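
For illustration, after these #defines an ordinary-looking allocation routes through the BREW allocator; the function and size below are arbitrary.

static char* allocateScratchBuffer()
{
    // With the #defines above this expands to mallocBrew(64), i.e.
    // MALLOC(64 | ALLOC_NO_ZMEM), so the memory is uninitialized just like a
    // standard malloc() call.
    return static_cast<char*>(malloc(64));
}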
// An interface to the embedding layer, which provides threading support.
class ChromiumThreading {
public:
- static void initializeMainThread();
- static void scheduleDispatchFunctionsOnMainThread();
+ static void callOnMainThread(void (*func)(void*), void* context);
};
} // namespace WTF
#include "config.h"
#include "MainThread.h"
+#include "Assertions.h"
#include "ChromiumThreading.h"
+#include "Threading.h"
namespace WTF {
-void initializeMainThreadPlatform()
+static ThreadIdentifier mainThreadIdentifier;
+
+void initializeMainThread()
+{
+ static bool initializedMainThread;
+ if (initializedMainThread)
+ return;
+ initializedMainThread = true;
+
+ mainThreadIdentifier = currentThread();
+}
+
+void callOnMainThread(MainThreadFunction* function, void* context)
+{
+ ChromiumThreading::callOnMainThread(function, context);
+}
+
+void callOnMainThreadAndWait(MainThreadFunction*, void*)
+{
+ ASSERT_NOT_REACHED();
+}
+
+void setMainThreadCallbacksPaused(bool)
{
- ChromiumThreading::initializeMainThread();
+ ASSERT_NOT_REACHED();
}
-void scheduleDispatchFunctionsOnMainThread()
+bool isMainThread()
{
- ChromiumThreading::scheduleDispatchFunctionsOnMainThread();
+ return currentThread() == mainThreadIdentifier;
}
} // namespace WTF
#endif
#define INFNAN_CHECK
+#define No_Hex_NaN
#if defined(IEEE_8087) + defined(IEEE_MC68k) + defined(IEEE_ARM) != 1
Exactly one of IEEE_8087, IEEE_ARM or IEEE_MC68k should be defined.
--- /dev/null
+/*
+ * Copyright (C) 2007, 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2007 Justin Haygood (jhaygood@reaktix.com)
+ * Copyright (C) 2008 Diego Gonzalez
+ * Copyright (C) 2008 Kenneth Rohde Christiansen
+ * Copyright (C) 2009-2010 ProFUSION embedded systems
+ * Copyright (C) 2009-2010 Samsung Electronics
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "MainThread.h"
+
+#include <Ecore.h>
+
+namespace WTF {
+
+void initializeMainThreadPlatform()
+{
+}
+
+static int timeoutFired(void*)
+{
+ dispatchFunctionsFromMainThread();
+ return ECORE_CALLBACK_CANCEL;
+}
+
+void scheduleDispatchFunctionsOnMainThread()
+{
+ ecore_timer_add(0, timeoutFired, 0);
+}
+
+} // namespace WTF
--- /dev/null
+/*
+ * Copyright (C) 2008 Collabora Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "config.h"
+#include "GOwnPtr.h"
+
+#include <gio/gio.h>
+#include <glib.h>
+
+namespace WTF {
+
+template <> void freeOwnedGPtr<GError>(GError* ptr)
+{
+ if (ptr)
+ g_error_free(ptr);
+}
+
+template <> void freeOwnedGPtr<GList>(GList* ptr)
+{
+ g_list_free(ptr);
+}
+
+template <> void freeOwnedGPtr<GCond>(GCond* ptr)
+{
+ if (ptr)
+ g_cond_free(ptr);
+}
+
+template <> void freeOwnedGPtr<GMutex>(GMutex* ptr)
+{
+ if (ptr)
+ g_mutex_free(ptr);
+}
+
+template <> void freeOwnedGPtr<GPatternSpec>(GPatternSpec* ptr)
+{
+ if (ptr)
+ g_pattern_spec_free(ptr);
+}
+
+template <> void freeOwnedGPtr<GDir>(GDir* ptr)
+{
+ if (ptr)
+ g_dir_close(ptr);
+}
+
+template <> void freeOwnedGPtr<GFile>(GFile* ptr)
+{
+ if (ptr)
+ g_object_unref(ptr);
+}
+} // namespace WTF
--- /dev/null
+/*
+ * Copyright (C) 2006, 2007 Apple Inc. All rights reserved.
+ * Copyright (C) 2008 Collabora Ltd.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef GOwnPtr_h
+#define GOwnPtr_h
+
+#include <algorithm>
+#include <wtf/Assertions.h>
+#include <wtf/Noncopyable.h>
+
+// Forward declarations at this point avoid the need to include GLib headers
+// in WTF headers.
+typedef struct _GError GError;
+typedef struct _GList GList;
+typedef struct _GCond GCond;
+typedef struct _GMutex GMutex;
+typedef struct _GPatternSpec GPatternSpec;
+typedef struct _GDir GDir;
+typedef struct _GHashTable GHashTable;
+typedef struct _GFile GFile;
+extern "C" void g_free(void*);
+
+namespace WTF {
+
+template <typename T> inline void freeOwnedGPtr(T* ptr);
+template<> void freeOwnedGPtr<GError>(GError*);
+template<> void freeOwnedGPtr<GList>(GList*);
+template<> void freeOwnedGPtr<GCond>(GCond*);
+template<> void freeOwnedGPtr<GMutex>(GMutex*);
+template<> void freeOwnedGPtr<GPatternSpec>(GPatternSpec*);
+template<> void freeOwnedGPtr<GDir>(GDir*);
+template<> void freeOwnedGPtr<GHashTable>(GHashTable*);
+template<> void freeOwnedGPtr<GFile>(GFile*);
+
+template <typename T> class GOwnPtr : public Noncopyable {
+public:
+ explicit GOwnPtr(T* ptr = 0) : m_ptr(ptr) { }
+ ~GOwnPtr() { freeOwnedGPtr(m_ptr); }
+
+ T* get() const { return m_ptr; }
+ T* release()
+ {
+ T* ptr = m_ptr;
+ m_ptr = 0;
+ return ptr;
+ }
+
+ T*& outPtr()
+ {
+ ASSERT(!m_ptr);
+ return m_ptr;
+ }
+
+ void set(T* ptr)
+ {
+ ASSERT(!ptr || m_ptr != ptr);
+ freeOwnedGPtr(m_ptr);
+ m_ptr = ptr;
+ }
+
+ void clear()
+ {
+ freeOwnedGPtr(m_ptr);
+ m_ptr = 0;
+ }
+
+ T& operator*() const
+ {
+ ASSERT(m_ptr);
+ return *m_ptr;
+ }
+
+ T* operator->() const
+ {
+ ASSERT(m_ptr);
+ return m_ptr;
+ }
+
+ bool operator!() const { return !m_ptr; }
+
+ // This conversion operator allows implicit conversion to bool but not to other integer types.
+ typedef T* GOwnPtr::*UnspecifiedBoolType;
+ operator UnspecifiedBoolType() const { return m_ptr ? &GOwnPtr::m_ptr : 0; }
+
+ void swap(GOwnPtr& o) { std::swap(m_ptr, o.m_ptr); }
+
+private:
+ T* m_ptr;
+};
+
+template <typename T> inline void swap(GOwnPtr<T>& a, GOwnPtr<T>& b)
+{
+ a.swap(b);
+}
+
+template <typename T, typename U> inline bool operator==(const GOwnPtr<T>& a, U* b)
+{
+ return a.get() == b;
+}
+
+template <typename T, typename U> inline bool operator==(T* a, const GOwnPtr<U>& b)
+{
+ return a == b.get();
+}
+
+template <typename T, typename U> inline bool operator!=(const GOwnPtr<T>& a, U* b)
+{
+ return a.get() != b;
+}
+
+template <typename T, typename U> inline bool operator!=(T* a, const GOwnPtr<U>& b)
+{
+ return a != b.get();
+}
+
+template <typename T> inline typename GOwnPtr<T>::PtrType getPtr(const GOwnPtr<T>& p)
+{
+ return p.get();
+}
+
+template <typename T> inline void freeOwnedGPtr(T* ptr)
+{
+ g_free(ptr);
+}
+
+} // namespace WTF
+
+using WTF::GOwnPtr;
+
+#endif // GOwnPtr_h
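
Typical usage of GOwnPtr with a GLib out parameter, assuming <glib.h> is available at the call site; the function name is illustrative.

#include <glib.h>

static bool readFileContents(const char* path, GOwnPtr<char>& contents)
{
    GOwnPtr<GError> error;
    gsize length = 0;
    // outPtr() asserts the pointer is still null and hands GLib a location to
    // fill in; both pointers are freed automatically when the GOwnPtrs die.
    if (!g_file_get_contents(path, &contents.outPtr(), &length, &error.outPtr()))
        return false; // error.get() describes the failure
    return true;
}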
--- /dev/null
+/*
+ * Copyright (C) 2009 Martin Robinson
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "config.h"
+#include "GRefPtr.h"
+
+#include <glib.h>
+
+namespace WTF {
+
+template <> GHashTable* refGPtr(GHashTable* ptr)
+{
+ if (ptr)
+ g_hash_table_ref(ptr);
+ return ptr;
+}
+
+template <> void derefGPtr(GHashTable* ptr)
+{
+ g_hash_table_unref(ptr);
+}
+
+} // namespace WTF
--- /dev/null
+/*
+ * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2008 Collabora Ltd.
+ * Copyright (C) 2009 Martin Robinson
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef WTF_GRefPtr_h
+#define WTF_GRefPtr_h
+
+#include "AlwaysInline.h"
+#include <algorithm>
+
+typedef struct _GHashTable GHashTable;
+typedef void* gpointer;
+extern "C" void g_object_unref(gpointer object);
+extern "C" gpointer g_object_ref_sink(gpointer object);
+
+namespace WTF {
+
+enum GRefPtrAdoptType { GRefPtrAdopt };
+template <typename T> inline T* refGPtr(T*);
+template <typename T> inline void derefGPtr(T*);
+template <typename T> class GRefPtr;
+template <typename T> GRefPtr<T> adoptGRef(T*);
+template <> GHashTable* refGPtr(GHashTable* ptr);
+template <> void derefGPtr(GHashTable* ptr);
+
+template <typename T> class GRefPtr {
+public:
+ GRefPtr() : m_ptr(0) { }
+ GRefPtr(T* ptr) : m_ptr(ptr) { if (ptr) refGPtr(ptr); }
+ GRefPtr(const GRefPtr& o) : m_ptr(o.m_ptr) { if (T* ptr = m_ptr) refGPtr(ptr); }
+ template <typename U> GRefPtr(const GRefPtr<U>& o) : m_ptr(o.get()) { if (T* ptr = m_ptr) refGPtr(ptr); }
+
+ ~GRefPtr() { if (T* ptr = m_ptr) derefGPtr(ptr); }
+
+ void clear()
+ {
+ if (T* ptr = m_ptr)
+ derefGPtr(ptr);
+ m_ptr = 0;
+ }
+
+ T* get() const { return m_ptr; }
+ T& operator*() const { return *m_ptr; }
+ ALWAYS_INLINE T* operator->() const { return m_ptr; }
+
+ bool operator!() const { return !m_ptr; }
+
+ // This conversion operator allows implicit conversion to bool but not to other integer types.
+ typedef T* GRefPtr::*UnspecifiedBoolType;
+ operator UnspecifiedBoolType() const { return m_ptr ? &GRefPtr::m_ptr : 0; }
+
+ GRefPtr& operator=(const GRefPtr&);
+ GRefPtr& operator=(T*);
+ template <typename U> GRefPtr& operator=(const GRefPtr<U>&);
+
+ void swap(GRefPtr&);
+ friend GRefPtr adoptGRef<T>(T*);
+
+private:
+ static T* hashTableDeletedValue() { return reinterpret_cast<T*>(-1); }
+ // Adopting constructor.
+ GRefPtr(T* ptr, GRefPtrAdoptType) : m_ptr(ptr) {}
+
+ T* m_ptr;
+};
+
+template <typename T> inline GRefPtr<T>& GRefPtr<T>::operator=(const GRefPtr<T>& o)
+{
+ T* optr = o.get();
+ if (optr)
+ refGPtr(optr);
+ T* ptr = m_ptr;
+ m_ptr = optr;
+ if (ptr)
+ derefGPtr(ptr);
+ return *this;
+}
+
+template <typename T> inline GRefPtr<T>& GRefPtr<T>::operator=(T* optr)
+{
+ T* ptr = m_ptr;
+ if (optr)
+ refGPtr(optr);
+ m_ptr = optr;
+ if (ptr)
+ derefGPtr(ptr);
+ return *this;
+}
+
+template <class T> inline void GRefPtr<T>::swap(GRefPtr<T>& o)
+{
+ std::swap(m_ptr, o.m_ptr);
+}
+
+template <class T> inline void swap(GRefPtr<T>& a, GRefPtr<T>& b)
+{
+ a.swap(b);
+}
+
+template <typename T, typename U> inline bool operator==(const GRefPtr<T>& a, const GRefPtr<U>& b)
+{
+ return a.get() == b.get();
+}
+
+template <typename T, typename U> inline bool operator==(const GRefPtr<T>& a, U* b)
+{
+ return a.get() == b;
+}
+
+template <typename T, typename U> inline bool operator==(T* a, const GRefPtr<U>& b)
+{
+ return a == b.get();
+}
+
+template <typename T, typename U> inline bool operator!=(const GRefPtr<T>& a, const GRefPtr<U>& b)
+{
+ return a.get() != b.get();
+}
+
+template <typename T, typename U> inline bool operator!=(const GRefPtr<T>& a, U* b)
+{
+ return a.get() != b;
+}
+
+template <typename T, typename U> inline bool operator!=(T* a, const GRefPtr<U>& b)
+{
+ return a != b.get();
+}
+
+template <typename T, typename U> inline GRefPtr<T> static_pointer_cast(const GRefPtr<U>& p)
+{
+ return GRefPtr<T>(static_cast<T*>(p.get()));
+}
+
+template <typename T, typename U> inline GRefPtr<T> const_pointer_cast(const GRefPtr<U>& p)
+{
+ return GRefPtr<T>(const_cast<T*>(p.get()));
+}
+
+template <typename T> inline T* getPtr(const GRefPtr<T>& p)
+{
+ return p.get();
+}
+
+template <typename T> GRefPtr<T> adoptGRef(T* p)
+{
+ return GRefPtr<T>(p, GRefPtrAdopt);
+}
+
+template <typename T> inline T* refGPtr(T* ptr)
+{
+ if (ptr)
+ g_object_ref_sink(ptr);
+ return ptr;
+}
+
+template <typename T> inline void derefGPtr(T* ptr)
+{
+ if (ptr)
+ g_object_unref(ptr);
+}
+
+} // namespace WTF
+
+using WTF::GRefPtr;
+using WTF::refGPtr;
+using WTF::derefGPtr;
+using WTF::adoptGRef;
+using WTF::static_pointer_cast;
+using WTF::const_pointer_cast;
+
+#endif // WTF_GRefPtr_h
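
Typical usage of GRefPtr with adoptGRef(), assuming <glib.h>: adoptGRef() takes ownership of the reference returned by g_hash_table_new() without adding another, whereas plain assignment would ref it again. The function name is illustrative.

#include <glib.h>

static GRefPtr<GHashTable> createExampleTable()
{
    // adoptGRef() uses the adopting constructor, so the table's single
    // reference is owned by the GRefPtr; later copies and assignments go
    // through refGPtr()/derefGPtr(), i.e. g_hash_table_ref()/g_hash_table_unref().
    return adoptGRef(g_hash_table_new(g_str_hash, g_str_equal));
}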
+++ /dev/null
-/*
- * Copyright (C) 2008 Collabora Ltd.
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- */
-
-#include "config.h"
-#include "GOwnPtr.h"
-
-#include <glib.h>
-
-namespace WTF {
-
-template <> void freeOwnedGPtr<GError>(GError* ptr)
-{
- if (ptr)
- g_error_free(ptr);
-}
-
-template <> void freeOwnedGPtr<GList>(GList* ptr)
-{
- g_list_free(ptr);
-}
-
-template <> void freeOwnedGPtr<GCond>(GCond* ptr)
-{
- if (ptr)
- g_cond_free(ptr);
-}
-
-template <> void freeOwnedGPtr<GMutex>(GMutex* ptr)
-{
- if (ptr)
- g_mutex_free(ptr);
-}
-
-template <> void freeOwnedGPtr<GPatternSpec>(GPatternSpec* ptr)
-{
- if (ptr)
- g_pattern_spec_free(ptr);
-}
-
-template <> void freeOwnedGPtr<GDir>(GDir* ptr)
-{
- if (ptr)
- g_dir_close(ptr);
-}
-} // namespace WTF
+++ /dev/null
-/*
- * Copyright (C) 2006, 2007 Apple Inc. All rights reserved.
- * Copyright (C) 2008 Collabora Ltd.
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public License
- * along with this library; see the file COPYING.LIB. If not, write to
- * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
- * Boston, MA 02110-1301, USA.
- *
- */
-
-#ifndef GOwnPtr_h
-#define GOwnPtr_h
-
-#include <algorithm>
-#include <wtf/Assertions.h>
-#include <wtf/Noncopyable.h>
-
-// Forward delcarations at this point avoid the need to include GLib includes
-// in WTF headers.
-typedef struct _GError GError;
-typedef struct _GList GList;
-typedef struct _GCond GCond;
-typedef struct _GMutex GMutex;
-typedef struct _GPatternSpec GPatternSpec;
-typedef struct _GDir GDir;
-typedef struct _GHashTable GHashTable;
-extern "C" void g_free(void*);
-
-namespace WTF {
-
-template <typename T> inline void freeOwnedGPtr(T* ptr);
-template<> void freeOwnedGPtr<GError>(GError*);
-template<> void freeOwnedGPtr<GList>(GList*);
-template<> void freeOwnedGPtr<GCond>(GCond*);
-template<> void freeOwnedGPtr<GMutex>(GMutex*);
-template<> void freeOwnedGPtr<GPatternSpec>(GPatternSpec*);
-template<> void freeOwnedGPtr<GDir>(GDir*);
-template<> void freeOwnedGPtr<GHashTable>(GHashTable*);
-
-template <typename T> class GOwnPtr : public Noncopyable {
-public:
- explicit GOwnPtr(T* ptr = 0) : m_ptr(ptr) { }
- ~GOwnPtr() { freeOwnedGPtr(m_ptr); }
-
- T* get() const { return m_ptr; }
- T* release()
- {
- T* ptr = m_ptr;
- m_ptr = 0;
- return ptr;
- }
-
- T*& outPtr()
- {
- ASSERT(!m_ptr);
- return m_ptr;
- }
-
- void set(T* ptr)
- {
- ASSERT(!ptr || m_ptr != ptr);
- freeOwnedGPtr(m_ptr);
- m_ptr = ptr;
- }
-
- void clear()
- {
- freeOwnedGPtr(m_ptr);
- m_ptr = 0;
- }
-
- T& operator*() const
- {
- ASSERT(m_ptr);
- return *m_ptr;
- }
-
- T* operator->() const
- {
- ASSERT(m_ptr);
- return m_ptr;
- }
-
- bool operator!() const { return !m_ptr; }
-
- // This conversion operator allows implicit conversion to bool but not to other integer types.
- typedef T* GOwnPtr::*UnspecifiedBoolType;
- operator UnspecifiedBoolType() const { return m_ptr ? &GOwnPtr::m_ptr : 0; }
-
- void swap(GOwnPtr& o) { std::swap(m_ptr, o.m_ptr); }
-
-private:
- T* m_ptr;
-};
-
-template <typename T> inline void swap(GOwnPtr<T>& a, GOwnPtr<T>& b)
-{
- a.swap(b);
-}
-
-template <typename T, typename U> inline bool operator==(const GOwnPtr<T>& a, U* b)
-{
- return a.get() == b;
-}
-
-template <typename T, typename U> inline bool operator==(T* a, const GOwnPtr<U>& b)
-{
- return a == b.get();
-}
-
-template <typename T, typename U> inline bool operator!=(const GOwnPtr<T>& a, U* b)
-{
- return a.get() != b;
-}
-
-template <typename T, typename U> inline bool operator!=(T* a, const GOwnPtr<U>& b)
-{
- return a != b.get();
-}
-
-template <typename T> inline typename GOwnPtr<T>::PtrType getPtr(const GOwnPtr<T>& p)
-{
- return p.get();
-}
-
-template <typename T> inline void freeOwnedGPtr(T* ptr)
-{
- g_free(ptr);
-}
-
-} // namespace WTF
-
-using WTF::GOwnPtr;
-
-#endif // GOwnPtr_h
+++ /dev/null
-/*
- * Copyright (C) 2009 Martin Robinson
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Lesser General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Lesser General Public License for more details.
- *
- * You should have received a copy of the GNU Lesser General Public
- * License along with this library; if not, write to the Free Software
- * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- */
-
-#include "config.h"
-#include "GRefPtr.h"
-
-#include <glib.h>
-
-namespace WTF {
-
-template <> GHashTable* refGPtr(GHashTable* ptr)
-{
- if (ptr)
- g_hash_table_ref(ptr);
- return ptr;
-}
-
-template <> void derefGPtr(GHashTable* ptr)
-{
- g_hash_table_unref(ptr);
-}
-
-} // namespace WTF
+++ /dev/null
-/*
- * Copyright (C) 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
- * Copyright (C) 2008 Collabora Ltd.
- * Copyright (C) 2009 Martin Robinson
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public License
- * along with this library; see the file COPYING.LIB. If not, write to
- * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
- * Boston, MA 02110-1301, USA.
- *
- */
-
-#ifndef WTF_GRefPtr_h
-#define WTF_GRefPtr_h
-
-#include "AlwaysInline.h"
-#include <algorithm>
-
-typedef struct _GHashTable GHashTable;
-
-namespace WTF {
-
-enum GRefPtrAdoptType { GRefPtrAdopt };
-template <typename T> inline T* refGPtr(T*);
-template <typename T> inline void derefGPtr(T*);
-template <typename T> class GRefPtr;
-template <typename T> GRefPtr<T> adoptGRef(T*);
-template <> GHashTable* refGPtr(GHashTable* ptr);
-template <> void derefGPtr(GHashTable* ptr);
-
-template <typename T> class GRefPtr {
-public:
- GRefPtr() : m_ptr(0) { }
- GRefPtr(T* ptr) : m_ptr(ptr) { if (ptr) refGPtr(ptr); }
- GRefPtr(const GRefPtr& o) : m_ptr(o.m_ptr) { if (T* ptr = m_ptr) refGPtr(ptr); }
- template <typename U> GRefPtr(const GRefPtr<U>& o) : m_ptr(o.get()) { if (T* ptr = m_ptr) refGPtr(ptr); }
-
- ~GRefPtr() { if (T* ptr = m_ptr) derefGPtr(ptr); }
-
- void clear()
- {
- if (T* ptr = m_ptr)
- derefGPtr(ptr);
- m_ptr = 0;
- }
-
- T* get() const { return m_ptr; }
- T& operator*() const { return *m_ptr; }
- ALWAYS_INLINE T* operator->() const { return m_ptr; }
-
- bool operator!() const { return !m_ptr; }
-
- // This conversion operator allows implicit conversion to bool but not to other integer types.
- typedef T* GRefPtr::*UnspecifiedBoolType;
- operator UnspecifiedBoolType() const { return m_ptr ? &GRefPtr::m_ptr : 0; }
-
- GRefPtr& operator=(const GRefPtr&);
- GRefPtr& operator=(T*);
- template <typename U> GRefPtr& operator=(const GRefPtr<U>&);
-
- void swap(GRefPtr&);
- friend GRefPtr adoptGRef<T>(T*);
-
-private:
- static T* hashTableDeletedValue() { return reinterpret_cast<T*>(-1); }
- // Adopting constructor.
- GRefPtr(T* ptr, GRefPtrAdoptType) : m_ptr(ptr) {}
-
- T* m_ptr;
-};
-
-template <typename T> inline GRefPtr<T>& GRefPtr<T>::operator=(const GRefPtr<T>& o)
-{
- T* optr = o.get();
- if (optr)
- refGPtr(optr);
- T* ptr = m_ptr;
- m_ptr = optr;
- if (ptr)
- derefGPtr(ptr);
- return *this;
-}
-
-template <typename T> inline GRefPtr<T>& GRefPtr<T>::operator=(T* optr)
-{
- T* ptr = m_ptr;
- if (optr)
- refGPtr(optr);
- m_ptr = optr;
- if (ptr)
- derefGPtr(ptr);
- return *this;
-}
-
-template <class T> inline void GRefPtr<T>::swap(GRefPtr<T>& o)
-{
- std::swap(m_ptr, o.m_ptr);
-}
-
-template <class T> inline void swap(GRefPtr<T>& a, GRefPtr<T>& b)
-{
- a.swap(b);
-}
-
-template <typename T, typename U> inline bool operator==(const GRefPtr<T>& a, const GRefPtr<U>& b)
-{
- return a.get() == b.get();
-}
-
-template <typename T, typename U> inline bool operator==(const GRefPtr<T>& a, U* b)
-{
- return a.get() == b;
-}
-
-template <typename T, typename U> inline bool operator==(T* a, const GRefPtr<U>& b)
-{
- return a == b.get();
-}
-
-template <typename T, typename U> inline bool operator!=(const GRefPtr<T>& a, const GRefPtr<U>& b)
-{
- return a.get() != b.get();
-}
-
-template <typename T, typename U> inline bool operator!=(const GRefPtr<T>& a, U* b)
-{
- return a.get() != b;
-}
-
-template <typename T, typename U> inline bool operator!=(T* a, const GRefPtr<U>& b)
-{
- return a != b.get();
-}
-
-template <typename T, typename U> inline GRefPtr<T> static_pointer_cast(const GRefPtr<U>& p)
-{
- return GRefPtr<T>(static_cast<T*>(p.get()));
-}
-
-template <typename T, typename U> inline GRefPtr<T> const_pointer_cast(const GRefPtr<U>& p)
-{
- return GRefPtr<T>(const_cast<T*>(p.get()));
-}
-
-template <typename T> inline T* getPtr(const GRefPtr<T>& p)
-{
- return p.get();
-}
-
-template <typename T> GRefPtr<T> adoptGRef(T* p)
-{
- return GRefPtr<T>(p, GRefPtrAdopt);
-}
-
-template <typename T> inline T* refGPtr(T* ptr)
-{
- if (ptr)
- g_object_ref_sink(ptr);
- return ptr;
-}
-
-template <typename T> inline void derefGPtr(T* ptr)
-{
- if (ptr)
- g_object_unref(ptr);
-}
-
-} // namespace WTF
-
-using WTF::GRefPtr;
-using WTF::refGPtr;
-using WTF::derefGPtr;
-using WTF::adoptGRef;
-using WTF::static_pointer_cast;
-using WTF::const_pointer_cast;
-
-#endif // WTF_GRefPtr_h
static Mutex* atomicallyInitializedStaticMutex;
-static ThreadIdentifier mainThreadIdentifier;
-
static Mutex& threadMapMutex()
{
static Mutex mutex;
atomicallyInitializedStaticMutex = new Mutex;
threadMapMutex();
initializeRandomNumberGenerator();
- mainThreadIdentifier = currentThread();
- initializeMainThread();
}
}
return establishIdentifierForThread(currentThread);
}
-bool isMainThread()
-{
- return currentThread() == mainThreadIdentifier;
-}
-
Mutex::Mutex()
: m_mutex(g_mutex_new())
{
#import "config.h"
#import "MainThread.h"
+#import <CoreFoundation/CoreFoundation.h>
#import <Foundation/NSThread.h>
+#import <stdio.h>
#import <wtf/Assertions.h>
+#import <wtf/Threading.h>
@interface WTFMainThreadCaller : NSObject {
}
namespace WTF {
-static WTFMainThreadCaller* staticMainThreadCaller = nil;
-#if USE(WEB_THREAD)
-static NSThread* webThread = nil;
-#endif
+static WTFMainThreadCaller* staticMainThreadCaller;
+static bool isTimerPosted; // This is only accessed on the 'main' thread.
+static bool mainThreadEstablishedAsPthreadMain;
+static pthread_t mainThreadPthread;
+static NSThread* mainThreadNSThread;
void initializeMainThreadPlatform()
{
+#if !defined(BUILDING_ON_TIGER)
ASSERT(!staticMainThreadCaller);
staticMainThreadCaller = [[WTFMainThreadCaller alloc] init];
-#if USE(WEB_THREAD)
- webThread = [[NSThread currentThread] retain];
+ mainThreadEstablishedAsPthreadMain = false;
+ mainThreadPthread = pthread_self();
+ mainThreadNSThread = [[NSThread currentThread] retain];
+#else
+ ASSERT_NOT_REACHED();
#endif
}
+void initializeMainThreadToProcessMainThreadPlatform()
+{
+ if (!pthread_main_np())
+ NSLog(@"WebKit Threading Violation - initial use of WebKit from a secondary thread.");
+
+ ASSERT(!staticMainThreadCaller);
+ staticMainThreadCaller = [[WTFMainThreadCaller alloc] init];
+
+ mainThreadEstablishedAsPthreadMain = true;
+ mainThreadPthread = 0;
+ mainThreadNSThread = nil;
+}
+
+static void timerFired(CFRunLoopTimerRef timer, void*)
+{
+ CFRelease(timer);
+ isTimerPosted = false;
+ WTF::dispatchFunctionsFromMainThread();
+}
+
+static void postTimer()
+{
+ ASSERT(isMainThread());
+
+ if (isTimerPosted)
+ return;
+
+ isTimerPosted = true;
+ CFRunLoopAddTimer(CFRunLoopGetCurrent(), CFRunLoopTimerCreate(0, 0, 0, 0, 0, timerFired, 0), kCFRunLoopCommonModes);
+}
+
void scheduleDispatchFunctionsOnMainThread()
{
ASSERT(staticMainThreadCaller);
-#if USE(WEB_THREAD)
- [staticMainThreadCaller performSelector:@selector(call) onThread:webThread withObject:nil waitUntilDone:NO];
+
+ if (isMainThread()) {
+ postTimer();
+ return;
+ }
+
+ if (mainThreadEstablishedAsPthreadMain) {
+ ASSERT(!mainThreadNSThread);
+ [staticMainThreadCaller performSelectorOnMainThread:@selector(call) withObject:nil waitUntilDone:NO];
+ return;
+ }
+
+#if !defined(BUILDING_ON_TIGER)
+ ASSERT(mainThreadNSThread);
+ [staticMainThreadCaller performSelector:@selector(call) onThread:mainThreadNSThread withObject:nil waitUntilDone:NO];
#else
- [staticMainThreadCaller performSelectorOnMainThread:@selector(call) withObject:nil waitUntilDone:NO];
+ ASSERT_NOT_REACHED();
#endif
}
+bool isMainThread()
+{
+ if (mainThreadEstablishedAsPthreadMain) {
+ ASSERT(!mainThreadPthread);
+ return pthread_main_np();
+ }
+
+#if !defined(BUILDING_ON_TIGER)
+ ASSERT(mainThreadPthread);
+ return pthread_equal(pthread_self(), mainThreadPthread);
+#else
+ ASSERT_NOT_REACHED();
+ return false;
+#endif
+}
+
+// This function is the same as isMainThread() above, except that it does not
+// do an ASSERT(mainThreadPthread). It should only be used by code that can get
+// invoked when the WebThread hasn't been started. See <rdar://8502487>.
+bool isWebThread()
+{
+ ASSERT(!mainThreadEstablishedAsPthreadMain);
+ return pthread_equal(pthread_self(), mainThreadPthread);
+}
+
} // namespace WTF
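
For readers unfamiliar with the pthread-based check above, here is a minimal standalone sketch of the same idea, outside the patch: remember which pthread ran the one-time initializer and compare against pthread_self() later. The example function names are hypothetical; only pthread_self(), pthread_equal() and the overall pattern come from the code above.

#include <pthread.h>

static pthread_t exampleMainThread;
static bool exampleMainThreadInitialized;

// Call once from the thread that should be treated as "main".
static void exampleInitializeMainThread()
{
    exampleMainThread = pthread_self();
    exampleMainThreadInitialized = true;
}

// Later, any thread can ask whether it is that thread.
static bool exampleIsMainThread()
{
    return exampleMainThreadInitialized && pthread_equal(pthread_self(), exampleMainThread);
}
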
#include <QtCore/QObject>
#include <QtCore/QCoreApplication>
-
+#include <QThread>
namespace WTF {
QMetaObject::invokeMethod(webkit_main_thread_invoker(), "dispatch", Qt::QueuedConnection);
}
+bool isMainThread()
+{
+ return QThread::currentThread() == QCoreApplication::instance()->thread();
+}
+
} // namespace WTF
#include "MainThreadQt.moc"
--- /dev/null
+/*
+ * Copyright (C) 2006 Nikolas Zimmermann <zimmermann@kde.org>
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+
+#include <wtf/text/WTFString.h>
+
+#include <QString>
+
+namespace WebCore {
+
+// String conversions
+String::String(const QString& qstr)
+{
+ if (qstr.isNull())
+ return;
+ m_impl = StringImpl::create(reinterpret_cast<const UChar*>(qstr.constData()), qstr.length());
+}
+
+String::String(const QStringRef& ref)
+{
+ if (!ref.string())
+ return;
+ m_impl = StringImpl::create(reinterpret_cast<const UChar*>(ref.unicode()), ref.length());
+}
+
+String::operator QString() const
+{
+ return QString(reinterpret_cast<const QChar*>(characters()), length());
+}
+
+QDataStream& operator<<(QDataStream& stream, const String& str)
+{
+ // This could be faster; going through QString keeps the code simple.
+ stream << QString(str);
+ return stream;
+}
+
+QDataStream& operator>>(QDataStream& stream, String& str)
+{
+ // Maybe not the fastest way, but straightforward.
+ QString tmp;
+ stream >> tmp;
+ str = tmp;
+ return stream;
+}
+
+}
+
+// vim: ts=4 sw=4 et
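
A hypothetical round trip through the conversions defined above, assuming a Qt build where WTFString.h and QString are both available; the function name is illustrative only.

#include <QString>
#include <wtf/text/WTFString.h>

static void qstringRoundTripExample()
{
    QString qt = QString::fromUtf8("WebKit");
    WebCore::String webkitString(qt);   // QString -> String: copies the UTF-16 data
    QString back = webkitString;        // String -> QString via operator QString()
    // 'back' compares equal to 'qt'; both hold the same six UTF-16 code units.
}
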
static Mutex* atomicallyInitializedStaticMutex;
-static ThreadIdentifier mainThreadIdentifier;
-
static Mutex& threadMapMutex()
{
static Mutex mutex;
atomicallyInitializedStaticMutex = new Mutex;
threadMapMutex();
initializeRandomNumberGenerator();
- QThread* mainThread = QCoreApplication::instance()->thread();
- mainThreadIdentifier = identifierByQthreadHandle(mainThread);
- if (!mainThreadIdentifier)
- mainThreadIdentifier = establishIdentifierForThread(mainThread);
- initializeMainThread();
}
}
return establishIdentifierForThread(currentThread);
}
-bool isMainThread()
-{
- return QThread::currentThread() == QCoreApplication::instance()->thread();
-}
-
Mutex::Mutex()
: m_mutex(new QMutex())
{
--- /dev/null
+/*
+ * Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+
+#if OS(SYMBIAN)
+
+#include "BlockAllocatorSymbian.h"
+
+
+namespace WTF {
+
+/** Efficiently allocates blocks of size blockSize with blockSize alignment.
+ * Primarily designed for the JSC Collector's needs.
+ * Not thread-safe.
+ */
+AlignedBlockAllocator::AlignedBlockAllocator(TUint32 reservationSize, TUint32 blockSize)
+ : m_reservation(reservationSize)
+ , m_blockSize(blockSize)
+{
+ // Get system's page size value.
+ SYMBIAN_PAGESIZE(m_pageSize);
+
+ // We only accept multiples of system page size for both initial reservation and the alignment/block size
+ m_reservation = SYMBIAN_ROUNDUPTOMULTIPLE(m_reservation, m_pageSize);
+ __ASSERT_ALWAYS(SYMBIAN_ROUNDUPTOMULTIPLE(m_blockSize, m_pageSize), User::Panic(_L("AlignedBlockAllocator1"), KErrArgument));
+
+ // Calculate the maximum number of bit flags needed to carve a reservationSize range into blockSize-sized blocks
+ m_map.numBits = m_reservation / m_blockSize;
+ const TUint32 bitsPerWord = 8*sizeof(TUint32);
+ const TUint32 numWords = (m_map.numBits + bitsPerWord -1) / bitsPerWord;
+
+ m_map.bits = new TUint32[numWords];
+ __ASSERT_ALWAYS(m_map.bits, User::Panic(_L("AlignedBlockAllocator2"), KErrNoMemory));
+ m_map.clearAll();
+
+ // Open a Symbian RChunk and reserve the requested virtual address range.
+ // Any thread in this process can operate on this RChunk thanks to EOwnerProcess access rights.
+ TInt ret = m_chunk.CreateDisconnectedLocal(0, 0, (TInt)m_reservation, EOwnerProcess);
+ if (ret != KErrNone)
+ User::Panic(_L("AlignedBlockAllocator3"), ret);
+
+ // This is the offset from m_chunk.Base() required to make it m_blockSize-aligned
+ m_offset = SYMBIAN_ROUNDUPTOMULTIPLE(TUint32(m_chunk.Base()), m_blockSize) - TUint(m_chunk.Base());
+}
+
+void* AlignedBlockAllocator::alloc()
+{
+ void* address = 0;
+
+ // Look up first free slot in bit map
+ const TInt freeIdx = m_map.findFree();
+
+ // Pseudo OOM: we have exhausted the address space we reserved,
+ // even though the device may still have free RAM.
+ if (freeIdx < 0)
+ return 0;
+
+ TInt ret = m_chunk.Commit(m_offset + (m_blockSize * freeIdx), m_blockSize);
+ if (ret != KErrNone)
+ return 0; // True OOM: Device didn't have physical RAM to spare
+
+ // Update the bit to mark this region as in use.
+ m_map.set(freeIdx);
+
+ // Calculate address of committed region (block)
+ address = (void*)((m_chunk.Base() + m_offset) + (TUint)(m_blockSize * freeIdx));
+
+ return address;
+}
+
+void AlignedBlockAllocator::free(void* block)
+{
+ // Calculate index of block to be freed
+ TInt idx = TUint(static_cast<TUint8*>(block) - m_chunk.Base() - m_offset) / m_blockSize;
+
+ __ASSERT_DEBUG(idx >= 0 && idx < m_map.numBits, User::Panic(_L("AlignedBlockAllocator4"), KErrCorrupt)); // valid index check
+ __ASSERT_DEBUG(m_map.get(idx), User::Panic(_L("AlignedBlockAllocator5"), KErrCorrupt)); // in-use flag check
+
+ // Return committed region to system RAM pool (the physical RAM becomes usable by others)
+ m_chunk.Decommit(m_offset + m_blockSize * idx, m_blockSize);
+
+ // Mark this block as available again.
+ m_map.clear(idx);
+}
+
+void AlignedBlockAllocator::destroy()
+{
+ // Release everything: decommit the entire reserved range.
+ m_chunk.Decommit(0, m_chunk.MaxSize());
+ m_map.clearAll();
+}
+
+AlignedBlockAllocator::~AlignedBlockAllocator()
+{
+ destroy();
+ m_chunk.Close();
+ delete [] m_map.bits;
+}
+
+} // namespace WTF
+
+#endif // SYMBIAN
--- /dev/null
+/*
+ * Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies)
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of
+ * its contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef BlockAllocatorSymbian_h
+#define BlockAllocatorSymbian_h
+
+#include <e32cmn.h>
+#include <e32std.h>
+#include <hal.h>
+
+
+#define SYMBIAN_PAGESIZE(x) (HAL::Get(HALData::EMemoryPageSize, x));
+#define SYMBIAN_FREERAM(x) (HAL::Get(HALData::EMemoryRAMFree, x));
+#define SYMBIAN_ROUNDUPTOMULTIPLE(x, multipleof) ( (x + multipleof - 1) & ~(multipleof - 1) )
+
+// Set sane defaults if -D<flagname=value> wasn't provided via compiler args
+#ifndef JSCCOLLECTOR_VIRTUALMEM_RESERVATION
+#if defined(__WINS__)
+ // Emulator has limited virtual address space
+ #define JSCCOLLECTOR_VIRTUALMEM_RESERVATION (4*1024*1024)
+#else
+ // HW has plenty of virtual addresses
+ #define JSCCOLLECTOR_VIRTUALMEM_RESERVATION (128*1024*1024)
+#endif
+#endif
+
+namespace WTF {
+
+/**
+ * Allocates a contiguous region of size blockSize at a blockSize-aligned address.
+ * blockSize must be a multiple of the system page size (typically 4K on Symbian/ARM).
+ *
+ * @param reservationSize Virtual address range to be reserved upon creation of chunk (bytes).
+ * @param blockSize Size of a single allocation. Returned address will also be blockSize-aligned.
+ */
+class AlignedBlockAllocator {
+ public:
+ AlignedBlockAllocator(TUint32 reservationSize, TUint32 blockSize);
+ ~AlignedBlockAllocator();
+ void destroy();
+ void* alloc();
+ void free(void* data);
+
+ private:
+ RChunk m_chunk; // Symbian chunk that lets us reserve/commit/decommit
+ TUint m_offset; // offset of first committed region from base
+ TInt m_pageSize; // cached value of system page size, typically 4K on Symbian
+ TUint32 m_reservation;
+ TUint32 m_blockSize;
+
+ // Tracks the committed/decommitted state of each blockSize region
+ struct {
+ TUint32* bits; // array of bit flags
+ TUint32 numBits; // number of regions to keep track of
+
+ bool get(TUint32 n) const
+ {
+ return !!(bits[n >> 5] & (1 << (n & 0x1F)));
+ }
+
+ void set(TUint32 n)
+ {
+ bits[n >> 5] |= (1 << (n & 0x1F));
+ }
+
+ void clear(TUint32 n)
+ {
+ bits[n >> 5] &= ~(1 << (n & 0x1F));
+ }
+
+ void clearAll()
+ {
+ for (TUint32 i = 0; i < numBits; i++)
+ clear(i);
+ }
+
+ TInt findFree() const
+ {
+ for (TUint32 i = 0; i < numBits; i++) {
+ if (!get(i))
+ return i;
+ }
+ return -1;
+ }
+
+ } m_map;
+
+};
+
+}
+
+#endif // end of BlockAllocatorSymbian_h
+
+
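
As a hedged usage sketch of the class above (Symbian-only; the block and reservation sizes below are illustrative, not values mandated by this patch): reserve a large virtual range once, then commit and decommit aligned blocks on demand.

#include "BlockAllocatorSymbian.h"

static void alignedBlockAllocatorExample()
{
    // Reserve 4 MB of address space and hand out 64 KB blocks;
    // both values are multiples of the 4 KB page size.
    WTF::AlignedBlockAllocator allocator(4 * 1024 * 1024, 64 * 1024);

    void* block = allocator.alloc();    // commits one 64 KB, 64 KB-aligned block
    if (!block)
        return;                         // reserved address space or physical RAM exhausted

    allocator.free(block);              // decommits it; the RAM is returned to the system
}
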
--- /dev/null
+/*
+ * Copyright (C) 2004, 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#include "config.h"
+
+#include <libkern/OSAtomic.h>
+
+#include "AtomicString.h"
+
+#include "StringHash.h"
+#include <wtf/HashSet.h>
+#include <wtf/Threading.h>
+#include <wtf/WTFThreadData.h>
+
+namespace WebCore {
+
+COMPILE_ASSERT(sizeof(AtomicString) == sizeof(String), atomic_string_and_string_must_be_same_size);
+
+class AtomicStringTableLocker {
+ static OSSpinLock s_stringTableLock;
+public:
+ AtomicStringTableLocker() { OSSpinLockLock(&s_stringTableLock); }
+ ~AtomicStringTableLocker() { OSSpinLockUnlock(&s_stringTableLock); }
+};
+OSSpinLock AtomicStringTableLocker::s_stringTableLock = OS_SPINLOCK_INIT;
+
+class AtomicStringTable {
+public:
+ static AtomicStringTable* create()
+ {
+ static AtomicStringTable* sharedStringTable = new AtomicStringTable;
+
+ WTFThreadData& data = wtfThreadData();
+ if (pthread_main_np() || WTF::isWebThread())
+ data.m_atomicStringTable = sharedStringTable;
+ else
+ data.m_atomicStringTable = new AtomicStringTable;
+
+ // The AtomicStringTable is shared between the main UI thread and the
+ // WebThread. We do the following so that its destruction happens only
+ // once - on the main UI thread.
+ if (!WTF::isWebThread())
+ data.m_atomicStringTableDestructor = AtomicStringTable::destroy;
+ return data.m_atomicStringTable;
+ }
+
+ HashSet<StringImpl*>& table()
+ {
+ return m_table;
+ }
+
+private:
+ static void destroy(AtomicStringTable* table)
+ {
+ HashSet<StringImpl*>::iterator end = table->m_table.end();
+ for (HashSet<StringImpl*>::iterator iter = table->m_table.begin(); iter != end; ++iter)
+ (*iter)->setIsAtomic(false);
+ delete table;
+ }
+
+ HashSet<StringImpl*> m_table;
+};
+
+static inline HashSet<StringImpl*>& stringTable()
+{
+ // When possible, we should make this non-lazy (constructed in WTFThreadData's constructor).
+ AtomicStringTable* table = wtfThreadData().atomicStringTable();
+ if (UNLIKELY(!table))
+ table = AtomicStringTable::create();
+ return table->table();
+}
+
+struct CStringTranslator {
+ static unsigned hash(const char* c)
+ {
+ return StringImpl::computeHash(c);
+ }
+
+ static bool equal(StringImpl* r, const char* s)
+ {
+ int length = r->length();
+ const UChar* d = r->characters();
+ for (int i = 0; i != length; ++i) {
+ unsigned char c = s[i];
+ if (d[i] != c)
+ return false;
+ }
+ return s[length] == 0;
+ }
+
+ static void translate(StringImpl*& location, const char* const& c, unsigned hash)
+ {
+ location = StringImpl::create(c).releaseRef();
+ location->setHash(hash);
+ location->setIsAtomic(true);
+ }
+};
+
+bool operator==(const AtomicString& a, const char* b)
+{
+ StringImpl* impl = a.impl();
+ if ((!impl || !impl->characters()) && !b)
+ return true;
+ if ((!impl || !impl->characters()) || !b)
+ return false;
+ return CStringTranslator::equal(impl, b);
+}
+
+PassRefPtr<StringImpl> AtomicString::add(const char* c)
+{
+ if (!c)
+ return 0;
+ if (!*c)
+ return StringImpl::empty();
+ AtomicStringTableLocker locker;
+ pair<HashSet<StringImpl*>::iterator, bool> addResult = stringTable().add<const char*, CStringTranslator>(c);
+ if (!addResult.second)
+ return *addResult.first;
+ return adoptRef(*addResult.first);
+}
+
+struct UCharBuffer {
+ const UChar* s;
+ unsigned length;
+};
+
+static inline bool equal(StringImpl* string, const UChar* characters, unsigned length)
+{
+ if (string->length() != length)
+ return false;
+
+ // FIXME: perhaps we should have a more abstract macro that indicates when
+ // going 4 bytes at a time is unsafe
+#if CPU(ARM) || CPU(SH4)
+ const UChar* stringCharacters = string->characters();
+ for (unsigned i = 0; i != length; ++i) {
+ if (*stringCharacters++ != *characters++)
+ return false;
+ }
+ return true;
+#else
+ /* Do it 4-bytes-at-a-time on architectures where it's safe */
+
+ const uint32_t* stringCharacters = reinterpret_cast<const uint32_t*>(string->characters());
+ const uint32_t* bufferCharacters = reinterpret_cast<const uint32_t*>(characters);
+
+ unsigned halfLength = length >> 1;
+ for (unsigned i = 0; i != halfLength; ++i) {
+ if (*stringCharacters++ != *bufferCharacters++)
+ return false;
+ }
+
+ if (length & 1 && *reinterpret_cast<const uint16_t*>(stringCharacters) != *reinterpret_cast<const uint16_t*>(bufferCharacters))
+ return false;
+
+ return true;
+#endif
+}
+
+struct UCharBufferTranslator {
+ static unsigned hash(const UCharBuffer& buf)
+ {
+ return StringImpl::computeHash(buf.s, buf.length);
+ }
+
+ static bool equal(StringImpl* const& str, const UCharBuffer& buf)
+ {
+ return WebCore::equal(str, buf.s, buf.length);
+ }
+
+ static void translate(StringImpl*& location, const UCharBuffer& buf, unsigned hash)
+ {
+ location = StringImpl::create(buf.s, buf.length).releaseRef();
+ location->setHash(hash);
+ location->setIsAtomic(true);
+ }
+};
+
+struct HashAndCharacters {
+ unsigned hash;
+ const UChar* characters;
+ unsigned length;
+};
+
+struct HashAndCharactersTranslator {
+ static unsigned hash(const HashAndCharacters& buffer)
+ {
+ ASSERT(buffer.hash == StringImpl::computeHash(buffer.characters, buffer.length));
+ return buffer.hash;
+ }
+
+ static bool equal(StringImpl* const& string, const HashAndCharacters& buffer)
+ {
+ return WebCore::equal(string, buffer.characters, buffer.length);
+ }
+
+ static void translate(StringImpl*& location, const HashAndCharacters& buffer, unsigned hash)
+ {
+ location = StringImpl::create(buffer.characters, buffer.length).releaseRef();
+ location->setHash(hash);
+ location->setIsAtomic(true);
+ }
+};
+
+PassRefPtr<StringImpl> AtomicString::add(const UChar* s, unsigned length)
+{
+ if (!s)
+ return 0;
+
+ if (length == 0)
+ return StringImpl::empty();
+
+ UCharBuffer buf = { s, length };
+ AtomicStringTableLocker locker;
+ pair<HashSet<StringImpl*>::iterator, bool> addResult = stringTable().add<UCharBuffer, UCharBufferTranslator>(buf);
+
+ // If the string is newly-translated, then we need to adopt it.
+ // The boolean in the pair tells us if that is so.
+ return addResult.second ? adoptRef(*addResult.first) : *addResult.first;
+}
+
+PassRefPtr<StringImpl> AtomicString::add(const UChar* s, unsigned length, unsigned existingHash)
+{
+ ASSERT(s);
+ ASSERT(existingHash);
+
+ if (length == 0)
+ return StringImpl::empty();
+
+ HashAndCharacters buffer = { existingHash, s, length };
+ AtomicStringTableLocker locker;
+ pair<HashSet<StringImpl*>::iterator, bool> addResult = stringTable().add<HashAndCharacters, HashAndCharactersTranslator>(buffer);
+ if (!addResult.second)
+ return *addResult.first;
+ return adoptRef(*addResult.first);
+}
+
+PassRefPtr<StringImpl> AtomicString::add(const UChar* s)
+{
+ if (!s)
+ return 0;
+
+ unsigned length = 0;
+ while (s[length] != UChar(0))
+ length++;
+
+ if (length == 0)
+ return StringImpl::empty();
+
+ UCharBuffer buf = { s, length };
+ AtomicStringTableLocker locker;
+ pair<HashSet<StringImpl*>::iterator, bool> addResult = stringTable().add<UCharBuffer, UCharBufferTranslator>(buf);
+
+ // If the string is newly-translated, then we need to adopt it.
+ // The boolean in the pair tells us if that is so.
+ return addResult.second ? adoptRef(*addResult.first) : *addResult.first;
+}
+
+PassRefPtr<StringImpl> AtomicString::addSlowCase(StringImpl* r)
+{
+ if (!r || r->isAtomic())
+ return r;
+
+ if (r->length() == 0)
+ return StringImpl::empty();
+
+ AtomicStringTableLocker locker;
+ StringImpl* result = *stringTable().add(r).first;
+ if (result == r)
+ r->setIsAtomic(true);
+ return result;
+}
+
+AtomicStringImpl* AtomicString::find(const UChar* s, unsigned length, unsigned existingHash)
+{
+ ASSERT(s);
+ ASSERT(existingHash);
+
+ if (length == 0)
+ return static_cast<AtomicStringImpl*>(StringImpl::empty());
+
+ HashAndCharacters buffer = { existingHash, s, length };
+ AtomicStringTableLocker locker;
+ HashSet<StringImpl*>::iterator iterator = stringTable().find<HashAndCharacters, HashAndCharactersTranslator>(buffer);
+ if (iterator == stringTable().end())
+ return 0;
+ return static_cast<AtomicStringImpl*>(*iterator);
+}
+
+void AtomicString::remove(StringImpl* r)
+{
+ AtomicStringTableLocker locker;
+ stringTable().remove(r);
+}
+
+AtomicString AtomicString::lower() const
+{
+ // Note: This is a hot function in the Dromaeo benchmark.
+ StringImpl* impl = this->impl();
+ RefPtr<StringImpl> newImpl = impl->lower();
+ if (LIKELY(newImpl == impl))
+ return *this;
+ return AtomicString(newImpl);
+}
+
+}
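
To make the effect of the shared table concrete, here is a hypothetical illustration (not part of the patch): constructing two AtomicStrings from equal text yields the same underlying StringImpl, which is why AtomicString equality is a pointer comparison.

#include "AtomicString.h"

static void interningExample()
{
    WebCore::AtomicString a("div");
    WebCore::AtomicString b("div");

    // Both constructions resolve to the same entry in the atomic string table.
    bool sameImpl = (a.impl() == b.impl()); // true: one interned StringImpl
    bool equal = (a == b);                  // operator== only compares impl pointers
    (void)sameImpl;
    (void)equal;
}
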
--- /dev/null
+/*
+ * Copyright (C) 2004, 2005, 2006, 2008 Apple Inc. All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef AtomicString_h
+#define AtomicString_h
+
+#include "AtomicStringImpl.h"
+#include "WTFString.h"
+
+// Define 'NO_IMPLICIT_ATOMICSTRING' before including this header,
+// to disallow (expensive) implicit String-->AtomicString conversions.
+#ifdef NO_IMPLICIT_ATOMICSTRING
+#define ATOMICSTRING_CONVERSION explicit
+#else
+#define ATOMICSTRING_CONVERSION
+#endif
+
+// FIXME: This is a temporary layering violation while we move string code to WTF.
+// Landing the file moves in one patch; follow-up patches will change the namespaces.
+namespace WebCore {
+
+struct AtomicStringHash;
+
+class AtomicString {
+public:
+ static void init();
+
+ AtomicString() { }
+ AtomicString(const char* s) : m_string(add(s)) { }
+ AtomicString(const UChar* s, unsigned length) : m_string(add(s, length)) { }
+ AtomicString(const UChar* s, unsigned length, unsigned existingHash) : m_string(add(s, length, existingHash)) { }
+ AtomicString(const UChar* s) : m_string(add(s)) { }
+ ATOMICSTRING_CONVERSION AtomicString(StringImpl* imp) : m_string(add(imp)) { }
+ AtomicString(AtomicStringImpl* imp) : m_string(imp) { }
+ ATOMICSTRING_CONVERSION AtomicString(const String& s) : m_string(add(s.impl())) { }
+
+ // Hash table deleted values, which are only constructed and never copied or destroyed.
+ AtomicString(WTF::HashTableDeletedValueType) : m_string(WTF::HashTableDeletedValue) { }
+ bool isHashTableDeletedValue() const { return m_string.isHashTableDeletedValue(); }
+
+ static AtomicStringImpl* find(const UChar* s, unsigned length, unsigned existingHash);
+
+ operator const String&() const { return m_string; }
+ const String& string() const { return m_string; }
+
+ AtomicStringImpl* impl() const { return static_cast<AtomicStringImpl *>(m_string.impl()); }
+
+ const UChar* characters() const { return m_string.characters(); }
+ unsigned length() const { return m_string.length(); }
+
+ UChar operator[](unsigned int i) const { return m_string[i]; }
+
+ bool contains(UChar c) const { return m_string.contains(c); }
+ bool contains(const char* s, bool caseSensitive = true) const
+ { return m_string.contains(s, caseSensitive); }
+ bool contains(const String& s, bool caseSensitive = true) const
+ { return m_string.contains(s, caseSensitive); }
+
+ int find(UChar c, int start = 0) const { return m_string.find(c, start); }
+ int find(const char* s, int start = 0, bool caseSensitive = true) const
+ { return m_string.find(s, start, caseSensitive); }
+ int find(const String& s, int start = 0, bool caseSensitive = true) const
+ { return m_string.find(s, start, caseSensitive); }
+
+ bool startsWith(const String& s, bool caseSensitive = true) const
+ { return m_string.startsWith(s, caseSensitive); }
+ bool endsWith(const String& s, bool caseSensitive = true) const
+ { return m_string.endsWith(s, caseSensitive); }
+
+ AtomicString lower() const;
+ AtomicString upper() const { return AtomicString(impl()->upper()); }
+
+ int toInt(bool* ok = 0) const { return m_string.toInt(ok); }
+ double toDouble(bool* ok = 0) const { return m_string.toDouble(ok); }
+ float toFloat(bool* ok = 0) const { return m_string.toFloat(ok); }
+ bool percentage(int& p) const { return m_string.percentage(p); }
+
+ bool isNull() const { return m_string.isNull(); }
+ bool isEmpty() const { return m_string.isEmpty(); }
+
+ static void remove(StringImpl*);
+
+#if PLATFORM(CF)
+ AtomicString(CFStringRef s) : m_string(add(String(s).impl())) { }
+ CFStringRef createCFString() const { return m_string.createCFString(); }
+#endif
+#ifdef __OBJC__
+ AtomicString(NSString* s) : m_string(add(String(s).impl())) { }
+ operator NSString*() const { return m_string; }
+#endif
+#if PLATFORM(QT)
+ AtomicString(const QString& s) : m_string(add(String(s).impl())) { }
+ operator QString() const { return m_string; }
+#endif
+
+private:
+ String m_string;
+
+ static PassRefPtr<StringImpl> add(const char*);
+ static PassRefPtr<StringImpl> add(const UChar*, unsigned length);
+ static PassRefPtr<StringImpl> add(const UChar*, unsigned length, unsigned existingHash);
+ static PassRefPtr<StringImpl> add(const UChar*);
+ ALWAYS_INLINE PassRefPtr<StringImpl> add(StringImpl* r)
+ {
+ if (!r || r->isAtomic())
+ return r;
+ return addSlowCase(r);
+ }
+ static PassRefPtr<StringImpl> addSlowCase(StringImpl*);
+};
+
+inline bool operator==(const AtomicString& a, const AtomicString& b) { return a.impl() == b.impl(); }
+bool operator==(const AtomicString& a, const char* b);
+inline bool operator==(const AtomicString& a, const String& b) { return equal(a.impl(), b.impl()); }
+inline bool operator==(const char* a, const AtomicString& b) { return b == a; }
+inline bool operator==(const String& a, const AtomicString& b) { return equal(a.impl(), b.impl()); }
+
+inline bool operator!=(const AtomicString& a, const AtomicString& b) { return a.impl() != b.impl(); }
+inline bool operator!=(const AtomicString& a, const char* b) { return !(a == b); }
+inline bool operator!=(const AtomicString& a, const String& b) { return !equal(a.impl(), b.impl()); }
+inline bool operator!=(const char* a, const AtomicString& b) { return !(b == a); }
+inline bool operator!=(const String& a, const AtomicString& b) { return !equal(a.impl(), b.impl()); }
+
+inline bool equalIgnoringCase(const AtomicString& a, const AtomicString& b) { return equalIgnoringCase(a.impl(), b.impl()); }
+inline bool equalIgnoringCase(const AtomicString& a, const char* b) { return equalIgnoringCase(a.impl(), b); }
+inline bool equalIgnoringCase(const AtomicString& a, const String& b) { return equalIgnoringCase(a.impl(), b.impl()); }
+inline bool equalIgnoringCase(const char* a, const AtomicString& b) { return equalIgnoringCase(a, b.impl()); }
+inline bool equalIgnoringCase(const String& a, const AtomicString& b) { return equalIgnoringCase(a.impl(), b.impl()); }
+
+// Define external global variables for the commonly used atomic strings.
+// These are only usable from the main thread.
+#ifndef ATOMICSTRING_HIDE_GLOBALS
+ extern const JS_EXPORTDATA AtomicString nullAtom;
+ extern const JS_EXPORTDATA AtomicString emptyAtom;
+ extern const JS_EXPORTDATA AtomicString textAtom;
+ extern const JS_EXPORTDATA AtomicString commentAtom;
+ extern const JS_EXPORTDATA AtomicString starAtom;
+ extern const JS_EXPORTDATA AtomicString xmlAtom;
+ extern const JS_EXPORTDATA AtomicString xmlnsAtom;
+#endif
+
+} // namespace WebCore
+
+
+namespace WTF {
+
+ // AtomicStringHash is the default hash for AtomicString
+ template<typename T> struct DefaultHash;
+ template<> struct DefaultHash<WebCore::AtomicString> {
+ typedef WebCore::AtomicStringHash Hash;
+ };
+
+} // namespace WTF
+
+#endif // AtomicString_h
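
A hedged sketch of the NO_IMPLICIT_ATOMICSTRING knob declared above: when it is defined before including this header, the String and StringImpl* constructors become explicit, so the (potentially expensive) interning has to be spelled out at each call site. The function and variable names are illustrative.

#define NO_IMPLICIT_ATOMICSTRING
#include "AtomicString.h"

static void explicitConversionExample(const WebCore::String& incoming)
{
    // WebCore::AtomicString attr = incoming;   // would not compile: the constructor is now explicit
    WebCore::AtomicString attr = WebCore::AtomicString(incoming); // explicit, intentional intern
    (void)attr;
}
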
--- /dev/null
+/*
+ * Copyright (C) 2006 Apple Computer, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef AtomicStringImpl_h
+#define AtomicStringImpl_h
+
+#include "StringImpl.h"
+
+// FIXME: This is a temporary layering violation while we move string code to WTF.
+// Landing the file moves in one patch; follow-up patches will change the namespaces.
+namespace WebCore {
+
+class AtomicStringImpl : public StringImpl {
+public:
+ AtomicStringImpl() : StringImpl(0) { }
+};
+
+}
+
+#endif
--- /dev/null
+/*
+ * Copyright (C) 2003, 2006, 2008, 2009 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+
+#include "config.h"
+#include "CString.h"
+
+using std::min;
+
+namespace WTF {
+
+CString::CString(const char* str)
+{
+ init(str, strlen(str));
+}
+
+CString::CString(const char* str, unsigned length)
+{
+ init(str, length);
+}
+
+void CString::init(const char* str, unsigned length)
+{
+ if (!str)
+ return;
+
+ m_buffer = CStringBuffer::create(length + 1);
+ memcpy(m_buffer->mutableData(), str, length);
+ m_buffer->mutableData()[length] = '\0';
+}
+
+char* CString::mutableData()
+{
+ copyBufferIfNeeded();
+ if (!m_buffer)
+ return 0;
+ return m_buffer->mutableData();
+}
+
+CString CString::newUninitialized(size_t length, char*& characterBuffer)
+{
+ CString result;
+ result.m_buffer = CStringBuffer::create(length + 1);
+ char* bytes = result.m_buffer->mutableData();
+ bytes[length] = '\0';
+ characterBuffer = bytes;
+ return result;
+}
+
+void CString::copyBufferIfNeeded()
+{
+ if (!m_buffer || m_buffer->hasOneRef())
+ return;
+
+ int len = m_buffer->length();
+ RefPtr<CStringBuffer> originalBuffer = m_buffer;
+ m_buffer = CStringBuffer::create(len);
+ memcpy(m_buffer->mutableData(), originalBuffer->data(), len);
+}
+
+bool operator==(const CString& a, const CString& b)
+{
+ if (a.isNull() != b.isNull())
+ return false;
+ if (a.length() != b.length())
+ return false;
+ return !strncmp(a.data(), b.data(), min(a.length(), b.length()));
+}
+
+} // namespace WTF
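
A hedged illustration of the copy-on-write behaviour implemented by copyBufferIfNeeded() above: copies share one CStringBuffer until a caller asks for mutable access, at which point only that copy detaches.

#include "CString.h"
#include <string.h>

static void copyOnWriteExample()
{
    WTF::CString original("hello");
    WTF::CString copy = original;         // shares the same CStringBuffer

    char* writable = copy.mutableData();  // the buffer has two owners, so 'copy' detaches here
    writable[0] = 'H';

    bool originalUntouched = !strcmp(original.data(), "hello"); // true: 'original' still reads "hello"
    (void)originalUntouched;
}
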
--- /dev/null
+/*
+ * Copyright (C) 2003, 2006, 2008, 2009, 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef CString_h
+#define CString_h
+
+#include "PassRefPtr.h"
+#include "RefCounted.h"
+#include "Vector.h"
+
+namespace WTF {
+
+class CStringBuffer : public RefCounted<CStringBuffer> {
+public:
+ const char* data() { return m_vector.data(); }
+ size_t length() { return m_vector.size(); }
+
+private:
+ friend class CString;
+
+ static PassRefPtr<CStringBuffer> create(unsigned length) { return adoptRef(new CStringBuffer(length)); }
+ CStringBuffer(unsigned length) : m_vector(length) { }
+ char* mutableData() { return m_vector.data(); }
+
+ Vector<char> m_vector;
+};
+
+// A container for a null-terminated char array supporting copy-on-write
+// assignment. The contained char array may be null.
+class CString {
+public:
+ CString() { }
+ CString(const char*);
+ CString(const char*, unsigned length);
+ CString(CStringBuffer* buffer) : m_buffer(buffer) { }
+ static CString newUninitialized(size_t length, char*& characterBuffer);
+
+ const char* data() const
+ {
+ return m_buffer ? m_buffer->data() : 0;
+ }
+ char* mutableData();
+ unsigned length() const
+ {
+ return m_buffer ? m_buffer->length() - 1 : 0;
+ }
+
+ bool isNull() const { return !m_buffer; }
+
+ CStringBuffer* buffer() const { return m_buffer.get(); }
+
+private:
+ void copyBufferIfNeeded();
+ void init(const char*, unsigned length);
+ RefPtr<CStringBuffer> m_buffer;
+};
+
+bool operator==(const CString& a, const CString& b);
+inline bool operator!=(const CString& a, const CString& b) { return !(a == b); }
+
+} // namespace WTF
+
+using WTF::CString;
+
+#endif // CString_h
--- /dev/null
+/*
+ * Copyright (C) 2008, 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ * 3. Neither the name of Apple Inc. ("Apple") nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+ * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+ * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
+ * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef StringBuffer_h
+#define StringBuffer_h
+
+#include <wtf/Assertions.h>
+#include <wtf/Noncopyable.h>
+#include <wtf/unicode/Unicode.h>
+#include <limits>
+
+namespace WebCore {
+
+class StringBuffer : public Noncopyable {
+public:
+ explicit StringBuffer(unsigned length)
+ : m_length(length)
+ {
+ if (m_length > std::numeric_limits<unsigned>::max() / sizeof(UChar))
+ CRASH();
+ m_data = static_cast<UChar*>(fastMalloc(m_length * sizeof(UChar)));
+ }
+
+ ~StringBuffer()
+ {
+ fastFree(m_data);
+ }
+
+ void shrink(unsigned newLength)
+ {
+ ASSERT(newLength <= m_length);
+ m_length = newLength;
+ }
+
+ void resize(unsigned newLength)
+ {
+ if (newLength > m_length) {
+ if (newLength > std::numeric_limits<unsigned>::max() / sizeof(UChar))
+ CRASH();
+ m_data = static_cast<UChar*>(fastRealloc(m_data, newLength * sizeof(UChar)));
+ }
+ m_length = newLength;
+ }
+
+ unsigned length() const { return m_length; }
+ UChar* characters() { return m_data; }
+
+ UChar& operator[](unsigned i) { ASSERT(i < m_length); return m_data[i]; }
+
+ UChar* release() { UChar* data = m_data; m_data = 0; return data; }
+
+private:
+ unsigned m_length;
+ UChar* m_data;
+};
+
+} // namespace WebCore
+
+#endif // StringBuffer_h
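
A hedged usage sketch of StringBuffer: it is a plain fastMalloc-backed UChar scratch buffer, shrink() trims the logical length, and release() hands the raw allocation to the caller, who then owns it (the StringImpl code later in this patch adopts such buffers; here we simply free it).

#include "StringBuffer.h"
#include <wtf/FastMalloc.h>

static void stringBufferExample()
{
    WebCore::StringBuffer buffer(4);      // room for four UChars
    buffer[0] = 'a';
    buffer[1] = 'b';
    buffer[2] = 'c';
    buffer.shrink(3);                     // only three characters were actually written

    UChar* characters = buffer.release(); // take ownership; the destructor now frees nothing
    WTF::fastFree(characters);            // caller is responsible for the fastMalloc'd memory
}
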
--- /dev/null
+/*
+ * Copyright (C) 2006, 2007, 2008 Apple Inc. All rights reserved
+ * Copyright (C) Research In Motion Limited 2009. All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef StringHash_h
+#define StringHash_h
+
+#include "AtomicString.h"
+#include "WTFString.h"
+#include <wtf/HashTraits.h>
+#include <wtf/StringHashFunctions.h>
+#include <wtf/unicode/Unicode.h>
+
+// FIXME: This is a temporary layering violation while we move string code to WTF.
+// Landing the file moves in one patch; follow-up patches will change the namespaces.
+namespace WebCore {
+
+ // The hash() functions on StringHash and CaseFoldingHash do not support
+ // null strings. get(), contains(), and add() on HashMap<String,..., StringHash>
+ // cause a null-pointer dereference when passed null strings.
+
+ // FIXME: We should really figure out a way to put the computeHash function that's
+ // currently a member function of StringImpl into this file so we can be a little
+ // closer to having all the nearly-identical hash functions in one place.
+
+ struct StringHash {
+ static unsigned hash(StringImpl* key) { return key->hash(); }
+ static bool equal(const StringImpl* a, const StringImpl* b)
+ {
+ if (a == b)
+ return true;
+ if (!a || !b)
+ return false;
+
+ unsigned aLength = a->length();
+ unsigned bLength = b->length();
+ if (aLength != bLength)
+ return false;
+
+ // FIXME: perhaps we should have a more abstract macro that indicates when
+ // going 4 bytes at a time is unsafe
+#if CPU(ARM) || CPU(SH4)
+ const UChar* aChars = a->characters();
+ const UChar* bChars = b->characters();
+ for (unsigned i = 0; i != aLength; ++i) {
+ if (*aChars++ != *bChars++)
+ return false;
+ }
+ return true;
+#else
+ /* Do it 4-bytes-at-a-time on architectures where it's safe */
+ const uint32_t* aChars = reinterpret_cast<const uint32_t*>(a->characters());
+ const uint32_t* bChars = reinterpret_cast<const uint32_t*>(b->characters());
+
+ unsigned halfLength = aLength >> 1;
+ for (unsigned i = 0; i != halfLength; ++i)
+ if (*aChars++ != *bChars++)
+ return false;
+
+ if (aLength & 1 && *reinterpret_cast<const uint16_t*>(aChars) != *reinterpret_cast<const uint16_t*>(bChars))
+ return false;
+
+ return true;
+#endif
+ }
+
+ static unsigned hash(const RefPtr<StringImpl>& key) { return key->hash(); }
+ static bool equal(const RefPtr<StringImpl>& a, const RefPtr<StringImpl>& b)
+ {
+ return equal(a.get(), b.get());
+ }
+
+ static unsigned hash(const String& key) { return key.impl()->hash(); }
+ static bool equal(const String& a, const String& b)
+ {
+ return equal(a.impl(), b.impl());
+ }
+
+ static const bool safeToCompareToEmptyOrDeleted = false;
+ };
+
+ class CaseFoldingHash {
+ public:
+ // Paul Hsieh's SuperFastHash
+ // http://www.azillionmonkeys.com/qed/hash.html
+ static unsigned hash(const UChar* data, unsigned length)
+ {
+ unsigned l = length;
+ const UChar* s = data;
+ uint32_t hash = WTF::stringHashingStartValue;
+ uint32_t tmp;
+
+ int rem = l & 1;
+ l >>= 1;
+
+ // Main loop.
+ for (; l > 0; l--) {
+ hash += WTF::Unicode::foldCase(s[0]);
+ tmp = (WTF::Unicode::foldCase(s[1]) << 11) ^ hash;
+ hash = (hash << 16) ^ tmp;
+ s += 2;
+ hash += hash >> 11;
+ }
+
+ // Handle end case.
+ if (rem) {
+ hash += WTF::Unicode::foldCase(s[0]);
+ hash ^= hash << 11;
+ hash += hash >> 17;
+ }
+
+ // Force "avalanching" of final 127 bits.
+ hash ^= hash << 3;
+ hash += hash >> 5;
+ hash ^= hash << 2;
+ hash += hash >> 15;
+ hash ^= hash << 10;
+
+ // This avoids ever returning a hash code of 0, since that is used to
+ // signal "hash not computed yet", using a value that is likely to be
+ // effectively the same as 0 when the low bits are masked.
+ hash |= !hash << 31;
+
+ return hash;
+ }
+
+ static unsigned hash(StringImpl* str)
+ {
+ return hash(str->characters(), str->length());
+ }
+
+ static unsigned hash(const char* str, unsigned length)
+ {
+ // This hash is designed to work on 16-bit chunks at a time. But since the normal case
+ // (above) is to hash UTF-16 characters, we just treat the 8-bit chars as if they
+ // were 16-bit chunks, which will give matching results.
+
+ unsigned l = length;
+ const char* s = str;
+ uint32_t hash = WTF::stringHashingStartValue;
+ uint32_t tmp;
+
+ int rem = l & 1;
+ l >>= 1;
+
+ // Main loop
+ for (; l > 0; l--) {
+ hash += WTF::Unicode::foldCase(s[0]);
+ tmp = (WTF::Unicode::foldCase(s[1]) << 11) ^ hash;
+ hash = (hash << 16) ^ tmp;
+ s += 2;
+ hash += hash >> 11;
+ }
+
+ // Handle end case
+ if (rem) {
+ hash += WTF::Unicode::foldCase(s[0]);
+ hash ^= hash << 11;
+ hash += hash >> 17;
+ }
+
+ // Force "avalanching" of final 127 bits
+ hash ^= hash << 3;
+ hash += hash >> 5;
+ hash ^= hash << 2;
+ hash += hash >> 15;
+ hash ^= hash << 10;
+
+ // this avoids ever returning a hash code of 0, since that is used to
+ // signal "hash not computed yet", using a value that is likely to be
+ // effectively the same as 0 when the low bits are masked
+ hash |= !hash << 31;
+
+ return hash;
+ }
+
+ static bool equal(const StringImpl* a, const StringImpl* b)
+ {
+ if (a == b)
+ return true;
+ if (!a || !b)
+ return false;
+ unsigned length = a->length();
+ if (length != b->length())
+ return false;
+ return WTF::Unicode::umemcasecmp(a->characters(), b->characters(), length) == 0;
+ }
+
+ static unsigned hash(const RefPtr<StringImpl>& key)
+ {
+ return hash(key.get());
+ }
+
+ static bool equal(const RefPtr<StringImpl>& a, const RefPtr<StringImpl>& b)
+ {
+ return equal(a.get(), b.get());
+ }
+
+ static unsigned hash(const String& key)
+ {
+ return hash(key.impl());
+ }
+ static unsigned hash(const AtomicString& key)
+ {
+ return hash(key.impl());
+ }
+ static bool equal(const String& a, const String& b)
+ {
+ return equal(a.impl(), b.impl());
+ }
+ static bool equal(const AtomicString& a, const AtomicString& b)
+ {
+ return (a == b) || equal(a.impl(), b.impl());
+ }
+
+ static const bool safeToCompareToEmptyOrDeleted = false;
+ };
+
+ // This hash can be used in cases where the key is a hash of a string, but we don't
+ // want to store the string. It's not really specific to string hashing, but all our
+ // current uses of it are for strings.
+ struct AlreadyHashed : IntHash<unsigned> {
+ static unsigned hash(unsigned key) { return key; }
+
+ // To use a hash value as a key for a hash table, we need to eliminate the
+ // "deleted" value, which is negative one. That could be done by changing
+ // the string hash function to never generate negative one, but this works
+ // and is still relatively efficient.
+ static unsigned avoidDeletedValue(unsigned hash)
+ {
+ ASSERT(hash);
+ unsigned newHash = hash | (!(hash + 1) << 31);
+ ASSERT(newHash);
+ ASSERT(newHash != 0xFFFFFFFF);
+ return newHash;
+ }
+ };
+
+}
+
+namespace WTF {
+
+ template<> struct HashTraits<WebCore::String> : GenericHashTraits<WebCore::String> {
+ static const bool emptyValueIsZero = true;
+ static void constructDeletedValue(WebCore::String& slot) { new (&slot) WebCore::String(HashTableDeletedValue); }
+ static bool isDeletedValue(const WebCore::String& slot) { return slot.isHashTableDeletedValue(); }
+ };
+
+}
+
+#endif
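
A hedged sketch of how CaseFoldingHash is typically plugged in: pass it as the hash argument of a HashMap keyed by String so lookups ignore case. The scheme names here are illustrative; note the caveat above that these hashers do not accept null strings.

#include "StringHash.h"
#include <wtf/HashMap.h>

static bool isKnownSchemeExample(const WebCore::String& scheme)
{
    typedef WTF::HashMap<WebCore::String, bool, WebCore::CaseFoldingHash> SchemeMap;
    SchemeMap schemes;
    schemes.set("http", true);
    schemes.set("https", true);

    // CaseFoldingHash hashes and compares keys case-insensitively,
    // so "HTTP", "Http" and "http" all map to the same entry.
    return schemes.contains(scheme);
}
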
--- /dev/null
+/*
+ * Copyright (C) 1999 Lars Knoll (knoll@kde.org)
+ * (C) 1999 Antti Koivisto (koivisto@kde.org)
+ * (C) 2001 Dirk Mueller ( mueller@kde.org )
+ * Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
+ * Copyright (C) 2006 Andrew Wellington (proton@wiretapped.net)
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#include "config.h"
+#include "StringImpl.h"
+
+#include "AtomicString.h"
+#include "StringBuffer.h"
+#include "StringHash.h"
+#include <wtf/StdLibExtras.h>
+#include <wtf/WTFThreadData.h>
+
+using namespace WTF;
+using namespace Unicode;
+using namespace std;
+
+namespace WebCore {
+
+static const unsigned minLengthToShare = 20;
+
+StringImpl::~StringImpl()
+{
+ ASSERT(!isStatic());
+
+ if (isAtomic())
+ AtomicString::remove(this);
+#if USE(JSC)
+ if (isIdentifier())
+ wtfThreadData().currentIdentifierTable()->remove(this);
+#endif
+
+ BufferOwnership ownership = bufferOwnership();
+ if (ownership != BufferInternal) {
+ if (ownership == BufferOwned) {
+ ASSERT(!m_sharedBuffer);
+ ASSERT(m_data);
+ fastFree(const_cast<UChar*>(m_data));
+ } else if (ownership == BufferSubstring) {
+ ASSERT(m_substringBuffer);
+ m_substringBuffer->deref();
+ } else {
+ ASSERT(ownership == BufferShared);
+ ASSERT(m_sharedBuffer);
+ m_sharedBuffer->deref();
+ }
+ }
+}
+
+PassRefPtr<StringImpl> StringImpl::createUninitialized(unsigned length, UChar*& data)
+{
+ if (!length) {
+ data = 0;
+ return empty();
+ }
+
+ // Allocate a single buffer large enough to contain the StringImpl
+ // struct as well as the data which it contains. This removes one
+ // heap allocation from this call.
+ if (length > ((std::numeric_limits<size_t>::max() - sizeof(StringImpl)) / sizeof(UChar)))
+ CRASH();
+ size_t size = sizeof(StringImpl) + length * sizeof(UChar);
+ StringImpl* string = static_cast<StringImpl*>(fastMalloc(size));
+
+ data = reinterpret_cast<UChar*>(string + 1);
+ return adoptRef(new (string) StringImpl(length));
+}
+
+PassRefPtr<StringImpl> StringImpl::create(const UChar* characters, unsigned length)
+{
+ if (!characters || !length)
+ return empty();
+
+ UChar* data;
+ PassRefPtr<StringImpl> string = createUninitialized(length, data);
+ memcpy(data, characters, length * sizeof(UChar));
+ return string;
+}
+
+PassRefPtr<StringImpl> StringImpl::create(const char* characters, unsigned length)
+{
+ if (!characters || !length)
+ return empty();
+
+ UChar* data;
+ PassRefPtr<StringImpl> string = createUninitialized(length, data);
+ for (unsigned i = 0; i != length; ++i) {
+ unsigned char c = characters[i];
+ data[i] = c;
+ }
+ return string;
+}
+
+PassRefPtr<StringImpl> StringImpl::create(const char* string)
+{
+ if (!string)
+ return empty();
+ return create(string, strlen(string));
+}
+
+PassRefPtr<StringImpl> StringImpl::create(const UChar* characters, unsigned length, PassRefPtr<SharedUChar> sharedBuffer)
+{
+ ASSERT(characters);
+ ASSERT(minLengthToShare && length >= minLengthToShare);
+ return adoptRef(new StringImpl(characters, length, sharedBuffer));
+}
+
+SharedUChar* StringImpl::sharedBuffer()
+{
+ if (m_length < minLengthToShare)
+ return 0;
+ // All static strings are smaller that the minimim length to share.
+ ASSERT(!isStatic());
+
+ BufferOwnership ownership = bufferOwnership();
+
+ if (ownership == BufferInternal)
+ return 0;
+ if (ownership == BufferSubstring)
+ return m_substringBuffer->sharedBuffer();
+ if (ownership == BufferOwned) {
+ ASSERT(!m_sharedBuffer);
+ m_sharedBuffer = SharedUChar::create(new SharableUChar(m_data)).releaseRef();
+ m_refCountAndFlags = (m_refCountAndFlags & ~s_refCountMaskBufferOwnership) | BufferShared;
+ }
+
+ ASSERT(bufferOwnership() == BufferShared);
+ ASSERT(m_sharedBuffer);
+ return m_sharedBuffer;
+}
+
+bool StringImpl::containsOnlyWhitespace()
+{
+ // FIXME: The definition of whitespace here includes a number of characters
+ // that are not whitespace from the point of view of RenderText; I wonder if
+ // that's a problem in practice.
+ for (unsigned i = 0; i < m_length; i++)
+ if (!isASCIISpace(m_data[i]))
+ return false;
+ return true;
+}
+
+PassRefPtr<StringImpl> StringImpl::substring(unsigned start, unsigned length)
+{
+ if (start >= m_length)
+ return empty();
+ unsigned maxLength = m_length - start;
+ if (length >= maxLength) {
+ if (!start)
+ return this;
+ length = maxLength;
+ }
+ return create(m_data + start, length);
+}
+
+UChar32 StringImpl::characterStartingAt(unsigned i)
+{
+ if (U16_IS_SINGLE(m_data[i]))
+ return m_data[i];
+ if (i + 1 < m_length && U16_IS_LEAD(m_data[i]) && U16_IS_TRAIL(m_data[i + 1]))
+ return U16_GET_SUPPLEMENTARY(m_data[i], m_data[i + 1]);
+ return 0;
+}
+
+PassRefPtr<StringImpl> StringImpl::lower()
+{
+ // Note: This is a hot function in the Dromaeo benchmark, specifically the
+ // no-op code path up through the first 'return' statement.
+
+ // First scan the string for uppercase and non-ASCII characters:
+ UChar ored = 0;
+ bool noUpper = true;
+ const UChar* end = m_data + m_length;
+ for (const UChar* chp = m_data; chp != end; chp++) {
+ if (UNLIKELY(isASCIIUpper(*chp)))
+ noUpper = false;
+ ored |= *chp;
+ }
+
+ // Nothing to do if the string is all ASCII with no uppercase.
+ if (noUpper && !(ored & ~0x7F))
+ return this;
+
+ int32_t length = m_length;
+ UChar* data;
+ RefPtr<StringImpl> newImpl = createUninitialized(m_length, data);
+
+ if (!(ored & ~0x7F)) {
+ // Do a faster loop for the case where all the characters are ASCII.
+ for (int i = 0; i < length; i++) {
+ UChar c = m_data[i];
+ data[i] = toASCIILower(c);
+ }
+ return newImpl;
+ }
+
+ // Do a slower implementation for cases that include non-ASCII characters.
+ bool error;
+ int32_t realLength = Unicode::toLower(data, length, m_data, m_length, &error);
+ if (!error && realLength == length)
+ return newImpl;
+ newImpl = createUninitialized(realLength, data);
+ Unicode::toLower(data, realLength, m_data, m_length, &error);
+ if (error)
+ return this;
+ return newImpl;
+}
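
The fast path above relies on a single pass that both looks for ASCII uppercase letters and ORs every code unit together; if no bit above 0x7F ends up set, the string is pure ASCII and the cheap byte-wise lowercasing is safe. A standalone sketch of that pre-scan (not part of the patch), using uint16_t in place of UChar:

#include <stdint.h>

static bool allASCIIWithNoUppercase(const uint16_t* data, unsigned length)
{
    uint16_t ored = 0;
    bool sawUpper = false;
    for (unsigned i = 0; i < length; ++i) {
        if (data[i] >= 'A' && data[i] <= 'Z')
            sawUpper = true;
        ored |= data[i];
    }
    // Any bit above 0x7F in 'ored' means at least one non-ASCII code unit was seen.
    return !sawUpper && !(ored & ~0x7F);
}
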
+
+PassRefPtr<StringImpl> StringImpl::upper()
+{
+ // This function could be optimized for no-op cases the way lower() is,
+ // but in empirical testing, few actual calls to upper() are no-ops, so
+ // it wouldn't be worth the extra time for pre-scanning.
+ UChar* data;
+ PassRefPtr<StringImpl> newImpl = createUninitialized(m_length, data);
+ int32_t length = m_length;
+
+ // Do a faster loop for the case where all the characters are ASCII.
+ UChar ored = 0;
+ for (int i = 0; i < length; i++) {
+ UChar c = m_data[i];
+ ored |= c;
+ data[i] = toASCIIUpper(c);
+ }
+ if (!(ored & ~0x7F))
+ return newImpl;
+
+ // Do a slower implementation for cases that include non-ASCII characters.
+ bool error;
+ int32_t realLength = Unicode::toUpper(data, length, m_data, m_length, &error);
+ if (!error && realLength == length)
+ return newImpl;
+ newImpl = createUninitialized(realLength, data);
+ Unicode::toUpper(data, realLength, m_data, m_length, &error);
+ if (error)
+ return this;
+ return newImpl;
+}
+
+PassRefPtr<StringImpl> StringImpl::secure(UChar aChar, bool last)
+{
+ int length = m_length;
+ Vector<UChar> data(length);
+ if (length > 0) {
+ for (int i = 0; i < length - 1; ++i)
+ data[i] = aChar;
+ data[length - 1] = (last ? aChar : m_data[length - 1]);
+ }
+ return adopt(data);
+}
+
+PassRefPtr<StringImpl> StringImpl::foldCase()
+{
+ UChar* data;
+ PassRefPtr<StringImpl> newImpl = createUninitialized(m_length, data);
+ int32_t length = m_length;
+
+ // Do a faster loop for the case where all the characters are ASCII.
+ UChar ored = 0;
+ for (int i = 0; i < length; i++) {
+ UChar c = m_data[i];
+ ored |= c;
+ data[i] = toASCIILower(c);
+ }
+ if (!(ored & ~0x7F))
+ return newImpl;
+
+ // Do a slower implementation for cases that include non-ASCII characters.
+ bool error;
+ int32_t realLength = Unicode::foldCase(data, length, m_data, m_length, &error);
+ if (!error && realLength == length)
+ return newImpl;
+ newImpl = createUninitialized(realLength, data);
+ Unicode::foldCase(data, realLength, m_data, m_length, &error);
+ if (error)
+ return this;
+ return newImpl;
+}
+
+PassRefPtr<StringImpl> StringImpl::stripWhiteSpace()
+{
+ if (!m_length)
+ return empty();
+
+ unsigned start = 0;
+ unsigned end = m_length - 1;
+
+ // skip white space from start
+ while (start <= end && isSpaceOrNewline(m_data[start]))
+ start++;
+
+ // only white space
+ if (start > end)
+ return empty();
+
+ // skip white space from end
+ while (end && isSpaceOrNewline(m_data[end]))
+ end--;
+
+ if (!start && end == m_length - 1)
+ return this;
+ return create(m_data + start, end + 1 - start);
+}
+
+PassRefPtr<StringImpl> StringImpl::removeCharacters(CharacterMatchFunctionPtr findMatch)
+{
+ const UChar* from = m_data;
+ const UChar* fromend = from + m_length;
+
+ // Assume the common case will not remove any characters
+ while (from != fromend && !findMatch(*from))
+ from++;
+ if (from == fromend)
+ return this;
+
+ StringBuffer data(m_length);
+ UChar* to = data.characters();
+ unsigned outc = from - m_data;
+
+ if (outc)
+ memcpy(to, m_data, outc * sizeof(UChar));
+
+ while (true) {
+ while (from != fromend && findMatch(*from))
+ from++;
+ while (from != fromend && !findMatch(*from))
+ to[outc++] = *from++;
+ if (from == fromend)
+ break;
+ }
+
+ data.shrink(outc);
+
+ return adopt(data);
+}
+
+PassRefPtr<StringImpl> StringImpl::simplifyWhiteSpace()
+{
+ StringBuffer data(m_length);
+
+ const UChar* from = m_data;
+ const UChar* fromend = from + m_length;
+ int outc = 0;
+ bool changedToSpace = false;
+
+ UChar* to = data.characters();
+
+ while (true) {
+ while (from != fromend && isSpaceOrNewline(*from)) {
+ if (*from != ' ')
+ changedToSpace = true;
+ from++;
+ }
+ while (from != fromend && !isSpaceOrNewline(*from))
+ to[outc++] = *from++;
+ if (from != fromend)
+ to[outc++] = ' ';
+ else
+ break;
+ }
+
+ if (outc > 0 && to[outc - 1] == ' ')
+ outc--;
+
+ if (static_cast<unsigned>(outc) == m_length && !changedToSpace)
+ return this;
+
+ data.shrink(outc);
+
+ return adopt(data);
+}
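+
+// Illustrative result of simplifyWhiteSpace() above: "  a\t b  " collapses each
+// run of whitespace to a single space and drops the leading and trailing runs,
+// yielding "a b"; the trailing space appended by the copy loop is trimmed by the
+// final outc-- before adoption.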
+
+int StringImpl::toIntStrict(bool* ok, int base)
+{
+ return charactersToIntStrict(m_data, m_length, ok, base);
+}
+
+unsigned StringImpl::toUIntStrict(bool* ok, int base)
+{
+ return charactersToUIntStrict(m_data, m_length, ok, base);
+}
+
+int64_t StringImpl::toInt64Strict(bool* ok, int base)
+{
+ return charactersToInt64Strict(m_data, m_length, ok, base);
+}
+
+uint64_t StringImpl::toUInt64Strict(bool* ok, int base)
+{
+ return charactersToUInt64Strict(m_data, m_length, ok, base);
+}
+
+intptr_t StringImpl::toIntPtrStrict(bool* ok, int base)
+{
+ return charactersToIntPtrStrict(m_data, m_length, ok, base);
+}
+
+int StringImpl::toInt(bool* ok)
+{
+ return charactersToInt(m_data, m_length, ok);
+}
+
+unsigned StringImpl::toUInt(bool* ok)
+{
+ return charactersToUInt(m_data, m_length, ok);
+}
+
+int64_t StringImpl::toInt64(bool* ok)
+{
+ return charactersToInt64(m_data, m_length, ok);
+}
+
+uint64_t StringImpl::toUInt64(bool* ok)
+{
+ return charactersToUInt64(m_data, m_length, ok);
+}
+
+intptr_t StringImpl::toIntPtr(bool* ok)
+{
+ return charactersToIntPtr(m_data, m_length, ok);
+}
+
+double StringImpl::toDouble(bool* ok)
+{
+ return charactersToDouble(m_data, m_length, ok);
+}
+
+float StringImpl::toFloat(bool* ok)
+{
+ return charactersToFloat(m_data, m_length, ok);
+}
+
+static bool equal(const UChar* a, const char* b, int length)
+{
+ ASSERT(length >= 0);
+ while (length--) {
+ unsigned char bc = *b++;
+ if (*a++ != bc)
+ return false;
+ }
+ return true;
+}
+
+bool equalIgnoringCase(const UChar* a, const char* b, unsigned length)
+{
+ while (length--) {
+ unsigned char bc = *b++;
+ if (foldCase(*a++) != foldCase(bc))
+ return false;
+ }
+ return true;
+}
+
+static inline bool equalIgnoringCase(const UChar* a, const UChar* b, int length)
+{
+ ASSERT(length >= 0);
+ return umemcasecmp(a, b, length) == 0;
+}
+
+int StringImpl::find(const char* chs, int index, bool caseSensitive)
+{
+ if (!chs || index < 0)
+ return -1;
+
+ int chsLength = strlen(chs);
+ int n = m_length - index;
+ if (n < 0)
+ return -1;
+ n -= chsLength - 1;
+ if (n <= 0)
+ return -1;
+
+ const char* chsPlusOne = chs + 1;
+ int chsLengthMinusOne = chsLength - 1;
+
+ const UChar* ptr = m_data + index - 1;
+ if (caseSensitive) {
+ UChar c = *chs;
+ do {
+ if (*++ptr == c && equal(ptr + 1, chsPlusOne, chsLengthMinusOne))
+ return m_length - chsLength - n + 1;
+ } while (--n);
+ } else {
+ UChar lc = Unicode::foldCase(*chs);
+ do {
+ if (Unicode::foldCase(*++ptr) == lc && equalIgnoringCase(ptr + 1, chsPlusOne, chsLengthMinusOne))
+ return m_length - chsLength - n + 1;
+ } while (--n);
+ }
+
+ return -1;
+}
+
+int StringImpl::find(UChar c, int start)
+{
+ return WebCore::find(m_data, m_length, c, start);
+}
+
+int StringImpl::find(CharacterMatchFunctionPtr matchFunction, int start)
+{
+ return WebCore::find(m_data, m_length, matchFunction, start);
+}
+
+int StringImpl::find(StringImpl* str, int index, bool caseSensitive)
+{
+    /*
+      We use a simple trick for efficiency's sake. Instead of
+      comparing strings, we compare the sum of str with that of
+      a part of this string. Only if that matches do we call
+      memcmp or equalIgnoringCase.
+    */
+ ASSERT(str);
+ if (index < 0)
+ index += m_length;
+ int lstr = str->m_length;
+ int lthis = m_length - index;
+    if (static_cast<unsigned>(lthis) > m_length)
+ return -1;
+ int delta = lthis - lstr;
+ if (delta < 0)
+ return -1;
+
+ const UChar* uthis = m_data + index;
+ const UChar* ustr = str->m_data;
+ unsigned hthis = 0;
+ unsigned hstr = 0;
+ if (caseSensitive) {
+ for (int i = 0; i < lstr; i++) {
+ hthis += uthis[i];
+ hstr += ustr[i];
+ }
+ int i = 0;
+ while (1) {
+ if (hthis == hstr && memcmp(uthis + i, ustr, lstr * sizeof(UChar)) == 0)
+ return index + i;
+ if (i == delta)
+ return -1;
+ hthis += uthis[i + lstr];
+ hthis -= uthis[i];
+ i++;
+ }
+ } else {
+        for (int i = 0; i < lstr; i++) {
+ hthis += toASCIILower(uthis[i]);
+ hstr += toASCIILower(ustr[i]);
+ }
+ int i = 0;
+ while (1) {
+ if (hthis == hstr && equalIgnoringCase(uthis + i, ustr, lstr))
+ return index + i;
+ if (i == delta)
+ return -1;
+ hthis += toASCIILower(uthis[i + lstr]);
+ hthis -= toASCIILower(uthis[i]);
+ i++;
+ }
+ }
+}
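+
+// Worked example for the summing trick in find() above (illustrative): searching
+// for "bc" in "abcd" first computes the sums 'a'+'b' for this string and 'b'+'c'
+// for str; they differ, so no memcmp is done. Sliding one position updates the
+// running sum in O(1) (add 'c', subtract 'a'), the sums now match, and memcmp
+// confirms the hit at index 1. A matching sum alone is not proof of a match,
+// which is why memcmp/equalIgnoringCase is still required.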
+
+int StringImpl::reverseFind(UChar c, int index)
+{
+ return WebCore::reverseFind(m_data, m_length, c, index);
+}
+
+int StringImpl::reverseFind(StringImpl* str, int index, bool caseSensitive)
+{
+ /*
+ See StringImpl::find() for explanations.
+ */
+ ASSERT(str);
+ int lthis = m_length;
+ if (index < 0)
+ index += lthis;
+
+ int lstr = str->m_length;
+ int delta = lthis - lstr;
+    if (index < 0 || index > lthis || delta < 0)
+        return -1;
+    if (index > delta)
+        index = delta;
+
+    const UChar* uthis = m_data;
+    const UChar* ustr = str->m_data;
+    unsigned hthis = 0;
+    unsigned hstr = 0;
+    int i;
+    if (caseSensitive) {
+        for (i = 0; i < lstr; i++) {
+ hthis += uthis[index + i];
+ hstr += ustr[i];
+ }
+ i = index;
+ while (1) {
+ if (hthis == hstr && memcmp(uthis + i, ustr, lstr * sizeof(UChar)) == 0)
+ return i;
+ if (i == 0)
+ return -1;
+ i--;
+ hthis -= uthis[i + lstr];
+ hthis += uthis[i];
+ }
+ } else {
+ for (i = 0; i < lstr; i++) {
+ hthis += toASCIILower(uthis[index + i]);
+ hstr += toASCIILower(ustr[i]);
+ }
+ i = index;
+ while (1) {
+            if (hthis == hstr && equalIgnoringCase(uthis + i, ustr, lstr))
+ return i;
+ if (i == 0)
+ return -1;
+ i--;
+ hthis -= toASCIILower(uthis[i + lstr]);
+ hthis += toASCIILower(uthis[i]);
+ }
+ }
+
+ // Should never get here.
+ return -1;
+}
+
+bool StringImpl::endsWith(StringImpl* str, bool caseSensitive)
+{
+    ASSERT(str);
+    int start = m_length - str->m_length;
+    if (start >= 0)
+        return (find(str, start, caseSensitive) == start);
+ return false;
+}
+
+PassRefPtr<StringImpl> StringImpl::replace(UChar oldC, UChar newC)
+{
+ if (oldC == newC)
+ return this;
+ unsigned i;
+ for (i = 0; i != m_length; ++i)
+ if (m_data[i] == oldC)
+ break;
+ if (i == m_length)
+ return this;
+
+ UChar* data;
+ PassRefPtr<StringImpl> newImpl = createUninitialized(m_length, data);
+
+ for (i = 0; i != m_length; ++i) {
+ UChar ch = m_data[i];
+ if (ch == oldC)
+ ch = newC;
+ data[i] = ch;
+ }
+ return newImpl;
+}
+
+PassRefPtr<StringImpl> StringImpl::replace(unsigned position, unsigned lengthToReplace, StringImpl* str)
+{
+ position = min(position, length());
+ lengthToReplace = min(lengthToReplace, length() - position);
+ unsigned lengthToInsert = str ? str->length() : 0;
+ if (!lengthToReplace && !lengthToInsert)
+ return this;
+ UChar* data;
+
+ if ((length() - lengthToReplace) >= (numeric_limits<unsigned>::max() - lengthToInsert))
+ CRASH();
+
+ PassRefPtr<StringImpl> newImpl =
+ createUninitialized(length() - lengthToReplace + lengthToInsert, data);
+ memcpy(data, characters(), position * sizeof(UChar));
+ if (str)
+ memcpy(data + position, str->characters(), lengthToInsert * sizeof(UChar));
+ memcpy(data + position + lengthToInsert, characters() + position + lengthToReplace,
+ (length() - position - lengthToReplace) * sizeof(UChar));
+ return newImpl;
+}
+
+PassRefPtr<StringImpl> StringImpl::replace(UChar pattern, StringImpl* replacement)
+{
+ if (!replacement)
+ return this;
+
+ int repStrLength = replacement->length();
+ int srcSegmentStart = 0;
+ unsigned matchCount = 0;
+
+ // Count the matches
+ while ((srcSegmentStart = find(pattern, srcSegmentStart)) >= 0) {
+ ++matchCount;
+ ++srcSegmentStart;
+ }
+
+ // If we have 0 matches, we don't have to do any more work
+ if (!matchCount)
+ return this;
+
+ if (repStrLength && matchCount > numeric_limits<unsigned>::max() / repStrLength)
+ CRASH();
+
+ unsigned replaceSize = matchCount * repStrLength;
+ unsigned newSize = m_length - matchCount;
+ if (newSize >= (numeric_limits<unsigned>::max() - replaceSize))
+ CRASH();
+
+ newSize += replaceSize;
+
+ UChar* data;
+ PassRefPtr<StringImpl> newImpl = createUninitialized(newSize, data);
+
+ // Construct the new data
+ int srcSegmentEnd;
+ int srcSegmentLength;
+ srcSegmentStart = 0;
+ int dstOffset = 0;
+
+ while ((srcSegmentEnd = find(pattern, srcSegmentStart)) >= 0) {
+ srcSegmentLength = srcSegmentEnd - srcSegmentStart;
+ memcpy(data + dstOffset, m_data + srcSegmentStart, srcSegmentLength * sizeof(UChar));
+ dstOffset += srcSegmentLength;
+ memcpy(data + dstOffset, replacement->m_data, repStrLength * sizeof(UChar));
+ dstOffset += repStrLength;
+ srcSegmentStart = srcSegmentEnd + 1;
+ }
+
+ srcSegmentLength = m_length - srcSegmentStart;
+ memcpy(data + dstOffset, m_data + srcSegmentStart, srcSegmentLength * sizeof(UChar));
+
+ ASSERT(dstOffset + srcSegmentLength == static_cast<int>(newImpl->length()));
+
+ return newImpl;
+}
+
+PassRefPtr<StringImpl> StringImpl::replace(StringImpl* pattern, StringImpl* replacement)
+{
+ if (!pattern || !replacement)
+ return this;
+
+ int patternLength = pattern->length();
+ if (!patternLength)
+ return this;
+
+ int repStrLength = replacement->length();
+ int srcSegmentStart = 0;
+ unsigned matchCount = 0;
+
+ // Count the matches
+ while ((srcSegmentStart = find(pattern, srcSegmentStart)) >= 0) {
+ ++matchCount;
+ srcSegmentStart += patternLength;
+ }
+
+ // If we have 0 matches, we don't have to do any more work
+ if (!matchCount)
+ return this;
+
+ unsigned newSize = m_length - matchCount * patternLength;
+ if (repStrLength && matchCount > numeric_limits<unsigned>::max() / repStrLength)
+ CRASH();
+
+ if (newSize > (numeric_limits<unsigned>::max() - matchCount * repStrLength))
+ CRASH();
+
+ newSize += matchCount * repStrLength;
+
+ UChar* data;
+ PassRefPtr<StringImpl> newImpl = createUninitialized(newSize, data);
+
+ // Construct the new data
+ int srcSegmentEnd;
+ int srcSegmentLength;
+ srcSegmentStart = 0;
+ int dstOffset = 0;
+
+ while ((srcSegmentEnd = find(pattern, srcSegmentStart)) >= 0) {
+ srcSegmentLength = srcSegmentEnd - srcSegmentStart;
+ memcpy(data + dstOffset, m_data + srcSegmentStart, srcSegmentLength * sizeof(UChar));
+ dstOffset += srcSegmentLength;
+ memcpy(data + dstOffset, replacement->m_data, repStrLength * sizeof(UChar));
+ dstOffset += repStrLength;
+ srcSegmentStart = srcSegmentEnd + patternLength;
+ }
+
+ srcSegmentLength = m_length - srcSegmentStart;
+ memcpy(data + dstOffset, m_data + srcSegmentStart, srcSegmentLength * sizeof(UChar));
+
+ ASSERT(dstOffset + srcSegmentLength == static_cast<int>(newImpl->length()));
+
+ return newImpl;
+}
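+
+// Illustrative walk-through of the two-pass replace() above: replacing "ab" with
+// "xyz" in "ab-ab" counts matchCount == 2, sizes the new buffer as
+// 5 - 2 * 2 + 2 * 3 == 7 characters, then copies alternating source segments and
+// replacement text to produce "xyz-xyz".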
+
+bool equal(const StringImpl* a, const StringImpl* b)
+{
+ return StringHash::equal(a, b);
+}
+
+bool equal(const StringImpl* a, const char* b)
+{
+ if (!a)
+ return !b;
+ if (!b)
+ return !a;
+
+ unsigned length = a->length();
+ const UChar* as = a->characters();
+ for (unsigned i = 0; i != length; ++i) {
+ unsigned char bc = b[i];
+ if (!bc)
+ return false;
+ if (as[i] != bc)
+ return false;
+ }
+
+ return !b[length];
+}
+
+bool equalIgnoringCase(StringImpl* a, StringImpl* b)
+{
+ return CaseFoldingHash::equal(a, b);
+}
+
+bool equalIgnoringCase(StringImpl* a, const char* b)
+{
+ if (!a)
+ return !b;
+ if (!b)
+ return !a;
+
+ unsigned length = a->length();
+ const UChar* as = a->characters();
+
+ // Do a faster loop for the case where all the characters are ASCII.
+ UChar ored = 0;
+ bool equal = true;
+ for (unsigned i = 0; i != length; ++i) {
+ char bc = b[i];
+ if (!bc)
+ return false;
+ UChar ac = as[i];
+ ored |= ac;
+ equal = equal && (toASCIILower(ac) == toASCIILower(bc));
+ }
+
+ // Do a slower implementation for cases that include non-ASCII characters.
+ if (ored & ~0x7F) {
+ equal = true;
+ for (unsigned i = 0; i != length; ++i) {
+ unsigned char bc = b[i];
+ equal = equal && (foldCase(as[i]) == foldCase(bc));
+ }
+ }
+
+ return equal && !b[length];
+}
+
+bool equalIgnoringNullity(StringImpl* a, StringImpl* b)
+{
+ if (StringHash::equal(a, b))
+ return true;
+ if (!a && b && !b->length())
+ return true;
+ if (!b && a && !a->length())
+ return true;
+
+ return false;
+}
+
+Vector<char> StringImpl::ascii()
+{
+ Vector<char> buffer(m_length + 1);
+ for (unsigned i = 0; i != m_length; ++i) {
+ UChar c = m_data[i];
+ if ((c >= 0x20 && c < 0x7F) || c == 0x00)
+ buffer[i] = static_cast<char>(c);
+ else
+ buffer[i] = '?';
+ }
+ buffer[m_length] = '\0';
+ return buffer;
+}
+
+WTF::Unicode::Direction StringImpl::defaultWritingDirection()
+{
+ for (unsigned i = 0; i < m_length; ++i) {
+ WTF::Unicode::Direction charDirection = WTF::Unicode::direction(m_data[i]);
+ if (charDirection == WTF::Unicode::LeftToRight)
+ return WTF::Unicode::LeftToRight;
+ if (charDirection == WTF::Unicode::RightToLeft || charDirection == WTF::Unicode::RightToLeftArabic)
+ return WTF::Unicode::RightToLeft;
+ }
+ return WTF::Unicode::LeftToRight;
+}
+
+// This is a hot function because it's used when parsing HTML.
+PassRefPtr<StringImpl> StringImpl::createStrippingNullCharactersSlowCase(const UChar* characters, unsigned length)
+{
+ StringBuffer strippedCopy(length);
+ unsigned strippedLength = 0;
+ for (unsigned i = 0; i < length; i++) {
+ if (int c = characters[i])
+ strippedCopy[strippedLength++] = c;
+ }
+ ASSERT(strippedLength < length); // Only take the slow case when stripping.
+ strippedCopy.shrink(strippedLength);
+ return adopt(strippedCopy);
+}
+
+PassRefPtr<StringImpl> StringImpl::adopt(StringBuffer& buffer)
+{
+ unsigned length = buffer.length();
+ if (length == 0)
+ return empty();
+ return adoptRef(new StringImpl(buffer.release(), length));
+}
+
+int StringImpl::wordCount(int maxWordsToCount)
+{
+ unsigned wordCount = 0;
+ unsigned i;
+ bool atWord = false;
+ for (i = 0; i < m_length; i++) {
+ if (u_isspace(m_data[i])) {
+ atWord = false;
+ } else if (!atWord) {
+ wordCount++;
+            if (wordCount >= static_cast<unsigned>(maxWordsToCount))
+ return wordCount;
+ atWord = true;
+ }
+ }
+ return wordCount;
+}
+
+PassRefPtr<StringImpl> StringImpl::createWithTerminatingNullCharacter(const StringImpl& string)
+{
+ // Use createUninitialized instead of 'new StringImpl' so that the string and its buffer
+ // get allocated in a single malloc block.
+ UChar* data;
+ int length = string.m_length;
+ RefPtr<StringImpl> terminatedString = createUninitialized(length + 1, data);
+ memcpy(data, string.m_data, length * sizeof(UChar));
+ data[length] = 0;
+ terminatedString->m_length--;
+ terminatedString->m_hash = string.m_hash;
+ terminatedString->m_refCountAndFlags |= s_refCountFlagHasTerminatingNullCharacter;
+ return terminatedString.release();
+}
+
+PassRefPtr<StringImpl> StringImpl::threadsafeCopy() const
+{
+ return create(m_data, m_length);
+}
+
+PassRefPtr<StringImpl> StringImpl::crossThreadString()
+{
+ if (SharedUChar* sharedBuffer = this->sharedBuffer())
+ return adoptRef(new StringImpl(m_data, m_length, sharedBuffer->crossThreadCopy()));
+
+ // If no shared buffer is available, create a copy.
+ return threadsafeCopy();
+}
+
+} // namespace WebCore
--- /dev/null
+/*
+ * Copyright (C) 1999 Lars Knoll (knoll@kde.org)
+ * Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010 Apple Inc. All rights reserved.
+ * Copyright (C) 2009 Google Inc. All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef StringImpl_h
+#define StringImpl_h
+
+#include <limits.h>
+#include <wtf/ASCIICType.h>
+#include <wtf/CrossThreadRefCounted.h>
+#include <wtf/OwnFastMallocPtr.h>
+#include <wtf/StdLibExtras.h>
+#include <wtf/StringHashFunctions.h>
+#include <wtf/Vector.h>
+#include <wtf/text/StringImplBase.h>
+#include <wtf/unicode/Unicode.h>
+
+#if PLATFORM(CF)
+typedef const struct __CFString * CFStringRef;
+#endif
+
+#ifdef __OBJC__
+@class NSString;
+#endif
+
+// FIXME: This is a temporary layering violation while we move string code to WTF.
+// We are landing the file moves in one patch and will follow up with patches to change the namespaces.
+namespace JSC {
+
+struct IdentifierCStringTranslator;
+struct IdentifierUCharBufferTranslator;
+
+}
+
+// FIXME: This is a temporary layering violation while we move string code to WTF.
+// We are landing the file moves in one patch and will follow up with patches to change the namespaces.
+namespace WebCore {
+
+class StringBuffer;
+
+struct CStringTranslator;
+struct HashAndCharactersTranslator;
+struct StringHash;
+struct UCharBufferTranslator;
+
+enum TextCaseSensitivity { TextCaseSensitive, TextCaseInsensitive };
+
+typedef OwnFastMallocPtr<const UChar> SharableUChar;
+typedef CrossThreadRefCounted<SharableUChar> SharedUChar;
+typedef bool (*CharacterMatchFunctionPtr)(UChar);
+
+class StringImpl : public StringImplBase {
+ friend struct JSC::IdentifierCStringTranslator;
+ friend struct JSC::IdentifierUCharBufferTranslator;
+ friend struct CStringTranslator;
+ friend struct HashAndCharactersTranslator;
+ friend struct UCharBufferTranslator;
+ friend class AtomicStringImpl;
+private:
+    // Used to construct static strings, which have a special refCount that can never hit zero.
+ // This means that the static string will never be destroyed, which is important because
+ // static strings will be shared across threads & ref-counted in a non-threadsafe manner.
+ StringImpl(const UChar* characters, unsigned length, StaticStringConstructType)
+ : StringImplBase(length, ConstructStaticString)
+ , m_data(characters)
+ , m_buffer(0)
+ , m_hash(0)
+ {
+ // Ensure that the hash is computed so that AtomicStringHash can call existingHash()
+ // with impunity. The empty string is special because it is never entered into
+ // AtomicString's HashKey, but still needs to compare correctly.
+ hash();
+ }
+
+ // Create a normal string with internal storage (BufferInternal)
+ StringImpl(unsigned length)
+ : StringImplBase(length, BufferInternal)
+ , m_data(reinterpret_cast<const UChar*>(this + 1))
+ , m_buffer(0)
+ , m_hash(0)
+ {
+ ASSERT(m_data);
+ ASSERT(m_length);
+ }
+
+ // Create a StringImpl adopting ownership of the provided buffer (BufferOwned)
+ StringImpl(const UChar* characters, unsigned length)
+ : StringImplBase(length, BufferOwned)
+ , m_data(characters)
+ , m_buffer(0)
+ , m_hash(0)
+ {
+ ASSERT(m_data);
+ ASSERT(m_length);
+ }
+
+ // Used to create new strings that are a substring of an existing StringImpl (BufferSubstring)
+ StringImpl(const UChar* characters, unsigned length, PassRefPtr<StringImpl> base)
+ : StringImplBase(length, BufferSubstring)
+ , m_data(characters)
+ , m_substringBuffer(base.releaseRef())
+ , m_hash(0)
+ {
+ ASSERT(m_data);
+ ASSERT(m_length);
+ ASSERT(m_substringBuffer->bufferOwnership() != BufferSubstring);
+ }
+
+ // Used to construct new strings sharing an existing SharedUChar (BufferShared)
+ StringImpl(const UChar* characters, unsigned length, PassRefPtr<SharedUChar> sharedBuffer)
+ : StringImplBase(length, BufferShared)
+ , m_data(characters)
+ , m_sharedBuffer(sharedBuffer.releaseRef())
+ , m_hash(0)
+ {
+ ASSERT(m_data);
+ ASSERT(m_length);
+ }
+
+ // For use only by AtomicString's XXXTranslator helpers.
+ void setHash(unsigned hash)
+ {
+ ASSERT(!isStatic());
+ ASSERT(!m_hash);
+ ASSERT(hash == computeHash(m_data, m_length));
+ m_hash = hash;
+ }
+
+public:
+ ~StringImpl();
+
+ static PassRefPtr<StringImpl> create(const UChar*, unsigned length);
+ static PassRefPtr<StringImpl> create(const char*, unsigned length);
+ static PassRefPtr<StringImpl> create(const char*);
+ static PassRefPtr<StringImpl> create(const UChar*, unsigned length, PassRefPtr<SharedUChar> sharedBuffer);
+ static PassRefPtr<StringImpl> create(PassRefPtr<StringImpl> rep, unsigned offset, unsigned length)
+ {
+ ASSERT(rep);
+ ASSERT(length <= rep->length());
+
+ if (!length)
+ return empty();
+
+ StringImpl* ownerRep = (rep->bufferOwnership() == BufferSubstring) ? rep->m_substringBuffer : rep.get();
+ return adoptRef(new StringImpl(rep->m_data + offset, length, ownerRep));
+ }
+
+ static PassRefPtr<StringImpl> createUninitialized(unsigned length, UChar*& data);
+ static PassRefPtr<StringImpl> tryCreateUninitialized(unsigned length, UChar*& output)
+ {
+ if (!length) {
+ output = 0;
+ return empty();
+ }
+
+ if (length > ((std::numeric_limits<size_t>::max() - sizeof(StringImpl)) / sizeof(UChar)))
+ return 0;
+ StringImpl* resultImpl;
+ if (!tryFastMalloc(sizeof(UChar) * length + sizeof(StringImpl)).getValue(resultImpl))
+ return 0;
+ output = reinterpret_cast<UChar*>(resultImpl + 1);
+ return adoptRef(new(resultImpl) StringImpl(length));
+ }
+
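+    // Buffer layout note (illustrative): for BufferInternal strings created by
+    // createUninitialized() and tryCreateUninitialized() above, the header and the
+    // character data share a single allocation:
+    //   [ StringImpl ][ UChar data[length] ]
+    // so m_data == reinterpret_cast<const UChar*>(this + 1).
+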
+ static unsigned dataOffset() { return OBJECT_OFFSETOF(StringImpl, m_data); }
+ static PassRefPtr<StringImpl> createWithTerminatingNullCharacter(const StringImpl&);
+ static PassRefPtr<StringImpl> createStrippingNullCharacters(const UChar*, unsigned length);
+
+ template<size_t inlineCapacity>
+ static PassRefPtr<StringImpl> adopt(Vector<UChar, inlineCapacity>& vector)
+ {
+ if (size_t size = vector.size()) {
+ ASSERT(vector.data());
+ return adoptRef(new StringImpl(vector.releaseBuffer(), size));
+ }
+ return empty();
+ }
+ static PassRefPtr<StringImpl> adopt(StringBuffer&);
+
+ SharedUChar* sharedBuffer();
+ const UChar* characters() const { return m_data; }
+
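+    // cost() reports m_length the first time it is called for a given string and 0
+    // afterwards (the ShouldReportedCost flag is cleared on the first call);
+    // substrings forward to their base so each underlying buffer is charged once.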
+ size_t cost()
+ {
+ // For substrings, return the cost of the base string.
+ if (bufferOwnership() == BufferSubstring)
+ return m_substringBuffer->cost();
+
+ if (m_refCountAndFlags & s_refCountFlagShouldReportedCost) {
+ m_refCountAndFlags &= ~s_refCountFlagShouldReportedCost;
+ return m_length;
+ }
+ return 0;
+ }
+
+ bool isIdentifier() const { return m_refCountAndFlags & s_refCountFlagIsIdentifier; }
+ void setIsIdentifier(bool isIdentifier)
+ {
+ ASSERT(!isStatic());
+ if (isIdentifier)
+ m_refCountAndFlags |= s_refCountFlagIsIdentifier;
+ else
+ m_refCountAndFlags &= ~s_refCountFlagIsIdentifier;
+ }
+
+ bool hasTerminatingNullCharacter() const { return m_refCountAndFlags & s_refCountFlagHasTerminatingNullCharacter; }
+
+ bool isAtomic() const { return m_refCountAndFlags & s_refCountFlagIsAtomic; }
+    void setIsAtomic(bool isAtomic)
+    {
+        ASSERT(!isStatic());
+        if (isAtomic)
+ m_refCountAndFlags |= s_refCountFlagIsAtomic;
+ else
+ m_refCountAndFlags &= ~s_refCountFlagIsAtomic;
+ }
+
+ unsigned hash() const { if (!m_hash) m_hash = computeHash(m_data, m_length); return m_hash; }
+ unsigned existingHash() const { ASSERT(m_hash); return m_hash; }
+ static unsigned computeHash(const UChar* data, unsigned length) { return WTF::stringHash(data, length); }
+ static unsigned computeHash(const char* data, unsigned length) { return WTF::stringHash(data, length); }
+ static unsigned computeHash(const char* data) { return WTF::stringHash(data); }
+
+ ALWAYS_INLINE void deref() { m_refCountAndFlags -= s_refCountIncrement; if (!(m_refCountAndFlags & (s_refCountMask | s_refCountFlagStatic))) delete this; }
+ ALWAYS_INLINE bool hasOneRef() const { return (m_refCountAndFlags & (s_refCountMask | s_refCountFlagStatic)) == s_refCountIncrement; }
+
+ static StringImpl* empty();
+
+ static void copyChars(UChar* destination, const UChar* source, unsigned numCharacters)
+ {
+ if (numCharacters <= s_copyCharsInlineCutOff) {
+ for (unsigned i = 0; i < numCharacters; ++i)
+ destination[i] = source[i];
+ } else
+ memcpy(destination, source, numCharacters * sizeof(UChar));
+ }
+
+ // Returns a StringImpl suitable for use on another thread.
+ PassRefPtr<StringImpl> crossThreadString();
+ // Makes a deep copy. Helpful only if you need to use a String on another thread
+ // (use crossThreadString if the method call doesn't need to be threadsafe).
+ // Since StringImpl objects are immutable, there's no other reason to make a copy.
+ PassRefPtr<StringImpl> threadsafeCopy() const;
+
+ PassRefPtr<StringImpl> substring(unsigned pos, unsigned len = UINT_MAX);
+
+ UChar operator[](unsigned i) { ASSERT(i < m_length); return m_data[i]; }
+ UChar32 characterStartingAt(unsigned);
+
+ bool containsOnlyWhitespace();
+
+ int toIntStrict(bool* ok = 0, int base = 10);
+ unsigned toUIntStrict(bool* ok = 0, int base = 10);
+ int64_t toInt64Strict(bool* ok = 0, int base = 10);
+ uint64_t toUInt64Strict(bool* ok = 0, int base = 10);
+ intptr_t toIntPtrStrict(bool* ok = 0, int base = 10);
+
+ int toInt(bool* ok = 0); // ignores trailing garbage
+ unsigned toUInt(bool* ok = 0); // ignores trailing garbage
+ int64_t toInt64(bool* ok = 0); // ignores trailing garbage
+ uint64_t toUInt64(bool* ok = 0); // ignores trailing garbage
+ intptr_t toIntPtr(bool* ok = 0); // ignores trailing garbage
+
+ double toDouble(bool* ok = 0);
+ float toFloat(bool* ok = 0);
+
+ PassRefPtr<StringImpl> lower();
+ PassRefPtr<StringImpl> upper();
+ PassRefPtr<StringImpl> secure(UChar aChar, bool last = true);
+ PassRefPtr<StringImpl> foldCase();
+
+ PassRefPtr<StringImpl> stripWhiteSpace();
+ PassRefPtr<StringImpl> simplifyWhiteSpace();
+
+ PassRefPtr<StringImpl> removeCharacters(CharacterMatchFunctionPtr);
+
+ int find(const char*, int index = 0, bool caseSensitive = true);
+ int find(UChar, int index = 0);
+ int find(CharacterMatchFunctionPtr, int index = 0);
+ int find(StringImpl*, int index, bool caseSensitive = true);
+
+ int reverseFind(UChar, int index);
+ int reverseFind(StringImpl*, int index, bool caseSensitive = true);
+
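+    // startsWith() passes index 0 to reverseFind(), which then only ever tests a
+    // match at position 0, so a result of 0 means this string begins with str.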
+ bool startsWith(StringImpl* str, bool caseSensitive = true) { return reverseFind(str, 0, caseSensitive) == 0; }
+ bool endsWith(StringImpl*, bool caseSensitive = true);
+
+ PassRefPtr<StringImpl> replace(UChar, UChar);
+ PassRefPtr<StringImpl> replace(UChar, StringImpl*);
+ PassRefPtr<StringImpl> replace(StringImpl*, StringImpl*);
+ PassRefPtr<StringImpl> replace(unsigned index, unsigned len, StringImpl*);
+
+ Vector<char> ascii();
+ int wordCount(int maxWordsToCount = INT_MAX);
+
+ WTF::Unicode::Direction defaultWritingDirection();
+
+#if PLATFORM(CF)
+ CFStringRef createCFString();
+#endif
+#ifdef __OBJC__
+ operator NSString*();
+#endif
+
+private:
+    // This number must be at least 2 to avoid sharing the empty and null strings as well as single-character strings from SmallStrings.
+ static const unsigned s_copyCharsInlineCutOff = 20;
+
+ static PassRefPtr<StringImpl> createStrippingNullCharactersSlowCase(const UChar*, unsigned length);
+
+ BufferOwnership bufferOwnership() const { return static_cast<BufferOwnership>(m_refCountAndFlags & s_refCountMaskBufferOwnership); }
+ bool isStatic() const { return m_refCountAndFlags & s_refCountFlagStatic; }
+ const UChar* m_data;
+ union {
+ void* m_buffer;
+ StringImpl* m_substringBuffer;
+ SharedUChar* m_sharedBuffer;
+ };
+ mutable unsigned m_hash;
+};
+
+bool equal(const StringImpl*, const StringImpl*);
+bool equal(const StringImpl*, const char*);
+inline bool equal(const char* a, StringImpl* b) { return equal(b, a); }
+
+bool equalIgnoringCase(StringImpl*, StringImpl*);
+bool equalIgnoringCase(StringImpl*, const char*);
+inline bool equalIgnoringCase(const char* a, StringImpl* b) { return equalIgnoringCase(b, a); }
+bool equalIgnoringCase(const UChar* a, const char* b, unsigned length);
+inline bool equalIgnoringCase(const char* a, const UChar* b, unsigned length) { return equalIgnoringCase(b, a, length); }
+
+bool equalIgnoringNullity(StringImpl*, StringImpl*);
+
+static inline bool isSpaceOrNewline(UChar c)
+{
+ // Use isASCIISpace() for basic Latin-1.
+ // This will include newlines, which aren't included in Unicode DirWS.
+ return c <= 0x7F ? WTF::isASCIISpace(c) : WTF::Unicode::direction(c) == WTF::Unicode::WhiteSpaceNeutral;
+}
+
+// This is a hot function because it's used when parsing HTML.
+inline PassRefPtr<StringImpl> StringImpl::createStrippingNullCharacters(const UChar* characters, unsigned length)
+{
+ ASSERT(characters);
+ ASSERT(length);
+
+    // Optimize for the case where there are no null characters by quickly
+ // searching for nulls, and then using StringImpl::create, which will
+ // memcpy the whole buffer. This is faster than assigning character by
+ // character during the loop.
+
+ // Fast case.
+ int foundNull = 0;
+ for (unsigned i = 0; !foundNull && i < length; i++) {
+ int c = characters[i]; // more efficient than using UChar here (at least on Intel Mac OS)
+ foundNull |= !c;
+ }
+ if (!foundNull)
+ return StringImpl::create(characters, length);
+
+ return StringImpl::createStrippingNullCharactersSlowCase(characters, length);
+}
+
+}
+
+using WebCore::equal;
+
+namespace WTF {
+
+ // WebCore::StringHash is the default hash for StringImpl* and RefPtr<StringImpl>
+ template<typename T> struct DefaultHash;
+ template<> struct DefaultHash<WebCore::StringImpl*> {
+ typedef WebCore::StringHash Hash;
+ };
+ template<> struct DefaultHash<RefPtr<WebCore::StringImpl> > {
+ typedef WebCore::StringHash Hash;
+ };
+
+}
+
+#endif
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef StringImplBase_h
+#define StringImplBase_h
+
+#include <wtf/Noncopyable.h>
+#include <wtf/unicode/Unicode.h>
+
+namespace WTF {
+
+class StringImplBase : public Noncopyable {
+public:
+ bool isStringImpl() { return (m_refCountAndFlags & s_refCountInvalidForStringImpl) != s_refCountInvalidForStringImpl; }
+ unsigned length() const { return m_length; }
+ void ref() { m_refCountAndFlags += s_refCountIncrement; }
+
+protected:
+ enum BufferOwnership {
+ BufferInternal,
+ BufferOwned,
+ BufferSubstring,
+ BufferShared,
+ };
+
+ using Noncopyable::operator new;
+ void* operator new(size_t, void* inPlace) { ASSERT(inPlace); return inPlace; }
+
+ // For SmallStringStorage, which allocates an array and uses an in-place new.
+ StringImplBase() { }
+
+ StringImplBase(unsigned length, BufferOwnership ownership)
+ : m_refCountAndFlags(s_refCountIncrement | s_refCountFlagShouldReportedCost | ownership)
+ , m_length(length)
+ {
+ ASSERT(isStringImpl());
+ }
+
+ enum StaticStringConstructType { ConstructStaticString };
+ StringImplBase(unsigned length, StaticStringConstructType)
+ : m_refCountAndFlags(s_refCountFlagStatic | s_refCountFlagIsIdentifier | BufferOwned)
+ , m_length(length)
+ {
+ ASSERT(isStringImpl());
+ }
+
+ // This constructor is not used when creating StringImpl objects,
+ // and sets the flags into a state marking the object as such.
+ enum NonStringImplConstructType { ConstructNonStringImpl };
+ StringImplBase(NonStringImplConstructType)
+ : m_refCountAndFlags(s_refCountIncrement | s_refCountInvalidForStringImpl)
+ , m_length(0)
+ {
+ ASSERT(!isStringImpl());
+ }
+
+ // The bottom 7 bits hold flags, the top 25 bits hold the ref count.
+ // When dereferencing StringImpls we check for the ref count AND the
+ // static bit both being zero - static strings are never deleted.
+ static const unsigned s_refCountMask = 0xFFFFFF80;
+ static const unsigned s_refCountIncrement = 0x80;
+ static const unsigned s_refCountFlagStatic = 0x40;
+ static const unsigned s_refCountFlagHasTerminatingNullCharacter = 0x20;
+ static const unsigned s_refCountFlagIsAtomic = 0x10;
+ static const unsigned s_refCountFlagShouldReportedCost = 0x8;
+ static const unsigned s_refCountFlagIsIdentifier = 0x4;
+ static const unsigned s_refCountMaskBufferOwnership = 0x3;
+ // An invalid permutation of flags (static & shouldReportedCost - static strings do not
+ // set shouldReportedCost in the constructor, and this bit is only ever cleared, not set).
+ // Used by "ConstructNonStringImpl" constructor, above.
+ static const unsigned s_refCountInvalidForStringImpl = s_refCountFlagStatic | s_refCountFlagShouldReportedCost;
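+    // Layout example (illustrative): a freshly constructed, non-static BufferInternal
+    // string has m_refCountAndFlags == s_refCountIncrement | s_refCountFlagShouldReportedCost
+    // | BufferInternal == 0x80 | 0x08 | 0x00 == 0x88, i.e. a ref count of 1 in the
+    // top 25 bits and the flags in the low 7 bits.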
+
+ unsigned m_refCountAndFlags;
+ unsigned m_length;
+};
+
+} // namespace WTF
+
+using WTF::StringImplBase;
+
+#endif
--- /dev/null
+/*
+ * Copyright (C) 2010 Apple Inc. All Rights Reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+
+#ifdef SKIP_STATIC_CONSTRUCTORS_ON_GCC
+#define ATOMICSTRING_HIDE_GLOBALS 1
+#endif
+
+#include "AtomicString.h"
+#include "StaticConstructors.h"
+#include "StringImpl.h"
+
+namespace WebCore {
+
+StringImpl* StringImpl::empty()
+{
+ // FIXME: This works around a bug in our port of PCRE, that a regular expression
+ // run on the empty string may still perform a read from the first element, and
+ // as such we need this to be a valid pointer. No code should ever be reading
+ // from a zero length string, so this should be able to be a non-null pointer
+ // into the zero-page.
+ // Replace this with 'reinterpret_cast<UChar*>(static_cast<intptr_t>(1))' once
+ // PCRE goes away.
+ static UChar emptyUCharData = 0;
+ DEFINE_STATIC_LOCAL(StringImpl, emptyString, (&emptyUCharData, 0, ConstructStaticString));
+ return &emptyString;
+}
+
+JS_EXPORTDATA DEFINE_GLOBAL(AtomicString, nullAtom)
+JS_EXPORTDATA DEFINE_GLOBAL(AtomicString, emptyAtom, "")
+JS_EXPORTDATA DEFINE_GLOBAL(AtomicString, textAtom, "#text")
+JS_EXPORTDATA DEFINE_GLOBAL(AtomicString, commentAtom, "#comment")
+JS_EXPORTDATA DEFINE_GLOBAL(AtomicString, starAtom, "*")
+JS_EXPORTDATA DEFINE_GLOBAL(AtomicString, xmlAtom, "xml")
+JS_EXPORTDATA DEFINE_GLOBAL(AtomicString, xmlnsAtom, "xmlns")
+
+void AtomicString::init()
+{
+ static bool initialized;
+ if (!initialized) {
+ // On iPhone WebKit, isMainThread() tests for the Web Thread, so use pthread_main_np() instead.
+ ASSERT(pthread_main_np());
+
+ // Use placement new to initialize the globals.
+ new ((void*)&nullAtom) AtomicString;
+ new ((void*)&emptyAtom) AtomicString("");
+ new ((void*)&textAtom) AtomicString("#text");
+ new ((void*)&commentAtom) AtomicString("#comment");
+ new ((void*)&starAtom) AtomicString("*");
+ new ((void*)&xmlAtom) AtomicString("xml");
+ new ((void*)&xmlnsAtom) AtomicString("xmlns");
+
+ initialized = true;
+ }
+}
+
+}
--- /dev/null
+/*
+ * (C) 1999 Lars Knoll (knoll@kde.org)
+ * Copyright (C) 2004, 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
+ * Copyright (C) 2007-2009 Torch Mobile, Inc.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ */
+
+#include "config.h"
+#include "WTFString.h"
+
+#include <limits>
+#include <stdarg.h>
+#include <wtf/ASCIICType.h>
+#include <wtf/text/CString.h>
+#include <wtf/StringExtras.h>
+#include <wtf/Vector.h>
+#include <wtf/dtoa.h>
+#include <wtf/unicode/UTF8.h>
+#include <wtf/unicode/Unicode.h>
+
+using namespace WTF;
+using namespace WTF::Unicode;
+
+namespace WebCore {
+
+String::String(const UChar* str)
+{
+ if (!str)
+ return;
+
+ int len = 0;
+ while (str[len] != UChar(0))
+ len++;
+
+ m_impl = StringImpl::create(str, len);
+}
+
+void String::append(const String& str)
+{
+ if (str.isEmpty())
+ return;
+
+ // FIXME: This is extremely inefficient. So much so that we might want to take this
+ // out of String's API. We can make it better by optimizing the case where exactly
+ // one String is pointing at this StringImpl, but even then it's going to require a
+ // call to fastMalloc every single time.
+ if (str.m_impl) {
+ if (m_impl) {
+ UChar* data;
+ RefPtr<StringImpl> newImpl =
+ StringImpl::createUninitialized(m_impl->length() + str.length(), data);
+ memcpy(data, m_impl->characters(), m_impl->length() * sizeof(UChar));
+ memcpy(data + m_impl->length(), str.characters(), str.length() * sizeof(UChar));
+ m_impl = newImpl.release();
+ } else
+ m_impl = str.m_impl;
+ }
+}
+
+void String::append(char c)
+{
+ // FIXME: This is extremely inefficient. So much so that we might want to take this
+ // out of String's API. We can make it better by optimizing the case where exactly
+ // one String is pointing at this StringImpl, but even then it's going to require a
+ // call to fastMalloc every single time.
+ if (m_impl) {
+ UChar* data;
+ RefPtr<StringImpl> newImpl =
+ StringImpl::createUninitialized(m_impl->length() + 1, data);
+ memcpy(data, m_impl->characters(), m_impl->length() * sizeof(UChar));
+ data[m_impl->length()] = c;
+ m_impl = newImpl.release();
+ } else
+ m_impl = StringImpl::create(&c, 1);
+}
+
+void String::append(UChar c)
+{
+ // FIXME: This is extremely inefficient. So much so that we might want to take this
+ // out of String's API. We can make it better by optimizing the case where exactly
+ // one String is pointing at this StringImpl, but even then it's going to require a
+ // call to fastMalloc every single time.
+ if (m_impl) {
+ UChar* data;
+ RefPtr<StringImpl> newImpl =
+ StringImpl::createUninitialized(m_impl->length() + 1, data);
+ memcpy(data, m_impl->characters(), m_impl->length() * sizeof(UChar));
+ data[m_impl->length()] = c;
+ m_impl = newImpl.release();
+ } else
+ m_impl = StringImpl::create(&c, 1);
+}
+
+String operator+(const String& a, const String& b)
+{
+ if (a.isEmpty())
+ return b;
+ if (b.isEmpty())
+ return a;
+ String c = a;
+ c += b;
+ return c;
+}
+
+String operator+(const String& s, const char* cs)
+{
+ return s + String(cs);
+}
+
+String operator+(const char* cs, const String& s)
+{
+ return String(cs) + s;
+}
+
+void String::insert(const String& str, unsigned pos)
+{
+ if (str.isEmpty()) {
+ if (str.isNull())
+ return;
+ if (isNull())
+ m_impl = str.impl();
+ return;
+ }
+ insert(str.characters(), str.length(), pos);
+}
+
+void String::append(const UChar* charactersToAppend, unsigned lengthToAppend)
+{
+ if (!m_impl) {
+ if (!charactersToAppend)
+ return;
+ m_impl = StringImpl::create(charactersToAppend, lengthToAppend);
+ return;
+ }
+
+ if (!lengthToAppend)
+ return;
+
+ ASSERT(charactersToAppend);
+ UChar* data;
+ RefPtr<StringImpl> newImpl =
+ StringImpl::createUninitialized(length() + lengthToAppend, data);
+ memcpy(data, characters(), length() * sizeof(UChar));
+ memcpy(data + length(), charactersToAppend, lengthToAppend * sizeof(UChar));
+ m_impl = newImpl.release();
+}
+
+void String::insert(const UChar* charactersToInsert, unsigned lengthToInsert, unsigned position)
+{
+ if (position >= length()) {
+ append(charactersToInsert, lengthToInsert);
+ return;
+ }
+
+ ASSERT(m_impl);
+
+ if (!lengthToInsert)
+ return;
+
+ ASSERT(charactersToInsert);
+ UChar* data;
+ RefPtr<StringImpl> newImpl =
+ StringImpl::createUninitialized(length() + lengthToInsert, data);
+ memcpy(data, characters(), position * sizeof(UChar));
+ memcpy(data + position, charactersToInsert, lengthToInsert * sizeof(UChar));
+ memcpy(data + position + lengthToInsert, characters() + position, (length() - position) * sizeof(UChar));
+ m_impl = newImpl.release();
+}
+
+UChar32 String::characterStartingAt(unsigned i) const
+{
+ if (!m_impl || i >= m_impl->length())
+ return 0;
+ return m_impl->characterStartingAt(i);
+}
+
+void String::truncate(unsigned position)
+{
+ if (position >= length())
+ return;
+ UChar* data;
+ RefPtr<StringImpl> newImpl = StringImpl::createUninitialized(position, data);
+ memcpy(data, characters(), position * sizeof(UChar));
+ m_impl = newImpl.release();
+}
+
+void String::remove(unsigned position, int lengthToRemove)
+{
+ if (lengthToRemove <= 0)
+ return;
+ if (position >= length())
+ return;
+ if (static_cast<unsigned>(lengthToRemove) > length() - position)
+ lengthToRemove = length() - position;
+ UChar* data;
+ RefPtr<StringImpl> newImpl =
+ StringImpl::createUninitialized(length() - lengthToRemove, data);
+ memcpy(data, characters(), position * sizeof(UChar));
+ memcpy(data + position, characters() + position + lengthToRemove,
+ (length() - lengthToRemove - position) * sizeof(UChar));
+ m_impl = newImpl.release();
+}
+
+String String::substring(unsigned pos, unsigned len) const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->substring(pos, len);
+}
+
+String String::lower() const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->lower();
+}
+
+String String::upper() const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->upper();
+}
+
+String String::stripWhiteSpace() const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->stripWhiteSpace();
+}
+
+String String::simplifyWhiteSpace() const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->simplifyWhiteSpace();
+}
+
+String String::removeCharacters(CharacterMatchFunctionPtr findMatch) const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->removeCharacters(findMatch);
+}
+
+String String::foldCase() const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->foldCase();
+}
+
+bool String::percentage(int& result) const
+{
+ if (!m_impl || !m_impl->length())
+ return false;
+
+ if ((*m_impl)[m_impl->length() - 1] != '%')
+ return false;
+
+ result = charactersToIntStrict(m_impl->characters(), m_impl->length() - 1);
+ return true;
+}
+
+const UChar* String::charactersWithNullTermination()
+{
+ if (!m_impl)
+ return 0;
+ if (m_impl->hasTerminatingNullCharacter())
+ return m_impl->characters();
+ m_impl = StringImpl::createWithTerminatingNullCharacter(*m_impl);
+ return m_impl->characters();
+}
+
+String String::format(const char* format, ...)
+{
+#if PLATFORM(QT)
+ // Use QString::vsprintf to avoid the locale dependent formatting of vsnprintf.
+ // https://bugs.webkit.org/show_bug.cgi?id=18994
+ va_list args;
+ va_start(args, format);
+
+ QString buffer;
+ buffer.vsprintf(format, args);
+
+ va_end(args);
+
+ return buffer;
+
+#elif OS(WINCE)
+ va_list args;
+ va_start(args, format);
+
+ Vector<char, 256> buffer;
+
+ int bufferSize = 256;
+ buffer.resize(bufferSize);
+ for (;;) {
+ int written = vsnprintf(buffer.data(), bufferSize, format, args);
+ va_end(args);
+
+ if (written == 0)
+ return String("");
+ if (written > 0)
+ return StringImpl::create(buffer.data(), written);
+
+ bufferSize <<= 1;
+ buffer.resize(bufferSize);
+ va_start(args, format);
+ }
+
+#else
+ va_list args;
+ va_start(args, format);
+
+ Vector<char, 256> buffer;
+
+ // Do the format once to get the length.
+#if COMPILER(MSVC)
+ int result = _vscprintf(format, args);
+#else
+ char ch;
+ int result = vsnprintf(&ch, 1, format, args);
+    // We need to call va_end() and then va_start() again here, as the
+    // contents of args are undefined after the call to vsnprintf,
+    // according to http://man.cx/snprintf(3).
+ //
+ // Not calling va_end/va_start here happens to work on lots of
+ // systems, but fails e.g. on 64bit Linux.
+ va_end(args);
+ va_start(args, format);
+#endif
+
+ if (result == 0)
+ return String("");
+ if (result < 0)
+ return String();
+ unsigned len = result;
+ buffer.grow(len + 1);
+
+ // Now do the formatting again, guaranteed to fit.
+ vsnprintf(buffer.data(), buffer.size(), format, args);
+
+ va_end(args);
+
+ return StringImpl::create(buffer.data(), len);
+#endif
+}
+
+String String::number(short n)
+{
+ return String::format("%hd", n);
+}
+
+String String::number(unsigned short n)
+{
+ return String::format("%hu", n);
+}
+
+String String::number(int n)
+{
+ return String::format("%d", n);
+}
+
+String String::number(unsigned n)
+{
+ return String::format("%u", n);
+}
+
+String String::number(long n)
+{
+ return String::format("%ld", n);
+}
+
+String String::number(unsigned long n)
+{
+ return String::format("%lu", n);
+}
+
+String String::number(long long n)
+{
+#if OS(WINDOWS) && !PLATFORM(QT)
+ return String::format("%I64i", n);
+#else
+ return String::format("%lli", n);
+#endif
+}
+
+String String::number(unsigned long long n)
+{
+#if OS(WINDOWS) && !PLATFORM(QT)
+ return String::format("%I64u", n);
+#else
+ return String::format("%llu", n);
+#endif
+}
+
+String String::number(double n)
+{
+ return String::format("%.6lg", n);
+}
+
+int String::toIntStrict(bool* ok, int base) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toIntStrict(ok, base);
+}
+
+unsigned String::toUIntStrict(bool* ok, int base) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toUIntStrict(ok, base);
+}
+
+int64_t String::toInt64Strict(bool* ok, int base) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toInt64Strict(ok, base);
+}
+
+uint64_t String::toUInt64Strict(bool* ok, int base) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toUInt64Strict(ok, base);
+}
+
+intptr_t String::toIntPtrStrict(bool* ok, int base) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toIntPtrStrict(ok, base);
+}
+
+int String::toInt(bool* ok) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toInt(ok);
+}
+
+unsigned String::toUInt(bool* ok) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toUInt(ok);
+}
+
+int64_t String::toInt64(bool* ok) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toInt64(ok);
+}
+
+uint64_t String::toUInt64(bool* ok) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toUInt64(ok);
+}
+
+intptr_t String::toIntPtr(bool* ok) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0;
+ }
+ return m_impl->toIntPtr(ok);
+}
+
+double String::toDouble(bool* ok) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0.0;
+ }
+ return m_impl->toDouble(ok);
+}
+
+float String::toFloat(bool* ok) const
+{
+ if (!m_impl) {
+ if (ok)
+ *ok = false;
+ return 0.0f;
+ }
+ return m_impl->toFloat(ok);
+}
+
+String String::threadsafeCopy() const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->threadsafeCopy();
+}
+
+String String::crossThreadString() const
+{
+ if (!m_impl)
+ return String();
+ return m_impl->crossThreadString();
+}
+
+void String::split(const String& separator, bool allowEmptyEntries, Vector<String>& result) const
+{
+ result.clear();
+
+ int startPos = 0;
+ int endPos;
+ while ((endPos = find(separator, startPos)) != -1) {
+ if (allowEmptyEntries || startPos != endPos)
+ result.append(substring(startPos, endPos - startPos));
+ startPos = endPos + separator.length();
+ }
+ if (allowEmptyEntries || startPos != static_cast<int>(length()))
+ result.append(substring(startPos));
+}
+
+void String::split(const String& separator, Vector<String>& result) const
+{
+ return split(separator, false, result);
+}
+
+void String::split(UChar separator, bool allowEmptyEntries, Vector<String>& result) const
+{
+ result.clear();
+
+ int startPos = 0;
+ int endPos;
+ while ((endPos = find(separator, startPos)) != -1) {
+ if (allowEmptyEntries || startPos != endPos)
+ result.append(substring(startPos, endPos - startPos));
+ startPos = endPos + 1;
+ }
+ if (allowEmptyEntries || startPos != static_cast<int>(length()))
+ result.append(substring(startPos));
+}
+
+void String::split(UChar separator, Vector<String>& result) const
+{
+ return split(String(&separator, 1), false, result);
+}
+
+Vector<char> String::ascii() const
+{
+ if (m_impl)
+ return m_impl->ascii();
+
+ const char* nullMsg = "(null impl)";
+ Vector<char, 2048> buffer;
+ for (int i = 0; nullMsg[i]; ++i)
+ buffer.append(nullMsg[i]);
+
+ buffer.append('\0');
+ return buffer;
+}
+
+CString String::latin1() const
+{
+ // Basic Latin1 (ISO) encoding - Unicode characters 0..255 are
+ // preserved, characters outside of this range are converted to '?'.
+
+ unsigned length = this->length();
+ const UChar* characters = this->characters();
+
+ char* characterBuffer;
+ CString result = CString::newUninitialized(length, characterBuffer);
+
+ for (unsigned i = 0; i < length; ++i) {
+ UChar ch = characters[i];
+ characterBuffer[i] = ch > 255 ? '?' : ch;
+ }
+
+ return result;
+}
+
+// Helper to write a three-byte UTF-8 code point to the buffer; the caller must check that room is available.
+static inline void putUTF8Triple(char*& buffer, UChar ch)
+{
+ ASSERT(ch >= 0x0800);
+ *buffer++ = static_cast<char>(((ch >> 12) & 0x0F) | 0xE0);
+ *buffer++ = static_cast<char>(((ch >> 6) & 0x3F) | 0x80);
+ *buffer++ = static_cast<char>((ch & 0x3F) | 0x80);
+}
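+
+// Example (illustrative): for U+20AC, putUTF8Triple() emits 0xE2 0x82 0xAC, since
+// ((0x20AC >> 12) & 0x0F) | 0xE0 == 0xE2, ((0x20AC >> 6) & 0x3F) | 0x80 == 0x82,
+// and (0x20AC & 0x3F) | 0x80 == 0xAC.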
+
+CString String::utf8() const
+{
+ unsigned length = this->length();
+ const UChar* characters = this->characters();
+
+ // Allocate a buffer big enough to hold all the characters
+ // (an individual UTF-16 UChar can only expand to 3 UTF-8 bytes).
+ // Optimization ideas, if we find this function is hot:
+ // * We could speculatively create a CStringBuffer to contain 'length'
+ // characters, and resize if necessary (i.e. if the buffer contains
+ // non-ascii characters). (Alternatively, scan the buffer first for
+ // ascii characters, so we know this will be sufficient).
+ // * We could allocate a CStringBuffer with an appropriate size to
+ // have a good chance of being able to write the string into the
+ // buffer without reallocing (say, 1.5 x length).
+ Vector<char, 1024> bufferVector(length * 3);
+
+ char* buffer = bufferVector.data();
+ ConversionResult result = convertUTF16ToUTF8(&characters, characters + length, &buffer, buffer + bufferVector.size(), false);
+ ASSERT(result != sourceIllegal); // Only produced from strict conversion.
+ ASSERT(result != targetExhausted); // (length * 3) should be sufficient for any conversion
+
+    // If a high surrogate is left unconverted, treat it the same way as an unpaired high surrogate
+    // would have been handled in the middle of a string with non-strict conversion - which is to say,
+    // simply encode it to UTF-8.
+    if (result == sourceExhausted) {
+        // This should be one unpaired high surrogate; 'characters' was advanced past the converted prefix.
+        ASSERT((characters + 1) == (this->characters() + length));
+        ASSERT((*characters >= 0xD800) && (*characters <= 0xDBFF));
+        // There should be room left, since one UChar hasn't been converted.
+        ASSERT((buffer + 3) <= (bufferVector.data() + bufferVector.size()));
+ putUTF8Triple(buffer, *characters);
+ }
+
+ return CString(bufferVector.data(), buffer - bufferVector.data());
+}
+
+String String::fromUTF8(const char* stringStart, size_t length)
+{
+ if (!stringStart)
+ return String();
+
+    // We'll use a StringImpl as a buffer; if the source string only contains ascii this should be
+    // the right length, but if there are any multi-byte sequences the buffer will be too large.
+ UChar* buffer;
+ String stringBuffer(StringImpl::createUninitialized(length, buffer));
+ UChar* bufferEnd = buffer + length;
+
+ // Try converting into the buffer.
+ const char* stringCurrent = stringStart;
+ if (convertUTF8ToUTF16(&stringCurrent, stringStart + length, &buffer, bufferEnd) != conversionOK)
+ return String();
+
+ // stringBuffer is full (the input must have been all ascii) so just return it!
+ if (buffer == bufferEnd)
+ return stringBuffer;
+
+ // stringBuffer served its purpose as a buffer, copy the contents out into a new string.
+ unsigned utf16Length = buffer - stringBuffer.characters();
+ ASSERT(utf16Length < length);
+ return String(stringBuffer.characters(), utf16Length);
+}
+
+String String::fromUTF8(const char* string)
+{
+ if (!string)
+ return String();
+ return fromUTF8(string, strlen(string));
+}
+
+String String::fromUTF8WithLatin1Fallback(const char* string, size_t size)
+{
+ String utf8 = fromUTF8(string, size);
+ if (!utf8)
+ return String(string, size);
+ return utf8;
+}
+
+// String Operations
+
+static bool isCharacterAllowedInBase(UChar c, int base)
+{
+ if (c > 0x7F)
+ return false;
+ if (isASCIIDigit(c))
+ return c - '0' < base;
+ if (isASCIIAlpha(c)) {
+ if (base > 36)
+ base = 36;
+ return (c >= 'a' && c < 'a' + base - 10)
+ || (c >= 'A' && c < 'A' + base - 10);
+ }
+ return false;
+}
+
+template <typename IntegralType>
+static inline IntegralType toIntegralType(const UChar* data, size_t length, bool* ok, int base)
+{
+ static const IntegralType integralMax = std::numeric_limits<IntegralType>::max();
+ static const bool isSigned = std::numeric_limits<IntegralType>::is_signed;
+ const IntegralType maxMultiplier = integralMax / base;
+
+ IntegralType value = 0;
+ bool isOk = false;
+ bool isNegative = false;
+
+ if (!data)
+ goto bye;
+
+ // skip leading whitespace
+ while (length && isSpaceOrNewline(*data)) {
+ length--;
+ data++;
+ }
+
+ if (isSigned && length && *data == '-') {
+ length--;
+ data++;
+ isNegative = true;
+ } else if (length && *data == '+') {
+ length--;
+ data++;
+ }
+
+ if (!length || !isCharacterAllowedInBase(*data, base))
+ goto bye;
+
+ while (length && isCharacterAllowedInBase(*data, base)) {
+ length--;
+ IntegralType digitValue;
+ UChar c = *data;
+ if (isASCIIDigit(c))
+ digitValue = c - '0';
+ else if (c >= 'a')
+ digitValue = c - 'a' + 10;
+ else
+ digitValue = c - 'A' + 10;
+
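+        // Reject the digit if appending it would overflow: either value already exceeds
+        // integralMax / base, or it equals it and digitValue pushes past the maximum.
+        // When parsing a negative number one extra unit of magnitude is allowed, because
+        // the most negative value of a signed type has magnitude integralMax + 1.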
+ if (value > maxMultiplier || (value == maxMultiplier && digitValue > (integralMax % base) + isNegative))
+ goto bye;
+
+ value = base * value + digitValue;
+ data++;
+ }
+
+#if COMPILER(MSVC)
+#pragma warning(push, 0)
+#pragma warning(disable:4146)
+#endif
+
+ if (isNegative)
+ value = -value;
+
+#if COMPILER(MSVC)
+#pragma warning(pop)
+#endif
+
+ // skip trailing space
+ while (length && isSpaceOrNewline(*data)) {
+ length--;
+ data++;
+ }
+
+ if (!length)
+ isOk = true;
+bye:
+ if (ok)
+ *ok = isOk;
+ return isOk ? value : 0;
+}
+
+static unsigned lengthOfCharactersAsInteger(const UChar* data, size_t length)
+{
+ size_t i = 0;
+
+ // Allow leading spaces.
+ for (; i != length; ++i) {
+ if (!isSpaceOrNewline(data[i]))
+ break;
+ }
+
+ // Allow sign.
+ if (i != length && (data[i] == '+' || data[i] == '-'))
+ ++i;
+
+ // Allow digits.
+ for (; i != length; ++i) {
+ if (!isASCIIDigit(data[i]))
+ break;
+ }
+
+ return i;
+}
+
+int charactersToIntStrict(const UChar* data, size_t length, bool* ok, int base)
+{
+ return toIntegralType<int>(data, length, ok, base);
+}
+
+unsigned charactersToUIntStrict(const UChar* data, size_t length, bool* ok, int base)
+{
+ return toIntegralType<unsigned>(data, length, ok, base);
+}
+
+int64_t charactersToInt64Strict(const UChar* data, size_t length, bool* ok, int base)
+{
+ return toIntegralType<int64_t>(data, length, ok, base);
+}
+
+uint64_t charactersToUInt64Strict(const UChar* data, size_t length, bool* ok, int base)
+{
+ return toIntegralType<uint64_t>(data, length, ok, base);
+}
+
+intptr_t charactersToIntPtrStrict(const UChar* data, size_t length, bool* ok, int base)
+{
+ return toIntegralType<intptr_t>(data, length, ok, base);
+}
+
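+// The non-strict variants below first trim the input to its leading integer-like prefix
+// (optional whitespace, optional sign, digits) via lengthOfCharactersAsInteger, then hand
+// that prefix to the strict parser. For example, given the characters of "42px",
+// charactersToInt yields 42 with *ok set to true, while charactersToIntStrict sets *ok
+// to false because of the trailing garbage.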
+int charactersToInt(const UChar* data, size_t length, bool* ok)
+{
+ return toIntegralType<int>(data, lengthOfCharactersAsInteger(data, length), ok, 10);
+}
+
+unsigned charactersToUInt(const UChar* data, size_t length, bool* ok)
+{
+ return toIntegralType<unsigned>(data, lengthOfCharactersAsInteger(data, length), ok, 10);
+}
+
+int64_t charactersToInt64(const UChar* data, size_t length, bool* ok)
+{
+ return toIntegralType<int64_t>(data, lengthOfCharactersAsInteger(data, length), ok, 10);
+}
+
+uint64_t charactersToUInt64(const UChar* data, size_t length, bool* ok)
+{
+ return toIntegralType<uint64_t>(data, lengthOfCharactersAsInteger(data, length), ok, 10);
+}
+
+intptr_t charactersToIntPtr(const UChar* data, size_t length, bool* ok)
+{
+ return toIntegralType<intptr_t>(data, lengthOfCharactersAsInteger(data, length), ok, 10);
+}
+
+double charactersToDouble(const UChar* data, size_t length, bool* ok)
+{
+ if (!length) {
+ if (ok)
+ *ok = false;
+ return 0.0;
+ }
+
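+    // strtod operates on a null-terminated byte string, so copy the UTF-16 data into a
+    // narrow buffer, replacing any code unit at or above 0x7F with '?' (such characters
+    // can never be part of a valid floating-point number anyway).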
+ Vector<char, 256> bytes(length + 1);
+ for (unsigned i = 0; i < length; ++i)
+ bytes[i] = data[i] < 0x7F ? data[i] : '?';
+ bytes[length] = '\0';
+ char* end;
+ double val = WTF::strtod(bytes.data(), &end);
+ if (ok)
+ *ok = (end == 0 || *end == '\0');
+ return val;
+}
+
+float charactersToFloat(const UChar* data, size_t length, bool* ok)
+{
+ // FIXME: This will return ok even when the string fits into a double but not a float.
+ return static_cast<float>(charactersToDouble(data, length, ok));
+}
+
+} // namespace WebCore
+
+#ifndef NDEBUG
+// For use in the debugger - leaks memory
+WebCore::String* string(const char*);
+
+WebCore::String* string(const char* s)
+{
+ return new WebCore::String(s);
+}
+#endif
--- /dev/null
+/*
+ * (C) 1999 Lars Knoll (knoll@kde.org)
+ * Copyright (C) 2004, 2005, 2006, 2007, 2008 Apple Inc. All rights reserved.
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public License
+ * along with this library; see the file COPYING.LIB. If not, write to
+ * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1301, USA.
+ *
+ */
+
+#ifndef WTFString_h
+#define WTFString_h
+
+// This file would be called String.h, but that conflicts with <string.h>
+// on systems without case-sensitive file systems.
+
+#include "StringImpl.h"
+
+#ifdef __OBJC__
+#include <objc/objc.h>
+#endif
+
+#if PLATFORM(CF)
+typedef const struct __CFString * CFStringRef;
+#endif
+
+#if PLATFORM(QT)
+QT_BEGIN_NAMESPACE
+class QString;
+QT_END_NAMESPACE
+#include <QDataStream>
+#endif
+
+#if PLATFORM(WX)
+class wxString;
+#endif
+
+#if PLATFORM(HAIKU)
+class BString;
+#endif
+
+namespace WTF {
+class CString;
+}
+using WTF::CString;
+
+// FIXME: This is a temporary layering violation while we move string code to WTF.
+// Landing the file moves in one patch, will follow on with patches to change the namespaces.
+namespace WebCore {
+
+class SharedBuffer;
+struct StringHash;
+
+// Declarations of string operations
+
+bool charactersAreAllASCII(const UChar*, size_t);
+int charactersToIntStrict(const UChar*, size_t, bool* ok = 0, int base = 10);
+unsigned charactersToUIntStrict(const UChar*, size_t, bool* ok = 0, int base = 10);
+int64_t charactersToInt64Strict(const UChar*, size_t, bool* ok = 0, int base = 10);
+uint64_t charactersToUInt64Strict(const UChar*, size_t, bool* ok = 0, int base = 10);
+intptr_t charactersToIntPtrStrict(const UChar*, size_t, bool* ok = 0, int base = 10);
+
+int charactersToInt(const UChar*, size_t, bool* ok = 0); // ignores trailing garbage
+unsigned charactersToUInt(const UChar*, size_t, bool* ok = 0); // ignores trailing garbage
+int64_t charactersToInt64(const UChar*, size_t, bool* ok = 0); // ignores trailing garbage
+uint64_t charactersToUInt64(const UChar*, size_t, bool* ok = 0); // ignores trailing garbage
+intptr_t charactersToIntPtr(const UChar*, size_t, bool* ok = 0); // ignores trailing garbage
+
+double charactersToDouble(const UChar*, size_t, bool* ok = 0);
+float charactersToFloat(const UChar*, size_t, bool* ok = 0);
+
+int find(const UChar*, size_t, UChar, int startPosition = 0);
+int reverseFind(const UChar*, size_t, UChar, int startPosition = -1);
+
+class String {
+public:
+ String() { } // gives null string, distinguishable from an empty string
+ String(const UChar* str, unsigned len)
+ {
+ if (!str)
+ return;
+ m_impl = StringImpl::create(str, len);
+ }
+ String(const char* str)
+ {
+ if (!str)
+ return;
+ m_impl = StringImpl::create(str);
+ }
+ String(const char* str, unsigned length)
+ {
+ if (!str)
+ return;
+ m_impl = StringImpl::create(str, length);
+ }
+ String(const UChar*); // Specifically for null terminated UTF-16
+ String(StringImpl* i) : m_impl(i) { }
+ String(PassRefPtr<StringImpl> i) : m_impl(i) { }
+ String(RefPtr<StringImpl> i) : m_impl(i) { }
+
+ void swap(String& o) { m_impl.swap(o.m_impl); }
+
+ // Hash table deleted values, which are only constructed and never copied or destroyed.
+ String(WTF::HashTableDeletedValueType) : m_impl(WTF::HashTableDeletedValue) { }
+ bool isHashTableDeletedValue() const { return m_impl.isHashTableDeletedValue(); }
+
+ static String adopt(StringBuffer& buffer) { return StringImpl::adopt(buffer); }
+ static String adopt(Vector<UChar>& vector) { return StringImpl::adopt(vector); }
+
+ ALWAYS_INLINE unsigned length() const
+ {
+ if (!m_impl)
+ return 0;
+ return m_impl->length();
+ }
+
+ const UChar* characters() const
+ {
+ if (!m_impl)
+ return 0;
+ return m_impl->characters();
+ }
+
+ const UChar* charactersWithNullTermination();
+
+ UChar operator[](unsigned i) const // if i >= length(), returns 0
+ {
+ if (!m_impl || i >= m_impl->length())
+ return 0;
+ return m_impl->characters()[i];
+ }
+ UChar32 characterStartingAt(unsigned) const; // Ditto.
+
+ bool contains(UChar c) const { return find(c) != -1; }
+ bool contains(const char* str, bool caseSensitive = true) const { return find(str, 0, caseSensitive) != -1; }
+ bool contains(const String& str, bool caseSensitive = true) const { return find(str, 0, caseSensitive) != -1; }
+
+ int find(UChar c, int start = 0) const
+ { return m_impl ? m_impl->find(c, start) : -1; }
+ int find(CharacterMatchFunctionPtr matchFunction, int start = 0) const
+ { return m_impl ? m_impl->find(matchFunction, start) : -1; }
+ int find(const char* str, int start = 0, bool caseSensitive = true) const
+ { return m_impl ? m_impl->find(str, start, caseSensitive) : -1; }
+ int find(const String& str, int start = 0, bool caseSensitive = true) const
+ { return m_impl ? m_impl->find(str.impl(), start, caseSensitive) : -1; }
+
+ int reverseFind(UChar c, int start = -1) const
+ { return m_impl ? m_impl->reverseFind(c, start) : -1; }
+ int reverseFind(const String& str, int start = -1, bool caseSensitive = true) const
+ { return m_impl ? m_impl->reverseFind(str.impl(), start, caseSensitive) : -1; }
+
+ bool startsWith(const String& s, bool caseSensitive = true) const
+ { return m_impl ? m_impl->startsWith(s.impl(), caseSensitive) : s.isEmpty(); }
+ bool endsWith(const String& s, bool caseSensitive = true) const
+ { return m_impl ? m_impl->endsWith(s.impl(), caseSensitive) : s.isEmpty(); }
+
+ void append(const String&);
+ void append(char);
+ void append(UChar);
+ void append(const UChar*, unsigned length);
+ void insert(const String&, unsigned pos);
+ void insert(const UChar*, unsigned length, unsigned pos);
+
+ String& replace(UChar a, UChar b) { if (m_impl) m_impl = m_impl->replace(a, b); return *this; }
+ String& replace(UChar a, const String& b) { if (m_impl) m_impl = m_impl->replace(a, b.impl()); return *this; }
+ String& replace(const String& a, const String& b) { if (m_impl) m_impl = m_impl->replace(a.impl(), b.impl()); return *this; }
+ String& replace(unsigned index, unsigned len, const String& b) { if (m_impl) m_impl = m_impl->replace(index, len, b.impl()); return *this; }
+
+ void makeLower() { if (m_impl) m_impl = m_impl->lower(); }
+ void makeUpper() { if (m_impl) m_impl = m_impl->upper(); }
+ void makeSecure(UChar aChar, bool last = true) { if (m_impl) m_impl = m_impl->secure(aChar, last); }
+
+ void truncate(unsigned len);
+ void remove(unsigned pos, int len = 1);
+
+ String substring(unsigned pos, unsigned len = UINT_MAX) const;
+ String left(unsigned len) const { return substring(0, len); }
+ String right(unsigned len) const { return substring(length() - len, len); }
+
+ // Returns a lowercase/uppercase version of the string
+ String lower() const;
+ String upper() const;
+
+ String stripWhiteSpace() const;
+ String simplifyWhiteSpace() const;
+
+ String removeCharacters(CharacterMatchFunctionPtr) const;
+
+ // Return the string with case folded for case insensitive comparison.
+ String foldCase() const;
+
+ static String number(short);
+ static String number(unsigned short);
+ static String number(int);
+ static String number(unsigned);
+ static String number(long);
+ static String number(unsigned long);
+ static String number(long long);
+ static String number(unsigned long long);
+ static String number(double);
+
+ static String format(const char *, ...) WTF_ATTRIBUTE_PRINTF(1, 2);
+
+    // Returns an uninitialized string. The characters need to be written
+ // into the buffer returned in data before the returned string is used.
+ // Failure to do this will have unpredictable results.
+ static String createUninitialized(unsigned length, UChar*& data) { return StringImpl::createUninitialized(length, data); }
+
+ void split(const String& separator, Vector<String>& result) const;
+ void split(const String& separator, bool allowEmptyEntries, Vector<String>& result) const;
+ void split(UChar separator, Vector<String>& result) const;
+ void split(UChar separator, bool allowEmptyEntries, Vector<String>& result) const;
+
+ int toIntStrict(bool* ok = 0, int base = 10) const;
+ unsigned toUIntStrict(bool* ok = 0, int base = 10) const;
+ int64_t toInt64Strict(bool* ok = 0, int base = 10) const;
+ uint64_t toUInt64Strict(bool* ok = 0, int base = 10) const;
+ intptr_t toIntPtrStrict(bool* ok = 0, int base = 10) const;
+
+ int toInt(bool* ok = 0) const;
+ unsigned toUInt(bool* ok = 0) const;
+ int64_t toInt64(bool* ok = 0) const;
+ uint64_t toUInt64(bool* ok = 0) const;
+ intptr_t toIntPtr(bool* ok = 0) const;
+ double toDouble(bool* ok = 0) const;
+ float toFloat(bool* ok = 0) const;
+
+ bool percentage(int& percentage) const;
+
+ // Returns a StringImpl suitable for use on another thread.
+ String crossThreadString() const;
+ // Makes a deep copy. Helpful only if you need to use a String on another thread
+ // (use crossThreadString if the method call doesn't need to be threadsafe).
+    // Since the underlying StringImpl objects are immutable, there's no other reason
+    // to ever prefer threadsafeCopy() over plain old assignment.
+ String threadsafeCopy() const;
+
+ bool isNull() const { return !m_impl; }
+ ALWAYS_INLINE bool isEmpty() const { return !m_impl || !m_impl->length(); }
+
+ StringImpl* impl() const { return m_impl.get(); }
+
+#if PLATFORM(CF)
+ String(CFStringRef);
+ CFStringRef createCFString() const;
+#endif
+
+#ifdef __OBJC__
+ String(NSString*);
+
+ // This conversion maps NULL to "", which loses the meaning of NULL, but we
+ // need this mapping because AppKit crashes when passed nil NSStrings.
+ operator NSString*() const { if (!m_impl) return @""; return *m_impl; }
+#endif
+
+#if PLATFORM(QT)
+ String(const QString&);
+ String(const QStringRef&);
+ operator QString() const;
+#endif
+
+#if PLATFORM(WX)
+ String(const wxString&);
+ operator wxString() const;
+#endif
+
+#if PLATFORM(HAIKU)
+ String(const BString&);
+ operator BString() const;
+#endif
+
+ Vector<char> ascii() const;
+
+ CString latin1() const;
+ CString utf8() const;
+
+ static String fromUTF8(const char*, size_t);
+ static String fromUTF8(const char*);
+
+    // Decodes the passed-in bytes as UTF-8, but falls back to decoding them as Latin-1 if they are not valid UTF-8.
+ static String fromUTF8WithLatin1Fallback(const char*, size_t);
+
+ // Determines the writing direction using the Unicode Bidi Algorithm rules P2 and P3.
+ WTF::Unicode::Direction defaultWritingDirection() const { return m_impl ? m_impl->defaultWritingDirection() : WTF::Unicode::LeftToRight; }
+
+ bool containsOnlyASCII() const { return charactersAreAllASCII(characters(), length()); }
+
+private:
+ RefPtr<StringImpl> m_impl;
+};
+
+#if PLATFORM(QT)
+QDataStream& operator<<(QDataStream& stream, const String& str);
+QDataStream& operator>>(QDataStream& stream, String& str);
+#endif
+
+String operator+(const String&, const String&);
+String operator+(const String&, const char*);
+String operator+(const char*, const String&);
+
+inline String& operator+=(String& a, const String& b) { a.append(b); return a; }
+
+inline bool operator==(const String& a, const String& b) { return equal(a.impl(), b.impl()); }
+inline bool operator==(const String& a, const char* b) { return equal(a.impl(), b); }
+inline bool operator==(const char* a, const String& b) { return equal(a, b.impl()); }
+
+inline bool operator!=(const String& a, const String& b) { return !equal(a.impl(), b.impl()); }
+inline bool operator!=(const String& a, const char* b) { return !equal(a.impl(), b); }
+inline bool operator!=(const char* a, const String& b) { return !equal(a, b.impl()); }
+
+inline bool equalIgnoringCase(const String& a, const String& b) { return equalIgnoringCase(a.impl(), b.impl()); }
+inline bool equalIgnoringCase(const String& a, const char* b) { return equalIgnoringCase(a.impl(), b); }
+inline bool equalIgnoringCase(const char* a, const String& b) { return equalIgnoringCase(a, b.impl()); }
+
+inline bool equalPossiblyIgnoringCase(const String& a, const String& b, bool ignoreCase)
+{
+ return ignoreCase ? equalIgnoringCase(a, b) : (a == b);
+}
+
+inline bool equalIgnoringNullity(const String& a, const String& b) { return equalIgnoringNullity(a.impl(), b.impl()); }
+
+inline bool operator!(const String& str) { return str.isNull(); }
+
+inline void swap(String& a, String& b) { a.swap(b); }
+
+// Definitions of string operations
+
+#ifdef __OBJC__
+// This is for situations in WebKit where the longstanding behavior has been
+// "nil if empty", so we try to maintain that behavior for the sake of
+// entrenched clients.
+inline NSString* nsStringNilIfEmpty(const String& str) { return str.isEmpty() ? nil : (NSString*)str; }
+#endif
+
+inline bool charactersAreAllASCII(const UChar* characters, size_t length)
+{
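+    // OR all the code units together and test once at the end: if any unit has a bit set
+    // at or above 0x80, the accumulated value will too, so the string is not pure ASCII.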
+ UChar ored = 0;
+ for (size_t i = 0; i < length; ++i)
+ ored |= characters[i];
+ return !(ored & 0xFF80);
+}
+
+inline int find(const UChar* characters, size_t length, UChar character, int startPosition)
+{
+ if (startPosition >= static_cast<int>(length))
+ return -1;
+ for (size_t i = startPosition; i < length; ++i) {
+ if (characters[i] == character)
+ return static_cast<int>(i);
+ }
+ return -1;
+}
+
+inline int find(const UChar* characters, size_t length, CharacterMatchFunctionPtr matchFunction, int startPosition)
+{
+ if (startPosition >= static_cast<int>(length))
+ return -1;
+ for (size_t i = startPosition; i < length; ++i) {
+ if (matchFunction(characters[i]))
+ return static_cast<int>(i);
+ }
+ return -1;
+}
+
+inline int reverseFind(const UChar* characters, size_t length, UChar character, int startPosition)
+{
+ if (startPosition >= static_cast<int>(length) || !length)
+ return -1;
+ if (startPosition < 0)
+ startPosition += static_cast<int>(length);
+ while (true) {
+ if (characters[startPosition] == character)
+ return startPosition;
+ if (!startPosition)
+ return -1;
+ startPosition--;
+ }
+ ASSERT_NOT_REACHED();
+ return -1;
+}
+
+inline void append(Vector<UChar>& vector, const String& string)
+{
+ vector.append(string.characters(), string.length());
+}
+
+inline void appendNumber(Vector<UChar>& vector, unsigned char number)
+{
+ int numberLength = number > 99 ? 3 : (number > 9 ? 2 : 1);
+ size_t vectorSize = vector.size();
+ vector.grow(vectorSize + numberLength);
+
+ switch (numberLength) {
+ case 3:
+ vector[vectorSize + 2] = number % 10 + '0';
+ number /= 10;
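+        // fall through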
+
+ case 2:
+ vector[vectorSize + 1] = number % 10 + '0';
+ number /= 10;
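+        // fall through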
+
+ case 1:
+ vector[vectorSize] = number % 10 + '0';
+ }
+}
+
+} // namespace WebCore
+
+namespace WTF {
+
+ // StringHash is the default hash for String
+ template<typename T> struct DefaultHash;
+ template<> struct DefaultHash<WebCore::String> {
+ typedef WebCore::StringHash Hash;
+ };
+
+}
+
+#endif
#ifndef WTF_Collator_h
#define WTF_Collator_h
-#include <memory>
#include <wtf/Noncopyable.h>
+#include <wtf/PassOwnPtr.h>
#include <wtf/unicode/Unicode.h>
#if USE(ICU_UNICODE) && !UCONFIG_NO_COLLATION
~Collator();
void setOrderLowerFirst(bool);
- static std::auto_ptr<Collator> userDefault();
+ static PassOwnPtr<Collator> userDefault();
Result collate(const ::UChar*, size_t, const ::UChar*, size_t) const;
{
}
-std::auto_ptr<Collator> Collator::userDefault()
+PassOwnPtr<Collator> Collator::userDefault()
{
- return std::auto_ptr<Collator>(new Collator(0));
+ return new Collator(0);
}
// A default implementation for platforms that lack Unicode-aware collation.
#define UnicodeGLib_h
#include "UnicodeMacrosFromICU.h"
-#include <wtf/gtk/GOwnPtr.h>
+#include "GOwnPtr.h"
#include <glib.h>
#include <pango/pango.h>
{
}
-std::auto_ptr<Collator> Collator::userDefault()
+PassOwnPtr<Collator> Collator::userDefault()
{
#if OS(DARWIN) && PLATFORM(CF)
// Mac OS X doesn't set UNIX locale to match user-selected one, so ICU default doesn't work.
char buf[256];
if (collationOrder) {
CFStringGetCString(collationOrder, buf, sizeof(buf), kCFStringEncodingASCII);
- return std::auto_ptr<Collator>(new Collator(buf));
+ return new Collator(buf);
} else
- return std::auto_ptr<Collator>(new Collator(""));
+ return new Collator("");
#else
- return std::auto_ptr<Collator>(new Collator(0));
+ return new Collator(0);
#endif
}
QT_END_NAMESPACE
// ugly hack to make UChar compatible with JSChar in API/JSStringRef.h
-#if defined(Q_OS_WIN) || COMPILER(WINSCW)
+#if defined(Q_OS_WIN) || COMPILER(WINSCW) || (COMPILER(RVCT) && OS(SYMBIAN))
typedef wchar_t UChar;
#else
typedef uint16_t UChar;
--- /dev/null
+// Copyright 2010, Google Inc. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef URLComponent_h
+#define URLComponent_h
+
+namespace WTF {
+
+// Represents a substring for URL parsing.
+class URLComponent {
+public:
+ URLComponent() : m_begin(0), m_length(-1) { }
+ URLComponent(int begin, int length) : m_begin(begin), m_length(length) { }
+
+ // Returns true if this component is valid, meaning the length is given. Even
+ // valid components may be empty to record the fact that they exist.
+ bool isValid() const { return m_length != -1; }
+
+    // Returns true if the given component is specified and nonempty; when this
+    // returns false, the component is either empty or invalid.
+ bool isNonempty() const { return m_length > 0; }
+
+ void reset()
+ {
+ m_begin = 0;
+ m_length = -1;
+ }
+
+ bool operator==(const URLComponent& other) const { return m_begin == other.m_begin && m_length == other.m_length; }
+
+ int begin() const { return m_begin; }
+ void setBegin(int begin) { m_begin = begin; }
+
+ int length() const { return m_length; }
+ void setLength(int length) { m_length = length; }
+
+ int end() const { return m_begin + m_length; }
+
+private:
+ int m_begin; // Byte offset in the string of this component.
+ int m_length; // Will be -1 if the component is unspecified.
+};
+
+} // namespace WTF
+
+#endif // URLComponent_h
--- /dev/null
+/* Based on nsURLParsers.cc from Mozilla
+ * -------------------------------------
+ * Copyright (C) 1998 Netscape Communications Corporation.
+ *
+ * Other contributors:
+ * Darin Fisher (original author)
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * Alternatively, the contents of this file may be used under the terms
+ * of either the Mozilla Public License Version 1.1, found at
+ * http://www.mozilla.org/MPL/ (the "MPL") or the GNU General Public
+ * License Version 2.0, found at http://www.fsf.org/copyleft/gpl.html
+ * (the "GPL"), in which case the provisions of the MPL or the GPL are
+ * applicable instead of those above. If you wish to allow use of your
+ * version of this file only under the terms of one of those two
+ * licenses (the MPL or the GPL) and not to allow others to use your
+ * version of this file under the LGPL, indicate your decision by
+ * deleting the provisions above and replace them with the notice and
+ * other provisions required by the MPL or the GPL, as the case may be.
+ * If you do not delete the provisions above, a recipient may use your
+ * version of this file under any of the LGPL, the MPL or the GPL.
+ */
+
+#include "config.h"
+#include "URLSegments.h"
+
+namespace WTF {
+
+int URLSegments::length() const
+{
+ if (fragment.isValid())
+ return fragment.end();
+ return charactersBefore(Fragment, false);
+}
+
+int URLSegments::charactersBefore(ComponentType type, bool includeDelimiter) const
+{
+ if (type == Scheme)
+ return scheme.begin();
+
+ int current = 0;
+ if (scheme.isValid())
+ current = scheme.end() + 1; // Advance over the ':' at the end of the scheme.
+
+ if (username.isValid()) {
+ if (type <= Username)
+ return username.begin();
+ current = username.end() + 1; // Advance over the '@' or ':' at the end.
+ }
+
+ if (password.isValid()) {
+ if (type <= Password)
+ return password.begin();
+ current = password.end() + 1; // Advance over the '@' at the end.
+ }
+
+ if (host.isValid()) {
+ if (type <= Host)
+ return host.begin();
+ current = host.end();
+ }
+
+ if (port.isValid()) {
+ if (type < Port || (type == Port && includeDelimiter))
+ return port.begin() - 1; // Back over delimiter.
+ if (type == Port)
+ return port.begin(); // Don't want delimiter counted.
+ current = port.end();
+ }
+
+ if (path.isValid()) {
+ if (type <= Path)
+ return path.begin();
+ current = path.end();
+ }
+
+ if (query.isValid()) {
+ if (type < Query || (type == Query && includeDelimiter))
+ return query.begin() - 1; // Back over delimiter.
+ if (type == Query)
+ return query.begin(); // Don't want delimiter counted.
+ current = query.end();
+ }
+
+ if (fragment.isValid()) {
+ if (type == Fragment && !includeDelimiter)
+            return fragment.begin(); // Don't want delimiter counted.
+
+        // When we get here, either the caller asked for the fragment including its
+        // delimiter, or the component it wanted comes before this one and was absent;
+        // either way the position just before the fragment is the right answer.
+        return fragment.begin() - 1; // Back over delimiter.
+ }
+
+ return current;
+}
+
+} // namespace WTF
--- /dev/null
+// Copyright 2007, Google Inc. All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following disclaimer
+// in the documentation and/or other materials provided with the
+// distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived from
+// this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef URLSegments_h
+#define URLSegments_h
+
+#include "URLComponent.h"
+
+namespace WTF {
+
+// A structure that holds the identified parts of an input URL. This structure
+// does NOT store the URL itself. The caller will have to store the URL text
+// and its corresponding URLSegments structure separately.
+class URLSegments {
+public:
+ // Identifies different components.
+ enum ComponentType {
+ Scheme,
+ Username,
+ Password,
+ Host,
+ Port,
+ Path,
+ Query,
+ Fragment,
+ };
+
+ URLSegments() { }
+
+ // Returns the length of the URL (the end of the last component).
+ //
+ // Note that for some invalid, non-canonical URLs, this may not be the length
+ // of the string. For example "http://": the parsed structure will only
+ // contain an entry for the four-character scheme, and it doesn't know about
+ // the "://". For all other last-components, it will return the real length.
+ int length() const;
+
+ // Returns the number of characters before the given component if it exists,
+ // or where the component would be if it did exist. This will return the
+ // string length if the component would be appended to the end.
+ //
+ // Note that this can get a little funny for the port, query, and fragment
+ // components which have a delimiter that is not counted as part of the
+ // component. The |includeDelimiter| flag controls if you want this counted
+ // as part of the component or not when the component exists.
+ //
+    // This example shows the difference between the two flags for two of these
+    // delimited components that are present (the port and query) and one that
+    // isn't (the fragment). The components that this flag affects are marked
+ // with a *.
+ // 0 1 2
+ // 012345678901234567890
+ // Example input: http://foo:80/?query
+ // include_delim=true, ...=false ("<-" indicates different)
+ // Scheme: 0 0
+ // Username: 5 5
+ // Password: 5 5
+ // Host: 7 7
+ // *Port: 10 11 <-
+ // Path: 13 13
+ // *Query: 14 15 <-
+ // *Fragment: 20 20
+ //
+ int charactersBefore(ComponentType, bool includeDelimiter) const;
+
+ // Each component excludes the related delimiters and has a length of -1
+ // if that component is absent but 0 if the component exists but is empty.
+ URLComponent scheme;
+ URLComponent username;
+ URLComponent password;
+ URLComponent host;
+ URLComponent port;
+ URLComponent path;
+ URLComponent query;
+ URLComponent fragment;
+};
+
+} // namespace WTF
+
+#endif // URLSegments_h
--- /dev/null
+# Copyright (C) 2009 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+{
+ 'variables': {
+ 'chromium_code': 1,
+ },
+ 'targets': [
+ {
+ 'target_name': 'wtfurl',
+ 'type': '<(library)',
+ 'msvs_guid': 'EF5E94AB-B646-4E5B-A058-52EF07B8351C',
+ 'dependencies': [
+ ],
+ 'sources': [
+ 'src/URLComponent.h',
+ 'src/URLSegments.cpp',
+ 'src/URLSegments.h',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ 'src',
+ ],
+ },
+ },
+ ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
threadingWindowHandle = CreateWindow(kThreadingWindowClassName, 0, 0,
CW_USEDEFAULT, 0, CW_USEDEFAULT, 0, hWndParent, 0, 0, 0);
threadingFiredMessage = RegisterWindowMessage(L"com.apple.WebKit.MainThreadFired");
+
+ initializeCurrentThreadInternal("Main Thread");
}
void scheduleDispatchFunctionsOnMainThread()
--- /dev/null
+/*
+ * Copyright (C) 2007 Apple Inc. All rights reserved.
+ * Copyright (C) 2008, 2009 Torch Mobile, Inc.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
+ * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+ * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
+ * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+ * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+ * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+ * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+ * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+ * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include "config.h"
+#include "OwnPtr.h"
+
+#include <windows.h>
+
+namespace WTF {
+
+void deleteOwnedPtr(HBITMAP ptr)
+{
+ if (ptr)
+ DeleteObject(ptr);
+}
+
+void deleteOwnedPtr(HBRUSH ptr)
+{
+ if (ptr)
+ DeleteObject(ptr);
+}
+
+void deleteOwnedPtr(HDC ptr)
+{
+ if (ptr)
+ DeleteDC(ptr);
+}
+
+void deleteOwnedPtr(HFONT ptr)
+{
+ if (ptr)
+ DeleteObject(ptr);
+}
+
+void deleteOwnedPtr(HPALETTE ptr)
+{
+ if (ptr)
+ DeleteObject(ptr);
+}
+
+void deleteOwnedPtr(HPEN ptr)
+{
+ if (ptr)
+ DeleteObject(ptr);
+}
+
+void deleteOwnedPtr(HRGN ptr)
+{
+ if (ptr)
+ DeleteObject(ptr);
+}
+
+}
#include "config.h"
#include "MainThread.h"
+#include <wx/defs.h>
+#include <wx/app.h>
+#include <wx/event.h>
+
+const wxEventType wxEVT_CALL_AFTER = wxNewEventType();
+
+class wxCallAfter : public wxEvtHandler
+{
+public:
+ wxCallAfter()
+ : wxEvtHandler()
+ {
+ wxTheApp->Connect(-1, -1, wxEVT_CALL_AFTER, wxCommandEventHandler(wxCallAfter::OnCallback));
+ wxCommandEvent event(wxEVT_CALL_AFTER);
+ wxPostEvent(wxTheApp, event);
+ }
+
+ void OnCallback(wxCommandEvent& event)
+ {
+ WTF::dispatchFunctionsFromMainThread();
+ }
+};
+
namespace WTF {
void initializeMainThreadPlatform()
void scheduleDispatchFunctionsOnMainThread()
{
+ wxCallAfter();
}
} // namespace WTF
namespace JSC { namespace Yarr {
+#include "RegExpJitTables.h"
+
class CharacterClassConstructor {
public:
CharacterClassConstructor(bool isCaseInsensitive = false)
CharacterClass* charClass()
{
- CharacterClass* characterClass = new CharacterClass();
+ CharacterClass* characterClass = new CharacterClass(0);
characterClass->m_matches.append(m_matches);
characterClass->m_ranges.append(m_ranges);
Vector<CharacterRange> m_rangesUnicode;
};
-
-CharacterClass* newlineCreate()
-{
- CharacterClass* characterClass = new CharacterClass();
-
- characterClass->m_matches.append('\n');
- characterClass->m_matches.append('\r');
- characterClass->m_matchesUnicode.append(0x2028);
- characterClass->m_matchesUnicode.append(0x2029);
-
- return characterClass;
-}
-
-CharacterClass* digitsCreate()
-{
- CharacterClass* characterClass = new CharacterClass();
-
- characterClass->m_ranges.append(CharacterRange('0', '9'));
-
- return characterClass;
-}
-
-CharacterClass* spacesCreate()
-{
- CharacterClass* characterClass = new CharacterClass();
-
- characterClass->m_matches.append(' ');
- characterClass->m_ranges.append(CharacterRange('\t', '\r'));
- characterClass->m_matchesUnicode.append(0x00a0);
- characterClass->m_matchesUnicode.append(0x1680);
- characterClass->m_matchesUnicode.append(0x180e);
- characterClass->m_matchesUnicode.append(0x2028);
- characterClass->m_matchesUnicode.append(0x2029);
- characterClass->m_matchesUnicode.append(0x202f);
- characterClass->m_matchesUnicode.append(0x205f);
- characterClass->m_matchesUnicode.append(0x3000);
- characterClass->m_rangesUnicode.append(CharacterRange(0x2000, 0x200a));
-
- return characterClass;
-}
-
-CharacterClass* wordcharCreate()
-{
- CharacterClass* characterClass = new CharacterClass();
-
- characterClass->m_matches.append('_');
- characterClass->m_ranges.append(CharacterRange('0', '9'));
- characterClass->m_ranges.append(CharacterRange('A', 'Z'));
- characterClass->m_ranges.append(CharacterRange('a', 'z'));
-
- return characterClass;
-}
-
-CharacterClass* nondigitsCreate()
-{
- CharacterClass* characterClass = new CharacterClass();
-
- characterClass->m_ranges.append(CharacterRange(0, '0' - 1));
- characterClass->m_ranges.append(CharacterRange('9' + 1, 0x7f));
- characterClass->m_rangesUnicode.append(CharacterRange(0x80, 0xffff));
-
- return characterClass;
-}
-
-CharacterClass* nonspacesCreate()
-{
- CharacterClass* characterClass = new CharacterClass();
-
- characterClass->m_ranges.append(CharacterRange(0, '\t' - 1));
- characterClass->m_ranges.append(CharacterRange('\r' + 1, ' ' - 1));
- characterClass->m_ranges.append(CharacterRange(' ' + 1, 0x7f));
- characterClass->m_rangesUnicode.append(CharacterRange(0x0080, 0x009f));
- characterClass->m_rangesUnicode.append(CharacterRange(0x00a1, 0x167f));
- characterClass->m_rangesUnicode.append(CharacterRange(0x1681, 0x180d));
- characterClass->m_rangesUnicode.append(CharacterRange(0x180f, 0x1fff));
- characterClass->m_rangesUnicode.append(CharacterRange(0x200b, 0x2027));
- characterClass->m_rangesUnicode.append(CharacterRange(0x202a, 0x202e));
- characterClass->m_rangesUnicode.append(CharacterRange(0x2030, 0x205e));
- characterClass->m_rangesUnicode.append(CharacterRange(0x2060, 0x2fff));
- characterClass->m_rangesUnicode.append(CharacterRange(0x3001, 0xffff));
-
- return characterClass;
-}
-
-CharacterClass* nonwordcharCreate()
-{
- CharacterClass* characterClass = new CharacterClass();
-
- characterClass->m_matches.append('`');
- characterClass->m_ranges.append(CharacterRange(0, '0' - 1));
- characterClass->m_ranges.append(CharacterRange('9' + 1, 'A' - 1));
- characterClass->m_ranges.append(CharacterRange('Z' + 1, '_' - 1));
- characterClass->m_ranges.append(CharacterRange('z' + 1, 0x7f));
- characterClass->m_rangesUnicode.append(CharacterRange(0x80, 0xffff));
-
- return characterClass;
-}
-
-
class RegexPatternConstructor {
public:
RegexPatternConstructor(RegexPattern& pattern)
void atomBackReference(unsigned subpatternId)
{
ASSERT(subpatternId);
+ m_pattern.m_shouldFallBack = true;
m_pattern.m_maxBackReference = std::max(m_pattern.m_maxBackReference, subpatternId);
if (subpatternId > m_pattern.m_numSubpatterns) {
return;
}
+ if (max > 1 && term.type == PatternTerm::TypeParenthesesSubpattern)
+ m_pattern.m_shouldFallBack = true;
+
if (min == 0)
term.quantify(max, greedy ? QuantifierGreedy : QuantifierNonGreedy);
else if (min == max)
#ifndef RegexCompiler_h
#define RegexCompiler_h
-#include <wtf/Platform.h>
-
#if ENABLE(YARR)
-#include <wtf/unicode/Unicode.h>
#include "RegexParser.h"
#include "RegexPattern.h"
+#include <wtf/unicode/Unicode.h>
namespace JSC { namespace Yarr {
return false;
}
- bool tryConsumeCharacter(int testChar)
- {
- if (input.atEnd())
- return false;
-
- int ch = input.read();
-
- if (pattern->m_ignoreCase ? ((Unicode::toLower(testChar) == ch) || (Unicode::toUpper(testChar) == ch)) : (testChar == ch)) {
- input.next();
- return true;
- }
- return false;
- }
-
bool checkCharacter(int testChar, int inputPosition)
{
return testChar == input.readChecked(inputPosition);
return (loChar == ch) || (hiChar == ch);
}
- bool tryConsumeCharacterClass(CharacterClass* characterClass, bool invert)
- {
- if (input.atEnd())
- return false;
-
- bool match = testCharacterClass(characterClass, input.read());
-
- if (invert)
- match = !match;
-
- if (match) {
- input.next();
- return true;
- }
- return false;
- }
-
bool checkCharacterClass(CharacterClass* characterClass, bool invert, int inputPosition)
{
bool match = testCharacterClass(characterClass, input.readChecked(inputPosition));
}
case PatternTerm::TypeParentheticalAssertion: {
- unsigned alternativeFrameLocation = term.inputPosition + RegexStackSpaceForBackTrackInfoParentheticalAssertion;
+ unsigned alternativeFrameLocation = term.frameLocation + RegexStackSpaceForBackTrackInfoParentheticalAssertion;
atomParentheticalAssertionBegin(term.parentheses.subpatternId, term.invertOrCapture, term.frameLocation, alternativeFrameLocation);
emitDisjunction(term.parentheses.disjunction, currentCountAlreadyChecked, 0);
#ifndef RegexInterpreter_h
#define RegexInterpreter_h
-#include <wtf/Platform.h>
-
#if ENABLE(YARR)
-#include <wtf/unicode/Unicode.h>
#include "RegexParser.h"
#include "RegexPattern.h"
+#include <wtf/unicode/Unicode.h>
namespace JSC { namespace Yarr {
namespace JSC { namespace Yarr {
-
class RegexGenerator : private MacroAssembler {
friend void jitCompileRegex(JSGlobalData* globalData, RegexCodeBlock& jitObject, const UString& pattern, unsigned& numSubpatterns, const char*& error, bool ignoreCase, bool multiline);
static const RegisterID regT1 = ARMRegisters::r6;
static const RegisterID returnRegister = ARMRegisters::r0;
+#elif CPU(MIPS)
+ static const RegisterID input = MIPSRegisters::a0;
+ static const RegisterID index = MIPSRegisters::a1;
+ static const RegisterID length = MIPSRegisters::a2;
+ static const RegisterID output = MIPSRegisters::a3;
+
+ static const RegisterID regT0 = MIPSRegisters::t4;
+ static const RegisterID regT1 = MIPSRegisters::t5;
+
+ static const RegisterID returnRegister = MIPSRegisters::v0;
#elif CPU(X86)
static const RegisterID input = X86Registers::eax;
static const RegisterID index = X86Registers::edx;
void matchCharacterClass(RegisterID character, JumpList& matchDest, const CharacterClass* charClass)
{
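+        // When a precomputed lookup table is available for this character class, index it
+        // by the character value and test the byte directly; m_inverted flips the sense of
+        // the match.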
+ if (charClass->m_table) {
+ ExtendedAddress tableEntry(character, reinterpret_cast<intptr_t>(charClass->m_table->m_table));
+ matchDest.append(branchTest8(charClass->m_table->m_inverted ? Zero : NonZero, tableEntry));
+ return;
+ }
Jump unicodeFail;
if (charClass->m_matchesUnicode.size() || charClass->m_rangesUnicode.size()) {
Jump isAscii = branch32(LessThanOrEqual, character, Imm32(0x7f));
ASSERT(!m_pattern.m_ignoreCase || (Unicode::toLower(ch) == Unicode::toUpper(ch)));
failures.append(jumpIfCharNotEquals(ch, state.inputOffset()));
}
+
add32(Imm32(1), countRegister);
add32(Imm32(1), index);
- branch32(NotEqual, countRegister, Imm32(term.quantityCount)).linkTo(loop, this);
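+    // A quantityCount of 0xffffffff denotes an unbounded quantifier; in that case there is
+    // no fixed iteration count to compare against, so jump back unconditionally.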
+ if (term.quantityCount != 0xffffffff)
+ branch32(NotEqual, countRegister, Imm32(term.quantityCount)).linkTo(loop, this);
+ else
+ jump(loop);
+
failures.append(jump());
Label backtrackBegin(this);
loadFromFrame(term.frameLocation, countRegister);
atEndOfInput().linkTo(hardFail, this);
- branch32(Equal, countRegister, Imm32(term.quantityCount), hardFail);
+ if (term.quantityCount != 0xffffffff)
+ branch32(Equal, countRegister, Imm32(term.quantityCount), hardFail);
if (m_pattern.m_ignoreCase && isASCIIAlpha(ch)) {
readCharacter(state.inputOffset(), character);
or32(Imm32(32), character);
add32(Imm32(1), countRegister);
add32(Imm32(1), index);
- branch32(NotEqual, countRegister, Imm32(term.quantityCount)).linkTo(loop, this);
+ if (term.quantityCount != 0xffffffff)
+ branch32(NotEqual, countRegister, Imm32(term.quantityCount)).linkTo(loop, this);
+ else
+ jump(loop);
+
failures.append(jump());
Label backtrackBegin(this);
break;
case PatternTerm::TypeBackReference:
- m_generationFailed = true;
+ ASSERT_NOT_REACHED();
break;
case PatternTerm::TypeForwardReference:
break;
case PatternTerm::TypeParenthesesSubpattern:
- if ((term.quantityCount == 1) && !term.parentheses.isCopy)
- generateParenthesesSingle(state);
- else
- m_generationFailed = true;
+            ASSERT((term.quantityCount == 1) && !term.parentheses.isCopy); // must fall back to PCRE before this point
+ generateParenthesesSingle(state);
break;
case PatternTerm::TypeParentheticalAssertion:
push(ARMRegisters::r5);
push(ARMRegisters::r6);
move(ARMRegisters::r3, output);
+#elif CPU(MIPS)
+ // Do nothing.
#endif
}
pop(ARMRegisters::r6);
pop(ARMRegisters::r5);
pop(ARMRegisters::r4);
+#elif CPU(MIPS)
+ // Do nothing
#endif
ret();
}
public:
RegexGenerator(RegexPattern& pattern)
: m_pattern(pattern)
- , m_generationFailed(false)
{
}
jitObject.set(patchBuffer.finalizeCode());
}
- bool generationFailed()
- {
- return m_generationFailed;
- }
-
private:
RegexPattern& m_pattern;
Vector<AlternativeBacktrackRecord> m_backtrackRecords;
- bool m_generationFailed;
};
void jitCompileRegex(JSGlobalData* globalData, RegexCodeBlock& jitObject, const UString& patternString, unsigned& numSubpatterns, const char*& error, bool ignoreCase, bool multiline)
{
RegexPattern pattern(ignoreCase, multiline);
-
if ((error = compileRegex(patternString, pattern)))
return;
-
numSubpatterns = pattern.m_numSubpatterns;
- RegexGenerator generator(pattern);
- generator.compile(globalData, jitObject);
-
- if (generator.generationFailed()) {
- JSRegExpIgnoreCaseOption ignoreCaseOption = ignoreCase ? JSRegExpIgnoreCase : JSRegExpDoNotIgnoreCase;
- JSRegExpMultilineOption multilineOption = multiline ? JSRegExpMultiline : JSRegExpSingleLine;
- jitObject.setFallback(jsRegExpCompile(reinterpret_cast<const UChar*>(patternString.data()), patternString.size(), ignoreCaseOption, multilineOption, &numSubpatterns, &error));
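+    // Patterns flagged with m_shouldFallBack during construction (back references, and
+    // parenthesized subpatterns quantified more than once), or any pattern when the JIT
+    // is unavailable, are handed to the PCRE-based fallback instead of being JIT compiled.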
+ if (!pattern.m_shouldFallBack && globalData->canUseJIT()) {
+ RegexGenerator generator(pattern);
+ generator.compile(globalData, jitObject);
+ return;
}
+
+ JSRegExpIgnoreCaseOption ignoreCaseOption = ignoreCase ? JSRegExpIgnoreCase : JSRegExpDoNotIgnoreCase;
+ JSRegExpMultilineOption multilineOption = multiline ? JSRegExpMultiline : JSRegExpSingleLine;
+ jitObject.setFallback(jsRegExpCompile(reinterpret_cast<const UChar*>(patternString.data()), patternString.size(), ignoreCaseOption, multilineOption, &numSubpatterns, &error));
}
}}
#ifndef RegexJIT_h
#define RegexJIT_h
-#include <wtf/Platform.h>
-
#if ENABLE(YARR_JIT)
#include "MacroAssembler.h"
JSRegExp* getFallback() { return m_fallback; }
void setFallback(JSRegExp* fallback) { m_fallback = fallback; }
- bool operator!() { return !m_ref.m_code.executableAddress(); }
+ bool operator!() { return (!m_ref.m_code.executableAddress() && !m_fallback); }
void set(MacroAssembler::CodeRef ref) { m_ref = ref; }
int execute(const UChar* input, unsigned start, unsigned length, int* output)
#ifndef RegexParser_h
#define RegexParser_h
-#include <wtf/Platform.h>
-
#if ENABLE(YARR)
#include <UString.h>
+#include <limits.h>
#include <wtf/ASCIICType.h>
#include <wtf/unicode/Unicode.h>
-#include <limits.h>
namespace JSC { namespace Yarr {
#ifndef RegexPattern_h
#define RegexPattern_h
-#include <wtf/Platform.h>
#if ENABLE(YARR)
}
};
+struct CharacterClassTable : RefCounted<CharacterClassTable> {
+ const char* m_table;
+ bool m_inverted;
+ static PassRefPtr<CharacterClassTable> create(const char* table, bool inverted)
+ {
+ return adoptRef(new CharacterClassTable(table, inverted));
+ }
+
+private:
+ CharacterClassTable(const char* table, bool inverted)
+ : m_table(table)
+ , m_inverted(inverted)
+ {
+ }
+};
+
struct CharacterClass : FastAllocBase {
+    // All CharacterClass instances have to have the full set of matches and ranges;
+    // they may also have an optional table for faster lookups (which must match the
+    // specified matches and ranges).
+ CharacterClass(PassRefPtr<CharacterClassTable> table)
+ : m_table(table)
+ {
+ }
Vector<UChar> m_matches;
Vector<CharacterRange> m_ranges;
Vector<UChar> m_matchesUnicode;
Vector<CharacterRange> m_rangesUnicode;
+ RefPtr<CharacterClassTable> m_table;
};
enum QuantifierType {
, m_multiline(multiline)
, m_numSubpatterns(0)
, m_maxBackReference(0)
+ , m_shouldFallBack(false)
, newlineCached(0)
, digitsCached(0)
, spacesCached(0)
m_numSubpatterns = 0;
m_maxBackReference = 0;
+ m_shouldFallBack = false;
+
newlineCached = 0;
digitsCached = 0;
spacesCached = 0;
bool m_multiline;
unsigned m_numSubpatterns;
unsigned m_maxBackReference;
+ bool m_shouldFallBack;
PatternDisjunction* m_body;
Vector<PatternDisjunction*, 4> m_disjunctions;
Vector<CharacterClass*> m_userCharacterClasses;