Diffstat (limited to 'Source/JavaScriptCore/dfg/DFGRepatch.cpp')
-rw-r--r--  Source/JavaScriptCore/dfg/DFGRepatch.cpp  |  380
1 file changed, 292 insertions(+), 88 deletions(-)
diff --git a/Source/JavaScriptCore/dfg/DFGRepatch.cpp b/Source/JavaScriptCore/dfg/DFGRepatch.cpp
index f2928c290..edf3c9505 100644
--- a/Source/JavaScriptCore/dfg/DFGRepatch.cpp
+++ b/Source/JavaScriptCore/dfg/DFGRepatch.cpp
@@ -32,6 +32,7 @@
#include "DFGSpeculativeJIT.h"
#include "LinkBuffer.h"
#include "Operations.h"
+#include "PolymorphicPutByIdList.h"
#include "RepatchBuffer.h"
namespace JSC { namespace DFG {
@@ -361,7 +362,7 @@ static bool tryBuildGetByIDList(ExecState* exec, JSValue baseValue, const Identi
// place that we made it from. It just so happens to be the place that we are at
// right now!
stubJit.store32(
- MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFGWithInlining()),
+ MacroAssembler::TrustedImm32(exec->codeOriginIndexForDFG()),
CCallHelpers::tagFor(static_cast<VirtualRegister>(RegisterFile::ArgumentCount)));
operationCall = stubJit.call();
@@ -492,7 +493,7 @@ void dfgBuildGetByIDProtoList(ExecState* exec, JSValue baseValue, const Identifi
dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, operationGetById);
}
-static V_DFGOperation_EJCI appropriatePutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
+static V_DFGOperation_EJCI appropriateGenericPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
{
if (slot.isStrictMode()) {
if (putKind == Direct)
@@ -504,6 +505,18 @@ static V_DFGOperation_EJCI appropriatePutByIdFunction(const PutPropertySlot &slo
return operationPutByIdNonStrict;
}
+static V_DFGOperation_EJCI appropriateListBuildingPutByIdFunction(const PutPropertySlot &slot, PutKind putKind)
+{
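+ // Unlike the generic variants above, the BuildList operations give the slow
+ // path a chance to add another case to the polymorphic stub list before the
+ // access is given up on as uncacheable.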
+ if (slot.isStrictMode()) {
+ if (putKind == Direct)
+ return operationPutByIdDirectStrictBuildList;
+ return operationPutByIdStrictBuildList;
+ }
+ if (putKind == Direct)
+ return operationPutByIdDirectNonStrictBuildList;
+ return operationPutByIdNonStrictBuildList;
+}
+
static void testPrototype(MacroAssembler &stubJit, GPRReg scratchGPR, JSValue prototype, MacroAssembler::JumpList& failureCases)
{
if (prototype.isNull())
@@ -515,7 +528,187 @@ static void testPrototype(MacroAssembler &stubJit, GPRReg scratchGPR, JSValue pr
failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(scratchGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(prototype.asCell()->structure())));
}
-static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier&, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+static void emitPutReplaceStub(
+ ExecState* exec,
+ JSValue,
+ const Identifier&,
+ const PutPropertySlot& slot,
+ StructureStubInfo& stubInfo,
+ PutKind,
+ Structure* structure,
+ CodeLocationLabel failureLabel,
+ MacroAssemblerCodeRef& stubRoutine)
+{
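+ // Build a stub for a cached put that replaces an existing property: guard on
+ // the structure, emit a write barrier if the build requires one, then store
+ // the value into inline or out-of-line property storage.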
+ JSGlobalData* globalData = &exec->globalData();
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.valueTagGPR);
+#endif
+ GPRReg valueGPR = static_cast<GPRReg>(stubInfo.valueGPR);
+ GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.scratchGPR);
+ bool needToRestoreScratch = false;
+#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
+ GPRReg scratchGPR2;
+ const bool writeBarrierNeeded = true;
+#else
+ const bool writeBarrierNeeded = false;
+#endif
+
+ MacroAssembler stubJit;
+
+ if (scratchGPR == InvalidGPRReg && (writeBarrierNeeded || !structure->isUsingInlineStorage())) {
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
+ needToRestoreScratch = true;
+ stubJit.push(scratchGPR);
+ }
+
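+ // The structure check is the guard for this stub; on mismatch we jump to the
+ // failure label (the next stub in the list, or the slow path).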
+ MacroAssembler::Jump badStructure = stubJit.branchPtr(
+ MacroAssembler::NotEqual,
+ MacroAssembler::Address(baseGPR, JSCell::structureOffset()),
+ MacroAssembler::TrustedImmPtr(structure));
+
+#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
+ scratchGPR2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
+ stubJit.push(scratchGPR2);
+ SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratchGPR2, WriteBarrierForPropertyAccess);
+ stubJit.pop(scratchGPR2);
+#endif
+
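+ // Store the value. On 64-bit a JSValue is a single pointer-sized word; on
+ // 32-bit the tag and payload are written as two separate 32-bit words.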
+#if USE(JSVALUE64)
+ if (structure->isUsingInlineStorage())
+ stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
+ else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
+ stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
+ }
+#elif USE(JSVALUE32_64)
+ if (structure->isUsingInlineStorage()) {
+ stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ } else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
+ stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ }
+#endif
+
+ MacroAssembler::Jump success;
+ MacroAssembler::Jump failure;
+
+ if (needToRestoreScratch) {
+ stubJit.pop(scratchGPR);
+ success = stubJit.jump();
+
+ badStructure.link(&stubJit);
+ stubJit.pop(scratchGPR);
+ failure = stubJit.jump();
+ } else {
+ success = stubJit.jump();
+ failure = badStructure;
+ }
+
+ LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
+ patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToDone));
+ patchBuffer.link(failure, failureLabel);
+
+ stubRoutine = patchBuffer.finalizeCode();
+}
+
+static void emitPutTransitionStub(
+ ExecState* exec,
+ JSValue,
+ const Identifier&,
+ const PutPropertySlot& slot,
+ StructureStubInfo& stubInfo,
+ PutKind putKind,
+ Structure* structure,
+ Structure* oldStructure,
+ StructureChain* prototypeChain,
+ CodeLocationLabel failureLabel,
+ MacroAssemblerCodeRef& stubRoutine)
+{
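+ // Build a stub for a cached put that adds a new property: guard on the old
+ // structure (and, for non-direct puts, the prototype chain), then write the
+ // new structure pointer and store the value.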
+ JSGlobalData* globalData = &exec->globalData();
+
+ GPRReg baseGPR = static_cast<GPRReg>(stubInfo.baseGPR);
+#if USE(JSVALUE32_64)
+ GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.valueTagGPR);
+#endif
+ GPRReg valueGPR = static_cast<GPRReg>(stubInfo.valueGPR);
+ GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.scratchGPR);
+ bool needToRestoreScratch = false;
+
+ ASSERT(scratchGPR != baseGPR);
+
+ MacroAssembler stubJit;
+
+ MacroAssembler::JumpList failureCases;
+
+ if (scratchGPR == InvalidGPRReg) {
+ scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
+ stubJit.push(scratchGPR);
+ needToRestoreScratch = true;
+ }
+
+ failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
+
+ testPrototype(stubJit, scratchGPR, oldStructure->storedPrototype(), failureCases);
+
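+ // A normal (non-direct) put must also guard the prototype chain, so that the
+ // stub is invalidated if any prototype gains a property (for example a
+ // setter) with the same name.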
+ if (putKind == NotDirect) {
+ for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
+ testPrototype(stubJit, scratchGPR, (*it)->storedPrototype(), failureCases);
+ }
+
+#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
+ // Must always emit this write barrier as the structure transition itself requires it
+ GPRReg scratch2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
+ stubJit.push(scratch2);
+ SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratch2, WriteBarrierForPropertyAccess);
+ stubJit.pop(scratch2);
+#endif
+
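+ // Commit the transition by storing the new structure pointer, then store the
+ // value at its offset in the new layout.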
+ stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
+#if USE(JSVALUE64)
+ if (structure->isUsingInlineStorage())
+ stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
+ else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
+ stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
+ }
+#elif USE(JSVALUE32_64)
+ if (structure->isUsingInlineStorage()) {
+ stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ } else {
+ stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
+ stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
+ stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
+ }
+#endif
+
+ MacroAssembler::Jump success;
+ MacroAssembler::Jump failure;
+
+ if (needToRestoreScratch) {
+ stubJit.pop(scratchGPR);
+ success = stubJit.jump();
+
+ failureCases.link(&stubJit);
+ stubJit.pop(scratchGPR);
+ failure = stubJit.jump();
+ } else
+ success = stubJit.jump();
+
+ LinkBuffer patchBuffer(*globalData, &stubJit, exec->codeBlock());
+ patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToDone));
+ if (needToRestoreScratch)
+ patchBuffer.link(failure, failureLabel);
+ else
+ patchBuffer.link(failureCases, failureLabel);
+
+ stubRoutine = patchBuffer.finalizeCode();
+}
+
+static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier& ident, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
{
CodeBlock* codeBlock = exec->codeBlock();
JSGlobalData* globalData = &exec->globalData();
@@ -545,99 +738,26 @@ static bool tryCachePutByID(ExecState* exec, JSValue baseValue, const Identifier
StructureChain* prototypeChain = structure->prototypeChain(exec);
- GPRReg baseGPR = static_cast<GPRReg>(stubInfo.baseGPR);
-#if USE(JSVALUE32_64)
- GPRReg valueTagGPR = static_cast<GPRReg>(stubInfo.valueTagGPR);
-#endif
- GPRReg valueGPR = static_cast<GPRReg>(stubInfo.valueGPR);
- GPRReg scratchGPR = static_cast<GPRReg>(stubInfo.scratchGPR);
- bool needToRestoreScratch = false;
-
- ASSERT(scratchGPR != baseGPR);
-
- MacroAssembler stubJit;
-
- MacroAssembler::JumpList failureCases;
-
- if (scratchGPR == InvalidGPRReg) {
- scratchGPR = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR);
- stubJit.push(scratchGPR);
- needToRestoreScratch = true;
- }
-
- failureCases.append(stubJit.branchPtr(MacroAssembler::NotEqual, MacroAssembler::Address(baseGPR, JSCell::structureOffset()), MacroAssembler::TrustedImmPtr(oldStructure)));
-
- testPrototype(stubJit, scratchGPR, oldStructure->storedPrototype(), failureCases);
-
- if (putKind == NotDirect) {
- for (WriteBarrier<Structure>* it = prototypeChain->head(); *it; ++it)
- testPrototype(stubJit, scratchGPR, (*it)->storedPrototype(), failureCases);
- }
-
-#if ENABLE(GGC) || ENABLE(WRITE_BARRIER_PROFILING)
- // Must always emit this write barrier as the structure transition itself requires it
- GPRReg scratch2 = SpeculativeJIT::selectScratchGPR(baseGPR, valueGPR, scratchGPR);
- stubJit.push(scratch2);
- SpeculativeJIT::writeBarrier(stubJit, baseGPR, scratchGPR, scratch2, WriteBarrierForPropertyAccess);
- stubJit.pop(scratch2);
-#endif
-
- stubJit.storePtr(MacroAssembler::TrustedImmPtr(structure), MacroAssembler::Address(baseGPR, JSCell::structureOffset()));
-#if USE(JSVALUE64)
- if (structure->isUsingInlineStorage())
- stubJit.storePtr(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue)));
- else {
- stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
- stubJit.storePtr(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue)));
- }
-#elif USE(JSVALUE32_64)
- if (structure->isUsingInlineStorage()) {
- stubJit.store32(valueGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
- stubJit.store32(valueTagGPR, MacroAssembler::Address(baseGPR, JSObject::offsetOfInlineStorage() + slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
- } else {
- stubJit.loadPtr(MacroAssembler::Address(baseGPR, JSObject::offsetOfPropertyStorage()), scratchGPR);
- stubJit.store32(valueGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.payload)));
- stubJit.store32(valueTagGPR, MacroAssembler::Address(scratchGPR, slot.cachedOffset() * sizeof(JSValue) + OBJECT_OFFSETOF(EncodedValueDescriptor, asBits.tag)));
- }
-#endif
-
- MacroAssembler::Jump success;
- MacroAssembler::Jump failure;
-
- if (needToRestoreScratch) {
- stubJit.pop(scratchGPR);
- success = stubJit.jump();
-
- failureCases.link(&stubJit);
- stubJit.pop(scratchGPR);
- failure = stubJit.jump();
- } else
- success = stubJit.jump();
-
- LinkBuffer patchBuffer(*globalData, &stubJit, codeBlock);
- patchBuffer.link(success, stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToDone));
- if (needToRestoreScratch)
- patchBuffer.link(failure, stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase));
- else
- patchBuffer.link(failureCases, stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase));
-
- stubInfo.stubRoutine = patchBuffer.finalizeCode();
+ emitPutTransitionStub(
+ exec, baseValue, ident, slot, stubInfo, putKind,
+ structure, oldStructure, prototypeChain,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase),
+ stubInfo.stubRoutine);
RepatchBuffer repatchBuffer(codeBlock);
repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.deltaCallToStructCheck), CodeLocationLabel(stubInfo.stubRoutine.code()));
- repatchBuffer.relink(stubInfo.callReturnLocation, appropriatePutByIdFunction(slot, putKind));
+ repatchBuffer.relink(stubInfo.callReturnLocation, appropriateListBuildingPutByIdFunction(slot, putKind));
stubInfo.initPutByIdTransition(*globalData, codeBlock->ownerExecutable(), oldStructure, structure, prototypeChain, putKind == Direct);
return true;
}
- dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriatePutByIdFunction(slot, putKind), false);
+ dfgRepatchByIdSelfAccess(codeBlock, stubInfo, structure, slot.cachedOffset(), appropriateListBuildingPutByIdFunction(slot, putKind), false);
stubInfo.initPutByIdReplace(*globalData, codeBlock->ownerExecutable(), structure);
return true;
}
- // FIXME: should support the transition case!
return false;
}
@@ -645,7 +765,91 @@ void dfgRepatchPutByID(ExecState* exec, JSValue baseValue, const Identifier& pro
{
bool cached = tryCachePutByID(exec, baseValue, propertyName, slot, stubInfo, putKind);
if (!cached)
- dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriatePutByIdFunction(slot, putKind));
+ dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+}
+
+static bool tryBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ CodeBlock* codeBlock = exec->codeBlock();
+ JSGlobalData* globalData = &exec->globalData();
+
+ if (!baseValue.isCell())
+ return false;
+ JSCell* baseCell = baseValue.asCell();
+ Structure* structure = baseCell->structure();
+ Structure* oldStructure = structure->previousID();
+
+ if (!slot.isCacheable())
+ return false;
+ if (structure->isUncacheableDictionary())
+ return false;
+
+ // Optimize self access.
+ if (slot.base() == baseValue) {
+ PolymorphicPutByIdList* list;
+ MacroAssemblerCodeRef stubRoutine;
+
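+ // Each stub's failure path jumps to the previous head of the list (or the
+ // slow path), so successive cases chain together naturally.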
+ if (slot.type() == PutPropertySlot::NewProperty) {
+ if (structure->isDictionary())
+ return false;
+
+ // Skip optimizing the case where we need a realloc.
+ if (oldStructure->propertyStorageCapacity() != structure->propertyStorageCapacity())
+ return false;
+
+ normalizePrototypeChain(exec, baseCell);
+
+ StructureChain* prototypeChain = structure->prototypeChain(exec);
+
+ // We're now committed to creating the stub. Mogrify the meta-data accordingly.
+ list = PolymorphicPutByIdList::from(
+ putKind, stubInfo,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase));
+
+ emitPutTransitionStub(
+ exec, baseValue, propertyName, slot, stubInfo, putKind,
+ structure, oldStructure, prototypeChain,
+ CodeLocationLabel(list->currentSlowPathTarget()),
+ stubRoutine);
+
+ list->addAccess(
+ PutByIdAccess::transition(
+ *globalData, codeBlock->ownerExecutable(),
+ oldStructure, structure, prototypeChain,
+ stubRoutine));
+ } else {
+ // We're now committed to creating the stub. Mogrify the meta-data accordingly.
+ list = PolymorphicPutByIdList::from(
+ putKind, stubInfo,
+ stubInfo.callReturnLocation.labelAtOffset(stubInfo.deltaCallToSlowCase));
+
+ emitPutReplaceStub(
+ exec, baseValue, propertyName, slot, stubInfo, putKind,
+ structure, CodeLocationLabel(list->currentSlowPathTarget()), stubRoutine);
+
+ list->addAccess(
+ PutByIdAccess::replace(
+ *globalData, codeBlock->ownerExecutable(),
+ structure, stubRoutine));
+ }
+
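+ // Point the inline structure-check jump at the newly built stub. Once the
+ // list is full, give up and fall back to the fully generic function.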
+ RepatchBuffer repatchBuffer(codeBlock);
+ repatchBuffer.relink(stubInfo.callReturnLocation.jumpAtOffset(stubInfo.deltaCallToStructCheck), CodeLocationLabel(stubRoutine.code()));
+
+ if (list->isFull())
+ repatchBuffer.relink(stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
+
+ return true;
+ }
+
+ return false;
+}
+
+void dfgBuildPutByIdList(ExecState* exec, JSValue baseValue, const Identifier& propertyName, const PutPropertySlot& slot, StructureStubInfo& stubInfo, PutKind putKind)
+{
+ bool cached = tryBuildPutByIdList(exec, baseValue, propertyName, slot, stubInfo, putKind);
+ if (!cached)
+ dfgRepatchCall(exec->codeBlock(), stubInfo.callReturnLocation, appropriateGenericPutByIdFunction(slot, putKind));
}
void dfgLinkFor(ExecState* exec, CallLinkInfo& callLinkInfo, CodeBlock* calleeCodeBlock, JSFunction* callee, MacroAssemblerCodePtr codePtr, CodeSpecializationKind kind)
@@ -687,14 +891,14 @@ void dfgResetPutByID(RepatchBuffer& repatchBuffer, StructureStubInfo& stubInfo)
{
V_DFGOperation_EJCI unoptimizedFunction = bitwise_cast<V_DFGOperation_EJCI>(MacroAssembler::readCallTarget(stubInfo.callReturnLocation).executableAddress());
V_DFGOperation_EJCI optimizedFunction;
- if (unoptimizedFunction == operationPutByIdStrict)
+ if (unoptimizedFunction == operationPutByIdStrict || unoptimizedFunction == operationPutByIdStrictBuildList)
optimizedFunction = operationPutByIdStrictOptimize;
- else if (unoptimizedFunction == operationPutByIdNonStrict)
+ else if (unoptimizedFunction == operationPutByIdNonStrict || unoptimizedFunction == operationPutByIdNonStrictBuildList)
optimizedFunction = operationPutByIdNonStrictOptimize;
- else if (unoptimizedFunction == operationPutByIdDirectStrict)
+ else if (unoptimizedFunction == operationPutByIdDirectStrict || unoptimizedFunction == operationPutByIdDirectStrictBuildList)
optimizedFunction = operationPutByIdDirectStrictOptimize;
else {
- ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict);
+ ASSERT(unoptimizedFunction == operationPutByIdDirectNonStrict || unoptimizedFunction == operationPutByIdDirectNonStrictBuildList);
optimizedFunction = operationPutByIdDirectNonStrictOptimize;
}
repatchBuffer.relink(stubInfo.callReturnLocation, optimizedFunction);