author     Lorry Tar Creator <lorry-tar-importer@lorry>   2016-04-10 09:28:39 +0000
committer  Lorry Tar Creator <lorry-tar-importer@lorry>   2016-04-10 09:28:39 +0000
commit     32761a6cee1d0dee366b885b7b9c777e67885688 (patch)
tree       d6bec92bebfb216f4126356e55518842c2f476a1 /Source/JavaScriptCore/dfg/DFGGraph.cpp
parent     a4e969f4965059196ca948db781e52f7cfebf19e (diff)
download   WebKitGtk-tarball-32761a6cee1d0dee366b885b7b9c777e67885688.tar.gz
tag        webkitgtk-2.4.11
Diffstat (limited to 'Source/JavaScriptCore/dfg/DFGGraph.cpp')
-rw-r--r--  Source/JavaScriptCore/dfg/DFGGraph.cpp  1208
1 file changed, 212 insertions, 996 deletions
diff --git a/Source/JavaScriptCore/dfg/DFGGraph.cpp b/Source/JavaScriptCore/dfg/DFGGraph.cpp
index 541c84ebb..8256007e9 100644
--- a/Source/JavaScriptCore/dfg/DFGGraph.cpp
+++ b/Source/JavaScriptCore/dfg/DFGGraph.cpp
@@ -1,5 +1,5 @@
/*
- * Copyright (C) 2011, 2013-2015 Apple Inc. All rights reserved.
+ * Copyright (C) 2011, 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
@@ -26,33 +26,23 @@
#include "config.h"
#include "DFGGraph.h"
-#if ENABLE(DFG_JIT)
-
-#include "BytecodeKills.h"
#include "BytecodeLivenessAnalysisInlines.h"
#include "CodeBlock.h"
#include "CodeBlockWithJITType.h"
-#include "DFGBlockWorklist.h"
#include "DFGClobberSet.h"
-#include "DFGClobbersExitState.h"
-#include "DFGCFG.h"
-#include "DFGDominators.h"
#include "DFGJITCode.h"
-#include "DFGMayExit.h"
-#include "DFGNaturalLoops.h"
-#include "DFGPrePostNumbering.h"
#include "DFGVariableAccessDataDump.h"
#include "FullBytecodeLiveness.h"
#include "FunctionExecutableDump.h"
#include "JIT.h"
-#include "JSLexicalEnvironment.h"
-#include "MaxFrameExtentForSlowPathCall.h"
+#include "JSActivation.h"
#include "OperandsInlines.h"
-#include "JSCInlines.h"
-#include "StackAlignment.h"
+#include "Operations.h"
#include <wtf/CommaPrinter.h>
#include <wtf/ListDump.h>
+#if ENABLE(DFG_JIT)
+
namespace JSC { namespace DFG {
// Creates an array of stringized names.
@@ -65,35 +55,27 @@ static const char* dfgOpNames[] = {
Graph::Graph(VM& vm, Plan& plan, LongLivedState& longLivedState)
: m_vm(vm)
, m_plan(plan)
- , m_codeBlock(m_plan.codeBlock)
+ , m_codeBlock(m_plan.codeBlock.get())
, m_profiledBlock(m_codeBlock->alternative())
, m_allocator(longLivedState.m_allocator)
- , m_cfg(std::make_unique<CFG>(*this))
+ , m_mustHandleAbstractValues(OperandsLike, plan.mustHandleValues)
+ , m_inlineCallFrames(adoptPtr(new InlineCallFrameSet()))
+ , m_hasArguments(false)
, m_nextMachineLocal(0)
+ , m_machineCaptureStart(std::numeric_limits<int>::max())
, m_fixpointState(BeforeFixpoint)
- , m_structureRegistrationState(HaveNotStartedRegistering)
, m_form(LoadStore)
, m_unificationState(LocallyUnified)
, m_refCountState(EverythingIsLive)
{
ASSERT(m_profiledBlock);
- m_hasDebuggerEnabled = m_profiledBlock->globalObject()->hasDebugger()
- || Options::forceDebuggerBytecodeGeneration();
+ for (unsigned i = m_mustHandleAbstractValues.size(); i--;)
+ m_mustHandleAbstractValues[i].setMostSpecific(*this, plan.mustHandleValues[i]);
}
Graph::~Graph()
{
- for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
- BasicBlock* block = this->block(blockIndex);
- if (!block)
- continue;
-
- for (unsigned phiIndex = block->phis.size(); phiIndex--;)
- m_allocator.free(block->phis[phiIndex]);
- for (unsigned nodeIndex = block->size(); nodeIndex--;)
- m_allocator.free(block->at(nodeIndex));
- }
m_allocator.freeAll();
}
@@ -108,22 +90,16 @@ static void printWhiteSpace(PrintStream& out, unsigned amount)
out.print(" ");
}
-bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previousNodeRef, Node* currentNode, DumpContext* context)
+bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node* previousNode, Node* currentNode, DumpContext* context)
{
- if (!currentNode->origin.semantic)
- return false;
-
- Node* previousNode = previousNodeRef;
- previousNodeRef = currentNode;
-
if (!previousNode)
return false;
- if (previousNode->origin.semantic.inlineCallFrame == currentNode->origin.semantic.inlineCallFrame)
+ if (previousNode->codeOrigin.inlineCallFrame == currentNode->codeOrigin.inlineCallFrame)
return false;
- Vector<CodeOrigin> previousInlineStack = previousNode->origin.semantic.inlineStack();
- Vector<CodeOrigin> currentInlineStack = currentNode->origin.semantic.inlineStack();
+ Vector<CodeOrigin> previousInlineStack = previousNode->codeOrigin.inlineStack();
+ Vector<CodeOrigin> currentInlineStack = currentNode->codeOrigin.inlineStack();
unsigned commonSize = std::min(previousInlineStack.size(), currentInlineStack.size());
unsigned indexOfDivergence = commonSize;
for (unsigned i = 0; i < commonSize; ++i) {
@@ -156,7 +132,7 @@ bool Graph::dumpCodeOrigin(PrintStream& out, const char* prefix, Node*& previous
int Graph::amountOfNodeWhiteSpace(Node* node)
{
- return (node->origin.semantic.inlineDepth() - 1) * 2;
+ return (node->codeOrigin.inlineDepth() - 1) * 2;
}
void Graph::printNodeWhiteSpace(PrintStream& out, Node* node)
@@ -169,6 +145,7 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
NodeType op = node->op();
unsigned refCount = node->refCount();
+ bool skipped = !refCount;
bool mustGenerate = node->mustGenerate();
if (mustGenerate)
--refCount;
@@ -190,10 +167,11 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
// (5) The arguments to the operation. The may be of the form:
// @# - a NodeIndex referencing a prior node in the graph.
// arg# - an argument number.
+ // $# - the index in the CodeBlock of a constant { for numeric constants the value is displayed | for integers, in both decimal and hex }.
// id# - the index in the CodeBlock of an identifier { if codeBlock is passed to dump(), the string representation is displayed }.
- // var# - the index of a var on the global object, used by GetGlobalVar/GetGlobalLexicalVariable/PutGlobalVariable operations.
- out.printf("% 4d:<%c%u:", (int)node->index(), mustGenerate ? '!' : ' ', refCount);
- if (node->hasResult() && node->hasVirtualRegister() && node->virtualRegister().isValid())
+ // var# - the index of a var on the global object, used by GetGlobalVar/PutGlobalVar operations.
+ out.printf("% 4d:%s<%c%u:", (int)node->index(), skipped ? " skipped " : " ", mustGenerate ? '!' : ' ', refCount);
+ if (node->hasResult() && !skipped && node->hasVirtualRegister())
out.print(node->virtualRegister());
else
out.print("-");
@@ -222,97 +200,88 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
out.print(comma, node->arrayMode());
if (node->hasArithMode())
out.print(comma, node->arithMode());
- if (node->hasArithRoundingMode())
- out.print(comma, "Rounding:", node->arithRoundingMode());
- if (node->hasScopeOffset())
- out.print(comma, node->scopeOffset());
- if (node->hasDirectArgumentsOffset())
- out.print(comma, node->capturedArgumentsOffset());
+ if (node->hasVarNumber())
+ out.print(comma, node->varNumber());
if (node->hasRegisterPointer())
- out.print(comma, "global", "(", RawPointer(node->variablePointer()), ")");
+ out.print(comma, "global", globalObjectFor(node->codeOrigin)->findRegisterIndex(node->registerPointer()), "(", RawPointer(node->registerPointer()), ")");
if (node->hasIdentifier())
out.print(comma, "id", node->identifierNumber(), "{", identifiers()[node->identifierNumber()], "}");
- if (node->hasPromotedLocationDescriptor())
- out.print(comma, node->promotedLocationDescriptor());
if (node->hasStructureSet())
out.print(comma, inContext(node->structureSet(), context));
if (node->hasStructure())
out.print(comma, inContext(*node->structure(), context));
- if (node->hasTransition()) {
- out.print(comma, pointerDumpInContext(node->transition(), context));
-#if USE(JSVALUE64)
- out.print(", ID:", node->transition()->next->id());
-#else
- out.print(", ID:", RawPointer(node->transition()->next));
-#endif
+ if (node->hasStructureTransitionData())
+ out.print(comma, inContext(*node->structureTransitionData().previousStructure, context), " -> ", inContext(*node->structureTransitionData().newStructure, context));
+ if (node->hasFunction()) {
+ out.print(comma, "function(", RawPointer(node->function()), ", ");
+ if (node->function()->inherits(JSFunction::info())) {
+ JSFunction* function = jsCast<JSFunction*>(node->function());
+ if (function->isHostFunction())
+ out.print("<host function>");
+ else
+ out.print(FunctionExecutableDump(function->jsExecutable()));
+ } else
+ out.print("<not JSFunction>");
+ out.print(")");
}
- if (node->hasCellOperand()) {
- if (!node->cellOperand()->value() || !node->cellOperand()->value().isCell())
- out.print(comma, "invalid cell operand: ", node->cellOperand()->value());
- else {
- out.print(comma, pointerDump(node->cellOperand()->value().asCell()));
- if (node->cellOperand()->value().isCell()) {
- CallVariant variant(node->cellOperand()->value().asCell());
- if (ExecutableBase* executable = variant.executable()) {
- if (executable->isHostFunction())
- out.print(comma, "<host function>");
- else if (FunctionExecutable* functionExecutable = jsDynamicCast<FunctionExecutable*>(executable))
- out.print(comma, FunctionExecutableDump(functionExecutable));
- else
- out.print(comma, "<non-function executable>");
- }
- }
- }
+ if (node->hasExecutable()) {
+ if (node->executable()->inherits(FunctionExecutable::info()))
+ out.print(comma, "executable(", FunctionExecutableDump(jsCast<FunctionExecutable*>(node->executable())), ")");
+ else
+ out.print(comma, "executable(not function: ", RawPointer(node->executable()), ")");
+ }
+ if (node->hasFunctionDeclIndex()) {
+ FunctionExecutable* executable = m_codeBlock->functionDecl(node->functionDeclIndex());
+ out.print(comma, FunctionExecutableDump(executable));
+ }
+ if (node->hasFunctionExprIndex()) {
+ FunctionExecutable* executable = m_codeBlock->functionExpr(node->functionExprIndex());
+ out.print(comma, FunctionExecutableDump(executable));
}
if (node->hasStorageAccessData()) {
- StorageAccessData& storageAccessData = node->storageAccessData();
+ StorageAccessData& storageAccessData = m_storageAccessData[node->storageAccessDataIndex()];
out.print(comma, "id", storageAccessData.identifierNumber, "{", identifiers()[storageAccessData.identifierNumber], "}");
out.print(", ", static_cast<ptrdiff_t>(storageAccessData.offset));
- out.print(", inferredType = ", inContext(storageAccessData.inferredType, context));
- }
- if (node->hasMultiGetByOffsetData()) {
- MultiGetByOffsetData& data = node->multiGetByOffsetData();
- out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
- for (unsigned i = 0; i < data.cases.size(); ++i)
- out.print(comma, inContext(data.cases[i], context));
- }
- if (node->hasMultiPutByOffsetData()) {
- MultiPutByOffsetData& data = node->multiPutByOffsetData();
- out.print(comma, "id", data.identifierNumber, "{", identifiers()[data.identifierNumber], "}");
- for (unsigned i = 0; i < data.variants.size(); ++i)
- out.print(comma, inContext(data.variants[i], context));
}
ASSERT(node->hasVariableAccessData(*this) == node->hasLocal(*this));
if (node->hasVariableAccessData(*this)) {
- VariableAccessData* variableAccessData = node->tryGetVariableAccessData();
- if (variableAccessData) {
- VirtualRegister operand = variableAccessData->local();
- out.print(comma, variableAccessData->local(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
- operand = variableAccessData->machineLocal();
- if (operand.isValid())
- out.print(comma, "machine:", operand);
+ VariableAccessData* variableAccessData = node->variableAccessData();
+ VirtualRegister operand = variableAccessData->local();
+ if (operand.isArgument())
+ out.print(comma, "arg", operand.toArgument(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
+ else
+ out.print(comma, "loc", operand.toLocal(), "(", VariableAccessDataDump(*this, variableAccessData), ")");
+
+ operand = variableAccessData->machineLocal();
+ if (operand.isValid()) {
+ if (operand.isArgument())
+ out.print(comma, "machine:arg", operand.toArgument());
+ else
+ out.print(comma, "machine:loc", operand.toLocal());
}
}
- if (node->hasStackAccessData()) {
- StackAccessData* data = node->stackAccessData();
- out.print(comma, data->local);
- if (data->machineLocal.isValid())
- out.print(comma, "machine:", data->machineLocal);
- out.print(comma, data->format);
+ if (node->hasUnlinkedLocal()) {
+ VirtualRegister operand = node->unlinkedLocal();
+ if (operand.isArgument())
+ out.print(comma, "arg", operand.toArgument());
+ else
+ out.print(comma, "loc", operand.toLocal());
}
- if (node->hasUnlinkedLocal())
- out.print(comma, node->unlinkedLocal());
if (node->hasUnlinkedMachineLocal()) {
VirtualRegister operand = node->unlinkedMachineLocal();
- if (operand.isValid())
- out.print(comma, "machine:", operand);
+ if (operand.isValid()) {
+ if (operand.isArgument())
+ out.print(comma, "machine:arg", operand.toArgument());
+ else
+ out.print(comma, "machine:loc", operand.toLocal());
+ }
}
if (node->hasConstantBuffer()) {
out.print(comma);
out.print(node->startConstant(), ":[");
CommaPrinter anotherComma;
for (unsigned i = 0; i < node->numConstants(); ++i)
- out.print(anotherComma, pointerDumpInContext(freeze(m_codeBlock->constantBuffer(node->startConstant())[i]), context));
+ out.print(anotherComma, inContext(m_codeBlock->constantBuffer(node->startConstant())[i], context));
out.print("]");
}
if (node->hasIndexingType())
@@ -323,36 +292,29 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
out.print(comma, "^", node->phi()->index());
if (node->hasExecutionCounter())
out.print(comma, RawPointer(node->executionCounter()));
- if (node->hasWatchpointSet())
- out.print(comma, RawPointer(node->watchpointSet()));
+ if (node->hasVariableWatchpointSet())
+ out.print(comma, RawPointer(node->variableWatchpointSet()));
+ if (node->hasTypedArray())
+ out.print(comma, inContext(JSValue(node->typedArray()), context));
if (node->hasStoragePointer())
out.print(comma, RawPointer(node->storagePointer()));
- if (node->hasObjectMaterializationData())
- out.print(comma, node->objectMaterializationData());
- if (node->hasCallVarargsData())
- out.print(comma, "firstVarArgOffset = ", node->callVarargsData()->firstVarArgOffset);
- if (node->hasLoadVarargsData()) {
- LoadVarargsData* data = node->loadVarargsData();
- out.print(comma, "start = ", data->start, ", count = ", data->count);
- if (data->machineStart.isValid())
- out.print(", machineStart = ", data->machineStart);
- if (data->machineCount.isValid())
- out.print(", machineCount = ", data->machineCount);
- out.print(", offset = ", data->offset, ", mandatoryMinimum = ", data->mandatoryMinimum);
- out.print(", limit = ", data->limit);
- }
- if (node->isConstant())
- out.print(comma, pointerDumpInContext(node->constant(), context));
- if (node->isJump())
- out.print(comma, "T:", *node->targetBlock());
+ if (op == JSConstant) {
+ out.print(comma, "$", node->constantNumber());
+ JSValue value = valueOfJSConstant(node);
+ out.print(" = ", inContext(value, context));
+ }
+ if (op == WeakJSConstant)
+ out.print(comma, RawPointer(node->weakConstant()), " (", inContext(*node->weakConstant()->structure(), context), ")");
+ if (node->isBranch() || node->isJump())
+ out.print(comma, "T:", *node->takenBlock());
if (node->isBranch())
- out.print(comma, "T:", node->branchData()->taken, ", F:", node->branchData()->notTaken);
+ out.print(comma, "F:", *node->notTakenBlock());
if (node->isSwitch()) {
SwitchData* data = node->switchData();
out.print(comma, data->kind);
for (unsigned i = 0; i < data->cases.size(); ++i)
- out.print(comma, inContext(data->cases[i].value, context), ":", data->cases[i].target);
- out.print(comma, "default:", data->fallThrough);
+ out.print(comma, inContext(data->cases[i].value, context), ":", *data->cases[i].target);
+ out.print(comma, "default:", *data->fallThrough);
}
ClobberSet reads;
ClobberSet writes;
@@ -361,66 +323,45 @@ void Graph::dump(PrintStream& out, const char* prefix, Node* node, DumpContext*
out.print(comma, "R:", sortedListDump(reads.direct(), ","));
if (!writes.isEmpty())
out.print(comma, "W:", sortedListDump(writes.direct(), ","));
- ExitMode exitMode = mayExit(*this, node);
- if (exitMode != DoesNotExit)
- out.print(comma, exitMode);
- if (clobbersExitState(*this, node))
- out.print(comma, "ClobbersExit");
- if (node->origin.isSet()) {
- out.print(comma, "bc#", node->origin.semantic.bytecodeIndex);
- if (node->origin.semantic != node->origin.forExit && node->origin.forExit.isSet())
- out.print(comma, "exit: ", node->origin.forExit);
- }
- if (!node->origin.exitOK)
- out.print(comma, "ExitInvalid");
+ out.print(comma, "bc#", node->codeOrigin.bytecodeIndex);
+
out.print(")");
- if (node->hasVariableAccessData(*this) && node->tryGetVariableAccessData())
- out.print(" predicting ", SpeculationDump(node->tryGetVariableAccessData()->prediction()));
- else if (node->hasHeapPrediction())
- out.print(" predicting ", SpeculationDump(node->getHeapPrediction()));
+ if (!skipped) {
+ if (node->hasVariableAccessData(*this))
+ out.print(" predicting ", SpeculationDump(node->variableAccessData()->prediction()));
+ else if (node->hasHeapPrediction())
+ out.print(" predicting ", SpeculationDump(node->getHeapPrediction()));
+ }
out.print("\n");
}
-bool Graph::terminalsAreValid()
-{
- for (BasicBlock* block : blocksInNaturalOrder()) {
- if (!block->terminal())
- return false;
- }
- return true;
-}
-
void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* block, PhiNodeDumpMode phiNodeDumpMode, DumpContext* context)
{
- out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->origin.semantic, context), "):", block->isReachable ? "" : " (skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
- if (block->executionCount == block->executionCount)
- out.print(prefix, " Execution count: ", block->executionCount, "\n");
+ out.print(prefix, "Block ", *block, " (", inContext(block->at(0)->codeOrigin, context), "): ", block->isReachable ? "" : "(skipped)", block->isOSRTarget ? " (OSR target)" : "", "\n");
out.print(prefix, " Predecessors:");
for (size_t i = 0; i < block->predecessors.size(); ++i)
out.print(" ", *block->predecessors[i]);
out.print("\n");
- out.print(prefix, " Successors:");
- if (block->terminal()) {
- for (BasicBlock* successor : block->successors()) {
- out.print(" ", *successor);
- if (m_prePostNumbering)
- out.print(" (", m_prePostNumbering->edgeKind(block, successor), ")");
+ if (m_dominators.isValid()) {
+ out.print(prefix, " Dominated by:");
+ for (size_t i = 0; i < m_blocks.size(); ++i) {
+ if (!m_dominators.dominates(i, block->index))
+ continue;
+ out.print(" #", i);
}
- } else
- out.print(" <invalid>");
- out.print("\n");
- if (m_dominators && terminalsAreValid()) {
- out.print(prefix, " Dominated by: ", m_dominators->dominatorsOf(block), "\n");
- out.print(prefix, " Dominates: ", m_dominators->blocksDominatedBy(block), "\n");
- out.print(prefix, " Dominance Frontier: ", m_dominators->dominanceFrontierOf(block), "\n");
- out.print(prefix, " Iterated Dominance Frontier: ", m_dominators->iteratedDominanceFrontierOf(BlockList(1, block)), "\n");
+ out.print("\n");
+ out.print(prefix, " Dominates:");
+ for (size_t i = 0; i < m_blocks.size(); ++i) {
+ if (!m_dominators.dominates(block->index, i))
+ continue;
+ out.print(" #", i);
+ }
+ out.print("\n");
}
- if (m_prePostNumbering)
- out.print(prefix, " Pre/Post Numbering: ", m_prePostNumbering->preNumber(block), "/", m_prePostNumbering->postNumber(block), "\n");
- if (m_naturalLoops) {
- if (const NaturalLoop* loop = m_naturalLoops->headerOf(block)) {
+ if (m_naturalLoops.isValid()) {
+ if (const NaturalLoop* loop = m_naturalLoops.headerOf(block)) {
out.print(prefix, " Loop header, contains:");
Vector<BlockIndex> sortedBlockList;
for (unsigned i = 0; i < loop->size(); ++i)
@@ -432,7 +373,7 @@ void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* bl
}
Vector<const NaturalLoop*> containingLoops =
- m_naturalLoops->loopsOf(block);
+ m_naturalLoops.loopsOf(block);
if (!containingLoops.isEmpty()) {
out.print(prefix, " Containing loop headers:");
for (unsigned i = 0; i < containingLoops.size(); ++i)
@@ -446,7 +387,7 @@ void Graph::dumpBlockHeader(PrintStream& out, const char* prefix, BasicBlock* bl
Node* phiNode = block->phis[i];
if (!phiNode->shouldGenerate() && phiNodeDumpMode == DumpLivePhisOnly)
continue;
- out.print(" @", phiNode->index(), "<", phiNode->local(), ",", phiNode->refCount(), ">->(");
+ out.print(" @", phiNode->index(), "<", phiNode->refCount(), ">->(");
if (phiNode->child1()) {
out.print("@", phiNode->child1()->index());
if (phiNode->child2()) {
@@ -468,48 +409,33 @@ void Graph::dump(PrintStream& out, DumpContext* context)
if (!context)
context = &myContext;
- out.print("\n");
- out.print("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
- out.print(" Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
- if (m_form == SSA)
- out.print(" Argument formats: ", listDump(m_argumentFormats), "\n");
- else
- out.print(" Arguments: ", listDump(m_arguments), "\n");
- out.print("\n");
+ dataLog("\n");
+ dataLog("DFG for ", CodeBlockWithJITType(m_codeBlock, JITCode::DFGJIT), ":\n");
+ dataLog(" Fixpoint state: ", m_fixpointState, "; Form: ", m_form, "; Unification state: ", m_unificationState, "; Ref count state: ", m_refCountState, "\n");
+ dataLog("\n");
- Node* lastNode = nullptr;
+ Node* lastNode = 0;
for (size_t b = 0; b < m_blocks.size(); ++b) {
BasicBlock* block = m_blocks[b].get();
if (!block)
continue;
dumpBlockHeader(out, "", block, DumpAllPhis, context);
- out.print(" States: ", block->cfaStructureClobberStateAtHead);
- if (!block->cfaHasVisited)
- out.print(", CurrentlyCFAUnreachable");
- if (!block->intersectionOfCFAHasVisited)
- out.print(", CFAUnreachable");
- out.print("\n");
switch (m_form) {
case LoadStore:
case ThreadedCPS: {
- out.print(" Vars Before: ");
+ out.print(" vars before: ");
if (block->cfaHasVisited)
out.print(inContext(block->valuesAtHead, context));
else
out.print("<empty>");
out.print("\n");
- out.print(" Intersected Vars Before: ");
- if (block->intersectionOfCFAHasVisited)
- out.print(inContext(block->intersectionOfPastValuesAtHead, context));
- else
- out.print("<empty>");
- out.print("\n");
- out.print(" Var Links: ", block->variablesAtHead, "\n");
+ out.print(" var links: ", block->variablesAtHead, "\n");
break;
}
case SSA: {
RELEASE_ASSERT(block->ssa);
+ out.print(" Flush format: ", block->ssa->flushAtHead, "\n");
out.print(" Availability: ", block->ssa->availabilityAtHead, "\n");
out.print(" Live: ", nodeListDump(block->ssa->liveAtHead), "\n");
out.print(" Values: ", nodeMapDump(block->ssa->valuesAtHead, context), "\n");
@@ -518,45 +444,35 @@ void Graph::dump(PrintStream& out, DumpContext* context)
for (size_t i = 0; i < block->size(); ++i) {
dumpCodeOrigin(out, "", lastNode, block->at(i), context);
dump(out, "", block->at(i), context);
+ lastNode = block->at(i);
}
- out.print(" States: ", block->cfaBranchDirection, ", ", block->cfaStructureClobberStateAtTail);
- if (!block->cfaDidFinish)
- out.print(", CFAInvalidated");
- out.print("\n");
switch (m_form) {
case LoadStore:
case ThreadedCPS: {
- out.print(" Vars After: ");
+ out.print(" vars after: ");
if (block->cfaHasVisited)
out.print(inContext(block->valuesAtTail, context));
else
out.print("<empty>");
out.print("\n");
- out.print(" Var Links: ", block->variablesAtTail, "\n");
+ out.print(" var links: ", block->variablesAtTail, "\n");
break;
}
case SSA: {
RELEASE_ASSERT(block->ssa);
+ out.print(" Flush format: ", block->ssa->flushAtTail, "\n");
out.print(" Availability: ", block->ssa->availabilityAtTail, "\n");
out.print(" Live: ", nodeListDump(block->ssa->liveAtTail), "\n");
out.print(" Values: ", nodeMapDump(block->ssa->valuesAtTail, context), "\n");
break;
} }
- out.print("\n");
- }
-
- out.print("GC Values:\n");
- for (FrozenValue* value : m_frozenValues) {
- if (value->pointsToHeap())
- out.print(" ", inContext(*value, &myContext), "\n");
+ dataLog("\n");
}
-
- out.print(inContext(watchpoints(), &myContext));
if (!myContext.isEmpty()) {
- myContext.dump(out);
- out.print("\n");
+ myContext.dump(WTF::dataFile());
+ dataLog("\n");
}
}
@@ -618,113 +534,6 @@ void Graph::resetReachability()
determineReachability();
}
-namespace {
-
-class RefCountCalculator {
-public:
- RefCountCalculator(Graph& graph)
- : m_graph(graph)
- {
- }
-
- void calculate()
- {
- // First reset the counts to 0 for all nodes.
- for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
- BasicBlock* block = m_graph.block(blockIndex);
- if (!block)
- continue;
- for (unsigned indexInBlock = block->size(); indexInBlock--;)
- block->at(indexInBlock)->setRefCount(0);
- for (unsigned phiIndex = block->phis.size(); phiIndex--;)
- block->phis[phiIndex]->setRefCount(0);
- }
-
- // Now find the roots:
- // - Nodes that are must-generate.
- // - Nodes that are reachable from type checks.
- // Set their ref counts to 1 and put them on the worklist.
- for (BlockIndex blockIndex = 0; blockIndex < m_graph.numBlocks(); ++blockIndex) {
- BasicBlock* block = m_graph.block(blockIndex);
- if (!block)
- continue;
- for (unsigned indexInBlock = block->size(); indexInBlock--;) {
- Node* node = block->at(indexInBlock);
- DFG_NODE_DO_TO_CHILDREN(m_graph, node, findTypeCheckRoot);
- if (!(node->flags() & NodeMustGenerate))
- continue;
- if (!node->postfixRef())
- m_worklist.append(node);
- }
- }
-
- while (!m_worklist.isEmpty()) {
- while (!m_worklist.isEmpty()) {
- Node* node = m_worklist.last();
- m_worklist.removeLast();
- ASSERT(node->shouldGenerate()); // It should not be on the worklist unless it's ref'ed.
- DFG_NODE_DO_TO_CHILDREN(m_graph, node, countEdge);
- }
-
- if (m_graph.m_form == SSA) {
- // Find Phi->Upsilon edges, which are represented as meta-data in the
- // Upsilon.
- for (BlockIndex blockIndex = m_graph.numBlocks(); blockIndex--;) {
- BasicBlock* block = m_graph.block(blockIndex);
- if (!block)
- continue;
- for (unsigned nodeIndex = block->size(); nodeIndex--;) {
- Node* node = block->at(nodeIndex);
- if (node->op() != Upsilon)
- continue;
- if (node->shouldGenerate())
- continue;
- if (node->phi()->shouldGenerate())
- countNode(node);
- }
- }
- }
- }
- }
-
-private:
- void findTypeCheckRoot(Node*, Edge edge)
- {
- // We may have an "unproved" untyped use for code that is unreachable. The CFA
- // will just not have gotten around to it.
- if (edge.isProved() || edge.willNotHaveCheck())
- return;
- if (!edge->postfixRef())
- m_worklist.append(edge.node());
- }
-
- void countNode(Node* node)
- {
- if (node->postfixRef())
- return;
- m_worklist.append(node);
- }
-
- void countEdge(Node*, Edge edge)
- {
- // Don't count edges that are already counted for their type checks.
- if (!(edge.isProved() || edge.willNotHaveCheck()))
- return;
- countNode(edge.node());
- }
-
- Graph& m_graph;
- Vector<Node*, 128> m_worklist;
-};
-
-} // anonymous namespace
-
-void Graph::computeRefCounts()
-{
- RefCountCalculator calculator(*this);
- calculator.calculate();
-}
-
void Graph::killBlockAndItsContents(BasicBlock* block)
{
for (unsigned phiIndex = block->phis.size(); phiIndex--;)
@@ -748,15 +557,29 @@ void Graph::killUnreachableBlocks()
}
}
+void Graph::resetExitStates()
+{
+ for (BlockIndex blockIndex = 0; blockIndex < m_blocks.size(); ++blockIndex) {
+ BasicBlock* block = m_blocks[blockIndex].get();
+ if (!block)
+ continue;
+ for (unsigned indexInBlock = block->size(); indexInBlock--;)
+ block->at(indexInBlock)->setCanExit(true);
+ }
+}
+
void Graph::invalidateCFG()
{
- m_dominators = nullptr;
- m_naturalLoops = nullptr;
- m_prePostNumbering = nullptr;
+ m_dominators.invalidate();
+ m_naturalLoops.invalidate();
}
void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, VariableAccessData* variableAccessData, Node* newGetLocal)
{
+ if (variableAccessData->isCaptured()) {
+ // Let CSE worry about this one.
+ return;
+ }
for (unsigned indexInBlock = startIndexInBlock; indexInBlock < block.size(); ++indexInBlock) {
Node* node = block[indexInBlock];
bool shouldContinue = true;
@@ -786,37 +609,26 @@ void Graph::substituteGetLocal(BasicBlock& block, unsigned startIndexInBlock, Va
}
}
-BlockList Graph::blocksInPreOrder()
+void Graph::addForDepthFirstSort(Vector<BasicBlock*>& result, Vector<BasicBlock*, 16>& worklist, HashSet<BasicBlock*>& seen, BasicBlock* block)
{
- BlockList result;
- BlockWorklist worklist;
- worklist.push(block(0));
- while (BasicBlock* block = worklist.pop()) {
- result.append(block);
- for (unsigned i = block->numSuccessors(); i--;)
- worklist.push(block->successor(i));
- }
- return result;
+ if (seen.contains(block))
+ return;
+
+ result.append(block);
+ worklist.append(block);
+ seen.add(block);
}
-BlockList Graph::blocksInPostOrder()
+void Graph::getBlocksInDepthFirstOrder(Vector<BasicBlock*>& result)
{
- BlockList result;
- PostOrderBlockWorklist worklist;
- worklist.push(block(0));
- while (BlockWithOrder item = worklist.pop()) {
- switch (item.order) {
- case VisitOrder::Pre:
- worklist.pushPost(item.node);
- for (unsigned i = item.node->numSuccessors(); i--;)
- worklist.push(item.node->successor(i));
- break;
- case VisitOrder::Post:
- result.append(item.node);
- break;
- }
+ Vector<BasicBlock*, 16> worklist;
+ HashSet<BasicBlock*> seen;
+ addForDepthFirstSort(result, worklist, seen, block(0));
+ while (!worklist.isEmpty()) {
+ BasicBlock* block = worklist.takeLast();
+ for (unsigned i = block->numSuccessors(); i--;)
+ addForDepthFirstSort(result, worklist, seen, block->successor(i));
}
- return result;
}
void Graph::clearReplacements()
@@ -826,22 +638,9 @@ void Graph::clearReplacements()
if (!block)
continue;
for (unsigned phiIndex = block->phis.size(); phiIndex--;)
- block->phis[phiIndex]->setReplacement(nullptr);
+ block->phis[phiIndex]->misc.replacement = 0;
for (unsigned nodeIndex = block->size(); nodeIndex--;)
- block->at(nodeIndex)->setReplacement(nullptr);
- }
-}
-
-void Graph::clearEpochs()
-{
- for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
- BasicBlock* block = m_blocks[blockIndex].get();
- if (!block)
- continue;
- for (unsigned phiIndex = block->phis.size(); phiIndex--;)
- block->phis[phiIndex]->setEpoch(Epoch());
- for (unsigned nodeIndex = block->size(); nodeIndex--;)
- block->at(nodeIndex)->setEpoch(Epoch());
+ block->at(nodeIndex)->misc.replacement = 0;
}
}
@@ -852,79 +651,10 @@ void Graph::initializeNodeOwners()
if (!block)
continue;
for (unsigned phiIndex = block->phis.size(); phiIndex--;)
- block->phis[phiIndex]->owner = block;
- for (unsigned nodeIndex = block->size(); nodeIndex--;)
- block->at(nodeIndex)->owner = block;
- }
-}
-
-void Graph::clearFlagsOnAllNodes(NodeFlags flags)
-{
- for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
- BasicBlock* block = m_blocks[blockIndex].get();
- if (!block)
- continue;
- for (unsigned phiIndex = block->phis.size(); phiIndex--;)
- block->phis[phiIndex]->clearFlags(flags);
+ block->phis[phiIndex]->misc.owner = block;
for (unsigned nodeIndex = block->size(); nodeIndex--;)
- block->at(nodeIndex)->clearFlags(flags);
- }
-}
-
-bool Graph::watchCondition(const ObjectPropertyCondition& key)
-{
- if (!key.isWatchable())
- return false;
-
- m_plan.weakReferences.addLazily(key.object());
- if (key.hasPrototype())
- m_plan.weakReferences.addLazily(key.prototype());
- if (key.hasRequiredValue())
- m_plan.weakReferences.addLazily(key.requiredValue());
-
- m_plan.watchpoints.addLazily(key);
-
- if (key.kind() == PropertyCondition::Presence)
- m_safeToLoad.add(std::make_pair(key.object(), key.offset()));
-
- return true;
-}
-
-bool Graph::isSafeToLoad(JSObject* base, PropertyOffset offset)
-{
- return m_safeToLoad.contains(std::make_pair(base, offset));
-}
-
-InferredType::Descriptor Graph::inferredTypeFor(const PropertyTypeKey& key)
-{
- assertIsRegistered(key.structure());
-
- auto iter = m_inferredTypes.find(key);
- if (iter != m_inferredTypes.end())
- return iter->value;
-
- InferredType* typeObject = key.structure()->inferredTypeFor(key.uid());
- if (!typeObject) {
- m_inferredTypes.add(key, InferredType::Top);
- return InferredType::Top;
+ block->at(nodeIndex)->misc.owner = block;
}
-
- InferredType::Descriptor typeDescriptor = typeObject->descriptor();
- if (typeDescriptor.kind() == InferredType::Top) {
- m_inferredTypes.add(key, InferredType::Top);
- return InferredType::Top;
- }
-
- m_inferredTypes.add(key, typeDescriptor);
-
- m_plan.weakReferences.addLazily(typeObject);
- registerInferredType(typeDescriptor);
-
- // Note that we may already be watching this desired inferred type, because multiple structures may
- // point to the same InferredType instance.
- m_plan.watchpoints.addLazily(DesiredInferredType(typeObject, typeDescriptor));
-
- return typeDescriptor;
}
FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
@@ -936,7 +666,7 @@ FullBytecodeLiveness& Graph::livenessFor(CodeBlock* codeBlock)
std::unique_ptr<FullBytecodeLiveness> liveness = std::make_unique<FullBytecodeLiveness>();
codeBlock->livenessAnalysis().computeFullLiveness(*liveness);
FullBytecodeLiveness& result = *liveness;
- m_bytecodeLiveness.add(codeBlock, WTFMove(liveness));
+ m_bytecodeLiveness.add(codeBlock, std::move(liveness));
return result;
}
@@ -945,98 +675,49 @@ FullBytecodeLiveness& Graph::livenessFor(InlineCallFrame* inlineCallFrame)
return livenessFor(baselineCodeBlockFor(inlineCallFrame));
}
-BytecodeKills& Graph::killsFor(CodeBlock* codeBlock)
-{
- HashMap<CodeBlock*, std::unique_ptr<BytecodeKills>>::iterator iter = m_bytecodeKills.find(codeBlock);
- if (iter != m_bytecodeKills.end())
- return *iter->value;
-
- std::unique_ptr<BytecodeKills> kills = std::make_unique<BytecodeKills>();
- codeBlock->livenessAnalysis().computeKills(*kills);
- BytecodeKills& result = *kills;
- m_bytecodeKills.add(codeBlock, WTFMove(kills));
- return result;
-}
-
-BytecodeKills& Graph::killsFor(InlineCallFrame* inlineCallFrame)
-{
- return killsFor(baselineCodeBlockFor(inlineCallFrame));
-}
-
bool Graph::isLiveInBytecode(VirtualRegister operand, CodeOrigin codeOrigin)
{
- CodeOrigin* codeOriginPtr = &codeOrigin;
for (;;) {
- VirtualRegister reg = VirtualRegister(
- operand.offset() - codeOriginPtr->stackOffset());
-
- if (operand.offset() < codeOriginPtr->stackOffset() + JSStack::CallFrameHeaderSize) {
+ if (operand.offset() < codeOrigin.stackOffset() + JSStack::CallFrameHeaderSize) {
+ VirtualRegister reg = VirtualRegister(
+ operand.offset() - codeOrigin.stackOffset());
+
if (reg.isArgument()) {
RELEASE_ASSERT(reg.offset() < JSStack::CallFrameHeaderSize);
- if (codeOriginPtr->inlineCallFrame->isClosureCall
- && reg.offset() == JSStack::Callee)
- return true;
+ if (!codeOrigin.inlineCallFrame->isClosureCall)
+ return false;
- if (codeOriginPtr->inlineCallFrame->isVarargs()
- && reg.offset() == JSStack::ArgumentCount)
+ if (reg.offset() == JSStack::Callee)
+ return true;
+ if (reg.offset() == JSStack::ScopeChain)
return true;
return false;
}
- return livenessFor(codeOriginPtr->inlineCallFrame).operandIsLive(
- reg.offset(), codeOriginPtr->bytecodeIndex);
+ return livenessFor(codeOrigin.inlineCallFrame).operandIsLive(
+ reg.offset(), codeOrigin.bytecodeIndex);
}
- InlineCallFrame* inlineCallFrame = codeOriginPtr->inlineCallFrame;
- if (!inlineCallFrame)
+ if (!codeOrigin.inlineCallFrame)
break;
-
- // Arguments are always live. This would be redundant if it wasn't for our
- // op_call_varargs inlining.
- if (reg.isArgument()
- && static_cast<size_t>(reg.toArgument()) < inlineCallFrame->arguments.size())
- return true;
- codeOriginPtr = inlineCallFrame->getCallerSkippingTailCalls();
-
- // The first inline call frame could be an inline tail call
- if (!codeOriginPtr)
- break;
+ codeOrigin = codeOrigin.inlineCallFrame->caller;
}
return true;
}
-BitVector Graph::localsLiveInBytecode(CodeOrigin codeOrigin)
-{
- BitVector result;
- result.ensureSize(block(0)->variablesAtHead.numberOfLocals());
- forAllLocalsLiveInBytecode(
- codeOrigin,
- [&] (VirtualRegister reg) {
- ASSERT(reg.isLocal());
- result.quickSet(reg.toLocal());
- });
- return result;
-}
-
unsigned Graph::frameRegisterCount()
{
- unsigned result = m_nextMachineLocal + std::max(m_parameterSlots, static_cast<unsigned>(maxFrameExtentForSlowPathCallInRegisters));
- return roundLocalRegisterCountForFramePointerOffset(result);
-}
-
-unsigned Graph::stackPointerOffset()
-{
- return virtualRegisterForLocal(frameRegisterCount() - 1).offset();
+ return m_nextMachineLocal + m_parameterSlots;
}
unsigned Graph::requiredRegisterCountForExit()
{
unsigned count = JIT::frameRegisterCountFor(m_profiledBlock);
- for (InlineCallFrameSet::iterator iter = m_plan.inlineCallFrames->begin(); !!iter; ++iter) {
+ for (InlineCallFrameSet::iterator iter = m_inlineCallFrames->begin(); !!iter; ++iter) {
InlineCallFrame* inlineCallFrame = *iter;
CodeBlock* codeBlock = baselineCodeBlockForInlineCallFrame(inlineCallFrame);
unsigned requiredCount = VirtualRegister(inlineCallFrame->stackOffset).toLocal() + 1 + JIT::frameRegisterCountFor(codeBlock);
@@ -1050,512 +731,47 @@ unsigned Graph::requiredRegisterCountForExecutionAndExit()
return std::max(frameRegisterCount(), requiredRegisterCountForExit());
}
-JSValue Graph::tryGetConstantProperty(
- JSValue base, const StructureSet& structureSet, PropertyOffset offset)
+JSActivation* Graph::tryGetActivation(Node* node)
{
- if (!base || !base.isObject())
- return JSValue();
-
- JSObject* object = asObject(base);
-
- for (unsigned i = structureSet.size(); i--;) {
- Structure* structure = structureSet[i];
- assertIsRegistered(structure);
-
- WatchpointSet* set = structure->propertyReplacementWatchpointSet(offset);
- if (!set || !set->isStillValid())
- return JSValue();
-
- ASSERT(structure->isValidOffset(offset));
- ASSERT(!structure->isUncacheableDictionary());
-
- watchpoints().addLazily(set);
- }
-
- // What follows may require some extra thought. We need this load to load a valid JSValue. If
- // our profiling makes sense and we're still on track to generate code that won't be
- // invalidated, then we have nothing to worry about. We do, however, have to worry about
- // loading - and then using - an invalid JSValue in the case that unbeknownst to us our code
- // is doomed.
- //
- // One argument in favor of this code is that it should definitely work because the butterfly
- // is always set before the structure. However, we don't currently have a fence between those
- // stores. It's not clear if this matters, however. We don't ever shrink the property storage.
- // So, for this to fail, you'd need an access on a constant object pointer such that the inline
- // caches told us that the object had a structure that it did not *yet* have, and then later,
- // the object transitioned to that structure that the inline caches had alraedy seen. And then
- // the processor reordered the stores. Seems unlikely and difficult to test. I believe that
- // this is worth revisiting but it isn't worth losing sleep over. Filed:
- // https://bugs.webkit.org/show_bug.cgi?id=134641
- //
- // For now, we just do the minimal thing: defend against the structure right now being
- // incompatible with the getDirect we're trying to do. The easiest way to do that is to
- // determine if the structure belongs to the proven set.
-
- if (!structureSet.contains(object->structure()))
- return JSValue();
-
- return object->getDirect(offset);
-}
-
-JSValue Graph::tryGetConstantProperty(JSValue base, Structure* structure, PropertyOffset offset)
-{
- return tryGetConstantProperty(base, StructureSet(structure), offset);
-}
-
-JSValue Graph::tryGetConstantProperty(
- JSValue base, const StructureAbstractValue& structure, PropertyOffset offset)
-{
- if (structure.isInfinite()) {
- // FIXME: If we just converted the offset to a uid, we could do ObjectPropertyCondition
- // watching to constant-fold the property.
- // https://bugs.webkit.org/show_bug.cgi?id=147271
- return JSValue();
- }
-
- return tryGetConstantProperty(base, structure.set(), offset);
-}
-
-JSValue Graph::tryGetConstantProperty(const AbstractValue& base, PropertyOffset offset)
-{
- return tryGetConstantProperty(base.m_value, base.m_structure, offset);
-}
-
-AbstractValue Graph::inferredValueForProperty(
- const StructureSet& base, UniquedStringImpl* uid, StructureClobberState clobberState)
-{
- AbstractValue result;
- base.forEach(
- [&] (Structure* structure) {
- AbstractValue value;
- value.set(*this, inferredTypeForProperty(structure, uid));
- result.merge(value);
- });
- if (clobberState == StructuresAreClobbered)
- result.clobberStructures();
- return result;
-}
-
-AbstractValue Graph::inferredValueForProperty(
- const AbstractValue& base, UniquedStringImpl* uid, PropertyOffset offset,
- StructureClobberState clobberState)
-{
- if (JSValue value = tryGetConstantProperty(base, offset)) {
- AbstractValue result;
- result.set(*this, *freeze(value), clobberState);
- return result;
- }
-
- if (base.m_structure.isFinite())
- return inferredValueForProperty(base.m_structure.set(), uid, clobberState);
-
- return AbstractValue::heapTop();
+ if (!node->hasConstant())
+ return 0;
+ return jsDynamicCast<JSActivation*>(valueOfJSConstant(node));
}
-JSValue Graph::tryGetConstantClosureVar(JSValue base, ScopeOffset offset)
+WriteBarrierBase<Unknown>* Graph::tryGetRegisters(Node* node)
{
- // This has an awesome concurrency story. See comment for GetGlobalVar in ByteCodeParser.
-
- if (!base)
- return JSValue();
-
- JSLexicalEnvironment* activation = jsDynamicCast<JSLexicalEnvironment*>(base);
+ JSActivation* activation = tryGetActivation(node);
if (!activation)
- return JSValue();
-
- SymbolTable* symbolTable = activation->symbolTable();
- JSValue value;
- WatchpointSet* set;
- {
- ConcurrentJITLocker locker(symbolTable->m_lock);
-
- SymbolTableEntry* entry = symbolTable->entryFor(locker, offset);
- if (!entry)
- return JSValue();
-
- set = entry->watchpointSet();
- if (!set)
- return JSValue();
-
- if (set->state() != IsWatched)
- return JSValue();
-
- ASSERT(entry->scopeOffset() == offset);
- value = activation->variableAt(offset).get();
- if (!value)
- return JSValue();
- }
-
- watchpoints().addLazily(set);
-
- return value;
+ return 0;
+ if (!activation->isTornOff())
+ return 0;
+ return activation->registers();
}
-JSValue Graph::tryGetConstantClosureVar(const AbstractValue& value, ScopeOffset offset)
-{
- return tryGetConstantClosureVar(value.m_value, offset);
-}
-
-JSValue Graph::tryGetConstantClosureVar(Node* node, ScopeOffset offset)
+JSArrayBufferView* Graph::tryGetFoldableView(Node* node)
{
if (!node->hasConstant())
- return JSValue();
- return tryGetConstantClosureVar(node->asJSValue(), offset);
-}
-
-JSArrayBufferView* Graph::tryGetFoldableView(JSValue value)
-{
- if (!value)
- return nullptr;
- JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(value);
- if (!value)
- return nullptr;
- if (!view->length())
- return nullptr;
- WTF::loadLoadFence();
- watchpoints().addLazily(view);
+ return 0;
+ JSArrayBufferView* view = jsDynamicCast<JSArrayBufferView*>(valueOfJSConstant(node));
+ if (!view)
+ return 0;
+ if (!watchpoints().isStillValid(view))
+ return 0;
return view;
}
-JSArrayBufferView* Graph::tryGetFoldableView(JSValue value, ArrayMode arrayMode)
-{
- if (arrayMode.type() != Array::AnyTypedArray && arrayMode.typedArrayType() == NotTypedArray)
- return nullptr;
- return tryGetFoldableView(value);
-}
-
-void Graph::registerFrozenValues()
-{
- m_codeBlock->constants().resize(0);
- m_codeBlock->constantsSourceCodeRepresentation().resize(0);
- for (FrozenValue* value : m_frozenValues) {
- if (!value->pointsToHeap())
- continue;
-
- ASSERT(value->structure());
- ASSERT(m_plan.weakReferences.contains(value->structure()));
-
- switch (value->strength()) {
- case WeakValue: {
- m_plan.weakReferences.addLazily(value->value().asCell());
- break;
- }
- case StrongValue: {
- unsigned constantIndex = m_codeBlock->addConstantLazily();
- // We already have a barrier on the code block.
- m_codeBlock->constants()[constantIndex].setWithoutWriteBarrier(value->value());
- break;
- } }
- }
- m_codeBlock->constants().shrinkToFit();
- m_codeBlock->constantsSourceCodeRepresentation().shrinkToFit();
-}
-
-void Graph::visitChildren(SlotVisitor& visitor)
-{
- for (FrozenValue* value : m_frozenValues) {
- visitor.appendUnbarrieredReadOnlyValue(value->value());
- visitor.appendUnbarrieredReadOnlyPointer(value->structure());
- }
-
- for (BlockIndex blockIndex = numBlocks(); blockIndex--;) {
- BasicBlock* block = this->block(blockIndex);
- if (!block)
- continue;
-
- for (unsigned nodeIndex = 0; nodeIndex < block->size(); ++nodeIndex) {
- Node* node = block->at(nodeIndex);
-
- switch (node->op()) {
- case CheckStructure:
- for (unsigned i = node->structureSet().size(); i--;)
- visitor.appendUnbarrieredReadOnlyPointer(node->structureSet()[i]);
- break;
-
- case NewObject:
- case ArrayifyToStructure:
- case NewStringObject:
- visitor.appendUnbarrieredReadOnlyPointer(node->structure());
- break;
-
- case PutStructure:
- case AllocatePropertyStorage:
- case ReallocatePropertyStorage:
- visitor.appendUnbarrieredReadOnlyPointer(
- node->transition()->previous);
- visitor.appendUnbarrieredReadOnlyPointer(
- node->transition()->next);
- break;
-
- case MultiGetByOffset:
- for (const MultiGetByOffsetCase& getCase : node->multiGetByOffsetData().cases) {
- for (Structure* structure : getCase.set())
- visitor.appendUnbarrieredReadOnlyPointer(structure);
- }
- break;
-
- case MultiPutByOffset:
- for (unsigned i = node->multiPutByOffsetData().variants.size(); i--;) {
- PutByIdVariant& variant = node->multiPutByOffsetData().variants[i];
- const StructureSet& set = variant.oldStructure();
- for (unsigned j = set.size(); j--;)
- visitor.appendUnbarrieredReadOnlyPointer(set[j]);
- if (variant.kind() == PutByIdVariant::Transition)
- visitor.appendUnbarrieredReadOnlyPointer(variant.newStructure());
- }
- break;
-
- default:
- break;
- }
- }
- }
-}
-
-FrozenValue* Graph::freeze(JSValue value)
-{
- if (UNLIKELY(!value))
- return FrozenValue::emptySingleton();
-
- auto result = m_frozenValueMap.add(JSValue::encode(value), nullptr);
- if (LIKELY(!result.isNewEntry))
- return result.iterator->value;
-
- if (value.isUInt32())
- m_uint32ValuesInUse.append(value.asUInt32());
-
- FrozenValue frozenValue = FrozenValue::freeze(value);
- if (Structure* structure = frozenValue.structure())
- registerStructure(structure);
-
- return result.iterator->value = m_frozenValues.add(frozenValue);
-}
-
-FrozenValue* Graph::freezeStrong(JSValue value)
-{
- FrozenValue* result = freeze(value);
- result->strengthenTo(StrongValue);
- return result;
-}
-
-void Graph::convertToConstant(Node* node, FrozenValue* value)
-{
- if (value->structure())
- assertIsRegistered(value->structure());
- node->convertToConstant(value);
-}
-
-void Graph::convertToConstant(Node* node, JSValue value)
-{
- convertToConstant(node, freeze(value));
-}
-
-void Graph::convertToStrongConstant(Node* node, JSValue value)
-{
- convertToConstant(node, freezeStrong(value));
-}
-
-StructureRegistrationResult Graph::registerStructure(Structure* structure)
-{
- m_plan.weakReferences.addLazily(structure);
- if (m_plan.watchpoints.consider(structure))
- return StructureRegisteredAndWatched;
- return StructureRegisteredNormally;
-}
-
-void Graph::assertIsRegistered(Structure* structure)
-{
- // It's convenient to be able to call this with a maybe-null structure.
- if (!structure)
- return;
-
- DFG_ASSERT(*this, nullptr, m_plan.weakReferences.contains(structure));
-
- if (!structure->dfgShouldWatch())
- return;
- if (watchpoints().isWatched(structure->transitionWatchpointSet()))
- return;
-
- DFG_CRASH(*this, nullptr, toCString("Structure ", pointerDump(structure), " is watchable but isn't being watched.").data());
-}
-
-NO_RETURN_DUE_TO_CRASH static void crash(
- Graph& graph, const CString& whileText, const char* file, int line, const char* function,
- const char* assertion)
-{
- startCrashing();
- dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
- dataLog(file, "(", line, ") : ", function, "\n");
- dataLog("\n");
- dataLog(whileText);
- dataLog("Graph at time of failure:\n");
- graph.dump();
- dataLog("\n");
- dataLog("DFG ASSERTION FAILED: ", assertion, "\n");
- dataLog(file, "(", line, ") : ", function, "\n");
- CRASH_WITH_SECURITY_IMPLICATION();
-}
-
-void Graph::handleAssertionFailure(
- std::nullptr_t, const char* file, int line, const char* function, const char* assertion)
-{
- crash(*this, "", file, line, function, assertion);
-}
-
-void Graph::handleAssertionFailure(
- Node* node, const char* file, int line, const char* function, const char* assertion)
-{
- crash(*this, toCString("While handling node ", node, "\n\n"), file, line, function, assertion);
-}
-
-void Graph::handleAssertionFailure(
- BasicBlock* block, const char* file, int line, const char* function, const char* assertion)
-{
- crash(*this, toCString("While handling block ", pointerDump(block), "\n\n"), file, line, function, assertion);
-}
-
-void Graph::ensureDominators()
-{
- if (!m_dominators)
- m_dominators = std::make_unique<Dominators>(*this);
-}
-
-void Graph::ensurePrePostNumbering()
-{
- if (!m_prePostNumbering)
- m_prePostNumbering = std::make_unique<PrePostNumbering>(*this);
-}
-
-void Graph::ensureNaturalLoops()
-{
- ensureDominators();
- if (!m_naturalLoops)
- m_naturalLoops = std::make_unique<NaturalLoops>(*this);
-}
-
-ValueProfile* Graph::valueProfileFor(Node* node)
-{
- if (!node)
- return nullptr;
-
- CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
-
- if (node->hasLocal(*this)) {
- if (!node->local().isArgument())
- return nullptr;
- int argument = node->local().toArgument();
- Node* argumentNode = m_arguments[argument];
- if (!argumentNode)
- return nullptr;
- if (node->variableAccessData() != argumentNode->variableAccessData())
- return nullptr;
- return profiledBlock->valueProfileForArgument(argument);
- }
-
- if (node->hasHeapPrediction())
- return profiledBlock->valueProfileForBytecodeOffset(node->origin.semantic.bytecodeIndex);
-
- return nullptr;
-}
-
-MethodOfGettingAValueProfile Graph::methodOfGettingAValueProfileFor(Node* node)
+JSArrayBufferView* Graph::tryGetFoldableView(Node* node, ArrayMode arrayMode)
{
- if (!node)
- return MethodOfGettingAValueProfile();
-
- if (ValueProfile* valueProfile = valueProfileFor(node))
- return MethodOfGettingAValueProfile(valueProfile);
-
- if (node->op() == GetLocal) {
- CodeBlock* profiledBlock = baselineCodeBlockFor(node->origin.semantic);
-
- return MethodOfGettingAValueProfile::fromLazyOperand(
- profiledBlock,
- LazyOperandValueProfileKey(
- node->origin.semantic.bytecodeIndex, node->local()));
- }
-
- return MethodOfGettingAValueProfile();
+ if (arrayMode.typedArrayType() == NotTypedArray)
+ return 0;
+ return tryGetFoldableView(node);
}
-bool Graph::isStringPrototypeMethodSane(JSObject* stringPrototype, Structure* stringPrototypeStructure, UniquedStringImpl* uid)
+JSArrayBufferView* Graph::tryGetFoldableViewForChild1(Node* node)
{
- unsigned attributesUnused;
- PropertyOffset offset = stringPrototypeStructure->getConcurrently(uid, attributesUnused);
- if (!isValidOffset(offset))
- return false;
-
- JSValue value = tryGetConstantProperty(stringPrototype, stringPrototypeStructure, offset);
- if (!value)
- return false;
-
- JSFunction* function = jsDynamicCast<JSFunction*>(value);
- if (!function)
- return false;
-
- if (function->executable()->intrinsicFor(CodeForCall) != StringPrototypeValueOfIntrinsic)
- return false;
-
- return true;
-}
-
-bool Graph::canOptimizeStringObjectAccess(const CodeOrigin& codeOrigin)
-{
- if (hasExitSite(codeOrigin, NotStringObject))
- return false;
-
- Structure* stringObjectStructure = globalObjectFor(codeOrigin)->stringObjectStructure();
- registerStructure(stringObjectStructure);
- ASSERT(stringObjectStructure->storedPrototype().isObject());
- ASSERT(stringObjectStructure->storedPrototype().asCell()->classInfo() == StringPrototype::info());
-
- FrozenValue* stringPrototypeObjectValue = freeze(stringObjectStructure->storedPrototype());
- StringPrototype* stringPrototypeObject = stringPrototypeObjectValue->dynamicCast<StringPrototype*>();
- Structure* stringPrototypeStructure = stringPrototypeObjectValue->structure();
- if (registerStructure(stringPrototypeStructure) != StructureRegisteredAndWatched)
- return false;
-
- if (stringPrototypeStructure->isDictionary())
- return false;
-
- // We're being conservative here. We want DFG's ToString on StringObject to be
- // used in both numeric contexts (that would call valueOf()) and string contexts
- // (that would call toString()). We don't want the DFG to have to distinguish
- // between the two, just because that seems like it would get confusing. So we
- // just require both methods to be sane.
- if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->valueOf.impl()))
- return false;
- if (!isStringPrototypeMethodSane(stringPrototypeObject, stringPrototypeStructure, m_vm.propertyNames->toString.impl()))
- return false;
-
- return true;
-}
-
-bool Graph::willCatchExceptionInMachineFrame(CodeOrigin codeOrigin, CodeOrigin& opCatchOriginOut, HandlerInfo*& catchHandlerOut)
-{
- if (!m_hasExceptionHandlers)
- return false;
-
- unsigned bytecodeIndexToCheck = codeOrigin.bytecodeIndex;
- while (1) {
- InlineCallFrame* inlineCallFrame = codeOrigin.inlineCallFrame;
- CodeBlock* codeBlock = baselineCodeBlockFor(inlineCallFrame);
- if (HandlerInfo* handler = codeBlock->handlerForBytecodeOffset(bytecodeIndexToCheck)) {
- opCatchOriginOut = CodeOrigin(handler->target, inlineCallFrame);
- catchHandlerOut = handler;
- return true;
- }
-
- if (!inlineCallFrame)
- return false;
-
- bytecodeIndexToCheck = inlineCallFrame->directCaller.bytecodeIndex;
- codeOrigin = codeOrigin.inlineCallFrame->directCaller;
- }
-
- RELEASE_ASSERT_NOT_REACHED();
+ return tryGetFoldableView(child(node, 0).node(), node->arrayMode());
}
} } // namespace JSC::DFG
-#endif // ENABLE(DFG_JIT)
+#endif