File Manager
Current Path:
/usr/src/contrib/llvm-project/llvm/lib/Analysis
usr
/
src
/
contrib
/
llvm-project
/
llvm
/
lib
/
Analysis
/
📁
..
📄
AliasAnalysis.cpp
(33.55 KB)
📄
AliasAnalysisEvaluator.cpp
(15.64 KB)
📄
AliasAnalysisSummary.cpp
(3.49 KB)
📄
AliasAnalysisSummary.h
(10.17 KB)
📄
AliasSetTracker.cpp
(25.86 KB)
📄
Analysis.cpp
(5.29 KB)
📄
AssumeBundleQueries.cpp
(7.96 KB)
📄
AssumptionCache.cpp
(10.94 KB)
📄
BasicAliasAnalysis.cpp
(85.81 KB)
📄
BlockFrequencyInfo.cpp
(12.39 KB)
📄
BlockFrequencyInfoImpl.cpp
(28.6 KB)
📄
BranchProbabilityInfo.cpp
(43.48 KB)
📄
CFG.cpp
(9.9 KB)
📄
CFGPrinter.cpp
(11.2 KB)
📄
CFLAndersAliasAnalysis.cpp
(33.01 KB)
📄
CFLGraph.h
(21.23 KB)
📄
CFLSteensAliasAnalysis.cpp
(13.24 KB)
📄
CGSCCPassManager.cpp
(31.2 KB)
📄
CallGraph.cpp
(12.86 KB)
📄
CallGraphSCCPass.cpp
(26.31 KB)
📄
CallPrinter.cpp
(9.48 KB)
📄
CaptureTracking.cpp
(15.38 KB)
📄
CmpInstAnalysis.cpp
(4.63 KB)
📄
CodeMetrics.cpp
(6.99 KB)
📄
ConstantFolding.cpp
(105.15 KB)
📄
CostModel.cpp
(3.87 KB)
📄
DDG.cpp
(11.29 KB)
📄
Delinearization.cpp
(4.49 KB)
📄
DemandedBits.cpp
(16.27 KB)
📄
DependenceAnalysis.cpp
(150.78 KB)
📄
DependenceGraphBuilder.cpp
(19.24 KB)
📄
DivergenceAnalysis.cpp
(15.59 KB)
📄
DomPrinter.cpp
(9.67 KB)
📄
DomTreeUpdater.cpp
(15.21 KB)
📄
DominanceFrontier.cpp
(3.2 KB)
📄
EHPersonalities.cpp
(5.89 KB)
📄
GlobalsModRef.cpp
(41 KB)
📄
GuardUtils.cpp
(3.27 KB)
📄
HeatUtils.cpp
(2.85 KB)
📄
IVDescriptors.cpp
(42.28 KB)
📄
IVUsers.cpp
(16.12 KB)
📄
IndirectCallPromotionAnalysis.cpp
(4.33 KB)
📄
InlineAdvisor.cpp
(15.28 KB)
📄
InlineCost.cpp
(99.47 KB)
📄
InlineFeaturesAnalysis.cpp
(1.59 KB)
📄
InlineSizeEstimatorAnalysis.cpp
(10.95 KB)
📄
InstCount.cpp
(2.45 KB)
📄
InstructionPrecedenceTracking.cpp
(4.8 KB)
📄
InstructionSimplify.cpp
(216.91 KB)
📄
Interval.cpp
(1.78 KB)
📄
IntervalPartition.cpp
(4.5 KB)
📄
LazyBlockFrequencyInfo.cpp
(2.81 KB)
📄
LazyBranchProbabilityInfo.cpp
(2.96 KB)
📄
LazyCallGraph.cpp
(67.33 KB)
📄
LazyValueInfo.cpp
(76.38 KB)
📄
LegacyDivergenceAnalysis.cpp
(14.82 KB)
📄
Lint.cpp
(29.07 KB)
📄
Loads.cpp
(20.6 KB)
📄
LoopAccessAnalysis.cpp
(88.02 KB)
📄
LoopAnalysisManager.cpp
(6.6 KB)
📄
LoopCacheAnalysis.cpp
(23.53 KB)
📄
LoopInfo.cpp
(37.15 KB)
📄
LoopNestAnalysis.cpp
(10.62 KB)
📄
LoopPass.cpp
(12.89 KB)
📄
LoopUnrollAnalyzer.cpp
(7.26 KB)
📄
MLInlineAdvisor.cpp
(11.36 KB)
📄
MemDepPrinter.cpp
(5.13 KB)
📄
MemDerefPrinter.cpp
(2.53 KB)
📄
MemoryBuiltins.cpp
(41.14 KB)
📄
MemoryDependenceAnalysis.cpp
(69.89 KB)
📄
MemoryLocation.cpp
(7.92 KB)
📄
MemorySSA.cpp
(90.16 KB)
📄
MemorySSAUpdater.cpp
(57.9 KB)
📄
ModuleDebugInfoPrinter.cpp
(4.02 KB)
📄
ModuleSummaryAnalysis.cpp
(38.13 KB)
📄
MustExecute.cpp
(31.18 KB)
📄
ObjCARCAliasAnalysis.cpp
(5.81 KB)
📄
ObjCARCAnalysisUtils.cpp
(1.07 KB)
📄
ObjCARCInstKind.cpp
(23.15 KB)
📄
OptimizationRemarkEmitter.cpp
(4.23 KB)
📄
PHITransAddr.cpp
(16.05 KB)
📄
PhiValues.cpp
(8.4 KB)
📄
PostDominators.cpp
(3.59 KB)
📄
ProfileSummaryInfo.cpp
(18.07 KB)
📄
PtrUseVisitor.cpp
(1.28 KB)
📄
RegionInfo.cpp
(6.5 KB)
📄
RegionPass.cpp
(9.23 KB)
📄
RegionPrinter.cpp
(8.61 KB)
📄
ReleaseModeModelRunner.cpp
(2.83 KB)
📄
ScalarEvolution.cpp
(475.26 KB)
📄
ScalarEvolutionAliasAnalysis.cpp
(5.96 KB)
📄
ScalarEvolutionDivision.cpp
(7.51 KB)
📄
ScalarEvolutionNormalization.cpp
(4.59 KB)
📄
ScopedNoAliasAA.cpp
(7.38 KB)
📄
StackLifetime.cpp
(12.22 KB)
📄
StackSafetyAnalysis.cpp
(31.81 KB)
📄
StratifiedSets.h
(18.67 KB)
📄
SyncDependenceAnalysis.cpp
(12.97 KB)
📄
SyntheticCountsUtils.cpp
(3.81 KB)
📄
TFUtils.cpp
(8.99 KB)
📄
TargetLibraryInfo.cpp
(58.98 KB)
📄
TargetTransformInfo.cpp
(48.15 KB)
📄
Trace.cpp
(1.8 KB)
📄
TypeBasedAliasAnalysis.cpp
(26.04 KB)
📄
TypeMetadataUtils.cpp
(5.93 KB)
📄
VFABIDemangling.cpp
(16.46 KB)
📄
ValueLattice.cpp
(1.19 KB)
📄
ValueLatticeUtils.cpp
(1.53 KB)
📄
ValueTracking.cpp
(243.08 KB)
📄
VectorUtils.cpp
(48.57 KB)
📁
models
Editing: MLInlineAdvisor.cpp
//===- MLInlineAdvisor.cpp - machine learned InlineAdvisor ----------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the interface between the inliner and a learned model.
// It delegates model evaluation to either the AOT compiled model (the
// 'release' mode) or a runtime-loaded model (the 'development' case).
//
//===----------------------------------------------------------------------===//
#include <limits>
#include <unordered_map>
#include <unordered_set>

#include "llvm/ADT/SCCIterator.h"
#include "llvm/Analysis/CallGraph.h"
#include "llvm/Analysis/InlineCost.h"
#include "llvm/Analysis/InlineFeaturesAnalysis.h"
#include "llvm/Analysis/MLInlineAdvisor.h"
#include "llvm/Analysis/MLModelRunner.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Path.h"

using namespace llvm;

#define DEBUG_TYPE "inline-ml"

// Hard stop for misbehaving policies: once the module's estimated IR size
// exceeds this factor of the initial size, all further inlining is blocked
// (see MLInlineAdvisor::onSuccessfulInlining, which sets ForceStop).
static cl::opt<float> SizeIncreaseThreshold(
    "ml-advisor-size-increase-threshold", cl::Hidden,
    cl::desc("Maximum factor by which expected native size may increase before "
             "blocking any further inlining."),
    cl::init(2.0));

// Names of the model features, in FeatureIndex order, generated from the
// X-macro INLINE_FEATURE_ITERATOR (declared elsewhere in the project).
const std::array<std::string, NumberOfFeatures> llvm::FeatureNameMap{
#define POPULATE_NAMES(INDEX_NAME, NAME, COMMENT) NAME,
    INLINE_FEATURE_ITERATOR(POPULATE_NAMES)
#undef POPULATE_NAMES
};

// Tensor/log field names shared with the training side.
const char *const llvm::DecisionName = "inlining_decision";
const char *const llvm::DefaultDecisionName = "inlining_default";
const char *const llvm::RewardName = "delta_size";

// Return I as a CallBase if it is a direct call to a function with a
// definition (i.e. a call the inliner could actually inline); nullptr
// otherwise. Indirect calls and calls to declarations are filtered out.
CallBase *getInlinableCS(Instruction &I) {
  if (auto *CS = dyn_cast<CallBase>(&I))
    if (Function *Callee = CS->getCalledFunction()) {
      if (!Callee->isDeclaration()) {
        return CS;
      }
    }
  return nullptr;
}

// Build the advisor: capture the FunctionAnalysisManager, take ownership of
// the model runner, snapshot the module's initial IR size, and precompute the
// per-function 'level' used for the CallSiteHeight feature.
MLInlineAdvisor::MLInlineAdvisor(Module &M, ModuleAnalysisManager &MAM,
                                 std::unique_ptr<MLModelRunner> Runner)
    : InlineAdvisor(
          MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager()),
      M(M), ModelRunner(std::move(Runner)), CG(new CallGraph(M)),
      InitialIRSize(getModuleIRSize()), CurrentIRSize(InitialIRSize) {
  assert(ModelRunner);

  // Extract the 'call site height' feature - the position of a call site
  // relative to the farthest statically reachable SCC node. We don't mutate
  // this value while inlining happens. Empirically, this feature proved
  // critical in behavioral cloning - i.e. training a model to mimic the manual
  // heuristic's decisions - and, thus, equally important for training for
  // improvement.
  //
  // scc_begin visits SCCs of the call graph bottom-up (callees before
  // callers), so a callee's level is already known when its caller's SCC is
  // processed; all nodes in one SCC share the same level.
  for (auto I = scc_begin(CG.get()); !I.isAtEnd(); ++I) {
    const std::vector<CallGraphNode *> &CGNodes = *I;
    unsigned Level = 0;
    for (auto *CGNode : CGNodes) {
      Function *F = CGNode->getFunction();
      if (!F || F->isDeclaration())
        continue;
      for (auto &I : instructions(F)) {
        if (auto *CS = getInlinableCS(I)) {
          auto *Called = CS->getCalledFunction();
          auto Pos = FunctionLevels.find(Called);
          // In bottom up traversal, an inlinable callee is either in the
          // same SCC, or to a function in a visited SCC. So not finding its
          // level means we haven't visited it yet, meaning it's in this SCC.
          if (Pos == FunctionLevels.end())
            continue;
          Level = std::max(Level, Pos->second + 1);
        }
      }
    }
    // Second pass: record the SCC-wide level for every defined function.
    for (auto *CGNode : CGNodes) {
      Function *F = CGNode->getFunction();
      if (F && !F->isDeclaration())
        FunctionLevels[F] = Level;
    }
  }
}

// Recompute the module-wide node/edge counts from scratch at the start of an
// inliner pass run.
void MLInlineAdvisor::onPassEntry() {
  // Function passes executed between InlinerPass runs may have changed the
  // module-wide features.
  NodeCount = 0;
  EdgeCount = 0;
  for (auto &F : M)
    if (!F.isDeclaration()) {
      ++NodeCount;
      EdgeCount += getLocalCalls(F);
    }
}

// Number of direct calls F makes to functions that have a definition, as
// cached by InlineFeaturesAnalysis.
int64_t MLInlineAdvisor::getLocalCalls(Function &F) {
  return FAM.getResult<InlineFeaturesAnalysis>(F).DirectCallsToDefinedFunctions;
}

// Update the internal state of the advisor, and force invalidate feature
// analysis. Currently, we maintain minimal (and very simple) global state - the
// number of functions and the number of static calls. We also keep track of the
// total IR size in this module, to stop misbehaving policies at a certain bloat
// factor (SizeIncreaseThreshold)
void MLInlineAdvisor::onSuccessfulInlining(const MLInlineAdvice &Advice,
                                           bool CalleeWasDeleted) {
  assert(!ForceStop);
  Function *Caller = Advice.getCaller();
  Function *Callee = Advice.getCallee();

  // The caller features aren't valid anymore.
  FAM.invalidate<InlineFeaturesAnalysis>(*Caller);
  // If the callee survived, its size is unchanged; the caller's size was
  // re-measured after the inline. The delta below replaces the stale
  // pre-inline sizes recorded on the Advice.
  int64_t IRSizeAfter =
      getIRSize(*Caller) + (CalleeWasDeleted ? 0 : Advice.CalleeIRSize);
  CurrentIRSize += IRSizeAfter - (Advice.CallerIRSize + Advice.CalleeIRSize);
  if (CurrentIRSize > SizeIncreaseThreshold * InitialIRSize)
    ForceStop = true;

  // We can delta-update module-wide features. We know the inlining only changed
  // the caller, and maybe the callee (by deleting the latter).
  // Nodes are simple to update.
  // For edges, we 'forget' the edges that the caller and callee used to have
  // before inlining, and add back what they currently have together.
  int64_t NewCallerAndCalleeEdges =
      FAM.getResult<InlineFeaturesAnalysis>(*Caller)
          .DirectCallsToDefinedFunctions;

  if (CalleeWasDeleted)
    --NodeCount;
  else
    NewCallerAndCalleeEdges += FAM.getResult<InlineFeaturesAnalysis>(*Callee)
                                   .DirectCallsToDefinedFunctions;
  EdgeCount += (NewCallerAndCalleeEdges - Advice.CallerAndCalleeEdges);
  assert(CurrentIRSize >= 0 && EdgeCount >= 0 && NodeCount >= 0);
}

// Sum of getIRSize over every defined function in the module. getIRSize is
// declared in the header (not visible here); presumably an instruction count.
int64_t MLInlineAdvisor::getModuleIRSize() const {
  int64_t Ret = 0;
  for (auto &F : CG->getModule())
    if (!F.isDeclaration())
      Ret += getIRSize(F);
  return Ret;
}

// Main entry point for the inliner: decide whether CB should be inlined.
// Fast paths (never-inline, recursion, ForceStop, non-inlinable, mandatory)
// return early; otherwise the feature vector is populated on ModelRunner and
// the decision is delegated to getAdviceFromModel.
std::unique_ptr<InlineAdvice> MLInlineAdvisor::getAdvice(CallBase &CB) {
  auto &Caller = *CB.getCaller();
  auto &Callee = *CB.getCalledFunction();

  auto GetAssumptionCache = [&](Function &F) -> AssumptionCache & {
    return FAM.getResult<AssumptionAnalysis>(F);
  };
  auto GetTLI = [&](Function &F) -> const TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };

  auto &TIR = FAM.getResult<TargetIRAnalysis>(Callee);
  auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(Caller);

  auto TrivialDecision =
      llvm::getAttributeBasedInliningDecision(CB, &Callee, TIR, GetTLI);

  // If this is a "never inline" case, there won't be any changes to internal
  // state we need to track, so we can just return the base InlineAdvice, which
  // will do nothing interesting.
  // Same thing if this is a recursive case.
  if ((TrivialDecision.hasValue() && !TrivialDecision->isSuccess()) ||
      &Caller == &Callee)
    return std::make_unique<InlineAdvice>(this, CB, ORE, false);

  bool Mandatory = TrivialDecision.hasValue() && TrivialDecision->isSuccess();

  // If we need to stop, we won't want to track anymore any state changes, so
  // we just return the base InlineAdvice, which acts as a noop.
  if (ForceStop) {
    ORE.emit([&] {
      return OptimizationRemarkMissed(DEBUG_TYPE, "ForceStop", &CB)
             << "Won't attempt inlining because module size grew too much.";
    });
    return std::make_unique<InlineAdvice>(this, CB, ORE, Mandatory);
  }

  int CostEstimate = 0;
  if (!Mandatory) {
    auto IsCallSiteInlinable =
        llvm::getInliningCostEstimate(CB, TIR, GetAssumptionCache);
    if (!IsCallSiteInlinable) {
      // We can't inline this for correctness reasons, so return the base
      // InlineAdvice, as we don't care about tracking any state changes (which
      // won't happen).
      return std::make_unique<InlineAdvice>(this, CB, ORE, false);
    }
    CostEstimate = *IsCallSiteInlinable;
  }

  if (Mandatory)
    return getMandatoryAdvice(CB, ORE);

  // Count constant arguments at this call site (NrCtantParams feature).
  auto NrCtantParams = 0;
  for (auto I = CB.arg_begin(), E = CB.arg_end(); I != E; ++I) {
    NrCtantParams += (isa<Constant>(*I));
  }

  auto &CallerBefore = FAM.getResult<InlineFeaturesAnalysis>(Caller);
  auto &CalleeBefore = FAM.getResult<InlineFeaturesAnalysis>(Callee);

  // Populate the full feature vector. Note FunctionLevels[&Caller] uses
  // operator[]; the constructor computed a level for every defined function,
  // so this should always find an existing entry.
  ModelRunner->setFeature(FeatureIndex::CalleeBasicBlockCount,
                          CalleeBefore.BasicBlockCount);
  ModelRunner->setFeature(FeatureIndex::CallSiteHeight,
                          FunctionLevels[&Caller]);
  ModelRunner->setFeature(FeatureIndex::NodeCount, NodeCount);
  ModelRunner->setFeature(FeatureIndex::NrCtantParams, NrCtantParams);
  ModelRunner->setFeature(FeatureIndex::CostEstimate, CostEstimate);
  ModelRunner->setFeature(FeatureIndex::EdgeCount, EdgeCount);
  ModelRunner->setFeature(FeatureIndex::CallerUsers, CallerBefore.Uses);
  ModelRunner->setFeature(FeatureIndex::CallerConditionallyExecutedBlocks,
                          CallerBefore.BlocksReachedFromConditionalInstruction);
  ModelRunner->setFeature(FeatureIndex::CallerBasicBlockCount,
                          CallerBefore.BasicBlockCount);
  ModelRunner->setFeature(FeatureIndex::CalleeConditionallyExecutedBlocks,
                          CalleeBefore.BlocksReachedFromConditionalInstruction);
  ModelRunner->setFeature(FeatureIndex::CalleeUsers, CalleeBefore.Uses);
  return getAdviceFromModel(CB, ORE);
}

// Run the (release- or development-mode) model and wrap its boolean decision
// in an MLInlineAdvice.
std::unique_ptr<MLInlineAdvice>
MLInlineAdvisor::getAdviceFromModel(CallBase &CB,
                                    OptimizationRemarkEmitter &ORE) {
  return std::make_unique<MLInlineAdvice>(this, CB, ORE, ModelRunner->run());
}

// Attribute-mandated inlining: always recommend inlining, bypassing the model,
// but still via MLInlineAdvice so state tracking (onSuccessfulInlining) works.
std::unique_ptr<MLInlineAdvice>
MLInlineAdvisor::getMandatoryAdvice(CallBase &CB,
                                    OptimizationRemarkEmitter &ORE) {
  return std::make_unique<MLInlineAdvice>(this, CB, ORE, true);
}

// Append the callee name, the full feature vector, and the decision to a
// remark, so training/debugging tooling can reconstruct the model's input.
void MLInlineAdvice::reportContextForRemark(
    DiagnosticInfoOptimizationBase &OR) {
  using namespace ore;
  OR << NV("Callee", Callee->getName());
  for (size_t I = 0; I < NumberOfFeatures; ++I)
    OR << NV(FeatureNameMap[I], getAdvisor()->getModelRunner().getFeature(I));
  OR << NV("ShouldInline", isInliningRecommended());
}

// Inlining happened and the callee survived: emit a remark and delta-update
// the advisor's module-wide state.
void MLInlineAdvice::recordInliningImpl() {
  ORE.emit([&]() {
    OptimizationRemark R(DEBUG_TYPE, "InliningSuccess", DLoc, Block);
    reportContextForRemark(R);
    return R;
  });
  getAdvisor()->onSuccessfulInlining(*this, /*CalleeWasDeleted*/ false);
}

// Inlining happened and the callee was deleted (last use inlined away).
void MLInlineAdvice::recordInliningWithCalleeDeletedImpl() {
  ORE.emit([&]() {
    OptimizationRemark R(DEBUG_TYPE, "InliningSuccessWithCalleeDeleted", DLoc,
                         Block);
    reportContextForRemark(R);
    return R;
  });
  getAdvisor()->onSuccessfulInlining(*this, /*CalleeWasDeleted*/ true);
}

// Inlining was attempted but failed; no advisor state changed, so only a
// remark is emitted.
void MLInlineAdvice::recordUnsuccessfulInliningImpl(
    const InlineResult &Result) {
  ORE.emit([&]() {
    OptimizationRemarkMissed R(DEBUG_TYPE, "InliningAttemptedAndUnsuccessful",
                               DLoc, Block);
    reportContextForRemark(R);
    return R;
  });
}

// The advice said "don't inline" and the inliner complied.
// NOTE(review): the remark name below has a typo ("Inining" instead of
// "Inlining"). Left unchanged because remark names are matched as strings by
// external tooling; confirm downstream consumers before renaming.
void MLInlineAdvice::recordUnattemptedInliningImpl() {
  ORE.emit([&]() {
    OptimizationRemarkMissed R(DEBUG_TYPE, "IniningNotAttempted", DLoc, Block);
    reportContextForRemark(R);
    return R;
  });
}
Upload File
Create Folder