--------------------------------------------------------------------------------
I1 cache:         65536 B, 64 B, 4-way associative
D1 cache:         32768 B, 64 B, 8-way associative
LL cache:         67108864 B, 64 B, 64-way associative
Command:          /usr/home/liquid/.rustup/toolchains/w-profiling/bin/rustc --crate-name mime src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C opt-level=3 -C embed-bitcode=no -C metadata=8792d82d415984c2 -C extra-filename=-8792d82d415984c2 --out-dir /usr/home/liquid/tmp/.tmpDr5X2t/target/release/deps -L dependency=/usr/home/liquid/tmp/.tmpDr5X2t/target/release/deps -Adeprecated -Aunknown-lints -Zincremental-verify-ich
Data file:        results/cgout-w-profiling-mime-0.3.16-Opt-Full
Events recorded:  Ir
Events shown:     Ir
Event sort order: Ir
Thresholds:       0.1
Include dirs:
User annotated:
Auto-annotation:  on
--------------------------------------------------------------------------------
            Ir
--------------------------------------------------------------------------------
 2,936,820,309 (100.0%)  PROGRAM TOTALS
--------------------------------------------------------------------------------
            Ir  file:function
--------------------------------------------------------------------------------
    90,093,331 ( 3.07%)  ./malloc/malloc.c:_int_free
    87,219,515 ( 2.97%)  ./malloc/malloc.c:_int_malloc
    56,706,325 ( 1.93%)  ./malloc/malloc.c:malloc
    44,754,199 ( 1.52%)  ???:llvm::InstCombinerImpl::run()
    36,303,483 ( 1.24%)  ???:llvm::FPPassManager::runOnFunction(llvm::Function&)
    32,215,799 ( 1.10%)  ???:llvm::AnalysisManager::getResultImpl(llvm::AnalysisKey*, llvm::Function&)
    30,818,264 ( 1.05%)  ???:llvm::SelectionDAG::Combine(llvm::CombineLevel, llvm::AAResults*, llvm::CodeGenOpt::Level)
    30,453,612 ( 1.04%)  ???:combineInstructionsOverFunction(llvm::Function&, llvm::InstCombineWorklist&, llvm::AAResults*, llvm::AssumptionCache&, llvm::TargetLibraryInfo&, llvm::TargetTransformInfo&, llvm::DominatorTree&, llvm::OptimizationRemarkEmitter&, llvm::BlockFrequencyInfo*, llvm::ProfileSummaryInfo*, unsigned int, llvm::LoopInfo*)
    30,120,645 ( 1.03%)  ???:llvm::AnalysisManager::invalidate(llvm::Function&, llvm::PreservedAnalyses const&)
    28,929,138 ( 0.99%)  ./malloc/malloc.c:free
    23,628,659 ( 0.80%)  ???:computeKnownBits(llvm::Value const*, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&) [clone .llvm.15619146473165121143]
    22,338,196 ( 0.76%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::CalculateFromScratch(llvm::DominatorTreeBase&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
    21,197,782 ( 0.72%)  ???:llvm::ValueHandleBase::AddToUseList()
    19,332,453 ( 0.66%)  ???:runCVP(llvm::Module&) [clone .llvm.11785992503873176614]
    18,898,922 ( 0.64%)  ???:(anonymous namespace)::LazyValueInfoImpl::getEdgeValue(llvm::Value*, llvm::BasicBlock*, llvm::BasicBlock*, llvm::Instruction*) [clone .llvm.4316243980339171764]
    18,413,359 ( 0.63%)  ???:(anonymous namespace)::LazyValueInfoImpl::solve() [clone .llvm.4316243980339171764]
    17,410,704 ( 0.59%)  ???:llvm::AttributeList::addAttributes(llvm::LLVMContext&, unsigned int, llvm::AttrBuilder const&) const
    17,179,462 ( 0.58%)  ???:SimplifyICmpInst(unsigned int, llvm::Value*, llvm::Value*, llvm::SimplifyQuery const&, unsigned int) [clone .llvm.1619516508949622737]
    17,141,164 ( 0.58%)  ???:computeKnownBitsFromOperator(llvm::Operator const*, llvm::APInt const&, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
    16,585,697 ( 0.56%)  ???:computeKnownBits(llvm::Value const*, llvm::APInt const&, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
    15,713,033 ( 0.54%)  ???:bool llvm::DenseMapBase*, llvm::DenseMapInfo<(anonymous namespace)::SimpleValue>, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> >, (anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*, llvm::DenseMapInfo<(anonymous namespace)::SimpleValue>, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> >::LookupBucketFor<(anonymous namespace)::SimpleValue>((anonymous namespace)::SimpleValue const&, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> const*&) const
    15,424,003 ( 0.53%)  ???:llvm::InstCombinerImpl::visitICmpInst(llvm::ICmpInst&)
    15,047,813 ( 0.51%)  ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_erms
    14,924,063 ( 0.51%)  ???:llvm::DataLayout::getTypeSizeInBits(llvm::Type*) const
    14,555,043 ( 0.50%)  ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S:__memcmp_avx2_movbe
    14,547,923 ( 0.50%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_avx_unaligned_erms
    14,168,425 ( 0.48%)  ???:llvm::DataLayout::getAlignment(llvm::Type*, bool) const
    13,790,586 ( 0.47%)  ???:llvm::removeUnreachableBlocks(llvm::Function&, llvm::DomTreeUpdater*, llvm::MemorySSAUpdater*)
    13,312,187 ( 0.45%)  ./malloc/malloc.c:malloc_consolidate
    12,707,820 ( 0.43%)  ???:llvm::BasicAAResult::alias(llvm::MemoryLocation const&, llvm::MemoryLocation const&, llvm::AAQueryInfo&)
    12,279,133 ( 0.42%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::runSemiNCA(llvm::DominatorTreeBase&, unsigned int)
    12,238,068 ( 0.42%)  ???:computeKnownBitsFromAssume(llvm::Value const*, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
    11,389,844 ( 0.39%)  ???:llvm::TargetLibraryInfoImpl::getLibFunc(llvm::Function const&, llvm::LibFunc&) const
    11,377,560 ( 0.39%)  ???:llvm::LoopBase::verifyLoop() const
    11,136,534 ( 0.38%)  ???:llvm::SimplifyInstruction(llvm::Instruction*, llvm::SimplifyQuery const&, llvm::OptimizationRemarkEmitter*)
    10,731,930 ( 0.37%)  ???:llvm::LoopBase::getExitBlocks(llvm::SmallVectorImpl&) const
    10,553,956 ( 0.36%)  ???:llvm::BitstreamCursor::readRecord(unsigned int, llvm::SmallVectorImpl&, llvm::StringRef*)
    10,309,110 ( 0.35%)  ./elf/dl-lookup.c:_dl_lookup_symbol_x
    10,283,344 ( 0.35%)  ???:llvm::PassRegistry::enumerateWith(llvm::PassRegistrationListener*)
     9,625,195 ( 0.33%)  ???:llvm::simplifyCFG(llvm::BasicBlock*, llvm::TargetTransformInfo const&, llvm::DomTreeUpdater*, llvm::SimplifyCFGOptions const&, llvm::ArrayRef)
     9,602,410 ( 0.33%)  ???:llvm::InstCombinerImpl::SimplifyDemandedUseBits(llvm::Value*, llvm::APInt, llvm::KnownBits&, unsigned int, llvm::Instruction*)
     9,554,193 ( 0.33%)  ???:llvm::InlineFunction(llvm::CallBase&, llvm::InlineFunctionInfo&, llvm::AAResults*, bool, llvm::Function*)
     9,527,256 ( 0.32%)  ./malloc/malloc.c:unlink_chunk.constprop.0
     9,455,552 ( 0.32%)  ???:llvm::LiveVariables::runOnBlock(llvm::MachineBasicBlock*, unsigned int)
     9,239,758 ( 0.31%)  /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc:operator new(unsigned long)
     9,030,295 ( 0.31%)  ???:(anonymous namespace)::SimplifyCFGOpt::simplifyCondBranch(llvm::BranchInst*, llvm::IRBuilder&)
     8,972,352 ( 0.31%)  ???:llvm::coro::declaresIntrinsics(llvm::Module const&, std::initializer_list)
     8,660,796 ( 0.29%)  ???:isKnownNonZero(llvm::Value const*, llvm::APInt const&, unsigned int, (anonymous namespace)::Query const&) [clone .llvm.15619146473165121143]
     8,536,842 ( 0.29%)  ???:llvm::FindFunctionBackedges(llvm::Function const&, llvm::SmallVectorImpl >&)
     8,241,142 ( 0.28%)  ???:llvm::SelectionDAGISel::SelectCodeCommon(llvm::SDNode*, unsigned char const*, unsigned int)
     8,188,158 ( 0.28%)  ./string/../sysdeps/x86_64/multiarch/strcmp-avx2.S:__strncmp_avx2
     7,822,360 ( 0.27%)  ???:llvm::SelectionDAG::computeKnownBits(llvm::SDValue, llvm::APInt const&, unsigned int) const
     7,760,261 ( 0.26%)  ???:(anonymous namespace)::MachineCopyPropagation::runOnMachineFunction(llvm::MachineFunction&)
     7,732,801 ( 0.26%)  ???:llvm::LiveIntervals::extendSegmentsToUses(llvm::LiveRange&, llvm::SmallVector, 16u>&, llvm::Register, llvm::LaneBitmask)
     7,638,559 ( 0.26%)  ???:llvm::MemoryDependenceResults::getNonLocalPointerDepFromBB(llvm::Instruction*, llvm::PHITransAddr const&, llvm::MemoryLocation const&, bool, llvm::BasicBlock*, llvm::SmallVectorImpl&, llvm::DenseMap, llvm::detail::DenseMapPair >&, bool, bool)
     7,471,919 ( 0.25%)  ???:llvm::detail::PassModel>, llvm::PreservedAnalyses, llvm::AnalysisManager>::run(llvm::Function&, llvm::AnalysisManager&)
     7,225,080 ( 0.25%)  ???:(anonymous namespace)::EarlyCSE::run() [clone .llvm.7062997131228810369]
     7,169,346 ( 0.24%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:>::process_obligations::>
     7,155,204 ( 0.24%)  ???:llvm::InstCombinerImpl::visitCallInst(llvm::CallInst&)
     7,137,696 ( 0.24%)  ???:llvm::X86TargetLowering::X86TargetLowering(llvm::X86TargetMachine const&, llvm::X86Subtarget const&)
     6,938,477 ( 0.24%)  ???:llvm::ScalarEvolution::getAddExpr(llvm::SmallVectorImpl&, llvm::SCEV::NoWrapFlags, unsigned int)
     6,867,773 ( 0.23%)  ???:llvm::ConstantRange::makeExactICmpRegion(llvm::CmpInst::Predicate, llvm::APInt const&)
     6,667,490 ( 0.23%)  ???:llvm::SmallPtrSetImplBase::insert_imp_big(void const*)
     6,602,495 ( 0.22%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::CalculateFromScratch(llvm::DominatorTreeBase&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
     6,596,258 ( 0.22%)  ???:llvm::Loop::isLCSSAForm(llvm::DominatorTree const&) const
     6,476,432 ( 0.22%)  ???:llvm::GVN::processBlock(llvm::BasicBlock*)
     6,466,456 ( 0.22%)  ???:llvm::AttributeList::addAttribute(llvm::LLVMContext&, unsigned int, llvm::Attribute::AttrKind) const
     6,429,376 ( 0.22%)  ???:llvm::MemorySSA::buildMemorySSA(llvm::BatchAAResults&)
     6,412,216 ( 0.22%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::FindRoots(llvm::DominatorTreeBase const&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
     6,389,441 ( 0.22%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_trait_selection/src/traits/fulfill.rs:>::process_obligations::>
     6,271,654 ( 0.21%)  ???:llvm::SimplifyGEPInst(llvm::Type*, llvm::ArrayRef, llvm::SimplifyQuery const&)
     6,105,920 ( 0.21%)  ???:llvm::TargetLoweringBase::computeRegisterProperties(llvm::TargetRegisterInfo const*)
     6,094,110 ( 0.21%)  ???:llvm::InterferenceCache::Entry::update(unsigned int)
     6,030,079 ( 0.21%)  ???:llvm::PMDataManager::verifyPreservedAnalysis(llvm::Pass*)
     5,970,697 ( 0.20%)  ???:llvm::LoopInfoBase::analyze(llvm::DominatorTreeBase const&)
     5,891,483 ( 0.20%)  ???:llvm::FoldBranchToCommonDest(llvm::BranchInst*, llvm::DomTreeUpdater*, llvm::MemorySSAUpdater*, llvm::TargetTransformInfo const*, unsigned int)
     5,880,320 ( 0.20%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/ena-0.14.0/src/unify/mod.rs:>, &mut rustc_infer::infer::undo_log::InferCtxtUndoLogs>>>::uninlined_get_root_key
     5,866,678 ( 0.20%)  ???:(anonymous namespace)::CVPLatticeFunc::ComputeInstructionState(llvm::Instruction&, llvm::DenseMap, llvm::PointerIntPairInfo > >, (anonymous namespace)::CVPLatticeVal, llvm::DenseMapInfo, llvm::PointerIntPairInfo > > >, llvm::detail::DenseMapPair, llvm::PointerIntPairInfo > >, (anonymous namespace)::CVPLatticeVal> >&, llvm::SparseSolver, llvm::PointerIntPairInfo > >, (anonymous namespace)::CVPLatticeVal, llvm::LatticeKeyInfo, llvm::PointerIntPairInfo > > > >&)
     5,853,947 ( 0.20%)  ???:llvm::ScalarEvolution::getSCEV(llvm::Value*)
     5,773,123 ( 0.20%)  ???:llvm::isNonEscapingLocalObject(llvm::Value const*, llvm::SmallDenseMap, llvm::detail::DenseMapPair >*)
     5,695,817 ( 0.19%)  ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_unaligned_erms
     5,686,075 ( 0.19%)  ???:llvm::FoldingSetBase::FindNodeOrInsertPos(llvm::FoldingSetNodeID const&, void*&, llvm::FoldingSetBase::FoldingSetInfo const&)
     5,556,796 ( 0.19%)  ???:llvm::PopulateLoopsDFS::traverse(llvm::BasicBlock*)
     5,390,179 ( 0.18%)  ???:llvm::AttributeSetNode::get(llvm::LLVMContext&, llvm::AttrBuilder const&)
     5,224,799 ( 0.18%)  ???:(anonymous namespace)::PruningFunctionCloner::CloneBlock(llvm::BasicBlock const*, llvm::ilist_iterator, false, true>, std::vector >&)
     5,190,516 ( 0.18%)  ???:(anonymous namespace)::LazyValueInfoImpl::getValueInBlock(llvm::Value*, llvm::BasicBlock*, llvm::Instruction*) [clone .llvm.4316243980339171764]
     5,137,565 ( 0.17%)  ???:(anonymous namespace)::DAGCombiner::combine(llvm::SDNode*)
     5,137,396 ( 0.17%)  ./stdlib/msort.c:msort_with_tmp.part.0
     5,034,437 ( 0.17%)  ???:llvm::TargetLoweringBase::getTypeConversion(llvm::LLVMContext&, llvm::EVT) const
     5,008,112 ( 0.17%)  ???:(anonymous namespace)::Verifier::visitInstruction(llvm::Instruction&)
     4,992,761 ( 0.17%)  ???:llvm::JumpThreadingPass::processBlock(llvm::BasicBlock*)
     4,888,669 ( 0.17%)  ???:llvm::ScalarEvolution::forgetLoop(llvm::Loop const*)
     4,867,554 ( 0.17%)  ???:(anonymous namespace)::RAGreedy::growRegion((anonymous namespace)::RAGreedy::GlobalSplitCandidate&)
     4,845,521 ( 0.16%)  ./elf/dl-lookup.c:do_lookup_x
     4,826,893 ( 0.16%)  ???:llvm::Value::stripAndAccumulateConstantOffsets(llvm::DataLayout const&, llvm::APInt&, bool, llvm::function_ref) const
     4,796,474 ( 0.16%)  ???:llvm::IDFCalculatorBase::calculate(llvm::SmallVectorImpl&)
     4,671,689 ( 0.16%)  ???:llvm::BlockFrequencyInfoImpl::initializeRPOT()
     4,663,922 ( 0.16%)  ???:(anonymous namespace)::AggressiveDeadCodeElimination::performDeadCodeElimination()
     4,546,672 ( 0.15%)  ???:llvm::LivePhysRegs::stepBackward(llvm::MachineInstr const&)
     4,540,336 ( 0.15%)  ???:llvm::ScheduleDAGSDNodes::BuildSchedUnits()
     4,502,072 ( 0.15%)  ???:llvm::MachineInstr::addOperand(llvm::MachineFunction&, llvm::MachineOperand const&)
     4,466,178 ( 0.15%)  ???:???
     4,442,526 ( 0.15%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/ena-0.14.0/src/unify/mod.rs:>::process_obligations::>
     4,403,892 ( 0.15%)  ???:llvm::SROA::runOnAlloca(llvm::AllocaInst&)
     4,395,616 ( 0.15%)  ???:llvm::Type::getPrimitiveSizeInBits() const
     4,384,963 ( 0.15%)  ???:llvm::Loop::isRecursivelyLCSSAForm(llvm::DominatorTree const&, llvm::LoopInfo const&) const
     4,331,718 ( 0.15%)  ???:llvm::DemandedBits::isInstructionDead(llvm::Instruction*)
     4,312,074 ( 0.15%)  ./string/../sysdeps/x86_64/multiarch/strlen-avx2.S:__strlen_avx2
     4,284,907 ( 0.15%)  ???:llvm::DenseMapBase, std::unique_ptr<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry, std::default_delete<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry> >, llvm::DenseMapInfo >, llvm::detail::DenseMapPair, std::unique_ptr<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry, std::default_delete<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry> > > >, llvm::PoisoningVH, std::unique_ptr<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry, std::default_delete<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry> >, llvm::DenseMapInfo >, llvm::detail::DenseMapPair, std::unique_ptr<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry, std::default_delete<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry> > > >::destroyAll() [clone .llvm.4316243980339171764]
     4,266,591 ( 0.15%)  ???:llvm::AttributeList::get(llvm::LLVMContext&, llvm::ArrayRef)
     4,251,100 ( 0.14%)  ???:(anonymous namespace)::eliminateDeadStores(llvm::Function&, llvm::AAResults&, llvm::MemorySSA&, llvm::DominatorTree&, llvm::PostDominatorTree&, llvm::TargetLibraryInfo const&, llvm::LoopInfo const&) [clone .llvm.5769264623867638418]
     4,239,971 ( 0.14%)  ???:llvm::PMTopLevelManager::setLastUser(llvm::ArrayRef, llvm::Pass*)
     4,230,565 ( 0.14%)  ???:llvm::InstCombinerImpl::visitLoadInst(llvm::LoadInst&)
     4,230,346 ( 0.14%)  ???:llvm::ScalarEvolution::getRangeRef(llvm::SCEV const*, llvm::ScalarEvolution::RangeSignHint)
     4,169,196 ( 0.14%)  ???:llvm::SCCPInstVisitor::solve()
     4,147,316 ( 0.14%)  ???:llvm::PMDataManager::removeNotPreservedAnalysis(llvm::Pass*)
     4,141,271 ( 0.14%)  ./elf/../sysdeps/x86_64/dl-machine.h:_dl_relocate_object
     4,071,798 ( 0.14%)  ???:(anonymous namespace)::DeadMachineInstructionElim::eliminateDeadMI(llvm::MachineFunction&)
     4,056,377 ( 0.14%)  ???:SetImpliedBits(llvm::FeatureBitset&, llvm::FeatureBitset const&, llvm::ArrayRef)
     4,047,396 ( 0.14%)  ???:llvm::ConstantFoldTerminator(llvm::BasicBlock*, bool, llvm::TargetLibraryInfo const*, llvm::DomTreeUpdater*)
     4,041,443 ( 0.14%)  ???:llvm::LoopBase::hasDedicatedExits() const
     3,954,348 ( 0.13%)  ???:llvm::InstCombinerImpl::visitStoreInst(llvm::StoreInst&)
     3,941,820 ( 0.13%)  ???:llvm::X86InstrInfo::analyzeBranch(llvm::MachineBasicBlock&, llvm::MachineBasicBlock*&, llvm::MachineBasicBlock*&, llvm::SmallVectorImpl&, bool) const
     3,888,047 ( 0.13%)  ???:llvm::BlockFrequencyInfoImplBase::finalizeMetrics()
     3,851,664 ( 0.13%)  ???:SimplifyOrInst(llvm::Value*, llvm::Value*, llvm::SimplifyQuery const&, unsigned int) [clone .llvm.1619516508949622737]
     3,779,180 ( 0.13%)  ???:(anonymous namespace)::ClobberWalker::addSearches(llvm::MemoryPhi*, llvm::SmallVectorImpl&, unsigned int)
     3,763,350 ( 0.13%)  ???:updateCGAndAnalysisManagerForPass(llvm::LazyCallGraph&, llvm::LazyCallGraph::SCC&, llvm::LazyCallGraph::Node&, llvm::AnalysisManager&, llvm::CGSCCUpdateResult&, llvm::AnalysisManager&, bool) [clone .llvm.5426518467876156712]
     3,731,613 ( 0.13%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::runSemiNCA(llvm::DominatorTreeBase&, unsigned int)
     3,719,506 ( 0.13%)  ???:std::back_insert_iterator > > std::__copy_move_a2, false, llvm::GraphTraits >, std::back_insert_iterator > > >(llvm::po_iterator, false, llvm::GraphTraits >, llvm::po_iterator, false, llvm::GraphTraits >, std::back_insert_iterator > >)
     3,687,273 ( 0.13%)  ???:llvm::Instruction::~Instruction()
     3,616,080 ( 0.12%)  ???:llvm::PMTopLevelManager::AUFoldingSetNode::Profile(llvm::FoldingSetNodeID&, llvm::AnalysisUsage const&)
     3,603,492 ( 0.12%)  ???:llvm::MemorySSA::OptimizeUses::optimizeUses()
     3,599,076 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cell.rs:>::process_obligations::>
     3,571,313 ( 0.12%)  ???:llvm::Type::isSizedDerivedType(llvm::SmallPtrSetImpl*) const
     3,560,664 ( 0.12%)  ???:llvm::Intrinsic::getDeclaration(llvm::Module*, unsigned int, llvm::ArrayRef)
     3,544,940 ( 0.12%)  ???:llvm::DAGTypeLegalizer::run()
     3,518,191 ( 0.12%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
     3,503,789 ( 0.12%)  ???:llvm::TargetLowering::SimplifyDemandedBits(llvm::SDValue, llvm::APInt const&, llvm::APInt const&, llvm::KnownBits&, llvm::TargetLowering::TargetLoweringOpt&, unsigned int, bool) const
     3,461,660 ( 0.12%)  ???:runImpl(llvm::Function&, llvm::LazyValueInfo*, llvm::DominatorTree*, llvm::SimplifyQuery const&) [clone .llvm.16011871802505272439]
     3,440,964 ( 0.12%)  ???:llvm::Instruction::eraseFromParent()
     3,436,720 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/obligation_forest/mod.rs:>::process_obligations::>
     3,417,802 ( 0.12%)  ???:llvm::KnownBits::computeForAddSub(bool, bool, llvm::KnownBits const&, llvm::KnownBits)
     3,403,164 ( 0.12%)  ???:llvm::formLCSSAForInstructions(llvm::SmallVectorImpl&, llvm::DominatorTree const&, llvm::LoopInfo const&, llvm::ScalarEvolution*, llvm::IRBuilderBase&, llvm::SmallVectorImpl*)
     3,388,240 ( 0.12%)  ???:(anonymous namespace)::GetCFGOnlyPasses::passEnumerate(llvm::PassInfo const*) [clone .llvm.764396836974782617]
     3,351,039 ( 0.11%)  ???:llvm::AAResults::Model::pointsToConstantMemory(llvm::MemoryLocation const&, llvm::AAQueryInfo&, bool)
     3,332,668 ( 0.11%)  ???:llvm::computeConstantRange(llvm::Value const*, bool, llvm::AssumptionCache*, llvm::Instruction const*, unsigned int)
     3,254,737 ( 0.11%)  ./malloc/malloc.c:realloc
     3,236,635 ( 0.11%)  ???:llvm::SelectionDAG::Legalize()
     3,236,474 ( 0.11%)  ???:llvm::SROA::runImpl(llvm::Function&, llvm::DominatorTree&, llvm::AssumptionCache&)
     3,156,335 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs:>::process_obligations::>
     3,133,676 ( 0.11%)  ???:std::back_insert_iterator > > std::__copy_move_a, false, llvm::GraphTraits >, std::back_insert_iterator > > >(llvm::po_iterator, false, llvm::GraphTraits >, llvm::po_iterator, false, llvm::GraphTraits >, std::back_insert_iterator > >)
     3,111,755 ( 0.11%)  ???:llvm::LiveRangeCalc::findReachingDefs(llvm::LiveRange&, llvm::MachineBasicBlock&, llvm::SlotIndex, unsigned int, llvm::ArrayRef)
     3,070,980 ( 0.10%)  ???:llvm::SelectionDAG::getConstant(llvm::ConstantInt const&, llvm::SDLoc const&, llvm::EVT, bool, bool)
     3,063,845 ( 0.10%)  ???:llvm::LivePhysRegs::addPristines(llvm::MachineFunction const&)
     3,037,428 ( 0.10%)  ???:llvm::SROA::rewritePartition(llvm::AllocaInst&, llvm::sroa::AllocaSlices&, llvm::sroa::Partition&)
     3,012,297 ( 0.10%)  ???:llvm::AAResults::getModRefInfo(llvm::Instruction const*, llvm::Optional const&, llvm::AAQueryInfo&)
     2,999,230 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_trait_selection/src/traits/select/mod.rs:>::process_obligations::>
     2,996,759 ( 0.10%)  ???:llvm::ReachingDefAnalysis::enterBasicBlock(llvm::MachineBasicBlock*)
     2,992,891 ( 0.10%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_sse2_unaligned_erms
     2,969,585 ( 0.10%)  ???:llvm::ReassociatePass::BuildRankMap(llvm::Function&, llvm::ReversePostOrderTraversal >&)
     2,943,448 ( 0.10%)  ???:(anonymous namespace)::SelectionDAGLegalize::LegalizeOp(llvm::SDNode*) [clone .llvm.8386621111310650999]
--------------------------------------------------------------------------------
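--------------------------------------------------------------------------------
-- Note: the rows above follow a fixed "Ir (percent) file:function" shape, so
-- they are straightforward to post-process. The Rust sketch below is an
-- editorial illustration, not part of the cg_annotate output; the function
-- name and the decision to skip the PROGRAM TOTALS row are assumptions.
--------------------------------------------------------------------------------

use std::collections::HashMap;

/// Sum the `Ir` column of rows shaped like
/// `90,093,331 ( 3.07%)  ./malloc/malloc.c:_int_free`, keyed by file.
fn aggregate_by_file(table: &str) -> HashMap<String, u64> {
    let mut by_file: HashMap<String, u64> = HashMap::new();
    for line in table.lines() {
        if line.ends_with("PROGRAM TOTALS") {
            continue; // summary row, not a per-function entry
        }
        // The count precedes the "(", with thousands separators.
        let Some((count_part, rest)) = line.split_once('(') else { continue };
        let Ok(count) = count_part.trim().replace(',', "").parse::<u64>() else { continue };
        // Everything after the closing ")" is the `file:function` field;
        // the file is the part before the first ':'.
        let Some((_, sym)) = rest.split_once(')') else { continue };
        let Some(file) = sym.trim().split(':').next() else { continue };
        *by_file.entry(file.to_string()).or_insert(0) += count;
    }
    by_file
}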
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/obligation_forest/mod.rs
--------------------------------------------------------------------------------
Ir

-- line 121 ----------------------------------------
         .  #[derive(Debug)]
         .  pub enum ProcessResult<O, E> {
         .      Unchanged,
         .      Changed(Vec<O>),
         .      Error(E),
         .  }
         .
         .  #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
     7,265 ( 0.00%)  struct ObligationTreeId(usize);
         .
         .  type ObligationTreeIdGenerator =
         .      std::iter::Map<std::ops::RangeFrom<usize>, fn(usize) -> ObligationTreeId>;
         .
         .  pub struct ObligationForest<O: ForestObligation> {
         .      /// The list of obligations. In between calls to `process_obligations`,
         .      /// this list only contains nodes in the `Pending` or `Waiting` state.
         .      ///
-- line 137 ----------------------------------------
-- line 181 ----------------------------------------
         .      has_parent: bool,
         .
         .      /// Identifier of the obligation tree to which this node belongs.
         .      obligation_tree_id: ObligationTreeId,
         .  }
         .
         .  impl<O> Node<O> {
         .      fn new(parent: Option<usize>, obligation: O, obligation_tree_id: ObligationTreeId) -> Node<O> {
    89,504 ( 0.00%)          Node {
         .              obligation,
         .              state: Cell::new(NodeState::Pending),
    11,723 ( 0.00%)              dependents: if let Some(parent_index) = parent { vec![parent_index] } else { vec![] },
         .              has_parent: parent.is_some(),
         .              obligation_tree_id,
         .          }
         .      }
         .  }
         .
         .  /// The state of one node in some tree within the forest. This represents the
         .  /// current state of processing for the obligation (of type `O`) associated
-- line 200 ----------------------------------------
-- line 223 ----------------------------------------
         .  ///      |   compress()
         .  ///      v
         .  ///   (Removed)
         .  /// ```
         .  /// The `Error` state can be introduced in several places, via `error_at()`.
         .  ///
         .  /// Outside of `ObligationForest` methods, nodes should be either `Pending` or
         .  /// `Waiting`.
   544,595 ( 0.02%)  #[derive(Debug, Copy, Clone, PartialEq, Eq)]
         .  enum NodeState {
         .      /// This obligation has not yet been selected successfully. Cannot have
         .      /// subobligations.
         .      Pending,
         .
         .      /// This obligation was selected successfully, but may or may not have
         .      /// subobligations.
         .      Success,
-- line 239 ----------------------------------------
-- line 279 ----------------------------------------
         .      pub stalled: bool,
         .  }
         .
         .  impl<O, E> OutcomeTrait for Outcome<O, E> {
         .      type Error = Error<O, E>;
         .      type Obligation = O;
         .
         .      fn new() -> Self {
    18,810 ( 0.00%)          Self { stalled: true, errors: vec![] }
         .      }
         .
         .      fn mark_not_stalled(&mut self) {
     5,590 ( 0.00%)          self.stalled = false;
         .      }
         .
         .      fn is_stalled(&self) -> bool {
     4,412 ( 0.00%)          self.stalled
         .      }
         .
         .      fn record_completed(&mut self, _outcome: &Self::Obligation) {
         .          // do nothing
         .      }
         .
         .      fn record_error(&mut self, error: Self::Error) {
     3,232 ( 0.00%)          self.errors.push(error)
         .      }
         .  }
         .
         .  #[derive(Debug, PartialEq, Eq)]
         .  pub struct Error<O, E> {
         .      pub error: E,
         .      pub backtrace: Vec<O>,
         .  }
         .
         .  impl<O: ForestObligation> ObligationForest<O> {
     2,758 ( 0.00%)      pub fn new() -> ObligationForest<O> {
    33,096 ( 0.00%)          ObligationForest {
         .              nodes: vec![],
         .              done_cache: Default::default(),
         .              active_cache: Default::default(),
         .              reused_node_vec: vec![],
         .              obligation_tree_id_generator: (0..).map(ObligationTreeId),
         .              error_cache: Default::default(),
         .          }
     2,758 ( 0.00%)      }
         .
         .      /// Returns the total number of nodes in the forest that have not
         .      /// yet been fully resolved.
         .      pub fn len(&self) -> usize {
         .          self.nodes.len()
         .      }
         .
         .      /// Registers an obligation.
         .      pub fn register_obligation(&mut self, obligation: O) {
         .          // Ignore errors here - there is no guarantee of success.
    52,546 ( 0.00%)          let _ = self.register_obligation_at(obligation, None);
         .      }
         .
         .      // Returns Err(()) if we already know this obligation failed.
    70,829 ( 0.00%)      fn register_obligation_at(&mut self, obligation: O, parent: Option<usize>) -> Result<(), ()> {
    12,878 ( 0.00%)          let cache_key = obligation.as_cache_key();
    12,878 ( 0.00%)          if self.done_cache.contains(&cache_key) {
         .              debug!("register_obligation_at: ignoring already done obligation: {:?}", obligation);
         .              return Ok(());
         .          }
         .
    35,598 ( 0.00%)          match self.active_cache.entry(cache_key) {
         .              Entry::Occupied(o) => {
       678 ( 0.00%)                  let node = &mut self.nodes[*o.get()];
       678 ( 0.00%)                  if let Some(parent_index) = parent {
         .                      // If the node is already in `active_cache`, it has already
         .                      // had its chance to be marked with a parent. So if it's
         .                      // not already present, just dump `parent` into the
         .                      // dependents as a non-parent.
       585 ( 0.00%)                      if !node.dependents.contains(&parent_index) {
         .                          node.dependents.push(parent_index);
         .                      }
         .                  }
     1,017 ( 0.00%)                  if let NodeState::Error = node.state.get() { Err(()) } else { Ok(()) }
         .              }
    16,782 ( 0.00%)              Entry::Vacant(v) => {
    33,564 ( 0.00%)                  let obligation_tree_id = match parent {
     4,086 ( 0.00%)                      Some(parent_index) => self.nodes[parent_index].obligation_tree_id,
         .                      None => self.obligation_tree_id_generator.next().unwrap(),
         .                  };
         .
         .                  let already_failed = parent.is_some()
         .                      && self
         .                          .error_cache
         .                          .get(&obligation_tree_id)
         .                          .map_or(false, |errors| errors.contains(v.key()));
         .
         .                  if already_failed {
         .                      Err(())
         .                  } else {
     5,594 ( 0.00%)                      let new_index = self.nodes.len();
         .                      v.insert(new_index);
         .                      self.nodes.push(Node::new(parent, obligation, obligation_tree_id));
         .                      Ok(())
         .                  }
         .              }
         .          }
    57,951 ( 0.00%)      }
         .
         .      /// Converts all remaining obligations to the given error.
    26,082 ( 0.00%)      pub fn to_errors<E: Clone>(&mut self, error: E) -> Vec<Error<O, E>> {
         .          let errors = self
         .              .nodes
         .              .iter()
         .              .enumerate()
         .              .filter(|(_index, node)| node.state.get() == NodeState::Pending)
         .              .map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) })
         .              .collect();
         .
     7,452 ( 0.00%)          self.compress(|_| assert!(false));
         .          errors
    22,356 ( 0.00%)      }
         .
         .      /// Returns the set of obligations that are in a pending state.
         .      pub fn map_pending_obligations<P, F>(&self, f: F) -> Vec<P>
         .      where
         .          F: Fn(&O) -> P,
         .      {
         5 ( 0.00%)          self.nodes
         .              .iter()
         .              .filter(|node| node.state.get() == NodeState::Pending)
         .              .map(|node| f(&node.obligation))
         .              .collect()
         .      }
         .
     2,555 ( 0.00%)      fn insert_into_error_cache(&mut self, index: usize) {
         .          let node = &self.nodes[index];
       365 ( 0.00%)          self.error_cache
     1,095 ( 0.00%)              .entry(node.obligation_tree_id)
         .              .or_default()
         .              .insert(node.obligation.as_cache_key());
     2,920 ( 0.00%)      }
         .
         .      /// Performs a pass through the obligation list. This must
         .      /// be called in a loop until `outcome.stalled` is false.
         .      ///
         .      /// This _cannot_ be unrolled (presently, at least).
         .      #[inline(never)]
    84,645 ( 0.00%)      pub fn process_obligations<P, OUT>(&mut self, processor: &mut P) -> OUT
         .      where
         .          P: ObligationProcessor<Obligation = O>,
         .          OUT: OutcomeTrait<Obligation = O, Error = Error<O, P::Error>>,
         .      {
         .          let mut outcome = OUT::new();
         .
         .          // Note that the loop body can append new nodes, and those new nodes
         .          // will then be processed by subsequent iterations of the loop.
         .          //
         .          // We can't use an iterator for the loop because `self.nodes` is
         .          // appended to and the borrow checker would complain. We also can't use
         .          // `for index in 0..self.nodes.len() { ... }` because the range would
         .          // be computed with the initial length, and we would miss the appended
         .          // nodes. Therefore we use a `while` loop.
         .          let mut index = 0;
   321,110 ( 0.01%)          while let Some(node) = self.nodes.get_mut(index) {
         .              // `processor.process_obligation` can modify the predicate within
         .              // `node.obligation`, and that predicate is the key used for
         .              // `self.active_cache`. This means that `self.active_cache` can get
         .              // out of sync with `nodes`. It's not very common, but it does
         .              // happen, and code in `compress` has to allow for it.
   623,410 ( 0.02%)              if node.state.get() != NodeState::Pending {
     1,620 ( 0.00%)                  index += 1;
         .                  continue;
         .              }
         .
    32,510 ( 0.00%)              match processor.process_obligation(&mut node.obligation) {
         .                  ProcessResult::Unchanged => {
         .                      // No change in state.
         .                  }
    21,552 ( 0.00%)                  ProcessResult::Changed(children) => {
         .                      // We are not (yet) stalled.
         .                      outcome.mark_not_stalled();
         .                      node.state.set(NodeState::Success);
         .
    26,346 ( 0.00%)                      for child in children {
    40,749 ( 0.00%)                          let st = self.register_obligation_at(child, Some(index));
     4,794 ( 0.00%)                          if let Err(()) = st {
         .                              // Error already reported - propagate it
         .                              // to our node.
         .                              self.error_at(index);
         .                          }
         .                      }
         .                  }
         .                  ProcessResult::Error(err) => {
         .                      outcome.mark_not_stalled();
     6,666 ( 0.00%)                      outcome.record_error(Error { error: err, backtrace: self.error_at(index) });
         .                  }
         .              }
   620,170 ( 0.02%)              index += 1;
         .          }
         .
         .          // There's no need to perform marking, cycle processing and compression when nothing
         .          // changed.
     4,412 ( 0.00%)          if !outcome.is_stalled() {
         .              self.mark_successes();
         .              self.process_cycles(processor);
     3,820 ( 0.00%)              self.compress(|obl| outcome.record_completed(obl));
         .          }
         .
         .          outcome
    84,645 ( 0.00%)      }
         .
         .      /// Returns a vector of obligations for `p` and all of its
         .      /// ancestors, putting them into the error state in the process.
     1,414 ( 0.00%)      fn error_at(&self, mut index: usize) -> Vec<O> {
         .          let mut error_stack: Vec<usize> = vec![];
         .          let mut trace = vec![];
         .
         .          loop {
         .              let node = &self.nodes[index];
       365 ( 0.00%)              node.state.set(NodeState::Error);
         .              trace.push(node.obligation.clone());
       730 ( 0.00%)              if node.has_parent {
         .                  // The first dependent is the parent, which is treated
         .                  // specially.
         .                  error_stack.extend(node.dependents.iter().skip(1));
       163 ( 0.00%)                  index = node.dependents[0];
         .              } else {
         .                  // No parent; treat all dependents non-specially.
         .                  error_stack.extend(node.dependents.iter());
         .                  break;
         .              }
         .          }
         .
         .          while let Some(index) = error_stack.pop() {
-- line 508 ----------------------------------------
-- line 509 ----------------------------------------
         .              let node = &self.nodes[index];
         .              if node.state.get() != NodeState::Error {
         .                  node.state.set(NodeState::Error);
         .                  error_stack.extend(node.dependents.iter());
         .              }
         .          }
         .
         .          trace
     1,616 ( 0.00%)      }
         .
         .      /// Mark all `Waiting` nodes as `Success`, except those that depend on a
         .      /// pending node.
         .      fn mark_successes(&self) {
         .          // Convert all `Waiting` nodes to `Success`.
         .          for node in &self.nodes {
    98,791 ( 0.00%)              if node.state.get() == NodeState::Waiting {
         .                  node.state.set(NodeState::Success);
         .              }
         .          }
         .
         .          // Convert `Success` nodes that depend on a pending node back to
         .          // `Waiting`.
         .          for node in &self.nodes {
    74,915 ( 0.00%)              if node.state.get() == NodeState::Pending {
         .                  // This call site is hot.
         .                  self.inlined_mark_dependents_as_waiting(node);
         .              }
         .          }
         .      }
         .
         .      // This always-inlined function is for the hot call site.
         .      #[inline(always)]
         .      fn inlined_mark_dependents_as_waiting(&self, node: &Node<O>) {
       596 ( 0.00%)          for &index in node.dependents.iter() {
         .              let node = &self.nodes[index];
       596 ( 0.00%)              let state = node.state.get();
       596 ( 0.00%)              if state == NodeState::Success {
         .                  // This call site is cold.
     1,389 ( 0.00%)                  self.uninlined_mark_dependents_as_waiting(node);
         .              } else {
         .                  debug_assert!(state == NodeState::Waiting || state == NodeState::Error)
         .              }
         .          }
         .      }
         .
         .      // This never-inlined function is for the cold call site.
         .      #[inline(never)]
     3,241 ( 0.00%)      fn uninlined_mark_dependents_as_waiting(&self, node: &Node<O>) {
         .          // Mark node Waiting in the cold uninlined code instead of the hot inlined
         .          node.state.set(NodeState::Waiting);
         .          self.inlined_mark_dependents_as_waiting(node)
     3,704 ( 0.00%)      }
         .
         .      /// Report cycles between all `Success` nodes, and convert all `Success`
         .      /// nodes to `Done`. This must be called after `mark_successes`.
         .      fn process_cycles<P>(&mut self, processor: &mut P)
         .      where
         .          P: ObligationProcessor<Obligation = O>,
         .      {
     1,910 ( 0.00%)          let mut stack = std::mem::take(&mut self.reused_node_vec);
         .          for (index, node) in self.nodes.iter().enumerate() {
         .              // For some benchmarks this state test is extremely hot. It's a win
         .              // to handle the no-op cases immediately to avoid the cost of the
         .              // function call.
    74,915 ( 0.00%)              if node.state.get() == NodeState::Success {
    25,720 ( 0.00%)                  self.find_cycles_from_node(&mut stack, processor, index);
         .              }
         .          }
         .
         .          debug_assert!(stack.is_empty());
    15,280 ( 0.00%)          self.reused_node_vec = stack;
         .      }
         .
    64,647 ( 0.00%)      fn find_cycles_from_node<P>(&self, stack: &mut Vec<usize>, processor: &mut P, index: usize)
         .      where
         .          P: ObligationProcessor<Obligation = O>,
         .      {
         .          let node = &self.nodes[index];
    14,366 ( 0.00%)          if node.state.get() == NodeState::Success {
       162 ( 0.00%)              match stack.iter().rposition(|&n| n == index) {
         .                  None => {
         .                      stack.push(index);
     2,039 ( 0.00%)                      for &dep_index in node.dependents.iter() {
     8,156 ( 0.00%)                          self.find_cycles_from_node(stack, processor, dep_index);
         .                      }
         .                      stack.pop();
         .                      node.state.set(NodeState::Done);
         .                  }
         .                  Some(rpos) => {
         .                      // Cycle detected.
         .                      processor.process_backedge(
         .                          stack[rpos..].iter().map(|&i| &self.nodes[i].obligation),
         .                          PhantomData,
         .                      );
         .                  }
         .              }
         .          }
    57,464 ( 0.00%)      }
         .
         .      /// Compresses the vector, removing all popped nodes. This adjusts the
         .      /// indices and hence invalidates any outstanding indices. `process_cycles`
         .      /// must be run beforehand to remove any cycles on `Success` nodes.
         .      #[inline(never)]
    41,362 ( 0.00%)      fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) {
     5,636 ( 0.00%)          let orig_nodes_len = self.nodes.len();
         .          let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec);
         .          debug_assert!(node_rewrites.is_empty());
         .          node_rewrites.extend(0..orig_nodes_len);
         .          let mut dead_nodes = 0;
         .
         .          // Move removable nodes to the end, preserving the order of the
         .          // remaining nodes.
         .          //
         .          // LOOP INVARIANT:
         .          //     self.nodes[0..index - dead_nodes] are the first remaining nodes
         .          //     self.nodes[index - dead_nodes..index] are all dead
         .          //     self.nodes[index..] are unchanged
         .          for index in 0..orig_nodes_len {
         .              let node = &self.nodes[index];
   382,215 ( 0.01%)              match node.state.get() {
         .                  NodeState::Pending | NodeState::Waiting => {
   138,650 ( 0.00%)                      if dead_nodes > 0 {
     3,801 ( 0.00%)                          self.nodes.swap(index, index - dead_nodes);
     6,335 ( 0.00%)                          node_rewrites[index] -= dead_nodes;
         .                      }
         .                  }
         .                  NodeState::Done => {
         .                      // This lookup can fail because the contents of
         .                      // `self.active_cache` are not guaranteed to match those of
         .                      // `self.nodes`. See the comment in `process_obligation`
         .                      // for more details.
    19,421 ( 0.00%)                      if let Some((predicate, _)) =
    15,675 ( 0.00%)                          self.active_cache.remove_entry(&node.obligation.as_cache_key())
         .                      {
         .                          self.done_cache.insert(predicate);
         .                      } else {
         .                          self.done_cache.insert(node.obligation.as_cache_key().clone());
         .                      }
         .                      // Extract the success stories.
         .                      outcome_cb(&node.obligation);
    10,450 ( 0.00%)                      node_rewrites[index] = orig_nodes_len;
    10,450 ( 0.00%)                      dead_nodes += 1;
         .                  }
         .                  NodeState::Error => {
         .                      // We *intentionally* remove the node from the cache at this point. Otherwise
         .                      // tests must come up with a different type on every type error they
         .                      // check against.
     1,095 ( 0.00%)                      self.active_cache.remove(&node.obligation.as_cache_key());
       730 ( 0.00%)                      self.insert_into_error_cache(index);
       730 ( 0.00%)                      node_rewrites[index] = orig_nodes_len;
       730 ( 0.00%)                      dead_nodes += 1;
         .                  }
         .                  NodeState::Success => unreachable!(),
         .              }
         .          }
         .
         .          if dead_nodes > 0 {
         .              // Remove the dead nodes and rewrite indices.
     3,758 ( 0.00%)              self.nodes.truncate(orig_nodes_len - dead_nodes);
     1,879 ( 0.00%)              self.apply_rewrites(&node_rewrites);
         .          }
         .
         .          node_rewrites.truncate(0);
    22,544 ( 0.00%)          self.reused_node_vec = node_rewrites;
    45,088 ( 0.00%)      }
         .
         .      #[inline(never)]
    16,911 ( 0.00%)      fn apply_rewrites(&mut self, node_rewrites: &[usize]) {
         .          let orig_nodes_len = node_rewrites.len();
         .
         .          for node in &mut self.nodes {
         .              let mut i = 0;
    69,439 ( 0.00%)              while let Some(dependent) = node.dependents.get_mut(i) {
     2,116 ( 0.00%)                  let new_index = node_rewrites[*dependent];
     1,058 ( 0.00%)                  if new_index >= orig_nodes_len {
         .                      node.dependents.swap_remove(i);
         .                      if i == 0 && node.has_parent {
         .                          // We just removed the parent.
         .                          node.has_parent = false;
         .                      }
         .                  } else {
       529 ( 0.00%)                      *dependent = new_index;
     1,058 ( 0.00%)                      i += 1;
         .                  }
         .              }
         .          }
         .
         .          // This updating of `self.active_cache` is necessary because the
         .          // removal of nodes within `compress` can fail. See above.
         .          self.active_cache.retain(|_predicate, index| {
   281,556 ( 0.01%)              let new_index = node_rewrites[*index];
   140,778 ( 0.00%)              if new_index >= orig_nodes_len {
         .                  false
         .              } else {
    68,910 ( 0.00%)                  *index = new_index;
         .                  true
         .              }
         .          });
    15,032 ( 0.00%)      }
         .  }
   753,704 ( 0.03%)
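--------------------------------------------------------------------------------
-- Note: the `compress` loop above is a standard in-place compaction: slide
-- live nodes over dead ones, truncate, then rewrite all stored indices. The
-- following is an editorial sketch of that technique with simplified types
-- (a bool "dead" mask instead of `NodeState`); it is not the rustc code.
--------------------------------------------------------------------------------

/// Compact `nodes`, dropping entries flagged in `dead` while preserving the
/// order of survivors. Returns a rewrite table mapping old indices to new
/// ones, with `usize::MAX` marking removed entries (the role `orig_nodes_len`
/// plays in the annotated source above).
fn compress<T>(nodes: &mut Vec<T>, dead: &[bool]) -> Vec<usize> {
    assert_eq!(nodes.len(), dead.len());
    let orig_len = nodes.len();
    let mut rewrites: Vec<usize> = (0..orig_len).collect();
    let mut dead_count = 0;
    for index in 0..orig_len {
        if dead[index] {
            rewrites[index] = usize::MAX; // tombstone: consumers drop this edge
            dead_count += 1;
        } else if dead_count > 0 {
            // Invariant: nodes[index - dead_count .. index] are all dead, so
            // the swap keeps every live node in its original relative order.
            nodes.swap(index, index - dead_count);
            rewrites[index] -= dead_count;
        }
    }
    nodes.truncate(orig_len - dead_count);
    rewrites
}

Stored indices (such as `node.dependents` above) are then patched through the
returned table, which is exactly what `apply_rewrites` does in the listing.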
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cell.rs
--------------------------------------------------------------------------------
Ir

-- line 346 ----------------------------------------
         .      ///
         .      /// let c = Cell::new(5);
         .      ///
         .      /// c.set(10);
         .      /// ```
         .      #[inline]
         .      #[stable(feature = "rust1", since = "1.0.0")]
         .      pub fn set(&self, val: T) {
       146 ( 0.00%)          let old = self.replace(val);
       552 ( 0.00%)          drop(old);
         .      }
         .
         .      /// Swaps the values of two `Cell`s.
         .      /// Difference with `std::mem::swap` is that this function doesn't require `&mut` reference.
         .      ///
         .      /// # Examples
         .      ///
         .      /// ```
-- line 363 ----------------------------------------
-- line 434 ----------------------------------------
         .      ///
         .      /// let five = c.get();
         .      /// ```
         .      #[inline]
         .      #[stable(feature = "rust1", since = "1.0.0")]
         .      pub fn get(&self) -> T {
         .          // SAFETY: This can cause data races if called from a separate thread,
         .          // but `Cell` is `!Sync` so this won't happen.
    44,730 ( 0.00%)          unsafe { *self.value.get() }
         .      }
         .
         .      /// Updates the contained value using a function and returns the new value.
         .      ///
         .      /// # Examples
         .      ///
         .      /// ```
         .      /// #![feature(cell_update)]
-- line 450 ----------------------------------------
-- line 689 ----------------------------------------
         .
         .  #[inline(always)]
         .  fn is_writing(x: BorrowFlag) -> bool {
         .      x < UNUSED
         .  }
         .
         .  #[inline(always)]
         .  fn is_reading(x: BorrowFlag) -> bool {
    84,721 ( 0.00%)      x > UNUSED
         .  }
         .
         .  impl<T> RefCell<T> {
         .      /// Creates a new `RefCell` containing `value`.
         .      ///
         .      /// # Examples
         .      ///
         .      /// ```
-- line 705 ----------------------------------------
-- line 706 ----------------------------------------
         .      /// use std::cell::RefCell;
         .      ///
         .      /// let c = RefCell::new(5);
         .      /// ```
         .      #[stable(feature = "rust1", since = "1.0.0")]
         .      #[rustc_const_stable(feature = "const_refcell_new", since = "1.24.0")]
         .      #[inline]
         .      pub const fn new(value: T) -> RefCell<T> {
    22,096 ( 0.00%)          RefCell {
    17,803 ( 0.00%)              value: UnsafeCell::new(value),
         .              borrow: Cell::new(UNUSED),
         .              #[cfg(feature = "debug_refcell")]
         .              borrowed_at: Cell::new(None),
         .          }
         .      }
         .
         .      /// Consumes the `RefCell`, returning the wrapped value.
         .      ///
-- line 723 ----------------------------------------
-- line 731 ----------------------------------------
         .      /// let five = c.into_inner();
         .      /// ```
         .      #[stable(feature = "rust1", since = "1.0.0")]
         .      #[rustc_const_unstable(feature = "const_cell_into_inner", issue = "78729")]
         .      #[inline]
         .      pub const fn into_inner(self) -> T {
         .          // Since this function takes `self` (the `RefCell`) by value, the
         .          // compiler statically verifies that it is not currently borrowed.
       508 ( 0.00%)          self.value.into_inner()
         .      }
         .
         .      /// Replaces the wrapped value with a new one, returning the old value,
         .      /// without deinitializing either one.
         .      ///
         .      /// This function corresponds to [`std::mem::replace`](../mem/fn.replace.html).
         .      ///
         .      /// # Panics
-- line 747 ----------------------------------------
-- line 845 ----------------------------------------
         .      /// let c = RefCell::new(5);
         .      ///
         .      /// let m = c.borrow_mut();
         .      /// let b = c.borrow(); // this causes a panic
         .      /// ```
         .      #[stable(feature = "rust1", since = "1.0.0")]
         .      #[inline]
         .      #[track_caller]
     1,092 ( 0.00%)      pub fn borrow(&self) -> Ref<'_, T> {
         .          self.try_borrow().expect("already mutably borrowed")
     3,276 ( 0.00%)      }
         .
         .      /// Immutably borrows the wrapped value, returning an error if the value is currently mutably
         .      /// borrowed.
         .      ///
         .      /// The borrow lasts until the returned `Ref` exits scope. Multiple immutable borrows can be
         .      /// taken out at the same time.
         .      ///
         .      /// This is the non-panicking variant of [`borrow`](#method.borrow).
-- line 863 ----------------------------------------
-- line 937 ----------------------------------------
         .      /// let c = RefCell::new(5);
         .      /// let m = c.borrow();
         .      ///
         .      /// let b = c.borrow_mut(); // this causes a panic
         .      /// ```
         .      #[stable(feature = "rust1", since = "1.0.0")]
         .      #[inline]
         .      #[track_caller]
     5,674 ( 0.00%)      pub fn borrow_mut(&self) -> RefMut<'_, T> {
         .          self.try_borrow_mut().expect("already borrowed")
    17,022 ( 0.00%)      }
         .
         .      /// Mutably borrows the wrapped value, returning an error if the value is currently borrowed.
         .      ///
         .      /// The borrow lasts until the returned `RefMut` or all `RefMut`s derived
         .      /// from it exit scope. The value cannot be borrowed while this borrow is
         .      /// active.
         .      ///
         .      /// This is the non-panicking variant of [`borrow_mut`](#method.borrow_mut).
-- line 955 ----------------------------------------
-- line 975 ----------------------------------------
         .          match BorrowRefMut::new(&self.borrow) {
         .              Some(b) => {
         .                  #[cfg(feature = "debug_refcell")]
         .                  {
         .                      self.borrowed_at.set(Some(crate::panic::Location::caller()));
         .                  }
         .
         .                  // SAFETY: `BorrowRef` guarantees unique access.
       487 ( 0.00%)              Ok(RefMut { value: unsafe { &mut *self.value.get() }, borrow: b })
         .              }
         .              None => Err(BorrowMutError {
         .                  // If a borrow occurred, then we must already have an outstanding borrow,
         .                  // so `borrowed_at` will be `Some`
         .                  #[cfg(feature = "debug_refcell")]
         .                  location: self.borrowed_at.get().unwrap(),
         .              }),
         .          }
-- line 991 ----------------------------------------
-- line 1254 ----------------------------------------
         .
         .  struct BorrowRef<'b> {
         .      borrow: &'b Cell<BorrowFlag>,
         .  }
         .
         .  impl<'b> BorrowRef<'b> {
         .      #[inline]
         .      fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRef<'b>> {
   116,641 ( 0.00%)          let b = borrow.get().wrapping_add(1);
    73,705 ( 0.00%)          if !is_reading(b) {
         .              // Incrementing borrow can result in a non-reading value (<= 0) in these cases:
         .              // 1. It was < 0, i.e. there are writing borrows, so we can't allow a read borrow
         .              //    due to Rust's reference aliasing rules
         .              // 2. It was isize::MAX (the max amount of reading borrows) and it overflowed
         .              //    into isize::MIN (the max amount of writing borrows) so we can't allow
         .              //    an additional read borrow because isize can't represent so many read borrows
         .              //    (this can only happen if you mem::forget more than a small constant amount of
         .              //    `Ref`s, which is not good practice)
-- line 1271 ----------------------------------------
-- line 1279 ----------------------------------------
         .              Some(BorrowRef { borrow })
         .          }
         .      }
         .
         .  impl Drop for BorrowRef<'_> {
         .      #[inline]
         .      fn drop(&mut self) {
    48,747 ( 0.00%)          let borrow = self.borrow.get();
         .          debug_assert!(is_reading(borrow));
   102,075 ( 0.00%)          self.borrow.set(borrow - 1);
         .      }
         .  }
         .
         .  impl Clone for BorrowRef<'_> {
         .      #[inline]
         .      fn clone(&self) -> Self {
         .          // Since this Ref exists, we know the borrow flag
         .          // is a reading borrow.
-- line 1297 ----------------------------------------
-- line 1645 ----------------------------------------
         .
         .  struct BorrowRefMut<'b> {
         .      borrow: &'b Cell<BorrowFlag>,
         .  }
         .
         .  impl Drop for BorrowRefMut<'_> {
         .      #[inline]
         .      fn drop(&mut self) {
 1,560,386 ( 0.05%)          let borrow = self.borrow.get();
         .          debug_assert!(is_writing(borrow));
 3,104,756 ( 0.11%)          self.borrow.set(borrow + 1);
         .      }
         .  }
         .
         .  impl<'b> BorrowRefMut<'b> {
         .      #[inline]
         .      fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRefMut<'b>> {
         .          // NOTE: Unlike BorrowRefMut::clone, new is called to create the initial
         .          // mutable reference, and so there must currently be no existing
         .          // references. Thus, while clone increments the mutable refcount, here
         .          // we explicitly only allow going from UNUSED to UNUSED - 1.
 3,281,476 ( 0.11%)          match borrow.get() {
         .              UNUSED => {
         .                  borrow.set(UNUSED - 1);
         .                  Some(BorrowRefMut { borrow })
         .              }
         .              _ => None,
         .          }
         .      }
         .
-- line 1674 ----------------------------------------
-- line 1711 ----------------------------------------
         .          self.value
         .      }
         .  }
         .
         .  #[stable(feature = "rust1", since = "1.0.0")]
         .  impl<T: ?Sized> DerefMut for RefMut<'_, T> {
         .      #[inline]
         .      fn deref_mut(&mut self) -> &mut T {
     1,207 ( 0.00%)          self.value
         .      }
         .  }
         .
         .  #[unstable(feature = "coerce_unsized", issue = "27732")]
         .  impl<'b, T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<RefMut<'b, U>> for RefMut<'b, T> {}
         .
         .  #[stable(feature = "std_guard_impls", since = "1.20.0")]
         .  impl<T: ?Sized + fmt::Display> fmt::Display for RefMut<'_, T> {
-- line 1727 ----------------------------------------
-- line 1909 ----------------------------------------
         .      /// ```
         .      #[inline(always)]
         .      #[stable(feature = "rust1", since = "1.0.0")]
         .      #[rustc_const_stable(feature = "const_unsafecell_get", since = "1.32.0")]
         .      pub const fn get(&self) -> *mut T {
         .          // We can just cast the pointer from `UnsafeCell<T>` to `T` because of
         .          // #[repr(transparent)]. This exploits libstd's special status, there is
         .          // no guarantee for user code that this will work in future versions of the compiler!
   672,063 ( 0.02%)          self as *const UnsafeCell<T> as *const T as *mut T
         .      }
         .
         .      /// Returns a mutable reference to the underlying data.
         .      ///
         .      /// This call borrows the `UnsafeCell` mutably (at compile-time) which
         .      /// guarantees that we possess the only reference.
         .      ///
         .      /// # Examples
-- line 1925 ----------------------------------------
 2,211,071 ( 0.08%)
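--------------------------------------------------------------------------------
-- Note: the hottest lines in this file are the `RefCell` borrow-flag updates;
-- the two annotated lines of `BorrowRefMut::drop` alone account for ~0.16% of
-- all instructions. The flag is a single `isize`: positive values count
-- readers, negative values writers. Below is an editorial sketch of the
-- scheme on a plain `&mut` flag; it is not the libcore implementation.
--------------------------------------------------------------------------------

type BorrowFlag = isize;
const UNUSED: BorrowFlag = 0;

fn is_writing(x: BorrowFlag) -> bool { x < UNUSED }
fn is_reading(x: BorrowFlag) -> bool { x > UNUSED }

/// Shared borrow: increment, unless a writer is active or the reader count
/// would overflow into the writer range (mirrors `BorrowRef::new` above).
fn try_read(flag: &mut BorrowFlag) -> bool {
    let b = flag.wrapping_add(1);
    if !is_reading(b) {
        return false;
    }
    *flag = b;
    true
}

/// Unique borrow: only allowed from the UNUSED state (mirrors
/// `BorrowRefMut::new`); releasing it is the hot `set(borrow + 1)` above.
fn try_write(flag: &mut BorrowFlag) -> bool {
    if *flag != UNUSED {
        return false;
    }
    *flag = UNUSED - 1;
    true
}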
/// `select_where_possible`, and the fulfillment context will try to do -- line 49 ---------------------------------------- -- line 79 ---------------------------------------- . // outside of any snapshot, so any use of it inside a snapshot . // will lead to trouble and therefore is checked against, but . // other fulfillment contexts sometimes do live inside of . // a snapshot (they don't *straddle* a snapshot, so there . // is no trouble there). . usable_in_snapshot: bool, . } . 3,650 ( 0.00%) #[derive(Clone, Debug)] . pub struct PendingPredicateObligation<'tcx> { . pub obligation: PredicateObligation<'tcx>, . // This is far more often read than modified, meaning that we . // should mostly optimize for reading speed, while modifying is not as relevant. . // . // For whatever reason using a boxed slice is slower than using a `Vec` here. . pub stalled_on: Vec>, . } . . // `PendingPredicateObligation` is used a lot. Make sure it doesn't unintentionally get bigger. . #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] . static_assert_size!(PendingPredicateObligation<'_>, 72); . . impl<'a, 'tcx> FulfillmentContext<'tcx> { . /// Creates a new fulfillment context. 874 ( 0.00%) pub fn new() -> FulfillmentContext<'tcx> { 11,694 ( 0.00%) FulfillmentContext { 1,362 ( 0.00%) predicates: ObligationForest::new(), . relationships: FxHashMap::default(), . register_region_obligations: true, . usable_in_snapshot: false, . } 874 ( 0.00%) } . . pub fn new_in_snapshot() -> FulfillmentContext<'tcx> { 15 ( 0.00%) FulfillmentContext { 5 ( 0.00%) predicates: ObligationForest::new(), . relationships: FxHashMap::default(), . register_region_obligations: true, . usable_in_snapshot: true, . } . } . . pub fn new_ignoring_regions() -> FulfillmentContext<'tcx> { 5,600 ( 0.00%) FulfillmentContext { 1,400 ( 0.00%) predicates: ObligationForest::new(), . relationships: FxHashMap::default(), . register_region_obligations: false, . usable_in_snapshot: false, . } . } . . /// Attempts to select obligations using `selcx`. . fn select(&mut self, selcx: &mut SelectionContext<'a, 'tcx>) -> Vec> { 14,990 ( 0.00%) let span = debug_span!("select", obligation_forest_size = ?self.predicates.len()); . let _enter = span.enter(); . . let mut errors = Vec::new(); . . loop { . debug!("select: starting another iteration"); . . // Process pending obligations. . let outcome: Outcome<_, _> = 65,835 ( 0.00%) self.predicates.process_obligations(&mut FulfillProcessor { . selcx, 9,405 ( 0.00%) register_region_obligations: self.register_region_obligations, . }); . debug!("select: outcome={:#?}", outcome); . . // FIXME: if we kept the original cache key, we could mark projection . // obligations as complete for the projection cache here. . . errors.extend(outcome.errors.into_iter().map(to_fulfillment_error)); . . // If nothing new was added, no need to keep looping. 18,810 ( 0.00%) if outcome.stalled { . break; . } . } . . debug!( . "select({} predicates remaining, {} errors) done", . self.predicates.len(), . errors.len() -- line 162 ---------------------------------------- -- line 169 ---------------------------------------- . impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> { . /// "Normalize" a projection type `::X` by . /// creating a fresh type variable `$0` as well as a projection . /// predicate `::X == $0`. When the . /// inference engine runs, it will attempt to find an impl of . /// `SomeTrait` or a where-clause that lets us unify `$0` with . /// something concrete. If this fails, we'll unify `$0` with . 
/// `projection_ty` again. 110 ( 0.00%) #[tracing::instrument(level = "debug", skip(self, infcx, param_env, cause))] . fn normalize_projection_type( . &mut self, . infcx: &InferCtxt<'_, 'tcx>, . param_env: ty::ParamEnv<'tcx>, . projection_ty: ty::ProjectionTy<'tcx>, . cause: ObligationCause<'tcx>, . ) -> Ty<'tcx> { . debug_assert!(!projection_ty.has_escaping_bound_vars()); . . // FIXME(#20304) -- cache . . let mut selcx = SelectionContext::new(infcx); . let mut obligations = vec![]; 45 ( 0.00%) let normalized_ty = project::normalize_projection_type( . &mut selcx, . param_env, . projection_ty, 25 ( 0.00%) cause, . 0, . &mut obligations, . ); . self.register_predicate_obligations(infcx, obligations); . . debug!(?normalized_ty); . . normalized_ty . } . 24,252 ( 0.00%) fn register_predicate_obligation( . &mut self, . infcx: &InferCtxt<'_, 'tcx>, . obligation: PredicateObligation<'tcx>, . ) { . // this helps to reduce duplicate errors, as well as making . // debug output much nicer to read and so on. 12,126 ( 0.00%) let obligation = infcx.resolve_vars_if_possible(obligation); . . debug!(?obligation, "register_predicate_obligation"); . 24,252 ( 0.00%) assert!(!infcx.is_in_snapshot() || self.usable_in_snapshot); . 12,126 ( 0.00%) super::relationships::update(self, infcx, &obligation); . . self.predicates . .register_obligation(PendingPredicateObligation { obligation, stalled_on: vec![] }); 20,210 ( 0.00%) } . 31,128 ( 0.00%) fn select_all_or_error(&mut self, infcx: &InferCtxt<'_, 'tcx>) -> Vec> { . { 3,891 ( 0.00%) let errors = self.select_where_possible(infcx); 3,891 ( 0.00%) if !errors.is_empty() { 825 ( 0.00%) return errors; . } . } . 18,630 ( 0.00%) self.predicates.to_errors(CodeAmbiguity).into_iter().map(to_fulfillment_error).collect() 27,237 ( 0.00%) } . 74,950 ( 0.00%) fn select_where_possible( . &mut self, . infcx: &InferCtxt<'_, 'tcx>, . ) -> Vec> { . let mut selcx = SelectionContext::new(infcx); . self.select(&mut selcx) 67,455 ( 0.00%) } . 69 ( 0.00%) fn pending_obligations(&self) -> Vec> { . self.predicates.map_pending_obligations(|o| o.obligation.clone()) 92 ( 0.00%) } . . fn relationships(&mut self) -> &mut FxHashMap { 11 ( 0.00%) &mut self.relationships 11 ( 0.00%) } . } . . struct FulfillProcessor<'a, 'b, 'tcx> { . selcx: &'a mut SelectionContext<'b, 'tcx>, . register_region_obligations: bool, . } . . fn mk_pending(os: Vec>) -> Vec> { -- line 258 ---------------------------------------- -- line 275 ---------------------------------------- . #[inline(always)] . fn process_obligation( . &mut self, . pending_obligation: &mut Self::Obligation, . ) -> ProcessResult { . // If we were stalled on some unresolved variables, first check whether . // any of them have been resolved; if not, don't bother doing more work . // yet. 1,843,728 ( 0.06%) let change = match pending_obligation.stalled_on.len() { . // Match arms are in order of frequency, which matters because this . // code is so hot. 1 and 0 dominate; 2+ is fairly rare. . 1 => { 26,619 ( 0.00%) let infer_var = pending_obligation.stalled_on[0]; 8,873 ( 0.00%) self.selcx.infcx().ty_or_const_infer_var_changed(infer_var) . } . 0 => { . // In this case we haven't changed, but wish to make a change. . true . } . _ => { . // This `for` loop was once a call to `all()`, but this lower-level . // form was a perf win. See #64545 for details. 886,854 ( 0.03%) (|| { 1,772,919 ( 0.06%) for &infer_var in &pending_obligation.stalled_on { 589,824 ( 0.02%) if self.selcx.infcx().ty_or_const_infer_var_changed(infer_var) { . return true; . } . } . 
false . })() . } . }; . -- line 307 ---------------------------------------- -- line 309 ---------------------------------------- . debug!( . "process_predicate: pending obligation {:?} still stalled on {:?}", . self.selcx.infcx().resolve_vars_if_possible(pending_obligation.obligation.clone()), . pending_obligation.stalled_on . ); . return ProcessResult::Unchanged; . } . 21,330 ( 0.00%) self.progress_changed_obligations(pending_obligation) . } . . fn process_backedge<'c, I>( . &mut self, . cycle: I, . _marker: PhantomData<&'c PendingPredicateObligation<'tcx>>, . ) where . I: Clone + Iterator>, -- line 325 ---------------------------------------- -- line 333 ---------------------------------------- . } . } . . impl<'a, 'b, 'tcx> FulfillProcessor<'a, 'b, 'tcx> { . // The code calling this method is extremely hot and only rarely . // actually uses this, so move this part of the code . // out of that loop. . #[inline(never)] 71,100 ( 0.00%) fn progress_changed_obligations( . &mut self, . pending_obligation: &mut PendingPredicateObligation<'tcx>, . ) -> ProcessResult, FulfillmentErrorCode<'tcx>> { . pending_obligation.stalled_on.truncate(0); . . let obligation = &mut pending_obligation.obligation; . 21,330 ( 0.00%) if obligation.predicate.has_infer_types_or_consts() { 4,006 ( 0.00%) obligation.predicate = 12,018 ( 0.00%) self.selcx.infcx().resolve_vars_if_possible(obligation.predicate); . } . . debug!(?obligation, ?obligation.cause, "process_obligation"); . . let infcx = self.selcx.infcx(); . 6,208 ( 0.00%) let binder = obligation.predicate.kind(); 49,770 ( 0.00%) match binder.no_bound_vars() { 66 ( 0.00%) None => match binder.skip_binder() { . // Evaluation will discard candidates using the leak check. . // This means we need to pass it the bound version of our . // predicate. . ty::PredicateKind::Trait(trait_ref) => { . let trait_obligation = obligation.with(binder.rebind(trait_ref)); . 12 ( 0.00%) self.process_trait_obligation( . obligation, 36 ( 0.00%) trait_obligation, . &mut pending_obligation.stalled_on, . ) . } . ty::PredicateKind::Projection(data) => { . let project_obligation = obligation.with(binder.rebind(data)); . 12 ( 0.00%) self.process_projection_obligation( . obligation, 36 ( 0.00%) project_obligation, . &mut pending_obligation.stalled_on, . ) . } . ty::PredicateKind::RegionOutlives(_) . | ty::PredicateKind::TypeOutlives(_) . | ty::PredicateKind::WellFormed(_) . | ty::PredicateKind::ObjectSafe(_) . | ty::PredicateKind::ClosureKind(..) -- line 386 ---------------------------------------- -- line 397 ---------------------------------------- . ty::PredicateKind::TypeWellFormedFromEnv(..) => { . bug!("TypeWellFormedFromEnv is only used for Chalk") . } . }, . Some(pred) => match pred { . ty::PredicateKind::Trait(data) => { . let trait_obligation = obligation.with(Binder::dummy(data)); . 17,730 ( 0.00%) self.process_trait_obligation( . obligation, 78,012 ( 0.00%) trait_obligation, . &mut pending_obligation.stalled_on, . ) . } . . ty::PredicateKind::RegionOutlives(data) => { 552 ( 0.00%) match infcx.region_outlives_predicate(&obligation.cause, Binder::dummy(data)) { 368 ( 0.00%) Ok(()) => ProcessResult::Changed(vec![]), . Err(_) => ProcessResult::Error(CodeSelectionError(Unimplemented)), . } . } . . ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(t_a, r_b)) => { 2,226 ( 0.00%) if self.register_region_obligations { 5,565 ( 0.00%) self.selcx.infcx().register_region_obligation_with_cause( . t_a, . r_b, . &obligation.cause, . ); . } . ProcessResult::Changed(vec![]) . } . . 
ty::PredicateKind::Projection(ref data) => { . let project_obligation = obligation.with(Binder::dummy(*data)); . 665 ( 0.00%) self.process_projection_obligation( . obligation, 1,995 ( 0.00%) project_obligation, . &mut pending_obligation.stalled_on, . ) . } . . ty::PredicateKind::ObjectSafe(trait_def_id) => { 12 ( 0.00%) if !self.selcx.tcx().is_object_safe(trait_def_id) { . ProcessResult::Error(CodeSelectionError(Unimplemented)) . } else { . ProcessResult::Changed(vec![]) . } . } . . ty::PredicateKind::ClosureKind(_, closure_substs, kind) => { 90 ( 0.00%) match self.selcx.infcx().closure_kind(closure_substs) { . Some(closure_kind) => { 180 ( 0.00%) if closure_kind.extends(kind) { . ProcessResult::Changed(vec![]) . } else { . ProcessResult::Error(CodeSelectionError(Unimplemented)) . } . } . None => ProcessResult::Unchanged, . } . } . . ty::PredicateKind::WellFormed(arg) => { 15,267 ( 0.00%) match wf::obligations( . self.selcx.infcx(), 4,362 ( 0.00%) obligation.param_env, 4,362 ( 0.00%) obligation.cause.body_id, 6,543 ( 0.00%) obligation.recursion_depth + 1, . arg, . obligation.cause.span, . ) { . None => { 2,000 ( 0.00%) pending_obligation.stalled_on = 3,500 ( 0.00%) vec![TyOrConstInferVar::maybe_from_generic_arg(arg).unwrap()]; 1,000 ( 0.00%) ProcessResult::Unchanged . } 5,043 ( 0.00%) Some(os) => ProcessResult::Changed(mk_pending(os)), . } . } . . ty::PredicateKind::Subtype(subtype) => { 48 ( 0.00%) match self.selcx.infcx().subtype_predicate( . &obligation.cause, . obligation.param_env, . Binder::dummy(subtype), . ) { . None => { . // None means that both are unresolved. 25 ( 0.00%) pending_obligation.stalled_on = vec![ 5 ( 0.00%) TyOrConstInferVar::maybe_from_ty(subtype.a).unwrap(), 10 ( 0.00%) TyOrConstInferVar::maybe_from_ty(subtype.b).unwrap(), . ]; . ProcessResult::Unchanged . } . Some(Ok(ok)) => ProcessResult::Changed(mk_pending(ok.obligations)), . Some(Err(err)) => { . let expected_found = . ExpectedFound::new(subtype.a_is_expected, subtype.a, subtype.b); . ProcessResult::Error(FulfillmentErrorCode::CodeSubtypeError( -- line 497 ---------------------------------------- -- line 639 ---------------------------------------- . } . } . } . ty::PredicateKind::TypeWellFormedFromEnv(..) => { . bug!("TypeWellFormedFromEnv is only used for Chalk") . } . }, . } 63,990 ( 0.00%) } . 67,431 ( 0.00%) #[instrument(level = "debug", skip(self, obligation, stalled_on))] . fn process_trait_obligation( . &mut self, . obligation: &PredicateObligation<'tcx>, . trait_obligation: TraitObligation<'tcx>, . stalled_on: &mut Vec>, . ) -> ProcessResult, FulfillmentErrorCode<'tcx>> { 3,549 ( 0.00%) let infcx = self.selcx.infcx(); 10,647 ( 0.00%) if obligation.predicate.is_global() { . // no type variables present, can use evaluation for better caching. . // FIXME: consider caching errors too. 3,872 ( 0.00%) if infcx.predicate_must_hold_considering_regions(obligation) { . debug!( . "selecting trait at depth {} evaluated to holds", . obligation.recursion_depth . ); 3,112 ( 0.00%) return ProcessResult::Changed(vec![]); . } . } . 13,547 ( 0.00%) match self.selcx.select(&trait_obligation) { . Ok(Some(impl_source)) => { . debug!("selecting trait at depth {} yielded Ok(Some)", obligation.recursion_depth); 15,048 ( 0.00%) ProcessResult::Changed(mk_pending(impl_source.nested_obligations())) . } . Ok(None) => { . debug!("selecting trait at depth {} yielded Ok(None)", obligation.recursion_depth); . . // This is a bit subtle: for the most part, the . // only reason we can fail to make progress on . 
// trait selection is because we don't have enough . // information about the types in the trait. . stalled_on.clear(); . stalled_on.extend(substs_infer_vars( . self.selcx, 955 ( 0.00%) trait_obligation.predicate.map_bound(|pred| pred.trait_ref.substs), . )); . . debug!( . "process_predicate: pending obligation {:?} now stalled on {:?}", . infcx.resolve_vars_if_possible(obligation.clone()), . stalled_on . ); . 955 ( 0.00%) ProcessResult::Unchanged . } . Err(selection_err) => { . debug!("selecting trait at depth {} yielded Err", obligation.recursion_depth); . 2,828 ( 0.00%) ProcessResult::Error(CodeSelectionError(selection_err)) . } . } . } . 1,496 ( 0.00%) fn process_projection_obligation( . &mut self, . obligation: &PredicateObligation<'tcx>, . project_obligation: PolyProjectionObligation<'tcx>, . stalled_on: &mut Vec>, . ) -> ProcessResult, FulfillmentErrorCode<'tcx>> { 136 ( 0.00%) let tcx = self.selcx.tcx(); . 408 ( 0.00%) if obligation.predicate.is_global() { . // no type variables present, can use evaluation for better caching. . // FIXME: consider caching errors too. 24 ( 0.00%) if self.selcx.infcx().predicate_must_hold_considering_regions(obligation) { 72 ( 0.00%) if let Some(key) = ProjectionCacheKey::from_poly_projection_predicate( . &mut self.selcx, 84 ( 0.00%) project_obligation.predicate, . ) { . // If `predicate_must_hold_considering_regions` succeeds, then we've . // evaluated all sub-obligations. We can therefore mark the 'root' . // obligation as complete, and skip evaluating sub-obligations. 72 ( 0.00%) self.selcx . .infcx() . .inner . .borrow_mut() . .projection_cache() . .complete(key, EvaluationResult::EvaluatedToOk); . } 24 ( 0.00%) return ProcessResult::Changed(vec![]); . } else { . tracing::debug!("Does NOT hold: {:?}", obligation); . } . } . 868 ( 0.00%) match project::poly_project_and_unify_type(self.selcx, &project_obligation) { . Ok(Ok(Some(os))) => ProcessResult::Changed(mk_pending(os)), . Ok(Ok(None)) => { . stalled_on.clear(); . stalled_on.extend(substs_infer_vars( . self.selcx, 60 ( 0.00%) project_obligation.predicate.map_bound(|pred| pred.projection_ty.substs), . )); 60 ( 0.00%) ProcessResult::Unchanged . } . // Let the caller handle the recursion . Ok(Err(project::InProgress)) => ProcessResult::Changed(mk_pending(vec![ . project_obligation.with(project_obligation.predicate.to_predicate(tcx)), . ])), . Err(e) => ProcessResult::Error(CodeProjectionError(e)), . } 1,088 ( 0.00%) } . } . . /// Returns the set of inference variables contained in `substs`. . fn substs_infer_vars<'a, 'tcx>( . selcx: &mut SelectionContext<'a, 'tcx>, . substs: ty::Binder<'tcx, SubstsRef<'tcx>>, . ) -> impl Iterator> { . selcx . .infcx() . .resolve_vars_if_possible(substs) . .skip_binder() // ok because this check doesn't care about regions . .iter() . .filter(|arg| arg.has_infer_types_or_consts()) . .flat_map(|arg| { 7,675 ( 0.00%) let mut walker = arg.walk(); 13,815 ( 0.00%) while let Some(c) = walker.next() { 1,535 ( 0.00%) if !c.has_infer_types_or_consts() { . walker.visited.remove(&c); . walker.skip_current_subtree(); . } . } . walker.visited.into_iter() . }) . .filter_map(TyOrConstInferVar::maybe_from_generic_arg) . } . . fn to_fulfillment_error<'tcx>( . error: Error, FulfillmentErrorCode<'tcx>>, . ) -> FulfillmentError<'tcx> { . let mut iter = error.backtrace.into_iter(); 202 ( 0.00%) let obligation = iter.next().unwrap().obligation; . // The root obligation is the last item in the backtrace - if there's only . // one item, then it's the same as the main obligation . 
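. // (For instance, a failing leaf `T: Clone` can have the user-visible
. // `Option<T>: Clone` that spawned it as its root obligation.)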
let root_obligation = iter.next_back().map_or_else(|| obligation.clone(), |e| e.obligation); 6,060 ( 0.00%) FulfillmentError::new(obligation, error.error, root_obligation) . } 1,304,803 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs -------------------------------------------------------------------------------- Ir -- line 417 ---------------------------------------- . /// # #![allow(unused_mut)] . /// let mut vec: Vec = Vec::new(); . /// ``` . #[inline] . #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub const fn new() -> Self { 860,482 ( 0.03%) Vec { buf: RawVec::NEW, len: 0 } 70 ( 0.00%) } . . /// Constructs a new, empty `Vec` with the specified capacity. . /// . /// The vector will be able to hold exactly `capacity` elements without . /// reallocating. If `capacity` is 0, the vector will not allocate. . /// . /// It is important to note that although the returned vector has the . /// *capacity* specified, the vector will have a zero *length*. For an -- line 434 ---------------------------------------- -- line 601 ---------------------------------------- . /// vec.push(11); . /// assert_eq!(vec.len(), 11); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { 328,439 ( 0.01%) Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 } . } . . /// Creates a `Vec` directly from the raw components of another vector. . /// . /// # Safety . /// . /// This is highly unsafe, due to the number of invariants that aren't . /// checked: -- line 617 ---------------------------------------- -- line 677 ---------------------------------------- . /// // Put everything back together into a Vec . /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone()); . /// assert_eq!(rebuilt, [4, 5, 6]); . /// } . /// ``` . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self { 21,011 ( 0.00%) unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } } . } . . /// Decomposes a `Vec` into its raw components. . /// . /// Returns the raw pointer to the underlying data, the length of . /// the vector (in elements), and the allocated capacity of the . /// data (in elements). These are the same arguments in the same . /// order as the arguments to [`from_raw_parts`]. -- line 693 ---------------------------------------- -- line 778 ---------------------------------------- . /// . /// ``` . /// let vec: Vec = Vec::with_capacity(10); . /// assert_eq!(vec.capacity(), 10); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn capacity(&self) -> usize { 94,065 ( 0.00%) self.buf.capacity() . } . . /// Reserves capacity for at least `additional` more elements to be inserted . /// in the given `Vec`. The collection may reserve more space to avoid . /// frequent reallocations. After calling `reserve`, capacity will be . /// greater than or equal to `self.len() + additional`. Does nothing if . /// capacity is already sufficient. . /// -- line 794 ---------------------------------------- -- line 801 ---------------------------------------- . /// ``` . 
/// let mut vec = vec![1]; . /// vec.reserve(10); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve(&mut self, additional: usize) { 82,995 ( 0.00%) self.buf.reserve(self.len, additional); . } . . /// Reserves the minimum capacity for exactly `additional` more elements to . /// be inserted in the given `Vec`. After calling `reserve_exact`, . /// capacity will be greater than or equal to `self.len() + additional`. . /// Does nothing if the capacity is already sufficient. . /// . /// Note that the allocator may give the collection more space than it -- line 817 ---------------------------------------- -- line 829 ---------------------------------------- . /// ``` . /// let mut vec = vec![1]; . /// vec.reserve_exact(10); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve_exact(&mut self, additional: usize) { 3,039 ( 0.00%) self.buf.reserve_exact(self.len, additional); . } . . /// Tries to reserve capacity for at least `additional` more elements to be inserted . /// in the given `Vec`. The collection may reserve more space to avoid . /// frequent reallocations. After calling `try_reserve`, capacity will be . /// greater than or equal to `self.len() + additional`. Does nothing if . /// capacity is already sufficient. . /// -- line 845 ---------------------------------------- -- line 930 ---------------------------------------- . /// assert!(vec.capacity() >= 3); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn shrink_to_fit(&mut self) { . // The capacity is never less than the length, and there's nothing to do when . // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit` . // by only calling it with a greater capacity. 7,668 ( 0.00%) if self.capacity() > self.len { 4,342 ( 0.00%) self.buf.shrink_to_fit(self.len); . } . } . . /// Shrinks the capacity of the vector with a lower bound. . /// . /// The capacity will remain at least as large as both the length . /// and the supplied value. . /// -- line 947 ---------------------------------------- -- line 990 ---------------------------------------- . /// let slice = vec.into_boxed_slice(); . /// assert_eq!(slice.into_vec().capacity(), 3); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn into_boxed_slice(mut self) -> Box<[T], A> { . unsafe { . self.shrink_to_fit(); 2,208 ( 0.00%) let me = ManuallyDrop::new(self); . let buf = ptr::read(&me.buf); . let len = me.len(); . buf.into_box(len).assume_init() . } . } . . /// Shortens the vector, keeping the first `len` elements and dropping . /// the rest. -- line 1006 ---------------------------------------- -- line 1040 ---------------------------------------- . /// let mut vec = vec![1, 2, 3]; . /// vec.truncate(0); . /// assert_eq!(vec, []); . /// ``` . /// . /// [`clear`]: Vec::clear . /// [`drain`]: Vec::drain . #[stable(feature = "rust1", since = "1.0.0")] 89,077 ( 0.00%) pub fn truncate(&mut self, len: usize) { . // This is safe because: . // . // * the slice passed to `drop_in_place` is valid; the `len > self.len` . // case avoids creating an invalid slice, and . // * the `len` of the vector is shrunk before calling `drop_in_place`, . // such that no value will be dropped twice in case `drop_in_place` . // were to panic once (if it panics twice, the program aborts). . 
unsafe { . // Note: It's intentional that this is `>` and not `>=`. . // Changing it to `>=` has negative performance . // implications in some cases. See #78884 for more. 69,392 ( 0.00%) if len > self.len { . return; . } . let remaining_len = self.len - len; . let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len); 59,836 ( 0.00%) self.len = len; 1,879 ( 0.00%) ptr::drop_in_place(s); . } 106,860 ( 0.00%) } . . /// Extracts a slice containing the entire vector. . /// . /// Equivalent to `&s[..]`. . /// . /// # Examples . /// . /// ``` -- line 1076 ---------------------------------------- -- line 1126 ---------------------------------------- . /// ``` . /// . /// [`as_mut_ptr`]: Vec::as_mut_ptr . #[stable(feature = "vec_as_ptr", since = "1.37.0")] . #[inline] . pub fn as_ptr(&self) -> *const T { . // We shadow the slice method of the same name to avoid going through . // `deref`, which creates an intermediate reference. 2,452,863 ( 0.08%) let ptr = self.buf.ptr(); . unsafe { . assume(!ptr.is_null()); . } . ptr . } . . /// Returns an unsafe mutable pointer to the vector's buffer. . /// -- line 1142 ---------------------------------------- -- line 1162 ---------------------------------------- . /// } . /// assert_eq!(&*x, &[0, 1, 2, 3]); . /// ``` . #[stable(feature = "vec_as_ptr", since = "1.37.0")] . #[inline] . pub fn as_mut_ptr(&mut self) -> *mut T { . // We shadow the slice method of the same name to avoid going through . // `deref_mut`, which creates an intermediate reference. 1,458,693 ( 0.05%) let ptr = self.buf.ptr(); . unsafe { . assume(!ptr.is_null()); . } . ptr . } . . /// Returns a reference to the underlying allocator. . #[unstable(feature = "allocator_api", issue = "32838")] -- line 1178 ---------------------------------------- -- line 1259 ---------------------------------------- . /// . /// Normally, here, one would use [`clear`] instead to correctly drop . /// the contents and thus not leak memory. . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub unsafe fn set_len(&mut self, new_len: usize) { . debug_assert!(new_len <= self.capacity()); . 192,164 ( 0.01%) self.len = new_len; 3,240 ( 0.00%) } . . /// Removes an element from the vector and returns it. . /// . /// The removed element is replaced by the last element of the vector. . /// . /// This does not preserve ordering, but is *O*(1). . /// If you need to preserve the element order, use [`remove`] instead. . /// -- line 1276 ---------------------------------------- -- line 1305 ---------------------------------------- . assert_failed(index, len); . } . unsafe { . // We replace self[index] with the last element. Note that if the . // bounds check above succeeds there must be a last element (which . // can be self[index] itself). . let value = ptr::read(self.as_ptr().add(index)); . let base_ptr = self.as_mut_ptr(); 5 ( 0.00%) ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1); . self.set_len(len - 1); . value . } . } . . /// Inserts an element at position `index` within the vector, shifting all . /// elements after it to the right. . /// -- line 1321 ---------------------------------------- -- line 1329 ---------------------------------------- . /// let mut vec = vec![1, 2, 3]; . /// vec.insert(1, 4); . /// assert_eq!(vec, [1, 4, 2, 3]); . /// vec.insert(4, 5); . /// assert_eq!(vec, [1, 4, 2, 3, 5]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 1,211 ( 0.00%) pub fn insert(&mut self, index: usize, element: T) { . #[cold] . 
#[inline(never)] . fn assert_failed(index: usize, len: usize) -> ! { . panic!("insertion index (is {}) should be <= len (is {})", index, len); . } . 197 ( 0.00%) let len = self.len(); 538 ( 0.00%) if index > len { . assert_failed(index, len); . } . . // space for the new element 1,124 ( 0.00%) if len == self.buf.capacity() { . self.reserve(1); . } . . unsafe { . // infallible . // The spot to put the new value . { . let p = self.as_mut_ptr().add(index); . // Shift everything over to make space. (Duplicating the . // `index`th element into two consecutive places.) 1,614 ( 0.00%) ptr::copy(p, p.offset(1), len - index); . // Write it in, overwriting the first copy of the `index`th . // element. . ptr::write(p, element); . } 1,124 ( 0.00%) self.set_len(len + 1); . } 1,038 ( 0.00%) } . . /// Removes and returns the element at position `index` within the vector, . /// shifting all elements after it to the left. . /// . /// Note: Because this shifts over the remaining elements, it has a . /// worst-case performance of *O*(*n*). If you don't need the order of elements . /// to be preserved, use [`swap_remove`] instead. If you'd like to remove . /// elements from the beginning of the `Vec`, consider using -- line 1376 ---------------------------------------- -- line 1395 ---------------------------------------- . pub fn remove(&mut self, index: usize) -> T { . #[cold] . #[inline(never)] . #[track_caller] . fn assert_failed(index: usize, len: usize) -> ! { . panic!("removal index (is {}) should be < len (is {})", index, len); . } . 125 ( 0.00%) let len = self.len(); 485 ( 0.00%) if index >= len { . assert_failed(index, len); . } . unsafe { . // infallible . let ret; . { . // the place we are taking from. . let ptr = self.as_mut_ptr().add(index); . // copy it out, unsafely having a copy of the value on . // the stack and in the vector at the same time. 215 ( 0.00%) ret = ptr::read(ptr); . . // Shift everything down to fill in that spot. 314 ( 0.00%) ptr::copy(ptr.offset(1), ptr, len - index - 1); . } 125 ( 0.00%) self.set_len(len - 1); . ret . } . } . . /// Retains only the elements specified by the predicate. . /// . /// In other words, remove all elements `e` such that `f(&e)` returns `false`. . /// This method operates in place, visiting each element exactly once in the -- line 1428 ---------------------------------------- -- line 1442 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2, 3, 4, 5]; . /// let keep = [false, true, true, false, true]; . /// let mut iter = keep.iter(); . /// vec.retain(|_| *iter.next().unwrap()); . /// assert_eq!(vec, [2, 3, 5]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 69,872 ( 0.00%) pub fn retain(&mut self, mut f: F) . where . F: FnMut(&T) -> bool, . { 2,039 ( 0.00%) self.retain_mut(|elem| f(elem)); 68,312 ( 0.00%) } . . /// Retains only the elements specified by the predicate, passing a mutable reference to it. . /// . /// In other words, remove all elements `e` such that `f(&mut e)` returns `false`. . /// This method operates in place, visiting each element exactly once in the . /// original order, and preserves the order of the retained elements. . /// . /// # Examples -- line 1463 ---------------------------------------- -- line 1474 ---------------------------------------- . /// }); . /// assert_eq!(vec, [2, 3, 4]); . /// ``` . #[unstable(feature = "vec_retain_mut", issue = "90829")] . pub fn retain_mut(&mut self, mut f: F) . where . F: FnMut(&mut T) -> bool, . { 8,785 ( 0.00%) let original_len = self.len(); . 
// Avoid double drop if the drop guard is not executed, . // since we may make some holes during the process. . unsafe { self.set_len(0) }; . . // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked] . // |<- processed len ->| ^- next to check . // |<- deleted cnt ->| . // |<- original_len ->| -- line 1490 ---------------------------------------- -- line 1499 ---------------------------------------- . v: &'a mut Vec, . processed_len: usize, . deleted_cnt: usize, . original_len: usize, . } . . impl Drop for BackshiftOnDrop<'_, T, A> { . fn drop(&mut self) { 6,128 ( 0.00%) if self.deleted_cnt > 0 { . // SAFETY: Trailing unchecked items must be valid since we never touch them. . unsafe { . ptr::copy( . self.v.as_ptr().add(self.processed_len), 4,355 ( 0.00%) self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt), . self.original_len - self.processed_len, . ); . } . } . // SAFETY: After filling holes, all items are in contiguous memory. . unsafe { 2 ( 0.00%) self.v.set_len(self.original_len - self.deleted_cnt); . } . } . } . . let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len }; . . fn process_loop( . original_len: usize, . f: &mut F, . g: &mut BackshiftOnDrop<'_, T, A>, . ) where . F: FnMut(&mut T) -> bool, . { 136,368 ( 0.00%) while g.processed_len != original_len { . // SAFETY: Unchecked element must be valid. . let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) }; 32,560 ( 0.00%) if !f(cur) { . // Advance early to avoid double drop if `drop_in_place` panicked. 758 ( 0.00%) g.processed_len += 1; 194 ( 0.00%) g.deleted_cnt += 1; . // SAFETY: We never touch this element again after dropped. 32 ( 0.00%) unsafe { ptr::drop_in_place(cur) }; . // We already advanced the counter. . if DELETED { . continue; . } else { . break; . } . } . if DELETED { . // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element. . // We use copy for move, and never touch this element again. . unsafe { 2,886 ( 0.00%) let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt); . ptr::copy_nonoverlapping(cur, hole_slot, 1); . } . } 2,617 ( 0.00%) g.processed_len += 1; . } . } . . // Stage 1: Nothing was deleted. . process_loop::(original_len, &mut f, &mut g); . . // Stage 2: Some elements were deleted. . process_loop::(original_len, &mut f, &mut g); -- line 1565 ---------------------------------------- -- line 1606 ---------------------------------------- . /// ``` . /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; . /// . /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); . /// . /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); . /// ``` . #[stable(feature = "dedup_by", since = "1.16.0")] 392 ( 0.00%) pub fn dedup_by(&mut self, mut same_bucket: F) . where . F: FnMut(&mut T, &mut T) -> bool, . { 266 ( 0.00%) let len = self.len(); 994 ( 0.00%) if len <= 1 { . return; . } . . /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ . struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> { . /* Offset of the element we want to check if it is duplicate */ . read: usize, . -- line 1627 ---------------------------------------- -- line 1670 ---------------------------------------- . let ptr = gap.vec.as_mut_ptr(); . . /* Drop items while going through Vec, it should be more efficient than . * doing slice partition_dedup + truncate */ . . /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr . * are always in-bounds and read_ptr never aliases prev_ptr */ . 
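. // Two-cursor pass: `gap.read` scans every element while `gap.write` marks
. // the end of the deduplicated prefix; duplicates are dropped in place and
. // survivors are copied back to close the gap.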
unsafe { 2,258 ( 0.00%) while gap.read < len { . let read_ptr = ptr.add(gap.read); . let prev_ptr = ptr.add(gap.write.wrapping_sub(1)); . 298 ( 0.00%) if same_bucket(&mut *read_ptr, &mut *prev_ptr) { . // Increase `gap.read` now since the drop may panic. . gap.read += 1; . /* We have found duplicate, drop it in-place */ . ptr::drop_in_place(read_ptr); . } else { . let write_ptr = ptr.add(gap.write); . . /* Because `read_ptr` can be equal to `write_ptr`, we either . * have to use `copy` or conditional `copy_nonoverlapping`. . * Looks like the first option is faster. */ . ptr::copy(read_ptr, write_ptr, 1); . . /* We have filled that place, so go further */ 1,188 ( 0.00%) gap.write += 1; . gap.read += 1; . } . } . . /* Technically we could let `gap` clean up with its Drop, but . * when `same_bucket` is guaranteed to not panic, this bloats a little . * the codegen, so we just do it manually */ . gap.vec.set_len(gap.write); . mem::forget(gap); . } 448 ( 0.00%) } . . /// Appends an element to the back of a collection. . /// . /// # Panics . /// . /// Panics if the new capacity exceeds `isize::MAX` bytes. . /// . /// # Examples -- line 1715 ---------------------------------------- -- line 1717 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2]; . /// vec.push(3); . /// assert_eq!(vec, [1, 2, 3]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] 80 ( 0.00%) pub fn push(&mut self, value: T) { . // This will panic or abort if we would allocate > isize::MAX bytes . // or if the length increment would overflow for zero-sized types. 2,127,788 ( 0.07%) if self.len == self.buf.capacity() { 135,366 ( 0.00%) self.buf.reserve_for_push(self.len); . } . unsafe { 60,383 ( 0.00%) let end = self.as_mut_ptr().add(self.len); . ptr::write(end, value); 2,595,662 ( 0.09%) self.len += 1; . } 64 ( 0.00%) } . . /// Removes the last element from a vector and returns it, or [`None`] if it . /// is empty. . /// . /// If you'd like to pop the first element, consider using . /// [`VecDeque::pop_front`] instead. . /// . /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front -- line 1744 ---------------------------------------- -- line 1748 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2, 3]; . /// assert_eq!(vec.pop(), Some(3)); . /// assert_eq!(vec, [1, 2]); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn pop(&mut self) -> Option { 326,047 ( 0.01%) if self.len == 0 { 22 ( 0.00%) None . } else { . unsafe { 232,961 ( 0.01%) self.len -= 1; 46,797 ( 0.00%) Some(ptr::read(self.as_ptr().add(self.len()))) . } . } . } . . /// Moves all the elements of `other` into `Self`, leaving `other` empty. . /// . /// # Panics . /// -- line 1769 ---------------------------------------- -- line 1776 ---------------------------------------- . /// let mut vec2 = vec![4, 5, 6]; . /// vec.append(&mut vec2); . /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]); . /// assert_eq!(vec2, []); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "append", since = "1.4.0")] 203 ( 0.00%) pub fn append(&mut self, other: &mut Self) { . unsafe { . self.append_elements(other.as_slice() as _); . other.set_len(0); . } 174 ( 0.00%) } . . /// Appends elements to `Self` from other buffer. . #[cfg(not(no_global_oom_handling))] . #[inline] . unsafe fn append_elements(&mut self, other: *const [T]) { . let count = unsafe { (*other).len() }; . 
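. // Grow the buffer up front so the single raw copy below cannot write past
. // capacity.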
self.reserve(count); 12,205 ( 0.00%) let len = self.len(); . unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; 107,154 ( 0.00%) self.len += count; . } . . /// Creates a draining iterator that removes the specified range in the vector . /// and yields the removed items. . /// . /// When the iterator **is** dropped, all elements in the range are removed . /// from the vector, even if the iterator was not fully consumed. If the . /// iterator **is not** dropped (with [`mem::forget`] for example), it is -- line 1807 ---------------------------------------- -- line 1834 ---------------------------------------- . // When the Drain is first created, it shortens the length of . // the source vector to make sure no uninitialized or moved-from elements . // are accessible at all if the Drain's destructor never gets to run. . // . // Drain will ptr::read out the values to remove. . // When finished, remaining tail of the vec is copied back to cover . // the hole, and the vector length is restored to the new length. . // 5,058 ( 0.00%) let len = self.len(); . let Range { start, end } = slice::range(range, ..len); . . unsafe { . // set self.vec length's to start, to be safe in case Drain is leaked . self.set_len(start); . // Use the borrow in the IterMut to indicate borrowing behavior of the . // whole Drain iterator (like &mut T). 675 ( 0.00%) let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start); 6,267 ( 0.00%) Drain { . tail_start: end, 496 ( 0.00%) tail_len: len - end, . iter: range_slice.iter(), . vec: NonNull::from(self), . } . } . } . . /// Clears the vector, removing all values. . /// -- line 1861 ---------------------------------------- -- line 1869 ---------------------------------------- . /// . /// v.clear(); . /// . /// assert!(v.is_empty()); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn clear(&mut self) { 154,410 ( 0.01%) self.truncate(0) . } . . /// Returns the number of elements in the vector, also referred to . /// as its 'length'. . /// . /// # Examples . /// . /// ``` -- line 1885 ---------------------------------------- -- line 1900 ---------------------------------------- . /// let mut v = Vec::new(); . /// assert!(v.is_empty()); . /// . /// v.push(1); . /// assert!(!v.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn is_empty(&self) -> bool { 108,248 ( 0.00%) self.len() == 0 . } . . /// Splits the collection into two at the given index. . /// . /// Returns a newly allocated vector containing the elements in the range . /// `[at, len)`. After the call, the original vector will be left containing . /// the elements `[0, at)` with its previous capacity unchanged. . /// -- line 1916 ---------------------------------------- -- line 1935 ---------------------------------------- . A: Clone, . { . #[cold] . #[inline(never)] . fn assert_failed(at: usize, len: usize) -> ! { . panic!("`at` split index (is {}) should be <= len (is {})", at, len); . } . 347 ( 0.00%) if at > self.len() { . assert_failed(at, self.len()); . } . 259 ( 0.00%) if at == 0 { . // the new vector can take over the original buffer and avoid the copy . return mem::replace( . self, . Vec::with_capacity_in(self.capacity(), self.allocator().clone()), . ); . } . . let other_len = self.len - at; -- line 1955 ---------------------------------------- -- line 1988 ---------------------------------------- . /// . /// let mut vec = vec![]; . /// let mut p = 1; . 
/// vec.resize_with(4, || { p *= 2; p }); . /// assert_eq!(vec, [2, 4, 8, 16]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "vec_resize_with", since = "1.33.0")] 6,924 ( 0.00%) pub fn resize_with(&mut self, new_len: usize, f: F) . where . F: FnMut() -> T, . { 928 ( 0.00%) let len = self.len(); 1,856 ( 0.00%) if new_len > len { 6,407 ( 0.00%) self.extend_with(new_len - len, ExtendFunc(f)); . } else { . self.truncate(new_len); . } 5,996 ( 0.00%) } . . /// Consumes and leaks the `Vec`, returning a mutable reference to the contents, . /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime . /// `'a`. If the type has only static references, or none at all, then this . /// may be chosen to be `'static`. . /// . /// As of Rust 1.57, this method does not reallocate or shrink the `Vec`, . /// so the leaked allocation may include unused capacity that is not part -- line 2014 ---------------------------------------- -- line 2070 ---------------------------------------- . #[stable(feature = "vec_spare_capacity", since = "1.60.0")] . #[inline] . pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit] { . // Note: . // This method is not implemented in terms of `split_at_spare_mut`, . // to prevent invalidation of pointers to the buffer. . unsafe { . slice::from_raw_parts_mut( 2 ( 0.00%) self.as_mut_ptr().add(self.len) as *mut MaybeUninit, 27 ( 0.00%) self.buf.capacity() - self.len, . ) . } . } . . /// Returns vector content as a slice of `T`, along with the remaining spare . /// capacity of the vector as a slice of `MaybeUninit`. . /// . /// The returned spare capacity slice can be used to fill the vector with data -- line 2087 ---------------------------------------- -- line 2189 ---------------------------------------- . /// assert_eq!(vec, ["hello", "world", "world"]); . /// . /// let mut vec = vec![1, 2, 3, 4]; . /// vec.resize(2, 0); . /// assert_eq!(vec, [1, 2]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "vec_resize", since = "1.5.0")] 6,380 ( 0.00%) pub fn resize(&mut self, new_len: usize, value: T) { 638 ( 0.00%) let len = self.len(); . 1,914 ( 0.00%) if new_len > len { 1,941 ( 0.00%) self.extend_with(new_len - len, ExtendElement(value)) . } else { . self.truncate(new_len); . } 1,638 ( 0.00%) } . . /// Clones and appends all elements in a slice to the `Vec`. . /// . /// Iterates over the slice `other`, clones each element, and then appends . /// it to this `Vec`. The `other` slice is traversed in-order. . /// . /// Note that this function is same as [`extend`] except that it is . /// specialized to work with slices instead. If and when Rust gets -- line 2213 ---------------------------------------- -- line 2271 ---------------------------------------- . trait ExtendWith { . fn next(&mut self) -> T; . fn last(self) -> T; . } . . struct ExtendElement(T); . impl ExtendWith for ExtendElement { . fn next(&mut self) -> T { 2,490 ( 0.00%) self.0.clone() . } . fn last(self) -> T { . self.0 . } . } . . struct ExtendFunc(F); . impl T> ExtendWith for ExtendFunc { -- line 2287 ---------------------------------------- -- line 2291 ---------------------------------------- . fn last(mut self) -> T { . (self.0)() . } . } . . impl Vec { . #[cfg(not(no_global_oom_handling))] . /// Extend the vector by `n` values, using the given generator. 66,696 ( 0.00%) fn extend_with>(&mut self, n: usize, mut value: E) { . self.reserve(n); . . unsafe { 1,826 ( 0.00%) let mut ptr = self.as_mut_ptr().add(self.len()); . 
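. // `ptr` now points one past the last initialized element; the `reserve(n)`
. // above guarantees the next `n` slots are allocated spare capacity.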
// Use SetLenOnDrop to work around a bug where the compiler . // might not realize that the stores through `ptr` and through self.set_len() . // don't alias. . let mut local_len = SetLenOnDrop::new(&mut self.len); . . // Write all elements except the last one . for _ in 1..n { . ptr::write(ptr, value.next()); . ptr = ptr.offset(1); . // Increment the length in every step in case next() panics . local_len.increment_len(1); . } . 72,219 ( 0.00%) if n > 0 { . // We can write the last element directly without cloning needlessly . ptr::write(ptr, value.last()); . local_len.increment_len(1); . } . . // len set by scope guard . } 52,771 ( 0.00%) } . } . . impl Vec { . /// Removes consecutive repeated elements in the vector according to the . /// [`PartialEq`] trait implementation. . /// . /// If the vector is sorted, this removes all duplicates. . /// -- line 2333 ---------------------------------------- -- line 2338 ---------------------------------------- . /// . /// vec.dedup(); . /// . /// assert_eq!(vec, [1, 2, 3, 2]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . pub fn dedup(&mut self) { 57 ( 0.00%) self.dedup_by(|a, b| a == b) . } . } . . //////////////////////////////////////////////////////////////////////////////// . // Internal methods and functions . //////////////////////////////////////////////////////////////////////////////// . . #[doc(hidden)] . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 15,001 ( 0.00%) pub fn from_elem(elem: T, n: usize) -> Vec { 88,229 ( 0.00%) ::from_elem(elem, n, Global) 16,312 ( 0.00%) } . . #[doc(hidden)] . #[cfg(not(no_global_oom_handling))] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { . ::from_elem(elem, n, alloc) . } . -- line 2367 ---------------------------------------- -- line 2424 ---------------------------------------- . // Common trait implementations for Vec . //////////////////////////////////////////////////////////////////////////////// . . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::Deref for Vec { . type Target = [T]; . . fn deref(&self) -> &[T] { 3,319,093 ( 0.11%) unsafe { slice::from_raw_parts(self.as_ptr(), self.len) } 33 ( 0.00%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::DerefMut for Vec { . fn deref_mut(&mut self) -> &mut [T] { 1,072,509 ( 0.04%) unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } . } . } . . #[cfg(not(no_global_oom_handling))] . trait SpecCloneFrom { . fn clone_from(this: &mut Self, other: &Self); . } . -- line 2447 ---------------------------------------- -- line 2468 ---------------------------------------- . this.extend_from_slice(other); . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Clone for Vec { . #[cfg(not(test))] 74,905 ( 0.00%) fn clone(&self) -> Self { . let alloc = self.allocator().clone(); 2 ( 0.00%) <[T]>::to_vec_in(&**self, alloc) 94,160 ( 0.00%) } . . // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is . // required for this method definition, is not available. Instead use the . // `slice::to_vec` function which is only available with cfg(test) . // NB see the slice::hack module in slice.rs for more information . #[cfg(test)] . fn clone(&self) -> Self { . let alloc = self.allocator().clone(); -- line 2487 ---------------------------------------- -- line 2518 ---------------------------------------- .
message = "vector indices are of type `usize` or ranges of `usize`", . label = "vector indices are of type `usize` or ranges of `usize`" . )] . impl, A: Allocator> Index for Vec { . type Output = I::Output; . . #[inline] . fn index(&self, index: I) -> &Self::Output { 48,744 ( 0.00%) Index::index(&**self, index) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_on_unimplemented( . message = "vector indices are of type `usize` or ranges of `usize`", . label = "vector indices are of type `usize` or ranges of `usize`" . )] . impl, A: Allocator> IndexMut for Vec { . #[inline] . fn index_mut(&mut self, index: I) -> &mut Self::Output { 308 ( 0.00%) IndexMut::index_mut(&mut **self, index) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl FromIterator for Vec { . #[inline] . fn from_iter>(iter: I) -> Vec { 378,159 ( 0.01%) >::from_iter(iter.into_iter()) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl IntoIterator for Vec { . type Item = T; . type IntoIter = IntoIter; . -- line 2555 ---------------------------------------- -- line 2564 ---------------------------------------- . /// for s in v.into_iter() { . /// // s has type String, not &String . /// println!("{}", s); . /// } . /// ``` . #[inline] . fn into_iter(self) -> IntoIter { . unsafe { 161,233 ( 0.01%) let mut me = ManuallyDrop::new(self); . let alloc = ptr::read(me.allocator()); . let begin = me.as_mut_ptr(); . let end = if mem::size_of::() == 0 { . arith_offset(begin as *const i8, me.len() as isize) as *const T . } else { . begin.add(me.len()) as *const T . }; . let cap = me.buf.capacity(); 59,637 ( 0.00%) IntoIter { . buf: NonNull::new_unchecked(begin), . phantom: PhantomData, . cap, . alloc, . ptr: begin, . end, . } . } -- line 2589 ---------------------------------------- -- line 2591 ---------------------------------------- . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T, A: Allocator> IntoIterator for &'a Vec { . type Item = &'a T; . type IntoIter = slice::Iter<'a, T>; . . fn into_iter(self) -> slice::Iter<'a, T> { 19 ( 0.00%) self.iter() . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { . type Item = &'a mut T; . type IntoIter = slice::IterMut<'a, T>; . . fn into_iter(self) -> slice::IterMut<'a, T> { 1,062 ( 0.00%) self.iter_mut() . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Extend for Vec { . #[inline] 952 ( 0.00%) fn extend>(&mut self, iter: I) { 155,403 ( 0.01%) >::spec_extend(self, iter.into_iter()) 952 ( 0.00%) } . . #[inline] . fn extend_one(&mut self, item: T) { . self.push(item); . } . . #[inline] . fn extend_reserve(&mut self, additional: usize) { -- line 2627 ---------------------------------------- -- line 2636 ---------------------------------------- . fn extend_desugared>(&mut self, mut iterator: I) { . // This is the case for a general iterator. . // . // This function should be the moral equivalent of: . // . // for item in iterator { . // self.push(item); . // } 38,059 ( 0.00%) while let Some(element) = iterator.next() { 6,559 ( 0.00%) let len = self.len(); 98,597 ( 0.00%) if len == self.capacity() { 776 ( 0.00%) let (lower, _) = iterator.size_hint(); . self.reserve(lower.saturating_add(1)); . } . unsafe { . ptr::write(self.as_mut_ptr().add(len), element); . // Since next() executes user code which can panic we have to bump the length . // after each step. . 
// NB can't overflow since we would have had to alloc the address space 74,767 ( 0.00%) self.set_len(len + 1); . } . } 1,182 ( 0.00%) } . . /// Creates a splicing iterator that replaces the specified range in the vector . /// with the given `replace_with` iterator and yields the removed items. . /// `replace_with` does not need to be the same length as `range`. . /// . /// `range` is removed even if the iterator is not consumed until the end. . /// . /// It is unspecified how many elements are removed from the vector -- line 2666 ---------------------------------------- -- line 2693 ---------------------------------------- . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "vec_splice", since = "1.21.0")] . pub fn splice(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A> . where . R: RangeBounds, . I: IntoIterator, . { 3,777 ( 0.00%) Splice { drain: self.drain(range), replace_with: replace_with.into_iter() } . } . . /// Creates an iterator which uses a closure to determine if an element should be removed. . /// . /// If the closure returns true, then the element is removed and yielded. . /// If the closure returns false, the element will remain in the vector and will not be yielded . /// by the iterator. . /// -- line 2709 ---------------------------------------- -- line 2745 ---------------------------------------- . /// assert_eq!(evens, vec![2, 4, 6, 8, 14]); . /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); . /// ``` . #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] . pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A> . where . F: FnMut(&mut T) -> bool, . { 867 ( 0.00%) let old_len = self.len(); . . // Guard against us getting leaked (leak amplification) . unsafe { . self.set_len(0); . } . 1,360 ( 0.00%) DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false } . } . } . . /// Extend implementation that copies elements out of references before pushing them onto the Vec. . /// . /// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to . /// append the entire slice at once. . /// -- line 2768 ---------------------------------------- -- line 2803 ---------------------------------------- . #[inline] . fn cmp(&self, other: &Self) -> Ordering { . Ord::cmp(&**self, &**other) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { 246,981 ( 0.01%) fn drop(&mut self) { . unsafe { . // use drop for [T] . // use a raw slice to refer to the elements of the vector as weakest necessary type; . // could avoid questions of validity in certain cases 104,978 ( 0.00%) ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len)) . } . // RawVec handles deallocation 290,568 ( 0.01%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] . impl const Default for Vec { . /// Creates an empty `Vec`. . fn default() -> Vec { . Vec::new() -- line 2827 ---------------------------------------- -- line 2976 ---------------------------------------- . /// newly-allocated buffer with exactly the right capacity. . /// . /// # Examples . /// . /// ``` . /// assert_eq!(Box::from(vec![1, 2, 3]), vec![1, 2, 3].into_boxed_slice()); . /// ``` . fn from(v: Vec) -> Self { 4 ( 0.00%) v.into_boxed_slice() . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . 
impl From<&str> for Vec { . /// Allocate a `Vec` and fill it with a UTF-8 string. . /// -- line 2992 ---------------------------------------- 678,250 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_trait_selection/src/traits/select/mod.rs -------------------------------------------------------------------------------- Ir -- line 208 ---------------------------------------- . /// There is no built-in impl. There may be some other . /// candidate (a where-clause or user-defined impl). . None, . /// It is unknown whether there is an impl. . Ambiguous, . } . . impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { 33,804 ( 0.00%) pub fn new(infcx: &'cx InferCtxt<'cx, 'tcx>) -> SelectionContext<'cx, 'tcx> { 196,655 ( 0.01%) SelectionContext { . infcx, 29,405 ( 0.00%) freshener: infcx.freshener_keep_static(), . intercrate: false, . intercrate_ambiguity_causes: None, . allow_negative_impls: false, . query_mode: TraitQueryMode::Standard, . } 28,170 ( 0.00%) } . . pub fn intercrate(infcx: &'cx InferCtxt<'cx, 'tcx>) -> SelectionContext<'cx, 'tcx> { 16 ( 0.00%) SelectionContext { . infcx, 2 ( 0.00%) freshener: infcx.freshener_keep_static(), . intercrate: true, . intercrate_ambiguity_causes: None, . allow_negative_impls: false, . query_mode: TraitQueryMode::Standard, . } . } . . pub fn with_negative( -- line 238 ---------------------------------------- -- line 245 ---------------------------------------- . freshener: infcx.freshener_keep_static(), . intercrate: false, . intercrate_ambiguity_causes: None, . allow_negative_impls, . query_mode: TraitQueryMode::Standard, . } . } . 3,992 ( 0.00%) pub fn with_query_mode( . infcx: &'cx InferCtxt<'cx, 'tcx>, . query_mode: TraitQueryMode, . ) -> SelectionContext<'cx, 'tcx> { . debug!(?query_mode, "with_query_mode"); 7,984 ( 0.00%) SelectionContext { . infcx, 499 ( 0.00%) freshener: infcx.freshener_keep_static(), . intercrate: false, . intercrate_ambiguity_causes: None, . allow_negative_impls: false, . query_mode, . } 2,994 ( 0.00%) } . . /// Enables tracking of intercrate ambiguity causes. These are . /// used in coherence to give improved diagnostics. We don't do . /// this until we detect a coherence error because it can lead to . /// false overflow results (#47139) and because it costs . /// computation time. . pub fn enable_tracking_intercrate_ambiguity_causes(&mut self) { . assert!(self.intercrate); -- line 274 ---------------------------------------- -- line 281 ---------------------------------------- . /// was enabled and disables tracking at the same time. If . /// tracking is not enabled, just returns an empty vector. . pub fn take_intercrate_ambiguity_causes(&mut self) -> Vec { . assert!(self.intercrate); . self.intercrate_ambiguity_causes.take().unwrap_or_default() . } . . pub fn infcx(&self) -> &'cx InferCtxt<'cx, 'tcx> { 3,047,637 ( 0.10%) self.infcx 4,553 ( 0.00%) } . . pub fn tcx(&self) -> TyCtxt<'tcx> { 82,830 ( 0.00%) self.infcx.tcx . } . . pub fn is_intercrate(&self) -> bool { 2,035 ( 0.00%) self.intercrate . } . . /////////////////////////////////////////////////////////////////////////// . // Selection . // . // The selection phase tries to identify *how* an obligation will . // be resolved. For example, it will identify which impl or . // parameter bound is to be used. The process can be inconclusive -- line 305 ---------------------------------------- -- line 309 ---------------------------------------- . // 1. 
If no applicable impl or parameter bound can be found. . // 2. If the output type parameters in the obligation do not match . // those specified by the impl/bound. For example, if the obligation . // is `Vec: Iterable`, but the impl specifies . // `impl Iterable for Vec`, then an error would result. . . /// Attempts to satisfy the obligation. If successful, this will affect the surrounding . /// type environment by performing unification. 25,984 ( 0.00%) #[instrument(level = "debug", skip(self))] 33,408 ( 0.00%) pub fn select( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> SelectionResult<'tcx, Selection<'tcx>> { 29,196 ( 0.00%) let candidate = match self.select_from_obligation(obligation) { . Err(SelectionError::Overflow) => { . // In standard mode, overflow must have been caught and reported . // earlier. . assert!(self.query_mode == TraitQueryMode::Canonical); . return Err(SelectionError::Overflow); . } . Err(SelectionError::Ambiguous(_)) => { . return Ok(None); . } . Err(e) => { 14,560 ( 0.00%) return Err(e); . } . Ok(None) => { 1,398 ( 0.00%) return Ok(None); . } 9,840 ( 0.00%) Ok(Some(candidate)) => candidate, . }; . 16,446 ( 0.00%) match self.confirm_candidate(obligation, candidate) { . Err(SelectionError::Overflow) => { . assert!(self.query_mode == TraitQueryMode::Canonical); . Err(SelectionError::Overflow) . } 299 ( 0.00%) Err(e) => Err(e), . Ok(candidate) => { . debug!(?candidate, "confirmed"); 19,404 ( 0.00%) Ok(Some(candidate)) . } . } . } . 37,120 ( 0.00%) crate fn select_from_obligation( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { . debug_assert!(!obligation.predicate.has_escaping_bound_vars()); . . let pec = &ProvisionalEvaluationCache::default(); . let stack = self.push_stack(TraitObligationStackList::empty(pec), obligation); . 7,424 ( 0.00%) self.candidate_from_obligation(&stack) 33,408 ( 0.00%) } . . /////////////////////////////////////////////////////////////////////////// . // EVALUATION . // . // Tests whether an obligation can be selected or whether an impl . // can be applied to particular types. It skips the "confirmation" . // step and hence completely ignores output type parameters. . // -- line 372 ---------------------------------------- -- line 384 ---------------------------------------- . self.evaluate_root_obligation(obligation) . .expect("Overflow should be caught earlier in standard query mode") . .may_apply() . } . . /// Evaluates whether the obligation `obligation` can be satisfied . /// and returns an `EvaluationResult`. This is meant for the . /// *initial* call. 998 ( 0.00%) pub fn evaluate_root_obligation( . &mut self, . obligation: &PredicateObligation<'tcx>, . ) -> Result { . self.evaluation_probe(|this| { 1,497 ( 0.00%) this.evaluate_predicate_recursively( . TraitObligationStackList::empty(&ProvisionalEvaluationCache::default()), . obligation.clone(), . ) . }) 998 ( 0.00%) } . . fn evaluation_probe( . &mut self, . op: impl FnOnce(&mut Self) -> Result, . ) -> Result { 8,869 ( 0.00%) self.infcx.probe(|snapshot| -> Result { 998 ( 0.00%) let result = op(self)?; . 8,540 ( 0.00%) match self.infcx.leak_check(true, snapshot) { . Ok(()) => {} . Err(_) => return Ok(EvaluatedToErr), . } . 6,100 ( 0.00%) match self.infcx.region_constraints_added_in_snapshot(snapshot) { . None => Ok(result), . Some(_) => Ok(result.max(EvaluatedToOkModuloRegions)), . } . }) . } . . /// Evaluates the predicates in `predicates` recursively. Note that .
/// this applies projections in the predicates, and therefore . /// is run within an inference probe. 13,440 ( 0.00%) #[instrument(skip(self, stack), level = "debug")] . fn evaluate_predicates_recursively<'o, I>( . &mut self, . stack: TraitObligationStackList<'o, 'tcx>, . predicates: I, . ) -> Result . where . I: IntoIterator> + std::fmt::Debug, . { . let mut result = EvaluatedToOk; 8,391 ( 0.00%) for obligation in predicates { 2,244 ( 0.00%) let eval = self.evaluate_predicate_recursively(stack, obligation.clone())?; 1,122 ( 0.00%) if let EvaluatedToErr = eval { . // fast-path - EvaluatedToErr is the top of the lattice, . // so we don't need to look on the other predicates. . return Ok(EvaluatedToErr); . } else { . result = cmp::max(result, eval); . } . } . Ok(result) . } . 10,600 ( 0.00%) #[instrument( . level = "debug", . skip(self, previous_stack), . fields(previous_stack = ?previous_stack.head()) 9,540 ( 0.00%) )] . fn evaluate_predicate_recursively<'o>( . &mut self, . previous_stack: TraitObligationStackList<'o, 'tcx>, . obligation: PredicateObligation<'tcx>, . ) -> Result { . // `previous_stack` stores a `TraitObligation`, while `obligation` is . // a `PredicateObligation`. These are distinct types, so we can't . // use any `Option` combinator method that would force them to be . // the same. 2,120 ( 0.00%) match previous_stack.head() { 1,116 ( 0.00%) Some(h) => self.check_recursion_limit(&obligation, h.obligation)?, 502 ( 0.00%) None => self.check_recursion_limit(&obligation, &obligation)?, . } . 6,360 ( 0.00%) let result = ensure_sufficient_stack(|| { 1,060 ( 0.00%) let bound_predicate = obligation.predicate.kind(); 10,600 ( 0.00%) match bound_predicate.skip_binder() { . ty::PredicateKind::Trait(t) => { . let t = bound_predicate.rebind(t); . debug_assert!(!t.has_escaping_bound_vars()); . let obligation = obligation.with(t); 15,450 ( 0.00%) self.evaluate_trait_predicate_recursively(previous_stack, obligation) . } . . ty::PredicateKind::Subtype(p) => { . let p = bound_predicate.rebind(p); . // Does this code ever run? 15 ( 0.00%) match self.infcx.subtype_predicate(&obligation.cause, obligation.param_env, p) { . Some(Ok(InferOk { mut obligations, .. })) => { . self.add_depth(obligations.iter_mut(), obligation.recursion_depth); . self.evaluate_predicates_recursively( . previous_stack, . obligations.into_iter(), . ) . } . Some(Err(_)) => Ok(EvaluatedToErr), -- line 489 ---------------------------------------- -- line 522 ---------------------------------------- . None => Ok(EvaluatedToAmbig), . }, . . ty::PredicateKind::TypeOutlives(pred) => { . // A global type with no late-bound regions can only . // contain the "'static" lifetime (any other lifetime . // would either be late-bound or local), so it is guaranteed . // to outlive any other lifetime 18 ( 0.00%) if pred.0.is_global() && !pred.0.has_late_bound_regions() { . Ok(EvaluatedToOk) . } else { . Ok(EvaluatedToOkModuloRegions) . } . } . . ty::PredicateKind::RegionOutlives(..) => { . // We do not consider region relationships when evaluating trait matches. -- line 538 ---------------------------------------- -- line 545 ---------------------------------------- . } else { . Ok(EvaluatedToErr) . } . } . . ty::PredicateKind::Projection(data) => { . let data = bound_predicate.rebind(data); . let project_obligation = obligation.with(data); 84 ( 0.00%) match project::poly_project_and_unify_type(self, &project_obligation) { 70 ( 0.00%) Ok(Ok(Some(mut subobligations))) => { . 'compute_res: { . 
// If we've previously marked this projection as 'complete', then . // use the final cached result (either `EvaluatedToOk` or . // `EvaluatedToOkModuloRegions`), and skip re-evaluating the . // sub-obligations. 56 ( 0.00%) if let Some(key) = 126 ( 0.00%) ProjectionCacheKey::from_poly_projection_predicate(self, data) . { 112 ( 0.00%) if let Some(cached_res) = self . .infcx . .inner . .borrow_mut() . .projection_cache() . .is_complete(key) . { . break 'compute_res Ok(cached_res); . } . } . 28 ( 0.00%) self.add_depth( . subobligations.iter_mut(), 28 ( 0.00%) obligation.recursion_depth, . ); 28 ( 0.00%) let res = self.evaluate_predicates_recursively( 42 ( 0.00%) previous_stack, 70 ( 0.00%) subobligations, . ); 56 ( 0.00%) if let Ok(res) = res { 28 ( 0.00%) if res == EvaluatedToOk || res == EvaluatedToOkModuloRegions { 28 ( 0.00%) if let Some(key) = 7 ( 0.00%) ProjectionCacheKey::from_poly_projection_predicate( 49 ( 0.00%) self, data, . ) . { . // If the result is something that we can cache, then mark this . // entry as 'complete'. This will allow us to skip evaluating the . // sub-obligations at all the next time we evaluate the projection . // predicate. 49 ( 0.00%) self.infcx . .inner . .borrow_mut() . .projection_cache() . .complete(key, res); . } . } . } . res -- line 601 ---------------------------------------- -- line 603 ---------------------------------------- . } . Ok(Ok(None)) => Ok(EvaluatedToAmbig), . Ok(Err(project::InProgress)) => Ok(EvaluatedToRecur), . Err(_) => Ok(EvaluatedToErr), . } . } . . ty::PredicateKind::ClosureKind(_, closure_substs, kind) => { 40 ( 0.00%) match self.infcx.closure_kind(closure_substs) { . Some(closure_kind) => { 24 ( 0.00%) if closure_kind.extends(kind) { . Ok(EvaluatedToOk) . } else { . Ok(EvaluatedToErr) . } . } . None => Ok(EvaluatedToAmbig), . } . } -- line 621 ---------------------------------------- -- line 693 ---------------------------------------- . } . } . } . } . ty::PredicateKind::TypeWellFormedFromEnv(..) => { . bug!("TypeWellFormedFromEnv is only used for chalk") . } . } 1,060 ( 0.00%) }); . . debug!("finished: {:?} from {:?}", result, obligation); . . result . } . 21,630 ( 0.00%) #[instrument(skip(self, previous_stack), level = "debug")] . fn evaluate_trait_predicate_recursively<'o>( . &mut self, . previous_stack: TraitObligationStackList<'o, 'tcx>, . mut obligation: TraitObligation<'tcx>, . ) -> Result { 2,060 ( 0.00%) if !self.intercrate . && obligation.is_global() . && obligation.param_env.caller_bounds().iter().all(|bound| bound.needs_subst()) . { . // If a param env has no global bounds, global obligations do not . // depend on its particular value in order to work, so we can clear . // out the param env and get better caching. . debug!("in global"); 1,744 ( 0.00%) obligation.param_env = obligation.param_env.without_caller_bounds(); . } . . let stack = self.push_stack(previous_stack, &obligation); . let mut fresh_trait_pred = stack.fresh_trait_pred; 2,060 ( 0.00%) let mut param_env = obligation.param_env; . . fresh_trait_pred = fresh_trait_pred.map_bound(|mut pred| { 1,030 ( 0.00%) pred.remap_constness(self.tcx(), &mut param_env); 12,360 ( 0.00%) pred . }); . . debug!(?fresh_trait_pred); . 1,536 ( 0.00%) if let Some(result) = self.check_evaluation_cache(param_env, fresh_trait_pred) { . debug!(?result, "CACHE HIT"); . return Ok(result); . } . . if let Some(result) = stack.cache().get_provisional(fresh_trait_pred) { . debug!(?result, "PROVISIONAL CACHE HIT"); . stack.update_reached_depth(result.reached_depth); .
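. // A provisional hit still propagates the depth its cached result depended
. // on, keeping cycle-dependent results out of the global evaluation cache
. // (see the `reached_depth` handling below).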
return Ok(result.result); -- line 744 ---------------------------------------- -- line 748 ---------------------------------------- . // stack. If so, we don't want to insert the result into the . // main cache (it is cycle dependent) nor the provisional . // cache (which is meant for things that have completed but . // for a "backedge" -- this result *is* the backedge). . if let Some(cycle_result) = self.check_evaluation_cycle(&stack) { . return Ok(cycle_result); . } . 5,796 ( 0.00%) let (result, dep_node) = self.in_task(|this| this.evaluate_stack(&stack)); 644 ( 0.00%) let result = result?; . 3,220 ( 0.00%) if !result.must_apply_modulo_regions() { . stack.cache().on_failure(stack.dfn); . } . 644 ( 0.00%) let reached_depth = stack.reached_depth.get(); 1,288 ( 0.00%) if reached_depth >= stack.depth { . debug!(?result, "CACHE MISS"); 12,236 ( 0.00%) self.insert_evaluation_cache(param_env, fresh_trait_pred, dep_node, result); . . stack.cache().on_completion( 1,288 ( 0.00%) stack.dfn, . |fresh_trait_pred, provisional_result, provisional_dep_node| { . // Create a new `DepNode` that has dependencies on: . // * The `DepNode` for the original evaluation that resulted in a provisional cache . // entry being created . // * The `DepNode` for the *current* evaluation, which resulted in us completing . // provisional cache entries and inserting them into the evaluation cache . // . // This ensures that when a query reads this entry from the evaluation cache, -- line 777 ---------------------------------------- -- line 835 ---------------------------------------- . /// `self.freshener`, we can be sure that (a) this will not . /// affect the inferencer state and (b) that if we see two . /// fresh regions with the same index, they refer to the same . /// unbound type variable. . fn check_evaluation_cycle( . &mut self, . stack: &TraitObligationStack<'_, 'tcx>, . ) -> Option<EvaluationResult> { 244 ( 0.00%) if let Some(cycle_depth) = stack . .iter() . .skip(1) // Skip top-most frame. . .find(|prev| { 886 ( 0.00%) stack.obligation.param_env == prev.obligation.param_env . && stack.fresh_trait_pred == prev.fresh_trait_pred . }) . .map(|stack| stack.depth) . { . debug!("evaluate_stack --> recursive at depth {}", cycle_depth); . . // If we have a stack like `A B C D E A`, where the top of . // the stack is the final `A`, then this will iterate over . // `A, E, D, C, B` -- i.e., all the participants apart . // from the cycle head. We mark them as participating in a . // cycle. This suppresses caching for those nodes. See . // `in_cycle` field for more details. 244 ( 0.00%) stack.update_reached_depth(cycle_depth); . . // Subtle: when checking for a coinductive cycle, we do . // not compare using the "freshened trait refs" (which . // have erased regions) but rather the fully explicit . // trait refs. This is important because it's only a cycle . // if the regions match exactly. 199 ( 0.00%) let cycle = stack.iter().skip(1).take_while(|s| s.depth >= cycle_depth); 122 ( 0.00%) let tcx = self.tcx(); 854 ( 0.00%) let cycle = cycle.map(|stack| stack.obligation.predicate.to_predicate(tcx)); 1,342 ( 0.00%) if self.coinductive_match(cycle) { . debug!("evaluate_stack --> recursive, coinductive"); . Some(EvaluatedToOk) . } else { . debug!("evaluate_stack --> recursive, inductive"); . Some(EvaluatedToRecur) . } . } else { . None . } . } . 5,796 ( 0.00%) fn evaluate_stack<'o>( . &mut self, . stack: &TraitObligationStack<'o, 'tcx>, . ) -> Result<EvaluationResult, OverflowError> { . // In intercrate mode, whenever any of the generics are unbound, .
// there can always be an impl. Even if there are no impls in . // this crate, perhaps the type would be unified with . // something from another crate that does provide an impl. . // -- line 890 ---------------------------------------- -- line 903 ---------------------------------------- . // imagine, this is just where we started. To avoid that, we . // check for unbound variables and return an ambiguous (hence possible) . // match if we've seen this trait before. . // . // This suffices to allow chains like `FnMut` implemented in . // terms of `Fn` etc, but we could probably make this more . // precise still. . let unbound_input_types = 644 ( 0.00%) stack.fresh_trait_pred.skip_binder().trait_ref.substs.types().any(|ty| ty.is_fresh()); . 1,932 ( 0.00%) if stack.obligation.polarity() != ty::ImplPolarity::Negative { . // This check was an imperfect workaround for a bug in the old . // intercrate mode; it should be removed when that goes away. 3,220 ( 0.00%) if unbound_input_types && self.intercrate { . debug!("evaluate_stack --> unbound argument, intercrate --> ambiguous",); . // Heuristics: show the diagnostics when there are no candidates in crate. . if self.intercrate_ambiguity_causes.is_some() { . debug!("evaluate_stack: intercrate_ambiguity_causes is some"); . if let Ok(candidate_set) = self.assemble_candidates(stack) { . if !candidate_set.ambiguous && candidate_set.vec.is_empty() { . let trait_ref = stack.obligation.predicate.skip_binder().trait_ref; . let self_ty = trait_ref.self_ty(); -- line 924 ---------------------------------------- -- line 937 ---------------------------------------- . self.intercrate_ambiguity_causes.as_mut().unwrap().push(cause); . } . } . } . return Ok(EvaluatedToAmbig); . } . } . 1,288 ( 0.00%) if unbound_input_types . && stack.iter().skip(1).any(|prev| { 27 ( 0.00%) stack.obligation.param_env == prev.obligation.param_env . && self.match_fresh_trait_refs( 36 ( 0.00%) stack.fresh_trait_pred, 36 ( 0.00%) prev.fresh_trait_pred, . prev.obligation.param_env, . ) . }) . { . debug!("evaluate_stack --> unbound argument, recursive --> giving up",); . return Ok(EvaluatedToUnknown); . } . 6,216 ( 0.00%) match self.candidate_from_obligation(stack) { 3,030 ( 0.00%) Ok(Some(c)) => self.evaluate_candidate(stack, &c), . Err(SelectionError::Ambiguous(_)) => Ok(EvaluatedToAmbig), . Ok(None) => Ok(EvaluatedToAmbig), . Err(Overflow) => Err(OverflowError::Canonical), . Err(ErrorReporting) => Err(OverflowError::ErrorReporting), . Err(..) => Ok(EvaluatedToErr), . } 6,440 ( 0.00%) } . . /// For defaulted traits, we use a co-inductive strategy to solve, so . /// that recursion is ok. This routine returns `true` if the top of the . /// stack (`cycle[0]`): . /// . /// - is a defaulted trait, . /// - it also appears in the backtrace at some position `X`, . /// - all the predicates at positions `X..` between `X` and the top are -- line 975 ---------------------------------------- -- line 977 ---------------------------------------- . pub fn coinductive_match(&mut self, mut cycle: I) -> bool . where . I: Iterator>, . { . cycle.all(|predicate| self.coinductive_predicate(predicate)) . } . . fn coinductive_predicate(&self, predicate: ty::Predicate<'tcx>) -> bool { 244 ( 0.00%) let result = match predicate.kind().skip_binder() { 1,098 ( 0.00%) ty::PredicateKind::Trait(ref data) => self.tcx().trait_is_auto(data.def_id()), . _ => false, . }; . debug!(?predicate, ?result, "coinductive_predicate"); . result . } . . 
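
The coinductive-cycle rule documented above can be modeled in isolation. Below is a minimal, self-contained Rust sketch of just that rule, not rustc's API: `Goal`, `is_auto`, and `on_cycle` are illustrative stand-ins, with `is_auto` playing the role of `tcx.trait_is_auto(..)` and `on_cycle` the role of `coinductive_match` applied to the participants of a detected cycle (the slice from the cycle head to the top of the stack).

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Eval {
    Ok,    // coinductive cycle: accepted
    Recur, // inductive cycle: rejected
}

#[derive(Clone, PartialEq, Eq, Debug)]
struct Goal {
    name: &'static str,
    is_auto: bool, // stand-in for `tcx.trait_is_auto(def_id)`
}

// When evaluation re-encounters `head`, which is already on the stack,
// accept the cycle only if every participant from the previous
// occurrence of `head` up to the top of the stack is an auto trait.
fn on_cycle(stack: &[Goal], head: &Goal) -> Eval {
    let pos = stack.iter().position(|g| g == head).expect("head must be on the stack");
    if stack[pos..].iter().all(|g| g.is_auto) { Eval::Ok } else { Eval::Recur }
}

fn main() {
    let send = Goal { name: "Foo: Send", is_auto: true };
    let bar = Goal { name: "Bar: Send", is_auto: true };
    assert_eq!(on_cycle(&[send.clone(), bar], &send), Eval::Ok);

    let neq = Goal { name: "Foo: PartialEq", is_auto: false };
    assert_eq!(on_cycle(&[neq.clone(), send.clone()], &neq), Eval::Recur);
    println!("cycle through {} accepted; cycle through {} rejected", send.name, neq.name);
}

This mirrors why `Foo: Send` may legitimately depend on itself through its own fields, while the same shape for a non-auto trait is reported as `EvaluatedToRecur`.
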
/// Further evaluates `candidate` to decide whether all type parameters match and whether nested . /// obligations are met. Returns whether `candidate` remains viable after this further -- line 994 ---------------------------------------- -- line 998 ---------------------------------------- . skip(self, stack), . fields(depth = stack.obligation.recursion_depth) . )] . fn evaluate_candidate<'o>( . &mut self, . stack: &TraitObligationStack<'o, 'tcx>, . candidate: &SelectionCandidate<'tcx>, . ) -> Result<EvaluationResult, OverflowError> { 1,188 ( 0.00%) let mut result = self.evaluation_probe(|this| { . let candidate = (*candidate).clone(); 9,528 ( 0.00%) match this.confirm_candidate(stack.obligation, candidate) { . Ok(selection) => { . debug!(?selection); 2,376 ( 0.00%) this.evaluate_predicates_recursively( . stack.list(), 7,722 ( 0.00%) selection.nested_obligations().into_iter(), . ) . } . Err(..) => Ok(EvaluatedToErr), . } . })?; . . // If we erased any lifetimes, then we want to use . // `EvaluatedToOkModuloRegions` instead of `EvaluatedToOk` -- line 1021 ---------------------------------------- -- line 1037 ---------------------------------------- . &self, . param_env: ty::ParamEnv<'tcx>, . trait_pred: ty::PolyTraitPredicate<'tcx>, . ) -> Option<EvaluationResult> { . // Neither the global nor local cache is aware of intercrate . // mode, so don't do any caching. In particular, we might . // re-use the same `InferCtxt` with both an intercrate . // and non-intercrate `SelectionContext` 2,060 ( 0.00%) if self.intercrate { . return None; . } . . let tcx = self.tcx(); . if self.can_use_global_caches(param_env) { 6,965 ( 0.00%) if let Some(res) = tcx.evaluation_cache.get(&param_env.and(trait_pred), tcx) { . return Some(res); . } . } 3,072 ( 0.00%) self.infcx.evaluation_cache.get(&param_env.and(trait_pred), tcx) . } . 7,728 ( 0.00%) fn insert_evaluation_cache( . &mut self, . param_env: ty::ParamEnv<'tcx>, . trait_pred: ty::PolyTraitPredicate<'tcx>, . dep_node: DepNodeIndex, . result: EvaluationResult, . ) { . // Avoid caching results that depend on more than just the trait-ref . // - the stack can create recursion. 2,576 ( 0.00%) if result.is_stack_dependent() { . return; . } . . // Neither the global nor local cache is aware of intercrate . // mode, so don't do any caching. In particular, we might . // re-use the same `InferCtxt` with both an intercrate . // and non-intercrate `SelectionContext` 1,288 ( 0.00%) if self.intercrate { . return; . } . . if self.can_use_global_caches(param_env) { . if !trait_pred.needs_infer() { . debug!(?trait_pred, ?result, "insert_evaluation_cache global"); . // This may overwrite the cache with the same value . // FIXME: Due to #50507 this overwrites the different values . // This should be changed to use HashMapExt::insert_same . // when that is fixed 611 ( 0.00%) self.tcx().evaluation_cache.insert(param_env.and(trait_pred), dep_node, result); . return; . } . } . . debug!(?trait_pred, ?result, "insert_evaluation_cache"); 132 ( 0.00%) self.infcx.evaluation_cache.insert(param_env.and(trait_pred), dep_node, result); 5,152 ( 0.00%) } . . /// For various reasons, it's possible for a subobligation . /// to have a *lower* recursion_depth than the obligation used to create it. . /// Projection sub-obligations may be returned from the projection cache, . /// which results in obligations with an 'old' `recursion_depth`. . /// Additionally, methods like `InferCtxt.subtype_predicate` produce . /// subobligations without taking in a 'parent' depth, causing the . /// generated subobligations to have a `recursion_depth` of `0`. .
/// . /// To ensure that obligation_depth never decreases, we force all subobligations . /// to have at least the depth of the original obligation. 14 ( 0.00%) fn add_depth>>( . &self, . it: I, . min_depth: usize, . ) { 48 ( 0.00%) it.for_each(|o| o.recursion_depth = cmp::max(min_depth, o.recursion_depth) + 1); 28 ( 0.00%) } . . fn check_recursion_depth>( . &self, . depth: usize, . error_obligation: &Obligation<'tcx, T>, . ) -> Result<(), OverflowError> { 21,664 ( 0.00%) if !self.infcx.tcx.recursion_limit().value_within_limit(depth) { . match self.query_mode { . TraitQueryMode::Standard => { . if self.infcx.is_tainted_by_errors() { . return Err(OverflowError::ErrorReporting); . } . self.infcx.report_overflow_error(error_obligation, true); . } . TraitQueryMode::Canonical => { -- line 1126 ---------------------------------------- -- line 1143 ---------------------------------------- . ) -> Result<(), OverflowError> { . self.check_recursion_depth(obligation.recursion_depth, error_obligation) . } . . fn in_task(&mut self, op: OP) -> (R, DepNodeIndex) . where . OP: FnOnce(&mut Self) -> R, . { 14,261 ( 0.00%) let (result, dep_node) = 11,734 ( 0.00%) self.tcx().dep_graph.with_anon_task(self.tcx(), DepKind::TraitSelect, || op(self)); . self.tcx().dep_graph.read_index(dep_node); 13,164 ( 0.00%) (result, dep_node) . } . . /// filter_impls filters constant trait obligations and candidates that have a positive impl . /// for a negative goal and a negative impl for a positive goal . #[instrument(level = "debug", skip(self))] . fn filter_impls( . &mut self, . candidates: Vec>, . obligation: &TraitObligation<'tcx>, . ) -> Vec> { . let tcx = self.tcx(); . let mut result = Vec::with_capacity(candidates.len()); . 15,500 ( 0.00%) for candidate in candidates { . // Respect const trait obligations 2,790 ( 0.00%) if obligation.is_const() { 18 ( 0.00%) match candidate { . // const impl . ImplCandidate(def_id) . if tcx.impl_constness(def_id) == hir::Constness::Const => {} . // const param . ParamCandidate(trait_pred) . if trait_pred.skip_binder().constness . == ty::BoundConstness::ConstIfConst => {} . // auto trait impl -- line 1179 ---------------------------------------- -- line 1186 ---------------------------------------- . ConstDropCandidate(_) => {} . _ => { . // reject all other types of candidates . continue; . } . } . } . 1,848 ( 0.00%) if let ImplCandidate(def_id) = candidate { 972 ( 0.00%) if ty::ImplPolarity::Reservation == tcx.impl_polarity(def_id) 486 ( 0.00%) || obligation.polarity() == tcx.impl_polarity(def_id) . || self.allow_negative_impls . { . result.push(candidate); . } . } else { . result.push(candidate); . } . } . 4,216 ( 0.00%) result . } . . /// filter_reservation_impls filter reservation impl for any goal as ambiguous 14,637 ( 0.00%) #[instrument(level = "debug", skip(self))] . fn filter_reservation_impls( . &mut self, . candidate: SelectionCandidate<'tcx>, . obligation: &TraitObligation<'tcx>, . ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { . let tcx = self.tcx(); . // Treat reservation impls as ambiguity. 2,556 ( 0.00%) if let ImplCandidate(def_id) = candidate { 834 ( 0.00%) if let ty::ImplPolarity::Reservation = tcx.impl_polarity(def_id) { . if let Some(intercrate_ambiguity_clauses) = &mut self.intercrate_ambiguity_causes { . let attrs = tcx.get_attrs(def_id); . let attr = tcx.sess.find_by_name(&attrs, sym::rustc_reservation_impl); . let value = attr.and_then(|a| a.value_str()); . if let Some(value) = value { . debug!( . "filter_reservation_impls: \ . 
reservation impl ambiguity on {:?}", -- line 1227 ---------------------------------------- -- line 1232 ---------------------------------------- . message: value.to_string(), . }, . ); . } . } . return Ok(None); . } . } 5,166 ( 0.00%) Ok(Some(candidate)) . } . . fn is_knowable<'o>(&mut self, stack: &TraitObligationStack<'o, 'tcx>) -> Option<Conflict> { . debug!("is_knowable(intercrate={:?})", self.intercrate); . 3,291 ( 0.00%) if !self.intercrate || stack.obligation.polarity() == ty::ImplPolarity::Negative { . return None; . } . . let obligation = &stack.obligation; . let predicate = self.infcx().resolve_vars_if_possible(obligation.predicate); . . // Okay to skip binder because of the nature of the . // trait-ref-is-knowable check, which does not care about -- line 1254 ---------------------------------------- -- line 1286 ---------------------------------------- . &mut self, . mut param_env: ty::ParamEnv<'tcx>, . cache_fresh_trait_pred: ty::PolyTraitPredicate<'tcx>, . ) -> Option<SelectionResult<'tcx, SelectionCandidate<'tcx>>> { . // Neither the global nor local cache is aware of intercrate . // mode, so don't do any caching. In particular, we might . // re-use the same `InferCtxt` with both an intercrate . // and non-intercrate `SelectionContext` 8,712 ( 0.00%) if self.intercrate { . return None; . } . let tcx = self.tcx(); . let mut pred = cache_fresh_trait_pred.skip_binder(); 8,712 ( 0.00%) pred.remap_constness(tcx, &mut param_env); . 4,356 ( 0.00%) if self.can_use_global_caches(param_env) { 43,070 ( 0.00%) if let Some(res) = tcx.selection_cache.get(&param_env.and(pred), tcx) { 42,354 ( 0.00%) return Some(res); . } . } 12,029 ( 0.00%) self.infcx.selection_cache.get(&param_env.and(pred), tcx) . } . . /// Determines whether we can safely cache the result . /// of selecting an obligation. This is almost always `true`, . /// except when dealing with certain `ParamCandidate`s. . /// . /// Ordinarily, a `ParamCandidate` will contain no inference variables, . /// since it was usually produced directly from a `DefId`. However, -- line 1314 ---------------------------------------- -- line 1325 ---------------------------------------- . fn can_cache_candidate( . &self, . result: &SelectionResult<'tcx, SelectionCandidate<'tcx>>, . ) -> bool { . // Neither the global nor local cache is aware of intercrate . // mode, so don't do any caching. In particular, we might . // re-use the same `InferCtxt` with both an intercrate . // and non-intercrate `SelectionContext` 2,194 ( 0.00%) if self.intercrate { . return false; . } 3,291 ( 0.00%) match result { . Ok(Some(SelectionCandidate::ParamCandidate(trait_ref))) => !trait_ref.needs_infer(), . _ => true, . } . } . . fn insert_candidate_cache( . &mut self, . mut param_env: ty::ParamEnv<'tcx>, . cache_fresh_trait_pred: ty::PolyTraitPredicate<'tcx>, . dep_node: DepNodeIndex, . candidate: SelectionResult<'tcx, SelectionCandidate<'tcx>>, . ) { . let tcx = self.tcx(); . let mut pred = cache_fresh_trait_pred.skip_binder(); . 2,194 ( 0.00%) pred.remap_constness(tcx, &mut param_env); . . if !self.can_cache_candidate(&candidate) { . debug!(?pred, ?candidate, "insert_candidate_cache - candidate is not cacheable"); . return; . } . 1,095 ( 0.00%) if self.can_use_global_caches(param_env) { 2,466 ( 0.00%) if let Err(Overflow) = candidate { . // Don't cache overflow globally; we only produce this in certain modes. . } else if !pred.needs_infer() { 4,196 ( 0.00%) if !candidate.needs_infer() { . debug!(?pred, ?candidate, "insert_candidate_cache global"); . // This may overwrite the cache with the same value.
28,323 ( 0.00%) tcx.selection_cache.insert(param_env.and(pred), dep_node, candidate); . return; . } . } . } . . debug!(?pred, ?candidate, "insert_candidate_cache local"); 1,058 ( 0.00%) self.infcx.selection_cache.insert(param_env.and(pred), dep_node, candidate); . } . . /// Matches a predicate against the bounds of its self type. . /// . /// Given an obligation like `::Bar: Baz` where the self type is . /// a projection, look at the bounds of `T::Bar`, see if we can find a . /// `Baz` bound. We return indexes into the list returned by . /// `tcx.item_bounds` for any applicable bounds. 18 ( 0.00%) fn match_projection_obligation_against_definition_bounds( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> smallvec::SmallVec<[usize; 2]> { 26 ( 0.00%) let poly_trait_predicate = self.infcx().resolve_vars_if_possible(obligation.predicate); . let placeholder_trait_predicate = 34 ( 0.00%) self.infcx().replace_bound_vars_with_placeholders(poly_trait_predicate); . debug!( . ?placeholder_trait_predicate, . "match_projection_obligation_against_definition_bounds" . ); . 2 ( 0.00%) let tcx = self.infcx.tcx; 12 ( 0.00%) let (def_id, substs) = match *placeholder_trait_predicate.trait_ref.self_ty().kind() { 6 ( 0.00%) ty::Projection(ref data) => (data.item_def_id, data.substs), . ty::Opaque(def_id, substs) => (def_id, substs), . _ => { . span_bug!( . obligation.cause.span, . "match_projection_obligation_against_definition_bounds() called \ . but self-ty is not a projection: {:?}", . placeholder_trait_predicate.trait_ref.self_ty() . ); -- line 1404 ---------------------------------------- -- line 1411 ---------------------------------------- . // unnecessary ambiguity. . let mut distinct_normalized_bounds = FxHashSet::default(); . . let matching_bounds = bounds . .iter() . .enumerate() . .filter_map(|(idx, bound)| { . let bound_predicate = bound.kind(); 39 ( 0.00%) if let ty::PredicateKind::Trait(pred) = bound_predicate.skip_binder() { . let bound = bound_predicate.rebind(pred.trait_ref); 195 ( 0.00%) if self.infcx.probe(|_| { 78 ( 0.00%) match self.match_normalize_trait_ref( . obligation, 52 ( 0.00%) bound, 52 ( 0.00%) placeholder_trait_predicate.trait_ref, . ) { . Ok(None) => true, . Ok(Some(normalized_trait)) . if distinct_normalized_bounds.insert(normalized_trait) => . { . true . } . _ => false, -- line 1433 ---------------------------------------- -- line 1437 ---------------------------------------- . } . } . None . }) . .collect(); . . debug!(?matching_bounds, "match_projection_obligation_against_definition_bounds"); . matching_bounds 18 ( 0.00%) } . . /// Equates the trait in `obligation` with trait bound. If the two traits . /// can be equated and the normalized trait bound doesn't contain inference . /// variables or placeholders, the normalized bound is returned. 182 ( 0.00%) fn match_normalize_trait_ref( . &mut self, . obligation: &TraitObligation<'tcx>, . trait_bound: ty::PolyTraitRef<'tcx>, . placeholder_trait_ref: ty::TraitRef<'tcx>, . ) -> Result>, ()> { . debug_assert!(!placeholder_trait_ref.has_escaping_bound_vars()); 46 ( 0.00%) if placeholder_trait_ref.def_id != trait_bound.def_id() { . // Avoid unnecessary normalization 24 ( 0.00%) return Err(()); . } . 24 ( 0.00%) let Normalized { value: trait_bound, obligations: _ } = ensure_sufficient_stack(|| { . project::normalize_with_depth( . self, 2 ( 0.00%) obligation.param_env, . obligation.cause.clone(), 6 ( 0.00%) obligation.recursion_depth + 1, 8 ( 0.00%) trait_bound, . ) . 
}); 6 ( 0.00%) self.infcx 4 ( 0.00%) .at(&obligation.cause, obligation.param_env) . .sup(ty::Binder::dummy(placeholder_trait_ref), trait_bound) . .map(|InferOk { obligations: _, value: () }| { . // This method is called within a probe, so we can't have . // inference variables and placeholders escape. . if !trait_bound.needs_infer() && !trait_bound.has_placeholders() { 5 ( 0.00%) Some(trait_bound) . } else { . None . } . }) . .map_err(|_| ()) 126 ( 0.00%) } . . fn evaluate_where_clause<'o>( . &mut self, . stack: &TraitObligationStack<'o, 'tcx>, . where_clause_trait_ref: ty::PolyTraitRef<'tcx>, . ) -> Result { . self.evaluation_probe(|this| { 1,397 ( 0.00%) match this.match_where_clause_trait_ref(stack.obligation, where_clause_trait_ref) { 512 ( 0.00%) Ok(obligations) => this.evaluate_predicates_recursively(stack.list(), obligations), . Err(()) => Ok(EvaluatedToErr), . } . }) . } . 36 ( 0.00%) pub(super) fn match_projection_projections( . &mut self, . obligation: &ProjectionTyObligation<'tcx>, . env_predicate: PolyProjectionPredicate<'tcx>, . potentially_unnormalized_candidates: bool, . ) -> bool { . let mut nested_obligations = Vec::new(); 20 ( 0.00%) let (infer_predicate, _) = self.infcx.replace_bound_vars_with_fresh_vars( 12 ( 0.00%) obligation.cause.span, . LateBoundRegionConversionTime::HigherRankedType, . env_predicate, . ); 8 ( 0.00%) let infer_projection = if potentially_unnormalized_candidates { . ensure_sufficient_stack(|| { . project::normalize_with_depth_to( . self, . obligation.param_env, . obligation.cause.clone(), . obligation.recursion_depth + 1, . infer_predicate.projection_ty, . &mut nested_obligations, . ) . }) . } else { 8 ( 0.00%) infer_predicate.projection_ty . }; . 4 ( 0.00%) self.infcx 8 ( 0.00%) .at(&obligation.cause, obligation.param_env) 8 ( 0.00%) .sup(obligation.predicate, infer_projection) 29 ( 0.00%) .map_or(false, |InferOk { obligations, value: () }| { . self.evaluate_predicates_recursively( . TraitObligationStackList::empty(&ProvisionalEvaluationCache::default()), . nested_obligations.into_iter().chain(obligations), . ) 6 ( 0.00%) .map_or(false, |res| res.may_apply()) . }) 36 ( 0.00%) } . . /////////////////////////////////////////////////////////////////////////// . // WINNOW . // . // Winnowing is the process of attempting to resolve ambiguity by . // probing further. During the winnowing process, we unify all . // type variables and then we also attempt to evaluate recursive . // bounds to see if they are satisfied. -- line 1544 ---------------------------------------- -- line 1550 ---------------------------------------- . /// See the comment for "SelectionCandidate" for more details. . fn candidate_should_be_dropped_in_favor_of( . &mut self, . sized_predicate: bool, . victim: &EvaluatedCandidate<'tcx>, . other: &EvaluatedCandidate<'tcx>, . needs_infer: bool, . ) -> bool { 86 ( 0.00%) if victim.candidate == other.candidate { . return true; . } . . // Check if a bound would previously have been removed when normalizing . // the param_env so that it can be given the lowest priority. See . // #50825 for the motivation for this. . let is_global = |cand: &ty::PolyTraitPredicate<'tcx>| { . cand.is_global() && !cand.has_late_bound_regions() . }; . . // (*) Prefer `BuiltinCandidate { has_nested: false }`, `PointeeCandidate`, . // `DiscriminantKindCandidate`, and `ConstDropCandidate` to anything else. . // . // This is a fix for #53123 and prevents winnowing from accidentally extending the . // lifetime of a variable. 
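
Before the long `match` that follows, the shape of the winnowing rule is easier to see in isolation. This is a standalone sketch with illustrative names only (`Candidate`, `priority`, and `drop_in_favor_of` are not rustc's types), and it deliberately collapses the real tie-breaking subtleties (global where-clauses, impl specialization, marker-impl overlap) into a single numeric priority:

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Candidate {
    TrivialBuiltin, // e.g. `BuiltinCandidate { has_nested: false }`: always wins
    Projection,
    Object,
    Param,
    Impl(u32), // stand-in for an impl's DefId
}

fn priority(c: Candidate) -> u8 {
    match c {
        Candidate::TrivialBuiltin => 3,
        Candidate::Projection | Candidate::Object => 2, // beat impl-derived candidates
        Candidate::Param => 1,
        Candidate::Impl(_) => 0,
    }
}

/// Should `victim` be dropped in favor of `other`? Identical candidates
/// dedupe to one, mirroring the `victim.candidate == other.candidate`
/// early-out in the real code.
fn drop_in_favor_of(victim: Candidate, other: Candidate) -> bool {
    victim == other || priority(other) > priority(victim)
}

fn main() {
    assert!(drop_in_favor_of(Candidate::Impl(0), Candidate::TrivialBuiltin));
    assert!(drop_in_favor_of(Candidate::Impl(7), Candidate::Projection));
    assert!(drop_in_favor_of(Candidate::Param, Candidate::Object));
    assert!(!drop_in_favor_of(Candidate::Object, Candidate::Impl(1)));
    println!("winnowing keeps the higher-priority candidate");
}

The real implementation is not a total order like this: as the match below shows, a non-global `ParamCandidate` can beat impls, and two `ImplCandidate`s are compared via specialization rather than a fixed rank.
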
1,548 ( 0.00%) match (&other.candidate, &victim.candidate) { . (_, AutoImplCandidate(..)) | (AutoImplCandidate(..), _) => { . bug!( . "default implementations shouldn't be recorded \ . when there are other valid candidates" . ); . } . . // (*) -- line 1582 ---------------------------------------- -- line 1674 ---------------------------------------- . } . (ObjectCandidate(_), ProjectionCandidate(_)) . | (ProjectionCandidate(_), ObjectCandidate(_)) => { . bug!("Have both object and projection candidate") . } . . // Arbitrarily give projection and object candidates priority. . ( 172 ( 0.00%) ObjectCandidate(_) | ProjectionCandidate(_), . ImplCandidate(..) . | ClosureCandidate . | GeneratorCandidate . | FnPointerCandidate { .. } . | BuiltinObjectCandidate . | BuiltinUnsizeCandidate . | TraitUpcastingUnsizeCandidate(_) . | BuiltinCandidate { .. } -- line 1690 ---------------------------------------- -- line 1696 ---------------------------------------- . | ClosureCandidate . | GeneratorCandidate . | FnPointerCandidate { .. } . | BuiltinObjectCandidate . | BuiltinUnsizeCandidate . | TraitUpcastingUnsizeCandidate(_) . | BuiltinCandidate { .. } . | TraitAliasCandidate(..), 344 ( 0.00%) ObjectCandidate(_) | ProjectionCandidate(_), . ) => false, . 860 ( 0.00%) (&ImplCandidate(other_def), &ImplCandidate(victim_def)) => { . // See if we can toss out `victim` based on specialization. . // This requires us to know *for sure* that the `other` impl applies . // i.e., `EvaluatedToOk`. . // . // FIXME(@lcnr): Using `modulo_regions` here seems kind of scary . // to me but is required for `std` to compile, so I didn't change it . // for now. . let tcx = self.tcx(); 344 ( 0.00%) if other.evaluation.must_apply_modulo_regions() { 664 ( 0.00%) if tcx.specializes((other_def, victim_def)) { . return true; . } . } . 405 ( 0.00%) if other.evaluation.must_apply_considering_regions() { 264 ( 0.00%) match tcx.impls_are_allowed_to_overlap(other_def, victim_def) { . Some(ty::ImplOverlapKind::Permitted { marker: true }) => { . // Subtle: If the predicate we are evaluating has inference . // variables, do *not* allow discarding candidates due to . // marker trait impls. . // . // Without this restriction, we could end up accidentally . // constraining inference variables based on an arbitrarily . // chosen trait impl. -- line 1731 ---------------------------------------- -- line 1790 ---------------------------------------- . | BuiltinUnsizeCandidate . | TraitUpcastingUnsizeCandidate(_) . | BuiltinCandidate { has_nested: true } . | TraitAliasCandidate(..), . ) => false, . } . } . 2,400 ( 0.00%) fn sized_conditions( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> BuiltinImplConditions<'tcx> { . use self::BuiltinImplConditions::{Ambiguous, None, Where}; . . // NOTE: binder moved to (*) 720 ( 0.00%) let self_ty = self.infcx.shallow_resolve(obligation.predicate.skip_binder().self_ty()); . 1,204 ( 0.00%) match self_ty.kind() { . ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) . | ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::RawPtr(..) -- line 1815 ---------------------------------------- -- line 1817 ---------------------------------------- . | ty::Ref(..) . | ty::Generator(..) . | ty::GeneratorWitness(..) . | ty::Array(..) . | ty::Closure(..) . | ty::Never . | ty::Error(_) => { . // safe for everything 66 ( 0.00%) Where(ty::Binder::dummy(Vec::new())) . } . . ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => None, . . ty::Tuple(tys) => Where( .
obligation . .predicate 96 ( 0.00%) .rebind(tys.last().into_iter().map(|k| k.expect_ty()).collect()), . ), . . ty::Adt(def, substs) => { 156 ( 0.00%) let sized_crit = def.sized_constraint(self.tcx()); . // (*) binder moved here 390 ( 0.00%) Where( . obligation.predicate.rebind({ 78 ( 0.00%) sized_crit.iter().map(|ty| ty.subst(self.tcx(), substs)).collect() . }), . ) . } . . ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => None, . ty::Infer(ty::TyVar(_)) => Ambiguous, . . ty::Placeholder(..) . | ty::Bound(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("asked to assemble builtin bounds of unexpected type: {:?}", self_ty); . } . } 1,920 ( 0.00%) } . 1,360 ( 0.00%) fn copy_clone_conditions( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> BuiltinImplConditions<'tcx> { . // NOTE: binder moved to (*) 340 ( 0.00%) let self_ty = self.infcx.shallow_resolve(obligation.predicate.skip_binder().self_ty()); . . use self::BuiltinImplConditions::{Ambiguous, None, Where}; . 886 ( 0.00%) match *self_ty.kind() { . ty::Infer(ty::IntVar(_)) . | ty::Infer(ty::FloatVar(_)) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::Error(_) => Where(ty::Binder::dummy(Vec::new())), . . ty::Uint(_) . | ty::Int(_) -- line 1874 ---------------------------------------- -- line 1886 ---------------------------------------- . ty::Dynamic(..) . | ty::Str . | ty::Slice(..) . | ty::Generator(..) . | ty::GeneratorWitness(..) . | ty::Foreign(..) . | ty::Ref(_, _, hir::Mutability::Mut) => None, . 17 ( 0.00%) ty::Tuple(tys) => { . // (*) binder moved here 100 ( 0.00%) Where(obligation.predicate.rebind(tys.iter().map(|k| k.expect_ty()).collect())) . } . 27 ( 0.00%) ty::Closure(_, substs) => { . // (*) binder moved here 81 ( 0.00%) let ty = self.infcx.shallow_resolve(substs.as_closure().tupled_upvars_ty()); 54 ( 0.00%) if let ty::Infer(ty::TyVar(_)) = ty.kind() { . // Not yet resolved. . Ambiguous . } else { 189 ( 0.00%) Where(obligation.predicate.rebind(substs.as_closure().upvar_tys().collect())) . } . } . . ty::Adt(..) | ty::Projection(..) | ty::Param(..) | ty::Opaque(..) => { . // Fallback to whatever user-defined impls exist in this case. . None . } . -- line 1914 ---------------------------------------- -- line 1920 ---------------------------------------- . } . . ty::Placeholder(..) . | ty::Bound(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("asked to assemble builtin bounds of unexpected type: {:?}", self_ty); . } . } 1,020 ( 0.00%) } . . /// For default impls, we need to break apart a type into its . /// "constituent types" -- meaning, the types that it contains. . /// . /// Here are some (simple) examples: . /// . /// ``` . /// (i32, u32) -> [i32, u32] -- line 1936 ---------------------------------------- -- line 1937 ---------------------------------------- . /// Foo where struct Foo { x: i32, y: u32 } -> [i32, u32] . /// Bar where struct Bar { x: T, y: u32 } -> [i32, u32] . /// Zed where enum Zed { A(T), B(u32) } -> [i32, u32] . /// ``` . fn constituent_types_for_ty( . &self, . t: ty::Binder<'tcx, Ty<'tcx>>, . ) -> ty::Binder<'tcx, Vec>> { 610 ( 0.00%) match *t.skip_binder().kind() { . ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::Str . | ty::Error(_) -- line 1953 ---------------------------------------- -- line 1968 ---------------------------------------- . ty::RawPtr(ty::TypeAndMut { ty: element_ty, .. }) | ty::Ref(_, element_ty, _) => { . t.rebind(vec![element_ty]) . } . . 
ty::Array(element_ty, _) | ty::Slice(element_ty) => t.rebind(vec![element_ty]), . . ty::Tuple(ref tys) => { . // (T1, ..., Tn) -- meets any bound that all of T1...Tn meet 6 ( 0.00%) t.rebind(tys.iter().map(|k| k.expect_ty()).collect()) . } . . ty::Closure(_, ref substs) => { 14 ( 0.00%) let ty = self.infcx.shallow_resolve(substs.as_closure().tupled_upvars_ty()); . t.rebind(vec![ty]) . } . . ty::Generator(_, ref substs, _) => { . let ty = self.infcx.shallow_resolve(substs.as_generator().tupled_upvars_ty()); . let witness = substs.as_generator().witness(); . t.rebind([ty].into_iter().chain(iter::once(witness)).collect()) . } . . ty::GeneratorWitness(types) => { . debug_assert!(!types.has_escaping_bound_vars()); . types.map_bound(|types| types.to_vec()) . } . . // For `PhantomData`, we pass `T`. 181 ( 0.00%) ty::Adt(def, substs) if def.is_phantom_data() => t.rebind(substs.types().collect()), . 83 ( 0.00%) ty::Adt(def, substs) => { 328 ( 0.00%) t.rebind(def.all_fields().map(|f| f.ty(self.tcx(), substs)).collect()) . } . . ty::Opaque(def_id, substs) => { . // We can resolve the `impl Trait` to its concrete type, . // which enforces a DAG between the functions requiring . // the auto trait bounds in question. . t.rebind(vec![self.tcx().type_of(def_id).subst(self.tcx(), substs)]) . } . } . } . 2,040 ( 0.00%) fn collect_predicates_for_types( . &mut self, . param_env: ty::ParamEnv<'tcx>, . cause: ObligationCause<'tcx>, . recursion_depth: usize, . trait_def_id: DefId, . types: ty::Binder<'tcx, Vec>>, . ) -> Vec> { . // Because the types were potentially derived from -- line 2019 ---------------------------------------- -- line 2030 ---------------------------------------- . // 2. Produce something like `&'0 i32 : Copy` . // 3. Re-bind the regions back to `for<'a> &'a i32 : Copy` . . types . .as_ref() . .skip_binder() // binder moved -\ . .iter() . .flat_map(|ty| { 765 ( 0.00%) let ty: ty::Binder<'tcx, Ty<'tcx>> = types.rebind(ty); // <----/ . 3,060 ( 0.00%) self.infcx.commit_unconditionally(|_| { 765 ( 0.00%) let placeholder_ty = self.infcx.replace_bound_vars_with_placeholders(ty); 1,275 ( 0.00%) let Normalized { value: normalized_ty, mut obligations } = 2,550 ( 0.00%) ensure_sufficient_stack(|| { . project::normalize_with_depth( . self, 255 ( 0.00%) param_env, . cause.clone(), 255 ( 0.00%) recursion_depth, 255 ( 0.00%) placeholder_ty, . ) . }); 2,805 ( 0.00%) let placeholder_obligation = predicate_for_trait_def( . self.tcx(), 255 ( 0.00%) param_env, . cause.clone(), 510 ( 0.00%) trait_def_id, . recursion_depth, . normalized_ty, . &[], . ); 1,530 ( 0.00%) obligations.push(placeholder_obligation); . obligations . }) . }) . .collect() 1,020 ( 0.00%) } . . /////////////////////////////////////////////////////////////////////////// . // Matching . // . // Matching is a common path used for both evaluation and . // confirmation. It basically unifies types that appear in impls . // and traits. This does affect the surrounding environment; . // therefore, when used during evaluation, match routines must be -- line 2074 ---------------------------------------- -- line 2075 ---------------------------------------- . // run inside of a `probe()` so that their side-effects are . // contained. . . fn rematch_impl( . &mut self, . impl_def_id: DefId, . obligation: &TraitObligation<'tcx>, . ) -> Normalized<'tcx, SubstsRef<'tcx>> { 4,990 ( 0.00%) match self.match_impl(impl_def_id, obligation) { 3,992 ( 0.00%) Ok(substs) => substs, . Err(()) => { . bug!( . "Impl {:?} was matchable against {:?} but now is not", . 
impl_def_id, . obligation . ); . } . } . } . 57,133 ( 0.00%) #[tracing::instrument(level = "debug", skip(self))] . fn match_impl( . &mut self, . impl_def_id: DefId, . obligation: &TraitObligation<'tcx>, . ) -> Result>, ()> { . let impl_trait_ref = self.tcx().impl_trait_ref(impl_def_id).unwrap(); . . // Before we create the substitutions and everything, first . // consider a "quick reject". This avoids creating more types . // and so forth that we need to. . if self.fast_reject_trait_refs(obligation, &impl_trait_ref) { 504 ( 0.00%) return Err(()); . } . . let placeholder_obligation = 19,285 ( 0.00%) self.infcx().replace_bound_vars_with_placeholders(obligation.predicate); 8,265 ( 0.00%) let placeholder_obligation_trait_ref = placeholder_obligation.trait_ref; . 13,775 ( 0.00%) let impl_substs = self.infcx.fresh_substs_for_item(obligation.cause.span, impl_def_id); . 5,510 ( 0.00%) let impl_trait_ref = impl_trait_ref.subst(self.tcx(), impl_substs); . . debug!(?impl_trait_ref); . 16,530 ( 0.00%) let Normalized { value: impl_trait_ref, obligations: mut nested_obligations } = 19,285 ( 0.00%) ensure_sufficient_stack(|| { . project::normalize_with_depth( . self, 2,755 ( 0.00%) obligation.param_env, . obligation.cause.clone(), 8,265 ( 0.00%) obligation.recursion_depth + 1, 5,510 ( 0.00%) impl_trait_ref, . ) . }); . . debug!(?impl_trait_ref, ?placeholder_obligation_trait_ref); . . let cause = ObligationCause::new( 2,755 ( 0.00%) obligation.cause.span, 5,510 ( 0.00%) obligation.cause.body_id, 24,795 ( 0.00%) ObligationCauseCode::MatchImpl(obligation.cause.clone(), impl_def_id), . ); . 5,510 ( 0.00%) let InferOk { obligations, .. } = self . .infcx 2,755 ( 0.00%) .at(&cause, obligation.param_env) . .eq(placeholder_obligation_trait_ref, impl_trait_ref) . .map_err(|e| debug!("match_impl: failed eq_trait_refs due to `{}`", e))?; . nested_obligations.extend(obligations); . 4,452 ( 0.00%) if !self.intercrate . && self.tcx().impl_polarity(impl_def_id) == ty::ImplPolarity::Reservation . { . debug!("match_impl: reservation impls only apply in intercrate mode"); . return Err(()); . } . . debug!(?impl_substs, ?nested_obligations, "match_impl: success"); 8,904 ( 0.00%) Ok(Normalized { value: impl_substs, obligations: nested_obligations }) . } . . fn fast_reject_trait_refs( . &mut self, . obligation: &TraitObligation<'_>, . impl_trait_ref: &ty::TraitRef<'_>, . ) -> bool { . // We can avoid creating type variables and doing the full . // substitution if we find that any of the input types, when . // simplified, do not match. . 9,021 ( 0.00%) iter::zip(obligation.predicate.skip_binder().trait_ref.substs, impl_trait_ref.substs).any( . |(obligation_arg, impl_arg)| { 31,644 ( 0.00%) match (obligation_arg.unpack(), impl_arg.unpack()) { . (GenericArgKind::Type(obligation_ty), GenericArgKind::Type(impl_ty)) => { . // Note, we simplify parameters for the obligation but not the . // impl so that we do not reject a blanket impl but do reject . // more concrete impls if we're searching for `T: Trait`. 41,856 ( 0.00%) let simplified_obligation_ty = fast_reject::simplify_type( . self.tcx(), . obligation_ty, . SimplifyParams::Yes, . StripReferences::No, . ); 31,392 ( 0.00%) let simplified_impl_ty = fast_reject::simplify_type( . self.tcx(), . impl_ty, . SimplifyParams::No, . StripReferences::No, . ); . 15,696 ( 0.00%) simplified_obligation_ty.is_some() . && simplified_impl_ty.is_some() . && simplified_obligation_ty != simplified_impl_ty . } . (GenericArgKind::Lifetime(_), GenericArgKind::Lifetime(_)) => { . 
// Lifetimes can never cause a rejection. . false . } . (GenericArgKind::Const(_), GenericArgKind::Const(_)) => { -- line 2194 ---------------------------------------- -- line 2201 ---------------------------------------- . } . }, . ) . } . . /// Normalize `where_clause_trait_ref` and try to match it against . /// `obligation`. If successful, return any predicates that . /// result from the normalization. 1,143 ( 0.00%) fn match_where_clause_trait_ref( . &mut self, . obligation: &TraitObligation<'tcx>, . where_clause_trait_ref: ty::PolyTraitRef<'tcx>, . ) -> Result>, ()> { . self.match_poly_trait_ref(obligation, where_clause_trait_ref) 1,016 ( 0.00%) } . . /// Returns `Ok` if `poly_trait_ref` being true implies that the . /// obligation is satisfied. . #[instrument(skip(self), level = "debug")] . fn match_poly_trait_ref( . &mut self, . obligation: &TraitObligation<'tcx>, . poly_trait_ref: ty::PolyTraitRef<'tcx>, . ) -> Result>, ()> { 226 ( 0.00%) self.infcx 127 ( 0.00%) .at(&obligation.cause, obligation.param_env) 678 ( 0.00%) .sup(obligation.predicate.to_poly_trait_ref(), poly_trait_ref) . .map(|InferOk { obligations, .. }| obligations) . .map_err(|_| ()) . } . . /////////////////////////////////////////////////////////////////////////// . // Miscellany . . fn match_fresh_trait_refs( . &self, . previous: ty::PolyTraitPredicate<'tcx>, . current: ty::PolyTraitPredicate<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> bool { 27 ( 0.00%) let mut matcher = ty::_match::Match::new(self.tcx(), param_env); . matcher.relate(previous, current).is_ok() . } . . fn push_stack<'o>( . &mut self, . previous_stack: TraitObligationStackList<'o, 'tcx>, . obligation: &'o TraitObligation<'tcx>, . ) -> TraitObligationStack<'o, 'tcx> { 30,512 ( 0.00%) let fresh_trait_pred = obligation.predicate.fold_with(&mut self.freshener); . . let dfn = previous_stack.cache.next_dfn(); 2,060 ( 0.00%) let depth = previous_stack.depth() + 1; 39,588 ( 0.00%) TraitObligationStack { . obligation, 24,740 ( 0.00%) fresh_trait_pred, . reached_depth: Cell::new(depth), . previous: previous_stack, . dfn, . depth, . } . } . . #[instrument(skip(self), level = "debug")] . fn closure_trait_ref_unnormalized( . &mut self, . obligation: &TraitObligation<'tcx>, . substs: SubstsRef<'tcx>, . ) -> ty::PolyTraitRef<'tcx> { 129 ( 0.00%) let closure_sig = substs.as_closure().sig(); . . debug!(?closure_sig); . . // (1) Feels icky to skip the binder here, but OTOH we know . // that the self-type is an unboxed closure type and hence is . // in fact unparameterized (or at least does not reference any . // regions bound in the obligation). Still probably some . // refactoring could make this nicer. 430 ( 0.00%) closure_trait_ref_and_return_type( . self.tcx(), 258 ( 0.00%) obligation.predicate.def_id(), 43 ( 0.00%) obligation.predicate.skip_binder().self_ty(), // (1) 172 ( 0.00%) closure_sig, . util::TupleArgumentsFlag::No, . ) . .map_bound(|(trait_ref, _)| trait_ref) . } . . fn generator_trait_ref_unnormalized( . &mut self, . obligation: &TraitObligation<'tcx>, -- line 2291 ---------------------------------------- -- line 2307 ---------------------------------------- . ) . .map_bound(|(trait_ref, ..)| trait_ref) . } . . /// Returns the obligations that are implied by instantiating an . /// impl or trait. The obligations are substituted and fully . /// normalized. This is used when confirming an impl or default . /// impl. 25,760 ( 0.00%) #[tracing::instrument(level = "debug", skip(self, cause, param_env))] . fn impl_or_trait_obligations( . &mut self, . 
cause: ObligationCause<'tcx>, . recursion_depth: usize, . param_env: ty::ParamEnv<'tcx>, . def_id: DefId, // of impl or trait . substs: SubstsRef<'tcx>, // for impl or trait . ) -> Vec> { -- line 2323 ---------------------------------------- -- line 2334 ---------------------------------------- . // V: Iterator, V: Sized, . // ::Item: Copy . // When we substitute, say, `V => IntoIter, U => $0`, the last . // obligation will normalize to `<$0 as Iterator>::Item = $1` and . // `$1: Copy`, so we must ensure the obligations are emitted in . // that order. . let predicates = tcx.predicates_of(def_id); . debug!(?predicates); 1,120 ( 0.00%) assert_eq!(predicates.parent, None); 3,360 ( 0.00%) let mut obligations = Vec::with_capacity(predicates.predicates.len()); . for (predicate, _) in predicates.predicates { . debug!(?predicate); 6,315 ( 0.00%) let predicate = normalize_with_depth_to( . self, . param_env, . cause.clone(), . recursion_depth, 1,263 ( 0.00%) predicate.subst(tcx, substs), . &mut obligations, . ); 12,630 ( 0.00%) obligations.push(Obligation { . cause: cause.clone(), . recursion_depth, . param_env, . predicate, . }); . } . . // We are performing deduplication here to avoid exponential blowups -- line 2362 ---------------------------------------- -- line 2363 ---------------------------------------- . // (#38528) from happening, but the real cause of the duplication is . // unknown. What we know is that the deduplication avoids exponential . // amount of predicates being propagated when processing deeply nested . // types. . // . // This code is hot enough that it's worth avoiding the allocation . // required for the FxHashSet when possible. Special-casing lengths 0, . // 1 and 2 covers roughly 75-80% of the cases. 2,112 ( 0.00%) if obligations.len() <= 1 { . // No possibility of duplicates. 431 ( 0.00%) } else if obligations.len() == 2 { . // Only two elements. Drop the second if they are equal. 3 ( 0.00%) if obligations[0] == obligations[1] { 9 ( 0.00%) obligations.truncate(1); . } . } else { . // Three or more elements. Use a general deduplication process. . let mut seen = FxHashSet::default(); 91 ( 0.00%) obligations.retain(|i| seen.insert(i.clone())); . } . 4,480 ( 0.00%) obligations . } . } . . trait TraitObligationExt<'tcx> { . fn derived_cause( . &self, . variant: fn(DerivedObligationCause<'tcx>) -> ObligationCauseCode<'tcx>, . ) -> ObligationCause<'tcx>; -- line 2392 ---------------------------------------- -- line 2408 ---------------------------------------- . */ . . let obligation = self; . . // NOTE(flaper87): As of now, it keeps track of the whole error . // chain. Ideally, we should have a way to configure this either . // by using -Z verbose or just a CLI argument. . let derived_cause = DerivedObligationCause { 6,624 ( 0.00%) parent_trait_pred: obligation.predicate, 2,898 ( 0.00%) parent_code: obligation.cause.clone_code(), . }; . let derived_code = variant(derived_cause); 19,214 ( 0.00%) ObligationCause::new(obligation.cause.span, obligation.cause.body_id, derived_code) . } . } . . impl<'o, 'tcx> TraitObligationStack<'o, 'tcx> { . fn list(&'o self) -> TraitObligationStackList<'o, 'tcx> { . TraitObligationStackList::with(self) . } . . fn cache(&self) -> &'o ProvisionalEvaluationCache<'tcx> { 2,470 ( 0.00%) self.previous.cache . } . . fn iter(&'o self) -> TraitObligationStackList<'o, 'tcx> { . self.list() . } . . /// Indicates that attempting to evaluate this stack entry . /// required accessing something from the stack at depth `reached_depth`. 
244 ( 0.00%) fn update_reached_depth(&self, reached_depth: usize) { 122 ( 0.00%) assert!( 122 ( 0.00%) self.depth >= reached_depth, . "invoked `update_reached_depth` with something under this stack: \ . self.depth={} reached_depth={}", . self.depth, . reached_depth, . ); . debug!(reached_depth, "update_reached_depth"); . let mut p = self; 366 ( 0.00%) while reached_depth < p.depth { . debug!(?p.fresh_trait_pred, "update_reached_depth: marking as cycle participant"); 122 ( 0.00%) p.reached_depth.set(p.reached_depth.get().min(reached_depth)); 122 ( 0.00%) p = p.previous.head.unwrap(); . } 244 ( 0.00%) } . } . . /// The "provisional evaluation cache" is used to store intermediate cache results . /// when solving auto traits. Auto traits are unusual in that they can support . /// cycles. So, for example, a "proof tree" like this would be ok: . /// . /// - `Foo: Send` :- . /// - `Bar: Send` :- -- line 2462 ---------------------------------------- -- line 2540 ---------------------------------------- . /// evaluation. When we create an entry in the evaluation cache using this provisional . /// cache entry (see `on_completion`), we use this `dep_node` to ensure that future reads from . /// the cache will have all of the necessary incr comp dependencies tracked. . dep_node: DepNodeIndex, . } . . impl<'tcx> Default for ProvisionalEvaluationCache<'tcx> { . fn default() -> Self { 5,720 ( 0.00%) Self { dfn: Cell::new(0), map: Default::default() } . } . } . . impl<'tcx> ProvisionalEvaluationCache<'tcx> { . /// Get the next DFN in sequence (basically a counter). . fn next_dfn(&self) -> usize { 1,030 ( 0.00%) let result = self.dfn.get(); 4,120 ( 0.00%) self.dfn.set(result + 1); . result . } . . /// Check the provisional cache for any result for . /// `fresh_trait_ref`. If there is a hit, then you must consider . /// it an access to the stack slots at depth . /// `reached_depth` (from the returned value). . fn get_provisional( -- line 2564 ---------------------------------------- -- line 2663 ---------------------------------------- . fn on_completion( . &self, . dfn: usize, . mut op: impl FnMut(ty::PolyTraitPredicate<'tcx>, EvaluationResult, DepNodeIndex), . ) { . debug!(?dfn, "on_completion"); . . for (fresh_trait_pred, eval) in 4,508 ( 0.00%) self.map.borrow_mut().drain_filter(|_k, eval| eval.from_dfn >= dfn) . { . debug!(?fresh_trait_pred, ?eval, "on_completion"); . . op(fresh_trait_pred, eval.result, eval.dep_node); . } . } . } . -- line 2679 ---------------------------------------- -- line 2692 ---------------------------------------- . TraitObligationStackList { cache: r.cache(), head: Some(r) } . } . . fn head(&self) -> Option<&'o TraitObligationStack<'o, 'tcx>> { . self.head . } . . fn depth(&self) -> usize { 2,597 ( 0.00%) if let Some(head) = self.head { head.depth } else { 0 } . } . } . . impl<'o, 'tcx> Iterator for TraitObligationStackList<'o, 'tcx> { . type Item = &'o TraitObligationStack<'o, 'tcx>; . . fn next(&mut self) -> Option<&'o TraitObligationStack<'o, 'tcx>> { 4,170 ( 0.00%) let o = self.head?; 1,886 ( 0.00%) *self = o.previous; . Some(o) . } . } . . impl<'o, 'tcx> fmt::Debug for TraitObligationStack<'o, 'tcx> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(f, "TraitObligationStack({:?})", self.obligation) . 
} -- line 2717 ---------------------------------------- 76,753 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs -------------------------------------------------------------------------------- Ir -- line 108 ---------------------------------------- . suppress_errors: bool, . }, . } . . impl RegionckMode { . /// Indicates that the MIR borrowck will repeat these region . /// checks, so we should ignore errors if NLL is (unconditionally) . /// enabled. 130 ( 0.00%) pub fn for_item_body(tcx: TyCtxt<'_>) -> Self { . // FIXME(Centril): Once we actually remove `::Migrate` also make . // this always `true` and then proceed to eliminate the dead code. 130 ( 0.00%) match tcx.borrowck_mode() { . // If we're on Migrate mode, report AST region errors . BorrowckMode::Migrate => RegionckMode::Erase { suppress_errors: false }, . . // If we're on MIR, don't report AST region errors as they should be reported by NLL . BorrowckMode::Mir => RegionckMode::Erase { suppress_errors: true }, . } 260 ( 0.00%) } . } . . /// This type contains all the things within `InferCtxt` that sit within a . /// `RefCell` and are involved with taking/rolling back snapshots. Snapshot . /// operations are hot enough that we want only one call to `borrow_mut` per . /// call to `start_snapshot` and `rollback_to`. . pub struct InferCtxtInner<'tcx> { . /// Cache for projections. This cache is snapshotted along with the infcx. -- line 134 ---------------------------------------- -- line 202 ---------------------------------------- . /// type instantiations (`ty::Infer`) to the actual opaque . /// type (`ty::Opaque`). Used during fallback to map unconstrained . /// opaque type inference variables to their corresponding . /// opaque type. . pub opaque_types_vars: FxHashMap, Ty<'tcx>>, . } . . impl<'tcx> InferCtxtInner<'tcx> { 21,025 ( 0.00%) fn new() -> InferCtxtInner<'tcx> { 142,970 ( 0.00%) InferCtxtInner { . projection_cache: Default::default(), . type_variable_storage: type_variable::TypeVariableStorage::new(), . undo_log: InferCtxtUndoLogs::default(), . const_unification_storage: ut::UnificationTableStorage::new(), . int_unification_storage: ut::UnificationTableStorage::new(), . float_unification_storage: ut::UnificationTableStorage::new(), 12,615 ( 0.00%) region_constraint_storage: Some(RegionConstraintStorage::new()), . region_obligations: vec![], . opaque_types: Default::default(), . opaque_types_vars: Default::default(), . } 25,230 ( 0.00%) } . . #[inline] . pub fn region_obligations(&self) -> &[(hir::HirId, RegionObligation<'tcx>)] { . &self.region_obligations . } . . #[inline] . pub fn projection_cache(&mut self) -> traits::ProjectionCache<'_, 'tcx> { 644 ( 0.00%) self.projection_cache.with_log(&mut self.undo_log) . } . . #[inline] . fn type_variables(&mut self) -> type_variable::TypeVariableTable<'_, 'tcx> { 69,124 ( 0.00%) self.type_variable_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn int_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::IntVid, . &mut ut::UnificationStorage, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 1,200,178 ( 0.04%) self.int_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn float_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::FloatVid, -- line 258 ---------------------------------------- -- line 268 ---------------------------------------- . &mut self, . 
) -> ut::UnificationTable< . ut::InPlace< . ty::ConstVid<'tcx>, . &mut ut::UnificationStorage>, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 502 ( 0.00%) self.const_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . pub fn unwrap_region_constraints(&mut self) -> RegionConstraintCollector<'_, 'tcx> { 13,616 ( 0.00%) self.region_constraint_storage . .as_mut() . .expect("region constraints already solved") 15,955 ( 0.00%) .with_log(&mut self.undo_log) . } . } . . pub struct InferCtxt<'a, 'tcx> { . pub tcx: TyCtxt<'tcx>, . . /// The `DefId` of the item in whose context we are performing inference or typeck. . /// It is used to check whether an opaque type use is a defining use. -- line 292 ---------------------------------------- -- line 361 ---------------------------------------- . /// item we are type-checking, and just consider those names as . /// part of the root universe. So this would only get incremented . /// when we enter into a higher-ranked (`for<..>`) type or trait . /// bound. . universe: Cell, . } . . /// See the `error_reporting` module for more details. 23,400 ( 0.00%) #[derive(Clone, Copy, Debug, PartialEq, Eq, TypeFoldable)] . pub enum ValuePairs<'tcx> { . Types(ExpectedFound>), . Regions(ExpectedFound>), . Consts(ExpectedFound<&'tcx ty::Const<'tcx>>), . TraitRefs(ExpectedFound>), . PolyTraitRefs(ExpectedFound>), . } . -- line 377 ---------------------------------------- -- line 383 ---------------------------------------- . pub struct TypeTrace<'tcx> { . cause: ObligationCause<'tcx>, . values: ValuePairs<'tcx>, . } . . /// The origin of a `r1 <= r2` constraint. . /// . /// See `error_reporting` module for more details 20,675 ( 0.00%) #[derive(Clone, Debug)] . pub enum SubregionOrigin<'tcx> { . /// Arose from a subtyping relation 1,244 ( 0.00%) Subtype(Box>), . . /// When casting `&'a T` to an `&'b Trait` object, . /// relating `'a` to `'b` . RelateObjectBound(Span), . . /// Some type parameter was instantiated with the given type, . /// and that type must outlive some region. 94 ( 0.00%) RelateParamBound(Span, Ty<'tcx>, Option), . . /// The given region parameter was instantiated with a region . /// that must outlive some other region. . RelateRegionParamBound(Span), . . /// Creating a pointer `b` to contents of another reference . Reborrow(Span), . . /// Creating a pointer `b` to contents of an upvar . ReborrowUpvar(Span, ty::UpvarId), . . /// Data with type `Ty<'tcx>` was borrowed 111 ( 0.00%) DataBorrowed(Ty<'tcx>, Span), . . /// (&'a &'b T) where a >= b 119 ( 0.00%) ReferenceOutlivesReferent(Ty<'tcx>, Span), . . /// Comparing the signature and requirements of an impl method against . /// the containing trait. . CompareImplMethodObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, . . /// Comparing the signature and requirements of an impl associated type . /// against the containing trait . CompareImplTypeObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, -- line 426 ---------------------------------------- -- line 554 ---------------------------------------- . defining_use_anchor: Option, . } . . pub trait TyCtxtInferExt<'tcx> { . fn infer_ctxt(self) -> InferCtxtBuilder<'tcx>; . } . . impl<'tcx> TyCtxtInferExt<'tcx> for TyCtxt<'tcx> { 4,205 ( 0.00%) fn infer_ctxt(self) -> InferCtxtBuilder<'tcx> { 12,615 ( 0.00%) InferCtxtBuilder { tcx: self, defining_use_anchor: None, fresh_typeck_results: None } 4,205 ( 0.00%) } . } . . impl<'tcx> InferCtxtBuilder<'tcx> { . 
/// Used only by `rustc_typeck` during body type-checking/inference, . /// will initialize `in_progress_typeck_results` with fresh `TypeckResults`. . /// Will also change the scope for opaque type defining use checks to the given owner. 2,835 ( 0.00%) pub fn with_fresh_in_progress_typeck_results(mut self, table_owner: LocalDefId) -> Self { 3,465 ( 0.00%) self.fresh_typeck_results = Some(RefCell::new(ty::TypeckResults::new(table_owner))); 1,575 ( 0.00%) self.with_opaque_type_inference(table_owner) 2,205 ( 0.00%) } . . /// Whenever the `InferCtxt` should be able to handle defining uses of opaque types, . /// you need to call this function. Otherwise the opaque type will be treated opaquely. . /// . /// It is only meant to be called in two places, for typeck . /// (via `with_fresh_in_progress_typeck_results`) and for the inference context used . /// in mir borrowck. 272 ( 0.00%) pub fn with_opaque_type_inference(mut self, defining_use_anchor: LocalDefId) -> Self { 136 ( 0.00%) self.defining_use_anchor = Some(defining_use_anchor); 902 ( 0.00%) self 408 ( 0.00%) } . . /// Given a canonical value `C` as a starting point, create an . /// inference context that contains each of the bound values . /// within instantiated as a fresh variable. The `f` closure is . /// invoked with the new infcx, along with the instantiated value . /// `V` and a substitution `S`. This substitution `S` maps from . /// the bound values in `C` to their instantiated values in `V` . /// (in other words, `S(C) = V`). 5,191 ( 0.00%) pub fn enter_with_canonical( . &mut self, . span: Span, . canonical: &Canonical<'tcx, T>, . f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>, T, CanonicalVarValues<'tcx>) -> R, . ) -> R . where . T: TypeFoldable<'tcx>, . { . self.enter(|infcx| { 6,030 ( 0.00%) let (value, subst) = 326 ( 0.00%) infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical); 9,263 ( 0.00%) f(infcx, value, subst) . }) 5,665 ( 0.00%) } . 27,813 ( 0.00%) pub fn enter(&mut self, f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>) -> R) -> R { 11,505 ( 0.00%) let InferCtxtBuilder { tcx, defining_use_anchor, ref fresh_typeck_results } = *self; . let in_progress_typeck_results = fresh_typeck_results.as_ref(); 181,660 ( 0.01%) f(InferCtxt { . tcx, . defining_use_anchor, . in_progress_typeck_results, 4,205 ( 0.00%) inner: RefCell::new(InferCtxtInner::new()), . lexical_region_resolutions: RefCell::new(None), . selection_cache: Default::default(), . evaluation_cache: Default::default(), . reported_trait_errors: Default::default(), . reported_closure_mismatch: Default::default(), . tainted_by_errors_flag: Cell::new(false), 4,205 ( 0.00%) err_count_on_creation: tcx.sess.err_count(), . in_snapshot: Cell::new(false), . skip_leak_check: Cell::new(false), . universe: Cell::new(ty::UniverseIndex::ROOT), . }) 29,425 ( 0.00%) } . } . . impl<'tcx, T> InferOk<'tcx, T> { . pub fn unit(self) -> InferOk<'tcx, ()> { . InferOk { value: (), obligations: self.obligations } . } . . /// Extracts `value`, registering any obligations into `fulfill_cx`. . pub fn into_value_registering_obligations( . self, . infcx: &InferCtxt<'_, 'tcx>, . fulfill_cx: &mut dyn TraitEngine<'tcx>, . ) -> T { 56 ( 0.00%) let InferOk { value, obligations } = self; 246 ( 0.00%) for obligation in obligations { . fulfill_cx.register_predicate_obligation(infcx, obligation); . } . value . } . } . . impl<'tcx> InferOk<'tcx, ()> { 2,155 ( 0.00%) pub fn into_obligations(self) -> PredicateObligations<'tcx> { 8,620 ( 0.00%) self.obligations 2,155 ( 0.00%) } . } . . 
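
The builder pattern above (`tcx.infer_ctxt().enter(|infcx| ...)`) is worth seeing in miniature: the context borrows state owned by the builder, and because it is only ever handed to a closure, it cannot outlive that state. Here is a small self-contained sketch of the pattern; `CtxtBuilder`, `Ctxt`, and `with_fresh_tables` are illustrative stand-ins, not rustc's API.

use std::cell::RefCell;

struct CtxtBuilder {
    tables: Option<RefCell<Vec<String>>>, // stands in for fresh_typeck_results
}

struct Ctxt<'a> {
    tables: Option<&'a RefCell<Vec<String>>>,
    inner: RefCell<u32>, // stands in for InferCtxtInner
}

impl CtxtBuilder {
    fn new() -> Self {
        CtxtBuilder { tables: None }
    }

    fn with_fresh_tables(mut self) -> Self {
        self.tables = Some(RefCell::new(Vec::new()));
        self
    }

    // The closure receives a context whose lifetime is tied to the
    // borrow of `self`, so the context cannot escape `enter`.
    fn enter<R>(&mut self, f: impl for<'a> FnOnce(Ctxt<'a>) -> R) -> R {
        let cx = Ctxt { tables: self.tables.as_ref(), inner: RefCell::new(0) };
        f(cx)
    }
}

fn main() {
    let n = CtxtBuilder::new().with_fresh_tables().enter(|cx| {
        if let Some(t) = cx.tables {
            t.borrow_mut().push("entry".to_string());
        }
        *cx.inner.borrow_mut() += 1;
        *cx.inner.borrow()
    });
    assert_eq!(n, 1);
    println!("context used and dropped inside enter: {n}");
}

Everything created inside `enter` (in the compiler: inference variables, snapshots, pending obligations) must be extracted by value or resolved before the closure returns, which is exactly the property the selection and canonicalization code above relies on.
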
#[must_use = "once you start a snapshot, you should always consume it"] . pub struct CombinedSnapshot<'a, 'tcx> { . undo_snapshot: Snapshot<'tcx>, . region_constraints_snapshot: RegionSnapshot, . universe: ty::UniverseIndex, . was_in_snapshot: bool, -- line 662 ---------------------------------------- -- line 674 ---------------------------------------- . let canonical = self.canonicalize_query((a, b), &mut OriginalQueryValues::default()); . debug!("canonical consts: {:?}", &canonical.value); . . self.tcx.try_unify_abstract_consts(canonical.value) . } . . pub fn is_in_snapshot(&self) -> bool { . self.in_snapshot.get() 4,042 ( 0.00%) } . 34,848 ( 0.00%) pub fn freshen>(&self, t: T) -> T { 39,204 ( 0.00%) t.fold_with(&mut self.freshener()) 39,204 ( 0.00%) } . . /// Returns the origin of the type variable identified by `vid`, or `None` . /// if this is not a type variable. . /// . /// No attempt is made to resolve `ty`. 1,162 ( 0.00%) pub fn type_var_origin(&'a self, ty: Ty<'tcx>) -> Option { 2,324 ( 0.00%) match *ty.kind() { 69 ( 0.00%) ty::Infer(ty::TyVar(vid)) => { 276 ( 0.00%) Some(*self.inner.borrow_mut().type_variables().var_origin(vid)) . } 512 ( 0.00%) _ => None, . } 2,324 ( 0.00%) } . 4,356 ( 0.00%) pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, false) 4,356 ( 0.00%) } . . /// Like `freshener`, but does not replace `'static` regions. 17,633 ( 0.00%) pub fn freshener_keep_static<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, true) 17,633 ( 0.00%) } . 564 ( 0.00%) pub fn unsolved_variables(&self) -> Vec> { 282 ( 0.00%) let mut inner = self.inner.borrow_mut(); 282 ( 0.00%) let mut vars: Vec> = inner . .type_variables() . .unsolved_variables() . .into_iter() 138 ( 0.00%) .map(|t| self.tcx.mk_ty_var(t)) . .collect(); . vars.extend( . (0..inner.int_unification_table().len()) . .map(|i| ty::IntVid { index: i as u32 }) 2,132 ( 0.00%) .filter(|&vid| inner.int_unification_table().probe_value(vid).is_none()) 512 ( 0.00%) .map(|v| self.tcx.mk_int_var(v)), . ); . vars.extend( . (0..inner.float_unification_table().len()) . .map(|i| ty::FloatVid { index: i as u32 }) . .filter(|&vid| inner.float_unification_table().probe_value(vid).is_none()) . .map(|v| self.tcx.mk_float_var(v)), . ); . vars 987 ( 0.00%) } . 9,323 ( 0.00%) fn combine_fields( . &'a self, . trace: TypeTrace<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> CombineFields<'a, 'tcx> { 37,300 ( 0.00%) CombineFields { . infcx: self, 93,250 ( 0.00%) trace, . cause: None, . param_env, . obligations: PredicateObligations::new(), . } 9,323 ( 0.00%) } . . /// Clear the "currently in a snapshot" flag, invoke the closure, . /// then restore the flag to its original value. This flag is a . /// debugging measure designed to detect cases where we start a . /// snapshot, create type variables, and register obligations . /// which may involve those type variables in the fulfillment cx, . /// potentially leaving "dangling type variables" behind. . /// In such cases, an assertion will fail when attempting to -- line 753 ---------------------------------------- -- line 755 ---------------------------------------- . /// better than grovelling through megabytes of `RUSTC_LOG` output. . /// . /// HOWEVER, in some cases the flag is unhelpful. In particular, we . /// sometimes create a "mini-fulfilment-cx" in which we enroll . /// obligations. As long as this fulfillment cx is fully drained . /// before we return, this is not a problem, as there won't be any . 
/// escaping obligations in the main cx. In those cases, you can . /// use this function. 48 ( 0.00%) pub fn save_and_restore_in_snapshot_flag(&self, func: F) -> R . where . F: FnOnce(&Self) -> R, . { . let flag = self.in_snapshot.replace(false); 1,698 ( 0.00%) let result = func(self); . self.in_snapshot.set(flag); . result 54 ( 0.00%) } . 46,790 ( 0.00%) fn start_snapshot(&self) -> CombinedSnapshot<'a, 'tcx> { . debug!("start_snapshot()"); . . let in_snapshot = self.in_snapshot.replace(true); . . let mut inner = self.inner.borrow_mut(); . 140,370 ( 0.00%) CombinedSnapshot { . undo_snapshot: inner.undo_log.start_snapshot(), . region_constraints_snapshot: inner.unwrap_region_constraints().start_snapshot(), . universe: self.universe(), . was_in_snapshot: in_snapshot, . // Borrow typeck results "in progress" (i.e., during typeck) . // to ban writes from within a snapshot to them. 23,395 ( 0.00%) _in_progress_typeck_results: self . .in_progress_typeck_results . .map(|typeck_results| typeck_results.borrow()), . } 93,580 ( 0.00%) } . 89,386 ( 0.00%) #[instrument(skip(self, snapshot), level = "debug")] . fn rollback_to(&self, cause: &str, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 8,126 ( 0.00%) undo_snapshot, 8,126 ( 0.00%) region_constraints_snapshot, 8,126 ( 0.00%) universe, 8,126 ( 0.00%) was_in_snapshot, 16,252 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . self.universe.set(universe); . . let mut inner = self.inner.borrow_mut(); 8,126 ( 0.00%) inner.rollback_to(undo_snapshot); . inner.unwrap_region_constraints().rollback_to(region_constraints_snapshot); . } . 229,035 ( 0.01%) #[instrument(skip(self, snapshot), level = "debug")] . fn commit_from(&self, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 15,269 ( 0.00%) undo_snapshot, . region_constraints_snapshot: _, . universe: _, 15,269 ( 0.00%) was_in_snapshot, 30,538 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . . self.inner.borrow_mut().commit(undo_snapshot); . } . . /// Executes `f` and commit the bindings. 12,128 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 15,136 ( 0.00%) pub fn commit_unconditionally(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 1,376 ( 0.00%) let snapshot = self.start_snapshot(); 5,649 ( 0.00%) let r = f(&snapshot); 9,632 ( 0.00%) self.commit_from(snapshot); 7,764 ( 0.00%) r . } . . /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`. 100,062 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 123,525 ( 0.00%) pub fn commit_if_ok(&self, f: F) -> Result . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result, . { 19,157 ( 0.00%) let snapshot = self.start_snapshot(); 56,994 ( 0.00%) let r = f(&snapshot); . debug!("commit_if_ok() -- r.is_ok() = {}", r.is_ok()); 21,302 ( 0.00%) match r { . Ok(_) => { 98,411 ( 0.00%) self.commit_from(snapshot); . } . Err(_) => { 33,842 ( 0.00%) self.rollback_to("commit_if_ok -- error", snapshot); . } . } 113,538 ( 0.00%) r . } . . /// Execute `f` then unroll any bindings it creates. 31,816 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 38,250 ( 0.00%) pub fn probe(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 9,218 ( 0.00%) let snapshot = self.start_snapshot(); 15,062 ( 0.00%) let r = f(&snapshot); 42,662 ( 0.00%) self.rollback_to("probe", snapshot); 5,443 ( 0.00%) r . } . . 
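--------------------------------------------------------------------------------
`start_snapshot`, `rollback_to`, `commit_from`, `commit_if_ok`, and `probe`
above all follow one discipline: take a snapshot, run the closure, then either
commit or unroll. A self-contained sketch of that discipline with a trivial
truncate-the-log undo model standing in for `InferCtxtInner` (all names here
are invented, and `&mut self` replaces rustc's interior mutability):

struct Table {
    values: Vec<i32>,
}

struct Snapshot {
    len: usize, // how much of `values` existed when the snapshot was taken
}

impl Table {
    fn start_snapshot(&self) -> Snapshot {
        Snapshot { len: self.values.len() }
    }

    fn rollback_to(&mut self, snapshot: Snapshot) {
        self.values.truncate(snapshot.len); // undo everything since the snapshot
    }

    // Like `commit_if_ok` above: keep the changes on `Ok`, unroll on `Err`.
    fn commit_if_ok<T, E>(
        &mut self,
        f: impl FnOnce(&mut Self) -> Result<T, E>,
    ) -> Result<T, E> {
        let snapshot = self.start_snapshot();
        let r = f(&mut *self);
        if r.is_err() {
            self.rollback_to(snapshot);
        }
        r
    }

    // Like `probe` above: always unroll, keeping only `f`'s return value.
    fn probe<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> R {
        let snapshot = self.start_snapshot();
        let r = f(&mut *self);
        self.rollback_to(snapshot);
        r
    }
}

fn main() {
    let mut t = Table { values: vec![1] };
    let _: Result<(), ()> = t.commit_if_ok(|t| {
        t.values.push(2);
        Ok(())
    });
    let peeked = t.probe(|t| {
        t.values.push(3);
        t.values.len()
    });
    assert_eq!(peeked, 3);
    assert_eq!(t.values, vec![1, 2]); // the probe's write was rolled back
}
--------------------------------------------------------------------------------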
/// If `should_skip` is true, then execute `f` then unroll any bindings it creates. 9 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 12 ( 0.00%) pub fn probe_maybe_skip_leak_check(&self, should_skip: bool, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 2 ( 0.00%) let snapshot = self.start_snapshot(); 1 ( 0.00%) let was_skip_leak_check = self.skip_leak_check.get(); 2 ( 0.00%) if should_skip { . self.skip_leak_check.set(true); . } 4 ( 0.00%) let r = f(&snapshot); 9 ( 0.00%) self.rollback_to("probe", snapshot); . self.skip_leak_check.set(was_skip_leak_check); 12 ( 0.00%) r . } . . /// Scan the constraints produced since `snapshot` began and returns: . /// . /// - `None` -- if none of them involve "region outlives" constraints . /// - `Some(true)` -- if there are `'a: 'b` constraints where `'a` or `'b` is a placeholder . /// - `Some(false)` -- if there are `'a: 'b` constraints but none involve placeholders 2,440 ( 0.00%) pub fn region_constraints_added_in_snapshot( . &self, . snapshot: &CombinedSnapshot<'a, 'tcx>, . ) -> Option { 4,880 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() . .region_constraints_added_in_snapshot(&snapshot.undo_snapshot) 3,660 ( 0.00%) } . 2 ( 0.00%) pub fn add_given(&self, sub: ty::Region<'tcx>, sup: ty::RegionVid) { 4 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().add_given(sub, sup); 3 ( 0.00%) } . 300 ( 0.00%) pub fn can_sub(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).sub(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 225 ( 0.00%) } . 1,104 ( 0.00%) pub fn can_eq(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).eq(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 828 ( 0.00%) } . 6,950 ( 0.00%) #[instrument(skip(self), level = "debug")] . pub fn sub_regions( . &self, . origin: SubregionOrigin<'tcx>, . a: ty::Region<'tcx>, . b: ty::Region<'tcx>, . ) { 6,255 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().make_subregion(origin, a, b); . } . . /// Require that the region `r` be equal to one of the regions in . /// the set `regions`. . #[instrument(skip(self), level = "debug")] . pub fn member_constraint( . &self, . opaque_type_def_id: DefId, -- line 947 ---------------------------------------- -- line 983 ---------------------------------------- . let subtype_predicate = predicate.map_bound(|p| ty::SubtypePredicate { . a_is_expected: false, // when coercing from `a` to `b`, `b` is expected . a: p.a, . b: p.b, . }); . self.subtype_predicate(cause, param_env, subtype_predicate) . } . 84 ( 0.00%) pub fn subtype_predicate( . &self, . cause: &ObligationCause<'tcx>, . param_env: ty::ParamEnv<'tcx>, . predicate: ty::PolySubtypePredicate<'tcx>, . ) -> Option> { . // Check for two unresolved inference variables, in which case we can . // make no progress. This is partly a micro-optimization, but it's . // also an opportunity to "sub-unify" the variables. This isn't -- line 999 ---------------------------------------- -- line 1002 ---------------------------------------- . // earlier that they are sub-unified). . // . 
// Note that we can just skip the binders here because . // type variables can't (at present, at . // least) capture any of the things bound by this binder. . // . // Note that this sub here is not just for diagnostics - it has semantic . // effects as well. 7 ( 0.00%) let r_a = self.shallow_resolve(predicate.skip_binder().a); 7 ( 0.00%) let r_b = self.shallow_resolve(predicate.skip_binder().b); 52 ( 0.00%) match (r_a.kind(), r_b.kind()) { 12 ( 0.00%) (&ty::Infer(ty::TyVar(a_vid)), &ty::Infer(ty::TyVar(b_vid))) => { . self.inner.borrow_mut().type_variables().sub(a_vid, b_vid); 12 ( 0.00%) return None; . } . _ => {} . } . . Some(self.commit_if_ok(|_snapshot| { 1 ( 0.00%) let ty::SubtypePredicate { a_is_expected, a, b } = . self.replace_bound_vars_with_placeholders(predicate); . 2 ( 0.00%) let ok = self.at(cause, param_env).sub_exp(a_is_expected, a, b)?; . . Ok(ok.unit()) . })) 63 ( 0.00%) } . 1,104 ( 0.00%) pub fn region_outlives_predicate( . &self, . cause: &traits::ObligationCause<'tcx>, . predicate: ty::PolyRegionOutlivesPredicate<'tcx>, . ) -> UnitResult<'tcx> { . self.commit_if_ok(|_snapshot| { . let ty::OutlivesPredicate(r_a, r_b) = . self.replace_bound_vars_with_placeholders(predicate); . let origin = SubregionOrigin::from_obligation_cause(cause, || { . RelateRegionParamBound(cause.span) . }); 920 ( 0.00%) self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b` . Ok(()) . }) 736 ( 0.00%) } . . /// Number of type variables created so far. 11 ( 0.00%) pub fn num_ty_vars(&self) -> usize { . self.inner.borrow_mut().type_variables().num_vars() 22 ( 0.00%) } . 4,368 ( 0.00%) pub fn next_ty_var_id(&self, origin: TypeVariableOrigin) -> TyVid { 21,840 ( 0.00%) self.inner.borrow_mut().type_variables().new_var(self.universe(), origin) 6,552 ( 0.00%) } . 4,184 ( 0.00%) pub fn next_ty_var(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { 15,196 ( 0.00%) self.tcx.mk_ty_var(self.next_ty_var_id(origin)) 6,276 ( 0.00%) } . 108 ( 0.00%) pub fn next_ty_var_in_universe( . &self, . origin: TypeVariableOrigin, . universe: ty::UniverseIndex, . ) -> Ty<'tcx> { 594 ( 0.00%) let vid = self.inner.borrow_mut().type_variables().new_var(universe, origin); 54 ( 0.00%) self.tcx.mk_ty_var(vid) 162 ( 0.00%) } . . pub fn next_const_var( . &self, . ty: Ty<'tcx>, . origin: ConstVariableOrigin, . ) -> &'tcx ty::Const<'tcx> { . self.tcx.mk_const_var(self.next_const_var_id(origin), ty) . } -- line 1074 ---------------------------------------- -- line 1090 ---------------------------------------- . pub fn next_const_var_id(&self, origin: ConstVariableOrigin) -> ConstVid<'tcx> { . self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { . origin, . val: ConstVariableValue::Unknown { universe: self.universe() }, . }) . } . . fn next_int_var_id(&self) -> IntVid { 2,675 ( 0.00%) self.inner.borrow_mut().int_unification_table().new_key(None) . } . 1,605 ( 0.00%) pub fn next_int_var(&self) -> Ty<'tcx> { . self.tcx.mk_int_var(self.next_int_var_id()) 2,140 ( 0.00%) } . . fn next_float_var_id(&self) -> FloatVid { . self.inner.borrow_mut().float_unification_table().new_key(None) . } . . pub fn next_float_var(&self) -> Ty<'tcx> { . self.tcx.mk_float_var(self.next_float_var_id()) . } . . /// Creates a fresh region variable with the next available index. . /// The variable will be created in the maximum universe created . /// thus far, allowing it to name any region created thus far. 
1,764 ( 0.00%) pub fn next_region_var(&self, origin: RegionVariableOrigin) -> ty::Region<'tcx> {
35,068 ( 0.00%) self.next_region_var_in_universe(origin, self.universe())
3,528 ( 0.00%) }
.
. /// Creates a fresh region variable with the next available index
. /// in the given universe; typically, you can use
. /// `next_region_var` and just use the maximal universe.
13,726 ( 0.00%) pub fn next_region_var_in_universe(
. &self,
. origin: RegionVariableOrigin,
. universe: ty::UniverseIndex,
. ) -> ty::Region<'tcx> {
. let region_var =
89,219 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().new_region_var(universe, origin);
34,315 ( 0.00%) self.tcx.mk_region(ty::ReVar(region_var))
20,589 ( 0.00%) }
.
. /// Return the universe that the region `r` was created in. For
. /// most regions (e.g., `'static`, named regions from the user,
. /// etc) this is the root universe U0. For inference variables or
. /// placeholders, however, it will return the universe with which
. /// they are associated.
874 ( 0.00%) pub fn universe_of_region(&self, r: ty::Region<'tcx>) -> ty::UniverseIndex {
. self.inner.borrow_mut().unwrap_region_constraints().universe(r)
1,311 ( 0.00%) }
.
. /// Number of region variables created so far.
1,100 ( 0.00%) pub fn num_region_vars(&self) -> usize {
. self.inner.borrow_mut().unwrap_region_constraints().num_region_vars()
1,650 ( 0.00%) }
.
. /// Just a convenient wrapper of `next_region_var` for use during NLL.
2,527 ( 0.00%) pub fn next_nll_region_var(&self, origin: NllRegionVariableOrigin) -> ty::Region<'tcx> {
. self.next_region_var(RegionVariableOrigin::Nll(origin))
5,054 ( 0.00%) }
.
. /// Just a convenient wrapper of `next_region_var` for use during NLL.
42 ( 0.00%) pub fn next_nll_region_var_in_universe(
. &self,
. origin: NllRegionVariableOrigin,
. universe: ty::UniverseIndex,
. ) -> ty::Region<'tcx> {
302 ( 0.00%) self.next_region_var_in_universe(RegionVariableOrigin::Nll(origin), universe)
84 ( 0.00%) }
.
39,472 ( 0.00%) pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> GenericArg<'tcx> {
21,816 ( 0.00%) match param.kind {
. GenericParamDefKind::Lifetime => {
. // Create a region inference variable for the given
. // region parameter definition.
1,427 ( 0.00%) self.next_region_var(EarlyBoundRegion(span, param.name)).into()
. }
. GenericParamDefKind::Type { .. } => {
. // Create a type inference variable for the given
. // type parameter definition. The substitutions are
. // for actual parameters that may be referred to by
. // the default of this type parameter, if it exists.
. // e.g., `struct Foo<A, B, C = (A, B)>(...);` when
. // used in a path such as `Foo::<T, U>::new()` will
. // use an inference variable for `C` with `[T, U]`
. // as the substitutions for the default, `(T, U)`.
13,856 ( 0.00%) let ty_var_id = self.inner.borrow_mut().type_variables().new_var(
. self.universe(),
17,320 ( 0.00%) TypeVariableOrigin {
. kind: TypeVariableOriginKind::TypeParameterDefinition(
3,464 ( 0.00%) param.name,
3,464 ( 0.00%) Some(param.def_id),
. ),
. span,
. },
. );
.
3,464 ( 0.00%) self.tcx.mk_ty_var(ty_var_id).into()
. }
. GenericParamDefKind::Const { .. } => {
. let origin = ConstVariableOrigin {
. kind: ConstVariableOriginKind::ConstParameterDefinition(
. param.name,
. param.def_id,
. ),
. span,
. };
. let const_var_id =
473 ( 0.00%) self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue {
. origin,
. val: ConstVariableValue::Unknown { universe: self.universe() },
. });
43 ( 0.00%) self.tcx.mk_const_var(const_var_id, self.tcx.type_of(param.def_id)).into()
. }
. }
344 ( 0.00%) }
.
. /// Given a set of generics defined on a type or impl, returns a substitution mapping each
. /// type/region parameter to a fresh inference variable.
8,847 ( 0.00%) pub fn fresh_substs_for_item(&self, span: Span, def_id: DefId) -> SubstsRef<'tcx> {
41,369 ( 0.00%) InternalSubsts::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param))
5,898 ( 0.00%) }
.
. /// Returns `true` if errors have been reported since this infcx was
. /// created. This is sometimes used as a heuristic to skip
. /// reporting errors that often occur as a result of earlier
. /// errors, but where it's hard to be 100% sure (e.g., unresolved
. /// inference variables, regionck errors).
1,422 ( 0.00%) pub fn is_tainted_by_errors(&self) -> bool {
. debug!(
. "is_tainted_by_errors(err_count={}, err_count_on_creation={}, \
. tainted_by_errors_flag={})",
. self.tcx.sess.err_count(),
. self.err_count_on_creation,
. self.tainted_by_errors_flag.get()
. );
.
15,459 ( 0.00%) if self.tcx.sess.err_count() > self.err_count_on_creation {
. return true; // errors reported since this infcx was made
. }
. self.tainted_by_errors_flag.get()
2,133 ( 0.00%) }
.
. /// Set the "tainted by errors" flag to true. We call this when we
. /// observe an error from a prior pass.
. pub fn set_tainted_by_errors(&self) {
. debug!("set_tainted_by_errors()");
. self.tainted_by_errors_flag.set(true)
. }
.
. /// Process the region constraints and return any errors that
. /// result. After this, no more unification operations should be
. /// done -- or the compiler will panic -- but it is legal to use
. /// `resolve_vars_if_possible` as well as `fully_resolve`.
12,229 ( 0.00%) pub fn resolve_regions(
. &self,
. region_context: DefId,
. outlives_env: &OutlivesEnvironment<'tcx>,
. mode: RegionckMode,
. ) -> Vec<RegionResolutionError<'tcx>> {
33,193 ( 0.00%) let (var_infos, data) = {
. let mut inner = self.inner.borrow_mut();
. let inner = &mut *inner;
1,747 ( 0.00%) assert!(
5,241 ( 0.00%) self.is_tainted_by_errors() || inner.region_obligations.is_empty(),
. "region_obligations not empty: {:#?}",
. inner.region_obligations
. );
. inner
. .region_constraint_storage
. .take()
. .expect("regions already resolved")
. .with_log(&mut inner.undo_log)
. .into_infos_and_data()
1,747 ( 0.00%) };
.
. let region_rels =
1,747 ( 0.00%) &RegionRelations::new(self.tcx, region_context, outlives_env.free_region_map());
.
15,723 ( 0.00%) let (lexical_region_resolutions, errors) =
41,928 ( 0.00%) lexical_region_resolve::resolve(region_rels, var_infos, data, mode);
.
6,988 ( 0.00%) let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions));
1,747 ( 0.00%) assert!(old_value.is_none());
.
. errors
15,723 ( 0.00%) }
.
. /// Process the region constraints and report any errors that
. /// result. After this, no more unification operations should be
. /// done -- or the compiler will panic -- but it is legal to use
. /// `resolve_vars_if_possible` as well as `fully_resolve`.
17,470 ( 0.00%) pub fn resolve_regions_and_report_errors(
. &self,
. region_context: DefId,
. outlives_env: &OutlivesEnvironment<'tcx>,
. mode: RegionckMode,
. ) {
3,494 ( 0.00%) let errors = self.resolve_regions(region_context, outlives_env, mode);
.
5,241 ( 0.00%) if !self.is_tainted_by_errors() {
. // As a heuristic, just skip reporting region errors
. // altogether if other errors have been reported while
. // this infcx was in use. This is totally hokey but
.
// otherwise we have a hard time separating legit region . // errors from silly ones. 3,494 ( 0.00%) self.report_region_errors(&errors); . } 8,735 ( 0.00%) } . . /// Obtains (and clears) the current set of region . /// constraints. The inference context is still usable: further . /// unifications will simply add new constraints. . /// . /// This method is not meant to be used with normal lexical region . /// resolution. Rather, it is used in the NLL mode as a kind of . /// interim hack: basically we run normal type-check and generate -- line 1307 ---------------------------------------- -- line 1319 ---------------------------------------- . } . . /// Gives temporary access to the region constraint data. . pub fn with_region_constraints( . &self, . op: impl FnOnce(&RegionConstraintData<'tcx>) -> R, . ) -> R { . let mut inner = self.inner.borrow_mut(); 1,604 ( 0.00%) op(inner.unwrap_region_constraints().data()) . } . . pub fn region_var_origin(&self, vid: ty::RegionVid) -> RegionVariableOrigin { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; . inner . .region_constraint_storage . .as_mut() -- line 1335 ---------------------------------------- -- line 1338 ---------------------------------------- . .var_origin(vid) . } . . /// Takes ownership of the list of variable regions. This implies . /// that all the region constraints have already been taken, and . /// hence that `resolve_regions_and_report_errors` can never be . /// called. This is used only during NLL processing to "hand off" ownership . /// of the set of region variables into the NLL region context. 680 ( 0.00%) pub fn take_region_var_origins(&self) -> VarInfos { . let mut inner = self.inner.borrow_mut(); 2,584 ( 0.00%) let (var_infos, data) = inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) 136 ( 0.00%) .into_infos_and_data(); 136 ( 0.00%) assert!(data.is_empty()); . var_infos 1,088 ( 0.00%) } . . pub fn ty_to_string(&self, t: Ty<'tcx>) -> String { . self.resolve_vars_if_possible(t).to_string() . } . . /// If `TyVar(vid)` resolves to a type, return that type. Else, return the . /// universe index of `TyVar(vid)`. 1,306 ( 0.00%) pub fn probe_ty_var(&self, vid: TyVid) -> Result, ty::UniverseIndex> { . use self::type_variable::TypeVariableValue; . 3,918 ( 0.00%) match self.inner.borrow_mut().type_variables().probe(vid) { . TypeVariableValue::Known { value } => Ok(value), . TypeVariableValue::Unknown { universe } => Err(universe), . } 4,571 ( 0.00%) } . . /// Resolve any type variables found in `value` -- but only one . /// level. So, if the variable `?X` is bound to some type . /// `Foo`, then this would return `Foo` (but `?Y` may . /// itself be bound to a type). . /// . /// Useful when you only need to inspect the outermost level of . /// the type and don't care about nested types (or perhaps you . /// will be resolving them as well, e.g. in a loop). . pub fn shallow_resolve(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 37,400 ( 0.00%) value.fold_with(&mut ShallowResolver { infcx: self }) . } . 1,528 ( 0.00%) pub fn root_var(&self, var: ty::TyVid) -> ty::TyVid { . self.inner.borrow_mut().type_variables().root_var(var) 2,292 ( 0.00%) } . . /// Where possible, replaces type/const variables in . /// `value` with their final value. Note that region variables . /// are unaffected. If a type/const variable has not been unified, it . /// is left as is. This is an idempotent operation that does . 
/// not affect inference state in any way and so you can do it . /// at will. 240 ( 0.00%) pub fn resolve_vars_if_possible(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 37,196 ( 0.00%) if !value.needs_infer() { 28,825 ( 0.00%) return value; // Avoid duplicated subst-folding. . } 40,028 ( 0.00%) let mut r = resolve::OpportunisticVarResolver::new(self); 34,142 ( 0.00%) value.fold_with(&mut r) 293 ( 0.00%) } . . /// Returns the first unresolved variable contained in `T`. In the . /// process of visiting `T`, this will resolve (where possible) . /// type variables in `T`, but it never constructs the final, . /// resolved type, so it's more efficient than . /// `resolve_vars_if_possible()`. . pub fn unresolved_type_vars(&self, value: &T) -> Option<(Ty<'tcx>, Option)> . where -- line 1415 ---------------------------------------- -- line 1490 ---------------------------------------- . expected: &'tcx ty::Const<'tcx>, . actual: &'tcx ty::Const<'tcx>, . err: TypeError<'tcx>, . ) -> DiagnosticBuilder<'tcx> { . let trace = TypeTrace::consts(cause, true, expected, actual); . self.report_and_explain_type_error(trace, &err) . } . 4,356 ( 0.00%) pub fn replace_bound_vars_with_fresh_vars( . &self, . span: Span, . lbrct: LateBoundRegionConversionTime, . value: ty::Binder<'tcx, T>, . ) -> (T, BTreeMap>) . where . T: TypeFoldable<'tcx>, . { . let fld_r = 21,192 ( 0.00%) |br: ty::BoundRegion| self.next_region_var(LateBoundRegion(span, br.kind, lbrct)); . let fld_t = |_| { . self.next_ty_var(TypeVariableOrigin { . kind: TypeVariableOriginKind::MiscVariable, . span, . }) . }; . let fld_c = |_, ty| { . self.next_const_var( . ty, . ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span }, . ) . }; 16,184 ( 0.00%) self.tcx.replace_bound_vars(value, fld_r, fld_t, fld_c) 2,904 ( 0.00%) } . . /// See the [`region_constraints::RegionConstraintCollector::verify_generic_bound`] method. 88 ( 0.00%) pub fn verify_generic_bound( . &self, . origin: SubregionOrigin<'tcx>, . kind: GenericKind<'tcx>, . a: ty::Region<'tcx>, . bound: VerifyBound<'tcx>, . ) { . debug!("verify_generic_bound({:?}, {:?} <: {:?})", kind, a, bound); . 33 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() 187 ( 0.00%) .verify_generic_bound(origin, kind, a, bound); 77 ( 0.00%) } . . /// Obtains the latest type of the given closure; this may be a . /// closure in the current function, in which case its . /// `ClosureKind` may not yet be known. 162 ( 0.00%) pub fn closure_kind(&self, closure_substs: SubstsRef<'tcx>) -> Option { 324 ( 0.00%) let closure_kind_ty = closure_substs.as_closure().kind_ty(); . let closure_kind_ty = self.shallow_resolve(closure_kind_ty); 243 ( 0.00%) closure_kind_ty.to_opt_closure_kind() . } . . /// Clears the selection, evaluation, and projection caches. This is useful when . /// repeatedly attempting to select an `Obligation` while changing only . /// its `ParamEnv`, since `FulfillmentContext` doesn't use probing. . pub fn clear_caches(&self) { . self.selection_cache.clear(); . self.evaluation_cache.clear(); . self.inner.borrow_mut().projection_cache().clear(); . } . . pub fn universe(&self) -> ty::UniverseIndex { 62,711 ( 0.00%) self.universe.get() 20,748 ( 0.00%) } . . /// Creates and return a fresh universe that extends all previous . /// universes. Updates `self.universe` to that new universe. 42 ( 0.00%) pub fn create_next_universe(&self) -> ty::UniverseIndex { 66 ( 0.00%) let u = self.universe.get().next_universe(); . self.universe.set(u); . u 42 ( 0.00%) } . . 
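--------------------------------------------------------------------------------
`shallow_resolve` and `resolve_vars_if_possible` above differ only in how deep
they chase variable bindings: one level versus a full structural rewrite that
leaves unresolved variables in place (which is what makes it idempotent). A
toy model over a plain map (an assumed stand-in; rustc uses unification tables
and also tracks int/float variables separately):

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Var(u32),     // an unresolved inference variable `?N`
    Ref(Box<Ty>), // one structured type, standing in for all of them
    Int,
}

struct Infcx {
    bindings: HashMap<u32, Ty>, // toy stand-in for the unification table
}

impl Infcx {
    // One level only, like `shallow_resolve` above: follow a variable to its
    // value (including chains of variables), but never look inside structure.
    fn shallow_resolve(&self, t: &Ty) -> Ty {
        match t {
            Ty::Var(v) => match self.bindings.get(v) {
                Some(bound) => self.shallow_resolve(bound),
                None => t.clone(),
            },
            _ => t.clone(),
        }
    }

    // Like `resolve_vars_if_possible` above: also rewrite nested variables;
    // anything still unbound is left as-is.
    fn resolve_if_possible(&self, t: &Ty) -> Ty {
        match self.shallow_resolve(t) {
            Ty::Ref(inner) => Ty::Ref(Box::new(self.resolve_if_possible(&inner))),
            other => other,
        }
    }
}

fn main() {
    let mut infcx = Infcx { bindings: HashMap::new() };
    infcx.bindings.insert(0, Ty::Ref(Box::new(Ty::Var(1)))); // ?0 = &?1
    infcx.bindings.insert(1, Ty::Int);                       // ?1 = int
    assert_eq!(infcx.shallow_resolve(&Ty::Var(0)), Ty::Ref(Box::new(Ty::Var(1))));
    assert_eq!(infcx.resolve_if_possible(&Ty::Var(0)), Ty::Ref(Box::new(Ty::Int)));
}
--------------------------------------------------------------------------------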
/// Resolves and evaluates a constant. . /// . /// The constant can be located on a trait like `::C`, in which case the given . /// substitutions and environment are used to resolve the constant. Alternatively if the . /// constant has generic parameters in scope the substitutions are used to evaluate the value of . /// the constant. For example in `fn foo() { let _ = [0; bar::()]; }` the repeat count . /// constant `bar::()` requires a substitution for `T`, if the substitution for `T` is still -- line 1576 ---------------------------------------- -- line 1606 ---------------------------------------- . // variables, thus we don't need to substitute back the original values. . self.tcx.const_eval_resolve(param_env_erased, unevaluated, span) . } . . /// If `typ` is a type variable of some kind, resolve it one level . /// (but do not resolve types found in the result). If `typ` is . /// not a type variable, just return it unmodified. . // FIXME(eddyb) inline into `ShallowResolver::visit_ty`. 438,768 ( 0.01%) fn shallow_resolve_ty(&self, typ: Ty<'tcx>) -> Ty<'tcx> { 260,534 ( 0.01%) match *typ.kind() { . ty::Infer(ty::TyVar(v)) => { . // Not entirely obvious: if `typ` is a type variable, . // it can be resolved to an int/float variable, which . // can then be recursively resolved, hence the . // recursion. Note though that we prevent type . // variables from unifying to other type variables . // directly (though they may be embedded . // structurally), and we prevent cycles in any case, . // so this recursion should always be of very limited . // depth. . // . // Note: if these two lines are combined into one we get . // dynamic borrow errors on `self.inner`. 102,676 ( 0.00%) let known = self.inner.borrow_mut().type_variables().probe(v).known(); . known.map_or(typ, |t| self.shallow_resolve_ty(t)) . } . 42,208 ( 0.00%) ty::Infer(ty::IntVar(v)) => self . .inner . .borrow_mut() . .int_unification_table() . .probe_value(v) 9,678 ( 0.00%) .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . ty::Infer(ty::FloatVar(v)) => self . .inner . .borrow_mut() . .float_unification_table() . .probe_value(v) . .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . _ => typ, . } 493,614 ( 0.02%) } . . /// `ty_or_const_infer_var_changed` is equivalent to one of these two: . /// * `shallow_resolve(ty) != ty` (where `ty.kind = ty::Infer(_)`) . /// * `shallow_resolve(ct) != ct` (where `ct.kind = ty::ConstKind::Infer(_)`) . /// . /// However, `ty_or_const_infer_var_changed` is more efficient. It's always . /// inlined, despite being large, because it has only two call sites that . /// are extremely hot (both in `traits::fulfill`'s checking of `stalled_on` -- line 1659 ---------------------------------------- -- line 1662 ---------------------------------------- . #[inline(always)] . pub fn ty_or_const_infer_var_changed(&self, infer_var: TyOrConstInferVar<'tcx>) -> bool { . match infer_var { . TyOrConstInferVar::Ty(v) => { . use self::type_variable::TypeVariableValue; . . // If `inlined_probe` returns a `Known` value, it never equals . // `ty::Infer(ty::TyVar(v))`. 51,370 ( 0.00%) match self.inner.borrow_mut().type_variables().inlined_probe(v) { . TypeVariableValue::Unknown { .. } => false, . TypeVariableValue::Known { .. } => true, . } . } . . TyOrConstInferVar::TyInt(v) => { . // If `inlined_probe_value` returns a value it's always a . // `ty::Int(_)` or `ty::UInt(_)`, which never matches a . // `ty::Infer(_)`. 1,179,648 ( 0.04%) self.inner.borrow_mut().int_unification_table().inlined_probe_value(v).is_some() . } . . 
TyOrConstInferVar::TyFloat(v) => { . // If `probe_value` returns a value it's always a . // `ty::Float(_)`, which never matches a `ty::Infer(_)`. . // . // Not `inlined_probe_value(v)` because this call site is colder. . self.inner.borrow_mut().float_unification_table().probe_value(v).is_some() -- line 1688 ---------------------------------------- -- line 1716 ---------------------------------------- . /// Equivalent to `ty::ConstKind::Infer(ty::InferConst::Var(_))`. . Const(ConstVid<'tcx>), . } . . impl<'tcx> TyOrConstInferVar<'tcx> { . /// Tries to extract an inference variable from a type or a constant, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`) and . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). 2,035 ( 0.00%) pub fn maybe_from_generic_arg(arg: GenericArg<'tcx>) -> Option { . match arg.unpack() { . GenericArgKind::Type(ty) => Self::maybe_from_ty(ty), . GenericArgKind::Const(ct) => Self::maybe_from_const(ct), . GenericArgKind::Lifetime(_) => None, . } 2,035 ( 0.00%) } . . /// Tries to extract an inference variable from a type, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`). 10 ( 0.00%) pub fn maybe_from_ty(ty: Ty<'tcx>) -> Option { 16,869 ( 0.00%) match *ty.kind() { 2,554 ( 0.00%) ty::Infer(ty::TyVar(v)) => Some(TyOrConstInferVar::Ty(v)), 1,536 ( 0.00%) ty::Infer(ty::IntVar(v)) => Some(TyOrConstInferVar::TyInt(v)), . ty::Infer(ty::FloatVar(v)) => Some(TyOrConstInferVar::TyFloat(v)), . _ => None, . } 10 ( 0.00%) } . . /// Tries to extract an inference variable from a constant, returns `None` . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). . pub fn maybe_from_const(ct: &'tcx ty::Const<'tcx>) -> Option { . match ct.val { . ty::ConstKind::Infer(InferConst::Var(v)) => Some(TyOrConstInferVar::Const(v)), . _ => None, . } -- line 1749 ---------------------------------------- -- line 1755 ---------------------------------------- . } . . impl<'a, 'tcx> TypeFolder<'tcx> for ShallowResolver<'a, 'tcx> { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { . self.infcx.tcx . } . . fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { 83,474 ( 0.00%) self.infcx.shallow_resolve_ty(ty) . } . 280 ( 0.00%) fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { 244 ( 0.00%) if let ty::Const { val: ty::ConstKind::Infer(InferConst::Var(vid)), .. } = ct { 260 ( 0.00%) self.infcx . .inner . .borrow_mut() . .const_unification_table() 156 ( 0.00%) .probe_value(*vid) . .val . .known() . .unwrap_or(ct) . } else { . ct . } 350 ( 0.00%) } . } . . impl<'tcx> TypeTrace<'tcx> { . pub fn span(&self) -> Span { 14 ( 0.00%) self.cause.span . } . . pub fn types( . cause: &ObligationCause<'tcx>, . a_is_expected: bool, . a: Ty<'tcx>, . b: Ty<'tcx>, . ) -> TypeTrace<'tcx> { -- line 1792 ---------------------------------------- -- line 1800 ---------------------------------------- . b: &'tcx ty::Const<'tcx>, . ) -> TypeTrace<'tcx> { . TypeTrace { cause: cause.clone(), values: Consts(ExpectedFound::new(a_is_expected, a, b)) } . } . } . . impl<'tcx> SubregionOrigin<'tcx> { . pub fn span(&self) -> Span { 35 ( 0.00%) match *self { 7 ( 0.00%) Subtype(ref a) => a.span(), . RelateObjectBound(a) => a, . RelateParamBound(a, ..) => a, . RelateRegionParamBound(a) => a, . Reborrow(a) => a, . ReborrowUpvar(a, _) => a, . DataBorrowed(_, a) => a, . ReferenceOutlivesReferent(_, a) => a, . CompareImplMethodObligation { span, .. 
} => span, -- line 1817 ---------------------------------------- -- line 1818 ---------------------------------------- . CompareImplTypeObligation { span, .. } => span, . } . } . . pub fn from_obligation_cause(cause: &traits::ObligationCause<'tcx>, default: F) -> Self . where . F: FnOnce() -> Self, . { 4,047 ( 0.00%) match *cause.code() { 1,097 ( 0.00%) traits::ObligationCauseCode::ReferenceOutlivesReferent(ref_type) => { 5,485 ( 0.00%) SubregionOrigin::ReferenceOutlivesReferent(ref_type, cause.span) . } . . traits::ObligationCauseCode::CompareImplMethodObligation { . impl_item_def_id, . trait_item_def_id, . } => SubregionOrigin::CompareImplMethodObligation { . span: cause.span, . impl_item_def_id, -- line 1836 ---------------------------------------- 5,153,999 ( 0.18%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs -------------------------------------------------------------------------------- Ir -- line 111 ---------------------------------------- . const EMPTY: u8 = 0b1111_1111; . . /// Control byte value for a deleted bucket. . const DELETED: u8 = 0b1000_0000; . . /// Checks whether a control byte represents a full bucket (top bit is clear). . #[inline] . fn is_full(ctrl: u8) -> bool { 739,862 ( 0.03%) ctrl & 0x80 == 0 . } . . /// Checks whether a control byte represents a special value (top bit is set). . #[inline] . fn is_special(ctrl: u8) -> bool { . ctrl & 0x80 != 0 . } . . /// Checks whether a special control value is EMPTY (just check 1 bit). . #[inline] . fn special_is_empty(ctrl: u8) -> bool { . debug_assert!(is_special(ctrl)); 62,672 ( 0.00%) ctrl & 0x01 != 0 . } . . /// Primary hash function, used to select the initial bucket to probe from. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h1(hash: u64) -> usize { . // On 32-bit platforms we simply ignore the higher hash bits. . hash as usize -- line 140 ---------------------------------------- -- line 143 ---------------------------------------- . /// Secondary hash function, saved in the low 7 bits of the control byte. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h2(hash: u64) -> u8 { . // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit . // value, some hash functions (such as FxHash) produce a usize result . // instead, which means that the top 32 bits are 0 on 32-bit platforms. . let hash_len = usize::min(mem::size_of::(), mem::size_of::()); 9,567,729 ( 0.33%) let top7 = hash >> (hash_len * 8 - 7); . (top7 & 0x7f) as u8 // truncation . } . . /// Probe sequence based on triangular numbers, which is guaranteed (since our . /// table size is a power of two) to visit every group of elements exactly once. . /// . /// A triangular probe has us jump by 1 more group every time. So first we . /// jump by 1 group (meaning we just continue our linear scan), then 2 groups -- line 159 ---------------------------------------- -- line 170 ---------------------------------------- . #[inline] . fn move_next(&mut self, bucket_mask: usize) { . // We should have found an empty bucket by now and ended the probe. . debug_assert!( . self.stride <= bucket_mask, . "Went past end of probe sequence" . ); . 100,894 ( 0.00%) self.stride += Group::WIDTH; 100,894 ( 0.00%) self.pos += self.stride; 84,942 ( 0.00%) self.pos &= bucket_mask; . } . } . . /// Returns the number of buckets needed to hold the given number of items, . 
/// taking the maximum load factor into account. . /// . /// Returns `None` if an overflow occurs. . // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258 . #[cfg_attr(target_os = "emscripten", inline(never))] . #[cfg_attr(not(target_os = "emscripten"), inline)] . fn capacity_to_buckets(cap: usize) -> Option { . debug_assert_ne!(cap, 0); . . // For small tables we require at least 1 empty bucket so that lookups are . // guaranteed to terminate if an element doesn't exist in the table. 38,172 ( 0.00%) if cap < 8 { . // We don't bother with a table size of 2 buckets since that can only . // hold a single element. Instead we skip directly to a 4 bucket table . // which can hold 3 elements. 79,690 ( 0.00%) return Some(if cap < 4 { 4 } else { 8 }); . } . . // Otherwise require 1/8 buckets to be empty (87.5% load) . // . // Be careful when modifying this, calculate_layout relies on the . // overflow check here. 18,888 ( 0.00%) let adjusted_cap = cap.checked_mul(8)? / 7; . . // Any overflows will have been caught by the checked_mul. Also, any . // rounding errors from the division above will be cleaned up by . // next_power_of_two (which can't overflow because of the previous division). . Some(adjusted_cap.next_power_of_two()) . } . . /// Returns the maximum effective capacity for the given bucket mask, taking . /// the maximum load factor into account. . #[inline] . fn bucket_mask_to_capacity(bucket_mask: usize) -> usize { 101,759 ( 0.00%) if bucket_mask < 8 { . // For tables with 1/2/4/8 buckets, we always reserve one empty slot. . // Keep in mind that the bucket mask is one less than the bucket count. . bucket_mask . } else { . // For larger tables we reserve 12.5% of the slots as empty. 24,152 ( 0.00%) ((bucket_mask + 1) / 8) * 7 . } . } . . /// Helper which allows the max calculation for ctrl_align to be statically computed for each T . /// while keeping the rest of `calculate_layout_for` independent of `T` . #[derive(Copy, Clone)] . struct TableLayout { . size: usize, -- line 233 ---------------------------------------- -- line 246 ---------------------------------------- . . #[inline] . fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> { . debug_assert!(buckets.is_power_of_two()); . . let TableLayout { size, ctrl_align } = self; . // Manual layout calculation since Layout methods are not yet stable. . let ctrl_offset = 75,634 ( 0.00%) size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1); 102,465 ( 0.00%) let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?; . . Some(( . unsafe { Layout::from_size_align_unchecked(len, ctrl_align) }, . ctrl_offset, . )) . } . } . -- line 263 ---------------------------------------- -- line 337 ---------------------------------------- . } . } . #[cfg_attr(feature = "inline-more", inline)] . pub unsafe fn drop(&self) { . self.as_ptr().drop_in_place(); . } . #[inline] . pub unsafe fn read(&self) -> T { 744 ( 0.00%) self.as_ptr().read() . } . #[inline] . pub unsafe fn write(&self, val: T) { . self.as_ptr().write(val); . } . #[inline] . pub unsafe fn as_ref<'a>(&self) -> &'a T { . &*self.as_ptr() -- line 353 ---------------------------------------- -- line 422 ---------------------------------------- . /// Creates a new empty hash table without allocating any memory, using the . /// given allocator. . /// . /// In effect this returns a table with exactly 1 bucket. However we can . /// leave the data pointer dangling since that bucket is never written to . 
/// due to our load factor forcing us to always have at least 1 free bucket. . #[inline] . pub fn new_in(alloc: A) -> Self { 1,158 ( 0.00%) Self { . table: RawTableInner::new_in(alloc), . marker: PhantomData, . } . } . . /// Allocates a new hash table with the given number of buckets. . /// . /// The control bytes are left uninitialized. -- line 438 ---------------------------------------- -- line 440 ---------------------------------------- . unsafe fn new_uninitialized( . alloc: A, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . Ok(Self { 60 ( 0.00%) table: RawTableInner::new_uninitialized( . alloc, . TableLayout::new::(), . buckets, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 456 ---------------------------------------- -- line 458 ---------------------------------------- . /// Attempts to allocate a new hash table with at least enough capacity . /// for inserting the given number of elements without reallocating. . fn fallible_with_capacity( . alloc: A, . capacity: usize, . fallibility: Fallibility, . ) -> Result { . Ok(Self { 5,090 ( 0.00%) table: RawTableInner::fallible_with_capacity( . alloc, . TableLayout::new::(), . capacity, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 474 ---------------------------------------- -- line 527 ---------------------------------------- . debug_assert_ne!(self.table.bucket_mask, 0); . debug_assert!(index < self.buckets()); . Bucket::from_base_index(self.data_end(), index) . } . . /// Erases an element from the table without dropping it. . #[cfg_attr(feature = "inline-more", inline)] . #[deprecated(since = "0.8.1", note = "use erase or remove instead")] 5,590 ( 0.00%) pub unsafe fn erase_no_drop(&mut self, item: &Bucket) { 5,590 ( 0.00%) let index = self.bucket_index(item); . self.table.erase(index); 11,180 ( 0.00%) } . . /// Erases an element from the table, dropping it in place. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn erase(&mut self, item: Bucket) { . // Erase the element from the table first since drop might panic. 2,958 ( 0.00%) self.erase_no_drop(&item); . item.drop(); . } . . /// Finds and erases an element from the table, dropping it in place. . /// Returns true if an element was found. . #[cfg(feature = "raw")] . #[cfg_attr(feature = "inline-more", inline)] . pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool { -- line 554 ---------------------------------------- -- line 563 ---------------------------------------- . } . } . . /// Removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn remove(&mut self, item: Bucket) -> T { 8,222 ( 0.00%) self.erase_no_drop(&item); 118 ( 0.00%) item.read() . } . . /// Finds and removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] 252,142 ( 0.01%) pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option { . // Avoid `Option::map` because it bloats LLVM IR. 1,484 ( 0.00%) match self.find(hash, eq) { 2,801 ( 0.00%) Some(bucket) => Some(unsafe { self.remove(bucket) }), 36,290 ( 0.00%) None => None, . } 368,292 ( 0.01%) } . . /// Marks all table buckets as empty without dropping their contents. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear_no_drop(&mut self) { . self.table.clear_no_drop(); . } . . 
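--------------------------------------------------------------------------------
`capacity_to_buckets` and `bucket_mask_to_capacity`, annotated earlier in this
file, encode the 87.5% maximum load factor: the first rounds `8 * cap / 7` up
to a power of two, the second hands back the usable 7/8 share. Copying that
arithmetic into a standalone program (same bodies as above, minus the
emscripten workaround) makes the round trip easy to check: requesting `cap`
elements always yields at least `cap` of usable capacity.

fn capacity_to_buckets(cap: usize) -> Option<usize> {
    debug_assert_ne!(cap, 0);
    if cap < 8 {
        // Small tables skip straight to 4 or 8 buckets, keeping 1 slot empty.
        return Some(if cap < 4 { 4 } else { 8 });
    }
    let adjusted_cap = cap.checked_mul(8)? / 7;
    Some(adjusted_cap.next_power_of_two())
}

fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
    if bucket_mask < 8 {
        bucket_mask // tiny tables reserve exactly one empty slot
    } else {
        ((bucket_mask + 1) / 8) * 7 // larger tables reserve 12.5%
    }
}

fn main() {
    for cap in [1, 3, 7, 8, 50, 57, 100] {
        let buckets = capacity_to_buckets(cap).unwrap();
        let usable = bucket_mask_to_capacity(buckets - 1);
        println!("cap={cap:>3} -> buckets={buckets:>4} -> usable={usable:>4}");
        assert!(usable >= cap); // the requested capacity is never lost
    }
}
--------------------------------------------------------------------------------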
/// Removes all elements from the table without freeing the backing memory. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear(&mut self) { . // Ensure that the table is reset even if one of the drops panic . let mut self_ = guard(self, |self_| self_.clear_no_drop()); . unsafe { 1 ( 0.00%) self_.drop_elements(); . } . } . 7 ( 0.00%) unsafe fn drop_elements(&mut self) { 4,360 ( 0.00%) if mem::needs_drop::() && !self.is_empty() { . for item in self.iter() { . item.drop(); . } . } 8 ( 0.00%) } . . /// Shrinks the table to fit `max(self.len(), min_size)` elements. . #[cfg_attr(feature = "inline-more", inline)] . pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) { . // Calculate the minimal number of elements that we need to reserve . // space for. . let min_size = usize::max(self.table.items, min_size); . if min_size == 0 { -- line 615 ---------------------------------------- -- line 642 ---------------------------------------- . } . } . } . . /// Ensures that at least `additional` items can be inserted into the table . /// without reallocation. . #[cfg_attr(feature = "inline-more", inline)] . pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) { 148,362 ( 0.01%) if additional > self.table.growth_left { . // Avoid `Result::unwrap_or_else` because it bloats LLVM IR. 65,791 ( 0.00%) if self . .reserve_rehash(additional, hasher, Fallibility::Infallible) . .is_err() . { . unsafe { hint::unreachable_unchecked() } . } . } . } . -- line 660 ---------------------------------------- -- line 671 ---------------------------------------- . } else { . Ok(()) . } . } . . /// Out-of-line slow path for `reserve` and `try_reserve`. . #[cold] . #[inline(never)] 149,588 ( 0.01%) fn reserve_rehash( . &mut self, . additional: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, . ) -> Result<(), TryReserveError> { . unsafe { . self.table.reserve_rehash_inner( . additional, -- line 687 ---------------------------------------- -- line 690 ---------------------------------------- . TableLayout::new::(), . if mem::needs_drop::() { . Some(mem::transmute(ptr::drop_in_place:: as unsafe fn(*mut T))) . } else { . None . }, . ) . } 105,304 ( 0.00%) } . . /// Allocates a new table of a different size and moves the contents of the . /// current table into it. . fn resize( . &mut self, . capacity: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, -- line 706 ---------------------------------------- -- line 714 ---------------------------------------- . ) . } . } . . /// Inserts a new element into the table, and returns its raw bucket. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 1,002,884 ( 0.03%) pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket { . unsafe { . let mut index = self.table.find_insert_slot(hash); . . // We can avoid growing the table once we have reached our load . // factor if we are replacing a tombstone. This works since the . // number of EMPTY slots does not change in this case. 1,582 ( 0.00%) let old_ctrl = *self.table.ctrl(index); 680,682 ( 0.02%) if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) { . self.reserve(1, hasher); . index = self.table.find_insert_slot(hash); . } . . self.table.record_item_insert_at(index, old_ctrl, hash); . . let bucket = self.bucket(index); 4 ( 0.00%) bucket.write(value); . bucket . } 738,229 ( 0.03%) } . . 
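--------------------------------------------------------------------------------
`insert` above finds its slot by walking the table with the triangular
`ProbeSeq` shown near the top of this file (the stride grows by one group
width per step). A standalone sketch of just that probe order; `GROUP_WIDTH =
8` is an assumption for illustration, since the real width depends on the SIMD
backend in use:

const GROUP_WIDTH: usize = 8;

// Positions that ProbeSeq would visit for hash position `h1` in a table of
// `buckets` slots (`buckets` must be a power of two, as in RawTable).
fn probe_positions(h1: usize, buckets: usize) -> Vec<usize> {
    let bucket_mask = buckets - 1;
    let mut pos = h1 & bucket_mask;
    let mut stride = 0;
    let mut seen = Vec::new();
    for _ in 0..buckets / GROUP_WIDTH {
        seen.push(pos);
        stride += GROUP_WIDTH; // jump by one more group every time
        pos = (pos + stride) & bucket_mask;
    }
    seen
}

fn main() {
    let positions = probe_positions(0x1234, 64);
    println!("{positions:?}");
    // On a power-of-two table the triangular sequence repeats no position
    // until every group-sized step has been visited exactly once.
    let mut sorted = positions.clone();
    sorted.sort_unstable();
    sorted.dedup();
    assert_eq!(sorted.len(), 64 / GROUP_WIDTH);
}
--------------------------------------------------------------------------------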
/// Attempts to insert a new element without growing the table and return its raw bucket. . /// . /// Returns an `Err` containing the given element if inserting it would require growing the . /// table. . /// . /// This does not check if the given element already exists in the table. . #[cfg(feature = "raw")] -- line 749 ---------------------------------------- -- line 760 ---------------------------------------- . } . } . } . . /// Inserts a new element into the table, and returns a mutable reference to it. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 292,664 ( 0.01%) pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T { 68 ( 0.00%) unsafe { self.insert(hash, value, hasher).as_mut() } 219,498 ( 0.01%) } . . /// Inserts a new element into the table, without growing the table. . /// . /// There must be enough space in the table to insert the new element. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] . #[cfg(any(feature = "raw", feature = "rustc-internal-api"))] 422 ( 0.00%) pub unsafe fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket { 183,784 ( 0.01%) let (index, old_ctrl) = self.table.prepare_insert_slot(hash); 4,822 ( 0.00%) let bucket = self.table.bucket(index); . . // If we are replacing a DELETED entry then we don't need to update . // the load counter. 372,342 ( 0.01%) self.table.growth_left -= special_is_empty(old_ctrl) as usize; . . bucket.write(value); 299,792 ( 0.01%) self.table.items += 1; . bucket 840 ( 0.00%) } . . /// Temporary removes a bucket, applying the given function to the removed . /// element and optionally put back the returned value in the same bucket. . /// . /// Returns `true` if the bucket still contains an element . /// . /// This does not check if the given bucket is actually occupied. . #[cfg_attr(feature = "inline-more", inline)] -- line 798 ---------------------------------------- -- line 813 ---------------------------------------- . true . } else { . false . } . } . . /// Searches for an element in the table. . #[inline] 22,774 ( 0.00%) pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option> { 5,455 ( 0.00%) let result = self.table.find_inner(hash, &mut |index| unsafe { 8,272 ( 0.00%) eq(self.bucket(index).as_ref()) 3,047 ( 0.00%) }); . . // Avoid `Option::map` because it bloats LLVM IR. . match result { 830 ( 0.00%) Some(index) => Some(unsafe { self.bucket(index) }), . None => None, . } 25,888 ( 0.00%) } . . /// Gets a reference to an element in the table. . #[inline] . pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> { . // Avoid `Option::map` because it bloats LLVM IR. 11,536 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_ref() }), . None => None, . } . } . . /// Gets a mutable reference to an element in the table. . #[inline] 1,211 ( 0.00%) pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> { . // Avoid `Option::map` because it bloats LLVM IR. 9,471 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_mut() }), . None => None, . } 1,384 ( 0.00%) } . . /// Attempts to get mutable references to `N` entries in the table at once. . /// . /// Returns an array of length `N` with the results of each query. . /// . /// At most one mutable reference will be returned to any entry. `None` will be returned if any . 
/// of the hashes are duplicates. `None` will be returned if the hash is not found. . /// -- line 859 ---------------------------------------- -- line 920 ---------------------------------------- . #[inline] . pub fn len(&self) -> usize { . self.table.items . } . . /// Returns `true` if the table contains no elements. . #[inline] . pub fn is_empty(&self) -> bool { 270,489 ( 0.01%) self.len() == 0 . } . . /// Returns the number of buckets in the table. . #[inline] . pub fn buckets(&self) -> usize { . self.table.bucket_mask + 1 . } . . /// Returns an iterator over every element in the table. It is up to . /// the caller to ensure that the `RawTable` outlives the `RawIter`. . /// Because we cannot make the `next` method unsafe on the `RawIter` . /// struct, we have to make the `iter` method unsafe. . #[inline] . pub unsafe fn iter(&self) -> RawIter { 3 ( 0.00%) let data = Bucket::from_base_index(self.data_end(), 0); . RawIter { . iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()), 32,918 ( 0.00%) items: self.table.items, . } . } . . /// Returns an iterator over occupied buckets that could match a given hash. . /// . /// `RawTable` only stores 7 bits of the hash value, so this iterator may . /// return items that have a hash value different than the one provided. You . /// should always validate the returned values before using them. -- line 954 ---------------------------------------- -- line 995 ---------------------------------------- . /// Iteration starts at the provided iterator's current location. . /// . /// It is up to the caller to ensure that the iterator is valid for this . /// `RawTable` and covers all items that remain in the table. . pub unsafe fn into_iter_from(self, iter: RawIter) -> RawIntoIter { . debug_assert_eq!(iter.len(), self.len()); . . let alloc = self.table.alloc.clone(); 3,104 ( 0.00%) let allocation = self.into_allocation(); 2,328 ( 0.00%) RawIntoIter { 3,880 ( 0.00%) iter, . allocation, . marker: PhantomData, . alloc, . } . } . . /// Converts the table into a raw allocation. The contents of the table . /// should be dropped using a `RawIter` before freeing the allocation. . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn into_allocation(self) -> Option<(NonNull, Layout)> { 1,119 ( 0.00%) let alloc = if self.table.is_empty_singleton() { . None . } else { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match calculate_layout::(self.table.buckets()) { . Some(lco) => lco, . None => unsafe { hint::unreachable_unchecked() }, . }; . Some(( 232 ( 0.00%) unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) }, . layout, . )) . }; . mem::forget(self); . alloc . } . } . -- line 1033 ---------------------------------------- -- line 1042 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . impl RawTableInner { . #[inline] . const fn new_in(alloc: A) -> Self { 205,090 ( 0.01%) Self { . // Be careful to cast the entire slice to a raw pointer. . ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) }, . bucket_mask: 0, . items: 0, . growth_left: 0, . alloc, . } . } . } . . impl RawTableInner { . #[cfg_attr(feature = "inline-more", inline)] 126,355 ( 0.00%) unsafe fn new_uninitialized( . alloc: A, . table_layout: TableLayout, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . // Avoid `Option::ok_or_else` because it bloats LLVM IR. 
-- line 1071 ---------------------------------------- -- line 1078 ---------------------------------------- . // exceed `isize::MAX`. We can skip this check on 64-bit systems since . // such allocations will never succeed anyways. . // . // This mirrors what Vec does in the standard library. . if mem::size_of::() < 8 && layout.size() > isize::MAX as usize { . return Err(fallibility.capacity_overflow()); . } . 31,992 ( 0.00%) let ptr: NonNull = match do_alloc(&alloc, layout) { . Ok(block) => block.cast(), . Err(_) => return Err(fallibility.alloc_err(layout)), . }; . . let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset)); 71,274 ( 0.00%) Ok(Self { . ctrl, 30,977 ( 0.00%) bucket_mask: buckets - 1, . items: 0, . growth_left: bucket_mask_to_capacity(buckets - 1), . alloc, . }) 92,156 ( 0.00%) } . . #[inline] 13,826 ( 0.00%) fn fallible_with_capacity( . alloc: A, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result { 3,554 ( 0.00%) if capacity == 0 { 3,024 ( 0.00%) Ok(Self::new_in(alloc)) . } else { . unsafe { . let buckets = . capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?; . 57,751 ( 0.00%) let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?; . result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes()); . 6,878 ( 0.00%) Ok(result) . } . } 13,826 ( 0.00%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element and sets the hash for that slot. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] 62,253 ( 0.00%) unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) { . let index = self.find_insert_slot(hash); 62,253 ( 0.00%) let old_ctrl = *self.ctrl(index); . self.set_ctrl_h2(index, hash); . (index, old_ctrl) 124,506 ( 0.00%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] . fn find_insert_slot(&self, hash: u64) -> usize { . let mut probe_seq = self.probe_seq(hash); . loop { . unsafe { . let group = Group::load(self.ctrl(probe_seq.pos)); 483,512 ( 0.02%) if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() { 1,407,802 ( 0.05%) let result = (probe_seq.pos + bit) & self.bucket_mask; . . // In tables smaller than the group width, trailing control . // bytes outside the range of the table are filled with . // EMPTY entries. These will unfortunately trigger a . // match, but once masked may point to a full bucket that . // is already occupied. We detect this situation here and . // perform a second scan starting at the beginning of the . // table. This second scan is guaranteed to find an empty . // slot (due to the load factor) before hitting the trailing . // control bytes (containing EMPTY). 639,153 ( 0.02%) if unlikely(is_full(*self.ctrl(result))) { . debug_assert!(self.bucket_mask < Group::WIDTH); . debug_assert_ne!(probe_seq.pos, 0); . return Group::load_aligned(self.ctrl(0)) . .match_empty_or_deleted() . .lowest_set_bit_nonzero(); . } . . return result; -- line 1165 ---------------------------------------- -- line 1171 ---------------------------------------- . . /// Searches for an element in the table. This uses dynamic dispatch to reduce the amount of . /// code generated, but it is eliminated by LLVM optimizations. . #[inline] . fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option { . let h2_hash = h2(hash); . 
         .              let mut probe_seq = self.probe_seq(hash);
         .
    28,064 ( 0.00%)     loop {
         .                  let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) };
         .
 1,159,060 ( 0.04%)         for bit in group.match_byte(h2_hash) {
 2,087,715 ( 0.07%)             let index = (probe_seq.pos + bit) & self.bucket_mask;
         .
 1,493,320 ( 0.05%)             if likely(eq(index)) {
         .                          return Some(index);
         .                      }
         .                  }
         .
   418,529 ( 0.01%)         if likely(group.match_empty().any_bit_set()) {
         .                      return None;
         .                  }
         .
         .                  probe_seq.move_next(self.bucket_mask);
         .              }
         .          }
         .
         .          #[allow(clippy::mut_mut)]
-- line 1198 ----------------------------------------
-- line 1225 ----------------------------------------
         .              Bucket::from_base_index(self.data_end(), index)
         .          }
         .
         .          #[inline]
         .          unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 {
         .              debug_assert_ne!(self.bucket_mask, 0);
         .              debug_assert!(index < self.buckets());
         .              let base: *mut u8 = self.data_end().as_ptr();
 1,793,896 ( 0.06%)         base.sub((index + 1) * size_of)
         .          }
         .
         .          #[inline]
         .          unsafe fn data_end(&self) -> NonNull<u8> {
         .              NonNull::new_unchecked(self.ctrl.as_ptr().cast())
         .          }
         .
         .          /// Returns an iterator-like object for a probe sequence on the table.
         .          ///
         .          /// This iterator never terminates, but is guaranteed to visit each bucket
         .          /// group exactly once. The loop using `probe_seq` must terminate upon
         .          /// reaching a group containing an empty bucket.
         .          #[inline]
         .          fn probe_seq(&self, hash: u64) -> ProbeSeq {
         .              ProbeSeq {
 7,258,711 ( 0.25%)             pos: h1(hash) & self.bucket_mask,
         .                  stride: 0,
         .              }
         .          }
         .
         .          /// Returns the index of a bucket for which a value must be inserted if there is enough room
         .          /// in the table, otherwise returns an error.
         .          #[cfg(feature = "raw")]
         .          #[inline]
-- line 1257 ----------------------------------------
-- line 1263 ----------------------------------------
         .              } else {
         .                  self.record_item_insert_at(index, old_ctrl, hash);
         .                  Ok(index)
         .              }
         .          }
         .
         .          #[inline]
         .          unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) {
   850,122 ( 0.03%)         self.growth_left -= special_is_empty(old_ctrl) as usize;
         .              self.set_ctrl_h2(index, hash);
   680,068 ( 0.02%)         self.items += 1;
         .          }
         .
         .          #[inline]
         .          fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool {
         .              let probe_seq_pos = self.probe_seq(hash).pos;
         .              let probe_index =
         .                  |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH;
         .              probe_index(i) == probe_index(new_i)
-- line 1281 ----------------------------------------
-- line 1312 ----------------------------------------
         .              // replicate the buckets at the end of the trailing group. For example
         .              // with 2 buckets and a group size of 4, the control bytes will look
         .              // like this:
         .              //
         .              //     Real    |             Replicated
         .              // ---------------------------------------------
         .              // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] |
         .              // ---------------------------------------------
 1,371,700 ( 0.05%)         let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH;
         .
   456,926 ( 0.02%)         *self.ctrl(index) = ctrl;
   457,071 ( 0.02%)         *self.ctrl(index2) = ctrl;
         .          }
         .
         .          /// Returns a pointer to a control byte.
         .          #[inline]
         .          unsafe fn ctrl(&self, index: usize) -> *mut u8 {
         .              debug_assert!(index < self.num_ctrl_bytes());
         .              self.ctrl.as_ptr().add(index)
         .          }
         .
         .          #[inline]
         .          fn buckets(&self) -> usize {
    96,792 ( 0.00%)         self.bucket_mask + 1
         .          }
         .
         .          #[inline]
         .          fn num_ctrl_bytes(&self) -> usize {
    72,018 ( 0.00%)         self.bucket_mask + 1 + Group::WIDTH
         .          }
         .
         .          #[inline]
         .          fn is_empty_singleton(&self) -> bool {
   383,138 ( 0.01%)         self.bucket_mask == 0
         .          }
         .
         .          #[allow(clippy::mut_mut)]
         .          #[inline]
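// --- Editorial aside (sketch; not part of the profiled source) --------------
// `probe_seq` above starts at h1(hash) & bucket_mask and then advances in
// strides that grow by one group per step (triangular probing). Because the
// bucket count is a power of two, the cumulative offsets visit every group
// exactly once before repeating. A plausible `ProbeSeq::move_next`:
//
//     fn move_next(&mut self, bucket_mask: usize) {
//         self.stride += Group::WIDTH;   // strides of 1, 2, 3, ... groups
//         self.pos += self.stride;       // cumulative triangular offsets
//         self.pos &= bucket_mask;       // wrap within the table
//     }
// -----------------------------------------------------------------------------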
         .          unsafe fn prepare_resize(
         .              &self,
         .              table_layout: TableLayout,
         .              capacity: usize,
         .              fallibility: Fallibility,
         .          ) -> Result<ScopeGuard<Self, impl FnMut(&mut Self)>, TryReserveError> {
         .              debug_assert!(self.items <= capacity);
         .
         .              // Allocate and initialize the new table.
     1,416 ( 0.00%)         let mut new_table = RawTableInner::fallible_with_capacity(
         .                  self.alloc.clone(),
         .                  table_layout,
         .                  capacity,
         .                  fallibility,
         .              )?;
    41,309 ( 0.00%)         new_table.growth_left -= self.items;
         .              new_table.items = self.items;
         .
         .              // The hash function may panic, in which case we simply free the new
         .              // table without dropping any elements that may have been copied into
         .              // it.
         .              //
         .              // This guard is also used to free the old table on success, see
         .              // the comment at the bottom of this function.
         .              Ok(guard(new_table, move |self_| {
    18,411 ( 0.00%)             if !self_.is_empty_singleton() {
         .                      self_.free_buckets(table_layout);
         .                  }
         .              }))
         .          }
         .
         .          /// Reserves or rehashes to make room for `additional` more elements.
         .          ///
         .          /// This uses dynamic dispatch to reduce the amount of
-- line 1383 ----------------------------------------
-- line 1388 ----------------------------------------
         .              &mut self,
         .              additional: usize,
         .              hasher: &dyn Fn(&mut Self, usize) -> u64,
         .              fallibility: Fallibility,
         .              layout: TableLayout,
         .              drop: Option<fn(*mut u8)>,
         .          ) -> Result<(), TryReserveError> {
         .              // Avoid `Option::ok_or_else` because it bloats LLVM IR.
    36,823 ( 0.00%)         let new_items = match self.items.checked_add(additional) {
         .                  Some(new_items) => new_items,
         .                  None => return Err(fallibility.capacity_overflow()),
         .              };
    36,822 ( 0.00%)         let full_capacity = bucket_mask_to_capacity(self.bucket_mask);
    77,751 ( 0.00%)         if new_items <= full_capacity / 2 {
         .                  // Rehash in-place without re-allocating if we have plenty of spare
         .                  // capacity that is locked up due to DELETED entries.
         .                  self.rehash_in_place(hasher, layout.size, drop);
         .                  Ok(())
         .              } else {
         .                  // Otherwise, conservatively resize to at least the next size up
         .                  // to avoid churning deletes into frequent rehashes.
         .                  self.resize_inner(
    18,411 ( 0.00%)                 usize::max(new_items, full_capacity + 1),
         .                      hasher,
         .                      fallibility,
         .                      layout,
         .                  )
         .              }
         .          }
         .
         .          /// Allocates a new table of a different size and moves the contents of the
-- line 1418 ----------------------------------------
-- line 1424 ----------------------------------------
         .          #[inline(always)]
         .          unsafe fn resize_inner(
         .              &mut self,
         .              capacity: usize,
         .              hasher: &dyn Fn(&mut Self, usize) -> u64,
         .              fallibility: Fallibility,
         .              layout: TableLayout,
         .          ) -> Result<(), TryReserveError> {
     3,527 ( 0.00%)         let mut new_table = self.prepare_resize(layout, capacity, fallibility)?;
         .
         .              // Copy all elements to the new table.
         .              for i in 0..self.buckets() {
   270,726 ( 0.01%)             if !is_full(*self.ctrl(i)) {
         .                      continue;
         .                  }
         .
         .                  // This may panic.
         .                  let hash = hasher(self, i);
         .
         .                  // We can use a simpler version of insert() here since:
         .                  // - there are no DELETED entries.
-- line 1444 ----------------------------------------
-- line 1454 ----------------------------------------
         .              }
         .
         .              // We successfully copied all elements without panicking. Now replace
         .              // self with the new table. The old table will have its memory freed but
         .              // the items will not be dropped (since they have been moved into the
         .              // new table).
         .              mem::swap(self, &mut new_table);
         .
    18,411 ( 0.00%)         Ok(())
         .          }
         .
         .          /// Rehashes the contents of the table in place (i.e. without changing the
         .          /// allocation).
         .          ///
         .          /// If `hasher` panics then some of the table's contents may be lost.
         .          ///
/// This uses dynamic dispatch to reduce the amount of -- line 1470 ---------------------------------------- -- line 1554 ---------------------------------------- . #[inline] . unsafe fn free_buckets(&mut self, table_layout: TableLayout) { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) { . Some(lco) => lco, . None => hint::unreachable_unchecked(), . }; . self.alloc.deallocate( 12,830 ( 0.00%) NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)), . layout, . ); . } . . /// Marks all table buckets as empty without dropping their contents. . #[inline] . fn clear_no_drop(&mut self) { 2,577 ( 0.00%) if !self.is_empty_singleton() { . unsafe { . self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes()); . } . } 3,326 ( 0.00%) self.items = 0; 2,580 ( 0.00%) self.growth_left = bucket_mask_to_capacity(self.bucket_mask); . } . . #[inline] . unsafe fn erase(&mut self, index: usize) { . debug_assert!(is_full(*self.ctrl(index))); 54,917 ( 0.00%) let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask; . let empty_before = Group::load(self.ctrl(index_before)).match_empty(); . let empty_after = Group::load(self.ctrl(index)).match_empty(); . . // If we are inside a continuous block of Group::WIDTH full or deleted . // cells then a probe window may have seen a full block when trying to . // insert. We therefore need to keep that block non-empty so that . // lookups will continue searching to the next probe window. . // . // Note that in this context `leading_zeros` refers to the bytes at the . // end of a group, while `trailing_zeros` refers to the bytes at the . // beginning of a group. 219,668 ( 0.01%) let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH { . DELETED . } else { 269,215 ( 0.01%) self.growth_left += 1; . EMPTY . }; . self.set_ctrl(index, ctrl); 219,668 ( 0.01%) self.items -= 1; . } . } . . impl Clone for RawTable { 3,104 ( 0.00%) fn clone(&self) -> Self { 399 ( 0.00%) if self.table.is_empty_singleton() { . Self::new_in(self.table.alloc.clone()) . } else { . unsafe { . let mut new_table = ManuallyDrop::new( . // Avoid `Result::ok_or_else` because it bloats LLVM IR. . match Self::new_uninitialized( . self.table.alloc.clone(), . self.table.buckets(), -- line 1615 ---------------------------------------- -- line 1624 ---------------------------------------- . // We need to free the memory allocated for the new table. . new_table.free_buckets(); . }); . . // Return the newly created table. . ManuallyDrop::into_inner(new_table) . } . } 3,492 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . if source.table.is_empty_singleton() { . *self = Self::new_in(self.table.alloc.clone()); . } else { . unsafe { . // First, drop all our elements without clearing the control bytes. . self.drop_elements(); -- line 1640 ---------------------------------------- -- line 1687 ---------------------------------------- . .table . .ctrl(0) . .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes()); . source . .data_start() . .copy_to_nonoverlapping(self.data_start(), self.table.buckets()); . . self.table.items = source.table.items; 24 ( 0.00%) self.table.growth_left = source.table.growth_left; . } . } . . impl RawTable { . /// Common code for clone and clone_from. Assumes `self.buckets() == source.buckets()`. . #[cfg_attr(feature = "inline-more", inline)] . 
unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) { . // Copy the control bytes unchanged. We do this in a single pass -- line 1703 ---------------------------------------- -- line 1790 ---------------------------------------- . fn default() -> Self { . Self::new_in(Default::default()) . } . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] 208,177 ( 0.01%) fn drop(&mut self) { 164,834 ( 0.01%) if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); . self.free_buckets(); . } . } 222,614 ( 0.01%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); -- line 1813 ---------------------------------------- -- line 1817 ---------------------------------------- . } . } . . impl IntoIterator for RawTable { . type Item = T; . type IntoIter = RawIntoIter; . . #[cfg_attr(feature = "inline-more", inline)] 3,104 ( 0.00%) fn into_iter(self) -> RawIntoIter { . unsafe { . let iter = self.iter(); . self.into_iter_from(iter) . } 3,880 ( 0.00%) } . } . . /// Iterator over a sub-range of a table. Unlike `RawIter` this iterator does . /// not track an item count. . pub(crate) struct RawIterRange { . // Mask of full buckets in the current group. Bits are cleared from this . // mask as each element is processed. . current_group: BitMask, -- line 1838 ---------------------------------------- -- line 1934 ---------------------------------------- . . impl Iterator for RawIterRange { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option> { . unsafe { . loop { 77,443 ( 0.00%) if let Some(index) = self.current_group.lowest_set_bit() { 11,268 ( 0.00%) self.current_group = self.current_group.remove_lowest_bit(); 18,972 ( 0.00%) return Some(self.data.next_n(index)); . } . 75,139 ( 0.00%) if self.next_ctrl >= self.end { . return None; . } . . // We might read past self.end up to the next group boundary, . // but this is fine because it only occurs on tables smaller . // than the group size where the trailing control bytes are all . // EMPTY. On larger tables self.end is guaranteed to be aligned . // to the group size (since tables are power-of-two sized). 1,688 ( 0.00%) self.current_group = Group::load_aligned(self.next_ctrl).match_full(); 1,505 ( 0.00%) self.data = self.data.next_n(Group::WIDTH); 3,022 ( 0.00%) self.next_ctrl = self.next_ctrl.add(Group::WIDTH); . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . // We don't have an item count, so just guess based on the range size. . ( -- line 1966 ---------------------------------------- -- line 2102 ---------------------------------------- . } . } else { . // We must have already iterated past the removed item. . } . } . } . . unsafe fn drop_elements(&mut self) { 920 ( 0.00%) if mem::needs_drop::() && self.len() != 0 { . for item in self { . item.drop(); . } . } . } . } . . impl Clone for RawIter { -- line 2118 ---------------------------------------- -- line 2124 ---------------------------------------- . } . } . } . . impl Iterator for RawIter { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] 12,579 ( 0.00%) fn next(&mut self) -> Option> { 28,314 ( 0.00%) if let Some(b) = self.iter.next() { 220,866 ( 0.01%) self.items -= 1; . Some(b) . } else { . 
// We don't check against items == 0 here to allow the . // compiler to optimize away the item count entirely if the . // iterator length is never queried. . debug_assert_eq!(self.items, 0); . None . } 25,158 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (self.items, Some(self.items)) . } . } . . impl ExactSizeIterator for RawIter {} -- line 2151 ---------------------------------------- -- line 2177 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] 1,478 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); . . // Free the table 5,000 ( 0.00%) if let Some((ptr, layout)) = self.allocation { . self.alloc.deallocate(ptr, layout); . } . } 726 ( 0.00%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); -- line 2203 ---------------------------------------- -- line 2209 ---------------------------------------- . } . } . } . . impl Iterator for RawIntoIter { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] 1,049 ( 0.00%) fn next(&mut self) -> Option { 324 ( 0.00%) unsafe { Some(self.iter.next()?.read()) } 2,742 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 3 ( 0.00%) self.iter.size_hint() . } . } . . impl ExactSizeIterator for RawIntoIter {} . impl FusedIterator for RawIntoIter {} . . /// Iterator which consumes elements without freeing the table storage. . pub struct RawDrain<'a, T, A: Allocator + Clone = Global> { -- line 2231 ---------------------------------------- -- line 2259 ---------------------------------------- . where . T: Sync, . A: Sync, . { . } . . impl Drop for RawDrain<'_, T, A> { . #[cfg_attr(feature = "inline-more", inline)] 792 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements. Note that this may panic. . self.iter.drop_elements(); . . // Reset the contents of the table now that all elements have been . // dropped. . self.table.clear_no_drop(); . . // Move the now empty table back to its original location. 99 ( 0.00%) self.orig_table . .as_ptr() . .copy_from_nonoverlapping(&*self.table, 1); . } 792 ( 0.00%) } . } . . impl Iterator for RawDrain<'_, T, A> { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option { . unsafe { -- line 2289 ---------------------------------------- 4,401,065 ( 0.15%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/ena-0.14.0/src/unify/mod.rs -------------------------------------------------------------------------------- Ir -- line 154 ---------------------------------------- . /// Value of a unification key. We implement Tarjan's union-find . /// algorithm: when two keys are unified, one of them is converted . /// into a "redirect" pointing at the other. These redirects form a . /// DAG: the roots of the DAG (nodes that are not redirected) are each . /// associated with a value of type `V` and a rank. The rank is used . /// to keep the DAG relatively balanced, which helps keep the running . /// time of the algorithm under control. For more information, see . /// . 6,532 ( 0.00%) #[derive(PartialEq, Clone, Debug)] . 
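// --- Editorial aside (sketch; not part of the profiled source) --------------
// The redirect/root scheme described above, reduced to plain indices: every
// entry either points at itself (a root) or at another entry, and lookups
// re-point each visited node directly at the root ("path compression"):
//
//     fn find(parent: &mut [usize], k: usize) -> usize {
//         if parent[k] == k { return k; }      // k is a root
//         let root = find(parent, parent[k]);  // walk to the root
//         parent[k] = root;                    // compress the path
//         root
//     }
// -----------------------------------------------------------------------------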
         .  pub struct VarValue<K: UnifyKey> {
     5,723 ( 0.00%)     parent: K,        // if equal to self, this is a root
     4,488 ( 0.00%)     value: K::Value,  // value assigned (only relevant to root)
     3,991 ( 0.00%)     rank: u32,        // max depth (only relevant to root)
         .  }
         .
         .  /// Table of unification keys and their values. You must define a key type K
         .  /// that implements the `UnifyKey` trait. Unification tables can be used in two modes:
         .  ///
         .  /// - in-place (`UnificationTable<InPlace<K>>` or `InPlaceUnificationTable<K>`):
         .  ///   - This is the standard mutable mode, where the array is modified
         .  ///     in place.
         .  ///   - To do backtracking, you can employ the `snapshot` and `rollback_to`
         .  ///     methods.
         .  /// - persistent (`UnificationTable<Persistent<K>>` or `PersistentUnificationTable<K>`):
         .  ///   - In this mode, we use a persistent vector to store the data, so that
         .  ///     cloning the table is an O(1) operation.
         .  ///   - This implies that ordinary operations are quite a bit slower though.
         .  ///   - Requires the `persistent` feature be selected in your Cargo.toml file.
    21,025 ( 0.00%)  #[derive(Clone, Debug, Default)]
         .  pub struct UnificationTable<S: UnificationStoreBase> {
         .      /// Indicates the current value of each key.
         .      values: S,
         .  }
         .
         .  pub type UnificationStorage<K> = Vec<VarValue<K>>;
         .  pub type UnificationTableStorage<K> = UnificationTable<InPlace<K, UnificationStorage<K>, ()>>;
         .
-- line 190 ----------------------------------------
-- line 215 ----------------------------------------
         .      }
         .
         .      fn new(parent: K, value: K::Value, rank: u32) -> VarValue<K> {
         .          VarValue {
         .              parent: parent, // this is a root
         .              value: value,
         .              rank: rank,
         .          }
    16,415 ( 0.00%)     }
         .
         .      fn redirect(&mut self, to: K) {
     2,553 ( 0.00%)         self.parent = to;
         .      }
         .
         .      fn root(&mut self, rank: u32, value: K::Value) {
     2,680 ( 0.00%)         self.rank = rank;
     1,578 ( 0.00%)         self.value = value;
         .      }
         .
         .      fn parent(&self, self_key: K) -> Option<K> {
   143,147 ( 0.00%)         self.if_not_self(self.parent, self_key)
         .      }
         .
         .      fn if_not_self(&self, key: K, self_key: K) -> Option<K> {
         .          if key == self_key {
         .              None
         .          } else {
         .              Some(key)
         .          }
-- line 243 ----------------------------------------
-- line 311 ----------------------------------------
         .      pub fn len(&self) -> usize {
         .          self.values.len()
         .      }
         .  }
         .
         .  impl<S: UnificationStoreMut> UnificationTable<S> {
         .      /// Creates a fresh key with the given value.
    38,876 ( 0.00%)     pub fn new_key(&mut self, value: S::Value) -> S::Key {
         .          let len = self.values.len();
    45,236 ( 0.00%)         let key: S::Key = UnifyKey::from_index(len as u32);
         .          self.values.push(VarValue::new_var(key, value));
    18,894 ( 0.00%)         debug!("{}: created new key: {:?}", S::tag(), key);
         .          key
    55,510 ( 0.00%)     }
         .
         .      /// Reserve memory for `num_new_keys` to be created. Does not
         .      /// actually create the new keys; you must then invoke `new_key`.
         .      pub fn reserve(&mut self, num_new_keys: usize) {
         .          self.values.reserve(num_new_keys);
         .      }
         .
         .      /// Clears all unifications that have been performed, resetting to
-- line 333 ----------------------------------------
-- line 339 ----------------------------------------
         .              let value = value(key);
         .              VarValue::new_var(key, value)
         .          });
         .      }
         .
         .      /// Obtains the current value for a particular key.
         .      /// Not for end-users; they can use `probe_value`.
         .      fn value(&self, key: S::Key) -> &VarValue<S::Key> {
   991,971 ( 0.03%)         &self.values[key.index() as usize]
         .      }
         .
         .      /// Find the root node for `vid`. This uses the standard
         .      /// union-find algorithm with path compression:
         .      /// <http://en.wikipedia.org/wiki/Disjoint-set_data_structure>
         .      ///
         .      /// NB. This is a building-block operation and you would probably
         .      /// prefer to call `probe` below.
         .      ///
         .      /// This is an always-inlined version of this function for the hot
/// callsites. `uninlined_get_root_key` is the never-inlined version. . #[inline(always)] . fn inlined_get_root_key(&mut self, vid: S::Key) -> S::Key { . let redirect = { 2,790,201 ( 0.10%) match self.value(vid).parent(vid) { . None => return vid, . Some(redirect) => redirect, . } . }; . 896,729 ( 0.03%) let root_key: S::Key = self.uninlined_get_root_key(redirect); 297,694 ( 0.01%) if root_key != redirect { . // Path compression 12 ( 0.00%) self.update_value(vid, |value| value.parent = root_key); . } . . root_key . } . . // This is a never-inlined version of this function for cold callsites. . // 'inlined_get_root_key` is the always-inlined version. . #[inline(never)] 2,250,274 ( 0.08%) fn uninlined_get_root_key(&mut self, vid: S::Key) -> S::Key { . self.inlined_get_root_key(vid) 2,245,196 ( 0.08%) } . 32 ( 0.00%) fn update_value(&mut self, key: S::Key, op: OP) . where . OP: FnOnce(&mut VarValue), . { 10,459 ( 0.00%) self.values.update(key.index() as usize, op); 7,466 ( 0.00%) debug!("Updated variable {:?} to {:?}", key, self.value(key)); 20 ( 0.00%) } . . /// Either redirects `node_a` to `node_b` or vice versa, depending . /// on the relative rank. The value associated with the new root . /// will be `new_value`. . /// . /// NB: This is the "union" operation of "union-find". It is . /// really more of a building block. If the values associated with . /// your key are non-trivial, you would probably prefer to call . /// `unify_var_var` below. . fn unify_roots(&mut self, key_a: S::Key, key_b: S::Key, new_value: S::Value) { 1,344 ( 0.00%) debug!("unify(key_a={:?}, key_b={:?})", key_a, key_b); . 1,791 ( 0.00%) let rank_a = self.value(key_a).rank; 5,311 ( 0.00%) let rank_b = self.value(key_b).rank; . if let Some((new_root, redirected)) = S::Key::order_roots( 2,901 ( 0.00%) key_a, . &self.value(key_a).value, 2,901 ( 0.00%) key_b, . &self.value(key_b).value, . ) { . // compute the new rank for the new root that they chose; . // this may not be the optimal choice. . let new_rank = if new_root == key_a { . debug_assert!(redirected == key_b); . if rank_a > rank_b { . rank_a -- line 416 ---------------------------------------- -- line 422 ---------------------------------------- . debug_assert!(redirected == key_a); . if rank_b > rank_a { . rank_b . } else { . rank_a + 1 . } . }; . self.redirect_root(new_rank, redirected, new_root, new_value); 2,688 ( 0.00%) } else if rank_a > rank_b { . // a has greater rank, so a should become b's parent, . // i.e., b should redirect to a. 360 ( 0.00%) self.redirect_root(rank_a, key_b, key_a, new_value); 1,235 ( 0.00%) } else if rank_a < rank_b { . // b has greater rank, so a should redirect to b. 205 ( 0.00%) self.redirect_root(rank_b, key_a, key_b, new_value); . } else { . // If equal, redirect one to the other and increment the . // other's rank. 4,944 ( 0.00%) self.redirect_root(rank_a + 1, key_a, key_b, new_value); . } . } . . /// Internal method to redirect `old_root_key` (which is currently . /// a root) to a child of `new_root_key` (which will remain a . /// root). The rank and value of `new_root_key` will be updated to . /// `new_rank` and `new_value` respectively. 17,563 ( 0.00%) fn redirect_root( . &mut self, . new_rank: u32, . old_root_key: S::Key, . new_root_key: S::Key, . new_value: S::Value, . ) { . self.update_value(old_root_key, |old_root_value| { . old_root_value.redirect(new_root_key); . }); 1,280 ( 0.00%) self.update_value(new_root_key, |new_root_value| { 377 ( 0.00%) new_root_value.root(new_rank, new_value); . }); 9,642 ( 0.00%) } . } . . 
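// --- Editorial aside (sketch; not part of the profiled source) --------------
// The rank policy of `unify_roots` above, in plain form: redirect the
// shallower tree into the deeper one, and grow a rank only when both sides
// are equal. This keeps root-finding paths logarithmic even before path
// compression kicks in. Names here are illustrative:
fn union_by_rank_sketch(parent: &mut [usize], rank: &mut [u32], a: usize, b: usize) {
    // `a` and `b` are assumed to already be roots.
    if rank[a] > rank[b] {
        parent[b] = a;        // a is deeper: b redirects to a
    } else if rank[a] < rank[b] {
        parent[a] = b;        // b is deeper: a redirects to b
    } else {
        parent[a] = b;        // equal: pick one root ...
        rank[b] += 1;         // ... and bump its rank
    }
}
// -----------------------------------------------------------------------------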
/// //////////////////////////////////////////////////////////////////////// . /// Public API . . impl UnificationTable . where . S: UnificationStoreMut, . K: UnifyKey, . V: UnifyValue, . { . /// Unions two keys without the possibility of failure; only . /// applicable when unify values use `NoError` as their error . /// type. 9,752 ( 0.00%) pub fn union(&mut self, a_id: K1, b_id: K2) . where . K1: Into, . K2: Into, . V: UnifyValue, . { . self.unify_var_var(a_id, b_id).unwrap(); 8,652 ( 0.00%) } . . /// Unions a key and a value without the possibility of failure; . /// only applicable when unify values use `NoError` as their error . /// type. 36,618 ( 0.00%) pub fn union_value(&mut self, id: K1, value: V) . where . K1: Into, . V: UnifyValue, . { . self.unify_var_value(id, value).unwrap(); 28,699 ( 0.00%) } . . /// Given two keys, indicates whether they have been unioned together. . pub fn unioned(&mut self, a_id: K1, b_id: K2) -> bool . where . K1: Into, . K2: Into, . { . self.find(a_id) == self.find(b_id) -- line 502 ---------------------------------------- -- line 503 ---------------------------------------- . } . . /// Given a key, returns the (current) root key. . pub fn find(&mut self, id: K1) -> K . where . K1: Into, . { . let id = id.into(); 17,395 ( 0.00%) self.uninlined_get_root_key(id) . } . . /// Unions together two variables, merging their values. If . /// merging the values fails, the error is propagated and this . /// method has no effect. 2,304 ( 0.00%) pub fn unify_var_var(&mut self, a_id: K1, b_id: K2) -> Result<(), V::Error> . where . K1: Into, . K2: Into, . { . let a_id = a_id.into(); . let b_id = b_id.into(); . 4,374 ( 0.00%) let root_a = self.uninlined_get_root_key(a_id); 4,580 ( 0.00%) let root_b = self.uninlined_get_root_key(b_id); . 404 ( 0.00%) if root_a == root_b { . return Ok(()); . } . 6,681 ( 0.00%) let combined = V::unify_values(&self.value(root_a).value, &self.value(root_b).value)?; . . Ok(self.unify_roots(root_a, root_b, combined)) 2,560 ( 0.00%) } . . /// Sets the value of the key `a_id` to `b`, attempting to merge . /// with the previous value. 6,172 ( 0.00%) pub fn unify_var_value(&mut self, a_id: K1, b: V) -> Result<(), V::Error> . where . K1: Into, . { . let a_id = a_id.into(); 6,763 ( 0.00%) let root_a = self.uninlined_get_root_key(a_id); 14,070 ( 0.00%) let value = V::unify_values(&self.value(root_a).value, &b)?; 10,800 ( 0.00%) self.update_value(root_a, |node| node.value = value); . Ok(()) 5,570 ( 0.00%) } . . /// Returns the current value for the given key. If the key has . /// been union'd, this will give the value from the current root. 97,932 ( 0.00%) pub fn probe_value(&mut self, id: K1) -> V . where . K1: Into, . { . self.inlined_probe_value(id) 97,728 ( 0.00%) } . . // An always-inlined version of `probe_value`, for hot callsites. . #[inline(always)] . pub fn inlined_probe_value(&mut self, id: K1) -> V . where . K1: Into, . { . let id = id.into(); . let id = self.inlined_get_root_key(id); 61,806 ( 0.00%) self.value(id).value.clone() . } . } . . /////////////////////////////////////////////////////////////////////////// . . impl UnifyValue for () { . type Error = NoError; . . fn unify_values(_: &(), _: &()) -> Result<(), NoError> { . Ok(()) 286 ( 0.00%) } . } . . impl UnifyValue for Option { . type Error = V::Error; . . fn unify_values(a: &Option, b: &Option) -> Result { 5,708 ( 0.00%) match (a, b) { . (&None, &None) => Ok(None), . (&Some(ref v), &None) | (&None, &Some(ref v)) => Ok(Some(v.clone())), . 
(&Some(ref a), &Some(ref b)) => match V::unify_values(a, b) { . Ok(v) => Ok(Some(v)), . Err(err) => Err(err), . }, . } . } -- line 593 ---------------------------------------- 1,501,335 ( 0.05%) -------------------------------------------------------------------------------- The following files chosen for auto-annotation could not be found: -------------------------------------------------------------------------------- ./elf/../sysdeps/x86_64/dl-machine.h ./elf/dl-lookup.c ./malloc/malloc.c ./stdlib/msort.c ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/strcmp-avx2.S ./string/../sysdeps/x86_64/multiarch/strlen-avx2.S /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc -------------------------------------------------------------------------------- Ir -------------------------------------------------------------------------------- 102,235,339 ( 3.48%) events annotated
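Editorial appendix: the ena annotations above are dominated by `new_key`,
root-finding, and `probe_value`. For orientation, here is a minimal usage
sketch of that public API (assuming ena 0.14's documented trait surface; the
`Key` type and its `()` value are illustrative, not taken from the profiled
build):

    use ena::unify::{InPlaceUnificationTable, UnifyKey};

    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    struct Key(u32);

    impl UnifyKey for Key {
        type Value = ();                          // `()` unifies infallibly
        fn index(&self) -> u32 { self.0 }
        fn from_index(i: u32) -> Key { Key(i) }
        fn tag() -> &'static str { "Key" }
    }

    fn main() {
        let mut table = InPlaceUnificationTable::<Key>::new();
        let a = table.new_key(());
        let b = table.new_key(());
        let c = table.new_key(());
        assert!(!table.unioned(a, c));
        table.union(a, b);                        // infallible for `()` values
        table.union(b, c);
        assert!(table.unioned(a, c));             // a, b, c now share one root
    }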