--------------------------------------------------------------------------------
I1 cache:         65536 B, 64 B, 4-way associative
D1 cache:         32768 B, 64 B, 8-way associative
LL cache:         67108864 B, 64 B, 64-way associative
Command:          /usr/home/liquid/.rustup/toolchains/w-profiling/bin/rustc --crate-name widestring --edition=2021 src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C opt-level=3 -C embed-bitcode=no --cfg feature="alloc" --cfg feature="default" --cfg feature="std" -C metadata=ae0a093736cadb65 -C extra-filename=-ae0a093736cadb65 --out-dir /usr/home/liquid/tmp/.tmpPP6tLM/target/release/deps -L dependency=/usr/home/liquid/tmp/.tmpPP6tLM/target/release/deps -Adeprecated -Aunknown-lints -Zincremental-verify-ich
Data file:        results/cgout-w-profiling-widestring-1.0.0-beta.1-Opt-Full
Events recorded:  Ir
Events shown:     Ir
Event sort order: Ir
Thresholds:       0.1
Include dirs:
User annotated:
Auto-annotation:  on
--------------------------------------------------------------------------------
Ir
--------------------------------------------------------------------------------
11,283,872,127 (100.0%)  PROGRAM TOTALS
--------------------------------------------------------------------------------
Ir  file:function
--------------------------------------------------------------------------------
418,464,654 ( 3.71%)  ./malloc/malloc.c:_int_free
342,661,906 ( 3.04%)  ./malloc/malloc.c:_int_malloc
253,940,417 ( 2.25%)  ./malloc/malloc.c:malloc
191,025,400 ( 1.69%)  ???:llvm::FPPassManager::runOnFunction(llvm::Function&)
164,284,544 ( 1.46%)  ???:llvm::InstCombinerImpl::run()
149,013,428 ( 1.32%)  ???:llvm::AnalysisManager::getResultImpl(llvm::AnalysisKey*, llvm::Function&)
143,774,936 ( 1.27%)  ???:llvm::AnalysisManager::invalidate(llvm::Function&, llvm::PreservedAnalyses const&)
135,516,308 ( 1.20%)  ./malloc/malloc.c:free
116,530,435 ( 1.03%)  ???:combineInstructionsOverFunction(llvm::Function&, llvm::InstCombineWorklist&, llvm::AAResults*, llvm::AssumptionCache&, llvm::TargetLibraryInfo&, llvm::TargetTransformInfo&, llvm::DominatorTree&, llvm::OptimizationRemarkEmitter&, llvm::BlockFrequencyInfo*, llvm::ProfileSummaryInfo*, unsigned int, llvm::LoopInfo*)
89,623,920 ( 0.79%)  ???:llvm::BitstreamCursor::readRecord(unsigned int, llvm::SmallVectorImpl&, llvm::StringRef*)
87,719,834 ( 0.78%)  ???:llvm::SelectionDAG::Combine(llvm::CombineLevel, llvm::AAResults*, llvm::CodeGenOpt::Level)
76,518,692 ( 0.68%)  ???:runCVP(llvm::Module&) [clone .llvm.11785992503873176614]
74,064,727 ( 0.66%)  ???:computeKnownBits(llvm::Value const*, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&) [clone .llvm.15619146473165121143]
71,147,617 ( 0.63%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::CalculateFromScratch(llvm::DominatorTreeBase&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
69,388,081 ( 0.61%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_avx_unaligned_erms
68,639,378 ( 0.61%)  ???:llvm::AttributeList::addAttributes(llvm::LLVMContext&, unsigned int, llvm::AttrBuilder const&) const
63,176,288 ( 0.56%)  ???:llvm::ValueHandleBase::AddToUseList()
62,154,571 ( 0.55%)  ???:bool llvm::DenseMapBase*, llvm::DenseMapInfo<(anonymous namespace)::SimpleValue>, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> >, (anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*, llvm::DenseMapInfo<(anonymous namespace)::SimpleValue>, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> >::LookupBucketFor<(anonymous namespace)::SimpleValue>((anonymous namespace)::SimpleValue const&, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> const*&) const
57,960,707 ( 0.51%)  ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S:__memcmp_avx2_movbe
57,922,730 ( 0.51%)  ???:llvm::AttributeList::get(llvm::LLVMContext&, llvm::ArrayRef)
52,229,679 ( 0.46%)  ???:llvm::TargetLibraryInfoImpl::getLibFunc(llvm::Function const&, llvm::LibFunc&) const
52,073,709 ( 0.46%)  ???:computeKnownBitsFromOperator(llvm::Operator const*, llvm::APInt const&, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
52,057,025 ( 0.46%)  ./malloc/malloc.c:malloc_consolidate
49,884,845 ( 0.44%)  ???:llvm::removeUnreachableBlocks(llvm::Function&, llvm::DomTreeUpdater*, llvm::MemorySSAUpdater*)
49,851,326 ( 0.44%)  ???:(anonymous namespace)::LazyValueInfoImpl::solve() [clone .llvm.4316243980339171764]
46,366,203 ( 0.41%)  ???:SimplifyICmpInst(unsigned int, llvm::Value*, llvm::Value*, llvm::SimplifyQuery const&, unsigned int) [clone .llvm.1619516508949622737]
45,537,723 ( 0.40%)  ???:llvm::DataLayout::getAlignment(llvm::Type*, bool) const
44,356,313 ( 0.39%)  ???:llvm::DataLayout::getTypeSizeInBits(llvm::Type*) const
44,293,010 ( 0.39%)  ???:llvm::InstCombinerImpl::visitCallInst(llvm::CallInst&)
42,800,076 ( 0.38%)  ???:llvm::coro::declaresIntrinsics(llvm::Module const&, std::initializer_list)
39,121,181 ( 0.35%)  ???:llvm::AttributeSetNode::get(llvm::LLVMContext&, llvm::AttrBuilder const&)
39,077,388 ( 0.35%)  ./string/../sysdeps/x86_64/multiarch/strcmp-avx2.S:__strncmp_avx2
39,074,195 ( 0.35%)  ???:llvm::InlineFunction(llvm::CallBase&, llvm::InlineFunctionInfo&, llvm::AAResults*, bool, llvm::Function*)
38,876,638 ( 0.34%)  ???:llvm::SimplifyInstruction(llvm::Instruction*, llvm::SimplifyQuery const&, llvm::OptimizationRemarkEmitter*)
38,267,302 ( 0.34%)  ???:llvm::InstCombinerImpl::SimplifyDemandedUseBits(llvm::Value*, llvm::APInt, llvm::KnownBits&, unsigned int, llvm::Instruction*)
37,669,390 ( 0.33%)  /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc:operator new(unsigned long)
37,668,463 ( 0.33%)  ???:llvm::InstCombinerImpl::visitICmpInst(llvm::ICmpInst&)
37,309,590 ( 0.33%)  ???:(anonymous namespace)::LazyValueInfoImpl::getEdgeValue(llvm::Value*, llvm::BasicBlock*, llvm::BasicBlock*, llvm::Instruction*) [clone .llvm.4316243980339171764]
36,100,322 ( 0.32%)  ???:computeKnownBitsFromAssume(llvm::Value const*, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
36,025,010 ( 0.32%)  ???:computeKnownBits(llvm::Value const*, llvm::APInt const&, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
35,674,350 ( 0.32%)  ???:llvm::detail::PassModel>, llvm::PreservedAnalyses, llvm::AnalysisManager>::run(llvm::Function&, llvm::AnalysisManager&)
35,464,636 ( 0.31%)  ./malloc/malloc.c:unlink_chunk.constprop.0
35,270,806 ( 0.31%)  ???:llvm::BasicAAResult::alias(llvm::MemoryLocation const&, llvm::MemoryLocation const&, llvm::AAQueryInfo&)
34,839,714 ( 0.31%)  ???:llvm::isNonEscapingLocalObject(llvm::Value const*, llvm::SmallDenseMap, llvm::detail::DenseMapPair >*)
32,082,257 ( 0.28%)  ???:llvm::simplifyCFG(llvm::BasicBlock*, llvm::TargetTransformInfo const&, llvm::DomTreeUpdater*, llvm::SimplifyCFGOptions const&, llvm::ArrayRef)
32,067,655 ( 0.28%)  ???:llvm::PMDataManager::verifyPreservedAnalysis(llvm::Pass*)
31,251,850 ( 0.28%)  ???:(anonymous namespace)::EarlyCSE::run() [clone .llvm.7062997131228810369]
31,244,528 ( 0.28%)  ???:llvm::LiveVariables::runOnBlock(llvm::MachineBasicBlock*, unsigned int)
29,629,280 ( 0.26%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::runSemiNCA(llvm::DominatorTreeBase&, unsigned int)
29,085,339 ( 0.26%)  ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_erms
28,987,734 ( 0.26%)  ???:llvm::FindFunctionBackedges(llvm::Function const&, llvm::SmallVectorImpl >&)
27,478,015 ( 0.24%)  ???:isKnownNonZero(llvm::Value const*, llvm::APInt const&, unsigned int, (anonymous namespace)::Query const&) [clone .llvm.15619146473165121143]
25,039,425 ( 0.22%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
25,006,791 ( 0.22%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::CalculateFromScratch(llvm::DominatorTreeBase&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
24,834,216 ( 0.22%)  ???:llvm::AttributeList::addAttribute(llvm::LLVMContext&, unsigned int, llvm::Attribute::AttrKind) const
24,600,464 ( 0.22%)  ???:(anonymous namespace)::MachineCopyPropagation::runOnMachineFunction(llvm::MachineFunction&)
24,460,235 ( 0.22%)  ???:llvm::SROA::runOnAlloca(llvm::AllocaInst&)
24,326,860 ( 0.22%)  ???:llvm::MemorySSA::buildMemorySSA(llvm::BatchAAResults&)
24,203,935 ( 0.21%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs:::short_write_process_buffer::
22,813,803 ( 0.20%)  ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_unaligned_erms
22,687,154 ( 0.20%)  ???:llvm::SelectionDAGISel::SelectCodeCommon(llvm::SDNode*, unsigned char const*, unsigned int)
22,308,198 ( 0.20%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::FindRoots(llvm::DominatorTreeBase const&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
21,911,198 ( 0.19%)  ???:llvm::GVN::processBlock(llvm::BasicBlock*)
21,326,692 ( 0.19%)  ???:(anonymous namespace)::CVPLatticeFunc::ComputeInstructionState(llvm::Instruction&, llvm::DenseMap, llvm::PointerIntPairInfo > >, (anonymous namespace)::CVPLatticeVal, llvm::DenseMapInfo, llvm::PointerIntPairInfo > > >, llvm::detail::DenseMapPair, llvm::PointerIntPairInfo > >, (anonymous namespace)::CVPLatticeVal> >&, llvm::SparseSolver, llvm::PointerIntPairInfo > >, (anonymous namespace)::CVPLatticeVal, llvm::LatticeKeyInfo, llvm::PointerIntPairInfo > > > >&)
20,702,025 ( 0.18%)  ???:(anonymous namespace)::PruningFunctionCloner::CloneBlock(llvm::BasicBlock const*, llvm::ilist_iterator, false, true>, std::vector >&)
20,676,981 ( 0.18%)  ???:(anonymous namespace)::SimplifyCFGOpt::simplifyCondBranch(llvm::BranchInst*, llvm::IRBuilder&)
19,913,342 ( 0.18%)  ???:(anonymous namespace)::BitcodeReader::parseModule(unsigned long, bool, llvm::function_ref, std::allocator > > (llvm::StringRef)>)
19,713,817 ( 0.17%)  ./malloc/malloc.c:realloc
19,566,459 ( 0.17%)  ???:(anonymous namespace)::eliminateDeadStores(llvm::Function&, llvm::AAResults&, llvm::MemorySSA&, llvm::DominatorTree&, llvm::PostDominatorTree&, llvm::TargetLibraryInfo const&, llvm::LoopInfo const&) [clone .llvm.5769264623867638418]
18,960,830 ( 0.17%)  ???:(anonymous namespace)::AggressiveDeadCodeElimination::performDeadCodeElimination()
18,461,803 ( 0.16%)  ???:updateCGAndAnalysisManagerForPass(llvm::LazyCallGraph&, llvm::LazyCallGraph::SCC&, llvm::LazyCallGraph::Node&, llvm::AnalysisManager&, llvm::CGSCCUpdateResult&, llvm::AnalysisManager&, bool) [clone .llvm.5426518467876156712]
18,354,534 ( 0.16%)  ???:llvm::SimplifyGEPInst(llvm::Type*, llvm::ArrayRef, llvm::SimplifyQuery const&)
18,222,842 ( 0.16%)  ???:llvm::MD5::final(llvm::MD5::MD5Result&)
18,001,406 ( 0.16%)  ???:llvm::DemandedBits::isInstructionDead(llvm::Instruction*)
17,930,597 ( 0.16%)  ???:(anonymous namespace)::LazyValueInfoImpl::getValueInBlock(llvm::Value*, llvm::BasicBlock*, llvm::Instruction*) [clone .llvm.4316243980339171764]
17,769,476 ( 0.16%)  ???:???
17,221,560 ( 0.15%)  ???:llvm::Intrinsic::getDeclaration(llvm::Module*, unsigned int, llvm::ArrayRef)
16,956,415 ( 0.15%)  ???:llvm::FoldingSetBase::FindNodeOrInsertPos(llvm::FoldingSetNodeID const&, void*&, llvm::FoldingSetBase::FoldingSetInfo const&)
16,853,639 ( 0.15%)  ./malloc/malloc.c:calloc
16,769,539 ( 0.15%)  ???:llvm::LoopBase::verifyLoop() const
16,622,504 ( 0.15%)  ???:llvm::BlockFrequencyInfoImpl::initializeRPOT()
16,371,019 ( 0.15%)  ???:llvm::SCCPInstVisitor::solve()
16,236,180 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs:::short_write_process_buffer::
16,072,489 ( 0.14%)  ???:llvm::InstCombinerImpl::visitStoreInst(llvm::StoreInst&)
16,068,978 ( 0.14%)  ???:llvm::LoopInfoBase::analyze(llvm::DominatorTreeBase const&)
15,902,795 ( 0.14%)  ???:llvm::ConstantRange::makeExactICmpRegion(llvm::CmpInst::Predicate, llvm::APInt const&)
15,773,554 ( 0.14%)  ???:llvm::SROA::runImpl(llvm::Function&, llvm::DominatorTree&, llvm::AssumptionCache&)
15,583,016 ( 0.14%)  ???:llvm::PopulateLoopsDFS::traverse(llvm::BasicBlock*)
15,312,533 ( 0.14%)  ???:llvm::DenseMapBase, std::unique_ptr<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry, std::default_delete<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry> >, llvm::DenseMapInfo >, llvm::detail::DenseMapPair, std::unique_ptr<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry, std::default_delete<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry> > > >, llvm::PoisoningVH, std::unique_ptr<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry, std::default_delete<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry> >, llvm::DenseMapInfo >, llvm::detail::DenseMapPair, std::unique_ptr<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry, std::default_delete<(anonymous namespace)::LazyValueInfoCache::BlockCacheEntry> > > >::destroyAll() [clone .llvm.4316243980339171764]
15,102,653 ( 0.13%)  ???:llvm::Type::getPrimitiveSizeInBits() const
14,820,366 ( 0.13%)  ???:llvm::LivePhysRegs::stepBackward(llvm::MachineInstr const&)
14,780,979 ( 0.13%)  ???:llvm::SROA::rewritePartition(llvm::AllocaInst&, llvm::sroa::AllocaSlices&, llvm::sroa::Partition&)
14,600,706 ( 0.13%)  ???:runImpl(llvm::Function&, llvm::LazyValueInfo*, llvm::DominatorTree*, llvm::SimplifyQuery const&) [clone .llvm.16011871802505272439]
14,420,889 ( 0.13%)  ???:llvm::TargetLoweringBase::getTypeConversion(llvm::LLVMContext&, llvm::EVT) const
14,370,027 ( 0.13%)  ./string/../sysdeps/x86_64/multiarch/strlen-avx2.S:__strlen_avx2
14,319,575 ( 0.13%)  ???:llvm::Type::isSizedDerivedType(llvm::SmallPtrSetImpl*) const
14,301,837 ( 0.13%)  ???:llvm::PMDataManager::removeNotPreservedAnalysis(llvm::Pass*)
14,221,041 ( 0.13%)  ???:llvm::ScheduleDAGSDNodes::BuildSchedUnits()
14,195,125 ( 0.13%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs:::span_data_to_lines_and_cols
14,103,094 ( 0.12%)  ???:llvm::Instruction::eraseFromParent()
14,095,377 ( 0.12%)  ???:llvm::ReassociatePass::run(llvm::Function&, llvm::AnalysisManager&)
14,084,315 ( 0.12%)  ???:llvm::JumpThreadingPass::processBlock(llvm::BasicBlock*)
13,984,664 ( 0.12%)  ???:(anonymous namespace)::DeadMachineInstructionElim::eliminateDeadMI(llvm::MachineFunction&)
13,972,005 ( 0.12%)  ???:llvm::detail::AnalysisResultModel, llvm::Function>, llvm::OuterAnalysisManagerProxy, llvm::Function>::Result, llvm::PreservedAnalyses, llvm::AnalysisManager::Invalidator, true>::invalidate(llvm::Function&, llvm::PreservedAnalyses const&, llvm::AnalysisManager::Invalidator&)
13,830,578 ( 0.12%)  ???:llvm::ConstantFoldTerminator(llvm::BasicBlock*, bool, llvm::TargetLibraryInfo const*, llvm::DomTreeUpdater*)
13,748,864 ( 0.12%)  ./malloc/malloc.c:_int_realloc
13,736,045 ( 0.12%)  ???:llvm::BlockFrequencyInfoImplBase::finalizeMetrics()
13,588,669 ( 0.12%)  ???:llvm::IDFCalculatorBase::calculate(llvm::SmallVectorImpl&)
13,480,069 ( 0.12%)  ???:llvm::MD5::update(llvm::StringRef)
13,476,647 ( 0.12%)  ???:llvm::Value::stripAndAccumulateConstantOffsets(llvm::DataLayout const&, llvm::APInt&, bool, llvm::function_ref) const
13,455,520 ( 0.12%)  ???:(anonymous namespace)::DAGCombiner::combine(llvm::SDNode*)
13,229,810 ( 0.12%)  ???:llvm::ReassociatePass::BuildRankMap(llvm::Function&, llvm::ReversePostOrderTraversal >&)
12,993,830 ( 0.12%)  ???:llvm_regexec
12,766,703 ( 0.11%)  ???:llvm::PassManager, llvm::LazyCallGraph&, llvm::CGSCCUpdateResult&>::run(llvm::LazyCallGraph::SCC&, llvm::AnalysisManager&, llvm::LazyCallGraph&, llvm::CGSCCUpdateResult&)
12,697,103 ( 0.11%)  ???:collectBitParts(llvm::Value*, bool, bool, std::map, std::less, std::allocator > > >&, int, bool&)
12,692,483 ( 0.11%)  ???:(anonymous namespace)::Verifier::visitInstruction(llvm::Instruction&)
12,568,721 ( 0.11%)  ???:llvm::Instruction::~Instruction()
12,538,366 ( 0.11%)  ./stdlib/msort.c:msort_with_tmp.part.0
12,423,780 ( 0.11%)  ???:llvm::X86TargetMachine::getTargetTransformInfo(llvm::Function const&)
12,271,296 ( 0.11%)  ???:std::back_insert_iterator > > std::__copy_move_a2, false, llvm::GraphTraits >, std::back_insert_iterator > > >(llvm::po_iterator, false, llvm::GraphTraits >, llvm::po_iterator, false, llvm::GraphTraits >, std::back_insert_iterator > >)
12,166,428 ( 0.11%)  ???:llvm::BasicAAResult::getModRefInfo(llvm::CallBase const*, llvm::MemoryLocation const&, llvm::AAQueryInfo&)
12,146,653 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs:__rdl_alloc
12,136,444 ( 0.11%)  ???:llvm::MachineInstr::addOperand(llvm::MachineFunction&, llvm::MachineOperand const&)
12,085,185 ( 0.11%)  ???:llvm::InstCombinerImpl::visitBitCast(llvm::BitCastInst&)
12,069,367 ( 0.11%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_sse2_unaligned_erms
11,949,029 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::eq
11,898,355 ( 0.11%)  ???:simplifyFunctionCFGImpl(llvm::Function&, llvm::TargetTransformInfo const&, llvm::DominatorTree*, llvm::SimplifyCFGOptions const&) [clone .llvm.11597842506770977528]
11,889,109 ( 0.11%)  ???:llvm::detail::AnalysisResultModel::Invalidator, true>::~AnalysisResultModel()
11,883,856 ( 0.11%)  ???:int llvm::array_pod_sort_comparator(void const*, void const*)
11,868,211 ( 0.11%)  ???:llvm::SmallPtrSetImplBase::insert_imp_big(void const*)
11,858,582 ( 0.11%)  ???:llvm::FoldingSet::NodeEquals(llvm::FoldingSetBase const*, llvm::FoldingSetBase::Node*, llvm::FoldingSetNodeID const&, unsigned int, llvm::FoldingSetNodeID&)
11,798,388 ( 0.10%)  ???:llvm::MemorySSA::OptimizeUses::optimizeUses()
11,710,370 ( 0.10%)  ???:llvm::ScalarEvolution::getAddExpr(llvm::SmallVectorImpl&, llvm::SCEV::NoWrapFlags, unsigned int)
11,652,868 ( 0.10%)  ???:llvm::SelectionDAG::computeKnownBits(llvm::SDValue, llvm::APInt const&, unsigned int) const
11,647,880 ( 0.10%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
11,406,975 ( 0.10%)  ???:llvm::PassManager>::run(llvm::Function&, llvm::AnalysisManager&)
11,367,788 ( 0.10%)  ???:llvm::InstCombinerImpl::visitLoadInst(llvm::LoadInst&)
11,293,865 ( 0.10%)  ???:llvm::Value::~Value()
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs
--------------------------------------------------------------------------------
Ir

-- line 186 ----------------------------------------
.  /// // use the values stored in map
.  /// ```
.  pub struct HashMap {
.      pub(crate) hash_builder: S,
.      pub(crate) table: RawTable<(K, V), A>,
.  }
.
.  impl Clone for HashMap {
992 ( 0.00%)      fn clone(&self) -> Self {
1,693 ( 0.00%)          HashMap {
.              hash_builder: self.hash_builder.clone(),
271 ( 0.00%)              table: self.table.clone(),
.          }
1,116 ( 0.00%)      }
.
.      fn clone_from(&mut self, source: &Self) {
.          self.table.clone_from(&source.table);
.
.          // Update hash_builder only if we successfully cloned all elements.
.          self.hash_builder.clone_from(&source.hash_builder);
.      }
.  }
-- line 207 ----------------------------------------
-- line 210 ----------------------------------------
.  /// instances of any functions like RawTable::reserve from being generated
.  #[cfg_attr(feature = "inline-more", inline)]
.  pub(crate) fn make_hasher(hash_builder: &S) -> impl Fn(&(Q, V)) -> u64 + '_
.  where
.      K: Borrow,
.      Q: Hash,
.      S: BuildHasher,
.  {
167,678 ( 0.00%)      move |val| make_hash::(hash_builder, &val.0)
.  }
.
.  /// Ensures that a single closure type across uses of this which, in turn prevents multiple
.  /// instances of any functions like RawTable::reserve from being generated
.  #[cfg_attr(feature = "inline-more", inline)]
.  fn equivalent_key(k: &Q) -> impl Fn(&(K, V)) -> bool + '_
.  where
.      K: Borrow,
.      Q: ?Sized + Eq,
.  {
594,174 ( 0.01%)      move |x| k.eq(x.0.borrow())
.  }
.
.  /// Ensures that a single closure type across uses of this which, in turn prevents multiple
.  /// instances of any functions like RawTable::reserve from being generated
.  #[cfg_attr(feature = "inline-more", inline)]
.  fn equivalent(k: &Q) -> impl Fn(&K) -> bool + '_
.  where
.      K: Borrow,
.      Q: ?Sized + Eq,
.  {
766,052 ( 0.01%)      move |x| k.eq(x.borrow())
.  }
.
.  #[cfg(not(feature = "nightly"))]
.  #[cfg_attr(feature = "inline-more", inline)]
.  pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64
.  where
.      K: Borrow,
.      Q: Hash + ?Sized,
-- line 248 ----------------------------------------
-- line 251 ----------------------------------------
.      use core::hash::Hasher;
.      let mut state = hash_builder.build_hasher();
.      val.hash(&mut state);
.      state.finish()
.  }
.
.  #[cfg(feature = "nightly")]
.  #[cfg_attr(feature = "inline-more", inline)]
4 ( 0.00%)  pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64
.  where
.      K: Borrow,
.      Q: Hash + ?Sized,
.      S: BuildHasher,
.  {
.
      hash_builder.hash_one(val)
8 ( 0.00%)  }
.
.  #[cfg(not(feature = "nightly"))]
.  #[cfg_attr(feature = "inline-more", inline)]
.  pub(crate) fn make_insert_hash(hash_builder: &S, val: &K) -> u64
.  where
.      K: Hash,
.      S: BuildHasher,
.  {
-- line 274 ----------------------------------------
-- line 367 ----------------------------------------
.      /// let s = DefaultHashBuilder::default();
.      /// let mut map = HashMap::with_hasher(s);
.      /// map.insert(1, 2);
.      /// ```
.      ///
.      /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub const fn with_hasher(hash_builder: S) -> Self {
355,349 ( 0.00%)          Self {
.              hash_builder,
.              table: RawTable::new(),
.          }
.      }
.
.      /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
.      /// to hash the keys.
.      ///
-- line 383 ----------------------------------------
-- line 437 ----------------------------------------
.      /// use hashbrown::hash_map::DefaultHashBuilder;
.      ///
.      /// let s = DefaultHashBuilder::default();
.      /// let mut map = HashMap::with_hasher(s);
.      /// map.insert(1, 2);
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self {
61 ( 0.00%)          Self {
.              hash_builder,
.              table: RawTable::new_in(alloc),
.          }
.      }
.
.      /// Creates an empty `HashMap` with the specified capacity, using `hash_builder`
.      /// to hash the keys. It will be allocated with the given allocator.
.      ///
-- line 453 ----------------------------------------
-- line 527 ----------------------------------------
.      /// map.insert("c", 3);
.      ///
.      /// for key in map.keys() {
.      ///     println!("{}", key);
.      /// }
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn keys(&self) -> Keys<'_, K, V> {
6 ( 0.00%)          Keys { inner: self.iter() }
.      }
.
.      /// An iterator visiting all values in arbitrary order.
.      /// The iterator element type is `&'a V`.
.      ///
.      /// # Examples
.      ///
.      /// ```
-- line 543 ----------------------------------------
-- line 663 ----------------------------------------
.      ///
.      /// let mut a = HashMap::new();
.      /// assert_eq!(a.len(), 0);
.      /// a.insert(1, "a");
.      /// assert_eq!(a.len(), 1);
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn len(&self) -> usize {
38,021 ( 0.00%)          self.table.len()
.      }
.
.      /// Returns `true` if the map contains no elements.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// use hashbrown::HashMap;
-- line 679 ----------------------------------------
-- line 680 ----------------------------------------
.      ///
.      /// let mut a = HashMap::new();
.      /// assert!(a.is_empty());
.      /// a.insert(1, "a");
.      /// assert!(!a.is_empty());
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn is_empty(&self) -> bool {
130,269 ( 0.00%)          self.len() == 0
.      }
.
.      /// Clears the map, returning all key-value pairs as an iterator. Keeps the
.      /// allocated memory for reuse.
.      ///
.      /// # Examples
.      ///
.      /// ```
-- line 696 ----------------------------------------
-- line 790 ----------------------------------------
.      /// use hashbrown::HashMap;
.      ///
.      /// let mut a = HashMap::new();
.      /// a.insert(1, "a");
.      /// a.clear();
.      /// assert!(a.is_empty());
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
6 ( 0.00%)      pub fn clear(&mut self) {
.          self.table.clear();
6 ( 0.00%)      }
.
.      /// Creates a consuming iterator visiting all the keys in arbitrary order.
.      /// The map cannot be used after calling this.
.      /// The iterator element type is `K`.
.      ///
.      /// # Examples
.      ///
.      /// ```
-- line 808 ----------------------------------------
-- line 963 ----------------------------------------
.      /// }
.      ///
.      /// assert_eq!(letters[&'s'], 2);
.      /// assert_eq!(letters[&'t'], 3);
.      /// assert_eq!(letters[&'u'], 1);
.      /// assert_eq!(letters.get(&'y'), None);
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
548 ( 0.00%)      pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> {
.          let hash = make_insert_hash::(&self.hash_builder, &key);
.          if let Some(elem) = self.table.find(hash, equivalent_key(&key)) {
40 ( 0.00%)              Entry::Occupied(OccupiedEntry {
.                  hash,
.                  key: Some(key),
.                  elem,
.                  table: self,
.              })
.          } else {
645 ( 0.00%)              Entry::Vacant(VacantEntry {
.                  hash,
.                  key,
.                  table: self,
.              })
.          }
685 ( 0.00%)      }
.
.      /// Gets the given key's corresponding entry by reference in the map for in-place manipulation.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// use hashbrown::HashMap;
.      ///
-- line 995 ----------------------------------------
-- line 1047 ----------------------------------------
.      /// ```
.      #[inline]
.      pub fn get(&self, k: &Q) -> Option<&V>
.      where
.          K: Borrow,
.          Q: Hash + Eq,
.      {
.          // Avoid `Option::map` because it bloats LLVM IR.
1,175,340 ( 0.01%)          match self.get_inner(k) {
.              Some(&(_, ref v)) => Some(v),
.              None => None,
.          }
.      }
.
.      /// Returns the key-value pair corresponding to the supplied key.
.      ///
.      /// The supplied key may be any borrowed form of the map's key type, but
-- line 1063 ----------------------------------------
-- line 1091 ----------------------------------------
.      }
.
.      #[inline]
.      fn get_inner(&self, k: &Q) -> Option<&(K, V)>
.      where
.          K: Borrow,
.          Q: Hash + Eq,
.      {
1,659,825 ( 0.01%)          if self.table.is_empty() {
.              None
.          } else {
2 ( 0.00%)              let hash = make_hash::(&self.hash_builder, k);
.              self.table.get(hash, equivalent_key(k))
.          }
.      }
.
.      /// Returns the key-value pair corresponding to the supplied key, with a mutable reference to value.
.      ///
.      /// The supplied key may be any borrowed form of the map's key type, but
.      /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
-- line 1110 ----------------------------------------
-- line 1155 ----------------------------------------
.      /// use hashbrown::HashMap;
.      ///
.      /// let mut map = HashMap::new();
.      /// map.insert(1, "a");
.      /// assert_eq!(map.contains_key(&1), true);
.      /// assert_eq!(map.contains_key(&2), false);
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
675,876 ( 0.01%)      pub fn contains_key(&self, k: &Q) -> bool
.      where
.          K: Borrow,
.          Q: Hash + Eq,
.      {
.          self.get_inner(k).is_some()
906,748 ( 0.01%)      }
.
.      /// Returns a mutable reference to the value corresponding to the key.
.      ///
.      /// The key may be any borrowed form of the map's key type, but
.      /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
.      /// the key type.
.      ///
.      /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html
-- line 1177 ----------------------------------------
-- line 1185 ----------------------------------------
.      /// let mut map = HashMap::new();
.      /// map.insert(1, "a");
.      /// if let Some(x) = map.get_mut(&1) {
.      ///     *x = "b";
.      /// }
.      /// assert_eq!(map[&1], "b");
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
493 ( 0.00%)      pub fn get_mut(&mut self, k: &Q) -> Option<&mut V>
.      where
.          K: Borrow,
.          Q: Hash + Eq,
.      {
.          // Avoid `Option::map` because it bloats LLVM IR.
3,861 ( 0.00%)          match self.get_inner_mut(k) {
.              Some(&mut (_, ref mut v)) => Some(v),
.              None => None,
.          }
986 ( 0.00%)      }
.
.      #[inline]
.      fn get_inner_mut(&mut self, k: &Q) -> Option<&mut (K, V)>
.      where
.          K: Borrow,
.          Q: Hash + Eq,
.      {
1,287 ( 0.00%)          if self.table.is_empty() {
.              None
.          } else {
.              let hash = make_hash::(&self.hash_builder, k);
1,588 ( 0.00%)              self.table.get_mut(hash, equivalent_key(k))
.          }
.      }
.
.      /// Attempts to get mutable references to `N` values in the map at once.
.      ///
.      /// Returns an array of length `N` with the results of each query. For soundness, at most one
.      /// mutable reference will be returned to any value. `None` will be returned if any of the
.      /// keys are duplicates or missing.
-- line 1223 ----------------------------------------
-- line 1495 ----------------------------------------
.      /// assert_eq!(map.insert(37, "a"), None);
.      /// assert_eq!(map.is_empty(), false);
.      ///
.      /// map.insert(37, "b");
.      /// assert_eq!(map.insert(37, "c"), Some("b"));
.      /// assert_eq!(map[&37], "c");
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
5,749,299 ( 0.05%)      pub fn insert(&mut self, k: K, v: V) -> Option {
.          let hash = make_insert_hash::(&self.hash_builder, &k);
9,875 ( 0.00%)          if let Some((_, item)) = self.table.get_mut(hash, equivalent_key(&k)) {
28 ( 0.00%)              Some(mem::replace(item, v))
.          } else {
2,584,883 ( 0.02%)              self.table
2,547,947 ( 0.02%)                  .insert(hash, (k, v), make_hasher::(&self.hash_builder));
290,933 ( 0.00%)              None
.          }
5,396,087 ( 0.05%)      }
.
.      /// Insert a key-value pair into the map without checking
.      /// if the key already exists in the map.
.      ///
.      /// Returns a reference to the key and value just inserted.
.      ///
.      /// This operation is safe if a key does not exist in the map.
.      ///
-- line 1520 ----------------------------------------
-- line 1592 ----------------------------------------
.      /// use hashbrown::HashMap;
.      ///
.      /// let mut map = HashMap::new();
.      /// map.insert(1, "a");
.      /// assert_eq!(map.remove(&1), Some("a"));
.      /// assert_eq!(map.remove(&1), None);
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
105,924 ( 0.00%)      pub fn remove(&mut self, k: &Q) -> Option
.      where
.          K: Borrow,
.          Q: Hash + Eq,
.      {
.          // Avoid `Option::map` because it bloats LLVM IR.
1,404,375 ( 0.01%)          match self.remove_entry(k) {
44,251 ( 0.00%)              Some((_, v)) => Some(v),
94,632 ( 0.00%)              None => None,
.          }
245,009 ( 0.00%)      }
.
.      /// Removes a key from the map, returning the stored key and value if the
.      /// key was previously in the map.
.      ///
.      /// The key may be any borrowed form of the map's key type, but
.      /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
.      /// the key type.
.      ///
-- line 1618 ----------------------------------------
-- line 1631 ----------------------------------------
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn remove_entry(&mut self, k: &Q) -> Option<(K, V)>
.      where
.          K: Borrow,
.          Q: Hash + Eq,
.      {
.          let hash = make_hash::(&self.hash_builder, k);
461,611 ( 0.00%)          self.table.remove_entry(hash, equivalent_key(k))
.      }
.  }
.
.  impl HashMap {
.      /// Creates a raw entry builder for the HashMap.
.      ///
.      /// Raw entries provide the lowest level of control for searching and
.      /// manipulating a map. They must be manually initialized with a hash and
-- line 1647 ----------------------------------------
-- line 2209 ----------------------------------------
.      /// Creates a `RawEntryMut` from the given key and its hash.
.      #[inline]
.      #[allow(clippy::wrong_self_convention)]
.      pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A>
.      where
.          K: Borrow,
.          Q: Eq,
.      {
3,622,440 ( 0.03%)          self.from_hash(hash, equivalent(k))
.      }
.  }
.
.  impl<'a, K, V, S, A: Allocator + Clone> RawEntryBuilderMut<'a, K, V, S, A> {
.      /// Creates a `RawEntryMut` from the given hash.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[allow(clippy::wrong_self_convention)]
9,441,816 ( 0.08%)      pub fn from_hash(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A>
.      where
.          for<'b> F: FnMut(&'b K) -> bool,
.      {
.          self.search(hash, is_match)
10,132,619 ( 0.09%)      }
.
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn search(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A>
.      where
.          for<'b> F: FnMut(&'b K) -> bool,
.      {
1,216,282 ( 0.01%)          match self.map.table.find(hash, |(k, _)| is_match(k)) {
6,081,384 ( 0.05%)              Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut {
.                  elem,
.                  table: &mut self.map.table,
.                  hash_builder: &self.map.hash_builder,
.              }),
409,894 ( 0.00%)              None => RawEntryMut::Vacant(RawVacantEntryMut {
.                  table: &mut self.map.table,
.                  hash_builder: &self.map.hash_builder,
.              }),
.          }
.      }
.  }
.
.  impl<'a, K, V, S, A: Allocator + Clone> RawEntryBuilder<'a, K, V, S, A> {
-- line 2251 ----------------------------------------
-- line 2260 ----------------------------------------
.      {
.          let hash = make_hash::(&self.map.hash_builder, k);
.          self.from_key_hashed_nocheck(hash, k)
.      }
.
.      /// Access an entry by a key and its hash.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[allow(clippy::wrong_self_convention)]
2,790,449 ( 0.02%)      pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)>
.      where
.          K: Borrow,
.          Q: Eq,
.      {
3,192,161 ( 0.03%)          self.from_hash(hash, equivalent(k))
5,029,780 ( 0.04%)      }
.
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn search(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)>
.      where
.          F: FnMut(&K) -> bool,
.      {
4,473,441 ( 0.04%)          match self.map.table.get(hash, |(k, _)| is_match(k)) {
.              Some(&(ref key, ref value)) => Some((key, value)),
.              None => None,
.          }
.      }
.
.      /// Access an entry by hash.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[allow(clippy::wrong_self_convention)]
-- line 2289 ----------------------------------------
-- line 2624 ----------------------------------------
.      /// and returns a mutable reference to it.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[allow(clippy::shadow_unrelated)]
.      pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V)
.      where
.          K: Hash,
.          S: BuildHasher,
.      {
679,668 ( 0.01%)          let &mut (ref mut k, ref mut v) = self.table.insert_entry(
.              hash,
.              (key, value),
.              make_hasher::(self.hash_builder),
.          );
.          (k, v)
.      }
.
.      /// Set the value of an entry with a custom hasher function.
-- line 2640 ----------------------------------------
-- line 2974 ----------------------------------------
.      /// map.insert("a", 1);
.      /// map.insert("b", 2);
.      /// map.insert("c", 3);
.      ///
.      /// // Not possible with .iter()
.      /// let vec: Vec<(&str, i32)> = map.into_iter().collect();
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
6,166 ( 0.00%)      fn into_iter(self) -> IntoIter {
111,149 ( 0.00%)          IntoIter {
53,109 ( 0.00%)              inner: self.table.into_iter(),
.          }
18,498 ( 0.00%)      }
.  }
.
.  impl<'a, K, V> Iterator for Iter<'a, K, V> {
.      type Item = (&'a K, &'a V);
.
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn next(&mut self) -> Option<(&'a K, &'a V)> {
.          // Avoid `Option::map` because it bloats LLVM IR.
302,471 ( 0.00%)          match self.inner.next() {
.              Some(x) => unsafe {
.                  let r = x.as_ref();
3,379 ( 0.00%)                  Some((&r.0, &r.1))
.              },
.              None => None,
.          }
.      }
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn size_hint(&self) -> (usize, Option) {
134 ( 0.00%)          self.inner.size_hint()
.      }
.  }
.  impl ExactSizeIterator for Iter<'_, K, V> {
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn len(&self) -> usize {
.          self.inner.len()
.      }
.  }
-- line 3013 ----------------------------------------
-- line 3051 ----------------------------------------
.      }
.  }
.
.  impl Iterator for IntoIter {
.      type Item = (K, V);
.
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn next(&mut self) -> Option<(K, V)> {
11,091 ( 0.00%)          self.inner.next()
.      }
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn size_hint(&self) -> (usize, Option) {
.          self.inner.size_hint()
.      }
.  }
.  impl ExactSizeIterator for IntoIter {
.      #[cfg_attr(feature = "inline-more", inline)]
-- line 3067 ----------------------------------------
-- line 3076 ----------------------------------------
.          f.debug_list().entries(self.iter()).finish()
.      }
.  }
.
.  impl<'a, K, V> Iterator for Keys<'a, K, V> {
.      type Item = &'a K;
.
.      #[cfg_attr(feature = "inline-more", inline)]
145 ( 0.00%)      fn next(&mut self) -> Option<&'a K> {
.          // Avoid `Option::map` because it bloats LLVM IR.
.          match self.inner.next() {
.              Some((k, _)) => Some(k),
.              None => None,
.          }
290 ( 0.00%)      }
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn size_hint(&self) -> (usize, Option) {
.          self.inner.size_hint()
.      }
.  }
.  impl ExactSizeIterator for Keys<'_, K, V> {
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn len(&self) -> usize {
-- line 3098 ----------------------------------------
-- line 3819 ----------------------------------------
.      /// ```
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn insert(self, value: V) -> &'a mut V
.      where
.          K: Hash,
.          S: BuildHasher,
.      {
.          let table = &mut self.table.table;
51 ( 0.00%)          let entry = table.insert_entry(
.              self.hash,
.              (self.key, value),
.              make_hasher::(&self.table.hash_builder),
.          );
.          &mut entry.1
.      }
.
.      #[cfg_attr(feature = "inline-more", inline)]
-- line 3835 ----------------------------------------
-- line 4557 ----------------------------------------
.  /// keys with new values returned from the iterator.
.  impl Extend<(K, V)> for HashMap
.  where
.      K: Eq + Hash,
.      S: BuildHasher,
.      A: Allocator + Clone,
.  {
.      #[cfg_attr(feature = "inline-more", inline)]
103,944 ( 0.00%)      fn extend>(&mut self, iter: T) {
.          // Keys may be already present or show multiple times in the iterator.
.          // Reserve the entire hint lower bound if the map is empty.
.          // Otherwise reserve half the hint (rounded up), so the map
.          // will only resize twice in the worst case.
84,806 ( 0.00%)          let iter = iter.into_iter();
44,034 ( 0.00%)          let reserve = if self.is_empty() {
.              iter.size_hint().0
.          } else {
5,962 ( 0.00%)              (iter.size_hint().0 + 1) / 2
.          };
.          self.reserve(reserve);
.          iter.for_each(move |(k, v)| {
154,826 ( 0.00%)              self.insert(k, v);
.          });
66,251 ( 0.00%)      }
.
.      #[inline]
.      #[cfg(feature = "nightly")]
.      fn extend_one(&mut self, (k, v): (K, V)) {
.          self.insert(k, v);
.      }
.
.      #[inline]
-- line 4588 ----------------------------------------
-- line 4604 ----------------------------------------
.  impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap
.  where
.      K: Eq + Hash + Copy,
.      V: Copy,
.      S: BuildHasher,
.      A: Allocator + Clone,
.  {
.      #[cfg_attr(feature = "inline-more", inline)]
14 ( 0.00%)      fn extend>(&mut self, iter: T) {
.          self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
16 ( 0.00%)      }
.
.      #[inline]
.      #[cfg(feature = "nightly")]
.      fn extend_one(&mut self, (k, v): (&'a K, &'a V)) {
.          self.insert(*k, *v);
.      }
.
.      #[inline]
-- line 4622 ----------------------------------------
3,761,180 ( 0.03%)
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs
--------------------------------------------------------------------------------
Ir

-- line 111 ----------------------------------------
.  const EMPTY: u8 = 0b1111_1111;
.
.  /// Control byte value for a deleted bucket.
.  const DELETED: u8 = 0b1000_0000;
.
.  /// Checks whether a control byte represents a full bucket (top bit is clear).
.  #[inline]
.  fn is_full(ctrl: u8) -> bool {
3,478,703 ( 0.03%)      ctrl & 0x80 == 0
.  }
.
.  /// Checks whether a control byte represents a special value (top bit is set).
.  #[inline]
.  fn is_special(ctrl: u8) -> bool {
.      ctrl & 0x80 != 0
.  }
.
.  /// Checks whether a special control value is EMPTY (just check 1 bit).
.  #[inline]
.  fn special_is_empty(ctrl: u8) -> bool {
.      debug_assert!(is_special(ctrl));
285,043 ( 0.00%)      ctrl & 0x01 != 0
.  }
.
.  /// Primary hash function, used to select the initial bucket to probe from.
.  #[inline]
.  #[allow(clippy::cast_possible_truncation)]
.  fn h1(hash: u64) -> usize {
.      // On 32-bit platforms we simply ignore the higher hash bits.
.      hash as usize
-- line 140 ----------------------------------------
-- line 143 ----------------------------------------
.  /// Secondary hash function, saved in the low 7 bits of the control byte.
.  #[inline]
.  #[allow(clippy::cast_possible_truncation)]
.  fn h2(hash: u64) -> u8 {
.      // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit
.      // value, some hash functions (such as FxHash) produce a usize result
.      // instead, which means that the top 32 bits are 0 on 32-bit platforms.
.      let hash_len = usize::min(mem::size_of::(), mem::size_of::());
66,398,630 ( 0.59%)      let top7 = hash >> (hash_len * 8 - 7);
.      (top7 & 0x7f) as u8 // truncation
.  }
.
.  /// Probe sequence based on triangular numbers, which is guaranteed (since our
.  /// table size is a power of two) to visit every group of elements exactly once.
.  ///
.  /// A triangular probe has us jump by 1 more group every time. So first we
.  /// jump by 1 group (meaning we just continue our linear scan), then 2 groups
-- line 159 ----------------------------------------
-- line 170 ----------------------------------------
.      #[inline]
.      fn move_next(&mut self, bucket_mask: usize) {
.          // We should have found an empty bucket by now and ended the probe.
.          debug_assert!(
.              self.stride <= bucket_mask,
.              "Went past end of probe sequence"
.          );
.
389,310 ( 0.00%)          self.stride += Group::WIDTH;
389,310 ( 0.00%)          self.pos += self.stride;
325,631 ( 0.00%)          self.pos &= bucket_mask;
.      }
.  }
.
.  /// Returns the number of buckets needed to hold the given number of items,
.  /// taking the maximum load factor into account.
.  ///
.  /// Returns `None` if an overflow occurs.
.  // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258
.  #[cfg_attr(target_os = "emscripten", inline(never))]
.  #[cfg_attr(not(target_os = "emscripten"), inline)]
.  fn capacity_to_buckets(cap: usize) -> Option {
.      debug_assert_ne!(cap, 0);
.
.      // For small tables we require at least 1 empty bucket so that lookups are
.      // guaranteed to terminate if an element doesn't exist in the table.
357,398 ( 0.00%)      if cap < 8 {
.          // We don't bother with a table size of 2 buckets since that can only
.
          // hold a single element. Instead we skip directly to a 4 bucket table
.          // which can hold 3 elements.
788,945 ( 0.01%)          return Some(if cap < 4 { 4 } else { 8 });
.      }
.
.      // Otherwise require 1/8 buckets to be empty (87.5% load)
.      //
.      // Be careful when modifying this, calculate_layout relies on the
.      // overflow check here.
125,460 ( 0.00%)      let adjusted_cap = cap.checked_mul(8)? / 7;
.
.      // Any overflows will have been caught by the checked_mul. Also, any
.      // rounding errors from the division above will be cleaned up by
.      // next_power_of_two (which can't overflow because of the previous division).
.      Some(adjusted_cap.next_power_of_two())
.  }
.
.  /// Returns the maximum effective capacity for the given bucket mask, taking
.  /// the maximum load factor into account.
.  #[inline]
.  fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
942,131 ( 0.01%)      if bucket_mask < 8 {
.          // For tables with 1/2/4/8 buckets, we always reserve one empty slot.
.          // Keep in mind that the bucket mask is one less than the bucket count.
.          bucket_mask
.      } else {
.          // For larger tables we reserve 12.5% of the slots as empty.
130,548 ( 0.00%)          ((bucket_mask + 1) / 8) * 7
.      }
.  }
.
.  /// Helper which allows the max calculation for ctrl_align to be statically computed for each T
.  /// while keeping the rest of `calculate_layout_for` independent of `T`
.  #[derive(Copy, Clone)]
.  struct TableLayout {
.      size: usize,
-- line 233 ----------------------------------------
-- line 246 ----------------------------------------
.
.      #[inline]
.      fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> {
.          debug_assert!(buckets.is_power_of_two());
.
.          let TableLayout { size, ctrl_align } = self;
.          // Manual layout calculation since Layout methods are not yet stable.
.          let ctrl_offset =
711,878 ( 0.01%)              size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1);
965,796 ( 0.01%)          let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?;
.
.          Some((
.              unsafe { Layout::from_size_align_unchecked(len, ctrl_align) },
.              ctrl_offset,
.          ))
.      }
.  }
.
-- line 263 ----------------------------------------
-- line 337 ----------------------------------------
.          }
.      }
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub unsafe fn drop(&self) {
.          self.as_ptr().drop_in_place();
.      }
.      #[inline]
.      pub unsafe fn read(&self) -> T {
1,924 ( 0.00%)          self.as_ptr().read()
.      }
.      #[inline]
.      pub unsafe fn write(&self, val: T) {
.          self.as_ptr().write(val);
.      }
.      #[inline]
.      pub unsafe fn as_ref<'a>(&self) -> &'a T {
.          &*self.as_ptr()
-- line 353 ----------------------------------------
-- line 422 ----------------------------------------
.      /// Creates a new empty hash table without allocating any memory, using the
.      /// given allocator.
.      ///
.      /// In effect this returns a table with exactly 1 bucket. However we can
.      /// leave the data pointer dangling since that bucket is never written to
.      /// due to our load factor forcing us to always have at least 1 free bucket.
.      #[inline]
.      pub fn new_in(alloc: A) -> Self {
810 ( 0.00%)          Self {
.              table: RawTableInner::new_in(alloc),
.              marker: PhantomData,
.          }
.      }
.
.      /// Allocates a new hash table with the given number of buckets.
.      ///
.      /// The control bytes are left uninitialized.
-- line 438 ----------------------------------------
-- line 440 ----------------------------------------
.      unsafe fn new_uninitialized(
.          alloc: A,
.          buckets: usize,
.          fallibility: Fallibility,
.      ) -> Result {
.          debug_assert!(buckets.is_power_of_two());
.
.          Ok(Self {
360 ( 0.00%)              table: RawTableInner::new_uninitialized(
.                  alloc,
.                  TableLayout::new::(),
.                  buckets,
.                  fallibility,
.              )?,
.              marker: PhantomData,
.          })
.      }
-- line 456 ----------------------------------------
-- line 458 ----------------------------------------
.      /// Attempts to allocate a new hash table with at least enough capacity
.      /// for inserting the given number of elements without reallocating.
.      fn fallible_with_capacity(
.          alloc: A,
.          capacity: usize,
.          fallibility: Fallibility,
.      ) -> Result {
.          Ok(Self {
25,179 ( 0.00%)              table: RawTableInner::fallible_with_capacity(
.                  alloc,
.                  TableLayout::new::(),
.                  capacity,
.                  fallibility,
.              )?,
.              marker: PhantomData,
.          })
.      }
-- line 474 ----------------------------------------
-- line 527 ----------------------------------------
.          debug_assert_ne!(self.table.bucket_mask, 0);
.          debug_assert!(index < self.buckets());
.          Bucket::from_base_index(self.data_end(), index)
.      }
.
.      /// Erases an element from the table without dropping it.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[deprecated(since = "0.8.1", note = "use erase or remove instead")]
51,339 ( 0.00%)      pub unsafe fn erase_no_drop(&mut self, item: &Bucket) {
51,339 ( 0.00%)          let index = self.bucket_index(item);
.          self.table.erase(index);
102,678 ( 0.00%)      }
.
.      /// Erases an element from the table, dropping it in place.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[allow(clippy::needless_pass_by_value)]
.      #[allow(deprecated)]
.      pub unsafe fn erase(&mut self, item: Bucket) {
.          // Erase the element from the table first since drop might panic.
22,214 ( 0.00%)          self.erase_no_drop(&item);
.          item.drop();
.      }
.
.      /// Finds and erases an element from the table, dropping it in place.
.      /// Returns true if an element was found.
.      #[cfg(feature = "raw")]
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool {
-- line 554 ----------------------------------------
-- line 563 ----------------------------------------
.          }
.      }
.
.      /// Removes an element from the table, returning it.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[allow(clippy::needless_pass_by_value)]
.      #[allow(deprecated)]
.      pub unsafe fn remove(&mut self, item: Bucket) -> T {
80,464 ( 0.00%)          self.erase_no_drop(&item);
260 ( 0.00%)          item.read()
.      }
.
.      /// Finds and removes an element from the table, returning it.
.      #[cfg_attr(feature = "inline-more", inline)]
1,443,808 ( 0.01%)      pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option {
.          // Avoid `Option::map` because it bloats LLVM IR.
5,288 ( 0.00%)          match self.find(hash, eq) {
20,193 ( 0.00%)              Some(bucket) => Some(unsafe { self.remove(bucket) }),
261,406 ( 0.00%)              None => None,
.          }
1,992,624 ( 0.02%)      }
.
.      /// Marks all table buckets as empty without dropping their contents.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn clear_no_drop(&mut self) {
.          self.table.clear_no_drop();
.      }
.
.      /// Removes all elements from the table without freeing the backing memory.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn clear(&mut self) {
.          // Ensure that the table is reset even if one of the drops panic
.          let mut self_ = guard(self, |self_| self_.clear_no_drop());
.          unsafe {
1 ( 0.00%)              self_.drop_elements();
.          }
.      }
.
7 ( 0.00%)      unsafe fn drop_elements(&mut self) {
48,802 ( 0.00%)          if mem::needs_drop::() && !self.is_empty() {
.              for item in self.iter() {
.                  item.drop();
.              }
.          }
8 ( 0.00%)      }
.
.      /// Shrinks the table to fit `max(self.len(), min_size)` elements.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) {
.          // Calculate the minimal number of elements that we need to reserve
.          // space for.
.          let min_size = usize::max(self.table.items, min_size);
.          if min_size == 0 {
-- line 615 ----------------------------------------
-- line 642 ----------------------------------------
.              }
.          }
.      }
.
.      /// Ensures that at least `additional` items can be inserted into the table
.      /// without reallocation.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) {
784,705 ( 0.01%)          if additional > self.table.growth_left {
.              // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
602,498 ( 0.01%)              if self
.                  .reserve_rehash(additional, hasher, Fallibility::Infallible)
.                  .is_err()
.              {
.                  unsafe { hint::unreachable_unchecked() }
.              }
.          }
.      }
.
-- line 660 ----------------------------------------
-- line 671 ----------------------------------------
.          } else {
.              Ok(())
.          }
.      }
.
.      /// Out-of-line slow path for `reserve` and `try_reserve`.
.      #[cold]
.      #[inline(never)]
1,427,105 ( 0.01%)      fn reserve_rehash(
.          &mut self,
.          additional: usize,
.          hasher: impl Fn(&T) -> u64,
.          fallibility: Fallibility,
.      ) -> Result<(), TryReserveError> {
.          unsafe {
.              self.table.reserve_rehash_inner(
.                  additional,
-- line 687 ----------------------------------------
-- line 690 ----------------------------------------
.                  TableLayout::new::(),
.                  if mem::needs_drop::() {
.                      Some(mem::transmute(ptr::drop_in_place:: as unsafe fn(*mut T)))
.                  } else {
.                      None
.                  },
.              )
.          }
1,045,456 ( 0.01%)      }
.
.      /// Allocates a new table of a different size and moves the contents of the
.      /// current table into it.
.      fn resize(
.          &mut self,
.          capacity: usize,
.          hasher: impl Fn(&T) -> u64,
.          fallibility: Fallibility,
-- line 706 ----------------------------------------
-- line 714 ----------------------------------------
.          )
.      }
.  }
.
.      /// Inserts a new element into the table, and returns its raw bucket.
.      ///
.      /// This does not check if the given element already exists in the table.
.      #[cfg_attr(feature = "inline-more", inline)]
5,125,680 ( 0.05%)      pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket {
.          unsafe {
.              let mut index = self.table.find_insert_slot(hash);
.
.              // We can avoid growing the table once we have reached our load
.              // factor if we are replacing a tombstone. This works since the
.              // number of EMPTY slots does not change in this case.
20,434 ( 0.00%)              let old_ctrl = *self.table.ctrl(index);
3,354,782 ( 0.03%)              if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) {
.                  self.reserve(1, hasher);
.                  index = self.table.find_insert_slot(hash);
.              }
.
.              self.table.record_item_insert_at(index, old_ctrl, hash);
.
.              let bucket = self.bucket(index);
4 ( 0.00%)              bucket.write(value);
.              bucket
.          }
3,775,559 ( 0.03%)      }
.
.      /// Attempts to insert a new element without growing the table and return its raw bucket.
.      ///
.      /// Returns an `Err` containing the given element if inserting it would require growing the
.      /// table.
.      ///
.      /// This does not check if the given element already exists in the table.
.      #[cfg(feature = "raw")]
-- line 749 ----------------------------------------
-- line 760 ----------------------------------------
.              }
.          }
.      }
.
.      /// Inserts a new element into the table, and returns a mutable reference to it.
.      ///
.      /// This does not check if the given element already exists in the table.
.      #[cfg_attr(feature = "inline-more", inline)]
969,080 ( 0.01%)      pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T {
208 ( 0.00%)          unsafe { self.insert(hash, value, hasher).as_mut() }
726,810 ( 0.01%)      }
.
.      /// Inserts a new element into the table, without growing the table.
.      ///
.      /// There must be enough space in the table to insert the new element.
.      ///
.      /// This does not check if the given element already exists in the table.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[cfg(any(feature = "raw", feature = "rustc-internal-api"))]
2,462 ( 0.00%)      pub unsafe fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket {
806,922 ( 0.01%)          let (index, old_ctrl) = self.table.prepare_insert_slot(hash);
15,289 ( 0.00%)          let bucket = self.table.bucket(index);
.
.          // If we are replacing a DELETED entry then we don't need to update
.          // the load counter.
1,688,894 ( 0.01%)          self.table.growth_left -= special_is_empty(old_ctrl) as usize;
.
.          bucket.write(value);
1,327,484 ( 0.01%)          self.table.items += 1;
.          bucket
4,882 ( 0.00%)      }
.
.      /// Temporary removes a bucket, applying the given function to the removed
.      /// element and optionally put back the returned value in the same bucket.
.      ///
.      /// Returns `true` if the bucket still contains an element
.      ///
.      /// This does not check if the given bucket is actually occupied.
.      #[cfg_attr(feature = "inline-more", inline)]
-- line 798 ----------------------------------------
-- line 813 ----------------------------------------
.              true
.          } else {
.              false
.          }
.      }
.
.      /// Searches for an element in the table.
.      #[inline]
104,276 ( 0.00%)      pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option> {
17,876 ( 0.00%)          let result = self.table.find_inner(hash, &mut |index| unsafe {
60,854 ( 0.00%)              eq(self.bucket(index).as_ref())
2,908 ( 0.00%)          });
.
.          // Avoid `Option::map` because it bloats LLVM IR.
.          match result {
1,210 ( 0.00%)              Some(index) => Some(unsafe { self.bucket(index) }),
.              None => None,
.          }
116,864 ( 0.00%)      }
.
.      /// Gets a reference to an element in the table.
.      #[inline]
.      pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> {
.          // Avoid `Option::map` because it bloats LLVM IR.
55,333 ( 0.00%)          match self.find(hash, eq) {
.              Some(bucket) => Some(unsafe { bucket.as_ref() }),
.              None => None,
.          }
.      }
.
.      /// Gets a mutable reference to an element in the table.
.      #[inline]
5,558 ( 0.00%)      pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> {
.          // Avoid `Option::map` because it bloats LLVM IR.
47,158 ( 0.00%)          match self.find(hash, eq) {
.              Some(bucket) => Some(unsafe { bucket.as_mut() }),
.              None => None,
.          }
6,352 ( 0.00%)      }
.
.      /// Attempts to get mutable references to `N` entries in the table at once.
.      ///
.      /// Returns an array of length `N` with the results of each query.
.      ///
.      /// At most one mutable reference will be returned to any entry. `None` will be returned if any
.      /// of the hashes are duplicates. `None` will be returned if the hash is not found.
.      ///
-- line 859 ----------------------------------------
-- line 920 ----------------------------------------
.      #[inline]
.      pub fn len(&self) -> usize {
.          self.table.items
.      }
.
.      /// Returns `true` if the table contains no elements.
.      #[inline]
.      pub fn is_empty(&self) -> bool {
1,703,451 ( 0.02%)          self.len() == 0
.      }
.
.      /// Returns the number of buckets in the table.
.      #[inline]
.      pub fn buckets(&self) -> usize {
.          self.table.bucket_mask + 1
.      }
.
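--------------------------------------------------------------------------------
-- Note: the sizing helpers annotated above (capacity_to_buckets,
-- bucket_mask_to_capacity) drive every resize decision that shows up in the
-- hot reserve/insert paths. The following is a minimal standalone Rust sketch
-- of that round trip; the constants mirror the annotated source, but the
-- sketch itself is illustrative and not part of the profiled build.
--------------------------------------------------------------------------------

// Sketch of hashbrown's 7/8 maximum load factor, restated from the source above.
fn capacity_to_buckets(cap: usize) -> Option<usize> {
    if cap < 8 {
        // Small tables keep one slot free: 4 buckets hold 3 items, 8 hold 7.
        return Some(if cap < 4 { 4 } else { 8 });
    }
    // Need buckets >= cap * 8 / 7, rounded up to a power of two.
    let adjusted_cap = cap.checked_mul(8)? / 7;
    Some(adjusted_cap.next_power_of_two())
}

fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
    if bucket_mask < 8 {
        bucket_mask // 1/2/4/8 buckets reserve exactly one empty slot
    } else {
        ((bucket_mask + 1) / 8) * 7 // larger tables keep 12.5% of slots empty
    }
}

fn main() {
    // A requested capacity of 57 rounds up to 128 buckets, which can then
    // hold up to 112 items before the next resize.
    let buckets = capacity_to_buckets(57).unwrap();
    assert_eq!(buckets, 128);
    assert_eq!(bucket_mask_to_capacity(buckets - 1), 112);
}
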
-- line 936 ---------------------------------------- -- line 938 ---------------------------------------- . /// the caller to ensure that the `RawTable` outlives the `RawIter`. . /// Because we cannot make the `next` method unsafe on the `RawIter` . /// struct, we have to make the `iter` method unsafe. . #[inline] . pub unsafe fn iter(&self) -> RawIter { . let data = Bucket::from_base_index(self.data_end(), 0); . RawIter { . iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()), 248,569 ( 0.00%) items: self.table.items, . } . } . . /// Returns an iterator over occupied buckets that could match a given hash. . /// . /// `RawTable` only stores 7 bits of the hash value, so this iterator may . /// return items that have a hash value different than the one provided. You . /// should always validate the returned values before using them. -- line 954 ---------------------------------------- -- line 995 ---------------------------------------- . /// Iteration starts at the provided iterator's current location. . /// . /// It is up to the caller to ensure that the iterator is valid for this . /// `RawTable` and covers all items that remain in the table. . pub unsafe fn into_iter_from(self, iter: RawIter) -> RawIntoIter { . debug_assert_eq!(iter.len(), self.len()); . . let alloc = self.table.alloc.clone(); 23,948 ( 0.00%) let allocation = self.into_allocation(); 17,961 ( 0.00%) RawIntoIter { 29,935 ( 0.00%) iter, . allocation, . marker: PhantomData, . alloc, . } . } . . /// Converts the table into a raw allocation. The contents of the table . /// should be dropped using a `RawIter` before freeing the allocation. . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn into_allocation(self) -> Option<(NonNull, Layout)> { 12,153 ( 0.00%) let alloc = if self.table.is_empty_singleton() { . None . } else { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match calculate_layout::(self.table.buckets()) { . Some(lco) => lco, . None => unsafe { hint::unreachable_unchecked() }, . }; . Some(( 1,848 ( 0.00%) unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) }, . layout, . )) . }; . mem::forget(self); . alloc . } . } . -- line 1033 ---------------------------------------- -- line 1042 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . impl RawTableInner { . #[inline] . const fn new_in(alloc: A) -> Self { 1,797,968 ( 0.02%) Self { . // Be careful to cast the entire slice to a raw pointer. . ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) }, . bucket_mask: 0, . items: 0, . growth_left: 0, . alloc, . } . } . } . . impl RawTableInner { . #[cfg_attr(feature = "inline-more", inline)] 1,271,418 ( 0.01%) unsafe fn new_uninitialized( . alloc: A, . table_layout: TableLayout, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . // Avoid `Option::ok_or_else` because it bloats LLVM IR. -- line 1071 ---------------------------------------- -- line 1078 ---------------------------------------- . // exceed `isize::MAX`. We can skip this check on 64-bit systems since . // such allocations will never succeed anyways. . // . // This mirrors what Vec does in the standard library. . if mem::size_of::() < 8 && layout.size() > isize::MAX as usize { . return Err(fallibility.capacity_overflow()); . } . 291,976 ( 0.00%) let ptr: NonNull = match do_alloc(&alloc, layout) { . Ok(block) => block.cast(), . 
Err(_) => return Err(fallibility.alloc_err(layout)), . }; . . let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset)); 697,320 ( 0.01%) Ok(Self { . ctrl, 294,991 ( 0.00%) bucket_mask: buckets - 1, . items: 0, . growth_left: bucket_mask_to_capacity(buckets - 1), . alloc, . }) 925,092 ( 0.01%) } . . #[inline] 71,437 ( 0.00%) fn fallible_with_capacity( . alloc: A, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result { 18,898 ( 0.00%) if capacity == 0 { 13,956 ( 0.00%) Ok(Self::new_in(alloc)) . } else { . unsafe { . let buckets = . capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?; . 577,094 ( 0.01%) let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?; . result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes()); . 38,296 ( 0.00%) Ok(result) . } . } 71,437 ( 0.00%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element and sets the hash for that slot. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] 282,615 ( 0.00%) unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) { . let index = self.find_insert_slot(hash); 282,615 ( 0.00%) let old_ctrl = *self.ctrl(index); . self.set_ctrl_h2(index, hash); . (index, old_ctrl) 565,230 ( 0.01%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] . fn find_insert_slot(&self, hash: u64) -> usize { . let mut probe_seq = self.probe_seq(hash); . loop { . unsafe { . let group = Group::load(self.ctrl(probe_seq.pos)); 2,273,960 ( 0.02%) if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() { 6,675,087 ( 0.06%) let result = (probe_seq.pos + bit) & self.bucket_mask; . . // In tables smaller than the group width, trailing control . // bytes outside the range of the table are filled with . // EMPTY entries. These will unfortunately trigger a . // match, but once masked may point to a full bucket that . // is already occupied. We detect this situation here and . // perform a second scan starting at the beginning of the . // table. This second scan is guaranteed to find an empty . // slot (due to the load factor) before hitting the trailing . // control bytes (containing EMPTY). 3,062,101 ( 0.03%) if unlikely(is_full(*self.ctrl(result))) { . debug_assert!(self.bucket_mask < Group::WIDTH); . debug_assert_ne!(probe_seq.pos, 0); . return Group::load_aligned(self.ctrl(0)) . .match_empty_or_deleted() . .lowest_set_bit_nonzero(); . } . . return result; -- line 1165 ---------------------------------------- -- line 1171 ---------------------------------------- . . /// Searches for an element in the table. This uses dynamic dispatch to reduce the amount of . /// code generated, but it is eliminated by LLVM optimizations. . #[inline] . fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option { . let h2_hash = h2(hash); . let mut probe_seq = self.probe_seq(hash); . 225,048 ( 0.00%) loop { . let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) }; . 7,581,952 ( 0.07%) for bit in group.match_byte(h2_hash) { 15,094,454 ( 0.13%) let index = (probe_seq.pos + bit) & self.bucket_mask; . 10,507,756 ( 0.09%) if likely(eq(index)) { . return Some(index); . } . } . 2,127,811 ( 0.02%) if likely(group.match_empty().any_bit_set()) { . return None; . } . . probe_seq.move_next(self.bucket_mask); . } . } . . 
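`prepare_insert_slot` and `find_inner` above walk the same probe sequence: `h1` supplies the starting index bits, `h2` the 7-bit tag that is matched against a whole group of control bytes at once, and the stride grows by one group per step. A simplified scalar model of that scheme, with an illustrative group width rather than the real SIMD-dependent constant:

const GROUP_WIDTH: usize = 16; // illustrative; the real value depends on the SIMD backend

// h1 picks the starting position; h2 is the 7-bit control-byte tag.
fn h1(hash: u64) -> usize {
    hash as usize
}
fn h2(hash: u64) -> u8 {
    ((hash >> 57) & 0x7f) as u8 // top 7 bits of a 64-bit hash
}

struct ProbeSeq {
    pos: usize,
    stride: usize,
}

impl ProbeSeq {
    // Triangular probing: with a power-of-two number of groups this visits
    // every group exactly once before cycling, so the loop in find_inner
    // terminates as soon as a group contains an EMPTY byte.
    fn move_next(&mut self, bucket_mask: usize) {
        self.stride += GROUP_WIDTH;
        self.pos = (self.pos + self.stride) & bucket_mask;
    }
}

fn main() {
    let hash = 0x9e37_79b9_7f4a_7c15_u64;
    let bucket_mask = 127; // 128 buckets
    let mut seq = ProbeSeq { pos: h1(hash) & bucket_mask, stride: 0 };
    println!("tag = {:#04x}, probe starts at {}", h2(hash), seq.pos);
    seq.move_next(bucket_mask);
    println!("next probe position: {}", seq.pos);
}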
#[allow(clippy::mut_mut)]
-- line 1198 ----------------------------------------
-- line 1225 ----------------------------------------
. Bucket::from_base_index(self.data_end(), index)
. }
.
. #[inline]
. unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 {
. debug_assert_ne!(self.bucket_mask, 0);
. debug_assert!(index < self.buckets());
. let base: *mut u8 = self.data_end().as_ptr();
7,773,280 ( 0.07%) base.sub((index + 1) * size_of)
. }
.
. #[inline]
. unsafe fn data_end(&self) -> NonNull {
. NonNull::new_unchecked(self.ctrl.as_ptr().cast())
. }
.
. /// Returns an iterator-like object for a probe sequence on the table.
. ///
. /// This iterator never terminates, but is guaranteed to visit each bucket
. /// group exactly once. The loop using `probe_seq` must terminate upon
. /// reaching a group containing an empty bucket.
. #[inline]
. fn probe_seq(&self, hash: u64) -> ProbeSeq {
. ProbeSeq {
43,125,393 ( 0.38%) pos: h1(hash) & self.bucket_mask,
. stride: 0,
. }
. }
.
. /// Returns the index of a bucket for which a value must be inserted if there is enough room
. /// in the table; otherwise returns an error.
. #[cfg(feature = "raw")]
. #[inline]
-- line 1257 ----------------------------------------
-- line 1263 ----------------------------------------
. } else {
. self.record_item_insert_at(index, old_ctrl, hash);
. Ok(index)
. }
. }
.
. #[inline]
. unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) {
4,192,497 ( 0.04%) self.growth_left -= special_is_empty(old_ctrl) as usize;
. self.set_ctrl_h2(index, hash);
3,353,948 ( 0.03%) self.items += 1;
. }
.
. #[inline]
. fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool {
. let probe_seq_pos = self.probe_seq(hash).pos;
. let probe_index =
. |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH;
. probe_index(i) == probe_index(new_i)
-- line 1281 ----------------------------------------
-- line 1312 ----------------------------------------
. // replicate the buckets at the end of the trailing group. For example
. // with 2 buckets and a group size of 4, the control bytes will look
. // like this:
. //
. // Real | Replicated
. // ---------------------------------------------
. // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] |
. // ---------------------------------------------
6,288,850 ( 0.06%) let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH;
.
2,095,190 ( 0.02%) *self.ctrl(index) = ctrl;
2,095,861 ( 0.02%) *self.ctrl(index2) = ctrl;
. }
.
. /// Returns a pointer to a control byte.
. #[inline]
. unsafe fn ctrl(&self, index: usize) -> *mut u8 {
. debug_assert!(index < self.num_ctrl_bytes());
. self.ctrl.as_ptr().add(index)
. }
.
. #[inline]
. fn buckets(&self) -> usize {
807,464 ( 0.01%) self.bucket_mask + 1
. }
.
. #[inline]
. fn num_ctrl_bytes(&self) -> usize {
682,089 ( 0.01%) self.bucket_mask + 1 + Group::WIDTH
. }
.
. #[inline]
. fn is_empty_singleton(&self) -> bool {
3,345,402 ( 0.03%) self.bucket_mask == 0
. }
.
. #[allow(clippy::mut_mut)]
. #[inline]
. unsafe fn prepare_resize(
. &self,
. table_layout: TableLayout,
. capacity: usize,
. fallibility: Fallibility,
. ) -> Result, TryReserveError> {
. debug_assert!(self.items <= capacity);
.
. // Allocate and initialize the new table.
8,424 ( 0.00%) let mut new_table = RawTableInner::fallible_with_capacity(
. self.alloc.clone(),
. table_layout,
. capacity,
. fallibility,
. )?;
375,191 ( 0.00%) new_table.growth_left -= self.items;
. new_table.items = self.items;
.
.
// The hash function may panic, in which case we simply free the new
. // table without dropping any elements that may have been copied into
. // it.
. //
. // This guard is also used to free the old table on success, see
. // the comment at the bottom of this function.
. Ok(guard(new_table, move |self_| {
175,115 ( 0.00%) if !self_.is_empty_singleton() {
. self_.free_buckets(table_layout);
. }
. }))
. }
.
. /// Reserves or rehashes to make room for `additional` more elements.
. ///
. /// This uses dynamic dispatch to reduce the amount of
-- line 1383 ----------------------------------------
-- line 1388 ----------------------------------------
. &mut self,
. additional: usize,
. hasher: &dyn Fn(&mut Self, usize) -> u64,
. fallibility: Fallibility,
. layout: TableLayout,
. drop: Option,
. ) -> Result<(), TryReserveError> {
. // Avoid `Option::ok_or_else` because it bloats LLVM IR.
350,233 ( 0.00%) let new_items = match self.items.checked_add(additional) {
. Some(new_items) => new_items,
. None => return Err(fallibility.capacity_overflow()),
. };
350,230 ( 0.00%) let full_capacity = bucket_mask_to_capacity(self.bucket_mask);
733,084 ( 0.01%) if new_items <= full_capacity / 2 {
. // Rehash in-place without re-allocating if we have plenty of spare
. // capacity that is locked up due to DELETED entries.
. self.rehash_in_place(hasher, layout.size, drop);
. Ok(())
. } else {
. // Otherwise, conservatively resize to at least the next size up
. // to avoid churning deletes into frequent rehashes.
. self.resize_inner(
175,115 ( 0.00%) usize::max(new_items, full_capacity + 1),
. hasher,
. fallibility,
. layout,
. )
. }
. }
.
. /// Allocates a new table of a different size and moves the contents of the
-- line 1418 ----------------------------------------
-- line 1424 ----------------------------------------
. #[inline(always)]
. unsafe fn resize_inner(
. &mut self,
. capacity: usize,
. hasher: &dyn Fn(&mut Self, usize) -> u64,
. fallibility: Fallibility,
. layout: TableLayout,
. ) -> Result<(), TryReserveError> {
21,569 ( 0.00%) let mut new_table = self.prepare_resize(layout, capacity, fallibility)?;
.
. // Copy all elements to the new table.
. for i in 0..self.buckets() {
1,255,089 ( 0.01%) if !is_full(*self.ctrl(i)) {
. continue;
. }
.
. // This may panic.
. let hash = hasher(self, i);
.
. // We can use a simpler version of insert() here since:
. // - there are no DELETED entries.
-- line 1444 ----------------------------------------
-- line 1454 ----------------------------------------
. }
.
. // We successfully copied all elements without panicking. Now replace
. // self with the new table. The old table will have its memory freed but
. // the items will not be dropped (since they have been moved into the
. // new table).
. mem::swap(self, &mut new_table);
.
175,115 ( 0.00%) Ok(())
. }
.
. /// Rehashes the contents of the table in place (i.e. without changing the
. /// allocation).
. ///
. /// If `hasher` panics then some of the table's contents may be lost.
. ///
. /// This uses dynamic dispatch to reduce the amount of
-- line 1470 ----------------------------------------
-- line 1554 ----------------------------------------
. #[inline]
. unsafe fn free_buckets(&mut self, table_layout: TableLayout) {
. // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
. let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) {
. Some(lco) => lco,
. None => hint::unreachable_unchecked(),
. };
.
self.alloc.deallocate( 120,006 ( 0.00%) NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)), . layout, . ); . } . . /// Marks all table buckets as empty without dropping their contents. . #[inline] . fn clear_no_drop(&mut self) { 24,568 ( 0.00%) if !self.is_empty_singleton() { . unsafe { . self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes()); . } . } 33,856 ( 0.00%) self.items = 0; 24,598 ( 0.00%) self.growth_left = bucket_mask_to_capacity(self.bucket_mask); . } . . #[inline] . unsafe fn erase(&mut self, index: usize) { . debug_assert!(is_full(*self.ctrl(index))); 258,135 ( 0.00%) let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask; . let empty_before = Group::load(self.ctrl(index_before)).match_empty(); . let empty_after = Group::load(self.ctrl(index)).match_empty(); . . // If we are inside a continuous block of Group::WIDTH full or deleted . // cells then a probe window may have seen a full block when trying to . // insert. We therefore need to keep that block non-empty so that . // lookups will continue searching to the next probe window. . // . // Note that in this context `leading_zeros` refers to the bytes at the . // end of a group, while `trailing_zeros` refers to the bytes at the . // beginning of a group. 1,032,540 ( 0.01%) let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH { . DELETED . } else { 1,283,370 ( 0.01%) self.growth_left += 1; . EMPTY . }; . self.set_ctrl(index, ctrl); 1,032,540 ( 0.01%) self.items -= 1; . } . } . . impl Clone for RawTable { 2,176 ( 0.00%) fn clone(&self) -> Self { 396 ( 0.00%) if self.table.is_empty_singleton() { . Self::new_in(self.table.alloc.clone()) . } else { . unsafe { . let mut new_table = ManuallyDrop::new( . // Avoid `Result::ok_or_else` because it bloats LLVM IR. . match Self::new_uninitialized( . self.table.alloc.clone(), . self.table.buckets(), -- line 1615 ---------------------------------------- -- line 1624 ---------------------------------------- . // We need to free the memory allocated for the new table. . new_table.free_buckets(); . }); . . // Return the newly created table. . ManuallyDrop::into_inner(new_table) . } . } 2,448 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . if source.table.is_empty_singleton() { . *self = Self::new_in(self.table.alloc.clone()); . } else { . unsafe { . // First, drop all our elements without clearing the control bytes. . self.drop_elements(); -- line 1640 ---------------------------------------- -- line 1687 ---------------------------------------- . .table . .ctrl(0) . .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes()); . source . .data_start() . .copy_to_nonoverlapping(self.data_start(), self.table.buckets()); . . self.table.items = source.table.items; 144 ( 0.00%) self.table.growth_left = source.table.growth_left; . } . } . . impl RawTable { . /// Common code for clone and clone_from. Assumes `self.buckets() == source.buckets()`. . #[cfg_attr(feature = "inline-more", inline)] . unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) { . // Copy the control bytes unchanged. We do this in a single pass -- line 1703 ---------------------------------------- -- line 1790 ---------------------------------------- . fn default() -> Self { . Self::new_in(Default::default()) . } . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawTable { . 
#[cfg_attr(feature = "inline-more", inline)] 1,845,842 ( 0.02%) fn drop(&mut self) { 1,440,750 ( 0.01%) if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); . self.free_buckets(); . } . } 1,943,140 ( 0.02%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); -- line 1813 ---------------------------------------- -- line 1817 ---------------------------------------- . } . } . . impl IntoIterator for RawTable { . type Item = T; . type IntoIter = RawIntoIter; . . #[cfg_attr(feature = "inline-more", inline)] 23,948 ( 0.00%) fn into_iter(self) -> RawIntoIter { . unsafe { . let iter = self.iter(); . self.into_iter_from(iter) . } 29,935 ( 0.00%) } . } . . /// Iterator over a sub-range of a table. Unlike `RawIter` this iterator does . /// not track an item count. . pub(crate) struct RawIterRange { . // Mask of full buckets in the current group. Bits are cleared from this . // mask as each element is processed. . current_group: BitMask, -- line 1838 ---------------------------------------- -- line 1934 ---------------------------------------- . . impl Iterator for RawIterRange { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option> { . unsafe { . loop { 583,320 ( 0.01%) if let Some(index) = self.current_group.lowest_set_bit() { 83,514 ( 0.00%) self.current_group = self.current_group.remove_lowest_bit(); 139,634 ( 0.00%) return Some(self.data.next_n(index)); . } . 491,227 ( 0.00%) if self.next_ctrl >= self.end { . return None; . } . . // We might read past self.end up to the next group boundary, . // but this is fine because it only occurs on tables smaller . // than the group size where the trailing control bytes are all . // EMPTY. On larger tables self.end is guaranteed to be aligned . // to the group size (since tables are power-of-two sized). 12,029 ( 0.00%) self.current_group = Group::load_aligned(self.next_ctrl).match_full(); 9,910 ( 0.00%) self.data = self.data.next_n(Group::WIDTH); 24,140 ( 0.00%) self.next_ctrl = self.next_ctrl.add(Group::WIDTH); . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . // We don't have an item count, so just guess based on the range size. . ( -- line 1966 ---------------------------------------- -- line 2102 ---------------------------------------- . } . } else { . // We must have already iterated past the removed item. . } . } . } . . unsafe fn drop_elements(&mut self) { 3,257 ( 0.00%) if mem::needs_drop::() && self.len() != 0 { . for item in self { . item.drop(); . } . } . } . } . . impl Clone for RawIter { -- line 2118 ---------------------------------------- -- line 2124 ---------------------------------------- . } . } . } . . impl Iterator for RawIter { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] 99,762 ( 0.00%) fn next(&mut self) -> Option> { 289,782 ( 0.00%) if let Some(b) = self.iter.next() { 726,991 ( 0.01%) self.items -= 1; . Some(b) . } else { . // We don't check against items == 0 here to allow the . // compiler to optimize away the item count entirely if the . // iterator length is never queried. . debug_assert_eq!(self.items, 0); . None . } 199,524 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (self.items, Some(self.items)) . } . } . . 
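`RawIterRange::next` above drains one group per refill: the group's full buckets are summarized as a bitmask, and each call pops the lowest set bit. `lowest_set_bit` and `remove_lowest_bit` are the usual trailing-zeros and clear-lowest-bit idioms, modeled standalone here:

fn main() {
    // One control group's full buckets as a bitmask: offsets 1, 2 and 5 are full.
    let mut current_group: u16 = 0b0010_0110;
    let mut offsets = Vec::new();
    while current_group != 0 {
        let index = current_group.trailing_zeros() as usize; // lowest_set_bit()
        current_group &= current_group - 1;                  // remove_lowest_bit()
        offsets.push(index);
    }
    assert_eq!(offsets, [1, 2, 5]);
}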
impl ExactSizeIterator for RawIter {} -- line 2151 ---------------------------------------- -- line 2177 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] 8,230 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); . . // Free the table 57,806 ( 0.00%) if let Some((ptr, layout)) = self.allocation { . self.alloc.deallocate(ptr, layout); . } . } 278 ( 0.00%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); -- line 2203 ---------------------------------------- -- line 2209 ---------------------------------------- . } . } . } . . impl Iterator for RawIntoIter { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] 4,244 ( 0.00%) fn next(&mut self) -> Option { 882 ( 0.00%) unsafe { Some(self.iter.next()?.read()) } 11,095 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 4 ( 0.00%) self.iter.size_hint() . } . } . . impl ExactSizeIterator for RawIntoIter {} . impl FusedIterator for RawIntoIter {} . . /// Iterator which consumes elements without freeing the table storage. . pub struct RawDrain<'a, T, A: Allocator + Clone = Global> { -- line 2231 ---------------------------------------- -- line 2259 ---------------------------------------- . where . T: Sync, . A: Sync, . { . } . . impl Drop for RawDrain<'_, T, A> { . #[cfg_attr(feature = "inline-more", inline)] 272 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements. Note that this may panic. . self.iter.drop_elements(); . . // Reset the contents of the table now that all elements have been . // dropped. . self.table.clear_no_drop(); . . // Move the now empty table back to its original location. 34 ( 0.00%) self.orig_table . .as_ptr() . .copy_from_nonoverlapping(&*self.table, 1); . } 272 ( 0.00%) } . } . . impl Iterator for RawDrain<'_, T, A> { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option { . unsafe { -- line 2289 ---------------------------------------- 24,011,292 ( 0.21%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs -------------------------------------------------------------------------------- Ir -- line 57 ---------------------------------------- . /// # Examples . /// . /// Basic usage: . /// . /// ``` . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::from_str_radix(\"A\", 16), Ok(10));")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 378 ( 0.00%) pub fn from_str_radix(src: &str, radix: u32) -> Result { 189 ( 0.00%) from_str_radix(src, radix) 567 ( 0.00%) } . . /// Returns the number of ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// . /// ``` -- line 75 ---------------------------------------- -- line 80 ---------------------------------------- . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[doc(alias = "popcount")] . #[doc(alias = "popcnt")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . 
pub const fn count_ones(self) -> u32 { 691,800 ( 0.01%) intrinsics::ctpop(self as $ActualT) as u32 . } . . /// Returns the number of zeros in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 96 ---------------------------------------- -- line 118 ---------------------------------------- . /// assert_eq!(n.leading_zeros(), 2); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn leading_zeros(self) -> u32 { 1,060,439 ( 0.01%) intrinsics::ctlz(self as $ActualT) as u32 . } . . /// Returns the number of trailing zeros in the binary representation . /// of `self`. . /// . /// # Examples . /// . /// Basic usage: -- line 134 ---------------------------------------- -- line 139 ---------------------------------------- . /// assert_eq!(n.trailing_zeros(), 3); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn trailing_zeros(self) -> u32 { 925,738 ( 0.01%) intrinsics::cttz(self) as u32 . } . . /// Returns the number of leading ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 155 ---------------------------------------- -- line 204 ---------------------------------------- . #[doc = concat!("assert_eq!(n.rotate_left(", $rot, "), m);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn rotate_left(self, n: u32) -> Self { 27,168,912 ( 0.24%) intrinsics::rotate_left(self, n as $SelfT) . } . . /// Shifts the bits to the right by a specified amount, `n`, . /// wrapping the truncated bits to the beginning of the resulting . /// integer. . /// . /// Please note this isn't the same operation as the `>>` shifting operator! . /// -- line 220 ---------------------------------------- -- line 430 ---------------------------------------- . #[doc = concat!("assert_eq!((", stringify!($SelfT), "::MAX - 2).checked_add(3), None);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_checked_int_methods", since = "1.47.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline] . pub const fn checked_add(self, rhs: Self) -> Option { 46 ( 0.00%) let (a, b) = self.overflowing_add(rhs); . if unlikely!(b) {None} else {Some(a)} . } . . /// Unchecked integer addition. Computes `self + rhs`, assuming overflow . /// cannot occur. . /// . /// # Safety . /// -- line 446 ---------------------------------------- -- line 456 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_add`. 1,731,419 ( 0.02%) unsafe { intrinsics::unchecked_add(self, rhs) } . } . . /// Checked addition with a signed integer. 
Computes `self + rhs`, . /// returning `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 472 ---------------------------------------- -- line 525 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_sub`. 134,632 ( 0.00%) unsafe { intrinsics::unchecked_sub(self, rhs) } . } . . /// Checked integer multiplication. Computes `self * rhs`, returning . /// `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 541 ---------------------------------------- -- line 596 ---------------------------------------- . without modifying the original"] . #[inline] . pub const fn checked_div(self, rhs: Self) -> Option { . if unlikely!(rhs == 0) { . None . } else { . // SAFETY: div by zero has been checked above and unsigned types have no other . // failure modes for division 856 ( 0.00%) Some(unsafe { intrinsics::unchecked_div(self, rhs) }) . } . } . . /// Checked Euclidean division. Computes `self.div_euclid(rhs)`, returning `None` . /// if `rhs == 0`. . /// . /// # Examples . /// -- line 612 ---------------------------------------- -- line 1035 ---------------------------------------- . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.saturating_add(127), ", stringify!($SelfT), "::MAX);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_add(self, rhs: Self) -> Self { 509,797 ( 0.00%) intrinsics::saturating_add(self, rhs) . } . . /// Saturating addition with a signed integer. Computes `self + rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1051 ---------------------------------------- -- line 1084 ---------------------------------------- . #[doc = concat!("assert_eq!(13", stringify!($SelfT), ".saturating_sub(127), 0);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_sub(self, rhs: Self) -> Self { 16,341 ( 0.00%) intrinsics::saturating_sub(self, rhs) . } . . /// Saturating integer multiplication. Computes `self * rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1100 ---------------------------------------- -- line 1175 ---------------------------------------- . #[doc = concat!("assert_eq!(200", stringify!($SelfT), ".wrapping_add(", stringify!($SelfT), "::MAX), 199);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_add(self, rhs: Self) -> Self { 15,239,317 ( 0.14%) intrinsics::wrapping_add(self, rhs) . } . . /// Wrapping (modular) addition with a signed integer. Computes . 
/// `self + rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1191 ---------------------------------------- -- line 1217 ---------------------------------------- . #[doc = concat!("assert_eq!(100", stringify!($SelfT), ".wrapping_sub(", stringify!($SelfT), "::MAX), 101);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_sub(self, rhs: Self) -> Self { 4,121,171 ( 0.04%) intrinsics::wrapping_sub(self, rhs) . } . . /// Wrapping (modular) multiplication. Computes `self * . /// rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1233 ---------------------------------------- -- line 1240 ---------------------------------------- . /// assert_eq!(25u8.wrapping_mul(12), 44); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_mul(self, rhs: Self) -> Self { 13,908,614 ( 0.12%) intrinsics::wrapping_mul(self, rhs) . } . . /// Wrapping (modular) division. Computes `self / rhs`. . /// Wrapped division on unsigned types is just normal division. . /// There's no way wrapping could ever happen. . /// This function exists, so that all operations . /// are accounted for in the wrapping operations. . /// -- line 1256 ---------------------------------------- -- line 1491 ---------------------------------------- . #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_add(2), (7, false));")] . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.overflowing_add(1), (0, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] 29 ( 0.00%) pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) { 1,866,211 ( 0.02%) let (a, b) = intrinsics::add_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) 58 ( 0.00%) } . . /// Calculates `self + rhs + carry` without the ability to overflow. . /// . /// Performs "ternary addition" which takes in an extra bit to add, and may return an . /// additional bit of overflow. This allows for chaining together multiple additions . /// to create "big integers" which represent larger values. . /// . #[doc = concat!("This can be thought of as a ", stringify!($BITS), "-bit \"full adder\", in the electronics sense.")] -- line 1510 ---------------------------------------- -- line 1587 ---------------------------------------- . #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_sub(2), (3, false));")] . #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".overflowing_sub(1), (", stringify!($SelfT), "::MAX, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . 
#[inline(always)] 2 ( 0.00%) pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) { 362,771 ( 0.00%) let (a, b) = intrinsics::sub_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) 4 ( 0.00%) } . . /// Calculates `self - rhs - borrow` without the ability to overflow. . /// . /// Performs "ternary subtraction" which takes in an extra bit to subtract, and may return . /// an additional bit of overflow. This allows for chaining together multiple subtractions . /// to create "big integers" which represent larger values. . /// . /// # Examples -- line 1606 ---------------------------------------- -- line 1674 ---------------------------------------- . /// assert_eq!(1_000_000_000u32.overflowing_mul(10), (1410065408, true)); . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn overflowing_mul(self, rhs: Self) -> (Self, bool) { 3,888,481 ( 0.03%) let (a, b) = intrinsics::mul_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) . } . . /// Calculates the divisor when `self` is divided by `rhs`. . /// . /// Returns a tuple of the divisor along with a boolean indicating . /// whether an arithmetic overflow would occur. Note that for unsigned . /// integers overflow never occurs, so the second value is always -- line 1690 ---------------------------------------- -- line 2132 ---------------------------------------- . #[doc = concat!("assert!(16", stringify!($SelfT), ".is_power_of_two());")] . #[doc = concat!("assert!(!10", stringify!($SelfT), ".is_power_of_two());")] . /// ``` . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_is_power_of_two", since = "1.32.0")] . #[inline(always)] . pub const fn is_power_of_two(self) -> bool { 174 ( 0.00%) self.count_ones() == 1 . } . . // Returns one less than next power of two. . // (For 8u8 next power of two is 8u8 and for 6u8 it is 8u8) . // . // 8u8.one_less_than_next_power_of_two() == 7 . // 6u8.one_less_than_next_power_of_two() == 7 . // . // This method cannot overflow, as in the `next_power_of_two` . // overflow cases it instead ends up returning the maximum value . // of the type, and can return 0 for 0. . #[inline] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . const fn one_less_than_next_power_of_two(self) -> Self { 10,030 ( 0.00%) if self <= 1 { return 0; } . 39,764 ( 0.00%) let p = self - 1; . // SAFETY: Because `p > 0`, it cannot consist entirely of leading zeros. . // That means the shift is always in-bounds, and some processors . // (such as intel pre-haswell) have more efficient ctlz . // intrinsics when the argument is non-zero. 119,109 ( 0.00%) let z = unsafe { intrinsics::ctlz_nonzero(p) }; 39,835 ( 0.00%) <$SelfT>::MAX >> z . } . . /// Returns the smallest power of two greater than or equal to `self`. . /// . /// When return value overflows (i.e., `self > (1 << (N-1))` for type . /// `uN`), it panics in debug mode and the return value is wrapped to 0 in . /// release mode (the only situation in which method can return 0). . /// -- line 2171 ---------------------------------------- -- line 2179 ---------------------------------------- . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . #[must_use = "this returns the result of the operation, \ . 
without modifying the original"] . #[inline] . #[rustc_inherit_overflow_checks] . pub const fn next_power_of_two(self) -> Self { 69,442 ( 0.00%) self.one_less_than_next_power_of_two() + 1 . } . . /// Returns the smallest power of two greater than or equal to `n`. If . /// the next power of two is greater than the type's maximum value, . /// `None` is returned, otherwise the power of two is wrapped in `Some`. . /// . /// # Examples . /// -- line 2195 ---------------------------------------- 294,645 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs -------------------------------------------------------------------------------- Ir -- line 21 ---------------------------------------- . use rustc_target::spec::abi; . use std::borrow::Cow; . use std::cmp::Ordering; . use std::marker::PhantomData; . use std::ops::Range; . use ty::util::IntTypeExt; . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 3 ( 0.00%) #[derive(HashStable, TypeFoldable, Lift)] . pub struct TypeAndMut<'tcx> { 19,489 ( 0.00%) pub ty: Ty<'tcx>, 97,445 ( 0.00%) pub mutbl: hir::Mutability, . } . . #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)] . #[derive(HashStable)] . /// A "free" region `fr` can be interpreted as "some region . /// at least as big as the scope `fr.scope`". . pub struct FreeRegion { 53,494 ( 0.00%) pub scope: DefId, 121,382 ( 0.00%) pub bound_region: BoundRegionKind, . } . 594,769 ( 0.01%) #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)] 74 ( 0.00%) #[derive(HashStable)] . pub enum BoundRegionKind { . /// An anonymous region parameter for a given fn (&T) 17,497 ( 0.00%) BrAnon(u32), . . /// Named region parameters for functions (a in &'a T) . /// . /// The `DefId` is needed to distinguish free regions in . /// the event of shadowing. 184 ( 0.00%) BrNamed(DefId, Symbol), . . /// Anonymous region for the implicit env pointer parameter . /// to a closure . BrEnv, . } . 2,148 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)] . #[derive(HashStable)] . pub struct BoundRegion { 118,046 ( 0.00%) pub var: BoundVar, 96,830 ( 0.00%) pub kind: BoundRegionKind, . } . . impl BoundRegionKind { . pub fn is_named(&self) -> bool { . match *self { . BoundRegionKind::BrNamed(_, name) => name != kw::UnderscoreLifetime, . _ => false, . } . } . } . . /// Defines the kinds of types. . /// . /// N.B., if you change this, you'll probably want to change the corresponding . /// AST structure in `rustc_ast/src/ast.rs` as well. 14,850,492 ( 0.13%) #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable, Debug)] 111,622 ( 0.00%) #[derive(HashStable)] . #[rustc_diagnostic_item = "TyKind"] . pub enum TyKind<'tcx> { . /// The primitive boolean type. Written as `bool`. . Bool, . . /// The primitive character type; holds a Unicode scalar value . /// (a non-surrogate code point). Written as `char`. . Char, -- line 90 ---------------------------------------- -- line 99 ---------------------------------------- . Float(ty::FloatTy), . . /// Algebraic data types (ADT). For example: structures, enumerations and unions. . /// . /// InternalSubsts here, possibly against intuition, *may* contain `Param`s. . /// That is, even after substitution it is possible that there are type . /// variables. 
This happens when the `Adt` corresponds to an ADT . /// definition and not a concrete use of it. 1,677,278 ( 0.01%) Adt(&'tcx AdtDef, SubstsRef<'tcx>), . . /// An unsized FFI type that is opaque to Rust. Written as `extern type T`. . Foreign(DefId), . . /// The pointee of a string slice. Written as `str`. . Str, . . /// An array with the given length. Written as `[T; n]`. 10,987 ( 0.00%) Array(Ty<'tcx>, &'tcx ty::Const<'tcx>), . . /// The pointee of an array slice. Written as `[T]`. . Slice(Ty<'tcx>), . . /// A raw pointer. Written as `*mut T` or `*const T` . RawPtr(TypeAndMut<'tcx>), . . /// A reference; a pointer with an associated lifetime. Written as . /// `&'a mut T` or `&'a T`. 1,822,496 ( 0.02%) Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability), . . /// The anonymous type of a function declaration/definition. Each . /// function has a unique type, which is output (for a function . /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`. . /// . /// For example the type of `bar` here: . /// . /// ```rust -- line 134 ---------------------------------------- -- line 143 ---------------------------------------- . /// . /// ```rust . /// fn foo() -> i32 { 1 } . /// let bar: fn() -> i32 = foo; . /// ``` . FnPtr(PolyFnSig<'tcx>), . . /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`. 16,838 ( 0.00%) Dynamic(&'tcx List>>, ty::Region<'tcx>), . . /// The anonymous type of a closure. Used to represent the type of . /// `|a| a`. . Closure(DefId, SubstsRef<'tcx>), . . /// The anonymous type of a generator. Used to represent the type of . /// `|a| yield a`. . Generator(DefId, SubstsRef<'tcx>, hir::Movability), -- line 159 ---------------------------------------- -- line 180 ---------------------------------------- . /// The substitutions are for the generics of the function in question. . /// After typeck, the concrete type can be found in the `types` map. . Opaque(DefId, SubstsRef<'tcx>), . . /// A type parameter; for example, `T` in `fn f(x: T) {}`. . Param(ParamTy), . . /// Bound type variable, used only when preparing a trait query. 4,396 ( 0.00%) Bound(ty::DebruijnIndex, BoundTy), . . /// A placeholder type - universally quantified higher-ranked type. . Placeholder(ty::PlaceholderType), . . /// A type variable used during type checking. . Infer(InferTy), . . /// A placeholder for a type which could not be computed; this is -- line 196 ---------------------------------------- -- line 330 ---------------------------------------- . pub closure_kind_ty: T, . pub closure_sig_as_fn_ptr_ty: T, . pub tupled_upvars_ty: T, . } . . impl<'tcx> ClosureSubsts<'tcx> { . /// Construct `ClosureSubsts` from `ClosureSubstsParts`, containing `Substs` . /// for the closure parent, alongside additional closure-specific components. 109 ( 0.00%) pub fn new( . tcx: TyCtxt<'tcx>, . parts: ClosureSubstsParts<'tcx, Ty<'tcx>>, . ) -> ClosureSubsts<'tcx> { . ClosureSubsts { . substs: tcx.mk_substs( 218 ( 0.00%) parts.parent_substs.iter().copied().chain( 436 ( 0.00%) [parts.closure_kind_ty, parts.closure_sig_as_fn_ptr_ty, parts.tupled_upvars_ty] . .iter() . .map(|&ty| ty.into()), . ), . ), . } 218 ( 0.00%) } . . /// Divides the closure substs into their respective components. . /// The ordering assumed here must match that used by `ClosureSubsts::new` above. 2,618 ( 0.00%) fn split(self) -> ClosureSubstsParts<'tcx, GenericArg<'tcx>> { . match self.substs[..] 
{ 19,416 ( 0.00%) [ 5,106 ( 0.00%) ref parent_substs @ .., 5,626 ( 0.00%) closure_kind_ty, 2,530 ( 0.00%) closure_sig_as_fn_ptr_ty, 4,170 ( 0.00%) tupled_upvars_ty, 10,472 ( 0.00%) ] => ClosureSubstsParts { . parent_substs, . closure_kind_ty, . closure_sig_as_fn_ptr_ty, . tupled_upvars_ty, . }, . _ => bug!("closure substs missing synthetics"), . } 7,854 ( 0.00%) } . . /// Returns `true` only if enough of the synthetic types are known to . /// allow using all of the methods on `ClosureSubsts` without panicking. . /// . /// Used primarily by `ty::print::pretty` to be able to handle closure . /// types that haven't had their synthetic types substituted in. . pub fn is_valid(self) -> bool { . self.substs.len() >= 3 -- line 378 ---------------------------------------- -- line 383 ---------------------------------------- . pub fn parent_substs(self) -> &'tcx [GenericArg<'tcx>] { . self.split().parent_substs . } . . /// Returns an iterator over the list of types of captured paths by the closure. . /// In case there was a type error in figuring out the types of the captured path, an . /// empty iterator is returned. . #[inline] 378 ( 0.00%) pub fn upvar_tys(self) -> impl Iterator> + 'tcx { 1,757 ( 0.00%) match self.tupled_upvars_ty().kind() { . TyKind::Error(_) => None, 718 ( 0.00%) TyKind::Tuple(..) => Some(self.tupled_upvars_ty().tuple_fields()), . TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"), . ty => bug!("Unexpected representation of upvar types tuple {:?}", ty), . } . .into_iter() . .flatten() 252 ( 0.00%) } . . /// Returns the tuple type representing the upvars for this closure. . #[inline] . pub fn tupled_upvars_ty(self) -> Ty<'tcx> { 12,552 ( 0.00%) self.split().tupled_upvars_ty.expect_ty() . } . . /// Returns the closure kind for this closure; may return a type . /// variable during inference. To get the closure kind during . /// inference, use `infcx.closure_kind(substs)`. 1,292 ( 0.00%) pub fn kind_ty(self) -> Ty<'tcx> { . self.split().closure_kind_ty.expect_ty() 2,584 ( 0.00%) } . . /// Returns the `fn` pointer type representing the closure signature for this . /// closure. . // FIXME(eddyb) this should be unnecessary, as the shallowly resolved . // type is known at the time of the creation of `ClosureSubsts`, . // see `rustc_typeck::check::closure`. . pub fn sig_as_fn_ptr_ty(self) -> Ty<'tcx> { . self.split().closure_sig_as_fn_ptr_ty.expect_ty() . } . . /// Returns the closure kind for this closure; only usable outside . /// of an inference context, because in that context we know that . /// there are no type variables. . /// . /// If you have an inference context, use `infcx.closure_kind()`. 58 ( 0.00%) pub fn kind(self) -> ty::ClosureKind { 165 ( 0.00%) self.kind_ty().to_opt_closure_kind().unwrap() 116 ( 0.00%) } . . /// Extracts the signature from the closure. 1,286 ( 0.00%) pub fn sig(self) -> ty::PolyFnSig<'tcx> { . let ty = self.sig_as_fn_ptr_ty(); 2,572 ( 0.00%) match ty.kind() { 5,144 ( 0.00%) ty::FnPtr(sig) => *sig, . _ => bug!("closure_sig_as_fn_ptr_ty is not a fn-ptr: {:?}", ty.kind()), . } 3,858 ( 0.00%) } . } . . /// Similar to `ClosureSubsts`; see the above documentation for more. . #[derive(Copy, Clone, Debug, TypeFoldable)] . pub struct GeneratorSubsts<'tcx> { . pub substs: SubstsRef<'tcx>, . } . -- line 448 ---------------------------------------- -- line 676 ---------------------------------------- . } . . impl<'tcx> UpvarSubsts<'tcx> { . /// Returns an iterator over the list of types of captured paths by the closure/generator. . 
/// In case there was a type error in figuring out the types of the captured path, an . /// empty iterator is returned. . #[inline] . pub fn upvar_tys(self) -> impl Iterator> + 'tcx { 334 ( 0.00%) let tupled_tys = match self { 443 ( 0.00%) UpvarSubsts::Closure(substs) => substs.as_closure().tupled_upvars_ty(), . UpvarSubsts::Generator(substs) => substs.as_generator().tupled_upvars_ty(), . }; . 719 ( 0.00%) match tupled_tys.kind() { . TyKind::Error(_) => None, 610 ( 0.00%) TyKind::Tuple(..) => Some(self.tupled_upvars_ty().tuple_fields()), . TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"), . ty => bug!("Unexpected representation of upvar types tuple {:?}", ty), . } . .into_iter() . .flatten() . } . . #[inline] . pub fn tupled_upvars_ty(self) -> Ty<'tcx> { 552 ( 0.00%) match self { 612 ( 0.00%) UpvarSubsts::Closure(substs) => substs.as_closure().tupled_upvars_ty(), . UpvarSubsts::Generator(substs) => substs.as_generator().tupled_upvars_ty(), . } . } . } . . /// An inline const is modeled like . /// . /// const InlineConst<'l0...'li, T0...Tj, R>: R; -- line 710 ---------------------------------------- -- line 760 ---------------------------------------- . } . . /// Returns the type of this inline const. . pub fn ty(self) -> Ty<'tcx> { . self.split().ty.expect_ty() . } . } . 15,567 ( 0.00%) #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, TyEncodable, TyDecodable)] 13,453 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub enum ExistentialPredicate<'tcx> { . /// E.g., `Iterator`. . Trait(ExistentialTraitRef<'tcx>), . /// E.g., `Iterator::Item = T`. . Projection(ExistentialProjection<'tcx>), . /// E.g., `Send`. 164 ( 0.00%) AutoTrait(DefId), . } . . impl<'tcx> ExistentialPredicate<'tcx> { . /// Compares via an ordering that will not change if modules are reordered or other changes are . /// made to the tree. In particular, this ordering is preserved across incremental compilations. 780 ( 0.00%) pub fn stable_cmp(&self, tcx: TyCtxt<'tcx>, other: &Self) -> Ordering { . use self::ExistentialPredicate::*; 1,116 ( 0.00%) match (*self, *other) { . (Trait(_), Trait(_)) => Ordering::Equal, . (Projection(ref a), Projection(ref b)) => { . tcx.def_path_hash(a.item_def_id).cmp(&tcx.def_path_hash(b.item_def_id)) . } . (AutoTrait(ref a), AutoTrait(ref b)) => { . tcx.def_path_hash(*a).cmp(&tcx.def_path_hash(*b)) . } . (Trait(_), _) => Ordering::Less, . (Projection(_), Trait(_)) => Ordering::Greater, . (Projection(_), _) => Ordering::Less, . (AutoTrait(_), _) => Ordering::Greater, . } 936 ( 0.00%) } . } . . impl<'tcx> Binder<'tcx, ExistentialPredicate<'tcx>> { 1,431 ( 0.00%) pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::Predicate<'tcx> { . use crate::ty::ToPredicate; 795 ( 0.00%) match self.skip_binder() { . ExistentialPredicate::Trait(tr) => { . self.rebind(tr).with_self_ty(tcx, self_ty).without_const().to_predicate(tcx) . } . ExistentialPredicate::Projection(p) => { . self.rebind(p.with_self_ty(tcx, self_ty)).to_predicate(tcx) . } . ExistentialPredicate::AutoTrait(did) => { . let trait_ref = self.rebind(ty::TraitRef { . def_id: did, . substs: tcx.mk_substs_trait(self_ty, &[]), . }); . trait_ref.without_const().to_predicate(tcx) . } . } 1,272 ( 0.00%) } . } . . impl<'tcx> List>> { . /// Returns the "principal `DefId`" of this set of existential predicates. . /// . /// A Rust trait object type consists (in addition to a lifetime bound) . /// of a set of trait bounds, which are separated into any number . 
/// of auto-trait bounds, and at most one non-auto-trait bound. The -- line 826 ---------------------------------------- -- line 839 ---------------------------------------- . /// are the set `{Sync}`. . /// . /// It is also possible to have a "trivial" trait object that . /// consists only of auto traits, with no principal - for example, . /// `dyn Send + Sync`. In that case, the set of auto-trait bounds . /// is `{Send, Sync}`, while there is no principal. These trait objects . /// have a "trivial" vtable consisting of just the size, alignment, . /// and destructor. 89 ( 0.00%) pub fn principal(&self) -> Option>> { 843 ( 0.00%) self[0] 576 ( 0.00%) .map_bound(|this| match this { . ExistentialPredicate::Trait(tr) => Some(tr), . _ => None, 356 ( 0.00%) }) . .transpose() 267 ( 0.00%) } . 159 ( 0.00%) pub fn principal_def_id(&self) -> Option { . self.principal().map(|trait_ref| trait_ref.skip_binder().def_id) 318 ( 0.00%) } . . #[inline] . pub fn projection_bounds<'a>( . &'a self, . ) -> impl Iterator>> + 'a { . self.iter().filter_map(|predicate| { . predicate 6 ( 0.00%) .map_bound(|pred| match pred { . ExistentialPredicate::Projection(projection) => Some(projection), . _ => None, . }) . .transpose() . }) . } . . #[inline] -- line 874 ---------------------------------------- -- line 886 ---------------------------------------- . /// T: Foo . /// . /// This would be represented by a trait-reference where the `DefId` is the . /// `DefId` for the trait `Foo` and the substs define `T` as parameter 0, . /// and `U` as parameter 1. . /// . /// Trait references also appear in object types like `Foo`, but in . /// that case the `Self` parameter is absent from the substitutions. 26,579 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 1,226,414 ( 0.01%) #[derive(HashStable, TypeFoldable)] . pub struct TraitRef<'tcx> { 2,293 ( 0.00%) pub def_id: DefId, 854,019 ( 0.01%) pub substs: SubstsRef<'tcx>, . } . . impl<'tcx> TraitRef<'tcx> { 4,010 ( 0.00%) pub fn new(def_id: DefId, substs: SubstsRef<'tcx>) -> TraitRef<'tcx> { . TraitRef { def_id, substs } 16,040 ( 0.00%) } . . /// Returns a `TraitRef` of the form `P0: Foo` where `Pi` . /// are the parameters defined on trait. 1,017 ( 0.00%) pub fn identity(tcx: TyCtxt<'tcx>, def_id: DefId) -> Binder<'tcx, TraitRef<'tcx>> { 452 ( 0.00%) ty::Binder::dummy(TraitRef { . def_id, 678 ( 0.00%) substs: InternalSubsts::identity_for_item(tcx, def_id), . }) 904 ( 0.00%) } . . #[inline] 6 ( 0.00%) pub fn self_ty(&self) -> Ty<'tcx> { . self.substs.type_at(0) 4 ( 0.00%) } . 8,824 ( 0.00%) pub fn from_method( . tcx: TyCtxt<'tcx>, . trait_id: DefId, . substs: SubstsRef<'tcx>, . ) -> ty::TraitRef<'tcx> { . let defs = tcx.generics_of(trait_id); . 1,103 ( 0.00%) ty::TraitRef { def_id: trait_id, substs: tcx.intern_substs(&substs[..defs.params.len()]) } 12,133 ( 0.00%) } . } . . pub type PolyTraitRef<'tcx> = Binder<'tcx, TraitRef<'tcx>>; . . impl<'tcx> PolyTraitRef<'tcx> { . pub fn self_ty(&self) -> Binder<'tcx, Ty<'tcx>> { . self.map_bound_ref(|tr| tr.self_ty()) . } . . pub fn def_id(&self) -> DefId { 9,978 ( 0.00%) self.skip_binder().def_id 3,326 ( 0.00%) } . 5,845 ( 0.00%) pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> { 11,690 ( 0.00%) self.map_bound(|trait_ref| ty::TraitPredicate { . trait_ref, . constness: ty::BoundConstness::NotConst, . polarity: ty::ImplPolarity::Positive, . }) 5,845 ( 0.00%) } . } . . /// An existential reference to a trait, where `Self` is erased. . 
/// For example, the trait object `Trait<'a, 'b, X, Y>` is: . /// . /// exists T. T: Trait<'a, 'b, X, Y> . /// . /// The substitutions don't include the erased `Self`, only trait . /// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above). 116 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 6,612 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct ExistentialTraitRef<'tcx> { 1,016 ( 0.00%) pub def_id: DefId, 8,118 ( 0.00%) pub substs: SubstsRef<'tcx>, . } . . impl<'tcx> ExistentialTraitRef<'tcx> { 240 ( 0.00%) pub fn erase_self_ty( . tcx: TyCtxt<'tcx>, . trait_ref: ty::TraitRef<'tcx>, . ) -> ty::ExistentialTraitRef<'tcx> { . // Assert there is a Self. . trait_ref.substs.type_at(0); . . ty::ExistentialTraitRef { . def_id: trait_ref.def_id, . substs: tcx.intern_substs(&trait_ref.substs[1..]), . } 240 ( 0.00%) } . . /// Object types don't have a self type specified. Therefore, when . /// we convert the principal trait-ref into a normal trait-ref, . /// you must give *some* self type. A common choice is `mk_err()` . /// or some placeholder type. . pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::TraitRef<'tcx> { . // otherwise the escaping vars would be captured by the binder . // debug_assert!(!self_ty.has_escaping_bound_vars()); -- line 985 ---------------------------------------- -- line 994 ---------------------------------------- . pub fn def_id(&self) -> DefId { . self.skip_binder().def_id . } . . /// Object types don't have a self type specified. Therefore, when . /// we convert the principal trait-ref into a normal trait-ref, . /// you must give *some* self type. A common choice is `mk_err()` . /// or some placeholder type. 132 ( 0.00%) pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::PolyTraitRef<'tcx> { 240 ( 0.00%) self.map_bound(|trait_ref| trait_ref.with_self_ty(tcx, self_ty)) 176 ( 0.00%) } . } . 81,619 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 160 ( 0.00%) #[derive(HashStable)] . pub enum BoundVariableKind { . Ty(BoundTyKind), . Region(BoundRegionKind), . Const, . } . . /// Binder is a binder for higher-ranked lifetimes or types. It is part of the . /// compiler's representation for things like `for<'a> Fn(&'a isize)` . /// (which would be represented by the type `PolyTraitRef == . /// Binder<'tcx, TraitRef>`). Note that when we instantiate, . /// erase, or otherwise "discharge" these bound vars, we change the . /// type from `Binder<'tcx, T>` to just `T` (see . /// e.g., `liberate_late_bound_regions`). . /// . /// `Decodable` and `Encodable` are implemented for `Binder` using the `impl_binder_encode_decode!` macro. 23,118 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] 547,344 ( 0.00%) pub struct Binder<'tcx, T>(T, &'tcx List); . . impl<'tcx, T> Binder<'tcx, T> . where . T: TypeFoldable<'tcx>, . { . /// Wraps `value` in a binder, asserting that `value` does not . /// contain any bound vars that would be bound by the . /// binder. This is commonly used to 'inject' a value T into a . /// different binding level. 50,250 ( 0.00%) pub fn dummy(value: T) -> Binder<'tcx, T> { 162,686 ( 0.00%) assert!(!value.has_escaping_bound_vars()); 387,798 ( 0.00%) Binder(value, ty::List::empty()) 50,161 ( 0.00%) } . . pub fn bind_with_vars(value: T, vars: &'tcx List) -> Binder<'tcx, T> { . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(vars); . 
value.visit_with(&mut validator); . } 34,233 ( 0.00%) Binder(value, vars) . } . } . . impl<'tcx, T> Binder<'tcx, T> { . /// Skips the binder and returns the "bound" value. This is a . /// risky thing to do because it's easy to get confused about . /// De Bruijn indices and the like. It is usually better to . /// discharge the binder using `no_bound_vars` or -- line 1053 ---------------------------------------- -- line 1059 ---------------------------------------- . /// accounting. . /// . /// Some examples where `skip_binder` is reasonable: . /// . /// - extracting the `DefId` from a PolyTraitRef; . /// - comparing the self type of a PolyTraitRef to see if it is equal to . /// a type parameter `X`, since the type `X` does not reference any regions . pub fn skip_binder(self) -> T { 1,615,039 ( 0.01%) self.0 . } . . pub fn bound_vars(&self) -> &'tcx List { 1,801 ( 0.00%) self.1 . } . . pub fn as_ref(&self) -> Binder<'tcx, &T> { 9,904 ( 0.00%) Binder(&self.0, self.1) . } . . pub fn map_bound_ref_unchecked(&self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(&T) -> U, . { . let value = f(&self.0); . Binder(value, self.1) -- line 1083 ---------------------------------------- -- line 1085 ---------------------------------------- . . pub fn map_bound_ref>(&self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(&T) -> U, . { . self.as_ref().map_bound(f) . } . 1,052 ( 0.00%) pub fn map_bound>(self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(T) -> U, . { 68,284 ( 0.00%) let value = f(self.0); . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.1); . value.visit_with(&mut validator); . } 349,406 ( 0.00%) Binder(value, self.1) 1,052 ( 0.00%) } . . pub fn try_map_bound, E>(self, f: F) -> Result, E> . where . F: FnOnce(T) -> Result, . { . let value = f(self.0)?; . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.1); . value.visit_with(&mut validator); . } 11,380 ( 0.00%) Ok(Binder(value, self.1)) . } . . /// Wraps a `value` in a binder, using the same bound variables as the . /// current `Binder`. This should not be used if the new value *changes* . /// the bound variables. Note: the (old or new) value itself does not . /// necessarily need to *name* all the bound variables. . /// . /// This currently doesn't do anything different than `bind`, because we -- line 1122 ---------------------------------------- -- line 1126 ---------------------------------------- . pub fn rebind(&self, value: U) -> Binder<'tcx, U> . where . U: TypeFoldable<'tcx>, . { . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.bound_vars()); . value.visit_with(&mut validator); . } 16,457 ( 0.00%) Binder(value, self.1) . } . . /// Unwraps and returns the value within, but only if it contains . /// no bound vars at all. (In other words, if this binder -- . /// and indeed any enclosing binder -- doesn't bind anything at . /// all.) Otherwise, returns `None`. . /// . /// (One could imagine having a method that just unwraps a single -- line 1142 ---------------------------------------- -- line 1143 ---------------------------------------- . /// binder, but permits late-bound vars bound by enclosing . /// binders, but that would require adjusting the debruijn . /// indices, and given the shallow binding structure we often use, . /// would not be that useful.) . pub fn no_bound_vars(self) -> Option . where . T: TypeFoldable<'tcx>, . { 260,789 ( 0.00%) if self.0.has_escaping_bound_vars() { None } else { Some(self.skip_binder()) } 515 ( 0.00%) } . . 
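The `Binder` plumbing above is the part of this file the profiler hits most often through `skip_binder`. A stripped-down model (not rustc's real types, which intern the variable list) of the invariant the comments describe: the value travels with its list of bound variables, `skip_binder` discards that bookkeeping unconditionally, and `no_bound_vars` unwraps only when nothing is bound:

// Toy stand-in for ty::Binder, for illustration only.
struct Binder<T>(T, Vec<&'static str>);

impl<T> Binder<T> {
    // Like Binder::dummy above: wrap a value that binds nothing.
    fn dummy(value: T) -> Self {
        Binder(value, Vec::new())
    }
    // Risky: hands back the value whether or not variables are bound.
    fn skip_binder(self) -> T {
        self.0
    }
    // Safe unwrap: succeeds only when the binder binds nothing.
    fn no_bound_vars(self) -> Option<T> {
        if self.1.is_empty() { Some(self.0) } else { None }
    }
}

fn main() {
    assert_eq!(Binder::dummy("u32").no_bound_vars(), Some("u32"));
    let for_a = Binder("fn(&'a u32)", vec!["'a"]); // binds 'a
    assert_eq!(for_a.no_bound_vars(), None); // bound vars present: refuse
    assert_eq!(Binder::dummy("fn(&u32)").skip_binder(), "fn(&u32)");
}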
/// Splits the contents into two things that share the same binder . /// level as the original, returning two distinct binders. . /// . /// `f` should consider bound regions at depth 1 to be free, and . /// anything it produces with bound regions at depth 1 will be . /// bound in the resulting return values. . pub fn split(self, f: F) -> (Binder<'tcx, U>, Binder<'tcx, V>) -- line 1160 ---------------------------------------- -- line 1170 ---------------------------------------- . pub fn transpose(self) -> Option> { . let bound_vars = self.1; . self.0.map(|v| Binder(v, bound_vars)) . } . } . . /// Represents the projection of an associated type. In explicit UFCS . /// form this would be written `>::N`. 2,497 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 126,062 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct ProjectionTy<'tcx> { . /// The parameters of the associated item. 47,729 ( 0.00%) pub substs: SubstsRef<'tcx>, . . /// The `DefId` of the `TraitItem` for the associated type `N`. . /// . /// Note that this is not the `DefId` of the `TraitRef` containing this . /// associated type, which is in `tcx.associated_item(item_def_id).container`. 74,193 ( 0.00%) pub item_def_id: DefId, . } . . impl<'tcx> ProjectionTy<'tcx> { 37,093 ( 0.00%) pub fn trait_def_id(&self, tcx: TyCtxt<'tcx>) -> DefId { 10,598 ( 0.00%) tcx.associated_item(self.item_def_id).container.id() 42,392 ( 0.00%) } . . /// Extracts the underlying trait reference and own substs from this projection. . /// For example, if this is a projection of `::Item<'a>`, . /// then this function would return a `T: Iterator` trait reference and `['a]` as the own substs 12,285 ( 0.00%) pub fn trait_ref_and_own_substs( . &self, . tcx: TyCtxt<'tcx>, . ) -> (ty::TraitRef<'tcx>, &'tcx [ty::GenericArg<'tcx>]) { 5,265 ( 0.00%) let def_id = tcx.associated_item(self.item_def_id).container.id(); . let trait_generics = tcx.generics_of(def_id); 8,775 ( 0.00%) ( . ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, trait_generics) }, . &self.substs[trait_generics.count()..], . ) 15,795 ( 0.00%) } . . /// Extracts the underlying trait reference from this projection. . /// For example, if this is a projection of `::Item`, . /// then this function would return a `T: Iterator` trait reference. . /// . /// WARNING: This will drop the substs for generic associated types . /// consider calling [Self::trait_ref_and_own_substs] to get those . /// as well. 46,764 ( 0.00%) pub fn trait_ref(&self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx> { 5,196 ( 0.00%) let def_id = self.trait_def_id(tcx); 5,196 ( 0.00%) ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, tcx.generics_of(def_id)) } 57,156 ( 0.00%) } . 9,914 ( 0.00%) pub fn self_ty(&self) -> Ty<'tcx> { 29,742 ( 0.00%) self.substs.type_at(0) 19,828 ( 0.00%) } . } . . #[derive(Copy, Clone, Debug, TypeFoldable)] . pub struct GenSig<'tcx> { . pub resume_ty: Ty<'tcx>, . pub yield_ty: Ty<'tcx>, . pub return_ty: Ty<'tcx>, . } -- line 1233 ---------------------------------------- -- line 1235 ---------------------------------------- . pub type PolyGenSig<'tcx> = Binder<'tcx, GenSig<'tcx>>; . . /// Signature of a function type, which we have arbitrarily . /// decided to use to refer to the input/output types. . /// . /// - `inputs`: is the list of arguments and their modes. . /// - `output`: is the return type. . /// - `c_variadic`: indicates whether this is a C-variadic function. 
110,260 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 60,356 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct FnSig<'tcx> { 5,203 ( 0.00%) pub inputs_and_output: &'tcx List>, 55,403 ( 0.00%) pub c_variadic: bool, 10,974 ( 0.00%) pub unsafety: hir::Unsafety, 53,265 ( 0.00%) pub abi: abi::Abi, . } . . impl<'tcx> FnSig<'tcx> { 52,382 ( 0.00%) pub fn inputs(&self) -> &'tcx [Ty<'tcx>] { 227,137 ( 0.00%) &self.inputs_and_output[..self.inputs_and_output.len() - 1] 104,764 ( 0.00%) } . 31,918 ( 0.00%) pub fn output(&self) -> Ty<'tcx> { 243,384 ( 0.00%) self.inputs_and_output[self.inputs_and_output.len() - 1] 63,836 ( 0.00%) } . . // Creates a minimal `FnSig` to be used when encountering a `TyKind::Error` in a fallible . // method. . fn fake() -> FnSig<'tcx> { . FnSig { . inputs_and_output: List::empty(), . c_variadic: false, . unsafety: hir::Unsafety::Normal, -- line 1267 ---------------------------------------- -- line 1270 ---------------------------------------- . } . } . . pub type PolyFnSig<'tcx> = Binder<'tcx, FnSig<'tcx>>; . . impl<'tcx> PolyFnSig<'tcx> { . #[inline] . pub fn inputs(&self) -> Binder<'tcx, &'tcx [Ty<'tcx>]> { 11,728 ( 0.00%) self.map_bound_ref_unchecked(|fn_sig| fn_sig.inputs()) . } . #[inline] . pub fn input(&self, index: usize) -> ty::Binder<'tcx, Ty<'tcx>> { 9,626 ( 0.00%) self.map_bound_ref(|fn_sig| fn_sig.inputs()[index]) . } . pub fn inputs_and_output(&self) -> ty::Binder<'tcx, &'tcx List>> { . self.map_bound_ref(|fn_sig| fn_sig.inputs_and_output) 1,479 ( 0.00%) } . #[inline] . pub fn output(&self) -> ty::Binder<'tcx, Ty<'tcx>> { 8,885 ( 0.00%) self.map_bound_ref(|fn_sig| fn_sig.output()) . } . pub fn c_variadic(&self) -> bool { 2,958 ( 0.00%) self.skip_binder().c_variadic 1,479 ( 0.00%) } . pub fn unsafety(&self) -> hir::Unsafety { 6,472 ( 0.00%) self.skip_binder().unsafety 3,236 ( 0.00%) } . pub fn abi(&self) -> abi::Abi { 70,652 ( 0.00%) self.skip_binder().abi 17,663 ( 0.00%) } . } . . pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder<'tcx, FnSig<'tcx>>>; . 108 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 1,076 ( 0.00%) #[derive(HashStable)] . pub struct ParamTy { 552 ( 0.00%) pub index: u32, . pub name: Symbol, . } . . impl<'tcx> ParamTy { 638 ( 0.00%) pub fn new(index: u32, name: Symbol) -> ParamTy { . ParamTy { index, name } 319 ( 0.00%) } . . pub fn for_def(def: &ty::GenericParamDef) -> ParamTy { 194 ( 0.00%) ParamTy::new(def.index, def.name) 97 ( 0.00%) } . . #[inline] . pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { . tcx.mk_ty_param(self.index, self.name) . } . } . . #[derive(Copy, Clone, Hash, TyEncodable, TyDecodable, Eq, PartialEq, Ord, PartialOrd)] . #[derive(HashStable)] . pub struct ParamConst { 150 ( 0.00%) pub index: u32, 150 ( 0.00%) pub name: Symbol, . } . . impl ParamConst { . pub fn new(index: u32, name: Symbol) -> ParamConst { . ParamConst { index, name } . } . . pub fn for_def(def: &ty::GenericParamDef) -> ParamConst { -- line 1338 ---------------------------------------- -- line 1440 ---------------------------------------- . /// the inference variable is supposed to satisfy the relation . /// *for every value of the placeholder region*. To ensure that doesn't . /// happen, you can use `leak_check`. This is more clearly explained . /// by the [rustc dev guide]. . /// . /// [1]: https://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/ . 
/// [2]: https://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/ . /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html 5,470,053 ( 0.05%) #[derive(Clone, PartialEq, Eq, Hash, Copy, TyEncodable, TyDecodable, PartialOrd, Ord)] 1,768 ( 0.00%) pub enum RegionKind { . /// Region bound in a type or fn declaration which will be . /// substituted 'early' -- that is, at the same time when type . /// parameters are substituted. . ReEarlyBound(EarlyBoundRegion), . . /// Region bound in a function scope, which will be substituted when the . /// function is called. 193,666 ( 0.00%) ReLateBound(ty::DebruijnIndex, BoundRegion), . . /// When checking a function body, the types of all arguments and so forth . /// that refer to bound region parameters are modified to refer to free . /// region parameters. . ReFree(FreeRegion), . . /// Static data that has an "infinite" lifetime. Top in the region lattice. . ReStatic, -- line 1465 ---------------------------------------- -- line 1478 ---------------------------------------- . /// regions visible from `U`, but not less than regions not visible . /// from `U`. . ReEmpty(ty::UniverseIndex), . . /// Erased region, used by trait selection, in MIR and during codegen. . ReErased, . } . 23,638 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)] . pub struct EarlyBoundRegion { 35,781 ( 0.00%) pub def_id: DefId, 44,857 ( 0.00%) pub index: u32, 97,341 ( 0.00%) pub name: Symbol, . } . . /// A **`const`** **v**ariable **ID**. . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] . pub struct ConstVid<'tcx> { 2,215 ( 0.00%) pub index: u32, 292 ( 0.00%) pub phantom: PhantomData<&'tcx ()>, . } . . rustc_index::newtype_index! { . /// A **region** (lifetime) **v**ariable **ID**. . pub struct RegionVid { . DEBUG_FORMAT = custom, . } . } -- line 1505 ---------------------------------------- -- line 1513 ---------------------------------------- . rustc_index::newtype_index! { . pub struct BoundVar { .. } . } . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub struct BoundTy { . pub var: BoundVar, 4,396 ( 0.00%) pub kind: BoundTyKind, . } . 74,795 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub enum BoundTyKind { . Anon, . Param(Symbol), . } . . impl From for BoundTy { . fn from(var: BoundVar) -> Self { -- line 1532 ---------------------------------------- -- line 1616 ---------------------------------------- . RegionKind::RePlaceholder(placeholder) => placeholder.name.is_named(), . RegionKind::ReEmpty(_) => false, . RegionKind::ReErased => false, . } . } . . #[inline] . pub fn is_late_bound(&self) -> bool { 3,269 ( 0.00%) matches!(*self, ty::ReLateBound(..)) . } . . #[inline] . pub fn is_placeholder(&self) -> bool { . matches!(*self, ty::RePlaceholder(..)) . } . . #[inline] . pub fn bound_at_or_above_binder(&self, index: ty::DebruijnIndex) -> bool { 127,134 ( 0.00%) match *self { . ty::ReLateBound(debruijn, _) => debruijn >= index, . _ => false, . } . } . . pub fn type_flags(&self) -> TypeFlags { . let mut flags = TypeFlags::empty(); . 151,712 ( 0.00%) match *self { . ty::ReVar(..) => { . flags = flags | TypeFlags::HAS_FREE_REGIONS; . flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; . flags = flags | TypeFlags::HAS_RE_INFER; . } . ty::RePlaceholder(..) => { . 
flags = flags | TypeFlags::HAS_FREE_REGIONS; . flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; -- line 1651 ---------------------------------------- -- line 1669 ---------------------------------------- . ty::ReErased => { . flags = flags | TypeFlags::HAS_RE_ERASED; . } . } . . debug!("type_flags({:?}) = {:?}", self, flags); . . flags 11,384 ( 0.00%) } . . /// Given an early-bound or free region, returns the `DefId` where it was bound. . /// For example, consider the regions in this snippet of code: . /// . /// ``` . /// impl<'a> Foo { . /// ^^ -- early bound, declared on an impl . /// -- line 1685 ---------------------------------------- -- line 1713 ---------------------------------------- . . #[inline(always)] . pub fn flags(&self) -> TypeFlags { . self.flags . } . . #[inline] . pub fn is_unit(&self) -> bool { 4,670 ( 0.00%) match self.kind() { 802 ( 0.00%) Tuple(ref tys) => tys.is_empty(), . _ => false, . } . } . . #[inline] . pub fn is_never(&self) -> bool { 46,030 ( 0.00%) matches!(self.kind(), Never) . } . . #[inline] . pub fn is_primitive(&self) -> bool { . self.kind().is_primitive() . } . . #[inline] -- line 1737 ---------------------------------------- -- line 1741 ---------------------------------------- . . #[inline] . pub fn is_ref(&self) -> bool { . matches!(self.kind(), Ref(..)) . } . . #[inline] . pub fn is_ty_var(&self) -> bool { 69,271 ( 0.00%) matches!(self.kind(), Infer(TyVar(_))) . } . . #[inline] . pub fn ty_vid(&self) -> Option { 55,796 ( 0.00%) match self.kind() { 7,250 ( 0.00%) &Infer(TyVar(vid)) => Some(vid), . _ => None, . } . } . . #[inline] . pub fn is_ty_infer(&self) -> bool { 327 ( 0.00%) matches!(self.kind(), Infer(_)) . } . . #[inline] . pub fn is_phantom_data(&self) -> bool { 2 ( 0.00%) if let Adt(def, _) = self.kind() { def.is_phantom_data() } else { false } . } . . #[inline] . pub fn is_bool(&self) -> bool { 291 ( 0.00%) *self.kind() == Bool . } . . /// Returns `true` if this type is a `str`. . #[inline] . pub fn is_str(&self) -> bool { . *self.kind() == Str . } . . #[inline] . pub fn is_param(&self, index: u32) -> bool { 74 ( 0.00%) match self.kind() { . ty::Param(ref data) => data.index == index, . _ => false, . } . } . . #[inline] . pub fn is_slice(&self) -> bool { . match self.kind() { . RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => matches!(ty.kind(), Slice(_) | Str), . _ => false, . } . } . . #[inline] . pub fn is_array(&self) -> bool { 606 ( 0.00%) matches!(self.kind(), Array(..)) . } . . #[inline] . pub fn is_simd(&self) -> bool { 18 ( 0.00%) match self.kind() { 9 ( 0.00%) Adt(def, _) => def.repr.simd(), . _ => false, . } . } . 80 ( 0.00%) pub fn sequence_element_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 160 ( 0.00%) match self.kind() { . Array(ty, _) | Slice(ty) => ty, . Str => tcx.mk_mach_uint(ty::UintTy::U8), . _ => bug!("`sequence_element_type` called on non-sequence value: {}", self), . } 80 ( 0.00%) } . . pub fn simd_size_and_type(&self, tcx: TyCtxt<'tcx>) -> (u64, Ty<'tcx>) { . match self.kind() { . Adt(def, substs) => { . assert!(def.repr.simd(), "`simd_size_and_type` called on non-SIMD type"); . let variant = def.non_enum_variant(); . let f0_ty = variant.fields[0].ty(tcx, substs); . -- line 1824 ---------------------------------------- -- line 1838 ---------------------------------------- . } . } . _ => bug!("`simd_size_and_type` called on invalid type"), . } . } . . #[inline] . pub fn is_region_ptr(&self) -> bool { 102 ( 0.00%) matches!(self.kind(), Ref(..)) . } . . #[inline] . 
pub fn is_mutable_ptr(&self) -> bool { 162 ( 0.00%) matches!( 108 ( 0.00%) self.kind(), . RawPtr(TypeAndMut { mutbl: hir::Mutability::Mut, .. }) . | Ref(_, _, hir::Mutability::Mut) . ) . } . . /// Get the mutability of the reference or `None` when not a reference . #[inline] . pub fn ref_mutability(&self) -> Option { -- line 1860 ---------------------------------------- -- line 1861 ---------------------------------------- . match self.kind() { . Ref(_, _, mutability) => Some(*mutability), . _ => None, . } . } . . #[inline] . pub fn is_unsafe_ptr(&self) -> bool { 8,278 ( 0.00%) matches!(self.kind(), RawPtr(_)) . } . . /// Tests if this is any kind of primitive pointer type (reference, raw pointer, fn pointer). . #[inline] . pub fn is_any_ptr(&self) -> bool { 8 ( 0.00%) self.is_region_ptr() || self.is_unsafe_ptr() || self.is_fn_ptr() . } . . #[inline] . pub fn is_box(&self) -> bool { 54,264 ( 0.00%) match self.kind() { 10,319 ( 0.00%) Adt(def, _) => def.is_box(), . _ => false, . } . } . . /// Panics if called on any type other than `Box`. 968 ( 0.00%) pub fn boxed_ty(&self) -> Ty<'tcx> { 968 ( 0.00%) match self.kind() { 2,420 ( 0.00%) Adt(def, substs) if def.is_box() => substs.type_at(0), . _ => bug!("`boxed_ty` is called on non-box type {:?}", self), . } 968 ( 0.00%) } . . /// A scalar type is one that denotes an atomic datum, with no sub-components. . /// (A RawPtr is scalar because it represents a non-managed pointer, so its . /// contents are abstract to rustc.) . #[inline] . pub fn is_scalar(&self) -> bool { 957 ( 0.00%) matches!( 2,213 ( 0.00%) self.kind(), . Bool | Char . | Int(_) . | Float(_) . | Uint(_) . | FnDef(..) . | FnPtr(_) . | RawPtr(_) . | Infer(IntVar(_) | FloatVar(_)) . ) . } . . /// Returns `true` if this type is a floating point type. . #[inline] . pub fn is_floating_point(&self) -> bool { 3,789 ( 0.00%) matches!(self.kind(), Float(_) | Infer(FloatVar(_))) . } . . #[inline] . pub fn is_trait(&self) -> bool { 232 ( 0.00%) matches!(self.kind(), Dynamic(..)) . } . . #[inline] . pub fn is_enum(&self) -> bool { 984 ( 0.00%) matches!(self.kind(), Adt(adt_def, _) if adt_def.is_enum()) . } . . #[inline] . pub fn is_union(&self) -> bool { 8,335 ( 0.00%) matches!(self.kind(), Adt(adt_def, _) if adt_def.is_union()) . } . . #[inline] . pub fn is_closure(&self) -> bool { 2,154 ( 0.00%) matches!(self.kind(), Closure(..)) . } . . #[inline] . pub fn is_generator(&self) -> bool { 5,545 ( 0.00%) matches!(self.kind(), Generator(..)) . } . . #[inline] . pub fn is_integral(&self) -> bool { 5,049 ( 0.00%) matches!(self.kind(), Infer(IntVar(_)) | Int(_) | Uint(_)) . } . . #[inline] . pub fn is_fresh_ty(&self) -> bool { . matches!(self.kind(), Infer(FreshTy(_))) . } . . #[inline] . pub fn is_fresh(&self) -> bool { 7,180 ( 0.00%) matches!(self.kind(), Infer(FreshTy(_) | FreshIntTy(_) | FreshFloatTy(_))) . } . . #[inline] . pub fn is_char(&self) -> bool { . matches!(self.kind(), Char) . } . . #[inline] . pub fn is_numeric(&self) -> bool { . self.is_integral() || self.is_floating_point() . } . . #[inline] . pub fn is_signed(&self) -> bool { 937 ( 0.00%) matches!(self.kind(), Int(_)) . } . . #[inline] . pub fn is_ptr_sized_integral(&self) -> bool { 1,815 ( 0.00%) matches!(self.kind(), Int(ty::IntTy::Isize) | Uint(ty::UintTy::Usize)) . } . . #[inline] . pub fn has_concrete_skeleton(&self) -> bool { . !matches!(self.kind(), Param(_) | Infer(_) | Error(_)) . } . . /// Returns the type and mutability of `*ty`. . /// . /// The parameter `explicit` indicates if this is an *explicit* dereference. . 
/// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly. 16,527 ( 0.00%) pub fn builtin_deref(&self, explicit: bool) -> Option> { 99,265 ( 0.00%) match self.kind() { 1,422 ( 0.00%) Adt(def, _) if def.is_box() => { 200 ( 0.00%) Some(TypeAndMut { ty: self.boxed_ty(), mutbl: hir::Mutability::Not }) . } 58,362 ( 0.00%) Ref(_, ty, mutbl) => Some(TypeAndMut { ty, mutbl: *mutbl }), 2,848 ( 0.00%) RawPtr(mt) if explicit => Some(*mt), . _ => None, . } 33,054 ( 0.00%) } . . /// Returns the type of `ty[i]`. . pub fn builtin_index(&self) -> Option> { 656 ( 0.00%) match self.kind() { . Array(ty, _) | Slice(ty) => Some(ty), . _ => None, . } 77 ( 0.00%) } . 190,624 ( 0.00%) pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { 95,312 ( 0.00%) match self.kind() { 71,277 ( 0.00%) FnDef(def_id, substs) => tcx.fn_sig(*def_id).subst(tcx, substs), 345 ( 0.00%) FnPtr(f) => *f, . Error(_) => { . // ignore errors (#54954) . ty::Binder::dummy(FnSig::fake()) . } . Closure(..) => bug!( . "to get the signature of a closure, use `substs.as_closure().sig()` not `fn_sig()`", . ), . _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self), . } 214,452 ( 0.00%) } . . #[inline] . pub fn is_fn(&self) -> bool { 9,342 ( 0.00%) matches!(self.kind(), FnDef(..) | FnPtr(_)) . } . . #[inline] . pub fn is_fn_ptr(&self) -> bool { . matches!(self.kind(), FnPtr(_)) . } . . #[inline] . pub fn is_impl_trait(&self) -> bool { . matches!(self.kind(), Opaque(..)) . } . . #[inline] . pub fn ty_adt_def(&self) -> Option<&'tcx AdtDef> { 3,652 ( 0.00%) match self.kind() { . Adt(adt, _) => Some(adt), . _ => None, . } . } . . /// Iterates over tuple fields. . /// Panics when called on anything but a tuple. 605 ( 0.00%) pub fn tuple_fields(&self) -> impl DoubleEndedIterator> { 2,682 ( 0.00%) match self.kind() { 2,022 ( 0.00%) Tuple(substs) => substs.iter().map(|field| field.expect_ty()), . _ => bug!("tuple_fields called on non-tuple"), . } 1,210 ( 0.00%) } . . /// Get the `i`-th element of a tuple. . /// Panics when called on anything but a tuple. 180 ( 0.00%) pub fn tuple_element_ty(&self, i: usize) -> Option> { 360 ( 0.00%) match self.kind() { 360 ( 0.00%) Tuple(substs) => substs.iter().nth(i).map(|field| field.expect_ty()), . _ => bug!("tuple_fields called on non-tuple"), . } 360 ( 0.00%) } . . /// If the type contains variants, returns the valid range of variant indices. . // . // FIXME: This requires the optimized MIR in the case of generators. . #[inline] . pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option> { . match self.kind() { . TyKind::Adt(adt, _) => Some(adt.variant_range()), -- line 2068 ---------------------------------------- -- line 2073 ---------------------------------------- . } . } . . /// If the type contains variants, returns the variant for `variant_index`. . /// Panics if `variant_index` is out of range. . // . // FIXME: This requires the optimized MIR in the case of generators. . #[inline] 12,170 ( 0.00%) pub fn discriminant_for_variant( . &self, . tcx: TyCtxt<'tcx>, . variant_index: VariantIdx, . ) -> Option> { 7,865 ( 0.00%) match self.kind() { 1,640 ( 0.00%) TyKind::Adt(adt, _) if adt.variants.is_empty() => { . // This can actually happen during CTFE, see . // https://github.com/rust-lang/rust/issues/89765. . None . } 820 ( 0.00%) TyKind::Adt(adt, _) if adt.is_enum() => { . Some(adt.discriminant_for_variant(tcx, variant_index)) . } . TyKind::Generator(def_id, substs, _) => { . Some(substs.as_generator().discriminant_for_variant(*def_id, tcx, variant_index)) . } . _ => None, . 
} 9,736 ( 0.00%) } . . /// Returns the type of the discriminant of this type. 9,240 ( 0.00%) pub fn discriminant_ty(&'tcx self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 4,620 ( 0.00%) match self.kind() { 7,392 ( 0.00%) ty::Adt(adt, _) if adt.is_enum() => adt.repr.discr_type().to_ty(tcx), . ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx), . . ty::Param(_) | ty::Projection(_) | ty::Opaque(..) | ty::Infer(ty::TyVar(_)) => { . let assoc_items = tcx.associated_item_def_ids( . tcx.require_lang_item(hir::LangItem::DiscriminantKind, None), . ); . tcx.mk_projection(assoc_items[0], tcx.intern_substs(&[self.into()])) . } -- line 2113 ---------------------------------------- -- line 2135 ---------------------------------------- . | ty::Infer(IntVar(_) | FloatVar(_)) => tcx.types.u8, . . ty::Bound(..) . | ty::Placeholder(_) . | ty::Infer(FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`discriminant_ty` applied to unexpected type: {:?}", self) . } . } 7,392 ( 0.00%) } . . /// Returns the type of metadata for (potentially fat) pointers to this type. 108 ( 0.00%) pub fn ptr_metadata_ty( . &'tcx self, . tcx: TyCtxt<'tcx>, . normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, . ) -> Ty<'tcx> { 108 ( 0.00%) let tail = tcx.struct_tail_with_normalize(self, normalize); 60 ( 0.00%) match tail.kind() { . // Sized types . ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) . | ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) -- line 2160 ---------------------------------------- -- line 2170 ---------------------------------------- . | ty::Foreign(..) . // If returned by `struct_tail_without_normalization` this is a unit struct . // without any fields, or not a struct, and therefore is Sized. . | ty::Adt(..) . // If returned by `struct_tail_without_normalization` this is the empty tuple, . // a.k.a. unit type, which is Sized . | ty::Tuple(..) => tcx.types.unit, . 24 ( 0.00%) ty::Str | ty::Slice(_) => tcx.types.usize, . ty::Dynamic(..) => { . let dyn_metadata = tcx.lang_items().dyn_metadata().unwrap(); . tcx.type_of(dyn_metadata).subst(tcx, &[tail.into()]) . }, . . ty::Projection(_) . | ty::Param(_) . | ty::Opaque(..) . | ty::Infer(ty::TyVar(_)) . | ty::Bound(..) . | ty::Placeholder(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`ptr_metadata_ty` applied to unexpected type: {:?}", tail) . } . } 96 ( 0.00%) } . . /// When we create a closure, we record its kind (i.e., what trait . /// it implements) into its `ClosureSubsts` using a type . /// parameter. This is kind of a phantom type, except that the . /// most convenient thing for us to are the integral types. This . /// function converts such a special type into the closure . /// kind. To go the other way, use . /// `tcx.closure_kind_ty(closure_kind)`. . /// . /// Note that during type checking, we use an inference variable . /// to represent the closure kind, because it has not yet been . /// inferred. Once upvar inference (in `rustc_typeck/src/check/upvar.rs`) . /// is complete, that type variable will be unified. 3,528 ( 0.00%) pub fn to_opt_closure_kind(&self) -> Option { 7,056 ( 0.00%) match self.kind() { 7,056 ( 0.00%) Int(int_ty) => match int_ty { . ty::IntTy::I8 => Some(ty::ClosureKind::Fn), . ty::IntTy::I16 => Some(ty::ClosureKind::FnMut), . ty::IntTy::I32 => Some(ty::ClosureKind::FnOnce), . _ => bug!("cannot convert type `{:?}` to a closure kind", self), . }, . . // "Bound" types appear in canonical queries when the . // closure type is not yet known . 
Bound(..) | Infer(_) => None, . . Error(_) => Some(ty::ClosureKind::Fn), . . _ => bug!("cannot convert type `{:?}` to a closure kind", self), . } 3,528 ( 0.00%) } . . /// Fast path helper for testing if a type is `Sized`. . /// . /// Returning true means the type is known to be sized. Returning . /// `false` means nothing -- could be sized, might not be. . /// . /// Note that we could never rely on the fact that a type such as `[_]` is . /// trivially `!Sized` because we could be in a type environment with a . /// bound such as `[_]: Copy`. A function with such a bound obviously never . /// can be called, but that doesn't mean it shouldn't typecheck. This is why . /// this method doesn't return `Option`. 236,421 ( 0.00%) pub fn is_trivially_sized(&self, tcx: TyCtxt<'tcx>) -> bool { 157,614 ( 0.00%) match self.kind() { . ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) . | ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::RawPtr(..) -- line 2246 ---------------------------------------- -- line 2250 ---------------------------------------- . | ty::GeneratorWitness(..) . | ty::Array(..) . | ty::Closure(..) . | ty::Never . | ty::Error(_) => true, . . ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false, . 1,760 ( 0.00%) ty::Tuple(tys) => tys.iter().all(|ty| ty.expect_ty().is_trivially_sized(tcx)), . 18,561 ( 0.00%) ty::Adt(def, _substs) => def.sized_constraint(tcx).is_empty(), . . ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => false, . . ty::Infer(ty::TyVar(_)) => false, . . ty::Bound(..) . | ty::Placeholder(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`is_trivially_sized` applied to unexpected type: {:?}", self) . } . } 236,421 ( 0.00%) } . } . . /// Extra information about why we ended up with a particular variance. . /// This is only used to add more information to error messages, and . /// has no effect on soundness. While choosing the 'wrong' `VarianceDiagInfo` . /// may lead to confusing notes in error messages, it will never cause . /// a miscompilation or unsoundness. . /// . /// When in doubt, use `VarianceDiagInfo::default()` 42 ( 0.00%) #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] . pub enum VarianceDiagInfo<'tcx> { . /// No additional information - this is the default. . /// We will not add any additional information to error messages. . #[default] . None, . /// We switched our variance because a generic argument occurs inside . /// the invariant generic argument of another type. . Invariant { -- line 2290 ---------------------------------------- -- line 2295 ---------------------------------------- . /// (e.g. `0` for `*mut T`, `1` for `MyStruct<'CovariantParam, 'InvariantParam>`) . param_index: u32, . }, . } . . impl<'tcx> VarianceDiagInfo<'tcx> { . /// Mirrors `Variance::xform` - used to 'combine' the existing . /// and new `VarianceDiagInfo`s when our variance changes. 56,838 ( 0.00%) pub fn xform(self, other: VarianceDiagInfo<'tcx>) -> VarianceDiagInfo<'tcx> { . // For now, just use the first `VarianceDiagInfo::Invariant` that we see 85,257 ( 0.00%) match self { . VarianceDiagInfo::None => other, . VarianceDiagInfo::Invariant { .. } => self, . } 28,419 ( 0.00%) } . 
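//
// A standalone sketch of the `xform` rule above, with invented names
// (`ToyVarianceInfo` is for illustration only, not rustc's type): the
// first `Invariant` cause encountered is kept, and `None` always defers
// to the other side.
#[derive(Clone, Copy, Debug, PartialEq)]
enum ToyVarianceInfo {
    None,
    Invariant { param_index: u32 },
}

impl ToyVarianceInfo {
    fn xform(self, other: Self) -> Self {
        match self {
            ToyVarianceInfo::None => other,
            ToyVarianceInfo::Invariant { .. } => self,
        }
    }
}

fn main() {
    let none = ToyVarianceInfo::None;
    let inv1 = ToyVarianceInfo::Invariant { param_index: 1 };
    let inv2 = ToyVarianceInfo::Invariant { param_index: 2 };
    assert_eq!(none.xform(inv1), inv1); // `None` defers
    assert_eq!(inv1.xform(inv2), inv1); // first invariant wins
    assert_eq!(inv1.xform(none), inv1); // and is kept thereafter
}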
} 20,767,499 ( 0.18%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs -------------------------------------------------------------------------------- Ir -- line 91 ---------------------------------------- . // maximum of number bytes needed to fill an 8-byte-sized element on which . // SipHash operates. Note that for variable-sized copies which are known to be . // less than 8 bytes, this function will perform more work than necessary unless . // the compiler is able to optimize the extra work away. . #[inline] . unsafe fn copy_nonoverlapping_small(src: *const u8, dst: *mut u8, count: usize) { . debug_assert!(count <= 8); . 127,178 ( 0.00%) if count == 8 { . ptr::copy_nonoverlapping(src, dst, 8); . return; . } . . let mut i = 0; 138,710 ( 0.00%) if i + 3 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 4); . i += 4; . } . 301,842 ( 0.00%) if i + 1 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 2); 32,975 ( 0.00%) i += 2 . } . 138,710 ( 0.00%) if i < count { 87,516 ( 0.00%) *dst.add(i) = *src.add(i); . i += 1; . } . . debug_assert_eq!(i, count); . } . . // # Implementation . // -- line 124 ---------------------------------------- -- line 201 ---------------------------------------- . . hasher . } . . // A specialized write function for values with size <= 8. . #[inline] . fn short_write(&mut self, x: T) { . let size = mem::size_of::(); 1,660,869 ( 0.01%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . 11,204,768 ( 0.10%) if nbuf + size < BUFFER_SIZE { . unsafe { . // The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . } . 3,430,550 ( 0.03%) self.nbuf = nbuf + size; . . return; . } . 554,756 ( 0.00%) unsafe { self.short_write_process_buffer(x) } . } . . // A specialized write function for values with size <= 8 that should only . // be called when the write would cause the buffer to fill. . // . // SAFETY: the write of `x` into `self.buf` starting at byte offset . // `self.nbuf` must cause `self.buf` to become fully initialized (and not . // overflow) if it wasn't already. . #[inline(never)] 175,452 ( 0.00%) unsafe fn short_write_process_buffer(&mut self, x: T) { . let size = mem::size_of::(); 175,452 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size >= BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . . // Copy first part of input into end of buffer, possibly into spill . // element. The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . . // Process buffer. . for i in 0..BUFFER_CAPACITY { 1,754,520 ( 0.02%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 1,403,616 ( 0.01%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 1,579,068 ( 0.01%) self.state.v0 ^= elem; . } . . // Copy remaining input into start of buffer by copying size - 1 . // elements from spill (at most size - 1 bytes could have overflowed . // into the spill). The memcpy call is optimized away because the size . // is known. 
And the whole copy is optimized away for size == 1. . let src = self.buf.get_unchecked(BUFFER_SPILL_INDEX) as *const _ as *const u8; . ptr::copy_nonoverlapping(src, self.buf.as_mut_ptr() as *mut u8, size - 1); . . // This function should only be called when the write fills the buffer. . // Therefore, when size == 1, the new `self.nbuf` must be zero. The size . // is statically known, so the branch is optimized away. 826,764 ( 0.01%) self.nbuf = if size == 1 { 0 } else { nbuf + size - BUFFER_SIZE }; 701,808 ( 0.01%) self.processed += BUFFER_SIZE; 350,904 ( 0.00%) } . . // A write function for byte slices. . #[inline] . fn slice_write(&mut self, msg: &[u8]) { . let length = msg.len(); 17,141 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . 393,762 ( 0.00%) if nbuf + length < BUFFER_SIZE { . unsafe { . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . 141,680 ( 0.00%) if length <= 8 { . copy_nonoverlapping_small(msg.as_ptr(), dst, length); . } else { . // This memcpy is *not* optimized away. . ptr::copy_nonoverlapping(msg.as_ptr(), dst, length); . } . } . 71,082 ( 0.00%) self.nbuf = nbuf + length; . . return; . } . 60,247 ( 0.00%) unsafe { self.slice_write_process_buffer(msg) } . } . . // A write function for byte slices that should only be called when the . // write would cause the buffer to fill. . // . // SAFETY: `self.buf` must be initialized up to the byte offset `self.nbuf`, . // and `msg` must contain enough bytes to initialize the rest of the element . // containing the byte offset `self.nbuf`. . #[inline(never)] 35,455 ( 0.00%) unsafe fn slice_write_process_buffer(&mut self, msg: &[u8]) { . let length = msg.len(); 7,091 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + length >= BUFFER_SIZE); . . // Always copy first part of input into current element of buffer. . // This function should only be called when the write fills the buffer, . // so we know that there is enough input to fill the current element. 21,273 ( 0.00%) let valid_in_elem = nbuf % ELEM_SIZE; 7,091 ( 0.00%) let needed_in_elem = ELEM_SIZE - valid_in_elem; . . let src = msg.as_ptr(); . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . copy_nonoverlapping_small(src, dst, needed_in_elem); . . // Process buffer. . . // Using `nbuf / ELEM_SIZE + 1` rather than `(nbuf + needed_in_elem) / . // ELEM_SIZE` to show the compiler that this loop's upper bound is > 0. . // We know that is true, because last step ensured we have a full . // element in the buffer. 14,182 ( 0.00%) let last = nbuf / ELEM_SIZE + 1; . . for i in 0..last { 49,871 ( 0.00%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 56,962 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 99,742 ( 0.00%) self.state.v0 ^= elem; . } . . // Process the remaining element-sized chunks of input. . let mut processed = needed_in_elem; 14,182 ( 0.00%) let input_left = length - processed; 9,816 ( 0.00%) let elems_left = input_left / ELEM_SIZE; . let extra_bytes_left = input_left % ELEM_SIZE; . . for _ in 0..elems_left { 14,767 ( 0.00%) let elem = (msg.as_ptr().add(processed) as *const u64).read_unaligned().to_le(); 14,767 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 14,767 ( 0.00%) self.state.v0 ^= elem; 29,534 ( 0.00%) processed += ELEM_SIZE; . } . . // Copy remaining input into start of buffer. . let src = msg.as_ptr().add(processed); . let dst = self.buf.as_mut_ptr() as *mut u8; . copy_nonoverlapping_small(src, dst, extra_bytes_left); . 
7,091 ( 0.00%) self.nbuf = extra_bytes_left; 35,455 ( 0.00%) self.processed += nbuf + processed; 42,546 ( 0.00%) } . . #[inline] . pub fn finish128(mut self) -> (u64, u64) { . debug_assert!(self.nbuf < BUFFER_SIZE); . . // Process full elements in buffer. 64,260 ( 0.00%) let last = self.nbuf / ELEM_SIZE; . . // Since we're consuming self, avoid updating members for a potential . // performance gain. 85,680 ( 0.00%) let mut state = self.state; . . for i in 0..last { 59,584 ( 0.00%) let elem = unsafe { self.buf.get_unchecked(i).assume_init().to_le() }; 59,584 ( 0.00%) state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut state); 59,584 ( 0.00%) state.v0 ^= elem; . } . . // Get remaining partial element. 42,840 ( 0.00%) let elem = if self.nbuf % ELEM_SIZE != 0 { . unsafe { . // Ensure element is initialized by writing zero bytes. At most . // `ELEM_SIZE - 1` are required given the above check. It's safe . // to write this many because we have the spill and we maintain . // `self.nbuf` such that this write will start before the spill. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(self.nbuf); . ptr::write_bytes(dst, 0, ELEM_SIZE - 1); 19,862 ( 0.00%) self.buf.get_unchecked(last).assume_init().to_le() . } . } else { . 0 . }; . . // Finalize the hash. 61,075 ( 0.00%) let length = self.processed + self.nbuf; 39,580 ( 0.00%) let b: u64 = ((length as u64 & 0xff) << 56) | elem; . 19,790 ( 0.00%) state.v3 ^= b; . Sip24Rounds::c_rounds(&mut state); 19,790 ( 0.00%) state.v0 ^= b; . 19,790 ( 0.00%) state.v2 ^= 0xee; . Sip24Rounds::d_rounds(&mut state); 68,738 ( 0.00%) let _0 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . 29,152 ( 0.00%) state.v1 ^= 0xdd; . Sip24Rounds::d_rounds(&mut state); 29,152 ( 0.00%) let _1 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . . (_0, _1) . } . } . . impl Hasher for SipHasher128 { . #[inline] . fn write_u8(&mut self, i: u8) { -- line 414 ---------------------------------------- -- line 471 ---------------------------------------- . } . . #[derive(Debug, Clone, Default)] . struct Sip24Rounds; . . impl Sip24Rounds { . #[inline] . fn c_rounds(state: &mut State) { 6,387,237 ( 0.06%) compress!(state); 6,866,481 ( 0.06%) compress!(state); . } . . #[inline] . fn d_rounds(state: &mut State) { 137,464 ( 0.00%) compress!(state); 137,464 ( 0.00%) compress!(state); 137,464 ( 0.00%) compress!(state); 117,677 ( 0.00%) compress!(state); . } . } 1,185,092 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs -------------------------------------------------------------------------------- Ir -- line 27 ---------------------------------------- . impl CacheEntry { . #[inline] . fn update( . &mut self, . new_file_and_idx: Option<(Lrc, usize)>, . pos: BytePos, . time_stamp: usize, . ) { 40,631 ( 0.00%) if let Some((file, file_idx)) = new_file_and_idx { 11,547 ( 0.00%) self.file = file; 19,992 ( 0.00%) self.file_index = file_idx; . } . 20,530 ( 0.00%) let line_index = self.file.lookup_line(pos).unwrap(); . let line_bounds = self.file.line_bounds(line_index); 62,086 ( 0.00%) self.line_number = line_index + 1; 88,852 ( 0.00%) self.line = line_bounds; . self.touch(time_stamp); . } . . #[inline] . fn touch(&mut self, time_stamp: usize) { 357,105 ( 0.00%) self.time_stamp = time_stamp; . } . } . . #[derive(Clone)] . pub struct CachingSourceMapView<'sm> { . source_map: &'sm SourceMap, . line_cache: [CacheEntry; 3], . time_stamp: usize, . } . . 
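//
// A self-contained sketch of the caching scheme above, with invented
// names (`ToyLineCache`, `ToyEntry`): three entries, each stamped with
// the time of its last use; hits refresh the stamp, misses binary-search
// the line-start table and evict the least-recently-used entry. This
// mirrors the shape of `CachingSourceMapView`, not its exact API.
struct ToyEntry {
    time_stamp: usize,
    line: std::ops::Range<u32>, // byte range of the cached line
    line_number: usize,
}

struct ToyLineCache {
    entries: [ToyEntry; 3],
    time_stamp: usize,
    line_starts: Vec<u32>, // sorted byte offsets of line starts; [0] == 0
}

impl ToyLineCache {
    fn byte_pos_to_line(&mut self, pos: u32) -> usize {
        self.time_stamp += 1;
        // Cache hit: the position falls inside a cached line's range.
        for e in self.entries.iter_mut() {
            if e.line.contains(&pos) {
                e.time_stamp = self.time_stamp;
                return e.line_number;
            }
        }
        // Miss: locate the containing line, then evict the oldest entry.
        let idx = match self.line_starts.binary_search(&pos) {
            Ok(i) => i,
            Err(i) => i - 1, // safe because line_starts[0] == 0
        };
        let start = self.line_starts[idx];
        let end = self.line_starts.get(idx + 1).copied().unwrap_or(u32::MAX);
        let oldest = (0..self.entries.len())
            .min_by_key(|&i| self.entries[i].time_stamp)
            .unwrap();
        self.entries[oldest] =
            ToyEntry { time_stamp: self.time_stamp, line: start..end, line_number: idx + 1 };
        idx + 1
    }
}

fn main() {
    let mut cache = ToyLineCache {
        entries: std::array::from_fn(|_| ToyEntry { time_stamp: 0, line: 0..0, line_number: 0 }),
        time_stamp: 0,
        line_starts: vec![0, 10, 25, 40], // four lines
    };
    assert_eq!(cache.byte_pos_to_line(12), 2); // miss: fills an entry
    assert_eq!(cache.byte_pos_to_line(14), 2); // hit on the same cached line
}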
impl<'sm> CachingSourceMapView<'sm> { 9,244 ( 0.00%) pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> { . let files = source_map.files(); 4,622 ( 0.00%) let first_file = files[0].clone(); . let entry = CacheEntry { . time_stamp: 0, . line_number: 0, . line: BytePos(0)..BytePos(0), . file: first_file, . file_index: 0, . }; . 23,110 ( 0.00%) CachingSourceMapView { . source_map, 32,354 ( 0.00%) line_cache: [entry.clone(), entry.clone(), entry], . time_stamp: 0, . } 18,488 ( 0.00%) } . . pub fn byte_pos_to_line_and_col( . &mut self, . pos: BytePos, . ) -> Option<(Lrc, usize, BytePos)> { . self.time_stamp += 1; . . // Check if the position is in one of the cached lines -- line 85 ---------------------------------------- -- line 106 ---------------------------------------- . }; . . let cache_entry = &mut self.line_cache[oldest]; . cache_entry.update(new_file_and_idx, pos, self.time_stamp); . . Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line.start)) . } . 1,354,899 ( 0.01%) pub fn span_data_to_lines_and_cols( . &mut self, . span_data: &SpanData, . ) -> Option<(Lrc, usize, BytePos, usize, BytePos)> { 774,228 ( 0.01%) self.time_stamp += 1; . . // Check if lo and hi are in the cached lines. 193,557 ( 0.00%) let lo_cache_idx = self.cache_entry_index(span_data.lo); 193,557 ( 0.00%) let hi_cache_idx = self.cache_entry_index(span_data.hi); . 328,044 ( 0.00%) if lo_cache_idx != -1 && hi_cache_idx != -1 { . // Cache hit for span lo and hi. Check if they belong to the same file. . let result = { 327,096 ( 0.00%) let lo = &self.line_cache[lo_cache_idx as usize]; . let hi = &self.line_cache[hi_cache_idx as usize]; . 981,288 ( 0.01%) if lo.file_index != hi.file_index { . return None; . } . . ( 327,096 ( 0.00%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 163,548 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . ) . }; . 163,548 ( 0.00%) self.line_cache[lo_cache_idx as usize].touch(self.time_stamp); 163,548 ( 0.00%) self.line_cache[hi_cache_idx as usize].touch(self.time_stamp); . 654,192 ( 0.01%) return Some(result); . } . . // No cache hit or cache hit for only one of span lo and hi. 59,070 ( 0.00%) let oldest = if lo_cache_idx != -1 || hi_cache_idx != -1 { . let avoid_idx = if lo_cache_idx != -1 { lo_cache_idx } else { hi_cache_idx }; . self.oldest_cache_entry_index_avoid(avoid_idx as usize) . } else { . self.oldest_cache_entry_index() . }; . . // If the entry doesn't point to the correct file, get the new file and index. . // Return early if the file containing beginning of span doesn't contain end of span. 240,048 ( 0.00%) let new_file_and_idx = if !file_contains(&self.line_cache[oldest].file, span_data.lo) { 47,395 ( 0.00%) let new_file_and_idx = self.file_for_position(span_data.lo)?; 56,874 ( 0.00%) if !file_contains(&new_file_and_idx.0, span_data.hi) { . return None; . } . 37,916 ( 0.00%) Some(new_file_and_idx) . } else { . let file = &self.line_cache[oldest].file; 61,590 ( 0.00%) if !file_contains(&file, span_data.hi) { . return None; . } . 61,590 ( 0.00%) None . }; . . // Update the cache entries. 150,045 ( 0.00%) let (lo_idx, hi_idx) = match (lo_cache_idx, hi_cache_idx) { . // Oldest cache entry is for span_data.lo line. . (-1, -1) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); . 72,672 ( 0.00%) if !lo.line.contains(&span_data.hi) { . let new_file_and_idx = Some((lo.file.clone(), lo.file_index)); . 
let next_oldest = self.oldest_cache_entry_index_avoid(oldest); . let hi = &mut self.line_cache[next_oldest]; . hi.update(new_file_and_idx, span_data.hi, self.time_stamp); . (oldest, next_oldest) . } else { . (oldest, oldest) . } . } . // Oldest cache entry is for span_data.lo line. . (-1, _) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); 948 ( 0.00%) let hi = &mut self.line_cache[hi_cache_idx as usize]; 1,422 ( 0.00%) hi.touch(self.time_stamp); . (oldest, hi_cache_idx as usize) . } . // Oldest cache entry is for span_data.hi line. . (_, -1) => { . let hi = &mut self.line_cache[oldest]; 5,311 ( 0.00%) hi.update(new_file_and_idx, span_data.hi, self.time_stamp); 10,622 ( 0.00%) let lo = &mut self.line_cache[lo_cache_idx as usize]; 21,244 ( 0.00%) lo.touch(self.time_stamp); . (lo_cache_idx as usize, oldest) . } . _ => { . panic!(); . } . }; . . let lo = &self.line_cache[lo_idx]; . let hi = &self.line_cache[hi_idx]; . . // Span lo and hi may equal line end when last line doesn't . // end in newline, hence the inclusive upper bounds below. 60,018 ( 0.00%) assert!(span_data.lo >= lo.line.start); 30,009 ( 0.00%) assert!(span_data.lo <= lo.line.end); 60,018 ( 0.00%) assert!(span_data.hi >= hi.line.start); 30,009 ( 0.00%) assert!(span_data.hi <= hi.line.end); 150,045 ( 0.00%) assert!(lo.file.contains(span_data.lo)); 90,027 ( 0.00%) assert!(lo.file.contains(span_data.hi)); 90,027 ( 0.00%) assert_eq!(lo.file_index, hi.file_index); . 90,027 ( 0.00%) Some(( 30,009 ( 0.00%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 30,009 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . )) 1,742,013 ( 0.02%) } . . fn cache_entry_index(&self, pos: BytePos) -> isize { . for (idx, cache_entry) in self.line_cache.iter().enumerate() { 1,977,075 ( 0.02%) if cache_entry.line.contains(&pos) { . return idx as isize; . } . } . . -1 . } . . fn oldest_cache_entry_index(&self) -> usize { . let mut oldest = 0; . . for idx in 1..self.line_cache.len() { 145,344 ( 0.00%) if self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp { . oldest = idx; . } . } . . oldest . } . . fn oldest_cache_entry_index_avoid(&self, avoid_idx: usize) -> usize { . let mut oldest = if avoid_idx != 0 { 0 } else { 1 }; . . for idx in 0..self.line_cache.len() { 40,648 ( 0.00%) if idx != avoid_idx 17,420 ( 0.00%) && self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp . { . oldest = idx; . } . } . . oldest . } . 47,395 ( 0.00%) fn file_for_position(&self, pos: BytePos) -> Option<(Lrc, usize)> { 9,479 ( 0.00%) if !self.source_map.files().is_empty() { 18,958 ( 0.00%) let file_idx = self.source_map.lookup_source_file_idx(pos); . let file = &self.source_map.files()[file_idx]; . 75,832 ( 0.00%) if file_contains(file, pos) { . return Some((file.clone(), file_idx)); . } . } . . None 47,395 ( 0.00%) } . } . . #[inline] . fn file_contains(file: &SourceFile, pos: BytePos) -> bool { . // `SourceMap::lookup_source_file_idx` and `SourceFile::contains` both consider the position . // one past the end of a file to belong to it. Normally, that's what we want. But for the . // purposes of converting a byte position to a line and column number, we can't come up with a . 
// line and column number if the file is empty, because an empty file doesn't contain any -- line 290 ---------------------------------------- 2,579,294 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs -------------------------------------------------------------------------------- Ir -- line 5 ---------------------------------------- . #[stable(feature = "alloc_system_type", since = "1.28.0")] . unsafe impl GlobalAlloc for System { . #[inline] . unsafe fn alloc(&self, layout: Layout) -> *mut u8 { . // jemalloc provides alignment less than MIN_ALIGN for small allocations. . // So only rely on MIN_ALIGN if size >= align. . // Also see and . // . 6,940,942 ( 0.06%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 5,205,705 ( 0.05%) libc::malloc(layout.size()) as *mut u8 . } else { . #[cfg(target_os = "macos")] . { . if layout.align() > (1 << 31) { . return ptr::null_mut(); . } . } . aligned_malloc(&layout) . } . } . . #[inline] . unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { . // See the comment above in `alloc` for why this check looks the way it does. 578,944 ( 0.01%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 868,416 ( 0.01%) libc::calloc(layout.size(), 1) as *mut u8 . } else { . let ptr = self.alloc(layout); . if !ptr.is_null() { . ptr::write_bytes(ptr, 0, layout.size()); . } . ptr . } . } . . #[inline] . unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { 1,879,961 ( 0.02%) libc::free(ptr as *mut libc::c_void) . } . . #[inline] . unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { 783,212 ( 0.01%) if layout.align() <= MIN_ALIGN && layout.align() <= new_size { 1,566,424 ( 0.01%) libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8 . } else { . realloc_fallback(self, ptr, layout, new_size) . } . } . } . . cfg_if::cfg_if! { . if #[cfg(any( -- line 56 ---------------------------------------- -- line 84 ---------------------------------------- . } else if #[cfg(target_os = "wasi")] { . #[inline] . unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { . libc::aligned_alloc(layout.align(), layout.size()) as *mut u8 . } . } else { . #[inline] . unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { 1 ( 0.00%) let mut out = ptr::null_mut(); . // posix_memalign requires that the alignment be a multiple of `sizeof(void*)`. . // Since these are all powers of 2, we can just use max. . let align = layout.align().max(crate::mem::size_of::()); 2 ( 0.00%) let ret = libc::posix_memalign(&mut out, align, layout.size()); 2 ( 0.00%) if ret != 0 { ptr::null_mut() } else { out as *mut u8 } . } . } . 
} 1 ( 0.00%) -------------------------------------------------------------------------------- The following files chosen for auto-annotation could not be found: -------------------------------------------------------------------------------- ./malloc/malloc.c ./stdlib/msort.c ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/strcmp-avx2.S ./string/../sysdeps/x86_64/multiarch/strlen-avx2.S /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc -------------------------------------------------------------------------------- Ir -------------------------------------------------------------------------------- 498,473,393 ( 4.42%) events annotated
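
The `sys/unix/alloc.rs` section above turns on a single predicate: plain `malloc` can be trusted only when the requested alignment is at most the platform minimum and no larger than the allocation itself; otherwise the aligned fallback (`posix_memalign`/`aligned_alloc`) is taken. Below is a minimal standalone sketch of that decision, assuming `MIN_ALIGN` is 16 (a common x86_64 value); it is an illustration, not the standard library's code.

const MIN_ALIGN: usize = 16; // assumed platform minimum; varies by target

// Returns true when plain `malloc` already satisfies the request, so no
// `posix_memalign`/`aligned_alloc` fallback is needed.
fn plain_malloc_suffices(size: usize, align: usize) -> bool {
    align <= MIN_ALIGN && align <= size
}

fn main() {
    assert!(plain_malloc_suffices(64, 8));   // ordinary allocation
    assert!(!plain_malloc_suffices(8, 32));  // over-aligned: fallback path
    assert!(!plain_malloc_suffices(4, 16));  // tiny but align-16: malloc may under-align
}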