--------------------------------------------------------------------------------
I1 cache:         65536 B, 64 B, 4-way associative
D1 cache:         32768 B, 64 B, 8-way associative
LL cache:         67108864 B, 64 B, 64-way associative
Command:          /usr/home/liquid/.rustup/toolchains/w-profiling/bin/rustc --crate-name blake2b_simd --edition=2018 src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C embed-bitcode=no -C debuginfo=2 --cfg feature="default" --cfg feature="std" -C metadata=5ee667cf1122ec2e -C extra-filename=-5ee667cf1122ec2e --out-dir /usr/home/liquid/tmp/.tmpK4m2sv/target/debug/deps -L dependency=/usr/home/liquid/tmp/.tmpK4m2sv/target/debug/deps --extern arrayref=/usr/home/liquid/tmp/.tmpK4m2sv/target/debug/deps/libarrayref-e2b5b027e8221563.rmeta --extern arrayvec=/usr/home/liquid/tmp/.tmpK4m2sv/target/debug/deps/libarrayvec-fd337bd02b727396.rmeta --extern constant_time_eq=/usr/home/liquid/tmp/.tmpK4m2sv/target/debug/deps/libconstant_time_eq-25ecde6309992ddc.rmeta -Adeprecated -Aunknown-lints -Zincremental-verify-ich
Data file:        results/cgout-w-profiling-blake2b_simd-1.0.0-Debug-Full
Events recorded:  Ir
Events shown:     Ir
Event sort order: Ir
Thresholds:       0.1
Include dirs:
User annotated:
Auto-annotation:  on
--------------------------------------------------------------------------------
Ir
--------------------------------------------------------------------------------
8,012,220,729 (100.0%)  PROGRAM TOTALS
--------------------------------------------------------------------------------
Ir                    file:function
--------------------------------------------------------------------------------
218,507,323 ( 2.73%)  ./malloc/malloc.c:_int_malloc
215,913,250 ( 2.69%)  ./malloc/malloc.c:_int_free
200,777,912 ( 2.51%)  ???:(anonymous namespace)::Verifier::visitInstruction(llvm::Instruction&)
157,690,937 ( 1.97%)  ???:llvm::SelectionDAG::Combine(llvm::CombineLevel, llvm::AAResults*, llvm::CodeGenOpt::Level)
133,126,243 ( 1.66%)  ./malloc/malloc.c:malloc
130,933,727 ( 1.63%)  ???:(anonymous namespace)::RegAllocFast::allocateBasicBlock(llvm::MachineBasicBlock&)
129,175,981 ( 1.61%)  ???:llvm::MachineInstr::addOperand(llvm::MachineFunction&, llvm::MachineOperand const&)
126,986,141 ( 1.58%)  ???:(anonymous namespace)::PruningFunctionCloner::CloneBlock(llvm::BasicBlock const*, llvm::ilist_iterator, false, true>, std::vector >&)
120,680,708 ( 1.51%)  ???:(anonymous namespace)::Verifier::visitMDNode(llvm::MDNode const&, (anonymous namespace)::Verifier::AreDebugLocsAllowed)
103,142,034 ( 1.29%)  ???:llvm::FPPassManager::runOnFunction(llvm::Function&)
95,301,124 ( 1.19%)  ???:llvm::raw_svector_ostream::write_impl(char const*, unsigned long)
82,208,926 ( 1.03%)  ???:llvm::InlineFunction(llvm::CallBase&, llvm::InlineFunctionInfo&, llvm::AAResults*, bool, llvm::Function*)
71,471,451 ( 0.89%)  ???:(anonymous namespace)::Verifier::visitCallBase(llvm::CallBase&)
69,608,438 ( 0.87%)  ???:(anonymous namespace)::VarLocBasedLDV::process(llvm::MachineInstr&, (anonymous namespace)::VarLocBasedLDV::OpenRangesSet&, (anonymous namespace)::VarLocBasedLDV::VarLocMap&, llvm::SmallVector<(anonymous namespace)::VarLocBasedLDV::TransferDebugPair, 4u>&)
66,056,626 ( 0.82%)  ???:llvm::MCExpr::evaluateAsRelocatableImpl(llvm::MCValue&, llvm::MCAssembler const*, llvm::MCAsmLayout const*, llvm::MCFixup const*, llvm::DenseMap, llvm::detail::DenseMapPair > const*, bool) const
63,626,297 ( 0.79%)  ./malloc/malloc.c:free
60,358,743 ( 0.75%)  ???:llvm::SmallSet >::insert(llvm::Register const&)
55,463,738 ( 0.69%)  ???:llvm::MetadataTracking::track(void*, llvm::Metadata&, llvm::PointerUnion)
52,290,664 ( 0.65%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_avx_unaligned_erms
49,951,758 ( 0.62%)  ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_erms
49,126,382 ( 0.61%)  ???:llvm::SelectionDAGISel::SelectCodeCommon(llvm::SDNode*, unsigned char const*, unsigned int)
46,624,657 ( 0.58%)  ???:llvm::DenseMapBase > >, llvm::WeakTrackingVH, llvm::DenseMapInfo > > >, llvm::detail::DenseMapPair > >, llvm::WeakTrackingVH> >, llvm::ValueMapCallbackVH > >, llvm::WeakTrackingVH, llvm::DenseMapInfo > > >, llvm::detail::DenseMapPair > >, llvm::WeakTrackingVH> >::moveFromOldBuckets(llvm::detail::DenseMapPair > >, llvm::WeakTrackingVH>*, llvm::detail::DenseMapPair > >, llvm::WeakTrackingVH>*)
45,086,260 ( 0.56%)  ???:llvm::TargetRegisterInfo::shouldRealignStack(llvm::MachineFunction const&) const
44,669,124 ( 0.56%)  ???:(anonymous namespace)::X86MCCodeEmitter::emitPrefixImpl(unsigned int&, llvm::MCInst const&, llvm::MCSubtargetInfo const&, llvm::raw_ostream&) const
44,531,749 ( 0.56%)  ???:(anonymous namespace)::TwoAddressInstructionPass::runOnMachineFunction(llvm::MachineFunction&)
42,834,148 ( 0.53%)  ???:llvm::SmallPtrSetImplBase::insert_imp_big(void const*)
40,887,219 ( 0.51%)  ???:llvm::AttributeList::addAttributes(llvm::LLVMContext&, unsigned int, llvm::AttrBuilder const&) const
40,325,709 ( 0.50%)  ./stdlib/msort.c:msort_with_tmp.part.0
39,721,084 ( 0.50%)  ???:llvm::ReplaceableMetadataImpl::replaceAllUsesWith(llvm::Metadata*)
38,921,614 ( 0.49%)  ???:llvm::ScheduleDAGSDNodes::BuildSchedUnits()
38,667,700 ( 0.48%)  ???:(anonymous namespace)::Verifier::verify(llvm::Function const&) [clone .llvm.4153962086227604281]
35,002,776 ( 0.44%)  ???:llvm::DataLayout::getAlignment(llvm::Type*, bool) const
34,827,000 ( 0.43%)  ???:llvm::calculateDbgEntityHistory(llvm::MachineFunction const*, llvm::TargetRegisterInfo const*, llvm::DbgValueHistoryMap&, llvm::DbgLabelInstrMap&)
33,265,154 ( 0.42%)  ???:llvm::ValueMapper::remapInstruction(llvm::Instruction&)
33,036,517 ( 0.41%)  ???:llvm::Instruction::clone() const
32,718,060 ( 0.41%)  ???:llvm::MCAsmLayout::getFragmentOffset(llvm::MCFragment const*) const
31,332,644 ( 0.39%)  ???:llvm::TargetLoweringBase::getTypeConversion(llvm::LLVMContext&, llvm::EVT) const
29,469,147 ( 0.37%)  ./malloc/malloc.c:malloc_consolidate
29,443,790 ( 0.37%)  ???:llvm::InstrEmitter::EmitMachineNode(llvm::SDNode*, bool, bool, llvm::DenseMap, llvm::detail::DenseMapPair >&)
29,371,739 ( 0.37%)  ???:(anonymous namespace)::X86MCInstLower::Lower(llvm::MachineInstr const*, llvm::MCInst&) const
29,077,393 ( 0.36%)  ???:llvm::FunctionLoweringInfo::set(llvm::Function const&, llvm::MachineFunction&, llvm::SelectionDAG*)
28,626,267 ( 0.36%)  ???:llvm::MCObjectStreamer::emitBytes(llvm::StringRef)
28,497,623 ( 0.36%)  ???:(anonymous namespace)::Mapper::mapValue(llvm::Value const*) [clone .llvm.12166235158543170009]
28,295,381 ( 0.35%)  ???:llvm::MDTuple::getImpl(llvm::LLVMContext&, llvm::ArrayRef, llvm::Metadata::StorageType, bool)
27,723,512 ( 0.35%)  ???:(anonymous namespace)::PEI::runOnMachineFunction(llvm::MachineFunction&)
27,253,872 ( 0.34%)  ???:(anonymous namespace)::Verifier::verifyFunctionAttrs(llvm::FunctionType*, llvm::AttributeList, llvm::Value const*, bool)
27,115,278 ( 0.34%)  ???:llvm::LexicalScopes::assignInstructionRanges(llvm::SmallVectorImpl >&, llvm::DenseMap, llvm::detail::DenseMapPair >&)
27,057,655 ( 0.34%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_sse2_unaligned_erms
26,111,615 ( 0.33%)  ???:llvm::SimplifyInstruction(llvm::Instruction*, llvm::SimplifyQuery const&, llvm::OptimizationRemarkEmitter*)
26,077,715 ( 0.33%)  ???:llvm::MDNode::dropReplaceableUses()
25,804,189 ( 0.32%)  ???:llvm::MachineFunction::CreateMachineInstr(llvm::MCInstrDesc const&, llvm::DebugLoc const&, bool)
25,424,859 ( 0.32%)  ???:llvm::DILocation::getImpl(llvm::LLVMContext&, unsigned int, unsigned int, llvm::Metadata*, llvm::Metadata*, bool, llvm::Metadata::StorageType, bool)
24,811,098 ( 0.31%)  ./malloc/malloc.c:unlink_chunk.constprop.0
24,760,789 ( 0.31%)  ???:(anonymous namespace)::X86MCCodeEmitter::encodeInstruction(llvm::MCInst const&, llvm::raw_ostream&, llvm::SmallVectorImpl&, llvm::MCSubtargetInfo const&) const
24,700,861 ( 0.31%)  ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S:__memcmp_avx2_movbe
23,607,164 ( 0.29%)  ???:llvm::SelectionDAG::Legalize()
23,547,935 ( 0.29%)  ???:llvm::AttributeList::hasFnAttribute(llvm::StringRef) const
23,314,175 ( 0.29%)  ???:llvm::DwarfDebug::beginInstruction(llvm::MachineInstr const*)
22,962,698 ( 0.29%)  ???:llvm::SelectionDAG::MorphNodeTo(llvm::SDNode*, unsigned int, llvm::SDVTList, llvm::ArrayRef)
22,490,185 ( 0.28%)  ???:llvm::PMDataManager::verifyPreservedAnalysis(llvm::Pass*)
21,829,591 ( 0.27%)  ???:llvm::DataLayout::getTypeSizeInBits(llvm::Type*) const
21,824,585 ( 0.27%)  ???:llvm::X86RegisterInfo::eliminateFrameIndex(llvm::MachineInstrBundleIterator, int, unsigned int, llvm::RegScavenger*) const
21,748,590 ( 0.27%)  ???:(anonymous namespace)::SelectionDAGLegalize::LegalizeOp(llvm::SDNode*) [clone .llvm.8386621111310650999]
21,701,876 ( 0.27%)  ???:llvm::ValueHandleBase::AddToUseList()
21,563,616 ( 0.27%)  ???:llvm::SelectionDAGISel::SelectAllBasicBlocks(llvm::Function const&)
21,489,586 ( 0.27%)  ???:llvm::DAGTypeLegalizer::run()
20,258,170 ( 0.25%)  ???:(anonymous namespace)::X86MCCodeEmitter::emitMemModRMByte(llvm::MCInst const&, unsigned int, unsigned int, unsigned long, bool, unsigned long, llvm::raw_ostream&, llvm::SmallVectorImpl&, llvm::MCSubtargetInfo const&, bool) const
19,105,712 ( 0.24%)  ???:bool llvm::DenseMapBase, llvm::detail::DenseSetPair >, llvm::DILocation*, llvm::detail::DenseSetEmpty, llvm::MDNodeInfo, llvm::detail::DenseSetPair >::LookupBucketFor(llvm::DILocation* const&, llvm::detail::DenseSetPair const*&) const
19,066,443 ( 0.24%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lev_distance.rs:rustc_span::lev_distance::lev_distance
18,924,556 ( 0.24%)  ???:llvm::X86FrameLowering::hasFP(llvm::MachineFunction const&) const
18,828,483 ( 0.23%)  ???:(anonymous namespace)::VarLocBasedLDV::collectIDsForRegs(llvm::SmallSet >&, llvm::SmallSet > const&, llvm::CoalescingBitVector const&, (anonymous namespace)::VarLocBasedLDV::VarLocMap const&)
18,588,212 ( 0.23%)  ???:llvm::TargetLoweringBase::ArgListEntry::setAttributes(llvm::CallBase const*, unsigned int)
18,491,182 ( 0.23%)  ???:(anonymous namespace)::RegAllocFast::markRegUsedInInstr(unsigned short)
18,182,709 ( 0.23%)  ???:(anonymous namespace)::Verifier::visitFunction(llvm::Function const&)::$_3::operator()(llvm::Instruction const&, llvm::MDNode const*) const
18,040,893 ( 0.23%)  ???:llvm::MCELFStreamer::emitInstToData(llvm::MCInst const&, llvm::MCSubtargetInfo const&)
17,905,063 ( 0.22%)  ???:llvm::StringMapImpl::LookupBucketFor(llvm::StringRef)
17,774,521 ( 0.22%)  ???:llvm::MCObjectStreamer::emitInstruction(llvm::MCInst const&, llvm::MCSubtargetInfo const&)
17,379,639 ( 0.22%)  ???:llvm::LLVMContextImpl::~LLVMContextImpl()
17,370,058 ( 0.22%)  ???:llvm::Type::isSizedDerivedType(llvm::SmallPtrSetImpl*) const
17,361,507 ( 0.22%)  ???:llvm::SelectionDAG::getConstant(llvm::ConstantInt const&, llvm::SDLoc const&, llvm::EVT, bool, bool)
17,325,030 ( 0.22%)  ???:llvm::MCAsmLayout::getSymbolOffset(llvm::MCSymbol const&) const
17,157,311 ( 0.21%)  ???:llvm::Instruction::getMetadataImpl(unsigned int) const
17,124,903 ( 0.21%)  ???:ScopedAliasMetadataDeepCloner::clone()
17,107,387 ( 0.21%)  /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc:operator new(unsigned long)
16,990,707 ( 0.21%)  ???:llvm::MCDwarfLineAddr::Encode(llvm::MCContext&, llvm::MCDwarfLineTableParams, long, unsigned long, llvm::raw_ostream&)
16,924,566 ( 0.21%)  ???:llvm::MetadataTracking::untrack(void*, llvm::Metadata&)
16,860,988 ( 0.21%)  ???:llvm::Instruction::~Instruction()
16,625,708 ( 0.21%)  ???:llvm::ValueHandleBase::RemoveFromUseList()
16,309,738 ( 0.20%)  ???:(anonymous namespace)::ScheduleDAGRRList::Schedule() [clone .llvm.6953762222372402862]
15,984,379 ( 0.20%)  ???:llvm::X86FrameLowering::getFrameIndexReference(llvm::MachineFunction const&, int, llvm::Register&) const
15,807,572 ( 0.20%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
15,736,279 ( 0.20%)  ???:llvm::BlockFrequencyInfoImpl::setBlockFreq(llvm::BasicBlock const*, unsigned long)
15,626,923 ( 0.20%)  ???:(anonymous namespace)::VarLocBasedLDV::ExtendRanges(llvm::MachineFunction&, llvm::TargetPassConfig*) [clone .llvm.4451506318407214204]
15,496,732 ( 0.19%)  ???:llvm::AsmPrinter::emitFunctionBody()
15,416,324 ( 0.19%)  ???:llvm::MDNode::setOperand(unsigned int, llvm::Metadata*)
15,254,145 ( 0.19%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::CalculateFromScratch(llvm::DominatorTreeBase&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
15,068,584 ( 0.19%)  ???:llvm::Instruction::setMetadata(unsigned int, llvm::MDNode*)
14,849,080 ( 0.19%)  ???:llvm::Value::deleteValue()
14,781,924 ( 0.18%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:rustc_span::lev_distance::lev_distance
14,672,889 ( 0.18%)  ???:llvm::Twine::printOneChild(llvm::raw_ostream&, llvm::Twine::Child, llvm::Twine::NodeKind) const
14,243,296 ( 0.18%)  ???:llvm::MCContext::getOrCreateSymbol(llvm::Twine const&)
14,126,811 ( 0.18%)  ???:(anonymous namespace)::DAGCombiner::combine(llvm::SDNode*)
14,123,189 ( 0.18%)  ???:(anonymous namespace)::RegAllocFast::setPhysReg(llvm::MachineInstr&, llvm::MachineOperand&, unsigned short)
13,834,156 ( 0.17%)  ???:llvm::Function::dropAllReferences()
13,672,168 ( 0.17%)  ???:llvm::LexicalScopes::extractLexicalScopes(llvm::SmallVectorImpl >&, llvm::DenseMap, llvm::detail::DenseMapPair >&)
13,408,324 ( 0.17%)  ???:llvm::FoldingSetNodeID::AddInteger(unsigned int)
13,396,547 ( 0.17%)  ???:(anonymous namespace)::X86FastISel::X86SelectAddress(llvm::Value const*, llvm::X86AddressMode&)
13,366,149 ( 0.17%)  ???:llvm::MDNode::MDNode(llvm::LLVMContext&, unsigned int, llvm::Metadata::StorageType, llvm::ArrayRef, llvm::ArrayRef)
13,176,817 ( 0.16%)  ???:(anonymous namespace)::X86AsmBackend::emitInstructionBegin(llvm::MCObjectStreamer&, llvm::MCInst const&)
13,137,471 ( 0.16%)  ???:(anonymous namespace)::RegAllocFast::freePhysReg(unsigned short)
12,908,201 ( 0.16%)  ???:llvm::LazyCallGraph::Node::populateSlow()
12,448,067 ( 0.16%)  ???:llvm::FoldingSetBase::FindNodeOrInsertPos(llvm::FoldingSetNodeID const&, void*&, llvm::FoldingSetBase::FoldingSetInfo const&)
12,268,506 ( 0.15%)  ???:(anonymous namespace)::RegAllocFast::allocVirtReg(llvm::MachineInstr&, (anonymous namespace)::RegAllocFast::LiveReg&, llvm::Register, bool)
12,155,238 ( 0.15%)  ???:llvm::SelectionDAG::getRegister(unsigned int, llvm::EVT)
12,132,621 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/str/validations.rs:rustc_span::lev_distance::lev_distance
12,073,297 ( 0.15%)  ???:(anonymous namespace)::X86AsmBackend::emitInstructionEnd(llvm::MCObjectStreamer&, llvm::MCInst const&)
11,912,953 ( 0.15%)  ???:llvm::ScheduleDAGSDNodes::AddSchedEdges()
11,793,600 ( 0.15%)  ???:llvm::MCAssembler::relaxDwarfLineAddr(llvm::MCAsmLayout&, llvm::MCDwarfLineAddrFragment&)
11,780,753 ( 0.15%)  ???:(anonymous namespace)::RegAllocFast::isRegUsedInInstr(unsigned short, bool) const
11,765,859 ( 0.15%)  ???:llvm::CoalescingBitVector::find(unsigned long) const
11,726,358 ( 0.15%)  ???:llvm::CallBase::getArgOperand(unsigned int) const
11,386,259 ( 0.14%)  ???:llvm::X86AsmPrinter::emitInstruction(llvm::MachineInstr const*)
11,201,704 ( 0.14%)  ???:llvm::FastISel::flushLocalValueMap()
10,892,380 ( 0.14%)  ???:llvm::SelectionDAG::AssignTopologicalOrder()
10,828,307 ( 0.14%)  ???:(anonymous namespace)::X86MCCodeEmitter::emitImmediate(llvm::MCOperand const&, llvm::SMLoc, unsigned int, llvm::MCFixupKind, unsigned long, llvm::raw_ostream&, llvm::SmallVectorImpl&, int) const
10,740,888 ( 0.13%)  ???:llvm::FastISel::recomputeInsertPt()
10,656,210 ( 0.13%)  ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_unaligned_erms
10,642,168 ( 0.13%)  ???:llvm::MachineInstr::AddRegOperandsToUseLists(llvm::MachineRegisterInfo&)
10,566,384 ( 0.13%)  ???:llvm::MetadataTracking::retrack(void*, llvm::Metadata&, void*)
10,390,950 ( 0.13%)  ???:(anonymous namespace)::RemoveRedundantDebugValues::runOnMachineFunction(llvm::MachineFunction&)
10,342,125 ( 0.13%)  ???:llvm::DebugHandlerBase::endInstruction()
10,309,062 ( 0.13%)  ./elf/dl-lookup.c:_dl_lookup_symbol_x
10,287,595 ( 0.13%)  ???:llvm::SelectionDAG::getNode(unsigned int, llvm::SDLoc const&, llvm::SDVTList, llvm::ArrayRef)
10,278,446 ( 0.13%)  ???:CC_X86_64_C(unsigned int, llvm::MVT, llvm::MVT, llvm::CCValAssign::LocInfo, llvm::ISD::ArgFlagsTy, llvm::CCState&)
10,008,856 ( 0.12%)  ???:llvm::DenseMapBase, llvm::detail::DenseMapPair >, llvm::MachineInstr const*, unsigned int, llvm::DenseMapInfo, llvm::detail::DenseMapPair >::FindAndConstruct(llvm::MachineInstr const*&&)
9,889,128 ( 0.12%)  ???:llvm::DebugLoc::appendInlinedAt(llvm::DebugLoc const&, llvm::DILocation*, llvm::LLVMContext&, llvm::DenseMap, llvm::detail::DenseMapPair >&)
9,877,130 ( 0.12%)  ???:(anonymous namespace)::CFIInstrInserter::runOnMachineFunction(llvm::MachineFunction&)
9,863,235 ( 0.12%)  ???:???
9,847,046 ( 0.12%)  ???:llvm::DenseMapBase, llvm::detail::DenseMapPair >, llvm::Value const*, llvm::Value const*, llvm::DenseMapInfo, llvm::detail::DenseMapPair >::moveFromOldBuckets(llvm::detail::DenseMapPair*, llvm::detail::DenseMapPair*)
9,794,652 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cmp.rs:rustc_span::lev_distance::lev_distance
9,720,150 ( 0.12%)  ???:llvm::MCRegAliasIterator::MCRegAliasIterator(llvm::MCRegister, llvm::MCRegisterInfo const*, bool)
9,517,802 ( 0.12%)  ???:llvm::MCELFStreamer::emitLabel(llvm::MCSymbol*, llvm::SMLoc)
9,450,957 ( 0.12%)  /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/src/c++98/../../../../../gcc-5.5.0/libstdc++-v3/src/c++98/tree.cc:std::_Rb_tree_insert_and_rebalance(bool, std::_Rb_tree_node_base*, std::_Rb_tree_node_base*, std::_Rb_tree_node_base&)
9,369,474 ( 0.12%)  ???:llvm::TargetInstrInfo::hasStoreToStackSlot(llvm::MachineInstr const&, llvm::SmallVectorImpl&) const
9,340,827 ( 0.12%)  ???:llvm::MCAssembler::layout(llvm::MCAsmLayout&)
9,332,111 ( 0.12%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:memcpy@GLIBC_2.2.5
9,297,854 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:::shallow_resolve_ty
8,924,276 ( 0.11%)  ???:llvm::ScheduleDAGSDNodes::EmitSchedule(llvm::MachineInstrBundleIterator&)
8,895,958 ( 0.11%)  ???:llvm::FastISel::selectInstruction(llvm::Instruction const*)
8,715,567 ( 0.11%)  ???:(anonymous namespace)::ELFObjectWriter::recordRelocation(llvm::MCAssembler&, llvm::MCAsmLayout const&, llvm::MCFragment const*, llvm::MCFixup const&, llvm::MCValue, unsigned long&) [clone .llvm.14145361893594770252]
8,706,002 ( 0.11%)  ???:llvm::MachineRegisterInfo::createVirtualRegister(llvm::TargetRegisterClass const*, llvm::StringRef)
8,698,268 ( 0.11%)  ???:llvm::TargetRegisterInfo::checkAllSuperRegsMarked(llvm::BitVector const&, llvm::ArrayRef) const
8,682,695 ( 0.11%)  ???:(anonymous namespace)::X86FastISel::fastLowerCall(llvm::FastISel::CallLoweringInfo&) [clone .llvm.4682347114745685263]
8,659,604 ( 0.11%)  ???:llvm::DenseMapBase, llvm::detail::DenseMapPair >, llvm::Value*, llvm::ValueHandleBase*, llvm::DenseMapInfo, llvm::detail::DenseMapPair >::moveFromOldBuckets(llvm::detail::DenseMapPair*, llvm::detail::DenseMapPair*)
8,592,216 ( 0.11%)  ???:llvm::coro::declaresIntrinsics(llvm::Module const&, std::initializer_list)
8,545,325 ( 0.11%)  ???:llvm::MCAssembler::layoutSectionOnce(llvm::MCAsmLayout&, llvm::MCSection&)
8,493,039 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs:::lookup_source_file_idx
8,409,960 ( 0.10%)  ???:int llvm::array_pod_sort_comparator(void const*, void const*)
8,387,689 ( 0.10%)  ???:llvm::MCStreamer::emitULEB128IntValue(unsigned long, unsigned int)
8,180,927 ( 0.10%)  ???:ScopedAliasMetadataDeepCloner::ScopedAliasMetadataDeepCloner(llvm::Function const*)
8,062,932 ( 0.10%)  ???:llvm::SelectionDAG::clear()
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs
--------------------------------------------------------------------------------
Ir

-- line 70 ----------------------------------------
.
.  impl<'a, T> $name<'a, T> {
.      // Helper function for creating a slice from the iterator.
.      #[inline(always)]
.      fn make_slice(&self) -> &'a [T] {
.          // SAFETY: the iterator was created from a slice with pointer
.          // `self.ptr` and length `len!(self)`. This guarantees that all
.          // the prerequisites for `from_raw_parts` are fulfilled.
46,207 ( 0.00%)  unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) }
.      }
.
.      // Helper function for moving the start of the iterator forwards by `offset` elements,
.      // returning the old start.
.      // Unsafe because the offset must not exceed `self.len()`.
.      #[inline(always)]
.      unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T {
.          if mem::size_of::<T>() == 0 {
.              zst_shrink!(self, offset);
.              self.ptr.as_ptr()
.          } else {
.              let old = self.ptr.as_ptr();
.              // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`,
.              // so this new pointer is inside `self` and thus guaranteed to be non-null.
294,004 ( 0.00%)  self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().offset(offset)) };
.              old
.          }
.      }
.
.      // Helper function for moving the end of the iterator backwards by `offset` elements,
.      // returning the new end.
.      // Unsafe because the offset must not exceed `self.len()`.
.      #[inline(always)]
-- line 101 ----------------------------------------
-- line 102 ----------------------------------------
.      unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T {
.          if mem::size_of::<T>() == 0 {
.              zst_shrink!(self, offset);
.              self.ptr.as_ptr()
.          } else {
.              // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`,
.              // which is guaranteed to not overflow an `isize`. Also, the resulting pointer
.              // is in bounds of `slice`, which fulfills the other requirements for `offset`.
1,312 ( 0.00%)  self.end = unsafe { self.end.offset(-offset) };
.              self.end
.          }
.      }
.  }
.
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl<T> ExactSizeIterator for $name<'_, T> {
.      #[inline(always)]
.      fn len(&self) -> usize {
111,684 ( 0.00%)  len!(self)
.      }
.
.      #[inline(always)]
.      fn is_empty(&self) -> bool {
.          is_empty!(self)
.      }
.  }
.
-- line 128 ----------------------------------------
-- line 134 ----------------------------------------
.      fn next(&mut self) -> Option<$elem> {
.          // could be implemented with slices, but this avoids bounds checks
.
.          // SAFETY: `assume` calls are safe since a slice's start pointer
.          // must be non-null, and slices over non-ZSTs must also have a
.          // non-null end pointer. The call to `next_unchecked!` is safe
.          // since we check if the iterator is empty first.
.          unsafe {
134,649 ( 0.00%)  assume(!self.ptr.as_ptr().is_null());
.              if mem::size_of::<T>() != 0 {
48,108 ( 0.00%)  assume(!self.end.is_null());
.              }
49,878,670 ( 0.62%)  if is_empty!(self) {
.                  None
.              } else {
34 ( 0.00%)  Some(next_unchecked!(self))
.              }
.          }
.      }
.
.      #[inline]
.      fn size_hint(&self) -> (usize, Option<usize>) {
1,109,536 ( 0.01%)  let exact = len!(self);
.          (exact, Some(exact))
.      }
.
.      #[inline]
.      fn count(self) -> usize {
.          len!(self)
.      }
.
.      #[inline]
.      fn nth(&mut self, n: usize) -> Option<$elem> {
34,388 ( 0.00%)  if n >= len!(self) {
.              // This iterator is now empty.
.              if mem::size_of::<T>() == 0 {
.                  // We have to do it this way as `ptr` may never be 0, but `end`
.                  // could be (due to wrapping).
.                  self.end = self.ptr.as_ptr();
.              } else {
.                  // SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr
.                  unsafe {
-- line 175 ----------------------------------------
-- line 203 ----------------------------------------
.      // faster to compile.
.      #[inline]
.      fn for_each<F>(mut self, mut f: F)
.      where
.          Self: Sized,
.          F: FnMut(Self::Item),
.      {
.          while let Some(x) = self.next() {
3,946 ( 0.00%)  f(x);
.          }
.      }
.
.      // We override the default implementation, which uses `try_fold`,
.      // because this simple implementation generates less LLVM IR and is
.      // faster to compile.
.      #[inline]
.      fn all<F>(&mut self, mut f: F) -> bool
.      where
.          Self: Sized,
.          F: FnMut(Self::Item) -> bool,
.      {
1,763 ( 0.00%)  while let Some(x) = self.next() {
9,483 ( 0.00%)  if !f(x) {
.                  return false;
.              }
.          }
.          true
.      }
.
.      // We override the default implementation, which uses `try_fold`,
.      // because this simple implementation generates less LLVM IR and is
.      // faster to compile.
.      #[inline]
8 ( 0.00%)  fn any<F>(&mut self, mut f: F) -> bool
.      where
.          Self: Sized,
.          F: FnMut(Self::Item) -> bool,
.      {
377,751 ( 0.00%)  while let Some(x) = self.next() {
306,011 ( 0.00%)  if f(x) {
.                  return true;
.              }
.          }
.          false
8 ( 0.00%)  }
.
.      // We override the default implementation, which uses `try_fold`,
.      // because this simple implementation generates less LLVM IR and is
.      // faster to compile.
.      #[inline]
.      fn find<P>(&mut self, mut predicate: P) -> Option<Self::Item>
.      where
.          Self: Sized,
.          P: FnMut(&Self::Item) -> bool,
.      {
116,925 ( 0.00%)  while let Some(x) = self.next() {
81,755 ( 0.00%)  if predicate(&x) {
21 ( 0.00%)  return Some(x);
.              }
.          }
.          None
751 ( 0.00%)  }
.
.      // We override the default implementation, which uses `try_fold`,
.      // because this simple implementation generates less LLVM IR and is
.      // faster to compile.
.      #[inline]
.      fn find_map<B, F>(&mut self, mut f: F) -> Option<B>
.      where
.          Self: Sized,
.          F: FnMut(Self::Item) -> Option<B>,
.      {
18,938 ( 0.00%)  while let Some(x) = self.next() {
101,535 ( 0.00%)  if let Some(y) = f(x) {
6,810 ( 0.00%)  return Some(y);
.              }
.          }
.          None
2,060 ( 0.00%)  }
.
.      // We override the default implementation, which uses `try_fold`,
.      // because this simple implementation generates less LLVM IR and is
.      // faster to compile. Also, the `assume` avoids a bounds check.
.      #[inline]
.      #[rustc_inherit_overflow_checks]
.      fn position<P>(&mut self, mut predicate: P) -> Option<usize> where
.          Self: Sized,
.          P: FnMut(Self::Item) -> bool,
.      {
.          let n = len!(self);
.          let mut i = 0;
.          while let Some(x) = self.next() {
787,542 ( 0.01%)  if predicate(x) {
.                  // SAFETY: we are guaranteed to be in bounds by the loop invariant:
.                  // when `i >= n`, `self.next()` returns `None` and the loop breaks.
.                  unsafe { assume(i < n) };
.                  return Some(i);
.              }
.              i += 1;
.          }
.          None
-- line 303 ----------------------------------------
-- line 308 ----------------------------------------
.      // faster to compile. Also, the `assume` avoids a bounds check.
.      #[inline]
.      fn rposition<P>(&mut self, mut predicate: P) -> Option<usize> where
.          P: FnMut(Self::Item) -> bool,
.          Self: Sized + ExactSizeIterator + DoubleEndedIterator
.      {
.          let n = len!(self);
.          let mut i = n;
38,484 ( 0.00%)  while let Some(x) = self.next_back() {
506,342 ( 0.01%)  i -= 1;
414,035 ( 0.01%)  if predicate(x) {
.                  // SAFETY: `i` must be lower than `n` since it starts at `n`
.                  // and is only decreasing.
.                  unsafe { assume(i < n) };
.                  return Some(i);
.              }
.          }
.          None
.      }
-- line 326 ----------------------------------------
-- line 332 ----------------------------------------
.          // the returned references is guaranteed to refer to an element
.          // of the slice and thus guaranteed to be valid.
.          //
.          // Also note that the caller also guarantees that we're never
.          // called with the same index again, and that no other methods
.          // that will access this subslice are called, so it is valid
.          // for the returned reference to be mutable in the case of
.          // `IterMut`
83,032 ( 0.00%)  unsafe { & $( $mut_ )? * self.ptr.as_ptr().add(idx) }
.      }
.
.      $($extra)*
.  }
.
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl<'a, T> DoubleEndedIterator for $name<'a, T> {
.      #[inline]
-- line 348 ----------------------------------------
-- line 349 ----------------------------------------
.      fn next_back(&mut self) -> Option<$elem> {
.          // could be implemented with slices, but this avoids bounds checks
.
.          // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null,
.          // and slices over non-ZSTs must also have a non-null end pointer.
.          // The call to `next_back_unchecked!` is safe since we check if the iterator is
.          // empty first.
.          unsafe {
2,156 ( 0.00%)  assume(!self.ptr.as_ptr().is_null());
.              if mem::size_of::<T>() != 0 {
2,179 ( 0.00%)  assume(!self.end.is_null());
.              }
994,987 ( 0.01%)  if is_empty!(self) {
.                  None
.              } else {
.                  Some(next_back_unchecked!(self))
.              }
.          }
.      }
.
.      #[inline]
-- line 369 ----------------------------------------
8,170,692 ( 0.10%)
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cmp.rs
--------------------------------------------------------------------------------
Ir

-- line 223 ----------------------------------------
.      fn eq(&self, other: &Rhs) -> bool;
.
.      /// This method tests for `!=`.
.      #[inline]
.      #[must_use]
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[default_method_body_is_const]
.      fn ne(&self, other: &Rhs) -> bool {
19,312 ( 0.00%)  !self.eq(other)
.      }
.  }
.
.  /// Derive macro generating an impl of the trait `PartialEq`.
.  #[rustc_builtin_macro]
.  #[stable(feature = "builtin_macro_prelude", since = "1.38.0")]
.  #[allow_internal_unstable(core_intrinsics, structural_match)]
.  pub macro PartialEq($item:item) {
-- line 239 ----------------------------------------
-- line 328 ----------------------------------------
.  /// assert_eq!(Ordering::Less, result);
.  ///
.  /// let result = 1.cmp(&1);
.  /// assert_eq!(Ordering::Equal, result);
.  ///
.  /// let result = 2.cmp(&1);
.  /// assert_eq!(Ordering::Greater, result);
.  /// ```
21,054 ( 0.00%)  #[derive(Clone, Copy, PartialEq, Debug, Hash)]
.  #[stable(feature = "rust1", since = "1.0.0")]
.  #[repr(i8)]
.  pub enum Ordering {
.      /// An ordering where a compared value is less than another.
.      #[stable(feature = "rust1", since = "1.0.0")]
.      Less = -1,
.      /// An ordering where a compared value is equal to another.
.      #[stable(feature = "rust1", since = "1.0.0")]
-- line 344 ----------------------------------------
-- line 569 ----------------------------------------
.      /// let result = x.0.cmp(&y.0).then_with(|| x.1.cmp(&y.1)).then_with(|| x.2.cmp(&y.2));
.      ///
.      /// assert_eq!(result, Ordering::Less);
.      /// ```
.      #[inline]
.      #[must_use]
.      #[stable(feature = "ordering_chaining", since = "1.17.0")]
.      pub fn then_with<F: FnOnce() -> Ordering>(self, f: F) -> Ordering {
53,967 ( 0.00%)  match self {
.              Equal => f(),
.              _ => self,
.          }
.      }
.  }
.
.  /// A helper struct for reverse ordering.
.  ///
-- line 585 ----------------------------------------
-- line 792 ----------------------------------------
.      /// ```
.      #[stable(feature = "ord_max_min", since = "1.21.0")]
.      #[inline]
.      #[must_use]
.      fn max(self, other: Self) -> Self
.      where
.          Self: Sized,
.      {
112,620 ( 0.00%)  max_by(self, other, Ord::cmp)
.      }
.
.      /// Compares and returns the minimum of two values.
.      ///
.      /// Returns the first argument if the comparison determines them to be equal.
.      ///
.      /// # Examples
.      ///
-- line 808 ----------------------------------------
-- line 812 ----------------------------------------
.      /// ```
.      #[stable(feature = "ord_max_min", since = "1.21.0")]
.      #[inline]
.      #[must_use]
.      fn min(self, other: Self) -> Self
.      where
.          Self: Sized,
.      {
1,104 ( 0.00%)  min_by(self, other, Ord::cmp)
.      }
.
.      /// Restrict a value to a certain interval.
.      ///
.      /// Returns `max` if `self` is greater than `max`, and `min` if `self` is
.      /// less than `min`. Otherwise this returns `self`.
.      ///
.      /// # Panics
-- line 828 ----------------------------------------
-- line 1097 ----------------------------------------
.      /// let result = 2.0 < 1.0;
.      /// assert_eq!(result, false);
.      /// ```
.      #[inline]
.      #[must_use]
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[default_method_body_is_const]
.      fn lt(&self, other: &Rhs) -> bool {
67,713 ( 0.00%)  matches!(self.partial_cmp(other), Some(Less))
.      }
.
.      /// This method tests less than or equal to (for `self` and `other`) and is used by the `<=`
.      /// operator.
.      ///
.      /// # Examples
.      ///
.      /// ```
-- line 1113 ----------------------------------------
-- line 1121 ----------------------------------------
.      #[must_use]
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[default_method_body_is_const]
.      fn le(&self, other: &Rhs) -> bool {
.          // Pattern `Some(Less | Eq)` optimizes worse than negating `None | Some(Greater)`.
.          // FIXME: The root cause was fixed upstream in LLVM with:
.          // https://github.com/llvm/llvm-project/commit/9bad7de9a3fb844f1ca2965f35d0c2a3d1e11775
.          // Revert this workaround once support for LLVM 12 gets dropped.
428,777 ( 0.01%)  !matches!(self.partial_cmp(other), None | Some(Greater))
.      }
.
.      /// This method tests greater than (for `self` and `other`) and is used by the `>` operator.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let result = 1.0 > 2.0;
-- line 1137 ----------------------------------------
-- line 1140 ----------------------------------------
.      /// let result = 2.0 > 2.0;
.      /// assert_eq!(result, false);
.      /// ```
.      #[inline]
.      #[must_use]
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[default_method_body_is_const]
.      fn gt(&self, other: &Rhs) -> bool {
979,382 ( 0.01%)  matches!(self.partial_cmp(other), Some(Greater))
.      }
.
.      /// This method tests greater than or equal to (for `self` and `other`) and is used by the `>=`
.      /// operator.
.      ///
.      /// # Examples
.      ///
.      /// ```
-- line 1156 ----------------------------------------
-- line 1160 ----------------------------------------
.      /// let result = 2.0 >= 2.0;
.      /// assert_eq!(result, true);
.      /// ```
.      #[inline]
.      #[must_use]
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[default_method_body_is_const]
.      fn ge(&self, other: &Rhs) -> bool {
871 ( 0.00%)  matches!(self.partial_cmp(other), Some(Greater | Equal))
.      }
.  }
.
.  /// Derive macro generating an impl of the trait `PartialOrd`.
.  #[rustc_builtin_macro]
.  #[stable(feature = "builtin_macro_prelude", since = "1.38.0")]
.  #[allow_internal_unstable(core_intrinsics)]
.  pub macro PartialOrd($item:item) {
-- line 1176 ----------------------------------------
-- line 1210 ----------------------------------------
.  ///
.  /// assert_eq!(cmp::min_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 1);
.  /// assert_eq!(cmp::min_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2);
.  /// ```
.  #[inline]
.  #[must_use]
.  #[stable(feature = "cmp_min_max_by", since = "1.53.0")]
.  pub fn min_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
5,186,350 ( 0.06%)  match compare(&v1, &v2) {
.          Ordering::Less | Ordering::Equal => v1,
.          Ordering::Greater => v2,
.      }
88 ( 0.00%)  }
.
.  /// Returns the element that gives the minimum value from the specified function.
.  ///
.  /// Returns the first argument if the comparison determines them to be equal.
.  ///
.  /// # Examples
.  ///
.  /// ```
-- line 1230 ----------------------------------------
-- line 1231 ----------------------------------------
.  /// use std::cmp;
.  ///
.  /// assert_eq!(cmp::min_by_key(-2, 1, |x: &i32| x.abs()), 1);
.  /// assert_eq!(cmp::min_by_key(-2, 2, |x: &i32| x.abs()), -2);
.  /// ```
.  #[inline]
.  #[must_use]
.  #[stable(feature = "cmp_min_max_by", since = "1.53.0")]
528 ( 0.00%)  pub fn min_by_key<T, F: FnMut(&T) -> K, K: Ord>(v1: T, v2: T, mut f: F) -> T {
352 ( 0.00%)  min_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2)))
352 ( 0.00%)  }
.
.  /// Compares and returns the maximum of two values.
.  ///
.  /// Returns the second argument if the comparison determines them to be equal.
.  ///
.  /// Internally uses an alias to [`Ord::max`].
.  ///
.  /// # Examples
-- line 1249 ----------------------------------------
-- line 1273 ----------------------------------------
.  ///
.  /// assert_eq!(cmp::max_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2);
.  /// assert_eq!(cmp::max_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 2);
.  /// ```
.  #[inline]
.  #[must_use]
.  #[stable(feature = "cmp_min_max_by", since = "1.53.0")]
.  pub fn max_by<T, F: FnOnce(&T, &T) -> Ordering>(v1: T, v2: T, compare: F) -> T {
568,213 ( 0.01%)  match compare(&v1, &v2) {
285 ( 0.00%)  Ordering::Less | Ordering::Equal => v2,
.          Ordering::Greater => v1,
.      }
.  }
.
.  /// Returns the element that gives the maximum value from the specified function.
.  ///
.  /// Returns the second argument if the comparison determines them to be equal.
.  ///
-- line 1290 ----------------------------------------
-- line 1308 ----------------------------------------
.  use crate::cmp::Ordering::{self, Equal, Greater, Less};
.  use crate::hint::unreachable_unchecked;
.
.  macro_rules! partial_eq_impl {
.      ($($t:ty)*) => ($(
.          #[stable(feature = "rust1", since = "1.0.0")]
.          impl PartialEq for $t {
.              #[inline]
146,135 ( 0.00%)  fn eq(&self, other: &$t) -> bool { (*self) == (*other) }
.              #[inline]
74,819 ( 0.00%)  fn ne(&self, other: &$t) -> bool { (*self) != (*other) }
.          }
.      )*)
.  }
.
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl PartialEq for () {
.      #[inline]
.      fn eq(&self, _other: &()) -> bool {
-- line 1326 ----------------------------------------
-- line 1392 ----------------------------------------
.      ($($t:ty)*) => ($(
.          #[stable(feature = "rust1", since = "1.0.0")]
.          impl PartialOrd for $t {
.              #[inline]
.              fn partial_cmp(&self, other: &$t) -> Option<Ordering> {
.                  Some(self.cmp(other))
.              }
.              #[inline]
5,041,961 ( 0.06%)  fn lt(&self, other: &$t) -> bool { (*self) < (*other) }
.              #[inline]
32,454 ( 0.00%)  fn le(&self, other: &$t) -> bool { (*self) <= (*other) }
.              #[inline]
84 ( 0.00%)  fn ge(&self, other: &$t) -> bool { (*self) >= (*other) }
.              #[inline]
.              fn gt(&self, other: &$t) -> bool { (*self) > (*other) }
.          }
.
.          #[stable(feature = "rust1", since = "1.0.0")]
.          impl Ord for $t {
.              #[inline]
.              fn cmp(&self, other: &$t) -> Ordering {
.                  // The order here is important to generate more optimal assembly.
.                  // See for more info.
2,865,372 ( 0.04%)  if *self < *other { Less }
.                  else if *self == *other { Equal }
.                  else { Greater }
.              }
.          }
.      )*)
.  }
.
.  #[stable(feature = "rust1", since = "1.0.0")]
-- line 1423 ----------------------------------------
-- line 1430 ----------------------------------------
.
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl Ord for bool {
.      #[inline]
.      fn cmp(&self, other: &bool) -> Ordering {
.          // Casting to i8's and converting the difference to an Ordering generates
.          // more optimal assembly.
.          // See for more info.
910 ( 0.00%)  match (*self as i8) - (*other as i8) {
.              -1 => Less,
.              0 => Equal,
.              1 => Greater,
.              // SAFETY: bool as i8 returns 0 or 1, so the difference can't be anything else
.              _ => unsafe { unreachable_unchecked() },
.          }
.      }
.  }
-- line 1446 ----------------------------------------
-- line 1474 ----------------------------------------
.  // & pointers
.
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl<A: ?Sized, B: ?Sized> PartialEq<&B> for &A
.  where
.      A: PartialEq<B>,
.  {
.      #[inline]
43,542 ( 0.00%)  fn eq(&self, other: &&B) -> bool {
912,108 ( 0.01%)  PartialEq::eq(*self, *other)
157,925 ( 0.00%)  }
.      #[inline]
540 ( 0.00%)  fn ne(&self, other: &&B) -> bool {
66 ( 0.00%)  PartialEq::ne(*self, *other)
19,706 ( 0.00%)  }
.  }
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl<A: ?Sized, B: ?Sized> PartialOrd<&B> for &A
.  where
.      A: PartialOrd<B>,
.  {
.      #[inline]
.      fn partial_cmp(&self, other: &&B) -> Option<Ordering> {
-- line 1496 ----------------------------------------
-- line 1516 ----------------------------------------
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl<A: ?Sized> Ord for &A
.  where
.      A: Ord,
.  {
.      #[inline]
.      fn cmp(&self, other: &Self) -> Ordering {
.          Ord::cmp(*self, *other)
3,939 ( 0.00%)  }
.  }
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl<A: ?Sized> Eq for &A where A: Eq {}
.
.  // &mut pointers
.
.  #[stable(feature = "rust1", since = "1.0.0")]
.  impl<A: ?Sized, B: ?Sized> PartialEq<&mut B> for &mut A
-- line 1532 ----------------------------------------
7,179,762 ( 0.09%)
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs
--------------------------------------------------------------------------------
Ir

-- line 141 ----------------------------------------
.      /// ```
.      /// let a = [1, 2, 3];
.      /// assert!(!a.is_empty());
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.39.0")]
.      #[inline]
.      pub const fn is_empty(&self) -> bool {
768,561 ( 0.01%)  self.len() == 0
.      }
.
.      /// Returns the first element of the slice, or `None` if it is empty.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let v = [10, 40, 30];
-- line 157 ----------------------------------------
-- line 159 ----------------------------------------
.      ///
.      /// let w: &[i32] = &[];
.      /// assert_eq!(None, w.first());
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")]
.      #[inline]
.      pub const fn first(&self) -> Option<&T> {
6,925 ( 0.00%)  if let [first, ..] = self { Some(first) } else { None }
.      }
.
.      /// Returns a mutable pointer to the first element of the slice, or `None` if it is empty.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let x = &mut [0, 1, 2];
-- line 175 ----------------------------------------
-- line 178 ----------------------------------------
.      ///     *first = 5;
.      /// }
.      /// assert_eq!(x, &[5, 1, 2]);
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")]
.      #[inline]
.      pub const fn first_mut(&mut self) -> Option<&mut T> {
196 ( 0.00%)  if let [first, ..] = self { Some(first) } else { None }
.      }
.
.      /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let x = &[0, 1, 2];
-- line 194 ----------------------------------------
-- line 197 ----------------------------------------
.      ///     assert_eq!(first, &0);
.      ///     assert_eq!(elements, &[1, 2]);
.      /// }
.      /// ```
.      #[stable(feature = "slice_splits", since = "1.5.0")]
.      #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")]
.      #[inline]
.      pub const fn split_first(&self) -> Option<(&T, &[T])> {
4 ( 0.00%)  if let [first, tail @ ..] = self { Some((first, tail)) } else { None }
.      }
.
.      /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let x = &mut [0, 1, 2];
-- line 213 ----------------------------------------
-- line 237 ----------------------------------------
.      ///     assert_eq!(last, &2);
.      ///     assert_eq!(elements, &[0, 1]);
.      /// }
.      /// ```
.      #[stable(feature = "slice_splits", since = "1.5.0")]
.      #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")]
.      #[inline]
.      pub const fn split_last(&self) -> Option<(&T, &[T])> {
11,784 ( 0.00%)  if let [init @ .., last] = self { Some((last, init)) } else { None }
.      }
.
.      /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let x = &mut [0, 1, 2];
-- line 253 ----------------------------------------
-- line 276 ----------------------------------------
.      ///
.      /// let w: &[i32] = &[];
.      /// assert_eq!(None, w.last());
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")]
.      #[inline]
.      pub const fn last(&self) -> Option<&T> {
386,673 ( 0.00%)  if let [.., last] = self { Some(last) } else { None }
.      }
.
.      /// Returns a mutable pointer to the last item in the slice.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let x = &mut [0, 1, 2];
-- line 292 ----------------------------------------
-- line 295 ----------------------------------------
.      ///     *last = 10;
.      /// }
.      /// assert_eq!(x, &[0, 1, 10]);
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")]
.      #[inline]
.      pub const fn last_mut(&mut self) -> Option<&mut T> {
502,636 ( 0.01%)  if let [.., last] = self { Some(last) } else { None }
.      }
.
.      /// Returns a reference to an element or subslice depending on the type of
.      /// index.
.      ///
.      /// - If given a position, returns a reference to the element at that
.      ///   position or `None` if out of bounds.
.      /// - If given a range, returns the subslice corresponding to that range,
-- line 311 ----------------------------------------
-- line 448 ----------------------------------------
.      /// }
.      /// ```
.      ///
.      /// [`as_mut_ptr`]: slice::as_mut_ptr
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")]
.      #[inline]
.      pub const fn as_ptr(&self) -> *const T {
946,292 ( 0.01%)  self as *const [T] as *const T
.      }
.
.      /// Returns an unsafe mutable pointer to the slice's buffer.
.      ///
.      /// The caller must ensure that the slice outlives the pointer this
.      /// function returns, or else it will end up pointing to garbage.
.      ///
.      /// Modifying the container referenced by this slice may cause its buffer
-- line 464 ----------------------------------------
-- line 476 ----------------------------------------
.      ///     }
.      /// }
.      /// assert_eq!(x, &[3, 4, 6]);
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")]
.      #[inline]
.      pub const fn as_mut_ptr(&mut self) -> *mut T {
8 ( 0.00%)  self as *mut [T] as *mut T
.      }
.
.      /// Returns the two raw pointers spanning the slice.
.      ///
.      /// The returned range is half-open, which means that the end pointer
.      /// points *one past* the last element of the slice. This way, an empty
.      /// slice is represented by two equal pointers, and the difference between
.      /// the two pointers represents the size of the slice.
-- line 492 ----------------------------------------
-- line 582 ----------------------------------------
.      /// v.swap(2, 4);
.      /// assert!(v == ["a", "b", "e", "d", "c"]);
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[rustc_const_unstable(feature = "const_swap", issue = "83163")]
.      #[inline]
.      #[track_caller]
.      pub const fn swap(&mut self, a: usize, b: usize) {
18,296 ( 0.00%)  let _ = &self[a];
51,676 ( 0.00%)  let _ = &self[b];
.
.          // SAFETY: we just checked that both `a` and `b` are in bounds
.          unsafe { self.swap_unchecked(a, b) }
.      }
.
.      /// Swaps two elements in the slice, without doing bounds checking.
.      ///
.      /// For a safe alternative see [`swap`].
-- line 599 ----------------------------------------
-- line 677 ----------------------------------------
.
.          // Because this function is first compiled in isolation,
.          // this check tells LLVM that the indexing below is
.          // in-bounds. Then after inlining -- once the actual
.          // lengths of the slices are known -- it's removed.
.          let (a, b) = (&mut a[..n], &mut b[..n]);
.
.          for i in 0..n {
4,011 ( 0.00%)  mem::swap(&mut a[i], &mut b[n - 1 - i]);
.          }
.      }
.  }
.
.      /// Returns an iterator over the slice.
.      ///
.      /// # Examples
.      ///
-- line 693 ----------------------------------------
-- line 1499 ----------------------------------------
.      ///     assert_eq!(left, [1, 2, 3, 4, 5, 6]);
.      ///     assert_eq!(right, []);
.      /// }
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[inline]
.      #[track_caller]
.      pub fn split_at(&self, mid: usize) -> (&[T], &[T]) {
336 ( 0.00%)  assert!(mid <= self.len());
.          // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which
.          // fulfills the requirements of `from_raw_parts_mut`.
.          unsafe { self.split_at_unchecked(mid) }
.      }
.
.      /// Divides one mutable slice into two at an index.
.      ///
.      /// The first will contain all indices from `[0, mid)` (excluding
-- line 1515 ----------------------------------------
-- line 1530 ----------------------------------------
.      /// left[1] = 2;
.      /// right[1] = 4;
.      /// assert_eq!(v, [1, 2, 3, 4, 5, 6]);
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[inline]
.      #[track_caller]
.      pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
174,818 ( 0.00%)  assert!(mid <= self.len());
.          // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which
.          // fulfills the requirements of `from_raw_parts_mut`.
.          unsafe { self.split_at_mut_unchecked(mid) }
.      }
.
.      /// Divides one slice into two at an index, without doing bounds checking.
.      ///
.      /// The first will contain all indices from `[0, mid)` (excluding
-- line 1546 ----------------------------------------
-- line 1628 ----------------------------------------
.      pub unsafe fn split_at_mut_unchecked(&mut self, mid: usize) -> (&mut [T], &mut [T]) {
.          let len = self.len();
.          let ptr = self.as_mut_ptr();
.
.          // SAFETY: Caller has to check that `0 <= mid <= self.len()`.
.          //
.          // `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference
.          // is fine.
72,769 ( 0.00%)  unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) }
.      }
.
.      /// Divides one slice into an array and a remainder slice at an index.
.      ///
.      /// The array will contain all indices from `[0, N)` (excluding
.      /// the index `N` itself) and the slice will contain all
.      /// indices from `[N, len)` (excluding the index `len` itself).
.      ///
-- line 1644 ----------------------------------------
-- line 2113 ----------------------------------------
.      /// assert!(!v.iter().any(|e| e == "hi"));
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      #[inline]
.      pub fn contains(&self, x: &T) -> bool
.      where
.          T: PartialEq,
.      {
88 ( 0.00%)  cmp::SliceContains::slice_contains(x, self)
.      }
.
.      /// Returns `true` if `needle` is a prefix of the slice.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let v = [10, 40, 30];
-- line 2129 ----------------------------------------
-- line 2142 ----------------------------------------
.      /// assert!(v.starts_with(&[]));
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      pub fn starts_with(&self, needle: &[T]) -> bool
.      where
.          T: PartialEq,
.      {
.          let n = needle.len();
5,542 ( 0.00%)  self.len() >= n && needle == &self[..n]
.      }
.
.      /// Returns `true` if `needle` is a suffix of the slice.
.      ///
.      /// # Examples
.      ///
.      /// ```
.      /// let v = [10, 40, 30];
-- line 2158 ----------------------------------------
-- line 2171 ----------------------------------------
.      /// assert!(v.ends_with(&[]));
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      pub fn ends_with(&self, needle: &[T]) -> bool
.      where
.          T: PartialEq,
.      {
.          let (m, n) = (self.len(), needle.len());
18,058 ( 0.00%)  m >= n && needle == &self[m - n..]
.      }
.
.      /// Returns a subslice with the prefix removed.
.      ///
.      /// If the slice starts with `prefix`, returns the subslice after the prefix, wrapped in `Some`.
.      /// If `prefix` is empty, simply returns the original slice.
.      ///
.      /// If the slice does not start with `prefix`, returns `None`.
-- line 2187 ----------------------------------------
-- line 2293 ----------------------------------------
.      /// s.insert(idx, num);
.      /// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]);
.      /// ```
.      #[stable(feature = "rust1", since = "1.0.0")]
.      pub fn binary_search(&self, x: &T) -> Result<usize, usize>
.      where
.          T: Ord,
.      {
3 ( 0.00%)  self.binary_search_by(|p| p.cmp(x))
.      }
.
.      /// Binary searches this sorted slice with a comparator function.
.      ///
.      /// The comparator function should implement an order consistent
.      /// with the sort order of the underlying slice, returning an
.      /// order code that indicates whether its argument is `Less`,
.      /// `Equal` or `Greater` the desired target.
-- line 2309 ----------------------------------------
-- line 2345 ----------------------------------------
.      #[inline]
.      pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result<usize, usize>
.      where
.          F: FnMut(&'a T) -> Ordering,
.      {
.          let mut size = self.len();
.          let mut left = 0;
.          let mut right = size;
3,883,103 ( 0.05%)  while left < right {
5,908,046 ( 0.07%)  let mid = left + size / 2;
.
.              // SAFETY: the call is made safe by the following invariants:
.              // - `mid >= 0`
.              // - `mid < size`: `mid` is limited by `[left; right)` bound.
872,082 ( 0.01%)  let cmp = f(unsafe { self.get_unchecked(mid) });
.
.              // The reason why we use if/else control flow rather than match
.              // is because match reorders comparison operations, which is perf sensitive.
.              // This is x86 asm for u8: https://rust.godbolt.org/z/8Y8Pra.
1,589,749 ( 0.02%)  if cmp == Less {
2,794,663 ( 0.03%)  left = mid + 1;
840,852 ( 0.01%)  } else if cmp == Greater {
.                  right = mid;
.              } else {
.                  // SAFETY: same as the `get_unchecked` above
.                  unsafe { crate::intrinsics::assume(mid < self.len()) };
.                  return Ok(mid);
.              }
.
4,774,868 ( 0.06%)  size = right - left;
.          }
.          Err(left)
.      }
.
.      /// Binary searches this sorted slice with a key extraction function.
.      ///
.      /// Assumes that the slice is sorted by the key, for instance with
.      /// [`sort_by_key`] using the same key extraction function.
-- line 2382 ----------------------------------------
-- line 3203 ----------------------------------------
.          #[track_caller]
.          fn len_mismatch_fail(dst_len: usize, src_len: usize) -> ! {
.              panic!(
.                  "source slice length ({}) does not match destination slice length ({})",
.                  src_len, dst_len,
.              );
.          }
.
97,266 ( 0.00%)  if self.len() != src.len() {
.              len_mismatch_fail(self.len(), src.len());
.          }
.
.          // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was
.          // checked to have the same length. The slices cannot overlap because
.          // mutable references are exclusive.
.          unsafe {
.              ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len());
-- line 3219 ----------------------------------------
-- line 3382 ----------------------------------------
.          }
.          let gcd: usize = gcd(mem::size_of::<T>(), mem::size_of::<U>());
.          let ts: usize = mem::size_of::<T>() / gcd;
.          let us: usize = mem::size_of::<U>() / gcd;
.
.          // Armed with this knowledge, we can find how many `U`s we can fit!
.          let us_len = self.len() / ts * us;
.          // And how many `T`s will be in the trailing slice!
24,854 ( 0.00%)  let ts_len = self.len() % ts;
.          (us_len, ts_len)
.      }
.
.      /// Transmute the slice to a slice of another type, ensuring alignment of the types is
.      /// maintained.
.      ///
.      /// This method splits the slice into three distinct slices: prefix, correctly aligned middle
.      /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest
-- line 3398 ----------------------------------------
-- line 3429 ----------------------------------------
.              return (self, &[], &[]);
.          }
.
.          // First, find at what point do we split between the first and 2nd slice. Easy with
.          // ptr.align_offset.
.          let ptr = self.as_ptr();
.          // SAFETY: See the `align_to_mut` method for the detailed safety comment.
.          let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::<U>()) };
24,854 ( 0.00%)  if offset > self.len() {
.              (self, &[], &[])
.          } else {
.              let (left, rest) = self.split_at(offset);
.              let (us_len, ts_len) = rest.align_to_offsets::<U>();
.              // SAFETY: now `rest` is definitely aligned, so `from_raw_parts` below is okay,
.              // since the caller guarantees that we can transmute `T` to `U` safely.
.              unsafe {
.                  (
.                      left,
.                      from_raw_parts(rest.as_ptr() as *const U, us_len),
24,854 ( 0.00%)  from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len),
.                  )
.              }
.          }
.      }
.
.      /// Transmute the slice to a slice of another type, ensuring alignment of the types is
.      /// maintained.
.      ///
-- line 3456 ----------------------------------------
3,346,312 ( 0.04%)
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/str/validations.rs
--------------------------------------------------------------------------------
Ir

-- line 17 ----------------------------------------
.  const fn utf8_acc_cont_byte(ch: u32, byte: u8) -> u32 {
.      (ch << 6) | (byte & CONT_MASK) as u32
.  }
.
.  /// Checks whether the byte is a UTF-8 continuation byte (i.e., starts with the
.  /// bits `10`).
.  #[inline]
.  pub(super) const fn utf8_is_cont_byte(byte: u8) -> bool {
1,105,857 ( 0.01%)  (byte as i8) < -64
.  }
.
.  /// Reads the next code point out of a byte iterator (assuming a
.  /// UTF-8-like encoding).
.  ///
.  /// # Safety
.  ///
.  /// `bytes` must produce a valid UTF-8-like (UTF-8 or WTF-8) string
.  #[unstable(feature = "str_internals", issue = "none")]
.  #[inline]
.  pub unsafe fn next_code_point<'a, I: Iterator<Item = &'a u8>>(bytes: &mut I) -> Option<u32> {
.      // Decode UTF-8
3,324,547 ( 0.04%)  let x = *bytes.next()?;
6,620,604 ( 0.08%)  if x < 128 {
100 ( 0.00%)  return Some(x as u32);
.      }
.
.      // Multibyte case follows
.      // Decode from a byte combination out of: [[[x y] z] w]
.      // NOTE: Performance is sensitive to the exact formulation here
.      let init = utf8_first_byte(x, 2);
.      // SAFETY: `bytes` produces an UTF-8-like string,
.      // so the iterator must produce a value here.
-- line 48 ----------------------------------------
-- line 77 ----------------------------------------
.  /// `bytes` must produce a valid UTF-8-like (UTF-8 or WTF-8) string
.  #[inline]
.  pub(super) unsafe fn next_code_point_reverse<'a, I>(bytes: &mut I) -> Option<u32>
.  where
.      I: DoubleEndedIterator<Item = &'a u8>,
.  {
.      // Decode UTF-8
.      let w = match *bytes.next_back()? {
3,414 ( 0.00%)  next_byte if next_byte < 128 => return Some(next_byte as u32),
.          back_byte => back_byte,
.      };
.
.      // Multibyte case follows
.      // Decode from a byte combination out of: [x [y [z w]]]
.      let mut ch;
.      // SAFETY: `bytes` produces an UTF-8-like string,
.      // so the iterator must produce a value here.
-- line 93 ----------------------------------------
-- line 113 ----------------------------------------
.  }
.
.  // use truncation to fit u64 into usize
.  const NONASCII_MASK: usize = 0x80808080_80808080u64 as usize;
.
.  /// Returns `true` if any byte in the word `x` is nonascii (>= 128).
.  #[inline]
.  const fn contains_nonascii(x: usize) -> bool {
31,590 ( 0.00%)  (x & NONASCII_MASK) != 0
.  }
.
.  /// Walks through `v` checking that it's a valid UTF-8 sequence,
.  /// returning `Ok(())` in that case, or, if it is invalid, `Err(err)`.
.  #[inline(always)]
.  #[rustc_const_unstable(feature = "str_internals", issue = "none")]
pub(super) const fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> { . let mut index = 0; . let len = v.len(); . . let usize_bytes = mem::size_of::(); . let ascii_block_size = 2 * usize_bytes; 48,894 ( 0.00%) let blocks_end = if len >= ascii_block_size { len - ascii_block_size + 1 } else { 0 }; . let align = v.as_ptr().align_offset(usize_bytes); . 33,498 ( 0.00%) while index < len { . let old_offset = index; . macro_rules! err { . ($error_len: expr) => { . return Err(Utf8Error { valid_up_to: old_offset, error_len: $error_len }) . }; . } . . macro_rules! next { -- line 145 ---------------------------------------- -- line 148 ---------------------------------------- . // we needed data, but there was none: error! . if index >= len { . err!(None) . } . v[index] . }}; . } . 16,491 ( 0.00%) let first = v[index]; 32,982 ( 0.00%) if first >= 128 { . let w = utf8_char_width(first); . // 2-byte encoding is for codepoints \u{0080} to \u{07ff} . // first C2 80 last DF BF . // 3-byte encoding is for codepoints \u{0800} to \u{ffff} . // first E0 A0 80 last EF BF BF . // excluding surrogates codepoints \u{d800} to \u{dfff} . // ED A0 80 to ED BF BF . // 4-byte encoding is for codepoints \u{1000}0 to \u{10ff}ff -- line 165 ---------------------------------------- -- line 206 ---------------------------------------- . } . _ => err!(Some(1)), . } . index += 1; . } else { . // Ascii case, try to skip forward quickly. . // When the pointer is aligned, read 2 words of data per iteration . // until we find a word containing a non-ascii byte. 65,608 ( 0.00%) if align != usize::MAX && align.wrapping_sub(index) % usize_bytes == 0 { . let ptr = v.as_ptr(); 95,450 ( 0.00%) while index < blocks_end { . // SAFETY: since `align - index` and `ascii_block_size` are . // multiples of `usize_bytes`, `block = ptr.add(index)` is . // always aligned with a `usize` so it's safe to dereference . // both `block` and `block.offset(1)`. . unsafe { . let block = ptr.add(index) as *const usize; . // break if there is a nonascii byte 31,590 ( 0.00%) let zu = contains_nonascii(*block); . let zv = contains_nonascii(*block.offset(1)); 63,180 ( 0.00%) if zu || zv { . break; . } . } 63,180 ( 0.00%) index += ascii_block_size; . } . // step from the point where the wordwise loop stopped 731,538 ( 0.01%) while index < len && v[index] < 128 { 162,787 ( 0.00%) index += 1; . } . } else { 356 ( 0.00%) index += 1; . } . } . } . . Ok(()) . } . . // https://tools.ietf.org/html/rfc3629 -- line 245 ---------------------------------------- 3,368,942 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs -------------------------------------------------------------------------------- Ir -- line 108 ---------------------------------------- . suppress_errors: bool, . }, . } . . impl RegionckMode { . /// Indicates that the MIR borrowck will repeat these region . /// checks, so we should ignore errors if NLL is (unconditionally) . /// enabled. 617 ( 0.00%) pub fn for_item_body(tcx: TyCtxt<'_>) -> Self { . // FIXME(Centril): Once we actually remove `::Migrate` also make . // this always `true` and then proceed to eliminate the dead code. 617 ( 0.00%) match tcx.borrowck_mode() { . // If we're on Migrate mode, report AST region errors . BorrowckMode::Migrate => RegionckMode::Erase { suppress_errors: false }, . . // If we're on MIR, don't report AST region errors as they should be reported by NLL . 
BorrowckMode::Mir => RegionckMode::Erase { suppress_errors: true }, . } 1,234 ( 0.00%) } . } . . /// This type contains all the things within `InferCtxt` that sit within a . /// `RefCell` and are involved with taking/rolling back snapshots. Snapshot . /// operations are hot enough that we want only one call to `borrow_mut` per . /// call to `start_snapshot` and `rollback_to`. . pub struct InferCtxtInner<'tcx> { . /// Cache for projections. This cache is snapshotted along with the infcx. -- line 134 ---------------------------------------- -- line 202 ---------------------------------------- . /// type instantiations (`ty::Infer`) to the actual opaque . /// type (`ty::Opaque`). Used during fallback to map unconstrained . /// opaque type inference variables to their corresponding . /// opaque type. . pub opaque_types_vars: FxHashMap, Ty<'tcx>>, . } . . impl<'tcx> InferCtxtInner<'tcx> { 57,650 ( 0.00%) fn new() -> InferCtxtInner<'tcx> { 392,020 ( 0.00%) InferCtxtInner { . projection_cache: Default::default(), . type_variable_storage: type_variable::TypeVariableStorage::new(), . undo_log: InferCtxtUndoLogs::default(), . const_unification_storage: ut::UnificationTableStorage::new(), . int_unification_storage: ut::UnificationTableStorage::new(), . float_unification_storage: ut::UnificationTableStorage::new(), 34,590 ( 0.00%) region_constraint_storage: Some(RegionConstraintStorage::new()), . region_obligations: vec![], . opaque_types: Default::default(), . opaque_types_vars: Default::default(), . } 69,180 ( 0.00%) } . . #[inline] . pub fn region_obligations(&self) -> &[(hir::HirId, RegionObligation<'tcx>)] { . &self.region_obligations . } . . #[inline] . pub fn projection_cache(&mut self) -> traits::ProjectionCache<'_, 'tcx> { 13,439 ( 0.00%) self.projection_cache.with_log(&mut self.undo_log) . } . . #[inline] . fn type_variables(&mut self) -> type_variable::TypeVariableTable<'_, 'tcx> { 482,147 ( 0.01%) self.type_variable_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn int_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::IntVid, . &mut ut::UnificationStorage, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 76,521 ( 0.00%) self.int_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn float_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::FloatVid, -- line 258 ---------------------------------------- -- line 268 ---------------------------------------- . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::ConstVid<'tcx>, . &mut ut::UnificationStorage>, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 23,934 ( 0.00%) self.const_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . pub fn unwrap_region_constraints(&mut self) -> RegionConstraintCollector<'_, 'tcx> { 49,140 ( 0.00%) self.region_constraint_storage . .as_mut() . .expect("region constraints already solved") 58,833 ( 0.00%) .with_log(&mut self.undo_log) . } . } . . pub struct InferCtxt<'a, 'tcx> { . pub tcx: TyCtxt<'tcx>, . . /// The `DefId` of the item in whose context we are performing inference or typeck. . /// It is used to check whether an opaque type use is a defining use. -- line 292 ---------------------------------------- -- line 361 ---------------------------------------- . /// item we are type-checking, and just consider those names as . /// part of the root universe. So this would only get incremented . /// when we enter into a higher-ranked (`for<..>`) type or trait . /// bound. . universe: Cell, . } . . 
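--------------------------------------------------------------------------------
-- Sketch: the `with_log` storage/undo-log split used by `InferCtxtInner`
--------------------------------------------------------------------------------
// A toy, invented illustration (not rustc's types) of the pattern annotated
// above: raw storage and the undo log live in separate fields, and each
// accessor pairs them into a temporary table view so every mutation gets
// recorded and can later be unrolled to a snapshot, as the snapshot methods
// further down do.
struct Storage {
    values: Vec<i32>,
}

struct UndoLog {
    // One entry per recorded mutation; here just the index that was pushed.
    undos: Vec<usize>,
}

struct TableView<'a> {
    storage: &'a mut Storage,
    undo_log: &'a mut UndoLog,
}

impl<'a> TableView<'a> {
    fn push(&mut self, v: i32) {
        self.storage.values.push(v);
        self.undo_log.undos.push(self.storage.values.len() - 1);
    }
}

struct Inner {
    storage: Storage,
    undo_log: UndoLog,
}

impl Inner {
    // Mirrors accessors like `type_variables()`: borrow both fields at once.
    fn values(&mut self) -> TableView<'_> {
        TableView { storage: &mut self.storage, undo_log: &mut self.undo_log }
    }

    // Mirrors `rollback_to`: replay the undo log back to a snapshot point.
    fn rollback_to(&mut self, snapshot_len: usize) {
        while self.undo_log.undos.len() > snapshot_len {
            let idx = self.undo_log.undos.pop().unwrap();
            self.storage.values.truncate(idx);
        }
    }
}

fn main() {
    let mut inner = Inner {
        storage: Storage { values: vec![] },
        undo_log: UndoLog { undos: vec![] },
    };
    inner.values().push(1);
    let snapshot = inner.undo_log.undos.len(); // "start_snapshot"
    inner.values().push(2);
    inner.values().push(3);
    inner.rollback_to(snapshot);
    assert_eq!(inner.storage.values, vec![1]);
}
--------------------------------------------------------------------------------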
/// See the `error_reporting` module for more details. 72,320 ( 0.00%) #[derive(Clone, Copy, Debug, PartialEq, Eq, TypeFoldable)] . pub enum ValuePairs<'tcx> { . Types(ExpectedFound>), . Regions(ExpectedFound>), . Consts(ExpectedFound<&'tcx ty::Const<'tcx>>), . TraitRefs(ExpectedFound>), . PolyTraitRefs(ExpectedFound>), . } . -- line 377 ---------------------------------------- -- line 383 ---------------------------------------- . pub struct TypeTrace<'tcx> { . cause: ObligationCause<'tcx>, . values: ValuePairs<'tcx>, . } . . /// The origin of a `r1 <= r2` constraint. . /// . /// See `error_reporting` module for more details 63,459 ( 0.00%) #[derive(Clone, Debug)] . pub enum SubregionOrigin<'tcx> { . /// Arose from a subtyping relation 3,674 ( 0.00%) Subtype(Box>), . . /// When casting `&'a T` to an `&'b Trait` object, . /// relating `'a` to `'b` . RelateObjectBound(Span), . . /// Some type parameter was instantiated with the given type, . /// and that type must outlive some region. 319 ( 0.00%) RelateParamBound(Span, Ty<'tcx>, Option), . . /// The given region parameter was instantiated with a region . /// that must outlive some other region. . RelateRegionParamBound(Span), . . /// Creating a pointer `b` to contents of another reference . Reborrow(Span), . . /// Creating a pointer `b` to contents of an upvar . ReborrowUpvar(Span, ty::UpvarId), . . /// Data with type `Ty<'tcx>` was borrowed 598 ( 0.00%) DataBorrowed(Ty<'tcx>, Span), . . /// (&'a &'b T) where a >= b 489 ( 0.00%) ReferenceOutlivesReferent(Ty<'tcx>, Span), . . /// Comparing the signature and requirements of an impl method against . /// the containing trait. . CompareImplMethodObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, . . /// Comparing the signature and requirements of an impl associated type . /// against the containing trait . CompareImplTypeObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, -- line 426 ---------------------------------------- -- line 554 ---------------------------------------- . defining_use_anchor: Option, . } . . pub trait TyCtxtInferExt<'tcx> { . fn infer_ctxt(self) -> InferCtxtBuilder<'tcx>; . } . . impl<'tcx> TyCtxtInferExt<'tcx> for TyCtxt<'tcx> { 11,530 ( 0.00%) fn infer_ctxt(self) -> InferCtxtBuilder<'tcx> { 34,590 ( 0.00%) InferCtxtBuilder { tcx: self, defining_use_anchor: None, fresh_typeck_results: None } 11,530 ( 0.00%) } . } . . impl<'tcx> InferCtxtBuilder<'tcx> { . /// Used only by `rustc_typeck` during body type-checking/inference, . /// will initialize `in_progress_typeck_results` with fresh `TypeckResults`. . /// Will also change the scope for opaque type defining use checks to the given owner. 8,001 ( 0.00%) pub fn with_fresh_in_progress_typeck_results(mut self, table_owner: LocalDefId) -> Self { 9,779 ( 0.00%) self.fresh_typeck_results = Some(RefCell::new(ty::TypeckResults::new(table_owner))); 4,445 ( 0.00%) self.with_opaque_type_inference(table_owner) 6,223 ( 0.00%) } . . /// Whenever the `InferCtxt` should be able to handle defining uses of opaque types, . /// you need to call this function. Otherwise the opaque type will be treated opaquely. . /// . /// It is only meant to be called in two places, for typeck . /// (via `with_fresh_in_progress_typeck_results`) and for the inference context used . /// in mir borrowck. 1,254 ( 0.00%) pub fn with_opaque_type_inference(mut self, defining_use_anchor: LocalDefId) -> Self { 627 ( 0.00%) self.defining_use_anchor = Some(defining_use_anchor); 3,032 ( 0.00%) self 1,881 ( 0.00%) } . . 
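--------------------------------------------------------------------------------
-- Sketch: the builder-plus-`enter` shape used just below
--------------------------------------------------------------------------------
// `InferCtxtBuilder` collects configuration, and the inference context only
// exists inside the closure passed to `enter`, which statically keeps it
// from outliving its setup. A minimal, invented version of that shape:
struct CtxtBuilder {
    verbose: bool,
}

struct Ctxt {
    verbose: bool,
}

impl CtxtBuilder {
    fn new() -> Self {
        CtxtBuilder { verbose: false }
    }
    fn with_verbose(mut self) -> Self {
        self.verbose = true;
        self
    }
    // The context is built fresh per call and handed to the closure only.
    fn enter<R>(&mut self, f: impl FnOnce(Ctxt) -> R) -> R {
        f(Ctxt { verbose: self.verbose })
    }
}

fn main() {
    let answer = CtxtBuilder::new().with_verbose().enter(|cx| {
        if cx.verbose { 42 } else { 0 }
    });
    assert_eq!(answer, 42);
}
--------------------------------------------------------------------------------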
/// Given a canonical value `C` as a starting point, create an . /// inference context that contains each of the bound values . /// within instantiated as a fresh variable. The `f` closure is . /// invoked with the new infcx, along with the instantiated value . /// `V` and a substitution `S`. This substitution `S` maps from . /// the bound values in `C` to their instantiated values in `V` . /// (in other words, `S(C) = V`). 10,583 ( 0.00%) pub fn enter_with_canonical( . &mut self, . span: Span, . canonical: &Canonical<'tcx, T>, . f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>, T, CanonicalVarValues<'tcx>) -> R, . ) -> R . where . T: TypeFoldable<'tcx>, . { . self.enter(|infcx| { 15,302 ( 0.00%) let (value, subst) = 766 ( 0.00%) infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical); 22,862 ( 0.00%) f(infcx, value, subst) . }) 11,496 ( 0.00%) } . 77,951 ( 0.00%) pub fn enter(&mut self, f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>) -> R) -> R { 31,501 ( 0.00%) let InferCtxtBuilder { tcx, defining_use_anchor, ref fresh_typeck_results } = *self; . let in_progress_typeck_results = fresh_typeck_results.as_ref(); 502,608 ( 0.01%) f(InferCtxt { . tcx, . defining_use_anchor, . in_progress_typeck_results, 11,530 ( 0.00%) inner: RefCell::new(InferCtxtInner::new()), . lexical_region_resolutions: RefCell::new(None), . selection_cache: Default::default(), . evaluation_cache: Default::default(), . reported_trait_errors: Default::default(), . reported_closure_mismatch: Default::default(), . tainted_by_errors_flag: Cell::new(false), 11,530 ( 0.00%) err_count_on_creation: tcx.sess.err_count(), . in_snapshot: Cell::new(false), . skip_leak_check: Cell::new(false), . universe: Cell::new(ty::UniverseIndex::ROOT), . }) 83,474 ( 0.00%) } . } . . impl<'tcx, T> InferOk<'tcx, T> { . pub fn unit(self) -> InferOk<'tcx, ()> { . InferOk { value: (), obligations: self.obligations } . } . . /// Extracts `value`, registering any obligations into `fulfill_cx`. . pub fn into_value_registering_obligations( . self, . infcx: &InferCtxt<'_, 'tcx>, . fulfill_cx: &mut dyn TraitEngine<'tcx>, . ) -> T { 146 ( 0.00%) let InferOk { value, obligations } = self; 612 ( 0.00%) for obligation in obligations { . fulfill_cx.register_predicate_obligation(infcx, obligation); . } . value . } . } . . impl<'tcx> InferOk<'tcx, ()> { 6,130 ( 0.00%) pub fn into_obligations(self) -> PredicateObligations<'tcx> { 24,520 ( 0.00%) self.obligations 6,130 ( 0.00%) } . } . . #[must_use = "once you start a snapshot, you should always consume it"] . pub struct CombinedSnapshot<'a, 'tcx> { . undo_snapshot: Snapshot<'tcx>, . region_constraints_snapshot: RegionSnapshot, . universe: ty::UniverseIndex, . was_in_snapshot: bool, -- line 662 ---------------------------------------- -- line 674 ---------------------------------------- . let canonical = self.canonicalize_query((a, b), &mut OriginalQueryValues::default()); . debug!("canonical consts: {:?}", &canonical.value); . . self.tcx.try_unify_abstract_consts(canonical.value) . } . . pub fn is_in_snapshot(&self) -> bool { . self.in_snapshot.get() 26,653 ( 0.00%) } . 239,600 ( 0.00%) pub fn freshen>(&self, t: T) -> T { 269,550 ( 0.00%) t.fold_with(&mut self.freshener()) 269,550 ( 0.00%) } . . /// Returns the origin of the type variable identified by `vid`, or `None` . /// if this is not a type variable. . /// . /// No attempt is made to resolve `ty`. 
2,868 ( 0.00%) pub fn type_var_origin(&'a self, ty: Ty<'tcx>) -> Option { 5,736 ( 0.00%) match *ty.kind() { 1,404 ( 0.00%) ty::Infer(ty::TyVar(vid)) => { 5,616 ( 0.00%) Some(*self.inner.borrow_mut().type_variables().var_origin(vid)) . } 30 ( 0.00%) _ => None, . } 5,736 ( 0.00%) } . 29,950 ( 0.00%) pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, false) 29,950 ( 0.00%) } . . /// Like `freshener`, but does not replace `'static` regions. 63,895 ( 0.00%) pub fn freshener_keep_static<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, true) 63,895 ( 0.00%) } . 2,828 ( 0.00%) pub fn unsolved_variables(&self) -> Vec> { 1,414 ( 0.00%) let mut inner = self.inner.borrow_mut(); 1,414 ( 0.00%) let mut vars: Vec> = inner . .type_variables() . .unsolved_variables() . .into_iter() 2,808 ( 0.00%) .map(|t| self.tcx.mk_ty_var(t)) . .collect(); . vars.extend( . (0..inner.int_unification_table().len()) . .map(|i| ty::IntVid { index: i as u32 }) 3,954 ( 0.00%) .filter(|&vid| inner.int_unification_table().probe_value(vid).is_none()) 30 ( 0.00%) .map(|v| self.tcx.mk_int_var(v)), . ); . vars.extend( . (0..inner.float_unification_table().len()) . .map(|i| ty::FloatVid { index: i as u32 }) . .filter(|&vid| inner.float_unification_table().probe_value(vid).is_none()) . .map(|v| self.tcx.mk_float_var(v)), . ); . vars 4,949 ( 0.00%) } . 42,283 ( 0.00%) fn combine_fields( . &'a self, . trace: TypeTrace<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> CombineFields<'a, 'tcx> { 169,220 ( 0.00%) CombineFields { . infcx: self, 423,050 ( 0.01%) trace, . cause: None, . param_env, . obligations: PredicateObligations::new(), . } 42,283 ( 0.00%) } . . /// Clear the "currently in a snapshot" flag, invoke the closure, . /// then restore the flag to its original value. This flag is a . /// debugging measure designed to detect cases where we start a . /// snapshot, create type variables, and register obligations . /// which may involve those type variables in the fulfillment cx, . /// potentially leaving "dangling type variables" behind. . /// In such cases, an assertion will fail when attempting to -- line 753 ---------------------------------------- -- line 755 ---------------------------------------- . /// better than grovelling through megabytes of `RUSTC_LOG` output. . /// . /// HOWEVER, in some cases the flag is unhelpful. In particular, we . /// sometimes create a "mini-fulfilment-cx" in which we enroll . /// obligations. As long as this fulfillment cx is fully drained . /// before we return, this is not a problem, as there won't be any . /// escaping obligations in the main cx. In those cases, you can . /// use this function. 40 ( 0.00%) pub fn save_and_restore_in_snapshot_flag(&self, func: F) -> R . where . F: FnOnce(&Self) -> R, . { . let flag = self.in_snapshot.replace(false); 7,305 ( 0.00%) let result = func(self); . self.in_snapshot.set(flag); . result 45 ( 0.00%) } . 215,202 ( 0.00%) fn start_snapshot(&self) -> CombinedSnapshot<'a, 'tcx> { . debug!("start_snapshot()"); . . let in_snapshot = self.in_snapshot.replace(true); . . let mut inner = self.inner.borrow_mut(); . 645,606 ( 0.01%) CombinedSnapshot { . undo_snapshot: inner.undo_log.start_snapshot(), . region_constraints_snapshot: inner.unwrap_region_constraints().start_snapshot(), . universe: self.universe(), . was_in_snapshot: in_snapshot, . // Borrow typeck results "in progress" (i.e., during typeck) . // to ban writes from within a snapshot to them. 
107,601 ( 0.00%) _in_progress_typeck_results: self . .in_progress_typeck_results . .map(|typeck_results| typeck_results.borrow()), . } 430,404 ( 0.01%) } . 362,736 ( 0.00%) #[instrument(skip(self, snapshot), level = "debug")] . fn rollback_to(&self, cause: &str, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 32,976 ( 0.00%) undo_snapshot, 32,976 ( 0.00%) region_constraints_snapshot, 32,976 ( 0.00%) universe, 32,976 ( 0.00%) was_in_snapshot, 65,952 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . self.universe.set(universe); . . let mut inner = self.inner.borrow_mut(); 32,976 ( 0.00%) inner.rollback_to(undo_snapshot); . inner.unwrap_region_constraints().rollback_to(region_constraints_snapshot); . } . 1,119,375 ( 0.01%) #[instrument(skip(self, snapshot), level = "debug")] . fn commit_from(&self, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 74,625 ( 0.00%) undo_snapshot, . region_constraints_snapshot: _, . universe: _, 74,625 ( 0.00%) was_in_snapshot, 149,250 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . . self.inner.borrow_mut().commit(undo_snapshot); . } . . /// Executes `f` and commit the bindings. 95,457 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 116,985 ( 0.00%) pub fn commit_unconditionally(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 10,635 ( 0.00%) let snapshot = self.start_snapshot(); 32,838 ( 0.00%) let r = f(&snapshot); 74,445 ( 0.00%) self.commit_from(snapshot); 64,514 ( 0.00%) r . } . . /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`. 499,498 ( 0.01%) #[instrument(skip(self, f), level = "debug")] 616,204 ( 0.01%) pub fn commit_if_ok(&self, f: F) -> Result . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result, . { 92,231 ( 0.00%) let snapshot = self.start_snapshot(); 284,845 ( 0.00%) let r = f(&snapshot); . debug!("commit_if_ok() -- r.is_ok() = {}", r.is_ok()); 98,207 ( 0.00%) match r { . Ok(_) => { 458,778 ( 0.01%) self.commit_from(snapshot); . } . Err(_) => { 163,286 ( 0.00%) self.rollback_to("commit_if_ok -- error", snapshot); . } . } 503,232 ( 0.01%) r . } . . /// Execute `f` then unroll any bindings it creates. 108,268 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 129,998 ( 0.00%) pub fn probe(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 32,627 ( 0.00%) let snapshot = self.start_snapshot(); 52,491 ( 0.00%) let r = f(&snapshot); 149,669 ( 0.00%) self.rollback_to("probe", snapshot); 21,941 ( 0.00%) r . } . . /// If `should_skip` is true, then execute `f` then unroll any bindings it creates. 18 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 24 ( 0.00%) pub fn probe_maybe_skip_leak_check(&self, should_skip: bool, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 4 ( 0.00%) let snapshot = self.start_snapshot(); 2 ( 0.00%) let was_skip_leak_check = self.skip_leak_check.get(); 4 ( 0.00%) if should_skip { . self.skip_leak_check.set(true); . } 8 ( 0.00%) let r = f(&snapshot); 18 ( 0.00%) self.rollback_to("probe", snapshot); . self.skip_leak_check.set(was_skip_leak_check); 24 ( 0.00%) r . } . . /// Scan the constraints produced since `snapshot` began and returns: . /// . /// - `None` -- if none of them involve "region outlives" constraints . /// - `Some(true)` -- if there are `'a: 'b` constraints where `'a` or `'b` is a placeholder . 
/// - `Some(false)` -- if there are `'a: 'b` constraints but none involve placeholders 7,090 ( 0.00%) pub fn region_constraints_added_in_snapshot( . &self, . snapshot: &CombinedSnapshot<'a, 'tcx>, . ) -> Option { 14,180 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() . .region_constraints_added_in_snapshot(&snapshot.undo_snapshot) 10,635 ( 0.00%) } . . pub fn add_given(&self, sub: ty::Region<'tcx>, sup: ty::RegionVid) { . self.inner.borrow_mut().unwrap_region_constraints().add_given(sub, sup); . } . 348 ( 0.00%) pub fn can_sub(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).sub(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 261 ( 0.00%) } . 1,728 ( 0.00%) pub fn can_eq(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).eq(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 1,296 ( 0.00%) } . 31,790 ( 0.00%) #[instrument(skip(self), level = "debug")] . pub fn sub_regions( . &self, . origin: SubregionOrigin<'tcx>, . a: ty::Region<'tcx>, . b: ty::Region<'tcx>, . ) { 28,611 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().make_subregion(origin, a, b); . } . . /// Require that the region `r` be equal to one of the regions in . /// the set `regions`. . #[instrument(skip(self), level = "debug")] . pub fn member_constraint( . &self, . opaque_type_def_id: DefId, -- line 947 ---------------------------------------- -- line 969 ---------------------------------------- . /// to `subtype_predicate` -- that is, "coercing" `a` to `b` winds up . /// actually requiring `a <: b`. This is of course a valid coercion, . /// but it's not as flexible as `FnCtxt::coerce` would be. . /// . /// (We may refactor this in the future, but there are a number of . /// practical obstacles. Among other things, `FnCtxt::coerce` presently . /// records adjustments that are required on the HIR in order to perform . /// the coercion, and we don't currently have a way to manage that.) 78 ( 0.00%) pub fn coerce_predicate( . &self, . cause: &ObligationCause<'tcx>, . param_env: ty::ParamEnv<'tcx>, . predicate: ty::PolyCoercePredicate<'tcx>, . ) -> Option> { 52 ( 0.00%) let subtype_predicate = predicate.map_bound(|p| ty::SubtypePredicate { . a_is_expected: false, // when coercing from `a` to `b`, `b` is expected . a: p.a, . b: p.b, . }); 130 ( 0.00%) self.subtype_predicate(cause, param_env, subtype_predicate) 104 ( 0.00%) } . 1,020 ( 0.00%) pub fn subtype_predicate( . &self, . cause: &ObligationCause<'tcx>, . param_env: ty::ParamEnv<'tcx>, . predicate: ty::PolySubtypePredicate<'tcx>, . ) -> Option> { . // Check for two unresolved inference variables, in which case we can . // make no progress. This is partly a micro-optimization, but it's . // also an opportunity to "sub-unify" the variables. This isn't -- line 999 ---------------------------------------- -- line 1002 ---------------------------------------- . // earlier that they are sub-unified). . // . // Note that we can just skip the binders here because . // type variables can't (at present, at . // least) capture any of the things bound by this binder. . // . 
// Note that this sub here is not just for diagnostics - it has semantic . // effects as well. 85 ( 0.00%) let r_a = self.shallow_resolve(predicate.skip_binder().a); 85 ( 0.00%) let r_b = self.shallow_resolve(predicate.skip_binder().b); 600 ( 0.00%) match (r_a.kind(), r_b.kind()) { 130 ( 0.00%) (&ty::Infer(ty::TyVar(a_vid)), &ty::Infer(ty::TyVar(b_vid))) => { . self.inner.borrow_mut().type_variables().sub(a_vid, b_vid); 130 ( 0.00%) return None; . } . _ => {} . } . . Some(self.commit_if_ok(|_snapshot| { 20 ( 0.00%) let ty::SubtypePredicate { a_is_expected, a, b } = . self.replace_bound_vars_with_placeholders(predicate); . 40 ( 0.00%) let ok = self.at(cause, param_env).sub_exp(a_is_expected, a, b)?; . . Ok(ok.unit()) . })) 765 ( 0.00%) } . 2,796 ( 0.00%) pub fn region_outlives_predicate( . &self, . cause: &traits::ObligationCause<'tcx>, . predicate: ty::PolyRegionOutlivesPredicate<'tcx>, . ) -> UnitResult<'tcx> { . self.commit_if_ok(|_snapshot| { . let ty::OutlivesPredicate(r_a, r_b) = . self.replace_bound_vars_with_placeholders(predicate); . let origin = SubregionOrigin::from_obligation_cause(cause, || { . RelateRegionParamBound(cause.span) . }); 2,330 ( 0.00%) self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b` . Ok(()) . }) 1,864 ( 0.00%) } . . /// Number of type variables created so far. 90 ( 0.00%) pub fn num_ty_vars(&self) -> usize { . self.inner.borrow_mut().type_variables().num_vars() 180 ( 0.00%) } . 21,216 ( 0.00%) pub fn next_ty_var_id(&self, origin: TypeVariableOrigin) -> TyVid { 106,080 ( 0.00%) self.inner.borrow_mut().type_variables().new_var(self.universe(), origin) 31,824 ( 0.00%) } . 18,606 ( 0.00%) pub fn next_ty_var(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { 72,951 ( 0.00%) self.tcx.mk_ty_var(self.next_ty_var_id(origin)) 27,909 ( 0.00%) } . 348 ( 0.00%) pub fn next_ty_var_in_universe( . &self, . origin: TypeVariableOrigin, . universe: ty::UniverseIndex, . ) -> Ty<'tcx> { 1,914 ( 0.00%) let vid = self.inner.borrow_mut().type_variables().new_var(universe, origin); 174 ( 0.00%) self.tcx.mk_ty_var(vid) 522 ( 0.00%) } . 44 ( 0.00%) pub fn next_const_var( . &self, . ty: Ty<'tcx>, . origin: ConstVariableOrigin, . ) -> &'tcx ty::Const<'tcx> { 77 ( 0.00%) self.tcx.mk_const_var(self.next_const_var_id(origin), ty) 44 ( 0.00%) } . 15 ( 0.00%) pub fn next_const_var_in_universe( . &self, . ty: Ty<'tcx>, . origin: ConstVariableOrigin, . universe: ty::UniverseIndex, . ) -> &'tcx ty::Const<'tcx> { 15 ( 0.00%) let vid = self . .inner . .borrow_mut() . .const_unification_table() 40 ( 0.00%) .new_key(ConstVarValue { origin, val: ConstVariableValue::Unknown { universe } }); 5 ( 0.00%) self.tcx.mk_const_var(vid, ty) 20 ( 0.00%) } . 22 ( 0.00%) pub fn next_const_var_id(&self, origin: ConstVariableOrigin) -> ConstVid<'tcx> { 77 ( 0.00%) self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { 44 ( 0.00%) origin, . val: ConstVariableValue::Unknown { universe: self.universe() }, . }) 33 ( 0.00%) } . . fn next_int_var_id(&self) -> IntVid { 5,265 ( 0.00%) self.inner.borrow_mut().int_unification_table().new_key(None) . } . 3,159 ( 0.00%) pub fn next_int_var(&self) -> Ty<'tcx> { . self.tcx.mk_int_var(self.next_int_var_id()) 4,212 ( 0.00%) } . . fn next_float_var_id(&self) -> FloatVid { . self.inner.borrow_mut().float_unification_table().new_key(None) . } . . pub fn next_float_var(&self) -> Ty<'tcx> { . self.tcx.mk_float_var(self.next_float_var_id()) . } . . /// Creates a fresh region variable with the next available index. . 
/// The variable will be created in the maximum universe created . /// thus far, allowing it to name any region created thus far. 6,375 ( 0.00%) pub fn next_region_var(&self, origin: RegionVariableOrigin) -> ty::Region<'tcx> { 132,681 ( 0.00%) self.next_region_var_in_universe(origin, self.universe()) 12,750 ( 0.00%) } . . /// Creates a fresh region variable with the next available index . /// in the given universe; typically, you can use . /// `next_region_var` and just use the maximal universe. 54,256 ( 0.00%) pub fn next_region_var_in_universe( . &self, . origin: RegionVariableOrigin, . universe: ty::UniverseIndex, . ) -> ty::Region<'tcx> { . let region_var = 352,664 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().new_region_var(universe, origin); 135,640 ( 0.00%) self.tcx.mk_region(ty::ReVar(region_var)) 81,384 ( 0.00%) } . . /// Return the universe that the region `r` was created in. For . /// most regions (e.g., `'static`, named regions from the user, . /// etc) this is the root universe U0. For inference variables or . /// placeholders, however, it will return the universe which which . /// they are associated. 8,136 ( 0.00%) pub fn universe_of_region(&self, r: ty::Region<'tcx>) -> ty::UniverseIndex { . self.inner.borrow_mut().unwrap_region_constraints().universe(r) 12,204 ( 0.00%) } . . /// Number of region variables created so far. 5,036 ( 0.00%) pub fn num_region_vars(&self) -> usize { . self.inner.borrow_mut().unwrap_region_constraints().num_region_vars() 7,554 ( 0.00%) } . . /// Just a convenient wrapper of `next_region_var` for using during NLL. 8,628 ( 0.00%) pub fn next_nll_region_var(&self, origin: NllRegionVariableOrigin) -> ty::Region<'tcx> { . self.next_region_var(RegionVariableOrigin::Nll(origin)) 17,256 ( 0.00%) } . . /// Just a convenient wrapper of `next_region_var` for using during NLL. 82 ( 0.00%) pub fn next_nll_region_var_in_universe( . &self, . origin: NllRegionVariableOrigin, . universe: ty::UniverseIndex, . ) -> ty::Region<'tcx> { 590 ( 0.00%) self.next_region_var_in_universe(RegionVariableOrigin::Nll(origin), universe) 164 ( 0.00%) } . 238,560 ( 0.00%) pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> GenericArg<'tcx> { 135,486 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => { . // Create a region inference variable for the given . // region parameter definition. 6,807 ( 0.00%) self.next_region_var(EarlyBoundRegion(span, param.name)).into() . } . GenericParamDefKind::Type { .. } => { . // Create a type inference variable for the given . // type parameter definition. The substitutions are . // for actual parameters that may be referred to by . // the default of this type parameter, if it exists. . // e.g., `struct Foo(...);` when . // used in a path such as `Foo::::new()` will . // use an inference variable for `C` with `[T, U]` . // as the substitutions for the default, `(T, U)`. 80,788 ( 0.00%) let ty_var_id = self.inner.borrow_mut().type_variables().new_var( . self.universe(), 100,985 ( 0.00%) TypeVariableOrigin { . kind: TypeVariableOriginKind::TypeParameterDefinition( 20,197 ( 0.00%) param.name, 20,197 ( 0.00%) Some(param.def_id), . ), . span, . }, . ); . 20,197 ( 0.00%) self.tcx.mk_ty_var(ty_var_id).into() . } . GenericParamDefKind::Const { .. } => { . let origin = ConstVariableOrigin { . kind: ConstVariableOriginKind::ConstParameterDefinition( . param.name, . param.def_id, . ), . span, . }; . let const_var_id = 30,976 ( 0.00%) self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { . 
origin, . val: ConstVariableValue::Unknown { universe: self.universe() }, . }); 2,816 ( 0.00%) self.tcx.mk_const_var(const_var_id, self.tcx.type_of(param.def_id)).into() . } . } 22,528 ( 0.00%) } . . /// Given a set of generics defined on a type or impl, returns a substitution mapping each . /// type/region parameter to a fresh inference variable. 56,403 ( 0.00%) pub fn fresh_substs_for_item(&self, span: Span, def_id: DefId) -> SubstsRef<'tcx> { 260,656 ( 0.00%) InternalSubsts::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param)) 37,602 ( 0.00%) } . . /// Returns `true` if errors have been reported since this infcx was . /// created. This is sometimes used as a heuristic to skip . /// reporting errors that often occur as a result of earlier . /// errors, but where it's hard to be 100% sure (e.g., unresolved . /// inference variables, regionck errors). 4,102 ( 0.00%) pub fn is_tainted_by_errors(&self) -> bool { . debug!( . "is_tainted_by_errors(err_count={}, err_count_on_creation={}, \ . tainted_by_errors_flag={})", . self.tcx.sess.err_count(), . self.err_count_on_creation, . self.tainted_by_errors_flag.get() . ); . 41,855 ( 0.00%) if self.tcx.sess.err_count() > self.err_count_on_creation { . return true; // errors reported since this infcx was made . } . self.tainted_by_errors_flag.get() 6,153 ( 0.00%) } . . /// Set the "tainted by errors" flag to true. We call this when we . /// observe an error from a prior pass. . pub fn set_tainted_by_errors(&self) { . debug!("set_tainted_by_errors()"); . self.tainted_by_errors_flag.set(true) . } . . /// Process the region constraints and return any any errors that . /// result. After this, no more unification operations should be . /// done -- or the compiler will panic -- but it is legal to use . /// `resolve_vars_if_possible` as well as `fully_resolve`. 32,081 ( 0.00%) pub fn resolve_regions( . &self, . region_context: DefId, . outlives_env: &OutlivesEnvironment<'tcx>, . mode: RegionckMode, . ) -> Vec> { 87,077 ( 0.00%) let (var_infos, data) = { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; 4,583 ( 0.00%) assert!( 13,749 ( 0.00%) self.is_tainted_by_errors() || inner.region_obligations.is_empty(), . "region_obligations not empty: {:#?}", . inner.region_obligations . ); . inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) . .into_infos_and_data() 4,583 ( 0.00%) }; . . let region_rels = 4,583 ( 0.00%) &RegionRelations::new(self.tcx, region_context, outlives_env.free_region_map()); . 41,247 ( 0.00%) let (lexical_region_resolutions, errors) = 109,992 ( 0.00%) lexical_region_resolve::resolve(region_rels, var_infos, data, mode); . 18,332 ( 0.00%) let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions)); 4,583 ( 0.00%) assert!(old_value.is_none()); . . errors 41,247 ( 0.00%) } . . /// Process the region constraints and report any errors that . /// result. After this, no more unification operations should be . /// done -- or the compiler will panic -- but it is legal to use . /// `resolve_vars_if_possible` as well as `fully_resolve`. 45,830 ( 0.00%) pub fn resolve_regions_and_report_errors( . &self, . region_context: DefId, . outlives_env: &OutlivesEnvironment<'tcx>, . mode: RegionckMode, . ) { 9,166 ( 0.00%) let errors = self.resolve_regions(region_context, outlives_env, mode); . 13,749 ( 0.00%) if !self.is_tainted_by_errors() { . // As a heuristic, just skip reporting region errors . 
// altogether if other errors have been reported while . // this infcx was in use. This is totally hokey but . // otherwise we have a hard time separating legit region . // errors from silly ones. 9,166 ( 0.00%) self.report_region_errors(&errors); . } 22,915 ( 0.00%) } . . /// Obtains (and clears) the current set of region . /// constraints. The inference context is still usable: further . /// unifications will simply add new constraints. . /// . /// This method is not meant to be used with normal lexical region . /// resolution. Rather, it is used in the NLL mode as a kind of . /// interim hack: basically we run normal type-check and generate -- line 1307 ---------------------------------------- -- line 1319 ---------------------------------------- . } . . /// Gives temporary access to the region constraint data. . pub fn with_region_constraints( . &self, . op: impl FnOnce(&RegionConstraintData<'tcx>) -> R, . ) -> R { . let mut inner = self.inner.borrow_mut(); 4,780 ( 0.00%) op(inner.unwrap_region_constraints().data()) . } . . pub fn region_var_origin(&self, vid: ty::RegionVid) -> RegionVariableOrigin { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; . inner . .region_constraint_storage . .as_mut() -- line 1335 ---------------------------------------- -- line 1338 ---------------------------------------- . .var_origin(vid) . } . . /// Takes ownership of the list of variable regions. This implies . /// that all the region constraints have already been taken, and . /// hence that `resolve_regions_and_report_errors` can never be . /// called. This is used only during NLL processing to "hand off" ownership . /// of the set of region variables into the NLL region context. 3,135 ( 0.00%) pub fn take_region_var_origins(&self) -> VarInfos { . let mut inner = self.inner.borrow_mut(); 11,913 ( 0.00%) let (var_infos, data) = inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) 627 ( 0.00%) .into_infos_and_data(); 627 ( 0.00%) assert!(data.is_empty()); . var_infos 5,016 ( 0.00%) } . . pub fn ty_to_string(&self, t: Ty<'tcx>) -> String { . self.resolve_vars_if_possible(t).to_string() . } . . /// If `TyVar(vid)` resolves to a type, return that type. Else, return the . /// universe index of `TyVar(vid)`. 5,754 ( 0.00%) pub fn probe_ty_var(&self, vid: TyVid) -> Result, ty::UniverseIndex> { . use self::type_variable::TypeVariableValue; . 17,262 ( 0.00%) match self.inner.borrow_mut().type_variables().probe(vid) { . TypeVariableValue::Known { value } => Ok(value), . TypeVariableValue::Unknown { universe } => Err(universe), . } 20,139 ( 0.00%) } . . /// Resolve any type variables found in `value` -- but only one . /// level. So, if the variable `?X` is bound to some type . /// `Foo`, then this would return `Foo` (but `?Y` may . /// itself be bound to a type). . /// . /// Useful when you only need to inspect the outermost level of . /// the type and don't care about nested types (or perhaps you . /// will be resolving them as well, e.g. in a loop). . pub fn shallow_resolve(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 184,048 ( 0.00%) value.fold_with(&mut ShallowResolver { infcx: self }) . } . 15,288 ( 0.00%) pub fn root_var(&self, var: ty::TyVid) -> ty::TyVid { . self.inner.borrow_mut().type_variables().root_var(var) 22,932 ( 0.00%) } . . /// Where possible, replaces type/const variables in . /// `value` with their final value. Note that region variables . /// are unaffected. 
If a type/const variable has not been unified, it . /// is left as is. This is an idempotent operation that does . /// not affect inference state in any way and so you can do it . /// at will. 5,160 ( 0.00%) pub fn resolve_vars_if_possible(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 223,015 ( 0.00%) if !value.needs_infer() { 147,851 ( 0.00%) return value; // Avoid duplicated subst-folding. . } 235,052 ( 0.00%) let mut r = resolve::OpportunisticVarResolver::new(self); 231,477 ( 0.00%) value.fold_with(&mut r) 6,436 ( 0.00%) } . . /// Returns the first unresolved variable contained in `T`. In the . /// process of visiting `T`, this will resolve (where possible) . /// type variables in `T`, but it never constructs the final, . /// resolved type, so it's more efficient than . /// `resolve_vars_if_possible()`. . pub fn unresolved_type_vars(&self, value: &T) -> Option<(Ty<'tcx>, Option)> . where . T: TypeFoldable<'tcx>, . { . value.visit_with(&mut resolve::UnresolvedTypeFinder::new(self)).break_value() . } . 24 ( 0.00%) pub fn probe_const_var( . &self, . vid: ty::ConstVid<'tcx>, . ) -> Result<&'tcx ty::Const<'tcx>, ty::UniverseIndex> { 84 ( 0.00%) match self.inner.borrow_mut().const_unification_table().probe_value(vid).val { . ConstVariableValue::Known { value } => Ok(value), . ConstVariableValue::Unknown { universe } => Err(universe), . } 84 ( 0.00%) } . . pub fn fully_resolve>(&self, value: T) -> FixupResult<'tcx, T> { . /*! . * Attempts to resolve all type/region/const variables in . * `value`. Region inference must have been run already (e.g., . * by calling `resolve_regions_and_report_errors`). If some . * variable was never unified, an `Err` results. . * -- line 1437 ---------------------------------------- -- line 1490 ---------------------------------------- . expected: &'tcx ty::Const<'tcx>, . actual: &'tcx ty::Const<'tcx>, . err: TypeError<'tcx>, . ) -> DiagnosticBuilder<'tcx> { . let trace = TypeTrace::consts(cause, true, expected, actual); . self.report_and_explain_type_error(trace, &err) . } . 24,417 ( 0.00%) pub fn replace_bound_vars_with_fresh_vars( . &self, . span: Span, . lbrct: LateBoundRegionConversionTime, . value: ty::Binder<'tcx, T>, . ) -> (T, BTreeMap>) . where . T: TypeFoldable<'tcx>, . { . let fld_r = 73,321 ( 0.00%) |br: ty::BoundRegion| self.next_region_var(LateBoundRegion(span, br.kind, lbrct)); . let fld_t = |_| { . self.next_ty_var(TypeVariableOrigin { . kind: TypeVariableOriginKind::MiscVariable, . span, . }) . }; . let fld_c = |_, ty| { . self.next_const_var( . ty, . ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span }, . ) . }; 100,727 ( 0.00%) self.tcx.replace_bound_vars(value, fld_r, fld_t, fld_c) 16,278 ( 0.00%) } . . /// See the [`region_constraints::RegionConstraintCollector::verify_generic_bound`] method. 2,344 ( 0.00%) pub fn verify_generic_bound( . &self, . origin: SubregionOrigin<'tcx>, . kind: GenericKind<'tcx>, . a: ty::Region<'tcx>, . bound: VerifyBound<'tcx>, . ) { . debug!("verify_generic_bound({:?}, {:?} <: {:?})", kind, a, bound); . 879 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() 4,981 ( 0.00%) .verify_generic_bound(origin, kind, a, bound); 2,051 ( 0.00%) } . . /// Obtains the latest type of the given closure; this may be a . /// closure in the current function, in which case its . /// `ClosureKind` may not yet be known. 686 ( 0.00%) pub fn closure_kind(&self, closure_substs: SubstsRef<'tcx>) -> Option { 1,372 ( 0.00%) let closure_kind_ty = closure_substs.as_closure().kind_ty(); . 
let closure_kind_ty = self.shallow_resolve(closure_kind_ty); 1,029 ( 0.00%) closure_kind_ty.to_opt_closure_kind() . } . . /// Clears the selection, evaluation, and projection caches. This is useful when . /// repeatedly attempting to select an `Obligation` while changing only . /// its `ParamEnv`, since `FulfillmentContext` doesn't use probing. . pub fn clear_caches(&self) { . self.selection_cache.clear(); . self.evaluation_cache.clear(); . self.inner.borrow_mut().projection_cache().clear(); . } . . pub fn universe(&self) -> ty::UniverseIndex { 308,043 ( 0.00%) self.universe.get() 118,880 ( 0.00%) } . . /// Creates and return a fresh universe that extends all previous . /// universes. Updates `self.universe` to that new universe. 114 ( 0.00%) pub fn create_next_universe(&self) -> ty::UniverseIndex { 250 ( 0.00%) let u = self.universe.get().next_universe(); . self.universe.set(u); . u 114 ( 0.00%) } . . /// Resolves and evaluates a constant. . /// . /// The constant can be located on a trait like `::C`, in which case the given . /// substitutions and environment are used to resolve the constant. Alternatively if the . /// constant has generic parameters in scope the substitutions are used to evaluate the value of . /// the constant. For example in `fn foo() { let _ = [0; bar::()]; }` the repeat count . /// constant `bar::()` requires a substitution for `T`, if the substitution for `T` is still . /// too generic for the constant to be evaluated then `Err(ErrorHandled::TooGeneric)` is . /// returned. . /// . /// This handles inferences variables within both `param_env` and `substs` by . /// performing the operation on their respective canonical forms. 468 ( 0.00%) pub fn const_eval_resolve( . &self, . param_env: ty::ParamEnv<'tcx>, . unevaluated: ty::Unevaluated<'tcx>, . span: Option, . ) -> EvalToConstValueResult<'tcx> { 36 ( 0.00%) let substs = self.resolve_vars_if_possible(unevaluated.substs); . . // Postpone the evaluation of constants whose substs depend on inference . // variables . if substs.has_infer_types_or_consts() { . return Err(ErrorHandled::TooGeneric); . } . 36 ( 0.00%) let param_env_erased = self.tcx.erase_regions(param_env); . let substs_erased = self.tcx.erase_regions(substs); . . let unevaluated = ty::Unevaluated { . def: unevaluated.def, . substs: substs_erased, 72 ( 0.00%) promoted: unevaluated.promoted, . }; . . // The return value is the evaluated value which doesn't contain any reference to inference . // variables, thus we don't need to substitute back the original values. 432 ( 0.00%) self.tcx.const_eval_resolve(param_env_erased, unevaluated, span) 324 ( 0.00%) } . . /// If `typ` is a type variable of some kind, resolve it one level . /// (but do not resolve types found in the result). If `typ` is . /// not a type variable, just return it unmodified. . // FIXME(eddyb) inline into `ShallowResolver::visit_ty`. 2,428,808 ( 0.03%) fn shallow_resolve_ty(&self, typ: Ty<'tcx>) -> Ty<'tcx> { 1,444,945 ( 0.02%) match *typ.kind() { . ty::Infer(ty::TyVar(v)) => { . // Not entirely obvious: if `typ` is a type variable, . // it can be resolved to an int/float variable, which . // can then be recursively resolved, hence the . // recursion. Note though that we prevent type . // variables from unifying to other type variables . // directly (though they may be embedded . // structurally), and we prevent cycles in any case, . // so this recursion should always be of very limited . // depth. . // . // Note: if these two lines are combined into one we get . 
// dynamic borrow errors on `self.inner`. 716,092 ( 0.01%) let known = self.inner.borrow_mut().type_variables().probe(v).known(); . known.map_or(typ, |t| self.shallow_resolve_ty(t)) . } . 106,312 ( 0.00%) ty::Infer(ty::IntVar(v)) => self . .inner . .borrow_mut() . .int_unification_table() . .probe_value(v) 38,835 ( 0.00%) .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . ty::Infer(ty::FloatVar(v)) => self . .inner . .borrow_mut() . .float_unification_table() . .probe_value(v) . .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . _ => typ, . } 2,732,409 ( 0.03%) } . . /// `ty_or_const_infer_var_changed` is equivalent to one of these two: . /// * `shallow_resolve(ty) != ty` (where `ty.kind = ty::Infer(_)`) . /// * `shallow_resolve(ct) != ct` (where `ct.kind = ty::ConstKind::Infer(_)`) . /// . /// However, `ty_or_const_infer_var_changed` is more efficient. It's always . /// inlined, despite being large, because it has only two call sites that . /// are extremely hot (both in `traits::fulfill`'s checking of `stalled_on` -- line 1659 ---------------------------------------- -- line 1662 ---------------------------------------- . #[inline(always)] . pub fn ty_or_const_infer_var_changed(&self, infer_var: TyOrConstInferVar<'tcx>) -> bool { . match infer_var { . TyOrConstInferVar::Ty(v) => { . use self::type_variable::TypeVariableValue; . . // If `inlined_probe` returns a `Known` value, it never equals . // `ty::Infer(ty::TyVar(v))`. 354,199 ( 0.00%) match self.inner.borrow_mut().type_variables().inlined_probe(v) { . TypeVariableValue::Unknown { .. } => false, . TypeVariableValue::Known { .. } => true, . } . } . . TyOrConstInferVar::TyInt(v) => { . // If `inlined_probe_value` returns a value it's always a . // `ty::Int(_)` or `ty::UInt(_)`, which never matches a . // `ty::Infer(_)`. 17,142 ( 0.00%) self.inner.borrow_mut().int_unification_table().inlined_probe_value(v).is_some() . } . . TyOrConstInferVar::TyFloat(v) => { . // If `probe_value` returns a value it's always a . // `ty::Float(_)`, which never matches a `ty::Infer(_)`. . // . // Not `inlined_probe_value(v)` because this call site is colder. . self.inner.borrow_mut().float_unification_table().probe_value(v).is_some() . } . . TyOrConstInferVar::Const(v) => { . // If `probe_value` returns a `Known` value, it never equals . // `ty::ConstKind::Infer(ty::InferConst::Var(v))`. . // . // Not `inlined_probe_value(v)` because this call site is colder. 700 ( 0.00%) match self.inner.borrow_mut().const_unification_table().probe_value(v).val { . ConstVariableValue::Unknown { .. } => false, . ConstVariableValue::Known { .. } => true, . } . } . } . } . } . -- line 1704 ---------------------------------------- -- line 1716 ---------------------------------------- . /// Equivalent to `ty::ConstKind::Infer(ty::InferConst::Var(_))`. . Const(ConstVid<'tcx>), . } . . impl<'tcx> TyOrConstInferVar<'tcx> { . /// Tries to extract an inference variable from a type or a constant, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`) and . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). 13,915 ( 0.00%) pub fn maybe_from_generic_arg(arg: GenericArg<'tcx>) -> Option { . match arg.unpack() { . GenericArgKind::Type(ty) => Self::maybe_from_ty(ty), . GenericArgKind::Const(ct) => Self::maybe_from_const(ct), . GenericArgKind::Lifetime(_) => None, . } 13,915 ( 0.00%) } . . /// Tries to extract an inference variable from a type, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`). 
130 ( 0.00%) pub fn maybe_from_ty(ty: Ty<'tcx>) -> Option { 125,335 ( 0.00%) match *ty.kind() { 27,076 ( 0.00%) ty::Infer(ty::TyVar(v)) => Some(TyOrConstInferVar::Ty(v)), 994 ( 0.00%) ty::Infer(ty::IntVar(v)) => Some(TyOrConstInferVar::TyInt(v)), . ty::Infer(ty::FloatVar(v)) => Some(TyOrConstInferVar::TyFloat(v)), . _ => None, . } 130 ( 0.00%) } . . /// Tries to extract an inference variable from a constant, returns `None` . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). . pub fn maybe_from_const(ct: &'tcx ty::Const<'tcx>) -> Option { 12 ( 0.00%) match ct.val { 6 ( 0.00%) ty::ConstKind::Infer(InferConst::Var(v)) => Some(TyOrConstInferVar::Const(v)), . _ => None, . } . } . } . . struct ShallowResolver<'a, 'tcx> { . infcx: &'a InferCtxt<'a, 'tcx>, . } . . impl<'a, 'tcx> TypeFolder<'tcx> for ShallowResolver<'a, 'tcx> { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { . self.infcx.tcx . } . . fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { 439,681 ( 0.01%) self.infcx.shallow_resolve_ty(ty) . } . 13,648 ( 0.00%) fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { 13,178 ( 0.00%) if let ty::Const { val: ty::ConstKind::Infer(InferConst::Var(vid)), .. } = ct { 15,885 ( 0.00%) self.infcx . .inner . .borrow_mut() . .const_unification_table() 9,531 ( 0.00%) .probe_value(*vid) . .val . .known() . .unwrap_or(ct) . } else { . ct . } 17,060 ( 0.00%) } . } . . impl<'tcx> TypeTrace<'tcx> { . pub fn span(&self) -> Span { 38 ( 0.00%) self.cause.span . } . . pub fn types( . cause: &ObligationCause<'tcx>, . a_is_expected: bool, . a: Ty<'tcx>, . b: Ty<'tcx>, . ) -> TypeTrace<'tcx> { -- line 1792 ---------------------------------------- -- line 1800 ---------------------------------------- . b: &'tcx ty::Const<'tcx>, . ) -> TypeTrace<'tcx> { . TypeTrace { cause: cause.clone(), values: Consts(ExpectedFound::new(a_is_expected, a, b)) } . } . } . . impl<'tcx> SubregionOrigin<'tcx> { . pub fn span(&self) -> Span { 95 ( 0.00%) match *self { 19 ( 0.00%) Subtype(ref a) => a.span(), . RelateObjectBound(a) => a, . RelateParamBound(a, ..) => a, . RelateRegionParamBound(a) => a, . Reborrow(a) => a, . ReborrowUpvar(a, _) => a, . DataBorrowed(_, a) => a, . ReferenceOutlivesReferent(_, a) => a, . CompareImplMethodObligation { span, .. } => span, -- line 1817 ---------------------------------------- -- line 1818 ---------------------------------------- . CompareImplTypeObligation { span, .. } => span, . } . } . . pub fn from_obligation_cause(cause: &traits::ObligationCause<'tcx>, default: F) -> Self . where . F: FnOnce() -> Self, . { 14,474 ( 0.00%) match *cause.code() { 4,085 ( 0.00%) traits::ObligationCauseCode::ReferenceOutlivesReferent(ref_type) => { 20,425 ( 0.00%) SubregionOrigin::ReferenceOutlivesReferent(ref_type, cause.span) . } . . traits::ObligationCauseCode::CompareImplMethodObligation { . impl_item_def_id, . trait_item_def_id, . } => SubregionOrigin::CompareImplMethodObligation { . span: cause.span, . impl_item_def_id, -- line 1836 ---------------------------------------- 2,318,111 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lev_distance.rs -------------------------------------------------------------------------------- Ir -- line 8 ---------------------------------------- . use std::cmp; . . #[cfg(test)] . mod tests; . . /// Finds the Levenshtein distance between two strings. . /// . 
/// Returns None if the distance exceeds the limit. 87,780 ( 0.00%) pub fn lev_distance(a: &str, b: &str, limit: usize) -> Option { . let n = a.chars().count(); . let m = b.chars().count(); 52,668 ( 0.00%) let min_dist = if n < m { m - n } else { n - m }; . 17,556 ( 0.00%) if min_dist > limit { . return None; . } 24,396 ( 0.00%) if n == 0 || m == 0 { . return (min_dist <= limit).then_some(min_dist); . } . . let mut dcol: Vec<_> = (0..=m).collect(); . 256,158 ( 0.00%) for (i, sc) in a.chars().enumerate() { . let mut current = i; 640,395 ( 0.01%) dcol[0] = current + 1; . . for (j, tc) in b.chars().enumerate() { 2,628,612 ( 0.03%) let next = dcol[j + 1]; 5,257,224 ( 0.07%) if sc == tc { . dcol[j + 1] = current; . } else { 2,448,663 ( 0.03%) dcol[j + 1] = cmp::min(current, next); 4,897,326 ( 0.06%) dcol[j + 1] = cmp::min(dcol[j + 1], dcol[j]) + 1; . } . current = next; . } . } . 24,396 ( 0.00%) (dcol[m] <= limit).then_some(dcol[m]) 79,002 ( 0.00%) } . . /// Finds the best match for a given word in the given iterator. . /// . /// As a loose rule to avoid the obviously incorrect suggestions, it takes . /// an optional limit for the maximum allowable edit distance, which defaults . /// to one-third of the given word. . /// . /// Besides Levenshtein, we use case insensitive comparison to improve accuracy . /// on an edge case with a lower(upper)case letters mismatch. . #[cold] 741 ( 0.00%) pub fn find_best_match_for_name( . candidates: &[Symbol], . lookup: Symbol, . dist: Option, . ) -> Option { 171 ( 0.00%) let lookup = lookup.as_str(); 342 ( 0.00%) let lookup_uppercase = lookup.to_uppercase(); . . // Priority of matches: . // 1. Exact case insensitive match . // 2. Levenshtein distance match . // 3. Sorted word match 43,890 ( 0.00%) if let Some(c) = candidates.iter().find(|c| c.as_str().to_uppercase() == lookup_uppercase) { . return Some(*c); . } . 171 ( 0.00%) let mut dist = dist.unwrap_or_else(|| cmp::max(lookup.len(), 3) / 3); . let mut best = None; . for c in candidates { 61,674 ( 0.00%) match lev_distance(lookup, c.as_str(), dist) { . Some(0) => return Some(*c), . Some(d) => { . dist = d - 1; . best = Some(*c); . } . None => {} . } . } 57 ( 0.00%) if best.is_some() { . return best; . } . . find_match_by_sorted_words(candidates, lookup) 513 ( 0.00%) } . . fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option { . iter_names.iter().fold(None, |result, candidate| { 114,513 ( 0.00%) if sort_by_words(candidate.as_str()) == sort_by_words(lookup) { . Some(*candidate) . } else { . result . } . }) . } . 87,780 ( 0.00%) fn sort_by_words(name: &str) -> String { . let mut split_words: Vec<&str> = name.split('_').collect(); . // We are sorting primitive &strs and can use unstable sort here. . split_words.sort_unstable(); . split_words.join("_") 87,780 ( 0.00%) } 2,713,770 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs -------------------------------------------------------------------------------- Ir -- line 111 ---------------------------------------- . const EMPTY: u8 = 0b1111_1111; . . /// Control byte value for a deleted bucket. . const DELETED: u8 = 0b1000_0000; . . /// Checks whether a control byte represents a full bucket (top bit is clear). . #[inline] . fn is_full(ctrl: u8) -> bool { 2,228,792 ( 0.03%) ctrl & 0x80 == 0 . } . . /// Checks whether a control byte represents a special value (top bit is set). . 
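--------------------------------------------------------------------------------
-- Sketch: the control-byte encoding checked by `is_full` and friends
--------------------------------------------------------------------------------
// Each bucket's control byte is either EMPTY (all ones), DELETED (only the
// top bit set), or the 7-bit `h2` tag of a full bucket (top bit clear). A
// quick check of the bit tests used by `is_full`, `is_special`, and
// `special_is_empty` in this section:
fn main() {
    const EMPTY: u8 = 0b1111_1111;
    const DELETED: u8 = 0b1000_0000;
    let full: u8 = 0x39; // an arbitrary h2 tag; top bit clear

    assert!(full & 0x80 == 0); // is_full
    assert!(EMPTY & 0x80 != 0); // is_special
    assert!(DELETED & 0x80 != 0); // is_special (a tombstone)
    assert!(EMPTY & 0x01 != 0); // special_is_empty: EMPTY, not DELETED
    assert!(DELETED & 0x01 == 0); // DELETED is special but not empty
}
--------------------------------------------------------------------------------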
#[inline] . fn is_special(ctrl: u8) -> bool { . ctrl & 0x80 != 0 . } . . /// Checks whether a special control value is EMPTY (just check 1 bit). . #[inline] . fn special_is_empty(ctrl: u8) -> bool { . debug_assert!(is_special(ctrl)); 164,342 ( 0.00%) ctrl & 0x01 != 0 . } . . /// Primary hash function, used to select the initial bucket to probe from. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h1(hash: u64) -> usize { . // On 32-bit platforms we simply ignore the higher hash bits. . hash as usize -- line 140 ---------------------------------------- -- line 143 ---------------------------------------- . /// Secondary hash function, saved in the low 7 bits of the control byte. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h2(hash: u64) -> u8 { . // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit . // value, some hash functions (such as FxHash) produce a usize result . // instead, which means that the top 32 bits are 0 on 32-bit platforms. . let hash_len = usize::min(mem::size_of::(), mem::size_of::()); 40,415,234 ( 0.50%) let top7 = hash >> (hash_len * 8 - 7); . (top7 & 0x7f) as u8 // truncation . } . . /// Probe sequence based on triangular numbers, which is guaranteed (since our . /// table size is a power of two) to visit every group of elements exactly once. . /// . /// A triangular probe has us jump by 1 more group every time. So first we . /// jump by 1 group (meaning we just continue our linear scan), then 2 groups -- line 159 ---------------------------------------- -- line 170 ---------------------------------------- . #[inline] . fn move_next(&mut self, bucket_mask: usize) { . // We should have found an empty bucket by now and ended the probe. . debug_assert!( . self.stride <= bucket_mask, . "Went past end of probe sequence" . ); . 291,226 ( 0.00%) self.stride += Group::WIDTH; 291,226 ( 0.00%) self.pos += self.stride; 247,295 ( 0.00%) self.pos &= bucket_mask; . } . } . . /// Returns the number of buckets needed to hold the given number of items, . /// taking the maximum load factor into account. . /// . /// Returns `None` if an overflow occurs. . // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258 . #[cfg_attr(target_os = "emscripten", inline(never))] . #[cfg_attr(not(target_os = "emscripten"), inline)] . fn capacity_to_buckets(cap: usize) -> Option { . debug_assert_ne!(cap, 0); . . // For small tables we require at least 1 empty bucket so that lookups are . // guaranteed to terminate if an element doesn't exist in the table. 151,654 ( 0.00%) if cap < 8 { . // We don't bother with a table size of 2 buckets since that can only . // hold a single element. Instead we skip directly to a 4 bucket table . // which can hold 3 elements. 328,550 ( 0.00%) return Some(if cap < 4 { 4 } else { 8 }); . } . . // Otherwise require 1/8 buckets to be empty (87.5% load) . // . // Be careful when modifying this, calculate_layout relies on the . // overflow check here. 60,702 ( 0.00%) let adjusted_cap = cap.checked_mul(8)? / 7; . . // Any overflows will have been caught by the checked_mul. Also, any . // rounding errors from the division above will be cleaned up by . // next_power_of_two (which can't overflow because of the previous division). . Some(adjusted_cap.next_power_of_two()) . } . . /// Returns the maximum effective capacity for the given bucket mask, taking . /// the maximum load factor into account. . #[inline] . fn bucket_mask_to_capacity(bucket_mask: usize) -> usize { 392,667 ( 0.00%) if bucket_mask < 8 { . 
.  /// Returns the number of buckets needed to hold the given number of items,
.  /// taking the maximum load factor into account.
.  ///
.  /// Returns `None` if an overflow occurs.
.  // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258
.  #[cfg_attr(target_os = "emscripten", inline(never))]
.  #[cfg_attr(not(target_os = "emscripten"), inline)]
.  fn capacity_to_buckets(cap: usize) -> Option<usize> {
.      debug_assert_ne!(cap, 0);
.
.      // For small tables we require at least 1 empty bucket so that lookups are
.      // guaranteed to terminate if an element doesn't exist in the table.
151,654 ( 0.00%)      if cap < 8 {
.          // We don't bother with a table size of 2 buckets since that can only
.          // hold a single element. Instead we skip directly to a 4 bucket table
.          // which can hold 3 elements.
328,550 ( 0.00%)          return Some(if cap < 4 { 4 } else { 8 });
.      }
.
.      // Otherwise require 1/8 buckets to be empty (87.5% load)
.      //
.      // Be careful when modifying this, calculate_layout relies on the
.      // overflow check here.
60,702 ( 0.00%)      let adjusted_cap = cap.checked_mul(8)? / 7;
.
.      // Any overflows will have been caught by the checked_mul. Also, any
.      // rounding errors from the division above will be cleaned up by
.      // next_power_of_two (which can't overflow because of the previous division).
.      Some(adjusted_cap.next_power_of_two())
.  }
.
.  /// Returns the maximum effective capacity for the given bucket mask, taking
.  /// the maximum load factor into account.
.  #[inline]
.  fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
392,667 ( 0.00%)      if bucket_mask < 8 {
.          // For tables with 1/2/4/8 buckets, we always reserve one empty slot.
.          // Keep in mind that the bucket mask is one less than the bucket count.
.          bucket_mask
.      } else {
.          // For larger tables we reserve 12.5% of the slots as empty.
82,524 ( 0.00%)          ((bucket_mask + 1) / 8) * 7
.      }
.  }
.
.  /// Helper which allows the max calculation for ctrl_align to be statically computed for each T
.  /// while keeping the rest of `calculate_layout_for` independent of `T`
.  #[derive(Copy, Clone)]
.  struct TableLayout {
.      size: usize,
-- line 233 ----------------------------------------
-- line 246 ----------------------------------------
.
.      #[inline]
.      fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> {
.          debug_assert!(buckets.is_power_of_two());
.
.          let TableLayout { size, ctrl_align } = self;
.          // Manual layout calculation since Layout methods are not yet stable.
.          let ctrl_offset =
304,593 ( 0.00%)              size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1);
411,350 ( 0.01%)          let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?;
.
.          Some((
.              unsafe { Layout::from_size_align_unchecked(len, ctrl_align) },
.              ctrl_offset,
.          ))
.      }
.  }
.
-- line 263 ----------------------------------------
-- line 337 ----------------------------------------
.          }
.      }
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub unsafe fn drop(&self) {
.          self.as_ptr().drop_in_place();
.      }
.      #[inline]
.      pub unsafe fn read(&self) -> T {
1,014 ( 0.00%)          self.as_ptr().read()
.      }
.      #[inline]
.      pub unsafe fn write(&self, val: T) {
.          self.as_ptr().write(val);
.      }
.      #[inline]
.      pub unsafe fn as_ref<'a>(&self) -> &'a T {
.          &*self.as_ptr()
-- line 353 ----------------------------------------
-- line 422 ----------------------------------------
.      /// Creates a new empty hash table without allocating any memory, using the
.      /// given allocator.
.      ///
.      /// In effect this returns a table with exactly 1 bucket. However we can
.      /// leave the data pointer dangling since that bucket is never written to
.      /// due to our load factor forcing us to always have at least 1 free bucket.
.      #[inline]
.      pub fn new_in(alloc: A) -> Self {
1,593 ( 0.00%)          Self {
.              table: RawTableInner::new_in(alloc),
.              marker: PhantomData,
.          }
.      }
.
.      /// Allocates a new hash table with the given number of buckets.
.      ///
.      /// The control bytes are left uninitialized.
-- line 438 ----------------------------------------
-- line 440 ----------------------------------------
.      unsafe fn new_uninitialized(
.          alloc: A,
.          buckets: usize,
.          fallibility: Fallibility,
.      ) -> Result<Self, TryReserveError> {
.          debug_assert!(buckets.is_power_of_two());
.
.          Ok(Self {
265 ( 0.00%)              table: RawTableInner::new_uninitialized(
.                  alloc,
.                  TableLayout::new::<T>(),
.                  buckets,
.                  fallibility,
.              )?,
.              marker: PhantomData,
.          })
.      }
-- line 456 ----------------------------------------
-- line 458 ----------------------------------------
.      /// Attempts to allocate a new hash table with at least enough capacity
.      /// for inserting the given number of elements without reallocating.
.      fn fallible_with_capacity(
.          alloc: A,
.          capacity: usize,
.          fallibility: Fallibility,
.      ) -> Result<Self, TryReserveError> {
.          Ok(Self {
19,327 ( 0.00%)              table: RawTableInner::fallible_with_capacity(
.                  alloc,
.                  TableLayout::new::<T>(),
.                  capacity,
.                  fallibility,
.              )?,
.              marker: PhantomData,
.          })
.      }
-- line 474 ----------------------------------------
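The two load-factor helpers annotated above invert each other, modulo rounding up to a power of two. A worked example (the helper bodies are copied from the listing purely for illustration): a requested capacity of 57 needs 57 * 8 / 7 = 65 slots at 87.5% load, which rounds up to 128 buckets, and a 128-bucket table in turn advertises (128 / 8) * 7 = 112 usable slots.

// Round-trip check of the 87.5% load-factor arithmetic.
fn capacity_to_buckets(cap: usize) -> Option<usize> {
    if cap < 8 {
        return Some(if cap < 4 { 4 } else { 8 });
    }
    let adjusted_cap = cap.checked_mul(8)? / 7;
    Some(adjusted_cap.next_power_of_two())
}

fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
    if bucket_mask < 8 {
        bucket_mask // tiny tables always keep one slot free
    } else {
        ((bucket_mask + 1) / 8) * 7
    }
}

fn main() {
    let buckets = capacity_to_buckets(57).unwrap();
    assert_eq!(buckets, 128);
    // The round trip never loses requested capacity:
    assert!(bucket_mask_to_capacity(buckets - 1) >= 57);
    assert_eq!(bucket_mask_to_capacity(127), 112);
}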
-- line 527 ----------------------------------------
.          debug_assert_ne!(self.table.bucket_mask, 0);
.          debug_assert!(index < self.buckets());
.          Bucket::from_base_index(self.data_end(), index)
.      }
.
.      /// Erases an element from the table without dropping it.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[deprecated(since = "0.8.1", note = "use erase or remove instead")]
30,319 ( 0.00%)      pub unsafe fn erase_no_drop(&mut self, item: &Bucket<T>) {
30,319 ( 0.00%)          let index = self.bucket_index(item);
.          self.table.erase(index);
60,638 ( 0.00%)      }
.
.      /// Erases an element from the table, dropping it in place.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[allow(clippy::needless_pass_by_value)]
.      #[allow(deprecated)]
.      pub unsafe fn erase(&mut self, item: Bucket<T>) {
.          // Erase the element from the table first since drop might panic.
28,076 ( 0.00%)          self.erase_no_drop(&item);
.          item.drop();
.      }
.
.      /// Finds and erases an element from the table, dropping it in place.
.      /// Returns true if an element was found.
.      #[cfg(feature = "raw")]
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool {
-- line 554 ----------------------------------------
-- line 563 ----------------------------------------
.          }
.      }
.
.      /// Removes an element from the table, returning it.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[allow(clippy::needless_pass_by_value)]
.      #[allow(deprecated)]
.      pub unsafe fn remove(&mut self, item: Bucket<T>) -> T {
32,562 ( 0.00%)          self.erase_no_drop(&item);
58 ( 0.00%)          item.read()
.      }
.
.      /// Finds and removes an element from the table, returning it.
.      #[cfg_attr(feature = "inline-more", inline)]
797,018 ( 0.01%)      pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<T> {
.          // Avoid `Option::map` because it bloats LLVM IR.
2,500 ( 0.00%)          match self.find(hash, eq) {
9,436 ( 0.00%)              Some(bucket) => Some(unsafe { self.remove(bucket) }),
177,189 ( 0.00%)              None => None,
.          }
1,089,759 ( 0.01%)      }
.
.      /// Marks all table buckets as empty without dropping their contents.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn clear_no_drop(&mut self) {
.          self.table.clear_no_drop();
.      }
.
.      /// Removes all elements from the table without freeing the backing memory.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn clear(&mut self) {
.          // Ensure that the table is reset even if one of the drops panics.
.          let mut self_ = guard(self, |self_| self_.clear_no_drop());
.          unsafe {
1 ( 0.00%)              self_.drop_elements();
.          }
.      }
.
7 ( 0.00%)      unsafe fn drop_elements(&mut self) {
21,146 ( 0.00%)          if mem::needs_drop::<T>() && !self.is_empty() {
.              for item in self.iter() {
.                  item.drop();
.              }
.          }
8 ( 0.00%)      }
.
.      /// Shrinks the table to fit `max(self.len(), min_size)` elements.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) {
.          // Calculate the minimal number of elements that we need to reserve
.          // space for.
.          let min_size = usize::max(self.table.items, min_size);
.          if min_size == 0 {
-- line 615 ----------------------------------------
-- line 642 ----------------------------------------
.              }
.          }
.      }
.
.      /// Ensures that at least `additional` items can be inserted into the table
.      /// without reallocation.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) {
447,208 ( 0.01%)          if additional > self.table.growth_left {
.              // Avoid `Result::unwrap_or_else` because it bloats LLVM IR.
280,795 ( 0.00%)              if self
.                  .reserve_rehash(additional, hasher, Fallibility::Infallible)
.                  .is_err()
.              {
.                  unsafe { hint::unreachable_unchecked() }
.              }
.          }
.      }
.
-- line 660 ----------------------------------------
-- line 671 ----------------------------------------
.          } else {
.              Ok(())
.          }
.      }
.
.      /// Out-of-line slow path for `reserve` and `try_reserve`.
.      #[cold]
.      #[inline(never)]
595,034 ( 0.01%)      fn reserve_rehash(
.          &mut self,
.          additional: usize,
.          hasher: impl Fn(&T) -> u64,
.          fallibility: Fallibility,
.      ) -> Result<(), TryReserveError> {
.          unsafe {
.              self.table.reserve_rehash_inner(
.                  additional,
-- line 687 ----------------------------------------
-- line 690 ----------------------------------------
.                  TableLayout::new::<T>(),
.                  if mem::needs_drop::<T>() {
.                      Some(mem::transmute(ptr::drop_in_place::<T> as unsafe fn(*mut T)))
.                  } else {
.                      None
.                  },
.              )
.          }
470,640 ( 0.01%)      }
.
.      /// Allocates a new table of a different size and moves the contents of the
.      /// current table into it.
.      fn resize(
.          &mut self,
.          capacity: usize,
.          hasher: impl Fn(&T) -> u64,
.          fallibility: Fallibility,
-- line 706 ----------------------------------------
-- line 714 ----------------------------------------
.              )
.          }
.      }
.
.      /// Inserts a new element into the table, and returns its raw bucket.
.      ///
.      /// This does not check if the given element already exists in the table.
.      #[cfg_attr(feature = "inline-more", inline)]
2,862,350 ( 0.04%)      pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket<T> {
.          unsafe {
.              let mut index = self.table.find_insert_slot(hash);
.
.              // We can avoid growing the table once we have reached our load
.              // factor if we are replacing a tombstone. This works since the
.              // number of EMPTY slots does not change in this case.
4,802 ( 0.00%)              let old_ctrl = *self.table.ctrl(index);
2,166,166 ( 0.03%)              if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) {
.                  self.reserve(1, hasher);
.                  index = self.table.find_insert_slot(hash);
.              }
.
.              self.table.record_item_insert_at(index, old_ctrl, hash);
.
.              let bucket = self.bucket(index);
2 ( 0.00%)              bucket.write(value);
.              bucket
.          }
2,103,399 ( 0.03%)      }
.
.      /// Attempts to insert a new element without growing the table and returns its raw bucket.
.      ///
.      /// Returns an `Err` containing the given element if inserting it would require growing the
.      /// table.
.      ///
.      /// This does not check if the given element already exists in the table.
.      #[cfg(feature = "raw")]
-- line 749 ----------------------------------------
-- line 760 ----------------------------------------
.              }
.          }
.      }
.
.      /// Inserts a new element into the table, and returns a mutable reference to it.
.      ///
.      /// This does not check if the given element already exists in the table.
.      #[cfg_attr(feature = "inline-more", inline)]
1,150,680 ( 0.01%)      pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T {
78 ( 0.00%)          unsafe { self.insert(hash, value, hasher).as_mut() }
863,010 ( 0.01%)      }
.
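The tombstone optimization in `insert()` above hinges on one line: `growth_left` is only decremented when the new element consumes an EMPTY slot, because overwriting a DELETED slot leaves the number of EMPTY probe terminators unchanged. A minimal model of that bookkeeping (an illustrative enum-based table, not the real control-byte layout):

// Minimal model of the growth_left accounting used by insert().
#[derive(Clone, Copy, PartialEq)]
enum Slot { Empty, Deleted, Full }

struct Table {
    slots: Vec<Slot>,
    growth_left: usize,
}

impl Table {
    fn insert_at(&mut self, index: usize) {
        let old = self.slots[index];
        // Mirrors `growth_left -= special_is_empty(old_ctrl) as usize`:
        // only a real EMPTY slot costs growth budget.
        self.growth_left -= (old == Slot::Empty) as usize;
        self.slots[index] = Slot::Full;
    }
}

fn main() {
    let mut t = Table { slots: vec![Slot::Empty, Slot::Deleted], growth_left: 1 };
    t.insert_at(1); // reuses a tombstone: budget untouched, no resize needed
    assert_eq!(t.growth_left, 1);
    t.insert_at(0); // consumes a real EMPTY slot
    assert_eq!(t.growth_left, 0);
}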
.      /// Inserts a new element into the table, without growing the table.
.      ///
.      /// There must be enough space in the table to insert the new element.
.      ///
.      /// This does not check if the given element already exists in the table.
.      #[cfg_attr(feature = "inline-more", inline)]
.      #[cfg(any(feature = "raw", feature = "rustc-internal-api"))]
1,134 ( 0.00%)      pub unsafe fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket<T> {
463,193 ( 0.01%)          let (index, old_ctrl) = self.table.prepare_insert_slot(hash);
33,442 ( 0.00%)          let bucket = self.table.bucket(index);
.
.          // If we are replacing a DELETED entry then we don't need to update
.          // the load counter.
940,584 ( 0.01%)          self.table.growth_left -= special_is_empty(old_ctrl) as usize;
.
.          bucket.write(value);
770,864 ( 0.01%)          self.table.items += 1;
.          bucket
2,259 ( 0.00%)      }
.
.      /// Temporarily removes a bucket, applying the given function to the removed
.      /// element and optionally putting the returned value back in the same bucket.
.      ///
.      /// Returns `true` if the bucket still contains an element.
.      ///
.      /// This does not check if the given bucket is actually occupied.
.      #[cfg_attr(feature = "inline-more", inline)]
-- line 798 ----------------------------------------
-- line 813 ----------------------------------------
.              true
.          } else {
.              false
.          }
.      }
.
.      /// Searches for an element in the table.
.      #[inline]
40,633 ( 0.00%)      pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option<Bucket<T>> {
25,871 ( 0.00%)          let result = self.table.find_inner(hash, &mut |index| unsafe {
25,688 ( 0.00%)              eq(self.bucket(index).as_ref())
10,780 ( 0.00%)          });
.
.          // Avoid `Option::map` because it bloats LLVM IR.
.          match result {
3,704 ( 0.00%)              Some(index) => Some(unsafe { self.bucket(index) }),
.              None => None,
.          }
46,024 ( 0.00%)      }
.
.      /// Gets a reference to an element in the table.
.      #[inline]
.      pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> {
.          // Avoid `Option::map` because it bloats LLVM IR.
27,086 ( 0.00%)          match self.find(hash, eq) {
.              Some(bucket) => Some(unsafe { bucket.as_ref() }),
.              None => None,
.          }
.      }
.
.      /// Gets a mutable reference to an element in the table.
.      #[inline]
1,855 ( 0.00%)      pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> {
.          // Avoid `Option::map` because it bloats LLVM IR.
21,750 ( 0.00%)          match self.find(hash, eq) {
.              Some(bucket) => Some(unsafe { bucket.as_mut() }),
.              None => None,
.          }
2,120 ( 0.00%)      }
.
.      /// Attempts to get mutable references to `N` entries in the table at once.
.      ///
.      /// Returns an array of length `N` with the results of each query.
.      ///
.      /// At most one mutable reference will be returned to any entry. `None` will be returned if any
.      /// of the hashes are duplicates. `None` will be returned if the hash is not found.
.      ///
-- line 859 ----------------------------------------
-- line 920 ----------------------------------------
.      #[inline]
.      pub fn len(&self) -> usize {
.          self.table.items
.      }
.
.      /// Returns `true` if the table contains no elements.
.      #[inline]
.      pub fn is_empty(&self) -> bool {
1,103,876 ( 0.01%)          self.len() == 0
.      }
.
.      /// Returns the number of buckets in the table.
.      #[inline]
.      pub fn buckets(&self) -> usize {
.          self.table.bucket_mask + 1
.      }
.
.      /// Returns an iterator over every element in the table. It is up to
.      /// the caller to ensure that the `RawTable` outlives the `RawIter`.
.      /// Because we cannot make the `next` method unsafe on the `RawIter`
.      /// struct, we have to make the `iter` method unsafe.
.      #[inline]
.      pub unsafe fn iter(&self) -> RawIter<T> {
1 ( 0.00%)          let data = Bucket::from_base_index(self.data_end(), 0);
.          RawIter {
.              iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()),
139,388 ( 0.00%)              items: self.table.items,
.          }
.      }
.
.      /// Returns an iterator over occupied buckets that could match a given hash.
.      ///
.      /// `RawTable` only stores 7 bits of the hash value, so this iterator may
.      /// return items that have a hash value different than the one provided. You
.      /// should always validate the returned values before using them.
-- line 954 ----------------------------------------
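Since the control bytes keep only `h2`, the top 7 bits of the hash (see the note above about possible false matches), two distinct 64-bit hashes can produce the same control byte, and every candidate must be confirmed with the real equality check. A small demonstration, with `h2` re-derived from the annotated source under the assumption of a 64-bit platform:

// Two different hashes that collide in the 7-bit control byte.
fn h2(hash: u64) -> u8 {
    // Top 7 bits of the hash, as in the annotated source (64-bit case).
    ((hash >> (64 - 7)) & 0x7f) as u8
}

fn main() {
    let a = 0xFE00_0000_0000_0001_u64;
    let b = 0xFE00_0000_0000_0002_u64; // differs from `a`, same top 7 bits
    assert_ne!(a, b);
    // A control-byte "match" that the eq() callback must still reject:
    assert_eq!(h2(a), h2(b));
}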
-- line 995 ----------------------------------------
.      /// Iteration starts at the provided iterator's current location.
.      ///
.      /// It is up to the caller to ensure that the iterator is valid for this
.      /// `RawTable` and covers all items that remain in the table.
.      pub unsafe fn into_iter_from(self, iter: RawIter<T>) -> RawIntoIter<T, A> {
.          debug_assert_eq!(iter.len(), self.len());
.
.          let alloc = self.table.alloc.clone();
7,648 ( 0.00%)          let allocation = self.into_allocation();
5,736 ( 0.00%)          RawIntoIter {
9,560 ( 0.00%)              iter,
.              allocation,
.              marker: PhantomData,
.              alloc,
.          }
.      }
.
.      /// Converts the table into a raw allocation. The contents of the table
.      /// should be dropped using a `RawIter` before freeing the allocation.
.      #[cfg_attr(feature = "inline-more", inline)]
.      pub(crate) fn into_allocation(self) -> Option<(NonNull<u8>, Layout)> {
2,614 ( 0.00%)          let alloc = if self.table.is_empty_singleton() {
.              None
.          } else {
.              // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
.              let (layout, ctrl_offset) = match calculate_layout::<T>(self.table.buckets()) {
.                  Some(lco) => lco,
.                  None => unsafe { hint::unreachable_unchecked() },
.              };
.              Some((
362 ( 0.00%)                  unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) },
.                  layout,
.              ))
.          };
.          mem::forget(self);
.          alloc
.      }
.  }
.
-- line 1033 ----------------------------------------
-- line 1042 ----------------------------------------
.      T: Sync,
.      A: Sync,
.  {
.  }
.
.  impl<A: Allocator + Clone> RawTableInner<A> {
.      #[inline]
.      const fn new_in(alloc: A) -> Self {
783,455 ( 0.01%)          Self {
.              // Be careful to cast the entire slice to a raw pointer.
.              ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) },
.              bucket_mask: 0,
.              items: 0,
.              growth_left: 0,
.              alloc,
.          }
.      }
.  }
.
.  impl<A: Allocator + Clone> RawTableInner<A> {
.      #[cfg_attr(feature = "inline-more", inline)]
549,122 ( 0.01%)      unsafe fn new_uninitialized(
.          alloc: A,
.          table_layout: TableLayout,
.          buckets: usize,
.          fallibility: Fallibility,
.      ) -> Result<Self, TryReserveError> {
.          debug_assert!(buckets.is_power_of_two());
.
.          // Avoid `Option::ok_or_else` because it bloats LLVM IR.
-- line 1071 ----------------------------------------
-- line 1078 ----------------------------------------
.          // exceed `isize::MAX`. We can skip this check on 64-bit systems since
.          // such allocations will never succeed anyway.
.          //
.          // This mirrors what Vec does in the standard library.
.          if mem::size_of::<usize>() < 8 && layout.size() > isize::MAX as usize {
.              return Err(fallibility.capacity_overflow());
.          }
.
126,000 ( 0.00%)          let ptr: NonNull<u8> = match do_alloc(&alloc, layout) {
.              Ok(block) => block.cast(),
.              Err(_) => return Err(fallibility.alloc_err(layout)),
.          };
.
.          let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset));
302,244 ( 0.00%)          Ok(Self {
.              ctrl,
126,254 ( 0.00%)              bucket_mask: buckets - 1,
.              items: 0,
.              growth_left: bucket_mask_to_capacity(buckets - 1),
.              alloc,
.          })
399,664 ( 0.00%)      }
.
.      #[inline]
45,157 ( 0.00%)      fn fallible_with_capacity(
.          alloc: A,
.          table_layout: TableLayout,
.          capacity: usize,
.          fallibility: Fallibility,
.      ) -> Result<Self, TryReserveError> {
11,752 ( 0.00%)          if capacity == 0 {
12,307 ( 0.00%)              Ok(Self::new_in(alloc))
.          } else {
.              unsafe {
.                  let buckets =
.                      capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?;
.
249,751 ( 0.00%)                  let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?;
.                  result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes());
.
19,748 ( 0.00%)                  Ok(result)
.              }
.          }
45,157 ( 0.00%)      }
.
.      /// Searches for an empty or deleted bucket which is suitable for inserting
.      /// a new element and sets the hash for that slot.
.      ///
.      /// There must be at least 1 empty bucket in the table.
.      #[inline]
163,216 ( 0.00%)      unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) {
.          let index = self.find_insert_slot(hash);
163,216 ( 0.00%)          let old_ctrl = *self.ctrl(index);
.          self.set_ctrl_h2(index, hash);
.          (index, old_ctrl)
326,432 ( 0.00%)      }
.
.      /// Searches for an empty or deleted bucket which is suitable for inserting
.      /// a new element.
.      ///
.      /// There must be at least 1 empty bucket in the table.
.      #[inline]
.      fn find_insert_slot(&self, hash: u64) -> usize {
.          let mut probe_seq = self.probe_seq(hash);
.          loop {
.              unsafe {
.                  let group = Group::load(self.ctrl(probe_seq.pos));
1,446,972 ( 0.02%)                  if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() {
4,223,400 ( 0.05%)                      let result = (probe_seq.pos + bit) & self.bucket_mask;
.
.                      // In tables smaller than the group width, trailing control
.                      // bytes outside the range of the table are filled with
.                      // EMPTY entries. These will unfortunately trigger a
.                      // match, but once masked may point to a full bucket that
.                      // is already occupied. We detect this situation here and
.                      // perform a second scan starting at the beginning of the
.                      // table. This second scan is guaranteed to find an empty
.                      // slot (due to the load factor) before hitting the trailing
.                      // control bytes (containing EMPTY).
1,948,984 ( 0.02%)                      if unlikely(is_full(*self.ctrl(result))) {
.                          debug_assert!(self.bucket_mask < Group::WIDTH);
.                          debug_assert_ne!(probe_seq.pos, 0);
.                          return Group::load_aligned(self.ctrl(0))
.                              .match_empty_or_deleted()
.                              .lowest_set_bit_nonzero();
.                      }
.
.                      return result;
-- line 1165 ----------------------------------------
-- line 1171 ----------------------------------------
.
.      /// Searches for an element in the table. This uses dynamic dispatch to reduce the amount of
.      /// code generated, but it is eliminated by LLVM optimizations.
.      #[inline]
.      fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option<usize> {
.          let h2_hash = h2(hash);
.          let mut probe_seq = self.probe_seq(hash);
.
86,036 ( 0.00%)          loop {
.              let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) };
.
4,295,019 ( 0.05%)              for bit in group.match_byte(h2_hash) {
8,229,822 ( 0.10%)                  let index = (probe_seq.pos + bit) & self.bucket_mask;
.
5,395,526 ( 0.07%)                  if likely(eq(index)) {
.                      return Some(index);
.                  }
.              }
.
1,360,429 ( 0.02%)              if likely(group.match_empty().any_bit_set()) {
.                  return None;
.              }
.
.              probe_seq.move_next(self.bucket_mask);
.          }
.      }
.
.      #[allow(clippy::mut_mut)]
.      #[inline]
.      unsafe fn prepare_rehash_in_place(&mut self) {
.          // Bulk convert all full control bytes to DELETED, and all DELETED
.          // control bytes to EMPTY. This effectively frees up all buckets
.          // containing a DELETED entry.
3 ( 0.00%)          for i in (0..self.buckets()).step_by(Group::WIDTH) {
.              let group = Group::load_aligned(self.ctrl(i));
.              let group = group.convert_special_to_empty_and_full_to_deleted();
.              group.store_aligned(self.ctrl(i));
.          }
.
.          // Fix up the trailing control bytes. See the comments in set_ctrl
.          // for the handling of tables smaller than the group width.
3 ( 0.00%)          if self.buckets() < Group::WIDTH {
.              self.ctrl(0)
.                  .copy_to(self.ctrl(Group::WIDTH), self.buckets());
.          } else {
.              self.ctrl(0)
.                  .copy_to(self.ctrl(self.buckets()), Group::WIDTH);
.          }
.      }
.
-- line 1220 ----------------------------------------
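`find_inner()` above leans on `Group::match_byte`, which compares all control bytes of a group against `h2` at once and hands back a bitmask of candidate buckets. A scalar stand-in for that matcher (illustrative only; the real implementation uses SSE2 or SWAR, but the bit-i-means-byte-i mask convention is the same):

// Scalar stand-in for Group::match_byte on a 16-byte control group.
const EMPTY: u8 = 0b1111_1111;

fn match_byte(group: &[u8; 16], byte: u8) -> u16 {
    let mut mask = 0u16;
    for (i, &ctrl) in group.iter().enumerate() {
        if ctrl == byte {
            mask |= 1 << i; // bit i set => control byte i matched
        }
    }
    mask
}

fn main() {
    let mut group = [EMPTY; 16];
    group[3] = 0x42; // two candidate buckets sharing the same h2
    group[9] = 0x42;
    let candidates = match_byte(&group, 0x42);
    assert_eq!(candidates, (1 << 3) | (1 << 9));
    // An EMPTY byte anywhere in the group is what lets the probe loop
    // stop (match_empty in the real code):
    assert!(match_byte(&group, EMPTY) != 0);
}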
-- line 1225 ----------------------------------------
.          Bucket::from_base_index(self.data_end(), index)
.      }
.
.      #[inline]
.      unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 {
.          debug_assert_ne!(self.bucket_mask, 0);
.          debug_assert!(index < self.buckets());
.          let base: *mut u8 = self.data_end().as_ptr();
5,311,328 ( 0.07%)          base.sub((index + 1) * size_of)
.      }
.
.      #[inline]
.      unsafe fn data_end(&self) -> NonNull<u8> {
.          NonNull::new_unchecked(self.ctrl.as_ptr().cast())
.      }
.
.      /// Returns an iterator-like object for a probe sequence on the table.
.      ///
.      /// This iterator never terminates, but is guaranteed to visit each bucket
.      /// group exactly once. The loop using `probe_seq` must terminate upon
.      /// reaching a group containing an empty bucket.
.      #[inline]
.      fn probe_seq(&self, hash: u64) -> ProbeSeq {
.          ProbeSeq {
24,531,802 ( 0.31%)              pos: h1(hash) & self.bucket_mask,
.              stride: 0,
.          }
.      }
.
.      /// Returns the index of a bucket for which a value must be inserted if there is enough room
.      /// in the table, otherwise returns an error.
.      #[cfg(feature = "raw")]
.      #[inline]
-- line 1257 ----------------------------------------
-- line 1263 ----------------------------------------
.          } else {
.              self.record_item_insert_at(index, old_ctrl, hash);
.              Ok(index)
.          }
.      }
.
.      #[inline]
.      unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) {
2,708,908 ( 0.03%)          self.growth_left -= special_is_empty(old_ctrl) as usize;
.          self.set_ctrl_h2(index, hash);
2,165,748 ( 0.03%)          self.items += 1;
.      }
.
.      #[inline]
.      fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool {
.          let probe_seq_pos = self.probe_seq(hash).pos;
.          let probe_index =
.              |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH;
12 ( 0.00%)          probe_index(i) == probe_index(new_i)
.      }
.
.      /// Sets a control byte to the hash, and possibly also the replicated control byte at
.      /// the end of the array.
.      #[inline]
.      unsafe fn set_ctrl_h2(&self, index: usize, hash: u64) {
.          self.set_ctrl(index, h2(hash));
.      }
-- line 1289 ----------------------------------------
-- line 1312 ----------------------------------------
.          // replicate the buckets at the end of the trailing group. For example
.          // with 2 buckets and a group size of 4, the control bytes will look
.          // like this:
.          //
.          //     Real    |             Replicated
.          // ---------------------------------------------
.          // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] |
.          // ---------------------------------------------
4,110,689 ( 0.05%)          let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH;
.
1,369,695 ( 0.02%)          *self.ctrl(index) = ctrl;
1,370,360 ( 0.02%)          *self.ctrl(index2) = ctrl;
.      }
.
.      /// Returns a pointer to a control byte.
.      #[inline]
.      unsafe fn ctrl(&self, index: usize) -> *mut u8 {
.          debug_assert!(index < self.num_ctrl_bytes());
.          self.ctrl.as_ptr().add(index)
.      }
.
.      #[inline]
.      fn buckets(&self) -> usize {
387,203 ( 0.00%)          self.bucket_mask + 1
.      }
.
.      #[inline]
.      fn num_ctrl_bytes(&self) -> usize {
316,488 ( 0.00%)          self.bucket_mask + 1 + Group::WIDTH
.      }
.
.      #[inline]
.      fn is_empty_singleton(&self) -> bool {
1,475,717 ( 0.02%)          self.bucket_mask == 0
.      }
.
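The mirrored-control-byte arithmetic in `set_ctrl()` above can be checked directly against the 2-bucket, group-width-4 picture from its comment. A small sketch with a hypothetical `mirror_index` helper and a made-up GROUP_WIDTH:

// Where does the replicated control byte for a given bucket live?
const GROUP_WIDTH: usize = 4; // demo value; real groups are wider

fn mirror_index(index: usize, bucket_mask: usize) -> usize {
    // Same expression as `index2` in set_ctrl() above.
    ((index.wrapping_sub(GROUP_WIDTH)) & bucket_mask) + GROUP_WIDTH
}

fn main() {
    let bucket_mask = 1; // 2 buckets, as in the comment's diagram
    assert_eq!(mirror_index(0, bucket_mask), 4); // [A] replicated after the table
    assert_eq!(mirror_index(1, bucket_mask), 5); // [B] replicated after the table
    // In tables at least GROUP_WIDTH wide, indices >= GROUP_WIDTH map
    // onto themselves, so the unconditional double store is harmless.
    let big_mask = 15; // 16 buckets
    assert_eq!(mirror_index(8, big_mask), 8);
}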
.      #[allow(clippy::mut_mut)]
.      #[inline]
.      unsafe fn prepare_resize(
.          &self,
.          table_layout: TableLayout,
.          capacity: usize,
.          fallibility: Fallibility,
.      ) -> Result<crate::scopeguard::ScopeGuard<Self, impl FnMut(&mut Self)>, TryReserveError> {
.          debug_assert!(self.items <= capacity);
.
.          // Allocate and initialize the new table.
2,670 ( 0.00%)          let mut new_table = RawTableInner::fallible_with_capacity(
.              self.alloc.clone(),
.              table_layout,
.              capacity,
.              fallibility,
.          )?;
177,140 ( 0.00%)          new_table.growth_left -= self.items;
.          new_table.items = self.items;
.
.          // The hash function may panic, in which case we simply free the new
.          // table without dropping any elements that may have been copied into
.          // it.
.          //
.          // This guard is also used to free the old table on success, see
.          // the comment at the bottom of this function.
.          Ok(guard(new_table, move |self_| {
73,425 ( 0.00%)              if !self_.is_empty_singleton() {
.                  self_.free_buckets(table_layout);
.              }
.          }))
.      }
.
.      /// Reserves or rehashes to make room for `additional` more elements.
.      ///
.      /// This uses dynamic dispatch to reduce the amount of
-- line 1383 ----------------------------------------
-- line 1388 ----------------------------------------
.          &mut self,
.          additional: usize,
.          hasher: &dyn Fn(&mut Self, usize) -> u64,
.          fallibility: Fallibility,
.          layout: TableLayout,
.          drop: Option<fn(*mut u8)>,
.      ) -> Result<(), TryReserveError> {
.          // Avoid `Option::ok_or_else` because it bloats LLVM IR.
146,853 ( 0.00%)          let new_items = match self.items.checked_add(additional) {
.              Some(new_items) => new_items,
.              None => return Err(fallibility.capacity_overflow()),
.          };
146,852 ( 0.00%)          let full_capacity = bucket_mask_to_capacity(self.bucket_mask);
329,400 ( 0.00%)          if new_items <= full_capacity / 2 {
.              // Rehash in-place without re-allocating if we have plenty of spare
.              // capacity that is locked up due to DELETED entries.
.              self.rehash_in_place(hasher, layout.size, drop);
2 ( 0.00%)              Ok(())
.          } else {
.              // Otherwise, conservatively resize to at least the next size up
.              // to avoid churning deletes into frequent rehashes.
.              self.resize_inner(
73,425 ( 0.00%)                  usize::max(new_items, full_capacity + 1),
.                  hasher,
.                  fallibility,
.                  layout,
.              )
.          }
.      }
.
.      /// Allocates a new table of a different size and moves the contents of the
-- line 1418 ----------------------------------------
-- line 1424 ----------------------------------------
.      #[inline(always)]
.      unsafe fn resize_inner(
.          &mut self,
.          capacity: usize,
.          hasher: &dyn Fn(&mut Self, usize) -> u64,
.          fallibility: Fallibility,
.          layout: TableLayout,
.      ) -> Result<(), TryReserveError> {
6,623 ( 0.00%)          let mut new_table = self.prepare_resize(layout, capacity, fallibility)?;
.
.          // Copy all elements to the new table.
.          for i in 0..self.buckets() {
821,245 ( 0.01%)              if !is_full(*self.ctrl(i)) {
.                  continue;
.              }
.
.              // This may panic.
.              let hash = hasher(self, i);
.
.              // We can use a simpler version of insert() here since:
.              // - there are no DELETED entries.
-- line 1444 ----------------------------------------
-- line 1454 ----------------------------------------
.          }
.
.          // We successfully copied all elements without panicking. Now replace
.          // self with the new table. The old table will have its memory freed but
.          // the items will not be dropped (since they have been moved into the
.          // new table).
.          mem::swap(self, &mut new_table);
.
73,425 ( 0.00%)          Ok(())
.      }
.
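`reserve_rehash_inner()` above chooses between reclaiming tombstones and growing: it only rehashes in place when the post-insert item count still fits in half of the table's full capacity, so dead DELETED slots account for most of the apparent fullness. The decision in isolation, with illustrative numbers only:

// Grow-vs-rehash heuristic with made-up inputs.
fn main() {
    let bucket_mask = 127usize;                      // 128-bucket table
    let full_capacity = ((bucket_mask + 1) / 8) * 7; // 112, per the load factor
    let items = 40;                                  // live entries; the rest are tombstones
    let additional = 10;
    let new_items = items + additional;              // 50
    if new_items <= full_capacity / 2 {
        // 50 <= 56: plenty of capacity is locked up in DELETED slots,
        // so an in-place rehash reclaims it without allocating.
        println!("rehash in place: {new_items} <= {}", full_capacity / 2);
    } else {
        let target = usize::max(new_items, full_capacity + 1);
        println!("resize to hold at least {target} items");
    }
}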
.      /// Rehashes the contents of the table in place (i.e. without changing the
.      /// allocation).
.      ///
.      /// If `hasher` panics then some of the table's contents may be lost.
.      ///
.      /// This uses dynamic dispatch to reduce the amount of
-- line 1470 ----------------------------------------
-- line 1496 ----------------------------------------
.              }
.              self_.growth_left = bucket_mask_to_capacity(self_.bucket_mask) - self_.items;
.          });
.
.          // At this point, DELETED elements are elements that we haven't
.          // rehashed yet. Find them and re-insert them at their ideal
.          // position.
.          'outer: for i in 0..guard.buckets() {
64 ( 0.00%)              if *guard.ctrl(i) != DELETED {
.                  continue;
.              }
.
.              let i_p = guard.bucket_ptr(i, size_of);
.
.              'inner: loop {
.                  // Hash the current item
.                  let hash = hasher(*guard, i);
-- line 1512 ----------------------------------------
-- line 1515 ----------------------------------------
.                  let new_i = guard.find_insert_slot(hash);
.                  let new_i_p = guard.bucket_ptr(new_i, size_of);
.
.                  // Probing works by scanning through all of the control
.                  // bytes in groups, which may not be aligned to the group
.                  // size. If both the new and old position fall within the
.                  // same unaligned group, then there is no benefit in moving
.                  // it and we can just continue to the next item.
4 ( 0.00%)                  if likely(guard.is_in_same_group(i, new_i, hash)) {
.                      guard.set_ctrl_h2(i, hash);
.                      continue 'outer;
.                  }
.
.                  // We are moving the current item to a new position. Write
.                  // our H2 to the control byte of the new position.
.                  let prev_ctrl = guard.replace_ctrl_h2(new_i, hash);
.                  if prev_ctrl == EMPTY {
-- line 1531 ----------------------------------------
-- line 1541 ----------------------------------------
.                  // swapped into the old slot.
.                  debug_assert_eq!(prev_ctrl, DELETED);
.                  ptr::swap_nonoverlapping(i_p, new_i_p, size_of);
.                  continue 'inner;
.              }
.          }
.      }
.
3 ( 0.00%)      guard.growth_left = bucket_mask_to_capacity(guard.bucket_mask) - guard.items;
.
.      mem::forget(guard);
.  }
.
.      #[inline]
.      unsafe fn free_buckets(&mut self, table_layout: TableLayout) {
.          // Avoid `Option::unwrap_or_else` because it bloats LLVM IR.
.          let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) {
.              Some(lco) => lco,
.              None => hint::unreachable_unchecked(),
.          };
.          self.alloc.deallocate(
59,238 ( 0.00%)              NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)),
.              layout,
.          );
.      }
.
.      /// Marks all table buckets as empty without dropping their contents.
.      #[inline]
.      fn clear_no_drop(&mut self) {
11,077 ( 0.00%)          if !self.is_empty_singleton() {
.              unsafe {
.                  self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes());
.              }
.          }
15,171 ( 0.00%)          self.items = 0;
11,135 ( 0.00%)          self.growth_left = bucket_mask_to_capacity(self.bucket_mask);
.      }
.
.      #[inline]
.      unsafe fn erase(&mut self, index: usize) {
.          debug_assert!(is_full(*self.ctrl(index)));
127,471 ( 0.00%)          let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask;
.          let empty_before = Group::load(self.ctrl(index_before)).match_empty();
.          let empty_after = Group::load(self.ctrl(index)).match_empty();
.
.          // If we are inside a continuous block of Group::WIDTH full or deleted
.          // cells then a probe window may have seen a full block when trying to
.          // insert. We therefore need to keep that block non-empty so that
.          // lookups will continue searching to the next probe window.
.          //
.          // Note that in this context `leading_zeros` refers to the bytes at the
.          // end of a group, while `trailing_zeros` refers to the bytes at the
.          // beginning of a group.
509,884 ( 0.01%)          let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH {
.              DELETED
.          } else {
627,550 ( 0.01%)              self.growth_left += 1;
.              EMPTY
.          };
.          self.set_ctrl(index, ctrl);
509,884 ( 0.01%)          self.items -= 1;
.      }
.  }
.
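The erase path above decides between writing EMPTY (slot fully reusable, growth budget returned) and DELETED (tombstone) by asking whether the erased slot sits inside a run of at least Group::WIDTH consecutive non-EMPTY control bytes, since only such a run could have saturated some probe window. A scalar model of that test (u16 bitmasks stand in for the real Group::match_empty results; the bit layout mirrors the comment above):

// Model of the EMPTY-vs-DELETED decision on erase.
const GROUP_WIDTH: u32 = 16;

fn erase_ctrl(empty_before: u16, empty_after: u16) -> &'static str {
    // empty_before: mask of EMPTY bytes in the group ending just before
    // the slot (bit 15 = the byte immediately before it).
    // empty_after: mask of EMPTY bytes in the group starting at the slot
    // (bit 0 = the slot itself, which is still FULL here).
    if empty_before.leading_zeros() + empty_after.trailing_zeros() >= GROUP_WIDTH {
        "DELETED" // a probe window could have been fully non-empty
    } else {
        "EMPTY"
    }
}

fn main() {
    // Slot in the middle of a fully occupied neighborhood: tombstone.
    assert_eq!(erase_ctrl(0, 0), "DELETED");
    // EMPTY bytes hug the slot on both sides: no probe window can be
    // saturated, so the slot can return to EMPTY (and growth budget).
    assert_eq!(erase_ctrl(0x8000, 0b10), "EMPTY");
}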
.  impl<T: Clone, A: Allocator + Clone> Clone for RawTable<T, A> {
4,264 ( 0.00%)      fn clone(&self) -> Self {
623 ( 0.00%)          if self.table.is_empty_singleton() {
.              Self::new_in(self.table.alloc.clone())
.          } else {
.              unsafe {
.                  let mut new_table = ManuallyDrop::new(
.                      // Avoid `Result::ok_or_else` because it bloats LLVM IR.
.                      match Self::new_uninitialized(
.                          self.table.alloc.clone(),
.                          self.table.buckets(),
-- line 1615 ----------------------------------------
-- line 1624 ----------------------------------------
.                  // We need to free the memory allocated for the new table.
.                  new_table.free_buckets();
.              });
.
.              // Return the newly created table.
.              ManuallyDrop::into_inner(new_table)
.              }
.          }
4,797 ( 0.00%)      }
.
.      fn clone_from(&mut self, source: &Self) {
.          if source.table.is_empty_singleton() {
.              *self = Self::new_in(self.table.alloc.clone());
.          } else {
.              unsafe {
.                  // First, drop all our elements without clearing the control bytes.
.                  self.drop_elements();
-- line 1640 ----------------------------------------
-- line 1687 ----------------------------------------
.                  .table
.                  .ctrl(0)
.                  .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes());
.              source
.                  .data_start()
.                  .copy_to_nonoverlapping(self.data_start(), self.table.buckets());
.
.              self.table.items = source.table.items;
106 ( 0.00%)              self.table.growth_left = source.table.growth_left;
.          }
.      }
.
.  impl<T: Clone, A: Allocator + Clone> RawTable<T, A> {
.      /// Common code for clone and clone_from. Assumes `self.buckets() == source.buckets()`.
.      #[cfg_attr(feature = "inline-more", inline)]
.      unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) {
.          // Copy the control bytes unchanged. We do this in a single pass
-- line 1703 ----------------------------------------
-- line 1790 ----------------------------------------
.      fn default() -> Self {
.          Self::new_in(Default::default())
.      }
.  }
.
.  #[cfg(feature = "nightly")]
.  unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawTable<T, A> {
.      #[cfg_attr(feature = "inline-more", inline)]
658,391 ( 0.01%)      fn drop(&mut self) {
620,801 ( 0.01%)          if !self.table.is_empty_singleton() {
.              unsafe {
.                  self.drop_elements();
.                  self.free_buckets();
.              }
.          }
677,010 ( 0.01%)      }
.  }
.  #[cfg(not(feature = "nightly"))]
.  impl<T, A: Allocator + Clone> Drop for RawTable<T, A> {
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn drop(&mut self) {
.          if !self.table.is_empty_singleton() {
.              unsafe {
.                  self.drop_elements();
-- line 1813 ----------------------------------------
-- line 1817 ----------------------------------------
.      }
.  }
.
.  impl<T, A: Allocator + Clone> IntoIterator for RawTable<T, A> {
.      type Item = T;
.      type IntoIter = RawIntoIter<T, A>;
.
.      #[cfg_attr(feature = "inline-more", inline)]
7,648 ( 0.00%)      fn into_iter(self) -> RawIntoIter<T, A> {
.          unsafe {
.              let iter = self.iter();
.              self.into_iter_from(iter)
.          }
9,560 ( 0.00%)      }
.  }
.
.  /// Iterator over a sub-range of a table. Unlike `RawIter` this iterator does
.  /// not track an item count.
.  pub(crate) struct RawIterRange<T> {
.      // Mask of full buckets in the current group. Bits are cleared from this
.      // mask as each element is processed.
.      current_group: BitMask,
-- line 1838 ----------------------------------------
-- line 1934 ----------------------------------------
.
.  impl<T> Iterator for RawIterRange<T> {
.      type Item = Bucket<T>;
.
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn next(&mut self) -> Option<Bucket<T>> {
.          unsafe {
.              loop {
282,772 ( 0.00%)                  if let Some(index) = self.current_group.lowest_set_bit() {
42,639 ( 0.00%)                      self.current_group = self.current_group.remove_lowest_bit();
59,193 ( 0.00%)                      return Some(self.data.next_n(index));
.                  }
.
246,125 ( 0.00%)                  if self.next_ctrl >= self.end {
.                      return None;
.                  }
.
.                  // We might read past self.end up to the next group boundary,
.                  // but this is fine because it only occurs on tables smaller
.                  // than the group size where the trailing control bytes are all
.                  // EMPTY. On larger tables self.end is guaranteed to be aligned
.                  // to the group size (since tables are power-of-two sized).
4,921 ( 0.00%)                  self.current_group = Group::load_aligned(self.next_ctrl).match_full();
6,211 ( 0.00%)                  self.data = self.data.next_n(Group::WIDTH);
7,674 ( 0.00%)                  self.next_ctrl = self.next_ctrl.add(Group::WIDTH);
.              }
.          }
.      }
.
.      #[inline]
.      fn size_hint(&self) -> (usize, Option<usize>) {
.          // We don't have an item count, so just guess based on the range size.
.          (
-- line 1966 ----------------------------------------
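`RawIterRange::next()` above drives iteration off a per-group bitmask of full buckets, peeling one set bit per yielded element. The bitmask core in isolation, with a u16 standing in for hashbrown's BitMask:

// Bit-peeling loop behind the iterator: lowest_set_bit, then
// remove_lowest_bit, until the group mask is exhausted.
fn main() {
    let mut current_group: u16 = 0b0010_0100_0000_1001; // full buckets 0, 3, 10, 13
    let mut visited = Vec::new();
    while current_group != 0 {
        let index = current_group.trailing_zeros() as usize; // lowest_set_bit
        current_group &= current_group - 1;                  // remove_lowest_bit
        visited.push(index);
    }
    assert_eq!(visited, vec![0, 3, 10, 13]);
}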
-- line 2102 ----------------------------------------
.                  }
.              } else {
.                  // We must have already iterated past the removed item.
.              }
.          }
.      }
.
.      unsafe fn drop_elements(&mut self) {
989 ( 0.00%)          if mem::needs_drop::<T>() && self.len() != 0 {
.              for item in self {
.                  item.drop();
.              }
.          }
.      }
.  }
.
.  impl<T> Clone for RawIter<T> {
-- line 2118 ----------------------------------------
-- line 2124 ----------------------------------------
.          }
.      }
.  }
.
.  impl<T> Iterator for RawIter<T> {
.      type Item = Bucket<T>;
.
.      #[cfg_attr(feature = "inline-more", inline)]
40,285 ( 0.00%)      fn next(&mut self) -> Option<Bucket<T>> {
96,860 ( 0.00%)          if let Some(b) = self.iter.next() {
381,044 ( 0.00%)              self.items -= 1;
.              Some(b)
.          } else {
.              // We don't check against items == 0 here to allow the
.              // compiler to optimize away the item count entirely if the
.              // iterator length is never queried.
.              debug_assert_eq!(self.items, 0);
.              None
.          }
80,570 ( 0.00%)      }
.
.      #[inline]
.      fn size_hint(&self) -> (usize, Option<usize>) {
.          (self.items, Some(self.items))
.      }
.  }
.
.  impl<T> ExactSizeIterator for RawIter<T> {}
-- line 2151 ----------------------------------------
-- line 2177 ----------------------------------------
.      T: Sync,
.      A: Sync,
.  {
.  }
.
.  #[cfg(feature = "nightly")]
.  unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter<T, A> {
.      #[cfg_attr(feature = "inline-more", inline)]
1,936 ( 0.00%)      fn drop(&mut self) {
.          unsafe {
.              // Drop all remaining elements
.              self.iter.drop_elements();
.
.              // Free the table
12,445 ( 0.00%)              if let Some((ptr, layout)) = self.allocation {
.                  self.alloc.deallocate(ptr, layout);
.              }
.          }
294 ( 0.00%)      }
.  }
.  #[cfg(not(feature = "nightly"))]
.  impl<T, A: Allocator + Clone> Drop for RawIntoIter<T, A> {
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn drop(&mut self) {
.          unsafe {
.              // Drop all remaining elements
.              self.iter.drop_elements();
-- line 2203 ----------------------------------------
-- line 2209 ----------------------------------------
.              }
.          }
.      }
.
.  impl<T, A: Allocator + Clone> Iterator for RawIntoIter<T, A> {
.      type Item = T;
.
.      #[cfg_attr(feature = "inline-more", inline)]
1,286 ( 0.00%)      fn next(&mut self) -> Option<T> {
363 ( 0.00%)          unsafe { Some(self.iter.next()?.read()) }
3,370 ( 0.00%)      }
.
.      #[inline]
.      fn size_hint(&self) -> (usize, Option<usize>) {
5 ( 0.00%)          self.iter.size_hint()
.      }
.  }
.
.  impl<T, A: Allocator + Clone> ExactSizeIterator for RawIntoIter<T, A> {}
.  impl<T, A: Allocator + Clone> FusedIterator for RawIntoIter<T, A> {}
.
.  /// Iterator which consumes elements without freeing the table storage.
.  pub struct RawDrain<'a, T, A: Allocator + Clone = Global> {
-- line 2231 ----------------------------------------
-- line 2259 ----------------------------------------
.  where
.      T: Sync,
.      A: Sync,
.  {
.  }
.
.  impl<T, A: Allocator + Clone> Drop for RawDrain<'_, T, A> {
.      #[cfg_attr(feature = "inline-more", inline)]
512 ( 0.00%)      fn drop(&mut self) {
.          unsafe {
.              // Drop all remaining elements. Note that this may panic.
.              self.iter.drop_elements();
.
.              // Reset the contents of the table now that all elements have been
.              // dropped.
.              self.table.clear_no_drop();
.
.              // Move the now empty table back to its original location.
64 ( 0.00%)              self.orig_table
.                  .as_ptr()
.                  .copy_from_nonoverlapping(&*self.table, 1);
.          }
512 ( 0.00%)      }
.  }
.
.  impl<T, A: Allocator + Clone> Iterator for RawDrain<'_, T, A> {
.      type Item = T;
.
.      #[cfg_attr(feature = "inline-more", inline)]
.      fn next(&mut self) -> Option<T> {
.          unsafe {
-- line 2289 ----------------------------------------
13,588,388 ( 0.17%)
--------------------------------------------------------------------------------
The following files chosen for auto-annotation could not be found:
--------------------------------------------------------------------------------
./elf/dl-lookup.c
./malloc/malloc.c
./stdlib/msort.c
./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S
./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S
./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S
/tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc
/tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/src/c++98/../../../../../gcc-5.5.0/libstdc++-v3/src/c++98/tree.cc
--------------------------------------------------------------------------------
Ir
--------------------------------------------------------------------------------
292,833,794 ( 3.65%)  events annotated