--------------------------------------------------------------------------------
I1 cache:         65536 B, 64 B, 4-way associative
D1 cache:         32768 B, 64 B, 8-way associative
LL cache:         67108864 B, 64 B, 64-way associative
Command:          /usr/home/liquid/.rustup/toolchains/w-profiling/bin/rustc --crate-name unic_ucd_segment --edition=2018 src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C opt-level=3 -C embed-bitcode=no -C metadata=99630f0270286824 -C extra-filename=-99630f0270286824 --out-dir /usr/home/liquid/tmp/.tmpUCpbPH/target/release/deps -L dependency=/usr/home/liquid/tmp/.tmpUCpbPH/target/release/deps --extern unic_char_property=/usr/home/liquid/tmp/.tmpUCpbPH/target/release/deps/libunic_char_property-2078721ce0036943.rmeta --extern unic_char_range=/usr/home/liquid/tmp/.tmpUCpbPH/target/release/deps/libunic_char_range-bedcbfa1efc2c0ea.rmeta --extern unic_ucd_version=/usr/home/liquid/tmp/.tmpUCpbPH/target/release/deps/libunic_ucd_version-0aaa60b0ca0c2ccd.rmeta -Adeprecated -Aunknown-lints -Zincremental-verify-ich
Data file:        results/cgout-w-profiling-unic-ucd-segment-0.9.0-Opt-Full
Events recorded:  Ir
Events shown:     Ir
Event sort order: Ir
Thresholds:       0.1
Include dirs:
User annotated:
Auto-annotation:  on
--------------------------------------------------------------------------------
Ir
--------------------------------------------------------------------------------
4,557,875,821 (100.0%)  PROGRAM TOTALS
--------------------------------------------------------------------------------
Ir                      file:function
--------------------------------------------------------------------------------
120,380,754 ( 2.64%)  ./malloc/malloc.c:_int_free
113,822,506 ( 2.50%)  ./malloc/malloc.c:_int_malloc
 76,975,650 ( 1.69%)  ./malloc/malloc.c:malloc
 63,495,576 ( 1.39%)  ???:llvm::SSAUpdater::GetValueAtEndOfBlockInternal(llvm::BasicBlock*)
 58,921,133 ( 1.29%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_avx_unaligned_erms
 40,495,055 ( 0.89%)  ???:llvm::ValueHandleBase::AddToUseList()
 39,648,323 ( 0.87%)  ???:llvm::MemoryDependenceResults::getNonLocalPointerDepFromBB(llvm::Instruction*, llvm::PHITransAddr const&, llvm::MemoryLocation const&, bool, llvm::BasicBlock*, llvm::SmallVectorImpl&, llvm::DenseMap, llvm::detail::DenseMapPair >&, bool, bool)
 37,914,841 ( 0.83%)  ./malloc/malloc.c:free
 36,931,420 ( 0.81%)  ???:llvm::SelectionDAG::Combine(llvm::CombineLevel, llvm::AAResults*, llvm::CodeGenOpt::Level)
 30,828,079 ( 0.68%)  ???:llvm::InstCombinerImpl::run()
 26,961,945 ( 0.59%)  ???:(anonymous namespace)::LazyValueInfoImpl::getEdgeValue(llvm::Value*, llvm::BasicBlock*, llvm::BasicBlock*, llvm::Instruction*) [clone .llvm.4316243980339171764]
 26,405,430 ( 0.58%)  ???:combineInstructionsOverFunction(llvm::Function&, llvm::InstCombineWorklist&, llvm::AAResults*, llvm::AssumptionCache&, llvm::TargetLibraryInfo&, llvm::TargetTransformInfo&, llvm::DominatorTree&, llvm::OptimizationRemarkEmitter&, llvm::BlockFrequencyInfo*, llvm::ProfileSummaryInfo*, unsigned int, llvm::LoopInfo*)
 25,958,039 ( 0.57%)  ???:llvm::FPPassManager::runOnFunction(llvm::Function&)
 23,180,063 ( 0.51%)  ???:computeKnownBits(llvm::Value const*, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&) [clone .llvm.15619146473165121143]
 22,800,467 ( 0.50%)  ???:(anonymous namespace)::LazyValueInfoImpl::solve() [clone .llvm.4316243980339171764]
 21,156,652 ( 0.46%)  ???:(anonymous namespace)::LVIValueHandle::deleted()
 20,367,238 ( 0.45%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::CalculateFromScratch(llvm::DominatorTreeBase&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
 20,361,583 ( 0.45%)  ./malloc/malloc.c:malloc_consolidate
 16,628,464 ( 0.36%)  ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_erms
 16,450,893 ( 0.36%)  ???:computeKnownBits(llvm::Value const*, llvm::APInt const&, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
 16,293,700 ( 0.36%)  ???:llvm::InstCombinerImpl::visitICmpInst(llvm::ICmpInst&)
 15,973,355 ( 0.35%)  ???:llvm::DataLayout::getTypeSizeInBits(llvm::Type*) const
 15,950,898 ( 0.35%)  ???:llvm::TryToSimplifyUncondBranchFromEmptyBlock(llvm::BasicBlock*, llvm::DomTreeUpdater*)
 15,520,880 ( 0.34%)  ???:llvm::ScalarEvolution::getAddExpr(llvm::SmallVectorImpl&, llvm::SCEV::NoWrapFlags, unsigned int)
 15,285,251 ( 0.34%)  ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S:__memcmp_avx2_movbe
 14,837,514 ( 0.33%)  ???:llvm::BitstreamCursor::readRecord(unsigned int, llvm::SmallVectorImpl&, llvm::StringRef*)
 14,696,361 ( 0.32%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs:::lookup_source_file_idx
 14,428,775 ( 0.32%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::runSemiNCA(llvm::DominatorTreeBase&, unsigned int)
 14,093,927 ( 0.31%)  ???:llvm::TargetLibraryInfoImpl::getLibFunc(llvm::Function const&, llvm::LibFunc&) const
 13,538,254 ( 0.30%)  ???:llvm::removeUnreachableBlocks(llvm::Function&, llvm::DomTreeUpdater*, llvm::MemorySSAUpdater*)
 13,324,213 ( 0.29%)  ???:llvm::AnalysisManager::getResultImpl(llvm::AnalysisKey*, llvm::Function&)
 13,172,283 ( 0.29%)  ???:computeKnownBitsFromOperator(llvm::Operator const*, llvm::APInt const&, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
 13,141,197 ( 0.29%)  ???:SimplifyICmpInst(unsigned int, llvm::Value*, llvm::Value*, llvm::SimplifyQuery const&, unsigned int) [clone .llvm.1619516508949622737]
 12,425,010 ( 0.27%)  ???:llvm::ConstantInt::get(llvm::Type*, llvm::APInt const&)
 12,390,077 ( 0.27%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs:::short_write_process_buffer::
 12,140,053 ( 0.27%)  ???:llvm::ScalarEvolution::forgetMemoizedResults(llvm::SCEV const*)
 12,065,427 ( 0.26%)  ???:llvm::AnalysisManager::invalidate(llvm::Function&, llvm::PreservedAnalyses const&)
 11,973,634 ( 0.26%)  ./malloc/malloc.c:unlink_chunk.constprop.0
 11,594,633 ( 0.25%)  ???:llvm::ScalarEvolution::getSCEV(llvm::Value*)
 10,760,685 ( 0.24%)  ???:llvm::InstCombinerImpl::SimplifyDemandedUseBits(llvm::Value*, llvm::APInt, llvm::KnownBits&, unsigned int, llvm::Instruction*)
 10,703,884 ( 0.23%)  ???:(anonymous namespace)::SimplifyCFGOpt::simplifyCondBranch(llvm::BranchInst*, llvm::IRBuilder&)
 10,568,792 ( 0.23%)  ???:llvm::simplifyCFG(llvm::BasicBlock*, llvm::TargetTransformInfo const&, llvm::DomTreeUpdater*, llvm::SimplifyCFGOptions const&, llvm::ArrayRef)
 10,438,638 ( 0.23%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_target/src/abi/mod.rs:::checked_add
 10,309,110 ( 0.23%)  ./elf/dl-lookup.c:_dl_lookup_symbol_x
 10,173,926 ( 0.22%)  ???:llvm::AttributeList::addAttributes(llvm::LLVMContext&, unsigned int, llvm::AttrBuilder const&) const
  9,967,450 ( 0.22%)  ???:llvm::LiveVariables::runOnBlock(llvm::MachineBasicBlock*, unsigned int)
  9,962,023 ( 0.22%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs:::next
  9,830,759 ( 0.22%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs:::span_data_to_lines_and_cols
  9,685,825 ( 0.21%)  ???:(anonymous namespace)::MachineCopyPropagation::runOnMachineFunction(llvm::MachineFunction&)
  9,677,705 ( 0.21%)  ???:llvm::FindFunctionBackedges(llvm::Function const&, llvm::SmallVectorImpl >&)
  9,503,751 ( 0.21%)  ???:llvm::DataLayout::getAlignment(llvm::Type*, bool) const
  9,402,462 ( 0.21%)  ???:llvm::SCCPInstVisitor::visitPHINode(llvm::PHINode&)
  9,400,988 ( 0.21%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs:::bump
  9,174,731 ( 0.20%)  ???:llvm::SimplifyInstruction(llvm::Instruction*, llvm::SimplifyQuery const&, llvm::OptimizationRemarkEmitter*)
  8,925,428 ( 0.20%)  ???:llvm::SelectionDAGISel::SelectCodeCommon(llvm::SDNode*, unsigned char const*, unsigned int)
  8,499,345 ( 0.19%)  ???:computeKnownBitsFromAssume(llvm::Value const*, llvm::KnownBits&, unsigned int, (anonymous namespace)::Query const&)
  8,311,356 ( 0.18%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs:::short_write_process_buffer::
  8,195,311 ( 0.18%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs:::check
  8,030,587 ( 0.18%)  ???:(anonymous namespace)::JoinVals::computeAssignment(unsigned int, (anonymous namespace)::JoinVals&)
  7,979,395 ( 0.18%)  ???:llvm::ConstantRange::makeExactICmpRegion(llvm::CmpInst::Predicate, llvm::APInt const&)
  7,976,500 ( 0.18%)  ???:llvm::LiveIntervals::extendSegmentsToUses(llvm::LiveRange&, llvm::SmallVector, 16u>&, llvm::Register, llvm::LaneBitmask)
  7,709,928 ( 0.17%)  ???:llvm::DenseMapBase, llvm::detail::DenseMapPair >, llvm::BasicBlock*, llvm::Value*, llvm::DenseMapInfo, llvm::detail::DenseMapPair >::moveFromOldBuckets(llvm::detail::DenseMapPair*, llvm::detail::DenseMapPair*)
  7,678,006 ( 0.17%)  ???:matchSelectPattern(llvm::CmpInst::Predicate, llvm::FastMathFlags, llvm::Value*, llvm::Value*, llvm::Value*, llvm::Value*, llvm::Value*&, llvm::Value*&, unsigned int) [clone .llvm.15619146473165121143]
  7,565,319 ( 0.17%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::from_key_hashed_nocheck::
  7,517,303 ( 0.16%)  ???:llvm::BasicAAResult::alias(llvm::MemoryLocation const&, llvm::MemoryLocation const&, llvm::AAQueryInfo&)
  7,307,226 ( 0.16%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/util.rs:::checked_add
  7,260,746 ( 0.16%)  ???:runCVP(llvm::Module&) [clone .llvm.11785992503873176614]
  7,230,979 ( 0.16%)  ???:llvm::ScalarEvolution::getMulExpr(llvm::SmallVectorImpl&, llvm::SCEV::NoWrapFlags, unsigned int)
  7,224,515 ( 0.16%)  ???:llvm::SmallPtrSetImplBase::insert_imp_big(void const*)
  7,130,582 ( 0.16%)  ???:llvm::MachineInstr::addOperand(llvm::MachineFunction&, llvm::MachineOperand const&)
  7,061,265 ( 0.15%)  ???:llvm::ScalarEvolution::getRangeRef(llvm::SCEV const*, llvm::ScalarEvolution::RangeSignHint)
  7,042,045 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/operand.rs:>::try_read_immediate
  7,030,496 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/mod.rs:::next_token
  6,888,121 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/tokenstream.rs:::next_with_spacing
  6,866,728 ( 0.15%)  /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc:operator new(unsigned long)
  6,797,129 ( 0.15%)  ???:llvm::FoldBranchToCommonDest(llvm::BranchInst*, llvm::DomTreeUpdater*, llvm::MemorySSAUpdater*, llvm::TargetTransformInfo const*, unsigned int)
  6,714,603 ( 0.15%)  ???:llvm::InlineFunction(llvm::CallBase&, llvm::InlineFunctionInfo&, llvm::AAResults*, bool, llvm::Function*)
  6,710,586 ( 0.15%)  ???:bool llvm::DenseMapBase*, llvm::DenseMapInfo<(anonymous namespace)::SimpleValue>, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> >, (anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*, llvm::DenseMapInfo<(anonymous namespace)::SimpleValue>, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> >::LookupBucketFor<(anonymous namespace)::SimpleValue>((anonymous namespace)::SimpleValue const&, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> const*&) const
  6,619,303 ( 0.15%)  ???:llvm::X86InstrInfo::analyzeBranch(llvm::MachineBasicBlock&, llvm::MachineBasicBlock*&, llvm::MachineBasicBlock*&, llvm::SmallVectorImpl&, bool) const
  6,533,085 ( 0.14%)  ???:llvm::JumpThreadingPass::processBlock(llvm::BasicBlock*)
  6,367,734 ( 0.14%)  ???:llvm::FoldingSetBase::FindNodeOrInsertPos(llvm::FoldingSetNodeID const&, void*&, llvm::FoldingSetBase::FoldingSetInfo const&)
  6,345,978 ( 0.14%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::get::
  6,197,428 ( 0.14%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::FindRoots(llvm::DominatorTreeBase const&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
  6,193,209 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs:__rdl_alloc
  6,163,198 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp.rs:::advance_left
  6,148,052 ( 0.13%)  ???:llvm::InstCombinerImpl::visitPHINode(llvm::PHINode&)
  6,092,566 ( 0.13%)  ???:llvm::LivePhysRegs::stepBackward(llvm::MachineInstr const&)
  6,056,157 ( 0.13%)  ???:llvm::InstCombinerImpl::visitAdd(llvm::BinaryOperator&)
  6,050,844 ( 0.13%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/layout.rs:<&rustc_middle::ty::TyS as rustc_target::abi::TyAbiInterface<_>>::ty_and_layout_field::field_ty_or_layout::>
  5,916,974 ( 0.13%)  ???:llvm::ScheduleDAGSDNodes::BuildSchedUnits()
  5,895,950 ( 0.13%)  ???:llvm::ScalarEvolution::getLoopDisposition(llvm::SCEV const*, llvm::Loop const*)
  5,790,278 ( 0.13%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/tokentrees.rs:::parse_token_tree
  5,778,083 ( 0.13%)  ./malloc/malloc.c:realloc
  5,703,992 ( 0.13%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/token.rs:::clone
  5,674,764 ( 0.12%)  ???:llvm::LoopInfoBase::analyze(llvm::DominatorTreeBase const&)
  5,674,618 ( 0.12%)  ???:SimplifyOrInst(llvm::Value*, llvm::Value*, llvm::SimplifyQuery const&, unsigned int) [clone .llvm.1619516508949622737]
  5,662,336 ( 0.12%)  ???:(anonymous namespace)::RegisterCoalescer::joinCopy(llvm::MachineInstr*, bool&)
  5,638,536 ( 0.12%)  ???:llvm::TargetLoweringBase::getTypeConversion(llvm::LLVMContext&, llvm::EVT) const
  5,618,507 ( 0.12%)  ???:llvm::PopulateLoopsDFS::traverse(llvm::BasicBlock*)
  5,607,429 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:>::hash_stable
  5,493,077 ( 0.12%)  ???:llvm::ConstantRange::unionWith(llvm::ConstantRange const&, llvm::ConstantRange::PreferredRangeType) const
  5,481,121 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/token.rs:::eq
  5,468,864 ( 0.12%)  ???:(anonymous namespace)::DAGCombiner::combine(llvm::SDNode*)
  5,397,215 ( 0.12%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
  5,351,070 ( 0.12%)  ???:llvm::MemorySSA::buildMemorySSA(llvm::BatchAAResults&)
  5,301,153 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp/ring.rs:>::pop_first
  5,270,519 ( 0.12%)  ???:llvm::Type::getPrimitiveSizeInBits() const
  5,258,879 ( 0.12%)  ???:llvm::BlockFrequencyInfoImpl::tryToComputeMassInFunction()
  5,221,731 ( 0.11%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::CalculateFromScratch(llvm::DominatorTreeBase&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
  5,096,331 ( 0.11%)  ???:llvm::AAResults::getModRefInfo(llvm::Instruction const*, llvm::Optional const&, llvm::AAQueryInfo&)
  5,093,520 ( 0.11%)  ???:llvm::InstCombinerImpl::visitCallInst(llvm::CallInst&)
  5,091,097 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/string.rs:alloc::string::String::push
  5,085,907 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs:::span_data_to_lines_and_cols
  4,913,629 ( 0.11%)  ???:llvm::DomTreeBuilder::SemiNCAInfo >::runSemiNCA(llvm::DominatorTreeBase&, unsigned int)
  4,908,589 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/vec_deque/mod.rs:>::push_back
  4,883,477 ( 0.11%)  ???:(anonymous namespace)::Verifier::visitInstruction(llvm::Instruction&)
  4,857,045 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs:rustc_mir_dataflow::drop_flag_effects::on_all_children_bits::is_terminal_path
  4,845,521 ( 0.11%)  ./elf/dl-lookup.c:do_lookup_x
  4,794,254 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_target/src/abi/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
  4,777,429 ( 0.10%)  ???:llvm::GVN::processBlock(llvm::BasicBlock*)
  4,730,686 ( 0.10%)  ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_unaligned_erms
  4,698,036 ( 0.10%)  ???:llvm::MachineInstr::isIdenticalTo(llvm::MachineInstr const&, llvm::MachineInstr::MICheckType) const
  4,598,729 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_metadata/src/rmeta/encoder.rs:>::encode
  4,586,632 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/token.rs:::clone
  4,582,709 ( 0.10%)  ???:llvm::LoopBase::verifyLoop() const
  4,558,656 ( 0.10%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/rustc_entry.rs:>>::rustc_entry
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp.rs
--------------------------------------------------------------------------------
Ir

-- line 166 ----------------------------------------
.           String(Cow<'static, str>),
.           Break(BreakToken),
.           Begin(BeginToken),
.           End,
.       }
.
.       impl Token {
.           pub fn is_hardbreak_tok(&self) -> bool {
27,426 ( 0.00%)         matches!(self, Token::Break(BreakToken { offset: 0, blank_space: SIZE_INFINITY }))
.           }
.       }
.
.       #[derive(Copy, Clone)]
.       enum PrintFrame {
.           Fits,
.           Broken { offset: isize, breaks: Breaks },
} -- line 182 ---------------------------------------- -- line 213 ---------------------------------------- . . #[derive(Clone)] . struct BufEntry { . token: Token, . size: isize, . } . . impl Printer { 40 ( 0.00%) pub fn new() -> Self { . let linewidth = 78; 10,045 ( 0.00%) Printer { . out: String::new(), . margin: linewidth as isize, . space: linewidth as isize, . buf: RingBuffer::new(), . left_total: 0, . right_total: 0, . scan_stack: VecDeque::new(), . print_stack: Vec::new(), . pending_indentation: 0, . last_printed: None, . } 50 ( 0.00%) } . . pub fn last_token(&self) -> Option<&Token> { . self.last_token_still_buffered().or_else(|| self.last_printed.as_ref()) . } . . pub fn last_token_still_buffered(&self) -> Option<&Token> { . self.buf.last().map(|last| &last.token) . } . . /// Be very careful with this! . pub fn replace_last_token_still_buffered(&mut self, token: Token) { . self.buf.last_mut().unwrap().token = token; . } . . fn scan_eof(&mut self) { 463 ( 0.00%) if !self.scan_stack.is_empty() { 20 ( 0.00%) self.check_stack(0); 20 ( 0.00%) self.advance_left(); . } . } . . fn scan_begin(&mut self, token: BeginToken) { 41,085 ( 0.00%) if self.scan_stack.is_empty() { 20 ( 0.00%) self.left_total = 1; . self.right_total = 1; . self.buf.clear(); . } 164,340 ( 0.00%) let right = self.buf.push(BufEntry { token: Token::Begin(token), size: -self.right_total }); . self.scan_stack.push_back(right); . } . . fn scan_end(&mut self) { 41,085 ( 0.00%) if self.scan_stack.is_empty() { . self.print_end(); . } else { . let right = self.buf.push(BufEntry { token: Token::End, size: -1 }); . self.scan_stack.push_back(right); . } . } . . fn scan_break(&mut self, token: BreakToken) { 22,824 ( 0.00%) if self.scan_stack.is_empty() { . self.left_total = 1; . self.right_total = 1; . self.buf.clear(); . } else { 45,648 ( 0.00%) self.check_stack(0); . } 91,296 ( 0.00%) let right = self.buf.push(BufEntry { token: Token::Break(token), size: -self.right_total }); . self.scan_stack.push_back(right); 68,472 ( 0.00%) self.right_total += token.blank_space; . } . 826,182 ( 0.02%) fn scan_string(&mut self, string: Cow<'static, str>) { 91,798 ( 0.00%) if self.scan_stack.is_empty() { . self.print_string(&string); . } else { . let len = string.len() as isize; 91,345 ( 0.00%) self.buf.push(BufEntry { token: Token::String(string), size: len }); 274,035 ( 0.01%) self.right_total += len; . self.check_stream(); . } 730,760 ( 0.02%) } . . fn check_stream(&mut self) { 569,975 ( 0.01%) while self.right_total - self.left_total > self.space { 122,757 ( 0.00%) if *self.scan_stack.front().unwrap() == self.buf.index_of_first() { . self.scan_stack.pop_front().unwrap(); 31,832 ( 0.00%) self.buf.first_mut().unwrap().size = SIZE_INFINITY; . } 81,838 ( 0.00%) self.advance_left(); 40,919 ( 0.00%) if self.buf.is_empty() { . break; . } . } . } . 892,132 ( 0.02%) fn advance_left(&mut self) { 474,516 ( 0.01%) while self.buf.first().unwrap().size >= 0 { 589,017 ( 0.01%) let left = self.buf.pop_first().unwrap(); . . match &left.token { . Token::String(string) => { 274,035 ( 0.01%) self.left_total += string.len() as isize; . self.print_string(string); . } . Token::Break(token) => { 68,472 ( 0.00%) self.left_total += token.blank_space; . self.print_break(*token, left.size); . } 54,555 ( 0.00%) Token::Begin(token) => self.print_begin(*token, left.size), . Token::End => self.print_end(), . } . 1,570,712 ( 0.03%) self.last_printed = Some(left.token); . 196,339 ( 0.00%) if self.buf.is_empty() { . break; . } . } 327,432 ( 0.01%) } . 
205,506 ( 0.00%) fn check_stack(&mut self, mut depth: usize) { 182,826 ( 0.00%) while let Some(&index) = self.scan_stack.back() { . let mut entry = &mut self.buf[index]; 361,113 ( 0.01%) match entry.token { . Token::Begin(_) => { 73,202 ( 0.00%) if depth == 0 { . break; . } . self.scan_stack.pop_back().unwrap(); 68,700 ( 0.00%) entry.size += self.right_total; 22,900 ( 0.00%) depth -= 1; . } . Token::End => { . // paper says + not =, but that makes no sense. . self.scan_stack.pop_back().unwrap(); 41,082 ( 0.00%) entry.size = 1; 82,164 ( 0.00%) depth += 1; . } . _ => { . self.scan_stack.pop_back().unwrap(); 27,540 ( 0.00%) entry.size += self.right_total; 18,393 ( 0.00%) if depth == 0 { . break; . } . } . } . } 182,672 ( 0.00%) } . . fn get_top(&self) -> PrintFrame { 68,472 ( 0.00%) *self . .print_stack . .last() . .unwrap_or(&PrintFrame::Broken { offset: 0, breaks: Breaks::Inconsistent }) . } . . fn print_begin(&mut self, token: BeginToken, size: isize) { 123,255 ( 0.00%) if size > self.space { 90,925 ( 0.00%) let col = self.margin - self.space + token.offset; . self.print_stack.push(PrintFrame::Broken { offset: col, breaks: token.breaks }); . } else { . self.print_stack.push(PrintFrame::Fits); . } . } . . fn print_end(&mut self) { . self.print_stack.pop().unwrap(); . } . . fn print_break(&mut self, token: BreakToken, size: isize) { . let break_offset = 91,320 ( 0.00%) match self.get_top() { . PrintFrame::Fits => None, . PrintFrame::Broken { offset, breaks: Breaks::Consistent } => Some(offset), . PrintFrame::Broken { offset, breaks: Breaks::Inconsistent } => { . if size > self.space { Some(offset) } else { None } . } . }; . if let Some(offset) = break_offset { . self.out.push('\n'); 40,932 ( 0.00%) self.pending_indentation = offset + token.offset; 54,576 ( 0.00%) self.space = self.margin - (offset + token.offset); . } else { 27,540 ( 0.00%) self.pending_indentation += token.blank_space; 36,720 ( 0.00%) self.space -= token.blank_space; . } . } . . fn print_string(&mut self, string: &str) { . // Write the pending indent. A more concise way of doing this would be: . // . // write!(self.out, "{: >n$}", "", n = self.pending_indentation as usize)?; . // . // But that is significantly slower. This code is sufficiently hot, and indents can get . // sufficiently large, that the difference is significant on some workloads. 91,798 ( 0.00%) self.out.reserve(self.pending_indentation as usize); 25 ( 0.00%) self.out.extend(iter::repeat(' ').take(self.pending_indentation as usize)); 91,798 ( 0.00%) self.pending_indentation = 0; . . self.out.push_str(string); 458,537 ( 0.01%) self.space -= string.len() as isize; . } . . // Convenience functions to talk to the printer. . . /// "raw box" 72,944 ( 0.00%) pub fn rbox(&mut self, indent: usize, breaks: Breaks) { . self.scan_begin(BeginToken { offset: indent as isize, breaks }) 54,708 ( 0.00%) } . . /// Inconsistent breaking box 191,802 ( 0.00%) pub fn ibox(&mut self, indent: usize) { . self.rbox(indent, Breaks::Inconsistent) 159,835 ( 0.00%) } . . /// Consistent breaking box . pub fn cbox(&mut self, indent: usize) { . self.rbox(indent, Breaks::Consistent) . } . 182,592 ( 0.00%) pub fn break_offset(&mut self, n: usize, off: isize) { . self.scan_break(BreakToken { offset: off, blank_space: n as isize }) 136,944 ( 0.00%) } . 164,340 ( 0.00%) pub fn end(&mut self) { . self.scan_end() 164,340 ( 0.00%) } . 2,315 ( 0.00%) pub fn eof(mut self) -> String { . self.scan_eof(); 1,852 ( 0.00%) self.out 2,315 ( 0.00%) } . . pub fn word>>(&mut self, wrd: S) { . 
let string = wrd.into(); 651,985 ( 0.01%) self.scan_string(string) . } . . fn spaces(&mut self, n: usize) { 82,185 ( 0.00%) self.break_offset(n, 0) . } . . crate fn zerobreak(&mut self) { . self.spaces(0) . } . . pub fn space(&mut self) { . self.spaces(1) . } . . pub fn hardbreak(&mut self) { . self.spaces(SIZE_INFINITY as usize) . } . . pub fn is_beginning_of_line(&self) -> bool { 27,426 ( 0.00%) match self.last_token() { . Some(last_token) => last_token.is_hardbreak_tok(), . None => true, . } . } . . pub fn hardbreak_tok_offset(off: isize) -> Token { . Token::Break(BreakToken { offset: off, blank_space: SIZE_INFINITY }) . } -- line 481 ---------------------------------------- 1,443,871 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs -------------------------------------------------------------------------------- Ir -- line 84 ---------------------------------------- . pub struct SessionGlobals { . symbol_interner: symbol::Interner, . span_interner: Lock, . hygiene_data: Lock, . source_map: Lock>>, . } . . impl SessionGlobals { 12 ( 0.00%) pub fn new(edition: Edition) -> SessionGlobals { 50 ( 0.00%) SessionGlobals { 2 ( 0.00%) symbol_interner: symbol::Interner::fresh(), . span_interner: Lock::new(span_encoding::SpanInterner::default()), 6 ( 0.00%) hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), . source_map: Lock::new(None), . } 10 ( 0.00%) } . } . . #[inline] . pub fn create_session_globals_then(edition: Edition, f: impl FnOnce() -> R) -> R { 1 ( 0.00%) assert!( . !SESSION_GLOBALS.is_set(), . "SESSION_GLOBALS should never be overwritten! \ . Use another thread if you need another SessionGlobals" . ); 3 ( 0.00%) let session_globals = SessionGlobals::new(edition); 7 ( 0.00%) SESSION_GLOBALS.set(&session_globals, f) 1 ( 0.00%) } . . #[inline] . pub fn set_session_globals_then(session_globals: &SessionGlobals, f: impl FnOnce() -> R) -> R { . assert!( . !SESSION_GLOBALS.is_set(), . "SESSION_GLOBALS should never be overwritten! \ . Use another thread if you need another SessionGlobals" . ); -- line 119 ---------------------------------------- -- line 120 ---------------------------------------- . SESSION_GLOBALS.set(session_globals, f) . } . . #[inline] . pub fn create_default_session_if_not_set_then(f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 4 ( 0.00%) create_session_if_not_set_then(edition::DEFAULT_EDITION, f) . } . . #[inline] . pub fn create_session_if_not_set_then(edition: Edition, f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 1 ( 0.00%) if !SESSION_GLOBALS.is_set() { 3 ( 0.00%) let session_globals = SessionGlobals::new(edition); 11 ( 0.00%) SESSION_GLOBALS.set(&session_globals, || SESSION_GLOBALS.with(f)) 1 ( 0.00%) } else { . SESSION_GLOBALS.with(f) . } . } . . #[inline] . pub fn with_session_globals(f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 2,044,828 ( 0.04%) SESSION_GLOBALS.with(f) . } . . #[inline] . pub fn create_default_session_globals_then(f: impl FnOnce() -> R) -> R { . create_session_globals_then(edition::DEFAULT_EDITION, f) . } . . // If this ever becomes non thread-local, `decode_syntax_context` . // and `decode_expn_id` will need to be updated to handle concurrent . // deserialization. . scoped_tls::scoped_thread_local!(static SESSION_GLOBALS: SessionGlobals); . . // FIXME: We should use this enum or something like it to get rid of the . 
// use of magic `/rust/1.x/...` paths across the board. 37 ( 0.00%) #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd)] 660 ( 0.00%) #[derive(Decodable)] . pub enum RealFileName { . LocalPath(PathBuf), . /// For remapped paths (namely paths into libstd that have been mapped . /// to the appropriate spot on the local host's file system, and local file . /// system paths that have been remapped with `FilePathMapping`), . Remapped { . /// `local_path` is the (host-dependent) local path to the file. This is . /// None if the file was imported from another crate -- line 173 ---------------------------------------- -- line 179 ---------------------------------------- . } . . impl Hash for RealFileName { . fn hash(&self, state: &mut H) { . // To prevent #70924 from happening again we should only hash the . // remapped (virtualized) path if that exists. This is because . // virtualized paths to sysroot crates (/rust/$hash or /rust/$version) . // remain stable even if the corresponding local_path changes 1,827 ( 0.00%) self.remapped_path_if_available().hash(state) . } . } . . // This is functionally identical to #[derive(Encodable)], with the exception of . // an added assert statement . impl Encodable for RealFileName { . fn encode(&self, encoder: &mut S) -> Result<(), S::Error> { 18 ( 0.00%) encoder.emit_enum(|encoder| match *self { . RealFileName::LocalPath(ref local_path) => { 72 ( 0.00%) encoder.emit_enum_variant("LocalPath", 0, 1, |encoder| { . encoder.emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?; . Ok(()) . }) . } . . RealFileName::Remapped { ref local_path, ref virtual_name } => encoder . .emit_enum_variant("Remapped", 1, 2, |encoder| { . // For privacy and build reproducibility, we must not embed host-dependant path in artifacts -- line 205 ---------------------------------------- -- line 213 ---------------------------------------- . } . } . . impl RealFileName { . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. . pub fn local_path(&self) -> Option<&Path> { 2 ( 0.00%) match self { . RealFileName::LocalPath(p) => Some(p), . RealFileName::Remapped { local_path: p, virtual_name: _ } => { . p.as_ref().map(PathBuf::as_path) . } . } . } . . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. 10 ( 0.00%) pub fn into_local_path(self) -> Option { 10 ( 0.00%) match self { . RealFileName::LocalPath(p) => Some(p), . RealFileName::Remapped { local_path: p, virtual_name: _ } => p, . } 15 ( 0.00%) } . . /// Returns the path suitable for embedding into build artifacts. This would still . /// be a local path if it has not been remapped. A remapped path will not correspond . /// to a valid file system path: see `local_path_if_available()` for something that . /// is more likely to return paths into the local host file system. . pub fn remapped_path_if_available(&self) -> &Path { 1,444 ( 0.00%) match self { . RealFileName::LocalPath(p) . | RealFileName::Remapped { local_path: _, virtual_name: p } => &p, . } 10 ( 0.00%) } . . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. Otherwise returns the remapped name. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. . 
pub fn local_path_if_available(&self) -> &Path { 9 ( 0.00%) match self { . RealFileName::LocalPath(path) . | RealFileName::Remapped { local_path: None, virtual_name: path } . | RealFileName::Remapped { local_path: Some(path), virtual_name: _ } => path, . } . } . . pub fn to_string_lossy(&self, display_pref: FileNameDisplayPreference) -> Cow<'_, str> { 15 ( 0.00%) match display_pref { . FileNameDisplayPreference::Local => self.local_path_if_available().to_string_lossy(), . FileNameDisplayPreference::Remapped => { . self.remapped_path_if_available().to_string_lossy() . } . } . } . } . . /// Differentiates between real files and common virtual files. 3,879 ( 0.00%) #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)] 4,092 ( 0.00%) #[derive(Decodable, Encodable)] . pub enum FileName { . Real(RealFileName), . /// Call to `quote!`. . QuoteExpansion(u64), . /// Command line. . Anon(u64), . /// Hack in `src/librustc_ast/parse.rs`. . // FIXME(jseyfried) -- line 281 ---------------------------------------- -- line 288 ---------------------------------------- . /// Custom sources for explicit parser calls from plugins and drivers. . Custom(String), . DocTest(PathBuf, isize), . /// Post-substitution inline assembly from LLVM. . InlineAsm(u64), . } . . impl From for FileName { 63 ( 0.00%) fn from(p: PathBuf) -> Self { 18 ( 0.00%) assert!(!p.to_string_lossy().ends_with('>')); 54 ( 0.00%) FileName::Real(RealFileName::LocalPath(p)) 63 ( 0.00%) } . } . 16 ( 0.00%) #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] . pub enum FileNameDisplayPreference { . Remapped, . Local, . } . . pub struct FileNameDisplay<'a> { . inner: &'a FileName, . display_pref: FileNameDisplayPreference, . } . . impl fmt::Display for FileNameDisplay<'_> { 36 ( 0.00%) fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { . use FileName::*; 54 ( 0.00%) match *self.inner { . Real(ref name) => { 54 ( 0.00%) write!(fmt, "{}", name.to_string_lossy(self.display_pref)) . } . QuoteExpansion(_) => write!(fmt, ""), . MacroExpansion(_) => write!(fmt, ""), . Anon(_) => write!(fmt, ""), . ProcMacroSourceCode(_) => write!(fmt, ""), . CfgSpec(_) => write!(fmt, ""), . CliCrateAttr(_) => write!(fmt, ""), . Custom(ref s) => write!(fmt, "<{}>", s), . DocTest(ref path, _) => write!(fmt, "{}", path.display()), . InlineAsm(_) => write!(fmt, ""), . } 45 ( 0.00%) } . } . . impl FileNameDisplay<'_> { 24 ( 0.00%) pub fn to_string_lossy(&self) -> Cow<'_, str> { 18 ( 0.00%) match self.inner { 6 ( 0.00%) FileName::Real(ref inner) => inner.to_string_lossy(self.display_pref), . _ => Cow::from(format!("{}", self)), . } 24 ( 0.00%) } . } . . impl FileName { . pub fn is_real(&self) -> bool { . use FileName::*; 684 ( 0.00%) match *self { . Real(_) => true, . Anon(_) . | MacroExpansion(_) . | ProcMacroSourceCode(_) . | CfgSpec(_) . | CliCrateAttr(_) . | Custom(_) . | QuoteExpansion(_) . | DocTest(_, _) . | InlineAsm(_) => false, . } . } . 6 ( 0.00%) pub fn prefer_remapped(&self) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped } 12 ( 0.00%) } . . // This may include transient local filesystem information. . // Must not be embedded in build outputs. 9 ( 0.00%) pub fn prefer_local(&self) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Local } 18 ( 0.00%) } . . pub fn display(&self, display_pref: FileNameDisplayPreference) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref } . } . . 
pub fn macro_expansion_source_code(src: &str) -> FileName { . let mut hasher = StableHasher::new(); . src.hash(&mut hasher); -- line 375 ---------------------------------------- -- line 423 ---------------------------------------- . /// that the length of the span is equal to `span.hi - span.lo`; there may be space in the . /// [`BytePos`] range between files. . /// . /// `SpanData` is public because `Span` uses a thread-local interner and can't be . /// sent to other threads, but some pieces of performance infra run in a separate thread. . /// Using `Span` is generally preferred. . #[derive(Clone, Copy, Hash, PartialEq, Eq)] . pub struct SpanData { 10 ( 0.00%) pub lo: BytePos, 10 ( 0.00%) pub hi: BytePos, . /// Information about where the macro came from, if this piece of . /// code was created by a macro expansion. 15 ( 0.00%) pub ctxt: SyntaxContext, 5 ( 0.00%) pub parent: Option, . } . . // Order spans by position in the file. . impl Ord for SpanData { . fn cmp(&self, other: &Self) -> Ordering { . let SpanData { . lo: s_lo, . hi: s_hi, -- line 444 ---------------------------------------- -- line 485 ---------------------------------------- . } . #[inline] . pub fn with_parent(&self, parent: Option) -> Span { . Span::new(self.lo, self.hi, self.ctxt, parent) . } . /// Returns `true` if this is a dummy span with any hygienic context. . #[inline] . pub fn is_dummy(self) -> bool { 512,024 ( 0.01%) self.lo.0 == 0 && self.hi.0 == 0 . } . /// Returns `true` if `self` fully encloses `other`. . pub fn contains(self, other: Self) -> bool { 183 ( 0.00%) self.lo <= other.lo && other.hi <= self.hi . } . } . . // The interner is pointed to by a thread local value which is only set on the main thread . // with parallelization is disabled. So we don't allow `Span` to transfer between threads . // to avoid panics and other errors, even though it would be memory safe to do so. . #[cfg(not(parallel_compiler))] . impl !Send for Span {} . #[cfg(not(parallel_compiler))] . impl !Sync for Span {} . . impl PartialOrd for Span { 8,552 ( 0.00%) fn partial_cmp(&self, rhs: &Self) -> Option { 6,414 ( 0.00%) PartialOrd::partial_cmp(&self.data(), &rhs.data()) 8,552 ( 0.00%) } . } . impl Ord for Span { . fn cmp(&self, rhs: &Self) -> Ordering { . Ord::cmp(&self.data(), &rhs.data()) . } . } . . /// A collection of `Span`s. -- line 520 ---------------------------------------- -- line 532 ---------------------------------------- . } . . impl Span { . #[inline] . pub fn lo(self) -> BytePos { . self.data().lo . } . #[inline] 136,665 ( 0.00%) pub fn with_lo(self, lo: BytePos) -> Span { . self.data().with_lo(lo) 91,110 ( 0.00%) } . #[inline] 1,056 ( 0.00%) pub fn hi(self) -> BytePos { . self.data().hi 1,056 ( 0.00%) } . #[inline] 7,227 ( 0.00%) pub fn with_hi(self, hi: BytePos) -> Span { . self.data().with_hi(hi) 4,818 ( 0.00%) } . #[inline] . pub fn ctxt(self) -> SyntaxContext { . self.data_untracked().ctxt . } . #[inline] 49,145 ( 0.00%) pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span { . self.data_untracked().with_ctxt(ctxt) 29,487 ( 0.00%) } . #[inline] . pub fn parent(self) -> Option { . self.data().parent . } . #[inline] . pub fn with_parent(self, ctxt: Option) -> Span { . self.data().with_parent(ctxt) . } -- line 566 ---------------------------------------- -- line 568 ---------------------------------------- . /// Returns `true` if this is a dummy span with any hygienic context. . #[inline] . pub fn is_dummy(self) -> bool { . self.data_untracked().is_dummy() . } . . 
/// Returns `true` if this span comes from a macro or desugaring. . #[inline] 9 ( 0.00%) pub fn from_expansion(self) -> bool { . self.ctxt() != SyntaxContext::root() 6 ( 0.00%) } . . /// Returns `true` if `span` originates in a derive-macro's expansion. . pub fn in_derive_expansion(self) -> bool { . matches!(self.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Derive, _)) . } . . /// Gate suggestions that would not be appropriate in a context the user didn't write. . pub fn can_be_used_for_suggestions(self) -> bool { -- line 586 ---------------------------------------- -- line 618 ---------------------------------------- . } . . /// Returns `self` if `self` is not the dummy span, and `other` otherwise. . pub fn substitute_dummy(self, other: Span) -> Span { . if self.is_dummy() { other } else { self } . } . . /// Returns `true` if `self` fully encloses `other`. 549 ( 0.00%) pub fn contains(self, other: Span) -> bool { . let span = self.data(); . let other = other.data(); . span.contains(other) 366 ( 0.00%) } . . /// Returns `true` if `self` touches `other`. . pub fn overlaps(self, other: Span) -> bool { . let span = self.data(); . let other = other.data(); . span.lo < other.hi && other.lo < span.hi . } . -- line 638 ---------------------------------------- -- line 650 ---------------------------------------- . pub fn trim_start(self, other: Span) -> Option { . let span = self.data(); . let other = other.data(); . if span.hi > other.hi { Some(span.with_lo(cmp::max(span.lo, other.hi))) } else { None } . } . . /// Returns the source span -- this is either the supplied span, or the span for . /// the macro callsite that expanded to it. 40 ( 0.00%) pub fn source_callsite(self) -> Span { . let expn_data = self.ctxt().outer_expn_data(); 16 ( 0.00%) if !expn_data.is_root() { expn_data.call_site.source_callsite() } else { self } 32 ( 0.00%) } . . /// The `Span` for the tokens in the previous macro expansion from which `self` was generated, . /// if any. . pub fn parent_callsite(self) -> Option { . let expn_data = self.ctxt().outer_expn_data(); . if !expn_data.is_root() { Some(expn_data.call_site) } else { None } . } . . /// Walk down the expansion ancestors to find a span that's contained within `outer`. 610 ( 0.00%) pub fn find_ancestor_inside(mut self, outer: Span) -> Option { 305 ( 0.00%) while !outer.contains(self) { . self = self.parent_callsite()?; . } . Some(self) 671 ( 0.00%) } . . /// Edition of the crate from which this span came. 32,070 ( 0.00%) pub fn edition(self) -> edition::Edition { . self.ctxt().edition() 21,380 ( 0.00%) } . . #[inline] . pub fn rust_2015(self) -> bool { 10,396 ( 0.00%) self.edition() == edition::Edition::Edition2015 . } . . #[inline] . pub fn rust_2018(self) -> bool { 232 ( 0.00%) self.edition() >= edition::Edition::Edition2018 . } . . #[inline] . pub fn rust_2021(self) -> bool { 464 ( 0.00%) self.edition() >= edition::Edition::Edition2021 . } . . /// Returns the source callee. . /// . /// Returns `None` if the supplied span has no expansion trace, . /// else returns the `ExpnData` for the macro definition . /// corresponding to the source callsite. . pub fn source_callee(self) -> Option { -- line 703 ---------------------------------------- -- line 707 ---------------------------------------- . } . let expn_data = self.ctxt().outer_expn_data(); . if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None } . } . . /// Checks if a span is "internal" to a macro in which `#[unstable]` . /// items can be used (that is, a macro marked with . 
/// `#[allow_internal_unstable]`). 110 ( 0.00%) pub fn allows_unstable(self, feature: Symbol) -> bool { 22 ( 0.00%) self.ctxt() . .outer_expn_data() . .allow_internal_unstable . .map_or(false, |features| features.iter().any(|&f| f == feature)) 88 ( 0.00%) } . . /// Checks if this span arises from a compiler desugaring of kind `kind`. 4,186 ( 0.00%) pub fn is_desugaring(self, kind: DesugaringKind) -> bool { 3,588 ( 0.00%) match self.ctxt().outer_expn_data().kind { . ExpnKind::Desugaring(k) => k == kind, . _ => false, . } 2,990 ( 0.00%) } . . /// Returns the compiler desugaring that created this span, or `None` . /// if this span is not from a desugaring. 15 ( 0.00%) pub fn desugaring_kind(self) -> Option { 18 ( 0.00%) match self.ctxt().outer_expn_data().kind { . ExpnKind::Desugaring(k) => Some(k), . _ => None, . } 12 ( 0.00%) } . . /// Checks if a span is "internal" to a macro in which `unsafe` . /// can be used without triggering the `unsafe_code` lint. . // (that is, a macro marked with `#[allow_internal_unsafe]`). . pub fn allows_unsafe(self) -> bool { . self.ctxt().outer_expn_data().allow_internal_unsafe . } . -- line 745 ---------------------------------------- -- line 767 ---------------------------------------- . . /// Returns a `Span` that would enclose both `self` and `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^^^^^^^^ . /// ``` 947,958 ( 0.02%) pub fn to(self, end: Span) -> Span { . let span_data = self.data(); . let end_data = end.data(); . // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). . // Return the macro span on its own to avoid weird diagnostic output. It is preferable to . // have an incomplete span than a completely nonsensical one. 162,566 ( 0.00%) if span_data.ctxt != end_data.ctxt { 9,784 ( 0.00%) if span_data.ctxt == SyntaxContext::root() { . return end; 9,784 ( 0.00%) } else if end_data.ctxt == SyntaxContext::root() { . return self; . } . // Both spans fall within a macro. . // FIXME(estebank): check if it is the *same* macro. . } . Span::new( . cmp::min(span_data.lo, end_data.lo), . cmp::max(span_data.hi, end_data.hi), . if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, 534,926 ( 0.01%) if span_data.parent == end_data.parent { span_data.parent } else { None }, . ) 775,602 ( 0.02%) } . . /// Returns a `Span` between the end of `self` to the beginning of `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^ . /// ``` 231 ( 0.00%) pub fn between(self, end: Span) -> Span { . let span = self.data(); . let end = end.data(); . Span::new( . span.hi, . end.lo, . if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt }, 147 ( 0.00%) if span.parent == end.parent { span.parent } else { None }, . ) 168 ( 0.00%) } . . /// Returns a `Span` from the beginning of `self` until the beginning of `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^^^^^ . /// ``` 132 ( 0.00%) pub fn until(self, end: Span) -> Span { . // Most of this function's body is copied from `to`. . // We can't just do `self.to(end.shrink_to_lo())`, . // because to also does some magic where it uses min/max so . // it can handle overlapping spans. Some advanced mis-use of . // `until` with different ctxts makes this visible. . let span_data = self.data(); . let end_data = end.data(); . // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). . 
// Return the macro span on its own to avoid weird diagnostic output. It is preferable to . // have an incomplete span than a completely nonsensical one. 24 ( 0.00%) if span_data.ctxt != end_data.ctxt { . if span_data.ctxt == SyntaxContext::root() { . return end; . } else if end_data.ctxt == SyntaxContext::root() { . return self; . } . // Both spans fall within a macro. . // FIXME(estebank): check if it is the *same* macro. . } . Span::new( . span_data.lo, . end_data.lo, . if end_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, 84 ( 0.00%) if span_data.parent == end_data.parent { span_data.parent } else { None }, . ) 108 ( 0.00%) } . . pub fn from_inner(self, inner: InnerSpan) -> Span { . let span = self.data(); . Span::new( . span.lo + BytePos::from_usize(inner.start), . span.lo + BytePos::from_usize(inner.end), . span.ctxt, . span.parent, . ) . } . . /// Equivalent of `Span::def_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span { 1,046 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::Opaque) . } . . /// Equivalent of `Span::call_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_call_site_ctxt(self, expn_id: ExpnId) -> Span { 2 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::Transparent) . } . . /// Equivalent of `Span::mixed_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_mixed_site_ctxt(self, expn_id: ExpnId) -> Span { . self.with_ctxt_from_mark(expn_id, Transparency::SemiTransparent) . } . . /// Produces a span with the same location as `self` and context produced by a macro with the . /// given ID and transparency, assuming that macro was defined directly and not produced by . /// some other macro (which is the case for built-in and procedural macros). 6,288 ( 0.00%) pub fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { . self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency)) 3,668 ( 0.00%) } . . #[inline] . pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { . let span = self.data(); 532,179 ( 0.01%) span.with_ctxt(span.ctxt.apply_mark(expn_id, transparency)) . } . . #[inline] . pub fn remove_mark(&mut self) -> ExpnId { . let mut span = self.data(); . let mark = span.ctxt.remove_mark(); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark -- line 897 ---------------------------------------- -- line 901 ---------------------------------------- . pub fn adjust(&mut self, expn_id: ExpnId) -> Option { . let mut span = self.data(); . let mark = span.ctxt.adjust(expn_id); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] 264,726 ( 0.01%) pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option { 176,484 ( 0.00%) let mut span = self.data(); 146,968 ( 0.00%) let mark = span.ctxt.normalize_to_macros_2_0_and_adjust(expn_id); 264,726 ( 0.01%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark 294,038 ( 0.01%) } . . #[inline] . pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option> { . let mut span = self.data(); . let mark = span.ctxt.glob_adjust(expn_id, glob_span); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } -- line 922 ---------------------------------------- -- line 929 ---------------------------------------- . 
) -> Option> { . let mut span = self.data(); . let mark = span.ctxt.reverse_glob_adjust(expn_id, glob_span); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] 86,952 ( 0.00%) pub fn normalize_to_macros_2_0(self) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.normalize_to_macros_2_0()) 76,083 ( 0.00%) } . . #[inline] . pub fn normalize_to_macro_rules(self) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.normalize_to_macro_rules()) . } . } . -- line 948 ---------------------------------------- -- line 958 ---------------------------------------- . . /// What label should we attach to this span (if any)? . pub label: Option, . } . . impl Default for Span { . fn default() -> Self { . DUMMY_SP 2 ( 0.00%) } . } . . impl Encodable for Span { . default fn encode(&self, s: &mut E) -> Result<(), E::Error> { . let span = self.data(); . s.emit_struct(false, |s| { . s.emit_struct_field("lo", true, |s| span.lo.encode(s))?; . s.emit_struct_field("hi", false, |s| span.hi.encode(s)) -- line 974 ---------------------------------------- -- line 990 ---------------------------------------- . /// any spans that are debug-printed during the closure's execution. . /// . /// Normally, the global `TyCtxt` is used to retrieve the `SourceMap` . /// (see `rustc_interface::callbacks::span_debug1`). However, some parts . /// of the compiler (e.g. `rustc_parse`) may debug-print `Span`s before . /// a `TyCtxt` is available. In this case, we fall back to . /// the `SourceMap` provided to this function. If that is not available, . /// we fall back to printing the raw `Span` field values. 9 ( 0.00%) pub fn with_source_map T>(source_map: Lrc, f: F) -> T { . with_session_globals(|session_globals| { 2 ( 0.00%) *session_globals.source_map.borrow_mut() = Some(source_map); . }); . struct ClearSourceMap; . impl Drop for ClearSourceMap { . fn drop(&mut self) { . with_session_globals(|session_globals| { 1 ( 0.00%) session_globals.source_map.borrow_mut().take(); . }); . } . } . . let _guard = ClearSourceMap; 4 ( 0.00%) f() 8 ( 0.00%) } . . pub fn debug_with_source_map( . span: Span, . f: &mut fmt::Formatter<'_>, . source_map: &SourceMap, . ) -> fmt::Result { . write!(f, "{} ({:?})", source_map.span_to_diagnostic_string(span), span.ctxt()) . } -- line 1021 ---------------------------------------- -- line 1048 ---------------------------------------- . . impl MultiSpan { . #[inline] . pub fn new() -> MultiSpan { . MultiSpan { primary_spans: vec![], span_labels: vec![] } . } . . pub fn from_span(primary_span: Span) -> MultiSpan { 380 ( 0.00%) MultiSpan { primary_spans: vec![primary_span], span_labels: vec![] } . } . . pub fn from_spans(mut vec: Vec) -> MultiSpan { . vec.sort(); 30 ( 0.00%) MultiSpan { primary_spans: vec, span_labels: vec![] } . } . . pub fn push_span_label(&mut self, span: Span, label: String) { . self.span_labels.push((span, label)); . } . . /// Selects the first primary span (if any). . pub fn primary_span(&self) -> Option { . self.primary_spans.first().cloned() 56 ( 0.00%) } . . /// Returns all primary spans. . pub fn primary_spans(&self) -> &[Span] { . &self.primary_spans . } . . /// Returns `true` if any of the primary spans are displayable. . pub fn has_primary_spans(&self) -> bool { -- line 1079 ---------------------------------------- -- line 1139 ---------------------------------------- . . /// Returns `true` if any of the span labels is displayable. . pub fn has_span_labels(&self) -> bool { . 
self.span_labels.iter().any(|(sp, _)| !sp.is_dummy()) . } . } . . impl From for MultiSpan { 380 ( 0.00%) fn from(span: Span) -> MultiSpan { . MultiSpan::from_span(span) 380 ( 0.00%) } . } . . impl From> for MultiSpan { 30 ( 0.00%) fn from(spans: Vec) -> MultiSpan { 24 ( 0.00%) MultiSpan::from_spans(spans) 30 ( 0.00%) } . } . . /// Identifies an offset of a multi-byte character in a `SourceFile`. 1,408 ( 0.00%) #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub struct MultiByteChar { . /// The absolute offset of the character in the `SourceMap`. . pub pos: BytePos, . /// The number of bytes, `>= 2`. . pub bytes: u8, . } . . /// Identifies an offset of a non-narrow character in a `SourceFile`. 839 ( 0.00%) #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub enum NonNarrowChar { . /// Represents a zero-width character. . ZeroWidth(BytePos), . /// Represents a wide (full-width) character. . Wide(BytePos), . /// Represents a tab character, represented visually with a width of 4 characters. . Tab(BytePos), . } . . impl NonNarrowChar { . fn new(pos: BytePos, width: usize) -> Self { 200 ( 0.00%) match width { . 0 => NonNarrowChar::ZeroWidth(pos), . 2 => NonNarrowChar::Wide(pos), . 4 => NonNarrowChar::Tab(pos), . _ => panic!("width {} given for non-narrow character", width), . } . } . . /// Returns the absolute offset of the character in the `SourceMap`. -- line 1188 ---------------------------------------- -- line 1201 ---------------------------------------- . } . } . } . . impl Add for NonNarrowChar { . type Output = Self; . . fn add(self, rhs: BytePos) -> Self { 466 ( 0.00%) match self { . NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs), . NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs), . NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos + rhs), . } . } . } . . impl Sub for NonNarrowChar { . type Output = Self; . 134 ( 0.00%) fn sub(self, rhs: BytePos) -> Self { 466 ( 0.00%) match self { . NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs), . NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs), . NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos - rhs), . } 402 ( 0.00%) } . } . . /// Identifies an offset of a character that was normalized away from `SourceFile`. . #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub struct NormalizedPos { . /// The absolute offset of the character in the `SourceMap`. . pub pos: BytePos, . /// The difference between original and normalized string at position. . pub diff: u32, . } . 27 ( 0.00%) #[derive(PartialEq, Eq, Clone, Debug)] . pub enum ExternalSource { . /// No external source has to be loaded, since the `SourceFile` represents a local crate. . Unneeded, . Foreign { . kind: ExternalSourceKind, . /// This SourceFile's byte-offset within the source_map of its original crate. . original_start_pos: BytePos, . /// The end of this SourceFile within the source_map of its original crate. -- line 1246 ---------------------------------------- -- line 1257 ---------------------------------------- . AbsentOk, . /// A failed attempt has been made to load the external source. . AbsentErr, . Unneeded, . } . . impl ExternalSource { . pub fn get_source(&self) -> Option<&Lrc> { 1,529 ( 0.00%) match self { . ExternalSource::Foreign { kind: ExternalSourceKind::Present(ref src), .. } => Some(src), . _ => None, . } . } . } . . #[derive(Debug)] . pub struct OffsetOverflowError; . 
666 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] . pub enum SourceFileHashAlgorithm { . Md5, . Sha1, . Sha256, . } . . impl FromStr for SourceFileHashAlgorithm { . type Err = (); -- line 1283 ---------------------------------------- -- line 1290 ---------------------------------------- . _ => Err(()), . } . } . } . . rustc_data_structures::impl_stable_hash_via_hash!(SourceFileHashAlgorithm); . . /// The hash of the on-disk source file used for debug info. 54 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Debug)] 1,332 ( 0.00%) #[derive(HashStable_Generic, Encodable, Decodable)] . pub struct SourceFileHash { . pub kind: SourceFileHashAlgorithm, . value: [u8; 32], . } . . impl SourceFileHash { . pub fn new(kind: SourceFileHashAlgorithm, src: &str) -> SourceFileHash { 1 ( 0.00%) let mut hash = SourceFileHash { kind, value: Default::default() }; . let len = hash.hash_len(); . let value = &mut hash.value[..len]; . let data = src.as_bytes(); . match kind { . SourceFileHashAlgorithm::Md5 => { 30 ( 0.00%) value.copy_from_slice(&Md5::digest(data)); . } . SourceFileHashAlgorithm::Sha1 => { . value.copy_from_slice(&Sha1::digest(data)); . } . SourceFileHashAlgorithm::Sha256 => { . value.copy_from_slice(&Sha256::digest(data)); . } . } . hash . } . . /// Check if the stored hash matches the hash of the string. . pub fn matches(&self, src: &str) -> bool { 1 ( 0.00%) Self::new(self.kind, src) == *self . } . . /// The bytes of the hash. . pub fn hash_bytes(&self) -> &[u8] { . let len = self.hash_len(); . &self.value[..len] . } . . fn hash_len(&self) -> usize { 29 ( 0.00%) match self.kind { . SourceFileHashAlgorithm::Md5 => 16, . SourceFileHashAlgorithm::Sha1 => 20, . SourceFileHashAlgorithm::Sha256 => 32, . } . } . } . . /// A single source in the [`SourceMap`]. 522 ( 0.00%) #[derive(Clone)] . pub struct SourceFile { . /// The name of the file that the source came from. Source that doesn't . /// originate from files has names between angle brackets by convention . /// (e.g., ``). . pub name: FileName, . /// The complete source code. 9 ( 0.00%) pub src: Option>, . /// The source code's hash. . pub src_hash: SourceFileHash, . /// The external source code (used for external crates, which will have a `None` . /// value as `self.src`. . pub external_src: Lock, . /// The start position of this source in the `SourceMap`. . pub start_pos: BytePos, . /// The end position of this source in the `SourceMap`. -- line 1361 ---------------------------------------- -- line 1364 ---------------------------------------- . pub lines: Vec, . /// Locations of multi-byte characters in the source code. . pub multibyte_chars: Vec, . /// Width of characters that are not narrow in the source code. . pub non_narrow_chars: Vec, . /// Locations of characters removed during normalization. . pub normalized_pos: Vec, . /// A hash of the filename, used for speeding up hashing in incremental compilation. 9 ( 0.00%) pub name_hash: u128, . /// Indicates which crate this `SourceFile` was imported from. 9 ( 0.00%) pub cnum: CrateNum, . } . . impl Encodable for SourceFile { . fn encode(&self, s: &mut S) -> Result<(), S::Error> { . s.emit_struct(false, |s| { . s.emit_struct_field("name", true, |s| self.name.encode(s))?; . s.emit_struct_field("src_hash", false, |s| self.src_hash.encode(s))?; . s.emit_struct_field("start_pos", false, |s| self.start_pos.encode(s))?; . s.emit_struct_field("end_pos", false, |s| self.end_pos.encode(s))?; . s.emit_struct_field("lines", false, |s| { . 
let lines = &self.lines[..]; . // Store the length. . s.emit_u32(lines.len() as u32)?; . 36 ( 0.00%) if !lines.is_empty() { . // In order to preserve some space, we exploit the fact that . // the lines list is sorted and individual lines are . // probably not that long. Because of that we can store lines . // as a difference list, using as little space as possible . // for the differences. . let max_line_length = if lines.len() == 1 { . 0 . } else { -- line 1397 ---------------------------------------- -- line 1399 ---------------------------------------- . .array_windows() . .map(|&[fst, snd]| snd - fst) . .map(|bp| bp.to_usize()) . .max() . .unwrap() . }; . . let bytes_per_diff: u8 = match max_line_length { 18 ( 0.00%) 0..=0xFF => 1, . 0x100..=0xFFFF => 2, . _ => 4, . }; . . // Encode the number of bytes used per diff. . bytes_per_diff.encode(s)?; . . // Encode the first element. . lines[0].encode(s)?; . . let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst); . 18 ( 0.00%) match bytes_per_diff { . 1 => { . for diff in diff_iter { . (diff.0 as u8).encode(s)? . } . } . 2 => { . for diff in diff_iter { . (diff.0 as u16).encode(s)? -- line 1428 ---------------------------------------- -- line 1436 ---------------------------------------- . _ => unreachable!(), . } . } . . Ok(()) . })?; . s.emit_struct_field("multibyte_chars", false, |s| self.multibyte_chars.encode(s))?; . s.emit_struct_field("non_narrow_chars", false, |s| self.non_narrow_chars.encode(s))?; 27 ( 0.00%) s.emit_struct_field("name_hash", false, |s| self.name_hash.encode(s))?; . s.emit_struct_field("normalized_pos", false, |s| self.normalized_pos.encode(s))?; 99 ( 0.00%) s.emit_struct_field("cnum", false, |s| self.cnum.encode(s)) . }) . } . } . . impl Decodable for SourceFile { 2,997 ( 0.00%) fn decode(d: &mut D) -> SourceFile { . d.read_struct(|d| { . let name: FileName = d.read_struct_field("name", |d| Decodable::decode(d)); . let src_hash: SourceFileHash = . d.read_struct_field("src_hash", |d| Decodable::decode(d)); . let start_pos: BytePos = d.read_struct_field("start_pos", |d| Decodable::decode(d)); . let end_pos: BytePos = d.read_struct_field("end_pos", |d| Decodable::decode(d)); . let lines: Vec = d.read_struct_field("lines", |d| { . let num_lines: u32 = Decodable::decode(d); 333 ( 0.00%) let mut lines = Vec::with_capacity(num_lines as usize); . . if num_lines > 0 { . // Read the number of bytes used per diff. . let bytes_per_diff: u8 = Decodable::decode(d); . . // Read the first element. . let mut line_start: BytePos = Decodable::decode(d); . lines.push(line_start); . . for _ in 1..num_lines { 639,524 ( 0.01%) let diff = match bytes_per_diff { . 1 => d.read_u8() as u32, . 2 => d.read_u16() as u32, . 4 => d.read_u32(), . _ => unreachable!(), . }; . . line_start = line_start + BytePos(diff); . -- line 1480 ---------------------------------------- -- line 1483 ---------------------------------------- . } . . lines . }); . let multibyte_chars: Vec = . d.read_struct_field("multibyte_chars", |d| Decodable::decode(d)); . let non_narrow_chars: Vec = . d.read_struct_field("non_narrow_chars", |d| Decodable::decode(d)); 666 ( 0.00%) let name_hash: u128 = d.read_struct_field("name_hash", |d| Decodable::decode(d)); . let normalized_pos: Vec = . d.read_struct_field("normalized_pos", |d| Decodable::decode(d)); . let cnum: CrateNum = d.read_struct_field("cnum", |d| Decodable::decode(d)); 3,663 ( 0.00%) SourceFile { 2,664 ( 0.00%) name, . start_pos, . end_pos, . src: None, 1,332 ( 0.00%) src_hash, . 
// Unused - the metadata decoder will construct . // a new SourceFile, filling in `external_src` properly . external_src: Lock::new(ExternalSource::Unneeded), 1,332 ( 0.00%) lines, 1,332 ( 0.00%) multibyte_chars, 1,332 ( 0.00%) non_narrow_chars, 1,332 ( 0.00%) normalized_pos, . name_hash, . cnum, . } . }) 2,997 ( 0.00%) } . } . . impl fmt::Debug for SourceFile { . fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(fmt, "SourceFile({:?})", self.name) . } . } . . impl SourceFile { 99 ( 0.00%) pub fn new( . name: FileName, . mut src: String, . start_pos: BytePos, . hash_kind: SourceFileHashAlgorithm, . ) -> Self { . // Compute the file hash before any normalization. . let src_hash = SourceFileHash::new(hash_kind, &src); 27 ( 0.00%) let normalized_pos = normalize_src(&mut src, start_pos); . . let name_hash = { . let mut hasher: StableHasher = StableHasher::new(); 18 ( 0.00%) name.hash(&mut hasher); . hasher.finish::() . }; 18 ( 0.00%) let end_pos = start_pos.to_usize() + src.len(); 27 ( 0.00%) assert!(end_pos <= u32::MAX as usize); . 108 ( 0.00%) let (lines, multibyte_chars, non_narrow_chars) = 18 ( 0.00%) analyze_source_file::analyze_source_file(&src, start_pos); . 153 ( 0.00%) SourceFile { . name, . src: Some(Lrc::new(src)), 99 ( 0.00%) src_hash, . external_src: Lock::new(ExternalSource::Unneeded), . start_pos, . end_pos: Pos::from_usize(end_pos), 36 ( 0.00%) lines, 36 ( 0.00%) multibyte_chars, 36 ( 0.00%) non_narrow_chars, 36 ( 0.00%) normalized_pos, . name_hash, . cnum: LOCAL_CRATE, . } 81 ( 0.00%) } . . /// Returns the `BytePos` of the beginning of the current line. . pub fn line_begin_pos(&self, pos: BytePos) -> BytePos { . let line_index = self.lookup_line(pos).unwrap(); . self.lines[line_index] . } . . /// Add externally loaded source. . /// If the hash of the input doesn't match or no input is supplied via None, . /// it is interpreted as an error and the corresponding enum variant is set. . /// The return value signifies whether some kind of source is present. 651 ( 0.00%) pub fn add_external_src(&self, get_src: F) -> bool . where . F: FnOnce() -> Option, . { 249 ( 0.00%) if matches!( 186 ( 0.00%) *self.external_src.borrow(), . ExternalSource::Foreign { kind: ExternalSourceKind::AbsentOk, .. } . ) { 1 ( 0.00%) let src = get_src(); . let mut external_src = self.external_src.borrow_mut(); . // Check that no-one else have provided the source while we were getting it 3 ( 0.00%) if let ExternalSource::Foreign { . kind: src_kind @ ExternalSourceKind::AbsentOk, .. . } = &mut *external_src . { 5 ( 0.00%) if let Some(mut src) = src { . // The src_hash needs to be computed on the pre-normalized src. 1 ( 0.00%) if self.src_hash.matches(&src) { 3 ( 0.00%) normalize_src(&mut src, BytePos::from_usize(0)); 8 ( 0.00%) *src_kind = ExternalSourceKind::Present(Lrc::new(src)); . return true; . } . } else { . *src_kind = ExternalSourceKind::AbsentErr; . } . . false . } else { . self.src.is_some() || external_src.get_source().is_some() . } . } else { 92 ( 0.00%) self.src.is_some() || self.external_src.borrow().get_source().is_some() . } 837 ( 0.00%) } . . /// Gets a line from the list of pre-computed line-beginnings. . /// The line number here is 0-based. . pub fn get_line(&self, line_number: usize) -> Option> { . fn get_until_newline(src: &str, begin: usize) -> &str { . // We can't use `lines.get(line_number+1)` because we might . // be parsing when we call this function and thus the current . // line is the last one we have line info for. 
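// NOTE: standalone sketch, not part of the annotated rustc sources in this dump. It
// mirrors the difference-list scheme used by the `Encodable`/`Decodable` impls for the
// `lines` table above: emit the count, then the smallest of 1/2/4 bytes that fits the
// largest line-to-line gap, then the first absolute offset, then one diff per remaining
// line. Helper names and the little-endian layout here are illustrative only.
use std::convert::TryInto;

fn encode_lines(lines: &[u32]) -> Vec<u8> {
    let mut out = Vec::new();
    out.extend_from_slice(&(lines.len() as u32).to_le_bytes());
    if lines.is_empty() {
        return out;
    }
    let max_diff = lines.windows(2).map(|w| w[1] - w[0]).max().unwrap_or(0);
    let bytes_per_diff: u8 = if max_diff <= 0xFF { 1 } else if max_diff <= 0xFFFF { 2 } else { 4 };
    out.push(bytes_per_diff);
    out.extend_from_slice(&lines[0].to_le_bytes());
    for w in lines.windows(2) {
        let d = w[1] - w[0];
        // Only the low `bytes_per_diff` bytes are needed for every diff.
        out.extend_from_slice(&d.to_le_bytes()[..bytes_per_diff as usize]);
    }
    out
}

fn decode_lines(buf: &[u8]) -> Vec<u32> {
    let n = u32::from_le_bytes(buf[0..4].try_into().unwrap()) as usize;
    let mut lines = Vec::with_capacity(n);
    if n == 0 {
        return lines;
    }
    let bpd = buf[4] as usize;
    let mut pos = u32::from_le_bytes(buf[5..9].try_into().unwrap());
    lines.push(pos);
    let mut i = 9;
    for _ in 1..n {
        let mut d = [0u8; 4];
        d[..bpd].copy_from_slice(&buf[i..i + bpd]);
        pos += u32::from_le_bytes(d);
        lines.push(pos);
        i += bpd;
    }
    lines
}

fn main() {
    // Largest gap is 219, so every diff fits in a single byte.
    let lines = vec![0u32, 14, 31, 250, 260];
    assert_eq!(decode_lines(&encode_lines(&lines)), lines);
}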
-- line 1610 ---------------------------------------- -- line 1627 ---------------------------------------- . Some(Cow::Owned(String::from(get_until_newline(src, begin)))) . } else { . None . } . } . . pub fn is_real_file(&self) -> bool { . self.name.is_real() 342 ( 0.00%) } . . pub fn is_imported(&self) -> bool { . self.src.is_none() 75,326 ( 0.00%) } . . pub fn count_lines(&self) -> usize { . self.lines.len() . } . . /// Finds the line containing the given position. The return value is the . /// index into the `lines` array of this `SourceFile`, not the 1-based line . /// number. If the source_file is empty or the position is located before the . /// first line, `None` is returned. . pub fn lookup_line(&self, pos: BytePos) -> Option { 41,304 ( 0.00%) match self.lines.binary_search(&pos) { . Ok(idx) => Some(idx), . Err(0) => None, . Err(idx) => Some(idx - 1), . } . } . . pub fn line_bounds(&self, line_index: usize) -> Range { 146,413 ( 0.00%) if self.is_empty() { . return self.start_pos..self.end_pos; . } . 91,388 ( 0.00%) assert!(line_index < self.lines.len()); 136,875 ( 0.00%) if line_index == (self.lines.len() - 1) { 13,732 ( 0.00%) self.lines[line_index]..self.end_pos . } else { 82,092 ( 0.00%) self.lines[line_index]..self.lines[line_index + 1] . } . } . . /// Returns whether or not the file contains the given `SourceMap` byte . /// position. The position one past the end of the file is considered to be . /// contained by the file. This implies that files for which `is_empty` . /// returns true still contain one byte position according to this function. . #[inline] -- line 1674 ---------------------------------------- -- line 1692 ---------------------------------------- . Err(i) if i == 0 => 0, . Err(i) => self.normalized_pos[i - 1].diff, . }; . . BytePos::from_u32(pos.0 - self.start_pos.0 + diff) . } . . /// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`. 22 ( 0.00%) pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { . // The number of extra bytes due to multibyte chars in the `SourceFile`. . let mut total_extra_bytes = 0; . 66 ( 0.00%) for mbc in self.multibyte_chars.iter() { . debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); 516 ( 0.00%) if mbc.pos < bpos { . // Every character is at least one byte, so we only . // count the actual extra bytes. 1,536 ( 0.00%) total_extra_bytes += mbc.bytes as u32 - 1; . // We should never see a byte position in the middle of a . // character. 1,280 ( 0.00%) assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); . } else { . break; . } . } . 132 ( 0.00%) assert!(self.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); 110 ( 0.00%) CharPos(bpos.to_usize() - self.start_pos.to_usize() - total_extra_bytes as usize) 44 ( 0.00%) } . . /// Looks up the file's (1-based) line number and (0-based `CharPos`) column offset, for a . /// given `BytePos`. 77 ( 0.00%) pub fn lookup_file_pos(&self, pos: BytePos) -> (usize, CharPos) { 22 ( 0.00%) let chpos = self.bytepos_to_file_charpos(pos); 10 ( 0.00%) match self.lookup_line(pos) { . Some(a) => { . let line = a + 1; // Line numbers start at 1 11 ( 0.00%) let linebpos = self.lines[a]; 22 ( 0.00%) let linechpos = self.bytepos_to_file_charpos(linebpos); . let col = chpos - linechpos; . debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); . debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos); . debug!("byte is on line: {}", line); 11 ( 0.00%) assert!(chpos >= linechpos); . (line, col) . } . None => (0, chpos), . 
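// NOTE: worked example (not taken from the profiled run) of the multibyte adjustment
// in `bytepos_to_file_charpos` above, assuming a file whose `start_pos` is 0 and whose
// only `multibyte_chars` entry is a 2-byte character ('é') at BytePos(5):
//   bytepos_to_file_charpos(BytePos(7))
//     = CharPos(7 - 0 - (2 - 1))   // one extra byte contributed by 'é'
//     = CharPos(6)
// i.e. the character following 'é' sits at char offset 6 even though its byte offset is 7.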
} 88 ( 0.00%) } . . /// Looks up the file's (1-based) line number, (0-based `CharPos`) column offset, and (0-based) . /// column offset when displayed, for a given `BytePos`. 88 ( 0.00%) pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) { 33 ( 0.00%) let (line, col_or_chpos) = self.lookup_file_pos(pos); 22 ( 0.00%) if line > 0 { . let col = col_or_chpos; 11 ( 0.00%) let linebpos = self.lines[line - 1]; . let col_display = { . let start_width_idx = self . .non_narrow_chars . .binary_search_by_key(&linebpos, |x| x.pos()) . .unwrap_or_else(|x| x); . let end_width_idx = self . .non_narrow_chars . .binary_search_by_key(&pos, |x| x.pos()) . .unwrap_or_else(|x| x); 9 ( 0.00%) let special_chars = end_width_idx - start_width_idx; . let non_narrow: usize = self.non_narrow_chars[start_width_idx..end_width_idx] . .iter() . .map(|x| x.width()) . .sum(); 55 ( 0.00%) col.0 - special_chars + non_narrow . }; . (line, col, col_display) . } else { . let chpos = col_or_chpos; . let col_display = { . let end_width_idx = self . .non_narrow_chars . .binary_search_by_key(&pos, |x| x.pos()) . .unwrap_or_else(|x| x); . let non_narrow: usize = . self.non_narrow_chars[0..end_width_idx].iter().map(|x| x.width()).sum(); . chpos.0 - end_width_idx + non_narrow . }; . (0, chpos, col_display) . } 77 ( 0.00%) } . } . . /// Normalizes the source code and records the normalizations. 100 ( 0.00%) fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec { . let mut normalized_pos = vec![]; . remove_bom(src, &mut normalized_pos); . normalize_newlines(src, &mut normalized_pos); . . // Offset all the positions by start_pos to match the final file positions. . for np in &mut normalized_pos { . np.pos.0 += start_pos.0; . } . . normalized_pos 90 ( 0.00%) } . . /// Removes UTF-8 BOM, if any. . fn remove_bom(src: &mut String, normalized_pos: &mut Vec) { 10 ( 0.00%) if src.starts_with('\u{feff}') { . src.drain(..3); . normalized_pos.push(NormalizedPos { pos: BytePos(0), diff: 3 }); . } . } . . /// Replaces `\r\n` with `\n` in-place in `src`. . /// . /// Returns error if there's a lone `\r` in the string. . fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec) { 10 ( 0.00%) if !src.as_bytes().contains(&b'\r') { . return; . } . . // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding. . // While we *can* call `as_mut_vec` and do surgery on the live string . // directly, let's rather steal the contents of `src`. This makes the code . // safe even if a panic occurs. . -- line 1816 ---------------------------------------- -- line 1877 ---------------------------------------- . ( . $( . $(#[$attr:meta])* . $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty); . )* . ) => { . $( . $(#[$attr])* 179,503 ( 0.00%) $vis struct $ident($inner_vis $inner_ty); . . impl Pos for $ident { . #[inline(always)] . fn from_usize(n: usize) -> $ident { 5,668 ( 0.00%) $ident(n as $inner_ty) . } . . #[inline(always)] . fn to_usize(&self) -> usize { 216,997 ( 0.00%) self.0 as usize . } . . #[inline(always)] . fn from_u32(n: u32) -> $ident { . $ident(n as $inner_ty) . } . . #[inline(always)] -- line 1903 ---------------------------------------- -- line 1906 ---------------------------------------- . } . } . . impl Add for $ident { . type Output = $ident; . . #[inline(always)] . fn add(self, rhs: $ident) -> $ident { 1,403,034 ( 0.03%) $ident(self.0 + rhs.0) . } . } . . impl Sub for $ident { . type Output = $ident; . . #[inline(always)] . 
fn sub(self, rhs: $ident) -> $ident { 2,212,173 ( 0.05%) $ident(self.0 - rhs.0) . } . } . )* . }; . } . . impl_pos! { . /// A byte offset. -- line 1931 ---------------------------------------- -- line 1939 ---------------------------------------- . /// Because of multibyte UTF-8 characters, a byte offset . /// is not equivalent to a character offset. The [`SourceMap`] will convert [`BytePos`] . /// values to `CharPos` values as necessary. . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] . pub struct CharPos(pub usize); . } . . impl Encodable for BytePos { 265 ( 0.00%) fn encode(&self, s: &mut S) -> Result<(), S::Error> { . s.emit_u32(self.0) 318 ( 0.00%) } . } . . impl Decodable for BytePos { . fn decode(d: &mut D) -> BytePos { 77,080 ( 0.00%) BytePos(d.read_u32()) . } . } . . // _____________________________________________________________________________ . // Loc, SourceFileAndLine, SourceFileAndBytePos . // . . /// A source code location used for error reporting. -- line 1962 ---------------------------------------- -- line 2079 ---------------------------------------- . /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column) . /// triple, which stays the same even if the containing `SourceFile` has moved . /// within the `SourceMap`. . /// . /// Also note that we are hashing byte offsets for the column, not unicode . /// codepoint offsets. For the purpose of the hash that's sufficient. . /// Also, hashing filenames is expensive so we avoid doing it twice when the . /// span starts and ends in the same file, which is almost always the case. 726,292 ( 0.02%) fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { . const TAG_VALID_SPAN: u8 = 0; . const TAG_INVALID_SPAN: u8 = 1; . const TAG_RELATIVE_SPAN: u8 = 2; . 108,626 ( 0.00%) if !ctx.hash_spans() { . return; . } . 617,666 ( 0.01%) let span = self.data_untracked(); 296,658 ( 0.01%) span.ctxt.hash_stable(ctx, hasher); 311,268 ( 0.01%) span.parent.hash_stable(ctx, hasher); . 207,594 ( 0.00%) if span.is_dummy() { . Hash::hash(&TAG_INVALID_SPAN, hasher); . return; . } . 207,348 ( 0.00%) if let Some(parent) = span.parent { . let def_span = ctx.def_span(parent).data_untracked(); . if def_span.contains(span) { . // This span is enclosed in a definition: only hash the relative position. . Hash::hash(&TAG_RELATIVE_SPAN, hasher); . (span.lo - def_span.lo).to_u32().hash_stable(ctx, hasher); . (span.hi - def_span.lo).to_u32().hash_stable(ctx, hasher); . return; . } . } . . // If this is not an empty or invalid span, we want to hash the last . // position that belongs to it, as opposed to hashing the first . // position past it. 414,696 ( 0.01%) let (file, line_lo, col_lo, line_hi, col_hi) = match ctx.span_data_to_lines_and_cols(&span) . { 207,348 ( 0.00%) Some(pos) => pos, . None => { . Hash::hash(&TAG_INVALID_SPAN, hasher); . return; . } . }; . . Hash::hash(&TAG_VALID_SPAN, hasher); . // We truncate the stable ID hash and line and column numbers. The chances -- line 2129 ---------------------------------------- -- line 2134 ---------------------------------------- . // hash only the length, for example, then two otherwise equal spans with . // different end locations will have the same hash. This can cause a problem . // during incremental compilation wherein a previous result for a query that . // depends on the end location of a span will be incorrectly reused when the . // end location of the span it depends on has changed (see issue #74890). A . 
// similar analysis applies if some query depends specifically on the length . // of the span, but we only hash the end location. So hash both. . 103,674 ( 0.00%) let col_lo_trunc = (col_lo.0 as u64) & 0xFF; . let line_lo_trunc = ((line_lo as u64) & 0xFF_FF_FF) << 8; . let col_hi_trunc = (col_hi.0 as u64) & 0xFF << 32; . let line_hi_trunc = ((line_hi as u64) & 0xFF_FF_FF) << 40; . let col_line = col_lo_trunc | line_lo_trunc | col_hi_trunc | line_hi_trunc; 103,674 ( 0.00%) let len = (span.hi - span.lo).0; . Hash::hash(&col_line, hasher); . Hash::hash(&len, hasher); 830,048 ( 0.02%) } . } 2,630,311 ( 0.06%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs -------------------------------------------------------------------------------- Ir -- line 48 ---------------------------------------- . // . // FIXME: we have to do something for moving slice patterns. . fn place_contents_drop_state_cannot_differ<'tcx>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . place: mir::Place<'tcx>, . ) -> bool { . let ty = place.ty(body, tcx).ty; 802,576 ( 0.02%) match ty.kind() { . ty::Array(..) => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} => false", . place, ty . ); . false . } . ty::Slice(..) | ty::Ref(..) | ty::RawPtr(..) => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} refd => true", . place, ty . ); . true . } 717,399 ( 0.02%) ty::Adt(def, _) if (def.has_dtor(tcx) && !def.is_box()) || def.is_union() => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} Drop => true", . place, ty . ); . true . } . _ => false, . } -- line 79 ---------------------------------------- -- line 83 ---------------------------------------- . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . lookup_result: LookupResult, . each_child: F, . ) where . F: FnMut(MovePathIndex), . { 3,984 ( 0.00%) match lookup_result { . LookupResult::Parent(..) => { . // access to untracked value - do not touch children . } . LookupResult::Exact(e) => on_all_children_bits(tcx, body, move_data, e, each_child), . } . } . . pub fn on_all_children_bits<'tcx, F>( -- line 99 ---------------------------------------- -- line 100 ---------------------------------------- . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . move_path_index: MovePathIndex, . mut each_child: F, . ) where . F: FnMut(MovePathIndex), . { 1,119,752 ( 0.02%) fn is_terminal_path<'tcx>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . path: MovePathIndex, . ) -> bool { 279,938 ( 0.01%) place_contents_drop_state_cannot_differ(tcx, body, move_data.move_paths[path].place) 1,119,752 ( 0.02%) } . 1,290,275 ( 0.03%) fn on_all_children_bits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . move_path_index: MovePathIndex, . each_child: &mut F, . ) where . F: FnMut(MovePathIndex), . { 65,477 ( 0.00%) each_child(move_path_index); . 979,783 ( 0.02%) if is_terminal_path(tcx, body, move_data, move_path_index) { . return; . } . 121,350 ( 0.00%) let mut next_child_index = move_data.move_paths[move_path_index].first_child; 242,700 ( 0.01%) while let Some(child_index) = next_child_index { . on_all_children_bits(tcx, body, move_data, child_index, each_child); . next_child_index = move_data.move_paths[child_index].next_sibling; . 
} 1,056,284 ( 0.02%) } 840,861 ( 0.02%) on_all_children_bits(tcx, body, move_data, move_path_index, &mut each_child); . } . . pub fn on_all_drop_children_bits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . path: MovePathIndex, . mut each_child: F, -- line 146 ---------------------------------------- -- line 156 ---------------------------------------- . if erased_ty.needs_drop(tcx, ctxt.param_env) { . each_child(child); . } else { . debug!("on_all_drop_children_bits - skipping") . } . }) . } . 4,492 ( 0.00%) pub fn drop_flag_effects_for_function_entry<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . mut callback: F, . ) where . F: FnMut(MovePathIndex, DropFlagState), . { . let move_data = &ctxt.move_data; 402 ( 0.00%) for arg in body.args_iter() { 576 ( 0.00%) let place = mir::Place::from(arg); 864 ( 0.00%) let lookup_result = move_data.rev_lookup.find(place.as_ref()); . on_lookup_result_bits(tcx, body, move_data, lookup_result, |mpi| { 288 ( 0.00%) callback(mpi, DropFlagState::Present) . }); . } 3,216 ( 0.00%) } . 910,692 ( 0.02%) pub fn drop_flag_effects_for_location<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . loc: Location, . mut callback: F, . ) where . F: FnMut(MovePathIndex, DropFlagState), . { . let move_data = &ctxt.move_data; . debug!("drop_flag_effects_for_location({:?})", loc); . . // first, move out of the RHS 18,834 ( 0.00%) for mi in &move_data.loc_map[loc] { 273,777 ( 0.01%) let path = mi.move_path_index(move_data); . debug!("moving out of path {:?}", move_data.move_paths[path]); . 72,500 ( 0.00%) on_all_children_bits(tcx, body, move_data, path, |mpi| callback(mpi, DropFlagState::Absent)) . } . . debug!("drop_flag_effects: assignment for location({:?})", loc); . 65,477 ( 0.00%) for_location_inits(tcx, body, move_data, loc, |mpi| callback(mpi, DropFlagState::Present)); 759,280 ( 0.02%) } . . pub fn for_location_inits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . loc: Location, . mut callback: F, . ) where . F: FnMut(MovePathIndex), . { 18,834 ( 0.00%) for ii in &move_data.init_loc_map[loc] { 269,860 ( 0.01%) let init = move_data.inits[*ii]; 138,906 ( 0.00%) match init.kind { . InitKind::Deep => { . let path = init.path; . . on_all_children_bits(tcx, body, move_data, path, &mut callback) . } . InitKind::Shallow => { . let mpi = init.path; . callback(mpi); -- line 226 ---------------------------------------- -- line 230 ---------------------------------------- . } . } . . /// Calls `handle_inactive_variant` for each descendant move path of `enum_place` that contains a . /// `Downcast` to a variant besides the `active_variant`. . /// . /// NOTE: If there are no move paths corresponding to an inactive variant, . /// `handle_inactive_variant` will not be called for that variant. 10,560 ( 0.00%) pub(crate) fn on_all_inactive_variants<'tcx>( . tcx: TyCtxt<'tcx>, . body: &mir::Body<'tcx>, . move_data: &MoveData<'tcx>, . enum_place: mir::Place<'tcx>, . active_variant: VariantIdx, . mut handle_inactive_variant: impl FnMut(MovePathIndex), . ) { 4,400 ( 0.00%) let enum_mpi = match move_data.rev_lookup.find(enum_place.as_ref()) { . LookupResult::Exact(mpi) => mpi, . LookupResult::Parent(_) => return, . }; . . let enum_path = &move_data.move_paths[enum_mpi]; . for (variant_mpi, variant_path) in enum_path.children(&move_data.move_paths) { . // Because of the way we build the `MoveData` tree, each child should have exactly one more . 
// projection than `enum_place`. This additional projection must be a downcast since the -- line 254 ---------------------------------------- -- line 262 ---------------------------------------- . }; . . if variant_idx != active_variant { . on_all_children_bits(tcx, body, move_data, variant_mpi, |mpi| { . handle_inactive_variant(mpi) . }); . } . } 7,040 ( 0.00%) } 1,015,371 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_metadata/src/rmeta/encoder.rs -------------------------------------------------------------------------------- Ir -- line 83 ---------------------------------------- . if $self.is_proc_macro { . return Lazy::empty(); . } . }; . } . . macro_rules! encoder_methods { . ($($name:ident($ty:ty);)*) => { 58,254 ( 0.00%) $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> { . self.opaque.$name(value) 69,906 ( 0.00%) })* . } . } . . impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> { . type Error = ::Error; . . #[inline] . fn emit_unit(&mut self) -> Result<(), Self::Error> { -- line 101 ---------------------------------------- -- line 125 ---------------------------------------- . emit_raw_bytes(&[u8]); . } . } . . impl<'a, 'tcx, T: Encodable>> Encodable> . for Lazy . { . fn encode(&self, e: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { 1,524 ( 0.00%) e.emit_lazy_distance(*self) . } . } . . impl<'a, 'tcx, T: Encodable>> Encodable> . for Lazy<[T]> . { 70 ( 0.00%) fn encode(&self, e: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { . e.emit_usize(self.meta)?; 184 ( 0.00%) if self.meta == 0 { . return Ok(()); . } 407 ( 0.00%) e.emit_lazy_distance(*self) 6 ( 0.00%) } . } . . impl<'a, 'tcx, I: Idx, T: Encodable>> Encodable> . for Lazy> . where . Option: FixedSizeEncoding, . { . fn encode(&self, e: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { . e.emit_usize(self.meta)?; 144 ( 0.00%) e.emit_lazy_distance(*self) . } . } . . impl<'a, 'tcx> Encodable> for CrateNum { 246,008 ( 0.01%) fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { 160,477 ( 0.00%) if *self != LOCAL_CRATE && s.is_proc_macro { . panic!("Attempted to encode non-local CrateNum {:?} for proc-macro crate", self); . } . s.emit_u32(self.as_u32()) 210,864 ( 0.00%) } . } . . impl<'a, 'tcx> Encodable> for DefIndex { 3,660 ( 0.00%) fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { . s.emit_u32(self.as_u32()) 4,392 ( 0.00%) } . } . . impl<'a, 'tcx> Encodable> for ExpnIndex { . fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { . s.emit_u32(self.as_u32()) . } . } . . impl<'a, 'tcx> Encodable> for SyntaxContext { 444,050 ( 0.01%) fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { 266,430 ( 0.01%) rustc_span::hygiene::raw_encode_syntax_context(*self, &s.hygiene_ctxt, s) 532,860 ( 0.01%) } . } . . impl<'a, 'tcx> Encodable> for ExpnId { . fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { 5,074 ( 0.00%) if self.krate == LOCAL_CRATE { . // We will only write details for local expansions. Non-local expansions will fetch . // data from the corresponding crate's metadata. . // FIXME(#43047) FIXME(#74731) We may eventually want to avoid relying on external . // metadata from proc-macro crates. 38,203 ( 0.00%) s.hygiene_ctxt.schedule_expn_data_for_encoding(*self); . } 28,532 ( 0.00%) self.krate.encode(s)?; . self.local_id.encode(s) . } . } . . 
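// NOTE: standalone sketch, not rustc's types. It models the first-child/next-sibling
// layout that `on_all_children_bits` walks in drop_flag_effects.rs above: visiting a
// move path visits the path itself, then every transitive child reached through the
// `first_child` / `next_sibling` links. Names below are illustrative only.
struct MovePathNode {
    first_child: Option<usize>,
    next_sibling: Option<usize>,
}

fn on_all_children<F: FnMut(usize)>(paths: &[MovePathNode], idx: usize, each: &mut F) {
    each(idx);
    // Children of `idx` form a singly linked list through `next_sibling`.
    let mut next = paths[idx].first_child;
    while let Some(child) = next {
        on_all_children(paths, child, each);
        next = paths[child].next_sibling;
    }
}

fn main() {
    // 0 = `x`, 1 = `x.0`, 2 = `x.1` (1 and 2 are siblings, children of 0).
    let paths = vec![
        MovePathNode { first_child: Some(1), next_sibling: None },
        MovePathNode { first_child: None, next_sibling: Some(2) },
        MovePathNode { first_child: None, next_sibling: None },
    ];
    let mut visited = Vec::new();
    on_all_children(&paths, 0, &mut |i| visited.push(i));
    assert_eq!(visited, vec![0, 1, 2]);
}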
impl<'a, 'tcx> Encodable> for Span { 600,272 ( 0.01%) fn encode(&self, s: &mut EncodeContext<'a, 'tcx>) -> opaque::EncodeResult { 450,204 ( 0.01%) let span = self.data(); . . // Don't serialize any `SyntaxContext`s from a proc-macro crate, . // since we don't load proc-macro dependencies during serialization. . // This means that any hygiene information from macros used *within* . // a proc-macro crate (e.g. invoking a macro that expands to a proc-macro . // definition) will be lost. . // . // This can show up in two ways: -- line 211 ---------------------------------------- -- line 228 ---------------------------------------- . // None of these user-visible effects should result in any . // cross-crate inconsistencies (getting one behavior in the same . // crate, and a different behavior in another crate) due to the . // limited surface that proc-macros can expose. . // . // IMPORTANT: If this is ever changed, be sure to update . // `rustc_span::hygiene::raw_encode_expn_id` to handle . // encoding `ExpnData` for proc-macro crates. 150,068 ( 0.00%) if s.is_proc_macro { . SyntaxContext::root().encode(s)?; . } else { 75,034 ( 0.00%) span.ctxt.encode(s)?; . } . 75,034 ( 0.00%) if self.is_dummy() { . return TAG_PARTIAL_SPAN.encode(s); . } . . // The Span infrastructure should make sure that this invariant holds: . debug_assert!(span.lo <= span.hi); . 327,016 ( 0.01%) if !s.source_file_cache.0.contains(span.lo) { 95,578 ( 0.00%) let source_map = s.tcx.sess.source_map(); 191,156 ( 0.00%) let source_file_index = source_map.lookup_source_file_idx(span.lo); 95,578 ( 0.00%) s.source_file_cache = 191,156 ( 0.00%) (source_map.files()[source_file_index].clone(), source_file_index); . } . 416,066 ( 0.01%) if !s.source_file_cache.0.contains(span.hi) { . // Unfortunately, macro expansion still sometimes generates Spans . // that malformed in this way. . return TAG_PARTIAL_SPAN.encode(s); . } . . let source_files = s.required_source_files.as_mut().expect("Already encoded SourceMap!"); . // Record the fact that we need to encode the data for this `SourceFile` 74,970 ( 0.00%) source_files.insert(s.source_file_cache.1); . . // There are two possible cases here: . // 1. This span comes from a 'foreign' crate - e.g. some crate upstream of the . // crate we are writing metadata for. When the metadata for *this* crate gets . // deserialized, the deserializer will need to know which crate it originally came . // from. We use `TAG_VALID_SPAN_FOREIGN` to indicate that a `CrateNum` should . // be deserialized after the rest of the span data, which tells the deserializer . // which crate contains the source map information. -- line 272 ---------------------------------------- -- line 274 ---------------------------------------- . // write `TAG_VALID_SPAN_LOCAL` to let the deserializer know that it should use . // our own source map information. . // . // If we're a proc-macro crate, we always treat this as a local `Span`. . // In `encode_source_map`, we serialize foreign `SourceFile`s into our metadata . // if we're a proc-macro crate. . // This allows us to avoid loading the dependencies of proc-macro crates: all of . // the information we need to decode `Span`s is stored in the proc-macro crate. 449,820 ( 0.01%) let (tag, lo, hi) = if s.source_file_cache.0.is_imported() && !s.is_proc_macro { . // To simplify deserialization, we 'rebase' this span onto the crate it originally came from . // (the crate that 'owns' the file it references. These rebased 'lo' and 'hi' values . 
// are relative to the source map information for the 'foreign' crate whose CrateNum . // we write into the metadata. This allows `imported_source_files` to binary . // search through the 'foreign' crate's source map information, using the . // deserialized 'lo' and 'hi' values directly. . // . // All of this logic ensures that the final result of deserialization is a 'normal' . // Span that can be used without any additional trouble. . let external_start_pos = { . // Introduce a new scope so that we drop the 'lock()' temporary 76,392 ( 0.00%) match &*s.source_file_cache.0.external_src.lock() { 25,464 ( 0.00%) ExternalSource::Foreign { original_start_pos, .. } => *original_start_pos, . src => panic!("Unexpected external source {:?}", src), . } . }; 76,392 ( 0.00%) let lo = (span.lo - s.source_file_cache.0.start_pos) + external_start_pos; 25,464 ( 0.00%) let hi = (span.hi - s.source_file_cache.0.start_pos) + external_start_pos; . . (TAG_VALID_SPAN_FOREIGN, lo, hi) . } else { 148,518 ( 0.00%) (TAG_VALID_SPAN_LOCAL, span.lo, span.hi) . }; . . tag.encode(s)?; . lo.encode(s)?; . . // Encode length which is usually less than span.hi and profits more . // from the variable-length integer encoding that we use. . let len = hi - lo; . len.encode(s)?; . 149,940 ( 0.00%) if tag == TAG_VALID_SPAN_FOREIGN { . // This needs to be two lines to avoid holding the `s.source_file_cache` . // while calling `cnum.encode(s)` 101,856 ( 0.00%) let cnum = s.source_file_cache.0.cnum; 76,392 ( 0.00%) cnum.encode(s)?; . } . . Ok(()) 600,272 ( 0.01%) } . } . . impl<'a, 'tcx> TyEncoder<'tcx> for EncodeContext<'a, 'tcx> { . const CLEAR_CROSS_CRATE: bool = true; . . fn position(&self) -> usize { . self.opaque.position() . } -- line 331 ---------------------------------------- -- line 361 ---------------------------------------- . } . . /// Helper trait to allow overloading `EncodeContext::lazy` for iterators. . trait EncodeContentsForLazy<'a, 'tcx, T: ?Sized + LazyMeta> { . fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'a, 'tcx>) -> T::Meta; . } . . impl<'a, 'tcx, T: Encodable>> EncodeContentsForLazy<'a, 'tcx, T> for &T { 80,033 ( 0.00%) fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'a, 'tcx>) { 1,938 ( 0.00%) self.encode(ecx).unwrap() 6 ( 0.00%) } . } . . impl<'a, 'tcx, T: Encodable>> EncodeContentsForLazy<'a, 'tcx, T> for T { 8,704 ( 0.00%) fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'a, 'tcx>) { 4,153 ( 0.00%) self.encode(ecx).unwrap() 6,028 ( 0.00%) } . } . . impl<'a, 'tcx, I, T: Encodable>> EncodeContentsForLazy<'a, 'tcx, [T]> for I . where . I: IntoIterator, . I::Item: EncodeContentsForLazy<'a, 'tcx, T>, . { 364 ( 0.00%) fn encode_contents_for_lazy(self, ecx: &mut EncodeContext<'a, 'tcx>) -> usize { 4,380 ( 0.00%) self.into_iter().map(|value| value.encode_contents_for_lazy(ecx)).count() 468 ( 0.00%) } . } . . // Shorthand for `$self.$tables.$table.set($def_id.index, $self.lazy($value))`, which would . // normally need extra variables to avoid errors about multiple mutable borrows. . macro_rules! record { . ($self:ident.$tables:ident.$table:ident[$def_id:expr] <- $value:expr) => {{ . { . let value = $value; . let lazy = $self.lazy(value); . $self.$tables.$table.set($def_id.index, lazy); . } . }}; . } . . impl<'a, 'tcx> EncodeContext<'a, 'tcx> { 1,595 ( 0.00%) fn emit_lazy_distance( . &mut self, . lazy: Lazy, . ) -> Result<(), ::Error> { . let pos = lazy.position.get(); 1,061 ( 0.00%) let distance = match self.lazy_state { . 
LazyState::NoNode => bug!("emit_lazy_distance: outside of a metadata node"), 267 ( 0.00%) LazyState::NodeStart(start) => { . let start = start.get(); 534 ( 0.00%) assert!(pos <= start); . start - pos . } 52 ( 0.00%) LazyState::Previous(last_pos) => { 52 ( 0.00%) assert!( . last_pos <= lazy.position, . "make sure that the calls to `lazy*` \ . are in the same order as the metadata fields", . ); 156 ( 0.00%) lazy.position.get() - last_pos.get() . } . }; 638 ( 0.00%) self.lazy_state = LazyState::Previous(NonZeroUsize::new(pos).unwrap()); . self.emit_usize(distance) 1,595 ( 0.00%) } . 4,095 ( 0.00%) fn lazy( . &mut self, . value: impl EncodeContentsForLazy<'a, 'tcx, T>, . ) -> Lazy { . let pos = NonZeroUsize::new(self.position()).unwrap(); . 18,545 ( 0.00%) assert_eq!(self.lazy_state, LazyState::NoNode); 42,567 ( 0.00%) self.lazy_state = LazyState::NodeStart(pos); 36,825 ( 0.00%) let meta = value.encode_contents_for_lazy(self); 19,887 ( 0.00%) self.lazy_state = LazyState::NoNode; . 37,887 ( 0.00%) assert!(pos.get() <= self.position()); . . Lazy::from_position_and_meta(pos, meta) 6,030 ( 0.00%) } . . fn encode_info_for_items(&mut self) { 9 ( 0.00%) self.encode_info_for_mod(CRATE_DEF_ID, self.tcx.hir().root_module()); . . // Proc-macro crates only export proc-macro items, which are looked . // up using `proc_macro_data` 2 ( 0.00%) if self.is_proc_macro { . return; . } . 6 ( 0.00%) self.tcx.hir().visit_all_item_likes(&mut self.as_deep_visitor()); . } . . fn encode_def_path_table(&mut self) { 4 ( 0.00%) let table = self.tcx.resolutions(()).definitions.def_path_table(); 2 ( 0.00%) if self.is_proc_macro { . for def_index in std::iter::once(CRATE_DEF_INDEX) . .chain(self.tcx.resolutions(()).proc_macros.iter().map(|p| p.local_def_index)) . { . let def_key = self.lazy(table.def_key(def_index)); . let def_path_hash = self.lazy(table.def_path_hash(def_index)); . self.tables.def_keys.set(def_index, def_key); . self.tables.def_path_hashes.set(def_index, def_path_hash); . } . } else { 5 ( 0.00%) for (def_index, def_key, def_path_hash) in table.enumerated_keys_and_path_hashes() { . let def_key = self.lazy(def_key); . let def_path_hash = self.lazy(def_path_hash); 2,492 ( 0.00%) self.tables.def_keys.set(def_index, def_key); 2,492 ( 0.00%) self.tables.def_path_hashes.set(def_index, def_path_hash); . } . } . } . . fn encode_def_path_hash_map(&mut self) -> Lazy> { . self.lazy(DefPathHashMapRef::BorrowedFromTcx( 2 ( 0.00%) self.tcx.resolutions(()).definitions.def_path_hash_to_def_index_map(), . )) . } . . fn encode_source_map(&mut self) -> Lazy<[rustc_span::SourceFile]> { 2 ( 0.00%) let source_map = self.tcx.sess.source_map(); 3 ( 0.00%) let all_source_files = source_map.files(); . . // By replacing the `Option` with `None`, we ensure that we can't . // accidentally serialize any more `Span`s after the source map encoding . // is done. . let required_source_files = self.required_source_files.take().unwrap(); . . let adapted = all_source_files . .iter() . .enumerate() . .filter(|(idx, source_file)| { . // Only serialize `SourceFile`s that were used . // during the encoding of a `Span` 683 ( 0.00%) required_source_files.contains(*idx) && . // Don't serialize imported `SourceFile`s, unless . // we're in a proc-macro crate. 93 ( 0.00%) (!source_file.is_imported() || self.is_proc_macro) . }) . .map(|(_, source_file)| { 27 ( 0.00%) let mut adapted = match source_file.name { . 
FileName::Real(ref realname) => { 18 ( 0.00%) let mut adapted = (**source_file).clone(); 144 ( 0.00%) adapted.name = FileName::Real(match realname { . RealFileName::LocalPath(path_to_file) => { . // Prepend path of working directory onto potentially . // relative paths, because they could become relative . // to a wrong directory. . // We include `working_dir` as part of the crate hash, . // so it's okay for us to use it as part of the encoded . // metadata. 18 ( 0.00%) let working_dir = &self.tcx.sess.opts.working_dir; 18 ( 0.00%) match working_dir { . RealFileName::LocalPath(absolute) => { . // Although neither working_dir or the file name were subject . // to path remapping, the concatenation between the two may . // be. Hence we need to do a remapping here. . let joined = Path::new(absolute).join(path_to_file); 63 ( 0.00%) let (joined, remapped) = 99 ( 0.00%) source_map.path_mapping().map_prefix(joined); 18 ( 0.00%) if remapped { . RealFileName::Remapped { . local_path: None, . virtual_name: joined, . } . } else { . RealFileName::LocalPath(joined) . } . } -- line 532 ---------------------------------------- -- line 544 ---------------------------------------- . RealFileName::Remapped { local_path: _, virtual_name } => { . RealFileName::Remapped { . // We do not want any local path to be exported into metadata . local_path: None, . virtual_name: virtual_name.clone(), . } . } . }); 18 ( 0.00%) adapted.name_hash = { . let mut hasher: StableHasher = StableHasher::new(); 9 ( 0.00%) adapted.name.hash(&mut hasher); . hasher.finish::() . }; 9 ( 0.00%) Lrc::new(adapted) . } . . // expanded code, not from a file . _ => source_file.clone(), . }; . . // We're serializing this `SourceFile` into our crate metadata, . // so mark it as coming from this crate. . // This also ensures that we don't try to deserialize the . // `CrateNum` for a proc-macro dependency - since proc macro . // dependencies aren't loaded when we deserialize a proc-macro, . // trying to remap the `CrateNum` would fail. 18 ( 0.00%) if self.is_proc_macro { . Lrc::make_mut(&mut adapted).cnum = LOCAL_CRATE; . } . adapted . }) . .collect::>(); . . self.lazy(adapted.iter().map(|rc| &**rc)) . } . 8 ( 0.00%) fn encode_crate_root(&mut self) -> Lazy> { . let mut i = self.position(); . . // Encode the crate deps . let crate_deps = self.encode_crate_deps(); . let dylib_dependency_formats = self.encode_dylib_dependency_formats(); 5 ( 0.00%) let dep_bytes = self.position() - i; . . // Encode the lib features. . i = self.position(); . let lib_features = self.encode_lib_features(); 7 ( 0.00%) let lib_feature_bytes = self.position() - i; . . // Encode the language items. . i = self.position(); . let lang_items = self.encode_lang_items(); . let lang_items_missing = self.encode_lang_items_missing(); 5 ( 0.00%) let lang_item_bytes = self.position() - i; . . // Encode the diagnostic items. . i = self.position(); . let diagnostic_items = self.encode_diagnostic_items(); 5 ( 0.00%) let diagnostic_item_bytes = self.position() - i; . . // Encode the native libraries used . i = self.position(); . let native_libraries = self.encode_native_libraries(); 4 ( 0.00%) let native_lib_bytes = self.position() - i; . . let foreign_modules = self.encode_foreign_modules(); . . // Encode DefPathTable . i = self.position(); . self.encode_def_path_table(); 5 ( 0.00%) let def_path_table_bytes = self.position() - i; . . // Encode the def IDs of traits, for rustdoc and diagnostics. . i = self.position(); . 
let traits = self.encode_traits(); 5 ( 0.00%) let traits_bytes = self.position() - i; . . // Encode the def IDs of impls, for coherence checking. . i = self.position(); . let impls = self.encode_impls(); 5 ( 0.00%) let impls_bytes = self.position() - i; . 3 ( 0.00%) let tcx = self.tcx; . . // Encode MIR. . i = self.position(); . self.encode_mir(); 5 ( 0.00%) let mir_bytes = self.position() - i; . . // Encode the items. . i = self.position(); . self.encode_def_ids(); . self.encode_info_for_items(); 4 ( 0.00%) let item_bytes = self.position() - i; . . // Encode the allocation index . let interpret_alloc_index = { . let mut interpret_alloc_index = Vec::new(); . let mut n = 0; . trace!("beginning to encode alloc ids"); . loop { . let new_n = self.interpret_allocs.len(); . // if we have found new ids, serialize those, too 2 ( 0.00%) if n == new_n { . // otherwise, abort . break; . } . trace!("encoding {} further alloc ids", new_n - n); . for idx in n..new_n { . let id = self.interpret_allocs[idx]; . let pos = self.position() as u32; . interpret_alloc_index.push(pos); . interpret::specialized_encode_alloc_id(self, tcx, id).unwrap(); . } . n = new_n; . } 6 ( 0.00%) self.lazy(interpret_alloc_index) . }; . . // Encode the proc macro data. This affects 'tables', . // so we need to do this before we encode the tables . i = self.position(); . let proc_macro_data = self.encode_proc_macros(); 4 ( 0.00%) let proc_macro_data_bytes = self.position() - i; . . i = self.position(); 4 ( 0.00%) let tables = self.tables.encode(&mut self.opaque); 4 ( 0.00%) let tables_bytes = self.position() - i; . . // Encode exported symbols info. This is prefetched in `encode_metadata` so we encode . // this as late as possible to give the prefetching as much time as possible to complete. . i = self.position(); . let exported_symbols = tcx.exported_symbols(LOCAL_CRATE); . let exported_symbols = self.encode_exported_symbols(&exported_symbols); 4 ( 0.00%) let exported_symbols_bytes = self.position() - i; . . // Encode the hygiene data, . // IMPORTANT: this *must* be the last thing that we encode (other than `SourceMap`). The process . // of encoding other items (e.g. `optimized_mir`) may cause us to load . // data from the incremental cache. If this causes us to deserialize a `Span`, . // then we may load additional `SyntaxContext`s into the global `HygieneData`. . // Therefore, we need to encode the hygiene data last to ensure that we encode . // any `SyntaxContext`s that might be used. . i = self.position(); . let (syntax_contexts, expn_data, expn_hashes) = self.encode_hygiene(); 5 ( 0.00%) let hygiene_bytes = self.position() - i; . . i = self.position(); . let def_path_hash_map = self.encode_def_path_hash_map(); 1 ( 0.00%) let def_path_hash_map_bytes = self.position() - i; . . // Encode source_map. This needs to be done last, . // since encoding `Span`s tells us which `SourceFiles` we actually . // need to encode. . i = self.position(); . let source_map = self.encode_source_map(); 4 ( 0.00%) let source_map_bytes = self.position() - i; . 3 ( 0.00%) let attrs = tcx.hir().krate_attrs(); 7 ( 0.00%) let has_default_lib_allocator = tcx.sess.contains_name(&attrs, sym::default_lib_allocator); . 118 ( 0.00%) let root = self.lazy(CrateRoot { . name: tcx.crate_name(LOCAL_CRATE), 3 ( 0.00%) extra_filename: tcx.sess.opts.cg.extra_filename.clone(), 1 ( 0.00%) triple: tcx.sess.opts.target_triple.clone(), . hash: tcx.crate_hash(LOCAL_CRATE), . 
stable_crate_id: tcx.def_path_hash(LOCAL_CRATE.as_def_id()).stable_crate_id(), 3 ( 0.00%) panic_strategy: tcx.sess.panic_strategy(), 3 ( 0.00%) panic_in_drop_strategy: tcx.sess.opts.debugging_opts.panic_in_drop, 2 ( 0.00%) edition: tcx.sess.edition(), . has_global_allocator: tcx.has_global_allocator(LOCAL_CRATE), . has_panic_handler: tcx.has_panic_handler(LOCAL_CRATE), . has_default_lib_allocator, . proc_macro_data, 6 ( 0.00%) compiler_builtins: tcx.sess.contains_name(&attrs, sym::compiler_builtins), 6 ( 0.00%) needs_allocator: tcx.sess.contains_name(&attrs, sym::needs_allocator), 6 ( 0.00%) needs_panic_runtime: tcx.sess.contains_name(&attrs, sym::needs_panic_runtime), 6 ( 0.00%) no_builtins: tcx.sess.contains_name(&attrs, sym::no_builtins), 5 ( 0.00%) panic_runtime: tcx.sess.contains_name(&attrs, sym::panic_runtime), 5 ( 0.00%) profiler_runtime: tcx.sess.contains_name(&attrs, sym::profiler_runtime), 2 ( 0.00%) symbol_mangling_version: tcx.sess.opts.get_symbol_mangling_version(), . . crate_deps, . dylib_dependency_formats, . lib_features, . lang_items, . diagnostic_items, . lang_items_missing, . native_libraries, . foreign_modules, . source_map, . traits, . impls, . exported_symbols, . interpret_alloc_index, 2 ( 0.00%) tables, . syntax_contexts, . expn_data, . expn_hashes, . def_path_hash_map, . }); . 1 ( 0.00%) let total_bytes = self.position(); . 4 ( 0.00%) if tcx.sess.meta_stats() { . let mut zero_bytes = 0; . for e in self.opaque.data.iter() { . if *e == 0 { . zero_bytes += 1; . } . } . . eprintln!("metadata stats:"); -- line 756 ---------------------------------------- -- line 770 ---------------------------------------- . eprintln!(" item bytes: {}", item_bytes); . eprintln!(" table bytes: {}", tables_bytes); . eprintln!(" hygiene bytes: {}", hygiene_bytes); . eprintln!(" zero bytes: {}", zero_bytes); . eprintln!(" total bytes: {}", total_bytes); . } . . root 9 ( 0.00%) } . } . . fn should_encode_visibility(def_kind: DefKind) -> bool { . match def_kind { . DefKind::Mod . | DefKind::Struct . | DefKind::Union . | DefKind::Enum -- line 786 ---------------------------------------- -- line 853 ---------------------------------------- . /// Whether we should encode MIR. . /// . /// Computing, optimizing and encoding the MIR is a relatively expensive operation. . /// We want to avoid this work when not required. Therefore: . /// - we only compute `mir_for_ctfe` on items with const-eval semantics; . /// - we skip `optimized_mir` for check runs. . /// . /// Return a pair, resp. for CTFE and for LLVM. 549 ( 0.00%) fn should_encode_mir(tcx: TyCtxt<'_>, def_id: LocalDefId) -> (bool, bool) { 305 ( 0.00%) match tcx.def_kind(def_id) { . // Constructors . DefKind::Ctor(_, _) => { . let mir_opt_base = tcx.sess.opts.output_types.should_codegen() . || tcx.sess.opts.debugging_opts.always_encode_mir; . (true, mir_opt_base) . } . // Constants . DefKind::AnonConst . | DefKind::InlineConst . | DefKind::AssocConst . | DefKind::Static . | DefKind::Const => (true, false), . // Full-fledged functions . DefKind::AssocFn | DefKind::Fn => { . let generics = tcx.generics_of(def_id); 300 ( 0.00%) let needs_inline = (generics.requires_monomorphization(tcx) 96 ( 0.00%) || tcx.codegen_fn_attrs(def_id).requests_inline()) 36 ( 0.00%) && tcx.sess.opts.output_types.should_codegen(); . // The function has a `const` modifier or is annotated with `default_method_body_is_const`. 
51 ( 0.00%) let is_const_fn = tcx.is_const_fn_raw(def_id.to_def_id()) 255 ( 0.00%) || tcx.has_attr(def_id.to_def_id(), sym::default_method_body_is_const); . let always_encode_mir = tcx.sess.opts.debugging_opts.always_encode_mir; 102 ( 0.00%) (is_const_fn, needs_inline || always_encode_mir) . } . // Closures can't be const fn. . DefKind::Closure => { . let generics = tcx.generics_of(def_id); . let needs_inline = (generics.requires_monomorphization(tcx) . || tcx.codegen_fn_attrs(def_id).requests_inline()) . && tcx.sess.opts.output_types.should_codegen(); . let always_encode_mir = tcx.sess.opts.debugging_opts.always_encode_mir; . (false, needs_inline || always_encode_mir) . } . // Generators require optimized MIR to compute layout. . DefKind::Generator => (false, true), . // The others don't have MIR. . _ => (false, false), . } 549 ( 0.00%) } . . fn should_encode_variances(def_kind: DefKind) -> bool { . match def_kind { . DefKind::Struct . | DefKind::Union . | DefKind::Enum . | DefKind::Variant . | DefKind::Fn -- line 909 ---------------------------------------- -- line 969 ---------------------------------------- . | DefKind::LifetimeParam . | DefKind::GlobalAsm . | DefKind::ExternCrate => false, . } . } . . impl<'a, 'tcx> EncodeContext<'a, 'tcx> { . fn encode_def_ids(&mut self) { 2 ( 0.00%) if self.is_proc_macro { . return; . } 1 ( 0.00%) let tcx = self.tcx; 3 ( 0.00%) let hir = tcx.hir(); 45 ( 0.00%) for local_id in hir.iter_local_def_id() { . let def_id = local_id.to_def_id(); . let def_kind = tcx.opt_def_kind(local_id); 3,738 ( 0.00%) let def_kind = if let Some(def_kind) = def_kind { def_kind } else { continue }; 7,696 ( 0.00%) record!(self.tables.def_kind[def_id] <- match def_kind { . // Replace Ctor by the enclosing object to avoid leaking details in children crates. . DefKind::Ctor(CtorOf::Struct, _) => DefKind::Struct, . DefKind::Ctor(CtorOf::Variant, _) => DefKind::Variant, . def_kind => def_kind, . }); 1,443 ( 0.00%) record!(self.tables.span[def_id] <- tcx.def_span(def_id)); 5,772 ( 0.00%) record!(self.tables.attributes[def_id] <- tcx.get_attrs(def_id)); 3,848 ( 0.00%) record!(self.tables.expn_that_defined[def_id] <- self.tcx.expn_that_defined(def_id)); . if should_encode_visibility(def_kind) { 2,844 ( 0.00%) record!(self.tables.visibility[def_id] <- self.tcx.visibility(def_id)); . } . if should_encode_stability(def_kind) { . self.encode_stability(def_id); . self.encode_const_stability(def_id); 1,434 ( 0.00%) self.encode_deprecation(def_id); . } . if should_encode_variances(def_kind) { 328 ( 0.00%) let v = self.tcx.variances_of(def_id); 1,148 ( 0.00%) record!(self.tables.variances[def_id] <- v); . } . if should_encode_generics(def_kind) { . let g = tcx.generics_of(def_id); 900 ( 0.00%) record!(self.tables.generics[def_id] <- g); 2,700 ( 0.00%) record!(self.tables.explicit_predicates[def_id] <- self.tcx.explicit_predicates_of(def_id)); 225 ( 0.00%) let inferred_outlives = self.tcx.inferred_outlives_of(def_id); 283 ( 0.00%) if !inferred_outlives.is_empty() { . record!(self.tables.inferred_outlives[def_id] <- inferred_outlives); . } . } 3,367 ( 0.00%) if let DefKind::Trait | DefKind::TraitAlias = def_kind { . record!(self.tables.super_predicates[def_id] <- self.tcx.super_predicates_of(def_id)); . } . } . let inherent_impls = tcx.crate_inherent_impls(()); . for (def_id, implementations) in inherent_impls.inherent_impls.iter() { 3 ( 0.00%) if implementations.is_empty() { . continue; . 
} 27 ( 0.00%) record!(self.tables.inherent_impls[def_id.to_def_id()] <- implementations.iter().map(|&def_id| { 3 ( 0.00%) assert!(def_id.is_local()); . def_id.index . })); . } . } . 1,554 ( 0.00%) fn encode_item_type(&mut self, def_id: DefId) { . debug!("EncodeContext::encode_item_type({:?})", def_id); 1,332 ( 0.00%) record!(self.tables.ty[def_id] <- self.tcx.type_of(def_id)); 1,776 ( 0.00%) } . . fn encode_enum_variant_info(&mut self, def: &ty::AdtDef, index: VariantIdx) { . let tcx = self.tcx; . let variant = &def.variants[index]; 220 ( 0.00%) let def_id = variant.def_id; . debug!("EncodeContext::encode_enum_variant_info({:?})", def_id); . 330 ( 0.00%) let data = VariantData { 55 ( 0.00%) ctor_kind: variant.ctor_kind, . discr: variant.discr, 55 ( 0.00%) ctor: variant.ctor_def_id.map(|did| did.index), 55 ( 0.00%) is_non_exhaustive: variant.is_field_list_non_exhaustive(), . }; . . record!(self.tables.kind[def_id] <- EntryKind::Variant(self.lazy(data))); . record!(self.tables.children[def_id] <- variant.fields.iter().map(|f| { . assert!(f.did.is_local()); . f.did.index . })); 55 ( 0.00%) self.encode_ident_span(def_id, variant.ident(tcx)); 55 ( 0.00%) self.encode_item_type(def_id); 55 ( 0.00%) if variant.ctor_kind == CtorKind::Fn { . // FIXME(eddyb) encode signature only in `encode_enum_variant_ctor`. . if let Some(ctor_def_id) = variant.ctor_def_id { . record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(ctor_def_id)); . } . } . } . . fn encode_enum_variant_ctor(&mut self, def: &ty::AdtDef, index: VariantIdx) { . let tcx = self.tcx; . let variant = &def.variants[index]; 275 ( 0.00%) let def_id = variant.ctor_def_id.unwrap(); . debug!("EncodeContext::encode_enum_variant_ctor({:?})", def_id); . . // FIXME(eddyb) encode only the `CtorKind` for constructors. . let data = VariantData { 55 ( 0.00%) ctor_kind: variant.ctor_kind, . discr: variant.discr, . ctor: Some(def_id.index), 55 ( 0.00%) is_non_exhaustive: variant.is_field_list_non_exhaustive(), . }; . 220 ( 0.00%) record!(self.tables.kind[def_id] <- EntryKind::Variant(self.lazy(data))); 165 ( 0.00%) self.encode_item_type(def_id); 55 ( 0.00%) if variant.ctor_kind == CtorKind::Fn { . record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id)); . } . } . 140 ( 0.00%) fn encode_info_for_mod(&mut self, local_def_id: LocalDefId, md: &hir::Mod<'_>) { 28 ( 0.00%) let tcx = self.tcx; . let def_id = local_def_id.to_def_id(); . debug!("EncodeContext::encode_info_for_mod({:?})", def_id); . . // If we are encoding a proc-macro crates, `encode_info_for_mod` will . // only ever get called for the crate root. We still want to encode . // the crate root for consistency with other crates (some of the resolver . // code uses it). However, we skip encoding anything relating to child . // items - we encode information about proc-macros later on. 42 ( 0.00%) let reexports = if !self.is_proc_macro { 28 ( 0.00%) match tcx.module_reexports(local_def_id) { . Some(exports) => self.lazy(exports), . _ => Lazy::empty(), . } . } else { . Lazy::empty() . }; . 56 ( 0.00%) record!(self.tables.kind[def_id] <- EntryKind::Mod(reexports)); 28 ( 0.00%) if self.is_proc_macro { . // Encode this here because we don't do it in encode_def_ids. . record!(self.tables.expn_that_defined[def_id] <- tcx.expn_that_defined(local_def_id)); . } else { . let direct_children = md.item_ids.iter().map(|item_id| item_id.def_id.local_def_index); . // Foreign items are planted into their parent modules from name resolution point of view. 28 ( 0.00%) let tcx = self.tcx; . let foreign_item_children = md . 
.item_ids . .iter() 1,550 ( 0.00%) .filter_map(|item_id| match tcx.hir().item(*item_id).kind { . hir::ItemKind::ForeignMod { items, .. } => { . Some(items.iter().map(|fi_ref| fi_ref.id.def_id.local_def_index)) . } . _ => None, . }) . .flatten(); . 98 ( 0.00%) record!(self.tables.children[def_id] <- direct_children.chain(foreign_item_children)); . } 112 ( 0.00%) } . . fn encode_field( . &mut self, . adt_def: &ty::AdtDef, . variant_index: VariantIdx, . field_index: usize, . ) { . let variant = &adt_def.variants[variant_index]; -- line 1134 ---------------------------------------- -- line 1234 ---------------------------------------- . } . if trait_item.kind == ty::AssocKind::Fn { . record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id)); . } . } . . fn encode_info_for_impl_item(&mut self, def_id: DefId) { . debug!("EncodeContext::encode_info_for_impl_item({:?})", def_id); 108 ( 0.00%) let tcx = self.tcx; . 324 ( 0.00%) let ast_item = self.tcx.hir().expect_impl_item(def_id.expect_local()); 54 ( 0.00%) let impl_item = self.tcx.associated_item(def_id); . 216 ( 0.00%) let container = match impl_item.defaultness { . hir::Defaultness::Default { has_value: true } => AssocContainer::ImplDefault, . hir::Defaultness::Final => AssocContainer::ImplFinal, . hir::Defaultness::Default { has_value: false } => { . span_bug!(ast_item.span, "impl items always have values (currently)") . } . }; . 324 ( 0.00%) match impl_item.kind { . ty::AssocKind::Const => { . if let hir::ImplItemKind::Const(_, body_id) = ast_item.kind { . let qualifs = self.tcx.at(ast_item.span).mir_const_qualif(def_id); . . record!(self.tables.kind[def_id] <- EntryKind::AssocConst( . container, . qualifs, . self.encode_rendered_const_for_body(body_id)) . ); . } else { . bug!() . } . } . ty::AssocKind::Fn => { 102 ( 0.00%) let fn_data = if let hir::ImplItemKind::Fn(ref sig, body) = ast_item.kind { . FnData { . asyncness: sig.header.asyncness, . // Can be inside `impl const Trait`, so using sig.header.constness is not reliable 51 ( 0.00%) constness: if self.tcx.is_const_fn_raw(def_id) { . hir::Constness::Const . } else { . hir::Constness::NotConst . }, . param_names: self.encode_fn_param_names_for_body(body), . } . } else { . bug!() -- line 1282 ---------------------------------------- -- line 1286 ---------------------------------------- . container, . has_self: impl_item.fn_has_self_parameter, . }))); . } . ty::AssocKind::Type => { . record!(self.tables.kind[def_id] <- EntryKind::AssocType(container)); . } . } 162 ( 0.00%) self.encode_ident_span(def_id, impl_item.ident(self.tcx)); 270 ( 0.00%) self.encode_item_type(def_id); 216 ( 0.00%) if let Some(trait_item_def_id) = impl_item.trait_item_def_id { . record!(self.tables.trait_item_def_id[def_id] <- trait_item_def_id); . } 54 ( 0.00%) if impl_item.kind == ty::AssocKind::Fn { . record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id)); . } . } . . fn encode_fn_param_names_for_body(&mut self, body_id: hir::BodyId) -> Lazy<[Ident]> { 357 ( 0.00%) self.lazy(self.tcx.hir().body_param_names(body_id)) . } . . fn encode_fn_param_names(&mut self, param_names: &[Ident]) -> Lazy<[Ident]> { . self.lazy(param_names.iter()) . } . . fn encode_mir(&mut self) { 3 ( 0.00%) if self.is_proc_macro { . return; . } . . let keys_and_jobs = self . .tcx . .mir_keys(()) . .iter() . .filter_map(|&def_id| { 305 ( 0.00%) let (encode_const, encode_opt) = should_encode_mir(self.tcx, def_id); 249 ( 0.00%) if encode_const || encode_opt { 44 ( 0.00%) Some((def_id, encode_const, encode_opt)) . } else { . None . 
} 122 ( 0.00%) }) . .collect::>(); 115 ( 0.00%) for (def_id, encode_const, encode_opt) in keys_and_jobs.into_iter() { . debug_assert!(encode_const || encode_opt); . . debug!("EntryBuilder::encode_mir({:?})", def_id); 22 ( 0.00%) if encode_opt { 48 ( 0.00%) record!(self.tables.mir[def_id.to_def_id()] <- self.tcx.optimized_mir(def_id)); . } 22 ( 0.00%) if encode_const { 40 ( 0.00%) record!(self.tables.mir_for_ctfe[def_id.to_def_id()] <- self.tcx.mir_for_ctfe(def_id)); . . // FIXME(generic_const_exprs): this feels wrong to have in `encode_mir` 10 ( 0.00%) let abstract_const = self.tcx.thir_abstract_const(def_id); 40 ( 0.00%) if let Ok(Some(abstract_const)) = abstract_const { . record!(self.tables.thir_abstract_consts[def_id.to_def_id()] <- abstract_const); . } . } 88 ( 0.00%) record!(self.tables.promoted_mir[def_id.to_def_id()] <- self.tcx.promoted_mir(def_id)); . . let instance = . ty::InstanceDef::Item(ty::WithOptConstParam::unknown(def_id.to_def_id())); 22 ( 0.00%) let unused = self.tcx.unused_generic_params(instance); 22 ( 0.00%) if !unused.is_empty() { . record!(self.tables.unused_generic_params[def_id.to_def_id()] <- unused); . } . } . } . . fn encode_stability(&mut self, def_id: DefId) { . debug!("EncodeContext::encode_stability({:?})", def_id); . . // The query lookup can take a measurable amount of time in crates with many items. Check if . // the stability attributes are even enabled before using their queries. 1,912 ( 0.00%) if self.feat.staged_api || self.tcx.sess.opts.debugging_opts.force_unstable_if_unmarked { . if let Some(stab) = self.tcx.lookup_stability(def_id) { . record!(self.tables.stability[def_id] <- stab) . } . } . } . . fn encode_const_stability(&mut self, def_id: DefId) { . debug!("EncodeContext::encode_const_stability({:?})", def_id); . . // The query lookup can take a measurable amount of time in crates with many items. Check if . // the stability attributes are even enabled before using their queries. 1,673 ( 0.00%) if self.feat.staged_api || self.tcx.sess.opts.debugging_opts.force_unstable_if_unmarked { . if let Some(stab) = self.tcx.lookup_const_stability(def_id) { . record!(self.tables.const_stability[def_id] <- stab) . } . } . } . 1,434 ( 0.00%) fn encode_deprecation(&mut self, def_id: DefId) { . debug!("EncodeContext::encode_deprecation({:?})", def_id); 1,195 ( 0.00%) if let Some(depr) = self.tcx.lookup_deprecation(def_id) { . record!(self.tables.deprecation[def_id] <- depr); . } 1,195 ( 0.00%) } . 60 ( 0.00%) fn encode_rendered_const_for_body(&mut self, body_id: hir::BodyId) -> Lazy { 30 ( 0.00%) let hir = self.tcx.hir(); 20 ( 0.00%) let body = hir.body(body_id); 80 ( 0.00%) let rendered = rustc_hir_pretty::to_string(&(&hir as &dyn intravisit::Map<'_>), |s| { 30 ( 0.00%) s.print_expr(&body.value) . }); 20 ( 0.00%) let rendered_const = &RenderedConst(rendered); . self.lazy(rendered_const) 70 ( 0.00%) } . 639 ( 0.00%) fn encode_info_for_item(&mut self, def_id: DefId, item: &'tcx hir::Item<'tcx>) { . let tcx = self.tcx; . . debug!("EncodeContext::encode_info_for_item({:?})", def_id); . . self.encode_ident_span(def_id, item.ident); . 426 ( 0.00%) let entry_kind = match item.kind { . hir::ItemKind::Static(_, hir::Mutability::Mut, _) => EntryKind::MutStatic, . hir::ItemKind::Static(_, hir::Mutability::Not, _) => EntryKind::ImmStatic, 30 ( 0.00%) hir::ItemKind::Const(_, body_id) => { 30 ( 0.00%) let qualifs = self.tcx.at(item.span).mir_const_qualif(def_id); 70 ( 0.00%) EntryKind::Const(qualifs, self.encode_rendered_const_for_body(body_id)) . } . 
hir::ItemKind::Fn(ref sig, .., body) => { . let data = FnData { . asyncness: sig.header.asyncness, . constness: sig.header.constness, . param_names: self.encode_fn_param_names_for_body(body), . }; . . EntryKind::Fn(self.lazy(data)) . } . hir::ItemKind::Macro(ref macro_def) => { . EntryKind::MacroDef(self.lazy(macro_def.clone())) . } . hir::ItemKind::Mod(ref m) => { 156 ( 0.00%) return self.encode_info_for_mod(item.def_id, m); . } . hir::ItemKind::ForeignMod { .. } => EntryKind::ForeignMod, . hir::ItemKind::GlobalAsm(..) => EntryKind::GlobalAsm, . hir::ItemKind::TyAlias(..) => EntryKind::Type, . hir::ItemKind::OpaqueTy(..) => { . self.encode_explicit_item_bounds(def_id); . EntryKind::OpaqueTy . } 18 ( 0.00%) hir::ItemKind::Enum(..) => EntryKind::Enum(self.tcx.adt_def(def_id).repr), . hir::ItemKind::Struct(ref struct_def, _) => { . let adt_def = self.tcx.adt_def(def_id); . let variant = adt_def.non_enum_variant(); . . // Encode def_ids for each field and method . // for methods, write all the stuff get_trait_method . // needs to know . let ctor = struct_def -- line 1442 ---------------------------------------- -- line 1462 ---------------------------------------- . ctor_kind: variant.ctor_kind, . discr: variant.discr, . ctor: None, . is_non_exhaustive: variant.is_field_list_non_exhaustive(), . }), . adt_def.repr, . ) . } 180 ( 0.00%) hir::ItemKind::Impl(hir::Impl { defaultness, constness, .. }) => { 174 ( 0.00%) let trait_ref = self.tcx.impl_trait_ref(def_id); 90 ( 0.00%) let polarity = self.tcx.impl_polarity(def_id); 90 ( 0.00%) let parent = if let Some(trait_ref) = trait_ref { 42 ( 0.00%) let trait_def = self.tcx.trait_def(trait_ref.def_id); 252 ( 0.00%) trait_def.ancestors(self.tcx, def_id).ok().and_then(|mut an| { 252 ( 0.00%) an.nth(1).and_then(|node| match node { . specialization_graph::Node::Impl(parent) => Some(parent), . _ => None, . }) . }) . } else { . None . }; . . // if this is an impl of `CoerceUnsized`, create its . // "unsized info", else just store None . let coerce_unsized_info = trait_ref.and_then(|t| { 252 ( 0.00%) if Some(t.def_id) == self.tcx.lang_items().coerce_unsized_trait() { . Some(self.tcx.at(item.span).coerce_unsized_info(def_id)) . } else { . None . } . }); . . let data = ImplData { . polarity, -- line 1496 ---------------------------------------- -- line 1518 ---------------------------------------- . } . hir::ItemKind::TraitAlias(..) => EntryKind::TraitAlias, . hir::ItemKind::ExternCrate(_) | hir::ItemKind::Use(..) => { . bug!("cannot encode info for item {:?}", item) . } . }; . record!(self.tables.kind[def_id] <- entry_kind); . // FIXME(eddyb) there should be a nicer way to do this. 290 ( 0.00%) match item.kind { . hir::ItemKind::Enum(..) => record!(self.tables.children[def_id] <- 6 ( 0.00%) self.tcx.adt_def(def_id).variants.iter().map(|v| { 55 ( 0.00%) assert!(v.def_id.is_local()); . v.def_id.index . }) . ), . hir::ItemKind::Struct(..) | hir::ItemKind::Union(..) => { . record!(self.tables.children[def_id] <- . self.tcx.adt_def(def_id).non_enum_variant().fields.iter().map(|f| { . assert!(f.did.is_local()); . f.did.index . }) . ) . } . hir::ItemKind::Impl { .. } | hir::ItemKind::Trait(..) => { 90 ( 0.00%) let associated_item_def_ids = self.tcx.associated_item_def_ids(def_id); . record!(self.tables.children[def_id] <- . associated_item_def_ids.iter().map(|&def_id| { 54 ( 0.00%) assert!(def_id.is_local()); . def_id.index . }) . ); . } . _ => {} . } 164 ( 0.00%) match item.kind { . hir::ItemKind::Static(..) . | hir::ItemKind::Const(..) . | hir::ItemKind::Fn(..) . 
| hir::ItemKind::TyAlias(..) . | hir::ItemKind::OpaqueTy(..) . | hir::ItemKind::Enum(..) . | hir::ItemKind::Struct(..) . | hir::ItemKind::Union(..) 232 ( 0.00%) | hir::ItemKind::Impl { .. } => self.encode_item_type(def_id), . _ => {} . } 174 ( 0.00%) if let hir::ItemKind::Fn(..) = item.kind { . record!(self.tables.fn_sig[def_id] <- tcx.fn_sig(def_id)); . } 116 ( 0.00%) if let hir::ItemKind::Impl { .. } = item.kind { 225 ( 0.00%) if let Some(trait_ref) = self.tcx.impl_trait_ref(def_id) { 210 ( 0.00%) record!(self.tables.impl_trait_ref[def_id] <- trait_ref); . } . } 464 ( 0.00%) } . . fn encode_info_for_generic_param(&mut self, def_id: DefId, kind: EntryKind, encode_type: bool) { 9 ( 0.00%) record!(self.tables.kind[def_id] <- kind); 3 ( 0.00%) if encode_type { . self.encode_item_type(def_id); . } . } . . fn encode_info_for_closure(&mut self, hir_id: hir::HirId) { . let def_id = self.tcx.hir().local_def_id(hir_id); . debug!("EncodeContext::encode_info_for_closure({:?})", def_id); . -- line 1584 ---------------------------------------- -- line 1611 ---------------------------------------- . let const_data = self.encode_rendered_const_for_body(body_id); . let qualifs = self.tcx.mir_const_qualif(def_id); . . record!(self.tables.kind[def_id.to_def_id()] <- EntryKind::AnonConst(qualifs, const_data)); . self.encode_item_type(def_id.to_def_id()); . } . . fn encode_native_libraries(&mut self) -> Lazy<[NativeLib]> { 2 ( 0.00%) empty_proc_macro!(self); 2 ( 0.00%) let used_libraries = self.tcx.native_libraries(LOCAL_CRATE); . self.lazy(used_libraries.iter()) . } . . fn encode_foreign_modules(&mut self) -> Lazy<[ForeignModule]> { 5 ( 0.00%) empty_proc_macro!(self); 2 ( 0.00%) let foreign_modules = self.tcx.foreign_modules(LOCAL_CRATE); . self.lazy(foreign_modules.iter().map(|(_, m)| m).cloned()) . } . . fn encode_hygiene(&mut self) -> (SyntaxContextTable, ExpnDataTable, ExpnHashTable) { . let mut syntax_contexts: TableBuilder<_, _> = Default::default(); . let mut expn_data_table: TableBuilder<_, _> = Default::default(); . let mut expn_hash_table: TableBuilder<_, _> = Default::default(); . 2 ( 0.00%) let _: Result<(), !> = self.hygiene_ctxt.encode( 8 ( 0.00%) &mut (&mut *self, &mut syntax_contexts, &mut expn_data_table, &mut expn_hash_table), . |(this, syntax_contexts, _, _), index, ctxt_data| { . syntax_contexts.set(index, this.lazy(ctxt_data)); . Ok(()) . }, . |(this, _, expn_data_table, expn_hash_table), index, expn_data, hash| { . if let Some(index) = index.as_local() { 9,196 ( 0.00%) expn_data_table.set(index.as_raw(), this.lazy(expn_data)); 9,196 ( 0.00%) expn_hash_table.set(index.as_raw(), this.lazy(hash)); . } . Ok(()) . }, . ); . . ( . syntax_contexts.encode(&mut self.opaque), . expn_data_table.encode(&mut self.opaque), . expn_hash_table.encode(&mut self.opaque), . ) . } . . fn encode_proc_macros(&mut self) -> Option { 6 ( 0.00%) let is_proc_macro = self.tcx.sess.crate_types().contains(&CrateType::ProcMacro); . if is_proc_macro { . let tcx = self.tcx; . let hir = tcx.hir(); . . let proc_macro_decls_static = tcx.proc_macro_decls_static(()).unwrap().local_def_index; . let stability = tcx.lookup_stability(DefId::local(CRATE_DEF_INDEX)).copied(); . let macros = . self.lazy(tcx.resolutions(()).proc_macros.iter().map(|p| p.local_def_index)); -- line 1666 ---------------------------------------- -- line 1725 ---------------------------------------- . . Some(ProcMacroData { proc_macro_decls_static, stability, macros }) . } else { . None . } . } . . 
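--------------------------------------------------------------------------------
Most of the encoder functions annotated above follow the same shape: query a property of the item through `tcx`, then record a lazily-serialized copy of it in a per-item table keyed by the item's def index (the `record!(self.tables.xxx[def_id] <- ...)` calls). Below is a minimal stand-alone sketch of that table-building pattern; `Table`, `Lazy` and `record` are illustrative stand-ins, not the real `rustc_metadata` types.

use std::collections::BTreeMap;

#[derive(Debug)]
struct Lazy<T>(T); // stands in for a position into the opaque metadata buffer

#[derive(Debug, Default)]
struct Table<T> {
    entries: BTreeMap<u32, Lazy<T>>, // keyed by a DefIndex-like integer
}

impl<T> Table<T> {
    // Mirrors the shape of `record!(self.tables.xxx[def_id] <- value)`.
    fn record(&mut self, def_index: u32, value: T) {
        self.entries.insert(def_index, Lazy(value));
    }
}

fn main() {
    let mut deprecation: Table<&str> = Table::default();
    // Only items that actually carry the attribute get an entry, matching
    // the `if let Some(depr) = tcx.lookup_deprecation(..)` guard above.
    let lookups = [(3u32, Some("use `new_api` instead")), (4, None)];
    for (def_index, depr) in lookups {
        if let Some(note) = depr {
            deprecation.record(def_index, note);
        }
    }
    println!("{:?}", deprecation);
}
--------------------------------------------------------------------------------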
fn encode_crate_deps(&mut self) -> Lazy<[CrateDep]> { 7 ( 0.00%) empty_proc_macro!(self); . 1 ( 0.00%) let deps = self . .tcx . .crates(()) . .iter() . .map(|&cnum| { . let dep = CrateDep { 7 ( 0.00%) name: self.tcx.crate_name(cnum), 7 ( 0.00%) hash: self.tcx.crate_hash(cnum), 7 ( 0.00%) host_hash: self.tcx.crate_host_hash(cnum), 7 ( 0.00%) kind: self.tcx.dep_kind(cnum), 7 ( 0.00%) extra_filename: self.tcx.extra_filename(cnum), . }; 35 ( 0.00%) (cnum, dep) . }) . .collect::>(); . . { . // Sanity-check the crate numbers . let mut expected_cnum = 1; 14 ( 0.00%) for &(n, _) in &deps { 18 ( 0.00%) assert_eq!(n, CrateNum::new(expected_cnum)); . expected_cnum += 1; . } . } . . // We're just going to write a list of crate 'name-hash-version's, with . // the assumption that they are numbered 1 to n. . // FIXME (#2166): This is not nearly enough to support correct versioning . // but is enough to get transitive crate dependencies working. . self.lazy(deps.iter().map(|&(_, ref dep)| dep)) . } . . fn encode_lib_features(&mut self) -> Lazy<[(Symbol, Option)]> { 2 ( 0.00%) empty_proc_macro!(self); 1 ( 0.00%) let tcx = self.tcx; . let lib_features = tcx.lib_features(()); 2 ( 0.00%) self.lazy(lib_features.to_vec()) . } . . fn encode_diagnostic_items(&mut self) -> Lazy<[(Symbol, DefIndex)]> { 3 ( 0.00%) empty_proc_macro!(self); 1 ( 0.00%) let tcx = self.tcx; . let diagnostic_items = &tcx.diagnostic_items(LOCAL_CRATE).name_to_id; . self.lazy(diagnostic_items.iter().map(|(&name, def_id)| (name, def_id.index))) . } . . fn encode_lang_items(&mut self) -> Lazy<[(DefIndex, usize)]> { 4 ( 0.00%) empty_proc_macro!(self); 1 ( 0.00%) let tcx = self.tcx; 1 ( 0.00%) let lang_items = tcx.lang_items(); 2 ( 0.00%) let lang_items = lang_items.items().iter(); . self.lazy(lang_items.enumerate().filter_map(|(i, &opt_def_id)| { 568 ( 0.00%) if let Some(def_id) = opt_def_id { . if def_id.is_local() { . return Some((def_id.index, i)); . } . } . None . })) . } . . fn encode_lang_items_missing(&mut self) -> Lazy<[lang_items::LangItem]> { 2 ( 0.00%) empty_proc_macro!(self); 1 ( 0.00%) let tcx = self.tcx; 1 ( 0.00%) self.lazy(&tcx.lang_items().missing) . } . . fn encode_traits(&mut self) -> Lazy<[DefIndex]> { 3 ( 0.00%) empty_proc_macro!(self); 1 ( 0.00%) self.lazy(self.tcx.traits_in_crate(LOCAL_CRATE).iter().map(|def_id| def_id.index)) . } . . /// Encodes an index, mapping each trait to its (local) implementations. . fn encode_impls(&mut self) -> Lazy<[TraitImpls]> { . debug!("EncodeContext::encode_traits_and_impls()"); 2 ( 0.00%) empty_proc_macro!(self); 2 ( 0.00%) let tcx = self.tcx; 5 ( 0.00%) let mut visitor = ImplsVisitor { tcx, impls: FxHashMap::default() }; 4 ( 0.00%) tcx.hir().visit_all_item_likes(&mut visitor); . . let mut all_impls: Vec<_> = visitor.impls.into_iter().collect(); . . // Bring everything into deterministic order for hashing 14 ( 0.00%) all_impls.sort_by_cached_key(|&(trait_def_id, _)| tcx.def_path_hash(trait_def_id)); . . let all_impls: Vec<_> = all_impls . .into_iter() 210 ( 0.00%) .map(|(trait_def_id, mut impls)| { . // Bring everything into deterministic order for hashing 14 ( 0.00%) impls.sort_by_cached_key(|&(index, _)| { 42 ( 0.00%) tcx.hir().def_path_hash(LocalDefId { local_def_index: index }) . }); . 56 ( 0.00%) TraitImpls { . trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index), 42 ( 0.00%) impls: self.lazy(&impls), . } 112 ( 0.00%) }) . .collect(); . . self.lazy(&all_impls) . } . . // Encodes all symbols exported from this crate into the metadata. . // . 
// This pass is seeded off the reachability list calculated in the . // middle::reachable module but filters out items that either don't have a . // symbol associated with them (they weren't translated) or if they're an FFI . // definition (as that's not defined in this crate). . fn encode_exported_symbols( . &mut self, . exported_symbols: &[(ExportedSymbol<'tcx>, SymbolExportLevel)], . ) -> Lazy<[(ExportedSymbol<'tcx>, SymbolExportLevel)]> { 2 ( 0.00%) empty_proc_macro!(self); . // The metadata symbol name is special. It should not show up in . // downstream crates. 9 ( 0.00%) let metadata_symbol_name = SymbolName::new(self.tcx, &metadata_symbol_name(self.tcx)); . . self.lazy( . exported_symbols . .iter() 78 ( 0.00%) .filter(|&&(ref exported_symbol, _)| match *exported_symbol { . ExportedSymbol::NoDefId(symbol_name) => symbol_name != metadata_symbol_name, . _ => true, . }) . .cloned(), . ) . } . . fn encode_dylib_dependency_formats(&mut self) -> Lazy<[Option]> { 2 ( 0.00%) empty_proc_macro!(self); 2 ( 0.00%) let formats = self.tcx.dependency_formats(()); 4 ( 0.00%) for (ty, arr) in formats.iter() { 2 ( 0.00%) if *ty != CrateType::Dylib { . continue; . } . return self.lazy(arr.iter().map(|slot| match *slot { . Linkage::NotLinked | Linkage::IncludedFromDylib => None, . . Linkage::Dynamic => Some(LinkagePreference::RequireDynamic), . Linkage::Static => Some(LinkagePreference::RequireStatic), . })); -- line 1876 ---------------------------------------- -- line 1914 ---------------------------------------- . } . } . . // FIXME(eddyb) make metadata encoding walk over all definitions, instead of HIR. . impl<'a, 'tcx> Visitor<'tcx> for EncodeContext<'a, 'tcx> { . type NestedFilter = nested_filter::OnlyBodies; . . fn nested_visit_map(&mut self) -> Self::Map { 61 ( 0.00%) self.tcx.hir() . } 222,183 ( 0.00%) fn visit_expr(&mut self, ex: &'tcx hir::Expr<'tcx>) { 24,687 ( 0.00%) intravisit::walk_expr(self, ex); . self.encode_info_for_expr(ex); 197,496 ( 0.00%) } . fn visit_anon_const(&mut self, c: &'tcx AnonConst) { . intravisit::walk_anon_const(self, c); . self.encode_info_for_anon_const(c.hir_id); . } . fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 939 ( 0.00%) intravisit::walk_item(self, item); 626 ( 0.00%) match item.kind { . hir::ItemKind::ExternCrate(_) | hir::ItemKind::Use(..) => {} // ignore these 284 ( 0.00%) _ => self.encode_info_for_item(item.def_id.to_def_id(), item), . } 1,252 ( 0.00%) self.encode_addl_info_for_item(item); . } . fn visit_foreign_item(&mut self, ni: &'tcx hir::ForeignItem<'tcx>) { . intravisit::walk_foreign_item(self, ni); . self.encode_info_for_foreign_item(ni.def_id.to_def_id(), ni); . } 918 ( 0.00%) fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) { . intravisit::walk_generics(self, generics); . self.encode_info_for_generics(generics); 816 ( 0.00%) } . } . . impl<'a, 'tcx> EncodeContext<'a, 'tcx> { 21 ( 0.00%) fn encode_fields(&mut self, adt_def: &ty::AdtDef) { . for (variant_index, variant) in adt_def.variants.iter_enumerated() { . for (field_index, _field) in variant.fields.iter().enumerate() { . self.encode_field(adt_def, variant_index, field_index); . } . } 24 ( 0.00%) } . . fn encode_info_for_generics(&mut self, generics: &hir::Generics<'tcx>) { . for param in generics.params { 18 ( 0.00%) let def_id = self.tcx.hir().local_def_id(param.hir_id); 27 ( 0.00%) match param.kind { . GenericParamKind::Lifetime { .. } => continue, . GenericParamKind::Type { default, .. } => { . self.encode_info_for_generic_param( . def_id.to_def_id(), . 
EntryKind::TypeParam, . default.is_some(), . ); . } -- line 1970 ---------------------------------------- -- line 1975 ---------------------------------------- . record!(self.tables.const_defaults[def_id] <- self.tcx.const_param_default(def_id)) . } . } . } . } . } . . fn encode_info_for_expr(&mut self, expr: &hir::Expr<'_>) { 49,374 ( 0.00%) if let hir::ExprKind::Closure(..) = expr.kind { . self.encode_info_for_closure(expr.hir_id); . } . } . . fn encode_ident_span(&mut self, def_id: DefId, ident: Ident) { . record!(self.tables.ident_span[def_id] <- ident.span); . } . . /// In some cases, along with the item itself, we also . /// encode some sub-items. Usually we want some info from the item . /// so it's easier to do that here then to wait until we would encounter . /// normally in the visitor walk. 2,817 ( 0.00%) fn encode_addl_info_for_item(&mut self, item: &hir::Item<'_>) { 1,565 ( 0.00%) match item.kind { . hir::ItemKind::Static(..) . | hir::ItemKind::Const(..) . | hir::ItemKind::Fn(..) . | hir::ItemKind::Macro(..) . | hir::ItemKind::Mod(..) . | hir::ItemKind::ForeignMod { .. } . | hir::ItemKind::GlobalAsm(..) . | hir::ItemKind::ExternCrate(..) . | hir::ItemKind::Use(..) . | hir::ItemKind::TyAlias(..) . | hir::ItemKind::OpaqueTy(..) . | hir::ItemKind::TraitAlias(..) => { . // no sub-item recording needed in these cases . } . hir::ItemKind::Enum(..) => { 3 ( 0.00%) let def = self.tcx.adt_def(item.def_id.to_def_id()); 6 ( 0.00%) self.encode_fields(def); . . for (i, variant) in def.variants.iter_enumerated() { . self.encode_enum_variant_info(def, i); . 165 ( 0.00%) if let Some(_ctor_def_id) = variant.ctor_def_id { . self.encode_enum_variant_ctor(def, i); . } . } . } . hir::ItemKind::Struct(ref struct_def, _) => { . let def = self.tcx.adt_def(item.def_id.to_def_id()); . self.encode_fields(def); . -- line 2027 ---------------------------------------- -- line 2031 ---------------------------------------- . self.encode_struct_ctor(def, ctor_def_id.to_def_id()); . } . } . hir::ItemKind::Union(..) => { . let def = self.tcx.adt_def(item.def_id.to_def_id()); . self.encode_fields(def); . } . hir::ItemKind::Impl { .. } => { 216 ( 0.00%) for &trait_item_def_id in 45 ( 0.00%) self.tcx.associated_item_def_ids(item.def_id.to_def_id()).iter() . { . self.encode_info_for_impl_item(trait_item_def_id); . } . } . hir::ItemKind::Trait(..) => { . for &item_def_id in self.tcx.associated_item_def_ids(item.def_id.to_def_id()).iter() . { . self.encode_info_for_trait_item(item_def_id); . } . } . } 2,504 ( 0.00%) } . } . . struct ImplsVisitor<'tcx> { . tcx: TyCtxt<'tcx>, . impls: FxHashMap)>>, . } . . impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplsVisitor<'tcx> { 2,191 ( 0.00%) fn visit_item(&mut self, item: &hir::Item<'_>) { 626 ( 0.00%) match item.kind { . hir::ItemKind::Impl(..) => { 174 ( 0.00%) if let Some(trait_ref) = self.tcx.impl_trait_ref(item.def_id.to_def_id()) { 252 ( 0.00%) let simplified_self_ty = fast_reject::simplify_type( . self.tcx, . trait_ref.self_ty(), . SimplifyParams::No, . StripReferences::No, . ); . . self.impls . .entry(trait_ref.def_id) . .or_default() . .push((item.def_id.local_def_index, simplified_self_ty)); . } . } . _ => {} . } 2,504 ( 0.00%) } . . fn visit_trait_item(&mut self, _trait_item: &'v hir::TraitItem<'v>) {} . . fn visit_impl_item(&mut self, _impl_item: &'v hir::ImplItem<'v>) { . // handled in `visit_item` above . } . . 
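--------------------------------------------------------------------------------
The `ImplsVisitor` annotated above buckets every `impl Trait for Type` item under the `DefId` of the trait it implements; `encode_impls` then sorts each bucket by def-path hash so the emitted metadata is deterministic. A self-contained sketch of that grouping step, with `TraitId`/`ImplId` as hypothetical stand-ins for the compiler's `DefId`/`DefIndex` and the simplified self type reduced to a string:

use std::collections::HashMap;

type TraitId = u32; // stand-in for the trait's DefId
type ImplId = u32;  // stand-in for the impl item's DefIndex

fn group_impls(
    items: &[(ImplId, Option<(TraitId, String)>)],
) -> HashMap<TraitId, Vec<(ImplId, String)>> {
    let mut impls: HashMap<TraitId, Vec<(ImplId, String)>> = HashMap::new();
    for (impl_id, trait_ref) in items {
        // Inherent impls carry no trait reference and are skipped here,
        // like the `if let Some(trait_ref) = ...` guard in `visit_item`.
        if let Some((trait_id, self_ty)) = trait_ref {
            impls.entry(*trait_id).or_default().push((*impl_id, self_ty.clone()));
        }
    }
    impls
}

fn main() {
    let items = vec![
        (10, Some((1, "Foo".to_string()))), // impl TraitA for Foo
        (11, Some((1, "Bar".to_string()))), // impl TraitA for Bar
        (12, None),                         // inherent impl, not recorded
        (13, Some((2, "Foo".to_string()))), // impl TraitB for Foo
    ];
    let grouped = group_impls(&items);
    // The real encoder then sorts each bucket by def-path hash so the
    // emitted metadata is identical across runs.
    assert_eq!(grouped[&1].len(), 2);
    assert_eq!(grouped[&2].len(), 1);
}
--------------------------------------------------------------------------------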
fn visit_foreign_item(&mut self, _foreign_item: &'v hir::ForeignItem<'v>) {} -- line 2088 ---------------------------------------- -- line 2146 ---------------------------------------- . } . . #[inline] . pub fn raw_data(&self) -> &[u8] { . &self.raw_data . } . } . 10 ( 0.00%) pub fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata { 5 ( 0.00%) let _prof_timer = tcx.prof.verbose_generic_activity("generate_crate_metadata"); . . // Since encoding metadata is not in a query, and nothing is cached, . // there's no need to do dep-graph tracking for any of it. . tcx.dep_graph.assert_ignored(); . 7 ( 0.00%) join( 3 ( 0.00%) || encode_metadata_impl(tcx), . || { 5 ( 0.00%) if tcx.sess.threads() == 1 { . return; . } . // Prefetch some queries used by metadata encoding. . // This is not necessary for correctness, but is only done for performance reasons. . // It can be removed if it turns out to cause trouble or be detrimental to performance. . join(|| prefetch_mir(tcx), || tcx.exported_symbols(LOCAL_CRATE)); . }, . ) . .0 9 ( 0.00%) } . 9 ( 0.00%) fn encode_metadata_impl(tcx: TyCtxt<'_>) -> EncodedMetadata { 1 ( 0.00%) let mut encoder = opaque::Encoder::new(vec![]); . encoder.emit_raw_bytes(METADATA_HEADER).unwrap(); . . // Will be filled with the root position after encoding everything. . encoder.emit_raw_bytes(&[0, 0, 0, 0]).unwrap(); . 2 ( 0.00%) let source_map_files = tcx.sess.source_map().files(); 3 ( 0.00%) let source_file_cache = (source_map_files[0].clone(), 0); 2 ( 0.00%) let required_source_files = Some(GrowableBitSet::with_capacity(source_map_files.len())); . drop(source_map_files); . . let hygiene_ctxt = HygieneEncodeContext::default(); . 35 ( 0.00%) let mut ecx = EncodeContext { 4 ( 0.00%) opaque: encoder, . tcx, 2 ( 0.00%) feat: tcx.features(), . tables: Default::default(), . lazy_state: LazyState::NoNode, . type_shorthands: Default::default(), . predicate_shorthands: Default::default(), 4 ( 0.00%) source_file_cache, . interpret_allocs: Default::default(), 4 ( 0.00%) required_source_files, 2 ( 0.00%) is_proc_macro: tcx.sess.crate_types().contains(&CrateType::ProcMacro), . hygiene_ctxt: &hygiene_ctxt, . }; . . // Encode the rustc version string in a predictable location. . rustc_version().encode(&mut ecx).unwrap(); . . // Encode all the entries and extra information in the crate, . // culminating in the `CrateRoot` which points to all of it. 1 ( 0.00%) let root = ecx.encode_crate_root(); . 7 ( 0.00%) let mut result = ecx.opaque.into_inner(); . . // Encode the root position. . let header = METADATA_HEADER.len(); . let pos = root.position.get(); 3 ( 0.00%) result[header + 0] = (pos >> 24) as u8; 3 ( 0.00%) result[header + 1] = (pos >> 16) as u8; 1 ( 0.00%) result[header + 2] = (pos >> 8) as u8; 5 ( 0.00%) result[header + 3] = (pos >> 0) as u8; . . // Record metadata size for self-profiling . tcx.prof.artifact_size("crate_metadata", "crate_metadata", result.len() as u64); . 5 ( 0.00%) EncodedMetadata { raw_data: result } 12 ( 0.00%) } . . pub fn provide(providers: &mut Providers) { 2 ( 0.00%) *providers = Providers { . traits_in_crate: |tcx, cnum| { 1 ( 0.00%) assert_eq!(cnum, LOCAL_CRATE); . 1 ( 0.00%) #[derive(Default)] . struct TraitsVisitor { . traits: Vec, . } . impl ItemLikeVisitor<'_> for TraitsVisitor { . fn visit_item(&mut self, item: &hir::Item<'_>) { 1,252 ( 0.00%) if let hir::ItemKind::Trait(..) | hir::ItemKind::TraitAlias(..) = item.kind { . self.traits.push(item.def_id.to_def_id()); . } . } . fn visit_trait_item(&mut self, _trait_item: &hir::TraitItem<'_>) {} . 
fn visit_impl_item(&mut self, _impl_item: &hir::ImplItem<'_>) {} . fn visit_foreign_item(&mut self, _foreign_item: &hir::ForeignItem<'_>) {} . } . . let mut visitor = TraitsVisitor::default(); 4 ( 0.00%) tcx.hir().visit_all_item_likes(&mut visitor); . // Bring everything into deterministic order. . visitor.traits.sort_by_cached_key(|&def_id| tcx.def_path_hash(def_id)); . tcx.arena.alloc_slice(&visitor.traits) . }, . . ..*providers . }; . } -- line 2257 ---------------------------------------- 160,642 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs -------------------------------------------------------------------------------- Ir -- line 141 ---------------------------------------- . /// ``` . /// let a = [1, 2, 3]; . /// assert!(!a.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.39.0")] . #[inline] . pub const fn is_empty(&self) -> bool { 928,270 ( 0.02%) self.len() == 0 . } . . /// Returns the first element of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 157 ---------------------------------------- -- line 159 ---------------------------------------- . /// . /// let w: &[i32] = &[]; . /// assert_eq!(None, w.first()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn first(&self) -> Option<&T> { 5,217 ( 0.00%) if let [first, ..] = self { Some(first) } else { None } . } . . /// Returns a mutable pointer to the first element of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 175 ---------------------------------------- -- line 178 ---------------------------------------- . /// *first = 5; . /// } . /// assert_eq!(x, &[5, 1, 2]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")] . #[inline] . pub const fn first_mut(&mut self) -> Option<&mut T> { 1,320 ( 0.00%) if let [first, ..] = self { Some(first) } else { None } . } . . /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &[0, 1, 2]; -- line 194 ---------------------------------------- -- line 237 ---------------------------------------- . /// assert_eq!(last, &2); . /// assert_eq!(elements, &[0, 1]); . /// } . /// ``` . #[stable(feature = "slice_splits", since = "1.5.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn split_last(&self) -> Option<(&T, &[T])> { 15,571 ( 0.00%) if let [init @ .., last] = self { Some((last, init)) } else { None } . } . . /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 253 ---------------------------------------- -- line 276 ---------------------------------------- . /// . /// let w: &[i32] = &[]; . /// assert_eq!(None, w.last()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . 
pub const fn last(&self) -> Option<&T> { 733,584 ( 0.02%) if let [.., last] = self { Some(last) } else { None } . } . . /// Returns a mutable pointer to the last item in the slice. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 292 ---------------------------------------- -- line 295 ---------------------------------------- . /// *last = 10; . /// } . /// assert_eq!(x, &[0, 1, 10]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")] . #[inline] . pub const fn last_mut(&mut self) -> Option<&mut T> { 365,009 ( 0.01%) if let [.., last] = self { Some(last) } else { None } . } . . /// Returns a reference to an element or subslice depending on the type of . /// index. . /// . /// - If given a position, returns a reference to the element at that . /// position or `None` if out of bounds. . /// - If given a range, returns the subslice corresponding to that range, -- line 311 ---------------------------------------- -- line 448 ---------------------------------------- . /// } . /// ``` . /// . /// [`as_mut_ptr`]: slice::as_mut_ptr . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")] . #[inline] . pub const fn as_ptr(&self) -> *const T { 585,361 ( 0.01%) self as *const [T] as *const T . } . . /// Returns an unsafe mutable pointer to the slice's buffer. . /// . /// The caller must ensure that the slice outlives the pointer this . /// function returns, or else it will end up pointing to garbage. . /// . /// Modifying the container referenced by this slice may cause its buffer -- line 464 ---------------------------------------- -- line 476 ---------------------------------------- . /// } . /// } . /// assert_eq!(x, &[3, 4, 6]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")] . #[inline] . pub const fn as_mut_ptr(&mut self) -> *mut T { 10 ( 0.00%) self as *mut [T] as *mut T . } . . /// Returns the two raw pointers spanning the slice. . /// . /// The returned range is half-open, which means that the end pointer . /// points *one past* the last element of the slice. This way, an empty . /// slice is represented by two equal pointers, and the difference between . /// the two pointers represents the size of the slice. -- line 492 ---------------------------------------- -- line 582 ---------------------------------------- . /// v.swap(2, 4); . /// assert!(v == ["a", "b", "e", "d", "c"]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_swap", issue = "83163")] . #[inline] . #[track_caller] . pub const fn swap(&mut self, a: usize, b: usize) { 32,798 ( 0.00%) let _ = &self[a]; 35,761 ( 0.00%) let _ = &self[b]; . . // SAFETY: we just checked that both `a` and `b` are in bounds . unsafe { self.swap_unchecked(a, b) } . } . . /// Swaps two elements in the slice, without doing bounds checking. . /// . /// For a safe alternative see [`swap`]. -- line 599 ---------------------------------------- -- line 677 ---------------------------------------- . . // Because this function is first compiled in isolation, . // this check tells LLVM that the indexing below is . // in-bounds. Then after inlining -- once the actual . // lengths of the slices are known -- it's removed. . let (a, b) = (&mut a[..n], &mut b[..n]); . . for i in 0..n { 478 ( 0.00%) mem::swap(&mut a[i], &mut b[n - 1 - i]); . } . } . } . . 
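--------------------------------------------------------------------------------
`slice::swap`, one of the slice primitives annotated above, first indexes both positions so the bounds checks are performed, and only then does the raw pointer exchange. A minimal sketch of that checked-then-unchecked pattern as a free function; `swap_checked` is a hypothetical helper, not a std API.

fn swap_checked<T>(s: &mut [T], a: usize, b: usize) {
    // Indexing panics on out-of-bounds, exactly like the
    // `let _ = &self[a];` / `let _ = &self[b];` lines in the profiled source.
    let _ = &s[a];
    let _ = &s[b];
    // SAFETY: both indices were bounds-checked just above.
    unsafe {
        let base = s.as_mut_ptr();
        std::ptr::swap(base.add(a), base.add(b));
    }
}

fn main() {
    let mut v = ["a", "b", "c", "d", "e"];
    swap_checked(&mut v, 2, 4);
    assert_eq!(v, ["a", "b", "e", "d", "c"]);
}
--------------------------------------------------------------------------------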
/// Returns an iterator over the slice. . /// . /// # Examples . /// -- line 693 ---------------------------------------- -- line 1499 ---------------------------------------- . /// assert_eq!(left, [1, 2, 3, 4, 5, 6]); . /// assert_eq!(right, []); . /// } . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . #[track_caller] . pub fn split_at(&self, mid: usize) -> (&[T], &[T]) { 198 ( 0.00%) assert!(mid <= self.len()); . // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which . // fulfills the requirements of `from_raw_parts_mut`. . unsafe { self.split_at_unchecked(mid) } . } . . /// Divides one mutable slice into two at an index. . /// . /// The first will contain all indices from `[0, mid)` (excluding -- line 1515 ---------------------------------------- -- line 1530 ---------------------------------------- . /// left[1] = 2; . /// right[1] = 4; . /// assert_eq!(v, [1, 2, 3, 4, 5, 6]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . #[track_caller] . pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) { 13,774 ( 0.00%) assert!(mid <= self.len()); . // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which . // fulfills the requirements of `from_raw_parts_mut`. . unsafe { self.split_at_mut_unchecked(mid) } . } . . /// Divides one slice into two at an index, without doing bounds checking. . /// . /// The first will contain all indices from `[0, mid)` (excluding -- line 1546 ---------------------------------------- -- line 1628 ---------------------------------------- . pub unsafe fn split_at_mut_unchecked(&mut self, mid: usize) -> (&mut [T], &mut [T]) { . let len = self.len(); . let ptr = self.as_mut_ptr(); . . // SAFETY: Caller has to check that `0 <= mid <= self.len()`. . // . // `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference . // is fine. 1,944 ( 0.00%) unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) } . } . . /// Divides one slice into an array and a remainder slice at an index. . /// . /// The array will contain all indices from `[0, N)` (excluding . /// the index `N` itself) and the slice will contain all . /// indices from `[N, len)` (excluding the index `len` itself). . /// -- line 1644 ---------------------------------------- -- line 2113 ---------------------------------------- . /// assert!(!v.iter().any(|e| e == "hi")); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . pub fn contains(&self, x: &T) -> bool . where . T: PartialEq, . { 360 ( 0.00%) cmp::SliceContains::slice_contains(x, self) . } . . /// Returns `true` if `needle` is a prefix of the slice. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 2129 ---------------------------------------- -- line 2142 ---------------------------------------- . /// assert!(v.starts_with(&[])); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn starts_with(&self, needle: &[T]) -> bool . where . T: PartialEq, . { . let n = needle.len(); 1,409 ( 0.00%) self.len() >= n && needle == &self[..n] . } . . /// Returns `true` if `needle` is a suffix of the slice. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 2158 ---------------------------------------- -- line 2171 ---------------------------------------- . /// assert!(v.ends_with(&[])); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn ends_with(&self, needle: &[T]) -> bool . where . T: PartialEq, . { . 
let (m, n) = (self.len(), needle.len()); 1,240 ( 0.00%) m >= n && needle == &self[m - n..] . } . . /// Returns a subslice with the prefix removed. . /// . /// If the slice starts with `prefix`, returns the subslice after the prefix, wrapped in `Some`. . /// If `prefix` is empty, simply returns the original slice. . /// . /// If the slice does not start with `prefix`, returns `None`. -- line 2187 ---------------------------------------- -- line 2293 ---------------------------------------- . /// s.insert(idx, num); . /// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn binary_search(&self, x: &T) -> Result . where . T: Ord, . { 3 ( 0.00%) self.binary_search_by(|p| p.cmp(x)) . } . . /// Binary searches this sorted slice with a comparator function. . /// . /// The comparator function should implement an order consistent . /// with the sort order of the underlying slice, returning an . /// order code that indicates whether its argument is `Less`, . /// `Equal` or `Greater` the desired target. -- line 2309 ---------------------------------------- -- line 2345 ---------------------------------------- . #[inline] . pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result . where . F: FnMut(&'a T) -> Ordering, . { . let mut size = self.len(); . let mut left = 0; . let mut right = size; 4,063,315 ( 0.09%) while left < right { 5,167,091 ( 0.11%) let mid = left + size / 2; . . // SAFETY: the call is made safe by the following invariants: . // - `mid >= 0` . // - `mid < size`: `mid` is limited by `[left; right)` bound. 915,437 ( 0.02%) let cmp = f(unsafe { self.get_unchecked(mid) }); . . // The reason why we use if/else control flow rather than match . // is because match reorders comparison operations, which is perf sensitive. . // This is x86 asm for u8: https://rust.godbolt.org/z/8Y8Pra. 1,320,336 ( 0.03%) if cmp == Less { 4,395,140 ( 0.10%) left = mid + 1; 399,484 ( 0.01%) } else if cmp == Greater { . right = mid; . } else { . // SAFETY: same as the `get_unchecked` above . unsafe { crate::intrinsics::assume(mid < self.len()) }; . return Ok(mid); . } . 3,874,862 ( 0.09%) size = right - left; . } . Err(left) . } . . /// Binary searches this sorted slice with a key extraction function. . /// . /// Assumes that the slice is sorted by the key, for instance with . /// [`sort_by_key`] using the same key extraction function. -- line 2382 ---------------------------------------- -- line 3203 ---------------------------------------- . #[track_caller] . fn len_mismatch_fail(dst_len: usize, src_len: usize) -> ! { . panic!( . "source slice length ({}) does not match destination slice length ({})", . src_len, dst_len, . ); . } . 34,572 ( 0.00%) if self.len() != src.len() { . len_mismatch_fail(self.len(), src.len()); . } . . // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was . // checked to have the same length. The slices cannot overlap because . // mutable references are exclusive. . unsafe { . ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len()); -- line 3219 ---------------------------------------- -- line 3382 ---------------------------------------- . } . let gcd: usize = gcd(mem::size_of::(), mem::size_of::()); . let ts: usize = mem::size_of::() / gcd; . let us: usize = mem::size_of::() / gcd; . . // Armed with this knowledge, we can find how many `U`s we can fit! . let us_len = self.len() / ts * us; . // And how many `T`s will be in the trailing slice! 
3,536 ( 0.00%) let ts_len = self.len() % ts; . (us_len, ts_len) . } . . /// Transmute the slice to a slice of another type, ensuring alignment of the types is . /// maintained. . /// . /// This method splits the slice into three distinct slices: prefix, correctly aligned middle . /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest -- line 3398 ---------------------------------------- -- line 3429 ---------------------------------------- . return (self, &[], &[]); . } . . // First, find at what point do we split between the first and 2nd slice. Easy with . // ptr.align_offset. . let ptr = self.as_ptr(); . // SAFETY: See the `align_to_mut` method for the detailed safety comment. . let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::()) }; 3,536 ( 0.00%) if offset > self.len() { . (self, &[], &[]) . } else { . let (left, rest) = self.split_at(offset); . let (us_len, ts_len) = rest.align_to_offsets::(); . // SAFETY: now `rest` is definitely aligned, so `from_raw_parts` below is okay, . // since the caller guarantees that we can transmute `T` to `U` safely. . unsafe { . ( . left, . from_raw_parts(rest.as_ptr() as *const U, us_len), 3,536 ( 0.00%) from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len), . ) . } . } . } . . /// Transmute the slice to a slice of another type, ensuring alignment of the types is . /// maintained. . /// -- line 3456 ---------------------------------------- 2,630,154 ( 0.06%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/tokentrees.rs -------------------------------------------------------------------------------- Ir -- line 8 ---------------------------------------- . }; . use rustc_ast_pretty::pprust::token_to_string; . use rustc_data_structures::fx::FxHashMap; . use rustc_errors::PResult; . use rustc_span::Span; . . impl<'a> StringReader<'a> { . pub(super) fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec) { 171 ( 0.00%) let mut tt_reader = TokenTreesReader { . string_reader: self, 9 ( 0.00%) token: Token::dummy(), . open_braces: Vec::new(), . unmatched_braces: Vec::new(), . matching_delim_spans: Vec::new(), . last_unclosed_found_span: None, . last_delim_empty_block_spans: FxHashMap::default(), . matching_block_spans: Vec::new(), . }; 9 ( 0.00%) let res = tt_reader.parse_all_token_trees(); 54 ( 0.00%) (res, tt_reader.unmatched_braces) . } . } . . struct TokenTreesReader<'a> { . string_reader: StringReader<'a>, . token: Token, . /// Stack of open delimiters and their spans. Used for error message. . open_braces: Vec<(token::DelimToken, Span)>, -- line 35 ---------------------------------------- -- line 43 ---------------------------------------- . last_delim_empty_block_spans: FxHashMap, . /// Collect the spans of braces (Open, Close). Used only . /// for detecting if blocks are empty and only braces. . matching_block_spans: Vec<(Span, Span)>, . } . . impl<'a> TokenTreesReader<'a> { . // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`. 72 ( 0.00%) fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { . let mut buf = TokenStreamBuilder::default(); . . self.bump(); 544 ( 0.00%) while self.token != token::Eof { 2,104 ( 0.00%) buf.push(self.parse_token_tree()?); . } . . Ok(buf.into_token_stream()) 81 ( 0.00%) } . . // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`. . 
fn parse_token_trees_until_close_delim(&mut self) -> TokenStream { . let mut buf = TokenStreamBuilder::default(); . loop { 164,828 ( 0.00%) if let token::CloseDelim(..) = self.token.kind { . return buf.into_token_stream(); . } . 363,895 ( 0.01%) match self.parse_token_tree() { 655,011 ( 0.01%) Ok(tree) => buf.push(tree), . Err(mut e) => { . e.emit(); . return buf.into_token_stream(); . } . } . } . } . 657,378 ( 0.01%) fn parse_token_tree(&mut self) -> PResult<'a, TreeAndSpacing> { 146,084 ( 0.00%) let sm = self.string_reader.sess.source_map(); . 618,838 ( 0.01%) match self.token.kind { . token::Eof => { . let msg = "this file contains an unclosed delimiter"; . let mut err = . self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg); . for &(_, sp) in &self.open_braces { . err.span_label(sp, "unclosed delimiter"); . self.unmatched_braces.push(UnmatchedBrace { . expected_delim: token::DelimToken::Brace, -- line 91 ---------------------------------------- -- line 113 ---------------------------------------- . err.span_label( . *close_sp, . "...as it matches this but it has different indentation", . ); . } . } . Err(err) . } 9,635 ( 0.00%) token::OpenDelim(delim) => { . // The span for beginning of the delimited section 9,635 ( 0.00%) let pre_span = self.token.span; . . // Parse the open delimiter. 67,445 ( 0.00%) self.open_braces.push((delim, self.token.span)); . self.bump(); . . // Parse the token trees within the delimiters. . // We stop at any delimiter so we can try to recover if the user . // uses an incorrect delimiter. 9,635 ( 0.00%) let tts = self.parse_token_trees_until_close_delim(); . . // Expand to cover the entire delimited token tree 48,175 ( 0.00%) let delim_span = DelimSpan::from_pair(pre_span, self.token.span); . 19,270 ( 0.00%) match self.token.kind { . // Correct delimiter. 19,270 ( 0.00%) token::CloseDelim(d) if d == delim => { . let (open_brace, open_brace_span) = self.open_braces.pop().unwrap(); 19,270 ( 0.00%) let close_brace_span = self.token.span; . 28,905 ( 0.00%) if tts.is_empty() { 99 ( 0.00%) let empty_block_span = open_brace_span.to(close_brace_span); 132 ( 0.00%) if !sm.is_multiline(empty_block_span) { . // Only track if the block is in the form of `{}`, otherwise it is . // likely that it was written on purpose. . self.last_delim_empty_block_spans.insert(delim, empty_block_span); . } . } . . //only add braces 38,540 ( 0.00%) if let (DelimToken::Brace, DelimToken::Brace) = (open_brace, delim) { . self.matching_block_spans.push((open_brace_span, close_brace_span)); . } . 9,635 ( 0.00%) if self.open_braces.is_empty() { . // Clear up these spans to avoid suggesting them as we've found . // properly matched delimiters so far for an entire block. . self.matching_delim_spans.clear(); . } else { . self.matching_delim_spans.push(( . open_brace, . open_brace_span, . close_brace_span, -- line 165 ---------------------------------------- -- line 218 ---------------------------------------- . token::Eof => { . // Silently recover, the EOF token will be seen again . // and an error emitted then. Thus we don't pop from . // self.open_braces here. . } . _ => {} . } . 67,445 ( 0.00%) Ok(TokenTree::Delimited(delim_span, delim, tts).into()) . } . token::CloseDelim(delim) => { . // An unexpected closing delimiter (i.e., there is no . // matching opening delimiter). . let token_str = token_to_string(&self.token); . let msg = format!("unexpected closing delimiter: `{}`", token_str); . let mut err = . 
self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, &msg); -- line 234 ---------------------------------------- -- line 253 ---------------------------------------- . err.span_label(parent.1, "...matches this closing brace"); . } . } . . err.span_label(self.token.span, "unexpected closing delimiter"); . Err(err) . } . _ => { 317,035 ( 0.01%) let tt = TokenTree::Token(self.token.take()); . let mut spacing = self.bump(); 126,814 ( 0.00%) if !self.token.is_op() { . spacing = Alone; . } 380,442 ( 0.01%) Ok((tt, spacing)) . } . } 584,336 ( 0.01%) } . . fn bump(&mut self) -> Spacing { 695,972 ( 0.02%) let (spacing, token) = self.string_reader.next_token(); 365,291 ( 0.01%) self.token = token; . spacing . } . } . 9,635 ( 0.00%) #[derive(Default)] . struct TokenStreamBuilder { . buf: Vec, . } . . impl TokenStreamBuilder { 949,546 ( 0.02%) fn push(&mut self, (tree, joint): TreeAndSpacing) { 485,687 ( 0.01%) if let Some((TokenTree::Token(prev_token), Joint)) = self.buf.last() { 57,344 ( 0.00%) if let TokenTree::Token(token) = &tree { 142,872 ( 0.00%) if let Some(glued) = prev_token.glue(token) { . self.buf.pop(); 170,568 ( 0.00%) self.buf.push((TokenTree::Token(glued), joint)); . return; . } . } . } 294,140 ( 0.01%) self.buf.push((tree, joint)) 584,336 ( 0.01%) } . . fn into_token_stream(self) -> TokenStream { 96,431 ( 0.00%) TokenStream::new(self.buf) . } . } 261,422 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs -------------------------------------------------------------------------------- Ir -- line 27 ---------------------------------------- . impl CacheEntry { . #[inline] . fn update( . &mut self, . new_file_and_idx: Option<(Lrc, usize)>, . pos: BytePos, . time_stamp: usize, . ) { 36,363 ( 0.00%) if let Some((file, file_idx)) = new_file_and_idx { 44,182 ( 0.00%) self.file = file; 59,750 ( 0.00%) self.file_index = file_idx; . } . 10,981 ( 0.00%) let line_index = self.file.lookup_line(pos).unwrap(); . let line_bounds = self.file.line_bounds(line_index); 91,250 ( 0.00%) self.line_number = line_index + 1; 146,275 ( 0.00%) self.line = line_bounds; . self.touch(time_stamp); . } . . #[inline] . fn touch(&mut self, time_stamp: usize) { 171,261 ( 0.00%) self.time_stamp = time_stamp; . } . } . . #[derive(Clone)] . pub struct CachingSourceMapView<'sm> { . source_map: &'sm SourceMap, . line_cache: [CacheEntry; 3], . time_stamp: usize, . } . . impl<'sm> CachingSourceMapView<'sm> { 10,680 ( 0.00%) pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> { . let files = source_map.files(); 5,340 ( 0.00%) let first_file = files[0].clone(); . let entry = CacheEntry { . time_stamp: 0, . line_number: 0, . line: BytePos(0)..BytePos(0), . file: first_file, . file_index: 0, . }; . 26,700 ( 0.00%) CachingSourceMapView { . source_map, 37,380 ( 0.00%) line_cache: [entry.clone(), entry.clone(), entry], . time_stamp: 0, . } 21,360 ( 0.00%) } . . pub fn byte_pos_to_line_and_col( . &mut self, . pos: BytePos, . ) -> Option<(Lrc, usize, BytePos)> { . self.time_stamp += 1; . . // Check if the position is in one of the cached lines -- line 85 ---------------------------------------- -- line 106 ---------------------------------------- . }; . . let cache_entry = &mut self.line_cache[oldest]; . cache_entry.update(new_file_and_idx, pos, self.time_stamp); . . 
Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line.start)) . } . 725,718 ( 0.02%) pub fn span_data_to_lines_and_cols( . &mut self, . span_data: &SpanData, . ) -> Option<(Lrc, usize, BytePos, usize, BytePos)> { 414,696 ( 0.01%) self.time_stamp += 1; . . // Check if lo and hi are in the cached lines. 103,674 ( 0.00%) let lo_cache_idx = self.cache_entry_index(span_data.lo); 103,674 ( 0.00%) let hi_cache_idx = self.cache_entry_index(span_data.hi); . 135,180 ( 0.00%) if lo_cache_idx != -1 && hi_cache_idx != -1 { . // Cache hit for span lo and hi. Check if they belong to the same file. . let result = { 135,174 ( 0.00%) let lo = &self.line_cache[lo_cache_idx as usize]; . let hi = &self.line_cache[hi_cache_idx as usize]; . 405,522 ( 0.01%) if lo.file_index != hi.file_index { . return None; . } . . ( 135,174 ( 0.00%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 67,587 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . ) . }; . 67,587 ( 0.00%) self.line_cache[lo_cache_idx as usize].touch(self.time_stamp); 67,587 ( 0.00%) self.line_cache[hi_cache_idx as usize].touch(self.time_stamp); . 270,348 ( 0.01%) return Some(result); . } . . // No cache hit or cache hit for only one of span lo and hi. 72,168 ( 0.00%) let oldest = if lo_cache_idx != -1 || hi_cache_idx != -1 { . let avoid_idx = if lo_cache_idx != -1 { lo_cache_idx } else { hi_cache_idx }; . self.oldest_cache_entry_index_avoid(avoid_idx as usize) . } else { . self.oldest_cache_entry_index() . }; . . // If the entry doesn't point to the correct file, get the new file and index. . // Return early if the file containing beginning of span doesn't contain end of span. 269,360 ( 0.01%) let new_file_and_idx = if !file_contains(&self.line_cache[oldest].file, span_data.lo) { 125,530 ( 0.00%) let new_file_and_idx = self.file_for_position(span_data.lo)?; 150,636 ( 0.00%) if !file_contains(&new_file_and_idx.0, span_data.hi) { . return None; . } . 100,424 ( 0.00%) Some(new_file_and_idx) . } else { . let file = &self.line_cache[oldest].file; 32,943 ( 0.00%) if !file_contains(&file, span_data.hi) { . return None; . } . 32,943 ( 0.00%) None . }; . . // Update the cache entries. 180,435 ( 0.00%) let (lo_idx, hi_idx) = match (lo_cache_idx, hi_cache_idx) { . // Oldest cache entry is for span_data.lo line. . (-1, -1) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); . 107,838 ( 0.00%) if !lo.line.contains(&span_data.hi) { . let new_file_and_idx = Some((lo.file.clone(), lo.file_index)); . let next_oldest = self.oldest_cache_entry_index_avoid(oldest); . let hi = &mut self.line_cache[next_oldest]; . hi.update(new_file_and_idx, span_data.hi, self.time_stamp); . (oldest, next_oldest) . } else { . (oldest, oldest) . } . } . // Oldest cache entry is for span_data.lo line. . (-1, _) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); 6 ( 0.00%) let hi = &mut self.line_cache[hi_cache_idx as usize]; 9 ( 0.00%) hi.touch(self.time_stamp); . (oldest, hi_cache_idx as usize) . } . // Oldest cache entry is for span_data.hi line. . (_, -1) => { . let hi = &mut self.line_cache[oldest]; 138 ( 0.00%) hi.update(new_file_and_idx, span_data.hi, self.time_stamp); 276 ( 0.00%) let lo = &mut self.line_cache[lo_cache_idx as usize]; 552 ( 0.00%) lo.touch(self.time_stamp); . (lo_cache_idx as usize, oldest) . } . _ => { . panic!(); . } . }; . . let lo = &self.line_cache[lo_idx]; . let hi = &self.line_cache[hi_idx]; . . 
// Span lo and hi may equal line end when last line doesn't . // end in newline, hence the inclusive upper bounds below. 72,174 ( 0.00%) assert!(span_data.lo >= lo.line.start); 36,087 ( 0.00%) assert!(span_data.lo <= lo.line.end); 72,174 ( 0.00%) assert!(span_data.hi >= hi.line.start); 36,087 ( 0.00%) assert!(span_data.hi <= hi.line.end); 180,435 ( 0.00%) assert!(lo.file.contains(span_data.lo)); 108,261 ( 0.00%) assert!(lo.file.contains(span_data.hi)); 108,261 ( 0.00%) assert_eq!(lo.file_index, hi.file_index); . 108,261 ( 0.00%) Some(( 36,087 ( 0.00%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 36,087 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . )) 933,066 ( 0.02%) } . . fn cache_entry_index(&self, pos: BytePos) -> isize { . for (idx, cache_entry) in self.line_cache.iter().enumerate() { 1,500,815 ( 0.03%) if cache_entry.line.contains(&pos) { . return idx as isize; . } . } . . -1 . } . . fn oldest_cache_entry_index(&self) -> usize { . let mut oldest = 0; . . for idx in 1..self.line_cache.len() { 215,676 ( 0.00%) if self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp { . oldest = idx; . } . } . . oldest . } . . fn oldest_cache_entry_index_avoid(&self, avoid_idx: usize) -> usize { . let mut oldest = if avoid_idx != 0 { 0 } else { 1 }; . . for idx in 0..self.line_cache.len() { 48,621 ( 0.00%) if idx != avoid_idx 38,552 ( 0.00%) && self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp . { . oldest = idx; . } . } . . oldest . } . 125,530 ( 0.00%) fn file_for_position(&self, pos: BytePos) -> Option<(Lrc, usize)> { 25,106 ( 0.00%) if !self.source_map.files().is_empty() { 50,212 ( 0.00%) let file_idx = self.source_map.lookup_source_file_idx(pos); . let file = &self.source_map.files()[file_idx]; . 200,848 ( 0.00%) if file_contains(file, pos) { . return Some((file.clone(), file_idx)); . } . } . . None 125,530 ( 0.00%) } . } . . #[inline] . fn file_contains(file: &SourceFile, pos: BytePos) -> bool { . // `SourceMap::lookup_source_file_idx` and `SourceFile::contains` both consider the position . // one past the end of a file to belong to it. Normally, that's what we want. But for the . // purposes of converting a byte position to a line and column number, we can't come up with a . // line and column number if the file is empty, because an empty file doesn't contain any -- line 290 ---------------------------------------- 2,085,086 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/token.rs -------------------------------------------------------------------------------- Ir -- line 12 ---------------------------------------- . use rustc_data_structures::sync::Lrc; . use rustc_macros::HashStable_Generic; . use rustc_span::symbol::{kw, sym}; . use rustc_span::symbol::{Ident, Symbol}; . use rustc_span::{self, edition::Edition, Span, DUMMY_SP}; . use std::borrow::Cow; . use std::{fmt, mem}; . 8,772 ( 0.00%) #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] . pub enum CommentKind { . Line, . Block, . } . 101 ( 0.00%) #[derive(Clone, PartialEq, Encodable, Decodable, Hash, Debug, Copy)] . #[derive(HashStable_Generic)] . pub enum BinOpToken { . Plus, . Minus, . Star, . Slash, . Percent, . Caret, . And, . Or, . Shl, . Shr, . } . . /// A delimiter token. 82,732 ( 0.00%) #[derive(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Debug, Copy)] . #[derive(HashStable_Generic)] . 
pub enum DelimToken { . /// A round parenthesis (i.e., `(` or `)`). . Paren, . /// A square bracket (i.e., `[` or `]`). . Bracket, . /// A curly brace (i.e., `{` or `}`). . Brace, . /// An empty delimiter. . NoDelim, . } . 16,721 ( 0.00%) #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] . pub enum LitKind { . Bool, // AST only, must never appear in a `Token` . Byte, . Char, . Integer, . Float, . Str, . StrRaw(u16), // raw string delimited by `n` hash symbols . ByteStr, . ByteStrRaw(u16), // raw byte string delimited by `n` hash symbols . Err, . } . . /// A literal token. 140,533 ( 0.00%) #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] . pub struct Lit { . pub kind: LitKind, . pub symbol: Symbol, . pub suffix: Option, . } . . impl fmt::Display for Lit { 45,570 ( 0.00%) fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 82,026 ( 0.00%) let Lit { kind, symbol, suffix } = *self; . match kind { . Byte => write!(f, "b'{}'", symbol)?, 27,324 ( 0.00%) Char => write!(f, "'{}'", symbol)?, 9 ( 0.00%) Str => write!(f, "\"{}\"", symbol)?, . StrRaw(n) => write!( . f, . "r{delim}\"{string}\"{delim}", . delim = "#".repeat(n as usize), . string = symbol . )?, . ByteStr => write!(f, "b\"{}\"", symbol)?, . ByteStrRaw(n) => write!( . f, . "br{delim}\"{string}\"{delim}", . delim = "#".repeat(n as usize), . string = symbol . )?, 9 ( 0.00%) Integer | Float | Bool | Err => write!(f, "{}", symbol)?, . } . 18,228 ( 0.00%) if let Some(suffix) = suffix { . write!(f, "{}", suffix)?; . } . . Ok(()) 45,570 ( 0.00%) } . } . . impl LitKind { . /// An English article for the literal token kind. . pub fn article(self) -> &'static str { . match self { . Integer | Err => "an", . _ => "a", -- line 113 ---------------------------------------- -- line 130 ---------------------------------------- . crate fn may_have_suffix(self) -> bool { . matches!(self, Integer | Float | Err) . } . } . . impl Lit { . pub fn new(kind: LitKind, symbol: Symbol, suffix: Option) -> Lit { . Lit { kind, symbol, suffix } 69,331 ( 0.00%) } . } . 880 ( 0.00%) pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool { 1,540 ( 0.00%) let ident_token = Token::new(Ident(name, is_raw), span); . 440 ( 0.00%) !ident_token.is_reserved_ident() . || ident_token.is_path_segment_keyword() . || [ . kw::Async, . kw::Do, . kw::Box, . kw::Break, . kw::Const, . kw::Continue, -- line 152 ---------------------------------------- -- line 161 ---------------------------------------- . kw::True, . kw::Try, . kw::Unsafe, . kw::While, . kw::Yield, . kw::Static, . ] . .contains(&name) 1,100 ( 0.00%) } . . fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { 54 ( 0.00%) let ident_token = Token::new(Ident(name, is_raw), span); . 36 ( 0.00%) !ident_token.is_reserved_ident() . || ident_token.is_path_segment_keyword() . || [kw::Underscore, kw::For, kw::Impl, kw::Fn, kw::Unsafe, kw::Extern, kw::Typeof, kw::Dyn] . .contains(&name) . } . 14,091,723 ( 0.31%) #[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] . pub enum TokenKind { . /* Expression-operator symbols. */ . Eq, . Lt, . Le, . EqEq, . Ne, . Ge, . Gt, . AndAnd, . OrOr, . Not, . Tilde, 48,672 ( 0.00%) BinOp(BinOpToken), . BinOpEq(BinOpToken), . . /* Structural symbols */ . At, . Dot, . DotDot, . DotDotDot, . DotDotEq, -- line 202 ---------------------------------------- -- line 208 ---------------------------------------- . LArrow, . FatArrow, . Pound, . Dollar, . Question, . 
/// Used by proc macros for representing lifetimes, not generated by lexer right now. . SingleQuote, . /// An opening delimiter (e.g., `{`). 155,538 ( 0.00%) OpenDelim(DelimToken), . /// A closing delimiter (e.g., `}`). 85,668 ( 0.00%) CloseDelim(DelimToken), . . /* Literals */ 190,518 ( 0.00%) Literal(Lit), . . /// Identifier token. . /// Do not forget about `NtIdent` when you want to match on identifiers. . /// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to . /// treat regular and interpolated identifiers in the same way. 92,644 ( 0.00%) Ident(Symbol, /* is_raw */ bool), . /// Lifetime identifier token. . /// Do not forget about `NtLifetime` when you want to match on lifetime identifiers. . /// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to . /// treat regular and interpolated lifetime identifiers in the same way. 168 ( 0.00%) Lifetime(Symbol), . 7,636 ( 0.00%) Interpolated(Lrc), . . /// A doc comment token. . /// `Symbol` is the doc comment's data excluding its "quotes" (`///`, `/**`, etc) . /// similarly to symbols in string literal tokens. 4,227 ( 0.00%) DocComment(CommentKind, ast::AttrStyle, Symbol), . . Eof, . } . . // `TokenKind` is used a lot. Make sure it doesn't unintentionally get bigger. . #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] . rustc_data_structures::static_assert_size!(TokenKind, 16); . 916,836 ( 0.02%) #[derive(Clone, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] . pub struct Token { 68,379 ( 0.00%) pub kind: TokenKind, 10 ( 0.00%) pub span: Span, . } . . impl TokenKind { . pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option) -> TokenKind { . Literal(Lit::new(kind, symbol, suffix)) 2,295 ( 0.00%) } . . // An approximation to proc-macro-style single-character operators used by rustc parser. . // If the operator token can be broken into two tokens, the first of which is single-character, . // then this function performs that operation, otherwise it returns `None`. 14,749 ( 0.00%) pub fn break_two_token_op(&self) -> Option<(TokenKind, TokenKind)> { 58,996 ( 0.00%) Some(match *self { . Le => (Lt, Eq), . EqEq => (Eq, Eq), . Ne => (Not, Eq), . Ge => (Gt, Eq), . AndAnd => (BinOp(And), BinOp(And)), . OrOr => (BinOp(Or), BinOp(Or)), . BinOp(Shl) => (Lt, Lt), . BinOp(Shr) => (Gt, Gt), -- line 271 ---------------------------------------- -- line 280 ---------------------------------------- . BinOpEq(Shl) => (Lt, Le), . BinOpEq(Shr) => (Gt, Ge), . DotDot => (Dot, Dot), . DotDotDot => (Dot, DotDot), . ModSep => (Colon, Colon), . RArrow => (BinOp(Minus), Gt), . LArrow => (Lt, BinOp(Minus)), . FatArrow => (Eq, Gt), 14,749 ( 0.00%) _ => return None, . }) 14,749 ( 0.00%) } . . /// Returns tokens that are likely to be typed accidentally instead of the current token. . /// Enables better error recovery when the wrong token is found. . pub fn similar_tokens(&self) -> Option> { . match *self { . Comma => Some(vec![Dot, Lt, Semi]), . Semi => Some(vec![Colon, Comma]), . FatArrow => Some(vec![Eq, RArrow]), . _ => None, . } . } . . pub fn should_end_const_arg(&self) -> bool { 24 ( 0.00%) matches!(self, Gt | Ge | BinOp(Shr) | BinOpEq(Shr)) 6 ( 0.00%) } . } . . impl Token { 95,109 ( 0.00%) pub fn new(kind: TokenKind, span: Span) -> Self { 387,112 ( 0.01%) Token { kind, span } 95,109 ( 0.00%) } . . /// Some token that will be thrown away later. 33,623 ( 0.00%) pub fn dummy() -> Self { . Token::new(TokenKind::Question, DUMMY_SP) 33,623 ( 0.00%) } . . /// Recovers a `Token` from an `Ident`. 
This creates a raw identifier if necessary. . pub fn from_ast_ident(ident: Ident) -> Self { 312 ( 0.00%) Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span) . } . . /// Return this token by value and leave a dummy token in its place. 63,407 ( 0.00%) pub fn take(&mut self) -> Self { . mem::replace(self, Token::dummy()) 63,407 ( 0.00%) } . . /// For interpolated tokens, returns a span of the fragment to which the interpolated . /// token refers. For all other tokens this is just a regular span. . /// It is particularly important to use this for identifiers and lifetimes . /// for which spans affect name resolution and edition checks. . /// Note that keywords are also identifiers, so they should use this . /// if they keep spans or perform edition checks. . pub fn uninterpolated_span(&self) -> Span { 252 ( 0.00%) match &self.kind { . Interpolated(nt) => nt.span(), 126 ( 0.00%) _ => self.span, . } 126 ( 0.00%) } . . pub fn is_op(&self) -> bool { 190,221 ( 0.00%) !matches!( 63,407 ( 0.00%) self.kind, . OpenDelim(..) . | CloseDelim(..) . | Literal(..) . | DocComment(..) . | Ident(..) . | Lifetime(..) . | Interpolated(..) . | Eof . ) 63,407 ( 0.00%) } . . pub fn is_like_plus(&self) -> bool { 970 ( 0.00%) matches!(self.kind, BinOp(Plus) | BinOpEq(Plus)) 194 ( 0.00%) } . . /// Returns `true` if the token can appear at the start of an expression. 296 ( 0.00%) pub fn can_begin_expr(&self) -> bool { 1,480 ( 0.00%) match self.uninterpolate().kind { 1,980 ( 0.00%) Ident(name, is_raw) => 660 ( 0.00%) ident_can_begin_expr(name, self.span, is_raw), // value name or keyword . OpenDelim(..) | // tuple, array or block . Literal(..) | // literal . Not | // operator not . BinOp(Minus) | // unary minus . BinOp(Star) | // dereference . BinOp(Or) | OrOr | // closure . BinOp(And) | // reference . AndAnd | // double reference . // DotDotDot is no longer supported, but we need some way to display the error . DotDot | DotDotDot | DotDotEq | // range notation . Lt | BinOp(Shl) | // associated path . ModSep | // global path . Lifetime(..) | // labeled loop . Pound => true, // expression attributes 72 ( 0.00%) Interpolated(ref nt) => matches!(**nt, NtLiteral(..) | . NtExpr(..) | . NtBlock(..) | . NtPath(..)), . _ => false, . } 592 ( 0.00%) } . . /// Returns `true` if the token can appear at the start of a type. 48 ( 0.00%) pub fn can_begin_type(&self) -> bool { 72 ( 0.00%) match self.uninterpolate().kind { 81 ( 0.00%) Ident(name, is_raw) => 9 ( 0.00%) ident_can_begin_type(name, self.span, is_raw), // type name or keyword . OpenDelim(Paren) | // tuple . OpenDelim(Bracket) | // array . Not | // never . BinOp(Star) | // raw pointer . BinOp(And) | // reference . AndAnd | // double reference . Question | // maybe bound in trait object . Lifetime(..) | // lifetime bound in trait object . Lt | BinOp(Shl) | // associated path . ModSep => true, // global path . Interpolated(ref nt) => matches!(**nt, NtTy(..) | NtPath(..)), . _ => false, . } 72 ( 0.00%) } . . /// Returns `true` if the token can appear at the start of a const param. . pub fn can_begin_const_arg(&self) -> bool { 45 ( 0.00%) match self.kind { . OpenDelim(Brace) => true, . Interpolated(ref nt) => matches!(**nt, NtExpr(..) | NtBlock(..) | NtLiteral(..)), 9 ( 0.00%) _ => self.can_begin_literal_maybe_minus(), . } . } . . /// Returns `true` if the token can appear at the start of a generic bound. . pub fn can_begin_bound(&self) -> bool { . self.is_path_start() . || self.is_lifetime() . 
|| self.is_keyword(kw::For) -- line 419 ---------------------------------------- -- line 427 ---------------------------------------- . } . . /// Returns `true` if the token is any literal, a minus (which can prefix a literal, . /// for example a '-42', or one of the boolean idents). . /// . /// In other words, would this token be a valid start of `parse_literal_maybe_minus`? . /// . /// Keep this in sync with and `Lit::from_token`, excluding unary negation. 11 ( 0.00%) pub fn can_begin_literal_maybe_minus(&self) -> bool { 110 ( 0.00%) match self.uninterpolate().kind { . Literal(..) | BinOp(Minus) => true, 44 ( 0.00%) Ident(name, false) if name.is_bool_lit() => true, . Interpolated(ref nt) => match &**nt { . NtLiteral(_) => true, . NtExpr(e) => match &e.kind { . ast::ExprKind::Lit(_) => true, . ast::ExprKind::Unary(ast::UnOp::Neg, e) => { . matches!(&e.kind, ast::ExprKind::Lit(_)) . } . _ => false, . }, . _ => false, . }, . _ => false, . } 22 ( 0.00%) } . . // A convenience function for matching on identifiers during parsing. . // Turns interpolated identifier (`$i: ident`) or lifetime (`$l: lifetime`) token . // into the regular identifier or lifetime token it refers to, . // otherwise returns the original token. 29,587 ( 0.00%) pub fn uninterpolate(&self) -> Cow<'_, Token> { 701,262 ( 0.02%) match &self.kind { 50,793 ( 0.00%) Interpolated(nt) => match **nt { 11,001 ( 0.00%) NtIdent(ident, is_raw) => { 27,585 ( 0.00%) Cow::Owned(Token::new(Ident(ident.name, is_raw), ident.span)) . } . NtLifetime(ident) => Cow::Owned(Token::new(Lifetime(ident.name), ident.span)), . _ => Cow::Borrowed(self), . }, . _ => Cow::Borrowed(self), . } 29,587 ( 0.00%) } . . /// Returns an identifier if this token is an identifier. 179,018 ( 0.00%) pub fn ident(&self) -> Option<(Ident, /* is_raw */ bool)> { . let token = self.uninterpolate(); 502,354 ( 0.01%) match token.kind { 1,593,027 ( 0.03%) Ident(name, is_raw) => Some((Ident::new(name, token.span), is_raw)), . _ => None, . } 1,203,628 ( 0.03%) } . . /// Returns a lifetime identifier if this token is a lifetime. 22,965 ( 0.00%) pub fn lifetime(&self) -> Option { . let token = self.uninterpolate(); 46,092 ( 0.00%) match token.kind { 294 ( 0.00%) Lifetime(name) => Some(Ident::new(name, token.span)), . _ => None, . } 160,755 ( 0.00%) } . . /// Returns `true` if the token is an identifier. . pub fn is_ident(&self) -> bool { . self.ident().is_some() 950 ( 0.00%) } . . /// Returns `true` if the token is a lifetime. 81 ( 0.00%) pub fn is_lifetime(&self) -> bool { . self.lifetime().is_some() 162 ( 0.00%) } . . /// Returns `true` if the token is an identifier whose name is the given . /// string slice. 14,427 ( 0.00%) pub fn is_ident_named(&self, name: Symbol) -> bool { . self.ident().map_or(false, |(ident, _)| ident.name == name) 28,854 ( 0.00%) } . . /// Returns `true` if the token is an interpolated path. . fn is_path(&self) -> bool { 60,224 ( 0.00%) if let Interpolated(ref nt) = self.kind { 934 ( 0.00%) if let NtPath(..) = **nt { . return true; . } . } . false . } . . /// Would `maybe_whole_expr` in `parser.rs` return `Ok(..)`? . /// That is, is this a pre-parsed expression dropped into the token stream -- line 516 ---------------------------------------- -- line 535 ---------------------------------------- . false . } . . /// Returns `true` if the token is either the `mut` or `const` keyword. . pub fn is_mutability(&self) -> bool { . self.is_keyword(kw::Mut) || self.is_keyword(kw::Const) . } . 
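An aside on the `uninterpolate` helper annotated above: the parser calls it on nearly every token, and its trick is to return a `Cow` so the common case stays a cheap borrow and only interpolated tokens pay for building a replacement. A minimal standalone sketch of that pattern, using a hypothetical `MiniToken` instead of the real `rustc_ast` types:

use std::borrow::Cow;

// Hypothetical stand-in for `Token`: either a plain identifier or an
// interpolated fragment that still needs to be turned into one.
#[derive(Clone, Debug, PartialEq)]
enum MiniToken {
    Ident(String),
    Interpolated(String),
}

// Return the token unchanged (borrowed) unless it is interpolated, in
// which case build the replacement token (owned).
fn uninterpolate(tok: &MiniToken) -> Cow<'_, MiniToken> {
    match tok {
        MiniToken::Interpolated(name) => Cow::Owned(MiniToken::Ident(name.clone())),
        _ => Cow::Borrowed(tok),
    }
}

fn main() {
    let plain = MiniToken::Ident("x".into());
    assert!(matches!(uninterpolate(&plain), Cow::Borrowed(_))); // no allocation
    let nt = MiniToken::Interpolated("y".into());
    assert_eq!(*uninterpolate(&nt), MiniToken::Ident("y".into()));
}

The intent is that the borrowed arm is the common case, so callers such as `ident()` and `is_keyword` can treat regular and interpolated tokens uniformly without cloning every token they inspect.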
366 ( 0.00%) pub fn is_qpath_start(&self) -> bool { 30,478 ( 0.00%) self == &Lt || self == &BinOp(Shl) . } . 60,224 ( 0.00%) pub fn is_path_start(&self) -> bool { 90,507 ( 0.00%) self == &ModSep . || self.is_qpath_start() . || self.is_path() . || self.is_path_segment_keyword() 9,984 ( 0.00%) || self.is_ident() && !self.is_reserved_ident() 75,280 ( 0.00%) } . . /// Returns `true` if the token is a given keyword, `kw`. 25,606 ( 0.00%) pub fn is_keyword(&self, kw: Symbol) -> bool { . self.is_non_raw_ident_where(|id| id.name == kw) 51,212 ( 0.00%) } . 287 ( 0.00%) pub fn is_path_segment_keyword(&self) -> bool { . self.is_non_raw_ident_where(Ident::is_path_segment_keyword) . } . . // Returns true for reserved identifiers used internally for elided lifetimes, . // unnamed method parameters, crate root module, error recovery etc. 56 ( 0.00%) pub fn is_special_ident(&self) -> bool { . self.is_non_raw_ident_where(Ident::is_special) . } . . /// Returns `true` if the token is a keyword used in the language. 56 ( 0.00%) pub fn is_used_keyword(&self) -> bool { . self.is_non_raw_ident_where(Ident::is_used_keyword) . } . . /// Returns `true` if the token is a keyword reserved for possible future use. 56 ( 0.00%) pub fn is_unused_keyword(&self) -> bool { . self.is_non_raw_ident_where(Ident::is_unused_keyword) . } . . /// Returns `true` if the token is either a special identifier or a keyword. . pub fn is_reserved_ident(&self) -> bool { . self.is_non_raw_ident_where(Ident::is_reserved) . } . . /// Returns `true` if the token is the identifier `true` or `false`. 287 ( 0.00%) pub fn is_bool_lit(&self) -> bool { 574 ( 0.00%) self.is_non_raw_ident_where(|id| id.name.is_bool_lit()) . } . . pub fn is_numeric_lit(&self) -> bool { . matches!( . self.kind, . Literal(Lit { kind: LitKind::Integer, .. }) | Literal(Lit { kind: LitKind::Float, .. }) . ) . } . . /// Returns `true` if the token is a non-raw identifier for which `pred` holds. 24 ( 0.00%) pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool { 612,811 ( 0.01%) match self.ident() { 10,321 ( 0.00%) Some((id, false)) => pred(id), . _ => false, . } 30 ( 0.00%) } . 286,720 ( 0.01%) pub fn glue(&self, joint: &Token) -> Option { 143,360 ( 0.00%) let kind = match self.kind { 870 ( 0.00%) Eq => match joint.kind { . Eq => EqEq, . Gt => FatArrow, . _ => return None, . }, . Lt => match joint.kind { . Eq => Le, . Lt => BinOp(Shl), . Le => BinOpEq(Shl), -- line 615 ---------------------------------------- -- line 621 ---------------------------------------- . Gt => BinOp(Shr), . Ge => BinOpEq(Shr), . _ => return None, . }, . Not => match joint.kind { . Eq => Ne, . _ => return None, . }, 54 ( 0.00%) BinOp(op) => match joint.kind { . Eq => BinOpEq(op), . BinOp(And) if op == And => AndAnd, . BinOp(Or) if op == Or => OrOr, 9 ( 0.00%) Gt if op == Minus => RArrow, . _ => return None, . }, 13,662 ( 0.00%) Dot => match joint.kind { . Dot => DotDot, . DotDot => DotDotDot, . _ => return None, . }, 13,662 ( 0.00%) DotDot => match joint.kind { . Dot => DotDotDot, . Eq => DotDotEq, . _ => return None, . }, 9,846 ( 0.00%) Colon => match joint.kind { . Colon => ModSep, . _ => return None, . }, . SingleQuote => match joint.kind { . Ident(name, false) => Lifetime(Symbol::intern(&format!("'{}", name))), . _ => return None, . }, . . Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot . | DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar . | Question | OpenDelim(..) | CloseDelim(..) | Literal(..) | Ident(..) . 
| Lifetime(..) | Interpolated(..) | DocComment(..) | Eof => return None, . }; . 184,782 ( 0.00%) Some(Token::new(kind, self.span.to(joint.span))) 258,048 ( 0.01%) } . } . . impl PartialEq for Token { . fn eq(&self, rhs: &TokenKind) -> bool { 663,798 ( 0.01%) self.kind == *rhs . } . } . 3,199 ( 0.00%) #[derive(Clone, Encodable, Decodable)] . /// For interpolation during macro expansion. . pub enum Nonterminal { . NtItem(P), . NtBlock(P), . NtStmt(ast::Stmt), . NtPat(P), 914 ( 0.00%) NtExpr(P), . NtTy(P), . NtIdent(Ident, /* is_raw */ bool), . NtLifetime(Ident), . NtLiteral(P), . /// Stuff inside brackets for attributes . NtMeta(P), . NtPath(ast::Path), . NtVis(ast::Visibility), -- line 686 ---------------------------------------- -- line 711 ---------------------------------------- . Path, . Vis, . TT, . } . . impl NonterminalKind { . /// The `edition` closure is used to get the edition for the given symbol. Doing . /// `span.edition()` is expensive, so we do it lazily. 192 ( 0.00%) pub fn from_symbol( . symbol: Symbol, . edition: impl FnOnce() -> Edition, . ) -> Option { 432 ( 0.00%) Some(match symbol { . sym::item => NonterminalKind::Item, . sym::block => NonterminalKind::Block, . sym::stmt => NonterminalKind::Stmt, . sym::pat => match edition() { . Edition::Edition2015 | Edition::Edition2018 => { . NonterminalKind::PatParam { inferred: true } . } . Edition::Edition2021 => NonterminalKind::PatWithOr, -- line 731 ---------------------------------------- -- line 737 ---------------------------------------- . sym::lifetime => NonterminalKind::Lifetime, . sym::literal => NonterminalKind::Literal, . sym::meta => NonterminalKind::Meta, . sym::path => NonterminalKind::Path, . sym::vis => NonterminalKind::Vis, . sym::tt => NonterminalKind::TT, . _ => return None, . }) 192 ( 0.00%) } . fn symbol(self) -> Symbol { . match self { . NonterminalKind::Item => sym::item, . NonterminalKind::Block => sym::block, . NonterminalKind::Stmt => sym::stmt, . NonterminalKind::PatParam { inferred: false } => sym::pat_param, . NonterminalKind::PatParam { inferred: true } | NonterminalKind::PatWithOr => sym::pat, . NonterminalKind::Expr => sym::expr, -- line 753 ---------------------------------------- 4,241,571 ( 0.09%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs -------------------------------------------------------------------------------- Ir -- line 91 ---------------------------------------- . // maximum of number bytes needed to fill an 8-byte-sized element on which . // SipHash operates. Note that for variable-sized copies which are known to be . // less than 8 bytes, this function will perform more work than necessary unless . // the compiler is able to optimize the extra work away. . #[inline] . unsafe fn copy_nonoverlapping_small(src: *const u8, dst: *mut u8, count: usize) { . debug_assert!(count <= 8); . 71,212 ( 0.00%) if count == 8 { . ptr::copy_nonoverlapping(src, dst, 8); . return; . } . . let mut i = 0; 80,166 ( 0.00%) if i + 3 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 4); . i += 4; . } . 173,965 ( 0.00%) if i + 1 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 2); 23,383 ( 0.00%) i += 2 . } . 80,166 ( 0.00%) if i < count { 57,246 ( 0.00%) *dst.add(i) = *src.add(i); . i += 1; . } . . debug_assert_eq!(i, count); . } . . // # Implementation . 
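The `copy_nonoverlapping_small` helper annotated just above copies at most eight bytes through a fixed 4/2/1 ladder so that every `ptr::copy_nonoverlapping` call has a compile-time-known size. A safe-Rust sketch of the same branch pattern, using slices instead of raw pointers (hypothetical `copy_small`, not the profiled function itself):

// Copy `count <= 8` bytes from `src` to `dst` with at most three
// fixed-size copies (4 + 2 + 1 bytes), mirroring the ladder above.
fn copy_small(src: &[u8], dst: &mut [u8], count: usize) {
    debug_assert!(count <= 8 && src.len() >= count && dst.len() >= count);
    if count == 8 {
        dst[..8].copy_from_slice(&src[..8]);
        return;
    }
    let mut i = 0;
    if i + 3 < count {
        dst[i..i + 4].copy_from_slice(&src[i..i + 4]);
        i += 4;
    }
    if i + 1 < count {
        dst[i..i + 2].copy_from_slice(&src[i..i + 2]);
        i += 2;
    }
    if i < count {
        dst[i] = src[i];
    }
    // All `count` bytes are now copied; any remaining capacity is untouched.
}

fn main() {
    let src = [1u8, 2, 3, 4, 5, 6, 7];
    let mut dst = [0u8; 8];
    copy_small(&src, &mut dst, 7);
    assert_eq!(&dst[..7], &src[..7]);
}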
// -- line 124 ---------------------------------------- -- line 201 ---------------------------------------- . . hasher . } . . // A specialized write function for values with size <= 8. . #[inline] . fn short_write(&mut self, x: T) { . let size = mem::size_of::(); 869,062 ( 0.02%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . 5,935,675 ( 0.13%) if nbuf + size < BUFFER_SIZE { . unsafe { . // The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . } . 1,897,869 ( 0.04%) self.nbuf = nbuf + size; . . return; . } . 310,463 ( 0.01%) unsafe { self.short_write_process_buffer(x) } . } . . // A specialized write function for values with size <= 8 that should only . // be called when the write would cause the buffer to fill. . // . // SAFETY: the write of `x` into `self.buf` starting at byte offset . // `self.nbuf` must cause `self.buf` to become fully initialized (and not . // overflow) if it wasn't already. . #[inline(never)] 90,792 ( 0.00%) unsafe fn short_write_process_buffer(&mut self, x: T) { . let size = mem::size_of::(); 90,792 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size >= BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . . // Copy first part of input into end of buffer, possibly into spill . // element. The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . . // Process buffer. . for i in 0..BUFFER_CAPACITY { 907,920 ( 0.02%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 726,336 ( 0.02%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 817,128 ( 0.02%) self.state.v0 ^= elem; . } . . // Copy remaining input into start of buffer by copying size - 1 . // elements from spill (at most size - 1 bytes could have overflowed . // into the spill). The memcpy call is optimized away because the size . // is known. And the whole copy is optimized away for size == 1. . let src = self.buf.get_unchecked(BUFFER_SPILL_INDEX) as *const _ as *const u8; . ptr::copy_nonoverlapping(src, self.buf.as_mut_ptr() as *mut u8, size - 1); . . // This function should only be called when the write fills the buffer. . // Therefore, when size == 1, the new `self.nbuf` must be zero. The size . // is statically known, so the branch is optimized away. 427,164 ( 0.01%) self.nbuf = if size == 1 { 0 } else { nbuf + size - BUFFER_SIZE }; 363,168 ( 0.01%) self.processed += BUFFER_SIZE; 181,584 ( 0.00%) } . . // A write function for byte slices. . #[inline] . fn slice_write(&mut self, msg: &[u8]) { . let length = msg.len(); 8,429 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . 221,848 ( 0.00%) if nbuf + length < BUFFER_SIZE { . unsafe { . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . 81,770 ( 0.00%) if length <= 8 { . copy_nonoverlapping_small(msg.as_ptr(), dst, length); . } else { . // This memcpy is *not* optimized away. . ptr::copy_nonoverlapping(msg.as_ptr(), dst, length); . } . } . 40,900 ( 0.00%) self.nbuf = nbuf + length; . . return; . } . 29,781 ( 0.00%) unsafe { self.slice_write_process_buffer(msg) } . } . . // A write function for byte slices that should only be called when the . 
// write would cause the buffer to fill. . // . // SAFETY: `self.buf` must be initialized up to the byte offset `self.nbuf`, . // and `msg` must contain enough bytes to initialize the rest of the element . // containing the byte offset `self.nbuf`. . #[inline(never)] 17,265 ( 0.00%) unsafe fn slice_write_process_buffer(&mut self, msg: &[u8]) { . let length = msg.len(); 3,453 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + length >= BUFFER_SIZE); . . // Always copy first part of input into current element of buffer. . // This function should only be called when the write fills the buffer, . // so we know that there is enough input to fill the current element. 10,359 ( 0.00%) let valid_in_elem = nbuf % ELEM_SIZE; 3,453 ( 0.00%) let needed_in_elem = ELEM_SIZE - valid_in_elem; . . let src = msg.as_ptr(); . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . copy_nonoverlapping_small(src, dst, needed_in_elem); . . // Process buffer. . . // Using `nbuf / ELEM_SIZE + 1` rather than `(nbuf + needed_in_elem) / . // ELEM_SIZE` to show the compiler that this loop's upper bound is > 0. . // We know that is true, because last step ensured we have a full . // element in the buffer. 6,906 ( 0.00%) let last = nbuf / ELEM_SIZE + 1; . . for i in 0..last { 26,342 ( 0.00%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 29,795 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 52,684 ( 0.00%) self.state.v0 ^= elem; . } . . // Process the remaining element-sized chunks of input. . let mut processed = needed_in_elem; 6,906 ( 0.00%) let input_left = length - processed; 2,685 ( 0.00%) let elems_left = input_left / ELEM_SIZE; . let extra_bytes_left = input_left % ELEM_SIZE; . . for _ in 0..elems_left { 2,292 ( 0.00%) let elem = (msg.as_ptr().add(processed) as *const u64).read_unaligned().to_le(); 2,292 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 2,292 ( 0.00%) self.state.v0 ^= elem; 4,584 ( 0.00%) processed += ELEM_SIZE; . } . . // Copy remaining input into start of buffer. . let src = msg.as_ptr().add(processed); . let dst = self.buf.as_mut_ptr() as *mut u8; . copy_nonoverlapping_small(src, dst, extra_bytes_left); . 3,453 ( 0.00%) self.nbuf = extra_bytes_left; 17,265 ( 0.00%) self.processed += nbuf + processed; 20,718 ( 0.00%) } . . #[inline] . pub fn finish128(mut self) -> (u64, u64) { . debug_assert!(self.nbuf < BUFFER_SIZE); . . // Process full elements in buffer. 24,909 ( 0.00%) let last = self.nbuf / ELEM_SIZE; . . // Since we're consuming self, avoid updating members for a potential . // performance gain. 33,212 ( 0.00%) let mut state = self.state; . . for i in 0..last { 16,805 ( 0.00%) let elem = unsafe { self.buf.get_unchecked(i).assume_init().to_le() }; 16,805 ( 0.00%) state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut state); 16,805 ( 0.00%) state.v0 ^= elem; . } . . // Get remaining partial element. 16,606 ( 0.00%) let elem = if self.nbuf % ELEM_SIZE != 0 { . unsafe { . // Ensure element is initialized by writing zero bytes. At most . // `ELEM_SIZE - 1` are required given the above check. It's safe . // to write this many because we have the spill and we maintain . // `self.nbuf` such that this write will start before the spill. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(self.nbuf); . ptr::write_bytes(dst, 0, ELEM_SIZE - 1); 7,208 ( 0.00%) self.buf.get_unchecked(last).assume_init().to_le() . } . } else { . 0 . }; . . // Finalize the hash. 
23,637 ( 0.00%) let length = self.processed + self.nbuf; 16,250 ( 0.00%) let b: u64 = ((length as u64 & 0xff) << 56) | elem; . 8,125 ( 0.00%) state.v3 ^= b; . Sip24Rounds::c_rounds(&mut state); 8,125 ( 0.00%) state.v0 ^= b; . 8,125 ( 0.00%) state.v2 ^= 0xee; . Sip24Rounds::d_rounds(&mut state); 20,080 ( 0.00%) let _0 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . 3,828 ( 0.00%) state.v1 ^= 0xdd; . Sip24Rounds::d_rounds(&mut state); 3,828 ( 0.00%) let _1 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . . (_0, _1) . } . } . . impl Hasher for SipHasher128 { . #[inline] . fn write_u8(&mut self, i: u8) { -- line 414 ---------------------------------------- -- line 471 ---------------------------------------- . } . . #[derive(Debug, Clone, Default)] . struct Sip24Rounds; . . impl Sip24Rounds { . #[inline] . fn c_rounds(state: &mut State) { 3,220,751 ( 0.07%) compress!(state); 3,471,002 ( 0.08%) compress!(state); . } . . #[inline] . fn d_rounds(state: &mut State) { 40,156 ( 0.00%) compress!(state); 40,156 ( 0.00%) compress!(state); 40,156 ( 0.00%) compress!(state); 32,032 ( 0.00%) compress!(state); . } . } 639,839 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/tokenstream.rs -------------------------------------------------------------------------------- Ir -- line 32 ---------------------------------------- . /// . /// If the syntax extension is an MBE macro, it will attempt to match its . /// LHS token tree against the provided token tree, and if it finds a . /// match, will transcribe the RHS token tree, splicing in any captured . /// `macro_parser::matched_nonterminals` into the `SubstNt`s it finds. . /// . /// The RHS of an MBE macro is the only place `SubstNt`s are substituted. . /// Nothing special happens to misnamed or misplaced `SubstNt`s. 1,760,326 ( 0.04%) #[derive(Debug, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)] . pub enum TokenTree { . /// A single token. 679,676 ( 0.01%) Token(Token), . /// A delimited sequence of token trees. 44,248 ( 0.00%) Delimited(DelimSpan, DelimToken, TokenStream), . } . . #[derive(Copy, Clone)] . pub enum CanSynthesizeMissingTokens { . Yes, . No, . } . -- line 53 ---------------------------------------- -- line 71 ---------------------------------------- . delim == delim2 && tts.eq_unspanned(&tts2) . } . _ => false, . } . } . . /// Retrieves the `TokenTree`'s span. . pub fn span(&self) -> Span { 36,594 ( 0.00%) match self { 13,786 ( 0.00%) TokenTree::Token(token) => token.span, 18,264 ( 0.00%) TokenTree::Delimited(sp, ..) => sp.entire(), . } 13,731 ( 0.00%) } . . /// Modify the `TokenTree`'s span in-place. . pub fn set_span(&mut self, span: Span) { 36,594 ( 0.00%) match self { 13,731 ( 0.00%) TokenTree::Token(token) => token.span = span, 9,132 ( 0.00%) TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span), . } 18,297 ( 0.00%) } . 4,110 ( 0.00%) pub fn token(kind: TokenKind, span: Span) -> TokenTree { 149,508 ( 0.00%) TokenTree::Token(Token::new(kind, span)) 4,110 ( 0.00%) } . . /// Returns the opening delimiter as a token tree. 16,647 ( 0.00%) pub fn open_tt(span: DelimSpan, delim: DelimToken) -> TokenTree { 49,941 ( 0.00%) TokenTree::token(token::OpenDelim(delim), span.open) 16,647 ( 0.00%) } . . /// Returns the closing delimiter as a token tree. 
16,620 ( 0.00%) pub fn close_tt(span: DelimSpan, delim: DelimToken) -> TokenTree { 49,860 ( 0.00%) TokenTree::token(token::CloseDelim(delim), span.close) 16,620 ( 0.00%) } . 792 ( 0.00%) pub fn uninterpolate(self) -> TokenTree { 528 ( 0.00%) match self { 2,376 ( 0.00%) TokenTree::Token(token) => TokenTree::Token(token.uninterpolate().into_owned()), . tt => tt, . } 1,056 ( 0.00%) } . } . . impl HashStable for TokenStream . where . CTX: crate::HashStableContext, . { 531 ( 0.00%) fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { 1,352 ( 0.00%) for sub_tt in self.trees() { . sub_tt.hash_stable(hcx, hasher); . } 472 ( 0.00%) } . } . . pub trait CreateTokenStream: sync::Send + sync::Sync { . fn create_token_stream(&self) -> AttrAnnotatedTokenStream; . } . . impl CreateTokenStream for AttrAnnotatedTokenStream { . fn create_token_stream(&self) -> AttrAnnotatedTokenStream { -- line 131 ---------------------------------------- -- line 140 ---------------------------------------- . pub struct LazyTokenStream(Lrc>); . . impl LazyTokenStream { . pub fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream { . LazyTokenStream(Lrc::new(Box::new(inner))) . } . . pub fn create_token_stream(&self) -> AttrAnnotatedTokenStream { 1,828 ( 0.00%) self.0.create_token_stream() . } . } . . impl fmt::Debug for LazyTokenStream { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(f, "LazyTokenStream({:?})", self.create_token_stream()) . } . } -- line 156 ---------------------------------------- -- line 188 ---------------------------------------- . Delimited(DelimSpan, DelimToken, AttrAnnotatedTokenStream), . /// Stores the attributes for an attribute target, . /// along with the tokens for that attribute target. . /// See `AttributesData` for more information . Attributes(AttributesData), . } . . impl AttrAnnotatedTokenStream { 42,592 ( 0.00%) pub fn new(tokens: Vec<(AttrAnnotatedTokenTree, Spacing)>) -> AttrAnnotatedTokenStream { . AttrAnnotatedTokenStream(Lrc::new(tokens)) 42,592 ( 0.00%) } . . /// Converts this `AttrAnnotatedTokenStream` to a plain `TokenStream . /// During conversion, `AttrAnnotatedTokenTree::Attributes` get 'flattened' . /// back to a `TokenStream` of the form `outer_attr attr_target`. . /// If there are inner attributes, they are inserted into the proper . /// place in the attribute target tokens. 457 ( 0.00%) pub fn to_tokenstream(&self) -> TokenStream { 457 ( 0.00%) let trees: Vec<_> = self . .0 . .iter() 1,371 ( 0.00%) .flat_map(|tree| match &tree.0 { 914 ( 0.00%) AttrAnnotatedTokenTree::Token(inner) => { 2,285 ( 0.00%) smallvec![(TokenTree::Token(inner.clone()), tree.1)].into_iter() . } . AttrAnnotatedTokenTree::Delimited(span, delim, stream) => smallvec![( . TokenTree::Delimited(*span, *delim, stream.to_tokenstream()), . tree.1, . )] . .into_iter(), . AttrAnnotatedTokenTree::Attributes(data) => { . let mut outer_attrs = Vec::new(); -- line 219 ---------------------------------------- -- line 279 ---------------------------------------- . flat.extend(attr.tokens().to_tokenstream().0.clone().iter().cloned()); . } . flat.extend(target_tokens); . flat.into_iter() . } . }) . .collect(); . TokenStream::new(trees) 914 ( 0.00%) } . } . . /// Stores the tokens for an attribute target, along . /// with its attributes. . /// . /// This is constructed during parsing when we need to capture . /// tokens. . /// -- line 295 ---------------------------------------- -- line 316 ---------------------------------------- . pub struct TokenStream(pub(crate) Lrc>); . 
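The `TokenStream` definition just above wraps its trees in an `Lrc` (rustc's `Rc`/`Arc` alias), so cloning a stream bumps a reference count instead of deep-copying the trees, and the handle itself stays pointer-sized, which the `static_assert_size!(TokenStream, 8)` below checks. A minimal sketch of the same shared-ownership pattern with a hypothetical `MiniStream` built on `std::sync::Arc`:

use std::sync::Arc;

// Hypothetical stream handle: the items live behind an `Arc`, so the
// handle is one pointer wide and `clone()` is O(1).
#[derive(Clone)]
struct MiniStream(Arc<Vec<u32>>);

impl MiniStream {
    fn new(items: Vec<u32>) -> Self {
        MiniStream(Arc::new(items))
    }
    fn len(&self) -> usize {
        self.0.len()
    }
}

fn main() {
    let a = MiniStream::new(vec![1, 2, 3]);
    let b = a.clone(); // refcount bump, no copy of the items
    assert_eq!(a.len(), b.len());
    assert_eq!(std::mem::size_of::<MiniStream>(), std::mem::size_of::<usize>());
}

The trade-off is that mutation needs either exclusive ownership or a fresh vector, which is why `from_streams` (annotated below) extends the first stream in place rather than rebuilding everything.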
. pub type TreeAndSpacing = (TokenTree, Spacing); . . // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger. . #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] . rustc_data_structures::static_assert_size!(TokenStream, 8); . 179,503 ( 0.00%) #[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable)] . pub enum Spacing { . Alone, . Joint, . } . . impl TokenStream { . /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream` . /// separating the two arguments with a comma for diagnostic suggestions. -- line 332 ---------------------------------------- -- line 371 ---------------------------------------- . impl From<(AttrAnnotatedTokenTree, Spacing)> for AttrAnnotatedTokenStream { . fn from((tree, spacing): (AttrAnnotatedTokenTree, Spacing)) -> AttrAnnotatedTokenStream { . AttrAnnotatedTokenStream::new(vec![(tree, spacing)]) . } . } . . impl From for TokenStream { . fn from(tree: TokenTree) -> TokenStream { 4,770 ( 0.00%) TokenStream::new(vec![(tree, Spacing::Alone)]) . } . } . . impl From for TreeAndSpacing { . fn from(tree: TokenTree) -> TreeAndSpacing { 107,818 ( 0.00%) (tree, Spacing::Alone) . } . } . . impl iter::FromIterator for TokenStream { . fn from_iter>(iter: I) -> Self { 918 ( 0.00%) TokenStream::new(iter.into_iter().map(Into::into).collect::>()) . } . } . . impl Eq for TokenStream {} . . impl PartialEq for TokenStream { . fn eq(&self, other: &TokenStream) -> bool { . self.trees().eq(other.trees()) -- line 399 ---------------------------------------- -- line 401 ---------------------------------------- . } . . impl TokenStream { . pub fn new(streams: Vec) -> TokenStream { . TokenStream(Lrc::new(streams)) . } . . pub fn is_empty(&self) -> bool { 9,635 ( 0.00%) self.0.is_empty() 9,635 ( 0.00%) } . . pub fn len(&self) -> usize { 222,265 ( 0.00%) self.0.len() 5,480 ( 0.00%) } . . pub fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream { . match streams.len() { . 0 => TokenStream::default(), . 1 => streams.pop().unwrap(), . _ => { . // We are going to extend the first stream in `streams` with . // the elements from the subsequent streams. This requires -- line 422 ---------------------------------------- -- line 448 ---------------------------------------- . . // Create the final `TokenStream`. . TokenStream(first_stream_lrc) . } . } . } . . pub fn trees(&self) -> Cursor { 1,133 ( 0.00%) self.clone().into_trees() 2,266 ( 0.00%) } . 27,606 ( 0.00%) pub fn into_trees(self) -> Cursor { . Cursor::new(self) 55,212 ( 0.00%) } . . /// Compares two `TokenStream`s, checking equality without regarding span information. . pub fn eq_unspanned(&self, other: &TokenStream) -> bool { . let mut t1 = self.trees(); . let mut t2 = other.trees(); . for (t1, t2) in iter::zip(&mut t1, &mut t2) { . if !t1.eq_unspanned(&t2) { . return false; . } . } . t1.next().is_none() && t2.next().is_none() . } . . pub fn map_enumerated TokenTree>(self, mut f: F) -> TokenStream { 4,563 ( 0.00%) TokenStream(Lrc::new( . self.0 . .iter() . .enumerate() 109,782 ( 0.00%) .map(|(i, (tree, is_joint))| (f(i, tree), *is_joint)) . .collect(), . )) . } . } . . // 99.5%+ of the time we have 1 or 2 elements in this vector. . #[derive(Clone)] . pub struct TokenStreamBuilder(SmallVec<[TokenStream; 2]>); -- line 488 ---------------------------------------- -- line 572 ---------------------------------------- . pub struct Cursor { . pub stream: TokenStream, . index: usize, . } . . impl Iterator for Cursor { . type Item = TokenTree; . 
21,978 ( 0.00%) fn next(&mut self) -> Option { 38,245 ( 0.00%) self.next_with_spacing().map(|(tree, _)| tree) 29,304 ( 0.00%) } . } . . impl Cursor { . fn new(stream: TokenStream) -> Self { . Cursor { stream, index: 0 } . } . 1,019,895 ( 0.02%) pub fn next_with_spacing(&mut self) -> Option { 633,915 ( 0.01%) if self.index < self.stream.len() { 332,832 ( 0.01%) self.index += 1; 1,124,389 ( 0.02%) Some(self.stream.0[self.index - 1].clone()) . } else { 86,704 ( 0.00%) None . } 1,223,874 ( 0.03%) } . . pub fn index(&self) -> usize { . self.index . } . . pub fn append(&mut self, new_stream: TokenStream) { . if new_stream.is_empty() { . return; . } . let index = self.index; . let stream = mem::take(&mut self.stream); . *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees(); . self.index = index; . } . 107,920 ( 0.00%) pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> { 215,840 ( 0.00%) self.stream.0[self.index..].get(n).map(|(tree, _)| tree) 215,840 ( 0.00%) } . } . 88,589 ( 0.00%) #[derive(Debug, Copy, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)] . pub struct DelimSpan { . pub open: Span, . pub close: Span, . } . . impl DelimSpan { 916 ( 0.00%) pub fn from_single(sp: Span) -> Self { . DelimSpan { open: sp, close: sp } 1,832 ( 0.00%) } . 19,270 ( 0.00%) pub fn from_pair(open: Span, close: Span) -> Self { . DelimSpan { open, close } 9,635 ( 0.00%) } . . pub fn dummy() -> Self { . Self::from_single(DUMMY_SP) 42,699 ( 0.00%) } . 149,504 ( 0.00%) pub fn entire(self) -> Span { . self.open.with_hi(self.close.hi()) 112,128 ( 0.00%) } . } 62,232 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/mod.rs -------------------------------------------------------------------------------- Ir -- line 26 ---------------------------------------- . pub struct UnmatchedBrace { . pub expected_delim: token::DelimToken, . pub found_delim: Option, . pub found_span: Span, . pub unclosed_span: Option, . pub candidate_span: Option, . } . 126 ( 0.00%) crate fn parse_token_trees<'a>( . sess: &'a ParseSess, . src: &'a str, . start_pos: BytePos, . override_span: Option, . ) -> (PResult<'a, TokenStream>, Vec) { . StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span } . .into_token_trees() 81 ( 0.00%) } . . struct StringReader<'a> { . sess: &'a ParseSess, . /// Initial position, read-only. . start_pos: BytePos, . /// The absolute offset within the source_map of the current character. . pos: BytePos, . /// Stop reading src at this index. -- line 50 ---------------------------------------- -- line 51 ---------------------------------------- . end_src_index: usize, . /// Source text to tokenize. . src: &'a str, . override_span: Option, . } . . impl<'a> StringReader<'a> { . fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { 97,912 ( 0.00%) self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi)) . } . . /// Returns the next token, and info about preceding whitespace, if any. 578,802 ( 0.01%) fn next_token(&mut self) -> (Spacing, Token) { . let mut spacing = Spacing::Joint; . . // Skip `#!` at the start of the file 165,372 ( 0.00%) let start_src_index = self.src_index(self.pos); 248,058 ( 0.01%) let text: &str = &self.src[start_src_index..self.end_src_index]; . 
let is_beginning_of_file = self.pos == self.start_pos; 82,686 ( 0.00%) if is_beginning_of_file { 36 ( 0.00%) if let Some(shebang_len) = rustc_lexer::strip_shebang(text) { . self.pos = self.pos + BytePos::from_usize(shebang_len); . spacing = Spacing::Alone; . } . } . . // Skip trivial (whitespace & comments) tokens . loop { 105,134 ( 0.00%) let start_src_index = self.src_index(self.pos); 281,730 ( 0.01%) let text: &str = &self.src[start_src_index..self.end_src_index]; . 93,910 ( 0.00%) if text.is_empty() { . let span = self.mk_sp(self.pos, self.pos); 45 ( 0.00%) return (spacing, Token::new(token::Eof, span)); . } . 281,703 ( 0.01%) let token = rustc_lexer::first_token(text); . 187,802 ( 0.00%) let start = self.pos; 93,901 ( 0.00%) self.pos = self.pos + BytePos::from_usize(token.len); . . debug!("next_token: {:?}({:?})", token.kind, self.str_from(start)); . 753,172 ( 0.02%) match self.cook_lexer_token(token.kind, start) { 661,416 ( 0.01%) Some(kind) => { . let span = self.mk_sp(start, self.pos); 496,062 ( 0.01%) return (spacing, Token::new(kind, span)); . } . None => spacing = Spacing::Alone, . } . } 744,174 ( 0.02%) } . . /// Report a fatal lexical error with a given span. . fn fatal_span(&self, sp: Span, m: &str) -> FatalError { . self.sess.span_diagnostic.span_fatal(sp, m) . } . . /// Report a lexical error with a given span. . fn err_span(&self, sp: Span, m: &str) { -- line 110 ---------------------------------------- -- line 130 ---------------------------------------- . ) -> DiagnosticBuilder<'a> { . self.sess . .span_diagnostic . .struct_span_fatal(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c))) . } . . /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly . /// complain about it. 539 ( 0.00%) fn lint_unicode_text_flow(&self, start: BytePos) { . // Opening delimiter of the length 2 is not included into the comment text. . let content_start = start + BytePos(2); . let content = self.str_from(content_start); . if contains_text_flow_control_chars(content) { . let span = self.mk_sp(start, self.pos); . self.sess.buffer_lint_with_diagnostic( . &TEXT_DIRECTION_CODEPOINT_IN_COMMENT, . span, . ast::CRATE_NODE_ID, . "unicode codepoint changing visible direction of text present in comment", . BuiltinLintDiagnostics::UnicodeTextFlow(span, content.to_string()), . ); . } 616 ( 0.00%) } . . /// Turns simple `rustc_lexer::TokenKind` enum into a rich . /// `rustc_ast::TokenKind`. This turns strings into interned . /// symbols and runs additional validation. . fn cook_lexer_token(&self, token: rustc_lexer::TokenKind, start: BytePos) -> Option { 469,505 ( 0.01%) Some(match token { 568 ( 0.00%) rustc_lexer::TokenKind::LineComment { doc_style } => { . // Skip non-doc comments 2,609 ( 0.00%) let doc_style = if let Some(doc_style) = doc_style { . doc_style . } else { . self.lint_unicode_text_flow(start); . return None; . }; . . // Opening delimiter of the length 3 is not included into the symbol. . let content_start = start + BytePos(3); . let content = self.str_from(content_start); 2,455 ( 0.00%) self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style) . } . rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => { . if !terminated { . let msg = match doc_style { . Some(_) => "unterminated block doc-comment", . None => "unterminated block comment", . }; . let last_bpos = self.pos; -- line 179 ---------------------------------------- -- line 198 ---------------------------------------- . 
let content_end = self.pos - BytePos(if terminated { 2 } else { 0 }); . let content = self.str_from_to(content_start, content_end); . self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style) . } . rustc_lexer::TokenKind::Whitespace => return None, . rustc_lexer::TokenKind::Ident . | rustc_lexer::TokenKind::RawIdent . | rustc_lexer::TokenKind::UnknownPrefix => { 60,868 ( 0.00%) let is_raw_ident = token == rustc_lexer::TokenKind::RawIdent; 76,085 ( 0.00%) let is_unknown_prefix = token == rustc_lexer::TokenKind::UnknownPrefix; . let mut ident_start = start; 30,434 ( 0.00%) if is_raw_ident { . ident_start = ident_start + BytePos(2); . } 45,651 ( 0.00%) if is_unknown_prefix { . self.report_unknown_prefix(start); . } 76,085 ( 0.00%) let sym = nfc_normalize(self.str_from(ident_start)); . let span = self.mk_sp(start, self.pos); 60,868 ( 0.00%) self.sess.symbol_gallery.insert(sym, span); 30,434 ( 0.00%) if is_raw_ident { . if !sym.can_be_raw() { . self.err_span(span, &format!("`{}` cannot be a raw identifier", sym)); . } . self.sess.raw_identifier_spans.borrow_mut().push(span); . } 106,519 ( 0.00%) token::Ident(sym, is_raw_ident) . } . rustc_lexer::TokenKind::InvalidIdent . // Do not recover an identifier with emoji if the codepoint is a confusable . // with a recoverable substitution token, like `➖`. . if UNICODE_ARRAY . .iter() . .find(|&&(c, _, _)| { . let sym = self.str_from(start); -- line 232 ---------------------------------------- -- line 234 ---------------------------------------- . }) . .is_none() => . { . let sym = nfc_normalize(self.str_from(start)); . let span = self.mk_sp(start, self.pos); . self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default().push(span); . token::Ident(sym, false) . } 149,600 ( 0.00%) rustc_lexer::TokenKind::Literal { kind, suffix_start } => { . let suffix_start = start + BytePos(suffix_start as u32); . let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind); 28,050 ( 0.00%) let suffix = if suffix_start < self.pos { . let string = self.str_from(suffix_start); . if string == "_" { . self.sess . .span_diagnostic . .struct_span_warn( . self.mk_sp(suffix_start, self.pos), . "underscore literal suffix is not allowed", . ) -- line 253 ---------------------------------------- -- line 264 ---------------------------------------- . .emit(); . None . } else { . Some(Symbol::intern(string)) . } . } else { . None . }; 74,800 ( 0.00%) token::Literal(token::Lit { kind, symbol, suffix }) . } . rustc_lexer::TokenKind::Lifetime { starts_with_number } => { . // Include the leading `'` in the real identifier, for macro . // expansion purposes. See #12512 for the gory details of why . // this is necessary. . let lifetime_name = self.str_from(start); . if starts_with_number { . self.err_span_(start, self.pos, "lifetimes cannot start with a number"); -- line 280 ---------------------------------------- -- line 324 ---------------------------------------- . err.help("source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used"); . } . err.emit(); . token? . } . }) . } . 5,892 ( 0.00%) fn cook_doc_comment( . &self, . content_start: BytePos, . content: &str, . comment_kind: CommentKind, . doc_style: DocStyle, . ) -> TokenKind { 491 ( 0.00%) if content.contains('\r') { . for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') { . self.err_span_( . content_start + BytePos(idx as u32), . content_start + BytePos(idx as u32 + 1), . match comment_kind { . 
CommentKind::Line => "bare CR not allowed in doc-comment", . CommentKind::Block => "bare CR not allowed in block doc-comment", . }, -- line 347 ---------------------------------------- -- line 349 ---------------------------------------- . } . } . . let attr_style = match doc_style { . DocStyle::Outer => AttrStyle::Outer, . DocStyle::Inner => AttrStyle::Inner, . }; . 1,473 ( 0.00%) token::DocComment(comment_kind, attr_style, Symbol::intern(content)) 7,365 ( 0.00%) } . . fn cook_lexer_literal( . &self, . start: BytePos, . suffix_start: BytePos, . kind: rustc_lexer::LiteralKind, . ) -> (token::LitKind, Symbol) { . // prefix means `"` or `br"` or `r###"`, ... . let (lit_kind, mode, prefix_len, postfix_len) = match kind { 9,267 ( 0.00%) rustc_lexer::LiteralKind::Char { terminated } => { 9,267 ( 0.00%) if !terminated { . self.sess.span_diagnostic.span_fatal_with_code( . self.mk_sp(start, suffix_start), . "unterminated character literal", . error_code!(E0762), . ) . } . (token::Char, Mode::Char, 1, 1) // ' ' . } -- line 377 ---------------------------------------- -- line 380 ---------------------------------------- . self.sess.span_diagnostic.span_fatal_with_code( . self.mk_sp(start + BytePos(1), suffix_start), . "unterminated byte constant", . error_code!(E0763), . ) . } . (token::Byte, Mode::Byte, 2, 1) // b' ' . } 80 ( 0.00%) rustc_lexer::LiteralKind::Str { terminated } => { 80 ( 0.00%) if !terminated { . self.sess.span_diagnostic.span_fatal_with_code( . self.mk_sp(start, suffix_start), . "unterminated double quote string", . error_code!(E0765), . ) . } . (token::Str, Mode::Str, 1, 1) // " " . } -- line 397 ---------------------------------------- -- line 410 ---------------------------------------- . let n = u32::from(n_hashes); . (token::StrRaw(n_hashes), Mode::RawStr, 2 + n, 1 + n) // r##" "## . } . rustc_lexer::LiteralKind::RawByteStr { n_hashes, err } => { . self.report_raw_str_error(start, err); . let n = u32::from(n_hashes); . (token::ByteStrRaw(n_hashes), Mode::RawByteStr, 3 + n, 1 + n) // br##" "## . } 6 ( 0.00%) rustc_lexer::LiteralKind::Int { base, empty_int } => { 3 ( 0.00%) return if empty_int { . self.sess . .span_diagnostic . .struct_span_err_with_code( . self.mk_sp(start, suffix_start), . "no valid digits found for number", . error_code!(E0768), . ) . .emit(); . (token::Integer, sym::integer(0)) . } else { . self.validate_int_literal(base, start, suffix_start); 9 ( 0.00%) (token::Integer, self.symbol_from_to(start, suffix_start)) . }; . } . rustc_lexer::LiteralKind::Float { base, empty_exponent } => { . if empty_exponent { . self.err_span_(start, self.pos, "expected at least one digit in exponent"); . } . . match base { -- line 439 ---------------------------------------- -- line 452 ---------------------------------------- . } . . let id = self.symbol_from_to(start, suffix_start); . return (token::Float, id); . } . }; . let content_start = start + BytePos(prefix_len); . let content_end = suffix_start - BytePos(postfix_len); 28,041 ( 0.00%) let id = self.symbol_from_to(content_start, content_end); . self.validate_literal_escape(mode, content_start, content_end, prefix_len, postfix_len); . (lit_kind, id) . } . . #[inline] . fn src_index(&self, pos: BytePos) -> usize { . (pos - self.start_pos).to_usize() . } . . /// Slice of the source text from `start` up to but excluding `self.pos`, . /// meaning the slice does not include the character `self.ch`. . fn str_from(&self, start: BytePos) -> &str { 95,663 ( 0.00%) self.str_from_to(start, self.pos) . } . . 
/// As symbol_from, with an explicit endpoint. . fn symbol_from_to(&self, start: BytePos, end: BytePos) -> Symbol { . debug!("taking an ident from {:?} to {:?}", start, end); 112,185 ( 0.00%) Symbol::intern(self.str_from_to(start, end)) . } . . /// Slice of the source text spanning from `start` up to but excluding `end`. 34,482 ( 0.00%) fn str_from_to(&self, start: BytePos, end: BytePos) -> &str { . &self.src[self.src_index(start)..self.src_index(end)] 137,928 ( 0.00%) } . . fn report_raw_str_error(&self, start: BytePos, opt_err: Option) { . match opt_err { . Some(RawStrError::InvalidStarter { bad_char }) => { . self.report_non_started_raw_string(start, bad_char) . } . Some(RawStrError::NoTerminator { expected, found, possible_terminator_offset }) => self . .report_unterminated_raw_string(start, expected, possible_terminator_offset, found), -- line 493 ---------------------------------------- -- line 609 ---------------------------------------- . fn validate_literal_escape( . &self, . mode: Mode, . content_start: BytePos, . content_end: BytePos, . prefix_len: u32, . postfix_len: u32, . ) { 56,082 ( 0.00%) let lit_content = self.str_from_to(content_start, content_end); 288,138 ( 0.01%) unescape::unescape_literal(lit_content, mode, &mut |range, result| { . // Here we only check for errors. The actual unescaping is done later. 20,626 ( 0.00%) if let Err(err) = result { . let span_with_quotes = self . .mk_sp(content_start - BytePos(prefix_len), content_end + BytePos(postfix_len)); . let (start, end) = (range.start as u32, range.end as u32); . let lo = content_start + BytePos(start); . let hi = lo + BytePos(end - start); . let span = self.mk_sp(lo, hi); . emit_unescape_error( . &self.sess.span_diagnostic, -- line 628 ---------------------------------------- -- line 629 ---------------------------------------- . lit_content, . span_with_quotes, . span, . mode, . range, . err, . ); . } 92,817 ( 0.00%) }); . } . . fn validate_int_literal(&self, base: Base, content_start: BytePos, content_end: BytePos) { 12 ( 0.00%) let base = match base { . Base::Binary => 2, . Base::Octal => 8, . _ => return, . }; . let s = self.str_from_to(content_start + BytePos(2), content_end); . for (idx, c) in s.char_indices() { . let idx = idx as u32; . if c != '_' && c.to_digit(base).is_none() { -- line 649 ---------------------------------------- -- line 650 ---------------------------------------- . let lo = content_start + BytePos(2 + idx); . let hi = content_start + BytePos(2 + idx + c.len_utf8() as u32); . self.err_span_(lo, hi, &format!("invalid digit for a base {} literal", base)); . } . } . } . } . 136,953 ( 0.00%) pub fn nfc_normalize(string: &str) -> Symbol { . use unicode_normalization::{is_nfc_quick, IsNormalized, UnicodeNormalization}; 30,434 ( 0.00%) match is_nfc_quick(string.chars()) { 152,170 ( 0.00%) IsNormalized::Yes => Symbol::intern(string), . _ => { . let normalized_str: String = string.chars().nfc().collect(); . Symbol::intern(&normalized_str) . } . } . } 429,627 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/operand.rs -------------------------------------------------------------------------------- Ir -- line 41 ---------------------------------------- . fn from(val: ScalarMaybeUninit) -> Self { . Immediate::Scalar(val) . } . } . . impl From> for Immediate { . #[inline(always)] . 
fn from(val: Scalar) -> Self { 3 ( 0.00%) Immediate::Scalar(val.into()) . } . } . . impl<'tcx, Tag: Provenance> Immediate { . pub fn from_pointer(p: Pointer, cx: &impl HasDataLayout) -> Self { . Immediate::Scalar(ScalarMaybeUninit::from_pointer(p, cx)) . } . . pub fn from_maybe_pointer(p: Pointer>, cx: &impl HasDataLayout) -> Self { . Immediate::Scalar(ScalarMaybeUninit::from_maybe_pointer(p, cx)) . } . 3,092 ( 0.00%) pub fn new_slice(val: Scalar, len: u64, cx: &impl HasDataLayout) -> Self { 7,011 ( 0.00%) Immediate::ScalarPair(val.into(), Scalar::from_machine_usize(len, cx).into()) 4,638 ( 0.00%) } . . pub fn new_dyn_trait( . val: Scalar, . vtable: Pointer>, . cx: &impl HasDataLayout, . ) -> Self { . Immediate::ScalarPair(val.into(), ScalarMaybeUninit::from_maybe_pointer(vtable, cx)) . } . . #[inline] . pub fn to_scalar_or_uninit(self) -> ScalarMaybeUninit { 147,924 ( 0.00%) match self { 221,288 ( 0.00%) Immediate::Scalar(val) => val, . Immediate::ScalarPair(..) => bug!("Got a scalar pair where a scalar was expected"), . } . } . . #[inline] . pub fn to_scalar(self) -> InterpResult<'tcx, Scalar> { . self.to_scalar_or_uninit().check_init() . } -- line 85 ---------------------------------------- -- line 159 ---------------------------------------- . fn deref(&self) -> &Immediate { . &self.imm . } . } . . /// An `Operand` is the result of computing a `mir::Operand`. It can be immediate, . /// or still in memory. The latter is an optimization, to delay reading that chunk of . /// memory and to avoid having to store arbitrary-sized data here. 66 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, HashStable, Hash, Debug)] . pub enum Operand { . Immediate(Immediate), . Indirect(MemPlace), . } . . #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] . pub struct OpTy<'tcx, Tag: Provenance = AllocId> { . op: Operand, // Keep this private; it helps enforce invariants. -- line 175 ---------------------------------------- -- line 185 ---------------------------------------- . fn deref(&self) -> &Operand { . &self.op . } . } . . impl<'tcx, Tag: Provenance> From> for OpTy<'tcx, Tag> { . #[inline(always)] . fn from(mplace: MPlaceTy<'tcx, Tag>) -> Self { 594 ( 0.00%) OpTy { op: Operand::Indirect(*mplace), layout: mplace.layout } . } . } . . impl<'tcx, Tag: Provenance> From<&'_ MPlaceTy<'tcx, Tag>> for OpTy<'tcx, Tag> { . #[inline(always)] . fn from(mplace: &MPlaceTy<'tcx, Tag>) -> Self { . OpTy { op: Operand::Indirect(**mplace), layout: mplace.layout } . } -- line 201 ---------------------------------------- -- line 206 ---------------------------------------- . fn from(val: ImmTy<'tcx, Tag>) -> Self { . OpTy { op: Operand::Immediate(val.imm), layout: val.layout } . } . } . . impl<'tcx, Tag: Provenance> ImmTy<'tcx, Tag> { . #[inline] . pub fn from_scalar(val: Scalar, layout: TyAndLayout<'tcx>) -> Self { 45 ( 0.00%) ImmTy { imm: val.into(), layout } . } . . #[inline] . pub fn from_immediate(imm: Immediate, layout: TyAndLayout<'tcx>) -> Self { . ImmTy { imm, layout } . } . . #[inline] . pub fn try_from_uint(i: impl Into, layout: TyAndLayout<'tcx>) -> Option { . Some(Self::from_scalar(Scalar::try_from_uint(i, layout.size)?, layout)) . } . #[inline] . pub fn from_uint(i: impl Into, layout: TyAndLayout<'tcx>) -> Self { 3 ( 0.00%) Self::from_scalar(Scalar::from_uint(i, layout.size), layout) . } . . #[inline] . pub fn try_from_int(i: impl Into, layout: TyAndLayout<'tcx>) -> Option { . Some(Self::from_scalar(Scalar::try_from_int(i, layout.size)?, layout)) . } . . 
#[inline] -- line 236 ---------------------------------------- -- line 248 ---------------------------------------- . . impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { . /// Try reading an immediate in memory; this is interesting particularly for `ScalarPair`. . /// Returns `None` if the layout does not permit loading this as a value. . fn try_read_immediate_from_mplace( . &self, . mplace: &MPlaceTy<'tcx, M::PointerTag>, . ) -> InterpResult<'tcx, Option>> { 96,704 ( 0.00%) if mplace.layout.is_unsized() { . // Don't touch unsized . return Ok(None); . } . 262,734 ( 0.01%) let alloc = match self.get_alloc(mplace)? { 437,880 ( 0.01%) Some(ptr) => ptr, . None => { . return Ok(Some(ImmTy { . // zero-sized type 18 ( 0.00%) imm: Scalar::ZST.into(), . layout: mplace.layout, . })); . } . }; . 691,436 ( 0.02%) match mplace.layout.abi { . Abi::Scalar(..) => { 78,429 ( 0.00%) let scalar = alloc.read_scalar(alloc_range(Size::ZERO, mplace.layout.size))?; . Ok(Some(ImmTy { imm: scalar.into(), layout: mplace.layout })) . } 13,758 ( 0.00%) Abi::ScalarPair(a, b) => { . // We checked `ptr_align` above, so all fields will have the alignment they need. . // We would anyway check against `ptr_align.restrict_for_offset(b_offset)`, . // which `ptr.offset(b_offset)` cannot possibly fail to satisfy. . let (a, b) = (a.value, b.value); . let (a_size, b_size) = (a.size(self), b.size(self)); . let b_offset = a_size.align_to(b.align(self).abi); 9,172 ( 0.00%) assert!(b_offset.bytes() > 0); // we later use the offset to tell apart the fields . let a_val = alloc.read_scalar(alloc_range(Size::ZERO, a_size))?; 13,758 ( 0.00%) let b_val = alloc.read_scalar(alloc_range(b_offset, b_size))?; 22,930 ( 0.00%) Ok(Some(ImmTy { imm: Immediate::ScalarPair(a_val, b_val), layout: mplace.layout })) . } . _ => Ok(None), . } . } . . /// Try returning an immediate for the operand. . /// If the layout does not permit loading this as an immediate, return where in memory . /// we can find the data. . /// Note that for a given layout, this operation will either always fail or always . /// succeed! Whether it succeeds depends on whether the layout can be represented . /// in an `Immediate`, not on which data is stored there currently. 887,517 ( 0.02%) pub fn try_read_immediate( . &self, . src: &OpTy<'tcx, M::PointerTag>, . ) -> InterpResult<'tcx, Result, MPlaceTy<'tcx, M::PointerTag>>> { 1,775,034 ( 0.04%) Ok(match src.try_as_mplace() { . Ok(ref mplace) => { 175,156 ( 0.00%) if let Some(val) = self.try_read_immediate_from_mplace(mplace)? { . Ok(val) . } else { . Err(*mplace) . } . } 110,350 ( 0.00%) Err(val) => Ok(val), . }) 887,517 ( 0.02%) } . . /// Read an immediate from a place, asserting that that is possible with the given layout. . #[inline(always)] . pub fn read_immediate( . &self, . op: &OpTy<'tcx, M::PointerTag>, . ) -> InterpResult<'tcx, ImmTy<'tcx, M::PointerTag>> { 241,052 ( 0.01%) if let Ok(imm) = self.try_read_immediate(op)? { 443,500 ( 0.01%) Ok(imm) . } else { . span_bug!(self.cur_span(), "primitive read failed for type: {:?}", op.layout.ty); . } . } . . /// Read a scalar from a place 608,542 ( 0.01%) pub fn read_scalar( . &self, . op: &OpTy<'tcx, M::PointerTag>, . ) -> InterpResult<'tcx, ScalarMaybeUninit> { 55,322 ( 0.00%) Ok(self.read_immediate(op)?.to_scalar_or_uninit()) 497,898 ( 0.01%) } . . /// Read a pointer from a place. . pub fn read_pointer( . &self, . op: &OpTy<'tcx, M::PointerTag>, . ) -> InterpResult<'tcx, Pointer>> { . Ok(self.scalar_to_ptr(self.read_scalar(op)?.check_init()?)) . 
} -- line 342 ---------------------------------------- -- line 345 ---------------------------------------- . pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::PointerTag>) -> InterpResult<'tcx, &str> { . let len = mplace.len(self)?; . let bytes = self.memory.read_bytes(mplace.ptr, Size::from_bytes(len))?; . let str = std::str::from_utf8(bytes).map_err(|err| err_ub!(InvalidStr(err)))?; . Ok(str) . } . . /// Projection functions 874,910 ( 0.02%) pub fn operand_field( . &self, . op: &OpTy<'tcx, M::PointerTag>, . field: usize, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { . let base = match op.try_as_mplace() { . Ok(ref mplace) => { . // We can reuse the mplace field computation logic for indirect operands. . let field = self.mplace_field(mplace, field)?; 957,891 ( 0.02%) return Ok(field.into()); . } 2,870 ( 0.00%) Err(value) => value, . }; . 2,050 ( 0.00%) let field_layout = op.layout.field(self, field); . if field_layout.is_zst() { . let immediate = Scalar::ZST.into(); . return Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout }); . } 1,230 ( 0.00%) let offset = op.layout.fields.offset(field); . let immediate = match *base { . // the field covers the entire type 5,330 ( 0.00%) _ if offset.bytes() == 0 && field_layout.size == op.layout.size => *base, . // extract fields from types with `ScalarPair` ABI . Immediate::ScalarPair(a, b) => { . let val = if offset.bytes() == 0 { a } else { b }; . Immediate::from(val) . } . Immediate::Scalar(val) => span_bug!( . self.cur_span(), . "field access on non aggregate {:#?}, {:#?}", . val, . op.layout . ), . }; 5,740 ( 0.00%) Ok(OpTy { op: Operand::Immediate(immediate), layout: field_layout }) 787,419 ( 0.02%) } . . pub fn operand_index( . &self, . op: &OpTy<'tcx, M::PointerTag>, . index: u64, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { . if let Ok(index) = usize::try_from(index) { . // We can just treat this as a field. -- line 397 ---------------------------------------- -- line 398 ---------------------------------------- . self.operand_field(op, index) . } else { . // Indexing into a big array. This must be an mplace. . let mplace = op.assert_mem_place(); . Ok(self.mplace_index(&mplace, index)?.into()) . } . } . 156,233 ( 0.00%) pub fn operand_downcast( . &self, . op: &OpTy<'tcx, M::PointerTag>, . variant: VariantIdx, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { . // Downcasts only change the layout 241,451 ( 0.01%) Ok(match op.try_as_mplace() { 139,980 ( 0.00%) Ok(ref mplace) => self.mplace_downcast(mplace, variant)?.into(), . Err(..) => { 615 ( 0.00%) let layout = op.layout.for_variant(self, variant); 2,870 ( 0.00%) OpTy { layout, ..*op } . } . }) 127,827 ( 0.00%) } . 1,640 ( 0.00%) pub fn operand_projection( . &self, . base: &OpTy<'tcx, M::PointerTag>, . proj_elem: mir::PlaceElem<'tcx>, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { . use rustc_middle::mir::ProjectionElem::*; 6,560 ( 0.00%) Ok(match proj_elem { 615 ( 0.00%) Field(field, _) => self.operand_field(base, field.index())?, 615 ( 0.00%) Downcast(_, variant) => self.operand_downcast(base, variant)?, . Deref => self.deref_operand(base)?.into(), . Subslice { .. } | ConstantIndex { .. } | Index(_) => { . // The rest should only occur as mplace, we do not use Immediates for types . // allowing such operations. This matches place_projection forcing an allocation. . let mplace = base.assert_mem_place(); . self.mplace_projection(&mplace, proj_elem)?.into() . } . }) 1,640 ( 0.00%) } . . 
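The `operand_field` projection above extracts a field from a `ScalarPair` immediate purely by offset: the field at offset 0 takes the first scalar, any other field takes the second, and a plain `Scalar` base is a bug. A simplified standalone model of that selection rule, assuming plain `u128` values rather than the interpreter's tagged `Scalar`/`Immediate` types:

// Standalone sketch of the ScalarPair field-selection rule used above.
#[derive(Debug, Clone, Copy, PartialEq)]
enum Immediate {
    Scalar(u128),
    ScalarPair(u128, u128),
}

fn field_of_pair(base: Immediate, field_offset_bytes: u64) -> Immediate {
    match base {
        Immediate::ScalarPair(a, b) => {
            // Same rule as the annotated source: offset 0 selects `a`,
            // any other offset selects `b`.
            let val = if field_offset_bytes == 0 { a } else { b };
            Immediate::Scalar(val)
        }
        Immediate::Scalar(_) => panic!("field access on a non-aggregate immediate"),
    }
}

fn main() {
    let pair = Immediate::ScalarPair(0xAA, 0xBB);
    assert_eq!(field_of_pair(pair, 0), Immediate::Scalar(0xAA));
    assert_eq!(field_of_pair(pair, 8), Immediate::Scalar(0xBB));
}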
/// Converts a repr(simd) operand into an operand where `place_index` accesses the SIMD elements. . /// Also returns the number of elements. . pub fn operand_to_simd( . &self, . base: &OpTy<'tcx, M::PointerTag>, . ) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, u64)> { . // Basically we just transmute this place into an array following simd_size_and_type. -- line 446 ---------------------------------------- -- line 449 ---------------------------------------- . self.mplace_to_simd(&base.assert_mem_place()) . } . . /// Read from a local. Will not actually access the local if reading from a ZST. . /// Will not access memory, instead an indirect `Operand` is returned. . /// . /// This is public because it is used by [priroda](https://github.com/oli-obk/priroda) to get an . /// OpTy from a local 143,607 ( 0.00%) pub fn access_local( . &self, . frame: &super::Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>, . local: mir::Local, . layout: Option>, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { . let layout = self.layout_of_local(frame, local, layout)?; 4,560 ( 0.00%) let op = if layout.is_zst() { . // Do not read from ZST, they might not be initialized 10 ( 0.00%) Operand::Immediate(Scalar::ZST.into()) . } else { 4,899 ( 0.00%) M::access_local(&self, frame, local)? . }; 193,388 ( 0.00%) Ok(OpTy { op, layout }) 124,929 ( 0.00%) } . . /// Every place can be read from, so we can turn them into an operand. . /// This will definitely return `Indirect` if the place is a `Ptr`, i.e., this . /// will never actually read from memory. . #[inline(always)] . pub fn place_to_op( . &self, . place: &PlaceTy<'tcx, M::PointerTag>, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { 24 ( 0.00%) let op = match **place { . Place::Ptr(mplace) => Operand::Indirect(mplace), . Place::Local { frame, local } => { 108 ( 0.00%) *self.access_local(&self.stack()[frame], local, None)? . } . }; 84 ( 0.00%) Ok(OpTy { op, layout: place.layout }) . } . . // Evaluate a place with the goal of reading from it. This lets us sometimes . // avoid allocations. 122,888 ( 0.00%) pub fn eval_place_to_op( . &self, . place: mir::Place<'tcx>, . layout: Option>, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { . // Do not use the layout passed in as argument if the base we are looking at . // here is not the entire place. 15,361 ( 0.00%) let layout = if place.projection.is_empty() { layout } else { None }; . 46,083 ( 0.00%) let base_op = self.access_local(self.frame(), place.local, layout)?; . . let op = place . .projection . .iter() 3,690 ( 0.00%) .try_fold(base_op, |op, elem| self.operand_projection(&op, elem))?; . . trace!("eval_place_to_op: got {:?}", *op); . // Sanity-check the type we ended up with. . debug_assert!(mir_assign_valid_types( . *self.tcx, . self.param_env, . self.layout_of(self.subst_from_current_frame_and_normalize_erasing_regions( . place.ty(&self.frame().body.local_decls, *self.tcx).ty . )?)?, . op.layout, . )); 44,475 ( 0.00%) Ok(op) 138,249 ( 0.00%) } . . /// Evaluate the operand, returning a place where you can then find the data. . /// If you already know the layout, you can save two table lookups . /// by passing it in here. . #[inline] 205,713 ( 0.00%) pub fn eval_operand( . &self, . mir_op: &mir::Operand<'tcx>, . layout: Option>, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { . use rustc_middle::mir::Operand::*; 47,761 ( 0.00%) let op = match *mir_op { . 
// FIXME: do some more logic on `move` to invalidate the old location 71,971 ( 0.00%) Copy(place) | Move(place) => self.eval_place_to_op(place, layout)?, . . Constant(ref constant) => { . let val = 142,902 ( 0.00%) self.subst_from_current_frame_and_normalize_erasing_regions(constant.literal)?; . // This can still fail: . // * During ConstProp, with `TooGeneric` or since the `requried_consts` were not all . // checked yet. . // * During CTFE, since promoteds in `const`/`static` initializer bodies can fail. . 1,404 ( 0.00%) self.mir_const_to_op(&val, layout)? . } . }; . trace!("{:?}: {:?}", mir_op, *op); 304,381 ( 0.01%) Ok(op) 137,142 ( 0.00%) } . . /// Evaluate a bunch of operands at once . pub(super) fn eval_operands( . &self, . ops: &[mir::Operand<'tcx>], . ) -> InterpResult<'tcx, Vec>> { . ops.iter().map(|op| self.eval_operand(op, None)).collect() . } . . // Used when the miri-engine runs into a constant and for extracting information from constants . // in patterns via the `const_eval` module . /// The `val` and `layout` are assumed to already be in our interpreter . /// "universe" (param_env). 109,548 ( 0.00%) pub fn const_to_op( . &self, . val: &ty::Const<'tcx>, . layout: Option>, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { 52,015 ( 0.00%) match val.val { . ty::ConstKind::Param(_) | ty::ConstKind::Bound(..) => throw_inval!(TooGeneric), . ty::ConstKind::Error(_) => throw_inval!(AlreadyReported(ErrorReported)), 24 ( 0.00%) ty::ConstKind::Unevaluated(uv) => { . let instance = self.resolve(uv.def, uv.substs)?; 66 ( 0.00%) Ok(self.eval_to_allocation(GlobalId { instance, promoted: uv.promoted })?.into()) . } . ty::ConstKind::Infer(..) | ty::ConstKind::Placeholder(..) => { . span_bug!(self.cur_span(), "const_to_op: Unexpected ConstKind {:?}", val) . } 98,898 ( 0.00%) ty::ConstKind::Value(val_val) => self.const_val_to_op(val_val, val.ty, layout), . } 82,161 ( 0.00%) } . 14,014 ( 0.00%) pub fn mir_const_to_op( . &self, . val: &mir::ConstantKind<'tcx>, . layout: Option>, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { 20,806 ( 0.00%) match val { 39,064 ( 0.00%) mir::ConstantKind::Ty(ct) => self.const_to_op(ct, layout), . mir::ConstantKind::Val(val, ty) => self.const_val_to_op(*val, ty, layout), . } 11,466 ( 0.00%) } . 103,970 ( 0.00%) crate fn const_val_to_op( . &self, . val_val: ConstValue<'tcx>, . ty: Ty<'tcx>, . layout: Option>, . ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { . // Other cases need layout. . let tag_scalar = |scalar| -> InterpResult<'tcx, _> { 57,690 ( 0.00%) Ok(match scalar { . Scalar::Ptr(ptr, size) => Scalar::Ptr(self.global_base_pointer(ptr)?, size), 28,818 ( 0.00%) Scalar::Int(int) => Scalar::Int(int), . }) . }; 2,751 ( 0.00%) let layout = from_known_layout(self.tcx, self.param_env, layout, || self.layout_of(ty))?; 32,755 ( 0.00%) let op = match val_val { 12 ( 0.00%) ConstValue::ByRef { alloc, offset } => { 12 ( 0.00%) let id = self.tcx.create_memory_alloc(alloc); . // We rely on mutability being set correctly in that allocation to prevent writes . // where none should happen. . let ptr = self.global_base_pointer(Pointer::new(id, offset))?; 24 ( 0.00%) Operand::Indirect(MemPlace::from_ptr(ptr.into(), layout.align.abi)) . } 87,027 ( 0.00%) ConstValue::Scalar(x) => Operand::Immediate(tag_scalar(x)?.into()), 2,328 ( 0.00%) ConstValue::Slice { data, start, end } => { . // We rely on mutability being set correctly in `data` to prevent writes . // where none should happen. . let ptr = Pointer::new( 2,328 ( 0.00%) self.tcx.create_memory_alloc(data), . 
Size::from_bytes(start), // offset: `start` . ); 17,779 ( 0.00%) Operand::Immediate(Immediate::new_slice( . Scalar::from_pointer(self.global_base_pointer(ptr)?, &*self.tcx), . u64::try_from(end.checked_sub(start).unwrap()).unwrap(), // len: `end - start` . self, . )) . } . }; 218,337 ( 0.00%) Ok(OpTy { op, layout }) 83,176 ( 0.00%) } . . /// Read discriminant, return the runtime value as well as the variant index. . /// Can also legally be called on non-enums (e.g. through the discriminant_value intrinsic)! 186,280 ( 0.00%) pub fn read_discriminant( . &self, . op: &OpTy<'tcx, M::PointerTag>, . ) -> InterpResult<'tcx, (Scalar, VariantIdx)> { . trace!("read_discriminant_value {:#?}", op.layout); . // Get type and layout of the discriminant. 223,536 ( 0.00%) let discr_layout = self.layout_of(op.layout.ty.discriminant_ty(*self.tcx))?; . trace!("discriminant type: {:?}", discr_layout.ty); . . // We use "discriminant" to refer to the value associated with a particular enum variant. . // This is not to be confused with its "variant index", which is just determining its position in the . // declared list of variants -- they can differ with explicitly assigned discriminants. . // We use "tag" to refer to how the discriminant is encoded in memory, which can be either . // straight-forward (`TagEncoding::Direct`) or with a niche (`TagEncoding::Niche`). 55,884 ( 0.00%) let (tag_scalar_layout, tag_encoding, tag_field) = match op.layout.variants { . Variants::Single { index } => { . let discr = match op.layout.ty.discriminant_for_variant(*self.tcx, index) { . Some(discr) => { . // This type actually has discriminants. . assert_eq!(discr.ty, discr_layout.ty); . Scalar::from_uint(discr.val, discr_layout.size) . } . None => { . // On a type without actual discriminants, variant is 0. . assert_eq!(index.as_u32(), 0); . Scalar::from_uint(index.as_u32(), discr_layout.size) . } . }; . return Ok((discr, index)); . } 37,256 ( 0.00%) Variants::Multiple { tag, ref tag_encoding, tag_field, .. } => { . (tag, tag_encoding, tag_field) . } . }; . . // There are *three* layouts that come into play here: . // - The discriminant has a type for typechecking. This is `discr_layout`, and is used for . // the `Scalar` we return. . // - The tag (encoded discriminant) has layout `tag_layout`. This is always an integer type, . // and used to interpret the value we read from the tag field. . // For the return value, a cast to `discr_layout` is performed. . // - The field storing the tag has a layout, which is very similar to `tag_layout` but . // may be a pointer. This is `tag_val.layout`; we just use it for sanity checks. . . // Get layout for tag. 111,768 ( 0.00%) let tag_layout = self.layout_of(tag_scalar_layout.value.to_int_ty(*self.tcx))?; . . // Read tag and sanity-check `tag_layout`. 130,396 ( 0.00%) let tag_val = self.read_immediate(&self.operand_field(op, tag_field)?)?; 37,256 ( 0.00%) assert_eq!(tag_layout.size, tag_val.layout.size); 149,024 ( 0.00%) assert_eq!(tag_layout.abi.is_signed(), tag_val.layout.abi.is_signed()); . let tag_val = tag_val.to_scalar()?; . trace!("tag value: {:?}", tag_val); . . // Figure out which discriminant and variant this corresponds to. 167,652 ( 0.00%) Ok(match *tag_encoding { . TagEncoding::Direct => { 372,560 ( 0.01%) let tag_bits = tag_val . .try_to_int() . .map_err(|dbg_val| err_ub!(InvalidTag(dbg_val)))? . .assert_bits(tag_layout.size); . // Cast bits from tag layout to discriminant layout. 
149,024 ( 0.00%) let discr_val = self.cast_from_scalar(tag_bits, tag_layout, discr_layout.ty); 37,256 ( 0.00%) let discr_bits = discr_val.assert_bits(discr_layout.size); . // Convert discriminant to variant index, and catch invalid discriminants. 93,140 ( 0.00%) let index = match *op.layout.ty.kind() { 18,628 ( 0.00%) ty::Adt(adt, _) => { 754,981 ( 0.02%) adt.discriminants(*self.tcx).find(|(_, var)| var.val == discr_bits) . } . ty::Generator(def_id, substs, _) => { . let substs = substs.as_generator(); . substs . .discriminants(def_id, *self.tcx) . .find(|(_, var)| var.val == discr_bits) . } . _ => span_bug!(self.cur_span(), "tagged layout for non-adt non-generator"), . } . .ok_or_else(|| err_ub!(InvalidTag(Scalar::from_uint(tag_bits, tag_layout.size))))?; . // Return the cast value, and the index. 74,512 ( 0.00%) (discr_val, index.0) . } . TagEncoding::Niche { dataful_variant, ref niche_variants, niche_start } => { . // Compute the variant this niche value/"tag" corresponds to. With niche layout, . // discriminant (encoded in niche/tag) and variant index are the same. . let variants_start = niche_variants.start().as_u32(); . let variants_end = niche_variants.end().as_u32(); . let variant = match tag_val.try_to_int() { . Err(dbg_val) => { -- line 721 ---------------------------------------- -- line 766 ---------------------------------------- . } . }; . // Compute the size of the scalar we need to return. . // No need to cast, because the variant index directly serves as discriminant and is . // encoded in the tag. . (Scalar::from_uint(variant.as_u32(), discr_layout.size), variant) . } . }) 167,652 ( 0.00%) } . } 2,535,268 ( 0.06%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs -------------------------------------------------------------------------------- Ir -- line 5 ---------------------------------------- . #[stable(feature = "alloc_system_type", since = "1.28.0")] . unsafe impl GlobalAlloc for System { . #[inline] . unsafe fn alloc(&self, layout: Layout) -> *mut u8 { . // jemalloc provides alignment less than MIN_ALIGN for small allocations. . // So only rely on MIN_ALIGN if size >= align. . // Also see and . // . 3,538,974 ( 0.08%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 2,654,229 ( 0.06%) libc::malloc(layout.size()) as *mut u8 . } else { . #[cfg(target_os = "macos")] . { . if layout.align() > (1 << 31) { . return ptr::null_mut(); . } . } . aligned_malloc(&layout) . } . } . . #[inline] . unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { . // See the comment above in `alloc` for why this check looks the way it does. 83,184 ( 0.00%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 124,776 ( 0.00%) libc::calloc(layout.size(), 1) as *mut u8 . } else { . let ptr = self.alloc(layout); . if !ptr.is_null() { . ptr::write_bytes(ptr, 0, layout.size()); . } . ptr . } . } . . #[inline] . unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { 905,529 ( 0.02%) libc::free(ptr as *mut libc::c_void) . } . . #[inline] . unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { 197,272 ( 0.00%) if layout.align() <= MIN_ALIGN && layout.align() <= new_size { 394,544 ( 0.01%) libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8 . } else { . realloc_fallback(self, ptr, layout, new_size) . } . } . } . . cfg_if::cfg_if! { . 
if #[cfg(any( -- line 56 ---------------------------------------- -- line 84 ---------------------------------------- . } else if #[cfg(target_os = "wasi")] { . #[inline] . unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { . libc::aligned_alloc(layout.align(), layout.size()) as *mut u8 . } . } else { . #[inline] . unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { 1 ( 0.00%) let mut out = ptr::null_mut(); . // posix_memalign requires that the alignment be a multiple of `sizeof(void*)`. . // Since these are all powers of 2, we can just use max. . let align = layout.align().max(crate::mem::size_of::()); 2 ( 0.00%) let ret = libc::posix_memalign(&mut out, align, layout.size()); 2 ( 0.00%) if ret != 0 { ptr::null_mut() } else { out as *mut u8 } . } . } . } 1 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp/ring.rs -------------------------------------------------------------------------------- Ir -- line 22 ---------------------------------------- . RingBuffer { data: VecDeque::new(), offset: 0 } . } . . pub fn is_empty(&self) -> bool { . self.data.is_empty() . } . . pub fn push(&mut self, value: T) -> usize { 392,678 ( 0.01%) let index = self.offset + self.data.len(); 1,278,554 ( 0.03%) self.data.push_back(value); . index . } . . pub fn clear(&mut self) { 10 ( 0.00%) self.data.clear(); . } . . pub fn index_of_first(&self) -> usize { . self.offset . } . . pub fn first(&self) -> Option<&T> { . self.data.front() . } . . pub fn first_mut(&mut self) -> Option<&mut T> { . self.data.front_mut() . } . 981,695 ( 0.02%) pub fn pop_first(&mut self) -> Option { . let first = self.data.pop_front()?; 785,356 ( 0.02%) self.offset += 1; 1,178,034 ( 0.03%) Some(first) 1,374,373 ( 0.03%) } . . pub fn last(&self) -> Option<&T> { . self.data.back() . } . . pub fn last_mut(&mut self) -> Option<&mut T> { . self.data.back_mut() . } -- line 63 ---------------------------------------- 981,695 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/vec_deque/mod.rs -------------------------------------------------------------------------------- Ir -- line 126 ---------------------------------------- . self.extend(remainder.iter().cloned()); . } . } . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . unsafe impl<#[may_dangle] T, A: Allocator> Drop for VecDeque { 6,804 ( 0.00%) fn drop(&mut self) { . /// Runs the destructor for all items in the slice when it gets dropped (normally or . /// during unwinding). . struct Dropper<'a, T>(&'a mut [T]); . . impl<'a, T> Drop for Dropper<'a, T> { . fn drop(&mut self) { . unsafe { . ptr::drop_in_place(self.0); -- line 142 ---------------------------------------- -- line 146 ---------------------------------------- . . let (front, back) = self.as_mut_slices(); . unsafe { . let _back_dropper = Dropper(back); . // use drop for [T] . ptr::drop_in_place(front); . } . // RawVec handles deallocation 7,776 ( 0.00%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl Default for VecDeque { . /// Creates an empty `VecDeque`. . #[inline] . fn default() -> VecDeque { . VecDeque::new() . } . } . . impl VecDeque { . /// Marginally more convenient . #[inline] . fn ptr(&self) -> *mut T { 394,484 ( 0.01%) self.buf.ptr() . } . . /// Marginally more convenient . #[inline] . 
fn cap(&self) -> usize { . if mem::size_of::() == 0 { . // For zero sized types, we are always at maximum capacity . MAXIMUM_ZST_CAPACITY . } else { 481,008 ( 0.01%) self.buf.capacity() . } . } . . /// Turn ptr into a slice . #[inline] . unsafe fn buffer_as_slice(&self) -> &[T] { . unsafe { slice::from_raw_parts(self.ptr(), self.cap()) } . } -- line 188 ---------------------------------------- -- line 191 ---------------------------------------- . #[inline] . unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] { . unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) } . } . . /// Moves an element out of the buffer . #[inline] . unsafe fn buffer_read(&mut self, off: usize) -> T { 30,657 ( 0.00%) unsafe { ptr::read(self.ptr().add(off)) } . } . . /// Writes an element into the buffer, moving it. . #[inline] . unsafe fn buffer_write(&mut self, off: usize, value: T) { . unsafe { . ptr::write(self.ptr().add(off), value); . } . } . . /// Returns `true` if the buffer is at full capacity. . #[inline] . fn is_full(&self) -> bool { 935,322 ( 0.02%) self.cap() - self.len() == 1 . } . . /// Returns the index in the underlying buffer for a given logical element . /// index. . #[inline] . fn wrap_index(&self, idx: usize) -> usize { . wrap_index(idx, self.cap()) . } -- line 221 ---------------------------------------- -- line 452 ---------------------------------------- . // [o o . o o o o o ] . // T H . // B [. . . o o o o o o o . . . . . . ] . // H T . // [o o o o o . o o ] . // H T . // C [o o o o o . . . . . . . . . o o ] . 136 ( 0.00%) if self.tail <= self.head { . // A . // Nop 48 ( 0.00%) } else if self.head < old_capacity - self.tail { . // B . unsafe { . self.copy_nonoverlapping(old_capacity, 0, self.head); . } . self.head += old_capacity; . debug_assert!(self.head > self.tail); . } else { . // C 24 ( 0.00%) let new_tail = new_capacity - (old_capacity - self.tail); . unsafe { 8 ( 0.00%) self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail); . } 8 ( 0.00%) self.tail = new_tail; . debug_assert!(self.head < self.tail); . } . debug_assert!(self.head < self.cap()); . debug_assert!(self.tail < self.cap()); . debug_assert!(self.cap().count_ones() == 1); . } . } . -- line 484 ---------------------------------------- -- line 507 ---------------------------------------- . /// use std::collections::VecDeque; . /// . /// let vector: VecDeque = VecDeque::with_capacity(10); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub fn with_capacity(capacity: usize) -> VecDeque { 616 ( 0.00%) Self::with_capacity_in(capacity, Global) . } . } . . impl VecDeque { . /// Creates an empty `VecDeque`. . /// . /// # Examples . /// -- line 523 ---------------------------------------- -- line 524 ---------------------------------------- . /// ``` . /// use std::collections::VecDeque; . /// . /// let vector: VecDeque = VecDeque::new(); . /// ``` . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn new_in(alloc: A) -> VecDeque { 1,018 ( 0.00%) VecDeque::with_capacity_in(INITIAL_CAPACITY, alloc) . } . . /// Creates an empty `VecDeque` with space for at least `capacity` elements. . /// . /// # Examples . /// . /// ``` . /// use std::collections::VecDeque; . /// . /// let vector: VecDeque = VecDeque::with_capacity(10); . /// ``` . 
#[unstable(feature = "allocator_api", issue = "32838")] 3,268 ( 0.00%) pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque { 2,054 ( 0.00%) assert!(capacity < 1_usize << usize::BITS - 1, "capacity overflow"); . // +1 since the ringbuffer always leaves one space empty . let cap = cmp::max(capacity + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); . 6,892 ( 0.00%) VecDeque { tail: 0, head: 0, buf: RawVec::with_capacity_in(cap, alloc) } 4,902 ( 0.00%) } . . /// Provides a reference to the element at the given index. . /// . /// Element at index 0 is the front of the queue. . /// . /// # Examples . /// . /// ``` -- line 559 ---------------------------------------- -- line 562 ---------------------------------------- . /// let mut buf = VecDeque::new(); . /// buf.push_back(3); . /// buf.push_back(4); . /// buf.push_back(5); . /// assert_eq!(buf.get(1), Some(&4)); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn get(&self, index: usize) -> Option<&T> { 1,573,939 ( 0.03%) if index < self.len() { . let idx = self.wrap_add(self.tail, index); . unsafe { Some(&*self.ptr().add(idx)) } . } else { . None . } . } . . /// Provides a mutable reference to the element at the given index. -- line 578 ---------------------------------------- -- line 591 ---------------------------------------- . /// if let Some(elem) = buf.get_mut(1) { . /// *elem = 7; . /// } . /// . /// assert_eq!(buf[1], 7); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn get_mut(&mut self, index: usize) -> Option<&mut T> { 648,506 ( 0.01%) if index < self.len() { . let idx = self.wrap_add(self.tail, index); . unsafe { Some(&mut *self.ptr().add(idx)) } . } else { . None . } . } . . /// Swaps elements at indices `i` and `j`. -- line 607 ---------------------------------------- -- line 694 ---------------------------------------- . /// . /// let mut buf: VecDeque = [1].into(); . /// buf.reserve(10); . /// assert!(buf.capacity() >= 11); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve(&mut self, additional: usize) { . let old_cap = self.cap(); 8 ( 0.00%) let used_cap = self.len() + 1; . let new_cap = used_cap . .checked_add(additional) . .and_then(|needed_cap| needed_cap.checked_next_power_of_two()) . .expect("capacity overflow"); . 16 ( 0.00%) if new_cap > old_cap { 24 ( 0.00%) self.buf.reserve_exact(used_cap, new_cap - used_cap); . unsafe { . self.handle_capacity_increase(old_cap); . } . } . } . . /// Tries to reserve the minimum capacity for exactly `additional` more elements to . /// be inserted in the given `VecDeque`. After calling `try_reserve_exact`, -- line 717 ---------------------------------------- -- line 924 ---------------------------------------- . /// buf.push_back(5); . /// buf.push_back(10); . /// buf.push_back(15); . /// assert_eq!(buf, [5, 10, 15]); . /// buf.truncate(1); . /// assert_eq!(buf, [5]); . /// ``` . #[stable(feature = "deque_extras", since = "1.16.0")] 80 ( 0.00%) pub fn truncate(&mut self, len: usize) { . /// Runs the destructor for all items in the slice when it gets dropped (normally or . /// during unwinding). . struct Dropper<'a, T>(&'a mut [T]); . . impl<'a, T> Drop for Dropper<'a, T> { . fn drop(&mut self) { . unsafe { . ptr::drop_in_place(self.0); -- line 940 ---------------------------------------- -- line 945 ---------------------------------------- . // Safe because: . // . // * Any slice passed to `drop_in_place` is valid; the second case has . // `len <= front.len()` and returning on `len > self.len()` ensures . 
// `begin <= back.len()` in the first case . // * The head of the VecDeque is moved before calling `drop_in_place`, . // so no value is dropped twice if `drop_in_place` panics . unsafe { 20 ( 0.00%) if len > self.len() { . return; . } . let num_dropped = self.len() - len; . let (front, back) = self.as_mut_slices(); 20 ( 0.00%) if len > front.len() { . let begin = len - front.len(); . let drop_back = back.get_unchecked_mut(begin..) as *mut _; . self.head = self.wrap_sub(self.head, num_dropped); . ptr::drop_in_place(drop_back); . } else { . let drop_back = back as *mut _; . let drop_front = front.get_unchecked_mut(len..) as *mut _; 10 ( 0.00%) self.head = self.wrap_sub(self.head, num_dropped); . . // Make sure the second half is dropped even when a destructor . // in the first one panics. . let _back_dropper = Dropper(&mut *drop_back); . ptr::drop_in_place(drop_front); . } . } 80 ( 0.00%) } . . /// Returns a reference to the underlying allocator. . #[unstable(feature = "allocator_api", issue = "32838")] . #[inline] . pub fn allocator(&self) -> &A { . self.buf.allocator() . } . -- line 982 ---------------------------------------- -- line 1085 ---------------------------------------- . /// vector.as_mut_slices().0[0] = 42; . /// vector.as_mut_slices().1[0] = 24; . /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..])); . /// ``` . #[inline] . #[stable(feature = "deque_extras_15", since = "1.5.0")] . pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) { . unsafe { 2,462 ( 0.00%) let head = self.head; 2,462 ( 0.00%) let tail = self.tail; 1,758 ( 0.00%) let buf = self.buffer_as_mut_slice(); . RingSlices::ring_slices(buf, head, tail) . } . } . . /// Returns the number of elements in the `VecDeque`. . /// . /// # Examples . /// -- line 1103 ---------------------------------------- -- line 1106 ---------------------------------------- . /// . /// let mut v = VecDeque::new(); . /// assert_eq!(v.len(), 0); . /// v.push_back(1); . /// assert_eq!(v.len(), 1); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn len(&self) -> usize { 761,965 ( 0.02%) count(self.tail, self.head, self.cap()) . } . . /// Returns `true` if the `VecDeque` is empty. . /// . /// # Examples . /// . /// ``` . /// use std::collections::VecDeque; -- line 1122 ---------------------------------------- -- line 1123 ---------------------------------------- . /// . /// let mut v = VecDeque::new(); . /// assert!(v.is_empty()); . /// v.push_front(1); . /// assert!(!v.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn is_empty(&self) -> bool { 1,379,948 ( 0.03%) self.tail == self.head . } . . fn range_tail_head(&self, range: R) -> (usize, usize) . where . R: RangeBounds, . { . let Range { start, end } = slice::range(range, ..self.len()); . let tail = self.wrap_add(self.tail, start); -- line 1139 ---------------------------------------- -- line 1307 ---------------------------------------- . /// let mut v = VecDeque::new(); . /// v.push_back(1); . /// v.clear(); . /// assert!(v.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . pub fn clear(&mut self) { 20 ( 0.00%) self.truncate(0); . } . . /// Returns `true` if the `VecDeque` contains an element equal to the . /// given value. . /// . /// # Examples . /// . /// ``` -- line 1323 ---------------------------------------- -- line 1396 ---------------------------------------- . /// assert_eq!(d.back(), None); . /// . /// d.push_back(1); . /// d.push_back(2); . /// assert_eq!(d.back(), Some(&2)); . 
/// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn back(&self) -> Option<&T> { 22,834 ( 0.00%) self.get(self.len().wrapping_sub(1)) . } . . /// Provides a mutable reference to the back element, or `None` if the . /// `VecDeque` is empty. . /// . /// # Examples . /// . /// ``` -- line 1412 ---------------------------------------- -- line 1441 ---------------------------------------- . /// d.push_back(2); . /// . /// assert_eq!(d.pop_front(), Some(1)); . /// assert_eq!(d.pop_front(), Some(2)); . /// assert_eq!(d.pop_front(), None); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn pop_front(&mut self) -> Option { 251,549 ( 0.01%) if self.is_empty() { . None . } else { . let tail = self.tail; 446,028 ( 0.01%) self.tail = self.wrap_add(self.tail, 1); 10,553 ( 0.00%) unsafe { Some(self.buffer_read(tail)) } . } . } . . /// Removes the last element from the `VecDeque` and returns it, or `None` if . /// it is empty. . /// . /// # Examples . /// -- line 1462 ---------------------------------------- -- line 1466 ---------------------------------------- . /// let mut buf = VecDeque::new(); . /// assert_eq!(buf.pop_back(), None); . /// buf.push_back(1); . /// buf.push_back(3); . /// assert_eq!(buf.pop_back(), Some(3)); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn pop_back(&mut self) -> Option { 73,162 ( 0.00%) if self.is_empty() { . None . } else { 105,242 ( 0.00%) self.head = self.wrap_sub(self.head, 1); . let head = self.head; . unsafe { Some(self.buffer_read(head)) } . } . } . . /// Prepends an element to the `VecDeque`. . /// . /// # Examples -- line 1485 ---------------------------------------- -- line 1513 ---------------------------------------- . /// use std::collections::VecDeque; . /// . /// let mut buf = VecDeque::new(); . /// buf.push_back(1); . /// buf.push_back(3); . /// assert_eq!(3, *buf.back().unwrap()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 785,356 ( 0.02%) pub fn push_back(&mut self, value: T) { 935,322 ( 0.02%) if self.is_full() { 45 ( 0.00%) self.grow(); . } . 26 ( 0.00%) let head = self.head; 311,800 ( 0.01%) self.head = self.wrap_add(self.head, 1); 115,435 ( 0.00%) unsafe { self.buffer_write(head, value) } 785,356 ( 0.02%) } . . #[inline] . fn is_contiguous(&self) -> bool { . // FIXME: Should we consider `head == 0` to mean . // that `self` is contiguous? . self.tail <= self.head . } . -- line 1537 ---------------------------------------- -- line 2209 ---------------------------------------- . self.truncate(idx); . } . } . . // Double the buffer size. This method is inline(never), so we expect it to only . // be called in cold paths. . // This may panic or abort . #[inline(never)] 156 ( 0.00%) fn grow(&mut self) { . // Extend or possibly remove this assertion when valid use-cases for growing the . // buffer without it being full emerge . debug_assert!(self.is_full()); . let old_cap = self.cap(); 78 ( 0.00%) self.buf.reserve_exact(old_cap, old_cap); 130 ( 0.00%) assert!(self.cap() == old_cap * 2); . unsafe { . self.handle_capacity_increase(old_cap); . } . debug_assert!(!self.is_full()); 156 ( 0.00%) } . . /// Modifies the `VecDeque` in-place so that `len()` is equal to `new_len`, . /// either by removing excess elements from the back or by appending . /// elements generated by calling `generator` to the back. . /// . /// # Examples . /// . /// ``` -- line 2236 ---------------------------------------- -- line 2751 ---------------------------------------- . } . } . . 
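The hot `VecDeque` paths annotated above (push_back, pop_front, grow) all lean on the same ring-buffer arithmetic: capacity is kept a power of two, one slot is always left empty so a full buffer can be told apart from an empty one, and logical indices wrap with a mask. A minimal standalone sketch of that arithmetic, assuming a fixed-capacity `Vec<Option<u32>>` as storage instead of the real uninitialized buffer (no growth, no unsafe reads):

// Simplified model of the wrap/len/is_full arithmetic, not the real VecDeque.
struct Ring {
    buf: Vec<Option<u32>>, // capacity is a power of two
    tail: usize,           // index of the first element
    head: usize,           // index one past the last element
}

impl Ring {
    fn with_capacity_pow2(cap: usize) -> Self {
        assert!(cap.is_power_of_two());
        Ring { buf: vec![None; cap], tail: 0, head: 0 }
    }
    fn wrap(&self, idx: usize) -> usize {
        idx & (self.buf.len() - 1) // same trick as `wrap_index`
    }
    fn len(&self) -> usize {
        self.head.wrapping_sub(self.tail) & (self.buf.len() - 1)
    }
    fn is_full(&self) -> bool {
        self.buf.len() - self.len() == 1 // one slot always stays empty
    }
    fn push_back(&mut self, value: u32) {
        assert!(!self.is_full(), "this sketch does not grow");
        let head = self.head;
        self.head = self.wrap(self.head + 1);
        self.buf[head] = Some(value);
    }
    fn pop_front(&mut self) -> Option<u32> {
        if self.len() == 0 {
            return None;
        }
        let tail = self.tail;
        self.tail = self.wrap(self.tail + 1);
        self.buf[tail].take()
    }
}

fn main() {
    let mut r = Ring::with_capacity_pow2(4); // holds at most 3 elements
    r.push_back(1);
    r.push_back(2);
    r.push_back(3);
    assert!(r.is_full());
    assert_eq!(r.pop_front(), Some(1));
    r.push_back(4); // head wraps around the end of the buffer
    assert_eq!(r.len(), 3);
}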
/// Returns the index in the underlying buffer for a given logical element index. . #[inline] . fn wrap_index(index: usize, size: usize) -> usize { . // size is always a power of 2 . debug_assert!(size.is_power_of_two()); 1,661,994 ( 0.04%) index & (size - 1) . } . . /// Calculate the number of elements left to be read in the buffer . #[inline] . fn count(tail: usize, head: usize, size: usize) -> usize { . // size is always a power of 2 2,878,068 ( 0.06%) (head.wrapping_sub(tail)) & (size - 1) . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq for VecDeque { . fn eq(&self, other: &Self) -> bool { . if self.len() != other.len() { . return false; . } -- line 2774 ---------------------------------------- -- line 2902 ---------------------------------------- . . fn into_iter(self) -> IterMut<'a, T> { . self.iter_mut() . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl Extend for VecDeque { 784 ( 0.00%) fn extend>(&mut self, iter: I) { . // This function should be the moral equivalent of: . // . // for item in iter.into_iter() { . // self.push_back(item); . // } . let mut iter = iter.into_iter(); . while let Some(element) = iter.next() { 448 ( 0.00%) if self.len() == self.capacity() { . let (lower, _) = iter.size_hint(); . self.reserve(lower.saturating_add(1)); . } . . let head = self.head; 104 ( 0.00%) self.head = self.wrap_add(self.head, 1); . unsafe { 104 ( 0.00%) self.buffer_write(head, element); . } . } 896 ( 0.00%) } . . #[inline] . fn extend_one(&mut self, elem: T) { . self.push_back(elem); . } . . #[inline] . fn extend_reserve(&mut self, additional: usize) { -- line 2937 ---------------------------------------- 403,130 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/string.rs -------------------------------------------------------------------------------- Ir -- line 375 ---------------------------------------- . /// ``` . /// let s = String::new(); . /// ``` . #[inline] . #[rustc_const_stable(feature = "const_string_new", since = "1.39.0")] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub const fn new() -> String { 47,441 ( 0.00%) String { vec: Vec::new() } . } . . /// Creates a new empty `String` with a particular capacity. . /// . /// `String`s have an internal buffer to hold their data. The capacity is . /// the length of that buffer, and can be queried with the [`capacity`] . /// method. This method creates an empty `String`, but one with an initial . /// buffer that can hold `capacity` bytes. This is useful when you may be -- line 391 ---------------------------------------- -- line 420 ---------------------------------------- . /// // ...but this may make the string reallocate . /// s.push('a'); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub fn with_capacity(capacity: usize) -> String { 10,064 ( 0.00%) String { vec: Vec::with_capacity(capacity) } . } . . // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is . // required for this method definition, is not available. Since we don't . // require this method for testing purposes, I'll just stub it . // NB see the slice::hack module in slice.rs for more information . #[inline] . #[cfg(test)] -- line 436 ---------------------------------------- -- line 492 ---------------------------------------- . /// . 
/// [`from_utf8_unchecked`]: String::from_utf8_unchecked . /// [`Vec`]: crate::vec::Vec "Vec" . /// [`&str`]: prim@str "&str" . /// [`into_bytes`]: String::into_bytes . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn from_utf8(vec: Vec) -> Result { 840 ( 0.00%) match str::from_utf8(&vec) { . Ok(..) => Ok(String { vec }), 132 ( 0.00%) Err(e) => Err(FromUtf8Error { bytes: vec, error: e }), . } . } . . /// Converts a slice of bytes to a string, including invalid characters. . /// . /// Strings are made of bytes ([`u8`]), and a slice of bytes . /// ([`&[u8]`][byteslice]) is made of bytes, so this function converts . /// between the two. Not all byte slices are valid strings, however: strings -- line 510 ---------------------------------------- -- line 550 ---------------------------------------- . /// let input = b"Hello \xF0\x90\x80World"; . /// let output = String::from_utf8_lossy(input); . /// . /// assert_eq!("Hello �World", output); . /// ``` . #[must_use] . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 225 ( 0.00%) pub fn from_utf8_lossy(v: &[u8]) -> Cow<'_, str> { 250 ( 0.00%) let mut iter = lossy::Utf8Lossy::from_bytes(v).chunks(); . 125 ( 0.00%) let first_valid = if let Some(chunk) = iter.next() { . let lossy::Utf8LossyChunk { valid, broken } = chunk; 25 ( 0.00%) if broken.is_empty() { . debug_assert_eq!(valid.len(), v.len()); 25 ( 0.00%) return Cow::Borrowed(valid); . } . valid . } else { . return Cow::Borrowed(""); . }; . . const REPLACEMENT: &str = "\u{FFFD}"; . -- line 573 ---------------------------------------- -- line 578 ---------------------------------------- . for lossy::Utf8LossyChunk { valid, broken } in iter { . res.push_str(valid); . if !broken.is_empty() { . res.push_str(REPLACEMENT); . } . } . . Cow::Owned(res) 200 ( 0.00%) } . . /// Decode a UTF-16–encoded vector `v` into a `String`, returning [`Err`] . /// if `v` contains any invalid data. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 594 ---------------------------------------- -- line 762 ---------------------------------------- . /// }; . /// . /// assert_eq!("💖", sparkle_heart); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . pub unsafe fn from_utf8_unchecked(bytes: Vec) -> String { 982 ( 0.00%) String { vec: bytes } . } . . /// Converts a `String` into a byte vector. . /// . /// This consumes the `String`, so we do not need to copy its contents. . /// . /// # Examples . /// -- line 778 ---------------------------------------- -- line 1138 ---------------------------------------- . /// s.push('2'); . /// s.push('3'); . /// . /// assert_eq!("abc123", s); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] 3,263,814 ( 0.07%) pub fn push(&mut self, ch: char) { . match ch.len_utf8() { . 1 => self.vec.push(ch as u8), 2 ( 0.00%) _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()), . } 2,719,845 ( 0.06%) } . . /// Returns a byte slice of this `String`'s contents. . /// . /// The inverse of this method is [`from_utf8`]. . /// . /// [`from_utf8`]: String::from_utf8 . /// . /// # Examples -- line 1159 ---------------------------------------- -- line 1543 ---------------------------------------- . /// let fancy_f = String::from("ƒoo"); . /// assert_eq!(fancy_f.len(), 4); . /// assert_eq!(fancy_f.chars().count(), 3); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . 
pub fn len(&self) -> usize { 31 ( 0.00%) self.vec.len() . } . . /// Returns `true` if this `String` has a length of zero, and `false` otherwise. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 1559 ---------------------------------------- -- line 1563 ---------------------------------------- . /// . /// v.push('a'); . /// assert!(!v.is_empty()); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn is_empty(&self) -> bool { 10,569 ( 0.00%) self.len() == 0 . } . . /// Splits the string into two at the given byte index. . /// . /// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and . /// the returned `String` contains bytes `[at, len)`. `at` must be on the . /// boundary of a UTF-8 code point. . /// -- line 1579 ---------------------------------------- -- line 1845 ---------------------------------------- . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Display::fmt("invalid utf-16: lone surrogate found", f) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Clone for String { 1,980 ( 0.00%) fn clone(&self) -> Self { 990 ( 0.00%) String { vec: self.vec.clone() } 2,310 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . self.vec.clone_from(&source.vec); . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl FromIterator for String { 45,540 ( 0.00%) fn from_iter>(iter: I) -> String { . let mut buf = String::new(); . buf.extend(iter); . buf 54,648 ( 0.00%) } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "string_from_iter_by_ref", since = "1.17.0")] . impl<'a> FromIterator<&'a char> for String { . fn from_iter>(iter: I) -> String { . let mut buf = String::new(); . buf.extend(iter); -- line 1877 ---------------------------------------- -- line 1937 ---------------------------------------- . } . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Extend for String { . fn extend>(&mut self, iter: I) { 45,540 ( 0.00%) let iterator = iter.into_iter(); . let (lower_bound, _) = iterator.size_hint(); . self.reserve(lower_bound); 1,603,479 ( 0.04%) iterator.for_each(move |c| self.push(c)); . } . . #[inline] . fn extend_one(&mut self, c: char) { . self.push(c); . } . . #[inline] -- line 1956 ---------------------------------------- -- line 2128 ---------------------------------------- . fn default() -> String { . String::new() . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl fmt::Display for String { . #[inline] 785 ( 0.00%) fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 1,596 ( 0.00%) fmt::Display::fmt(&**self, f) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl fmt::Debug for String { . #[inline] . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Debug::fmt(&**self, f) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl hash::Hash for String { . #[inline] . fn hash(&self, hasher: &mut H) { 1,253 ( 0.00%) (**self).hash(hasher) . } . } . . /// Implements the `+` operator for concatenating two strings. . /// . /// This consumes the `String` on the left-hand side and re-uses its buffer (growing it if . /// necessary). This is done to avoid allocating a new `String` and copying the entire contents on . 
/// every operation, which would lead to *O*(*n*^2) running time when building an *n*-byte string by -- line 2161 ---------------------------------------- -- line 2194 ---------------------------------------- . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Add<&str> for String { . type Output = String; . . #[inline] . fn add(mut self, other: &str) -> String { . self.push_str(other); 2 ( 0.00%) self . } . } . . /// Implements the `+=` operator for appending to a `String`. . /// . /// This has the same behavior as the [`push_str`][String::push_str] method. . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "stringaddassign", since = "1.12.0")] -- line 2210 ---------------------------------------- -- line 2233 ---------------------------------------- . &self[..][index] . } . } . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::Index> for String { . type Output = str; . . #[inline] 3,891 ( 0.00%) fn index(&self, index: ops::RangeFrom) -> &str { . &self[..][index] 3,891 ( 0.00%) } . } . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::Index for String { . type Output = str; . . #[inline] . fn index(&self, _index: ops::RangeFull) -> &str { . unsafe { str::from_utf8_unchecked(&self.vec) } -- line 2251 ---------------------------------------- -- line 2388 ---------------------------------------- . impl ToString for T { . // A common guideline is to not inline generic functions. However, . // removing `#[inline]` from this method causes non-negligible regressions. . // See , the last attempt . // to try to remove it. . #[inline] . default fn to_string(&self) -> String { . let mut buf = String::new(); 150,715 ( 0.00%) let mut formatter = core::fmt::Formatter::new(&mut buf); . // Bypass format_args!() to avoid write_str with zero-length strs 37,754 ( 0.00%) fmt::Display::fmt(self, &mut formatter) . .expect("a Display implementation returned an error unexpectedly"); . buf . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "char_to_string_specialization", since = "1.46.0")] . impl ToString for char { -- line 2406 ---------------------------------------- -- line 2620 ---------------------------------------- . /// ``` . /// # use std::borrow::Cow; . /// assert_eq!(Cow::from("eggplant"), Cow::Borrowed("eggplant")); . /// ``` . /// . /// [`Borrowed`]: crate::borrow::Cow::Borrowed "borrow::Cow::Borrowed" . #[inline] . fn from(s: &'a str) -> Cow<'a, str> { 3,190 ( 0.00%) Cow::Borrowed(s) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a> From for Cow<'a, str> { . /// Converts a [`String`] into an [`Owned`] variant. . /// No heap allocation is performed, and the string -- line 2636 ---------------------------------------- -- line 2643 ---------------------------------------- . /// let s = "eggplant".to_string(); . /// let s2 = "eggplant".to_string(); . /// assert_eq!(Cow::from(s), Cow::<'static, str>::Owned(s2)); . /// ``` . /// . /// [`Owned`]: crate::borrow::Cow::Owned "borrow::Cow::Owned" . #[inline] . fn from(s: String) -> Cow<'a, str> { 630 ( 0.00%) Cow::Owned(s) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "cow_from_string_ref", since = "1.28.0")] . impl<'a> From<&'a String> for Cow<'a, str> { . /// Converts a [`String`] reference into a [`Borrowed`] variant. . /// No heap allocation is performed, and the string -- line 2659 ---------------------------------------- -- line 2709 ---------------------------------------- . /// ``` . 
/// let s1 = String::from("hello world"); . /// let v1 = Vec::from(s1); . /// . /// for b in v1 { . /// println!("{}", b); . /// } . /// ``` 46 ( 0.00%) fn from(string: String) -> Vec { 184 ( 0.00%) string.into_bytes() 46 ( 0.00%) } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl fmt::Write for String { . #[inline] 488,784 ( 0.01%) fn write_str(&mut self, s: &str) -> fmt::Result { . self.push_str(s); . Ok(()) 427,686 ( 0.01%) } . . #[inline] 12,030 ( 0.00%) fn write_char(&mut self, c: char) -> fmt::Result { 8 ( 0.00%) self.push(c); . Ok(()) 12,030 ( 0.00%) } . } . . /// A draining iterator for `String`. . /// . /// This struct is created by the [`drain`] method on [`String`]. See its . /// documentation for more. . /// . /// [`drain`]: String::drain -- line 2743 ---------------------------------------- 75,452 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs -------------------------------------------------------------------------------- Ir -- line 57 ---------------------------------------- . /// # Examples . /// . /// Basic usage: . /// . /// ``` . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::from_str_radix(\"A\", 16), Ok(10));")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 8 ( 0.00%) pub fn from_str_radix(src: &str, radix: u32) -> Result { 4 ( 0.00%) from_str_radix(src, radix) 12 ( 0.00%) } . . /// Returns the number of ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// . /// ``` -- line 75 ---------------------------------------- -- line 80 ---------------------------------------- . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[doc(alias = "popcount")] . #[doc(alias = "popcnt")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn count_ones(self) -> u32 { 40,845 ( 0.00%) intrinsics::ctpop(self as $ActualT) as u32 . } . . /// Returns the number of zeros in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 96 ---------------------------------------- -- line 118 ---------------------------------------- . /// assert_eq!(n.leading_zeros(), 2); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn leading_zeros(self) -> u32 { 237,955 ( 0.01%) intrinsics::ctlz(self as $ActualT) as u32 . } . . /// Returns the number of trailing zeros in the binary representation . /// of `self`. . /// . /// # Examples . /// . /// Basic usage: -- line 134 ---------------------------------------- -- line 139 ---------------------------------------- . /// assert_eq!(n.trailing_zeros(), 3); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn trailing_zeros(self) -> u32 { 767,880 ( 0.02%) intrinsics::cttz(self) as u32 . } . . /// Returns the number of leading ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . 
/// -- line 155 ---------------------------------------- -- line 204 ---------------------------------------- . #[doc = concat!("assert_eq!(n.rotate_left(", $rot, "), m);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn rotate_left(self, n: u32) -> Self { 13,549,359 ( 0.30%) intrinsics::rotate_left(self, n as $SelfT) . } . . /// Shifts the bits to the right by a specified amount, `n`, . /// wrapping the truncated bits to the beginning of the resulting . /// integer. . /// . /// Please note this isn't the same operation as the `>>` shifting operator! . /// -- line 220 ---------------------------------------- -- line 456 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_add`. 889,455 ( 0.02%) unsafe { intrinsics::unchecked_add(self, rhs) } . } . . /// Checked addition with a signed integer. Computes `self + rhs`, . /// returning `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 472 ---------------------------------------- -- line 525 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_sub`. 73,845 ( 0.00%) unsafe { intrinsics::unchecked_sub(self, rhs) } . } . . /// Checked integer multiplication. Computes `self * rhs`, returning . /// `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 541 ---------------------------------------- -- line 596 ---------------------------------------- . without modifying the original"] . #[inline] . pub const fn checked_div(self, rhs: Self) -> Option { . if unlikely!(rhs == 0) { . None . } else { . // SAFETY: div by zero has been checked above and unsigned types have no other . // failure modes for division 120 ( 0.00%) Some(unsafe { intrinsics::unchecked_div(self, rhs) }) . } . } . . /// Checked Euclidean division. Computes `self.div_euclid(rhs)`, returning `None` . /// if `rhs == 0`. . /// . /// # Examples . /// -- line 612 ---------------------------------------- -- line 1035 ---------------------------------------- . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.saturating_add(127), ", stringify!($SelfT), "::MAX);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_add(self, rhs: Self) -> Self { 266,727 ( 0.01%) intrinsics::saturating_add(self, rhs) . } . . /// Saturating addition with a signed integer. Computes `self + rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . 
/// Basic usage: -- line 1051 ---------------------------------------- -- line 1084 ---------------------------------------- . #[doc = concat!("assert_eq!(13", stringify!($SelfT), ".saturating_sub(127), 0);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_sub(self, rhs: Self) -> Self { 618,273 ( 0.01%) intrinsics::saturating_sub(self, rhs) . } . . /// Saturating integer multiplication. Computes `self * rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1100 ---------------------------------------- -- line 1175 ---------------------------------------- . #[doc = concat!("assert_eq!(200", stringify!($SelfT), ".wrapping_add(", stringify!($SelfT), "::MAX), 199);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_add(self, rhs: Self) -> Self { 8,364,305 ( 0.18%) intrinsics::wrapping_add(self, rhs) . } . . /// Wrapping (modular) addition with a signed integer. Computes . /// `self + rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1191 ---------------------------------------- -- line 1217 ---------------------------------------- . #[doc = concat!("assert_eq!(100", stringify!($SelfT), ".wrapping_sub(", stringify!($SelfT), "::MAX), 101);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_sub(self, rhs: Self) -> Self { 3,103,581 ( 0.07%) intrinsics::wrapping_sub(self, rhs) . } . . /// Wrapping (modular) multiplication. Computes `self * . /// rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1233 ---------------------------------------- -- line 1240 ---------------------------------------- . /// assert_eq!(25u8.wrapping_mul(12), 44); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_mul(self, rhs: Self) -> Self { 7,469,273 ( 0.16%) intrinsics::wrapping_mul(self, rhs) . } . . /// Wrapping (modular) division. Computes `self / rhs`. . /// Wrapped division on unsigned types is just normal division. . /// There's no way wrapping could ever happen. . /// This function exists, so that all operations . /// are accounted for in the wrapping operations. . /// -- line 1256 ---------------------------------------- -- line 1491 ---------------------------------------- . #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_add(2), (7, false));")] . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.overflowing_add(1), (0, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . 
#[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] 3 ( 0.00%) pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) { 2,942,076 ( 0.06%) let (a, b) = intrinsics::add_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) 6 ( 0.00%) } . . /// Calculates `self + rhs + carry` without the ability to overflow. . /// . /// Performs "ternary addition" which takes in an extra bit to add, and may return an . /// additional bit of overflow. This allows for chaining together multiple additions . /// to create "big integers" which represent larger values. . /// . #[doc = concat!("This can be thought of as a ", stringify!($BITS), "-bit \"full adder\", in the electronics sense.")] -- line 1510 ---------------------------------------- -- line 1588 ---------------------------------------- . #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".overflowing_sub(1), (", stringify!($SelfT), "::MAX, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) { 246,930 ( 0.01%) let (a, b) = intrinsics::sub_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) . } . . /// Calculates `self - rhs - borrow` without the ability to overflow. . /// . /// Performs "ternary subtraction" which takes in an extra bit to subtract, and may return . /// an additional bit of overflow. This allows for chaining together multiple subtractions . /// to create "big integers" which represent larger values. -- line 1604 ---------------------------------------- -- line 1674 ---------------------------------------- . /// assert_eq!(1_000_000_000u32.overflowing_mul(10), (1410065408, true)); . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn overflowing_mul(self, rhs: Self) -> (Self, bool) { 1,512,026 ( 0.03%) let (a, b) = intrinsics::mul_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) . } . . /// Calculates the divisor when `self` is divided by `rhs`. . /// . /// Returns a tuple of the divisor along with a boolean indicating . /// whether an arithmetic overflow would occur. Note that for unsigned . /// integers overflow never occurs, so the second value is always -- line 1690 ---------------------------------------- -- line 2132 ---------------------------------------- . #[doc = concat!("assert!(16", stringify!($SelfT), ".is_power_of_two());")] . #[doc = concat!("assert!(!10", stringify!($SelfT), ".is_power_of_two());")] . /// ``` . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_is_power_of_two", since = "1.32.0")] . #[inline(always)] . pub const fn is_power_of_two(self) -> bool { 84 ( 0.00%) self.count_ones() == 1 . } . . // Returns one less than next power of two. . // (For 8u8 next power of two is 8u8 and for 6u8 it is 8u8) . // . // 8u8.one_less_than_next_power_of_two() == 7 . // 6u8.one_less_than_next_power_of_two() == 7 . // . // This method cannot overflow, as in the `next_power_of_two` . // overflow cases it instead ends up returning the maximum value . // of the type, and can return 0 for 0. 
. #[inline] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . const fn one_less_than_next_power_of_two(self) -> Self { 22,736 ( 0.00%) if self <= 1 { return 0; } . 15,146 ( 0.00%) let p = self - 1; . // SAFETY: Because `p > 0`, it cannot consist entirely of leading zeros. . // That means the shift is always in-bounds, and some processors . // (such as intel pre-haswell) have more efficient ctlz . // intrinsics when the argument is non-zero. 45,405 ( 0.00%) let z = unsafe { intrinsics::ctlz_nonzero(p) }; 15,155 ( 0.00%) <$SelfT>::MAX >> z . } . . /// Returns the smallest power of two greater than or equal to `self`. . /// . /// When return value overflows (i.e., `self > (1 << (N-1))` for type . /// `uN`), it panics in debug mode and the return value is wrapped to 0 in . /// release mode (the only situation in which method can return 0). . /// -- line 2171 ---------------------------------------- -- line 2179 ---------------------------------------- . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline] . #[rustc_inherit_overflow_checks] . pub const fn next_power_of_two(self) -> Self { 7,544 ( 0.00%) self.one_less_than_next_power_of_two() + 1 . } . . /// Returns the smallest power of two greater than or equal to `n`. If . /// the next power of two is greater than the type's maximum value, . /// `None` is returned, otherwise the power of two is wrapped in `Some`. . /// . /// # Examples . /// -- line 2195 ---------------------------------------- 47,959 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs -------------------------------------------------------------------------------- Ir -- line 111 ---------------------------------------- . const EMPTY: u8 = 0b1111_1111; . . /// Control byte value for a deleted bucket. . const DELETED: u8 = 0b1000_0000; . . /// Checks whether a control byte represents a full bucket (top bit is clear). . #[inline] . fn is_full(ctrl: u8) -> bool { 1,437,491 ( 0.03%) ctrl & 0x80 == 0 . } . . /// Checks whether a control byte represents a special value (top bit is set). . #[inline] . fn is_special(ctrl: u8) -> bool { . ctrl & 0x80 != 0 . } . . /// Checks whether a special control value is EMPTY (just check 1 bit). . #[inline] . fn special_is_empty(ctrl: u8) -> bool { . debug_assert!(is_special(ctrl)); 54,165 ( 0.00%) ctrl & 0x01 != 0 . } . . /// Primary hash function, used to select the initial bucket to probe from. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h1(hash: u64) -> usize { . // On 32-bit platforms we simply ignore the higher hash bits. . hash as usize -- line 140 ---------------------------------------- -- line 143 ---------------------------------------- . /// Secondary hash function, saved in the low 7 bits of the control byte. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h2(hash: u64) -> u8 { . // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit . // value, some hash functions (such as FxHash) produce a usize result . // instead, which means that the top 32 bits are 0 on 32-bit platforms. . let hash_len = usize::min(mem::size_of::(), mem::size_of::()); 31,947,858 ( 0.70%) let top7 = hash >> (hash_len * 8 - 7); . 
(top7 & 0x7f) as u8 // truncation . } . . /// Probe sequence based on triangular numbers, which is guaranteed (since our . /// table size is a power of two) to visit every group of elements exactly once. . /// . /// A triangular probe has us jump by 1 more group every time. So first we . /// jump by 1 group (meaning we just continue our linear scan), then 2 groups -- line 159 ---------------------------------------- -- line 170 ---------------------------------------- . #[inline] . fn move_next(&mut self, bucket_mask: usize) { . // We should have found an empty bucket by now and ended the probe. . debug_assert!( . self.stride <= bucket_mask, . "Went past end of probe sequence" . ); . 515,620 ( 0.01%) self.stride += Group::WIDTH; 515,620 ( 0.01%) self.pos += self.stride; 446,935 ( 0.01%) self.pos &= bucket_mask; . } . } . . /// Returns the number of buckets needed to hold the given number of items, . /// taking the maximum load factor into account. . /// . /// Returns `None` if an overflow occurs. . // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258 . #[cfg_attr(target_os = "emscripten", inline(never))] . #[cfg_attr(not(target_os = "emscripten"), inline)] . fn capacity_to_buckets(cap: usize) -> Option { . debug_assert_ne!(cap, 0); . . // For small tables we require at least 1 empty bucket so that lookups are . // guaranteed to terminate if an element doesn't exist in the table. 69,978 ( 0.00%) if cap < 8 { . // We don't bother with a table size of 2 buckets since that can only . // hold a single element. Instead we skip directly to a 4 bucket table . // which can hold 3 elements. 162,520 ( 0.00%) return Some(if cap < 4 { 4 } else { 8 }); . } . . // Otherwise require 1/8 buckets to be empty (87.5% load) . // . // Be careful when modifying this, calculate_layout relies on the . // overflow check here. 14,910 ( 0.00%) let adjusted_cap = cap.checked_mul(8)? / 7; . . // Any overflows will have been caught by the checked_mul. Also, any . // rounding errors from the division above will be cleaned up by . // next_power_of_two (which can't overflow because of the previous division). . Some(adjusted_cap.next_power_of_two()) . } . . /// Returns the maximum effective capacity for the given bucket mask, taking . /// the maximum load factor into account. . #[inline] . fn bucket_mask_to_capacity(bucket_mask: usize) -> usize { 170,456 ( 0.00%) if bucket_mask < 8 { . // For tables with 1/2/4/8 buckets, we always reserve one empty slot. . // Keep in mind that the bucket mask is one less than the bucket count. . bucket_mask . } else { . // For larger tables we reserve 12.5% of the slots as empty. 26,344 ( 0.00%) ((bucket_mask + 1) / 8) * 7 . } . } . . /// Helper which allows the max calculation for ctrl_align to be statically computed for each T . /// while keeping the rest of `calculate_layout_for` independent of `T` . #[derive(Copy, Clone)] . struct TableLayout { . size: usize, -- line 233 ---------------------------------------- -- line 246 ---------------------------------------- . . #[inline] . fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> { . debug_assert!(buckets.is_power_of_two()); . . let TableLayout { size, ctrl_align } = self; . // Manual layout calculation since Layout methods are not yet stable. . let ctrl_offset = 147,211 ( 0.00%) size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1); 186,080 ( 0.00%) let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?; . . Some(( . 
unsafe { Layout::from_size_align_unchecked(len, ctrl_align) }, . ctrl_offset, . )) . } . } . -- line 263 ---------------------------------------- -- line 337 ---------------------------------------- . } . } . #[cfg_attr(feature = "inline-more", inline)] . pub unsafe fn drop(&self) { . self.as_ptr().drop_in_place(); . } . #[inline] . pub unsafe fn read(&self) -> T { 18,222 ( 0.00%) self.as_ptr().read() . } . #[inline] . pub unsafe fn write(&self, val: T) { . self.as_ptr().write(val); . } . #[inline] . pub unsafe fn as_ref<'a>(&self) -> &'a T { . &*self.as_ptr() -- line 353 ---------------------------------------- -- line 422 ---------------------------------------- . /// Creates a new empty hash table without allocating any memory, using the . /// given allocator. . /// . /// In effect this returns a table with exactly 1 bucket. However we can . /// leave the data pointer dangling since that bucket is never written to . /// due to our load factor forcing us to always have at least 1 free bucket. . #[inline] . pub fn new_in(alloc: A) -> Self { 41,124 ( 0.00%) Self { . table: RawTableInner::new_in(alloc), . marker: PhantomData, . } . } . . /// Allocates a new hash table with the given number of buckets. . /// . /// The control bytes are left uninitialized. -- line 438 ---------------------------------------- -- line 440 ---------------------------------------- . unsafe fn new_uninitialized( . alloc: A, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . Ok(Self { 10 ( 0.00%) table: RawTableInner::new_uninitialized( . alloc, . TableLayout::new::(), . buckets, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 456 ---------------------------------------- -- line 458 ---------------------------------------- . /// Attempts to allocate a new hash table with at least enough capacity . /// for inserting the given number of elements without reallocating. . fn fallible_with_capacity( . alloc: A, . capacity: usize, . fallibility: Fallibility, . ) -> Result { . Ok(Self { 19,503 ( 0.00%) table: RawTableInner::fallible_with_capacity( . alloc, . TableLayout::new::(), . capacity, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 474 ---------------------------------------- -- line 527 ---------------------------------------- . debug_assert_ne!(self.table.bucket_mask, 0); . debug_assert!(index < self.buckets()); . Bucket::from_base_index(self.data_end(), index) . } . . /// Erases an element from the table without dropping it. . #[cfg_attr(feature = "inline-more", inline)] . #[deprecated(since = "0.8.1", note = "use erase or remove instead")] 3,155 ( 0.00%) pub unsafe fn erase_no_drop(&mut self, item: &Bucket) { 3,155 ( 0.00%) let index = self.bucket_index(item); . self.table.erase(index); 6,310 ( 0.00%) } . . /// Erases an element from the table, dropping it in place. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn erase(&mut self, item: Bucket) { . // Erase the element from the table first since drop might panic. 2,244 ( 0.00%) self.erase_no_drop(&item); . item.drop(); . } . . /// Finds and erases an element from the table, dropping it in place. . /// Returns true if an element was found. . #[cfg(feature = "raw")] . #[cfg_attr(feature = "inline-more", inline)] . pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool { -- line 554 ---------------------------------------- -- line 563 ---------------------------------------- . } . 
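The h1/h2 split and the triangular probe sequence annotated a little earlier are the core of the lookup loop that dominates this part of the profile. A standalone sketch of that scheme (illustration only, assuming a 64-bit platform and a 16-byte group as with SSE2; this is not hashbrown's actual API):

    const GROUP_WIDTH: usize = 16;

    // h1 picks the starting group (low bits of the hash); h2 is the 7-bit tag
    // stored in the control byte of a full slot.
    fn h1(hash: u64) -> usize { hash as usize }
    fn h2(hash: u64) -> u8 { ((hash >> (64 - 7)) & 0x7f) as u8 }

    // Group start positions probed for `hash`; with a power-of-two bucket count
    // the growing (triangular) stride visits every group exactly once per cycle.
    fn probe_positions(hash: u64, bucket_mask: usize) -> impl Iterator<Item = usize> {
        let mut pos = h1(hash) & bucket_mask;
        let mut stride = 0usize;
        std::iter::from_fn(move || {
            let current = pos;
            stride += GROUP_WIDTH;
            pos = pos.wrapping_add(stride) & bucket_mask;
            Some(current)
        })
    }

For a 64-bucket table and h1 == 0 the first four positions are 0, 16, 48, 32, i.e. all four groups are covered before the sequence repeats.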
} . . /// Removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn remove(&mut self, item: Bucket) -> T { 4,066 ( 0.00%) self.erase_no_drop(&item); 36 ( 0.00%) item.read() . } . . /// Finds and removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] 487,418 ( 0.01%) pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option { . // Avoid `Option::map` because it bloats LLVM IR. 18,502 ( 0.00%) match self.find(hash, eq) { 10,191 ( 0.00%) Some(bucket) => Some(unsafe { self.remove(bucket) }), 220,262 ( 0.00%) None => None, . } 755,472 ( 0.02%) } . . /// Marks all table buckets as empty without dropping their contents. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear_no_drop(&mut self) { . self.table.clear_no_drop(); . } . . /// Removes all elements from the table without freeing the backing memory. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear(&mut self) { . // Ensure that the table is reset even if one of the drops panic . let mut self_ = guard(self, |self_| self_.clear_no_drop()); . unsafe { 1 ( 0.00%) self_.drop_elements(); . } . } . 7 ( 0.00%) unsafe fn drop_elements(&mut self) { 20,436 ( 0.00%) if mem::needs_drop::() && !self.is_empty() { . for item in self.iter() { . item.drop(); . } . } 8 ( 0.00%) } . . /// Shrinks the table to fit `max(self.len(), min_size)` elements. . #[cfg_attr(feature = "inline-more", inline)] . pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) { . // Calculate the minimal number of elements that we need to reserve . // space for. . let min_size = usize::max(self.table.items, min_size); . if min_size == 0 { -- line 615 ---------------------------------------- -- line 642 ---------------------------------------- . } . } . } . . /// Ensures that at least `additional` items can be inserted into the table . /// without reallocation. . #[cfg_attr(feature = "inline-more", inline)] . pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) { 160,188 ( 0.00%) if additional > self.table.growth_left { . // Avoid `Result::unwrap_or_else` because it bloats LLVM IR. 115,131 ( 0.00%) if self . .reserve_rehash(additional, hasher, Fallibility::Infallible) . .is_err() . { . unsafe { hint::unreachable_unchecked() } . } . } . } . -- line 660 ---------------------------------------- -- line 671 ---------------------------------------- . } else { . Ok(()) . } . } . . /// Out-of-line slow path for `reserve` and `try_reserve`. . #[cold] . #[inline(never)] 289,473 ( 0.01%) fn reserve_rehash( . &mut self, . additional: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, . ) -> Result<(), TryReserveError> { . unsafe { . self.table.reserve_rehash_inner( . additional, -- line 687 ---------------------------------------- -- line 690 ---------------------------------------- . TableLayout::new::(), . if mem::needs_drop::() { . Some(mem::transmute(ptr::drop_in_place:: as unsafe fn(*mut T))) . } else { . None . }, . ) . } 247,504 ( 0.01%) } . . /// Allocates a new table of a different size and moves the contents of the . /// current table into it. . fn resize( . &mut self, . capacity: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, -- line 706 ---------------------------------------- -- line 714 ---------------------------------------- . ) . } . } . . /// Inserts a new element into the table, and returns its raw bucket. . 
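The capacity_to_buckets / bucket_mask_to_capacity pair annotated above encodes the table's 87.5% maximum load factor. A worked example of that arithmetic (plain Rust, numbers only, illustration of the helpers above):

    fn load_factor_example() {
        let requested: usize = 100;
        // keep 1/8 of the buckets empty: 100 * 8 / 7 = 114, rounded up to a power of two
        let buckets = (requested * 8 / 7).next_power_of_two();
        assert_eq!(buckets, 128);
        // going back the other way: 7/8 of 128 buckets are usable before the next resize
        assert_eq!((buckets / 8) * 7, 112);
    }

`reserve` above only takes the slow `reserve_rehash` path once that usable count (tracked as `growth_left`) is exhausted.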
/// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 1,954,336 ( 0.04%) pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket { . unsafe { . let mut index = self.table.find_insert_slot(hash); . . // We can avoid growing the table once we have reached our load . // factor if we are replacing a tombstone. This works since the . // number of EMPTY slots does not change in this case. 791 ( 0.00%) let old_ctrl = *self.table.ctrl(index); 1,439,202 ( 0.03%) if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) { . self.reserve(1, hasher); . index = self.table.find_insert_slot(hash); . } . . self.table.record_item_insert_at(index, old_ctrl, hash); . . let bucket = self.bucket(index); 4 ( 0.00%) bucket.write(value); . bucket . } 1,452,913 ( 0.03%) } . . /// Attempts to insert a new element without growing the table and return its raw bucket. . /// . /// Returns an `Err` containing the given element if inserting it would require growing the . /// table. . /// . /// This does not check if the given element already exists in the table. . #[cfg(feature = "raw")] -- line 749 ---------------------------------------- -- line 760 ---------------------------------------- . } . } . } . . /// Inserts a new element into the table, and returns a mutable reference to it. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 185,600 ( 0.00%) pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T { 4 ( 0.00%) unsafe { self.insert(hash, value, hasher).as_mut() } 139,200 ( 0.00%) } . . /// Inserts a new element into the table, without growing the table. . /// . /// There must be enough space in the table to insert the new element. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] . #[cfg(any(feature = "raw", feature = "rustc-internal-api"))] 5,494 ( 0.00%) pub unsafe fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket { 139,551 ( 0.00%) let (index, old_ctrl) = self.table.prepare_insert_slot(hash); 16,078 ( 0.00%) let bucket = self.table.bucket(index); . . // If we are replacing a DELETED entry then we don't need to update . // the load counter. 307,792 ( 0.01%) self.table.growth_left -= special_is_empty(old_ctrl) as usize; . . bucket.write(value); 253,589 ( 0.01%) self.table.items += 1; . bucket 10,970 ( 0.00%) } . . /// Temporary removes a bucket, applying the given function to the removed . /// element and optionally put back the returned value in the same bucket. . /// . /// Returns `true` if the bucket still contains an element . /// . /// This does not check if the given bucket is actually occupied. . #[cfg_attr(feature = "inline-more", inline)] -- line 798 ---------------------------------------- -- line 813 ---------------------------------------- . true . } else { . false . } . } . . /// Searches for an element in the table. . #[inline] 9,778 ( 0.00%) pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option> { 36,262 ( 0.00%) let result = self.table.find_inner(hash, &mut |index| unsafe { 51,083 ( 0.00%) eq(self.bucket(index).as_ref()) 21,413 ( 0.00%) }); . . // Avoid `Option::map` because it bloats LLVM IR. . match result { 4,843 ( 0.00%) Some(index) => Some(unsafe { self.bucket(index) }), . None => None, . } 11,096 ( 0.00%) } . . /// Gets a reference to an element in the table. . 
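The control-byte states introduced at the top of this file, together with the `special_is_empty` check, explain the tombstone shortcut in `insert` above: writing over a DELETED slot does not change the number of EMPTY slots, so it never forces a resize. A compact standalone restatement (illustration only):

    const EMPTY: u8 = 0b1111_1111;   // never occupied since the last clear/rehash
    const DELETED: u8 = 0b1000_0000; // tombstone: keeps probe chains intact
    fn is_full(ctrl: u8) -> bool { ctrl & 0x80 == 0 }          // full slots store the 7-bit h2 tag
    fn special_is_empty(ctrl: u8) -> bool { ctrl & 0x01 != 0 } // distinguishes EMPTY from DELETED

    // growth_left counts EMPTY slots that may still be filled under the load factor.
    fn must_grow_before_insert(growth_left: usize, old_ctrl: u8) -> bool {
        growth_left == 0 && special_is_empty(old_ctrl)
    }

    fn demo() {
        assert!(is_full(0b0101_1010));                 // a full slot carrying h2 = 0x5a
        assert!(!must_grow_before_insert(0, DELETED)); // reusing a tombstone: no resize needed
        assert!(must_grow_before_insert(0, EMPTY));    // filling the last EMPTY slot: resize first
    }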
#[inline] . pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> { . // Avoid `Option::map` because it bloats LLVM IR. 36,159 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_ref() }), . None => None, . } . } . . /// Gets a mutable reference to an element in the table. . #[inline] 392 ( 0.00%) pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> { . // Avoid `Option::map` because it bloats LLVM IR. 4,709 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_mut() }), . None => None, . } 448 ( 0.00%) } . . /// Attempts to get mutable references to `N` entries in the table at once. . /// . /// Returns an array of length `N` with the results of each query. . /// . /// At most one mutable reference will be returned to any entry. `None` will be returned if any . /// of the hashes are duplicates. `None` will be returned if the hash is not found. . /// -- line 859 ---------------------------------------- -- line 920 ---------------------------------------- . #[inline] . pub fn len(&self) -> usize { . self.table.items . } . . /// Returns `true` if the table contains no elements. . #[inline] . pub fn is_empty(&self) -> bool { 1,565,467 ( 0.03%) self.len() == 0 . } . . /// Returns the number of buckets in the table. . #[inline] . pub fn buckets(&self) -> usize { . self.table.bucket_mask + 1 . } . -- line 936 ---------------------------------------- -- line 938 ---------------------------------------- . /// the caller to ensure that the `RawTable` outlives the `RawIter`. . /// Because we cannot make the `next` method unsafe on the `RawIter` . /// struct, we have to make the `iter` method unsafe. . #[inline] . pub unsafe fn iter(&self) -> RawIter { . let data = Bucket::from_base_index(self.data_end(), 0); . RawIter { . iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()), 156,071 ( 0.00%) items: self.table.items, . } . } . . /// Returns an iterator over occupied buckets that could match a given hash. . /// . /// `RawTable` only stores 7 bits of the hash value, so this iterator may . /// return items that have a hash value different than the one provided. You . /// should always validate the returned values before using them. -- line 954 ---------------------------------------- -- line 995 ---------------------------------------- . /// Iteration starts at the provided iterator's current location. . /// . /// It is up to the caller to ensure that the iterator is valid for this . /// `RawTable` and covers all items that remain in the table. . pub unsafe fn into_iter_from(self, iter: RawIter) -> RawIntoIter { . debug_assert_eq!(iter.len(), self.len()); . . let alloc = self.table.alloc.clone(); 1,320 ( 0.00%) let allocation = self.into_allocation(); 990 ( 0.00%) RawIntoIter { 1,650 ( 0.00%) iter, . allocation, . marker: PhantomData, . alloc, . } . } . . /// Converts the table into a raw allocation. The contents of the table . /// should be dropped using a `RawIter` before freeing the allocation. . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn into_allocation(self) -> Option<(NonNull, Layout)> { 595 ( 0.00%) let alloc = if self.table.is_empty_singleton() { . None . } else { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match calculate_layout::(self.table.buckets()) { . Some(lco) => lco, . None => unsafe { hint::unreachable_unchecked() }, . }; . 
Some(( 109 ( 0.00%) unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) }, . layout, . )) . }; . mem::forget(self); . alloc . } . } . -- line 1033 ---------------------------------------- -- line 1042 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . impl RawTableInner { . #[inline] . const fn new_in(alloc: A) -> Self { 709,191 ( 0.02%) Self { . // Be careful to cast the entire slice to a raw pointer. . ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) }, . bucket_mask: 0, . items: 0, . growth_left: 0, . alloc, . } . } . } . . impl RawTableInner { . #[cfg_attr(feature = "inline-more", inline)] 278,598 ( 0.01%) unsafe fn new_uninitialized( . alloc: A, . table_layout: TableLayout, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . // Avoid `Option::ok_or_else` because it bloats LLVM IR. -- line 1071 ---------------------------------------- -- line 1078 ---------------------------------------- . // exceed `isize::MAX`. We can skip this check on 64-bit systems since . // such allocations will never succeed anyways. . // . // This mirrors what Vec does in the standard library. . if mem::size_of::() < 8 && layout.size() > isize::MAX as usize { . return Err(fallibility.capacity_overflow()); . } . 67,858 ( 0.00%) let ptr: NonNull = match do_alloc(&alloc, layout) { . Ok(block) => block.cast(), . Err(_) => return Err(fallibility.alloc_err(layout)), . }; . . let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset)); 161,190 ( 0.00%) Ok(Self { . ctrl, 61,856 ( 0.00%) bucket_mask: buckets - 1, . items: 0, . growth_left: bucket_mask_to_capacity(buckets - 1), . alloc, . }) 203,642 ( 0.00%) } . . #[inline] 42,281 ( 0.00%) fn fallible_with_capacity( . alloc: A, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result { 10,616 ( 0.00%) if capacity == 0 { 23,364 ( 0.00%) Ok(Self::new_in(alloc)) . } else { . unsafe { . let buckets = . capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?; . 128,666 ( 0.00%) let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?; . result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes()); . 3,738 ( 0.00%) Ok(result) . } . } 42,281 ( 0.00%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element and sets the hash for that slot. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] 48,687 ( 0.00%) unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) { . let index = self.find_insert_slot(hash); 48,687 ( 0.00%) let old_ctrl = *self.ctrl(index); . self.set_ctrl_h2(index, hash); . (index, old_ctrl) 97,374 ( 0.00%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] . fn find_insert_slot(&self, hash: u64) -> usize { . let mut probe_seq = self.probe_seq(hash); . loop { . unsafe { . let group = Group::load(self.ctrl(probe_seq.pos)); 950,002 ( 0.02%) if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() { 2,643,312 ( 0.06%) let result = (probe_seq.pos + bit) & self.bucket_mask; . . // In tables smaller than the group width, trailing control . // bytes outside the range of the table are filled with . // EMPTY entries. These will unfortunately trigger a . // match, but once masked may point to a full bucket that . // is already occupied. 
We detect this situation here and . // perform a second scan starting at the beginning of the . // table. This second scan is guaranteed to find an empty . // slot (due to the load factor) before hitting the trailing . // control bytes (containing EMPTY). 1,236,275 ( 0.03%) if unlikely(is_full(*self.ctrl(result))) { . debug_assert!(self.bucket_mask < Group::WIDTH); . debug_assert_ne!(probe_seq.pos, 0); . return Group::load_aligned(self.ctrl(0)) . .match_empty_or_deleted() . .lowest_set_bit_nonzero(); . } . . return result; -- line 1165 ---------------------------------------- -- line 1171 ---------------------------------------- . . /// Searches for an element in the table. This uses dynamic dispatch to reduce the amount of . /// code generated, but it is eliminated by LLVM optimizations. . #[inline] . fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option { . let h2_hash = h2(hash); . let mut probe_seq = self.probe_seq(hash); . 15,608 ( 0.00%) loop { . let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) }; . 4,662,807 ( 0.10%) for bit in group.match_byte(h2_hash) { 9,172,493 ( 0.20%) let index = (probe_seq.pos + bit) & self.bucket_mask; . 6,059,380 ( 0.13%) if likely(eq(index)) { . return Some(index); . } . } . 1,280,286 ( 0.03%) if likely(group.match_empty().any_bit_set()) { . return None; . } . . probe_seq.move_next(self.bucket_mask); . } . } . . #[allow(clippy::mut_mut)] -- line 1198 ---------------------------------------- -- line 1225 ---------------------------------------- . Bucket::from_base_index(self.data_end(), index) . } . . #[inline] . unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 { . debug_assert_ne!(self.bucket_mask, 0); . debug_assert!(index < self.buckets()); . let base: *mut u8 = self.data_end().as_ptr(); 3,672,528 ( 0.08%) base.sub((index + 1) * size_of) . } . . #[inline] . unsafe fn data_end(&self) -> NonNull { . NonNull::new_unchecked(self.ctrl.as_ptr().cast()) . } . . /// Returns an iterator-like object for a probe sequence on the table. . /// . /// This iterator never terminates, but is guaranteed to visit each bucket . /// group exactly once. The loop using `probe_seq` must terminate upon . /// reaching a group containing an empty bucket. . #[inline] . fn probe_seq(&self, hash: u64) -> ProbeSeq { . ProbeSeq { 23,454,326 ( 0.51%) pos: h1(hash) & self.bucket_mask, . stride: 0, . } . } . . /// Returns the index of a bucket for which a value must be inserted if there is enough rooom . /// in the table, otherwise returns error . #[cfg(feature = "raw")] . #[inline] -- line 1257 ---------------------------------------- -- line 1263 ---------------------------------------- . } else { . self.record_item_insert_at(index, old_ctrl, hash); . Ok(index) . } . } . . #[inline] . unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) { 1,776,245 ( 0.04%) self.growth_left -= special_is_empty(old_ctrl) as usize; . self.set_ctrl_h2(index, hash); 1,420,980 ( 0.03%) self.items += 1; . } . . #[inline] . fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool { . let probe_seq_pos = self.probe_seq(hash).pos; . let probe_index = . |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH; . probe_index(i) == probe_index(new_i) -- line 1281 ---------------------------------------- -- line 1312 ---------------------------------------- . // replicate the buckets at the end of the trailing group. For example . // with 2 buckets and a group size of 4, the control bytes will look . 
// like this: . // . // Real | Replicated . // --------------------------------------------- . // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] | . // --------------------------------------------- 2,605,784 ( 0.06%) let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH; . 868,476 ( 0.02%) *self.ctrl(index) = ctrl; 868,511 ( 0.02%) *self.ctrl(index2) = ctrl; . } . . /// Returns a pointer to a control byte. . #[inline] . unsafe fn ctrl(&self, index: usize) -> *mut u8 { . debug_assert!(index < self.num_ctrl_bytes()); . self.ctrl.as_ptr().add(index) . } . . #[inline] . fn buckets(&self) -> usize { 338,459 ( 0.01%) self.bucket_mask + 1 . } . . #[inline] . fn num_ctrl_bytes(&self) -> usize { 151,225 ( 0.00%) self.bucket_mask + 1 + Group::WIDTH . } . . #[inline] . fn is_empty_singleton(&self) -> bool { 1,185,064 ( 0.03%) self.bucket_mask == 0 . } . . #[allow(clippy::mut_mut)] . #[inline] . unsafe fn prepare_resize( . &self, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result, TryReserveError> { . debug_assert!(self.items <= capacity); . . // Allocate and initialize the new table. 1,251 ( 0.00%) let mut new_table = RawTableInner::fallible_with_capacity( . self.alloc.clone(), . table_layout, . capacity, . fallibility, . )?; 85,726 ( 0.00%) new_table.growth_left -= self.items; . new_table.items = self.items; . . // The hash function may panic, in which case we simply free the new . // table without dropping any elements that may have been copied into . // it. . // . // This guard is also used to free the old table on success, see . // the comment at the bottom of this function. . Ok(guard(new_table, move |self_| { 34,783 ( 0.00%) if !self_.is_empty_singleton() { . self_.free_buckets(table_layout); . } . })) . } . . /// Reserves or rehashes to make room for `additional` more elements. . /// . /// This uses dynamic dispatch to reduce the amount of -- line 1383 ---------------------------------------- -- line 1388 ---------------------------------------- . &mut self, . additional: usize, . hasher: &dyn Fn(&mut Self, usize) -> u64, . fallibility: Fallibility, . layout: TableLayout, . drop: Option, . ) -> Result<(), TryReserveError> { . // Avoid `Option::ok_or_else` because it bloats LLVM IR. 69,567 ( 0.00%) let new_items = match self.items.checked_add(additional) { . Some(new_items) => new_items, . None => return Err(fallibility.capacity_overflow()), . }; 69,566 ( 0.00%) let full_capacity = bucket_mask_to_capacity(self.bucket_mask); 146,161 ( 0.00%) if new_items <= full_capacity / 2 { . // Rehash in-place without re-allocating if we have plenty of spare . // capacity that is locked up due to DELETED entries. . self.rehash_in_place(hasher, layout.size, drop); . Ok(()) . } else { . // Otherwise, conservatively resize to at least the next size up . // to avoid churning deletes into frequent rehashes. . self.resize_inner( 34,783 ( 0.00%) usize::max(new_items, full_capacity + 1), . hasher, . fallibility, . layout, . ) . } . } . . /// Allocates a new table of a different size and moves the contents of the -- line 1418 ---------------------------------------- -- line 1424 ---------------------------------------- . #[inline(always)] . unsafe fn resize_inner( . &mut self, . capacity: usize, . hasher: &dyn Fn(&mut Self, usize) -> u64, . fallibility: Fallibility, . layout: TableLayout, . ) -> Result<(), TryReserveError> { 3,095 ( 0.00%) let mut new_table = self.prepare_resize(layout, capacity, fallibility)?; . . 
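set_ctrl above writes each control byte twice so that the first Group::WIDTH control bytes are mirrored past the end of the table; unaligned group loads near the end can then wrap around without any branching. A small sketch of just the index arithmetic (illustration only, 64 buckets and 16-byte groups assumed):

    fn mirror_index(index: usize, bucket_mask: usize, group_width: usize) -> usize {
        (index.wrapping_sub(group_width) & bucket_mask) + group_width
    }

    fn mirror_examples() {
        assert_eq!(mirror_index(3, 63, 16), 67);  // byte 3 is replicated into the trailing area
        assert_eq!(mirror_index(20, 63, 16), 20); // interior bytes simply map to themselves
    }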
// Copy all elements to the new table. . for i in 0..self.buckets() { 556,461 ( 0.01%) if !is_full(*self.ctrl(i)) { . continue; . } . . // This may panic. . let hash = hasher(self, i); . . // We can use a simpler version of insert() here since: . // - there are no DELETED entries. -- line 1444 ---------------------------------------- -- line 1454 ---------------------------------------- . } . . // We successfully copied all elements without panicking. Now replace . // self with the new table. The old table will have its memory freed but . // the items will not be dropped (since they have been moved into the . // new table). . mem::swap(self, &mut new_table); . 34,783 ( 0.00%) Ok(()) . } . . /// Rehashes the contents of the table in place (i.e. without changing the . /// allocation). . /// . /// If `hasher` panics then some the table's contents may be lost. . /// . /// This uses dynamic dispatch to reduce the amount of -- line 1470 ---------------------------------------- -- line 1554 ---------------------------------------- . #[inline] . unsafe fn free_buckets(&mut self, table_layout: TableLayout) { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) { . Some(lco) => lco, . None => hint::unreachable_unchecked(), . }; . self.alloc.deallocate( 25,911 ( 0.00%) NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)), . layout, . ); . } . . /// Marks all table buckets as empty without dropping their contents. . #[inline] . fn clear_no_drop(&mut self) { 8,135 ( 0.00%) if !self.is_empty_singleton() { . unsafe { . self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes()); . } . } 13,386 ( 0.00%) self.items = 0; 8,135 ( 0.00%) self.growth_left = bucket_mask_to_capacity(self.bucket_mask); . } . . #[inline] . unsafe fn erase(&mut self, index: usize) { . debug_assert!(is_full(*self.ctrl(index))); 54,032 ( 0.00%) let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask; . let empty_before = Group::load(self.ctrl(index_before)).match_empty(); . let empty_after = Group::load(self.ctrl(index)).match_empty(); . . // If we are inside a continuous block of Group::WIDTH full or deleted . // cells then a probe window may have seen a full block when trying to . // insert. We therefore need to keep that block non-empty so that . // lookups will continue searching to the next probe window. . // . // Note that in this context `leading_zeros` refers to the bytes at the . // end of a group, while `trailing_zeros` refers to the bytes at the . // beginning of a group. 216,128 ( 0.00%) let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH { . DELETED . } else { 269,460 ( 0.01%) self.growth_left += 1; . EMPTY . }; . self.set_ctrl(index, ctrl); 216,128 ( 0.00%) self.items -= 1; . } . } . . impl Clone for RawTable { 109,680 ( 0.00%) fn clone(&self) -> Self { 13,710 ( 0.00%) if self.table.is_empty_singleton() { . Self::new_in(self.table.alloc.clone()) . } else { . unsafe { . let mut new_table = ManuallyDrop::new( . // Avoid `Result::ok_or_else` because it bloats LLVM IR. . match Self::new_uninitialized( . self.table.alloc.clone(), . self.table.buckets(), -- line 1615 ---------------------------------------- -- line 1624 ---------------------------------------- . // We need to free the memory allocated for the new table. . new_table.free_buckets(); . }); . . // Return the newly created table. . ManuallyDrop::into_inner(new_table) . } . } 123,390 ( 0.00%) } . . 
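The `erase` routine above decides between a tombstone (DELETED) and a truly EMPTY byte by asking whether the run of non-EMPTY control bytes around the slot spans at least one full group; if it does, an earlier probe may have seen a window with no EMPTY byte and skipped ahead, so the chain must be kept alive. A simplified standalone model with groups as plain byte arrays instead of SIMD masks (illustration only, 16-byte groups):

    fn erase_ctrl(before: &[u8; 16], after: &[u8; 16]) -> u8 {
        // non-EMPTY bytes (full or DELETED) at the end of the preceding group ...
        let full_before = before.iter().rev().take_while(|&&c| c != 0b1111_1111).count();
        // ... plus non-EMPTY bytes at the start of the group containing the slot
        let full_after = after.iter().take_while(|&&c| c != 0b1111_1111).count();
        if full_before + full_after >= 16 {
            0b1000_0000 // DELETED: some probe window may have contained no EMPTY byte
        } else {
            0b1111_1111 // EMPTY: the slot (and one unit of growth_left) can be handed back
        }
    }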
fn clone_from(&mut self, source: &Self) { . if source.table.is_empty_singleton() { . *self = Self::new_in(self.table.alloc.clone()); . } else { . unsafe { . // First, drop all our elements without clearing the control bytes. . self.drop_elements(); -- line 1640 ---------------------------------------- -- line 1687 ---------------------------------------- . .table . .ctrl(0) . .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes()); . source . .data_start() . .copy_to_nonoverlapping(self.data_start(), self.table.buckets()); . . self.table.items = source.table.items; 4 ( 0.00%) self.table.growth_left = source.table.growth_left; . } . } . . impl RawTable { . /// Common code for clone and clone_from. Assumes `self.buckets() == source.buckets()`. . #[cfg_attr(feature = "inline-more", inline)] . unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) { . // Copy the control bytes unchanged. We do this in a single pass -- line 1703 ---------------------------------------- -- line 1790 ---------------------------------------- . fn default() -> Self { . Self::new_in(Default::default()) . } . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] 683,455 ( 0.01%) fn drop(&mut self) { 498,986 ( 0.01%) if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); . self.free_buckets(); . } . } 732,506 ( 0.02%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); -- line 1813 ---------------------------------------- -- line 1817 ---------------------------------------- . } . } . . impl IntoIterator for RawTable { . type Item = T; . type IntoIter = RawIntoIter; . . #[cfg_attr(feature = "inline-more", inline)] 1,320 ( 0.00%) fn into_iter(self) -> RawIntoIter { . unsafe { . let iter = self.iter(); . self.into_iter_from(iter) . } 1,650 ( 0.00%) } . } . . /// Iterator over a sub-range of a table. Unlike `RawIter` this iterator does . /// not track an item count. . pub(crate) struct RawIterRange { . // Mask of full buckets in the current group. Bits are cleared from this . // mask as each element is processed. . current_group: BitMask, -- line 1838 ---------------------------------------- -- line 1934 ---------------------------------------- . . impl Iterator for RawIterRange { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option> { . unsafe { . loop { 318,008 ( 0.01%) if let Some(index) = self.current_group.lowest_set_bit() { 68,691 ( 0.00%) self.current_group = self.current_group.remove_lowest_bit(); 123,063 ( 0.00%) return Some(self.data.next_n(index)); . } . 316,385 ( 0.01%) if self.next_ctrl >= self.end { . return None; . } . . // We might read past self.end up to the next group boundary, . // but this is fine because it only occurs on tables smaller . // than the group size where the trailing control bytes are all . // EMPTY. On larger tables self.end is guaranteed to be aligned . // to the group size (since tables are power-of-two sized). 11,801 ( 0.00%) self.current_group = Group::load_aligned(self.next_ctrl).match_full(); 13,702 ( 0.00%) self.data = self.data.next_n(Group::WIDTH); 12,421 ( 0.00%) self.next_ctrl = self.next_ctrl.add(Group::WIDTH); . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . 
// We don't have an item count, so just guess based on the range size. . ( -- line 1966 ---------------------------------------- -- line 2102 ---------------------------------------- . } . } else { . // We must have already iterated past the removed item. . } . } . } . . unsafe fn drop_elements(&mut self) { 13,948 ( 0.00%) if mem::needs_drop::() && self.len() != 0 { . for item in self { . item.drop(); . } . } . } . } . . impl Clone for RawIter { -- line 2118 ---------------------------------------- -- line 2124 ---------------------------------------- . } . } . } . . impl Iterator for RawIter { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] 81,134 ( 0.00%) fn next(&mut self) -> Option> { 142,422 ( 0.00%) if let Some(b) = self.iter.next() { 419,028 ( 0.01%) self.items -= 1; . Some(b) . } else { . // We don't check against items == 0 here to allow the . // compiler to optimize away the item count entirely if the . // iterator length is never queried. . debug_assert_eq!(self.items, 0); . None . } 162,268 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (self.items, Some(self.items)) . } . } . . impl ExactSizeIterator for RawIter {} -- line 2151 ---------------------------------------- -- line 2177 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] 612 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); . . // Free the table 2,768 ( 0.00%) if let Some((ptr, layout)) = self.allocation { . self.alloc.deallocate(ptr, layout); . } . } 146 ( 0.00%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); -- line 2203 ---------------------------------------- -- line 2209 ---------------------------------------- . } . } . } . . impl Iterator for RawIntoIter { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] 9,481 ( 0.00%) fn next(&mut self) -> Option { 13,785 ( 0.00%) unsafe { Some(self.iter.next()?.read()) } 19,183 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 4 ( 0.00%) self.iter.size_hint() . } . } . . impl ExactSizeIterator for RawIntoIter {} . impl FusedIterator for RawIntoIter {} . . /// Iterator which consumes elements without freeing the table storage. . pub struct RawDrain<'a, T, A: Allocator + Clone = Global> { -- line 2231 ---------------------------------------- -- line 2259 ---------------------------------------- . where . T: Sync, . A: Sync, . { . } . . impl Drop for RawDrain<'_, T, A> { . #[cfg_attr(feature = "inline-more", inline)] 36,536 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements. Note that this may panic. . self.iter.drop_elements(); . . // Reset the contents of the table now that all elements have been . // dropped. . self.table.clear_no_drop(); . . // Move the now empty table back to its original location. 4,567 ( 0.00%) self.orig_table . .as_ptr() . .copy_from_nonoverlapping(&*self.table, 1); . } 36,536 ( 0.00%) } . } . . impl Iterator for RawDrain<'_, T, A> { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option { . 
unsafe { -- line 2289 ---------------------------------------- 11,833,666 ( 0.26%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/layout.rs -------------------------------------------------------------------------------- Ir -- line 23 ---------------------------------------- . use std::iter; . use std::num::NonZeroUsize; . use std::ops::Bound; . . use rand::{seq::SliceRandom, SeedableRng}; . use rand_xoshiro::Xoshiro128StarStar; . . pub fn provide(providers: &mut ty::query::Providers) { 6 ( 0.00%) *providers = . ty::query::Providers { layout_of, fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers }; . } . . pub trait IntegerExt { . fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>, signed: bool) -> Ty<'tcx>; . fn from_attr(cx: &C, ity: attr::IntType) -> Integer; . fn from_int_ty(cx: &C, ity: ty::IntTy) -> Integer; . fn from_uint_ty(cx: &C, uty: ty::UintTy) -> Integer; -- line 39 ---------------------------------------- -- line 44 ---------------------------------------- . min: i128, . max: i128, . ) -> (Integer, bool); . } . . impl IntegerExt for Integer { . #[inline] . fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>, signed: bool) -> Ty<'tcx> { 304,180 ( 0.01%) match (*self, signed) { . (I8, false) => tcx.types.u8, . (I16, false) => tcx.types.u16, . (I32, false) => tcx.types.u32, . (I64, false) => tcx.types.u64, . (I128, false) => tcx.types.u128, . (I8, true) => tcx.types.i8, . (I16, true) => tcx.types.i16, . (I32, true) => tcx.types.i32, . (I64, true) => tcx.types.i64, . (I128, true) => tcx.types.i128, . } . } . . /// Gets the Integer type from an attr::IntType. 124 ( 0.00%) fn from_attr(cx: &C, ity: attr::IntType) -> Integer { . let dl = cx.data_layout(); . 310 ( 0.00%) match ity { . attr::SignedInt(ast::IntTy::I8) | attr::UnsignedInt(ast::UintTy::U8) => I8, . attr::SignedInt(ast::IntTy::I16) | attr::UnsignedInt(ast::UintTy::U16) => I16, . attr::SignedInt(ast::IntTy::I32) | attr::UnsignedInt(ast::UintTy::U32) => I32, . attr::SignedInt(ast::IntTy::I64) | attr::UnsignedInt(ast::UintTy::U64) => I64, . attr::SignedInt(ast::IntTy::I128) | attr::UnsignedInt(ast::UintTy::U128) => I128, . attr::SignedInt(ast::IntTy::Isize) | attr::UnsignedInt(ast::UintTy::Usize) => { 58 ( 0.00%) dl.ptr_sized_integer() . } . } 124 ( 0.00%) } . . fn from_int_ty(cx: &C, ity: ty::IntTy) -> Integer { . match ity { . ty::IntTy::I8 => I8, . ty::IntTy::I16 => I16, . ty::IntTy::I32 => I32, . ty::IntTy::I64 => I64, . ty::IntTy::I128 => I128, 167,688 ( 0.00%) ty::IntTy::Isize => cx.data_layout().ptr_sized_integer(), . } . } . fn from_uint_ty(cx: &C, ity: ty::UintTy) -> Integer { . match ity { . ty::UintTy::U8 => I8, . ty::UintTy::U16 => I16, . ty::UintTy::U32 => I32, . ty::UintTy::U64 => I64, . ty::UintTy::U128 => I128, 2 ( 0.00%) ty::UintTy::Usize => cx.data_layout().ptr_sized_integer(), . } . } . . /// Finds the appropriate Integer type and signedness for the given . /// signed discriminant range and `#[repr]` attribute. . /// N.B.: `u128` values above `i128::MAX` will be treated as signed, but . /// that shouldn't affect anything, other than maybe debuginfo. 93 ( 0.00%) fn repr_discr<'tcx>( . tcx: TyCtxt<'tcx>, . ty: Ty<'tcx>, . repr: &ReprOptions, . min: i128, . max: i128, . ) -> (Integer, bool) { . // Theoretically, negative values could be larger in unsigned representation . // than the unsigned representation of the signed minimum. However, if there . 
// are any negative values, the only valid unsigned representation is u128 . // which can fit all i128 values, so the result remains unaffected. . let unsigned_fit = Integer::fit_unsigned(cmp::max(min as u128, max as u128)); . let signed_fit = cmp::max(Integer::fit_signed(min), Integer::fit_signed(max)); . 111 ( 0.00%) if let Some(ity) = repr.int { . let discr = Integer::from_attr(&tcx, ity); 6 ( 0.00%) let fit = if ity.is_signed() { signed_fit } else { unsigned_fit }; 2 ( 0.00%) if discr < fit { . bug!( . "Integer::repr_discr: `#[repr]` hint too small for \ . discriminant range of enum `{}", . ty . ) . } . return (discr, ity.is_signed()); . } . 58 ( 0.00%) let at_least = if repr.c() { . // This is usually I32, however it can be different on some platforms, . // notably hexagon and arm-none/thumb-none . tcx.data_layout().c_enum_min_size . } else { . // repr(Rust) enums try to be as small as possible . I8 . }; . . // If there are no negative values, we can use the unsigned fit. 261 ( 0.00%) if min >= 0 { . (cmp::max(unsigned_fit, at_least), false) . } else { . (cmp::max(signed_fit, at_least), true) . } 182 ( 0.00%) } . } . . pub trait PrimitiveExt { . fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>; . fn to_int_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>; . } . . impl PrimitiveExt for Primitive { . #[inline] . fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 253,314 ( 0.01%) match *self { 448 ( 0.00%) Int(i, signed) => i.to_ty(tcx, signed), . F32 => tcx.types.f32, . F64 => tcx.types.f64, . Pointer => tcx.mk_mut_ptr(tcx.mk_unit()), . } . } . . /// Return an *integer* type matching this primitive. . /// Useful in particular when dealing with enum discriminants. . #[inline] . fn to_int_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 149,051 ( 0.00%) match *self { 74,524 ( 0.00%) Int(i, signed) => i.to_ty(tcx, signed), . Pointer => tcx.types.usize, . F32 | F64 => bug!("floats do not have an int type"), . } . } . } . . /// The first half of a fat pointer. . /// -- line 181 ---------------------------------------- -- line 215 ---------------------------------------- . "unable to determine layout for `{}` because `{}` cannot be normalized", . t, . e.get_type_for_failure() . ), . } . } . } . 6,864 ( 0.00%) #[instrument(skip(tcx, query), level = "debug")] . fn layout_of<'tcx>( . tcx: TyCtxt<'tcx>, . query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, . ) -> Result, LayoutError<'tcx>> { . ty::tls::with_related_context(tcx, move |icx| { 429 ( 0.00%) let (param_env, ty) = query.into_parts(); . debug!(?ty); . 1,716 ( 0.00%) if !tcx.recursion_limit().value_within_limit(icx.layout_depth) { . tcx.sess.fatal(&format!("overflow representing the type `{}`", ty)); . } . . // Update the ImplicitCtxt to increase the layout_depth 3,861 ( 0.00%) let icx = ty::tls::ImplicitCtxt { layout_depth: icx.layout_depth + 1, ..icx.clone() }; . . ty::tls::enter_context(&icx, |_| { 1,287 ( 0.00%) let param_env = param_env.with_reveal_all_normalized(tcx); . let unnormalized_ty = ty; . . // FIXME: We might want to have two different versions of `layout_of`: . // One that can be called after typecheck has completed and can use . // `normalize_erasing_regions` here and another one that can be called . // before typecheck has completed and uses `try_normalize_erasing_regions`. . let ty = match tcx.try_normalize_erasing_regions(param_env, ty) { . Ok(t) => t, . Err(normalization_error) => { . return Err(LayoutError::NormalizationFailure(ty, normalization_error)); . } . }; . 429 ( 0.00%) if ty != unnormalized_ty { . 
// Ensure this layout is also cached for the normalized type. . return tcx.layout_of(param_env.and(ty)); . } . 1,640 ( 0.00%) let cx = LayoutCx { tcx, param_env }; . 1,230 ( 0.00%) let layout = cx.layout_of_uncached(ty)?; . let layout = TyAndLayout { ty, layout }; . . cx.record_layout_for_printing(layout); . . // Type-level uninhabitedness should always imply ABI uninhabitedness. 410 ( 0.00%) if tcx.conservative_is_privately_uninhabited(param_env.and(ty)) { 2 ( 0.00%) assert!(layout.abi.is_uninhabited()); . } . 1,230 ( 0.00%) Ok(layout) . }) . }) . } . . pub struct LayoutCx<'tcx, C> { . pub tcx: C, . pub param_env: ty::ParamEnv<'tcx>, . } -- line 279 ---------------------------------------- -- line 291 ---------------------------------------- . // Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`. . // This is used to go between `memory_index` (source field order to memory order) . // and `inverse_memory_index` (memory order to source field order). . // See also `FieldsShape::Arbitrary::memory_index` for more details. . // FIXME(eddyb) build a better abstraction for permutations, if possible. . fn invert_mapping(map: &[u32]) -> Vec { . let mut inverse = vec![0; map.len()]; . for i in 0..map.len() { 568 ( 0.00%) inverse[map[i] as usize] = i as u32; . } . inverse . } . . impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { 576 ( 0.00%) fn scalar_pair(&self, a: Scalar, b: Scalar) -> Layout { . let dl = self.data_layout(); 448 ( 0.00%) let b_align = b.value.align(dl); 512 ( 0.00%) let align = a.value.align(dl).max(b_align).max(dl.aggregate_align); . let b_offset = a.value.size(dl).align_to(b_align.abi); . let size = (b_offset + b.value.size(dl)).align_to(align.abi); . . // HACK(nox): We iter on `b` and then `a` because `max_by_key` . // returns the last maximum. 768 ( 0.00%) let largest_niche = Niche::from_scalar(dl, b_offset, b) . .into_iter() 640 ( 0.00%) .chain(Niche::from_scalar(dl, Size::ZERO, a)) . .max_by_key(|niche| niche.available(dl)); . 1,216 ( 0.00%) Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, . fields: FieldsShape::Arbitrary { 128 ( 0.00%) offsets: vec![Size::ZERO, b_offset], 128 ( 0.00%) memory_index: vec![0, 1], . }, 832 ( 0.00%) abi: Abi::ScalarPair(a, b), 640 ( 0.00%) largest_niche, . align, . size, . } 512 ( 0.00%) } . 4,884 ( 0.00%) fn univariant_uninterned( . &self, . ty: Ty<'tcx>, . fields: &[TyAndLayout<'_>], . repr: &ReprOptions, . kind: StructKind, . ) -> Result> { . let dl = self.data_layout(); 1,628 ( 0.00%) let pack = repr.pack; 407 ( 0.00%) if pack.is_some() && repr.align.is_some() { . self.tcx.sess.delay_span_bug(DUMMY_SP, "struct cannot be packed and aligned"); . return Err(LayoutError::Unknown(ty)); . } . 1,628 ( 0.00%) let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align }; . . let mut inverse_memory_index: Vec = (0..fields.len() as u32).collect(); . . let optimize = !repr.inhibit_struct_field_reordering_opt(); 401 ( 0.00%) if optimize { . let end = 2,765 ( 0.00%) if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() }; . let optimizing = &mut inverse_memory_index[..end]; 395 ( 0.00%) let field_align = |f: &TyAndLayout<'_>| { 232 ( 0.00%) if let Some(pack) = pack { f.align.abi.min(pack) } else { f.align.abi } . }; . . // If `-Z randomize-layout` was enabled for the type definition we can shuffle . // the field ordering to try and catch some code making assumptions about layouts . // we don't guarantee . if repr.can_randomize_type_layout() { . 
// `ReprOptions.layout_seed` is a deterministic seed that we can use to . // randomize field ordering with . let mut rng = Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed); . . // Shuffle the ordering of the fields . optimizing.shuffle(&mut rng); . . // Otherwise we just leave things alone and actually optimize the type's fields . } else { 790 ( 0.00%) match kind { . StructKind::AlwaysSized | StructKind::MaybeUnsized => { 1,185 ( 0.00%) optimizing.sort_by_key(|&x| { . // Place ZSTs first to avoid "interesting offsets", . // especially with only one or two non-ZST fields. 203 ( 0.00%) let f = &fields[x as usize]; 31 ( 0.00%) (!f.is_zst(), cmp::Reverse(field_align(f))) . }); . } . . StructKind::Prefixed(..) => { . // Sort in ascending alignment so that the layout stays optimal . // regardless of the prefix 790 ( 0.00%) optimizing.sort_by_key(|&x| field_align(&fields[x as usize])); . } . } . . // FIXME(Kixiron): We can always shuffle fields within a given alignment class . // regardless of the status of `-Z randomize-layout` . } . } . -- line 393 ---------------------------------------- -- line 399 ---------------------------------------- . // produce `memory_index` (see `invert_mapping`). . . let mut sized = true; . let mut offsets = vec![Size::ZERO; fields.len()]; . let mut offset = Size::ZERO; . let mut largest_niche = None; . let mut largest_niche_available = 0; . 1,221 ( 0.00%) if let StructKind::Prefixed(prefix_size, prefix_align) = kind { . let prefix_align = 1,148 ( 0.00%) if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align }; . align = align.max(AbiAndPrefAlign::new(prefix_align)); . offset = prefix_size.align_to(prefix_align); . } . 154 ( 0.00%) for &i in &inverse_memory_index { 2,112 ( 0.00%) let field = fields[i as usize]; 308 ( 0.00%) if !sized { . self.tcx.sess.delay_span_bug( . DUMMY_SP, . &format!( . "univariant: field #{} of `{}` comes after unsized field", . offsets.len(), . ty . ), . ); . } . 770 ( 0.00%) if field.is_unsized() { . sized = false; . } . . // Invariant: offset < dl.obj_size_bound() <= 1<<61 308 ( 0.00%) let field_align = if let Some(pack) = pack { . field.align.min(AbiAndPrefAlign::new(pack)) . } else { 462 ( 0.00%) field.align . }; . offset = offset.align_to(field_align.abi); . align = align.max(field_align); . . debug!("univariant offset: {:?} field: {:#?}", offset, field); 308 ( 0.00%) offsets[i as usize] = offset; . 154 ( 0.00%) if !repr.hide_niche() { 1,775 ( 0.00%) if let Some(mut niche) = field.largest_niche { . let available = niche.available(dl); 404 ( 0.00%) if available > largest_niche_available { . largest_niche_available = available; . niche.offset += offset; 1,216 ( 0.00%) largest_niche = Some(niche); . } . } . } . 154 ( 0.00%) offset = offset.checked_add(field.size, dl).ok_or(LayoutError::SizeOverflow(ty))?; . } . 922 ( 0.00%) if let Some(repr_align) = repr.align { . align = align.max(AbiAndPrefAlign::new(repr_align)); . } . . debug!("univariant min_size: {:?}", offset); . let min_size = offset; . . // As stated above, inverse_memory_index holds field indices by increasing offset. . // This makes it an already-sorted view of the offsets vec. . // To invert it, consider: . // If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0. . // Field 5 would be the first element, so memory_index is i: . // Note: if we didn't optimize, it's already right. . . let memory_index = 898 ( 0.00%) if optimize { invert_mapping(&inverse_memory_index) } else { inverse_memory_index }; . . 
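The field-reordering logic above is what makes repr(Rust) struct layout differ from declaration order: zero-sized fields are placed first, the rest are sorted by decreasing alignment, and `memory_index` records the inverse of that permutation. A standalone sketch of the same idea (illustration only; no packing, no repr(align), sized fields only, sizes and alignments as plain integers):

    fn univariant_sketch(fields: &[(u64, u64)]) -> (Vec<u64>, Vec<u32>) {
        // (size, align) per source-order field; inverse[m] = source index placed m-th in memory
        let mut inverse: Vec<u32> = (0..fields.len() as u32).collect();
        inverse.sort_by_key(|&i| {
            let (size, align) = fields[i as usize];
            (size != 0, std::cmp::Reverse(align)) // ZSTs first, then biggest alignment first
        });

        let mut offsets = vec![0u64; fields.len()];
        let mut offset = 0u64;
        for &i in &inverse {
            let (size, align) = fields[i as usize];
            offset = (offset + align - 1) / align * align; // align up
            offsets[i as usize] = offset;
            offset += size;
        }

        // memory_index is the inverse permutation: source field -> memory position
        let mut memory_index = vec![0u32; fields.len()];
        for (m, &src) in inverse.iter().enumerate() {
            memory_index[src as usize] = m as u32;
        }
        (offsets, memory_index)
    }

For fields declared as (u8, u32, u16), i.e. sizes/aligns (1,1), (4,4), (2,2), the memory order becomes u32, u16, u8 with offsets 0, 4, 6, so the struct fits in 8 bytes instead of the 12 needed in declaration order.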
let size = min_size.align_to(align.abi); 814 ( 0.00%) let mut abi = Abi::Aggregate { sized }; . . // Unpack newtype ABIs and find scalar pairs. 1,628 ( 0.00%) if sized && size.bytes() > 0 { . // All other fields must be ZSTs. . let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst()); . 162 ( 0.00%) match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) { . // We have exactly one non-ZST field. . (Some((i, field)), None, None) => { . // Field fills the struct and it has a scalar or scalar pair ABI. 286 ( 0.00%) if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size . { 168 ( 0.00%) match field.abi { . // For plain scalars, or vectors of them, we can't unpack . // newtypes for `#[repr(C)]`, as that affects C ABIs. 52 ( 0.00%) Abi::Scalar(_) | Abi::Vector { .. } if optimize => { . abi = field.abi; . } . // But scalar pairs are Rust-specific and get . // treated as aggregates by C ABIs anyway. . Abi::ScalarPair(..) => { . abi = field.abi; . } . _ => {} -- line 499 ---------------------------------------- -- line 503 ---------------------------------------- . . // Two non-ZST fields, and they're both scalars. . ( . Some((i, &TyAndLayout { layout: &Layout { abi: Abi::Scalar(a), .. }, .. })), . Some((j, &TyAndLayout { layout: &Layout { abi: Abi::Scalar(b), .. }, .. })), . None, . ) => { . // Order by the memory placement, not source order. 216 ( 0.00%) let ((i, a), (j, b)) = 168 ( 0.00%) if offsets[i] < offsets[j] { ((i, a), (j, b)) } else { ((j, b), (i, a)) }; 264 ( 0.00%) let pair = self.scalar_pair(a, b); 48 ( 0.00%) let pair_offsets = match pair.fields { 48 ( 0.00%) FieldsShape::Arbitrary { ref offsets, ref memory_index } => { 24 ( 0.00%) assert_eq!(memory_index, &[0, 1]); . offsets . } . _ => bug!(), . }; 216 ( 0.00%) if offsets[i] == pair_offsets[0] 24 ( 0.00%) && offsets[j] == pair_offsets[1] . && align == pair.align . && size == pair.size . { . // We can use `ScalarPair` only when it matches our . // already computed layout (including `#[repr(C)]`). 336 ( 0.00%) abi = pair.abi; . } 24 ( 0.00%) } . . _ => {} . } . } . 154 ( 0.00%) if fields.iter().any(|f| f.abi.is_uninhabited()) { . abi = Abi::Uninhabited; . } . 13,024 ( 0.00%) Ok(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, 4,070 ( 0.00%) fields: FieldsShape::Arbitrary { offsets, memory_index }, . abi, 4,477 ( 0.00%) largest_niche, . align, . size, . }) 4,070 ( 0.00%) } . 4,100 ( 0.00%) fn layout_of_uncached(&self, ty: Ty<'tcx>) -> Result<&'tcx Layout, LayoutError<'tcx>> { 820 ( 0.00%) let tcx = self.tcx; 410 ( 0.00%) let param_env = self.param_env; . let dl = self.data_layout(); 492 ( 0.00%) let scalar_unit = |value: Primitive| { 2 ( 0.00%) let size = value.size(dl); 308 ( 0.00%) assert!(size.bits() <= 128); 277 ( 0.00%) Scalar { value, valid_range: WrappingRange { start: 0, end: size.unsigned_int_max() } } 82 ( 0.00%) }; . let scalar = |value: Primitive| tcx.intern_layout(Layout::scalar(self, scalar_unit(value))); . . let univariant = |fields: &[TyAndLayout<'_>], repr: &ReprOptions, kind| { 4,414 ( 0.00%) Ok(tcx.intern_layout(self.univariant_uninterned(ty, fields, repr, kind)?)) . }; . debug_assert!(!ty.has_infer_types_or_consts()); . 3,096 ( 0.00%) Ok(match *ty.kind() { . // Basic scalars. . ty::Bool => tcx.intern_layout(Layout::scalar( . self, 12 ( 0.00%) Scalar { value: Int(I8, false), valid_range: WrappingRange { start: 0, end: 1 } }, . )), . ty::Char => tcx.intern_layout(Layout::scalar( . self, 12 ( 0.00%) Scalar { . 
value: Int(I32, false), . valid_range: WrappingRange { start: 0, end: 0x10FFFF }, . }, . )), 25 ( 0.00%) ty::Int(ity) => scalar(Int(Integer::from_int_ty(dl, ity), true)), 40 ( 0.00%) ty::Uint(ity) => scalar(Int(Integer::from_uint_ty(dl, ity), false)), . ty::Float(fty) => scalar(match fty { . ty::FloatTy::F32 => F32, . ty::FloatTy::F64 => F64, . }), . ty::FnPtr(_) => { . let mut ptr = scalar_unit(Pointer); . ptr.valid_range = ptr.valid_range.with_start(1); . tcx.intern_layout(Layout::scalar(self, ptr)) . } . . // The never type. 3 ( 0.00%) ty::Never => tcx.intern_layout(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, . fields: FieldsShape::Primitive, . abi: Abi::Uninhabited, . largest_niche: None, 1 ( 0.00%) align: dl.i8_align, . size: Size::ZERO, . }), . . // Potentially-wide pointers. 123 ( 0.00%) ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { . let mut data_ptr = scalar_unit(Pointer); . if !ty.is_unsafe_ptr() { . data_ptr.valid_range = data_ptr.valid_range.with_start(1); . } . . let pointee = tcx.normalize_erasing_regions(param_env, pointee); 800 ( 0.00%) if pointee.is_sized(tcx.at(DUMMY_SP), param_env) { 1,156 ( 0.00%) return Ok(tcx.intern_layout(Layout::scalar(self, data_ptr))); . } . 128 ( 0.00%) let unsized_part = tcx.struct_tail_erasing_lifetimes(pointee, param_env); 128 ( 0.00%) let metadata = match unsized_part.kind() { . ty::Foreign(..) => { . return Ok(tcx.intern_layout(Layout::scalar(self, data_ptr))); . } 248 ( 0.00%) ty::Slice(_) | ty::Str => scalar_unit(Int(dl.ptr_sized_integer(), false)), . ty::Dynamic(..) => { 2 ( 0.00%) let mut vtable = scalar_unit(Pointer); 2 ( 0.00%) vtable.valid_range = vtable.valid_range.with_start(1); 8 ( 0.00%) vtable . } . _ => return Err(LayoutError::Unknown(unsized_part)), . }; . . // Effectively a (ptr, meta) tuple. 672 ( 0.00%) tcx.intern_layout(self.scalar_pair(data_ptr, metadata)) . } . . // Arrays and slices. 21 ( 0.00%) ty::Array(element, mut count) => { 7 ( 0.00%) if count.has_projections() { . count = tcx.normalize_erasing_regions(param_env, count); . if count.has_projections() { . return Err(LayoutError::Unknown(ty)); . } . } . 28 ( 0.00%) let count = count.try_eval_usize(tcx, param_env).ok_or(LayoutError::Unknown(ty))?; 35 ( 0.00%) let element = self.layout_of(element)?; . let size = 35 ( 0.00%) element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow(ty))?; . . let abi = 56 ( 0.00%) if count != 0 && tcx.conservative_is_privately_uninhabited(param_env.and(ty)) { . Abi::Uninhabited . } else { . Abi::Aggregate { sized: true } . }; . 77 ( 0.00%) let largest_niche = if count != 0 { element.largest_niche } else { None }; . 154 ( 0.00%) tcx.intern_layout(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, 7 ( 0.00%) fields: FieldsShape::Array { stride: element.size, count }, . abi, . largest_niche, 7 ( 0.00%) align: element.align, . size, . }) . } 32 ( 0.00%) ty::Slice(element) => { 48 ( 0.00%) let element = self.layout_of(element)?; 176 ( 0.00%) tcx.intern_layout(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, 16 ( 0.00%) fields: FieldsShape::Array { stride: element.size, count: 0 }, . abi: Abi::Aggregate { sized: false }, . largest_niche: None, 16 ( 0.00%) align: element.align, . size: Size::ZERO, . }) . } 12 ( 0.00%) ty::Str => tcx.intern_layout(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, . fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 }, . abi: Abi::Aggregate { sized: false }, . 
largest_niche: None, 2 ( 0.00%) align: dl.i8_align, . size: Size::ZERO, . }), . . // Odd unit types. 322 ( 0.00%) ty::FnDef(..) => univariant(&[], &ReprOptions::default(), StructKind::AlwaysSized)?, . ty::Dynamic(..) | ty::Foreign(..) => { 13 ( 0.00%) let mut unit = self.univariant_uninterned( . ty, . &[], 2 ( 0.00%) &ReprOptions::default(), . StructKind::AlwaysSized, . )?; 2 ( 0.00%) match unit.abi { 1 ( 0.00%) Abi::Aggregate { ref mut sized } => *sized = false, . _ => bug!(), . } 7 ( 0.00%) tcx.intern_layout(unit) . } . . ty::Generator(def_id, substs, _) => self.generator_layout(ty, def_id, substs)?, . . ty::Closure(_, ref substs) => { 17 ( 0.00%) let tys = substs.as_closure().upvar_tys(); 17 ( 0.00%) univariant( 73 ( 0.00%) &tys.map(|ty| self.layout_of(ty)).collect::, _>>()?, 34 ( 0.00%) &ReprOptions::default(), . StructKind::AlwaysSized, . )? . } . 33 ( 0.00%) ty::Tuple(tys) => { . let kind = 99 ( 0.00%) if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; . 33 ( 0.00%) univariant( 66 ( 0.00%) &tys.iter() . .map(|k| self.layout_of(k.expect_ty())) . .collect::, _>>()?, 66 ( 0.00%) &ReprOptions::default(), . kind, . )? . } . . // SIMD vector types. 110 ( 0.00%) ty::Adt(def, substs) if def.repr.simd() => { . if !def.is_struct() { . // Should have yielded E0517 by now. . tcx.sess.delay_span_bug( . DUMMY_SP, . "#[repr(simd)] was applied to an ADT that is not a struct", . ); . return Err(LayoutError::Unknown(ty)); . } -- line 732 ---------------------------------------- -- line 833 ---------------------------------------- . abi: Abi::Vector { element: e_abi, count: e_len }, . largest_niche: e_ly.largest_niche, . size, . align, . }) . } . . // ADTs. 110 ( 0.00%) ty::Adt(def, substs) => { . // Cache the field layouts. 110 ( 0.00%) let variants = def . .variants . .iter() . .map(|v| { . v.fields . .iter() 733 ( 0.00%) .map(|field| self.layout_of(field.ty(tcx, substs))) . .collect::, _>>() . }) . .collect::, _>>()?; . 55 ( 0.00%) if def.is_union() { 36 ( 0.00%) if def.repr.pack.is_some() && def.repr.align.is_some() { . self.tcx.sess.delay_span_bug( . tcx.def_span(def.did), . "union cannot be packed and aligned", . ); . return Err(LayoutError::Unknown(ty)); . } . . let mut align = 12 ( 0.00%) if def.repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align }; . 18 ( 0.00%) if let Some(repr_align) = def.repr.align { . align = align.max(AbiAndPrefAlign::new(repr_align)); . } . . let optimize = !def.repr.inhibit_union_abi_opt(); . let mut size = Size::ZERO; 6 ( 0.00%) let mut abi = Abi::Aggregate { sized: true }; . let index = VariantIdx::new(0); . for field in &variants[index] { 84 ( 0.00%) assert!(!field.is_unsized()); . align = align.max(field.align); . . // If all non-ZST fields have the same ABI, forward this ABI 18 ( 0.00%) if optimize && !field.is_zst() { . // Normalize scalar_unit to the maximal valid range . let field_abi = match field.abi { . Abi::Scalar(x) => Abi::Scalar(scalar_unit(x.value)), . Abi::ScalarPair(x, y) => { . Abi::ScalarPair(scalar_unit(x.value), scalar_unit(y.value)) . } . Abi::Vector { element: x, count } => { . Abi::Vector { element: scalar_unit(x.value), count } -- line 887 ---------------------------------------- -- line 895 ---------------------------------------- . // first non ZST: initialize 'abi' . abi = field_abi; . } else if abi != field_abi { . // different fields have different ABI: reset to Aggregate . abi = Abi::Aggregate { sized: true }; . } . } . 18 ( 0.00%) size = cmp::max(size, field.size); . } . 
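For unions, the loop above takes the maximum field alignment and the maximum field size; the final size is then rounded up to that alignment (the align_to call just below). A standalone sketch of the expected outcome of that computation, assuming a target where u32 is 4-byte aligned (the union and its fields are hypothetical):

union Example {
    a: u8,        // size 1, align 1
    b: u32,       // size 4, align 4
    c: [u16; 3],  // size 6, align 2
}

fn main() {
    // max field align = 4, max field size = 6, so size = align_to(6, 4) = 8.
    assert_eq!(std::mem::align_of::<Example>(), 4);
    assert_eq!(std::mem::size_of::<Example>(), 8);
}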
36 ( 0.00%) if let Some(pack) = def.repr.pack { . align = align.min(AbiAndPrefAlign::new(pack)); . } . 258 ( 0.00%) return Ok(tcx.intern_layout(Layout { 6 ( 0.00%) variants: Variants::Single { index }, 12 ( 0.00%) fields: FieldsShape::Union( 6 ( 0.00%) NonZeroUsize::new(variants[index].len()) . .ok_or(LayoutError::Unknown(ty))?, . ), . abi, . largest_niche: None, . align, 12 ( 0.00%) size: size.align_to(align.abi), . })); . } . . // A variant is absent if it's uninhabited and only has ZST fields. . // Present uninhabited variants only require space for their fields, . // but *not* an encoding of the discriminant (e.g., a tag value). . // See issue #49298 for more details on the need to leave space . // for non-ZST uninhabited data (mostly partial initialization). . let absent = |fields: &[TyAndLayout<'_>]| { 117 ( 0.00%) let uninhabited = fields.iter().any(|f| f.abi.is_uninhabited()); . let is_zst = fields.iter().all(|f| f.is_zst()); 158 ( 0.00%) uninhabited && is_zst 158 ( 0.00%) }; . let (present_first, present_second) = { . let mut present_variants = variants . .iter_enumerated() 320 ( 0.00%) .filter_map(|(i, v)| if absent(v) { None } else { Some(i) }); . (present_variants.next(), present_variants.next()) . }; 98 ( 0.00%) let present_first = match present_first { . Some(present_first) => present_first, . // Uninhabited because it has no variants, or only absent ones. 1 ( 0.00%) None if def.is_enum() => { 6 ( 0.00%) return Ok(tcx.layout_of(param_env.and(tcx.types.never))?.layout); . } . // If it's a struct, still compute a layout so that we can still compute the . // field offsets. . None => VariantIdx::new(0), . }; . 48 ( 0.00%) let is_struct = !def.is_enum() || . // Only one variant is present. 32 ( 0.00%) (present_second.is_none() && . // Representation optimizations are allowed. . !def.repr.inhibit_enum_layout_opt()); 4 ( 0.00%) if is_struct { . // Struct, or univariant enum equivalent to a struct. . // (Typechecking will reject discriminant-sizing attrs.) . . let v = present_first; 16 ( 0.00%) let kind = if def.is_enum() || variants[v].is_empty() { . StructKind::AlwaysSized . } else { 42 ( 0.00%) let param_env = tcx.param_env(def.did); 14 ( 0.00%) let last_field = def.variants[v].fields.last().unwrap(); . let always_sized = 84 ( 0.00%) tcx.type_of(last_field.did).is_sized(tcx.at(DUMMY_SP), param_env); 28 ( 0.00%) if !always_sized { . StructKind::MaybeUnsized . } else { . StructKind::AlwaysSized . } . }; . 221 ( 0.00%) let mut st = self.univariant_uninterned(ty, &variants[v], &def.repr, kind)?; 85 ( 0.00%) st.variants = Variants::Single { index: v }; 119 ( 0.00%) let (start, end) = self.tcx.layout_scalar_valid_range(def.did); 68 ( 0.00%) match st.abi { . Abi::Scalar(ref mut scalar) | Abi::ScalarPair(ref mut scalar, _) => { . // the asserts ensure that we are not using the . // `#[rustc_layout_scalar_valid_range(n)]` . // attribute to widen the range of anything as that would probably . // result in UB somewhere . // FIXME(eddyb) the asserts are probably not needed, . // as larger validity ranges would result in missed . // optimizations, *not* wrongly assuming the inner . // value is valid. e.g. unions enlarge validity ranges, . // because the values may be uninitialized. 30 ( 0.00%) if let Bound::Included(start) = start { . // FIXME(eddyb) this might be incorrect - it doesn't . // account for wrap-around (end < start) ranges. 4 ( 0.00%) assert!(scalar.valid_range.start <= start); 2 ( 0.00%) scalar.valid_range.start = start; . } 20 ( 0.00%) if let Bound::Included(end) = end { . 
// FIXME(eddyb) this might be incorrect - it doesn't . // account for wrap-around (end < start) ranges. . assert!(scalar.valid_range.end >= end); . scalar.valid_range.end = end; . } . . // Update `largest_niche` if we have introduced a larger niche. 30 ( 0.00%) let niche = if def.repr.hide_niche() { . None . } else { 100 ( 0.00%) Niche::from_scalar(dl, Size::ZERO, *scalar) . }; 34 ( 0.00%) if let Some(niche) = niche { 8 ( 0.00%) match st.largest_niche { 21 ( 0.00%) Some(largest_niche) => { . // Replace the existing niche even if they're equal, . // because this one is at a lower offset. 15 ( 0.00%) if largest_niche.available(dl) <= niche.available(dl) { 21 ( 0.00%) st.largest_niche = Some(niche); . } . } 7 ( 0.00%) None => st.largest_niche = Some(niche), . } . } . } . _ => assert!( 21 ( 0.00%) start == Bound::Unbounded && end == Bound::Unbounded, . "nonscalar layout for layout_scalar_valid_range type {:?}: {:#?}", . def, . st, . ), . } . 153 ( 0.00%) return Ok(tcx.intern_layout(st)); . } . . // At this point, we have handled all unions and . // structs. (We have also handled univariant enums . // that allow representation optimization.) . assert!(def.is_enum()); . . // The current code for niche-filling relies on variant indices . // instead of actual discriminants, so dataful enums with . // explicit discriminants (RFC #2363) would misbehave. . let no_explicit_discriminants = def . .variants . .iter_enumerated() . .all(|(i, v)| v.discr == ty::VariantDiscr::Relative(i.as_u32())); . 62 ( 0.00%) let mut niche_filling_layout = None; . . // Niche-filling enum optimization. 62 ( 0.00%) if !def.repr.inhibit_enum_layout_opt() && no_explicit_discriminants { . let mut dataful_variant = None; . let mut niche_variants = VariantIdx::MAX..=VariantIdx::new(0); . . // Find one non-ZST variant. 1,117 ( 0.00%) 'variants: for (v, fields) in variants.iter_enumerated() { 474 ( 0.00%) if absent(fields) { . continue 'variants; . } 316 ( 0.00%) for f in fields { 22 ( 0.00%) if !f.is_zst() { . if dataful_variant.is_none() { . dataful_variant = Some(v); . continue 'variants; . } else { . dataful_variant = None; . break 'variants; . } . } . } 520 ( 0.00%) niche_variants = *niche_variants.start().min(&v)..=v; . } . 57 ( 0.00%) if niche_variants.start() > niche_variants.end() { . dataful_variant = None; . } . . if let Some(i) = dataful_variant { 40 ( 0.00%) let count = (niche_variants.end().as_u32() . - niche_variants.start().as_u32() . + 1) as u128; . . // Find the field with the largest niche 8 ( 0.00%) let niche_candidate = variants[i] . .iter() . .enumerate() 16 ( 0.00%) .filter_map(|(j, field)| Some((j, field.largest_niche?))) . .max_by_key(|(_, niche)| niche.available(dl)); . 107 ( 0.00%) if let Some((field_index, niche, (niche_start, niche_scalar))) = 146 ( 0.00%) niche_candidate.and_then(|(field_index, niche)| { 98 ( 0.00%) Some((field_index, niche, niche.reserve(self, count)?)) . }) . { 21 ( 0.00%) let mut align = dl.aggregate_align; 14 ( 0.00%) let st = variants . .iter_enumerated() . .map(|(j, v)| { 238 ( 0.00%) let mut st = self.univariant_uninterned( . ty, . v, . &def.repr, . StructKind::AlwaysSized, . )?; 42 ( 0.00%) st.variants = Variants::Single { index: j }; . 28 ( 0.00%) align = align.max(st.align); . 84 ( 0.00%) Ok(st) . }) . .collect::, _>>()?; . 56 ( 0.00%) let offset = st[i].fields.offset(field_index) + niche.offset; 14 ( 0.00%) let size = st[i].size; . . let abi = if st.iter().all(|v| v.abi.is_uninhabited()) { . Abi::Uninhabited . 
} else { 33 ( 0.00%) match st[i].abi { 48 ( 0.00%) Abi::Scalar(_) => Abi::Scalar(niche_scalar), . Abi::ScalarPair(first, second) => { . // We need to use scalar_unit to reset the . // valid range to the maximal one for that . // primitive, because only the niche is . // guaranteed to be initialised, not the . // other primitive. 2 ( 0.00%) if offset.bytes() == 0 { 17 ( 0.00%) Abi::ScalarPair(niche_scalar, scalar_unit(second.value)) . } else { . Abi::ScalarPair(scalar_unit(first.value), niche_scalar) . } . } . _ => Abi::Aggregate { sized: true }, . } . }; . 77 ( 0.00%) let largest_niche = Niche::from_scalar(dl, offset, niche_scalar); . 434 ( 0.00%) niche_filling_layout = Some(Layout { 42 ( 0.00%) variants: Variants::Multiple { . tag: niche_scalar, . tag_encoding: TagEncoding::Niche { . dataful_variant: i, . niche_variants, . niche_start, . }, . tag_field: 0, . variants: st, . }, . fields: FieldsShape::Arbitrary { 35 ( 0.00%) offsets: vec![offset], 35 ( 0.00%) memory_index: vec![0], . }, . abi, . largest_niche, . size, 14 ( 0.00%) align, . }); . } . } . } . 155 ( 0.00%) let (mut min, mut max) = (i128::MAX, i128::MIN); 164 ( 0.00%) let discr_type = def.repr.discr_type(); 93 ( 0.00%) let bits = Integer::from_attr(self, discr_type).size().bits(); 505 ( 0.00%) for (i, discr) in def.discriminants(tcx) { 39 ( 0.00%) if variants[i].iter().any(|f| f.abi.is_uninhabited()) { . continue; . } . let mut x = discr.val as i128; 164 ( 0.00%) if discr_type.is_signed() { . // sign extend the raw representation to be an i128 3,116 ( 0.00%) x = (x << (128 - bits)) >> (128 - bits); . } 656 ( 0.00%) if x < min { 124 ( 0.00%) min = x; . } 656 ( 0.00%) if x > max { 492 ( 0.00%) max = x; . } . } . // We might have no inhabited variants, so pretend there's at least one. 31 ( 0.00%) if (min, max) == (i128::MAX, i128::MIN) { . min = 0; . max = 0; . } 155 ( 0.00%) assert!(min <= max, "discriminant range is {}...{}", min, max); 279 ( 0.00%) let (min_ity, signed) = Integer::repr_discr(tcx, ty, &def.repr, min, max); . 62 ( 0.00%) let mut align = dl.aggregate_align; 31 ( 0.00%) let mut size = Size::ZERO; . . // We're interested in the smallest alignment, so start large. 62 ( 0.00%) let mut start_align = Align::from_bytes(256).unwrap(); 155 ( 0.00%) assert_eq!(Integer::for_align(dl, start_align), None); . . // repr(C) on an enum tells us to make a (tag, union) layout, . // so we need to grow the prefix alignment to be at least . // the alignment of the union. (This value is used both for . // determining the alignment of the overall enum, and the . // determining the alignment of the payload after the tag.) 403 ( 0.00%) let mut prefix_align = min_ity.align(dl).abi; 31 ( 0.00%) if def.repr.c() { . for fields in &variants { . for field in fields { . prefix_align = prefix_align.max(field.align.abi); . } . } . } . . // Create the set of structs that represent each variant. 62 ( 0.00%) let mut layout_variants = variants . .iter_enumerated() . .map(|(i, field_layouts)| { 3,116 ( 0.00%) let mut st = self.univariant_uninterned( 164 ( 0.00%) ty, . &field_layouts, 328 ( 0.00%) &def.repr, 656 ( 0.00%) StructKind::Prefixed(min_ity.size(), prefix_align), . )?; 492 ( 0.00%) st.variants = Variants::Single { index: i }; . // Find the first field we can't move later . // to make room for a larger discriminant. . for field in 2,210 ( 0.00%) st.fields.index_by_increasing_offset().map(|j| field_layouts[j]) . { 44 ( 0.00%) if !field.is_zst() || field.align.abi.bytes() != 1 { 56 ( 0.00%) start_align = start_align.min(field.align.abi); . break; . 
} . } 656 ( 0.00%) size = cmp::max(size, st.size); 492 ( 0.00%) align = align.max(st.align); 984 ( 0.00%) Ok(st) . }) . .collect::, _>>()?; . . // Align the maximum variant size to the largest alignment. 155 ( 0.00%) size = size.align_to(align.abi); . 124 ( 0.00%) if size.bytes() >= dl.obj_size_bound() { . return Err(LayoutError::SizeOverflow(ty)); . } . 124 ( 0.00%) let typeck_ity = Integer::from_attr(dl, def.repr.discr_type()); 93 ( 0.00%) if typeck_ity < min_ity { . // It is a bug if Layout decided on a greater discriminant size than typeck for . // some reason at this point (based on values discriminant can take on). Mostly . // because this discriminant will be loaded, and then stored into variable of . // type calculated by typeck. Consider such case (a bug): typeck decided on . // byte-sized discriminant, but layout thinks we need a 16-bit to store all . // discriminant values. That would be a bug, because then, in codegen, in order . // to store this 16-bit discriminant into 8-bit sized temporary some of the . // space necessary to represent would have to be discarded (or layout is wrong -- line 1250 ---------------------------------------- -- line 1263 ---------------------------------------- . // as the alignment of the first field of each variant. . // We increase the size of the discriminant to avoid LLVM copying . // padding when it doesn't need to. This normally causes unaligned . // load/stores and excessive memcpy/memset operations. By using a . // bigger integer size, LLVM can be sure about its contents and . // won't be so conservative. . . // Use the initial field alignment 93 ( 0.00%) let mut ity = if def.repr.c() || def.repr.int.is_some() { . min_ity . } else { 87 ( 0.00%) Integer::for_align(dl, start_align).unwrap_or(min_ity) . }; . . // If the alignment is not larger than the chosen discriminant size, . // don't use the alignment as the final size. 29 ( 0.00%) if ity <= min_ity { . ity = min_ity; . } else { . // Patch up the variants' first few fields. 18 ( 0.00%) let old_ity_size = min_ity.size(); . let new_ity_size = ity.size(); . for variant in &mut layout_variants { 36 ( 0.00%) match variant.fields { . FieldsShape::Arbitrary { ref mut offsets, .. } => { . for i in offsets { 48 ( 0.00%) if *i <= old_ity_size { . assert_eq!(*i, old_ity_size); . *i = new_ity_size; . } . } . // We might be making the struct larger. 18 ( 0.00%) if variant.size <= old_ity_size { 4 ( 0.00%) variant.size = new_ity_size; . } . } . _ => bug!(), . } . } . } . 31 ( 0.00%) let tag_mask = ity.size().unsigned_int_max(); . let tag = Scalar { . value: Int(ity, signed), . valid_range: WrappingRange { 155 ( 0.00%) start: (min as u128 & tag_mask), 124 ( 0.00%) end: (max as u128 & tag_mask), . }, . }; . let mut abi = Abi::Aggregate { sized: true }; 186 ( 0.00%) if tag.value.size(dl) == size { . abi = Abi::Scalar(tag); . } else { . // Try to use a ScalarPair for all tagged enums. . let mut common_prim = None; . for (field_layouts, layout_variant) in iter::zip(&variants, &layout_variants) { 60 ( 0.00%) let offsets = match layout_variant.fields { . FieldsShape::Arbitrary { ref offsets, .. } => offsets, . _ => bug!(), . }; . let mut fields = . iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst()); 300 ( 0.00%) let (field, offset) = match (fields.next(), fields.next()) { . (None, None) => continue, . (Some(pair), None) => pair, . _ => { . common_prim = None; . break; . } . }; 66 ( 0.00%) let prim = match field.abi { 30 ( 0.00%) Abi::Scalar(scalar) => scalar.value, . _ => { . common_prim = None; . 
break; . } . }; 30 ( 0.00%) if let Some(pair) = common_prim { . // This is pretty conservative. We could go fancier . // by conflating things like i32 and u32, or even . // realising that (u8, u8) could just cohabit with . // u16 or even u32. 36 ( 0.00%) if pair != (prim, offset) { . common_prim = None; . break; . } . } else { . common_prim = Some((prim, offset)); . } . } 16 ( 0.00%) if let Some((prim, offset)) = common_prim { 136 ( 0.00%) let pair = self.scalar_pair(tag, scalar_unit(prim)); 16 ( 0.00%) let pair_offsets = match pair.fields { 16 ( 0.00%) FieldsShape::Arbitrary { ref offsets, ref memory_index } => { 8 ( 0.00%) assert_eq!(memory_index, &[0, 1]); . offsets . } . _ => bug!(), . }; 48 ( 0.00%) if pair_offsets[0] == Size::ZERO 16 ( 0.00%) && pair_offsets[1] == *offset 8 ( 0.00%) && align == pair.align 8 ( 0.00%) && size == pair.size . { . // We can use `ScalarPair` only when it matches our . // already computed layout (including `#[repr(C)]`). 216 ( 0.00%) abi = pair.abi; . } 8 ( 0.00%) } . } . . if layout_variants.iter().all(|v| v.abi.is_uninhabited()) { . abi = Abi::Uninhabited; . } . 496 ( 0.00%) let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag); . . let tagged_layout = Layout { . variants: Variants::Multiple { . tag, . tag_encoding: TagEncoding::Direct, . tag_field: 0, . variants: layout_variants, . }, . fields: FieldsShape::Arbitrary { 155 ( 0.00%) offsets: vec![Size::ZERO], 124 ( 0.00%) memory_index: vec![0], . }, . largest_niche, . abi, 62 ( 0.00%) align, 31 ( 0.00%) size, . }; . 100 ( 0.00%) let best_layout = match (tagged_layout, niche_filling_layout) { . (tagged_layout, Some(niche_filling_layout)) => { . // Pick the smaller layout; otherwise, . // pick the layout with the larger niche; otherwise, . // pick tagged as it has simpler codegen. 483 ( 0.00%) cmp::min_by_key(tagged_layout, niche_filling_layout, |layout| { 42 ( 0.00%) let niche_size = layout.largest_niche.map_or(0, |n| n.available(dl)); 7 ( 0.00%) (layout.size, cmp::Reverse(niche_size)) . }) . } 1,464 ( 0.00%) (tagged_layout, None) => tagged_layout, . }; . 279 ( 0.00%) tcx.intern_layout(best_layout) 110 ( 0.00%) } . . // Types with no meaningful known layout. . ty::Projection(_) | ty::Opaque(..) => { . // NOTE(eddyb) `layout_of` query should've normalized these away, . // if that was possible, so there's no reason to try again here. . return Err(LayoutError::Unknown(ty)); . } . -- line 1419 ---------------------------------------- -- line 1420 ---------------------------------------- . ty::Placeholder(..) | ty::GeneratorWitness(..) | ty::Infer(_) => { . bug!("Layout::compute: unexpected type `{}`", ty) . } . . ty::Bound(..) | ty::Param(_) | ty::Error(_) => { . return Err(LayoutError::Unknown(ty)); . } . }) 3,280 ( 0.00%) } . } . . /// Overlap eligibility and variant assignment for each GeneratorSavedLocal. . #[derive(Clone, Debug, PartialEq)] . enum SavedLocalEligibility { . Unassigned, . Assigned(VariantIdx), . // FIXME: Use newtype_index so we aren't wasting bytes -- line 1436 ---------------------------------------- -- line 1755 ---------------------------------------- . } . . /// This is invoked by the `layout_of` query to record the final . /// layout of each type. . #[inline(always)] . fn record_layout_for_printing(&self, layout: TyAndLayout<'tcx>) { . // If we are running with `-Zprint-type-sizes`, maybe record layouts . // for dumping later. 2,050 ( 0.00%) if self.tcx.sess.opts.debugging_opts.print_type_sizes { . self.record_layout_for_printing_outlined(layout) . } . } . . 
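When both a tagged layout and a niche-filling layout exist, the selection earlier in this listing uses cmp::min_by_key over (size, Reverse(niche_size)): the smaller layout wins, and on a size tie the candidate with the larger remaining niche is preferred. A standalone sketch of that tie-breaking rule (Candidate and the concrete numbers are hypothetical):

use std::cmp::{self, Reverse};

#[derive(Clone, Copy, Debug, PartialEq)]
struct Candidate {
    size: u64,
    niche: u64,
}

fn main() {
    let tagged = Candidate { size: 16, niche: 0 };
    let niche_filling = Candidate { size: 16, niche: 254 };
    // Keys compare as (size, Reverse(niche)): equal sizes, so the larger niche
    // produces the smaller key and that candidate is chosen.
    let best = cmp::min_by_key(tagged, niche_filling, |c| (c.size, Reverse(c.niche)));
    assert_eq!(best, niche_filling);
}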
fn record_layout_for_printing_outlined(&self, layout: TyAndLayout<'tcx>) { . // Ignore layouts that are done with non-empty environments or . // non-monomorphic layouts, as the user only wants to see the stuff . // resulting from the final codegen session. -- line 1771 ---------------------------------------- -- line 2041 ---------------------------------------- . . pub trait HasParamEnv<'tcx> { . fn param_env(&self) -> ty::ParamEnv<'tcx>; . } . . impl<'tcx> HasDataLayout for TyCtxt<'tcx> { . #[inline] . fn data_layout(&self) -> &TargetDataLayout { 536,760 ( 0.01%) &self.data_layout . } . } . . impl<'tcx> HasTargetSpec for TyCtxt<'tcx> { . fn target_spec(&self) -> &Target { 2 ( 0.00%) &self.sess.target . } . } . . impl<'tcx> HasTyCtxt<'tcx> for TyCtxt<'tcx> { . #[inline] . fn tcx(&self) -> TyCtxt<'tcx> { . *self . } -- line 2063 ---------------------------------------- -- line 2080 ---------------------------------------- . #[inline] . fn tcx(&self) -> TyCtxt<'tcx> { . **self . } . } . . impl<'tcx, C> HasParamEnv<'tcx> for LayoutCx<'tcx, C> { . fn param_env(&self) -> ty::ParamEnv<'tcx> { 849 ( 0.00%) self.param_env . } . } . . impl<'tcx, T: HasDataLayout> HasDataLayout for LayoutCx<'tcx, T> { . fn data_layout(&self) -> &TargetDataLayout { . self.tcx.data_layout() . } . } -- line 2096 ---------------------------------------- -- line 2098 ---------------------------------------- . impl<'tcx, T: HasTargetSpec> HasTargetSpec for LayoutCx<'tcx, T> { . fn target_spec(&self) -> &Target { . self.tcx.target_spec() . } . } . . impl<'tcx, T: HasTyCtxt<'tcx>> HasTyCtxt<'tcx> for LayoutCx<'tcx, T> { . fn tcx(&self) -> TyCtxt<'tcx> { 1,238 ( 0.00%) self.tcx.tcx() . } . } . . pub trait MaybeResult { . type Error; . . fn from(x: Result) -> Self; . fn to_result(self) -> Result; -- line 2114 ---------------------------------------- -- line 2124 ---------------------------------------- . Ok(self) . } . } . . impl MaybeResult for Result { . type Error = E; . . fn from(x: Result) -> Self { 167,796 ( 0.00%) x . } . fn to_result(self) -> Result { . self . } . } . . pub type TyAndLayout<'tcx> = rustc_target::abi::TyAndLayout<'tcx, Ty<'tcx>>; . -- line 2140 ---------------------------------------- -- line 2167 ---------------------------------------- . ) -> >>::Error; . } . . /// Blanket extension trait for contexts that can compute layouts of types. . pub trait LayoutOf<'tcx>: LayoutOfHelpers<'tcx> { . /// Computes the layout of a type. Note that this implicitly . /// executes in "reveal all" mode, and will normalize the input type. . #[inline] 466,309 ( 0.01%) fn layout_of(&self, ty: Ty<'tcx>) -> Self::LayoutOfResult { 14,135 ( 0.00%) self.spanned_layout_of(ty, DUMMY_SP) 466,088 ( 0.01%) } . . /// Computes the layout of a type, at `span`. Note that this implicitly . /// executes in "reveal all" mode, and will normalize the input type. . // FIXME(eddyb) avoid passing information like this, and instead add more . // `TyCtxt::at`-like APIs to be able to do e.g. `cx.at(span).layout_of(ty)`. . #[inline] 87,399 ( 0.00%) fn spanned_layout_of(&self, ty: Ty<'tcx>, span: Span) -> Self::LayoutOfResult { 6,723 ( 0.00%) let span = if !span.is_dummy() { span } else { self.layout_tcx_at_span() }; 7,267 ( 0.00%) let tcx = self.tcx().at(span); . . MaybeResult::from( . tcx.layout_of(self.param_env().and(ty)) . .map_err(|err| self.handle_layout_err(err, span, ty)), . ) 53,784 ( 0.00%) } . } . . impl<'tcx, C: LayoutOfHelpers<'tcx>> LayoutOf<'tcx> for C {} . . impl<'tcx> LayoutOfHelpers<'tcx> for LayoutCx<'tcx, TyCtxt<'tcx>> { . 
type LayoutOfResult = Result, LayoutError<'tcx>>; . . #[inline] -- line 2200 ---------------------------------------- -- line 2216 ---------------------------------------- . err . } . } . . impl<'tcx, C> TyAbiInterface<'tcx, C> for Ty<'tcx> . where . C: HasTyCtxt<'tcx> + HasParamEnv<'tcx>, . { 236,210 ( 0.01%) fn ty_and_layout_for_variant( . this: TyAndLayout<'tcx>, . cx: &C, . variant_index: VariantIdx, . ) -> TyAndLayout<'tcx> { 49,304 ( 0.00%) let layout = match this.variants { . Variants::Single { index } . // If all variants but one are uninhabited, the variant layout is the enum layout. 24 ( 0.00%) if index == variant_index && . // Don't confuse variants of uninhabited enums with the enum itself. . // For more details see https://github.com/rust-lang/rust/issues/69763. . this.fields != FieldsShape::Primitive => . { . this.layout . } . . Variants::Single { index } => { -- line 2240 ---------------------------------------- -- line 2263 ---------------------------------------- . align: tcx.data_layout.i8_align, . size: Size::ZERO, . }) . } . . Variants::Multiple { ref variants, .. } => &variants[variant_index], . }; . 197,216 ( 0.00%) assert_eq!(layout.variants, Variants::Single { index: variant_index }); . . TyAndLayout { ty: this.ty, layout } 236,210 ( 0.01%) } . 2,111,415 ( 0.05%) fn ty_and_layout_field(this: TyAndLayout<'tcx>, cx: &C, i: usize) -> TyAndLayout<'tcx> { . enum TyMaybeWithLayout<'tcx> { . Ty(Ty<'tcx>), . TyAndLayout(TyAndLayout<'tcx>), . } . 1,725,903 ( 0.04%) fn field_ty_or_layout<'tcx>( . this: TyAndLayout<'tcx>, . cx: &(impl HasTyCtxt<'tcx> + HasParamEnv<'tcx>), . i: usize, . ) -> TyMaybeWithLayout<'tcx> { . let tcx = cx.tcx(); . let tag_layout = |tag: Scalar| -> TyAndLayout<'tcx> { 675,380 ( 0.01%) let layout = Layout::scalar(cx, tag); 548,847 ( 0.01%) TyAndLayout { layout: tcx.intern_layout(layout), ty: tag.value.to_ty(tcx) } . }; . 425,691 ( 0.01%) match *this.ty.kind() { . ty::Bool . | ty::Char . | ty::Int(_) . | ty::Uint(_) . | ty::Float(_) . | ty::FnPtr(_) . | ty::Never . | ty::FnDef(..) . | ty::GeneratorWitness(..) . | ty::Foreign(..) . | ty::Dynamic(..) => bug!("TyAndLayout::field({:?}): not applicable", this), . . // Potentially-fat pointers. . ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { 4,948 ( 0.00%) assert!(i < this.fields.count()); . . // Reuse the fat `*T` type as its own thin pointer data field. . // This provides information about, e.g., DST struct pointees . // (which may have no non-DST form), and will work as long . // as the `Abi` or `FieldsShape` is checked by users. 4,948 ( 0.00%) if i == 0 { . let nil = tcx.mk_unit(); 1,454 ( 0.00%) let unit_ptr_ty = if this.ty.is_unsafe_ptr() { . tcx.mk_mut_ptr(nil) . } else { 1,424 ( 0.00%) tcx.mk_mut_ref(tcx.lifetimes.re_static, nil) . }; . . // NOTE(eddyb) using an empty `ParamEnv`, and `unwrap`-ing . // the `Result` should always work because the type is . // always either `*mut ()` or `&'static mut ()`. 4,362 ( 0.00%) return TyMaybeWithLayout::TyAndLayout(TyAndLayout { . ty: this.ty, . ..tcx.layout_of(ty::ParamEnv::reveal_all().and(unit_ptr_ty)).unwrap() . }); . } . 7,146 ( 0.00%) match tcx.struct_tail_erasing_lifetimes(pointee, cx.param_env()).kind() { 2,034 ( 0.00%) ty::Slice(_) | ty::Str => TyMaybeWithLayout::Ty(tcx.types.usize), . ty::Dynamic(_, _) => { . TyMaybeWithLayout::Ty(tcx.mk_imm_ref( 3 ( 0.00%) tcx.lifetimes.re_static, 9 ( 0.00%) tcx.mk_array(tcx.types.usize, 3), . )) . /* FIXME: use actual fn pointers . 
Warning: naively computing the number of entries in the . vtable by counting the methods on the trait + methods on . all parent traits does not work, because some methods can . be not object safe and thus excluded from the vtable. . Increase this counter if you tried to implement this but . failed to do it without duplicating a lot of code from -- line 2344 ---------------------------------------- -- line 2350 ---------------------------------------- . */ . } . _ => bug!("TyAndLayout::field({:?}): not applicable", this), . } . } . . // Arrays and slices. . ty::Array(element, _) | ty::Slice(element) => TyMaybeWithLayout::Ty(element), 24 ( 0.00%) ty::Str => TyMaybeWithLayout::Ty(tcx.types.u8), . . // Tuples, generators and closures. . ty::Closure(_, ref substs) => field_ty_or_layout( 162 ( 0.00%) TyAndLayout { ty: substs.as_closure().tupled_upvars_ty(), ..this }, . cx, . i, . ), . . ty::Generator(def_id, ref substs, _) => match this.variants { . Variants::Single { index } => TyMaybeWithLayout::Ty( . substs . .as_generator() -- line 2370 ---------------------------------------- -- line 2377 ---------------------------------------- . Variants::Multiple { tag, tag_field, .. } => { . if i == tag_field { . return TyMaybeWithLayout::TyAndLayout(tag_layout(tag)); . } . TyMaybeWithLayout::Ty(substs.as_generator().prefix_tys().nth(i).unwrap()) . } . }, . 184,532 ( 0.00%) ty::Tuple(tys) => TyMaybeWithLayout::Ty(tys[i].expect_ty()), . . // ADTs. 46,485 ( 0.00%) ty::Adt(def, substs) => { 177,408 ( 0.00%) match this.variants { 46,485 ( 0.00%) Variants::Single { index } => { 185,940 ( 0.00%) TyMaybeWithLayout::Ty(def.variants[index].fields[i].ty(tcx, substs)) . } . . // Discriminant field for enums (where applicable). . Variants::Multiple { tag, .. } => { 84,438 ( 0.00%) assert_eq!(i, 0); 372 ( 0.00%) return TyMaybeWithLayout::TyAndLayout(tag_layout(tag)); . } . } . } . . ty::Projection(_) . | ty::Bound(..) . | ty::Placeholder(..) . | ty::Opaque(..) . | ty::Param(_) . | ty::Infer(_) . | ty::Error(_) => bug!("TyAndLayout::field: unexpected type `{}`", this.ty), . } 1,276,236 ( 0.03%) } . 567,216 ( 0.01%) match field_ty_or_layout(this, cx, i) { 196,262 ( 0.00%) TyMaybeWithLayout::Ty(field_ty) => { 1,875 ( 0.00%) cx.tcx().layout_of(cx.param_env().and(field_ty)).unwrap_or_else(|e| { . bug!( . "failed to get layout for `{}`: {},\n\ . despite it being a field (#{}) of an existing layout: {:#?}", . field_ty, . e, . i, . this . ) . }) . } 131,019 ( 0.00%) TyMaybeWithLayout::TyAndLayout(field_layout) => field_layout, . } 1,126,088 ( 0.02%) } . 5,203 ( 0.00%) fn ty_and_layout_pointee_info_at( . this: TyAndLayout<'tcx>, . cx: &C, . offset: Size, . ) -> Option { 173 ( 0.00%) let tcx = cx.tcx(); . let param_env = cx.param_env(); . . let addr_space_of_ty = |ty: Ty<'tcx>| { 357 ( 0.00%) if ty.is_fn() { cx.data_layout().instruction_address_space } else { AddressSpace::DATA } . }; . 2,561 ( 0.00%) let pointee_info = match *this.ty.kind() { 369 ( 0.00%) ty::RawPtr(mt) if offset.bytes() == 0 => { . tcx.layout_of(param_env.and(mt.ty)).ok().map(|layout| PointeeInfo { 123 ( 0.00%) size: layout.size, 123 ( 0.00%) align: layout.align.abi, . safe: None, 123 ( 0.00%) address_space: addr_space_of_ty(mt.ty), 615 ( 0.00%) }) . } 292 ( 0.00%) ty::FnPtr(fn_sig) if offset.bytes() == 0 => { . tcx.layout_of(param_env.and(tcx.mk_fn_ptr(fn_sig))).ok().map(|layout| PointeeInfo { . size: layout.size, . align: layout.align.abi, . safe: None, . address_space: cx.data_layout().instruction_address_space, . }) . 
} 702 ( 0.00%) ty::Ref(_, ty, mt) if offset.bytes() == 0 => { 234 ( 0.00%) let address_space = addr_space_of_ty(ty); 468 ( 0.00%) let kind = if tcx.sess.opts.optimize == OptLevel::No { . // Use conservative pointer kind if not optimizing. This saves us the . // Freeze/Unpin queries, and can save time in the codegen backend (noalias . // attributes in LLVM have compile-time cost even in unoptimized builds). . PointerKind::Shared . } else { 468 ( 0.00%) match mt { . hir::Mutability::Not => { 1,224 ( 0.00%) if ty.is_freeze(tcx.at(DUMMY_SP), cx.param_env()) { . PointerKind::Frozen . } else { . PointerKind::Shared . } . } . hir::Mutability::Mut => { . // References to self-referential structures should not be considered . // noalias, as another pointer to the structure can be obtained, that . // is not based-on the original reference. We consider all !Unpin . // types to be potentially self-referential here. 207 ( 0.00%) if ty.is_unpin(tcx.at(DUMMY_SP), cx.param_env()) { . PointerKind::UniqueBorrowed . } else { . PointerKind::Shared . } . } . } . }; . . tcx.layout_of(param_env.and(ty)).ok().map(|layout| PointeeInfo { 234 ( 0.00%) size: layout.size, . align: layout.align.abi, . safe: Some(kind), . address_space, 1,638 ( 0.00%) }) . } . . _ => { 312 ( 0.00%) let mut data_variant = match this.variants { . // Within the discriminant field, only the niche itself is . // always initialized, so we only check for a pointer at its . // offset. . // . // If the niche is a pointer, it's either valid (according . // to its type), or null (which the niche field's scalar . // validity range encodes). This allows using . // `dereferenceable_or_null` for e.g., `Option<&T>`, and . // this will continue to work as long as we don't start . // using more niches than just null (e.g., the first page of . // the address space, or unaligned pointers). . Variants::Multiple { 5 ( 0.00%) tag_encoding: TagEncoding::Niche { dataful_variant, .. }, . tag_field, . .. 38 ( 0.00%) } if this.fields.offset(tag_field) == offset => { . Some(this.for_variant(cx, dataful_variant)) . } . _ => Some(this), . }; . 10 ( 0.00%) if let Some(variant) = data_variant { . // We're not interested in any unions. 438 ( 0.00%) if let FieldsShape::Union(_) = variant.fields { . data_variant = None; . } . } . . let mut result = None; . . if let Some(variant) = data_variant { . let ptr_end = offset + Pointer.size(cx); . for i in 0..variant.fields.count() { . let field_start = variant.fields.offset(i); 1,205 ( 0.00%) if field_start <= offset { . let field = variant.field(cx, i); . result = field.to_result().ok().and_then(|field| { 366 ( 0.00%) if ptr_end <= field_start + field.size { . // We found the right field, look inside it. . let field_info = 564 ( 0.00%) field.pointee_info_at(cx, offset - field_start); . field_info . } else { . None . } . }); 141 ( 0.00%) if result.is_some() { . break; . } . } . } . } . . // FIXME(eddyb) This should be for `ptr::Unique`, not `Box`. . if let Some(ref mut pointee) = result { 282 ( 0.00%) if let ty::Adt(def, _) = this.ty.kind() { 352 ( 0.00%) if def.is_box() && offset.bytes() == 0 { . pointee.safe = Some(PointerKind::UniqueOwned); . } . } . } . . result . } . }; -- line 2562 ---------------------------------------- -- line 2564 ---------------------------------------- . debug!( . "pointee_info_at (offset={:?}, type kind: {:?}) => {:?}", . offset, . this.ty.kind(), . pointee_info . ); . . pointee_info 8,048 ( 0.00%) } . } . . impl<'tcx> ty::Instance<'tcx> { . 
// NOTE(eddyb) this is private to avoid using it from outside of . // `fn_abi_of_instance` - any other uses are either too high-level . // for `Instance` (e.g. typeck would use `Ty::fn_sig` instead), . // or should go through `FnAbi` instead, to avoid losing any . // adjustments `fn_abi_of_instance` might be performing. 2,112 ( 0.00%) fn fn_sig_for_fn_abi( . &self, . tcx: TyCtxt<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> ty::PolyFnSig<'tcx> { 1,152 ( 0.00%) let ty = self.ty(tcx, param_env); 610 ( 0.00%) match *ty.kind() { . ty::FnDef(..) => { . // HACK(davidtwco,eddyb): This is a workaround for polymorphization considering . // parameters unused if they show up in the signature, but not in the `mir::Body` . // (i.e. due to being inside a projection that got normalized, see . // `src/test/ui/polymorphization/normalized_sig_types.rs`), and codegen not keeping . // track of a polymorphization `ParamEnv` to allow normalizing later. 350 ( 0.00%) let mut sig = match *ty.kind() { 1,225 ( 0.00%) ty::FnDef(def_id, substs) => tcx . .normalize_erasing_regions(tcx.param_env(def_id), tcx.fn_sig(def_id)) . .subst(tcx, substs), . _ => unreachable!(), . }; . 350 ( 0.00%) if let ty::InstanceDef::VtableShim(..) = self.def { . // Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`. . sig = sig.map_bound(|mut sig| { . let mut inputs_and_output = sig.inputs_and_output.to_vec(); . inputs_and_output[0] = tcx.mk_mut_ptr(inputs_and_output[0]); . sig.inputs_and_output = tcx.intern_type_list(&inputs_and_output); . sig . }); . } 1,400 ( 0.00%) sig . } 68 ( 0.00%) ty::Closure(def_id, substs) => { 34 ( 0.00%) let sig = substs.as_closure().sig(); . . let bound_vars = tcx.mk_bound_variable_kinds( . sig.bound_vars() . .iter() . .chain(iter::once(ty::BoundVariableKind::Region(ty::BrEnv))), . ); . let br = ty::BoundRegion { 34 ( 0.00%) var: ty::BoundVar::from_usize(bound_vars.len() - 1), . kind: ty::BoundRegionKind::BrEnv, . }; . let env_region = ty::ReLateBound(ty::INNERMOST, br); 153 ( 0.00%) let env_ty = tcx.closure_env_ty(def_id, substs, env_region).unwrap(); . 17 ( 0.00%) let sig = sig.skip_binder(); . ty::Binder::bind_with_vars( . tcx.mk_fn_sig( . iter::once(env_ty).chain(sig.inputs().iter().cloned()), . sig.output(), 34 ( 0.00%) sig.c_variadic, 102 ( 0.00%) sig.unsafety, . sig.abi, . ), . bound_vars, . ) . } . ty::Generator(_, substs, _) => { . let sig = substs.as_generator().poly_sig(); . -- line 2641 ---------------------------------------- -- line 2669 ---------------------------------------- . hir::Unsafety::Normal, . rustc_target::spec::abi::Abi::Rust, . ), . bound_vars, . ) . } . _ => bug!("unexpected type {:?} in Instance::fn_sig", ty), . } 1,728 ( 0.00%) } . } . . /// Calculates whether a function's ABI can unwind or not. . /// . /// This takes two primary parameters: . /// . /// * `codegen_fn_attr_flags` - these are flags calculated as part of the . /// codegen attrs for a defined function. For function pointers this set of -- line 2685 ---------------------------------------- -- line 2725 ---------------------------------------- . /// might (from a foreign exception or similar). . #[inline] . pub fn fn_can_unwind<'tcx>( . tcx: TyCtxt<'tcx>, . codegen_fn_attr_flags: CodegenFnAttrFlags, . abi: SpecAbi, . ) -> bool { . // Special attribute for functions which can't unwind. 538 ( 0.00%) if codegen_fn_attr_flags.contains(CodegenFnAttrFlags::NEVER_UNWIND) { . return false; . } . . // Otherwise if this isn't special then unwinding is generally determined by . // the ABI of the itself. 
ABIs like `C` have variants which also . // specifically allow unwinding (`C-unwind`), but not all platform-specific . // ABIs have such an option. Otherwise the only other thing here is Rust . // itself, and those ABIs are determined by the panic strategy configured -- line 2741 ---------------------------------------- -- line 2767 ---------------------------------------- . // to `panic=abort`). . // . // Eventually the check against `c_unwind` here will ideally get removed and . // this'll be a little cleaner as it'll be a straightforward check of the . // ABI. . // . // [rfc]: https://github.com/rust-lang/rfcs/blob/master/text/2945-c-unwind-abi.md . use SpecAbi::*; 1,384 ( 0.00%) match abi { . C { unwind } | Stdcall { unwind } | System { unwind } | Thiscall { unwind } => { 2 ( 0.00%) unwind 4 ( 0.00%) || (!tcx.features().c_unwind && tcx.sess.panic_strategy() == PanicStrategy::Unwind) . } . Cdecl . | Fastcall . | Vectorcall . | Aapcs . | Win64 . | SysV64 . | PtxKernel -- line 2786 ---------------------------------------- -- line 2797 ---------------------------------------- . | Unadjusted => false, . Rust | RustCall => tcx.sess.panic_strategy() == PanicStrategy::Unwind, . } . } . . #[inline] . pub fn conv_from_spec_abi(tcx: TyCtxt<'_>, abi: SpecAbi) -> Conv { . use rustc_target::spec::abi::Abi::*; 1,920 ( 0.00%) match tcx.sess.target.adjust_abi(abi) { . RustIntrinsic | PlatformIntrinsic | Rust | RustCall => Conv::Rust, . . // It's the ABI's job to select this, not ours. . System { .. } => bug!("system abi should be selected elsewhere"), . EfiApi => bug!("eficall abi should be selected elsewhere"), . . Stdcall { .. } => Conv::X86Stdcall, . Fastcall => Conv::X86Fastcall, -- line 2813 ---------------------------------------- -- line 2915 ---------------------------------------- . } . . /// Compute a `FnAbi` suitable for declaring/defining an `fn` instance, and for . /// direct calls to an `fn`. . /// . /// NB: that includes virtual calls, which are represented by "direct calls" . /// to an `InstanceDef::Virtual` instance (of `::fn`). . #[inline] 4,240 ( 0.00%) fn fn_abi_of_instance( . &self, . instance: ty::Instance<'tcx>, . extra_args: &'tcx ty::List>, . ) -> Self::FnAbiOfResult { . // FIXME(eddyb) get a better `span` here. 1,514 ( 0.00%) let span = self.layout_tcx_at_span(); 2,422 ( 0.00%) let tcx = self.tcx().at(span); . . MaybeResult::from( 5,528 ( 0.00%) tcx.fn_abi_of_instance(self.param_env().and((instance, extra_args))).map_err(|err| { . // HACK(eddyb) at least for definitions of/calls to `Instance`s, . // we can get some kind of span even if one wasn't provided. . // However, we don't do this early in order to avoid calling . // `def_span` unconditionally (which may have a perf penalty). . let span = if !span.is_dummy() { span } else { tcx.def_span(instance.def_id()) }; . self.handle_fn_abi_err(err, span, FnAbiRequest::OfInstance { instance, extra_args }) . }), . ) 4,240 ( 0.00%) } . } . . impl<'tcx, C: FnAbiOfHelpers<'tcx>> FnAbiOf<'tcx> for C {} . . fn fn_abi_of_fn_ptr<'tcx>( . tcx: TyCtxt<'tcx>, . query: ty::ParamEnvAnd<'tcx, (ty::PolyFnSig<'tcx>, &'tcx ty::List>)>, . ) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, FnAbiError<'tcx>> { -- line 2950 ---------------------------------------- -- line 2954 ---------------------------------------- . sig, . extra_args, . None, . CodegenFnAttrFlags::empty(), . false, . ) . } . 1,728 ( 0.00%) fn fn_abi_of_instance<'tcx>( . tcx: TyCtxt<'tcx>, . query: ty::ParamEnvAnd<'tcx, (ty::Instance<'tcx>, &'tcx ty::List>)>, . 
) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, FnAbiError<'tcx>> { 1,536 ( 0.00%) let (param_env, (instance, extra_args)) = query.into_parts(); . 768 ( 0.00%) let sig = instance.fn_sig_for_fn_abi(tcx, param_env); . 960 ( 0.00%) let caller_location = if instance.def.requires_caller_location(tcx) { 3 ( 0.00%) Some(tcx.caller_location_ty()) . } else { . None . }; . 192 ( 0.00%) let attrs = tcx.codegen_fn_attrs(instance.def_id()).flags; . 1,536 ( 0.00%) LayoutCx { tcx, param_env }.fn_abi_new_uncached( 768 ( 0.00%) sig, . extra_args, . caller_location, . attrs, 960 ( 0.00%) matches!(instance.def, ty::InstanceDef::Virtual(..)), . ) 1,728 ( 0.00%) } . . impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { . // FIXME(eddyb) perhaps group the signature/type-containing (or all of them?) . // arguments of this method, into a separate `struct`. 2,496 ( 0.00%) fn fn_abi_new_uncached( . &self, . sig: ty::PolyFnSig<'tcx>, . extra_args: &[Ty<'tcx>], . caller_location: Option>, . codegen_fn_attr_flags: CodegenFnAttrFlags, . // FIXME(eddyb) replace this with something typed, like an `enum`. . force_thin_self_ptr: bool, . ) -> Result<&'tcx FnAbi<'tcx, Ty<'tcx>>, FnAbiError<'tcx>> { . debug!("fn_abi_new_uncached({:?}, {:?})", sig, extra_args); . 2,880 ( 0.00%) let sig = self.tcx.normalize_erasing_late_bound_regions(self.param_env, sig); . . let conv = conv_from_spec_abi(self.tcx(), sig.abi); . . let mut inputs = sig.inputs(); . let extra_args = if sig.abi == RustCall { 84 ( 0.00%) assert!(!sig.c_variadic && extra_args.is_empty()); . 21 ( 0.00%) if let Some(input) = sig.inputs().last() { 84 ( 0.00%) if let ty::Tuple(tupled_arguments) = input.kind() { . inputs = &sig.inputs()[0..sig.inputs().len() - 1]; 21 ( 0.00%) tupled_arguments.iter().map(|k| k.expect_ty()).collect() . } else { . bug!( . "argument to function with \"rust-call\" ABI \ . is not a tuple" . ); . } . } else { . bug!( . "argument to function with \"rust-call\" ABI \ . is not a tuple" . ); . } . } else { 684 ( 0.00%) assert!(sig.c_variadic || extra_args.is_empty()); . extra_args.to_vec() . }; . 234 ( 0.00%) let target = &self.tcx.sess.target; 576 ( 0.00%) let target_env_gnu_like = matches!(&target.env[..], "gnu" | "musl" | "uclibc"); 576 ( 0.00%) let win_x64_gnu = target.os == "windows" && target.arch == "x86_64" && target.env == "gnu"; . let linux_s390x_gnu_like = 768 ( 0.00%) target.os == "linux" && target.arch == "s390x" && target_env_gnu_like; . let linux_sparc64_gnu_like = 768 ( 0.00%) target.os == "linux" && target.arch == "sparc64" && target_env_gnu_like; . let linux_powerpc_gnu_like = 1,152 ( 0.00%) target.os == "linux" && target.arch == "powerpc" && target_env_gnu_like; . use SpecAbi::*; 768 ( 0.00%) let rust_abi = matches!(sig.abi, RustIntrinsic | PlatformIntrinsic | Rust | RustCall); . . // Handle safe Rust thin and fat pointers. 192 ( 0.00%) let adjust_for_rust_scalar = |attrs: &mut ArgAttributes, . scalar: Scalar, . layout: TyAndLayout<'tcx>, . offset: Size, . is_return: bool| { . // Booleans are always an i1 that needs to be zero-extended. 46 ( 0.00%) if scalar.is_bool() { 93 ( 0.00%) attrs.ext(ArgExtension::Zext); . return; . } . . // Only pointer types handled below. 284 ( 0.00%) if scalar.value != Pointer { . return; . } . 284 ( 0.00%) if !scalar.valid_range.contains(0) { 372 ( 0.00%) attrs.set(ArgAttribute::NonNull); . } . 2,350 ( 0.00%) if let Some(pointee) = layout.pointee_info_at(self, offset) { . if let Some(kind) = pointee.safe { 543 ( 0.00%) attrs.pointee_align = Some(pointee.align); . . 
// `Box` (`UniqueBorrowed`) are not necessarily dereferenceable . // for the entire duration of the function as they can be deallocated . // at any time. Set their valid size to 0. 905 ( 0.00%) attrs.pointee_size = match kind { . PointerKind::UniqueOwned => Size::ZERO, . _ => pointee.size, . }; . . // `Box` pointer parameters never alias because ownership is transferred . // `&mut` pointer parameters never alias other parameters, . // or mutable global data . // -- line 3078 ---------------------------------------- -- line 3080 ---------------------------------------- . // and can be marked as both `readonly` and `noalias`, as . // LLVM's definition of `noalias` is based solely on memory . // dependencies rather than pointer equality . // . // Due to past miscompiles in LLVM, we apply a separate NoAliasMutRef attribute . // for UniqueBorrowed arguments, so that the codegen backend can decide whether . // or not to actually emit the attribute. It can also be controlled with the . // `-Zmutable-noalias` debugging option. 905 ( 0.00%) let no_alias = match kind { . PointerKind::Shared | PointerKind::UniqueBorrowed => false, . PointerKind::UniqueOwned => true, . PointerKind::Frozen => !is_return, . }; 154 ( 0.00%) if no_alias { 244 ( 0.00%) attrs.set(ArgAttribute::NoAlias); . } . 447 ( 0.00%) if kind == PointerKind::Frozen && !is_return { 244 ( 0.00%) attrs.set(ArgAttribute::ReadOnly); . } . 81 ( 0.00%) if kind == PointerKind::UniqueBorrowed && !is_return { 27 ( 0.00%) attrs.set(ArgAttribute::NoAliasMutRef); . } . } . } . }; . 2,880 ( 0.00%) let arg_of = |ty: Ty<'tcx>, arg_idx: Option| -> Result<_, FnAbiError<'tcx>> { 792 ( 0.00%) let is_return = arg_idx.is_none(); . 1,068 ( 0.00%) let layout = self.layout_of(ty)?; 900 ( 0.00%) let layout = if force_thin_self_ptr && arg_idx == Some(0) { . // Don't pass the vtable, it's not an argument of the virtual fn. . // Instead, pass just the data pointer, but give it the type `*const/mut dyn Trait` . // or `&/&mut dyn Trait` because this is special-cased elsewhere in codegen . make_thin_self_ptr(self, layout) . } else { . layout . }; . 9,863 ( 0.00%) let mut arg = ArgAbi::new(self, layout, |layout, scalar, offset| { 1,701 ( 0.00%) let mut attrs = ArgAttributes::new(); 2,989 ( 0.00%) adjust_for_rust_scalar(&mut attrs, scalar, *layout, offset, is_return); . attrs 5,670 ( 0.00%) }); . 33 ( 0.00%) if arg.layout.is_zst() { . // For some forsaken reason, x86_64-pc-windows-gnu . // doesn't ignore zero-sized struct arguments. . // The same is true for {s390x,sparc64,powerpc}-unknown-linux-{gnu,musl,uclibc}. 78 ( 0.00%) if is_return 18 ( 0.00%) || rust_abi . || (!win_x64_gnu . && !linux_s390x_gnu_like . && !linux_sparc64_gnu_like . && !linux_powerpc_gnu_like) . { 21 ( 0.00%) arg.mode = PassMode::Ignore; . } . } . 9,119 ( 0.00%) Ok(arg) . }; . 7,872 ( 0.00%) let mut fn_abi = FnAbi { . ret: arg_of(sig.output(), None)?, 384 ( 0.00%) args: inputs . .iter() . .cloned() . .chain(extra_args) . .chain(caller_location) . .enumerate() 300 ( 0.00%) .map(|(i, ty)| arg_of(ty, Some(i))) . .collect::>()?, . c_variadic: sig.c_variadic, . fixed_count: inputs.len(), . conv, . can_unwind: fn_can_unwind(self.tcx(), codegen_fn_attr_flags, sig.abi), . }; . self.fn_abi_adjust_for_abi(&mut fn_abi, sig.abi)?; . debug!("fn_abi_new_uncached = {:?}", fn_abi); 768 ( 0.00%) Ok(self.tcx.arena.alloc(fn_abi)) 1,536 ( 0.00%) } . . fn fn_abi_adjust_for_abi( . &self, . fn_abi: &mut FnAbi<'tcx, Ty<'tcx>>, . abi: SpecAbi, . ) -> Result<(), FnAbiError<'tcx>> { . if abi == SpecAbi::Unadjusted { . 
return Ok(()); . } . . if abi == SpecAbi::Rust . || abi == SpecAbi::RustCall . || abi == SpecAbi::RustIntrinsic . || abi == SpecAbi::PlatformIntrinsic . { 2,716 ( 0.00%) let fixup = |arg: &mut ArgAbi<'tcx, Ty<'tcx>>| { 488 ( 0.00%) if arg.is_ignore() { . return; . } . 2,802 ( 0.00%) match arg.layout.abi { . Abi::Aggregate { .. } => {} . . // This is a fun case! The gist of what this is doing is . // that we want callers and callees to always agree on the . // ABI of how they pass SIMD arguments. If we were to *not* . // make these arguments indirect then they'd be immediates . // in LLVM, which means that they'd used whatever the . // appropriate ABI is for the callee and the caller. That -- line 3192 ---------------------------------------- -- line 3211 ---------------------------------------- . return; . } . . _ => return, . } . . // Pass and return structures up to 2 pointers in size by value, matching `ScalarPair`. . // LLVM will usually pass these in 2 registers, which is more efficient than by-ref. 12 ( 0.00%) let max_by_val_size = Pointer.size(self) * 2; . let size = arg.layout.size; . 48 ( 0.00%) if arg.layout.is_unsized() || size > max_by_val_size { . arg.make_indirect(); . } else { . // We want to pass small aggregates as immediates, but using . // a LLVM aggregate type for this leads to bad optimizations, . // so we pick an appropriately sized integer type instead. . arg.cast_to(Reg { kind: RegKind::Integer, size }); . } 2,425 ( 0.00%) }; 191 ( 0.00%) fixup(&mut fn_abi.ret); . for arg in &mut fn_abi.args { 1,413 ( 0.00%) fixup(arg); . } . } else { 6 ( 0.00%) fn_abi.adjust_for_foreign_abi(self, abi)?; . } . . Ok(()) . } . } . . fn make_thin_self_ptr<'tcx>( . cx: &(impl HasTyCtxt<'tcx> + HasParamEnv<'tcx>), -- line 3244 ---------------------------------------- 517,324 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/util.rs -------------------------------------------------------------------------------- Ir -- line 41 ---------------------------------------- . write!(fmt, "{}", x) . } . _ => write!(fmt, "{}", self.val), . } . } . } . . fn int_size_and_signed<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (Size, bool) { 447,384 ( 0.01%) let (int, signed) = match *ty.kind() { 1,043,896 ( 0.02%) Int(ity) => (Integer::from_int_ty(&tcx, ity), true), . Uint(uty) => (Integer::from_uint_ty(&tcx, uty), false), . _ => bug!("non integer discriminant"), . }; . (int.size(), signed) . } . . impl<'tcx> Discr<'tcx> { . /// Adds `1` to the value and wraps around if the maximum for the type is reached. 719,215 ( 0.02%) pub fn wrap_incr(self, tcx: TyCtxt<'tcx>) -> Self { 1,008,098 ( 0.02%) self.checked_add(tcx, 1).0 287,686 ( 0.01%) } 1,342,152 ( 0.03%) pub fn checked_add(self, tcx: TyCtxt<'tcx>, n: u128) -> (Self, bool) { . let (size, signed) = int_size_and_signed(tcx, self.ty); . let (val, oflo) = if signed { . let min = size.signed_int_min(); . let max = size.signed_int_max(); . let val = size.sign_extend(self.val) as i128; 596,512 ( 0.01%) assert!(n < (i128::MAX as u128)); . let n = n as i128; 1,043,896 ( 0.02%) let oflo = val > max - n; 596,512 ( 0.01%) let val = if oflo { min + (n - (max - val) - 1) } else { val + n }; . // zero the upper bits . let val = val as u128; . let val = size.truncate(val); . (val, oflo) . } else { . let max = size.unsigned_int_max(); . let val = self.val; . let oflo = val > max - n; . 
let val = if oflo { n - (max - val) - 1 } else { val + n }; . (val, oflo) . }; 596,512 ( 0.01%) (Self { val, ty: self.ty }, oflo) 1,342,152 ( 0.03%) } . } . . pub trait IntTypeExt { . fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>; . fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option>) -> Option>; . fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx>; . } . . impl IntTypeExt for attr::IntType { . fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 213,440 ( 0.00%) match *self { . SignedInt(ast::IntTy::I8) => tcx.types.i8, . SignedInt(ast::IntTy::I16) => tcx.types.i16, . SignedInt(ast::IntTy::I32) => tcx.types.i32, . SignedInt(ast::IntTy::I64) => tcx.types.i64, . SignedInt(ast::IntTy::I128) => tcx.types.i128, . SignedInt(ast::IntTy::Isize) => tcx.types.isize, . UnsignedInt(ast::UintTy::U8) => tcx.types.u8, . UnsignedInt(ast::UintTy::U16) => tcx.types.u16, . UnsignedInt(ast::UintTy::U32) => tcx.types.u32, . UnsignedInt(ast::UintTy::U64) => tcx.types.u64, . UnsignedInt(ast::UintTy::U128) => tcx.types.u128, . UnsignedInt(ast::UintTy::Usize) => tcx.types.usize, . } 18,820 ( 0.00%) } . . fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx> { . Discr { val: 0, ty: self.to_ty(tcx) } 71,328 ( 0.00%) } . 110 ( 0.00%) fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option>) -> Option> { 266 ( 0.00%) if let Some(val) = val { 104 ( 0.00%) assert_eq!(self.to_ty(tcx), val.ty); 468 ( 0.00%) let (new, oflo) = val.checked_add(tcx, 1); 52 ( 0.00%) if oflo { None } else { Some(new) } . } else { . Some(self.initial_discriminant(tcx)) . } 110 ( 0.00%) } . } . . impl<'tcx> TyCtxt<'tcx> { . /// Creates a hash of the type `Ty` which will be the same no matter what crate . /// context it's calculated within. This is used by the `type_id` intrinsic. . pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 { . let mut hasher = StableHasher::new(); . let mut hcx = self.create_stable_hashing_context(); -- line 131 ---------------------------------------- -- line 138 ---------------------------------------- . hcx.while_hashing_spans(false, |hcx| { . hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| { . ty.hash_stable(hcx, &mut hasher); . }); . }); . hasher.finish() . } . 21 ( 0.00%) pub fn has_error_field(self, ty: Ty<'tcx>) -> bool { 12 ( 0.00%) if let ty::Adt(def, substs) = *ty.kind() { . for field in def.all_fields() { . let field_ty = field.ty(self, substs); . if let Error(_) = field_ty.kind() { . return true; . } . } . } . false 24 ( 0.00%) } . . /// Attempts to returns the deeply last field of nested structures, but . /// does not apply any normalization in its search. Returns the same type . /// if input `ty` is not a structure at all. 4,338 ( 0.00%) pub fn struct_tail_without_normalization(self, ty: Ty<'tcx>) -> Ty<'tcx> { . let tcx = self; . tcx.struct_tail_with_normalize(ty, |ty| ty) 3,856 ( 0.00%) } . . /// Returns the deeply last field of nested structures, or the same type if . /// not a structure at all. Corresponds to the only possible unsized field, . /// and its type can be used to determine unsizing strategy. . /// . /// Should only be called if `ty` has no inference variables and does not . /// need its lifetimes preserved (e.g. as part of codegen); otherwise . /// normalization attempt may cause compiler bugs. 12,265 ( 0.00%) pub fn struct_tail_erasing_lifetimes( . self, . ty: Ty<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> Ty<'tcx> { . let tcx = self; . 
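For `n = 1` the signed branch of `checked_add` above is ordinary two's-complement wrapping. A plain-integer check for an `i8`-sized discriminant (not the rustc `Discr` type):

// Plain-integer illustration of the wrapping increment above, for an i8 discriminant.
fn wrap_incr_i8(val: i8) -> (i8, bool) {
    let max = i8::MAX;
    let min = i8::MIN;
    let oflo = val > max - 1;                  // would adding 1 overflow?
    let next = if oflo { min } else { val + 1 };
    (next, oflo)
}

fn main() {
    assert_eq!(wrap_incr_i8(5), (6, false));
    assert_eq!(wrap_incr_i8(127), (-128, true)); // same wrap that `checked_add(.., 1)` computes
}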
tcx.struct_tail_with_normalize(ty, |ty| tcx.normalize_erasing_regions(param_env, ty)) 10,035 ( 0.00%) } . . /// Returns the deeply last field of nested structures, or the same type if . /// not a structure at all. Corresponds to the only possible unsized field, . /// and its type can be used to determine unsizing strategy. . /// . /// This is parameterized over the normalization strategy (i.e. how to . /// handle `::Assoc` and `impl Trait`); pass the identity . /// function to indicate no normalization should take place. . /// . /// See also `struct_tail_erasing_lifetimes`, which is suitable for use . /// during codegen. 300 ( 0.00%) pub fn struct_tail_with_normalize( . self, . mut ty: Ty<'tcx>, . mut normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, . ) -> Ty<'tcx> { 8,173 ( 0.00%) let recursion_limit = self.recursion_limit(); . for iteration in 0.. { 1,623 ( 0.00%) if !recursion_limit.value_within_limit(iteration) { . return self.ty_error_with_message( . DUMMY_SP, . &format!("reached the recursion limit finding the struct tail for {}", ty), . ); . } 11,066 ( 0.00%) match *ty.kind() { 236 ( 0.00%) ty::Adt(def, substs) => { 472 ( 0.00%) if !def.is_struct() { . break; . } . match def.non_enum_variant().fields.last() { . Some(f) => ty = f.ty(self, substs), . None => break, . } . } . 3 ( 0.00%) ty::Tuple(tys) if let Some((&last_ty, _)) = tys.split_last() => { . ty = last_ty.expect_ty(); . } . . ty::Tuple(_) => break, . . ty::Projection(_) | ty::Opaque(..) => { . let normalized = normalize(ty); . if ty == normalized { -- line 224 ---------------------------------------- -- line 229 ---------------------------------------- . } . . _ => { . break; . } . } . } . ty 234 ( 0.00%) } . . /// Same as applying `struct_tail` on `source` and `target`, but only . /// keeps going as long as the two types are instances of the same . /// structure definitions. . /// For `(Foo>, Foo)`, the result will be `(Foo, Trait)`, . /// whereas struct_tail produces `T`, and `Trait`, respectively. . /// . /// Should only be called if the types have no inference variables and do . /// not need their lifetimes preserved (e.g., as part of codegen); otherwise, . /// normalization attempt may cause compiler bugs. 90 ( 0.00%) pub fn struct_lockstep_tails_erasing_lifetimes( . self, . source: Ty<'tcx>, . target: Ty<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> (Ty<'tcx>, Ty<'tcx>) { . let tcx = self; . tcx.struct_lockstep_tails_with_normalize(source, target, |ty| { . tcx.normalize_erasing_regions(param_env, ty) . }) 60 ( 0.00%) } . . /// Same as applying `struct_tail` on `source` and `target`, but only . /// keeps going as long as the two types are instances of the same . /// structure definitions. . /// For `(Foo>, Foo)`, the result will be `(Foo, Trait)`, . /// whereas struct_tail produces `T`, and `Trait`, respectively. . /// . /// See also `struct_lockstep_tails_erasing_lifetimes`, which is suitable for use -- line 266 ---------------------------------------- -- line 268 ---------------------------------------- . pub fn struct_lockstep_tails_with_normalize( . self, . source: Ty<'tcx>, . target: Ty<'tcx>, . normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>, . ) -> (Ty<'tcx>, Ty<'tcx>) { . let (mut a, mut b) = (source, target); . loop { 72 ( 0.00%) match (&a.kind(), &b.kind()) { . (&Adt(a_def, a_substs), &Adt(b_def, b_substs)) . if a_def == b_def && a_def.is_struct() => . { . if let Some(f) = a_def.non_enum_variant().fields.last() { . a = f.ty(self, a_substs); . b = f.ty(self, b_substs); . } else { . 
break; -- line 284 ---------------------------------------- -- line 310 ---------------------------------------- . . _ => break, . } . } . (a, b) . } . . /// Calculate the destructor of a given type. 108 ( 0.00%) pub fn calculate_dtor( . self, . adt_did: DefId, . validate: impl Fn(Self, DefId) -> Result<(), ErrorReported>, . ) -> Option { 45 ( 0.00%) let drop_trait = self.lang_items().drop_trait()?; . self.ensure().coherent_trait(drop_trait); . 9 ( 0.00%) let ty = self.type_of(adt_did); 81 ( 0.00%) let (did, constness) = self.find_map_relevant_impl(drop_trait, ty, |impl_did| { . if let Some(item_id) = self.associated_item_def_ids(impl_did).first() { . if validate(self, impl_did).is_ok() { . return Some((*item_id, self.impl_constness(impl_did))); . } . } . None . })?; . . Some(ty::Destructor { did, constness }) 90 ( 0.00%) } . . /// Returns the set of types that are required to be alive in . /// order to run the destructor of `def` (see RFCs 769 and . /// 1238). . /// . /// Note that this returns only the constraints for the . /// destructor of `def` itself. For the destructors of the . /// contents, you need `adt_dtorck_constraint`. 88 ( 0.00%) pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) -> Vec> { 40 ( 0.00%) let dtor = match def.destructor(self) { . None => { . debug!("destructor_constraints({:?}) - no dtor", def.did); . return vec![]; . } . Some(dtor) => dtor.did, . }; . . let impl_def_id = self.associated_item(dtor).container.id(); -- line 355 ---------------------------------------- -- line 406 ---------------------------------------- . false . } . } . }) . .map(|(item_param, _)| item_param) . .collect(); . debug!("destructor_constraint({:?}) = {:?}", def.did, result); . result 80 ( 0.00%) } . . /// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). Note . /// that closures have a `DefId`, but the closure *expression* also . /// has a `HirId` that is located within the context where the . /// closure appears (and, sadly, a corresponding `NodeId`, since . /// those are not yet phased out). The parent of the closure's . /// `DefId` will also be the context where it appears. 201 ( 0.00%) pub fn is_closure(self, def_id: DefId) -> bool { 1,005 ( 0.00%) matches!(self.def_kind(def_id), DefKind::Closure | DefKind::Generator) 402 ( 0.00%) } . . /// Returns `true` if `def_id` refers to a definition that does not have its own . /// type-checking context, i.e. closure, generator or inline const. 122 ( 0.00%) pub fn is_typeck_child(self, def_id: DefId) -> bool { 1,958 ( 0.00%) matches!( 2,086 ( 0.00%) self.def_kind(def_id), . DefKind::Closure | DefKind::Generator | DefKind::InlineConst . ) 244 ( 0.00%) } . . /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`). 634 ( 0.00%) pub fn is_trait(self, def_id: DefId) -> bool { 634 ( 0.00%) self.def_kind(def_id) == DefKind::Trait 1,268 ( 0.00%) } . . /// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`), . /// and `false` otherwise. 489 ( 0.00%) pub fn is_trait_alias(self, def_id: DefId) -> bool { 489 ( 0.00%) self.def_kind(def_id) == DefKind::TraitAlias 978 ( 0.00%) } . . /// Returns `true` if this `DefId` refers to the implicit constructor for . /// a tuple struct like `struct Foo(u32)`, and `false` otherwise. 86 ( 0.00%) pub fn is_constructor(self, def_id: DefId) -> bool { 1,788 ( 0.00%) matches!(self.def_kind(def_id), DefKind::Ctor(..)) 172 ( 0.00%) } . . /// Given the `DefId`, returns the `DefId` of the innermost item that . 
/// has its own type-checking context or "inference enviornment". . /// . /// For example, a closure has its own `DefId`, but it is type-checked . /// with the containing item. Similarly, an inline const block has its . /// own `DefId` but it is type-checked together with the containing item. . /// . /// Therefore, when we fetch the . /// `typeck` the closure, for example, we really wind up . /// fetching the `typeck` the enclosing fn item. 4,910 ( 0.00%) pub fn typeck_root_def_id(self, def_id: DefId) -> DefId { . let mut def_id = def_id; . while self.is_typeck_child(def_id) { . def_id = self.parent(def_id).unwrap_or_else(|| { . bug!("closure {:?} has no parent", def_id); . }); . } . def_id 3,928 ( 0.00%) } . . /// Given the `DefId` and substs a closure, creates the type of . /// `self` argument that the closure expects. For example, for a . /// `Fn` closure, this would return a reference type `&T` where . /// `T = closure_ty`. . /// . /// Returns `None` if this closure's kind has not yet been inferred. . /// This should only be possible during type checking. . /// . /// Note that the return value is a late-bound region and hence . /// wrapped in a binder. 153 ( 0.00%) pub fn closure_env_ty( . self, . closure_def_id: DefId, . closure_substs: SubstsRef<'tcx>, . env_region: ty::RegionKind, . ) -> Option> { . let closure_ty = self.mk_closure(closure_def_id, closure_substs); . let closure_kind_ty = closure_substs.as_closure().kind_ty(); 51 ( 0.00%) let closure_kind = closure_kind_ty.to_opt_closure_kind()?; 68 ( 0.00%) let env_ty = match closure_kind { . ty::ClosureKind::Fn => self.mk_imm_ref(self.mk_region(env_region), closure_ty), 77 ( 0.00%) ty::ClosureKind::FnMut => self.mk_mut_ref(self.mk_region(env_region), closure_ty), . ty::ClosureKind::FnOnce => closure_ty, . }; . Some(env_ty) 136 ( 0.00%) } . . /// Returns `true` if the node pointed to by `def_id` is a `static` item. 700 ( 0.00%) pub fn is_static(self, def_id: DefId) -> bool { . self.static_mutability(def_id).is_some() 800 ( 0.00%) } . . /// Returns `true` if this is a `static` item with the `#[thread_local]` attribute. . pub fn is_thread_local_static(self, def_id: DefId) -> bool { . self.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL) . } . . /// Returns `true` if the node pointed to by `def_id` is a mutable `static` item. . pub fn is_mutable_static(self, def_id: DefId) -> bool { -- line 510 ---------------------------------------- -- line 597 ---------------------------------------- . self.found_recursion = def_id == *self.primary_def_id.as_ref().unwrap(); . None . } . } . } . . impl<'tcx> TypeFolder<'tcx> for OpaqueTypeExpander<'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { 2 ( 0.00%) self.tcx . } . . fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { 4 ( 0.00%) if let ty::Opaque(def_id, substs) = t.kind { . self.expand_opaque_ty(def_id, substs).unwrap_or(t) 2 ( 0.00%) } else if t.has_opaque_types() { . t.super_fold_with(self) . } else { . t . } . } . } . . impl<'tcx> ty::TyS<'tcx> { -- line 619 ---------------------------------------- -- line 658 ---------------------------------------- . . /// Checks whether values of this type `T` are *moved* or *copied* . /// when referenced -- this amounts to a check for whether `T: . /// Copy`, but note that we **don't** consider lifetimes when . /// doing this check. This means that we may generate MIR which . /// does copies even when the type actually doesn't satisfy the . /// full requirements for the `Copy` trait (cc #29149) -- this . 
/// winds up being reported as an error during NLL borrow check. 855 ( 0.00%) pub fn is_copy_modulo_regions( . &'tcx self, . tcx_at: TyCtxtAt<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> bool { . tcx_at.is_copy_raw(param_env.and(self)) 684 ( 0.00%) } . . /// Checks whether values of this type `T` have a size known at . /// compile time (i.e., whether `T: Sized`). Lifetimes are ignored . /// for the purposes of this check, so it can be an . /// over-approximation in generic contexts, where one can have . /// strange rules like `>::Bar: Sized` that . /// actually carry lifetime requirements. 267,750 ( 0.01%) pub fn is_sized(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool { 148,750 ( 0.00%) self.is_trivially_sized(tcx_at.tcx) || tcx_at.is_sized_raw(param_env.and(self)) 178,500 ( 0.00%) } . . /// Checks whether values of this type `T` implement the `Freeze` . /// trait -- frozen types are those that do not contain an . /// `UnsafeCell` anywhere. This is a language concept used to . /// distinguish "true immutability", which is relevant to . /// optimization as well as the rules around static values. Note . /// that the `Freeze` trait is not exposed to end users and is . /// effectively an implementation detail. 249,435 ( 0.01%) pub fn is_freeze(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool { 138,575 ( 0.00%) self.is_trivially_freeze() || tcx_at.is_freeze_raw(param_env.and(self)) 166,290 ( 0.00%) } . . /// Fast path helper for testing if a type is `Freeze`. . /// . /// Returning true means the type is known to be `Freeze`. Returning . /// `false` means nothing -- could be `Freeze`, might not be. . fn is_trivially_freeze(&self) -> bool { . match self.kind() { . ty::Int(_) -- line 701 ---------------------------------------- -- line 723 ---------------------------------------- . | ty::Opaque(..) . | ty::Param(_) . | ty::Placeholder(_) . | ty::Projection(_) => false, . } . } . . /// Checks whether values of this type `T` implement the `Unpin` trait. 270 ( 0.00%) pub fn is_unpin(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool { 150 ( 0.00%) self.is_trivially_unpin() || tcx_at.is_unpin_raw(param_env.and(self)) 180 ( 0.00%) } . . /// Fast path helper for testing if a type is `Unpin`. . /// . /// Returning true means the type is known to be `Unpin`. Returning . /// `false` means nothing -- could be `Unpin`, might not be. 139,100 ( 0.00%) fn is_trivially_unpin(&self) -> bool { 139,355 ( 0.00%) match self.kind() { . ty::Int(_) . | ty::Uint(_) . | ty::Float(_) . | ty::Bool . | ty::Char . | ty::Str . | ty::Never . | ty::Ref(..) . | ty::RawPtr(_) . | ty::FnDef(..) . | ty::Error(_) . | ty::FnPtr(_) => true, . ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_unpin), 102 ( 0.00%) ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_unpin(), . ty::Adt(..) . | ty::Bound(..) . | ty::Closure(..) . | ty::Dynamic(..) . | ty::Foreign(_) . | ty::Generator(..) . | ty::GeneratorWitness(_) . | ty::Infer(_) . | ty::Opaque(..) . | ty::Param(_) . | ty::Placeholder(_) . | ty::Projection(_) => false, . } 194,740 ( 0.00%) } . . /// If `ty.needs_drop(...)` returns `true`, then `ty` is definitely . /// non-copy and *might* have a destructor attached; if it returns . /// `false`, then `ty` definitely has no destructor (i.e., no drop glue). . /// . /// (Note that this implies that if `ty` has a destructor attached, . /// then `needs_drop` will definitely return `true` for `ty`.) . /// . 
/// Note that this method is used to check eligible types in unions. . #[inline] 148,110 ( 0.00%) pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool { . // Avoid querying in simple cases. 158,757 ( 0.00%) match needs_drop_components(self, &tcx.data_layout) { . Err(AlwaysRequiresDrop) => true, 95,964 ( 0.00%) Ok(components) => { . let query_ty = match *components { 75,958 ( 0.00%) [] => return false, . // If we've got a single component, call the query with that . // to increase the chance that we hit the query cache. 9,431 ( 0.00%) [component_ty] => component_ty, . _ => self, . }; . . // This doesn't depend on regions, so try to minimize distinct . // query keys used. . // If normalization fails, we just use `query_ty`. . let query_ty = 84 ( 0.00%) tcx.try_normalize_erasing_regions(param_env, query_ty).unwrap_or(query_ty); . 42 ( 0.00%) tcx.needs_drop_raw(param_env.and(query_ty)) . } . } 133,299 ( 0.00%) } . . /// Checks if `ty` has has a significant drop. . /// . /// Note that this method can return false even if `ty` has a destructor . /// attached; even if that is the case then the adt has been marked with . /// the attribute `rustc_insignificant_dtor`. . /// . /// Note that this method is used to check for change in drop order for -- line 809 ---------------------------------------- -- line 854 ---------------------------------------- . /// . /// This function is "shallow" because it may return `true` for a composite type whose fields . /// are not `StructuralEq`. For example, `[T; 4]` has structural equality regardless of `T` . /// because equality for arrays is determined by the equality of each array element. If you . /// want to know whether a given call to `PartialEq::eq` will proceed structurally all the way . /// down, you will need to use a type visitor. . #[inline] . pub fn is_structural_eq_shallow(&'tcx self, tcx: TyCtxt<'tcx>) -> bool { 109,360 ( 0.00%) match self.kind() { . // Look for an impl of both `PartialStructuralEq` and `StructuralEq`. . Adt(..) => tcx.has_structural_eq_impls(self), . . // Primitive types that satisfy `Eq`. . Bool | Char | Int(_) | Uint(_) | Str | Never => true, . . // Composite types that satisfy `Eq` when all of their fields do. . // -- line 870 ---------------------------------------- -- line 911 ---------------------------------------- . /// This method is idempotent, i.e. `ty.peel_refs().peel_refs() == ty.peel_refs()`. . /// . /// # Examples . /// . /// - `u8` -> `u8` . /// - `&'a mut u8` -> `u8` . /// - `&'a &'b u8` -> `u8` . /// - `&'a *const &'b u8 -> *const &'b u8` 164 ( 0.00%) pub fn peel_refs(&'tcx self) -> Ty<'tcx> { . let mut ty = self; 784 ( 0.00%) while let Ref(_, inner_ty, _) = ty.kind() { 152 ( 0.00%) ty = inner_ty; . } . ty 164 ( 0.00%) } . . pub fn outer_exclusive_binder(&'tcx self) -> DebruijnIndex { 402 ( 0.00%) self.outer_exclusive_binder 402 ( 0.00%) } . } . . pub enum ExplicitSelf<'tcx> { . ByValue, . ByReference(ty::Region<'tcx>, hir::Mutability), . ByRawPointer(hir::Mutability), . ByBox, . Other, -- line 937 ---------------------------------------- -- line 976 ---------------------------------------- . _ => Other, . } . } . } . . /// Returns a list of types such that the given type needs drop if and only if . /// *any* of the returned types need drop. Returns `Err(AlwaysRequiresDrop)` if . /// this type always needs drop. 232,274 ( 0.01%) pub fn needs_drop_components<'tcx>( . ty: Ty<'tcx>, . target_layout: &TargetDataLayout, . 
) -> Result; 2]>, AlwaysRequiresDrop> { 199,092 ( 0.00%) match ty.kind() { . ty::Infer(ty::FreshIntTy(_)) . | ty::Infer(ty::FreshFloatTy(_)) . | ty::Bool . | ty::Int(_) . | ty::Uint(_) . | ty::Float(_) . | ty::Never . | ty::FnDef(..) -- line 996 ---------------------------------------- -- line 1003 ---------------------------------------- . . // Foreign types can never have destructors. . ty::Foreign(..) => Ok(SmallVec::new()), . . ty::Dynamic(..) | ty::Error(_) => Err(AlwaysRequiresDrop), . . ty::Slice(ty) => needs_drop_components(ty, target_layout), . ty::Array(elem_ty, size) => { 36 ( 0.00%) match needs_drop_components(elem_ty, target_layout) { 12 ( 0.00%) Ok(v) if v.is_empty() => Ok(v), 48 ( 0.00%) res => match size.val.try_to_bits(target_layout.pointer_size) { . // Arrays of size zero don't need drop, even if their element . // type does. . Some(0) => Ok(SmallVec::new()), 24 ( 0.00%) Some(_) => res, . // We don't know which of the cases above we are in, so . // return the whole type and let the caller decide what to . // do. . None => Ok(smallvec![ty]), . }, . } . } . // If any field needs drop, then the whole tuple does. 9,164 ( 0.00%) ty::Tuple(..) => ty.tuple_fields().try_fold(SmallVec::new(), move |mut acc, elem| { 54,864 ( 0.00%) acc.extend(needs_drop_components(elem, target_layout)?); 36,576 ( 0.00%) Ok(acc) . }), . . // These require checking for `Copy` bounds or `Adt` destructors. . ty::Adt(..) . | ty::Projection(..) . | ty::Param(_) . | ty::Bound(..) . | ty::Placeholder(..) . | ty::Opaque(..) . | ty::Infer(_) . | ty::Closure(..) 37,176 ( 0.00%) | ty::Generator(..) => Ok(smallvec![ty]), . } 199,092 ( 0.00%) } . 60 ( 0.00%) pub fn is_trivially_const_drop<'tcx>(ty: Ty<'tcx>) -> bool { 40 ( 0.00%) match *ty.kind() { . ty::Bool . | ty::Char . | ty::Int(_) . | ty::Uint(_) . | ty::Float(_) . | ty::Infer(ty::IntVar(_)) . | ty::Infer(ty::FloatVar(_)) . | ty::Str -- line 1053 ---------------------------------------- -- line 1070 ---------------------------------------- . // Not trivial because they have components, and instead of looking inside, . // we'll just perform trait selection. . ty::Closure(..) | ty::Generator(..) | ty::GeneratorWitness(_) | ty::Adt(..) => false, . . ty::Array(ty, _) | ty::Slice(ty) => is_trivially_const_drop(ty), . . ty::Tuple(tys) => tys.iter().all(|ty| is_trivially_const_drop(ty.expect_ty())), . } 80 ( 0.00%) } . . // Does the equivalent of . // ``` . // let v = self.iter().map(|p| p.fold_with(folder)).collect::>(); . // folder.tcx().intern_*(&v) . // ``` 73,448 ( 0.00%) pub fn fold_list<'tcx, F, T>( . list: &'tcx ty::List, . folder: &mut F, . intern: impl FnOnce(TyCtxt<'tcx>, &[T]) -> &'tcx ty::List, . ) -> Result<&'tcx ty::List, F::Error> . where . F: FallibleTypeFolder<'tcx>, . T: TypeFoldable<'tcx> + PartialEq + Copy, . { . let mut iter = list.iter(); . // Look for the first element that changed 828 ( 0.00%) match iter.by_ref().enumerate().find_map(|(i, t)| match t.try_fold_with(folder) { 1,385 ( 0.00%) Ok(new_t) if new_t == t => None, . new_t => Some((i, new_t)), . }) { . Some((i, Ok(new_t))) => { . // An element changed, prepare to intern the resulting list . let mut new_list = SmallVec::<[_; 8]>::with_capacity(list.len()); . new_list.extend_from_slice(&list[..i]); . new_list.push(new_t); . for t in iter { . new_list.push(t.try_fold_with(folder)?) . } 1,201 ( 0.00%) Ok(intern(folder.tcx(), &new_list)) . } . Some((_, Err(err))) => { . return Err(err); . } . None => Ok(list), . } 74,447 ( 0.00%) } . . 
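`fold_list` above is a copy-on-write fold: it scans for the first element the folder changes and only then allocates a new list, so the common no-change case never re-allocates. A generic standalone sketch of the same pattern (it returns `None` for "unchanged" instead of interning):

// Standalone sketch of the copy-on-write fold used by `fold_list` above.
fn fold_cow<T: PartialEq + Copy>(list: &[T], mut f: impl FnMut(T) -> T) -> Option<Vec<T>> {
    let mut iter = list.iter().enumerate();
    // Find the first element the folder actually changes.
    let (i, first_new) = iter
        .by_ref()
        .find_map(|(i, &t)| {
            let new = f(t);
            (new != t).then(|| (i, new))
        })?; // `None` means nothing changed: the caller keeps the original list.
    // Something changed: copy the unchanged prefix, then fold the rest.
    let mut out = Vec::with_capacity(list.len());
    out.extend_from_slice(&list[..i]);
    out.push(first_new);
    out.extend(iter.map(|(_, &t)| f(t)));
    Some(out)
}

fn main() {
    let xs = [1, 2, 3];
    assert_eq!(fold_cow(&xs, |x| x), None);                // no change, no new allocation
    assert_eq!(fold_cow(&xs, |x| x * 2), Some(vec![2, 4, 6]));
}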
#[derive(Copy, Clone, Debug, HashStable, TyEncodable, TyDecodable)] . pub struct AlwaysRequiresDrop; . . /// Normalizes all opaque types in the given value, replacing them . /// with their underlying types. 8 ( 0.00%) pub fn normalize_opaque_types<'tcx>( . tcx: TyCtxt<'tcx>, . val: &'tcx List>, . ) -> &'tcx List> { 14 ( 0.00%) let mut visitor = OpaqueTypeExpander { . seen_opaque_tys: FxHashSet::default(), . expanded_cache: FxHashMap::default(), . primary_def_id: None, . found_recursion: false, . found_any_recursion: false, . check_recursion: false, . tcx, . }; . val.fold_with(&mut visitor) 10 ( 0.00%) } . . pub fn provide(providers: &mut ty::query::Providers) { 2 ( 0.00%) *providers = ty::query::Providers { normalize_opaque_types, ..*providers } . } 517,987 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_target/src/abi/mod.rs -------------------------------------------------------------------------------- Ir -- line 40 ---------------------------------------- . /// Minimum size of #[repr(C)] enums (default I32 bits) . pub c_enum_min_size: Integer, . } . . impl Default for TargetDataLayout { . /// Creates an instance of `TargetDataLayout`. . fn default() -> TargetDataLayout { . let align = |bits| Align::from_bits(bits).unwrap(); 24 ( 0.00%) TargetDataLayout { . endian: Endian::Big, . i1_align: AbiAndPrefAlign::new(align(8)), . i8_align: AbiAndPrefAlign::new(align(8)), . i16_align: AbiAndPrefAlign::new(align(16)), . i32_align: AbiAndPrefAlign::new(align(32)), . i64_align: AbiAndPrefAlign { abi: align(32), pref: align(64) }, . i128_align: AbiAndPrefAlign { abi: align(32), pref: align(64) }, . f32_align: AbiAndPrefAlign::new(align(32)), . f64_align: AbiAndPrefAlign::new(align(64)), . pointer_size: Size::from_bits(64), . pointer_align: AbiAndPrefAlign::new(align(64)), . aggregate_align: AbiAndPrefAlign { abi: align(0), pref: align(64) }, 8 ( 0.00%) vector_align: vec![ . (Size::from_bits(64), AbiAndPrefAlign::new(align(64))), . (Size::from_bits(128), AbiAndPrefAlign::new(align(128))), . ], . instruction_address_space: AddressSpace::DATA, . c_enum_min_size: Integer::I32, . } . } . } . . impl TargetDataLayout { 18 ( 0.00%) pub fn parse(target: &Target) -> Result { . // Parse an address space index from a string. . let parse_address_space = |s: &str, cause: &str| { . s.parse::().map(AddressSpace).map_err(|err| { . format!("invalid address space `{}` for `{}` in \"data-layout\": {}", s, cause, err) . }) . }; . . // Parse a bit count from a string. -- line 80 ---------------------------------------- -- line 83 ---------------------------------------- . format!("invalid {} `{}` for `{}` in \"data-layout\": {}", kind, s, cause, err) . }) . }; . . // Parse a size string. . let size = |s: &str, cause: &str| parse_bits(s, "size", cause).map(Size::from_bits); . . // Parse an alignment string. 20 ( 0.00%) let align = |s: &[&str], cause: &str| { 2 ( 0.00%) if s.is_empty() { . return Err(format!("missing alignment for `{}` in \"data-layout\"", cause)); . } 12 ( 0.00%) let align_from_bits = |bits| { . Align::from_bits(bits).map_err(|err| { . format!("invalid alignment for `{}` in \"data-layout\": {}", cause, err) . }) 16 ( 0.00%) }; 18 ( 0.00%) let abi = parse_bits(s[0], "alignment", cause)?; . let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?; 14 ( 0.00%) Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? }) 16 ( 0.00%) }; . . 
let mut dl = TargetDataLayout::default(); . let mut i128_align_src = 64; 32 ( 0.00%) for spec in target.data_layout.split('-') { . let spec_parts = spec.split(':').collect::>(); . . match &*spec_parts { 78 ( 0.00%) ["e"] => dl.endian = Endian::Little, . ["E"] => dl.endian = Endian::Big, 2 ( 0.00%) [p] if p.starts_with('P') => { . dl.instruction_address_space = parse_address_space(&p[1..], "P")? . } 44 ( 0.00%) ["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?, 4 ( 0.00%) ["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?, 4 ( 0.00%) ["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?, 44 ( 0.00%) [p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => { . dl.pointer_size = size(s, p)?; . dl.pointer_align = align(a, p)?; . } 64 ( 0.00%) [s, ref a @ ..] if s.starts_with('i') => { 4 ( 0.00%) let bits = match s[1..].parse::() { . Ok(bits) => bits, . Err(_) => { . size(&s[1..], "i")?; // For the user error. . continue; . } . }; 14 ( 0.00%) let a = align(a, s)?; 10 ( 0.00%) match bits { . 1 => dl.i1_align = a, . 8 => dl.i8_align = a, . 16 => dl.i16_align = a, . 32 => dl.i32_align = a, 6 ( 0.00%) 64 => dl.i64_align = a, . _ => {} . } 8 ( 0.00%) if bits >= i128_align_src && bits <= 128 { . // Default alignment for i128 is decided by taking the alignment of . // largest-sized i{64..=128}. . i128_align_src = bits; 8 ( 0.00%) dl.i128_align = a; . } . } 14 ( 0.00%) [s, ref a @ ..] if s.starts_with('v') => { . let v_size = size(&s[1..], "v")?; . let a = align(a, s)?; . if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) { . v.1 = a; . continue; . } . // No existing entry, add a new one. . dl.vector_align.push((v_size, a)); . } . _ => {} // Ignore everything else. . } . } . . // Perform consistency checks against the Target information. 8 ( 0.00%) if dl.endian != target.endian { . return Err(format!( . "inconsistent target specification: \"data-layout\" claims \ . architecture is {}-endian, while \"target-endian\" is `{}`", . dl.endian.as_str(), . target.endian.as_str(), . )); . } . 10 ( 0.00%) if dl.pointer_size.bits() != target.pointer_width.into() { . return Err(format!( . "inconsistent target specification: \"data-layout\" claims \ . pointers are {}-bit, while \"target-pointer-width\" is `{}`", . dl.pointer_size.bits(), . target.pointer_width . )); . } . 4 ( 0.00%) dl.c_enum_min_size = Integer::from_size(Size::from_bits(target.c_enum_min_bits))?; . 34 ( 0.00%) Ok(dl) 18 ( 0.00%) } . . /// Returns exclusive upper bound on object size. . /// . /// The theoretical maximum object size is defined as the maximum positive `isize` value. . /// This ensures that the `offset` semantics remain well-defined by allowing it to correctly . /// index every address within an object along with one byte past the end, along with allowing . /// `isize` to store the difference between any two pointers into an object. . /// . /// The upper bound on 64-bit currently needs to be lower because LLVM uses a 64-bit integer . /// to represent object size in bits. It would need to be 1 << 61 to account for this, but is . /// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable . /// address space on 64-bit ARMv8 and x86_64. . #[inline] . pub fn obj_size_bound(&self) -> u64 { 721,884 ( 0.02%) match self.pointer_size.bits() { . 16 => 1 << 15, . 32 => 1 << 31, . 64 => 1 << 47, . bits => panic!("obj_size_bound: unknown pointer bit size {}", bits), . } 31 ( 0.00%) } . . #[inline] . 
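Each `-`-separated spec in the LLVM data-layout string configures one piece of `TargetDataLayout`, as the loop above shows. A toy dispatcher over a made-up layout string, keeping only the endianness and pointer-size specs (the string is illustrative, not a real target's):

// Toy walk over a made-up data-layout string, mirroring the dispatch in `parse` above.
fn main() {
    let data_layout = "e-p:64:64-i64:64-a:0:64"; // hypothetical example, not a real target
    let mut pointer_bits = 0u64;
    let mut little_endian = false;
    for spec in data_layout.split('-') {
        let parts: Vec<&str> = spec.split(':').collect();
        match parts.as_slice() {
            ["e"] => little_endian = true,                // endianness
            ["p", size, ..] => {
                pointer_bits = size.parse().unwrap();     // pointer size in bits
            }
            [s, ..] if s.starts_with('i') => {
                // integer alignment spec such as "i64:64"; ignored in this toy
            }
            _ => {}                                       // everything else ignored
        }
    }
    assert!(little_endian);
    assert_eq!(pointer_bits, 64);
}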
pub fn ptr_sized_integer(&self) -> Integer { 559,428 ( 0.01%) match self.pointer_size.bits() { . 16 => I16, . 32 => I32, . 64 => I64, . bits => panic!("ptr_sized_integer: unknown pointer bit size {}", bits), . } 31 ( 0.00%) } . . #[inline] . pub fn vector_align(&self, vec_size: Size) -> AbiAndPrefAlign { . for &(size, align) in &self.vector_align { . if size == vec_size { . return align; . } . } -- line 222 ---------------------------------------- -- line 233 ---------------------------------------- . impl HasDataLayout for TargetDataLayout { . #[inline] . fn data_layout(&self) -> &TargetDataLayout { . self . } . } . . /// Endianness of the target, which must match cfg(target-endian). 2 ( 0.00%) #[derive(Copy, Clone, PartialEq)] . pub enum Endian { . Little, . Big, . } . . impl Endian { . pub fn as_str(&self) -> &'static str { 3 ( 0.00%) match self { . Self::Little => "little", . Self::Big => "big", . } 1 ( 0.00%) } . } . . impl fmt::Debug for Endian { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . f.write_str(self.as_str()) . } . } . -- line 261 ---------------------------------------- -- line 277 ---------------------------------------- . } . } . . /// Size of a type in bytes. . #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)] . #[derive(HashStable_Generic)] . pub struct Size { . // The top 3 bits are ALWAYS zero. 365,168 ( 0.01%) raw: u64, . } . . impl Size { . pub const ZERO: Size = Size { raw: 0 }; . . /// Rounds `bits` up to the next-higher byte boundary, if `bits` is . /// is not aligned. . pub fn from_bits(bits: impl TryInto) -> Size { -- line 293 ---------------------------------------- -- line 296 ---------------------------------------- . #[cold] . fn overflow(bits: u64) -> ! { . panic!("Size::from_bits({}) has overflowed", bits); . } . . // This is the largest value of `bits` that does not cause overflow . // during rounding, and guarantees that the resulting number of bytes . // cannot cause overflow when multiplied by 8. 36 ( 0.00%) if bits > 0xffff_ffff_ffff_fff8 { . overflow(bits); . } . . // Avoid potential overflow from `bits + 7`. 1,346 ( 0.00%) Size { raw: bits / 8 + ((bits % 8) + 7) / 8 } . } . . #[inline] . pub fn from_bytes(bytes: impl TryInto) -> Size { . let bytes: u64 = bytes.try_into().ok().unwrap(); . Size { raw: bytes } . } . -- line 317 ---------------------------------------- -- line 322 ---------------------------------------- . . #[inline] . pub fn bytes_usize(self) -> usize { . self.bytes().try_into().unwrap() . } . . #[inline] . pub fn bits(self) -> u64 { 788,464 ( 0.02%) self.raw << 3 . } . . #[inline] . pub fn bits_usize(self) -> usize { . self.bits().try_into().unwrap() . } . . #[inline] 37 ( 0.00%) pub fn align_to(self, align: Align) -> Size { 33,369 ( 0.00%) let mask = align.bytes() - 1; 28,683 ( 0.00%) Size::from_bytes((self.bytes() + mask) & !mask) 111 ( 0.00%) } . . #[inline] . pub fn is_aligned(self, align: Align) -> bool { 8 ( 0.00%) let mask = align.bytes() - 1; 4 ( 0.00%) self.bytes() & mask == 0 . } . . #[inline] . pub fn checked_add(self, offset: Size, cx: &C) -> Option { . let dl = cx.data_layout(); . 120,264 ( 0.00%) let bytes = self.bytes().checked_add(offset.bytes())?; . 600,858 ( 0.01%) if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None } . } . . #[inline] 14 ( 0.00%) pub fn checked_mul(self, count: u64, cx: &C) -> Option { . let dl = cx.data_layout(); . 
19 ( 0.00%) let bytes = self.bytes().checked_mul(count)?; 57 ( 0.00%) if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None } 7 ( 0.00%) } . . /// Truncates `value` to `self` bits and then sign-extends it to 128 bits . /// (i.e., if it is negative, fill with 1's on the left). . #[inline] . pub fn sign_extend(self, value: u128) -> u128 { . let size = self.bits(); 48 ( 0.00%) if size == 0 { . // Truncated until nothing is left. . return 0; . } . // Sign-extend it. 149,176 ( 0.00%) let shift = 128 - size; . // Shift the unsigned value to the left, then shift back to the right as signed . // (essentially fills with sign bit on the left). 5,070,784 ( 0.11%) (((value << shift) as i128) >> shift) as u128 . } . . /// Truncates `value` to `self` bits. . #[inline] . pub fn truncate(self, value: u128) -> u128 { . let size = self.bits(); 238,648 ( 0.01%) if size == 0 { . // Truncated until nothing is left. . return 0; . } 234,238 ( 0.01%) let shift = 128 - size; . // Truncate (shift left to drop out leftover values, shift right to fill with zeroes). 4,269,269 ( 0.09%) (value << shift) >> shift . } . . #[inline] . pub fn signed_int_min(&self) -> i128 { 1,342,152 ( 0.03%) self.sign_extend(1_u128 << (self.bits() - 1)) as i128 . } . . #[inline] . pub fn signed_int_max(&self) -> i128 { 1,342,152 ( 0.03%) i128::MAX >> (128 - self.bits()) . } . . #[inline] 31 ( 0.00%) pub fn unsigned_int_max(&self) -> u128 { 260,128 ( 0.01%) u128::MAX >> (128 - self.bits()) 62 ( 0.00%) } . } . . // Panicking addition, subtraction and multiplication for convenience. . // Avoid during layout computation, return `LayoutError` instead. . . impl Add for Size { . type Output = Size; . #[inline] . fn add(self, other: Size) -> Size { . Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| { . panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes()) . })) 7 ( 0.00%) } . } . . impl Sub for Size { . type Output = Size; . #[inline] . fn sub(self, other: Size) -> Size { . Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| { . panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes()) -- line 430 ---------------------------------------- -- line 438 ---------------------------------------- . fn mul(self, size: Size) -> Size { . size * self . } . } . . impl Mul for Size { . type Output = Size; . #[inline] 18,244 ( 0.00%) fn mul(self, count: u64) -> Size { 36,904 ( 0.00%) match self.bytes().checked_mul(count) { . Some(bytes) => Size::from_bytes(bytes), . None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count), . } 18,244 ( 0.00%) } . } . . impl AddAssign for Size { . #[inline] . fn add_assign(&mut self, other: Size) { 14,378 ( 0.00%) *self = *self + other; . } . } . . impl Step for Size { . #[inline] . fn steps_between(start: &Self, end: &Self) -> Option { . u64::steps_between(&start.bytes(), &end.bytes()) . } -- line 465 ---------------------------------------- -- line 494 ---------------------------------------- . Self::from_bytes(u64::backward_unchecked(start.bytes(), count)) . } . } . . /// Alignment of a type in bytes (always a power of two). . #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)] . #[derive(HashStable_Generic)] . pub struct Align { 45,933 ( 0.00%) pow2: u8, . } . . impl Align { . pub const ONE: Align = Align { pow2: 0 }; . . #[inline] . pub fn from_bits(bits: u64) -> Result { . Align::from_bytes(Size::from_bits(bits).bytes()) . } . . #[inline] . 
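`sign_extend` and `truncate` above are among the hottest lines in this file, and both are plain shift tricks on 128-bit values. A worked check for an 8-bit size:

// Worked example of the shift tricks in `sign_extend` / `truncate` above, for size = 8 bits.
fn sign_extend(bits: u32, value: u128) -> u128 {
    let shift = 128 - bits;
    (((value << shift) as i128) >> shift) as u128 // arithmetic shift copies the sign bit down
}

fn truncate(bits: u32, value: u128) -> u128 {
    let shift = 128 - bits;
    (value << shift) >> shift // logical shift fills with zeroes
}

fn main() {
    assert_eq!(truncate(8, 0x1_23), 0x23);           // only the low 8 bits survive
    assert_eq!(sign_extend(8, 0xff), u128::MAX);     // 0xff is -1 as an i8
    assert_eq!(sign_extend(8, 0x7f), 0x7f);          // positive values are unchanged
    // signed_int_min / signed_int_max for 8 bits:
    assert_eq!(sign_extend(8, 1u128 << 7) as i128, -128);
    assert_eq!(i128::MAX >> (128 - 8), 127);
}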
pub fn from_bytes(align: u64) -> Result { . // Treat an alignment of 0 bytes like 1-byte alignment. 8 ( 0.00%) if align == 0 { . return Ok(Align::ONE); . } . . #[cold] . fn not_power_of_2(align: u64) -> String { . format!("`{}` is not a power of 2", align) . } . . #[cold] . fn too_large(align: u64) -> String { . format!("`{}` is too large", align) . } . . let mut bytes = align; . let mut pow2: u8 = 0; 56 ( 0.00%) while (bytes & 1) == 0 { 36 ( 0.00%) pow2 += 1; 24 ( 0.00%) bytes >>= 1; . } 8 ( 0.00%) if bytes != 1 { . return Err(not_power_of_2(align)); . } 8 ( 0.00%) if pow2 > 29 { . return Err(too_large(align)); . } . 62 ( 0.00%) Ok(Align { pow2 }) . } . . #[inline] . pub fn bytes(self) -> u64 { 27,607 ( 0.00%) 1 << self.pow2 . } . . #[inline] . pub fn bits(self) -> u64 { 10 ( 0.00%) self.bytes() * 8 . } . . /// Computes the best alignment possible for the given offset . /// (the largest power of two that the offset is a multiple of). . /// . /// N.B., for an offset of `0`, this happens to return `2^64`. . #[inline] . pub fn max_for_offset(offset: Size) -> Align { -- line 561 ---------------------------------------- -- line 591 ---------------------------------------- . . #[inline] . pub fn max(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign { . AbiAndPrefAlign { abi: self.abi.max(other.abi), pref: self.pref.max(other.pref) } . } . } . . /// Integers, also used for enum discriminants. 84,516 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, HashStable_Generic)] . pub enum Integer { . I8, . I16, . I32, . I64, . I128, . } . . impl Integer { . #[inline] . pub fn size(self) -> Size { 520,981 ( 0.01%) match self { . I8 => Size::from_bytes(1), . I16 => Size::from_bytes(2), . I32 => Size::from_bytes(4), . I64 => Size::from_bytes(8), . I128 => Size::from_bytes(16), . } . } . . pub fn align(self, cx: &C) -> AbiAndPrefAlign { . let dl = cx.data_layout(); . 4,105 ( 0.00%) match self { 168,780 ( 0.00%) I8 => dl.i8_align, 8 ( 0.00%) I16 => dl.i16_align, 13,800 ( 0.00%) I32 => dl.i32_align, 1,729 ( 0.00%) I64 => dl.i64_align, . I128 => dl.i128_align, . } . } . . /// Finds the smallest Integer type which can represent the signed value. . #[inline] . pub fn fit_signed(x: i128) -> Integer { . match x { 434 ( 0.00%) -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8, . -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16, . -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32, . -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64, . _ => I128, . } . } . . /// Finds the smallest Integer type which can represent the unsigned value. . #[inline] . pub fn fit_unsigned(x: u128) -> Integer { . match x { 124 ( 0.00%) 0..=0x0000_0000_0000_00ff => I8, 8 ( 0.00%) 0..=0x0000_0000_0000_ffff => I16, 12 ( 0.00%) 0..=0x0000_0000_ffff_ffff => I32, . 0..=0xffff_ffff_ffff_ffff => I64, . _ => I128, . } . } . . /// Finds the smallest integer with the given alignment. 360 ( 0.00%) pub fn for_align(cx: &C, wanted: Align) -> Option { . let dl = cx.data_layout(); . 2,268 ( 0.00%) for candidate in [I8, I16, I32, I64, I128] { 435 ( 0.00%) if wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes() { . return Some(candidate); . } . } . None 360 ( 0.00%) } . . /// Find the largest integer with the given alignment or less. . pub fn approximate_align(cx: &C, wanted: Align) -> Integer { . let dl = cx.data_layout(); . . // FIXME(eddyb) maybe include I128 in the future, when it works everywhere. . 
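The loop in `from_bytes` above amounts to "count trailing zero bits, then verify nothing else is set"; `u64::trailing_zeros` expresses the same computation directly. A standalone equivalent, keeping the 2^29 upper bound:

// Equivalent of the `from_bytes` loop above, written with `trailing_zeros`.
fn align_pow2(align: u64) -> Result<u8, String> {
    if align == 0 {
        return Ok(0); // 0 bytes is treated like 1-byte alignment
    }
    let pow2 = align.trailing_zeros() as u8;
    if align != 1u64 << pow2 {
        return Err(format!("`{}` is not a power of 2", align));
    }
    if pow2 > 29 {
        return Err(format!("`{}` is too large", align));
    }
    Ok(pow2)
}

fn main() {
    assert_eq!(align_pow2(8), Ok(3));  // 8-byte alignment is stored as the exponent 3
    assert!(align_pow2(12).is_err());  // 12 is not a power of two
}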
for candidate in [I64, I32, I16] { 924 ( 0.00%) if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() { . return candidate; . } . } . I8 . } . . // FIXME(eddyb) consolidate this and other methods that find the appropriate . // `Integer` given some requirements. . #[inline] . fn from_size(size: Size) -> Result { 8 ( 0.00%) match size.bits() { . 8 => Ok(Integer::I8), . 16 => Ok(Integer::I16), . 32 => Ok(Integer::I32), . 64 => Ok(Integer::I64), . 128 => Ok(Integer::I128), . _ => Err(format!("rust does not support integers with {} bits", size.bits())), . } . } . } . . /// Fundamental unit of memory access and layout. 1,445,280 ( 0.03%) #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)] . pub enum Primitive { . /// The `bool` is the signedness of the `Integer` type. . /// . /// One would think we would not care about such details this low down, . /// but some ABIs are described in terms of C types and ISAs where the . /// integer arithmetic is done on {sign,zero}-extended registers, e.g. . /// a negative integer passed by zero-extension will appear positive in . /// the callee, and most operations on it will produce the wrong values. 506,878 ( 0.01%) Int(Integer, bool), . F32, . F64, . Pointer, . } . . impl Primitive { . pub fn size(self, cx: &C) -> Size { . let dl = cx.data_layout(); . 739,497 ( 0.02%) match self { . Int(i, _) => i.size(), . F32 => Size::from_bits(32), . F64 => Size::from_bits(64), 2,028 ( 0.00%) Pointer => dl.pointer_size, . } 31 ( 0.00%) } . . pub fn align(self, cx: &C) -> AbiAndPrefAlign { . let dl = cx.data_layout(); . 6,515 ( 0.00%) match self { . Int(i, _) => i.align(dl), . F32 => dl.f32_align, . F64 => dl.f64_align, 344 ( 0.00%) Pointer => dl.pointer_align, . } . } . . // FIXME(eddyb) remove, it's trivial thanks to `matches!`. . #[inline] . pub fn is_float(self) -> bool { . matches!(self, F32 | F64) . } -- line 739 ---------------------------------------- -- line 753 ---------------------------------------- . /// sequence: . /// . /// 254 (-2), 255 (-1), 0, 1, 2 . /// . /// This is intended specifically to mirror LLVM’s `!range` metadata semantics. . #[derive(Clone, Copy, PartialEq, Eq, Hash)] . #[derive(HashStable_Generic)] . pub struct WrappingRange { 254,227 ( 0.01%) pub start: u128, . pub end: u128, . } . . impl WrappingRange { . /// Returns `true` if `v` is contained in the range. . #[inline(always)] . pub fn contains(&self, v: u128) -> bool { 59,076 ( 0.00%) if self.start <= self.end { . self.start <= v && v <= self.end . } else { . self.start <= v || v <= self.end . } . } . . /// Returns `self` with replaced `start` . #[inline(always)] -- line 777 ---------------------------------------- -- line 787 ---------------------------------------- . self . } . . /// Returns `true` if `size` completely fills the range. . #[inline] . pub fn is_full_for(&self, size: Size) -> bool { . let max_value = size.unsigned_int_max(); . debug_assert!(self.start <= max_value && self.end <= max_value); 95,965 ( 0.00%) self.start == (self.end.wrapping_add(1) & max_value) . } . } . . impl fmt::Debug for WrappingRange { . fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { . if self.start > self.end { . write!(fmt, "(..={}) | ({}..)", self.end, self.start)?; . } else { -- line 803 ---------------------------------------- -- line 806 ---------------------------------------- . Ok(()) . } . } . . /// Information about one scalar component of a Rust type. . #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] . 
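`WrappingRange::contains` above lets the valid range wrap through the maximum value, as in the `254 (-2), 255 (-1), 0, 1, 2` sequence from its documentation. A standalone copy with that case checked:

// Standalone copy of the wrapping containment check documented above.
struct WrappingRange { start: u128, end: u128 }

impl WrappingRange {
    fn contains(&self, v: u128) -> bool {
        if self.start <= self.end {
            self.start <= v && v <= self.end
        } else {
            // Wrapped range: everything from `start` up to the max, plus 0..=end.
            self.start <= v || v <= self.end
        }
    }
}

fn main() {
    let r = WrappingRange { start: 254, end: 2 }; // the 254 (-2) ..= 2 example for a u8 scalar
    assert!(r.contains(255) && r.contains(0) && r.contains(2));
    assert!(!r.contains(100));
}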
#[derive(HashStable_Generic)] . pub struct Scalar { 296,960 ( 0.01%) pub value: Primitive, . . // FIXME(eddyb) always use the shortest range, e.g., by finding . // the largest space between two consecutive valid values and . // taking everything else as the (shortest) valid range. 338,695 ( 0.01%) pub valid_range: WrappingRange, . } . . impl Scalar { . #[inline] . pub fn is_bool(&self) -> bool { 22,903 ( 0.00%) matches!( 10,770 ( 0.00%) self, . Scalar { value: Int(I8, false), valid_range: WrappingRange { start: 0, end: 1 } } . ) . } . . /// Returns `true` if all possible numbers are valid, i.e `valid_range` covers the whole layout . #[inline] 13 ( 0.00%) pub fn is_always_valid(&self, cx: &C) -> bool { 4,057 ( 0.00%) self.valid_range.is_full_for(self.value.size(cx)) 26 ( 0.00%) } . } . . /// Describes how the fields of a type are located in memory. 685,903 ( 0.02%) #[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)] . pub enum FieldsShape { . /// Scalar primitives and `!`, which never have fields. . Primitive, . . /// All fields start at no offset. The `usize` is the field count. 5 ( 0.00%) Union(NonZeroUsize), . . /// Array/vector-like placement, with all fields of identical types. 56 ( 0.00%) Array { stride: Size, count: u64 }, . . /// Struct-like placement, with precomputed offsets. . /// . /// Fields are guaranteed to not overlap, but note that gaps . /// before, between and after all the fields are NOT always . /// padding, and as such their contents may not be discarded. . /// For example, enum variants leave a gap at the start, . /// where the discriminant field in the enum layout goes. -- line 856 ---------------------------------------- -- line 875 ---------------------------------------- . // FIXME(camlorn) also consider small vector optimization here. . memory_index: Vec, . }, . } . . impl FieldsShape { . #[inline] . pub fn count(&self) -> usize { 10,197 ( 0.00%) match *self { . FieldsShape::Primitive => 0, . FieldsShape::Union(count) => count.get(), . FieldsShape::Array { count, .. } => count.try_into().unwrap(), 1,674 ( 0.00%) FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(), . } . } . . #[inline] 232,236 ( 0.01%) pub fn offset(&self, i: usize) -> Size { 695,633 ( 0.02%) match *self { . FieldsShape::Primitive => { . unreachable!("FieldsShape::offset: `Primitive`s have no fields") . } 24 ( 0.00%) FieldsShape::Union(count) => { 12 ( 0.00%) assert!( 12 ( 0.00%) i < count.get(), . "tried to access field {} of union with {} fields", . i, . count . ); . Size::ZERO . } . FieldsShape::Array { stride, count } => { . let i = u64::try_from(i).unwrap(); 9,218 ( 0.00%) assert!(i < count); . stride * i . } 134,659 ( 0.00%) FieldsShape::Arbitrary { ref offsets, .. } => offsets[i], . } 232,236 ( 0.01%) } . . #[inline] . pub fn memory_index(&self, i: usize) -> usize { 282 ( 0.00%) match *self { . FieldsShape::Primitive => { . unreachable!("FieldsShape::memory_index: `Primitive`s have no fields") . } . FieldsShape::Union(_) | FieldsShape::Array { .. } => i, . FieldsShape::Arbitrary { ref memory_index, .. } => memory_index[i].try_into().unwrap(), . } . } . . /// Gets source indices of the fields by increasing offsets. . #[inline] . pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator + 'a { 2,071 ( 0.00%) let mut inverse_small = [0u8; 64]; . let mut inverse_big = vec![]; 447 ( 0.00%) let use_small = self.count() <= inverse_small.len(); . . // We have to write this logic twice in order to keep the array small. . if let FieldsShape::Arbitrary { ref memory_index, .. 
} = *self { 447 ( 0.00%) if use_small { . for i in 0..self.count() { 780 ( 0.00%) inverse_small[memory_index[i] as usize] = i as u8; . } . } else { . inverse_big = vec![0; self.count()]; . for i in 0..self.count() { . inverse_big[memory_index[i] as usize] = i as u32; . } . } . } . 5,332 ( 0.00%) (0..self.count()).map(move |i| match *self { . FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i, . FieldsShape::Arbitrary { .. } => { 221 ( 0.00%) if use_small { 481 ( 0.00%) inverse_small[i] as usize . } else { . inverse_big[i] as usize . } . } . }) . } . } . -- line 959 ---------------------------------------- -- line 965 ---------------------------------------- . . impl AddressSpace { . /// The default address space, corresponding to data space. . pub const DATA: Self = AddressSpace(0); . } . . /// Describes how values of the type are passed by target ABIs, . /// in terms of categories of C types there are ABI rules for. 429,172 ( 0.01%) #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable_Generic)] . pub enum Abi { . Uninhabited, . Scalar(Scalar), 225 ( 0.00%) ScalarPair(Scalar, Scalar), . Vector { . element: Scalar, . count: u64, . }, . Aggregate { . /// If true, the size is exact, otherwise it's only a lower bound. 2,070 ( 0.00%) sized: bool, . }, . } . . impl Abi { . /// Returns `true` if the layout corresponds to an unsized type. . #[inline] . pub fn is_unsized(&self) -> bool { 407,946 ( 0.01%) match *self { . Abi::Uninhabited | Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false, . Abi::Aggregate { sized } => !sized, . } . } . . /// Returns `true` if this is a single signed integer scalar . #[inline] . pub fn is_signed(&self) -> bool { 111,780 ( 0.00%) match self { 260,807 ( 0.01%) Abi::Scalar(scal) => match scal.value { . Primitive::Int(_, signed) => signed, . _ => false, . }, . _ => panic!("`is_signed` on non-scalar ABI {:?}", self), . } . } . . /// Returns `true` if this is an uninhabited type . #[inline] . pub fn is_uninhabited(&self) -> bool { 6,707 ( 0.00%) matches!(*self, Abi::Uninhabited) . } . . /// Returns `true` is this is a scalar type . #[inline] . pub fn is_scalar(&self) -> bool { . matches!(*self, Abi::Scalar(_)) . } . } . . rustc_index::newtype_index! { . pub struct VariantIdx { . derive [HashStable_Generic] . } . } . 453,310 ( 0.01%) #[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)] . pub enum Variants { . /// Single enum variants, structs/tuples, unions, and all non-ADTs. 67,266 ( 0.00%) Single { index: VariantIdx }, . . /// Enum-likes with more than one inhabited variant: each variant comes with . /// a *discriminant* (usually the same as the variant index but the user can . /// assign explicit discriminant values). That discriminant is encoded . /// as a *tag* on the machine. The layout of each variant is . /// a struct, and they all have space reserved for the tag. . /// For enums, the tag is the sole field of the layout. . Multiple { . tag: Scalar, 60 ( 0.00%) tag_encoding: TagEncoding, 48 ( 0.00%) tag_field: usize, 12 ( 0.00%) variants: IndexVec, . }, . } . 484 ( 0.00%) #[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)] . pub enum TagEncoding { . /// The tag directly stores the discriminant, but possibly with a smaller layout . /// (so converting the tag to the discriminant can require sign extension). . Direct, . . /// Niche (values invalid for a type) encoding the discriminant: . /// Discriminant and variant index coincide. . 
/// The variant `dataful_variant` contains a niche at an arbitrary -- line 1056 ---------------------------------------- -- line 1065 ---------------------------------------- . dataful_variant: VariantIdx, . niche_variants: RangeInclusive, . niche_start: u128, . }, . } . . #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, HashStable_Generic)] . pub struct Niche { 42,328 ( 0.00%) pub offset: Size, 42,328 ( 0.00%) pub scalar: Scalar, . } . . impl Niche { 288 ( 0.00%) pub fn from_scalar(cx: &C, offset: Size, scalar: Scalar) -> Option { 288 ( 0.00%) let niche = Niche { offset, scalar }; 678,492 ( 0.01%) if niche.available(cx) > 0 { Some(niche) } else { None } 336 ( 0.00%) } . 6 ( 0.00%) pub fn available(&self, cx: &C) -> u128 { 54 ( 0.00%) let Scalar { value, valid_range: v } = self.scalar; . let size = value.size(cx); 85,332 ( 0.00%) assert!(size.bits() <= 128); . let max_value = size.unsigned_int_max(); . . // Find out how many values are outside the valid range. . let niche = v.end.wrapping_add(1)..v.start; 2,278 ( 0.00%) niche.end.wrapping_sub(niche.start) & max_value 12 ( 0.00%) } . 49 ( 0.00%) pub fn reserve(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> { 21 ( 0.00%) assert!(count > 0); . 42 ( 0.00%) let Scalar { value, valid_range: v } = self.scalar; . let size = value.size(cx); 14 ( 0.00%) assert!(size.bits() <= 128); . let max_value = size.unsigned_int_max(); . . let niche = v.end.wrapping_add(1)..v.start; 21 ( 0.00%) let available = niche.end.wrapping_sub(niche.start) & max_value; 21 ( 0.00%) if count > available { . return None; . } . . // Extend the range of valid values being reserved by moving either `v.start` or `v.end` bound. . // Given an eventual `Option`, we try to maximize the chance for `None` to occupy the niche of zero. . // This is accomplished by prefering enums with 2 variants(`count==1`) and always taking the shortest path to niche zero. . // Having `None` in niche zero can enable some special optimizations. . // -- line 1112 ---------------------------------------- -- line 1120 ---------------------------------------- . let start = v.start.wrapping_sub(count) & max_value; . Some((start, Scalar { value, valid_range: v.with_start(start) })) . }; . let move_end = |v: WrappingRange| { . let start = v.end.wrapping_add(1) & max_value; . let end = v.end.wrapping_add(count) & max_value; . Some((start, Scalar { value, valid_range: v.with_end(end) })) . }; 35 ( 0.00%) let distance_end_zero = max_value - v.end; 28 ( 0.00%) if v.start > v.end { . // zero is unavailable because wrapping occurs . move_end(v) 21 ( 0.00%) } else if v.start <= distance_end_zero { 24 ( 0.00%) if count <= v.start { . move_start(v) . } else { . // moved past zero, use other bound . move_end(v) . } . } else { 2 ( 0.00%) let end = v.end.wrapping_add(count) & max_value; . let overshot_zero = (1..=v.end).contains(&end); 3 ( 0.00%) if overshot_zero { . // moved past zero, use other bound . move_start(v) . } else { . move_end(v) . } . } 63 ( 0.00%) } . } . 606,186 ( 0.01%) #[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)] . pub struct Layout { . /// Says where the fields are located within the layout. . pub fields: FieldsShape, . . /// Encodes information about multi-variant layouts. . /// Even with `Multiple` variants, a layout still has its own fields! Those are then . /// shared between all variants. One of them will be the discriminant, . /// but e.g. generators can have more. . /// . /// To access all fields of this layout, both `fields` and the fields of the active variant . 
/// must be taken into account. 529 ( 0.00%) pub variants: Variants, . . /// The `abi` defines how this data is passed between functions, and it defines . /// value restrictions via `valid_range`. . /// . /// Note that this is entirely orthogonal to the recursive structure defined by . /// `variants` and `fields`; for example, `ManuallyDrop>` has . /// `Abi::ScalarPair`! So, even with non-`Aggregate` `abi`, `fields` and `variants` . /// have to be taken into account to find all fields of this layout. 42,614 ( 0.00%) pub abi: Abi, . . /// The leaf scalar with the largest number of invalid values . /// (i.e. outside of its `valid_range`), if it exists. 212,312 ( 0.00%) pub largest_niche: Option, . 341,531 ( 0.01%) pub align: AbiAndPrefAlign, 170,500 ( 0.00%) pub size: Size, . } . . impl Layout { 253,824 ( 0.01%) pub fn scalar(cx: &C, scalar: Scalar) -> Self { 253,824 ( 0.01%) let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar); . let size = scalar.value.size(cx); . let align = scalar.value.align(cx); 888,384 ( 0.02%) Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, . fields: FieldsShape::Primitive, 253,824 ( 0.01%) abi: Abi::Scalar(scalar), 169,216 ( 0.00%) largest_niche, . size, . align, . } 338,432 ( 0.01%) } . } . . /// The layout of a type, alongside the type itself. . /// Provides various type traversal APIs (e.g., recursing into fields). . /// . /// Note that the layout is NOT guaranteed to always be identical . /// to that obtained from `layout_of(ty)`, as we need to produce . /// layouts for which Rust types do not exist, such as enum variants . /// or synthetic fields of enums (i.e., discriminants) and fat pointers. . #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable_Generic)] . pub struct TyAndLayout<'a, Ty> { 164,699 ( 0.00%) pub ty: Ty, 333,458 ( 0.01%) pub layout: &'a Layout, . } . . impl<'a, Ty> Deref for TyAndLayout<'a, Ty> { . type Target = &'a Layout; . fn deref(&self) -> &&'a Layout { 58 ( 0.00%) &self.layout . } . } . 176 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Debug)] . pub enum PointerKind { . /// Most general case, we know no restrictions to tell LLVM. . Shared, . . /// `&T` where `T` contains no `UnsafeCell`, is `noalias` and `readonly`. . Frozen, . . /// `&mut T` which is `noalias` but not `readonly`. -- line 1227 ---------------------------------------- -- line 1251 ---------------------------------------- . fn ty_and_layout_pointee_info_at( . this: TyAndLayout<'a, Self>, . cx: &C, . offset: Size, . ) -> Option; . } . . impl<'a, Ty> TyAndLayout<'a, Ty> { 10,310 ( 0.00%) pub fn for_variant(self, cx: &C, variant_index: VariantIdx) -> Self . where . Ty: TyAbiInterface<'a, C>, . { 57,341 ( 0.00%) Ty::ty_and_layout_for_variant(self, cx, variant_index) 10,310 ( 0.00%) } . 15,645 ( 0.00%) pub fn field(self, cx: &C, i: usize) -> Self . where . Ty: TyAbiInterface<'a, C>, . { 604,929 ( 0.01%) Ty::ty_and_layout_field(self, cx, i) 8,344 ( 0.00%) } . . pub fn pointee_info_at(self, cx: &C, offset: Size) -> Option . where . Ty: TyAbiInterface<'a, C>, . { 1,982 ( 0.00%) Ty::ty_and_layout_pointee_info_at(self, cx, offset) . } . } . . impl<'a, Ty> TyAndLayout<'a, Ty> { . /// Returns `true` if the layout corresponds to an unsized type. . pub fn is_unsized(&self) -> bool { 44,841 ( 0.00%) self.abi.is_unsized() . } . . /// Returns `true` if the type is a ZST and not unsized. . pub fn is_zst(&self) -> bool { 298,185 ( 0.01%) match self.abi { . Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false, . 
Abi::Uninhabited => self.size.bytes() == 0, 31,292 ( 0.00%) Abi::Aggregate { sized } => sized && self.size.bytes() == 0, . } 15 ( 0.00%) } . . /// Determines if this type permits "raw" initialization by just transmuting some . /// memory into an instance of `T`. . /// `zero` indicates if the memory is zero-initialized, or alternatively . /// left entirely uninitialized. . /// This is conservative: in doubt, it will answer `true`. . /// . /// FIXME: Once we removed all the conservatism, we could alternatively -- line 1302 ---------------------------------------- 2,638,578 ( 0.06%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs -------------------------------------------------------------------------------- Ir -- line 36 ---------------------------------------- . use rustc_session::parse::ParseSess; . use rustc_span::source_map::{MultiSpan, Span, DUMMY_SP}; . use rustc_span::symbol::{kw, sym, Ident, Symbol}; . use tracing::debug; . . use std::ops::Range; . use std::{cmp, mem, slice}; . 91,940 ( 0.00%) bitflags::bitflags! { . struct Restrictions: u8 { . const STMT_EXPR = 1 << 0; . const NO_STRUCT_LITERAL = 1 << 1; . const CONST_EXPR = 1 << 2; . } . } . . #[derive(Clone, Copy, PartialEq, Debug)] -- line 52 ---------------------------------------- -- line 104 ---------------------------------------- . $self.bump(); . return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty); . } . } . } . }; . } . 772,214 ( 0.02%) #[derive(Clone)] . pub struct Parser<'a> { 31,982 ( 0.00%) pub sess: &'a ParseSess, . /// The current token. 27,416 ( 0.00%) pub token: Token, . /// The spacing for the current token 13,708 ( 0.00%) pub token_spacing: Spacing, . /// The previous token. 41,124 ( 0.00%) pub prev_token: Token, 13,708 ( 0.00%) pub capture_cfg: bool, 27,416 ( 0.00%) restrictions: Restrictions, 27,416 ( 0.00%) expected_tokens: Vec, . // Important: This must only be advanced from `next_tok` . // to ensure that `token_cursor.num_next_calls` is updated properly . token_cursor: TokenCursor, 27,416 ( 0.00%) desugar_doc_comments: bool, . /// This field is used to keep track of how many left angle brackets we have seen. This is . /// required in order to detect extra leading left angle brackets (`<` characters) and error . /// appropriately. . /// . /// See the comments in the `parse_path_segment` function for more details. 27,416 ( 0.00%) unmatched_angle_bracket_count: u32, 41,124 ( 0.00%) max_angle_bracket_count: u32, . /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery . /// it gets removed from here. Every entry left at the end gets emitted as an independent . /// error. 13,708 ( 0.00%) pub(super) unclosed_delims: Vec, . last_unexpected_token_span: Option, . /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it . /// looked like it could have been a mistyped path or literal `Option:Some(42)`). . pub last_type_ascription: Option<(Span, bool /* likely path typo */)>, . /// If present, this `Parser` is not parsing Rust code but rather a macro call. . subparser_name: Option<&'static str>, . capture_state: CaptureState, . /// This allows us to recover when the user forget to add braces around -- line 146 ---------------------------------------- -- line 173 ---------------------------------------- . /// the first macro inner attribute to invoke a proc-macro). . 
/// When create a `TokenStream`, the inner attributes get inserted . /// into the proper place in the token stream. . pub type ReplaceRange = (Range, Vec<(FlatToken, Spacing)>); . . /// Controls how we capture tokens. Capturing can be expensive, . /// so we try to avoid performing capturing in cases where . /// we will never need an `AttrAnnotatedTokenStream` 13,708 ( 0.00%) #[derive(Copy, Clone)] . pub enum Capturing { . /// We aren't performing any capturing - this is the default mode. . No, . /// We are capturing tokens . Yes, . } . 59,398 ( 0.00%) #[derive(Clone)] . struct CaptureState { 54,832 ( 0.00%) capturing: Capturing, 13,708 ( 0.00%) replace_ranges: Vec, . inner_attr_ranges: FxHashMap, . } . . impl<'a> Drop for Parser<'a> { . fn drop(&mut self) { 70,528 ( 0.00%) emit_unclosed_delims(&mut self.unclosed_delims, &self.sess); . } . } . 414,382 ( 0.01%) #[derive(Clone)] . struct TokenCursor { . frame: TokenCursorFrame, 92,259 ( 0.00%) stack: Vec, . desugar_doc_comments: bool, . // Counts the number of calls to `next` or `next_desugared`, . // depending on whether `desugar_doc_comments` is set. 30,692 ( 0.00%) num_next_calls: usize, . // During parsing, we may sometimes need to 'unglue' a . // glued token into two component tokens . // (e.g. '>>' into '>' and '>), so that the parser . // can consume them one at a time. This process . // bypasses the normal capturing mechanism . // (e.g. `num_next_calls` will not be incremented), . // since the 'unglued' tokens due not exist in . // the original `TokenStream`. -- line 217 ---------------------------------------- -- line 226 ---------------------------------------- . // in `Option>` requires us to unglue . // the trailing `>>` token. The `break_last_token` . // field is used to track this token - it gets . // appended to the captured stream when . // we evaluate a `LazyTokenStream` . break_last_token: bool, . } . 184,460 ( 0.00%) #[derive(Clone)] . struct TokenCursorFrame { 35,753 ( 0.00%) delim: token::DelimToken, . span: DelimSpan, . open_delim: bool, 71,506 ( 0.00%) tree_cursor: tokenstream::Cursor, . close_delim: bool, . } . . impl TokenCursorFrame { . fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self { 48,205 ( 0.00%) TokenCursorFrame { . delim, . span, . open_delim: false, 36,388 ( 0.00%) tree_cursor: tts.into_trees(), . close_delim: false, . } . } . } . . impl TokenCursor { 2,244,276 ( 0.05%) fn next(&mut self) -> (Token, Spacing) { . loop { 1,049,295 ( 0.02%) let (tree, spacing) = if !self.frame.open_delim { 16,647 ( 0.00%) self.frame.open_delim = true; 83,235 ( 0.00%) TokenTree::open_tt(self.frame.span, self.frame.delim).into() 2,137,838 ( 0.05%) } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() { . tree 85,820 ( 0.00%) } else if !self.frame.close_delim { 16,620 ( 0.00%) self.frame.close_delim = true; 83,100 ( 0.00%) TokenTree::close_tt(self.frame.span, self.frame.delim).into() 33,240 ( 0.00%) } else if let Some(frame) = self.stack.pop() { 216,060 ( 0.00%) self.frame = frame; . continue; . } else { 174,060 ( 0.00%) (TokenTree::Token(Token::new(token::Eof, DUMMY_SP)), Spacing::Alone) . }; . 406,422 ( 0.01%) match tree { . TokenTree::Token(token) => { 1,496,184 ( 0.03%) return (token, spacing); . } . TokenTree::Delimited(sp, delim, tts) => { . let frame = TokenCursorFrame::new(sp, delim, tts); . self.stack.push(mem::replace(&mut self.frame, frame)); . } . } . } 1,496,184 ( 0.03%) } . 
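The `TokenCursor::next` body annotated just above (about 0.05% of total instructions) flattens nested `TokenTree::Delimited` groups into a linear token sequence: it keeps a stack of frames and synthesizes open/close delimiter tokens as each frame is entered and exhausted. Below is a minimal standalone sketch of that traversal; the `Tree`, `Frame` and `Cursor` types are invented for illustration and are not rustc's actual types, and the real cursor additionally handles `NoDelim` frames and doc-comment desugaring, which this sketch omits.

// Toy token tree: either a single token or a delimited group of children.
enum Tree {
    Token(char),
    Delimited(char, char, Vec<Tree>), // (open delimiter, close delimiter, children)
}

// One level of the traversal: a group being walked, plus flags recording
// whether its synthetic open/close delimiter tokens were already emitted.
struct Frame {
    open: char,
    close: char,
    children: std::vec::IntoIter<Tree>,
    open_emitted: bool,
    close_emitted: bool,
}

struct Cursor {
    frame: Frame,
    stack: Vec<Frame>,
}

impl Cursor {
    fn new(trees: Vec<Tree>) -> Self {
        // The root frame has no real delimiters (mirroring a `NoDelim` frame),
        // so both flags start as "already emitted".
        Cursor {
            frame: Frame {
                open: '\0',
                close: '\0',
                children: trees.into_iter(),
                open_emitted: true,
                close_emitted: true,
            },
            stack: Vec::new(),
        }
    }

    fn next(&mut self) -> Option<char> {
        loop {
            if !self.frame.open_emitted {
                self.frame.open_emitted = true;
                return Some(self.frame.open);
            } else if let Some(tree) = self.frame.children.next() {
                match tree {
                    Tree::Token(c) => return Some(c),
                    Tree::Delimited(open, close, children) => {
                        // Descend into the group; the parent frame is pushed
                        // and resumed once the group is exhausted.
                        let frame = Frame {
                            open,
                            close,
                            children: children.into_iter(),
                            open_emitted: false,
                            close_emitted: false,
                        };
                        self.stack.push(std::mem::replace(&mut self.frame, frame));
                    }
                }
            } else if !self.frame.close_emitted {
                self.frame.close_emitted = true;
                return Some(self.frame.close);
            } else if let Some(frame) = self.stack.pop() {
                self.frame = frame;
            } else {
                return None; // all frames exhausted: end of stream
            }
        }
    }
}

fn main() {
    let tts = vec![
        Tree::Token('a'),
        Tree::Delimited('(', ')', vec![Tree::Token('b'), Tree::Token('c')]),
        Tree::Token('d'),
    ];
    let mut cursor = Cursor::new(tts);
    let mut out = String::new();
    while let Some(tok) = cursor.next() {
        out.push(tok);
    }
    assert_eq!(out, "a(bc)d");
}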
299,100 ( 0.01%) fn next_desugared(&mut self) -> (Token, Spacing) { 90,648 ( 0.00%) let (data, attr_style, sp) = match self.next() { 1,377 ( 0.00%) (Token { kind: token::DocComment(_, attr_style, data), span }, _) => { . (data, attr_style, span) . } 147,255 ( 0.00%) tok => return tok, . }; . . // Searches for the occurrences of `"#*` and returns the minimum number of `#`s . // required to wrap the text. . let mut num_of_hashes = 0; . let mut count = 0; 73,916 ( 0.00%) for ch in data.as_str().chars() { . count = match ch { . '"' => 1, 84 ( 0.00%) '#' if count > 0 => count + 1, . _ => 0, . }; . num_of_hashes = cmp::max(num_of_hashes, count); . } . 1,836 ( 0.00%) let delim_span = DelimSpan::from_single(sp); 2,295 ( 0.00%) let body = TokenTree::Delimited( . delim_span, . token::Bracket, 5,508 ( 0.00%) [ 3,213 ( 0.00%) TokenTree::token(token::Ident(sym::doc, false), sp), 1,836 ( 0.00%) TokenTree::token(token::Eq, sp), 3,672 ( 0.00%) TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp), . ] . .iter() . .cloned() . .collect::(), 459 ( 0.00%) ); . . self.stack.push(mem::replace( . &mut self.frame, . TokenCursorFrame::new( . delim_span, . token::NoDelim, 459 ( 0.00%) if attr_style == AttrStyle::Inner { . [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body] . .iter() . .cloned() . .collect::() . } else { 5,508 ( 0.00%) [TokenTree::token(token::Pound, sp), body] . .iter() . .cloned() . .collect::() . }, . ), . )); . 1,377 ( 0.00%) self.next() 269,190 ( 0.01%) } . } . 1,420 ( 0.00%) #[derive(Debug, Clone, PartialEq)] . enum TokenType { 1,136 ( 0.00%) Token(TokenKind), . Keyword(Symbol), . Operator, . Lifetime, . Ident, . Path, . Type, . Const, . } -- line 353 ---------------------------------------- -- line 392 ---------------------------------------- . } . . pub enum FollowedByType { . Yes, . No, . } . . fn token_descr_opt(token: &Token) -> Option<&'static str> { 112 ( 0.00%) Some(match token.kind { 224 ( 0.00%) _ if token.is_special_ident() => "reserved identifier", 224 ( 0.00%) _ if token.is_used_keyword() => "keyword", 224 ( 0.00%) _ if token.is_unused_keyword() => "reserved keyword", . token::DocComment(..) => "doc comment", . _ => return None, . }) . } . 392 ( 0.00%) pub(super) fn token_descr(token: &Token) -> String { 112 ( 0.00%) let token_str = pprust::token_to_string(token); . match token_descr_opt(token) { . Some(prefix) => format!("{} `{}`", prefix, token_str), 392 ( 0.00%) _ => format!("`{}`", token_str), . } 280 ( 0.00%) } . . impl<'a> Parser<'a> { 134,974 ( 0.00%) pub fn new( . sess: &'a ParseSess, . tokens: TokenStream, . desugar_doc_comments: bool, . subparser_name: Option<&'static str>, . ) -> Self { 9,641 ( 0.00%) let mut start_frame = TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens); 19,282 ( 0.00%) start_frame.open_delim = true; . start_frame.close_delim = true; . 298,871 ( 0.01%) let mut parser = Parser { . sess, 9,641 ( 0.00%) token: Token::dummy(), . token_spacing: Spacing::Alone, 9,641 ( 0.00%) prev_token: Token::dummy(), . capture_cfg: false, . restrictions: Restrictions::empty(), . expected_tokens: Vec::new(), . token_cursor: TokenCursor { 57,846 ( 0.00%) frame: start_frame, . stack: Vec::new(), . num_next_calls: 0, . desugar_doc_comments, . break_last_token: false, . }, . desugar_doc_comments, . unmatched_angle_bracket_count: 0, . max_angle_bracket_count: 0, -- line 445 ---------------------------------------- -- line 451 ---------------------------------------- . capturing: Capturing::No, . 
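`next_desugared`, annotated above, rewrites a doc comment into the equivalent `#[doc = r"..."]` attribute token stream; the loop over `data.as_str().chars()` computes the minimum number of `#`s needed so the comment text can be carried in a raw string literal without accidentally terminating it. A standalone sketch of that computation follows; the function name is invented for illustration.

// Sketch of the `num_of_hashes` computation: find the minimum number of `#`s
// so that `data` can be embedded in a raw string literal r#"..."# without the
// body closing the literal early.
fn min_raw_string_hashes(data: &str) -> usize {
    let mut num_of_hashes: usize = 0;
    let mut count: usize = 0;
    for ch in data.chars() {
        count = match ch {
            // A quote starts a potential terminator sequence `"##...`.
            '"' => 1,
            // Each `#` directly following it lengthens that sequence.
            '#' if count > 0 => count + 1,
            _ => 0,
        };
        num_of_hashes = num_of_hashes.max(count);
    }
    num_of_hashes
}

fn main() {
    assert_eq!(min_raw_string_hashes("plain text"), 0);
    assert_eq!(min_raw_string_hashes("has a \"quote\" inside"), 1);
    assert_eq!(min_raw_string_hashes("ends with \"#"), 2);
}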
replace_ranges: Vec::new(), . inner_attr_ranges: Default::default(), . }, . current_closure: None, . }; . . // Make parser point to the first token. 19,282 ( 0.00%) parser.bump(); . . parser 86,769 ( 0.00%) } . . fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) { . loop { 1,033,392 ( 0.02%) let (mut next, spacing) = if self.desugar_doc_comments { 119,640 ( 0.00%) self.token_cursor.next_desugared() . } else { 426,966 ( 0.01%) self.token_cursor.next() . }; 688,928 ( 0.02%) self.token_cursor.num_next_calls += 1; . // We've retrieved an token from the underlying . // cursor, so we no longer need to worry about . // an unglued token. See `break_and_eat` for more details 172,232 ( 0.00%) self.token_cursor.break_last_token = false; 688,928 ( 0.02%) if next.span.is_dummy() { . // Tweak the location for better diagnostics, but keep syntactic context intact. 38,584 ( 0.00%) next.span = fallback_span.with_ctxt(next.span.ctxt()); . } 566,556 ( 0.01%) if matches!( 344,464 ( 0.01%) next.kind, . token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) . ) { . continue; . } 685,256 ( 0.02%) return (next, spacing); . } . } . . pub fn unexpected(&mut self) -> PResult<'a, T> { . match self.expect_one_of(&[], &[]) { . Err(e) => Err(e), . // We can get `Ok(true)` from `recover_closing_delimiter` . // which is called in `expected_one_of_not_found`. . Ok(_) => FatalError.raise(), . } . } . . /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. 222,741 ( 0.00%) pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { 24,749 ( 0.00%) if self.expected_tokens.is_empty() { 30,222 ( 0.00%) if self.token == *t { 25,185 ( 0.00%) self.bump(); . Ok(false) . } else { . self.unexpected_try_recover(t) . } . } else { 236,544 ( 0.01%) self.expect_one_of(slice::from_ref(t), &[]) . } 371,235 ( 0.01%) } . . /// Expect next token to be edible or inedible token. If edible, . /// then consume it; if inedible, then return without consuming . /// anything. Signal a fatal error if next token is unexpected. 350,820 ( 0.01%) pub fn expect_one_of( . &mut self, . edible: &[TokenKind], . inedible: &[TokenKind], . ) -> PResult<'a, bool /* recovered */> { 58,470 ( 0.00%) if edible.contains(&self.token.kind) { 87,630 ( 0.00%) self.bump(); . Ok(false) . } else if inedible.contains(&self.token.kind) { . // leave it in the input . Ok(false) . } else if self.last_unexpected_token_span == Some(self.token.span) { . FatalError.raise(); . } else { . self.expected_one_of_not_found(edible, inedible) . } 438,525 ( 0.01%) } . . // Public for rustfmt usage. . pub fn parse_ident(&mut self) -> PResult<'a, Ident> { 84,180 ( 0.00%) self.parse_ident_common(true) . } . . fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> { 90,585 ( 0.00%) self.token.ident().ok_or_else(|| match self.prev_token.kind { . TokenKind::DocComment(..) => { . self.span_err(self.prev_token.span, Error::UselessDocComment) . } . _ => self.expected_ident_found(), . }) . } . 210,980 ( 0.00%) fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> { 30,140 ( 0.00%) let (ident, is_raw) = self.ident_or_err()?; 180,840 ( 0.00%) if !is_raw && ident.is_reserved() { . let mut err = self.expected_ident_found(); . if recover { . err.emit(); . } else { . return Err(err); . } . } 120,560 ( 0.00%) self.bump(); . Ok(ident) 301,400 ( 0.01%) } . . /// Checks if the next token is `tok`, and returns `true` if so. . /// . 
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not . /// encountered. 2,472,435 ( 0.05%) fn check(&mut self, tok: &TokenKind) -> bool { 1,060,446 ( 0.02%) let is_present = self.token == *tok; 707,898 ( 0.02%) if !is_present { 1,467,512 ( 0.03%) self.expected_tokens.push(TokenType::Token(tok.clone())); . } . is_present 2,472,435 ( 0.05%) } . . /// Consumes a token 'tok' if it exists. Returns whether the given token was present. 28 ( 0.00%) pub fn eat(&mut self, tok: &TokenKind) -> bool { 395,816 ( 0.01%) let is_present = self.check(tok); 268,463 ( 0.01%) if is_present { 42,858 ( 0.00%) self.bump() . } . is_present 35 ( 0.00%) } . . /// If the next token is the given keyword, returns `true` without eating it. . /// An expectation is also added for diagnostics purposes. 2,940 ( 0.00%) fn check_keyword(&mut self, kw: Symbol) -> bool { 1,019 ( 0.00%) self.expected_tokens.push(TokenType::Keyword(kw)); 29,129 ( 0.00%) self.token.is_keyword(kw) . } . . /// If the next token is the given keyword, eats it and returns `true`. . /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes. . // Public for rustfmt usage. 10,600 ( 0.00%) pub fn eat_keyword(&mut self, kw: Symbol) -> bool { 8,372 ( 0.00%) if self.check_keyword(kw) { 1,244 ( 0.00%) self.bump(); . true . } else { . false . } 10,600 ( 0.00%) } . . fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool { 424 ( 0.00%) if self.token.is_keyword(kw) { 106 ( 0.00%) self.bump(); . true . } else { . false . } . } . . /// If the given word is not a keyword, signals an error. . /// If the next token is not the given word, signals an error. . /// Otherwise, eats it. 185 ( 0.00%) fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> { . if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) } 148 ( 0.00%) } . . /// Is the given keyword `kw` followed by a non-reserved identifier? 6,928 ( 0.00%) fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool { 3,464 ( 0.00%) self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()) 7,794 ( 0.00%) } . 92,058 ( 0.00%) fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool { 31,100 ( 0.00%) if ok { . true . } else { 1,396 ( 0.00%) self.expected_tokens.push(typ); . false . } 92,058 ( 0.00%) } . . fn check_ident(&mut self) -> bool { 3,573 ( 0.00%) self.check_or_expected(self.token.is_ident(), TokenType::Ident) . } . 58,296 ( 0.00%) fn check_path(&mut self) -> bool { 119,549 ( 0.00%) self.check_or_expected(self.token.is_path_start(), TokenType::Path) 72,870 ( 0.00%) } . . fn check_type(&mut self) -> bool { 72 ( 0.00%) self.check_or_expected(self.token.can_begin_type(), TokenType::Type) . } . . fn check_const_arg(&mut self) -> bool { 72 ( 0.00%) self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const) . } . 3,573 ( 0.00%) fn check_inline_const(&self, dist: usize) -> bool { 1,985 ( 0.00%) self.is_keyword_ahead(dist, &[kw::Const]) . && self.look_ahead(dist + 1, |t| match t.kind { . token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)), . token::OpenDelim(DelimToken::Brace) => true, . _ => false, . }) 3,573 ( 0.00%) } . . /// Checks to see if the next token is either `+` or `+=`. . /// Otherwise returns `false`. . fn check_plus(&mut self) -> bool { 316 ( 0.00%) self.check_or_expected( 158 ( 0.00%) self.token.is_like_plus(), 158 ( 0.00%) TokenType::Token(token::BinOp(token::Plus)), . ) . } . . /// Eats the expected token if it's present possibly breaking . 
/// compound tokens like multi-character operators in process. . /// Returns `true` if the token was eaten. 177,600 ( 0.00%) fn break_and_eat(&mut self, expected: TokenKind) -> bool { 103,600 ( 0.00%) if self.token.kind == expected { 102 ( 0.00%) self.bump(); . return true; . } 73,745 ( 0.00%) match self.token.kind.break_two_token_op() { . Some((first, second)) if first == expected => { . let first_span = self.sess.source_map().start_point(self.token.span); . let second_span = self.token.span.with_lo(first_span.hi()); . self.token = Token::new(first, first_span); . // Keep track of this token - if we end token capturing now, . // we'll want to append this token to the captured stream. . // . // If we consume any additional tokens, then this token -- line 682 ---------------------------------------- -- line 684 ---------------------------------------- . // and `next_tok` will set this field to `None` . self.token_cursor.break_last_token = true; . // Use the spacing of the glued token as the spacing . // of the unglued second token. . self.bump_with((Token::new(second, second_span), self.token_spacing)); . true . } . _ => { 44,247 ( 0.00%) self.expected_tokens.push(TokenType::Token(expected)); . false . } . } 192,196 ( 0.00%) } . . /// Eats `+` possibly breaking tokens like `+=` in process. . fn eat_plus(&mut self) -> bool { . self.break_and_eat(token::BinOp(token::Plus)) . } . . /// Eats `&` possibly breaking tokens like `&&` in process. . /// Signals an error if `&` is not eaten. . fn expect_and(&mut self) -> PResult<'a, ()> { 234 ( 0.00%) if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() } . } . . /// Eats `|` possibly breaking tokens like `||` in process. . /// Signals an error if `|` was not eaten. . fn expect_or(&mut self) -> PResult<'a, ()> { . if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() } . } . . /// Eats `<` possibly breaking tokens like `<<` in process. 542 ( 0.00%) fn eat_lt(&mut self) -> bool { 44,334 ( 0.00%) let ate = self.break_and_eat(token::Lt); 29,516 ( 0.00%) if ate { . // See doc comment for `unmatched_angle_bracket_count`. 24 ( 0.00%) self.unmatched_angle_bracket_count += 1; 24 ( 0.00%) self.max_angle_bracket_count += 1; . debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count); . } . ate 542 ( 0.00%) } . . /// Eats `<` possibly breaking tokens like `<<` in process. . /// Signals an error if `<` was not eaten. . fn expect_lt(&mut self) -> PResult<'a, ()> { . if self.eat_lt() { Ok(()) } else { self.unexpected() } . } . . /// Eats `>` possibly breaking tokens like `>>` in process. . /// Signals an error if `>` was not eaten. . fn expect_gt(&mut self) -> PResult<'a, ()> { 36 ( 0.00%) if self.break_and_eat(token::Gt) { . // See doc comment for `unmatched_angle_bracket_count`. 18 ( 0.00%) if self.unmatched_angle_bracket_count > 0 { 12 ( 0.00%) self.unmatched_angle_bracket_count -= 1; . debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count); . } . Ok(()) . } else { . self.unexpected() . } . } . . fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool { . kets.iter().any(|k| match expect { 97,710 ( 0.00%) TokenExpectType::Expect => self.check(k), . TokenExpectType::NoExpect => self.token == **k, . }) . } . . fn parse_seq_to_before_tokens( . &mut self, . kets: &[&TokenKind], . sep: SeqSep, -- line 758 ---------------------------------------- -- line 761 ---------------------------------------- . 
) -> PResult<'a, (Vec, bool /* trailing */, bool /* recovered */)> { . let mut first = true; . let mut recovered = false; . let mut trailing = false; . let mut v = vec![]; . let unclosed_delims = !self.unclosed_delims.is_empty(); . . while !self.expect_any_with_type(kets, expect) { 75,655 ( 0.00%) if let token::CloseDelim(..) | token::Eof = self.token.kind { . break; . } 47,970 ( 0.00%) if let Some(ref t) = sep.sep { 43,332 ( 0.00%) if first { . first = false; . } else { 69,122 ( 0.00%) match self.expect(t) { . Ok(false) => { . self.current_closure.take(); . } . Ok(true) => { . self.current_closure.take(); . recovered = true; . break; . } -- line 784 ---------------------------------------- -- line 857 ---------------------------------------- . e.cancel(); . break; . } . } . } . } . } . } 28,300 ( 0.00%) if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) { . trailing = true; . break; . } . 14,155 ( 0.00%) let t = f(self)?; 533 ( 0.00%) v.push(t); . } . 19,900 ( 0.00%) Ok((v, trailing, recovered)) . } . . fn recover_missing_braces_around_closure_body( . &mut self, . closure_spans: ClosureSpans, . mut expect_err: DiagnosticBuilder<'_>, . ) -> PResult<'a, ()> { . let initial_semicolon = self.token.span; -- line 882 ---------------------------------------- -- line 937 ---------------------------------------- . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. . fn parse_seq_to_before_end( . &mut self, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool, bool)> { 15,501 ( 0.00%) self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) . } . . /// Parses a sequence, including the closing delimiter. The function . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. 3,480 ( 0.00%) fn parse_seq_to_end( . &mut self, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool /* trailing */)> { 10,299 ( 0.00%) let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; 9,601 ( 0.00%) if !recovered { . self.eat(ket); . } 12,508 ( 0.00%) Ok((val, trailing)) 2,784 ( 0.00%) } . . /// Parses a sequence, including the closing delimiter. The function . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. . fn parse_unspanned_seq( . &mut self, . bra: &TokenKind, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { 1,115 ( 0.00%) self.expect(bra)?; 2,784 ( 0.00%) self.parse_seq_to_end(ket, sep, f) . } . . fn parse_delim_comma_seq( . &mut self, . delim: DelimToken, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { . self.parse_unspanned_seq( 412 ( 0.00%) &token::OpenDelim(delim), 479 ( 0.00%) &token::CloseDelim(delim), . SeqSep::trailing_allowed(token::Comma), . f, . ) . } . . fn parse_paren_comma_seq( . &mut self, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { . self.parse_delim_comma_seq(token::Paren, f) . } . . /// Advance the parser by one token using provided token as the next one. 2,227,082 ( 0.05%) fn bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) { . // Bumping after EOF is a bad sign, usually an infinite loop. 1,027,884 ( 0.02%) if self.prev_token.kind == TokenKind::Eof { . let msg = "attempted to bump the parser past EOF (may be stuck in a loop)"; . 
self.span_bug(self.token.span, msg); . } . . // Update the current and previous tokens. 685,256 ( 0.02%) self.prev_token = mem::replace(&mut self.token, next_token); 171,314 ( 0.00%) self.token_spacing = next_spacing; . . // Diagnostics. 171,314 ( 0.00%) self.expected_tokens.clear(); . } . . /// Advance the parser by one token. 1,370,512 ( 0.03%) pub fn bump(&mut self) { 685,256 ( 0.02%) let next_token = self.next_tok(self.token.span); 1,027,884 ( 0.02%) self.bump_with(next_token); 1,370,512 ( 0.03%) } . . /// Look-ahead `dist` tokens of `self.token` and get access to that token there. . /// When `dist == 0` then the current token is looked at. . pub fn look_ahead(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R { 884 ( 0.00%) if dist == 0 { 430 ( 0.00%) return looker(&self.token); . } . 58,915 ( 0.00%) let frame = &self.token_cursor.frame; 68,274 ( 0.00%) if frame.delim != DelimToken::NoDelim { . let all_normal = (0..dist).all(|i| { 152,555 ( 0.00%) let token = frame.tree_cursor.look_ahead(i); 214,451 ( 0.00%) !matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _))) . }); . if all_normal { 269,514 ( 0.01%) return match frame.tree_cursor.look_ahead(dist - 1) { 106,158 ( 0.00%) Some(tree) => match tree { 52,918 ( 0.00%) TokenTree::Token(token) => looker(token), . TokenTree::Delimited(dspan, delim, _) => { 1,927 ( 0.00%) looker(&Token::new(token::OpenDelim(*delim), dspan.open)) . } . }, 6,110 ( 0.00%) None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)), . }; . } . } . . let mut cursor = self.token_cursor.clone(); . let mut i = 0; 14,344 ( 0.00%) let mut token = Token::dummy(); . while i < dist { 100,324 ( 0.00%) token = cursor.next().0; 43,050 ( 0.00%) if matches!( 28,664 ( 0.00%) token.kind, . token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) . ) { . continue; . } . i += 1; . } 4,685 ( 0.00%) return looker(&token); . } . . /// Returns whether any of the given keywords are `dist` tokens ahead of the current one. 3,789 ( 0.00%) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool { 878 ( 0.00%) self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw))) 3,789 ( 0.00%) } . . /// Parses asyncness: `async` or nothing. . fn parse_asyncness(&mut self) -> Async { . if self.eat_keyword(kw::Async) { . let span = self.prev_token.uninterpolated_span(); . Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID } . } else { . Async::No . } . } . . /// Parses unsafety: `unsafe` or nothing. 188 ( 0.00%) fn parse_unsafety(&mut self) -> Unsafe { . if self.eat_keyword(kw::Unsafe) { . Unsafe::Yes(self.prev_token.uninterpolated_span()) . } else { . Unsafe::No . } 752 ( 0.00%) } . . /// Parses constness: `const` or nothing. 1,544 ( 0.00%) fn parse_constness(&mut self) -> Const { . // Avoid const blocks to be parsed as const items 477 ( 0.00%) if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace)) . && self.eat_keyword(kw::Const) . { 50 ( 0.00%) Const::Yes(self.prev_token.uninterpolated_span()) . } else { . Const::No . } 2,702 ( 0.00%) } . . /// Parses inline const expressions. . fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P> { . if pat { . self.sess.gated_spans.gate(sym::inline_const_pat, span); . } else { . self.sess.gated_spans.gate(sym::inline_const, span); . } -- line 1104 ---------------------------------------- -- line 1110 ---------------------------------------- . }; . let blk_span = anon_const.value.span; . 
Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::new())) . } . . /// Parses mutability (`mut` or nothing). . fn parse_mutability(&mut self) -> Mutability { . if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not } 78 ( 0.00%) } . . /// Possibly parses mutability (`const` or `mut`). . fn parse_const_or_mut(&mut self) -> Option { . if self.eat_keyword(kw::Mut) { . Some(Mutability::Mut) . } else if self.eat_keyword(kw::Const) { . Some(Mutability::Not) . } else { . None . } . } . . fn parse_field_name(&mut self) -> PResult<'a, Ident> { 18,222 ( 0.00%) if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind . { . self.expect_no_suffix(self.token.span, "a tuple index", suffix); . self.bump(); . Ok(Ident::new(symbol, self.prev_token.span)) . } else { 27,333 ( 0.00%) self.parse_ident_common(true) . } . } . . fn parse_mac_args(&mut self) -> PResult<'a, P> { 14,715 ( 0.00%) self.parse_mac_args_common(true).map(P) . } . . fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> { 1,503 ( 0.00%) self.parse_mac_args_common(false) . } . 49,158 ( 0.00%) fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> { 43,696 ( 0.00%) Ok( 29,282 ( 0.00%) if self.check(&token::OpenDelim(DelimToken::Paren)) 1,479 ( 0.00%) || self.check(&token::OpenDelim(DelimToken::Bracket)) 1,479 ( 0.00%) || self.check(&token::OpenDelim(DelimToken::Brace)) . { 19,924 ( 0.00%) match self.parse_token_tree() { 24,905 ( 0.00%) TokenTree::Delimited(dspan, delim, tokens) => . // We've confirmed above that there is a delimiter so unwrapping is OK. . { 9,962 ( 0.00%) MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens) . } . _ => unreachable!(), . } 962 ( 0.00%) } else if !delimited_only { . if self.eat(&token::Eq) { 457 ( 0.00%) let eq_span = self.prev_token.span; . . // Collect tokens because they are used during lowering to HIR. 457 ( 0.00%) let expr = self.parse_expr_force_collect()?; 457 ( 0.00%) let span = expr.span; . 2,285 ( 0.00%) let token_kind = token::Interpolated(Lrc::new(token::NtExpr(expr))); 4,113 ( 0.00%) MacArgs::Eq(eq_span, Token::new(token_kind, span)) . } else { . MacArgs::Empty . } . } else { . return self.unexpected(); . }, . ) 38,234 ( 0.00%) } . . fn parse_or_use_outer_attributes( . &mut self, . already_parsed_attrs: Option, . ) -> PResult<'a, AttrWrapper> { 87,204 ( 0.00%) if let Some(attrs) = already_parsed_attrs { . Ok(attrs) . } else { 58,052 ( 0.00%) self.parse_outer_attributes() . } . } . . /// Parses a single token tree from the input. 149,464 ( 0.00%) pub(crate) fn parse_token_tree(&mut self) -> TokenTree { 110,697 ( 0.00%) match self.token.kind { . token::OpenDelim(..) => { . let depth = self.token_cursor.stack.len(); . . // We keep advancing the token cursor until we hit . // the matching `CloseDelim` token. 101,676 ( 0.00%) while !(depth == self.token_cursor.stack.len() . && matches!(self.token.kind, token::CloseDelim(_))) . { . // Advance one token at a time, so `TokenCursor::next()` . // can capture these tokens if necessary. 48,854 ( 0.00%) self.bump(); . } . // We are still inside the frame corresponding . // to the delimited stream we captured, so grab . // the tokens from this frame. . let frame = &self.token_cursor.frame; 10,042 ( 0.00%) let stream = frame.tree_cursor.stream.clone(); 10,042 ( 0.00%) let span = frame.span; 5,021 ( 0.00%) let delim = frame.delim; . 
// Consume close delimiter 10,042 ( 0.00%) self.bump(); 25,105 ( 0.00%) TokenTree::Delimited(span, delim, stream) . } . token::CloseDelim(_) | token::Eof => unreachable!(), . _ => { 27,324 ( 0.00%) self.bump(); 68,310 ( 0.00%) TokenTree::Token(self.prev_token.clone()) . } . } 130,781 ( 0.00%) } . . /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF. . pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec> { . let mut tts = Vec::new(); . while self.token != token::Eof { . tts.push(self.parse_token_tree()); . } . Ok(tts) -- line 1234 ---------------------------------------- -- line 1244 ---------------------------------------- . } . TokenStream::new(result) . } . . /// Evaluates the closure with restrictions in place. . /// . /// Afters the closure is evaluated, restrictions are reset. . fn with_res(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T { 29,062 ( 0.00%) let old = self.restrictions; 29,098 ( 0.00%) self.restrictions = res; . let res = f(self); 29,125 ( 0.00%) self.restrictions = old; . res . } . 4,336 ( 0.00%) fn is_crate_vis(&self) -> bool { 2,710 ( 0.00%) self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep) 4,878 ( 0.00%) } . . /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`, . /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`. . /// If the following element can't be a tuple (i.e., it's a function definition), then . /// it's not a tuple struct field), and the contents within the parentheses aren't valid, . /// so emit a proper diagnostic. . // Public for rustfmt usage. 3,590 ( 0.00%) pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> { 910 ( 0.00%) maybe_whole!(self, NtVis, |x| x); . 359 ( 0.00%) self.expected_tokens.push(TokenType::Keyword(kw::Crate)); 1,436 ( 0.00%) if self.is_crate_vis() { . self.bump(); // `crate` . self.sess.gated_spans.gate(sym::crate_visibility_modifier, self.prev_token.span); . return Ok(Visibility { . span: self.prev_token.span, . kind: VisibilityKind::Crate(CrateSugar::JustCrate), . tokens: None, . }); . } . . if !self.eat_keyword(kw::Pub) { . // We need a span for our `Spanned`, but there's inherently no . // keyword to grab a span from for inherited visibility; an empty span at the . // beginning of the current token would seem to be the "Schelling span". 223 ( 0.00%) return Ok(Visibility { 669 ( 0.00%) span: self.token.span.shrink_to_lo(), . kind: VisibilityKind::Inherited, . tokens: None, . }); . } 136 ( 0.00%) let lo = self.prev_token.span; . 680 ( 0.00%) if self.check(&token::OpenDelim(token::Paren)) { . // We don't `self.bump()` the `(` yet because this might be a struct definition where . // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. . // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so . // by the following tokens. . if self.is_keyword_ahead(1, &[kw::Crate]) && self.look_ahead(2, |t| t != &token::ModSep) . // account for `pub(crate::foo)` . { . // Parse `pub(crate)`. -- line 1303 ---------------------------------------- -- line 1338 ---------------------------------------- . } else if let FollowedByType::No = fbt { . // Provide this diagnostic if a type cannot follow; . // in particular, if this is not a tuple struct. . self.recover_incorrect_vis_restriction()?; . // Emit diagnostic, but continue with public visibility. . } . } . 
408 ( 0.00%) Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None }) 3,231 ( 0.00%) } . . /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }` . fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> { . self.bump(); // `(` . let path = self.parse_path(PathStyle::Mod)?; . self.expect(&token::CloseDelim(token::Paren))?; // `)` . . let msg = "incorrect visibility restriction"; -- line 1355 ---------------------------------------- -- line 1370 ---------------------------------------- . ) . .emit(); . . Ok(()) . } . . /// Parses `extern string_literal?`. . fn parse_extern(&mut self) -> Extern { 57 ( 0.00%) if self.eat_keyword(kw::Extern) { Extern::from_abi(self.parse_abi()) } else { Extern::None } . } . . /// Parses a string literal as an ABI spec. . fn parse_abi(&mut self) -> Option { . match self.parse_str_lit() { . Ok(str_lit) => Some(str_lit), . Err(Some(lit)) => match lit.kind { . ast::LitKind::Err(_) => None, -- line 1386 ---------------------------------------- -- line 1395 ---------------------------------------- . .emit(); . None . } . }, . Err(None) => None, . } . } . 7,500 ( 0.00%) pub fn collect_tokens_no_attrs( . &mut self, . f: impl FnOnce(&mut Self) -> PResult<'a, R>, . ) -> PResult<'a, R> { . // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use . // `ForceCollect::Yes` . self.collect_tokens_trailing_token( . AttrWrapper::empty(), . ForceCollect::Yes, 750 ( 0.00%) |this, _attrs| Ok((f(this)?, TrailingToken::None)), . ) 7,500 ( 0.00%) } . . /// `::{` or `::*` 203,536 ( 0.00%) fn is_import_coupler(&mut self) -> bool { 101,768 ( 0.00%) self.check(&token::ModSep) . && self.look_ahead(1, |t| { 118,245 ( 0.00%) *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star) . }) 183,600 ( 0.00%) } . . pub fn clear_expected_tokens(&mut self) { . self.expected_tokens.clear(); . } . } . . crate fn make_unclosed_delims_error( . unmatched: UnmatchedBrace, -- line 1430 ---------------------------------------- -- line 1450 ---------------------------------------- . err.span_label(sp, "closing delimiter possibly meant for this"); . } . if let Some(sp) = unmatched.unclosed_span { . err.span_label(sp, "unclosed delimiter"); . } . Some(err) . } . 186,792 ( 0.00%) pub fn emit_unclosed_delims(unclosed_delims: &mut Vec, sess: &ParseSess) { 93,396 ( 0.00%) *sess.reached_eof.borrow_mut() |= . unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none()); 93,396 ( 0.00%) for unmatched in unclosed_delims.drain(..) { . if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) { . e.emit(); . } . } 186,792 ( 0.00%) } . . /// A helper struct used when building an `AttrAnnotatedTokenStream` from . /// a `LazyTokenStream`. Both delimiter and non-delimited tokens . /// are stored as `FlatToken::Token`. A vector of `FlatToken`s . /// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested . /// `AttrAnnotatedTokenTree::Delimited` tokens . #[derive(Debug, Clone)] . pub enum FlatToken { -- line 1474 ---------------------------------------- 1,139,865 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/rustc_entry.rs -------------------------------------------------------------------------------- Ir -- line 26 ---------------------------------------- . /// } . /// . /// assert_eq!(letters[&'s'], 2); . 
/// assert_eq!(letters[&'t'], 3); . /// assert_eq!(letters[&'u'], 1); . /// assert_eq!(letters.get(&'y'), None); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 3,722,210 ( 0.08%) pub fn rustc_entry(&mut self, key: K) -> RustcEntry<'_, K, V, A> { . let hash = make_insert_hash(&self.hash_builder, &key); 155 ( 0.00%) if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) { 513,655 ( 0.01%) RustcEntry::Occupied(RustcOccupiedEntry { 808,452 ( 0.02%) key: Some(key), . elem, . table: &mut self.table, . }) . } else { . // Ideally we would put this in VacantEntry::insert, but Entry is not . // generic over the BuildHasher and adding a generic parameter would be . // a breaking change. . self.reserve(1); . 183,652 ( 0.00%) RustcEntry::Vacant(RustcVacantEntry { . hash, 142,624 ( 0.00%) key, . table: &mut self.table, . }) . } 2,370,203 ( 0.05%) } . } . . /// A view into a single entry in a map, which may either be vacant or occupied. . /// . /// This `enum` is constructed from the [`entry`] method on [`HashMap`]. . /// . /// [`HashMap`]: struct.HashMap.html . /// [`entry`]: struct.HashMap.html#method.rustc_entry -- line 62 ---------------------------------------- -- line 405 ---------------------------------------- . /// if let RustcEntry::Occupied(o) = map.rustc_entry("poneyland") { . /// *o.into_mut() += 10; . /// } . /// . /// assert_eq!(map["poneyland"], 22); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn into_mut(self) -> &'a mut V { 20,415 ( 0.00%) unsafe { &mut self.elem.as_mut().1 } . } . . /// Sets the value of the entry, and returns the entry's old value. . /// . /// # Examples . /// . /// ``` . /// use hashbrown::HashMap; -- line 421 ---------------------------------------- -- line 568 ---------------------------------------- . /// let mut map: HashMap<&str, u32> = HashMap::new(); . /// . /// if let RustcEntry::Vacant(o) = map.rustc_entry("poneyland") { . /// o.insert(37); . /// } . /// assert_eq!(map["poneyland"], 37); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 3,998 ( 0.00%) pub fn insert(self, value: V) -> &'a mut V { . unsafe { 111,666 ( 0.00%) let bucket = self.table.insert_no_grow(self.hash, (self.key, value)); 1,476 ( 0.00%) &mut bucket.as_mut().1 . } 5,260 ( 0.00%) } . . /// Sets the value of the entry with the RustcVacantEntry's key, . /// and returns a RustcOccupiedEntry. . /// . /// # Examples . /// . /// ``` . /// use hashbrown::HashMap; -- line 589 ---------------------------------------- 720,262 ( 0.02%) -------------------------------------------------------------------------------- The following files chosen for auto-annotation could not be found: -------------------------------------------------------------------------------- ./elf/dl-lookup.c ./malloc/malloc.c ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc -------------------------------------------------------------------------------- Ir -------------------------------------------------------------------------------- 467,713,205 (10.26%) events annotated
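Further up, the abi/mod.rs annotation shows `Niche::available` deriving how many bit patterns of a scalar fall outside its (possibly wrapping) valid range, which is the quantity enum layout optimization cares about. A standalone sketch of that wrapping-range arithmetic follows; the `(bits, valid_start, valid_end)` signature is invented for illustration and stands in for rustc's `Scalar` and `WrappingRange` types.

// `valid_start..=valid_end` is the inclusive, possibly wrapping range of
// allowed values for a scalar `bits` wide; everything else is niche space.
fn available_niche_values(bits: u32, valid_start: u128, valid_end: u128) -> u128 {
    assert!(bits <= 128);
    // All-ones mask for the scalar's width (shifting by 128 would overflow).
    let max_value = if bits == 128 { u128::MAX } else { (1u128 << bits) - 1 };

    // Invalid values form the wrapping range `valid_end + 1 .. valid_start`;
    // its length, taken modulo the scalar's width, is the niche count.
    let niche_start = valid_end.wrapping_add(1) & max_value;
    valid_start.wrapping_sub(niche_start) & max_value
}

fn main() {
    // A bool-like scalar: 8 bits wide, only 0..=1 valid, so 254 spare patterns.
    assert_eq!(available_niche_values(8, 0, 1), 254);
    // A NonZeroU8-like scalar (valid 1..=255) has exactly one niche value: 0.
    assert_eq!(available_niche_values(8, 1, 255), 1);
    // A fully valid u8 has no niche at all.
    assert_eq!(available_niche_values(8, 0, 255), 0);
}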