--------------------------------------------------------------------------------
I1 cache: 65536 B, 64 B, 4-way associative
D1 cache: 32768 B, 64 B, 8-way associative
LL cache: 67108864 B, 64 B, 64-way associative
Command: /usr/home/liquid/.rustup/toolchains/w-profiling/bin/rustc --crate-name lsp_types --edition=2018 src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata,link -C opt-level=3 -C embed-bitcode=no --cfg feature="default" -C metadata=8f15c8430316a342 -C extra-filename=-8f15c8430316a342 --out-dir /usr/home/liquid/tmp/.tmpvvWohe/target/release/deps -L dependency=/usr/home/liquid/tmp/.tmpvvWohe/target/release/deps --extern bitflags=/usr/home/liquid/tmp/.tmpvvWohe/target/release/deps/libbitflags-923cad7af34a271a.rmeta --extern serde=/usr/home/liquid/tmp/.tmpvvWohe/target/release/deps/libserde-fc05f8c3c891b92a.rmeta --extern serde_json=/usr/home/liquid/tmp/.tmpvvWohe/target/release/deps/libserde_json-8a3c2629cc7d15ff.rmeta --extern serde_repr=/usr/home/liquid/tmp/.tmpvvWohe/target/release/deps/libserde_repr-9426f3ac80a61e0a.so --extern url=/usr/home/liquid/tmp/.tmpvvWohe/target/release/deps/liburl-113bf2ac77a1aabe.rmeta -Adeprecated -Aunknown-lints -Zincremental-verify-ich
Data file: results/cgout-w-profiling-lsp-types-0.91.1-Opt-Full
Events recorded: Ir
Events shown: Ir
Event sort order: Ir
Thresholds: 0.1
Include dirs:
User annotated:
Auto-annotation: on
--------------------------------------------------------------------------------
Ir
--------------------------------------------------------------------------------
52,975,836,195 (100.0%) PROGRAM TOTALS
--------------------------------------------------------------------------------
Ir file:function
--------------------------------------------------------------------------------
1,799,785,259 ( 3.40%) ./malloc/malloc.c:_int_free
1,495,621,956 ( 2.82%) ./malloc/malloc.c:_int_malloc
1,063,270,015 ( 2.01%) ./malloc/malloc.c:malloc
935,916,336 ( 1.77%) ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_avx_unaligned_erms
775,374,704 ( 1.46%) ???:llvm::FPPassManager::runOnFunction(llvm::Function&)
551,420,823 ( 1.04%) ./malloc/malloc.c:free
313,954,936 ( 0.59%) ???:llvm::AnalysisManager::getResultImpl(llvm::AnalysisKey*, llvm::Function&)
300,886,452 ( 0.57%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/hash/sip.rs:::write
282,467,736 ( 0.53%) ???:llvm::AnalysisManager::invalidate(llvm::Function&, llvm::PreservedAnalyses const&)
260,555,046 ( 0.49%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/server.rs:> as proc_macro::bridge::server::DispatcherTrait>::dispatch
249,598,272 ( 0.47%) ./elf/../elf/dl-tls.c:_dl_update_slotinfo
243,054,360 ( 0.46%) ./malloc/malloc.c:malloc_consolidate
237,344,100 ( 0.45%) ???:llvm::SelectionDAG::Combine(llvm::CombineLevel, llvm::AAResults*, llvm::CodeGenOpt::Level)
233,985,100 ( 0.44%) ???:llvm::InstCombinerImpl::run()
197,779,652 ( 0.37%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
195,561,387 ( 0.37%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs:::short_write_process_buffer::
171,636,501 ( 0.32%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/client.rs:proc_macro::bridge::scoped_cell::ScopedCell::replace
169,903,671 ( 0.32%) ???:combineInstructionsOverFunction(llvm::Function&, llvm::InstCombineWorklist&, llvm::AAResults*, llvm::AssumptionCache&, llvm::TargetLibraryInfo&, llvm::TargetTransformInfo&, llvm::DominatorTree&, llvm::OptimizationRemarkEmitter&, llvm::BlockFrequencyInfo*, llvm::ProfileSummaryInfo*, unsigned int, llvm::LoopInfo*)
168,182,326 ( 0.32%) ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S:__memcmp_avx2_movbe
165,763,651 ( 0.31%) ???:llvm::AttributeList::addAttributes(llvm::LLVMContext&, unsigned int, llvm::AttrBuilder const&) const
159,972,510 ( 0.30%) ???:llvm::BitstreamCursor::readRecord(unsigned int, llvm::SmallVectorImpl&, llvm::StringRef*)
153,282,918 ( 0.29%) ./malloc/malloc.c:realloc
150,669,705 ( 0.28%) ???:llvm::TargetLibraryInfoImpl::getLibFunc(llvm::Function const&, llvm::LibFunc&) const
150,485,860 ( 0.28%) ./malloc/malloc.c:unlink_chunk.constprop.0
134,348,192 ( 0.25%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/string.rs:alloc::string::String::push
131,184,036 ( 0.25%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs:::short_write_process_buffer::
125,932,527 ( 0.24%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs:rustc_mir_dataflow::drop_flag_effects::on_all_children_bits::is_terminal_path
123,323,172 ( 0.23%) ???:llvm::PMDataManager::verifyPreservedAnalysis(llvm::Pass*)
117,737,563 ( 0.22%) ???:llvm::LiveVariables::HandleRegMask(llvm::MachineOperand const&)
114,899,611 ( 0.22%) /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs:__rdl_alloc
112,409,791 ( 0.21%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/rpc.rs:>::decode
112,276,490 ( 0.21%) ???:runCVP(llvm::Module&) [clone .llvm.11785992503873176614]
111,048,362 ( 0.21%) ???:llvm::LiveVariables::runOnBlock(llvm::MachineBasicBlock*, unsigned int)
109,921,248 ( 0.21%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs:alloc::string::String::push
106,344,053 ( 0.20%) ./malloc/malloc.c:_int_realloc
105,890,176 ( 0.20%) ./elf/../elf/dl-tls.c:update_get_addr
105,846,512 ( 0.20%) ???:llvm::coro::declaresIntrinsics(llvm::Module const&, std::initializer_list)
104,358,652 ( 0.20%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::eq
104,169,116 ( 0.20%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs:core::ptr::drop_in_place::replace::PutBackOnDrop>
103,567,810 ( 0.20%) ???:llvm::InstCombinerImpl::visitCallInst(llvm::CallInst&)
101,792,680 ( 0.19%) ???:(anonymous namespace)::MachineCopyPropagation::runOnMachineFunction(llvm::MachineFunction&)
101,646,501 ( 0.19%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:::shallow_resolve_ty
98,875,397 ( 0.19%) ???:llvm::DomTreeBuilder::SemiNCAInfo >::CalculateFromScratch(llvm::DominatorTreeBase&, llvm::DomTreeBuilder::SemiNCAInfo >::BatchUpdateInfo*)
95,680,527 ( 0.18%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::from_key_hashed_nocheck::
93,769,523 ( 0.18%) ./string/../sysdeps/x86_64/multiarch/strcmp-avx2.S:__strncmp_avx2
91,873,264 ( 0.17%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/mem/mod.rs:proc_macro::bridge::scoped_cell::ScopedCell::replace
91,376,751 ( 0.17%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/scoped_cell.rs:proc_macro::bridge::scoped_cell::ScopedCell::replace
89,447,412 ( 0.17%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
88,933,218 ( 0.17%) ./elf/../sysdeps/x86_64/tls_get_addr.S:__tls_get_addr
88,106,328 ( 0.17%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:>, (), core::hash::BuildHasherDefault>>::from_hash::>>::{closure#0}>
86,052,729 ( 0.16%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/rpc.rs:>::encode
85,319,231 ( 0.16%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::, rustc_middle::ty::context::Interned>::{closure#0}>
85,090,943 ( 0.16%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::intern_ty
83,167,501 ( 0.16%) ???:core::ptr::read
81,983,136 ( 0.15%) ???:llvm::detail::PassModel>, llvm::PreservedAnalyses, llvm::AnalysisManager>::run(llvm::Function&, llvm::AnalysisManager&)
81,523,638 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs:proc_macro::bridge::scoped_cell::ScopedCell::replace
80,470,168 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::_intern_substs
80,408,526 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/buffer.rs:proc_macro::bridge::scoped_cell::ScopedCell::replace
80,265,801 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs:::span_data_to_lines_and_cols
79,548,648 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/rpc.rs: as proc_macro::bridge::rpc::DecodeMut>::decode
79,519,677 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_transform/src/simplify.rs:::simplify
76,945,169 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:::_intern_substs
73,796,922 ( 0.14%) ???:(anonymous namespace)::DeadMachineInstructionElim::eliminateDeadMI(llvm::MachineFunction&)
73,541,901 ( 0.14%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::hash::
72,647,391 ( 0.14%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/hash/sip.rs:::write
72,473,375 ( 0.14%) ./malloc/malloc.c:calloc
72,465,456 ( 0.14%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/closure.rs:proc_macro::bridge::scoped_cell::ScopedCell::replace
72,360,240 ( 0.14%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>, (), core::hash::BuildHasherDefault>>::from_hash::>>::{closure#0}>
69,690,738 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:>::hash_stable
69,060,518 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/traversal.rs:::traverse_successor
68,465,843 ( 0.13%) ???:llvm::ValueHandleBase::AddToUseList()
67,319,954 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/buffer.rs:>::encode
66,563,246 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/btree/search.rs:, proc_macro::bridge::client::SourceFile>, alloc::collections::btree::node::marker::LeafOrInternal>>::search_tree::
66,281,518 ( 0.13%) ???:???
64,006,480 ( 0.12%) ???:bool llvm::DenseMapBase*, llvm::DenseMapInfo<(anonymous namespace)::SimpleValue>, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> >, (anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*, llvm::DenseMapInfo<(anonymous namespace)::SimpleValue>, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> >::LookupBucketFor<(anonymous namespace)::SimpleValue>((anonymous namespace)::SimpleValue const&, llvm::detail::DenseMapPair<(anonymous namespace)::SimpleValue, llvm::ScopedHashTableVal<(anonymous namespace)::SimpleValue, llvm::Value*>*> const*&) const
63,973,283 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/obligation_forest/mod.rs:>::process_obligations::>
63,894,952 ( 0.12%) /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc:operator new(unsigned long)
63,407,288 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/mem/mod.rs:core::ptr::drop_in_place::replace::PutBackOnDrop>
62,229,581 ( 0.12%) ???:llvm::LivePhysRegs::stepBackward(llvm::MachineInstr const&)
61,921,558 ( 0.12%) ???:llvm::DataLayout::getAlignment(llvm::Type*, bool) const
61,426,118 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/buffer.rs:>::encode
61,204,824 ( 0.12%) ???:llvm::SelectionDAGISel::SelectCodeCommon(llvm::SDNode*, unsigned char const*, unsigned int)
60,740,899 ( 0.11%) ???: as core::iter::traits::iterator::Iterator>::next
59,677,260 ( 0.11%) ???:llvm::removeUnreachableBlocks(llvm::Function&, llvm::DomTreeUpdater*, llvm::MemorySSAUpdater*)
59,643,234 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/type_variable.rs:::probe
59,498,170 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/btree/node.rs:, alloc::collections::btree::node::marker::Leaf>, alloc::collections::btree::node::marker::Edge>>::insert_recursing
59,030,502 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/terminator.rs:::successors
58,545,053 ( 0.11%) ???:llvm::MD5::final(llvm::MD5::MD5Result&)
58,042,991 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs:::check
57,476,751 ( 0.11%) ???:llvm::MD5::update(llvm::StringRef)
56,613,231 ( 0.11%) ???:core::mem::replace
56,402,996 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/btree/search.rs:, alloc::collections::btree::node::marker::LeafOrInternal>>::search_tree::
55,987,631 ( 0.11%) ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_erms
55,651,800 ( 0.11%) ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_unaligned_erms
55,544,458 ( 0.10%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/btree/search.rs:>>::insert
54,058,805 ( 0.10%) ./string/../sysdeps/x86_64/multiarch/strlen-avx2.S:__strlen_avx2
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs
--------------------------------------------------------------------------------
Ir
-- line 27 ----------------------------------------
.
impl CacheEntry { . #[inline] . fn update( . &mut self, . new_file_and_idx: Option<(Lrc, usize)>, . pos: BytePos, . time_stamp: usize, . ) { 60,707 ( 0.00%) if let Some((file, file_idx)) = new_file_and_idx { 36,092 ( 0.00%) self.file = file; 60,142 ( 0.00%) self.file_index = file_idx; . } . 29,403 ( 0.00%) let line_index = self.file.lookup_line(pos).unwrap(); . let line_bounds = self.file.line_bounds(line_index); 122,962 ( 0.00%) self.line_number = line_index + 1; 186,837 ( 0.00%) self.line = line_bounds; . self.touch(time_stamp); . } . . #[inline] . fn touch(&mut self, time_stamp: usize) { 2,466,133 ( 0.00%) self.time_stamp = time_stamp; . } . } . . #[derive(Clone)] . pub struct CachingSourceMapView<'sm> { . source_map: &'sm SourceMap, . line_cache: [CacheEntry; 3], . time_stamp: usize, . } . . impl<'sm> CachingSourceMapView<'sm> { 59,780 ( 0.00%) pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> { . let files = source_map.files(); 29,890 ( 0.00%) let first_file = files[0].clone(); . let entry = CacheEntry { . time_stamp: 0, . line_number: 0, . line: BytePos(0)..BytePos(0), . file: first_file, . file_index: 0, . }; . 149,450 ( 0.00%) CachingSourceMapView { . source_map, 209,230 ( 0.00%) line_cache: [entry.clone(), entry.clone(), entry], . time_stamp: 0, . } 119,560 ( 0.00%) } . . pub fn byte_pos_to_line_and_col( . &mut self, . pos: BytePos, . ) -> Option<(Lrc, usize, BytePos)> { . self.time_stamp += 1; . . // Check if the position is in one of the cached lines -- line 85 ---------------------------------------- -- line 106 ---------------------------------------- . }; . . let cache_entry = &mut self.line_cache[oldest]; . cache_entry.update(new_file_and_idx, pos, self.time_stamp); . . Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line.start)) . } . 8,832,600 ( 0.02%) pub fn span_data_to_lines_and_cols( . &mut self, . span_data: &SpanData, . ) -> Option<(Lrc, usize, BytePos, usize, BytePos)> { 5,047,200 ( 0.01%) self.time_stamp += 1; . . // Check if lo and hi are in the cached lines. 1,261,800 ( 0.00%) let lo_cache_idx = self.cache_entry_index(span_data.lo); 1,261,800 ( 0.00%) let hi_cache_idx = self.cache_entry_index(span_data.hi); . 2,410,152 ( 0.00%) if lo_cache_idx != -1 && hi_cache_idx != -1 { . // Cache hit for span lo and hi. Check if they belong to the same file. . let result = { 2,408,666 ( 0.00%) let lo = &self.line_cache[lo_cache_idx as usize]; . let hi = &self.line_cache[hi_cache_idx as usize]; . 7,225,998 ( 0.01%) if lo.file_index != hi.file_index { . return None; . } . . ( 2,408,666 ( 0.00%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 1,204,333 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . ) . }; . 1,204,333 ( 0.00%) self.line_cache[lo_cache_idx as usize].touch(self.time_stamp); 1,204,333 ( 0.00%) self.line_cache[hi_cache_idx as usize].touch(self.time_stamp); . 4,817,332 ( 0.01%) return Some(result); . } . . // No cache hit or cache hit for only one of span lo and hi. 113,448 ( 0.00%) let oldest = if lo_cache_idx != -1 || hi_cache_idx != -1 { . let avoid_idx = if lo_cache_idx != -1 { lo_cache_idx } else { hi_cache_idx }; . self.oldest_cache_entry_index_avoid(avoid_idx as usize) . } else { . self.oldest_cache_entry_index() . }; . . // If the entry doesn't point to the correct file, get the new file and index. . // Return early if the file containing beginning of span doesn't contain end of span. 
459,288 ( 0.00%) let new_file_and_idx = if !file_contains(&self.line_cache[oldest].file, span_data.lo) { 140,320 ( 0.00%) let new_file_and_idx = self.file_for_position(span_data.lo)?; 168,384 ( 0.00%) if !file_contains(&new_file_and_idx.0, span_data.hi) { . return None; . } . 112,256 ( 0.00%) Some(new_file_and_idx) . } else { . let file = &self.line_cache[oldest].file; 88,209 ( 0.00%) if !file_contains(&file, span_data.hi) { . return None; . } . 88,209 ( 0.00%) None . }; . . // Update the cache entries. 287,335 ( 0.00%) let (lo_idx, hi_idx) = match (lo_cache_idx, hi_cache_idx) { . // Oldest cache entry is for span_data.lo line. . (-1, -1) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); . 165,312 ( 0.00%) if !lo.line.contains(&span_data.hi) { . let new_file_and_idx = Some((lo.file.clone(), lo.file_index)); . let next_oldest = self.oldest_cache_entry_index_avoid(oldest); . let hi = &mut self.line_cache[next_oldest]; . hi.update(new_file_and_idx, span_data.hi, self.time_stamp); . (oldest, next_oldest) . } else { . (oldest, oldest) . } . } . // Oldest cache entry is for span_data.lo line. . (-1, _) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); 1,486 ( 0.00%) let hi = &mut self.line_cache[hi_cache_idx as usize]; 2,229 ( 0.00%) hi.touch(self.time_stamp); . (oldest, hi_cache_idx as usize) . } . // Oldest cache entry is for span_data.hi line. . (_, -1) => { . let hi = &mut self.line_cache[oldest]; 1,620 ( 0.00%) hi.update(new_file_and_idx, span_data.hi, self.time_stamp); 3,240 ( 0.00%) let lo = &mut self.line_cache[lo_cache_idx as usize]; 6,480 ( 0.00%) lo.touch(self.time_stamp); . (lo_cache_idx as usize, oldest) . } . _ => { . panic!(); . } . }; . . let lo = &self.line_cache[lo_idx]; . let hi = &self.line_cache[hi_idx]; . . // Span lo and hi may equal line end when last line doesn't . // end in newline, hence the inclusive upper bounds below. 114,934 ( 0.00%) assert!(span_data.lo >= lo.line.start); 57,467 ( 0.00%) assert!(span_data.lo <= lo.line.end); 114,934 ( 0.00%) assert!(span_data.hi >= hi.line.start); 57,467 ( 0.00%) assert!(span_data.hi <= hi.line.end); 287,335 ( 0.00%) assert!(lo.file.contains(span_data.lo)); 172,401 ( 0.00%) assert!(lo.file.contains(span_data.hi)); 172,401 ( 0.00%) assert_eq!(lo.file_index, hi.file_index); . 172,401 ( 0.00%) Some(( 57,467 ( 0.00%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 57,467 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . )) 11,356,200 ( 0.02%) } . . fn cache_entry_index(&self, pos: BytePos) -> isize { . for (idx, cache_entry) in self.line_cache.iter().enumerate() { 9,459,636 ( 0.02%) if cache_entry.line.contains(&pos) { . return idx as isize; . } . } . . -1 . } . . fn oldest_cache_entry_index(&self) -> usize { . let mut oldest = 0; . . for idx in 1..self.line_cache.len() { 330,624 ( 0.00%) if self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp { . oldest = idx; . } . } . . oldest . } . . fn oldest_cache_entry_index_avoid(&self, avoid_idx: usize) -> usize { . let mut oldest = if avoid_idx != 0 { 0 } else { 1 }; . . for idx in 0..self.line_cache.len() { 34,931 ( 0.00%) if idx != avoid_idx 20,828 ( 0.00%) && self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp . { . oldest = idx; . } . } . . oldest . } . 
140,320 ( 0.00%) fn file_for_position(&self, pos: BytePos) -> Option<(Lrc, usize)> { 28,064 ( 0.00%) if !self.source_map.files().is_empty() { 56,128 ( 0.00%) let file_idx = self.source_map.lookup_source_file_idx(pos); . let file = &self.source_map.files()[file_idx]; . 224,512 ( 0.00%) if file_contains(file, pos) { . return Some((file.clone(), file_idx)); . } . } . . None 140,320 ( 0.00%) } . } . . #[inline] . fn file_contains(file: &SourceFile, pos: BytePos) -> bool { . // `SourceMap::lookup_source_file_idx` and `SourceFile::contains` both consider the position . // one past the end of a file to belong to it. Normally, that's what we want. But for the . // purposes of converting a byte position to a line and column number, we can't come up with a . // line and column number if the file is empty, because an empty file doesn't contain any -- line 290 ---------------------------------------- 13,968,131 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/mem/mod.rs -------------------------------------------------------------------------------- Ir -- line 326 ---------------------------------------- . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_size_of_val", issue = "46571")] . #[cfg_attr(not(test), rustc_diagnostic_item = "mem_size_of_val")] . pub const fn size_of_val(val: &T) -> usize { . // SAFETY: `val` is a reference, so it's a valid raw pointer 382,636 ( 0.00%) unsafe { intrinsics::size_of_val(val) } . } . . /// Returns the size of the pointed-to value in bytes. . /// . /// This is usually the same as `size_of::()`. However, when `T` *has* no . /// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object], . /// then `size_of_val_raw` can be used to get the dynamically-known size. . /// -- line 342 ---------------------------------------- -- line 375 ---------------------------------------- . /// assert_eq!(13, unsafe { mem::size_of_val_raw(y) }); . /// ``` . #[inline] . #[must_use] . #[unstable(feature = "layout_for_ptr", issue = "69835")] . #[rustc_const_unstable(feature = "const_size_of_val_raw", issue = "46571")] . pub const unsafe fn size_of_val_raw(val: *const T) -> usize { . // SAFETY: the caller must provide a valid raw pointer 238 ( 0.00%) unsafe { intrinsics::size_of_val(val) } . } . . /// Returns the [ABI]-required minimum alignment of a type. . /// . /// Every reference to a value of the type `T` must be a multiple of this number. . /// . /// This is the alignment used for struct fields. It may be smaller than the preferred alignment. . /// -- line 391 ---------------------------------------- -- line 831 ---------------------------------------- . #[must_use = "if you don't need the old value, you can just assign the new value directly"] . #[rustc_const_unstable(feature = "const_replace", issue = "83164")] . #[cfg_attr(not(test), rustc_diagnostic_item = "mem_replace")] . pub const fn replace(dest: &mut T, src: T) -> T { . // SAFETY: We read from `dest` but directly write `src` into it afterwards, . // such that the old value is not duplicated. Nothing is dropped and . // nothing here can panic. . unsafe { 2,971,019 ( 0.01%) let result = ptr::read(dest); 171,577,660 ( 0.32%) ptr::write(dest, src); . result . } . } . . /// Disposes of a value. . /// . /// This does so by calling the argument's implementation of [`Drop`][drop]. . 
/// -- line 848 ---------------------------------------- -- line 901 ---------------------------------------- . /// . /// println!("x: {}, y: {}", x, y.0); // still available . /// ``` . /// . /// [`RefCell`]: crate::cell::RefCell . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . #[cfg_attr(not(test), rustc_diagnostic_item = "mem_drop")] 44,583 ( 0.00%) pub fn drop(_x: T) {} . . /// Interprets `src` as having type `&U`, and then reads `src` without moving . /// the contained value. . /// . /// This function will unsafely assume the pointer `src` is valid for [`size_of::`][size_of] . /// bytes by transmuting `&T` to `&U` and then reading the `&U` (except that this is done in a way . /// that is correct even when `&U` makes stricter alignment requirements than `&T`). It will also . /// unsafely create a copy of the contained value instead of moving out of `src`. -- line 917 ---------------------------------------- -- line 991 ---------------------------------------- . } . } . . #[stable(feature = "discriminant_value", since = "1.21.0")] . impl cmp::Eq for Discriminant {} . . #[stable(feature = "discriminant_value", since = "1.21.0")] . impl hash::Hash for Discriminant { 69,671 ( 0.00%) fn hash(&self, state: &mut H) { . self.0.hash(state); 121,582 ( 0.00%) } . } . . #[stable(feature = "discriminant_value", since = "1.21.0")] . impl fmt::Debug for Discriminant { . fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt.debug_tuple("Discriminant").field(&self.0).finish() . } . } -- line 1009 ---------------------------------------- -- line 1031 ---------------------------------------- . /// assert_eq!(mem::discriminant(&Foo::A("bar")), mem::discriminant(&Foo::A("baz"))); . /// assert_eq!(mem::discriminant(&Foo::B(1)), mem::discriminant(&Foo::B(2))); . /// assert_ne!(mem::discriminant(&Foo::B(3)), mem::discriminant(&Foo::C(3))); . /// ``` . #[stable(feature = "discriminant_value", since = "1.21.0")] . #[rustc_const_unstable(feature = "const_discriminant", issue = "69821")] . #[cfg_attr(not(test), rustc_diagnostic_item = "mem_discriminant")] . pub const fn discriminant(v: &T) -> Discriminant { 384,173 ( 0.00%) Discriminant(intrinsics::discriminant_value(v)) . } . . /// Returns the number of variants in the enum type `T`. . /// . /// If `T` is not an enum, calling this function will not result in undefined behavior, but the . /// return value is unspecified. Equally, if `T` is an enum with more variants than `usize::MAX` . /// the return value is unspecified. Uninhabited variants will be counted. . /// -- line 1047 ---------------------------------------- 203,560 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/string.rs -------------------------------------------------------------------------------- Ir -- line 375 ---------------------------------------- . /// ``` . /// let s = String::new(); . /// ``` . #[inline] . #[rustc_const_stable(feature = "const_string_new", since = "1.39.0")] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub const fn new() -> String { 420,549 ( 0.00%) String { vec: Vec::new() } . } . . /// Creates a new empty `String` with a particular capacity. . /// . /// `String`s have an internal buffer to hold their data. The capacity is . /// the length of that buffer, and can be queried with the [`capacity`] . /// method. This method creates an empty `String`, but one with an initial . 
/// buffer that can hold `capacity` bytes. This is useful when you may be -- line 391 ---------------------------------------- -- line 420 ---------------------------------------- . /// // ...but this may make the string reallocate . /// s.push('a'); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub fn with_capacity(capacity: usize) -> String { 748,792 ( 0.00%) String { vec: Vec::with_capacity(capacity) } . } . . // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is . // required for this method definition, is not available. Since we don't . // require this method for testing purposes, I'll just stub it . // NB see the slice::hack module in slice.rs for more information . #[inline] . #[cfg(test)] -- line 436 ---------------------------------------- -- line 492 ---------------------------------------- . /// . /// [`from_utf8_unchecked`]: String::from_utf8_unchecked . /// [`Vec`]: crate::vec::Vec "Vec" . /// [`&str`]: prim@str "&str" . /// [`into_bytes`]: String::into_bytes . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn from_utf8(vec: Vec) -> Result { 19,710 ( 0.00%) match str::from_utf8(&vec) { . Ok(..) => Ok(String { vec }), 3,903 ( 0.00%) Err(e) => Err(FromUtf8Error { bytes: vec, error: e }), . } . } . . /// Converts a slice of bytes to a string, including invalid characters. . /// . /// Strings are made of bytes ([`u8`]), and a slice of bytes . /// ([`&[u8]`][byteslice]) is made of bytes, so this function converts . /// between the two. Not all byte slices are valid strings, however: strings -- line 510 ---------------------------------------- -- line 550 ---------------------------------------- . /// let input = b"Hello \xF0\x90\x80World"; . /// let output = String::from_utf8_lossy(input); . /// . /// assert_eq!("Hello �World", output); . /// ``` . #[must_use] . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 936 ( 0.00%) pub fn from_utf8_lossy(v: &[u8]) -> Cow<'_, str> { 1,040 ( 0.00%) let mut iter = lossy::Utf8Lossy::from_bytes(v).chunks(); . 520 ( 0.00%) let first_valid = if let Some(chunk) = iter.next() { . let lossy::Utf8LossyChunk { valid, broken } = chunk; 104 ( 0.00%) if broken.is_empty() { . debug_assert_eq!(valid.len(), v.len()); 104 ( 0.00%) return Cow::Borrowed(valid); . } . valid . } else { . return Cow::Borrowed(""); . }; . . const REPLACEMENT: &str = "\u{FFFD}"; . -- line 573 ---------------------------------------- -- line 578 ---------------------------------------- . for lossy::Utf8LossyChunk { valid, broken } in iter { . res.push_str(valid); . if !broken.is_empty() { . res.push_str(REPLACEMENT); . } . } . . Cow::Owned(res) 832 ( 0.00%) } . . /// Decode a UTF-16–encoded vector `v` into a `String`, returning [`Err`] . /// if `v` contains any invalid data. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 594 ---------------------------------------- -- line 762 ---------------------------------------- . /// }; . /// . /// assert_eq!("💖", sparkle_heart); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . pub unsafe fn from_utf8_unchecked(bytes: Vec) -> String { 72,660 ( 0.00%) String { vec: bytes } . } . . /// Converts a `String` into a byte vector. . /// . /// This consumes the `String`, so we do not need to copy its contents. . /// . /// # Examples . /// -- line 778 ---------------------------------------- -- line 1138 ---------------------------------------- . 
/// s.push('2'); . /// s.push('3'); . /// . /// assert_eq!("abc123", s); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] 75,931,476 ( 0.14%) pub fn push(&mut self, ch: char) { . match ch.len_utf8() { . 1 => self.vec.push(ch as u8), 2 ( 0.00%) _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()), . } 63,276,230 ( 0.12%) } . . /// Returns a byte slice of this `String`'s contents. . /// . /// The inverse of this method is [`from_utf8`]. . /// . /// [`from_utf8`]: String::from_utf8 . /// . /// # Examples -- line 1159 ---------------------------------------- -- line 1543 ---------------------------------------- . /// let fancy_f = String::from("ƒoo"); . /// assert_eq!(fancy_f.len(), 4); . /// assert_eq!(fancy_f.chars().count(), 3); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn len(&self) -> usize { 113 ( 0.00%) self.vec.len() . } . . /// Returns `true` if this `String` has a length of zero, and `false` otherwise. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 1559 ---------------------------------------- -- line 1563 ---------------------------------------- . /// . /// v.push('a'); . /// assert!(!v.is_empty()); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn is_empty(&self) -> bool { 270,281 ( 0.00%) self.len() == 0 . } . . /// Splits the string into two at the given byte index. . /// . /// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and . /// the returned `String` contains bytes `[at, len)`. `at` must be on the . /// boundary of a UTF-8 code point. . /// -- line 1579 ---------------------------------------- -- line 1845 ---------------------------------------- . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Display::fmt("invalid utf-16: lone surrogate found", f) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Clone for String { 42,654 ( 0.00%) fn clone(&self) -> Self { 21,327 ( 0.00%) String { vec: self.vec.clone() } 49,763 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . self.vec.clone_from(&source.vec); . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl FromIterator for String { 10,156 ( 0.00%) fn from_iter>(iter: I) -> String { . let mut buf = String::new(); . buf.extend(iter); . buf 11,626 ( 0.00%) } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "string_from_iter_by_ref", since = "1.17.0")] . impl<'a> FromIterator<&'a char> for String { . fn from_iter>(iter: I) -> String { . let mut buf = String::new(); . buf.extend(iter); -- line 1877 ---------------------------------------- -- line 1937 ---------------------------------------- . } . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Extend for String { . fn extend>(&mut self, iter: I) { 866,386 ( 0.00%) let iterator = iter.into_iter(); 860,472 ( 0.00%) let (lower_bound, _) = iterator.size_hint(); . self.reserve(lower_bound); 37,291,387 ( 0.07%) iterator.for_each(move |c| self.push(c)); . } . . #[inline] . fn extend_one(&mut self, c: char) { . self.push(c); . } . . #[inline] -- line 1956 ---------------------------------------- -- line 2128 ---------------------------------------- . fn default() -> String { . String::new() . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl fmt::Display for String { . 
#[inline] 22,056 ( 0.00%) fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 44,539 ( 0.00%) fmt::Display::fmt(&**self, f) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl fmt::Debug for String { . #[inline] . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Debug::fmt(&**self, f) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl hash::Hash for String { . #[inline] . fn hash(&self, hasher: &mut H) { 28,587 ( 0.00%) (**self).hash(hasher) . } . } . . /// Implements the `+` operator for concatenating two strings. . /// . /// This consumes the `String` on the left-hand side and re-uses its buffer (growing it if . /// necessary). This is done to avoid allocating a new `String` and copying the entire contents on . /// every operation, which would lead to *O*(*n*^2) running time when building an *n*-byte string by -- line 2161 ---------------------------------------- -- line 2194 ---------------------------------------- . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Add<&str> for String { . type Output = String; . . #[inline] . fn add(mut self, other: &str) -> String { . self.push_str(other); 2 ( 0.00%) self . } . } . . /// Implements the `+=` operator for appending to a `String`. . /// . /// This has the same behavior as the [`push_str`][String::push_str] method. . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "stringaddassign", since = "1.12.0")] -- line 2210 ---------------------------------------- -- line 2233 ---------------------------------------- . &self[..][index] . } . } . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::Index> for String { . type Output = str; . . #[inline] 924 ( 0.00%) fn index(&self, index: ops::RangeFrom) -> &str { . &self[..][index] 924 ( 0.00%) } . } . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::Index for String { . type Output = str; . . #[inline] . fn index(&self, _index: ops::RangeFull) -> &str { . unsafe { str::from_utf8_unchecked(&self.vec) } -- line 2251 ---------------------------------------- -- line 2388 ---------------------------------------- . impl ToString for T { . // A common guideline is to not inline generic functions. However, . // removing `#[inline]` from this method causes non-negligible regressions. . // See , the last attempt . // to try to remove it. . #[inline] . default fn to_string(&self) -> String { . let mut buf = String::new(); 1,693,820 ( 0.00%) let mut formatter = core::fmt::Formatter::new(&mut buf); . // Bypass format_args!() to avoid write_str with zero-length strs 411,852 ( 0.00%) fmt::Display::fmt(self, &mut formatter) . .expect("a Display implementation returned an error unexpectedly"); . buf . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "char_to_string_specialization", since = "1.46.0")] . impl ToString for char { -- line 2406 ---------------------------------------- -- line 2553 ---------------------------------------- . /// . /// ``` . /// let s1: String = String::from("hello world"); . /// let s2: Box = s1.into_boxed_str(); . /// let s3: String = String::from(s2); . /// . /// assert_eq!("hello world", s3) . /// ``` 1,302 ( 0.00%) fn from(s: Box) -> String { . s.into_string() 1,302 ( 0.00%) } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "box_from_str", since = "1.20.0")] . impl From for Box { . /// Converts the given [`String`] to a boxed `str` slice that is owned. . /// . 
/// # Examples -- line 2571 ---------------------------------------- -- line 2620 ---------------------------------------- . /// ``` . /// # use std::borrow::Cow; . /// assert_eq!(Cow::from("eggplant"), Cow::Borrowed("eggplant")); . /// ``` . /// . /// [`Borrowed`]: crate::borrow::Cow::Borrowed "borrow::Cow::Borrowed" . #[inline] . fn from(s: &'a str) -> Cow<'a, str> { 379,728 ( 0.00%) Cow::Borrowed(s) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a> From for Cow<'a, str> { . /// Converts a [`String`] into an [`Owned`] variant. . /// No heap allocation is performed, and the string -- line 2636 ---------------------------------------- -- line 2643 ---------------------------------------- . /// let s = "eggplant".to_string(); . /// let s2 = "eggplant".to_string(); . /// assert_eq!(Cow::from(s), Cow::<'static, str>::Owned(s2)); . /// ``` . /// . /// [`Owned`]: crate::borrow::Cow::Owned "borrow::Cow::Owned" . #[inline] . fn from(s: String) -> Cow<'a, str> { 68,495 ( 0.00%) Cow::Owned(s) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "cow_from_string_ref", since = "1.28.0")] . impl<'a> From<&'a String> for Cow<'a, str> { . /// Converts a [`String`] reference into a [`Borrowed`] variant. . /// No heap allocation is performed, and the string -- line 2659 ---------------------------------------- -- line 2709 ---------------------------------------- . /// ``` . /// let s1 = String::from("hello world"); . /// let v1 = Vec::from(s1); . /// . /// for b in v1 { . /// println!("{}", b); . /// } . /// ``` 102 ( 0.00%) fn from(string: String) -> Vec { 408 ( 0.00%) string.into_bytes() 102 ( 0.00%) } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl fmt::Write for String { . #[inline] 3,505,616 ( 0.01%) fn write_str(&mut self, s: &str) -> fmt::Result { . self.push_str(s); . Ok(()) 3,067,414 ( 0.01%) } . . #[inline] 666,654 ( 0.00%) fn write_char(&mut self, c: char) -> fmt::Result { 29 ( 0.00%) self.push(c); . Ok(()) 666,654 ( 0.00%) } . } . . /// A draining iterator for `String`. . /// . /// This struct is created by the [`drain`] method on [`String`]. See its . /// documentation for more. . /// . /// [`drain`]: String::drain -- line 2743 ---------------------------------------- 796,283 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs -------------------------------------------------------------------------------- Ir -- line 10 ---------------------------------------- . pub fn move_path_children_matching<'tcx, F>( . move_data: &MoveData<'tcx>, . path: MovePathIndex, . mut cond: F, . ) -> Option . where . F: FnMut(mir::PlaceElem<'tcx>) -> bool, . { 41,594 ( 0.00%) let mut next_child = move_data.move_paths[path].first_child; 107,532 ( 0.00%) while let Some(child_index) = next_child { . let move_path_children = &move_data.move_paths[child_index]; 94,446 ( 0.00%) if let Some(&elem) = move_path_children.place.projection.last() { 159,238 ( 0.00%) if cond(elem) { . return Some(child_index); . } . } . next_child = move_path_children.next_sibling; . } . . None . } -- line 30 ---------------------------------------- -- line 48 ---------------------------------------- . // . // FIXME: we have to do something for moving slice patterns. . fn place_contents_drop_state_cannot_differ<'tcx>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . 
place: mir::Place<'tcx>, . ) -> bool { . let ty = place.ty(body, tcx).ty; 21,214,334 ( 0.04%) match ty.kind() { . ty::Array(..) => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} => false", . place, ty . ); . false . } . ty::Slice(..) | ty::Ref(..) | ty::RawPtr(..) => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} refd => true", . place, ty . ); . true . } 15,838,245 ( 0.03%) ty::Adt(def, _) if (def.has_dtor(tcx) && !def.is_box()) || def.is_union() => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} Drop => true", . place, ty . ); . true . } . _ => false, . } -- line 79 ---------------------------------------- -- line 83 ---------------------------------------- . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . lookup_result: LookupResult, . each_child: F, . ) where . F: FnMut(MovePathIndex), . { 309,924 ( 0.00%) match lookup_result { . LookupResult::Parent(..) => { . // access to untracked value - do not touch children . } . LookupResult::Exact(e) => on_all_children_bits(tcx, body, move_data, e, each_child), . } . } . . pub fn on_all_children_bits<'tcx, F>( -- line 99 ---------------------------------------- -- line 100 ---------------------------------------- . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . move_path_index: MovePathIndex, . mut each_child: F, . ) where . F: FnMut(MovePathIndex), . { 30,146,464 ( 0.06%) fn is_terminal_path<'tcx>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . path: MovePathIndex, . ) -> bool { 7,536,616 ( 0.01%) place_contents_drop_state_cannot_differ(tcx, body, move_data.move_paths[path].place) 30,146,464 ( 0.06%) } . 36,120,817 ( 0.07%) fn on_all_children_bits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . move_path_index: MovePathIndex, . each_child: &mut F, . ) where . F: FnMut(MovePathIndex), . { 864,884 ( 0.00%) each_child(move_path_index); . 26,469,971 ( 0.05%) if is_terminal_path(tcx, body, move_data, move_path_index) { . return; . } . 3,025,116 ( 0.01%) let mut next_child_index = move_data.move_paths[move_path_index].first_child; 9,226,156 ( 0.02%) while let Some(child_index) = next_child_index { 4,704,726 ( 0.01%) on_all_children_bits(tcx, body, move_data, child_index, each_child); 784,121 ( 0.00%) next_child_index = move_data.move_paths[child_index].next_sibling; . } 29,495,012 ( 0.06%) } 17,497,562 ( 0.03%) on_all_children_bits(tcx, body, move_data, move_path_index, &mut each_child); . } . . pub fn on_all_drop_children_bits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . path: MovePathIndex, . mut each_child: F, . ) where . F: FnMut(MovePathIndex), . { 1,463,940 ( 0.00%) on_all_children_bits(tcx, body, &ctxt.move_data, path, |child| { 1,131,436 ( 0.00%) let place = &ctxt.move_data.move_paths[path].place; 282,859 ( 0.00%) let ty = place.ty(body, tcx).ty; . debug!("on_all_drop_children_bits({:?}, {:?} : {:?})", path, place, ty); . 172,324 ( 0.00%) let erased_ty = tcx.erase_regions(ty); 1,753,302 ( 0.00%) if erased_ty.needs_drop(tcx, ctxt.param_env) { 281,970 ( 0.00%) each_child(child); . } else { . debug!("on_all_drop_children_bits - skipping") . } . }) . } . 491,819 ( 0.00%) pub fn drop_flag_effects_for_function_entry<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . mut callback: F, . ) where . F: FnMut(MovePathIndex, DropFlagState), . { . 
let move_data = &ctxt.move_data; 44,020 ( 0.00%) for arg in body.args_iter() { 118,944 ( 0.00%) let place = mir::Place::from(arg); 178,416 ( 0.00%) let lookup_result = move_data.rev_lookup.find(place.as_ref()); . on_lookup_result_bits(tcx, body, move_data, lookup_result, |mpi| { 59,526 ( 0.00%) callback(mpi, DropFlagState::Present) . }); . } 352,160 ( 0.00%) } . 32,572,632 ( 0.06%) pub fn drop_flag_effects_for_location<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . loc: Location, . mut callback: F, . ) where . F: FnMut(MovePathIndex, DropFlagState), . { . let move_data = &ctxt.move_data; . debug!("drop_flag_effects_for_location({:?})", loc); . . // first, move out of the RHS 1,319,413 ( 0.00%) for mi in &move_data.loc_map[loc] { 6,957,401 ( 0.01%) let path = mi.move_path_index(move_data); . debug!("moving out of path {:?}", move_data.move_paths[path]); . 2,295,742 ( 0.00%) on_all_children_bits(tcx, body, move_data, path, |mpi| callback(mpi, DropFlagState::Absent)) . } . . debug!("drop_flag_effects: assignment for location({:?})", loc); . 859,529 ( 0.00%) for_location_inits(tcx, body, move_data, loc, |mpi| callback(mpi, DropFlagState::Present)); 25,435,016 ( 0.05%) } . . pub fn for_location_inits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . loc: Location, . mut callback: F, . ) where . F: FnMut(MovePathIndex), . { 1,319,413 ( 0.00%) for ii in &move_data.init_loc_map[loc] { 3,744,396 ( 0.01%) let init = move_data.inits[*ii]; 2,080,470 ( 0.00%) match init.kind { . InitKind::Deep => { . let path = init.path; . . on_all_children_bits(tcx, body, move_data, path, &mut callback) . } . InitKind::Shallow => { . let mpi = init.path; . callback(mpi); -- line 226 ---------------------------------------- -- line 230 ---------------------------------------- . } . } . . /// Calls `handle_inactive_variant` for each descendant move path of `enum_place` that contains a . /// `Downcast` to a variant besides the `active_variant`. . /// . /// NOTE: If there are no move paths corresponding to an inactive variant, . /// `handle_inactive_variant` will not be called for that variant. 784,500 ( 0.00%) pub(crate) fn on_all_inactive_variants<'tcx>( . tcx: TyCtxt<'tcx>, . body: &mir::Body<'tcx>, . move_data: &MoveData<'tcx>, . enum_place: mir::Place<'tcx>, . active_variant: VariantIdx, . mut handle_inactive_variant: impl FnMut(MovePathIndex), . ) { 326,875 ( 0.00%) let enum_mpi = match move_data.rev_lookup.find(enum_place.as_ref()) { . LookupResult::Exact(mpi) => mpi, . LookupResult::Parent(_) => return, . }; . . let enum_path = &move_data.move_paths[enum_mpi]; 395,507 ( 0.00%) for (variant_mpi, variant_path) in enum_path.children(&move_data.move_paths) { . // Because of the way we build the `MoveData` tree, each child should have exactly one more . // projection than `enum_place`. This additional projection must be a downcast since the . // base is an enum. 75,773 ( 0.00%) let (downcast, base_proj) = variant_path.place.projection.split_last().unwrap(); 303,092 ( 0.00%) assert_eq!(enum_place.projection.len(), base_proj.len()); . 227,319 ( 0.00%) let variant_idx = match *downcast { . mir::ProjectionElem::Downcast(_, idx) => idx, . _ => unreachable!(), . }; . 75,773 ( 0.00%) if variant_idx != active_variant { . on_all_children_bits(tcx, body, move_data, variant_mpi, |mpi| { 73,738 ( 0.00%) handle_inactive_variant(mpi) . }); . } . 
} 523,000 ( 0.00%) } 27,615,754 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs -------------------------------------------------------------------------------- Ir -- line 186 ---------------------------------------- . /// // use the values stored in map . /// ``` . pub struct HashMap { . pub(crate) hash_builder: S, . pub(crate) table: RawTable<(K, V), A>, . } . . impl Clone for HashMap { 6,288 ( 0.00%) fn clone(&self) -> Self { 55,545 ( 0.00%) HashMap { . hash_builder: self.hash_builder.clone(), 11,406 ( 0.00%) table: self.table.clone(), . } 7,074 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . self.table.clone_from(&source.table); . . // Update hash_builder only if we successfully cloned all elements. . self.hash_builder.clone_from(&source.hash_builder); . } . } -- line 207 ---------------------------------------- -- line 210 ---------------------------------------- . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_hasher(hash_builder: &S) -> impl Fn(&(Q, V)) -> u64 + '_ . where . K: Borrow, . Q: Hash, . S: BuildHasher, . { 2,442,209 ( 0.00%) move |val| make_hash::(hash_builder, &val.0) . } . . /// Ensures that a single closure type across uses of this which, in turn prevents multiple . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . fn equivalent_key(k: &Q) -> impl Fn(&(K, V)) -> bool + '_ . where . K: Borrow, . Q: ?Sized + Eq, . { 5,689,769 ( 0.01%) move |x| k.eq(x.0.borrow()) . } . . /// Ensures that a single closure type across uses of this which, in turn prevents multiple . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . fn equivalent(k: &Q) -> impl Fn(&K) -> bool + '_ . where . K: Borrow, . Q: ?Sized + Eq, . { 8,013,090 ( 0.02%) move |x| k.eq(x.borrow()) . } . . #[cfg(not(feature = "nightly"))] . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64 . where . K: Borrow, . Q: Hash + ?Sized, -- line 248 ---------------------------------------- -- line 251 ---------------------------------------- . use core::hash::Hasher; . let mut state = hash_builder.build_hasher(); . val.hash(&mut state); . state.finish() . } . . #[cfg(feature = "nightly")] . #[cfg_attr(feature = "inline-more", inline)] 4 ( 0.00%) pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64 . where . K: Borrow, . Q: Hash + ?Sized, . S: BuildHasher, . { 185,700 ( 0.00%) hash_builder.hash_one(val) 8 ( 0.00%) } . . #[cfg(not(feature = "nightly"))] . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_insert_hash(hash_builder: &S, val: &K) -> u64 . where . K: Hash, . S: BuildHasher, . { -- line 274 ---------------------------------------- -- line 280 ---------------------------------------- . . #[cfg(feature = "nightly")] . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_insert_hash(hash_builder: &S, val: &K) -> u64 . where . K: Hash, . S: BuildHasher, . { 4,881,930 ( 0.01%) hash_builder.hash_one(val) . } . . #[cfg(feature = "ahash")] . impl HashMap { . /// Creates an empty `HashMap`. . /// . /// The hash map is initially created with a capacity of 0, so it will not allocate until it . /// is first inserted into. 
-- line 296 ---------------------------------------- -- line 367 ---------------------------------------- . /// let s = DefaultHashBuilder::default(); . /// let mut map = HashMap::with_hasher(s); . /// map.insert(1, 2); . /// ``` . /// . /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html . #[cfg_attr(feature = "inline-more", inline)] . pub const fn with_hasher(hash_builder: S) -> Self { 3,271,747 ( 0.01%) Self { . hash_builder, . table: RawTable::new(), . } . } . . /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` . /// to hash the keys. . /// -- line 383 ---------------------------------------- -- line 437 ---------------------------------------- . /// use hashbrown::hash_map::DefaultHashBuilder; . /// . /// let s = DefaultHashBuilder::default(); . /// let mut map = HashMap::with_hasher(s); . /// map.insert(1, 2); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self { 97 ( 0.00%) Self { . hash_builder, . table: RawTable::new_in(alloc), . } . } . . /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` . /// to hash the keys. It will be allocated with the given allocator. . /// -- line 453 ---------------------------------------- -- line 527 ---------------------------------------- . /// map.insert("c", 3); . /// . /// for key in map.keys() { . /// println!("{}", key); . /// } . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn keys(&self) -> Keys<'_, K, V> { 6 ( 0.00%) Keys { inner: self.iter() } . } . . /// An iterator visiting all values in arbitrary order. . /// The iterator element type is `&'a V`. . /// . /// # Examples . /// . /// ``` -- line 543 ---------------------------------------- -- line 663 ---------------------------------------- . /// . /// let mut a = HashMap::new(); . /// assert_eq!(a.len(), 0); . /// a.insert(1, "a"); . /// assert_eq!(a.len(), 1); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn len(&self) -> usize { 660,835 ( 0.00%) self.table.len() . } . . /// Returns `true` if the map contains no elements. . /// . /// # Examples . /// . /// ``` . /// use hashbrown::HashMap; -- line 679 ---------------------------------------- -- line 680 ---------------------------------------- . /// . /// let mut a = HashMap::new(); . /// assert!(a.is_empty()); . /// a.insert(1, "a"); . /// assert!(!a.is_empty()); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn is_empty(&self) -> bool { 1,408,433 ( 0.00%) self.len() == 0 . } . . /// Clears the map, returning all key-value pairs as an iterator. Keeps the . /// allocated memory for reuse. . /// . /// # Examples . /// . /// ``` -- line 696 ---------------------------------------- -- line 790 ---------------------------------------- . /// use hashbrown::HashMap; . /// . /// let mut a = HashMap::new(); . /// a.insert(1, "a"); . /// a.clear(); . /// assert!(a.is_empty()); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 6 ( 0.00%) pub fn clear(&mut self) { . self.table.clear(); 6 ( 0.00%) } . . /// Creates a consuming iterator visiting all the keys in arbitrary order. . /// The map cannot be used after calling this. . /// The iterator element type is `K`. . /// . /// # Examples . /// . /// ``` -- line 808 ---------------------------------------- -- line 963 ---------------------------------------- . /// } . /// . /// assert_eq!(letters[&'s'], 2); . /// assert_eq!(letters[&'t'], 3); . /// assert_eq!(letters[&'u'], 1); . 
/// assert_eq!(letters.get(&'y'), None); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 10,772 ( 0.00%) pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> { . let hash = make_insert_hash::(&self.hash_builder, &key); . if let Some(elem) = self.table.find(hash, equivalent_key(&key)) { 2,055 ( 0.00%) Entry::Occupied(OccupiedEntry { . hash, . key: Some(key), . elem, . table: self, . }) . } else { 11,410 ( 0.00%) Entry::Vacant(VacantEntry { . hash, . key, . table: self, . }) . } 13,465 ( 0.00%) } . . /// Gets the given key's corresponding entry by reference in the map for in-place manipulation. . /// . /// # Examples . /// . /// ``` . /// use hashbrown::HashMap; . /// -- line 995 ---------------------------------------- -- line 1047 ---------------------------------------- . /// ``` . #[inline] . pub fn get(&self, k: &Q) -> Option<&V> . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 11,526,416 ( 0.02%) match self.get_inner(k) { . Some(&(_, ref v)) => Some(v), . None => None, . } . } . . /// Returns the key-value pair corresponding to the supplied key. . /// . /// The supplied key may be any borrowed form of the map's key type, but -- line 1063 ---------------------------------------- -- line 1091 ---------------------------------------- . } . . #[inline] . fn get_inner(&self, k: &Q) -> Option<&(K, V)> . where . K: Borrow, . Q: Hash + Eq, . { 15,965,477 ( 0.03%) if self.table.is_empty() { . None . } else { 2 ( 0.00%) let hash = make_hash::(&self.hash_builder, k); . self.table.get(hash, equivalent_key(k)) . } . } . . /// Returns the key-value pair corresponding to the supplied key, with a mutable reference to value. . /// . /// The supplied key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for -- line 1110 ---------------------------------------- -- line 1155 ---------------------------------------- . /// use hashbrown::HashMap; . /// . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// assert_eq!(map.contains_key(&1), true); . /// assert_eq!(map.contains_key(&2), false); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 5,818,448 ( 0.01%) pub fn contains_key(&self, k: &Q) -> bool . where . K: Borrow, . Q: Hash + Eq, . { . self.get_inner(k).is_some() 7,863,822 ( 0.01%) } . . /// Returns a mutable reference to the value corresponding to the key. . /// . /// The key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for . /// the key type. . /// . /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html -- line 1177 ---------------------------------------- -- line 1185 ---------------------------------------- . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// if let Some(x) = map.get_mut(&1) { . /// *x = "b"; . /// } . /// assert_eq!(map[&1], "b"); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 1,421 ( 0.00%) pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 11,769 ( 0.00%) match self.get_inner_mut(k) { . Some(&mut (_, ref mut v)) => Some(v), . None => None, . } 2,842 ( 0.00%) } . . #[inline] . fn get_inner_mut(&mut self, k: &Q) -> Option<&mut (K, V)> . where . K: Borrow, . Q: Hash + Eq, . { 3,923 ( 0.00%) if self.table.is_empty() { . None . } else { . let hash = make_hash::(&self.hash_builder, k); 5,004 ( 0.00%) self.table.get_mut(hash, equivalent_key(k)) . 
} . } . . /// Attempts to get mutable references to `N` values in the map at once. . /// . /// Returns an array of length `N` with the results of each query. For soundness, at most one . /// mutable reference will be returned to any value. `None` will be returned if any of the . /// keys are duplicates or missing. -- line 1223 ---------------------------------------- -- line 1495 ---------------------------------------- . /// assert_eq!(map.insert(37, "a"), None); . /// assert_eq!(map.is_empty(), false); . /// . /// map.insert(37, "b"); . /// assert_eq!(map.insert(37, "c"), Some("b")); . /// assert_eq!(map[&37], "c"); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 39,167,134 ( 0.07%) pub fn insert(&mut self, k: K, v: V) -> Option { . let hash = make_insert_hash::(&self.hash_builder, &k); 22,915 ( 0.00%) if let Some((_, item)) = self.table.get_mut(hash, equivalent_key(&k)) { . Some(mem::replace(item, v)) . } else { 15,416,270 ( 0.03%) self.table 14,730,687 ( 0.03%) .insert(hash, (k, v), make_hasher::(&self.hash_builder)); 1,932,585 ( 0.00%) None . } 37,135,877 ( 0.07%) } . . /// Insert a key-value pair into the map without checking . /// if the key already exists in the map. . /// . /// Returns a reference to the key and value just inserted. . /// . /// This operation is safe if a key does not exist in the map. . /// -- line 1520 ---------------------------------------- -- line 1592 ---------------------------------------- . /// use hashbrown::HashMap; . /// . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// assert_eq!(map.remove(&1), Some("a")); . /// assert_eq!(map.remove(&1), None); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 1,225,811 ( 0.00%) pub fn remove(&mut self, k: &Q) -> Option . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 7,278,614 ( 0.01%) match self.remove_entry(k) { 388,028 ( 0.00%) Some((_, v)) => Some(v), 1,119,267 ( 0.00%) None => None, . } 2,738,682 ( 0.01%) } . . /// Removes a key from the map, returning the stored key and value if the . /// key was previously in the map. . /// . /// The key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for . /// the key type. . /// -- line 1618 ---------------------------------------- -- line 1631 ---------------------------------------- . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn remove_entry(&mut self, k: &Q) -> Option<(K, V)> . where . K: Borrow, . Q: Hash + Eq, . { . let hash = make_hash::(&self.hash_builder, k); 3,012,007 ( 0.01%) self.table.remove_entry(hash, equivalent_key(k)) . } . } . . impl HashMap { . /// Creates a raw entry builder for the HashMap. . /// . /// Raw entries provide the lowest level of control for searching and . /// manipulating a map. They must be manually initialized with a hash and -- line 1647 ---------------------------------------- -- line 2069 ---------------------------------------- . where . F: FnMut(&K, &mut V) -> bool, . A: Allocator + Clone, . { . type Item = (K, V); . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option { 12 ( 0.00%) self.inner.next(&mut self.f) . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (0, self.inner.iter.size_hint().1) . } . } . -- line 2085 ---------------------------------------- -- line 2095 ---------------------------------------- . #[cfg_attr(feature = "inline-more", inline)] . pub(super) fn next(&mut self, f: &mut F) -> Option<(K, V)> . where . 
F: FnMut(&K, &mut V) -> bool, . { . unsafe { . for item in &mut self.iter { . let &mut (ref key, ref mut value) = item.as_mut(); 12 ( 0.00%) if f(key, value) { 4 ( 0.00%) return Some(self.table.remove(item)); . } . } . } . None . } . } . . /// A mutable iterator over the values of a `HashMap`. -- line 2112 ---------------------------------------- -- line 2209 ---------------------------------------- . /// Creates a `RawEntryMut` from the given key and its hash. . #[inline] . #[allow(clippy::wrong_self_convention)] . pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A> . where . K: Borrow, . Q: Eq, . { 34,655,664 ( 0.07%) self.from_hash(hash, equivalent(k)) . } . } . . impl<'a, K, V, S, A: Allocator + Clone> RawEntryBuilderMut<'a, K, V, S, A> { . /// Creates a `RawEntryMut` from the given hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] 85,648,868 ( 0.16%) pub fn from_hash(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A> . where . for<'b> F: FnMut(&'b K) -> bool, . { . self.search(hash, is_match) 92,719,310 ( 0.18%) } . . #[cfg_attr(feature = "inline-more", inline)] . fn search(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A> . where . for<'b> F: FnMut(&'b K) -> bool, . { 10,866,088 ( 0.02%) match self.map.table.find(hash, |(k, _)| is_match(k)) { 58,147,503 ( 0.11%) Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut { . elem, . table: &mut self.map.table, . hash_builder: &self.map.hash_builder, . }), 2,378,671 ( 0.00%) None => RawEntryMut::Vacant(RawVacantEntryMut { . table: &mut self.map.table, . hash_builder: &self.map.hash_builder, . }), . } . } . } . . impl<'a, K, V, S, A: Allocator + Clone> RawEntryBuilder<'a, K, V, S, A> { -- line 2251 ---------------------------------------- -- line 2260 ---------------------------------------- . { . let hash = make_hash::(&self.map.hash_builder, k); . self.from_key_hashed_nocheck(hash, k) . } . . /// Access an entry by a key and its hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] 20,416,241 ( 0.04%) pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)> . where . K: Borrow, . Q: Eq, . { 24,192,547 ( 0.05%) self.from_hash(hash, equivalent(k)) 37,571,663 ( 0.07%) } . . #[cfg_attr(feature = "inline-more", inline)] . fn search(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)> . where . F: FnMut(&K) -> bool, . { 35,035,430 ( 0.07%) match self.map.table.get(hash, |(k, _)| is_match(k)) { . Some(&(ref key, ref value)) => Some((key, value)), . None => None, . } . } . . /// Access an entry by hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] -- line 2289 ---------------------------------------- -- line 2624 ---------------------------------------- . /// and returns a mutable reference to it. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::shadow_unrelated)] . pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V) . where . K: Hash, . S: BuildHasher, . { 3,951,772 ( 0.01%) let &mut (ref mut k, ref mut v) = self.table.insert_entry( . hash, . (key, value), . make_hasher::(self.hash_builder), . ); . (k, v) . } . . /// Set the value of an entry with a custom hasher function. -- line 2640 ---------------------------------------- -- line 2974 ---------------------------------------- . /// map.insert("a", 1); . /// map.insert("b", 2); . /// map.insert("c", 3); . /// . 
/// // Not possible with .iter() . /// let vec: Vec<(&str, i32)> = map.into_iter().collect(); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 30,313 ( 0.00%) fn into_iter(self) -> IntoIter { 534,012 ( 0.00%) IntoIter { 255,187 ( 0.00%) inner: self.table.into_iter(), . } 90,939 ( 0.00%) } . } . . impl<'a, K, V> Iterator for Iter<'a, K, V> { . type Item = (&'a K, &'a V); . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option<(&'a K, &'a V)> { . // Avoid `Option::map` because it bloats LLVM IR. 3,335,666 ( 0.01%) match self.inner.next() { . Some(x) => unsafe { . let r = x.as_ref(); 17,850 ( 0.00%) Some((&r.0, &r.1)) . }, . None => None, . } . } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { 482 ( 0.00%) self.inner.size_hint() . } . } . impl ExactSizeIterator for Iter<'_, K, V> { . #[cfg_attr(feature = "inline-more", inline)] . fn len(&self) -> usize { . self.inner.len() . } . } -- line 3013 ---------------------------------------- -- line 3051 ---------------------------------------- . } . } . . impl Iterator for IntoIter { . type Item = (K, V); . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option<(K, V)> { 66,443 ( 0.00%) self.inner.next() . } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { . self.inner.size_hint() . } . } . impl ExactSizeIterator for IntoIter { . #[cfg_attr(feature = "inline-more", inline)] -- line 3067 ---------------------------------------- -- line 3076 ---------------------------------------- . f.debug_list().entries(self.iter()).finish() . } . } . . impl<'a, K, V> Iterator for Keys<'a, K, V> { . type Item = &'a K; . . #[cfg_attr(feature = "inline-more", inline)] 1,651 ( 0.00%) fn next(&mut self) -> Option<&'a K> { . // Avoid `Option::map` because it bloats LLVM IR. . match self.inner.next() { . Some((k, _)) => Some(k), . None => None, . } 3,302 ( 0.00%) } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { . self.inner.size_hint() . } . } . impl ExactSizeIterator for Keys<'_, K, V> { . #[cfg_attr(feature = "inline-more", inline)] . fn len(&self) -> usize { -- line 3098 ---------------------------------------- -- line 3819 ---------------------------------------- . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn insert(self, value: V) -> &'a mut V . where . K: Hash, . S: BuildHasher, . { . let table = &mut self.table.table; 538 ( 0.00%) let entry = table.insert_entry( . self.hash, . (self.key, value), . make_hasher::(&self.table.hash_builder), . ); . &mut entry.1 . } . . #[cfg_attr(feature = "inline-more", inline)] -- line 3835 ---------------------------------------- -- line 4557 ---------------------------------------- . /// keys with new values returned from the iterator. . impl Extend<(K, V)> for HashMap . where . K: Eq + Hash, . S: BuildHasher, . A: Allocator + Clone, . { . #[cfg_attr(feature = "inline-more", inline)] 443,745 ( 0.00%) fn extend>(&mut self, iter: T) { . // Keys may be already present or show multiple times in the iterator. . // Reserve the entire hint lower bound if the map is empty. . // Otherwise reserve half the hint (rounded up), so the map . // will only resize twice in the worst case. 364,410 ( 0.00%) let iter = iter.into_iter(); 347,078 ( 0.00%) let reserve = if self.is_empty() { . iter.size_hint().0 . } else { 26,619 ( 0.00%) (iter.size_hint().0 + 1) / 2 . }; . self.reserve(reserve); . 
iter.for_each(move |(k, v)| { 1,046,823 ( 0.00%) self.insert(k, v); . }); 275,133 ( 0.00%) } . . #[inline] . #[cfg(feature = "nightly")] . fn extend_one(&mut self, (k, v): (K, V)) { . self.insert(k, v); . } . . #[inline] -- line 4588 ---------------------------------------- 36,549,455 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/traversal.rs -------------------------------------------------------------------------------- Ir -- line 23 ---------------------------------------- . pub struct Preorder<'a, 'tcx> { . body: &'a Body<'tcx>, . visited: BitSet, . worklist: Vec, . root_is_start_block: bool, . } . . impl<'a, 'tcx> Preorder<'a, 'tcx> { 1,055,000 ( 0.00%) pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> Preorder<'a, 'tcx> { 105,500 ( 0.00%) let worklist = vec![root]; . 316,500 ( 0.00%) Preorder { . body, . visited: BitSet::new_empty(body.basic_blocks().len()), 422,000 ( 0.00%) worklist, . root_is_start_block: root == START_BLOCK, . } 949,500 ( 0.00%) } . } . 103,736 ( 0.00%) pub fn preorder<'a, 'tcx>(body: &'a Body<'tcx>) -> Preorder<'a, 'tcx> { 425,528 ( 0.00%) Preorder::new(body, START_BLOCK) 155,604 ( 0.00%) } . . impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> { . type Item = (BasicBlock, &'a BasicBlockData<'tcx>); . 18,682,257 ( 0.04%) fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { 2,303,263 ( 0.00%) while let Some(idx) = self.worklist.pop() { 2,303,263 ( 0.00%) if !self.visited.insert(idx) { . continue; . } . 1,592,887 ( 0.00%) let data = &self.body[idx]; . 7,964,435 ( 0.02%) if let Some(ref term) = data.terminator { . self.worklist.extend(term.successors()); . } . . return Some((idx, data)); . } . . None 15,285,483 ( 0.03%) } . . fn size_hint(&self) -> (usize, Option) { . // All the blocks, minus the number of blocks we've visited. . let upper = self.body.basic_blocks().len() - self.visited.count(); . . let lower = if self.root_is_start_block { . // We will visit all remaining blocks exactly once. . upper -- line 74 ---------------------------------------- -- line 101 ---------------------------------------- . pub struct Postorder<'a, 'tcx> { . body: &'a Body<'tcx>, . visited: BitSet, . visit_stack: Vec<(BasicBlock, Successors<'a>)>, . root_is_start_block: bool, . } . . impl<'a, 'tcx> Postorder<'a, 'tcx> { 700,420 ( 0.00%) pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> Postorder<'a, 'tcx> { . let mut po = Postorder { . body, . visited: BitSet::new_empty(body.basic_blocks().len()), . visit_stack: Vec::new(), . root_is_start_block: root == START_BLOCK, . }; . . let data = &po.body[root]; . 700,420 ( 0.00%) if let Some(ref term) = data.terminator { . po.visited.insert(root); 400,240 ( 0.00%) po.visit_stack.push((root, term.successors())); 200,120 ( 0.00%) po.traverse_successor(); . } . . po 900,540 ( 0.00%) } . 11,124,785 ( 0.02%) fn traverse_successor(&mut self) { . // This is quite a complex loop due to 1. the borrow checker not liking it much . // and 2. what exactly is going on is not clear . // . // It does the actual traversal of the graph, while the `next` method on the iterator . // just pops off of the stack. `visit_stack` is a stack containing pairs of nodes and . // iterators over the successors of those nodes. Each iteration attempts to get the next . // node from the top of the stack, then pushes that node and an iterator over the . // successors to the top of the stack. 
This loop only grows `visit_stack`, stopping when -- line 136 ---------------------------------------- -- line 169 ---------------------------------------- . // . // Now that the top of the stack has no successors we can traverse, each item will . // be popped off during iteration until we get back to `A`. This yields [E, D, B]. . // . // When we yield `B` and call `traverse_successor`, we push `C` to the stack, but . // since we've already visited `E`, that child isn't added to the stack. The last . // two iterations yield `C` and finally `A` for a final traversal of [E, D, B, C, A] . loop { 11,017,327 ( 0.02%) let bb = if let Some(&mut (_, ref mut iter)) = self.visit_stack.last_mut() { 6,449,682 ( 0.01%) if let Some(&bb) = iter.next() { . bb . } else { . break; . } . } else { . break; . }; . 2,149,894 ( 0.00%) if self.visited.insert(bb) { 9,723,945 ( 0.02%) if let Some(term) = &self.body[bb].terminator { 5,556,540 ( 0.01%) self.visit_stack.push((bb, term.successors())); . } . } . } 12,714,040 ( 0.02%) } . } . 26,804 ( 0.00%) pub fn postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> Postorder<'a, 'tcx> { 26,804 ( 0.00%) Postorder::new(body, START_BLOCK) 40,206 ( 0.00%) } . . impl<'a, 'tcx> Iterator for Postorder<'a, 'tcx> { . type Item = (BasicBlock, &'a BasicBlockData<'tcx>); . 620,133 ( 0.00%) fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { . let next = self.visit_stack.pop(); 1,489,195 ( 0.00%) if next.is_some() { 2,891,732 ( 0.01%) self.traverse_successor(); . } . 1,489,195 ( 0.00%) next.map(|(bb, _)| (bb, &self.body[bb])) 1,033,555 ( 0.00%) } . 105,200 ( 0.00%) fn size_hint(&self) -> (usize, Option) { . // All the blocks, minus the number of blocks we've visited. 340,776 ( 0.00%) let upper = self.body.basic_blocks().len() - self.visited.count(); . 340,776 ( 0.00%) let lower = if self.root_is_start_block { . // We will visit all remaining blocks exactly once. . upper . } else { . self.visit_stack.len() . }; . 63,120 ( 0.00%) (lower, Some(upper)) 147,280 ( 0.00%) } . } . . /// Reverse postorder traversal of a graph . /// . /// Reverse postorder is the reverse order of a postorder traversal. . /// This is different to a preorder traversal and represents a natural . /// linearization of control-flow. . /// -- line 232 ---------------------------------------- -- line 253 ---------------------------------------- . pub struct ReversePostorder<'a, 'tcx> { . body: &'a Body<'tcx>, . blocks: Vec, . idx: usize, . } . . impl<'a, 'tcx> ReversePostorder<'a, 'tcx> { . pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> ReversePostorder<'a, 'tcx> { 173,316 ( 0.00%) let blocks: Vec<_> = Postorder::new(body, root).map(|(bb, _)| bb).collect(); . 86,658 ( 0.00%) let len = blocks.len(); . 433,290 ( 0.00%) ReversePostorder { body, blocks, idx: len } . } . } . 519,948 ( 0.00%) pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorder<'a, 'tcx> { . ReversePostorder::new(body, START_BLOCK) 433,290 ( 0.00%) } . . impl<'a, 'tcx> Iterator for ReversePostorder<'a, 'tcx> { . type Item = (BasicBlock, &'a BasicBlockData<'tcx>); . 1,382,544 ( 0.00%) fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { 5,530,176 ( 0.01%) if self.idx == 0 { . return None; . } 2,591,772 ( 0.00%) self.idx -= 1; . . self.blocks.get(self.idx).map(|&bb| (bb, &self.body[bb])) 2,765,088 ( 0.01%) } . . fn size_hint(&self) -> (usize, Option) { . (self.idx, Some(self.idx)) . } . } . . impl<'a, 'tcx> ExactSizeIterator for ReversePostorder<'a, 'tcx> {} . 
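Note: the comments in `Postorder::traverse_successor` above describe a depth-first walk driven by an explicit stack of (block, successor-iterator) pairs. The block below is a minimal, self-contained sketch of that shape, using plain `usize` node ids and adjacency lists; the names are illustrative only and this is not the rustc implementation.

fn postorder(succ: &[Vec<usize>], root: usize) -> Vec<usize> {
    let mut visited = vec![false; succ.len()];
    // Each frame is (node, index of the next successor to try), mirroring the
    // `visit_stack` of (BasicBlock, successors iterator) described above.
    let mut stack = vec![(root, 0usize)];
    visited[root] = true;
    let mut out = Vec::new();
    while let Some(&(node, next)) = stack.last() {
        if let Some(&child) = succ[node].get(next) {
            stack.last_mut().unwrap().1 += 1; // advance this frame's successor cursor
            if !visited[child] {
                visited[child] = true;
                stack.push((child, 0)); // descend into an unvisited successor
            }
        } else {
            stack.pop(); // all successors handled: emit the node in postorder
            out.push(node);
        }
    }
    out
}

fn main() {
    // 0 -> {1, 2}, 1 -> {3}, 2 -> {3}, 3 -> {}
    let succ = vec![vec![1, 2], vec![3], vec![3], vec![]];
    assert_eq!(postorder(&succ, 0), vec![3, 1, 2, 0]);
}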
-- line 291 ---------------------------------------- -- line 295 ---------------------------------------- . /// This is clearer than writing `preorder` in cases where the order doesn't matter. . pub fn reachable<'a, 'tcx>( . body: &'a Body<'tcx>, . ) -> impl 'a + Iterator)> { . preorder(body) . } . . /// Returns a `BitSet` containing all basic blocks reachable from the `START_BLOCK`. 321,792 ( 0.00%) pub fn reachable_as_bitset<'tcx>(body: &Body<'tcx>) -> BitSet { . let mut iter = preorder(body); . (&mut iter).for_each(drop); 214,528 ( 0.00%) iter.visited 321,792 ( 0.00%) } 14,878,768 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/closure.rs -------------------------------------------------------------------------------- Ir -- line 10 ---------------------------------------- . type Env; . } . . impl<'a, A, R> !Sync for Closure<'a, A, R> {} . impl<'a, A, R> !Send for Closure<'a, A, R> {} . . impl<'a, A, R, F: FnMut(A) -> R> From<&'a mut F> for Closure<'a, A, R> { . fn from(f: &'a mut F) -> Self { 13,587,273 ( 0.03%) unsafe extern "C" fn call R>(env: &mut Env, arg: A) -> R { . (*(env as *mut _ as *mut F))(arg) 18,116,364 ( 0.03%) } . Closure { call: call::, env: unsafe { &mut *(f as *mut _ as *mut Env) } } . } . } . . impl<'a, A, R> Closure<'a, A, R> { . pub fn call(&mut self, arg: A) -> R { 72,465,456 ( 0.14%) unsafe { (self.call)(self.env, arg) } . } . } -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs -------------------------------------------------------------------------------- Ir -- line 417 ---------------------------------------- . /// # #![allow(unused_mut)] . /// let mut vec: Vec = Vec::new(); . /// ``` . #[inline] . #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub const fn new() -> Self { 56,695,583 ( 0.11%) Vec { buf: RawVec::NEW, len: 0 } 9,700 ( 0.00%) } . . /// Constructs a new, empty `Vec` with the specified capacity. . /// . /// The vector will be able to hold exactly `capacity` elements without . /// reallocating. If `capacity` is 0, the vector will not allocate. . /// . /// It is important to note that although the returned vector has the . /// *capacity* specified, the vector will have a zero *length*. For an -- line 434 ---------------------------------------- -- line 601 ---------------------------------------- . /// vec.push(11); . /// assert_eq!(vec.len(), 11); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { 17,273,253 ( 0.03%) Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 } . } . . /// Creates a `Vec` directly from the raw components of another vector. . /// . /// # Safety . /// . /// This is highly unsafe, due to the number of invariants that aren't . /// checked: -- line 617 ---------------------------------------- -- line 677 ---------------------------------------- . /// // Put everything back together into a Vec . /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone()); . /// assert_eq!(rebuilt, [4, 5, 6]); . /// } . /// ``` . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . 
pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self { 2,831,367 ( 0.01%) unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } } . } . . /// Decomposes a `Vec` into its raw components. . /// . /// Returns the raw pointer to the underlying data, the length of . /// the vector (in elements), and the allocated capacity of the . /// data (in elements). These are the same arguments in the same . /// order as the arguments to [`from_raw_parts`]. -- line 693 ---------------------------------------- -- line 778 ---------------------------------------- . /// . /// ``` . /// let vec: Vec = Vec::with_capacity(10); . /// assert_eq!(vec.capacity(), 10); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn capacity(&self) -> usize { 7,013,977 ( 0.01%) self.buf.capacity() . } . . /// Reserves capacity for at least `additional` more elements to be inserted . /// in the given `Vec`. The collection may reserve more space to avoid . /// frequent reallocations. After calling `reserve`, capacity will be . /// greater than or equal to `self.len() + additional`. Does nothing if . /// capacity is already sufficient. . /// -- line 794 ---------------------------------------- -- line 801 ---------------------------------------- . /// ``` . /// let mut vec = vec![1]; . /// vec.reserve(10); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve(&mut self, additional: usize) { 8,750,078 ( 0.02%) self.buf.reserve(self.len, additional); . } . . /// Reserves the minimum capacity for exactly `additional` more elements to . /// be inserted in the given `Vec`. After calling `reserve_exact`, . /// capacity will be greater than or equal to `self.len() + additional`. . /// Does nothing if the capacity is already sufficient. . /// . /// Note that the allocator may give the collection more space than it -- line 817 ---------------------------------------- -- line 829 ---------------------------------------- . /// ``` . /// let mut vec = vec![1]; . /// vec.reserve_exact(10); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve_exact(&mut self, additional: usize) { 201,591 ( 0.00%) self.buf.reserve_exact(self.len, additional); . } . . /// Tries to reserve capacity for at least `additional` more elements to be inserted . /// in the given `Vec`. The collection may reserve more space to avoid . /// frequent reallocations. After calling `try_reserve`, capacity will be . /// greater than or equal to `self.len() + additional`. Does nothing if . /// capacity is already sufficient. . /// -- line 845 ---------------------------------------- -- line 930 ---------------------------------------- . /// assert!(vec.capacity() >= 3); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn shrink_to_fit(&mut self) { . // The capacity is never less than the length, and there's nothing to do when . // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit` . // by only calling it with a greater capacity. 330,361 ( 0.00%) if self.capacity() > self.len { 235,235 ( 0.00%) self.buf.shrink_to_fit(self.len); . } . } . . /// Shrinks the capacity of the vector with a lower bound. . /// . /// The capacity will remain at least as large as both the length . /// and the supplied value. . 
/// -- line 947 ---------------------------------------- -- line 990 ---------------------------------------- . /// let slice = vec.into_boxed_slice(); . /// assert_eq!(slice.into_vec().capacity(), 3); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn into_boxed_slice(mut self) -> Box<[T], A> { . unsafe { . self.shrink_to_fit(); 106,485 ( 0.00%) let me = ManuallyDrop::new(self); . let buf = ptr::read(&me.buf); . let len = me.len(); . buf.into_box(len).assume_init() . } . } . . /// Shortens the vector, keeping the first `len` elements and dropping . /// the rest. -- line 1006 ---------------------------------------- -- line 1040 ---------------------------------------- . /// let mut vec = vec![1, 2, 3]; . /// vec.truncate(0); . /// assert_eq!(vec, []); . /// ``` . /// . /// [`clear`]: Vec::clear . /// [`drain`]: Vec::drain . #[stable(feature = "rust1", since = "1.0.0")] 4,445,677 ( 0.01%) pub fn truncate(&mut self, len: usize) { . // This is safe because: . // . // * the slice passed to `drop_in_place` is valid; the `len > self.len` . // case avoids creating an invalid slice, and . // * the `len` of the vector is shrunk before calling `drop_in_place`, . // such that no value will be dropped twice in case `drop_in_place` . // were to panic once (if it panics twice, the program aborts). . unsafe { . // Note: It's intentional that this is `>` and not `>=`. . // Changing it to `>=` has negative performance . // implications in some cases. See #78884 for more. 3,621,973 ( 0.01%) if len > self.len { . return; . } . let remaining_len = self.len - len; . let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len); 4,400,799 ( 0.01%) self.len = len; 94,108 ( 0.00%) ptr::drop_in_place(s); . } 5,334,780 ( 0.01%) } . . /// Extracts a slice containing the entire vector. . /// . /// Equivalent to `&s[..]`. . /// . /// # Examples . /// . /// ``` -- line 1076 ---------------------------------------- -- line 1126 ---------------------------------------- . /// ``` . /// . /// [`as_mut_ptr`]: Vec::as_mut_ptr . #[stable(feature = "vec_as_ptr", since = "1.37.0")] . #[inline] . pub fn as_ptr(&self) -> *const T { . // We shadow the slice method of the same name to avoid going through . // `deref`, which creates an intermediate reference. 93,804,169 ( 0.18%) let ptr = self.buf.ptr(); . unsafe { . assume(!ptr.is_null()); . } . ptr . } . . /// Returns an unsafe mutable pointer to the vector's buffer. . /// -- line 1142 ---------------------------------------- -- line 1162 ---------------------------------------- . /// } . /// assert_eq!(&*x, &[0, 1, 2, 3]); . /// ``` . #[stable(feature = "vec_as_ptr", since = "1.37.0")] . #[inline] . pub fn as_mut_ptr(&mut self) -> *mut T { . // We shadow the slice method of the same name to avoid going through . // `deref_mut`, which creates an intermediate reference. 74,221,825 ( 0.14%) let ptr = self.buf.ptr(); . unsafe { . assume(!ptr.is_null()); . } . ptr . } . . /// Returns a reference to the underlying allocator. . #[unstable(feature = "allocator_api", issue = "32838")] -- line 1178 ---------------------------------------- -- line 1259 ---------------------------------------- . /// . /// Normally, here, one would use [`clear`] instead to correctly drop . /// the contents and thus not leak memory. . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub unsafe fn set_len(&mut self, new_len: usize) { . debug_assert!(new_len <= self.capacity()); . 
14,216,265 ( 0.03%) self.len = new_len; 396,336 ( 0.00%) } . . /// Removes an element from the vector and returns it. . /// . /// The removed element is replaced by the last element of the vector. . /// . /// This does not preserve ordering, but is *O*(1). . /// If you need to preserve the element order, use [`remove`] instead. . /// -- line 1276 ---------------------------------------- -- line 1305 ---------------------------------------- . assert_failed(index, len); . } . unsafe { . // We replace self[index] with the last element. Note that if the . // bounds check above succeeds there must be a last element (which . // can be self[index] itself). . let value = ptr::read(self.as_ptr().add(index)); . let base_ptr = self.as_mut_ptr(); 5 ( 0.00%) ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1); . self.set_len(len - 1); . value . } . } . . /// Inserts an element at position `index` within the vector, shifting all . /// elements after it to the right. . /// -- line 1321 ---------------------------------------- -- line 1329 ---------------------------------------- . /// let mut vec = vec![1, 2, 3]; . /// vec.insert(1, 4); . /// assert_eq!(vec, [1, 4, 2, 3]); . /// vec.insert(4, 5); . /// assert_eq!(vec, [1, 4, 2, 3, 5]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 112,308 ( 0.00%) pub fn insert(&mut self, index: usize, element: T) { . #[cold] . #[inline(never)] . fn assert_failed(index: usize, len: usize) -> ! { . panic!("insertion index (is {}) should be <= len (is {})", index, len); . } . 18,808 ( 0.00%) let len = self.len(); 40,846 ( 0.00%) if index > len { . assert_failed(index, len); . } . . // space for the new element 62,558 ( 0.00%) if len == self.buf.capacity() { . self.reserve(1); . } . . unsafe { . // infallible . // The spot to put the new value . { . let p = self.as_mut_ptr().add(index); . // Shift everything over to make space. (Duplicating the . // `index`th element into two consecutive places.) 85,500 ( 0.00%) ptr::copy(p, p.offset(1), len - index); . // Write it in, overwriting the first copy of the `index`th . // element. . ptr::write(p, element); . } 62,528 ( 0.00%) self.set_len(len + 1); . } 96,264 ( 0.00%) } . . /// Removes and returns the element at position `index` within the vector, . /// shifting all elements after it to the left. . /// . /// Note: Because this shifts over the remaining elements, it has a . /// worst-case performance of *O*(*n*). If you don't need the order of elements . /// to be preserved, use [`swap_remove`] instead. If you'd like to remove . /// elements from the beginning of the `Vec`, consider using -- line 1376 ---------------------------------------- -- line 1387 ---------------------------------------- . /// . /// ``` . /// let mut v = vec![1, 2, 3]; . /// assert_eq!(v.remove(1), 2); . /// assert_eq!(v, [1, 3]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[track_caller] 328,728 ( 0.00%) pub fn remove(&mut self, index: usize) -> T { . #[cold] . #[inline(never)] . #[track_caller] . fn assert_failed(index: usize, len: usize) -> ! { . panic!("removal index (is {}) should be < len (is {})", index, len); . } . 115,057 ( 0.00%) let len = self.len(); 349,468 ( 0.00%) if index >= len { . assert_failed(index, len); . } . unsafe { . // infallible . let ret; . { . // the place we are taking from. . let ptr = self.as_mut_ptr().add(index); . // copy it out, unsafely having a copy of the value on . // the stack and in the vector at the same time. 
676,400 ( 0.00%) ret = ptr::read(ptr); . . // Shift everything down to fill in that spot. 230,662 ( 0.00%) ptr::copy(ptr.offset(1), ptr, len - index - 1); . } 117,747 ( 0.00%) self.set_len(len - 1); . ret . } 547,880 ( 0.00%) } . . /// Retains only the elements specified by the predicate. . /// . /// In other words, remove all elements `e` such that `f(&e)` returns `false`. . /// This method operates in place, visiting each element exactly once in the . /// original order, and preserves the order of the retained elements. . /// . /// # Examples -- line 1431 ---------------------------------------- -- line 1442 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2, 3, 4, 5]; . /// let keep = [false, true, true, false, true]; . /// let mut iter = keep.iter(); . /// vec.retain(|_| *iter.next().unwrap()); . /// assert_eq!(vec, [2, 3, 5]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 8,414,611 ( 0.02%) pub fn retain(&mut self, mut f: F) . where . F: FnMut(&T) -> bool, . { 68,881 ( 0.00%) self.retain_mut(|elem| f(elem)); 8,287,064 ( 0.02%) } . . /// Retains only the elements specified by the predicate, passing a mutable reference to it. . /// . /// In other words, remove all elements `e` such that `f(&mut e)` returns `false`. . /// This method operates in place, visiting each element exactly once in the . /// original order, and preserves the order of the retained elements. . /// . /// # Examples -- line 1463 ---------------------------------------- -- line 1474 ---------------------------------------- . /// }); . /// assert_eq!(vec, [2, 3, 4]); . /// ``` . #[unstable(feature = "vec_retain_mut", issue = "90829")] . pub fn retain_mut(&mut self, mut f: F) . where . F: FnMut(&mut T) -> bool, . { 1,065,164 ( 0.00%) let original_len = self.len(); . // Avoid double drop if the drop guard is not executed, . // since we may make some holes during the process. . unsafe { self.set_len(0) }; . . // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked] . // |<- processed len ->| ^- next to check . // |<- deleted cnt ->| . // |<- original_len ->| -- line 1490 ---------------------------------------- -- line 1499 ---------------------------------------- . v: &'a mut Vec, . processed_len: usize, . deleted_cnt: usize, . original_len: usize, . } . . impl Drop for BackshiftOnDrop<'_, T, A> { . fn drop(&mut self) { 909,712 ( 0.00%) if self.deleted_cnt > 0 { . // SAFETY: Trailing unchecked items must be valid since we never touch them. . unsafe { . ptr::copy( . self.v.as_ptr().add(self.processed_len), 828,512 ( 0.00%) self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt), . self.original_len - self.processed_len, . ); . } . } . // SAFETY: After filling holes, all items are in contiguous memory. . unsafe { 2 ( 0.00%) self.v.set_len(self.original_len - self.deleted_cnt); . } . } . } . . let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len }; . . fn process_loop( . original_len: usize, . f: &mut F, . g: &mut BackshiftOnDrop<'_, T, A>, . ) where . F: FnMut(&mut T) -> bool, . { 11,691,422 ( 0.02%) while g.processed_len != original_len { . // SAFETY: Unchecked element must be valid. . let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) }; 2,427,714 ( 0.00%) if !f(cur) { . // Advance early to avoid double drop if `drop_in_place` panicked. 119,139 ( 0.00%) g.processed_len += 1; 44,746 ( 0.00%) g.deleted_cnt += 1; . // SAFETY: We never touch this element again after dropped. 
2,973 ( 0.00%) unsafe { ptr::drop_in_place(cur) }; . // We already advanced the counter. . if DELETED { . continue; . } else { . break; . } . } . if DELETED { . // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element. . // We use copy for move, and never touch this element again. . unsafe { 358,623 ( 0.00%) let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt); . ptr::copy_nonoverlapping(cur, hole_slot, 1); . } . } 295,627 ( 0.00%) g.processed_len += 1; . } . } . . // Stage 1: Nothing was deleted. . process_loop::(original_len, &mut f, &mut g); . . // Stage 2: Some elements were deleted. . process_loop::(original_len, &mut f, &mut g); -- line 1565 ---------------------------------------- -- line 1606 ---------------------------------------- . /// ``` . /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; . /// . /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); . /// . /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); . /// ``` . #[stable(feature = "dedup_by", since = "1.16.0")] 108,031 ( 0.00%) pub fn dedup_by(&mut self, mut same_bucket: F) . where . F: FnMut(&mut T, &mut T) -> bool, . { 29,562 ( 0.00%) let len = self.len(); 89,892 ( 0.00%) if len <= 1 { . return; . } . . /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ . struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> { . /* Offset of the element we want to check if it is duplicate */ . read: usize, . -- line 1627 ---------------------------------------- -- line 1670 ---------------------------------------- . let ptr = gap.vec.as_mut_ptr(); . . /* Drop items while going through Vec, it should be more efficient than . * doing slice partition_dedup + truncate */ . . /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr . * are always in-bounds and read_ptr never aliases prev_ptr */ . unsafe { 109,339 ( 0.00%) while gap.read < len { . let read_ptr = ptr.add(gap.read); . let prev_ptr = ptr.add(gap.write.wrapping_sub(1)); . 11,884 ( 0.00%) if same_bucket(&mut *read_ptr, &mut *prev_ptr) { . // Increase `gap.read` now since the drop may panic. . gap.read += 1; . /* We have found duplicate, drop it in-place */ . ptr::drop_in_place(read_ptr); . } else { . let write_ptr = ptr.add(gap.write); . . /* Because `read_ptr` can be equal to `write_ptr`, we either . * have to use `copy` or conditional `copy_nonoverlapping`. . * Looks like the first option is faster. */ . ptr::copy(read_ptr, write_ptr, 1); . . /* We have filled that place, so go further */ 60,325 ( 0.00%) gap.write += 1; . gap.read += 1; . } . } . . /* Technically we could let `gap` clean up with its Drop, but . * when `same_bucket` is guaranteed to not panic, this bloats a little . * the codegen, so we just do it manually */ . gap.vec.set_len(gap.write); . mem::forget(gap); . } 123,464 ( 0.00%) } . . /// Appends an element to the back of a collection. . /// . /// # Panics . /// . /// Panics if the new capacity exceeds `isize::MAX` bytes. . /// . /// # Examples -- line 1715 ---------------------------------------- -- line 1717 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2]; . /// vec.push(3); . /// assert_eq!(vec, [1, 2, 3]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] 39,555 ( 0.00%) pub fn push(&mut self, value: T) { . // This will panic or abort if we would allocate > isize::MAX bytes . // or if the length increment would overflow for zero-sized types. 
97,736,208 ( 0.18%) if self.len == self.buf.capacity() { 6,342,180 ( 0.01%) self.buf.reserve_for_push(self.len); . } . unsafe { 2,954,179 ( 0.01%) let end = self.as_mut_ptr().add(self.len); . ptr::write(end, value); 153,248,598 ( 0.29%) self.len += 1; . } 31,644 ( 0.00%) } . . /// Removes the last element from a vector and returns it, or [`None`] if it . /// is empty. . /// . /// If you'd like to pop the first element, consider using . /// [`VecDeque::pop_front`] instead. . /// . /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front -- line 1744 ---------------------------------------- -- line 1748 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2, 3]; . /// assert_eq!(vec.pop(), Some(3)); . /// assert_eq!(vec, [1, 2]); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn pop(&mut self) -> Option { 27,978,834 ( 0.05%) if self.len == 0 { 55 ( 0.00%) None . } else { . unsafe { 18,502,290 ( 0.03%) self.len -= 1; 5,486,288 ( 0.01%) Some(ptr::read(self.as_ptr().add(self.len()))) . } . } . } . . /// Moves all the elements of `other` into `Self`, leaving `other` empty. . /// . /// # Panics . /// -- line 1769 ---------------------------------------- -- line 1776 ---------------------------------------- . /// let mut vec2 = vec![4, 5, 6]; . /// vec.append(&mut vec2); . /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]); . /// assert_eq!(vec2, []); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "append", since = "1.4.0")] 12,145 ( 0.00%) pub fn append(&mut self, other: &mut Self) { . unsafe { . self.append_elements(other.as_slice() as _); . other.set_len(0); . } 10,410 ( 0.00%) } . . /// Appends elements to `Self` from other buffer. . #[cfg(not(no_global_oom_handling))] . #[inline] . unsafe fn append_elements(&mut self, other: *const [T]) { . let count = unsafe { (*other).len() }; . self.reserve(count); 1,453,727 ( 0.00%) let len = self.len(); . unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; 9,858,465 ( 0.02%) self.len += count; . } . . /// Creates a draining iterator that removes the specified range in the vector . /// and yields the removed items. . /// . /// When the iterator **is** dropped, all elements in the range are removed . /// from the vector, even if the iterator was not fully consumed. If the . /// iterator **is not** dropped (with [`mem::forget`] for example), it is -- line 1807 ---------------------------------------- -- line 1834 ---------------------------------------- . // When the Drain is first created, it shortens the length of . // the source vector to make sure no uninitialized or moved-from elements . // are accessible at all if the Drain's destructor never gets to run. . // . // Drain will ptr::read out the values to remove. . // When finished, remaining tail of the vec is copied back to cover . // the hole, and the vector length is restored to the new length. . // 374,975 ( 0.00%) let len = self.len(); . let Range { start, end } = slice::range(range, ..len); . . unsafe { . // set self.vec length's to start, to be safe in case Drain is leaked . self.set_len(start); . // Use the borrow in the IterMut to indicate borrowing behavior of the . // whole Drain iterator (like &mut T). 41,863 ( 0.00%) let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start); 11,197 ( 0.00%) Drain { . tail_start: end, 27,956 ( 0.00%) tail_len: len - end, . iter: range_slice.iter(), . vec: NonNull::from(self), . } . } . } . . 
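Note: the heavily counted branch in `Vec::push` above (`if self.len == self.buf.capacity()`) is the slow path that grows the buffer; with geometric growth it is taken only O(log n) times over n pushes, which is why its reallocation work stays far below the per-push length increment. A small, hedged illustration using only the public `Vec` API (nothing library-internal):

fn main() {
    let mut v: Vec<u64> = Vec::new();
    let mut reallocations = 0u32;
    let mut last_cap = v.capacity();
    for i in 0..1_000_000u64 {
        v.push(i);
        if v.capacity() != last_cap {
            reallocations += 1;        // capacity changed: the slow path ran
            last_cap = v.capacity();
        }
    }
    // With doubling-style growth this prints a reallocation count around 20
    // for a million pushes, not a million.
    println!("pushes: {}, reallocations: {}", v.len(), reallocations);
}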
/// Clears the vector, removing all values. . /// -- line 1861 ---------------------------------------- -- line 1869 ---------------------------------------- . /// . /// v.clear(); . /// . /// assert!(v.is_empty()); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn clear(&mut self) { 7,946,799 ( 0.02%) self.truncate(0) . } . . /// Returns the number of elements in the vector, also referred to . /// as its 'length'. . /// . /// # Examples . /// . /// ``` -- line 1885 ---------------------------------------- -- line 1900 ---------------------------------------- . /// let mut v = Vec::new(); . /// assert!(v.is_empty()); . /// . /// v.push(1); . /// assert!(!v.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn is_empty(&self) -> bool { 7,802,575 ( 0.01%) self.len() == 0 . } . . /// Splits the collection into two at the given index. . /// . /// Returns a newly allocated vector containing the elements in the range . /// `[at, len)`. After the call, the original vector will be left containing . /// the elements `[0, at)` with its previous capacity unchanged. . /// -- line 1916 ---------------------------------------- -- line 1935 ---------------------------------------- . A: Clone, . { . #[cold] . #[inline(never)] . fn assert_failed(at: usize, len: usize) -> ! { . panic!("`at` split index (is {}) should be <= len (is {})", at, len); . } . 31,969 ( 0.00%) if at > self.len() { . assert_failed(at, self.len()); . } . 24,533 ( 0.00%) if at == 0 { . // the new vector can take over the original buffer and avoid the copy . return mem::replace( . self, . Vec::with_capacity_in(self.capacity(), self.allocator().clone()), . ); . } . . let other_len = self.len - at; -- line 1955 ---------------------------------------- -- line 1988 ---------------------------------------- . /// . /// let mut vec = vec![]; . /// let mut p = 1; . /// vec.resize_with(4, || { p *= 2; p }); . /// assert_eq!(vec, [2, 4, 8, 16]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "vec_resize_with", since = "1.33.0")] 573,979 ( 0.00%) pub fn resize_with(&mut self, new_len: usize, f: F) . where . F: FnMut() -> T, . { 75,665 ( 0.00%) let len = self.len(); 151,330 ( 0.00%) if new_len > len { 511,759 ( 0.00%) self.extend_with(new_len - len, ExtendFunc(f)); . } else { . self.truncate(new_len); . } 498,314 ( 0.00%) } . . /// Consumes and leaks the `Vec`, returning a mutable reference to the contents, . /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime . /// `'a`. If the type has only static references, or none at all, then this . /// may be chosen to be `'static`. . /// . /// As of Rust 1.57, this method does not reallocate or shrink the `Vec`, . /// so the leaked allocation may include unused capacity that is not part -- line 2014 ---------------------------------------- -- line 2070 ---------------------------------------- . #[stable(feature = "vec_spare_capacity", since = "1.60.0")] . #[inline] . pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit] { . // Note: . // This method is not implemented in terms of `split_at_spare_mut`, . // to prevent invalidation of pointers to the buffer. . unsafe { . slice::from_raw_parts_mut( 4 ( 0.00%) self.as_mut_ptr().add(self.len) as *mut MaybeUninit, 140 ( 0.00%) self.buf.capacity() - self.len, . ) . } . } . . /// Returns vector content as a slice of `T`, along with the remaining spare . /// capacity of the vector as a slice of `MaybeUninit`. . /// . 
/// The returned spare capacity slice can be used to fill the vector with data -- line 2087 ---------------------------------------- -- line 2189 ---------------------------------------- . /// assert_eq!(vec, ["hello", "world", "world"]); . /// . /// let mut vec = vec![1, 2, 3, 4]; . /// vec.resize(2, 0); . /// assert_eq!(vec, [1, 2]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "vec_resize", since = "1.5.0")] 714,490 ( 0.00%) pub fn resize(&mut self, new_len: usize, value: T) { 71,449 ( 0.00%) let len = self.len(); . 214,347 ( 0.00%) if new_len > len { 135,168 ( 0.00%) self.extend_with(new_len - len, ExtendElement(value)) . } else { . self.truncate(new_len); . } 160,345 ( 0.00%) } . . /// Clones and appends all elements in a slice to the `Vec`. . /// . /// Iterates over the slice `other`, clones each element, and then appends . /// it to this `Vec`. The `other` slice is traversed in-order. . /// . /// Note that this function is same as [`extend`] except that it is . /// specialized to work with slices instead. If and when Rust gets -- line 2213 ---------------------------------------- -- line 2271 ---------------------------------------- . trait ExtendWith { . fn next(&mut self) -> T; . fn last(self) -> T; . } . . struct ExtendElement(T); . impl ExtendWith for ExtendElement { . fn next(&mut self) -> T { 25,124 ( 0.00%) self.0.clone() . } . fn last(self) -> T { . self.0 . } . } . . struct ExtendFunc(F); . impl T> ExtendWith for ExtendFunc { -- line 2287 ---------------------------------------- -- line 2291 ---------------------------------------- . fn last(mut self) -> T { . (self.0)() . } . } . . impl Vec { . #[cfg(not(no_global_oom_handling))] . /// Extend the vector by `n` values, using the given generator. 6,094,955 ( 0.01%) fn extend_with>(&mut self, n: usize, mut value: E) { . self.reserve(n); . . unsafe { 130,301 ( 0.00%) let mut ptr = self.as_mut_ptr().add(self.len()); . // Use SetLenOnDrop to work around bug where compiler . // might not realize the store through `ptr` through self.set_len() . // don't alias. . let mut local_len = SetLenOnDrop::new(&mut self.len); . . // Write all elements except the last one . for _ in 1..n { . ptr::write(ptr, value.next()); . ptr = ptr.offset(1); . // Increment the length in every step in case next() panics . local_len.increment_len(1); . } . 4,931,129 ( 0.01%) if n > 0 { . // We can write the last element directly without cloning needlessly . ptr::write(ptr, value.last()); . local_len.increment_len(1); . } . . // len set by scope guard . } 4,859,459 ( 0.01%) } . } . . impl Vec { . /// Removes consecutive repeated elements in the vector according to the . /// [`PartialEq`] trait implementation. . /// . /// If the vector is sorted, this removes all duplicates. . /// -- line 2333 ---------------------------------------- -- line 2338 ---------------------------------------- . /// . /// vec.dedup(); . /// . /// assert_eq!(vec, [1, 2, 3, 2]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . pub fn dedup(&mut self) { 15,434 ( 0.00%) self.dedup_by(|a, b| a == b) . } . } . . //////////////////////////////////////////////////////////////////////////////// . // Internal methods and functions . //////////////////////////////////////////////////////////////////////////////// . . #[doc(hidden)] . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 750,325 ( 0.00%) pub fn from_elem(elem: T, n: usize) -> Vec { 6,920,881 ( 0.01%) ::from_elem(elem, n, Global) 904,466 ( 0.00%) } . . 
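Note: `extend_with` above clones the fill value for every slot except the last and moves the original value into the final slot, saving one clone per call. The block below is a safe, simplified sketch of that pattern using only public `Vec` methods; `fill_with_clones` is a hypothetical helper, not the library routine (which writes through raw pointers behind a `SetLenOnDrop` guard).

fn fill_with_clones<T: Clone>(out: &mut Vec<T>, n: usize, value: T) {
    out.reserve(n);
    if n == 0 {
        return;
    }
    for _ in 1..n {
        out.push(value.clone()); // clone for all but the last element
    }
    out.push(value); // move the original into the final slot, skipping one clone
}

fn main() {
    let mut v = vec![0u8];
    fill_with_clones(&mut v, 3, 7u8);
    assert_eq!(v, [0, 7, 7, 7]);
}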
#[doc(hidden)] . #[cfg(not(no_global_oom_handling))] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { . ::from_elem(elem, n, alloc) . } . -- line 2367 ---------------------------------------- -- line 2424 ---------------------------------------- . // Common trait implementations for Vec . //////////////////////////////////////////////////////////////////////////////// . . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::Deref for Vec { . type Target = [T]; . . fn deref(&self) -> &[T] { 140,482,339 ( 0.27%) unsafe { slice::from_raw_parts(self.as_ptr(), self.len) } 37 ( 0.00%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::DerefMut for Vec { . fn deref_mut(&mut self) -> &mut [T] { 44,218,381 ( 0.08%) unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } . } . } . . #[cfg(not(no_global_oom_handling))] . trait SpecCloneFrom { . fn clone_from(this: &mut Self, other: &Self); . } . -- line 2447 ---------------------------------------- -- line 2468 ---------------------------------------- . this.extend_from_slice(other); . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Clone for Vec { . #[cfg(not(test))] 4,959,385 ( 0.01%) fn clone(&self) -> Self { . let alloc = self.allocator().clone(); 2 ( 0.00%) <[T]>::to_vec_in(&**self, alloc) 6,156,849 ( 0.01%) } . . // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is . // required for this method definition, is not available. Instead use the . // `slice::to_vec` function which is only available with cfg(test) . // NB see the slice::hack module in slice.rs for more information . #[cfg(test)] . fn clone(&self) -> Self { . let alloc = self.allocator().clone(); -- line 2487 ---------------------------------------- -- line 2518 ---------------------------------------- . message = "vector indices are of type `usize` or ranges of `usize`", . label = "vector indices are of type `usize` or ranges of `usize`" . )] . impl, A: Allocator> Index for Vec { . type Output = I::Output; . . #[inline] . fn index(&self, index: I) -> &Self::Output { 3,958,239 ( 0.01%) Index::index(&**self, index) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_on_unimplemented( . message = "vector indices are of type `usize` or ranges of `usize`", . label = "vector indices are of type `usize` or ranges of `usize`" . )] . impl, A: Allocator> IndexMut for Vec { . #[inline] . fn index_mut(&mut self, index: I) -> &mut Self::Output { 23,608 ( 0.00%) IndexMut::index_mut(&mut **self, index) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl FromIterator for Vec { . #[inline] . fn from_iter>(iter: I) -> Vec { 27,254,528 ( 0.05%) >::from_iter(iter.into_iter()) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl IntoIterator for Vec { . type Item = T; . type IntoIter = IntoIter; . -- line 2555 ---------------------------------------- -- line 2564 ---------------------------------------- . /// for s in v.into_iter() { . /// // s has type String, not &String . /// println!("{}", s); . /// } . /// ``` . #[inline] . fn into_iter(self) -> IntoIter { . unsafe { 11,167,145 ( 0.02%) let mut me = ManuallyDrop::new(self); . let alloc = ptr::read(me.allocator()); . let begin = me.as_mut_ptr(); . let end = if mem::size_of::() == 0 { . arith_offset(begin as *const i8, me.len() as isize) as *const T . } else { . begin.add(me.len()) as *const T . }; . 
let cap = me.buf.capacity(); 4,111,364 ( 0.01%) IntoIter { . buf: NonNull::new_unchecked(begin), . phantom: PhantomData, . cap, . alloc, . ptr: begin, . end, . } . } -- line 2589 ---------------------------------------- -- line 2591 ---------------------------------------- . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T, A: Allocator> IntoIterator for &'a Vec { . type Item = &'a T; . type IntoIter = slice::Iter<'a, T>; . . fn into_iter(self) -> slice::Iter<'a, T> { 138 ( 0.00%) self.iter() . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { . type Item = &'a mut T; . type IntoIter = slice::IterMut<'a, T>; . . fn into_iter(self) -> slice::IterMut<'a, T> { 1,180 ( 0.00%) self.iter_mut() . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Extend for Vec { . #[inline] 139,648 ( 0.00%) fn extend>(&mut self, iter: I) { 13,230,401 ( 0.02%) >::spec_extend(self, iter.into_iter()) 139,648 ( 0.00%) } . . #[inline] . fn extend_one(&mut self, item: T) { . self.push(item); . } . . #[inline] . fn extend_reserve(&mut self, additional: usize) { -- line 2627 ---------------------------------------- -- line 2636 ---------------------------------------- . fn extend_desugared>(&mut self, mut iterator: I) { . // This is the case for a general iterator. . // . // This function should be the moral equivalent of: . // . // for item in iterator { . // self.push(item); . // } 2,245,283 ( 0.00%) while let Some(element) = iterator.next() { 461,891 ( 0.00%) let len = self.len(); 7,112,282 ( 0.01%) if len == self.capacity() { 15,874 ( 0.00%) let (lower, _) = iterator.size_hint(); . self.reserve(lower.saturating_add(1)); . } . unsafe { . ptr::write(self.as_mut_ptr().add(len), element); . // Since next() executes user code which can panic we have to bump the length . // after each step. . // NB can't overflow since we would have had to alloc the address space 4,900,327 ( 0.01%) self.set_len(len + 1); . } . } 74,231 ( 0.00%) } . . /// Creates a splicing iterator that replaces the specified range in the vector . /// with the given `replace_with` iterator and yields the removed items. . /// `replace_with` does not need to be the same length as `range`. . /// . /// `range` is removed even if the iterator is not consumed until the end. . /// . /// It is unspecified how many elements are removed from the vector -- line 2666 ---------------------------------------- -- line 2693 ---------------------------------------- . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "vec_splice", since = "1.21.0")] . pub fn splice(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter, A> . where . R: RangeBounds, . I: IntoIterator, . { 153,012 ( 0.00%) Splice { drain: self.drain(range), replace_with: replace_with.into_iter() } . } . . /// Creates an iterator which uses a closure to determine if an element should be removed. . /// . /// If the closure returns true, then the element is removed and yielded. . /// If the closure returns false, the element will remain in the vector and will not be yielded . /// by the iterator. . /// -- line 2709 ---------------------------------------- -- line 2745 ---------------------------------------- . /// assert_eq!(evens, vec![2, 4, 6, 8, 14]); . /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); . /// ``` . #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] . 
pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A> . where . F: FnMut(&mut T) -> bool, . { 33,090 ( 0.00%) let old_len = self.len(); . . // Guard against us getting leaked (leak amplification) . unsafe { . self.set_len(0); . } . 99,408 ( 0.00%) DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false } . } . } . . /// Extend implementation that copies elements out of references before pushing them onto the Vec. . /// . /// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to . /// append the entire slice at once. . /// -- line 2768 ---------------------------------------- -- line 2803 ---------------------------------------- . #[inline] . fn cmp(&self, other: &Self) -> Ordering { . Ord::cmp(&**self, &**other) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { 12,618,892 ( 0.02%) fn drop(&mut self) { . unsafe { . // use drop for [T] . // use a raw slice to refer to the elements of the vector as weakest necessary type; . // could avoid questions of validity in certain cases 7,075,813 ( 0.01%) ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len)) . } . // RawVec handles deallocation 14,874,462 ( 0.03%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] . impl const Default for Vec { . /// Creates an empty `Vec`. . fn default() -> Vec { . Vec::new() -- line 2827 ---------------------------------------- -- line 2976 ---------------------------------------- . /// newly-allocated buffer with exactly the right capacity. . /// . /// # Examples . /// . /// ``` . /// assert_eq!(Box::from(vec![1, 2, 3]), vec![1, 2, 3].into_boxed_slice()); . /// ``` . fn from(v: Vec) -> Self { 4 ( 0.00%) v.into_boxed_slice() . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl From<&str> for Vec { . /// Allocate a `Vec` and fill it with a UTF-8 string. . /// -- line 2992 ---------------------------------------- 35,417,442 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs -------------------------------------------------------------------------------- Ir -- line 36 ---------------------------------------- . use rustc_session::parse::ParseSess; . use rustc_span::source_map::{MultiSpan, Span, DUMMY_SP}; . use rustc_span::symbol::{kw, sym, Ident, Symbol}; . use tracing::debug; . . use std::ops::Range; . use std::{cmp, mem, slice}; . 273,849 ( 0.00%) bitflags::bitflags! { . struct Restrictions: u8 { . const STMT_EXPR = 1 << 0; . const NO_STRUCT_LITERAL = 1 << 1; . const CONST_EXPR = 1 << 2; . } . } . . #[derive(Clone, Copy, PartialEq, Debug)] -- line 52 ---------------------------------------- -- line 104 ---------------------------------------- . $self.bump(); . return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty); . } . } . } . }; . } . 693,607 ( 0.00%) #[derive(Clone)] . pub struct Parser<'a> { 23,647 ( 0.00%) pub sess: &'a ParseSess, . /// The current token. 16,800 ( 0.00%) pub token: Token, . /// The spacing for the current token 11,405 ( 0.00%) pub token_spacing: Spacing, . /// The previous token. 
25,200 ( 0.00%) pub prev_token: Token, 14,410 ( 0.00%) pub capture_cfg: bool, 31,825 ( 0.00%) restrictions: Restrictions, 22,810 ( 0.00%) expected_tokens: Vec, . // Important: This must only be advanced from `next_tok` . // to ensure that `token_cursor.num_next_calls` is updated properly . token_cursor: TokenCursor, 22,810 ( 0.00%) desugar_doc_comments: bool, . /// This field is used to keep track of how many left angle brackets we have seen. This is . /// required in order to detect extra leading left angle brackets (`<` characters) and error . /// appropriately. . /// . /// See the comments in the `parse_path_segment` function for more details. 22,810 ( 0.00%) unmatched_angle_bracket_count: u32, 34,215 ( 0.00%) max_angle_bracket_count: u32, . /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery . /// it gets removed from here. Every entry left at the end gets emitted as an independent . /// error. 11,405 ( 0.00%) pub(super) unclosed_delims: Vec, . last_unexpected_token_span: Option, . /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it . /// looked like it could have been a mistyped path or literal `Option:Some(42)`). . pub last_type_ascription: Option<(Span, bool /* likely path typo */)>, . /// If present, this `Parser` is not parsing Rust code but rather a macro call. . subparser_name: Option<&'static str>, . capture_state: CaptureState, . /// This allows us to recover when the user forget to add braces around -- line 146 ---------------------------------------- -- line 173 ---------------------------------------- . /// the first macro inner attribute to invoke a proc-macro). . /// When create a `TokenStream`, the inner attributes get inserted . /// into the proper place in the token stream. . pub type ReplaceRange = (Range, Vec<(FlatToken, Spacing)>); . . /// Controls how we capture tokens. Capturing can be expensive, . /// so we try to avoid performing capturing in cases where . /// we will never need an `AttrAnnotatedTokenStream` 11,405 ( 0.00%) #[derive(Copy, Clone)] . pub enum Capturing { . /// We aren't performing any capturing - this is the default mode. . No, . /// We are capturing tokens . Yes, . } . 49,462 ( 0.00%) #[derive(Clone)] . struct CaptureState { 45,620 ( 0.00%) capturing: Capturing, 11,405 ( 0.00%) replace_ranges: Vec, . inner_attr_ranges: FxHashMap, . } . . impl<'a> Drop for Parser<'a> { . fn drop(&mut self) { 80,653 ( 0.00%) emit_unclosed_delims(&mut self.unclosed_delims, &self.sess); . } . } . 1,319,168 ( 0.00%) #[derive(Clone)] . struct TokenCursor { . frame: TokenCursorFrame, 295,736 ( 0.00%) stack: Vec, . desugar_doc_comments: bool, . // Counts the number of calls to `next` or `next_desugared`, . // depending on whether `desugar_doc_comments` is set. 96,288 ( 0.00%) num_next_calls: usize, . // During parsing, we may sometimes need to 'unglue' a . // glued token into two component tokens . // (e.g. '>>' into '>' and '>), so that the parser . // can consume them one at a time. This process . // bypasses the normal capturing mechanism . // (e.g. `num_next_calls` will not be incremented), . // since the 'unglued' tokens due not exist in . // the original `TokenStream`. -- line 217 ---------------------------------------- -- line 226 ---------------------------------------- . // in `Option>` requires us to unglue . // the trailing `>>` token. The `break_last_token` . // field is used to track this token - it gets . // appended to the captured stream when . 
// we evaluate a `LazyTokenStream` . break_last_token: bool, . } . 595,022 ( 0.00%) #[derive(Clone)] . struct TokenCursorFrame { 212,054 ( 0.00%) delim: token::DelimToken, . span: DelimSpan, . open_delim: bool, 424,108 ( 0.00%) tree_cursor: tokenstream::Cursor, . close_delim: bool, . } . . impl TokenCursorFrame { . fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self { 65,640 ( 0.00%) TokenCursorFrame { . delim, . span, . open_delim: false, 125,119 ( 0.00%) tree_cursor: tts.into_trees(), . close_delim: false, . } . } . } . . impl TokenCursor { 11,451,288 ( 0.02%) fn next(&mut self) -> (Token, Spacing) { . loop { 5,451,091 ( 0.01%) let (tree, spacing) = if !self.frame.open_delim { 98,721 ( 0.00%) self.frame.open_delim = true; 493,605 ( 0.00%) TokenTree::open_tt(self.frame.span, self.frame.delim).into() 11,157,191 ( 0.02%) } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() { . tree 403,814 ( 0.00%) } else if !self.frame.close_delim { 94,616 ( 0.00%) self.frame.close_delim = true; 473,080 ( 0.00%) TokenTree::close_tt(self.frame.span, self.frame.delim).into() 186,826 ( 0.00%) } else if let Some(frame) = self.stack.pop() { 1,214,369 ( 0.00%) self.frame = frame; . continue; . } else { 249,804 ( 0.00%) (TokenTree::Token(Token::new(token::Eof, DUMMY_SP)), Spacing::Alone) . }; . 2,105,706 ( 0.00%) match tree { . TokenTree::Token(token) => { 7,634,192 ( 0.01%) return (token, spacing); . } . TokenTree::Delimited(sp, delim, tts) => { . let frame = TokenCursorFrame::new(sp, delim, tts); . self.stack.push(mem::replace(&mut self.frame, frame)); . } . } . } 7,634,192 ( 0.01%) } . 1,116,820 ( 0.00%) fn next_desugared(&mut self) -> (Token, Spacing) { 335,330 ( 0.00%) let (data, attr_style, sp) = match self.next() { 426 ( 0.00%) (Token { kind: token::DocComment(_, attr_style, data), span }, _) => { . (data, attr_style, span) . } 557,700 ( 0.00%) tok => return tok, . }; . . // Searches for the occurrences of `"#*` and returns the minimum number of `#`s . // required to wrap the text. . let mut num_of_hashes = 0; . let mut count = 0; 34,268 ( 0.00%) for ch in data.as_str().chars() { . count = match ch { . '"' => 1, 2 ( 0.00%) '#' if count > 0 => count + 1, . _ => 0, . }; . num_of_hashes = cmp::max(num_of_hashes, count); . } . 568 ( 0.00%) let delim_span = DelimSpan::from_single(sp); 710 ( 0.00%) let body = TokenTree::Delimited( . delim_span, . token::Bracket, 1,704 ( 0.00%) [ 994 ( 0.00%) TokenTree::token(token::Ident(sym::doc, false), sp), 568 ( 0.00%) TokenTree::token(token::Eq, sp), 1,136 ( 0.00%) TokenTree::token(TokenKind::lit(token::StrRaw(num_of_hashes), data, None), sp), . ] . .iter() . .cloned() . .collect::(), 142 ( 0.00%) ); . . self.stack.push(mem::replace( . &mut self.frame, . TokenCursorFrame::new( . delim_span, . token::NoDelim, 142 ( 0.00%) if attr_style == AttrStyle::Inner { . [TokenTree::token(token::Pound, sp), TokenTree::token(token::Not, sp), body] . .iter() . .cloned() . .collect::() . } else { 1,704 ( 0.00%) [TokenTree::token(token::Pound, sp), body] . .iter() . .cloned() . .collect::() . }, . ), . )); . 426 ( 0.00%) self.next() 1,005,138 ( 0.00%) } . } . 622,500 ( 0.00%) #[derive(Debug, Clone, PartialEq)] . enum TokenType { 498,000 ( 0.00%) Token(TokenKind), . Keyword(Symbol), . Operator, . Lifetime, . Ident, . Path, . Type, . Const, . } -- line 353 ---------------------------------------- -- line 378 ---------------------------------------- . /// The separator token. . sep: Option, . /// `true` if a trailing separator is allowed. . 
trailing_sep_allowed: bool, . } . . impl SeqSep { . fn trailing_allowed(t: TokenKind) -> SeqSep { 84 ( 0.00%) SeqSep { sep: Some(t), trailing_sep_allowed: true } . } . . fn none() -> SeqSep { . SeqSep { sep: None, trailing_sep_allowed: false } . } . } . . pub enum FollowedByType { . Yes, . No, . } . . fn token_descr_opt(token: &Token) -> Option<&'static str> { 18,672 ( 0.00%) Some(match token.kind { 37,344 ( 0.00%) _ if token.is_special_ident() => "reserved identifier", 37,344 ( 0.00%) _ if token.is_used_keyword() => "keyword", 37,344 ( 0.00%) _ if token.is_unused_keyword() => "reserved keyword", . token::DocComment(..) => "doc comment", . _ => return None, . }) . } . 65,352 ( 0.00%) pub(super) fn token_descr(token: &Token) -> String { 18,672 ( 0.00%) let token_str = pprust::token_to_string(token); . match token_descr_opt(token) { . Some(prefix) => format!("{} `{}`", prefix, token_str), 65,352 ( 0.00%) _ => format!("`{}`", token_str), . } 46,680 ( 0.00%) } . . impl<'a> Parser<'a> { 183,792 ( 0.00%) pub fn new( . sess: &'a ParseSess, . tokens: TokenStream, . desugar_doc_comments: bool, . subparser_name: Option<&'static str>, . ) -> Self { 13,128 ( 0.00%) let mut start_frame = TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens); 26,256 ( 0.00%) start_frame.open_delim = true; . start_frame.close_delim = true; . 406,968 ( 0.00%) let mut parser = Parser { . sess, 13,128 ( 0.00%) token: Token::dummy(), . token_spacing: Spacing::Alone, 13,128 ( 0.00%) prev_token: Token::dummy(), . capture_cfg: false, . restrictions: Restrictions::empty(), . expected_tokens: Vec::new(), . token_cursor: TokenCursor { 78,768 ( 0.00%) frame: start_frame, . stack: Vec::new(), . num_next_calls: 0, . desugar_doc_comments, . break_last_token: false, . }, . desugar_doc_comments, . unmatched_angle_bracket_count: 0, . max_angle_bracket_count: 0, -- line 445 ---------------------------------------- -- line 451 ---------------------------------------- . capturing: Capturing::No, . replace_ranges: Vec::new(), . inner_attr_ranges: Default::default(), . }, . current_closure: None, . }; . . // Make parser point to the first token. 26,256 ( 0.00%) parser.bump(); . . parser 118,152 ( 0.00%) } . . fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) { . loop { 5,285,142 ( 0.01%) let (mut next, spacing) = if self.desugar_doc_comments { 446,728 ( 0.00%) self.token_cursor.next_desugared() . } else { 2,307,525 ( 0.00%) self.token_cursor.next() . }; 3,523,428 ( 0.01%) self.token_cursor.num_next_calls += 1; . // We've retrieved an token from the underlying . // cursor, so we no longer need to worry about . // an unglued token. See `break_and_eat` for more details 880,857 ( 0.00%) self.token_cursor.break_last_token = false; 3,523,428 ( 0.01%) if next.span.is_dummy() { . // Tweak the location for better diagnostics, but keep syntactic context intact. 52,624 ( 0.00%) next.span = fallback_span.with_ctxt(next.span.ctxt()); . } 2,914,152 ( 0.01%) if matches!( 1,761,714 ( 0.00%) next.kind, . token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) . ) { . continue; . } 3,522,292 ( 0.01%) return (next, spacing); . } . } . . pub fn unexpected(&mut self) -> PResult<'a, T> { . match self.expect_one_of(&[], &[]) { . Err(e) => Err(e), . // We can get `Ok(true)` from `recover_closing_delimiter` . // which is called in `expected_one_of_not_found`. . Ok(_) => FatalError.raise(), . } . } . . /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. 
783,045 ( 0.00%) pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { 87,005 ( 0.00%) if self.expected_tokens.is_empty() { 68,448 ( 0.00%) if self.token == *t { 57,040 ( 0.00%) self.bump(); . Ok(false) . } else { . self.unexpected_try_recover(t) . } . } else { 907,164 ( 0.00%) self.expect_one_of(slice::from_ref(t), &[]) . } 1,305,075 ( 0.00%) } . . /// Expect next token to be edible or inedible token. If edible, . /// then consume it; if inedible, then return without consuming . /// anything. Signal a fatal error if next token is unexpected. 1,151,388 ( 0.00%) pub fn expect_one_of( . &mut self, . edible: &[TokenKind], . inedible: &[TokenKind], . ) -> PResult<'a, bool /* recovered */> { 191,898 ( 0.00%) if edible.contains(&self.token.kind) { 255,801 ( 0.00%) self.bump(); . Ok(false) . } else if inedible.contains(&self.token.kind) { . // leave it in the input . Ok(false) . } else if self.last_unexpected_token_span == Some(self.token.span) { . FatalError.raise(); . } else { . self.expected_one_of_not_found(edible, inedible) . } 1,439,235 ( 0.00%) } . . // Public for rustfmt usage. . pub fn parse_ident(&mut self) -> PResult<'a, Ident> { 979,227 ( 0.00%) self.parse_ident_common(true) . } . . fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> { 741,075 ( 0.00%) self.token.ident().ok_or_else(|| match self.prev_token.kind { . TokenKind::DocComment(..) => { . self.span_err(self.prev_token.span, Error::UselessDocComment) . } . _ => self.expected_ident_found(), . }) . } . 1,711,577 ( 0.00%) fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> { 244,511 ( 0.00%) let (ident, is_raw) = self.ident_or_err()?; 1,467,051 ( 0.00%) if !is_raw && ident.is_reserved() { . let mut err = self.expected_ident_found(); . if recover { . err.emit(); . } else { . return Err(err); . } . } 978,044 ( 0.00%) self.bump(); . Ok(ident) 2,445,110 ( 0.00%) } . . /// Checks if the next token is `tok`, and returns `true` if so. . /// . /// This method will automatically add `tok` to `expected_tokens` if `tok` is not . /// encountered. 17,624,698 ( 0.03%) fn check(&mut self, tok: &TokenKind) -> bool { 7,749,802 ( 0.01%) let is_present = self.token == *tok; 5,241,428 ( 0.01%) if !is_present { 10,459,483 ( 0.02%) self.expected_tokens.push(TokenType::Token(tok.clone())); . } . is_present 17,624,698 ( 0.03%) } . . /// Consumes a token 'tok' if it exists. Returns whether the given token was present. 136 ( 0.00%) pub fn eat(&mut self, tok: &TokenKind) -> bool { 3,567,150 ( 0.01%) let is_present = self.check(tok); 2,283,047 ( 0.00%) if is_present { 489,518 ( 0.00%) self.bump() . } . is_present 170 ( 0.00%) } . . /// If the next token is the given keyword, returns `true` without eating it. . /// An expectation is also added for diagnostics purposes. 686,630 ( 0.00%) fn check_keyword(&mut self, kw: Symbol) -> bool { 112,162 ( 0.00%) self.expected_tokens.push(TokenType::Keyword(kw)); 4,199,035 ( 0.01%) self.token.is_keyword(kw) . } . . /// If the next token is the given keyword, eats it and returns `true`. . /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes. . // Public for rustfmt usage. 1,519,120 ( 0.00%) pub fn eat_keyword(&mut self, kw: Symbol) -> bool { 916,440 ( 0.00%) if self.check_keyword(kw) { 88,491 ( 0.00%) self.bump(); . true . } else { . false . } 1,519,120 ( 0.00%) } . . fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool { 152,980 ( 0.00%) if self.token.is_keyword(kw) { 30,901 ( 0.00%) self.bump(); . true . } else { . false . 
} . } . . /// If the given word is not a keyword, signals an error. . /// If the next token is not the given word, signals an error. . /// Otherwise, eats it. 12,155 ( 0.00%) fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> { . if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) } 9,724 ( 0.00%) } . . /// Is the given keyword `kw` followed by a non-reserved identifier? 592,312 ( 0.00%) fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool { 296,156 ( 0.00%) self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()) 666,351 ( 0.00%) } . 1,210,770 ( 0.00%) fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool { 441,532 ( 0.00%) if ok { . true . } else { 252,920 ( 0.00%) self.expected_tokens.push(typ); . false . } 1,210,770 ( 0.00%) } . . fn check_ident(&mut self) -> bool { 285,232 ( 0.00%) self.check_or_expected(self.token.is_ident(), TokenType::Ident) . } . 369,784 ( 0.00%) fn check_path(&mut self) -> bool { 922,199 ( 0.00%) self.check_or_expected(self.token.is_path_start(), TokenType::Path) 462,230 ( 0.00%) } . . fn check_type(&mut self) -> bool { 136,260 ( 0.00%) self.check_or_expected(self.token.can_begin_type(), TokenType::Type) . } . . fn check_const_arg(&mut self) -> bool { 96,528 ( 0.00%) self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const) . } . 312,129 ( 0.00%) fn check_inline_const(&self, dist: usize) -> bool { 173,405 ( 0.00%) self.is_keyword_ahead(dist, &[kw::Const]) . && self.look_ahead(dist + 1, |t| match t.kind { . token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)), . token::OpenDelim(DelimToken::Brace) => true, . _ => false, . }) 312,129 ( 0.00%) } . . /// Checks to see if the next token is either `+` or `+=`. . /// Otherwise returns `false`. . fn check_plus(&mut self) -> bool { 117,864 ( 0.00%) self.check_or_expected( 58,932 ( 0.00%) self.token.is_like_plus(), 58,932 ( 0.00%) TokenType::Token(token::BinOp(token::Plus)), . ) . } . . /// Eats the expected token if it's present possibly breaking . /// compound tokens like multi-character operators in process. . /// Returns `true` if the token was eaten. 1,632,672 ( 0.00%) fn break_and_eat(&mut self, expected: TokenKind) -> bool { 952,392 ( 0.00%) if self.token.kind == expected { 82,958 ( 0.00%) self.bump(); . return true; . } 472,885 ( 0.00%) match self.token.kind.break_two_token_op() { 1,141 ( 0.00%) Some((first, second)) if first == expected => { 652 ( 0.00%) let first_span = self.sess.source_map().start_point(self.token.span); 815 ( 0.00%) let second_span = self.token.span.with_lo(first_span.hi()); 1,467 ( 0.00%) self.token = Token::new(first, first_span); . // Keep track of this token - if we end token capturing now, . // we'll want to append this token to the captured stream. . // . // If we consume any additional tokens, then this token . // is not needed (we'll capture the entire 'glued' token), . // and `next_tok` will set this field to `None` 163 ( 0.00%) self.token_cursor.break_last_token = true; . // Use the spacing of the glued token as the spacing . // of the unglued second token. 2,282 ( 0.00%) self.bump_with((Token::new(second, second_span), self.token_spacing)); . true . } . _ => { 283,242 ( 0.00%) self.expected_tokens.push(TokenType::Token(expected)); . false . } . } 1,602,160 ( 0.00%) } . . /// Eats `+` possibly breaking tokens like `+=` in process. . fn eat_plus(&mut self) -> bool { 13,320 ( 0.00%) self.break_and_eat(token::BinOp(token::Plus)) . } . . 
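
The `check`/`eat`/`bump` calls that dominate the parser counts above follow one pattern: `check` compares against the current token and records a missed expectation for later diagnostics, while `eat` consumes the token only when it matches. A minimal standalone sketch of that pattern, using toy token types rather than the real rustc_ast tokens:

#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok {
    Ident,
    Plus,
    Comma,
    Eof,
}

struct Cursor {
    toks: Vec<Tok>,
    pos: usize,
    // Mirrors the idea behind Parser::expected_tokens: misses are recorded so a
    // later error can list which tokens would have been accepted here.
    expected: Vec<Tok>,
}

impl Cursor {
    fn current(&self) -> Tok {
        *self.toks.get(self.pos).unwrap_or(&Tok::Eof)
    }

    // Like check: non-destructive test that records the expectation on a miss.
    fn check(&mut self, t: Tok) -> bool {
        let present = self.current() == t;
        if !present {
            self.expected.push(t);
        }
        present
    }

    // Like eat: consume the token only if it is present.
    fn eat(&mut self, t: Tok) -> bool {
        let present = self.check(t);
        if present {
            self.pos += 1;
            self.expected.clear(); // expectations reset once the cursor advances
        }
        present
    }
}

fn main() {
    let mut c = Cursor { toks: vec![Tok::Ident, Tok::Plus, Tok::Ident], pos: 0, expected: Vec::new() };
    assert!(c.eat(Tok::Ident));
    assert!(!c.eat(Tok::Comma)); // miss: Comma is now recorded in c.expected
    assert_eq!(c.expected, vec![Tok::Comma]);
    assert!(c.eat(Tok::Plus));
}
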
/// Eats `&` possibly breaking tokens like `&&` in process. . /// Signals an error if `&` is not eaten. . fn expect_and(&mut self) -> PResult<'a, ()> { 47,640 ( 0.00%) if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() } . } . . /// Eats `|` possibly breaking tokens like `||` in process. . /// Signals an error if `|` was not eaten. . fn expect_or(&mut self) -> PResult<'a, ()> { 168 ( 0.00%) if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() } . } . . /// Eats `<` possibly breaking tokens like `<<` in process. 84,292 ( 0.00%) fn eat_lt(&mut self) -> bool { 343,730 ( 0.00%) let ate = self.break_and_eat(token::Lt); 228,184 ( 0.00%) if ate { . // See doc comment for `unmatched_angle_bracket_count`. 68,876 ( 0.00%) self.unmatched_angle_bracket_count += 1; 74,533 ( 0.00%) self.max_angle_bracket_count += 1; . debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count); . } . ate 84,292 ( 0.00%) } . . /// Eats `<` possibly breaking tokens like `<<` in process. . /// Signals an error if `<` was not eaten. . fn expect_lt(&mut self) -> PResult<'a, ()> { . if self.eat_lt() { Ok(()) } else { self.unexpected() } . } . . /// Eats `>` possibly breaking tokens like `>>` in process. . /// Signals an error if `>` was not eaten. . fn expect_gt(&mut self) -> PResult<'a, ()> { 98,682 ( 0.00%) if self.break_and_eat(token::Gt) { . // See doc comment for `unmatched_angle_bracket_count`. 49,341 ( 0.00%) if self.unmatched_angle_bracket_count > 0 { 32,894 ( 0.00%) self.unmatched_angle_bracket_count -= 1; . debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count); . } . Ok(()) . } else { . self.unexpected() . } . } . . fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool { . kets.iter().any(|k| match expect { 240,510 ( 0.00%) TokenExpectType::Expect => self.check(k), 420 ( 0.00%) TokenExpectType::NoExpect => self.token == **k, . }) . } . . fn parse_seq_to_before_tokens( . &mut self, . kets: &[&TokenKind], . sep: SeqSep, . expect: TokenExpectType, -- line 759 ---------------------------------------- -- line 761 ---------------------------------------- . ) -> PResult<'a, (Vec, bool /* trailing */, bool /* recovered */)> { . let mut first = true; . let mut recovered = false; . let mut trailing = false; . let mut v = vec![]; . let unclosed_delims = !self.unclosed_delims.is_empty(); . . while !self.expect_any_with_type(kets, expect) { 258,123 ( 0.00%) if let token::CloseDelim(..) | token::Eof = self.token.kind { . break; . } 208,338 ( 0.00%) if let Some(ref t) = sep.sep { 223,917 ( 0.00%) if first { . first = false; . } else { 71,260 ( 0.00%) match self.expect(t) { . Ok(false) => { . self.current_closure.take(); . } . Ok(true) => { . self.current_closure.take(); . recovered = true; . break; . } -- line 784 ---------------------------------------- -- line 857 ---------------------------------------- . e.cancel(); . break; . } . } . } . } . } . } 92,204 ( 0.00%) if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) { . trailing = true; . break; . } . 74,947 ( 0.00%) let t = f(self)?; 33,227 ( 0.00%) v.push(t); . } . 149,024 ( 0.00%) Ok((v, trailing, recovered)) . } . . fn recover_missing_braces_around_closure_body( . &mut self, . closure_spans: ClosureSpans, . mut expect_err: DiagnosticBuilder<'_>, . ) -> PResult<'a, ()> { . 
let initial_semicolon = self.token.span; -- line 882 ---------------------------------------- -- line 937 ---------------------------------------- . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. . fn parse_seq_to_before_end( . &mut self, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool, bool)> { 149,719 ( 0.00%) self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) . } . . /// Parses a sequence, including the closing delimiter. The function . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. 219,390 ( 0.00%) fn parse_seq_to_end( . &mut self, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool /* trailing */)> { 96,656 ( 0.00%) let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; 52,256 ( 0.00%) if !recovered { . self.eat(ket); . } 247,373 ( 0.00%) Ok((val, trailing)) 175,512 ( 0.00%) } . . /// Parses a sequence, including the closing delimiter. The function . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. . fn parse_unspanned_seq( . &mut self, . bra: &TokenKind, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { 91,911 ( 0.00%) self.expect(bra)?; 175,512 ( 0.00%) self.parse_seq_to_end(ket, sep, f) . } . . fn parse_delim_comma_seq( . &mut self, . delim: DelimToken, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { . self.parse_unspanned_seq( 36,998 ( 0.00%) &token::OpenDelim(delim), 52,466 ( 0.00%) &token::CloseDelim(delim), . SeqSep::trailing_allowed(token::Comma), . f, . ) . } . . fn parse_paren_comma_seq( . &mut self, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { . self.parse_delim_comma_seq(token::Paren, f) . } . . /// Advance the parser by one token using provided token as the next one. 11,449,568 ( 0.02%) fn bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) { . // Bumping after EOF is a bad sign, usually an infinite loop. 5,284,416 ( 0.01%) if self.prev_token.kind == TokenKind::Eof { . let msg = "attempted to bump the parser past EOF (may be stuck in a loop)"; . self.span_bug(self.token.span, msg); . } . . // Update the current and previous tokens. 3,522,944 ( 0.01%) self.prev_token = mem::replace(&mut self.token, next_token); 880,736 ( 0.00%) self.token_spacing = next_spacing; . . // Diagnostics. 880,736 ( 0.00%) self.expected_tokens.clear(); . } . . /// Advance the parser by one token. 7,044,584 ( 0.01%) pub fn bump(&mut self) { 3,522,292 ( 0.01%) let next_token = self.next_tok(self.token.span); 5,283,438 ( 0.01%) self.bump_with(next_token); 7,044,584 ( 0.01%) } . . /// Look-ahead `dist` tokens of `self.token` and get access to that token there. . /// When `dist == 0` then the current token is looked at. 272 ( 0.00%) pub fn look_ahead(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R { 99,112 ( 0.00%) if dist == 0 { 46,004 ( 0.00%) return looker(&self.token); . } . 467,945 ( 0.00%) let frame = &self.token_cursor.frame; 517,389 ( 0.00%) if frame.delim != DelimToken::NoDelim { . let all_normal = (0..dist).all(|i| { 1,455,738 ( 0.00%) let token = frame.tree_cursor.look_ahead(i); 1,901,432 ( 0.00%) !matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _))) . }); . 
if all_normal { 2,358,530 ( 0.00%) return match frame.tree_cursor.look_ahead(dist - 1) { 844,593 ( 0.00%) Some(tree) => match tree { 420,421 ( 0.00%) TokenTree::Token(token) => looker(token), . TokenTree::Delimited(dspan, delim, _) => { 44,867 ( 0.00%) looker(&Token::new(token::OpenDelim(*delim), dspan.open)) . } . }, 395,805 ( 0.00%) None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)), . }; . } . } . . let mut cursor = self.token_cursor.clone(); . let mut i = 0; 43,815 ( 0.00%) let mut token = Token::dummy(); 484 ( 0.00%) while i < dist { 306,863 ( 0.00%) token = cursor.next().0; 139,670 ( 0.00%) if matches!( 87,622 ( 0.00%) token.kind, . token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) . ) { . continue; . } 210 ( 0.00%) i += 1; . } 13,050 ( 0.00%) return looker(&token); 306 ( 0.00%) } . . /// Returns whether any of the given keywords are `dist` tokens ahead of the current one. 381,420 ( 0.00%) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool { 96,592 ( 0.00%) self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw))) 381,420 ( 0.00%) } . . /// Parses asyncness: `async` or nothing. . fn parse_asyncness(&mut self) -> Async { . if self.eat_keyword(kw::Async) { . let span = self.prev_token.uninterpolated_span(); . Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID } . } else { . Async::No . } . } . . /// Parses unsafety: `unsafe` or nothing. 11,580 ( 0.00%) fn parse_unsafety(&mut self) -> Unsafe { . if self.eat_keyword(kw::Unsafe) { 5 ( 0.00%) Unsafe::Yes(self.prev_token.uninterpolated_span()) . } else { . Unsafe::No . } 46,320 ( 0.00%) } . . /// Parses constness: `const` or nothing. 171,088 ( 0.00%) fn parse_constness(&mut self) -> Const { . // Avoid const blocks to be parsed as const items 46,110 ( 0.00%) if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace)) . && self.eat_keyword(kw::Const) . { 5,515 ( 0.00%) Const::Yes(self.prev_token.uninterpolated_span()) . } else { . Const::No . } 299,404 ( 0.00%) } . . /// Parses inline const expressions. . fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P> { . if pat { . self.sess.gated_spans.gate(sym::inline_const_pat, span); . } else { . self.sess.gated_spans.gate(sym::inline_const, span); . } -- line 1104 ---------------------------------------- -- line 1108 ---------------------------------------- . id: DUMMY_NODE_ID, . value: self.mk_expr(blk.span, ExprKind::Block(blk, None), AttrVec::new()), . }; . let blk_span = anon_const.value.span; . Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::new())) . } . . /// Parses mutability (`mut` or nothing). 476 ( 0.00%) fn parse_mutability(&mut self) -> Mutability { . if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not } 16,594 ( 0.00%) } . . /// Possibly parses mutability (`const` or `mut`). . fn parse_const_or_mut(&mut self) -> Option { . if self.eat_keyword(kw::Mut) { . Some(Mutability::Mut) . } else if self.eat_keyword(kw::Const) { . Some(Mutability::Not) . } else { . None . } . } . . fn parse_field_name(&mut self) -> PResult<'a, Ident> { 3,998 ( 0.00%) if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind . { 80 ( 0.00%) self.expect_no_suffix(self.token.span, "a tuple index", suffix); 32 ( 0.00%) self.bump(); . Ok(Ident::new(symbol, self.prev_token.span)) . } else { 5,853 ( 0.00%) self.parse_ident_common(true) . } 48 ( 0.00%) } . . 
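
The `look_ahead` path above is hot because routines such as `parse_constness` and `is_kw_followed_by_ident` peek `dist` tokens past the cursor without consuming anything. A minimal sketch of that idea over a plain token slice (hypothetical helper, not the rustc implementation, which additionally has to walk delimited token trees):

// Peek at the token `dist` positions ahead of `pos` without advancing the cursor.
fn look_ahead<T: Copy, R>(
    toks: &[T],
    pos: usize,
    dist: usize,
    looker: impl FnOnce(Option<T>) -> R,
) -> R {
    looker(toks.get(pos + dist).copied())
}

fn main() {
    let toks = ["const", "fn", "main"];
    // Decide whether `const` starts an item without consuming it:
    // only commit if the *next* token is not an opening brace.
    let const_item = toks[0] == "const" && look_ahead(&toks, 0, 1, |t| t != Some("{"));
    assert!(const_item);
}
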
fn parse_mac_args(&mut self) -> PResult<'a, P> { 13,001 ( 0.00%) self.parse_mac_args_common(true).map(P) . } . . fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> { 17,199 ( 0.00%) self.parse_mac_args_common(false) . } . 94,104 ( 0.00%) fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> { 83,648 ( 0.00%) Ok( 61,170 ( 0.00%) if self.check(&token::OpenDelim(DelimToken::Paren)) 6,693 ( 0.00%) || self.check(&token::OpenDelim(DelimToken::Bracket)) 6,642 ( 0.00%) || self.check(&token::OpenDelim(DelimToken::Brace)) . { 35,756 ( 0.00%) match self.parse_token_tree() { 44,695 ( 0.00%) TokenTree::Delimited(dspan, delim, tokens) => . // We've confirmed above that there is a delimiter so unwrapping is OK. . { 17,878 ( 0.00%) MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens) . } . _ => unreachable!(), . } 3,034 ( 0.00%) } else if !delimited_only { . if self.eat(&token::Eq) { 48 ( 0.00%) let eq_span = self.prev_token.span; . . // Collect tokens because they are used during lowering to HIR. 48 ( 0.00%) let expr = self.parse_expr_force_collect()?; 48 ( 0.00%) let span = expr.span; . 240 ( 0.00%) let token_kind = token::Interpolated(Lrc::new(token::NtExpr(expr))); 432 ( 0.00%) MacArgs::Eq(eq_span, Token::new(token_kind, span)) . } else { . MacArgs::Empty . } . } else { . return self.unexpected(); . }, . ) 73,192 ( 0.00%) } . . fn parse_or_use_outer_attributes( . &mut self, . already_parsed_attrs: Option, . ) -> PResult<'a, AttrWrapper> { 280,884 ( 0.00%) if let Some(attrs) = already_parsed_attrs { . Ok(attrs) . } else { 149,816 ( 0.00%) self.parse_outer_attributes() . } . } . . /// Parses a single token tree from the input. 85,952 ( 0.00%) pub(crate) fn parse_token_tree(&mut self) -> TokenTree { 33,232 ( 0.00%) match self.token.kind { . token::OpenDelim(..) => { . let depth = self.token_cursor.stack.len(); . . // We keep advancing the token cursor until we hit . // the matching `CloseDelim` token. 476,156 ( 0.00%) while !(depth == self.token_cursor.stack.len() . && matches!(self.token.kind, token::CloseDelim(_))) . { . // Advance one token at a time, so `TokenCursor::next()` . // can capture these tokens if necessary. 319,634 ( 0.00%) self.bump(); . } . // We are still inside the frame corresponding . // to the delimited stream we captured, so grab . // the tokens from this frame. . let frame = &self.token_cursor.frame; 20,988 ( 0.00%) let stream = frame.tree_cursor.stream.clone(); 20,988 ( 0.00%) let span = frame.span; 10,494 ( 0.00%) let delim = frame.delim; . // Consume close delimiter 20,988 ( 0.00%) self.bump(); 52,470 ( 0.00%) TokenTree::Delimited(span, delim, stream) . } . token::CloseDelim(_) | token::Eof => unreachable!(), . _ => { 500 ( 0.00%) self.bump(); 1,250 ( 0.00%) TokenTree::Token(self.prev_token.clone()) . } . } 75,208 ( 0.00%) } . . /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF. . pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec> { . let mut tts = Vec::new(); . while self.token != token::Eof { . tts.push(self.parse_token_tree()); . } . Ok(tts) -- line 1234 ---------------------------------------- -- line 1244 ---------------------------------------- . } . TokenStream::new(result) . } . . /// Evaluates the closure with restrictions in place. . /// . /// Afters the closure is evaluated, restrictions are reset. . fn with_res(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T { 86,650 ( 0.00%) let old = self.restrictions; 102,112 ( 0.00%) self.restrictions = res; . 
let res = f(self); 115,110 ( 0.00%) self.restrictions = old; . res . } . 236,016 ( 0.00%) fn is_crate_vis(&self) -> bool { 147,510 ( 0.00%) self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep) 265,518 ( 0.00%) } . . /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`, . /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`. . /// If the following element can't be a tuple (i.e., it's a function definition), then . /// it's not a tuple struct field), and the contents within the parentheses aren't valid, . /// so emit a proper diagnostic. . // Public for rustfmt usage. 231,600 ( 0.00%) pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> { 46,330 ( 0.00%) maybe_whole!(self, NtVis, |x| x); . 23,159 ( 0.00%) self.expected_tokens.push(TokenType::Keyword(kw::Crate)); 92,636 ( 0.00%) if self.is_crate_vis() { . self.bump(); // `crate` . self.sess.gated_spans.gate(sym::crate_visibility_modifier, self.prev_token.span); . return Ok(Visibility { . span: self.prev_token.span, . kind: VisibilityKind::Crate(CrateSugar::JustCrate), . tokens: None, . }); . } . . if !self.eat_keyword(kw::Pub) { . // We need a span for our `Spanned`, but there's inherently no . // keyword to grab a span from for inherited visibility; an empty span at the . // beginning of the current token would seem to be the "Schelling span". 21,749 ( 0.00%) return Ok(Visibility { 65,247 ( 0.00%) span: self.token.span.shrink_to_lo(), . kind: VisibilityKind::Inherited, . tokens: None, . }); . } 1,410 ( 0.00%) let lo = self.prev_token.span; . 7,050 ( 0.00%) if self.check(&token::OpenDelim(token::Paren)) { . // We don't `self.bump()` the `(` yet because this might be a struct definition where . // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. . // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so . // by the following tokens. 18 ( 0.00%) if self.is_keyword_ahead(1, &[kw::Crate]) && self.look_ahead(2, |t| t != &token::ModSep) . // account for `pub(crate::foo)` . { . // Parse `pub(crate)`. 6 ( 0.00%) self.bump(); // `(` 4 ( 0.00%) self.bump(); // `crate` 6 ( 0.00%) self.expect(&token::CloseDelim(token::Paren))?; // `)` 2 ( 0.00%) let vis = VisibilityKind::Crate(CrateSugar::PubCrate); . return Ok(Visibility { 6 ( 0.00%) span: lo.to(self.prev_token.span), . kind: vis, . tokens: None, . }); . } else if self.is_keyword_ahead(1, &[kw::In]) { . // Parse `pub(in path)`. . self.bump(); // `(` . self.bump(); // `in` . let path = self.parse_path(PathStyle::Mod)?; // `path` -- line 1317 ---------------------------------------- -- line 1338 ---------------------------------------- . } else if let FollowedByType::No = fbt { . // Provide this diagnostic if a type cannot follow; . // in particular, if this is not a tuple struct. . self.recover_incorrect_vis_restriction()?; . // Emit diagnostic, but continue with public visibility. . } . } . 4,224 ( 0.00%) Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None }) 208,440 ( 0.00%) } . . /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }` . fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> { . self.bump(); // `(` . let path = self.parse_path(PathStyle::Mod)?; . self.expect(&token::CloseDelim(token::Paren))?; // `)` . . 
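
For reference, these are the surface forms `parse_visibility` above has to distinguish; this is plain Rust visibility syntax, nothing rustc-internal:

#[allow(dead_code)]
mod outer {
    pub mod inner {
        pub struct A;                  // usable wherever the parent modules are visible
        pub(crate) struct B;           // usable anywhere in the current crate
        pub(super) struct C;           // usable in the parent module (`outer`) only
        pub(self) struct D;            // equivalent to private: this module only
        pub(in crate::outer) struct E; // usable within the named ancestor module
    }

    // `outer` may name C and E thanks to pub(super) / pub(in crate::outer).
    pub fn touch() {
        let _ = (inner::C, inner::E);
    }
}

fn main() {
    // From the crate root only the `pub` and `pub(crate)` items are reachable.
    let _ = (outer::inner::A, outer::inner::B);
    outer::touch();
}
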
let msg = "incorrect visibility restriction"; -- line 1355 ---------------------------------------- -- line 1370 ---------------------------------------- . ) . .emit(); . . Ok(()) . } . . /// Parses `extern string_literal?`. . fn parse_extern(&mut self) -> Extern { 4,024 ( 0.00%) if self.eat_keyword(kw::Extern) { Extern::from_abi(self.parse_abi()) } else { Extern::None } . } . . /// Parses a string literal as an ABI spec. . fn parse_abi(&mut self) -> Option { . match self.parse_str_lit() { . Ok(str_lit) => Some(str_lit), . Err(Some(lit)) => match lit.kind { . ast::LitKind::Err(_) => None, -- line 1386 ---------------------------------------- -- line 1395 ---------------------------------------- . .emit(); . None . } . }, . Err(None) => None, . } . } . 39,520 ( 0.00%) pub fn collect_tokens_no_attrs( . &mut self, . f: impl FnOnce(&mut Self) -> PResult<'a, R>, . ) -> PResult<'a, R> { . // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use . // `ForceCollect::Yes` . self.collect_tokens_trailing_token( . AttrWrapper::empty(), . ForceCollect::Yes, 4,057 ( 0.00%) |this, _attrs| Ok((f(this)?, TrailingToken::None)), . ) 39,520 ( 0.00%) } . . /// `::{` or `::*` 1,746,912 ( 0.00%) fn is_import_coupler(&mut self) -> bool { 873,456 ( 0.00%) self.check(&token::ModSep) . && self.look_ahead(1, |t| { 1,557,239 ( 0.00%) *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star) . }) 1,144,296 ( 0.00%) } . . pub fn clear_expected_tokens(&mut self) { . self.expected_tokens.clear(); . } . } . . crate fn make_unclosed_delims_error( . unmatched: UnmatchedBrace, -- line 1430 ---------------------------------------- -- line 1450 ---------------------------------------- . err.span_label(sp, "closing delimiter possibly meant for this"); . } . if let Some(sp) = unmatched.unclosed_span { . err.span_label(sp, "unclosed delimiter"); . } . Some(err) . } . 209,056 ( 0.00%) pub fn emit_unclosed_delims(unclosed_delims: &mut Vec, sess: &ParseSess) { 104,528 ( 0.00%) *sess.reached_eof.borrow_mut() |= . unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none()); 104,528 ( 0.00%) for unmatched in unclosed_delims.drain(..) { . if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) { . e.emit(); . } . } 209,056 ( 0.00%) } . . /// A helper struct used when building an `AttrAnnotatedTokenStream` from . /// a `LazyTokenStream`. Both delimiter and non-delimited tokens . /// are stored as `FlatToken::Token`. A vector of `FlatToken`s . /// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested . /// `AttrAnnotatedTokenTree::Delimited` tokens 909 ( 0.00%) #[derive(Debug, Clone)] . pub enum FlatToken { . /// A token - this holds both delimiter (e.g. '{' and '}') . /// and non-delimiter tokens . Token(Token), . /// Holds the `AttributesData` for an AST node. The . /// `AttributesData` is inserted directly into the . /// constructed `AttrAnnotatedTokenStream` as . /// an `AttrAnnotatedTokenTree::Attributes` -- line 1481 ---------------------------------------- 5,723,454 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs -------------------------------------------------------------------------------- Ir -- line 70 ---------------------------------------- . . impl<'a, T> $name<'a, T> { . // Helper function for creating a slice from the iterator. . #[inline(always)] . fn make_slice(&self) -> &'a [T] { . 
// SAFETY: the iterator was created from a slice with pointer . // `self.ptr` and length `len!(self)`. This guarantees that all . // the prerequisites for `from_raw_parts` are fulfilled. 140,778 ( 0.00%) unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) } . } . . // Helper function for moving the start of the iterator forwards by `offset` elements, . // returning the old start. . // Unsafe because the offset must not exceed `self.len()`. . #[inline(always)] . unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T { . if mem::size_of::() == 0 { . zst_shrink!(self, offset); . self.ptr.as_ptr() . } else { . let old = self.ptr.as_ptr(); . // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, . // so this new pointer is inside `self` and thus guaranteed to be non-null. 5,196,894 ( 0.01%) self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().offset(offset)) }; . old . } . } . . // Helper function for moving the end of the iterator backwards by `offset` elements, . // returning the new end. . // Unsafe because the offset must not exceed `self.len()`. . #[inline(always)] -- line 101 ---------------------------------------- -- line 102 ---------------------------------------- . unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T { . if mem::size_of::() == 0 { . zst_shrink!(self, offset); . self.ptr.as_ptr() . } else { . // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, . // which is guaranteed to not overflow an `isize`. Also, the resulting pointer . // is in bounds of `slice`, which fulfills the other requirements for `offset`. 40,100 ( 0.00%) self.end = unsafe { self.end.offset(-offset) }; . self.end . } . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl ExactSizeIterator for $name<'_, T> { . #[inline(always)] . fn len(&self) -> usize { 1,904,901 ( 0.00%) len!(self) . } . . #[inline(always)] . fn is_empty(&self) -> bool { . is_empty!(self) . } . } . -- line 128 ---------------------------------------- -- line 134 ---------------------------------------- . fn next(&mut self) -> Option<$elem> { . // could be implemented with slices, but this avoids bounds checks . . // SAFETY: `assume` calls are safe since a slice's start pointer . // must be non-null, and slices over non-ZSTs must also have a . // non-null end pointer. The call to `next_unchecked!` is safe . // since we check if the iterator is empty first. . unsafe { 4,539,531 ( 0.01%) assume(!self.ptr.as_ptr().is_null()); . if mem::size_of::() != 0 { 448,279 ( 0.00%) assume(!self.end.is_null()); . } 652,706,014 ( 1.23%) if is_empty!(self) { . None . } else { . Some(next_unchecked!(self)) . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 22,102,822 ( 0.04%) let exact = len!(self); . (exact, Some(exact)) . } . . #[inline] . fn count(self) -> usize { . len!(self) . } . . #[inline] . fn nth(&mut self, n: usize) -> Option<$elem> { 267,663 ( 0.00%) if n >= len!(self) { . // This iterator is now empty. . if mem::size_of::() == 0 { . // We have to do it this way as `ptr` may never be 0, but `end` . // could be (due to wrapping). . self.end = self.ptr.as_ptr(); . } else { . // SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr . unsafe { -- line 175 ---------------------------------------- -- line 203 ---------------------------------------- . // faster to compile. . #[inline] . fn for_each(mut self, mut f: F) . where . Self: Sized, . F: FnMut(Self::Item), . { . 
while let Some(x) = self.next() { 63,343 ( 0.00%) f(x); . } . } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn all(&mut self, mut f: F) -> bool . where . Self: Sized, . F: FnMut(Self::Item) -> bool, . { 78,281 ( 0.00%) while let Some(x) = self.next() { 162,151 ( 0.00%) if !f(x) { . return false; . } . } . true . } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] 16 ( 0.00%) fn any(&mut self, mut f: F) -> bool . where . Self: Sized, . F: FnMut(Self::Item) -> bool, . { 7,091,626 ( 0.01%) while let Some(x) = self.next() { 7,008,189 ( 0.01%) if f(x) { . return true; . } . } . false 16 ( 0.00%) } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn find
<P>
(&mut self, mut predicate: P) -> Option . where . Self: Sized, . P: FnMut(&Self::Item) -> bool, . { 507,774 ( 0.00%) while let Some(x) = self.next() { 1,154,062 ( 0.00%) if predicate(&x) { . return Some(x); . } . } . None 5,562 ( 0.00%) } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn find_map(&mut self, mut f: F) -> Option . where . Self: Sized, . F: FnMut(Self::Item) -> Option, . { 446,808 ( 0.00%) while let Some(x) = self.next() { 2,499,936 ( 0.00%) if let Some(y) = f(x) { 390,904 ( 0.00%) return Some(y); . } . } . None 82,536 ( 0.00%) } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. Also, the `assume` avoids a bounds check. . #[inline] . #[rustc_inherit_overflow_checks] . fn position
<P>
(&mut self, mut predicate: P) -> Option where . Self: Sized, . P: FnMut(Self::Item) -> bool, . { . let n = len!(self); . let mut i = 0; . while let Some(x) = self.next() { 5,847,892 ( 0.01%) if predicate(x) { . // SAFETY: we are guaranteed to be in bounds by the loop invariant: . // when `i >= n`, `self.next()` returns `None` and the loop breaks. . unsafe { assume(i < n) }; . return Some(i); . } . i += 1; . } . None -- line 303 ---------------------------------------- -- line 308 ---------------------------------------- . // faster to compile. Also, the `assume` avoids a bounds check. . #[inline] . fn rposition
<P>
(&mut self, mut predicate: P) -> Option where . P: FnMut(Self::Item) -> bool, . Self: Sized + ExactSizeIterator + DoubleEndedIterator . { . let n = len!(self); . let mut i = n; 585,955 ( 0.00%) while let Some(x) = self.next_back() { 8,220,103 ( 0.02%) i -= 1; 6,496,518 ( 0.01%) if predicate(x) { . // SAFETY: `i` must be lower than `n` since it starts at `n` . // and is only decreasing. . unsafe { assume(i < n) }; . return Some(i); . } . } . None . } -- line 326 ---------------------------------------- -- line 332 ---------------------------------------- . // the returned references is guaranteed to refer to an element . // of the slice and thus guaranteed to be valid. . // . // Also note that the caller also guarantees that we're never . // called with the same index again, and that no other methods . // that will access this subslice are called, so it is valid . // for the returned reference to be mutable in the case of . // `IterMut` 1,288,969 ( 0.00%) unsafe { & $( $mut_ )? * self.ptr.as_ptr().add(idx) } . } . . $($extra)* . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T> DoubleEndedIterator for $name<'a, T> { . #[inline] -- line 348 ---------------------------------------- -- line 349 ---------------------------------------- . fn next_back(&mut self) -> Option<$elem> { . // could be implemented with slices, but this avoids bounds checks . . // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null, . // and slices over non-ZSTs must also have a non-null end pointer. . // The call to `next_back_unchecked!` is safe since we check if the iterator is . // empty first. . unsafe { 14,440 ( 0.00%) assume(!self.ptr.as_ptr().is_null()); . if mem::size_of::() != 0 { 14,474 ( 0.00%) assume(!self.end.is_null()); . } 16,571,186 ( 0.03%) if is_empty!(self) { . None . } else { . Some(next_back_unchecked!(self)) . } . } . } . . #[inline] -- line 369 ---------------------------------------- 134,191,170 ( 0.25%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs -------------------------------------------------------------------------------- Ir -- line 180 ---------------------------------------- . /// assert_eq!(v, &[0.into()]); . /// . /// // Ensure that the last item was dropped. . /// assert!(weak.upgrade().is_none()); . /// ``` . #[stable(feature = "drop_in_place", since = "1.8.0")] . #[lang = "drop_in_place"] . #[allow(unconditional_recursion)] 444,503,013 ( 0.84%) pub unsafe fn drop_in_place(to_drop: *mut T) { . // Code here does not matter - this is replaced by the . // real drop glue by the compiler. . . // SAFETY: see comment above . unsafe { drop_in_place(to_drop) } . } . . /// Creates a null raw pointer. -- line 196 ---------------------------------------- -- line 366 ---------------------------------------- . let mut tmp = MaybeUninit::::uninit(); . . // Perform the swap . // SAFETY: the caller must guarantee that `x` and `y` are . // valid for writes and properly aligned. `tmp` cannot be . // overlapping either `x` or `y` because `tmp` was just allocated . // on the stack as a separate allocated object. . unsafe { 6 ( 0.00%) copy_nonoverlapping(x, tmp.as_mut_ptr(), 1); 6 ( 0.00%) copy(y, x, 1); // `x` and `y` may overlap . copy_nonoverlapping(tmp.as_ptr(), y, 1); . } . } . . /// Swaps `count * size_of::()` bytes between the two regions of memory . /// beginning at `x` and `y`. The two regions must *not* overlap. . /// . 
/// # Safety -- line 383 ---------------------------------------- -- line 448 ---------------------------------------- . return; . } . } . . // Direct swapping, for the cases not going through the block optimization. . // SAFETY: the caller must guarantee that `x` and `y` are valid . // for writes, properly aligned, and non-overlapping. . unsafe { 489 ( 0.00%) let z = read(x); . copy_nonoverlapping(y, x, 1); . write(y, z); . } . } . . #[inline] . #[rustc_const_unstable(feature = "const_swap", issue = "83163")] . const unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) { -- line 464 ---------------------------------------- -- line 693 ---------------------------------------- . let mut tmp = MaybeUninit::::uninit(); . // SAFETY: the caller must guarantee that `src` is valid for reads. . // `src` cannot overlap `tmp` because `tmp` was just allocated on . // the stack as a separate allocated object. . // . // Also, since we just wrote a valid value into `tmp`, it is guaranteed . // to be properly initialized. . unsafe { 22,211,409 ( 0.04%) copy_nonoverlapping(src, tmp.as_mut_ptr(), 1); 163,765,061 ( 0.31%) tmp.assume_init() . } . } . . /// Reads the value from `src` without moving it. This leaves the . /// memory in `src` unchanged. . /// . /// Unlike [`read`], `read_unaligned` works with unaligned pointers. . /// -- line 710 ---------------------------------------- -- line 884 ---------------------------------------- . #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")] . fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize); . } . . // SAFETY: the caller must guarantee that `dst` is valid for writes. . // `dst` cannot overlap `src` because the caller has mutable access . // to `dst` while `src` is owned by this function. . unsafe { 456,603,065 ( 0.86%) copy_nonoverlapping(&src as *const T, dst, 1); . intrinsics::forget(src); . } . } . . /// Overwrites a memory location with the given value without reading or . /// dropping the old value. . /// . /// Unlike [`write()`], the pointer may be unaligned. -- line 900 ---------------------------------------- -- line 1206 ---------------------------------------- . if stride == 1 { . // `stride == 1` case can be computed more simply through `-p (mod a)`, but doing so . // inhibits LLVM's ability to select instructions like `lea`. Instead we compute . // . // round_up_to_next_alignment(p, a) - p . // . // which distributes operations around the load-bearing, but pessimizing `and` sufficiently . // for LLVM to be able to utilize the various optimizations it knows about. 194,700 ( 0.00%) return wrapping_sub( 404,994 ( 0.00%) wrapping_add(p as usize, a_minus_one) & wrapping_sub(0, a), . p as usize, . ); . } . . let pmoda = p as usize & a_minus_one; . if pmoda == 0 { . // Already aligned. Yay! . return 0; -- line 1223 ---------------------------------------- -- line 1348 ---------------------------------------- . /// assert!(std::ptr::eq( . /// &wrapper as &dyn Trait as *const dyn Trait as *const u8, . /// &wrapper.member as &dyn Trait as *const dyn Trait as *const u8, . /// )); . /// ``` . #[stable(feature = "ptr_eq", since = "1.17.0")] . #[inline] . pub fn eq(a: *const T, b: *const T) -> bool { 20,873,230 ( 0.04%) a == b . } . . /// Hash a raw pointer. . /// . /// This can be used to hash a `&T` reference (which coerces to `*const T` implicitly) . /// by its address rather than the value it points to . /// (which is what the `Hash for &T` implementation does). . 
/// -- line 1364 ---------------------------------------- 78,620,691 ( 0.15%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs -------------------------------------------------------------------------------- Ir -- line 91 ---------------------------------------- . // maximum of number bytes needed to fill an 8-byte-sized element on which . // SipHash operates. Note that for variable-sized copies which are known to be . // less than 8 bytes, this function will perform more work than necessary unless . // the compiler is able to optimize the extra work away. . #[inline] . unsafe fn copy_nonoverlapping_small(src: *const u8, dst: *mut u8, count: usize) { . debug_assert!(count <= 8); . 755,082 ( 0.00%) if count == 8 { . ptr::copy_nonoverlapping(src, dst, 8); . return; . } . . let mut i = 0; 813,262 ( 0.00%) if i + 3 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 4); . i += 4; . } . 1,733,347 ( 0.00%) if i + 1 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 2); 262,471 ( 0.00%) i += 2 . } . 813,262 ( 0.00%) if i < count { 553,566 ( 0.00%) *dst.add(i) = *src.add(i); . i += 1; . } . . debug_assert_eq!(i, count); . } . . // # Implementation . // -- line 124 ---------------------------------------- -- line 201 ---------------------------------------- . . hasher . } . . // A specialized write function for values with size <= 8. . #[inline] . fn short_write(&mut self, x: T) { . let size = mem::size_of::(); 12,213,441 ( 0.02%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . 84,173,630 ( 0.16%) if nbuf + size < BUFFER_SIZE { . unsafe { . // The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . } . 26,708,909 ( 0.05%) self.nbuf = nbuf + size; . . return; . } . 4,608,221 ( 0.01%) unsafe { self.short_write_process_buffer(x) } . } . . // A specialized write function for values with size <= 8 that should only . // be called when the write would cause the buffer to fill. . // . // SAFETY: the write of `x` into `self.buf` starting at byte offset . // `self.nbuf` must cause `self.buf` to become fully initialized (and not . // overflow) if it wasn't already. . #[inline(never)] 1,372,830 ( 0.00%) unsafe fn short_write_process_buffer(&mut self, x: T) { . let size = mem::size_of::(); 1,372,830 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size >= BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . . // Copy first part of input into end of buffer, possibly into spill . // element. The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . . // Process buffer. . for i in 0..BUFFER_CAPACITY { 13,728,300 ( 0.03%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 10,982,640 ( 0.02%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 12,355,470 ( 0.02%) self.state.v0 ^= elem; . } . . // Copy remaining input into start of buffer by copying size - 1 . // elements from spill (at most size - 1 bytes could have overflowed . // into the spill). 
The memcpy call is optimized away because the size . // is known. And the whole copy is optimized away for size == 1. . let src = self.buf.get_unchecked(BUFFER_SPILL_INDEX) as *const _ as *const u8; . ptr::copy_nonoverlapping(src, self.buf.as_mut_ptr() as *mut u8, size - 1); . . // This function should only be called when the write fills the buffer. . // Therefore, when size == 1, the new `self.nbuf` must be zero. The size . // is statically known, so the branch is optimized away. 6,544,946 ( 0.01%) self.nbuf = if size == 1 { 0 } else { nbuf + size - BUFFER_SIZE }; 5,491,320 ( 0.01%) self.processed += BUFFER_SIZE; 2,745,660 ( 0.01%) } . . // A write function for byte slices. . #[inline] . fn slice_write(&mut self, msg: &[u8]) { . let length = msg.len(); 70,521 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . 2,554,537 ( 0.00%) if nbuf + length < BUFFER_SIZE { . unsafe { . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . 925,004 ( 0.00%) if length <= 8 { . copy_nonoverlapping_small(msg.as_ptr(), dst, length); . } else { . // This memcpy is *not* optimized away. . ptr::copy_nonoverlapping(msg.as_ptr(), dst, length); . } . } . 465,960 ( 0.00%) self.nbuf = nbuf + length; . . return; . } . 400,507 ( 0.00%) unsafe { self.slice_write_process_buffer(msg) } . } . . // A write function for byte slices that should only be called when the . // write would cause the buffer to fill. . // . // SAFETY: `self.buf` must be initialized up to the byte offset `self.nbuf`, . // and `msg` must contain enough bytes to initialize the rest of the element . // containing the byte offset `self.nbuf`. . #[inline(never)] 227,695 ( 0.00%) unsafe fn slice_write_process_buffer(&mut self, msg: &[u8]) { . let length = msg.len(); 45,539 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + length >= BUFFER_SIZE); . . // Always copy first part of input into current element of buffer. . // This function should only be called when the write fills the buffer, . // so we know that there is enough input to fill the current element. 136,617 ( 0.00%) let valid_in_elem = nbuf % ELEM_SIZE; 45,539 ( 0.00%) let needed_in_elem = ELEM_SIZE - valid_in_elem; . . let src = msg.as_ptr(); . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . copy_nonoverlapping_small(src, dst, needed_in_elem); . . // Process buffer. . . // Using `nbuf / ELEM_SIZE + 1` rather than `(nbuf + needed_in_elem) / . // ELEM_SIZE` to show the compiler that this loop's upper bound is > 0. . // We know that is true, because last step ensured we have a full . // element in the buffer. 91,078 ( 0.00%) let last = nbuf / ELEM_SIZE + 1; . . for i in 0..last { 348,957 ( 0.00%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 394,496 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 697,914 ( 0.00%) self.state.v0 ^= elem; . } . . // Process the remaining element-sized chunks of input. . let mut processed = needed_in_elem; 91,078 ( 0.00%) let input_left = length - processed; 49,146 ( 0.00%) let elems_left = input_left / ELEM_SIZE; . let extra_bytes_left = input_left % ELEM_SIZE; . . for _ in 0..elems_left { 26,486 ( 0.00%) let elem = (msg.as_ptr().add(processed) as *const u64).read_unaligned().to_le(); 26,486 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 26,486 ( 0.00%) self.state.v0 ^= elem; 52,972 ( 0.00%) processed += ELEM_SIZE; . } . . // Copy remaining input into start of buffer. . let src = msg.as_ptr().add(processed); . 
let dst = self.buf.as_mut_ptr() as *mut u8; . copy_nonoverlapping_small(src, dst, extra_bytes_left); . 45,539 ( 0.00%) self.nbuf = extra_bytes_left; 227,695 ( 0.00%) self.processed += nbuf + processed; 273,234 ( 0.00%) } . . #[inline] . pub fn finish128(mut self) -> (u64, u64) { . debug_assert!(self.nbuf < BUFFER_SIZE); . . // Process full elements in buffer. 328,776 ( 0.00%) let last = self.nbuf / ELEM_SIZE; . . // Since we're consuming self, avoid updating members for a potential . // performance gain. 438,368 ( 0.00%) let mut state = self.state; . . for i in 0..last { 350,734 ( 0.00%) let elem = unsafe { self.buf.get_unchecked(i).assume_init().to_le() }; 350,734 ( 0.00%) state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut state); 350,734 ( 0.00%) state.v0 ^= elem; . } . . // Get remaining partial element. 219,184 ( 0.00%) let elem = if self.nbuf % ELEM_SIZE != 0 { . unsafe { . // Ensure element is initialized by writing zero bytes. At most . // `ELEM_SIZE - 1` are required given the above check. It's safe . // to write this many because we have the spill and we maintain . // `self.nbuf` such that this write will start before the spill. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(self.nbuf); . ptr::write_bytes(dst, 0, ELEM_SIZE - 1); 86,128 ( 0.00%) self.buf.get_unchecked(last).assume_init().to_le() . } . } else { . 0 . }; . . // Finalize the hash. 301,206 ( 0.00%) let length = self.processed + self.nbuf; 210,970 ( 0.00%) let b: u64 = ((length as u64 & 0xff) << 56) | elem; . 105,485 ( 0.00%) state.v3 ^= b; . Sip24Rounds::c_rounds(&mut state); 105,485 ( 0.00%) state.v0 ^= b; . 105,485 ( 0.00%) state.v2 ^= 0xee; . Sip24Rounds::d_rounds(&mut state); 343,998 ( 0.00%) let _0 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . 133,026 ( 0.00%) state.v1 ^= 0xdd; . Sip24Rounds::d_rounds(&mut state); 133,026 ( 0.00%) let _1 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . . (_0, _1) . } . } . . impl Hasher for SipHasher128 { . #[inline] . fn write_u8(&mut self, i: u8) { -- line 414 ---------------------------------------- -- line 471 ---------------------------------------- . } . . #[derive(Debug, Clone, Default)] . struct Sip24Rounds; . . impl Sip24Rounds { . #[inline] . fn c_rounds(state: &mut State) { 48,766,655 ( 0.09%) compress!(state); 52,422,569 ( 0.10%) compress!(state); . } . . #[inline] . fn d_rounds(state: &mut State) { 687,992 ( 0.00%) compress!(state); 687,992 ( 0.00%) compress!(state); 687,992 ( 0.00%) compress!(state); 582,508 ( 0.00%) compress!(state); . } . } 9,565,362 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs -------------------------------------------------------------------------------- Ir -- line 57 ---------------------------------------- . /// # Examples . /// . /// Basic usage: . /// . /// ``` . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::from_str_radix(\"A\", 16), Ok(10));")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 4,624 ( 0.00%) pub fn from_str_radix(src: &str, radix: u32) -> Result { 2,312 ( 0.00%) from_str_radix(src, radix) 6,936 ( 0.00%) } . . /// Returns the number of ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// . /// ``` -- line 75 ---------------------------------------- -- line 80 ---------------------------------------- . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . 
#[doc(alias = "popcount")] . #[doc(alias = "popcnt")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn count_ones(self) -> u32 { 3,506,385 ( 0.01%) intrinsics::ctpop(self as $ActualT) as u32 . } . . /// Returns the number of zeros in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 96 ---------------------------------------- -- line 118 ---------------------------------------- . /// assert_eq!(n.leading_zeros(), 2); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn leading_zeros(self) -> u32 { 5,302,189 ( 0.01%) intrinsics::ctlz(self as $ActualT) as u32 . } . . /// Returns the number of trailing zeros in the binary representation . /// of `self`. . /// . /// # Examples . /// . /// Basic usage: -- line 134 ---------------------------------------- -- line 139 ---------------------------------------- . /// assert_eq!(n.trailing_zeros(), 3); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn trailing_zeros(self) -> u32 { 4,650,608 ( 0.01%) intrinsics::cttz(self) as u32 . } . . /// Returns the number of leading ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 155 ---------------------------------------- -- line 204 ---------------------------------------- . #[doc = concat!("assert_eq!(n.rotate_left(", $rot, "), m);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn rotate_left(self, n: u32) -> Self { 255,376,542 ( 0.48%) intrinsics::rotate_left(self, n as $SelfT) . } . . /// Shifts the bits to the right by a specified amount, `n`, . /// wrapping the truncated bits to the beginning of the resulting . /// integer. . /// . /// Please note this isn't the same operation as the `>>` shifting operator! . /// -- line 220 ---------------------------------------- -- line 430 ---------------------------------------- . #[doc = concat!("assert_eq!((", stringify!($SelfT), "::MAX - 2).checked_add(3), None);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_checked_int_methods", since = "1.47.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline] . pub const fn checked_add(self, rhs: Self) -> Option { 7,262 ( 0.00%) let (a, b) = self.overflowing_add(rhs); . if unlikely!(b) {None} else {Some(a)} . } . . /// Unchecked integer addition. Computes `self + rhs`, assuming overflow . /// cannot occur. . /// . /// # Safety . /// -- line 446 ---------------------------------------- -- line 456 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { . 
// SAFETY: the caller must uphold the safety contract for . // `unchecked_add`. 12,758,456 ( 0.02%) unsafe { intrinsics::unchecked_add(self, rhs) } . } . . /// Checked addition with a signed integer. Computes `self + rhs`, . /// returning `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 472 ---------------------------------------- -- line 525 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_sub`. 3,650,722 ( 0.01%) unsafe { intrinsics::unchecked_sub(self, rhs) } . } . . /// Checked integer multiplication. Computes `self * rhs`, returning . /// `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 541 ---------------------------------------- -- line 596 ---------------------------------------- . without modifying the original"] . #[inline] . pub const fn checked_div(self, rhs: Self) -> Option { . if unlikely!(rhs == 0) { . None . } else { . // SAFETY: div by zero has been checked above and unsigned types have no other . // failure modes for division 1,040 ( 0.00%) Some(unsafe { intrinsics::unchecked_div(self, rhs) }) . } . } . . /// Checked Euclidean division. Computes `self.div_euclid(rhs)`, returning `None` . /// if `rhs == 0`. . /// . /// # Examples . /// -- line 612 ---------------------------------------- -- line 1035 ---------------------------------------- . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.saturating_add(127), ", stringify!($SelfT), "::MAX);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_add(self, rhs: Self) -> Self { 4,994,963 ( 0.01%) intrinsics::saturating_add(self, rhs) . } . . /// Saturating addition with a signed integer. Computes `self + rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1051 ---------------------------------------- -- line 1084 ---------------------------------------- . #[doc = concat!("assert_eq!(13", stringify!($SelfT), ".saturating_sub(127), 0);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_sub(self, rhs: Self) -> Self { 147,047 ( 0.00%) intrinsics::saturating_sub(self, rhs) . } . . /// Saturating integer multiplication. Computes `self * rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1100 ---------------------------------------- -- line 1175 ---------------------------------------- . #[doc = concat!("assert_eq!(200", stringify!($SelfT), ".wrapping_add(", stringify!($SelfT), "::MAX), 199);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . 
without modifying the original"] . #[inline(always)] . pub const fn wrapping_add(self, rhs: Self) -> Self { 138,907,503 ( 0.26%) intrinsics::wrapping_add(self, rhs) . } . . /// Wrapping (modular) addition with a signed integer. Computes . /// `self + rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1191 ---------------------------------------- -- line 1217 ---------------------------------------- . #[doc = concat!("assert_eq!(100", stringify!($SelfT), ".wrapping_sub(", stringify!($SelfT), "::MAX), 101);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_sub(self, rhs: Self) -> Self { 38,852,319 ( 0.07%) intrinsics::wrapping_sub(self, rhs) . } . . /// Wrapping (modular) multiplication. Computes `self * . /// rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1233 ---------------------------------------- -- line 1240 ---------------------------------------- . /// assert_eq!(25u8.wrapping_mul(12), 44); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_mul(self, rhs: Self) -> Self { 105,097,572 ( 0.20%) intrinsics::wrapping_mul(self, rhs) . } . . /// Wrapping (modular) division. Computes `self / rhs`. . /// Wrapped division on unsigned types is just normal division. . /// There's no way wrapping could ever happen. . /// This function exists, so that all operations . /// are accounted for in the wrapping operations. . /// -- line 1256 ---------------------------------------- -- line 1397 ---------------------------------------- . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_shl(self, rhs: u32) -> Self { . // SAFETY: the masking by the bitsize of the type ensures that we do not shift . // out of bounds . unsafe { 8 ( 0.00%) intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT) . } . } . . /// Panic-free bitwise shift-right; yields `self >> mask(rhs)`, . /// where `mask` removes any high-order bits of `rhs` that . /// would cause the shift to exceed the bitwidth of the type. . /// . /// Note that this is *not* the same as a rotate-right; the -- line 1413 ---------------------------------------- -- line 1491 ---------------------------------------- . #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_add(2), (7, false));")] . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.overflowing_add(1), (0, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] 672 ( 0.00%) pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) { 13,375,269 ( 0.03%) let (a, b) = intrinsics::add_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) 1,344 ( 0.00%) } . . /// Calculates `self + rhs + carry` without the ability to overflow. . /// . 
/// Performs "ternary addition" which takes in an extra bit to add, and may return an . /// additional bit of overflow. This allows for chaining together multiple additions . /// to create "big integers" which represent larger values. . /// . #[doc = concat!("This can be thought of as a ", stringify!($BITS), "-bit \"full adder\", in the electronics sense.")] -- line 1510 ---------------------------------------- -- line 1587 ---------------------------------------- . #[doc = concat!("assert_eq!(5", stringify!($SelfT), ".overflowing_sub(2), (3, false));")] . #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".overflowing_sub(1), (", stringify!($SelfT), "::MAX, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] 1 ( 0.00%) pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) { 3,224,909 ( 0.01%) let (a, b) = intrinsics::sub_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) 2 ( 0.00%) } . . /// Calculates `self - rhs - borrow` without the ability to overflow. . /// . /// Performs "ternary subtraction" which takes in an extra bit to subtract, and may return . /// an additional bit of overflow. This allows for chaining together multiple subtractions . /// to create "big integers" which represent larger values. . /// . /// # Examples -- line 1606 ---------------------------------------- -- line 1673 ---------------------------------------- . /// assert_eq!(5u32.overflowing_mul(2), (10, false)); . /// assert_eq!(1_000_000_000u32.overflowing_mul(10), (1410065408, true)); . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] 4 ( 0.00%) pub const fn overflowing_mul(self, rhs: Self) -> (Self, bool) { 28,410,017 ( 0.05%) let (a, b) = intrinsics::mul_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) 2 ( 0.00%) } . . /// Calculates the divisor when `self` is divided by `rhs`. . /// . /// Returns a tuple of the divisor along with a boolean indicating . /// whether an arithmetic overflow would occur. Note that for unsigned . /// integers overflow never occurs, so the second value is always . /// `false`. . /// -- line 1692 ---------------------------------------- -- line 2132 ---------------------------------------- . #[doc = concat!("assert!(16", stringify!($SelfT), ".is_power_of_two());")] . #[doc = concat!("assert!(!10", stringify!($SelfT), ".is_power_of_two());")] . /// ``` . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_is_power_of_two", since = "1.32.0")] . #[inline(always)] . pub const fn is_power_of_two(self) -> bool { 276 ( 0.00%) self.count_ones() == 1 . } . . // Returns one less than next power of two. . // (For 8u8 next power of two is 8u8 and for 6u8 it is 8u8) . // . // 8u8.one_less_than_next_power_of_two() == 7 . // 6u8.one_less_than_next_power_of_two() == 7 . // . // This method cannot overflow, as in the `next_power_of_two` . // overflow cases it instead ends up returning the maximum value . // of the type, and can return 0 for 0. . #[inline] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . 
const fn one_less_than_next_power_of_two(self) -> Self { 639,612 ( 0.00%) if self <= 1 { return 0; } . 652,542 ( 0.00%) let p = self - 1; . // SAFETY: Because `p > 0`, it cannot consist entirely of leading zeros. . // That means the shift is always in-bounds, and some processors . // (such as intel pre-haswell) have more efficient ctlz . // intrinsics when the argument is non-zero. 1,957,084 ( 0.00%) let z = unsafe { intrinsics::ctlz_nonzero(p) }; 652,692 ( 0.00%) <$SelfT>::MAX >> z . } . . /// Returns the smallest power of two greater than or equal to `self`. . /// . /// When return value overflows (i.e., `self > (1 << (N-1))` for type . /// `uN`), it panics in debug mode and the return value is wrapped to 0 in . /// release mode (the only situation in which method can return 0). . /// -- line 2171 ---------------------------------------- -- line 2179 ---------------------------------------- . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline] . #[rustc_inherit_overflow_checks] . pub const fn next_power_of_two(self) -> Self { 665,276 ( 0.00%) self.one_less_than_next_power_of_two() + 1 . } . . /// Returns the smallest power of two greater than or equal to `n`. If . /// the next power of two is greater than the type's maximum value, . /// `None` is returned, otherwise the power of two is wrapped in `Some`. . /// . /// # Examples . /// -- line 2195 ---------------------------------------- 1,920,771 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/client.rs -------------------------------------------------------------------------------- Ir -- line 18 ---------------------------------------- . // FIXME(eddyb) use a reference to the `static COUNTERS`, instead of . // a wrapper `fn` pointer, once `const fn` can reference `static`s. . extern "C" fn get() -> &'static Self { . static COUNTERS: HandleCounters = HandleCounters { . $($oty: AtomicUsize::new(1),)* . $($ity: AtomicUsize::new(1),)* . }; . &COUNTERS 1,320 ( 0.00%) } . } . . // FIXME(eddyb) generate the definition of `HandleStore` in `server.rs`. . #[repr(C)] . #[allow(non_snake_case)] . pub(super) struct HandleStore { . $($oty: handle::OwnedStore,)* . $($ity: handle::InternedStore,)* . } . . impl HandleStore { 3,960 ( 0.00%) pub(super) fn new(handle_counters: &'static HandleCounters) -> Self { 38,940 ( 0.00%) HandleStore { . $($oty: handle::OwnedStore::new(&handle_counters.$oty),)* . $($ity: handle::InternedStore::new(&handle_counters.$ity),)* . } 5,280 ( 0.00%) } . } . . $( . #[repr(C)] . pub(crate) struct $oty(handle::Handle); . impl !Send for $oty {} . impl !Sync for $oty {} . . // Forward `Drop::drop` to the inherent `drop` method. . impl Drop for $oty { 69,744 ( 0.00%) fn drop(&mut self) { 386,446 ( 0.00%) $oty(self.0).drop(); 104,616 ( 0.00%) } . } . . impl Encode for $oty { . fn encode(self, w: &mut Writer, s: &mut S) { . let handle = self.0; . mem::forget(self); . handle.encode(w, s); . } . } . . impl DecodeMut<'_, '_, HandleStore>> . for Marked . { 3,623,709 ( 0.01%) fn decode(r: &mut Reader<'_>, s: &mut HandleStore>) -> Self { . s.$oty.take(handle::Handle::decode(r, &mut ())) 4,831,612 ( 0.01%) } . } . . impl Encode for &$oty { . fn encode(self, w: &mut Writer, s: &mut S) { 646,623 ( 0.00%) self.0.encode(w, s); . 
} . } . . impl<'s, S: server::Types> Decode<'_, 's, HandleStore>> . for &'s Marked . { 215,541 ( 0.00%) fn decode(r: &mut Reader<'_>, s: &'s HandleStore>) -> Self { . &s.$oty[handle::Handle::decode(r, &mut ())] 431,082 ( 0.00%) } . } . . impl Encode for &mut $oty { . fn encode(self, w: &mut Writer, s: &mut S) { 2,840,634 ( 0.01%) self.0.encode(w, s); . } . } . . impl<'s, S: server::Types> DecodeMut<'_, 's, HandleStore>> . for &'s mut Marked . { 946,878 ( 0.00%) fn decode( . r: &mut Reader<'_>, . s: &'s mut HandleStore> . ) -> Self { . &mut s.$oty[handle::Handle::decode(r, &mut ())] 1,893,756 ( 0.00%) } . } . . impl Encode>> . for Marked . { 7,159,686 ( 0.01%) fn encode(self, w: &mut Writer, s: &mut HandleStore>) { 1,157,968 ( 0.00%) s.$oty.alloc(self).encode(w, s); . } . } . . impl DecodeMut<'_, '_, S> for $oty { . fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { . $oty(handle::Handle::decode(r, s)) . } . } -- line 118 ---------------------------------------- -- line 129 ---------------------------------------- . fn encode(self, w: &mut Writer, s: &mut S) { . self.0.encode(w, s); . } . } . . impl DecodeMut<'_, '_, HandleStore>> . for Marked . { 1,059,529 ( 0.00%) fn decode(r: &mut Reader<'_>, s: &mut HandleStore>) -> Self { . s.$ity.copy(handle::Handle::decode(r, &mut ())) 2,119,058 ( 0.00%) } . } . . impl Encode>> . for Marked . { . fn encode(self, w: &mut Writer, s: &mut HandleStore>) { 8,987,434 ( 0.02%) s.$ity.alloc(self).encode(w, s); . } . } . . impl DecodeMut<'_, '_, S> for $ity { . fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { . $ity(handle::Handle::decode(r, s)) . } . } -- line 154 ---------------------------------------- -- line 175 ---------------------------------------- . . // FIXME(eddyb) generate these impls by pattern-matching on the . // names of methods - also could use the presence of `fn drop` . // to distinguish between 'owned and 'interned, above. . // Alternatively, special 'modes" could be listed of types in with_api . // instead of pattern matching on methods, here and in server decl. . . impl Clone for TokenStream { 103,443 ( 0.00%) fn clone(&self) -> Self { . self.clone() 103,443 ( 0.00%) } . } . . impl Clone for TokenStreamIter { . fn clone(&self) -> Self { . self.clone() . } . } . . impl Clone for Group { 68,649 ( 0.00%) fn clone(&self) -> Self { . self.clone() 68,649 ( 0.00%) } . } . . impl Clone for Literal { 13,944 ( 0.00%) fn clone(&self) -> Self { . self.clone() 13,944 ( 0.00%) } . } . . impl fmt::Debug for Literal { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . f.debug_struct("Literal") . // format the kind without quotes, as in `kind: Float` . .field("kind", &format_args!("{}", &self.debug_kind())) . .field("symbol", &self.symbol()) -- line 211 ---------------------------------------- -- line 219 ---------------------------------------- . impl Clone for SourceFile { . fn clone(&self) -> Self { . self.clone() . } . } . . impl fmt::Debug for Span { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 142,125 ( 0.00%) f.write_str(&self.debug()) . } . } . . macro_rules! define_client_side { . ($($name:ident { . $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)*;)* . }),* $(,)?) => { . $(impl $name { 5,012,256 ( 0.01%) $(pub(crate) fn $method($($arg: $arg_ty),*) $(-> $ret_ty)* { 2,568,332 ( 0.00%) Bridge::with(|bridge| { . let mut b = bridge.cached_buffer.take(); . . 
b.clear(); 13,587,273 ( 0.03%) api_tags::Method::$name(api_tags::$name::$method).encode(&mut b, &mut ()); 3,890,230 ( 0.01%) reverse_encode!(b; $($arg),*); . 27,174,546 ( 0.05%) b = bridge.dispatch.call(b); . 19,066,680 ( 0.04%) let r = Result::<_, PanicMessage>::decode(&mut &b[..], &mut ()); . 54,349,092 ( 0.10%) bridge.cached_buffer = b; . 6,786,057 ( 0.01%) r.unwrap_or_else(|e| panic::resume_unwind(e.into())) . }) 4,176,880 ( 0.01%) })* . })* . } . } . with_api!(self, self, define_client_side); . . enum BridgeState<'a> { . /// No server is currently connected to this client. . NotConnected, -- line 260 ---------------------------------------- -- line 281 ---------------------------------------- . impl BridgeState<'_> { . /// Take exclusive control of the thread-local . /// `BridgeState`, and pass it to `f`, mutably. . /// The state will be restored after `f` exits, even . /// by panic, including modifications made to it by `f`. . /// . /// N.B., while `f` is running, the thread-local state . /// is `BridgeState::InUse`. 2,225,125 ( 0.00%) fn with(f: impl FnOnce(&mut BridgeState<'_>) -> R) -> R { 2,264,794 ( 0.00%) BRIDGE_STATE.with(|state| { 24,747,341 ( 0.05%) state.replace(BridgeState::InUse, |mut state| { . // FIXME(#52812) pass `f` directly to `replace` when `RefMutL` is gone . f(&mut *state) . }) . }) 1,908,423 ( 0.00%) } . } . . impl Bridge<'_> { . pub(crate) fn is_available() -> bool { 3 ( 0.00%) BridgeState::with(|state| match state { . BridgeState::Connected(_) | BridgeState::InUse => true, . BridgeState::NotConnected => false, . }) . } . . fn enter(self, f: impl FnOnce() -> R) -> R { . let force_show_panics = self.force_show_panics; . // Hide the default panic output within `proc_macro` expansions. -- line 309 ---------------------------------------- -- line 320 ---------------------------------------- . } . }); . }); . . BRIDGE_STATE.with(|state| state.set(BridgeState::Connected(self), f)) . } . . fn with(f: impl FnOnce(&mut Bridge<'_>) -> R) -> R { 32,020,339 ( 0.06%) BridgeState::with(|state| match state { . BridgeState::NotConnected => { . panic!("procedural macro API is used outside of a procedural macro"); . } . BridgeState::InUse => { . panic!("procedural macro API is used while it's already in use"); . } . BridgeState::Connected(bridge) => f(bridge), . }) -- line 336 ---------------------------------------- -- line 396 ---------------------------------------- . b.clear(); . Err::<(), _>(e).encode(&mut b, &mut ()); . }); . b . } . . impl Client crate::TokenStream> { . #[rustc_allow_const_fn_unstable(const_fn)] 1 ( 0.00%) pub const fn expand1(f: fn(crate::TokenStream) -> crate::TokenStream) -> Self { . extern "C" fn run( . bridge: Bridge<'_>, . f: impl FnOnce(crate::TokenStream) -> crate::TokenStream, . ) -> Buffer { . run_client(bridge, |input| f(crate::TokenStream(input)).0) . } 5 ( 0.00%) Client { get_handle_counters: HandleCounters::get, run, f } 1 ( 0.00%) } . } . . impl Client crate::TokenStream> { . #[rustc_allow_const_fn_unstable(const_fn)] . pub const fn expand2( . f: fn(crate::TokenStream, crate::TokenStream) -> crate::TokenStream, . ) -> Self { . extern "C" fn run( -- line 420 ---------------------------------------- 11,724,137 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs -------------------------------------------------------------------------------- Ir -- line 21 ---------------------------------------- . 
use rustc_target::spec::abi; . use std::borrow::Cow; . use std::cmp::Ordering; . use std::marker::PhantomData; . use std::ops::Range; . use ty::util::IntTypeExt; . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 8 ( 0.00%) #[derive(HashStable, TypeFoldable, Lift)] . pub struct TypeAndMut<'tcx> { 29,184 ( 0.00%) pub ty: Ty<'tcx>, 145,920 ( 0.00%) pub mutbl: hir::Mutability, . } . . #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)] . #[derive(HashStable)] . /// A "free" region `fr` can be interpreted as "some region . /// at least as big as the scope `fr.scope`". . pub struct FreeRegion { 248,402 ( 0.00%) pub scope: DefId, 520,747 ( 0.00%) pub bound_region: BoundRegionKind, . } . 3,716,468 ( 0.01%) #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)] 94 ( 0.00%) #[derive(HashStable)] . pub enum BoundRegionKind { . /// An anonymous region parameter for a given fn (&T) 66,141 ( 0.00%) BrAnon(u32), . . /// Named region parameters for functions (a in &'a T) . /// . /// The `DefId` is needed to distinguish free regions in . /// the event of shadowing. 23,305 ( 0.00%) BrNamed(DefId, Symbol), . . /// Anonymous region for the implicit env pointer parameter . /// to a closure . BrEnv, . } . 7,800 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)] . #[derive(HashStable)] . pub struct BoundRegion { 971,848 ( 0.00%) pub var: BoundVar, 860,588 ( 0.00%) pub kind: BoundRegionKind, . } . . impl BoundRegionKind { . pub fn is_named(&self) -> bool { . match *self { . BoundRegionKind::BrNamed(_, name) => name != kw::UnderscoreLifetime, . _ => false, . } . } . } . . /// Defines the kinds of types. . /// . /// N.B., if you change this, you'll probably want to change the corresponding . /// AST structure in `rustc_ast/src/ast.rs` as well. 116,501,322 ( 0.22%) #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable, Debug)] 258,477 ( 0.00%) #[derive(HashStable)] . #[rustc_diagnostic_item = "TyKind"] . pub enum TyKind<'tcx> { . /// The primitive boolean type. Written as `bool`. . Bool, . . /// The primitive character type; holds a Unicode scalar value . /// (a non-surrogate code point). Written as `char`. . Char, -- line 90 ---------------------------------------- -- line 99 ---------------------------------------- . Float(ty::FloatTy), . . /// Algebraic data types (ADT). For example: structures, enumerations and unions. . /// . /// InternalSubsts here, possibly against intuition, *may* contain `Param`s. . /// That is, even after substitution it is possible that there are type . /// variables. This happens when the `Adt` corresponds to an ADT . /// definition and not a concrete use of it. 17,690,393 ( 0.03%) Adt(&'tcx AdtDef, SubstsRef<'tcx>), . . /// An unsized FFI type that is opaque to Rust. Written as `extern type T`. . Foreign(DefId), . . /// The pointee of a string slice. Written as `str`. . Str, . . /// An array with the given length. Written as `[T; n]`. 52,511 ( 0.00%) Array(Ty<'tcx>, &'tcx ty::Const<'tcx>), . . /// The pointee of an array slice. Written as `[T]`. . Slice(Ty<'tcx>), . . /// A raw pointer. Written as `*mut T` or `*const T` . RawPtr(TypeAndMut<'tcx>), . . /// A reference; a pointer with an associated lifetime. Written as . /// `&'a mut T` or `&'a T`. 9,354,390 ( 0.02%) Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability), . . /// The anonymous type of a function declaration/definition. Each . 
/// function has a unique type, which is output (for a function . /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`. . /// . /// For example the type of `bar` here: . /// . /// ```rust -- line 134 ---------------------------------------- -- line 143 ---------------------------------------- . /// . /// ```rust . /// fn foo() -> i32 { 1 } . /// let bar: fn() -> i32 = foo; . /// ``` . FnPtr(PolyFnSig<'tcx>), . . /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`. 314,167 ( 0.00%) Dynamic(&'tcx List>>, ty::Region<'tcx>), . . /// The anonymous type of a closure. Used to represent the type of . /// `|a| a`. . Closure(DefId, SubstsRef<'tcx>), . . /// The anonymous type of a generator. Used to represent the type of . /// `|a| yield a`. . Generator(DefId, SubstsRef<'tcx>, hir::Movability), -- line 159 ---------------------------------------- -- line 180 ---------------------------------------- . /// The substitutions are for the generics of the function in question. . /// After typeck, the concrete type can be found in the `types` map. . Opaque(DefId, SubstsRef<'tcx>), . . /// A type parameter; for example, `T` in `fn f(x: T) {}`. . Param(ParamTy), . . /// Bound type variable, used only when preparing a trait query. 69,862 ( 0.00%) Bound(ty::DebruijnIndex, BoundTy), . . /// A placeholder type - universally quantified higher-ranked type. . Placeholder(ty::PlaceholderType), . . /// A type variable used during type checking. . Infer(InferTy), . . /// A placeholder for a type which could not be computed; this is -- line 196 ---------------------------------------- -- line 330 ---------------------------------------- . pub closure_kind_ty: T, . pub closure_sig_as_fn_ptr_ty: T, . pub tupled_upvars_ty: T, . } . . impl<'tcx> ClosureSubsts<'tcx> { . /// Construct `ClosureSubsts` from `ClosureSubstsParts`, containing `Substs` . /// for the closure parent, alongside additional closure-specific components. 29 ( 0.00%) pub fn new( . tcx: TyCtxt<'tcx>, . parts: ClosureSubstsParts<'tcx, Ty<'tcx>>, . ) -> ClosureSubsts<'tcx> { . ClosureSubsts { . substs: tcx.mk_substs( 58 ( 0.00%) parts.parent_substs.iter().copied().chain( 116 ( 0.00%) [parts.closure_kind_ty, parts.closure_sig_as_fn_ptr_ty, parts.tupled_upvars_ty] . .iter() . .map(|&ty| ty.into()), . ), . ), . } 58 ( 0.00%) } . . /// Divides the closure substs into their respective components. . /// The ordering assumed here must match that used by `ClosureSubsts::new` above. 362 ( 0.00%) fn split(self) -> ClosureSubstsParts<'tcx, GenericArg<'tcx>> { . match self.substs[..] { 4,696 ( 0.00%) [ 1,330 ( 0.00%) ref parent_substs @ .., 1,088 ( 0.00%) closure_kind_ty, 736 ( 0.00%) closure_sig_as_fn_ptr_ty, 886 ( 0.00%) tupled_upvars_ty, 1,448 ( 0.00%) ] => ClosureSubstsParts { . parent_substs, . closure_kind_ty, . closure_sig_as_fn_ptr_ty, . tupled_upvars_ty, . }, . _ => bug!("closure substs missing synthetics"), . } 1,086 ( 0.00%) } . . /// Returns `true` only if enough of the synthetic types are known to . /// allow using all of the methods on `ClosureSubsts` without panicking. . /// . /// Used primarily by `ty::print::pretty` to be able to handle closure . /// types that haven't had their synthetic types substituted in. . pub fn is_valid(self) -> bool { . self.substs.len() >= 3 -- line 378 ---------------------------------------- -- line 383 ---------------------------------------- . pub fn parent_substs(self) -> &'tcx [GenericArg<'tcx>] { . self.split().parent_substs . } . . 
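The `ClosureSubsts::new`/`split` pair annotated above relies on a fixed layout: the parent item's generic arguments come first, followed by three synthetic entries (closure kind, signature as a fn pointer, and the tuple of captured upvar types). Below is a minimal standalone sketch of that decomposition, using plain strings in place of real `GenericArg`s; all names and values are illustrative, not rustc definitions.

// Standalone sketch, not rustc code: mirrors how `ClosureSubsts::split` peels
// the last three synthetic entries off the substitution list, assuming the
// layout established by `ClosureSubsts::new`:
//   [ parent generics ..., closure_kind_ty, closure_sig_as_fn_ptr_ty, tupled_upvars_ty ]
#[derive(Debug)]
struct ClosureSubstsParts<'a> {
    parent_substs: &'a [&'static str],
    closure_kind_ty: &'static str,
    closure_sig_as_fn_ptr_ty: &'static str,
    tupled_upvars_ty: &'static str,
}

fn split(substs: &[&'static str]) -> ClosureSubstsParts<'_> {
    match substs {
        // Rest pattern first, then the three trailing synthetic entries.
        [parent_substs @ .., kind, sig, upvars] => ClosureSubstsParts {
            parent_substs,
            closure_kind_ty: *kind,
            closure_sig_as_fn_ptr_ty: *sig,
            tupled_upvars_ty: *upvars,
        },
        _ => panic!("closure substs missing synthetics"),
    }
}

fn main() {
    // A closure defined inside `fn foo<T>` would carry `T` as a parent subst
    // plus the three synthetic entries appended at the end.
    let substs = ["T", "i8 /* kind */", "fn(&i32) -> i32", "(captured types)"];
    println!("{:?}", split(&substs));
}
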
/// Returns an iterator over the list of types of captured paths by the closure. . /// In case there was a type error in figuring out the types of the captured path, an . /// empty iterator is returned. . #[inline] 6 ( 0.00%) pub fn upvar_tys(self) -> impl Iterator> + 'tcx { 243 ( 0.00%) match self.tupled_upvars_ty().kind() { . TyKind::Error(_) => None, 74 ( 0.00%) TyKind::Tuple(..) => Some(self.tupled_upvars_ty().tuple_fields()), . TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"), . ty => bug!("Unexpected representation of upvar types tuple {:?}", ty), . } . .into_iter() . .flatten() 4 ( 0.00%) } . . /// Returns the tuple type representing the upvars for this closure. . #[inline] . pub fn tupled_upvars_ty(self) -> Ty<'tcx> { 1,676 ( 0.00%) self.split().tupled_upvars_ty.expect_ty() . } . . /// Returns the closure kind for this closure; may return a type . /// variable during inference. To get the closure kind during . /// inference, use `infcx.closure_kind(substs)`. 175 ( 0.00%) pub fn kind_ty(self) -> Ty<'tcx> { . self.split().closure_kind_ty.expect_ty() 350 ( 0.00%) } . . /// Returns the `fn` pointer type representing the closure signature for this . /// closure. . // FIXME(eddyb) this should be unnecessary, as the shallowly resolved . // type is known at the time of the creation of `ClosureSubsts`, . // see `rustc_typeck::check::closure`. . pub fn sig_as_fn_ptr_ty(self) -> Ty<'tcx> { . self.split().closure_sig_as_fn_ptr_ty.expect_ty() . } . . /// Returns the closure kind for this closure; only usable outside . /// of an inference context, because in that context we know that . /// there are no type variables. . /// . /// If you have an inference context, use `infcx.closure_kind()`. 1 ( 0.00%) pub fn kind(self) -> ty::ClosureKind { 5 ( 0.00%) self.kind_ty().to_opt_closure_kind().unwrap() 2 ( 0.00%) } . . /// Extracts the signature from the closure. 252 ( 0.00%) pub fn sig(self) -> ty::PolyFnSig<'tcx> { . let ty = self.sig_as_fn_ptr_ty(); 504 ( 0.00%) match ty.kind() { 1,008 ( 0.00%) ty::FnPtr(sig) => *sig, . _ => bug!("closure_sig_as_fn_ptr_ty is not a fn-ptr: {:?}", ty.kind()), . } 756 ( 0.00%) } . } . . /// Similar to `ClosureSubsts`; see the above documentation for more. . #[derive(Copy, Clone, Debug, TypeFoldable)] . pub struct GeneratorSubsts<'tcx> { . pub substs: SubstsRef<'tcx>, . } . -- line 448 ---------------------------------------- -- line 676 ---------------------------------------- . } . . impl<'tcx> UpvarSubsts<'tcx> { . /// Returns an iterator over the list of types of captured paths by the closure/generator. . /// In case there was a type error in figuring out the types of the captured path, an . /// empty iterator is returned. . #[inline] . pub fn upvar_tys(self) -> impl Iterator> + 'tcx { 60 ( 0.00%) let tupled_tys = match self { 89 ( 0.00%) UpvarSubsts::Closure(substs) => substs.as_closure().tupled_upvars_ty(), . UpvarSubsts::Generator(substs) => substs.as_generator().tupled_upvars_ty(), . }; . 148 ( 0.00%) match tupled_tys.kind() { . TyKind::Error(_) => None, 119 ( 0.00%) TyKind::Tuple(..) => Some(self.tupled_upvars_ty().tuple_fields()), . TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"), . ty => bug!("Unexpected representation of upvar types tuple {:?}", ty), . } . .into_iter() . .flatten() . } . . #[inline] . pub fn tupled_upvars_ty(self) -> Ty<'tcx> { 118 ( 0.00%) match self { 119 ( 0.00%) UpvarSubsts::Closure(substs) => substs.as_closure().tupled_upvars_ty(), . 
UpvarSubsts::Generator(substs) => substs.as_generator().tupled_upvars_ty(), . } . } . } . . /// An inline const is modeled like . /// . /// const InlineConst<'l0...'li, T0...Tj, R>: R; -- line 710 ---------------------------------------- -- line 760 ---------------------------------------- . } . . /// Returns the type of this inline const. . pub fn ty(self) -> Ty<'tcx> { . self.split().ty.expect_ty() . } . } . 221,954 ( 0.00%) #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, TyEncodable, TyDecodable)] 255,667 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub enum ExistentialPredicate<'tcx> { . /// E.g., `Iterator`. . Trait(ExistentialTraitRef<'tcx>), . /// E.g., `Iterator::Item = T`. . Projection(ExistentialProjection<'tcx>), . /// E.g., `Send`. 18 ( 0.00%) AutoTrait(DefId), . } . . impl<'tcx> ExistentialPredicate<'tcx> { . /// Compares via an ordering that will not change if modules are reordered or other changes are . /// made to the tree. In particular, this ordering is preserved across incremental compilations. 70 ( 0.00%) pub fn stable_cmp(&self, tcx: TyCtxt<'tcx>, other: &Self) -> Ordering { . use self::ExistentialPredicate::*; 80 ( 0.00%) match (*self, *other) { . (Trait(_), Trait(_)) => Ordering::Equal, . (Projection(ref a), Projection(ref b)) => { . tcx.def_path_hash(a.item_def_id).cmp(&tcx.def_path_hash(b.item_def_id)) . } . (AutoTrait(ref a), AutoTrait(ref b)) => { . tcx.def_path_hash(*a).cmp(&tcx.def_path_hash(*b)) . } . (Trait(_), _) => Ordering::Less, . (Projection(_), Trait(_)) => Ordering::Greater, . (Projection(_), _) => Ordering::Less, . (AutoTrait(_), _) => Ordering::Greater, . } 84 ( 0.00%) } . } . . impl<'tcx> Binder<'tcx, ExistentialPredicate<'tcx>> { 27,675 ( 0.00%) pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::Predicate<'tcx> { . use crate::ty::ToPredicate; 15,375 ( 0.00%) match self.skip_binder() { . ExistentialPredicate::Trait(tr) => { . self.rebind(tr).with_self_ty(tcx, self_ty).without_const().to_predicate(tcx) . } . ExistentialPredicate::Projection(p) => { . self.rebind(p.with_self_ty(tcx, self_ty)).to_predicate(tcx) . } . ExistentialPredicate::AutoTrait(did) => { . let trait_ref = self.rebind(ty::TraitRef { . def_id: did, . substs: tcx.mk_substs_trait(self_ty, &[]), . }); . trait_ref.without_const().to_predicate(tcx) . } . } 24,600 ( 0.00%) } . } . . impl<'tcx> List>> { . /// Returns the "principal `DefId`" of this set of existential predicates. . /// . /// A Rust trait object type consists (in addition to a lifetime bound) . /// of a set of trait bounds, which are separated into any number . /// of auto-trait bounds, and at most one non-auto-trait bound. The -- line 826 ---------------------------------------- -- line 839 ---------------------------------------- . /// are the set `{Sync}`. . /// . /// It is also possible to have a "trivial" trait object that . /// consists only of auto traits, with no principal - for example, . /// `dyn Send + Sync`. In that case, the set of auto-trait bounds . /// is `{Send, Sync}`, while there is no principal. These trait objects . /// have a "trivial" vtable consisting of just the size, alignment, . /// and destructor. 2,153 ( 0.00%) pub fn principal(&self) -> Option>> { 17,847 ( 0.00%) self[0] 11,388 ( 0.00%) .map_bound(|this| match this { . ExistentialPredicate::Trait(tr) => Some(tr), . _ => None, 8,612 ( 0.00%) }) . .transpose() 6,459 ( 0.00%) } . 3,075 ( 0.00%) pub fn principal_def_id(&self) -> Option { . 
self.principal().map(|trait_ref| trait_ref.skip_binder().def_id) 6,150 ( 0.00%) } . . #[inline] . pub fn projection_bounds<'a>( . &'a self, . ) -> impl Iterator>> + 'a { . self.iter().filter_map(|predicate| { . predicate . .map_bound(|pred| match pred { -- line 866 ---------------------------------------- -- line 886 ---------------------------------------- . /// T: Foo . /// . /// This would be represented by a trait-reference where the `DefId` is the . /// `DefId` for the trait `Foo` and the substs define `T` as parameter 0, . /// and `U` as parameter 1. . /// . /// Trait references also appear in object types like `Foo`, but in . /// that case the `Self` parameter is absent from the substitutions. 44,167 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 18,757,200 ( 0.04%) #[derive(HashStable, TypeFoldable)] . pub struct TraitRef<'tcx> { 6,964 ( 0.00%) pub def_id: DefId, 11,566,340 ( 0.02%) pub substs: SubstsRef<'tcx>, . } . . impl<'tcx> TraitRef<'tcx> { 20,501 ( 0.00%) pub fn new(def_id: DefId, substs: SubstsRef<'tcx>) -> TraitRef<'tcx> { . TraitRef { def_id, substs } 82,004 ( 0.00%) } . . /// Returns a `TraitRef` of the form `P0: Foo` where `Pi` . /// are the parameters defined on trait. 657 ( 0.00%) pub fn identity(tcx: TyCtxt<'tcx>, def_id: DefId) -> Binder<'tcx, TraitRef<'tcx>> { 292 ( 0.00%) ty::Binder::dummy(TraitRef { . def_id, 438 ( 0.00%) substs: InternalSubsts::identity_for_item(tcx, def_id), . }) 584 ( 0.00%) } . . #[inline] . pub fn self_ty(&self) -> Ty<'tcx> { . self.substs.type_at(0) . } . 23,592 ( 0.00%) pub fn from_method( . tcx: TyCtxt<'tcx>, . trait_id: DefId, . substs: SubstsRef<'tcx>, . ) -> ty::TraitRef<'tcx> { . let defs = tcx.generics_of(trait_id); . 2,949 ( 0.00%) ty::TraitRef { def_id: trait_id, substs: tcx.intern_substs(&substs[..defs.params.len()]) } 32,439 ( 0.00%) } . } . . pub type PolyTraitRef<'tcx> = Binder<'tcx, TraitRef<'tcx>>; . . impl<'tcx> PolyTraitRef<'tcx> { . pub fn self_ty(&self) -> Binder<'tcx, Ty<'tcx>> { . self.map_bound_ref(|tr| tr.self_ty()) . } . . pub fn def_id(&self) -> DefId { 221,613 ( 0.00%) self.skip_binder().def_id 73,871 ( 0.00%) } . 35,280 ( 0.00%) pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> { 70,560 ( 0.00%) self.map_bound(|trait_ref| ty::TraitPredicate { . trait_ref, . constness: ty::BoundConstness::NotConst, . polarity: ty::ImplPolarity::Positive, . }) 35,280 ( 0.00%) } . } . . /// An existential reference to a trait, where `Self` is erased. . /// For example, the trait object `Trait<'a, 'b, X, Y>` is: . /// . /// exists T. T: Trait<'a, 'b, X, Y> . /// . /// The substitutions don't include the erased `Self`, only trait . /// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above). 100 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 126,120 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct ExistentialTraitRef<'tcx> { 16,408 ( 0.00%) pub def_id: DefId, 147,724 ( 0.00%) pub substs: SubstsRef<'tcx>, . } . . impl<'tcx> ExistentialTraitRef<'tcx> { 1,888 ( 0.00%) pub fn erase_self_ty( . tcx: TyCtxt<'tcx>, . trait_ref: ty::TraitRef<'tcx>, . ) -> ty::ExistentialTraitRef<'tcx> { . // Assert there is a Self. . trait_ref.substs.type_at(0); . . ty::ExistentialTraitRef { . def_id: trait_ref.def_id, . substs: tcx.intern_substs(&trait_ref.substs[1..]), . } 1,888 ( 0.00%) } . . /// Object types don't have a self type specified. Therefore, when . /// we convert the principal trait-ref into a normal trait-ref, . 
/// you must give *some* self type. A common choice is `mk_err()` . /// or some placeholder type. . pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::TraitRef<'tcx> { . // otherwise the escaping vars would be captured by the binder . // debug_assert!(!self_ty.has_escaping_bound_vars()); -- line 985 ---------------------------------------- -- line 994 ---------------------------------------- . pub fn def_id(&self) -> DefId { . self.skip_binder().def_id . } . . /// Object types don't have a self type specified. Therefore, when . /// we convert the principal trait-ref into a normal trait-ref, . /// you must give *some* self type. A common choice is `mk_err()` . /// or some placeholder type. 3,231 ( 0.00%) pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::PolyTraitRef<'tcx> { 5,107 ( 0.00%) self.map_bound(|trait_ref| trait_ref.with_self_ty(tcx, self_ty)) 4,308 ( 0.00%) } . } . 271,696 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 190 ( 0.00%) #[derive(HashStable)] . pub enum BoundVariableKind { . Ty(BoundTyKind), . Region(BoundRegionKind), . Const, . } . . /// Binder is a binder for higher-ranked lifetimes or types. It is part of the . /// compiler's representation for things like `for<'a> Fn(&'a isize)` . /// (which would be represented by the type `PolyTraitRef == . /// Binder<'tcx, TraitRef>`). Note that when we instantiate, . /// erase, or otherwise "discharge" these bound vars, we change the . /// type from `Binder<'tcx, T>` to just `T` (see . /// e.g., `liberate_late_bound_regions`). . /// . /// `Decodable` and `Encodable` are implemented for `Binder` using the `impl_binder_encode_decode!` macro. 327,267 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] 4,183,291 ( 0.01%) pub struct Binder<'tcx, T>(T, &'tcx List); . . impl<'tcx, T> Binder<'tcx, T> . where . T: TypeFoldable<'tcx>, . { . /// Wraps `value` in a binder, asserting that `value` does not . /// contain any bound vars that would be bound by the . /// binder. This is commonly used to 'inject' a value T into a . /// different binding level. 250,140 ( 0.00%) pub fn dummy(value: T) -> Binder<'tcx, T> { 1,438,139 ( 0.00%) assert!(!value.has_escaping_bound_vars()); 3,290,129 ( 0.01%) Binder(value, ty::List::empty()) 248,457 ( 0.00%) } . . pub fn bind_with_vars(value: T, vars: &'tcx List) -> Binder<'tcx, T> { . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(vars); . value.visit_with(&mut validator); . } 230,258 ( 0.00%) Binder(value, vars) . } . } . . impl<'tcx, T> Binder<'tcx, T> { . /// Skips the binder and returns the "bound" value. This is a . /// risky thing to do because it's easy to get confused about . /// De Bruijn indices and the like. It is usually better to . /// discharge the binder using `no_bound_vars` or -- line 1053 ---------------------------------------- -- line 1059 ---------------------------------------- . /// accounting. . /// . /// Some examples where `skip_binder` is reasonable: . /// . /// - extracting the `DefId` from a PolyTraitRef; . /// - comparing the self type of a PolyTraitRef to see if it is equal to . /// a type parameter `X`, since the type `X` does not reference any regions . pub fn skip_binder(self) -> T { 13,364,631 ( 0.03%) self.0 . } . . pub fn bound_vars(&self) -> &'tcx List { 38,859 ( 0.00%) self.1 . } . . pub fn as_ref(&self) -> Binder<'tcx, &T> { 82,274 ( 0.00%) Binder(&self.0, self.1) . } . . 
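The `Binder` methods annotated in this stretch (`dummy`, `skip_binder`, `bound_vars`, `map_bound`, ...) all follow one pattern: a value travels together with its list of bound variables, transformations go through `map_bound` so that list is preserved, and `skip_binder` deliberately drops the bookkeeping. A minimal standalone sketch of that pattern follows, assuming a toy `Binder` over strings rather than the real rustc types.

// Standalone sketch, not the rustc type: the wrapped value is carried
// together with its bound-variable list.
struct Binder<T>(T, Vec<&'static str>);

impl<T> Binder<T> {
    // Wrap a value that binds nothing, mirroring `Binder::dummy`.
    fn dummy(value: T) -> Self {
        Binder(value, Vec::new())
    }

    // Transform the bound value while reusing the same bound-variable
    // list, mirroring `Binder::map_bound`.
    fn map_bound<U>(self, f: impl FnOnce(T) -> U) -> Binder<U> {
        Binder(f(self.0), self.1)
    }

    // Peel off the binder, mirroring `Binder::skip_binder`; safe only when
    // the caller does its own bound-variable bookkeeping.
    fn skip_binder(self) -> T {
        self.0
    }
}

fn main() {
    // e.g. for<'a> fn(&'a i32) -> i32, represented here as a plain string.
    let sig = Binder("fn(&'a i32) -> i32".to_string(), vec!["'a"]);
    let output = sig.map_bound(|s| s.rsplit("-> ").next().unwrap().to_string());
    println!("output type: {}", output.skip_binder());

    let unit = Binder::dummy("()");
    println!("binds nothing: {}", unit.skip_binder());
}
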
pub fn map_bound_ref_unchecked(&self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(&T) -> U, . { . let value = f(&self.0); . Binder(value, self.1) -- line 1083 ---------------------------------------- -- line 1085 ---------------------------------------- . . pub fn map_bound_ref>(&self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(&T) -> U, . { . self.as_ref().map_bound(f) . } . 232 ( 0.00%) pub fn map_bound>(self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(T) -> U, . { 306,557 ( 0.00%) let value = f(self.0); . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.1); . value.visit_with(&mut validator); . } 3,135,986 ( 0.01%) Binder(value, self.1) 232 ( 0.00%) } . . pub fn try_map_bound, E>(self, f: F) -> Result, E> . where . F: FnOnce(T) -> Result, . { . let value = f(self.0)?; . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.1); . value.visit_with(&mut validator); . } 91,775 ( 0.00%) Ok(Binder(value, self.1)) . } . . /// Wraps a `value` in a binder, using the same bound variables as the . /// current `Binder`. This should not be used if the new value *changes* . /// the bound variables. Note: the (old or new) value itself does not . /// necessarily need to *name* all the bound variables. . /// . /// This currently doesn't do anything different than `bind`, because we -- line 1122 ---------------------------------------- -- line 1126 ---------------------------------------- . pub fn rebind(&self, value: U) -> Binder<'tcx, U> . where . U: TypeFoldable<'tcx>, . { . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.bound_vars()); . value.visit_with(&mut validator); . } 217,142 ( 0.00%) Binder(value, self.1) . } . . /// Unwraps and returns the value within, but only if it contains . /// no bound vars at all. (In other words, if this binder -- . /// and indeed any enclosing binder -- doesn't bind anything at . /// all.) Otherwise, returns `None`. . /// . /// (One could imagine having a method that just unwraps a single -- line 1142 ---------------------------------------- -- line 1143 ---------------------------------------- . /// binder, but permits late-bound vars bound by enclosing . /// binders, but that would require adjusting the debruijn . /// indices, and given the shallow binding structure we often use, . /// would not be that useful.) . pub fn no_bound_vars(self) -> Option . where . T: TypeFoldable<'tcx>, . { 2,255,822 ( 0.00%) if self.0.has_escaping_bound_vars() { None } else { Some(self.skip_binder()) } 495 ( 0.00%) } . . /// Splits the contents into two things that share the same binder . /// level as the original, returning two distinct binders. . /// . /// `f` should consider bound regions at depth 1 to be free, and . /// anything it produces with bound regions at depth 1 will be . /// bound in the resulting return values. . pub fn split(self, f: F) -> (Binder<'tcx, U>, Binder<'tcx, V>) -- line 1160 ---------------------------------------- -- line 1170 ---------------------------------------- . pub fn transpose(self) -> Option> { . let bound_vars = self.1; . self.0.map(|v| Binder(v, bound_vars)) . } . } . . /// Represents the projection of an associated type. In explicit UFCS . /// form this would be written `>::N`. 20,737 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 1,797,037 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct ProjectionTy<'tcx> { . /// The parameters of the associated item. 
1,224,633 ( 0.00%) pub substs: SubstsRef<'tcx>, . . /// The `DefId` of the `TraitItem` for the associated type `N`. . /// . /// Note that this is not the `DefId` of the `TraitRef` containing this . /// associated type, which is in `tcx.associated_item(item_def_id).container`. 2,031,570 ( 0.00%) pub item_def_id: DefId, . } . . impl<'tcx> ProjectionTy<'tcx> { 233,898 ( 0.00%) pub fn trait_def_id(&self, tcx: TyCtxt<'tcx>) -> DefId { 66,828 ( 0.00%) tcx.associated_item(self.item_def_id).container.id() 267,312 ( 0.00%) } . . /// Extracts the underlying trait reference and own substs from this projection. . /// For example, if this is a projection of `::Item<'a>`, . /// then this function would return a `T: Iterator` trait reference and `['a]` as the own substs 657,720 ( 0.00%) pub fn trait_ref_and_own_substs( . &self, . tcx: TyCtxt<'tcx>, . ) -> (ty::TraitRef<'tcx>, &'tcx [ty::GenericArg<'tcx>]) { 281,880 ( 0.00%) let def_id = tcx.associated_item(self.item_def_id).container.id(); . let trait_generics = tcx.generics_of(def_id); 469,800 ( 0.00%) ( . ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, trait_generics) }, . &self.substs[trait_generics.count()..], . ) 845,640 ( 0.00%) } . . /// Extracts the underlying trait reference from this projection. . /// For example, if this is a projection of `::Item`, . /// then this function would return a `T: Iterator` trait reference. . /// . /// WARNING: This will drop the substs for generic associated types . /// consider calling [Self::trait_ref_and_own_substs] to get those . /// as well. 300,465 ( 0.00%) pub fn trait_ref(&self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx> { 33,385 ( 0.00%) let def_id = self.trait_def_id(tcx); 33,385 ( 0.00%) ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, tcx.generics_of(def_id)) } 367,235 ( 0.00%) } . 67,458 ( 0.00%) pub fn self_ty(&self) -> Ty<'tcx> { 202,374 ( 0.00%) self.substs.type_at(0) 134,916 ( 0.00%) } . } . . #[derive(Copy, Clone, Debug, TypeFoldable)] . pub struct GenSig<'tcx> { . pub resume_ty: Ty<'tcx>, . pub yield_ty: Ty<'tcx>, . pub return_ty: Ty<'tcx>, . } -- line 1233 ---------------------------------------- -- line 1235 ---------------------------------------- . pub type PolyGenSig<'tcx> = Binder<'tcx, GenSig<'tcx>>; . . /// Signature of a function type, which we have arbitrarily . /// decided to use to refer to the input/output types. . /// . /// - `inputs`: is the list of arguments and their modes. . /// - `output`: is the return type. . /// - `c_variadic`: indicates whether this is a C-variadic function. 518,713 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 281,286 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct FnSig<'tcx> { 10,967 ( 0.00%) pub inputs_and_output: &'tcx List>, 161,377 ( 0.00%) pub c_variadic: bool, 29,464 ( 0.00%) pub unsafety: hir::Unsafety, 175,982 ( 0.00%) pub abi: abi::Abi, . } . . impl<'tcx> FnSig<'tcx> { 248,631 ( 0.00%) pub fn inputs(&self) -> &'tcx [Ty<'tcx>] { 1,039,881 ( 0.00%) &self.inputs_and_output[..self.inputs_and_output.len() - 1] 497,262 ( 0.00%) } . 158,902 ( 0.00%) pub fn output(&self) -> Ty<'tcx> { 1,167,543 ( 0.00%) self.inputs_and_output[self.inputs_and_output.len() - 1] 317,804 ( 0.00%) } . . // Creates a minimal `FnSig` to be used when encountering a `TyKind::Error` in a fallible . // method. . fn fake() -> FnSig<'tcx> { . FnSig { . inputs_and_output: List::empty(), . c_variadic: false, . 
unsafety: hir::Unsafety::Normal, -- line 1267 ---------------------------------------- -- line 1270 ---------------------------------------- . } . } . . pub type PolyFnSig<'tcx> = Binder<'tcx, FnSig<'tcx>>; . . impl<'tcx> PolyFnSig<'tcx> { . #[inline] . pub fn inputs(&self) -> Binder<'tcx, &'tcx [Ty<'tcx>]> { 85,721 ( 0.00%) self.map_bound_ref_unchecked(|fn_sig| fn_sig.inputs()) . } . #[inline] . pub fn input(&self, index: usize) -> ty::Binder<'tcx, Ty<'tcx>> { 221,464 ( 0.00%) self.map_bound_ref(|fn_sig| fn_sig.inputs()[index]) . } . pub fn inputs_and_output(&self) -> ty::Binder<'tcx, &'tcx List>> { . self.map_bound_ref(|fn_sig| fn_sig.inputs_and_output) 5,728 ( 0.00%) } . #[inline] . pub fn output(&self) -> ty::Binder<'tcx, Ty<'tcx>> { 166,918 ( 0.00%) self.map_bound_ref(|fn_sig| fn_sig.output()) . } . pub fn c_variadic(&self) -> bool { 11,456 ( 0.00%) self.skip_binder().c_variadic 5,728 ( 0.00%) } . pub fn unsafety(&self) -> hir::Unsafety { 29,142 ( 0.00%) self.skip_binder().unsafety 14,571 ( 0.00%) } . pub fn abi(&self) -> abi::Abi { 221,416 ( 0.00%) self.skip_binder().abi 55,354 ( 0.00%) } . } . . pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder<'tcx, FnSig<'tcx>>>; . 100 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 534 ( 0.00%) #[derive(HashStable)] . pub struct ParamTy { 32 ( 0.00%) pub index: u32, . pub name: Symbol, . } . . impl<'tcx> ParamTy { 6,680 ( 0.00%) pub fn new(index: u32, name: Symbol) -> ParamTy { . ParamTy { index, name } 3,340 ( 0.00%) } . . pub fn for_def(def: &ty::GenericParamDef) -> ParamTy { 54 ( 0.00%) ParamTy::new(def.index, def.name) 27 ( 0.00%) } . . #[inline] . pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { . tcx.mk_ty_param(self.index, self.name) . } . } . . #[derive(Copy, Clone, Hash, TyEncodable, TyDecodable, Eq, PartialEq, Ord, PartialOrd)] 14 ( 0.00%) #[derive(HashStable)] . pub struct ParamConst { 142 ( 0.00%) pub index: u32, 142 ( 0.00%) pub name: Symbol, . } . . impl ParamConst { . pub fn new(index: u32, name: Symbol) -> ParamConst { . ParamConst { index, name } . } . . pub fn for_def(def: &ty::GenericParamDef) -> ParamConst { -- line 1338 ---------------------------------------- -- line 1440 ---------------------------------------- . /// the inference variable is supposed to satisfy the relation . /// *for every value of the placeholder region*. To ensure that doesn't . /// happen, you can use `leak_check`. This is more clearly explained . /// by the [rustc dev guide]. . /// . /// [1]: https://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/ . /// [2]: https://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/ . /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html 39,313,906 ( 0.07%) #[derive(Clone, PartialEq, Eq, Hash, Copy, TyEncodable, TyDecodable, PartialOrd, Ord)] 1,254 ( 0.00%) pub enum RegionKind { . /// Region bound in a type or fn declaration which will be . /// substituted 'early' -- that is, at the same time when type . /// parameters are substituted. . ReEarlyBound(EarlyBoundRegion), . . /// Region bound in a function scope, which will be substituted when the . /// function is called. 1,720,482 ( 0.00%) ReLateBound(ty::DebruijnIndex, BoundRegion), . . /// When checking a function body, the types of all arguments and so forth . /// that refer to bound region parameters are modified to refer to free . /// region parameters. . ReFree(FreeRegion), . . 
/// Static data that has an "infinite" lifetime. Top in the region lattice. . ReStatic, -- line 1465 ---------------------------------------- -- line 1478 ---------------------------------------- . /// regions visible from `U`, but not less than regions not visible . /// from `U`. . ReEmpty(ty::UniverseIndex), . . /// Erased region, used by trait selection, in MIR and during codegen. . ReErased, . } . 96,788 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)] . pub struct EarlyBoundRegion { 441,622 ( 0.00%) pub def_id: DefId, 246,246 ( 0.00%) pub index: u32, 1,404,963 ( 0.00%) pub name: Symbol, . } . . /// A **`const`** **v**ariable **ID**. . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] . pub struct ConstVid<'tcx> { 19,000 ( 0.00%) pub index: u32, 645 ( 0.00%) pub phantom: PhantomData<&'tcx ()>, . } . . rustc_index::newtype_index! { . /// A **region** (lifetime) **v**ariable **ID**. . pub struct RegionVid { . DEBUG_FORMAT = custom, . } . } -- line 1505 ---------------------------------------- -- line 1513 ---------------------------------------- . rustc_index::newtype_index! { . pub struct BoundVar { .. } . } . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub struct BoundTy { . pub var: BoundVar, 69,862 ( 0.00%) pub kind: BoundTyKind, . } . 1,187,753 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub enum BoundTyKind { . Anon, . Param(Symbol), . } . . impl From for BoundTy { . fn from(var: BoundVar) -> Self { -- line 1532 ---------------------------------------- -- line 1616 ---------------------------------------- . RegionKind::RePlaceholder(placeholder) => placeholder.name.is_named(), . RegionKind::ReEmpty(_) => false, . RegionKind::ReErased => false, . } . } . . #[inline] . pub fn is_late_bound(&self) -> bool { 26,837 ( 0.00%) matches!(*self, ty::ReLateBound(..)) . } . . #[inline] . pub fn is_placeholder(&self) -> bool { . matches!(*self, ty::RePlaceholder(..)) . } . . #[inline] . pub fn bound_at_or_above_binder(&self, index: ty::DebruijnIndex) -> bool { 1,893,496 ( 0.00%) match *self { . ty::ReLateBound(debruijn, _) => debruijn >= index, . _ => false, . } . } . . pub fn type_flags(&self) -> TypeFlags { . let mut flags = TypeFlags::empty(); . 1,822,194 ( 0.00%) match *self { . ty::ReVar(..) => { . flags = flags | TypeFlags::HAS_FREE_REGIONS; . flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; . flags = flags | TypeFlags::HAS_RE_INFER; . } . ty::RePlaceholder(..) => { . flags = flags | TypeFlags::HAS_FREE_REGIONS; . flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; -- line 1651 ---------------------------------------- -- line 1669 ---------------------------------------- . ty::ReErased => { . flags = flags | TypeFlags::HAS_RE_ERASED; . } . } . . debug!("type_flags({:?}) = {:?}", self, flags); . . flags 337,986 ( 0.00%) } . . /// Given an early-bound or free region, returns the `DefId` where it was bound. . /// For example, consider the regions in this snippet of code: . /// . /// ``` . /// impl<'a> Foo { . /// ^^ -- early bound, declared on an impl . /// -- line 1685 ---------------------------------------- -- line 1713 ---------------------------------------- . . #[inline(always)] . pub fn flags(&self) -> TypeFlags { . self.flags . } . . #[inline] . 
pub fn is_unit(&self) -> bool { 42,622 ( 0.00%) match self.kind() { 8,254 ( 0.00%) Tuple(ref tys) => tys.is_empty(), . _ => false, . } . } . . #[inline] . pub fn is_never(&self) -> bool { 521,105 ( 0.00%) matches!(self.kind(), Never) . } . . #[inline] . pub fn is_primitive(&self) -> bool { . self.kind().is_primitive() . } . . #[inline] -- line 1737 ---------------------------------------- -- line 1741 ---------------------------------------- . . #[inline] . pub fn is_ref(&self) -> bool { . matches!(self.kind(), Ref(..)) . } . . #[inline] . pub fn is_ty_var(&self) -> bool { 722,311 ( 0.00%) matches!(self.kind(), Infer(TyVar(_))) . } . . #[inline] . pub fn ty_vid(&self) -> Option { 612,008 ( 0.00%) match self.kind() { 106,595 ( 0.00%) &Infer(TyVar(vid)) => Some(vid), . _ => None, . } . } . . #[inline] . pub fn is_ty_infer(&self) -> bool { 87 ( 0.00%) matches!(self.kind(), Infer(_)) . } . . #[inline] . pub fn is_phantom_data(&self) -> bool { . if let Adt(def, _) = self.kind() { def.is_phantom_data() } else { false } . } . . #[inline] . pub fn is_bool(&self) -> bool { 102 ( 0.00%) *self.kind() == Bool . } . . /// Returns `true` if this type is a `str`. . #[inline] . pub fn is_str(&self) -> bool { . *self.kind() == Str . } . . #[inline] . pub fn is_param(&self, index: u32) -> bool { 10,889 ( 0.00%) match self.kind() { . ty::Param(ref data) => data.index == index, . _ => false, . } . } . . #[inline] . pub fn is_slice(&self) -> bool { . match self.kind() { . RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => matches!(ty.kind(), Slice(_) | Str), . _ => false, . } . } . . #[inline] . pub fn is_array(&self) -> bool { 26 ( 0.00%) matches!(self.kind(), Array(..)) . } . . #[inline] . pub fn is_simd(&self) -> bool { . match self.kind() { . Adt(def, _) => def.repr.simd(), . _ => false, . } . } . 490 ( 0.00%) pub fn sequence_element_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 980 ( 0.00%) match self.kind() { . Array(ty, _) | Slice(ty) => ty, . Str => tcx.mk_mach_uint(ty::UintTy::U8), . _ => bug!("`sequence_element_type` called on non-sequence value: {}", self), . } 490 ( 0.00%) } . . pub fn simd_size_and_type(&self, tcx: TyCtxt<'tcx>) -> (u64, Ty<'tcx>) { . match self.kind() { . Adt(def, substs) => { . assert!(def.repr.simd(), "`simd_size_and_type` called on non-SIMD type"); . let variant = def.non_enum_variant(); . let f0_ty = variant.fields[0].ty(tcx, substs); . -- line 1824 ---------------------------------------- -- line 1838 ---------------------------------------- . } . } . _ => bug!("`simd_size_and_type` called on invalid type"), . } . } . . #[inline] . pub fn is_region_ptr(&self) -> bool { 187 ( 0.00%) matches!(self.kind(), Ref(..)) . } . . #[inline] . pub fn is_mutable_ptr(&self) -> bool { . matches!( . self.kind(), . RawPtr(TypeAndMut { mutbl: hir::Mutability::Mut, .. }) . | Ref(_, _, hir::Mutability::Mut) -- line 1854 ---------------------------------------- -- line 1861 ---------------------------------------- . match self.kind() { . Ref(_, _, mutability) => Some(*mutability), . _ => None, . } . } . . #[inline] . pub fn is_unsafe_ptr(&self) -> bool { 51,714 ( 0.00%) matches!(self.kind(), RawPtr(_)) . } . . /// Tests if this is any kind of primitive pointer type (reference, raw pointer, fn pointer). . #[inline] . pub fn is_any_ptr(&self) -> bool { 376 ( 0.00%) self.is_region_ptr() || self.is_unsafe_ptr() || self.is_fn_ptr() . } . . #[inline] . pub fn is_box(&self) -> bool { 685,050 ( 0.00%) match self.kind() { 140,034 ( 0.00%) Adt(def, _) => def.is_box(), . _ => false, . } . } . . 
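// Editorial sketch, not part of the annotated profile above: a minimal,
// self-contained illustration of the `kind()`-dispatch pattern that the
// `Ty::is_*` predicates shown in this listing follow. All names below
// (`ToyKind`, `ToyTy`) are hypothetical stand-ins, not rustc APIs.
enum ToyKind {
    Unit,
    Never,
    RawPtr,
    Adt { is_box: bool },
}

struct ToyTy {
    kind: ToyKind,
}

impl ToyTy {
    fn kind(&self) -> &ToyKind {
        &self.kind
    }

    // Each predicate is a cheap, non-allocating match on the kind,
    // mirroring the shape of the methods profiled above.
    fn is_unit(&self) -> bool {
        matches!(self.kind(), ToyKind::Unit)
    }

    fn is_never(&self) -> bool {
        matches!(self.kind(), ToyKind::Never)
    }

    fn is_unsafe_ptr(&self) -> bool {
        matches!(self.kind(), ToyKind::RawPtr)
    }

    fn is_box(&self) -> bool {
        matches!(self.kind(), ToyKind::Adt { is_box: true })
    }
}

fn main() {
    let t = ToyTy { kind: ToyKind::Adt { is_box: true } };
    assert!(t.is_box() && !t.is_never());
}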
/// Panics if called on any type other than `Box`. 6 ( 0.00%) pub fn boxed_ty(&self) -> Ty<'tcx> { 6 ( 0.00%) match self.kind() { 15 ( 0.00%) Adt(def, substs) if def.is_box() => substs.type_at(0), . _ => bug!("`boxed_ty` is called on non-box type {:?}", self), . } 6 ( 0.00%) } . . /// A scalar type is one that denotes an atomic datum, with no sub-components. . /// (A RawPtr is scalar because it represents a non-managed pointer, so its . /// contents are abstract to rustc.) . #[inline] . pub fn is_scalar(&self) -> bool { 8,427 ( 0.00%) matches!( 19,671 ( 0.00%) self.kind(), . Bool | Char . | Int(_) . | Float(_) . | Uint(_) . | FnDef(..) . | FnPtr(_) . | RawPtr(_) . | Infer(IntVar(_) | FloatVar(_)) . ) . } . . /// Returns `true` if this type is a floating point type. . #[inline] . pub fn is_floating_point(&self) -> bool { 3,140 ( 0.00%) matches!(self.kind(), Float(_) | Infer(FloatVar(_))) . } . . #[inline] . pub fn is_trait(&self) -> bool { 3,324 ( 0.00%) matches!(self.kind(), Dynamic(..)) . } . . #[inline] . pub fn is_enum(&self) -> bool { 41,330 ( 0.00%) matches!(self.kind(), Adt(adt_def, _) if adt_def.is_enum()) . } . . #[inline] . pub fn is_union(&self) -> bool { 135,512 ( 0.00%) matches!(self.kind(), Adt(adt_def, _) if adt_def.is_union()) . } . . #[inline] . pub fn is_closure(&self) -> bool { 27,304 ( 0.00%) matches!(self.kind(), Closure(..)) . } . . #[inline] . pub fn is_generator(&self) -> bool { 52,620 ( 0.00%) matches!(self.kind(), Generator(..)) . } . . #[inline] . pub fn is_integral(&self) -> bool { 211,313 ( 0.00%) matches!(self.kind(), Infer(IntVar(_)) | Int(_) | Uint(_)) . } . . #[inline] . pub fn is_fresh_ty(&self) -> bool { . matches!(self.kind(), Infer(FreshTy(_))) . } . . #[inline] . pub fn is_fresh(&self) -> bool { 34,344 ( 0.00%) matches!(self.kind(), Infer(FreshTy(_) | FreshIntTy(_) | FreshFloatTy(_))) . } . . #[inline] . pub fn is_char(&self) -> bool { . matches!(self.kind(), Char) . } . . #[inline] . pub fn is_numeric(&self) -> bool { . self.is_integral() || self.is_floating_point() . } . . #[inline] . pub fn is_signed(&self) -> bool { 614 ( 0.00%) matches!(self.kind(), Int(_)) . } . . #[inline] . pub fn is_ptr_sized_integral(&self) -> bool { 8,904 ( 0.00%) matches!(self.kind(), Int(ty::IntTy::Isize) | Uint(ty::UintTy::Usize)) . } . . #[inline] . pub fn has_concrete_skeleton(&self) -> bool { . !matches!(self.kind(), Param(_) | Infer(_) | Error(_)) . } . . /// Returns the type and mutability of `*ty`. . /// . /// The parameter `explicit` indicates if this is an *explicit* dereference. . /// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly. 108,253 ( 0.00%) pub fn builtin_deref(&self, explicit: bool) -> Option> { 732,335 ( 0.00%) match self.kind() { 356 ( 0.00%) Adt(def, _) if def.is_box() => { 2 ( 0.00%) Some(TypeAndMut { ty: self.boxed_ty(), mutbl: hir::Mutability::Not }) . } 472,426 ( 0.00%) Ref(_, ty, mutbl) => Some(TypeAndMut { ty, mutbl: *mutbl }), 2,976 ( 0.00%) RawPtr(mt) if explicit => Some(*mt), . _ => None, . } 216,506 ( 0.00%) } . . /// Returns the type of `ty[i]`. . pub fn builtin_index(&self) -> Option> { 96,976 ( 0.00%) match self.kind() { . Array(ty, _) | Slice(ty) => Some(ty), . _ => None, . } 7,462 ( 0.00%) } . 916,976 ( 0.00%) pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { 458,488 ( 0.00%) match self.kind() { 343,866 ( 0.00%) FnDef(def_id, substs) => tcx.fn_sig(*def_id).subst(tcx, substs), . FnPtr(f) => *f, . Error(_) => { . // ignore errors (#54954) . ty::Binder::dummy(FnSig::fake()) . } . Closure(..) => bug!( . 
"to get the signature of a closure, use `substs.as_closure().sig()` not `fn_sig()`", . ), . _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self), . } 1,031,598 ( 0.00%) } . . #[inline] . pub fn is_fn(&self) -> bool { 55,053 ( 0.00%) matches!(self.kind(), FnDef(..) | FnPtr(_)) . } . . #[inline] . pub fn is_fn_ptr(&self) -> bool { . matches!(self.kind(), FnPtr(_)) . } . . #[inline] . pub fn is_impl_trait(&self) -> bool { . matches!(self.kind(), Opaque(..)) . } . . #[inline] . pub fn ty_adt_def(&self) -> Option<&'tcx AdtDef> { 52,021 ( 0.00%) match self.kind() { . Adt(adt, _) => Some(adt), . _ => None, . } . } . . /// Iterates over tuple fields. . /// Panics when called on anything but a tuple. 407 ( 0.00%) pub fn tuple_fields(&self) -> impl DoubleEndedIterator> { 18,064 ( 0.00%) match self.kind() { 9,871 ( 0.00%) Tuple(substs) => substs.iter().map(|field| field.expect_ty()), . _ => bug!("tuple_fields called on non-tuple"), . } 814 ( 0.00%) } . . /// Get the `i`-th element of a tuple. . /// Panics when called on anything but a tuple. 3 ( 0.00%) pub fn tuple_element_ty(&self, i: usize) -> Option> { 6 ( 0.00%) match self.kind() { 6 ( 0.00%) Tuple(substs) => substs.iter().nth(i).map(|field| field.expect_ty()), . _ => bug!("tuple_fields called on non-tuple"), . } 6 ( 0.00%) } . . /// If the type contains variants, returns the valid range of variant indices. . // . // FIXME: This requires the optimized MIR in the case of generators. . #[inline] . pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option> { . match self.kind() { . TyKind::Adt(adt, _) => Some(adt.variant_range()), -- line 2068 ---------------------------------------- -- line 2073 ---------------------------------------- . } . } . . /// If the type contains variants, returns the variant for `variant_index`. . /// Panics if `variant_index` is out of range. . // . // FIXME: This requires the optimized MIR in the case of generators. . #[inline] 57,190 ( 0.00%) pub fn discriminant_for_variant( . &self, . tcx: TyCtxt<'tcx>, . variant_index: VariantIdx, . ) -> Option> { 65,685 ( 0.00%) match self.kind() { 23,364 ( 0.00%) TyKind::Adt(adt, _) if adt.variants.is_empty() => { . // This can actually happen during CTFE, see . // https://github.com/rust-lang/rust/issues/89765. . None . } 11,682 ( 0.00%) TyKind::Adt(adt, _) if adt.is_enum() => { . Some(adt.discriminant_for_variant(tcx, variant_index)) . } . TyKind::Generator(def_id, substs, _) => { . Some(substs.as_generator().discriminant_for_variant(*def_id, tcx, variant_index)) . } . _ => None, . } 45,752 ( 0.00%) } . . /// Returns the type of the discriminant of this type. 197,160 ( 0.00%) pub fn discriminant_ty(&'tcx self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 98,580 ( 0.00%) match self.kind() { 157,212 ( 0.00%) ty::Adt(adt, _) if adt.is_enum() => adt.repr.discr_type().to_ty(tcx), . ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx), . . ty::Param(_) | ty::Projection(_) | ty::Opaque(..) | ty::Infer(ty::TyVar(_)) => { . let assoc_items = tcx.associated_item_def_ids( . tcx.require_lang_item(hir::LangItem::DiscriminantKind, None), . ); . tcx.mk_projection(assoc_items[0], tcx.intern_substs(&[self.into()])) . } -- line 2113 ---------------------------------------- -- line 2127 ---------------------------------------- . | ty::FnDef(..) . | ty::FnPtr(..) . | ty::Dynamic(..) . | ty::Closure(..) . | ty::GeneratorWitness(..) . | ty::Never . | ty::Tuple(_) . | ty::Error(_) 86 ( 0.00%) | ty::Infer(IntVar(_) | FloatVar(_)) => tcx.types.u8, . . ty::Bound(..) . | ty::Placeholder(_) . 
| ty::Infer(FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`discriminant_ty` applied to unexpected type: {:?}", self) . } . } 157,728 ( 0.00%) } . . /// Returns the type of metadata for (potentially fat) pointers to this type. 900 ( 0.00%) pub fn ptr_metadata_ty( . &'tcx self, . tcx: TyCtxt<'tcx>, . normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, . ) -> Ty<'tcx> { 900 ( 0.00%) let tail = tcx.struct_tail_with_normalize(self, normalize); 500 ( 0.00%) match tail.kind() { . // Sized types . ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) . | ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) -- line 2160 ---------------------------------------- -- line 2170 ---------------------------------------- . | ty::Foreign(..) . // If returned by `struct_tail_without_normalization` this is a unit struct . // without any fields, or not a struct, and therefore is Sized. . | ty::Adt(..) . // If returned by `struct_tail_without_normalization` this is the empty tuple, . // a.k.a. unit type, which is Sized . | ty::Tuple(..) => tcx.types.unit, . 200 ( 0.00%) ty::Str | ty::Slice(_) => tcx.types.usize, . ty::Dynamic(..) => { . let dyn_metadata = tcx.lang_items().dyn_metadata().unwrap(); . tcx.type_of(dyn_metadata).subst(tcx, &[tail.into()]) . }, . . ty::Projection(_) . | ty::Param(_) . | ty::Opaque(..) . | ty::Infer(ty::TyVar(_)) . | ty::Bound(..) . | ty::Placeholder(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`ptr_metadata_ty` applied to unexpected type: {:?}", tail) . } . } 800 ( 0.00%) } . . /// When we create a closure, we record its kind (i.e., what trait . /// it implements) into its `ClosureSubsts` using a type . /// parameter. This is kind of a phantom type, except that the . /// most convenient thing for us to are the integral types. This . /// function converts such a special type into the closure . /// kind. To go the other way, use . /// `tcx.closure_kind_ty(closure_kind)`. . /// . /// Note that during type checking, we use an inference variable . /// to represent the closure kind, because it has not yet been . /// inferred. Once upvar inference (in `rustc_typeck/src/check/upvar.rs`) . /// is complete, that type variable will be unified. 484 ( 0.00%) pub fn to_opt_closure_kind(&self) -> Option { 968 ( 0.00%) match self.kind() { 968 ( 0.00%) Int(int_ty) => match int_ty { . ty::IntTy::I8 => Some(ty::ClosureKind::Fn), . ty::IntTy::I16 => Some(ty::ClosureKind::FnMut), . ty::IntTy::I32 => Some(ty::ClosureKind::FnOnce), . _ => bug!("cannot convert type `{:?}` to a closure kind", self), . }, . . // "Bound" types appear in canonical queries when the . // closure type is not yet known . Bound(..) | Infer(_) => None, . . Error(_) => Some(ty::ClosureKind::Fn), . . _ => bug!("cannot convert type `{:?}` to a closure kind", self), . } 484 ( 0.00%) } . . /// Fast path helper for testing if a type is `Sized`. . /// . /// Returning true means the type is known to be sized. Returning . /// `false` means nothing -- could be sized, might not be. . /// . /// Note that we could never rely on the fact that a type such as `[_]` is . /// trivially `!Sized` because we could be in a type environment with a . /// bound such as `[_]: Copy`. A function with such a bound obviously never . /// can be called, but that doesn't mean it shouldn't typecheck. This is why . /// this method doesn't return `Option`. 2,141,415 ( 0.00%) pub fn is_trivially_sized(&self, tcx: TyCtxt<'tcx>) -> bool { 1,427,610 ( 0.00%) match self.kind() { . 
ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) . | ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::RawPtr(..) -- line 2246 ---------------------------------------- -- line 2250 ---------------------------------------- . | ty::GeneratorWitness(..) . | ty::Array(..) . | ty::Closure(..) . | ty::Never . | ty::Error(_) => true, . . ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false, . 13,110 ( 0.00%) ty::Tuple(tys) => tys.iter().all(|ty| ty.expect_ty().is_trivially_sized(tcx)), . 228,789 ( 0.00%) ty::Adt(def, _substs) => def.sized_constraint(tcx).is_empty(), . . ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => false, . . ty::Infer(ty::TyVar(_)) => false, . . ty::Bound(..) . | ty::Placeholder(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`is_trivially_sized` applied to unexpected type: {:?}", self) . } . } 2,141,415 ( 0.00%) } . } . . /// Extra information about why we ended up with a particular variance. . /// This is only used to add more information to error messages, and . /// has no effect on soundness. While choosing the 'wrong' `VarianceDiagInfo` . /// may lead to confusing notes in error messages, it will never cause . /// a miscompilation or unsoundness. . /// . /// When in doubt, use `VarianceDiagInfo::default()` 18 ( 0.00%) #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] . pub enum VarianceDiagInfo<'tcx> { . /// No additional information - this is the default. . /// We will not add any additional information to error messages. . #[default] . None, . /// We switched our variance because a generic argument occurs inside . /// the invariant generic argument of another type. . Invariant { -- line 2290 ---------------------------------------- -- line 2295 ---------------------------------------- . /// (e.g. `0` for `*mut T`, `1` for `MyStruct<'CovariantParam, 'InvariantParam>`) . param_index: u32, . }, . } . . impl<'tcx> VarianceDiagInfo<'tcx> { . /// Mirrors `Variance::xform` - used to 'combine' the existing . /// and new `VarianceDiagInfo`s when our variance changes. 521,726 ( 0.00%) pub fn xform(self, other: VarianceDiagInfo<'tcx>) -> VarianceDiagInfo<'tcx> { . // For now, just use the first `VarianceDiagInfo::Invariant` that we see 782,589 ( 0.00%) match self { . VarianceDiagInfo::None => other, . VarianceDiagInfo::Invariant { .. } => self, . } 260,863 ( 0.00%) } . } 161,049,605 ( 0.30%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/btree/node.rs -------------------------------------------------------------------------------- Ir -- line 221 ---------------------------------------- . NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData } . } . } . . impl NodeRef { . fn new_internal(child: Root) -> Self { . let mut new_node = unsafe { InternalNode::new() }; . new_node.edges[0].write(child.node); 136,702 ( 0.00%) unsafe { NodeRef::from_new_internal(new_node, child.height + 1) } . } . . /// # Safety . /// `height` must not be zero. . unsafe fn from_new_internal(internal: Box>, height: usize) -> Self { . debug_assert!(height > 0); . let node = NonNull::from(Box::leak(internal)).cast(); . let mut this = NodeRef { height, node, _marker: PhantomData }; -- line 237 ---------------------------------------- -- line 269 ---------------------------------------- . impl NodeRef { . 
/// Finds the length of the node. This is the number of keys or values. . /// The number of edges is `len() + 1`. . /// Note that, despite being safe, calling this function can have the side effect . /// of invalidating mutable references that unsafe code has created. . pub fn len(&self) -> usize { . // Crucially, we only access the `len` field here. If BorrowType is marker::ValMut, . // there might be outstanding mutable references to values that we must not invalidate. 2,341,841 ( 0.00%) unsafe { usize::from((*Self::as_leaf_ptr(self)).len) } . } . . /// Returns the number of levels that the node and leaves are apart. Zero . /// height means the node is a leaf itself. If you picture trees with the . /// root on top, the number says at which elevation the node appears. . /// If you picture trees with leaves on top, the number says how high . /// the tree extends above the node. . pub fn height(&self) -> usize { . self.height . } . . /// Temporarily takes out another, immutable reference to the same node. . pub fn reborrow(&self) -> NodeRef, K, V, Type> { 5,957,289 ( 0.01%) NodeRef { height: self.height, node: self.node, _marker: PhantomData } . } . . /// Exposes the leaf portion of any leaf or internal node. . /// . /// Returns a raw ptr to avoid invalidating other references to this node. . fn as_leaf_ptr(this: &Self) -> *mut LeafNode { . // The node must be valid for at least the LeafNode portion. . // This is not a reference in the NodeRef type because we don't know if -- line 299 ---------------------------------------- -- line 314 ---------------------------------------- . /// both, upon success, do nothing. . pub fn ascend( . self, . ) -> Result, marker::Edge>, Self> { . assert!(BorrowType::PERMITS_TRAVERSAL); . // We need to use raw pointers to nodes because, if BorrowType is marker::ValMut, . // there might be outstanding mutable references to values that we must not invalidate. . let leaf_ptr: *const _ = Self::as_leaf_ptr(&self); 1,553,057 ( 0.00%) unsafe { (*leaf_ptr).parent } . .as_ref() . .map(|parent| Handle { 1,805,037 ( 0.00%) node: NodeRef::from_internal(*parent, self.height + 1), 587,079 ( 0.00%) idx: unsafe { usize::from((*leaf_ptr).parent_idx.assume_init()) }, . _marker: PhantomData, . }) . .ok_or(self) . } . . pub fn first_edge(self) -> Handle { . unsafe { Handle::new_edge(self, 0) } . } -- line 334 ---------------------------------------- -- line 373 ---------------------------------------- . // SAFETY: there can be no mutable references into this tree borrowed as `Immut`. . unsafe { &*ptr } . } . . /// Borrows a view into the keys stored in the node. . pub fn keys(&self) -> &[K] { . let leaf = self.into_leaf(); . unsafe { 12,415,873 ( 0.02%) MaybeUninit::slice_assume_init_ref(leaf.keys.get_unchecked(..usize::from(leaf.len))) . } . } . } . . impl NodeRef { . /// Similar to `ascend`, gets a reference to a node's parent node, but also . /// deallocates the current node in the process. This is unsafe because the . /// current node will still be accessible despite being deallocated. -- line 389 ---------------------------------------- -- line 391 ---------------------------------------- . self, . ) -> Option, marker::Edge>> { . let height = self.height; . let node = self.node; . let ret = self.ascend().ok(); . unsafe { . Global.deallocate( . node.cast(), 372,087 ( 0.00%) if height > 0 { . Layout::new::>() . } else { . Layout::new::>() . }, . ); . } . ret . } -- line 407 ---------------------------------------- -- line 435 ---------------------------------------- . 
// SAFETY: we have exclusive access to the entire node. . unsafe { &mut *ptr } . } . } . . impl NodeRef { . /// Borrows exclusive access to the leaf portion of a dying leaf or internal node. . fn as_leaf_dying(&mut self) -> &mut LeafNode { 5 ( 0.00%) let ptr = Self::as_leaf_ptr(self); . // SAFETY: we have exclusive access to the entire node. . unsafe { &mut *ptr } . } . } . . impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { . /// Borrows exclusive access to an element of the key storage area. . /// -- line 451 ---------------------------------------- -- line 467 ---------------------------------------- . /// `index` is in bounds of 0..CAPACITY . unsafe fn val_area_mut(&mut self, index: I) -> &mut Output . where . I: SliceIndex<[MaybeUninit], Output = Output>, . { . // SAFETY: the caller will not be able to call further methods on self . // until the value slice reference is dropped, as we have unique access . // for the lifetime of the borrow. 2,067,276 ( 0.00%) unsafe { self.as_leaf_mut().vals.as_mut_slice().get_unchecked_mut(index) } . } . } . . impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { . /// Borrows exclusive access to an element or slice of the node's storage area for edge contents. . /// . /// # Safety . /// `index` is in bounds of 0..CAPACITY + 1 -- line 483 ---------------------------------------- -- line 534 ---------------------------------------- . } . } . . impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { . /// Sets the node's link to its parent edge, . /// without invalidating other references to the node. . fn set_parent_link(&mut self, parent: NonNull>, parent_idx: usize) { . let leaf = Self::as_leaf_ptr(self); 314,973 ( 0.00%) unsafe { (*leaf).parent = Some(parent) }; . unsafe { (*leaf).parent_idx.write(parent_idx as u16) }; . } . } . . impl NodeRef { . /// Clears the root's link to its parent edge. . fn clear_parent_link(&mut self) { . let mut root_node = self.borrow_mut(); . let leaf = root_node.as_leaf_mut(); 63,587 ( 0.00%) leaf.parent = None; . } . } . . impl NodeRef { . /// Returns a new owned tree, with its own root node that is initially empty. . pub fn new() -> Self { . NodeRef::new_leaf().forget_type() . } . . /// Adds a new internal node with a single edge pointing to the previous root node, . /// make that new node the root node, and return it. This increases the height by 1 . /// and is the opposite of `pop_internal_level`. 410,106 ( 0.00%) pub fn push_internal_level(&mut self) -> NodeRef, K, V, marker::Internal> { . super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root).forget_type()); . . // `self.borrow_mut()`, except that we just forgot we're internal now: . NodeRef { height: self.height, node: self.node, _marker: PhantomData } 546,808 ( 0.00%) } . . /// Removes the internal root node, using its first child as the new root node. . /// As it is intended only to be called when the root node has only one child, . /// no cleanup is done on any of the keys, values and other children. . /// This decreases the height by 1 and is the opposite of `push_internal_level`. . /// . /// Requires exclusive access to the `NodeRef` object but not to the root node; . /// it will not invalidate other handles or references to the root node. . /// . /// Panics if there is no internal level, i.e., if the root node is a leaf. . pub fn pop_internal_level(&mut self) { 187,993 ( 0.00%) assert!(self.height > 0); . . let top = self.node; . . // SAFETY: we asserted to be internal. . 
let internal_self = unsafe { self.borrow_mut().cast_to_internal_unchecked() }; . // SAFETY: we borrowed `self` exclusively and its borrow type is exclusive. . let internal_node = unsafe { &mut *NodeRef::as_internal_ptr(&internal_self) }; . // SAFETY: the first edge is always initialized. 63,587 ( 0.00%) self.node = unsafe { internal_node.edges[0].assume_init_read() }; 127,174 ( 0.00%) self.height -= 1; . self.clear_parent_link(); . . unsafe { . Global.deallocate(top.cast(), Layout::new::>()); . } . } . } . . impl NodeRef { . /// Mutably borrows the owned root node. Unlike `reborrow_mut`, this is safe . /// because the return value cannot be used to destroy the root, and there . /// cannot be other references to the tree. . pub fn borrow_mut(&mut self) -> NodeRef, K, V, Type> { 11,472,568 ( 0.02%) NodeRef { height: self.height, node: self.node, _marker: PhantomData } . } . . /// Slightly mutably borrows the owned root node. . pub fn borrow_valmut(&mut self) -> NodeRef, K, V, Type> { . NodeRef { height: self.height, node: self.node, _marker: PhantomData } . } . . /// Irreversibly transitions to a reference that permits traversal and offers -- line 614 ---------------------------------------- -- line 617 ---------------------------------------- . NodeRef { height: self.height, node: self.node, _marker: PhantomData } . } . } . . impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Leaf> { . /// Adds a key-value pair to the end of the node. . pub fn push(&mut self, key: K, val: V) { . let len = self.len_mut(); 38 ( 0.00%) let idx = usize::from(*len); 19 ( 0.00%) assert!(idx < CAPACITY); 50 ( 0.00%) *len += 1; . unsafe { . self.key_area_mut(idx).write(key); . self.val_area_mut(idx).write(val); . } . } . } . . impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::Internal> { . /// Adds a key-value pair, and an edge to go to the right of that pair, . /// to the end of the node. . pub fn push(&mut self, key: K, val: V, edge: Root) { 273,404 ( 0.00%) assert!(edge.height == self.height - 1); . . let len = self.len_mut(); 136,702 ( 0.00%) let idx = usize::from(*len); 68,351 ( 0.00%) assert!(idx < CAPACITY); 136,702 ( 0.00%) *len += 1; . unsafe { . self.key_area_mut(idx).write(key); . self.val_area_mut(idx).write(val); 68,351 ( 0.00%) self.edge_area_mut(idx + 1).write(edge.node); . Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link(); . } . } . } . . impl NodeRef { . /// Removes any static information asserting that this node is a `Leaf` node. . pub fn forget_type(self) -> NodeRef { -- line 656 ---------------------------------------- -- line 668 ---------------------------------------- . impl NodeRef { . /// Checks whether a node is an `Internal` node or a `Leaf` node. . pub fn force( . self, . ) -> ForceResult< . NodeRef, . NodeRef, . > { 11,239,818 ( 0.02%) if self.height == 0 { . ForceResult::Leaf(NodeRef { . height: self.height, . node: self.node, . _marker: PhantomData, . }) . } else { . ForceResult::Internal(NodeRef { . height: self.height, -- line 684 ---------------------------------------- -- line 747 ---------------------------------------- . Handle { node, idx, _marker: PhantomData } . } . . pub fn left_edge(self) -> Handle, marker::Edge> { . unsafe { Handle::new_edge(self.node, self.idx) } . } . . pub fn right_edge(self) -> Handle, marker::Edge> { 1,304,011 ( 0.00%) unsafe { Handle::new_edge(self.node, self.idx + 1) } . } . } . . impl PartialEq . for Handle, HandleType> . { . fn eq(&self, other: &Self) -> bool { . 
let Self { node, idx, _marker } = self; -- line 763 ---------------------------------------- -- line 790 ---------------------------------------- . } . . impl Handle, marker::Edge> { . /// Creates a new handle to an edge in `node`. . /// Unsafe because the caller must ensure that `idx <= node.len()`. . pub unsafe fn new_edge(node: NodeRef, idx: usize) -> Self { . debug_assert!(idx <= node.len()); . 2,403,817 ( 0.00%) Handle { node, idx, _marker: PhantomData } . } . . pub fn left_kv(self) -> Result, marker::KV>, Self> { 179,418 ( 0.00%) if self.idx > 0 { 169,291 ( 0.00%) Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) }) . } else { . Err(self) . } . } . . pub fn right_kv(self) -> Result, marker::KV>, Self> { 1,920,857 ( 0.00%) if self.idx < self.node.len() { . Ok(unsafe { Handle::new_kv(self.node, self.idx) }) . } else { . Err(self) . } . } . } . . pub enum LeftOrRight { -- line 818 ---------------------------------------- -- line 820 ---------------------------------------- . Right(T), . } . . /// Given an edge index where we want to insert into a node filled to capacity, . /// computes a sensible KV index of a split point and where to perform the insertion. . /// The goal of the split point is for its key and value to end up in a parent node; . /// the keys, values and edges to the left of the split point become the left child; . /// the keys, values and edges to the right of the split point become the right child. 196,009 ( 0.00%) fn splitpoint(edge_idx: usize) -> (usize, LeftOrRight) { . debug_assert!(edge_idx <= CAPACITY); . // Rust issue #74834 tries to explain these symmetric rules. 773,800 ( 0.00%) match edge_idx { 392,018 ( 0.00%) 0..EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER - 1, LeftOrRight::Left(edge_idx)), . EDGE_IDX_LEFT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Left(edge_idx)), . EDGE_IDX_RIGHT_OF_CENTER => (KV_IDX_CENTER, LeftOrRight::Right(0)), 573,171 ( 0.00%) _ => (KV_IDX_CENTER + 1, LeftOrRight::Right(edge_idx - (KV_IDX_CENTER + 1 + 1))), . } 196,009 ( 0.00%) } . . impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { . /// Inserts a new key-value pair between the key-value pairs to the right and left of . /// this edge. This method assumes that there is enough space in the node for the new . /// pair to fit. . /// . /// The returned pointer points to the inserted value. 15,872,278 ( 0.03%) fn insert_fit(&mut self, key: K, val: V) -> *mut V { . debug_assert!(self.node.len() < CAPACITY); . let new_len = self.node.len() + 1; . . unsafe { 2,014,496 ( 0.00%) slice_insert(self.node.key_area_mut(..new_len), self.idx, key); 2,287,872 ( 0.00%) slice_insert(self.node.val_area_mut(..new_len), self.idx, val); 2,014,478 ( 0.00%) *self.node.len_mut() = new_len as u16; . . self.node.val_area_mut(self.idx).assume_init_mut() . } 16,115,824 ( 0.03%) } . } . . impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { . /// Inserts a new key-value pair between the key-value pairs to the right and left of . /// this edge. This method splits the node if there isn't enough room. . /// . /// The returned pointer points to the inserted value. . fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) { 6,043,434 ( 0.01%) if self.node.len() < CAPACITY { 10,020,462 ( 0.02%) let val_ptr = self.insert_fit(key, val); 3,643,658 ( 0.01%) let kv = unsafe { Handle::new_kv(self.node, self.idx) }; . (InsertResult::Fit(kv), val_ptr) . 
} else { 1,541,192 ( 0.00%) let (middle_kv_idx, insertion) = splitpoint(self.idx); 381,759 ( 0.00%) let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) }; . let mut result = middle.split(); 942,172 ( 0.00%) let mut insertion_edge = match insertion { . LeftOrRight::Left(insert_idx) => unsafe { . Handle::new_edge(result.left.reborrow_mut(), insert_idx) . }, . LeftOrRight::Right(insert_idx) => unsafe { . Handle::new_edge(result.right.borrow_mut(), insert_idx) . }, . }; 609,277 ( 0.00%) let val_ptr = insertion_edge.insert_fit(key, val); 463,621 ( 0.00%) (InsertResult::Split(result), val_ptr) . } . } . } . . impl<'a, K, V> Handle, K, V, marker::Internal>, marker::Edge> { . /// Fixes the parent pointer and index in the child node that this edge . /// links to. This is useful when the ordering of edges has been changed, . fn correct_parent_link(self) { -- line 890 ---------------------------------------- -- line 895 ---------------------------------------- . child.set_parent_link(ptr, idx); . } . } . . impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::Edge> { . /// Inserts a new key-value pair and an edge that will go to the right of that new pair . /// between this edge and the key-value pair to the right of this edge. This method assumes . /// that there is enough space in the node for the new pair to fit. 1,017,953 ( 0.00%) fn insert_fit(&mut self, key: K, val: V, edge: Root) { . debug_assert!(self.node.len() < CAPACITY); . debug_assert!(edge.height == self.node.height - 1); . let new_len = self.node.len() + 1; . . unsafe { 127,658 ( 0.00%) slice_insert(self.node.key_area_mut(..new_len), self.idx, key); 65,532 ( 0.00%) slice_insert(self.node.val_area_mut(..new_len), self.idx, val); . slice_insert(self.node.edge_area_mut(..new_len + 1), self.idx + 1, edge.node); 127,658 ( 0.00%) *self.node.len_mut() = new_len as u16; . . self.node.correct_childrens_parent_links(self.idx + 1..new_len + 1); . } 1,021,264 ( 0.00%) } . . /// Inserts a new key-value pair and an edge that will go to the right of that new pair . /// between this edge and the key-value pair to the right of this edge. This method splits . /// the node if there isn't enough room. . fn insert( . mut self, . key: K, . val: V, . edge: Root, . ) -> InsertResult<'a, K, V, marker::Internal> { 382,974 ( 0.00%) assert!(edge.height == self.node.height - 1); . 255,316 ( 0.00%) if self.node.len() < CAPACITY { 552,038 ( 0.00%) self.insert_fit(key, val, edge); . let kv = unsafe { Handle::new_kv(self.node, self.idx) }; . InsertResult::Fit(kv) . } else { 24,359 ( 0.00%) let (middle_kv_idx, insertion) = splitpoint(self.idx); . let middle = unsafe { Handle::new_kv(self.node, middle_kv_idx) }; . let mut result = middle.split(); 4,193 ( 0.00%) let mut insertion_edge = match insertion { . LeftOrRight::Left(insert_idx) => unsafe { . Handle::new_edge(result.left.reborrow_mut(), insert_idx) . }, . LeftOrRight::Right(insert_idx) => unsafe { . Handle::new_edge(result.right.borrow_mut(), insert_idx) . }, . }; 29,024 ( 0.00%) insertion_edge.insert_fit(key, val, edge); 8,343 ( 0.00%) InsertResult::Split(result) . } . } . } . . impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::Edge> { . /// Inserts a new key-value pair between the key-value pairs to the right and left of . /// this edge. This method splits the node if there isn't enough room, and tries to . /// insert the split off portion into the parent node recursively, until the root is reached. . /// . 
/// If the returned result is a `Fit`, its handle's node can be this edge's node or an ancestor. . /// If the returned result is a `Split`, the `left` field will be the root node. . /// The returned pointer points to the inserted value. 19,986,701 ( 0.04%) pub fn insert_recursing( . self, . key: K, . value: V, . ) -> (InsertResult<'a, K, V, marker::LeafOrInternal>, *mut V) { 11,657,469 ( 0.02%) let (mut split, val_ptr) = match self.insert(key, value) { . (InsertResult::Fit(handle), ptr) => { 9,109,145 ( 0.02%) return (InsertResult::Fit(handle.forget_node_type()), ptr); . } . (InsertResult::Split(split), val_ptr) => (split.forget_node_type(), val_ptr), . }; . . loop { 953,123 ( 0.00%) split = match split.left.ascend() { 558,468 ( 0.00%) Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right) { . InsertResult::Fit(handle) => { 648,861 ( 0.00%) return (InsertResult::Fit(handle.forget_node_type()), val_ptr); . } . InsertResult::Split(split) => split.forget_node_type(), . }, . Err(root) => { 607,007 ( 0.00%) return (InsertResult::Split(SplitResult { left: root, ..split }), val_ptr); . } . }; . } 18,130,302 ( 0.03%) } . } . . impl . Handle, marker::Edge> . { . /// Finds the node pointed to by this edge. . /// . /// The method name assumes you picture trees with the root node on top. -- line 992 ---------------------------------------- -- line 999 ---------------------------------------- . // marker::ValMut, there might be outstanding mutable references to . // values that we must not invalidate. There's no worry accessing the . // height field because that value is copied. Beware that, once the . // node pointer is dereferenced, we access the edges array with a . // reference (Rust issue #73987) and invalidate any other references . // to or inside the array, should any be around. . let parent_ptr = NodeRef::as_internal_ptr(&self.node); . let node = unsafe { (*parent_ptr).edges.get_unchecked(self.idx).assume_init_read() }; 485,057 ( 0.00%) NodeRef { node, height: self.node.height - 1, _marker: PhantomData } . } . } . . impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { . pub fn into_kv(self) -> (&'a K, &'a V) { . debug_assert!(self.idx < self.node.len()); . let leaf = self.node.into_leaf(); . let k = unsafe { leaf.keys.get_unchecked(self.idx).assume_init_ref() }; . let v = unsafe { leaf.vals.get_unchecked(self.idx).assume_init_ref() }; 2,681,676 ( 0.01%) (k, v) . } . } . . impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { . pub fn key_mut(&mut self) -> &mut K { . unsafe { self.node.key_area_mut(self.idx).assume_init_mut() } . } . -- line 1025 ---------------------------------------- -- line 1037 ---------------------------------------- . } . . impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { . pub fn kv_mut(&mut self) -> (&mut K, &mut V) { . debug_assert!(self.idx < self.node.len()); . // We cannot call separate key and value methods, because calling the second one . // invalidates the reference returned by the first. . unsafe { 7,427 ( 0.00%) let leaf = self.node.as_leaf_mut(); 7,427 ( 0.00%) let key = leaf.keys.get_unchecked_mut(self.idx).assume_init_mut(); . let val = leaf.vals.get_unchecked_mut(self.idx).assume_init_mut(); 6,760 ( 0.00%) (key, val) . } . } . . /// Replaces the key and value that the KV handle refers to. . pub fn replace_kv(&mut self, k: K, v: V) -> (K, V) { . let (key, val) = self.kv_mut(); . (mem::replace(key, k), mem::replace(val, v)) . 
} -- line 1056 ---------------------------------------- -- line 1069 ---------------------------------------- . (key, val) . } . } . . /// Drops the key and value that the KV handle refers to. . /// # Safety . /// The node that the handle refers to must not yet have been deallocated. . #[inline] 30 ( 0.00%) pub unsafe fn drop_key_val(mut self) { . debug_assert!(self.idx < self.node.len()); . let leaf = self.node.as_leaf_dying(); . unsafe { 5 ( 0.00%) leaf.keys.get_unchecked_mut(self.idx).assume_init_drop(); 138,860 ( 0.00%) leaf.vals.get_unchecked_mut(self.idx).assume_init_drop(); . } 35 ( 0.00%) } . } . . impl<'a, K: 'a, V: 'a, NodeType> Handle, K, V, NodeType>, marker::KV> { . /// Helps implementations of `split` for a particular `NodeType`, . /// by taking care of leaf data. . fn split_leaf_data(&mut self, new_node: &mut LeafNode) -> (K, V) { . debug_assert!(self.idx < self.node.len()); . let old_len = self.node.len(); 997,618 ( 0.00%) let new_len = old_len - self.idx - 1; 196,669 ( 0.00%) new_node.len = new_len as u16; . unsafe { 163,190 ( 0.00%) let k = self.node.key_area_mut(self.idx).assume_init_read(); 151,657 ( 0.00%) let v = self.node.val_area_mut(self.idx).assume_init_read(); . . move_to_slice( 588,027 ( 0.00%) self.node.key_area_mut(self.idx + 1..old_len), 196,009 ( 0.00%) &mut new_node.keys[..new_len], . ); . move_to_slice( . self.node.val_area_mut(self.idx + 1..old_len), 197,877 ( 0.00%) &mut new_node.vals[..new_len], . ); . 347,670 ( 0.00%) *self.node.len_mut() = self.idx as u16; 185,567 ( 0.00%) (k, v) . } . } . } . . impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::KV> { . /// Splits the underlying node into three parts: . /// . /// - The node is truncated to only contain the key-value pairs to the left of -- line 1117 ---------------------------------------- -- line 1120 ---------------------------------------- . /// - All the key-value pairs to the right of this handle are put into a newly . /// allocated node. . pub fn split(mut self) -> SplitResult<'a, K, V, marker::Leaf> { . let mut new_node = LeafNode::new(); . . let kv = self.split_leaf_data(&mut new_node); . . let right = NodeRef::from_new_leaf(new_node); 1,720,895 ( 0.00%) SplitResult { left: self.node, kv, right } . } . . /// Removes the key-value pair pointed to by this handle and returns it, along with the edge . /// that the key-value pair collapsed into. . pub fn remove( . mut self, . ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { . let old_len = self.node.len(); . unsafe { . let k = slice_remove(self.node.key_area_mut(..old_len), self.idx); 1,207,903 ( 0.00%) let v = slice_remove(self.node.val_area_mut(..old_len), self.idx); 6,089,455 ( 0.01%) *self.node.len_mut() = (old_len - 1) as u16; 5,345,433 ( 0.01%) ((k, v), self.left_edge()) . } . } . } . . impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::KV> { . /// Splits the underlying node into three parts: . /// . /// - The node is truncated to only contain the edges and key-value pairs to the -- line 1149 ---------------------------------------- -- line 1150 ---------------------------------------- . /// left of this handle. . /// - The key and value pointed to by this handle are extracted. . /// - All the edges and key-value pairs to the right of this handle are put into . /// a newly allocated node. . pub fn split(mut self) -> SplitResult<'a, K, V, marker::Internal> { . let old_len = self.node.len(); . unsafe { . let mut new_node = InternalNode::new(); 5,604 ( 0.00%) let kv = self.split_leaf_data(&mut new_node.data); . 
let new_len = usize::from(new_node.data.len); . move_to_slice( 3,360 ( 0.00%) self.node.edge_area_mut(self.idx + 1..old_len + 1), 6,720 ( 0.00%) &mut new_node.edges[..new_len + 1], . ); . . let height = self.node.height; . let right = NodeRef::from_new_internal(new_node, height); . 22,290 ( 0.00%) SplitResult { left: self.node, kv, right } . } . } . } . . /// Represents a session for evaluating and performing a balancing operation . /// around an internal key-value pair. . pub struct BalancingContext<'a, K, V> { . parent: Handle, K, V, marker::Internal>, marker::KV>, -- line 1176 ---------------------------------------- -- line 1242 ---------------------------------------- . . pub fn into_right_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { . self.right_child . } . . /// Returns whether merging is possible, i.e., whether there is enough room . /// in a node to combine the central KV with both adjacent child nodes. . pub fn can_merge(&self) -> bool { 526,048 ( 0.00%) self.left_child.len() + 1 + self.right_child.len() <= CAPACITY . } . } . . impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { . /// Performs a merge and lets a closure decide what to return. . fn do_merge< . F: FnOnce( . NodeRef, K, V, marker::Internal>, -- line 1258 ---------------------------------------- -- line 1264 ---------------------------------------- . result: F, . ) -> R { . let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent; . let old_parent_len = parent_node.len(); . let mut left_node = self.left_child; . let old_left_len = left_node.len(); . let mut right_node = self.right_child; . let right_len = right_node.len(); 335,164 ( 0.00%) let new_left_len = old_left_len + 1 + right_len; . 335,164 ( 0.00%) assert!(new_left_len <= CAPACITY); . . unsafe { 167,582 ( 0.00%) *left_node.len_mut() = new_left_len as u16; . . let parent_key = slice_remove(parent_node.key_area_mut(..old_parent_len), parent_idx); . left_node.key_area_mut(old_left_len).write(parent_key); . move_to_slice( . right_node.key_area_mut(..right_len), . left_node.key_area_mut(old_left_len + 1..new_left_len), . ); . . let parent_val = slice_remove(parent_node.val_area_mut(..old_parent_len), parent_idx); . left_node.val_area_mut(old_left_len).write(parent_val); . move_to_slice( . right_node.val_area_mut(..right_len), . left_node.val_area_mut(old_left_len + 1..new_left_len), . ); . 167,582 ( 0.00%) slice_remove(&mut parent_node.edge_area_mut(..old_parent_len + 1), parent_idx + 1); . parent_node.correct_childrens_parent_links(parent_idx + 1..old_parent_len); 1,005,687 ( 0.00%) *parent_node.len_mut() -= 1; . 335,164 ( 0.00%) if parent_node.height > 1 { . // SAFETY: the height of the nodes being merged is one below the height . // of the node of this edge, thus above zero, so they are internal. . let mut left_node = left_node.reborrow_mut().cast_to_internal_unchecked(); . let mut right_node = right_node.cast_to_internal_unchecked(); . move_to_slice( . right_node.edge_area_mut(..right_len + 1), 2,069 ( 0.00%) left_node.edge_area_mut(old_left_len + 1..new_left_len + 1), . ); . . left_node.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1); . . Global.deallocate(right_node.node.cast(), Layout::new::>()); . } else { . Global.deallocate(right_node.node.cast(), Layout::new::>()); . } -- line 1312 ---------------------------------------- -- line 1313 ---------------------------------------- . } . result(parent_node, left_node) . } . . /// Merges the parent's key-value pair and both adjacent child nodes into . 
/// the left child node and returns the shrunk parent node. . /// . /// Panics unless we `.can_merge()`. 14,483 ( 0.00%) pub fn merge_tracking_parent(self) -> NodeRef, K, V, marker::Internal> { 4,138 ( 0.00%) self.do_merge(|parent, _child| parent) 20,684 ( 0.00%) } . . /// Merges the parent's key-value pair and both adjacent child nodes into . /// the left child node and returns that child node. . /// . /// Panics unless we `.can_merge()`. . pub fn merge_tracking_child(self) -> NodeRef, K, V, marker::LeafOrInternal> { . self.do_merge(|_parent, child| child) . } . . /// Merges the parent's key-value pair and both adjacent child nodes into . /// the left child node and returns the edge handle in that child node . /// where the tracked child edge ended up, . /// . /// Panics unless we `.can_merge()`. 1,158,591 ( 0.00%) pub fn merge_tracking_child_edge( . self, . track_edge_idx: LeftOrRight, . ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { 165,513 ( 0.00%) let old_left_len = self.left_child.len(); 165,513 ( 0.00%) let right_len = self.right_child.len(); 496,539 ( 0.00%) assert!(match track_edge_idx { . LeftOrRight::Left(idx) => idx <= old_left_len, . LeftOrRight::Right(idx) => idx <= right_len, . }); . let child = self.merge_tracking_child(); 165,513 ( 0.00%) let new_idx = match track_edge_idx { . LeftOrRight::Left(idx) => idx, 487,587 ( 0.00%) LeftOrRight::Right(idx) => old_left_len + 1 + idx, . }; . unsafe { Handle::new_edge(child, new_idx) } 1,324,104 ( 0.00%) } . . /// Removes a key-value pair from the left child and places it in the key-value storage . /// of the parent, while pushing the old parent key-value pair into the right child. . /// Returns a handle to the edge in the right child corresponding to where the original . /// edge specified by `track_right_edge_idx` ended up. . pub fn steal_left( . mut self, . track_right_edge_idx: usize, . ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { 9,390 ( 0.00%) self.bulk_steal_left(1); . unsafe { Handle::new_edge(self.right_child, 1 + track_right_edge_idx) } . } . . /// Removes a key-value pair from the right child and places it in the key-value storage . /// of the parent, while pushing the old parent key-value pair onto the left child. . /// Returns a handle to the edge in the left child specified by `track_left_edge_idx`, . /// which didn't move. . pub fn steal_right( . mut self, . track_left_edge_idx: usize, . ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { 5,464 ( 0.00%) self.bulk_steal_right(1); . unsafe { Handle::new_edge(self.left_child, track_left_edge_idx) } . } . . /// This does stealing similar to `steal_left` but steals multiple elements at once. 32,865 ( 0.00%) pub fn bulk_steal_left(&mut self, count: usize) { 9,390 ( 0.00%) assert!(count > 0); . unsafe { . let left_node = &mut self.left_child; . let old_left_len = left_node.len(); . let right_node = &mut self.right_child; 4,695 ( 0.00%) let old_right_len = right_node.len(); . . // Make sure that we may steal safely. 23,475 ( 0.00%) assert!(old_right_len + count <= CAPACITY); 13,856 ( 0.00%) assert!(old_left_len >= count); . . let new_left_len = old_left_len - count; . let new_right_len = old_right_len + count; 4,695 ( 0.00%) *left_node.len_mut() = new_left_len as u16; 8,320 ( 0.00%) *right_node.len_mut() = new_right_len as u16; . . // Move leaf data. . { . // Make room for stolen elements in the right child. . slice_shr(right_node.key_area_mut(..new_right_len), count); . slice_shr(right_node.val_area_mut(..new_right_len), count); . . 
// Move elements from the left child to the right one. . move_to_slice( 9,390 ( 0.00%) left_node.key_area_mut(new_left_len + 1..old_left_len), 4,695 ( 0.00%) right_node.key_area_mut(..count - 1), . ); . move_to_slice( . left_node.val_area_mut(new_left_len + 1..old_left_len), . right_node.val_area_mut(..count - 1), . ); . . // Move the left-most stolen pair to the parent. 4,466 ( 0.00%) let k = left_node.key_area_mut(new_left_len).assume_init_read(); . let v = left_node.val_area_mut(new_left_len).assume_init_read(); 6,381 ( 0.00%) let (k, v) = self.parent.replace_kv(k, v); . . // Move parent's key-value pair to the right child. . right_node.key_area_mut(count - 1).write(k); . right_node.val_area_mut(count - 1).write(v); . } . 23,475 ( 0.00%) match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) { . (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => { . // Make room for stolen edges. . slice_shr(right.edge_area_mut(..new_right_len + 1), count); . . // Steal edges. . move_to_slice( . left.edge_area_mut(new_left_len + 1..old_left_len + 1), . right.edge_area_mut(..count), -- line 1432 ---------------------------------------- -- line 1433 ---------------------------------------- . ); . . right.correct_childrens_parent_links(0..new_right_len + 1); . } . (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} . _ => unreachable!(), . } . } 37,560 ( 0.00%) } . . /// The symmetric clone of `bulk_steal_left`. 19,124 ( 0.00%) pub fn bulk_steal_right(&mut self, count: usize) { 5,464 ( 0.00%) assert!(count > 0); . unsafe { . let left_node = &mut self.left_child; 2,732 ( 0.00%) let old_left_len = left_node.len(); . let right_node = &mut self.right_child; . let old_right_len = right_node.len(); . . // Make sure that we may steal safely. 13,660 ( 0.00%) assert!(old_left_len + count <= CAPACITY); 5,606 ( 0.00%) assert!(old_right_len >= count); . . let new_left_len = old_left_len + count; . let new_right_len = old_right_len - count; 2,732 ( 0.00%) *left_node.len_mut() = new_left_len as u16; 2,732 ( 0.00%) *right_node.len_mut() = new_right_len as u16; . . // Move leaf data. . { . // Move the right-most stolen pair to the parent. 3,155 ( 0.00%) let k = right_node.key_area_mut(count - 1).assume_init_read(); . let v = right_node.val_area_mut(count - 1).assume_init_read(); 1,692 ( 0.00%) let (k, v) = self.parent.replace_kv(k, v); . . // Move parent's key-value pair to the left child. . left_node.key_area_mut(old_left_len).write(k); . left_node.val_area_mut(old_left_len).write(v); . . // Move elements from the right child to the left one. . move_to_slice( . right_node.key_area_mut(..count - 1), 2,732 ( 0.00%) left_node.key_area_mut(old_left_len + 1..new_left_len), . ); . move_to_slice( . right_node.val_area_mut(..count - 1), . left_node.val_area_mut(old_left_len + 1..new_left_len), . ); . . // Fill gap where stolen elements used to be. . slice_shl(right_node.key_area_mut(..old_right_len), count); . slice_shl(right_node.val_area_mut(..old_right_len), count); . } . 13,660 ( 0.00%) match (left_node.reborrow_mut().force(), right_node.reborrow_mut().force()) { . (ForceResult::Internal(mut left), ForceResult::Internal(mut right)) => { . // Steal edges. . move_to_slice( . right.edge_area_mut(..count), . left.edge_area_mut(old_left_len + 1..new_left_len + 1), . ); . . // Fill gap where stolen edges used to be. -- line 1495 ---------------------------------------- -- line 1497 ---------------------------------------- . . 
left.correct_childrens_parent_links(old_left_len + 1..new_left_len + 1); . right.correct_childrens_parent_links(0..new_right_len + 1); . } . (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {} . _ => unreachable!(), . } . } 21,856 ( 0.00%) } . } . . impl Handle, marker::Edge> { . pub fn forget_node_type( . self, . ) -> Handle, marker::Edge> { . unsafe { Handle::new_edge(self.node.forget_type(), self.idx) } . } -- line 1513 ---------------------------------------- -- line 1681 ---------------------------------------- . /// . /// # Safety . /// The slice has more than `idx` elements. . unsafe fn slice_insert(slice: &mut [MaybeUninit], idx: usize, val: T) { . unsafe { . let len = slice.len(); . debug_assert!(len > idx); . let slice_ptr = slice.as_mut_ptr(); 15,331,873 ( 0.03%) if len > idx + 1 { 1,753,690 ( 0.00%) ptr::copy(slice_ptr.add(idx), slice_ptr.add(idx + 1), len - idx - 1); . } . (*slice_ptr.add(idx)).write(val); . } . } . . /// Removes and returns a value from a slice of all initialized elements, leaving behind one . /// trailing uninitialized element. . /// -- line 1698 ---------------------------------------- -- line 1699 ---------------------------------------- . /// # Safety . /// The slice has more than `idx` elements. . unsafe fn slice_remove(slice: &mut [MaybeUninit], idx: usize) -> T { . unsafe { . let len = slice.len(); . debug_assert!(idx < len); . let slice_ptr = slice.as_mut_ptr(); . let ret = (*slice_ptr.add(idx)).assume_init_read(); 15,698,784 ( 0.03%) ptr::copy(slice_ptr.add(idx + 1), slice_ptr.add(idx), len - idx - 1); . ret . } . } . . /// Shifts the elements in a slice `distance` positions to the left. . /// . /// # Safety . /// The slice has at least `distance` elements. -- line 1715 ---------------------------------------- -- line 1722 ---------------------------------------- . . /// Shifts the elements in a slice `distance` positions to the right. . /// . /// # Safety . /// The slice has at least `distance` elements. . unsafe fn slice_shr(slice: &mut [MaybeUninit], distance: usize) { . unsafe { . let slice_ptr = slice.as_mut_ptr(); 10,460 ( 0.00%) ptr::copy(slice_ptr, slice_ptr.add(distance), slice.len() - distance); . } . } . . /// Moves all values from a slice of initialized elements to a slice . /// of uninitialized elements, leaving behind `src` as all uninitialized. . /// Works like `dst.copy_from_slice(src)` but does not require `T` to be `Copy`. . fn move_to_slice(src: &mut [MaybeUninit], dst: &mut [MaybeUninit]) { 755,916 ( 0.00%) assert!(src.len() == dst.len()); . unsafe { . ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len()); . } . } . . #[cfg(test)] . mod tests; 44,854,514 ( 0.08%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/obligation_forest/mod.rs -------------------------------------------------------------------------------- Ir -- line 121 ---------------------------------------- . #[derive(Debug)] . pub enum ProcessResult { . Unchanged, . Changed(Vec), . Error(E), . } . . #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] 572,048 ( 0.00%) struct ObligationTreeId(usize); . . type ObligationTreeIdGenerator = . std::iter::Map, fn(usize) -> ObligationTreeId>; . . pub struct ObligationForest { . /// The list of obligations. In between calls to `process_obligations`, . /// this list only contains nodes in the `Pending` or `Waiting` state. . 
/// -- line 137 ---------------------------------------- -- line 181 ---------------------------------------- . has_parent: bool, . . /// Identifier of the obligation tree to which this node belongs. . obligation_tree_id: ObligationTreeId, . } . . impl Node { . fn new(parent: Option, obligation: O, obligation_tree_id: ObligationTreeId) -> Node { 5,849,536 ( 0.01%) Node { . obligation, . state: Cell::new(NodeState::Pending), 604,582 ( 0.00%) dependents: if let Some(parent_index) = parent { vec![parent_index] } else { vec![] }, . has_parent: parent.is_some(), . obligation_tree_id, . } . } . } . . /// The state of one node in some tree within the forest. This represents the . /// current state of processing for the obligation (of type `O`) associated -- line 200 ---------------------------------------- -- line 223 ---------------------------------------- . /// | compress() . /// v . /// (Removed) . /// ``` . /// The `Error` state can be introduced in several places, via `error_at()`. . /// . /// Outside of `ObligationForest` methods, nodes should be either `Pending` or . /// `Waiting`. 7,886,421 ( 0.01%) #[derive(Debug, Copy, Clone, PartialEq, Eq)] . enum NodeState { . /// This obligation has not yet been selected successfully. Cannot have . /// subobligations. . Pending, . . /// This obligation was selected successfully, but may or may not have . /// subobligations. . Success, -- line 239 ---------------------------------------- -- line 279 ---------------------------------------- . pub stalled: bool, . } . . impl OutcomeTrait for Outcome { . type Error = Error; . type Obligation = O; . . fn new() -> Self { 1,046,752 ( 0.00%) Self { stalled: true, errors: vec![] } . } . . fn mark_not_stalled(&mut self) { 359,946 ( 0.00%) self.stalled = false; . } . . fn is_stalled(&self) -> bool { 228,397 ( 0.00%) self.stalled . } . . fn record_completed(&mut self, _outcome: &Self::Obligation) { . // do nothing . } . . fn record_error(&mut self, error: Self::Error) { 1,536 ( 0.00%) self.errors.push(error) . } . } . . #[derive(Debug, PartialEq, Eq)] . pub struct Error { . pub error: E, . pub backtrace: Vec, . } . . impl ObligationForest { 131,792 ( 0.00%) pub fn new() -> ObligationForest { 1,581,504 ( 0.00%) ObligationForest { . nodes: vec![], . done_cache: Default::default(), . active_cache: Default::default(), . reused_node_vec: vec![], . obligation_tree_id_generator: (0..).map(ObligationTreeId), . error_cache: Default::default(), . } 131,792 ( 0.00%) } . . /// Returns the total number of nodes in the forest that have not . /// yet been fully resolved. . pub fn len(&self) -> usize { . self.nodes.len() . } . . /// Registers an obligation. . pub fn register_obligation(&mut self, obligation: O) { . // Ignore errors here - there is no guarantee of success. 4,698,642 ( 0.01%) let _ = self.register_obligation_at(obligation, None); . } . . // Returns Err(()) if we already know this obligation failed. 6,206,849 ( 0.01%) fn register_obligation_at(&mut self, obligation: O, parent: Option) -> Result<(), ()> { 1,128,518 ( 0.00%) let cache_key = obligation.as_cache_key(); 1,128,518 ( 0.00%) if self.done_cache.contains(&cache_key) { . debug!("register_obligation_at: ignoring already done obligation: {:?}", obligation); . return Ok(()); . } . 2,678,106 ( 0.01%) match self.active_cache.entry(cache_key) { . Entry::Occupied(o) => { 161,510 ( 0.00%) let node = &mut self.nodes[*o.get()]; 161,510 ( 0.00%) if let Some(parent_index) = parent { . // If the node is already in `active_cache`, it has already . 
// had its chance to be marked with a parent. So if it's . // not already present, just dump `parent` into the . // dependents as a non-parent. 150,393 ( 0.00%) if !node.dependents.contains(&parent_index) { . node.dependents.push(parent_index); . } . } 242,265 ( 0.00%) if let NodeState::Error = node.state.get() { Err(()) } else { Ok(()) } . } 1,096,788 ( 0.00%) Entry::Vacant(v) => { 2,193,576 ( 0.00%) let obligation_tree_id = match parent { 159,324 ( 0.00%) Some(parent_index) => self.nodes[parent_index].obligation_tree_id, . None => self.obligation_tree_id_generator.next().unwrap(), . }; . . let already_failed = parent.is_some() . && self . .error_cache . .get(&obligation_tree_id) . .map_or(false, |errors| errors.contains(v.key())); . . if already_failed { . Err(()) . } else { 365,596 ( 0.00%) let new_index = self.nodes.len(); . v.insert(new_index); . self.nodes.push(Node::new(parent, obligation, obligation_tree_id)); . Ok(()) . } . } . } 5,078,331 ( 0.01%) } . . /// Converts all remaining obligations to the given error. 1,553,559 ( 0.00%) pub fn to_errors(&mut self, error: E) -> Vec> { . let errors = self . .nodes . .iter() . .enumerate() . .filter(|(_index, node)| node.state.get() == NodeState::Pending) . .map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) }) . .collect(); . 443,874 ( 0.00%) self.compress(|_| assert!(false)); . errors 1,331,622 ( 0.00%) } . . /// Returns the set of obligations that are in a pending state. . pub fn map_pending_obligations(&self, f: F) -> Vec
<P>
. where . F: Fn(&O) -> P, . { 30 ( 0.00%) self.nodes . .iter() . .filter(|node| node.state.get() == NodeState::Pending) . .map(|node| f(&node.obligation)) . .collect() . } . 1,932 ( 0.00%) fn insert_into_error_cache(&mut self, index: usize) { . let node = &self.nodes[index]; 276 ( 0.00%) self.error_cache 828 ( 0.00%) .entry(node.obligation_tree_id) . .or_default() . .insert(node.obligation.as_cache_key()); 2,208 ( 0.00%) } . . /// Performs a pass through the obligation list. This must . /// be called in a loop until `outcome.stalled` is false. . /// . /// This _cannot_ be unrolled (presently, at least). . #[inline(never)] 4,710,384 ( 0.01%) pub fn process_obligations(&mut self, processor: &mut P) -> OUT . where . P: ObligationProcessor, . OUT: OutcomeTrait>, . { . let mut outcome = OUT::new(); . . // Note that the loop body can append new nodes, and those new nodes . // will then be processed by subsequent iterations of the loop. . // . // We can't use an iterator for the loop because `self.nodes` is . // appended to and the borrow checker would complain. We also can't use . // `for index in 0..self.nodes.len() { ... }` because the range would . // be computed with the initial length, and we would miss the appended . // nodes. Therefore we use a `while` loop. . let mut index = 0; 3,872,748 ( 0.01%) while let Some(node) = self.nodes.get_mut(index) { . // `processor.process_obligation` can modify the predicate within . // `node.obligation`, and that predicate is the key used for . // `self.active_cache`. This means that `self.active_cache` can get . // out of sync with `nodes`. It's not very common, but it does . // happen, and code in `compress` has to allow for it. 6,698,744 ( 0.01%) if node.state.get() != NodeState::Pending { 80,896 ( 0.00%) index += 1; . continue; . } . 2,127,324 ( 0.00%) match processor.process_obligation(&mut node.obligation) { . ProcessResult::Unchanged => { . // No change in state. . } 1,439,400 ( 0.00%) ProcessResult::Changed(children) => { . // We are not (yet) stalled. . outcome.mark_not_stalled(); . node.state.set(NodeState::Success); . 1,845,050 ( 0.00%) for child in children { 3,448,025 ( 0.01%) let st = self.register_obligation_at(child, Some(index)); 405,650 ( 0.00%) if let Err(()) = st { . // Error already reported - propagate it . // to our node. . self.error_at(index); . } . } . } . ProcessResult::Error(err) => { . outcome.mark_not_stalled(); 3,168 ( 0.00%) outcome.record_error(Error { error: err, backtrace: self.error_at(index) }); . } . } 6,536,952 ( 0.01%) index += 1; . } . . // There's no need to perform marking, cycle processing and compression when nothing . // changed. 228,397 ( 0.00%) if !outcome.is_stalled() { . self.mark_successes(); . self.process_cycles(processor); 211,790 ( 0.00%) self.compress(|obl| outcome.record_completed(obl)); . } . . outcome 4,710,384 ( 0.01%) } . . /// Returns a vector of obligations for `p` and all of its . /// ancestors, putting them into the error state in the process. 672 ( 0.00%) fn error_at(&self, mut index: usize) -> Vec { . let mut error_stack: Vec = vec![]; . let mut trace = vec![]; . . loop { . let node = &self.nodes[index]; 276 ( 0.00%) node.state.set(NodeState::Error); . trace.push(node.obligation.clone()); 552 ( 0.00%) if node.has_parent { . // The first dependent is the parent, which is treated . // specially. . error_stack.extend(node.dependents.iter().skip(1)); 180 ( 0.00%) index = node.dependents[0]; . } else { . // No parent; treat all dependents non-specially. . 
error_stack.extend(node.dependents.iter()); . break; . } . } . . while let Some(index) = error_stack.pop() { -- line 508 ---------------------------------------- -- line 509 ---------------------------------------- . let node = &self.nodes[index]; . if node.state.get() != NodeState::Error { . node.state.set(NodeState::Error); . error_stack.extend(node.dependents.iter()); . } . } . . trace 768 ( 0.00%) } . . /// Mark all `Waiting` nodes as `Success`, except those that depend on a . /// pending node. . fn mark_successes(&self) { . // Convert all `Waiting` nodes to `Success`. . for node in &self.nodes { 2,645,719 ( 0.00%) if node.state.get() == NodeState::Waiting { . node.state.set(NodeState::Success); . } . } . . // Convert `Success` nodes that depend on a pending node back to . // `Waiting`. . for node in &self.nodes { 1,337,298 ( 0.00%) if node.state.get() == NodeState::Pending { . // This call site is hot. . self.inlined_mark_dependents_as_waiting(node); . } . } . } . . // This always-inlined function is for the hot call site. . #[inline(always)] . fn inlined_mark_dependents_as_waiting(&self, node: &Node) { 60,518 ( 0.00%) for &index in node.dependents.iter() { . let node = &self.nodes[index]; 60,518 ( 0.00%) let state = node.state.get(); 60,518 ( 0.00%) if state == NodeState::Success { . // This call site is cold. 91,968 ( 0.00%) self.uninlined_mark_dependents_as_waiting(node); . } else { . debug_assert!(state == NodeState::Waiting || state == NodeState::Error) . } . } . } . . // This never-inlined function is for the cold call site. . #[inline(never)] 214,592 ( 0.00%) fn uninlined_mark_dependents_as_waiting(&self, node: &Node) { . // Mark node Waiting in the cold uninlined code instead of the hot inlined . node.state.set(NodeState::Waiting); . self.inlined_mark_dependents_as_waiting(node) 245,248 ( 0.00%) } . . /// Report cycles between all `Success` nodes, and convert all `Success` . /// nodes to `Done`. This must be called after `mark_successes`. . fn process_cycles
<P>
(&mut self, processor: &mut P) . where . P: ObligationProcessor, . { 105,895 ( 0.00%) let mut stack = std::mem::take(&mut self.reused_node_vec); . for (index, node) in self.nodes.iter().enumerate() { . // For some benchmarks this state test is extremely hot. It's a win . // to handle the no-op cases immediately to avoid the cost of the . // function call. 1,337,298 ( 0.00%) if node.state.get() == NodeState::Success { 1,694,755 ( 0.00%) self.find_cycles_from_node(&mut stack, processor, index); . } . } . . debug_assert!(stack.is_empty()); 847,160 ( 0.00%) self.reused_node_vec = stack; . } . 4,173,561 ( 0.01%) fn find_cycles_from_node
<P>
(&self, stack: &mut Vec, processor: &mut P, index: usize) . where . P: ObligationProcessor, . { . let node = &self.nodes[index]; 927,458 ( 0.00%) if node.state.get() == NodeState::Success { 41,438 ( 0.00%) match stack.iter().rposition(|&n| n == index) { . None => { . stack.push(index); 124,778 ( 0.00%) for &dep_index in node.dependents.iter() { 499,112 ( 0.00%) self.find_cycles_from_node(stack, processor, dep_index); . } . stack.pop(); . node.state.set(NodeState::Done); . } . Some(rpos) => { . // Cycle detected. . processor.process_backedge( . stack[rpos..].iter().map(|&i| &self.nodes[i].obligation), . PhantomData, . ); . } . } . } 3,709,832 ( 0.01%) } . . /// Compresses the vector, removing all popped nodes. This adjusts the . /// indices and hence invalidates any outstanding indices. `process_cycles` . /// must be run beforehand to remove any cycles on `Success` nodes. . #[inline(never)] 2,400,719 ( 0.00%) fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) { 327,832 ( 0.00%) let orig_nodes_len = self.nodes.len(); . let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec); . debug_assert!(node_rewrites.is_empty()); . node_rewrites.extend(0..orig_nodes_len); . let mut dead_nodes = 0; . . // Move removable nodes to the end, preserving the order of the . // remaining nodes. . // . // LOOP INVARIANT: . // self.nodes[0..index - dead_nodes] are the first remaining nodes . // self.nodes[index - dead_nodes..index] are all dead . // self.nodes[index..] are unchanged . for index in 0..orig_nodes_len { . let node = &self.nodes[index]; 7,110,070 ( 0.01%) match node.state.get() { . NodeState::Pending | NodeState::Waiting => { 1,954,704 ( 0.00%) if dead_nodes > 0 { 744,885 ( 0.00%) self.nodes.swap(index, index - dead_nodes); 1,241,475 ( 0.00%) node_rewrites[index] -= dead_nodes; . } . } . NodeState::Done => { . // This lookup can fail because the contents of . // `self.active_cache` are not guaranteed to match those of . // `self.nodes`. See the comment in `process_obligation` . // for more details. 1,303,518 ( 0.00%) if let Some((predicate, _)) = 1,079,010 ( 0.00%) self.active_cache.remove_entry(&node.obligation.as_cache_key()) . { . self.done_cache.insert(predicate); . } else { . self.done_cache.insert(node.obligation.as_cache_key().clone()); . } . // Extract the success stories. . outcome_cb(&node.obligation); 719,340 ( 0.00%) node_rewrites[index] = orig_nodes_len; 719,340 ( 0.00%) dead_nodes += 1; . } . NodeState::Error => { . // We *intentionally* remove the node from the cache at this point. Otherwise . // tests must come up with a different type on every type error they . // check against. 828 ( 0.00%) self.active_cache.remove(&node.obligation.as_cache_key()); 552 ( 0.00%) self.insert_into_error_cache(index); 552 ( 0.00%) node_rewrites[index] = orig_nodes_len; 552 ( 0.00%) dead_nodes += 1; . } . NodeState::Success => unreachable!(), . } . } . . if dead_nodes > 0 { . // Remove the dead nodes and rewrite indices. 188,216 ( 0.00%) self.nodes.truncate(orig_nodes_len - dead_nodes); 94,108 ( 0.00%) self.apply_rewrites(&node_rewrites); . } . . node_rewrites.truncate(0); 1,311,328 ( 0.00%) self.reused_node_vec = node_rewrites; 2,622,656 ( 0.00%) } . . #[inline(never)] 846,972 ( 0.00%) fn apply_rewrites(&mut self, node_rewrites: &[usize]) { . let orig_nodes_len = node_rewrites.len(); . . for node in &mut self.nodes { . 
let mut i = 0; 816,573 ( 0.00%) while let Some(dependent) = node.dependents.get_mut(i) { 129,884 ( 0.00%) let new_index = node_rewrites[*dependent]; 64,942 ( 0.00%) if new_index >= orig_nodes_len { . node.dependents.swap_remove(i); . if i == 0 && node.has_parent { . // We just removed the parent. . node.has_parent = false; . } . } else { 32,471 ( 0.00%) *dependent = new_index; 64,942 ( 0.00%) i += 1; . } . } . } . . // This updating of `self.active_cache` is necessary because the . // removal of nodes within `compress` can fail. See above. . self.active_cache.retain(|_predicate, index| { 3,677,056 ( 0.01%) let new_index = node_rewrites[*index]; 1,838,528 ( 0.00%) if new_index >= orig_nodes_len { . false . } else { 784,102 ( 0.00%) *index = new_index; . true . } . }); 752,864 ( 0.00%) } . } 14,095,261 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/rpc.rs -------------------------------------------------------------------------------- Ir -- line 21 ---------------------------------------- . . pub(super) trait DecodeMut<'a, 's, S>: Sized { . fn decode(r: &mut Reader<'a>, s: &'s mut S) -> Self; . } . . macro_rules! rpc_encode_decode { . (le $ty:ty) => { . impl Encode for $ty { 25,067,580 ( 0.05%) fn encode(self, w: &mut Writer, _: &mut S) { . w.extend_from_array(&self.to_le_bytes()); 31,334,475 ( 0.06%) } . } . . impl DecodeMut<'_, '_, S> for $ty { . fn decode(r: &mut Reader<'_>, _: &mut S) -> Self { . const N: usize = ::std::mem::size_of::<$ty>(); . . let mut bytes = [0; N]; 3,983,493 ( 0.01%) bytes.copy_from_slice(&r[..N]); 16,588,628 ( 0.03%) *r = &r[N..]; . . Self::from_le_bytes(bytes) . } . } . }; . (struct $name:ident { $($field:ident),* $(,)? }) => { . impl Encode for $name { . fn encode(self, w: &mut Writer, s: &mut S) { -- line 48 ---------------------------------------- -- line 55 ---------------------------------------- . $name { . $($field: DecodeMut::decode(r, s)),* . } . } . } . }; . (enum $name:ident $(<$($T:ident),+>)? { $($variant:ident $(($field:ident))*),* $(,)? }) => { . impl),+)?> Encode for $name $(<$($T),+>)? { 69,787,039 ( 0.13%) fn encode(self, w: &mut Writer, s: &mut S) { . // HACK(eddyb): `Tag` enum duplicated between the . // two impls as there's no other place to stash it. . #[allow(non_upper_case_globals)] . mod tag { . #[repr(u8)] enum Tag { $($variant),* } . . $(pub const $variant: u8 = Tag::$variant as u8;)* . } . 38,208,835 ( 0.07%) match self { 7,722,542 ( 0.01%) $($name::$variant $(($field))* => { 580,361 ( 0.00%) tag::$variant.encode(w, s); 8,774,830 ( 0.02%) $($field.encode(w, s);)* . })* . } 54,850,208 ( 0.10%) } . } . . impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S> . for $name $(<$($T),+>)? . { 17,988,200 ( 0.03%) fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { . // HACK(eddyb): `Tag` enum duplicated between the . // two impls as there's no other place to stash it. . #[allow(non_upper_case_globals)] . mod tag { . #[repr(u8)] enum Tag { $($variant),* } . . $(pub const $variant: u8 = Tag::$variant as u8;)* . } . 17,255,344 ( 0.03%) match u8::decode(r, s) { . $(tag::$variant => { 352,584 ( 0.00%) $(let $field = DecodeMut::decode(r, s);)* 12,443,670 ( 0.02%) $name::$variant $(($field))* 175,302 ( 0.00%) })* . _ => unreachable!(), . } 31,199,554 ( 0.06%) } . } . } . } . . impl Encode for () { . fn encode(self, _: &mut Writer, _: &mut S) {} . } . . impl DecodeMut<'_, '_, S> for () { . 
fn decode(_: &mut Reader<'_>, _: &mut S) -> Self {} . } . . impl Encode for u8 { 3,722,544 ( 0.01%) fn encode(self, w: &mut Writer, _: &mut S) { . w.push(self); 3,102,120 ( 0.01%) } . } . . impl DecodeMut<'_, '_, S> for u8 { . fn decode(r: &mut Reader<'_>, _: &mut S) -> Self { 69,808,853 ( 0.13%) let x = r[0]; 38,720,398 ( 0.07%) *r = &r[1..]; . x . } . } . . rpc_encode_decode!(le u32); . rpc_encode_decode!(le usize); . . impl Encode for bool { . fn encode(self, w: &mut Writer, s: &mut S) { 680,025 ( 0.00%) (self as u8).encode(w, s); . } . } . . impl DecodeMut<'_, '_, S> for bool { . fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { 454,362 ( 0.00%) match u8::decode(r, s) { . 0 => false, . 1 => true, . _ => unreachable!(), . } . } . } . . impl Encode for char { . fn encode(self, w: &mut Writer, s: &mut S) { 663,090 ( 0.00%) (self as u32).encode(w, s); . } . } . . impl DecodeMut<'_, '_, S> for char { . fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { . char::from_u32(u32::decode(r, s)).unwrap() . } . } . . impl Encode for NonZeroU32 { . fn encode(self, w: &mut Writer, s: &mut S) { 17,591,981 ( 0.03%) self.get().encode(w, s); . } . } . . impl DecodeMut<'_, '_, S> for NonZeroU32 { . fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { . Self::new(u32::decode(r, s)).unwrap() . } . } -- line 170 ---------------------------------------- -- line 204 ---------------------------------------- . Ok(x), . Err(e), . } . ); . . impl Encode for &[u8] { . fn encode(self, w: &mut Writer, s: &mut S) { . self.len().encode(w, s); 513,360 ( 0.00%) w.write_all(self).unwrap(); . } . } . . impl<'a, S> DecodeMut<'a, '_, S> for &'a [u8] { . fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { . let len = usize::decode(r, s); . let xs = &r[..len]; 1,095,911 ( 0.00%) *r = &r[len..]; . xs . } . } . . impl Encode for &str { 513,360 ( 0.00%) fn encode(self, w: &mut Writer, s: &mut S) { . self.as_bytes().encode(w, s); 641,700 ( 0.00%) } . } . . impl<'a, S> DecodeMut<'a, '_, S> for &'a str { 364,847 ( 0.00%) fn decode(r: &mut Reader<'a>, s: &mut S) -> Self { 729,694 ( 0.00%) str::from_utf8(<&[u8]>::decode(r, s)).unwrap() 729,694 ( 0.00%) } . } . . impl Encode for String { . fn encode(self, w: &mut Writer, s: &mut S) { 513,360 ( 0.00%) self[..].encode(w, s); . } . } . . impl DecodeMut<'_, '_, S> for String { . fn decode(r: &mut Reader<'_>, s: &mut S) -> Self { 641,700 ( 0.00%) <&str>::decode(r, s).to_string() . } . } . . /// Simplified version of panic payloads, ignoring . /// types other than `&'static str` and `String`. . pub enum PanicMessage { . StaticStr(&'static str), . String(String), -- line 253 ---------------------------------------- 65,366,146 ( 0.12%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs -------------------------------------------------------------------------------- Ir -- line 5 ---------------------------------------- . #[stable(feature = "alloc_system_type", since = "1.28.0")] . unsafe impl GlobalAlloc for System { . #[inline] . unsafe fn alloc(&self, layout: Layout) -> *mut u8 { . // jemalloc provides alignment less than MIN_ALIGN for small allocations. . // So only rely on MIN_ALIGN if size >= align. . // Also see and . // . 65,656,918 ( 0.12%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 49,242,687 ( 0.09%) libc::malloc(layout.size()) as *mut u8 . } else { . #[cfg(target_os = "macos")] . { . if layout.align() > (1 << 31) { . 
return ptr::null_mut(); . } . } . aligned_malloc(&layout) . } . } . . #[inline] . unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { . // See the comment above in `alloc` for why this check looks the way it does. 2,539,400 ( 0.00%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 3,809,100 ( 0.01%) libc::calloc(layout.size(), 1) as *mut u8 . } else { . let ptr = self.alloc(layout); . if !ptr.is_null() { . ptr::write_bytes(ptr, 0, layout.size()); . } . ptr . } . } . . #[inline] . unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { 17,049,068 ( 0.03%) libc::free(ptr as *mut libc::c_void) . } . . #[inline] . unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { 6,447,920 ( 0.01%) if layout.align() <= MIN_ALIGN && layout.align() <= new_size { 12,895,840 ( 0.02%) libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8 . } else { . realloc_fallback(self, ptr, layout, new_size) . } . } . } . . cfg_if::cfg_if! { . if #[cfg(any( -- line 56 ---------------------------------------- -- line 84 ---------------------------------------- . } else if #[cfg(target_os = "wasi")] { . #[inline] . unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { . libc::aligned_alloc(layout.align(), layout.size()) as *mut u8 . } . } else { . #[inline] . unsafe fn aligned_malloc(layout: &Layout) -> *mut u8 { 1 ( 0.00%) let mut out = ptr::null_mut(); . // posix_memalign requires that the alignment be a multiple of `sizeof(void*)`. . // Since these are all powers of 2, we can just use max. . let align = layout.align().max(crate::mem::size_of::()); 2 ( 0.00%) let ret = libc::posix_memalign(&mut out, align, layout.size()); 2 ( 0.00%) if ret != 0 { ptr::null_mut() } else { out as *mut u8 } . } . } . } 1 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs -------------------------------------------------------------------------------- Ir -- line 118 ---------------------------------------- . stability: InternedSet<'tcx, attr::Stability>, . . /// `#[rustc_const_stable]` and `#[rustc_const_unstable]` attributes . const_stability: InternedSet<'tcx, attr::ConstStability>, . } . . impl<'tcx> CtxtInterners<'tcx> { . fn new(arena: &'tcx WorkerLocal>) -> CtxtInterners<'tcx> { 17 ( 0.00%) CtxtInterners { . arena, . type_: Default::default(), . type_list: Default::default(), . substs: Default::default(), . region: Default::default(), . poly_existential_predicates: Default::default(), . canonical_var_infos: Default::default(), . predicate: Default::default(), -- line 134 ---------------------------------------- -- line 143 ---------------------------------------- . stability: Default::default(), . const_stability: Default::default(), . } . } . . /// Interns a type. . #[allow(rustc::usage_of_ty_tykind)] . #[inline(never)] 30,391,928 ( 0.06%) fn intern_ty(&self, kind: TyKind<'tcx>) -> Ty<'tcx> { . self.type_ 18,994,955 ( 0.04%) .intern(kind, |kind| { 432,326 ( 0.00%) let flags = super::flags::FlagComputation::for_kind(&kind); . . let ty_struct = TyS { 864,652 ( 0.00%) kind, . flags: flags.flags, . outer_exclusive_binder: flags.outer_exclusive_binder, . }; . . Interned(self.arena.alloc(ty_struct)) . }) . .0 34,190,919 ( 0.06%) } . . #[inline(never)] 12,449,864 ( 0.02%) fn intern_predicate( . &self, . kind: Binder<'tcx, PredicateKind<'tcx>>, . ) -> &'tcx PredicateInner<'tcx> { . 
self.predicate 10,893,631 ( 0.02%) .intern(kind, |kind| { 1,028,580 ( 0.00%) let flags = super::flags::FlagComputation::for_predicate(kind); . . let predicate_struct = PredicateInner { . kind, . flags: flags.flags, . outer_exclusive_binder: flags.outer_exclusive_binder, . }; . . Interned(self.arena.alloc(predicate_struct)) . }) . .0 14,006,097 ( 0.03%) } . } . . pub struct CommonTypes<'tcx> { . pub unit: Ty<'tcx>, . pub bool: Ty<'tcx>, . pub char: Ty<'tcx>, . pub isize: Ty<'tcx>, . pub i8: Ty<'tcx>, -- line 193 ---------------------------------------- -- line 237 ---------------------------------------- . /// safely used as a key in the maps of a TypeckResults. For that to be . /// the case, the HirId must have the same `owner` as all the other IDs in . /// this table (signified by `hir_owner`). Otherwise the HirId . /// would be in a different frame of reference and using its `local_id` . /// would result in lookup errors, or worse, in silently wrong data being . /// stored/returned. . #[inline] . fn validate_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) { 4,206,433 ( 0.01%) if hir_id.owner != hir_owner { . invalid_hir_id_for_typeck_results(hir_owner, hir_id); . } . } . . #[cold] . #[inline(never)] . fn invalid_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) { . ty::tls::with(|tcx| { -- line 253 ---------------------------------------- -- line 261 ---------------------------------------- . } . . impl<'a, V> LocalTableInContext<'a, V> { . pub fn contains_key(&self, id: hir::HirId) -> bool { . validate_hir_id_for_typeck_results(self.hir_owner, id); . self.data.contains_key(&id.local_id) . } . 3,435,418 ( 0.01%) pub fn get(&self, id: hir::HirId) -> Option<&V> { 685,522 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); 685,522 ( 0.00%) self.data.get(&id.local_id) 2,755,624 ( 0.01%) } . . pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> { . self.data.iter() . } . } . . impl<'a, V> ::std::ops::Index for LocalTableInContext<'a, V> { . type Output = V; . 34,368 ( 0.00%) fn index(&self, key: hir::HirId) -> &V { 3 ( 0.00%) self.get(key).expect("LocalTableInContext: key not found") 22,912 ( 0.00%) } . } . . pub struct LocalTableInContextMut<'a, V> { . hir_owner: LocalDefId, . data: &'a mut ItemLocalMap, . } . . impl<'a, V> LocalTableInContextMut<'a, V> { -- line 292 ---------------------------------------- -- line 507 ---------------------------------------- . pub treat_byte_string_as_slice: ItemLocalSet, . . /// Contains the data for evaluating the effect of feature `capture_disjoint_fields` . /// on closure size. . pub closure_size_eval: FxHashMap>, . } . . impl<'tcx> TypeckResults<'tcx> { 156,228 ( 0.00%) pub fn new(hir_owner: LocalDefId) -> TypeckResults<'tcx> { 1,848,698 ( 0.00%) TypeckResults { . hir_owner, . type_dependent_defs: Default::default(), . field_indices: Default::default(), . user_provided_types: Default::default(), . user_provided_sigs: Default::default(), . node_types: Default::default(), . node_substs: Default::default(), . adjustments: Default::default(), -- line 524 ---------------------------------------- -- line 532 ---------------------------------------- . tainted_by_errors: None, . concrete_opaque_types: Default::default(), . closure_min_captures: Default::default(), . closure_fake_reads: Default::default(), . generator_interior_types: ty::Binder::dummy(Default::default()), . treat_byte_string_as_slice: Default::default(), . closure_size_eval: Default::default(), . } 156,228 ( 0.00%) } . . 
/// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node. 874,080 ( 0.00%) pub fn qpath_res(&self, qpath: &hir::QPath<'_>, id: hir::HirId) -> Res { 582,720 ( 0.00%) match *qpath { 1,646,490 ( 0.00%) hir::QPath::Resolved(_, ref path) => path.res, . hir::QPath::TypeRelative(..) | hir::QPath::LangItem(..) => self . .type_dependent_def(id) 101,670 ( 0.00%) .map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)), . } 1,456,800 ( 0.00%) } . 4,103 ( 0.00%) pub fn type_dependent_defs( . &self, . ) -> LocalTableInContext<'_, Result<(DefKind, DefId), ErrorReported>> { 51,224 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.type_dependent_defs } 4,103 ( 0.00%) } . 23,535 ( 0.00%) pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> { 41,228 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.type_dependent_defs.get(&id.local_id).cloned().and_then(|r| r.ok()) 47,070 ( 0.00%) } . 2,992 ( 0.00%) pub fn type_dependent_def_id(&self, id: HirId) -> Option { . self.type_dependent_def(id).map(|(_, def_id)| def_id) 2,992 ( 0.00%) } . 243,679 ( 0.00%) pub fn type_dependent_defs_mut( . &mut self, . ) -> LocalTableInContextMut<'_, Result<(DefKind, DefId), ErrorReported>> { 243,679 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.type_dependent_defs } 243,679 ( 0.00%) } . . pub fn field_indices(&self) -> LocalTableInContext<'_, usize> { 39,725 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.field_indices } 7,340 ( 0.00%) } . . pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> { 52,986 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.field_indices } 26,493 ( 0.00%) } . . pub fn user_provided_types(&self) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> { 137,174 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.user_provided_types } 68,587 ( 0.00%) } . . pub fn user_provided_types_mut( . &mut self, . ) -> LocalTableInContextMut<'_, CanonicalUserType<'tcx>> { 30,448 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.user_provided_types } 15,224 ( 0.00%) } . . pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> { 623,374 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.node_types } 311,687 ( 0.00%) } . . pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> { 1,043,644 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_types } 521,822 ( 0.00%) } . 4,573,122 ( 0.01%) pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> { 762,187 ( 0.00%) self.node_type_opt(id).unwrap_or_else(|| { . bug!("node_type: no type for node `{}`", tls::with(|tcx| tcx.hir().node_to_string(id))) . }) 3,810,935 ( 0.01%) } . 970,476 ( 0.00%) pub fn node_type_opt(&self, id: hir::HirId) -> Option> { 970,476 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_types.get(&id.local_id).cloned() 1,940,952 ( 0.00%) } . . pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, SubstsRef<'tcx>> { 160,028 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_substs } 80,014 ( 0.00%) } . 337,008 ( 0.00%) pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> { 337,008 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| InternalSubsts::empty()) 674,016 ( 0.00%) } . 
228,223 ( 0.00%) pub fn node_substs_opt(&self, id: hir::HirId) -> Option> { 228,223 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_substs.get(&id.local_id).cloned() 456,446 ( 0.00%) } . . // Returns the type of a pattern as a monotype. Like @expr_ty, this function . // doesn't provide type parameter substitutions. . pub fn pat_ty(&self, pat: &hir::Pat<'_>) -> Ty<'tcx> { 96,712 ( 0.00%) self.node_type(pat.hir_id) . } . . // Returns the type of an expression as a monotype. . // . // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in . // some cases, we insert `Adjustment` annotations such as auto-deref or . // auto-ref. The type returned by this function does not consider such . // adjustments. See `expr_ty_adjusted()` instead. . // . // NB (2): This type doesn't provide type parameter substitutions; e.g., if you . // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize" . // instead of "fn(ty) -> T with T = isize". . pub fn expr_ty(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> { 809,358 ( 0.00%) self.node_type(expr.hir_id) . } . . pub fn expr_ty_opt(&self, expr: &hir::Expr<'_>) -> Option> { 756,463 ( 0.00%) self.node_type_opt(expr.hir_id) . } . . pub fn adjustments(&self) -> LocalTableInContext<'_, Vec>> { 450,968 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.adjustments } 225,484 ( 0.00%) } . . pub fn adjustments_mut( . &mut self, . ) -> LocalTableInContextMut<'_, Vec>> { 578,660 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.adjustments } 289,330 ( 0.00%) } . 336,938 ( 0.00%) pub fn expr_adjustments(&self, expr: &hir::Expr<'_>) -> &[ty::adjustment::Adjustment<'tcx>] { 673,876 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, expr.hir_id); . self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..]) 673,876 ( 0.00%) } . . /// Returns the type of `expr`, considering any `Adjustment` . /// entry recorded for that expression. 48,180 ( 0.00%) pub fn expr_ty_adjusted(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> { 9,636 ( 0.00%) self.expr_adjustments(expr).last().map_or_else(|| self.expr_ty(expr), |adj| adj.target) 5,752 ( 0.00%) } . 125 ( 0.00%) pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr<'_>) -> Option> { 25 ( 0.00%) self.expr_adjustments(expr).last().map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr)) 80 ( 0.00%) } . 47,121 ( 0.00%) pub fn is_method_call(&self, expr: &hir::Expr<'_>) -> bool { . // Only paths and method calls/overloaded operators have . // entries in type_dependent_defs, ignore the former here. 94,242 ( 0.00%) if let hir::ExprKind::Path(_) = expr.kind { . return false; . } . 100,658 ( 0.00%) matches!(self.type_dependent_defs().get(expr.hir_id), Some(Ok((DefKind::AssocFn, _)))) 94,242 ( 0.00%) } . 246,429 ( 0.00%) pub fn extract_binding_mode(&self, s: &Session, id: HirId, sp: Span) -> Option { . self.pat_binding_modes().get(id).copied().or_else(|| { . s.delay_span_bug(sp, "missing binding mode"); . None . }) 328,572 ( 0.00%) } . . pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> { 220,341 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_binding_modes } 69,099 ( 0.00%) } . . pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> { 109,956 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_binding_modes } 54,978 ( 0.00%) } . . 
pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec>> { 312,602 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_adjustments } 156,301 ( 0.00%) } . . pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { 95,092 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments } 47,546 ( 0.00%) } . . /// For a given closure, returns the iterator of `ty::CapturedPlace`s that are captured . /// by the closure. 27,720 ( 0.00%) pub fn closure_min_captures_flattened( . &self, . closure_def_id: DefId, . ) -> impl Iterator> { . self.closure_min_captures . .get(&closure_def_id) . .map(|closure_min_captures| closure_min_captures.values().flat_map(|v| v.iter())) . .into_iter() . .flatten() 34,650 ( 0.00%) } . . pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, HirPlace<'tcx>)> { 13,566 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.closure_kind_origins } 6,783 ( 0.00%) } . . pub fn closure_kind_origins_mut( . &mut self, . ) -> LocalTableInContextMut<'_, (Span, HirPlace<'tcx>)> { . LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.closure_kind_origins } . } . . pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> { 36,478 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.liberated_fn_sigs } 18,239 ( 0.00%) } . . pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> { 22,912 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.liberated_fn_sigs } 11,456 ( 0.00%) } . . pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec>> { 13,572 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.fru_field_types } 6,786 ( 0.00%) } . . pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { 12 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.fru_field_types } 6 ( 0.00%) } . 561 ( 0.00%) pub fn is_coercion_cast(&self, hir_id: hir::HirId) -> bool { 187 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, hir_id); . self.coercion_casts.contains(&hir_id.local_id) 374 ( 0.00%) } . . pub fn set_coercion_cast(&mut self, id: ItemLocalId) { . self.coercion_casts.insert(id); . } . . pub fn coercion_casts(&self) -> &ItemLocalSet { 6,783 ( 0.00%) &self.coercion_casts 6,783 ( 0.00%) } . } . . impl<'a, 'tcx> HashStable> for TypeckResults<'tcx> { . fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { . let ty::TypeckResults { . hir_owner, . ref type_dependent_defs, . ref field_indices, -- line 764 ---------------------------------------- -- line 819 ---------------------------------------- . const START_INDEX = 0, . } . } . . /// Mapping of type annotation indices to canonical user type annotations. . pub type CanonicalUserTypeAnnotations<'tcx> = . IndexVec>; . 6,938 ( 0.00%) #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)] . pub struct CanonicalUserTypeAnnotation<'tcx> { . pub user_ty: CanonicalUserType<'tcx>, . pub span: Span, 8,938 ( 0.00%) pub inferred_ty: Ty<'tcx>, . } . . /// Canonicalized user type annotation. . pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>; . . impl<'tcx> CanonicalUserType<'tcx> { . /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`, . /// i.e., each thing is mapped to a canonical variable with the same index. 
36,619 ( 0.00%) pub fn is_identity(&self) -> bool { 73,238 ( 0.00%) match self.value { . UserType::Ty(_) => false, . UserType::TypeOf(_, user_substs) => { 36,619 ( 0.00%) if user_substs.user_self_ty.is_some() { . return false; . } . . iter::zip(user_substs.substs, BoundVar::new(0)..).all(|(kind, cvar)| { . match kind.unpack() { 101,658 ( 0.00%) GenericArgKind::Type(ty) => match ty.kind() { . ty::Bound(debruijn, b) => { . // We only allow a `ty::INNERMOST` index in substitutions. 48,456 ( 0.00%) assert_eq!(*debruijn, ty::INNERMOST); . cvar == b.var . } . _ => false, . }, . 13,028 ( 0.00%) GenericArgKind::Lifetime(r) => match r { . ty::ReLateBound(debruijn, br) => { . // We only allow a `ty::INNERMOST` index in substitutions. . assert_eq!(*debruijn, ty::INNERMOST); . cvar == br.var . } . _ => false, . }, . -- line 867 ---------------------------------------- -- line 872 ---------------------------------------- . cvar == b . } . _ => false, . }, . } . }) . } . } 73,238 ( 0.00%) } . } . . /// A user-given type annotation attached to a constant. These arise . /// from constants that are named via paths, like `Foo::::new` and . /// so forth. 53,628 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable)] 398,827 ( 0.00%) #[derive(HashStable, TypeFoldable, Lift)] . pub enum UserType<'tcx> { . Ty(Ty<'tcx>), . . /// The canonical type is the result of `type_of(def_id)` with the . /// given substitutions applied. . TypeOf(DefId, UserSubsts<'tcx>), . } . . impl<'tcx> CommonTypes<'tcx> { . fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { 111 ( 0.00%) let mk = |ty| interners.intern_ty(ty); . . CommonTypes { . unit: mk(Tuple(List::empty())), . bool: mk(Bool), . char: mk(Char), . never: mk(Never), . isize: mk(Int(ty::IntTy::Isize)), . i8: mk(Int(ty::IntTy::I8)), -- line 906 ---------------------------------------- -- line 921 ---------------------------------------- . . trait_object_dummy_self: mk(Infer(ty::FreshTy(0))), . } . } . } . . impl<'tcx> CommonLifetimes<'tcx> { . fn new(interners: &CtxtInterners<'tcx>) -> CommonLifetimes<'tcx> { 67 ( 0.00%) let mk = |r| interners.region.intern(r, |r| Interned(interners.arena.alloc(r))).0; . . CommonLifetimes { 5 ( 0.00%) re_root_empty: mk(RegionKind::ReEmpty(ty::UniverseIndex::ROOT)), 5 ( 0.00%) re_static: mk(RegionKind::ReStatic), 5 ( 0.00%) re_erased: mk(RegionKind::ReErased), . } . } . } . . impl<'tcx> CommonConsts<'tcx> { . fn new(interners: &CtxtInterners<'tcx>, types: &CommonTypes<'tcx>) -> CommonConsts<'tcx> { 6 ( 0.00%) let mk_const = |c| interners.const_.intern(c, |c| Interned(interners.arena.alloc(c))).0; . . CommonConsts { . unit: mk_const(ty::Const { . val: ty::ConstKind::Value(ConstValue::Scalar(Scalar::ZST)), . ty: types.unit, . }), . } . } -- line 949 ---------------------------------------- -- line 1045 ---------------------------------------- . . /// Stores memory for globals (statics/consts). . pub(crate) alloc_map: Lock>, . . output_filenames: Arc, . } . . impl<'tcx> TyCtxt<'tcx> { 296,640 ( 0.00%) pub fn typeck_opt_const_arg( . self, . def: ty::WithOptConstParam, . ) -> &'tcx TypeckResults<'tcx> { 118,656 ( 0.00%) if let Some(param_did) = def.const_param_did { . self.typeck_const_arg((def.did, param_did)) . } else { . self.typeck(def.did) . } 237,312 ( 0.00%) } . 20,436 ( 0.00%) pub fn alloc_steal_thir(self, thir: Thir<'tcx>) -> &'tcx Steal> { 6,812 ( 0.00%) self.arena.alloc(Steal::new(thir)) 27,248 ( 0.00%) } . 
136,240 ( 0.00%) pub fn alloc_steal_mir(self, mir: Body<'tcx>) -> &'tcx Steal> { 27,248 ( 0.00%) self.arena.alloc(Steal::new(mir)) 190,736 ( 0.00%) } . 27,248 ( 0.00%) pub fn alloc_steal_promoted( . self, . promoted: IndexVec>, . ) -> &'tcx Steal>> { 6,812 ( 0.00%) self.arena.alloc(Steal::new(promoted)) 34,060 ( 0.00%) } . 12,312 ( 0.00%) pub fn alloc_adt_def( . self, . did: DefId, . kind: AdtKind, . variants: IndexVec, . repr: ReprOptions, . ) -> &'tcx ty::AdtDef { 21,888 ( 0.00%) self.intern_adt_def(ty::AdtDef::new(self, did, kind, variants, repr)) 5,472 ( 0.00%) } . . /// Allocates a read-only byte or string literal for `mir::interpret`. 1,970 ( 0.00%) pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId { . // Create an allocation that just contains these bytes. 394 ( 0.00%) let alloc = interpret::Allocation::from_bytes_byte_aligned_immutable(bytes); 5,122 ( 0.00%) let alloc = self.intern_const_alloc(alloc); . self.create_memory_alloc(alloc) 1,970 ( 0.00%) } . . /// Returns a range of the start/end indices specified with the . /// `rustc_layout_scalar_valid_range` attribute. . // FIXME(eddyb) this is an awkward spot for this method, maybe move it? 234,592 ( 0.00%) pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound, Bound) { 117,296 ( 0.00%) let attrs = self.get_attrs(def_id); 498,508 ( 0.00%) let get = |name| { 117,296 ( 0.00%) let attr = match attrs.iter().find(|a| a.has_name(name)) { . Some(attr) => attr, . None => return Bound::Unbounded, . }; . debug!("layout_scalar_valid_range: attr={:?}", attr); 968 ( 0.00%) if let Some( . &[ . ast::NestedMetaItem::Literal(ast::Lit { 484 ( 0.00%) kind: ast::LitKind::Int(a, _), .. . }), . ], 242 ( 0.00%) ) = attr.meta_item_list().as_deref() . { . Bound::Included(a) . } else { . self.sess . .delay_span_bug(attr.span, "invalid rustc_layout_scalar_valid_range attribute"); . Bound::Unbounded . } 586,480 ( 0.00%) }; 175,944 ( 0.00%) ( 175,944 ( 0.00%) get(sym::rustc_layout_scalar_valid_range_start), 87,972 ( 0.00%) get(sym::rustc_layout_scalar_valid_range_end), . ) 234,592 ( 0.00%) } . . pub fn lift>(self, value: T) -> Option { . value.lift_to_tcx(self) . } . . /// Creates a type context and call the closure with a `TyCtxt` reference . /// to the context. The closure enforces that the type context and any interned . /// value (types, substs, etc.) can only be used while `ty::tls` has a valid . /// reference to the context, to allow formatting values that need it. 23 ( 0.00%) pub fn create_global_ctxt( . s: &'tcx Session, . lint_store: Lrc, . arena: &'tcx WorkerLocal>, . resolutions: ty::ResolverOutputs, . krate: &'tcx hir::Crate<'tcx>, . dep_graph: DepGraph, . on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>, . queries: &'tcx dyn query::QueryEngine<'tcx>, . query_kinds: &'tcx [DepKindStruct], . crate_name: &str, . output_filenames: OutputFilenames, . ) -> GlobalCtxt<'tcx> { 1 ( 0.00%) let data_layout = TargetDataLayout::parse(&s.target).unwrap_or_else(|err| { . s.fatal(&err); . }); . let interners = CtxtInterners::new(arena); . let common_types = CommonTypes::new(&interners); . let common_lifetimes = CommonLifetimes::new(&interners); . let common_consts = CommonConsts::new(&interners, &common_types); . 127 ( 0.00%) GlobalCtxt { . sess: s, 6 ( 0.00%) lint_store, . arena, 2 ( 0.00%) interners, 5 ( 0.00%) dep_graph, 4 ( 0.00%) untracked_resolutions: resolutions, 2 ( 0.00%) prof: s.prof.clone(), . types: common_types, . lifetimes: common_lifetimes, . consts: common_consts, . untracked_crate: krate, . on_disk_cache, . queries, . 
query_caches: query::QueryCaches::default(), . query_kinds, . ty_rcache: Default::default(), . pred_rcache: Default::default(), . selection_cache: Default::default(), . evaluation_cache: Default::default(), 1 ( 0.00%) crate_name: Symbol::intern(crate_name), . data_layout, . alloc_map: Lock::new(interpret::AllocMap::new()), . output_filenames: Arc::new(output_filenames), . } 9 ( 0.00%) } . . crate fn query_kind(self, k: DepKind) -> &'tcx DepKindStruct { . &self.query_kinds[k as usize] . } . . /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used. . #[track_caller] . pub fn ty_error(self) -> Ty<'tcx> { -- line 1191 ---------------------------------------- -- line 1217 ---------------------------------------- . ty: Ty<'tcx>, . span: S, . msg: &str, . ) -> &'tcx Const<'tcx> { . self.sess.delay_span_bug(span, msg); . self.mk_const(ty::Const { val: ty::ConstKind::Error(DelaySpanBugEmitted(())), ty }) . } . 292,390 ( 0.00%) pub fn consider_optimizing String>(self, msg: T) -> bool { 41,770 ( 0.00%) let cname = self.crate_name(LOCAL_CRATE); 312,642 ( 0.00%) self.sess.consider_optimizing(cname.as_str(), msg) 334,160 ( 0.00%) } . . /// Obtain all lang items of this crate and all dependencies (recursively) 10,921,442 ( 0.02%) pub fn lang_items(self) -> &'tcx rustc_hir::lang_items::LanguageItems { . self.get_lang_items(()) 14,041,854 ( 0.03%) } . . /// Obtain the given diagnostic item's `DefId`. Use `is_diagnostic_item` if you just want to . /// compare against another `DefId`, since `is_diagnostic_item` is cheaper. 2,814 ( 0.00%) pub fn get_diagnostic_item(self, name: Symbol) -> Option { . self.all_diagnostic_items(()).name_to_id.get(&name).copied() 3,216 ( 0.00%) } . . /// Obtain the diagnostic item's name 524,181 ( 0.00%) pub fn get_diagnostic_name(self, id: DefId) -> Option { . self.diagnostic_items(id.krate).id_to_name.get(&id).copied() 599,064 ( 0.00%) } . . /// Check whether the diagnostic item with the given `name` has the given `DefId`. 105,056 ( 0.00%) pub fn is_diagnostic_item(self, name: Symbol, did: DefId) -> bool { . self.diagnostic_items(did.krate).name_to_id.get(&name) == Some(&did) 120,064 ( 0.00%) } . 145,159 ( 0.00%) pub fn stability(self) -> &'tcx stability::Index<'tcx> { . self.stability_index(()) 186,633 ( 0.00%) } . 4,603,249 ( 0.01%) pub fn features(self) -> &'tcx rustc_feature::Features { . self.features_query(()) 5,260,856 ( 0.01%) } . 44,218 ( 0.00%) pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey { . // Accessing the DefKey is ok, since it is part of DefPathHash. 1,247,400 ( 0.00%) if let Some(id) = id.as_local() { . self.untracked_resolutions.definitions.def_key(id) . } else { 791,876 ( 0.00%) self.untracked_resolutions.cstore.def_key(id) . } 25,256 ( 0.00%) } . . /// Converts a `DefId` into its fully expanded `DefPath` (every . /// `DefId` is really just an interned `DefPath`). . /// . /// Note that if `id` is not local to this crate, the result will . /// be a non-local `DefPath`. 108 ( 0.00%) pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath { . // Accessing the DefPath is ok, since it is part of DefPathHash. 162 ( 0.00%) if let Some(id) = id.as_local() { 84 ( 0.00%) self.untracked_resolutions.definitions.def_path(id) . } else { 130 ( 0.00%) self.untracked_resolutions.cstore.def_path(id) . } 162 ( 0.00%) } . . #[inline] . pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash { . // Accessing the DefPathHash is ok, it is incr. comp. stable. 
74,514 ( 0.00%) if let Some(def_id) = def_id.as_local() { . self.untracked_resolutions.definitions.def_path_hash(def_id) . } else { 12,492 ( 0.00%) self.untracked_resolutions.cstore.def_path_hash(def_id) . } . } . . #[inline] . pub fn stable_crate_id(self, crate_num: CrateNum) -> StableCrateId { . if crate_num == LOCAL_CRATE { . self.sess.local_stable_crate_id() . } else { -- line 1296 ---------------------------------------- -- line 1353 ---------------------------------------- . &(format!("{:08x}", stable_crate_id.to_u64()))[..4], . self.def_path(def_id).to_string_no_crate_verbose() . ) . } . . /// Note that this is *untracked* and should only be used within the query . /// system if the result is otherwise tracked through queries . pub fn cstore_untracked(self) -> &'tcx ty::CrateStoreDyn { 74,058 ( 0.00%) &*self.untracked_resolutions.cstore 37,029 ( 0.00%) } . . /// Note that this is *untracked* and should only be used within the query . /// system if the result is otherwise tracked through queries . pub fn definitions_untracked(self) -> &'tcx hir::definitions::Definitions { . &self.untracked_resolutions.definitions . } . . #[inline(always)] . pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> { 5,140 ( 0.00%) let resolutions = &self.gcx.untracked_resolutions; 21,577 ( 0.00%) StableHashingContext::new(self.sess, &resolutions.definitions, &*resolutions.cstore) . } . . #[inline(always)] . pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> { . let resolutions = &self.gcx.untracked_resolutions; . StableHashingContext::ignore_spans( . self.sess, . &resolutions.definitions, -- line 1381 ---------------------------------------- -- line 1390 ---------------------------------------- . /// If `true`, we should use the MIR-based borrowck, but also . /// fall back on the AST borrowck if the MIR-based one errors. . pub fn migrate_borrowck(self) -> bool { . self.borrowck_mode().migrate() . } . . /// What mode(s) of borrowck should we run? AST? MIR? both? . /// (Also considers the `#![feature(nll)]` setting.) 13,566 ( 0.00%) pub fn borrowck_mode(self) -> BorrowckMode { . // Here are the main constraints we need to deal with: . // . // 1. An opts.borrowck_mode of `BorrowckMode::Migrate` is . // synonymous with no `-Z borrowck=...` flag at all. . // . // 2. We want to allow developers on the Nightly channel . // to opt back into the "hard error" mode for NLL, . // (which they can do via specifying `#![feature(nll)]` -- line 1406 ---------------------------------------- -- line 1413 ---------------------------------------- . // errors. (To simplify the code here, it now even overrides . // a user's attempt to specify `-Z borrowck=compare`, which . // we arguably do not need anymore and should remove.) . // . // * Otherwise, if no `-Z borrowck=...` then use migrate mode . // . // * Otherwise, use the behavior requested via `-Z borrowck=...` . 20,349 ( 0.00%) if self.features().nll { . return BorrowckMode::Mir; . } . 6,783 ( 0.00%) self.sess.opts.borrowck_mode 27,132 ( 0.00%) } . . /// If `true`, we should use lazy normalization for constants, otherwise . /// we still evaluate them eagerly. . #[inline] . pub fn lazy_normalization(self) -> bool { 49 ( 0.00%) let features = self.features(); . // Note: We only use lazy normalization for generic const expressions. 11 ( 0.00%) features.generic_const_exprs . } . . #[inline] . pub fn local_crate_exports_generics(self) -> bool { . debug_assert!(self.sess.opts.share_generics()); . . 
self.sess.crate_types().iter().any(|crate_type| { . match crate_type { -- line 1442 ---------------------------------------- -- line 1540 ---------------------------------------- . // FIXME(#42706) -- in some cases, we could do better here. . return true; . } . false . } . . /// Determines whether identifiers in the assembly have strict naming rules. . /// Currently, only NVPTX* targets need it. 25,438 ( 0.00%) pub fn has_strict_asm_symbol_naming(self) -> bool { 25,438 ( 0.00%) self.sess.target.arch.contains("nvptx") 50,876 ( 0.00%) } . . /// Returns `&'static core::panic::Location<'static>`. 504 ( 0.00%) pub fn caller_location_ty(self) -> Ty<'tcx> { . self.mk_imm_ref( 84 ( 0.00%) self.lifetimes.re_static, . self.type_of(self.require_lang_item(LangItem::PanicLocation, None)) 168 ( 0.00%) .subst(self, self.mk_substs([self.lifetimes.re_static.into()].iter())), . ) 504 ( 0.00%) } . . /// Returns a displayable description and article for the given `def_id` (e.g. `("a", "struct")`). 33,517 ( 0.00%) pub fn article_and_description(self, def_id: DefId) -> (&'static str, &'static str) { 24,376 ( 0.00%) match self.def_kind(def_id) { . DefKind::Generator => match self.generator_kind(def_id).unwrap() { . rustc_hir::GeneratorKind::Async(..) => ("an", "async closure"), . rustc_hir::GeneratorKind::Gen => ("a", "generator"), . }, 54,846 ( 0.00%) def_kind => (def_kind.article(), def_kind.descr(def_id)), . } 27,423 ( 0.00%) } . 64,729 ( 0.00%) pub fn type_length_limit(self) -> Limit { . self.limits(()).type_length_limit 73,976 ( 0.00%) } . 3,842,216 ( 0.01%) pub fn recursion_limit(self) -> Limit { . self.limits(()).recursion_limit 4,391,104 ( 0.01%) } . 248,640 ( 0.00%) pub fn move_size_limit(self) -> Limit { . self.limits(()).move_size_limit 284,160 ( 0.00%) } . 23,800 ( 0.00%) pub fn const_eval_limit(self) -> Limit { . self.limits(()).const_eval_limit 27,200 ( 0.00%) } . . pub fn all_traits(self) -> impl Iterator + 'tcx { . iter::once(LOCAL_CRATE) . .chain(self.crates(()).iter().copied()) . .flat_map(move |cnum| self.traits_in_crate(cnum).iter().copied()) . } . } . -- line 1594 ---------------------------------------- -- line 1614 ---------------------------------------- . fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option; . } . . macro_rules! nop_lift { . ($set:ident; $ty:ty => $lifted:ty) => { . impl<'a, 'tcx> Lift<'tcx> for $ty { . type Lifted = $lifted; . fn lift_to_tcx(self, tcx: TyCtxt<'tcx>) -> Option { 10,564 ( 0.00%) if tcx.interners.$set.contains_pointer_to(&Interned(self)) { . Some(unsafe { mem::transmute(self) }) . } else { . None . } . } . } . }; . } -- line 1630 ---------------------------------------- -- line 1684 ---------------------------------------- . /// This is the implicit state of rustc. It contains the current . /// `TyCtxt` and query. It is updated when creating a local interner or . /// executing a new query. Whenever there's a `TyCtxt` value available . /// you should also have access to an `ImplicitCtxt` through the functions . /// in this module. . #[derive(Clone)] . pub struct ImplicitCtxt<'a, 'tcx> { . /// The current `TyCtxt`. 2 ( 0.00%) pub tcx: TyCtxt<'tcx>, . . /// The current query job, if any. This is updated by `JobOwner::start` in . /// `ty::query::plumbing` when executing a query. . pub query: Option>, . . /// Where to store diagnostics for the current query job, if any. . /// This is updated by `JobOwner::start` in `ty::query::plumbing` when executing a query. . 
pub diagnostics: Option<&'a Lock>>, -- line 1700 ---------------------------------------- -- line 1703 ---------------------------------------- . pub layout_depth: usize, . . /// The current dep graph task. This is used to add dependencies to queries . /// when executing them. . pub task_deps: TaskDepsRef<'a>, . } . . impl<'a, 'tcx> ImplicitCtxt<'a, 'tcx> { 5 ( 0.00%) pub fn new(gcx: &'tcx GlobalCtxt<'tcx>) -> Self { . let tcx = TyCtxt { gcx }; 25 ( 0.00%) ImplicitCtxt { . tcx, . query: None, . diagnostics: None, . layout_depth: 0, . task_deps: TaskDepsRef::Ignore, . } 5 ( 0.00%) } . } . . /// Sets Rayon's thread-local variable, which is preserved for Rayon jobs . /// to `value` during the call to `f`. It is restored to its previous value after. . /// This is used to set the pointer to the new `ImplicitCtxt`. . #[cfg(parallel_compiler)] . #[inline] . fn set_tlv R, R>(value: usize, f: F) -> R { -- line 1728 ---------------------------------------- -- line 1745 ---------------------------------------- . . /// Sets TLV to `value` during the call to `f`. . /// It is restored to its previous value after. . /// This is used to set the pointer to the new `ImplicitCtxt`. . #[cfg(not(parallel_compiler))] . #[inline] . fn set_tlv R, R>(value: usize, f: F) -> R { . let old = get_tlv(); 849,670 ( 0.00%) let _reset = rustc_data_structures::OnDrop(move || TLV.with(|tlv| tlv.set(old))); . TLV.with(|tlv| tlv.set(value)); . f() . } . . /// Gets the pointer to the current `ImplicitCtxt`. . #[cfg(not(parallel_compiler))] . #[inline] . fn get_tlv() -> usize { -- line 1761 ---------------------------------------- -- line 1763 ---------------------------------------- . } . . /// Sets `context` as the new current `ImplicitCtxt` for the duration of the function `f`. . #[inline] . pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R . where . F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, . { 1 ( 0.00%) set_tlv(context as *const _ as usize, || f(&context)) . } . . /// Allows access to the current `ImplicitCtxt` in a closure if one is available. . #[inline] . pub fn with_context_opt(f: F) -> R . where . F: for<'a, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'tcx>>) -> R, . { . let context = get_tlv(); 3,567,749 ( 0.01%) if context == 0 { . f(None) . } else { . // We could get an `ImplicitCtxt` pointer from another thread. . // Ensure that `ImplicitCtxt` is `Sync`. . sync::assert_sync::>(); . . unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) } . } -- line 1789 ---------------------------------------- -- line 1791 ---------------------------------------- . . /// Allows access to the current `ImplicitCtxt`. . /// Panics if there is no `ImplicitCtxt` available. . #[inline] . pub fn with_context(f: F) -> R . where . F: for<'a, 'tcx> FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, . { 5,030 ( 0.00%) with_context_opt(|opt_context| f(opt_context.expect("no ImplicitCtxt stored in tls"))) . } . . /// Allows access to the current `ImplicitCtxt` whose tcx field is the same as the tcx argument . /// passed in. This means the closure is given an `ImplicitCtxt` with the same `'tcx` lifetime . /// as the `TyCtxt` passed in. . /// This will panic if you pass it a `TyCtxt` which is different from the current . /// `ImplicitCtxt`'s `tcx` field. . #[inline] . pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R . where . F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R, . { . with_context(|context| unsafe { 1,662,438 ( 0.00%) assert!(ptr_eq(context.tcx.gcx, tcx.gcx)); . 
let context: &ImplicitCtxt<'_, '_> = mem::transmute(context); 1,638,682 ( 0.00%) f(context) . }) . } . . /// Allows access to the `TyCtxt` in the current `ImplicitCtxt`. . /// Panics if there is no `ImplicitCtxt` available. . #[inline] . pub fn with(f: F) -> R . where -- line 1823 ---------------------------------------- -- line 1981 ---------------------------------------- . fn into_pointer(&self) -> *const () { . self.0 as *const _ as *const () . } . } . . #[allow(rustc::usage_of_ty_tykind)] . impl<'tcx> Borrow> for Interned<'tcx, TyS<'tcx>> { . fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> { 3,634,955 ( 0.01%) &self.0.kind() . } . } . . impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { . fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0.kind() == other.0.kind() -- line 1997 ---------------------------------------- -- line 1998 ---------------------------------------- . } . } . . impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {} . . impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 698,680 ( 0.00%) self.0.kind().hash(s) . } . } . . impl<'tcx> Borrow>> for Interned<'tcx, PredicateInner<'tcx>> { . fn borrow<'a>(&'a self) -> &'a Binder<'tcx, PredicateKind<'tcx>> { 1,413,559 ( 0.00%) &self.0.kind . } . } . . impl<'tcx> PartialEq for Interned<'tcx, PredicateInner<'tcx>> { . fn eq(&self, other: &Interned<'tcx, PredicateInner<'tcx>>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0.kind == other.0.kind -- line 2020 ---------------------------------------- -- line 2021 ---------------------------------------- . } . } . . impl<'tcx> Eq for Interned<'tcx, PredicateInner<'tcx>> {} . . impl<'tcx> Hash for Interned<'tcx, PredicateInner<'tcx>> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 229,372 ( 0.00%) self.0.kind.hash(s) . } . } . . impl<'tcx, T> Borrow<[T]> for Interned<'tcx, List> { . fn borrow<'a>(&'a self) -> &'a [T] { 5,074,731 ( 0.01%) &self.0[..] . } . } . . impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List> { . fn eq(&self, other: &Interned<'tcx, List>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0[..] == other.0[..] -- line 2043 ---------------------------------------- -- line 2044 ---------------------------------------- . } . } . . impl<'tcx, T: Eq> Eq for Interned<'tcx, List> {} . . impl<'tcx, T: Hash> Hash for Interned<'tcx, List> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 358,470 ( 0.00%) self.0[..].hash(s) . } . } . . macro_rules! direct_interners { . ($($name:ident: $method:ident($ty:ty),)+) => { . $(impl<'tcx> Borrow<$ty> for Interned<'tcx, $ty> { . fn borrow<'a>(&'a self) -> &'a $ty { 1,667,113 ( 0.00%) &self.0 . } . } . . impl<'tcx> PartialEq for Interned<'tcx, $ty> { . fn eq(&self, other: &Self) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` . // equals `x == y`. . self.0 == other.0 -- line 2068 ---------------------------------------- -- line 2070 ---------------------------------------- . } . . impl<'tcx> Eq for Interned<'tcx, $ty> {} . . impl<'tcx> Hash for Interned<'tcx, $ty> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == . // x.hash(s)`. 
146,002 ( 0.00%) self.0.hash(s) . } . } . . impl<'tcx> TyCtxt<'tcx> { 11,433,172 ( 0.02%) pub fn $method(self, v: $ty) -> &'tcx $ty { 5,862,765 ( 0.01%) self.interners.$name.intern(v, |v| { 36,325 ( 0.00%) Interned(self.interners.arena.alloc(v)) . }).0 12,838,806 ( 0.02%) } . })+ . } . } . . direct_interners! { . region: mk_region(RegionKind), . const_: mk_const(Const<'tcx>), . const_allocation: intern_const_alloc(Allocation), -- line 2095 ---------------------------------------- -- line 2097 ---------------------------------------- . adt_def: intern_adt_def(AdtDef), . stability: intern_stability(attr::Stability), . const_stability: intern_const_stability(attr::ConstStability), . } . . macro_rules! slice_interners { . ($($field:ident: $method:ident($ty:ty)),+ $(,)?) => ( . impl<'tcx> TyCtxt<'tcx> { 53,241,619 ( 0.10%) $(pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> { . self.interners.$field.intern_ref(v, || { . Interned(List::from_arena(&*self.arena, v)) . }).0 47,457,315 ( 0.09%) })+ . } . ); . } . . slice_interners!( . type_list: _intern_type_list(Ty<'tcx>), . substs: _intern_substs(GenericArg<'tcx>), . canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo<'tcx>), -- line 2117 ---------------------------------------- -- line 2129 ---------------------------------------- . /// unsafe. . pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> { . assert_eq!(sig.unsafety(), hir::Unsafety::Normal); . self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { unsafety: hir::Unsafety::Unsafe, ..sig })) . } . . /// Given the def_id of a Trait `trait_def_id` and the name of an associated item `assoc_name` . /// returns true if the `trait_def_id` defines an associated item of name `assoc_name`. 21,444 ( 0.00%) pub fn trait_may_define_assoc_type(self, trait_def_id: DefId, assoc_name: Ident) -> bool { . self.super_traits_of(trait_def_id).any(|trait_did| { . self.associated_items(trait_did) . .find_by_name_and_kind(self, assoc_name, ty::AssocKind::Type, trait_did) . .is_some() . }) 16,083 ( 0.00%) } . . /// Computes the def-ids of the transitive supertraits of `trait_def_id`. This (intentionally) . /// does not compute the full elaborated super-predicates but just the set of def-ids. It is used . /// to identify which traits may define a given associated type to help avoid cycle errors. . /// Returns a `DefId` iterator. . fn super_traits_of(self, trait_def_id: DefId) -> impl Iterator + 'tcx { . let mut set = FxHashSet::default(); 3,574 ( 0.00%) let mut stack = vec![trait_def_id]; . . set.insert(trait_def_id); . 16,083 ( 0.00%) iter::from_fn(move || -> Option { 3,576 ( 0.00%) let trait_did = stack.pop()?; 1,788 ( 0.00%) let generic_predicates = self.super_predicates_of(trait_did); . . for (predicate, _) in generic_predicates.predicates { 3,996 ( 0.00%) if let ty::PredicateKind::Trait(data) = predicate.kind().skip_binder() { 2,664 ( 0.00%) if set.insert(data.def_id()) { . stack.push(data.def_id()); . } . } . } . . Some(trait_did) . }) . } -- line 2169 ---------------------------------------- -- line 2188 ---------------------------------------- . self.mk_fn_sig(params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust) . }) . } . . /// Same a `self.mk_region(kind)`, but avoids accessing the interners if . /// `*r == kind`. . #[inline] . pub fn reuse_or_mk_region(self, r: Region<'tcx>, kind: RegionKind) -> Region<'tcx> { 1,644,180 ( 0.00%) if *r == kind { r } else { self.mk_region(kind) } . } . . #[allow(rustc::usage_of_ty_tykind)] . #[inline] . 
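// Illustrative sketch (not part of the profiled source): the
// direct_interners!/slice_interners! macros above implement hash-consing --
// a value is hashed, looked up, and only allocated in the arena if it has
// not been seen before, so equal values share one allocation and can later
// be compared by pointer. A minimal version that leaks boxes instead of
// using an arena; Interner and intern are invented names for the example.
use std::collections::HashMap;
use std::hash::Hash;

struct Interner<T: Eq + Hash + Clone + 'static> {
    map: HashMap<T, &'static T>,
}

impl<T: Eq + Hash + Clone + 'static> Interner<T> {
    fn new() -> Self {
        Interner { map: HashMap::new() }
    }

    fn intern(&mut self, value: T) -> &'static T {
        // Reuse the existing allocation if an equal value was interned
        // before; otherwise allocate a new one and remember it.
        if let Some(&interned) = self.map.get(&value) {
            return interned;
        }
        let leaked: &'static T = Box::leak(Box::new(value.clone()));
        self.map.insert(value, leaked);
        leaked
    }
}

fn main() {
    let mut strings = Interner::new();
    let a = strings.intern(String::from("foo"));
    let b = strings.intern(String::from("foo"));
    // Interned values with equal contents are pointer-identical.
    assert!(std::ptr::eq(a, b));
}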
pub fn mk_ty(self, st: TyKind<'tcx>) -> Ty<'tcx> { 26,836,984 ( 0.05%) self.interners.intern_ty(st) . } . . #[inline] . pub fn mk_predicate(self, binder: Binder<'tcx, PredicateKind<'tcx>>) -> Predicate<'tcx> { 16,824,468 ( 0.03%) let inner = self.interners.intern_predicate(binder); . Predicate { inner } . } . . #[inline] 20,316,832 ( 0.04%) pub fn reuse_or_mk_predicate( . self, . pred: Predicate<'tcx>, . binder: Binder<'tcx, PredicateKind<'tcx>>, . ) -> Predicate<'tcx> { 1,590,522 ( 0.00%) if pred.kind() != binder { self.mk_predicate(binder) } else { pred } 20,316,832 ( 0.04%) } . . pub fn mk_mach_int(self, tm: IntTy) -> Ty<'tcx> { 752 ( 0.00%) match tm { . IntTy::Isize => self.types.isize, . IntTy::I8 => self.types.i8, . IntTy::I16 => self.types.i16, . IntTy::I32 => self.types.i32, . IntTy::I64 => self.types.i64, . IntTy::I128 => self.types.i128, . } 752 ( 0.00%) } . . pub fn mk_mach_uint(self, tm: UintTy) -> Ty<'tcx> { 4,559 ( 0.00%) match tm { . UintTy::Usize => self.types.usize, . UintTy::U8 => self.types.u8, . UintTy::U16 => self.types.u16, . UintTy::U32 => self.types.u32, . UintTy::U64 => self.types.u64, . UintTy::U128 => self.types.u128, . } 4,559 ( 0.00%) } . . pub fn mk_mach_float(self, tm: FloatTy) -> Ty<'tcx> { 308 ( 0.00%) match tm { . FloatTy::F32 => self.types.f32, . FloatTy::F64 => self.types.f64, . } 308 ( 0.00%) } . . #[inline] . pub fn mk_static_str(self) -> Ty<'tcx> { 13,436 ( 0.00%) self.mk_imm_ref(self.lifetimes.re_static, self.types.str_) . } . . #[inline] . pub fn mk_adt(self, def: &'tcx AdtDef, substs: SubstsRef<'tcx>) -> Ty<'tcx> { . // Take a copy of substs so that we own the vectors inside. . self.mk_ty(Adt(def, substs)) . } . -- line 2259 ---------------------------------------- -- line 2329 ---------------------------------------- . } . . #[inline] . pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { . self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Not }) . } . . #[inline] 28,637 ( 0.00%) pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> { . self.mk_ty(Array(ty, ty::Const::from_usize(self, n))) 29,656 ( 0.00%) } . . #[inline] . pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> { . self.mk_ty(Slice(ty)) . } . . #[inline] . pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> { 656 ( 0.00%) let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect(); 2,582 ( 0.00%) self.mk_ty(Tuple(self.intern_substs(&kinds))) . } . . pub fn mk_tup], Ty<'tcx>>>(self, iter: I) -> I::Output { 29,850 ( 0.00%) iter.intern_with(|ts| { 14,286 ( 0.00%) let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect(); 107,850 ( 0.00%) self.mk_ty(Tuple(self.intern_substs(&kinds))) 26,865 ( 0.00%) }) . } . . #[inline] . pub fn mk_unit(self) -> Ty<'tcx> { 11,209 ( 0.00%) self.types.unit . } . . #[inline] . pub fn mk_diverging_default(self) -> Ty<'tcx> { . if self.features().never_type_fallback { self.types.never } else { self.types.unit } . } . . #[inline] . pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { . self.mk_ty(FnDef(def_id, substs)) . } . . #[inline] . pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> { 7,284 ( 0.00%) self.mk_ty(FnPtr(fty)) . } . . #[inline] . pub fn mk_dynamic( . self, . obj: &'tcx List>>, . reg: ty::Region<'tcx>, . ) -> Ty<'tcx> { -- line 2384 ---------------------------------------- -- line 2412 ---------------------------------------- . . #[inline] . pub fn mk_ty_var(self, v: TyVid) -> Ty<'tcx> { . self.mk_ty_infer(TyVar(v)) . } . . #[inline] . 
pub fn mk_const_var(self, v: ConstVid<'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { 1,266 ( 0.00%) self.mk_const(ty::Const { val: ty::ConstKind::Infer(InferConst::Var(v)), ty }) . } . . #[inline] . pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> { . self.mk_ty_infer(IntVar(v)) . } . . #[inline] -- line 2428 ---------------------------------------- -- line 2445 ---------------------------------------- . self.mk_ty(Param(ParamTy { index, name })) . } . . #[inline] . pub fn mk_const_param(self, index: u32, name: Symbol, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { . self.mk_const(ty::Const { val: ty::ConstKind::Param(ParamConst { index, name }), ty }) . } . 273,602 ( 0.00%) pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> { 152,384 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => { 129,138 ( 0.00%) self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into() . } 35,126 ( 0.00%) GenericParamDefKind::Type { .. } => self.mk_ty_param(param.index, param.name).into(), . GenericParamDefKind::Const { .. } => { . self.mk_const_param(param.index, param.name, self.type_of(param.def_id)).into() . } . } 273,602 ( 0.00%) } . . #[inline] . pub fn mk_opaque(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { . self.mk_ty(Opaque(def_id, substs)) . } . 34,811 ( 0.00%) pub fn mk_place_field(self, place: Place<'tcx>, f: Field, ty: Ty<'tcx>) -> Place<'tcx> { 34,811 ( 0.00%) self.mk_place_elem(place, PlaceElem::Field(f, ty)) 69,622 ( 0.00%) } . 297 ( 0.00%) pub fn mk_place_deref(self, place: Place<'tcx>) -> Place<'tcx> { 891 ( 0.00%) self.mk_place_elem(place, PlaceElem::Deref) 594 ( 0.00%) } . 16,654 ( 0.00%) pub fn mk_place_downcast( . self, . place: Place<'tcx>, . adt_def: &'tcx AdtDef, . variant_index: VariantIdx, . ) -> Place<'tcx> { 16,654 ( 0.00%) self.mk_place_elem( . place, 33,308 ( 0.00%) PlaceElem::Downcast(Some(adt_def.variants[variant_index].name), variant_index), . ) 33,308 ( 0.00%) } . . pub fn mk_place_downcast_unnamed( . self, . place: Place<'tcx>, . variant_index: VariantIdx, . ) -> Place<'tcx> { . self.mk_place_elem(place, PlaceElem::Downcast(None, variant_index)) . } . 14 ( 0.00%) pub fn mk_place_index(self, place: Place<'tcx>, index: Local) -> Place<'tcx> { 14 ( 0.00%) self.mk_place_elem(place, PlaceElem::Index(index)) 28 ( 0.00%) } . . /// This method copies `Place`'s projection, add an element and reintern it. Should not be used . /// to build a full `Place` it's just a convenient way to grab a projection and modify it in . /// flight. 623,781 ( 0.00%) pub fn mk_place_elem(self, place: Place<'tcx>, elem: PlaceElem<'tcx>) -> Place<'tcx> { . let mut projection = place.projection.to_vec(); . projection.push(elem); . . Place { local: place.local, projection: self.intern_place_elems(&projection) } 693,090 ( 0.00%) } . 54,194 ( 0.00%) pub fn intern_poly_existential_predicates( . self, . eps: &[ty::Binder<'tcx, ExistentialPredicate<'tcx>>], . ) -> &'tcx List>> { 7,742 ( 0.00%) assert!(!eps.is_empty()); . assert!( . eps.array_windows() 70 ( 0.00%) .all(|[a, b]| a.skip_binder().stable_cmp(self, &b.skip_binder()) . != Ordering::Greater) . ); 30,968 ( 0.00%) self._intern_poly_existential_predicates(eps) 61,936 ( 0.00%) } . . pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List> { . // FIXME consider asking the input slice to be sorted to avoid . // re-interning permutations, in which case that would be asserted . // here. 328,668 ( 0.00%) if preds.is_empty() { . 
// The macro-generated method below asserts we don't intern an empty slice. . List::empty() . } else { 275,201 ( 0.00%) self._intern_predicates(preds) . } 109,360 ( 0.00%) } . . pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List> { 501,728 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_type_list(ts) } 17,800 ( 0.00%) } . . pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List> { 7,998,370 ( 0.02%) if ts.is_empty() { List::empty() } else { self._intern_substs(ts) } 393,052 ( 0.00%) } . . pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List { . if ps.is_empty() { List::empty() } else { self._intern_projs(ps) } . } . . pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List> { 368,153 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_place_elems(ts) } 130,294 ( 0.00%) } . . pub fn intern_canonical_var_infos( . self, . ts: &[CanonicalVarInfo<'tcx>], . ) -> CanonicalVarInfos<'tcx> { 707,043 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_canonical_var_infos(ts) } 5,990 ( 0.00%) } . . pub fn intern_bound_variable_kinds( . self, . ts: &[ty::BoundVariableKind], . ) -> &'tcx List { 118,975 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_bound_variable_kinds(ts) } 174,732 ( 0.00%) } . . pub fn mk_fn_sig( . self, . inputs: I, . output: I::Item, . c_variadic: bool, . unsafety: hir::Unsafety, . abi: abi::Abi, . ) -> , ty::FnSig<'tcx>>>::Output . where . I: Iterator, ty::FnSig<'tcx>>>, . { . inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig { 31,412 ( 0.00%) inputs_and_output: self.intern_type_list(xs), 10,136 ( 0.00%) c_variadic, 11,733 ( 0.00%) unsafety, 17,078 ( 0.00%) abi, 290 ( 0.00%) }) . } . . pub fn mk_poly_existential_predicates< . I: InternAs< . [ty::Binder<'tcx, ExistentialPredicate<'tcx>>], . &'tcx List>>, . >, . >( . self, . iter: I, . ) -> I::Output { 30,941 ( 0.00%) iter.intern_with(|xs| self.intern_poly_existential_predicates(xs)) . } . . pub fn mk_predicates], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { 34 ( 0.00%) iter.intern_with(|xs| self.intern_predicates(xs)) . } . . pub fn mk_type_list], &'tcx List>>>(self, iter: I) -> I::Output { 107,095 ( 0.00%) iter.intern_with(|xs| self.intern_type_list(xs)) . } . 72 ( 0.00%) pub fn mk_substs], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { 3,531,646 ( 0.01%) iter.intern_with(|xs| self.intern_substs(xs)) 96 ( 0.00%) } . . pub fn mk_place_elems], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { 89,778 ( 0.00%) iter.intern_with(|xs| self.intern_place_elems(xs)) . } . 310,560 ( 0.00%) pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> { . self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned())) 621,120 ( 0.00%) } . . pub fn mk_bound_variable_kinds< . I: InternAs<[ty::BoundVariableKind], &'tcx List>, . >( . self, . iter: I, . ) -> I::Output { 531,423 ( 0.00%) iter.intern_with(|xs| self.intern_bound_variable_kinds(xs)) . } . . /// Walks upwards from `id` to find a node which might change lint levels with attributes. . /// It stops at `bound` and just returns it if reached. 2,370,620 ( 0.00%) pub fn maybe_lint_level_root_bounded(self, mut id: HirId, bound: HirId) -> HirId { 2,370,620 ( 0.00%) let hir = self.hir(); . loop { 5,268,762 ( 0.01%) if id == bound { . return bound; . } . 7,089,634 ( 0.01%) if hir.attrs(id).iter().any(|attr| Level::from_symbol(attr.name_or_empty()).is_some()) { . return id; . } . 
let next = hir.get_parent_node(id); 4,252,782 ( 0.01%) if next == id { . bug!("lint traversal reached the root of the crate"); . } . id = next; . } 3,386,600 ( 0.01%) } . 81,232 ( 0.00%) pub fn lint_level_at_node( . self, . lint: &'static Lint, . mut id: hir::HirId, . ) -> (Level, LintLevelSource) { . let sets = self.lint_levels(()); . loop { 514,324 ( 0.00%) if let Some(pair) = sets.level_and_source(lint, id, self.sess) { . return pair; . } 41,352 ( 0.00%) let next = self.hir().get_parent_node(id); 124,056 ( 0.00%) if next == id { . bug!("lint traversal reached the root of the crate"); . } . id = next; . } 81,232 ( 0.00%) } . 16,702 ( 0.00%) pub fn struct_span_lint_hir( . self, . lint: &'static Lint, . hir_id: HirId, . span: impl Into, . decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), . ) { 19,084 ( 0.00%) let (level, src) = self.lint_level_at_node(lint, hir_id); 10,385 ( 0.00%) struct_lint_level(self.sess, lint, level, src, Some(span.into()), decorate); 9,544 ( 0.00%) } . . pub fn struct_lint_node( . self, . lint: &'static Lint, . id: HirId, . decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), . ) { . let (level, src) = self.lint_level_at_node(lint, id); . struct_lint_level(self.sess, lint, level, src, None, decorate); . } . 30,128 ( 0.00%) pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx [TraitCandidate]> { 8,608 ( 0.00%) let map = self.in_scope_traits_map(id.owner)?; . let candidates = map.get(&id.local_id)?; 8,608 ( 0.00%) Some(&*candidates) 34,432 ( 0.00%) } . 205,016 ( 0.00%) pub fn named_region(self, id: HirId) -> Option { . debug!(?id, "named_region"); . self.named_region_map(id.owner).and_then(|map| map.get(&id.local_id).cloned()) 263,592 ( 0.00%) } . 28,386 ( 0.00%) pub fn is_late_bound(self, id: HirId) -> bool { . self.is_late_bound_map(id.owner) 1,575 ( 0.00%) .map_or(false, |(owner, set)| owner == id.owner && set.contains(&id.local_id)) 25,232 ( 0.00%) } . 157,850 ( 0.00%) pub fn object_lifetime_defaults(self, id: HirId) -> Option> { . self.object_lifetime_defaults_map(id.owner) 202,950 ( 0.00%) } . 206,676 ( 0.00%) pub fn late_bound_vars(self, id: HirId) -> &'tcx List { . self.mk_bound_variable_kinds( . self.late_bound_vars_map(id.owner) . .and_then(|map| map.get(&id.local_id).cloned()) . .unwrap_or_else(|| { . bug!("No bound vars found for {:?} ({:?})", self.hir().node_to_string(id), id) . }) . .iter(), . ) 206,676 ( 0.00%) } . . pub fn lifetime_scope(self, id: HirId) -> Option { . self.lifetime_scope_map(id.owner).and_then(|mut map| map.remove(&id.local_id)) . } . . /// Whether the `def_id` counts as const fn in the current crate, considering all active . /// feature gates 3,780 ( 0.00%) pub fn is_const_fn(self, def_id: DefId) -> bool { 378 ( 0.00%) if self.is_const_fn_raw(def_id) { 962 ( 0.00%) match self.lookup_const_stability(def_id) { 4 ( 0.00%) Some(stability) if stability.level.is_unstable() => { . // has a `rustc_const_unstable` attribute, check whether the user enabled the . // corresponding feature gate. 2 ( 0.00%) self.features() . .declared_lib_features . .iter() . .any(|&(sym, _)| sym == stability.feature) . } . // functions without const stability are either stable user written . // const fn or the user is using feature gates and we thus don't . // care what they do . _ => true, . } . } else { . false . } 3,402 ( 0.00%) } . } . . impl<'tcx> TyCtxtAt<'tcx> { . /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used. . #[track_caller] . pub fn ty_error(self) -> Ty<'tcx> { . 
self.tcx.ty_error_with_message(self.span, "TyKind::Error constructed but no error reported") . } -- line 2759 ---------------------------------------- -- line 2774 ---------------------------------------- . } . . impl InternAs<[T], R> for I . where . E: InternIteratorElement, . I: Iterator, . { . type Output = E::Output; 169,784 ( 0.00%) fn intern_with(self, f: F) -> Self::Output . where . F: FnOnce(&[T]) -> R, . { 16,940,929 ( 0.03%) E::intern_with(self, f) 136,827 ( 0.00%) } . } . . pub trait InternIteratorElement: Sized { . type Output; . fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; . } . . impl InternIteratorElement for T { . type Output = R; 2,402,430 ( 0.00%) fn intern_with, F: FnOnce(&[T]) -> R>( . mut iter: I, . f: F, . ) -> Self::Output { . // This code is hot enough that it's worth specializing for the most . // common length lists, to avoid the overhead of `SmallVec` creation. . // Lengths 0, 1, and 2 typically account for ~95% of cases. If . // `size_hint` is incorrect a panic will occur via an `unwrap` or an . // `assert`. 2,654,778 ( 0.01%) match iter.size_hint() { . (0, Some(0)) => { 180 ( 0.00%) assert!(iter.next().is_none()); . f(&[]) . } . (1, Some(1)) => { . let t0 = iter.next().unwrap(); 111 ( 0.00%) assert!(iter.next().is_none()); 318,985 ( 0.00%) f(&[t0]) . } . (2, Some(2)) => { . let t0 = iter.next().unwrap(); . let t1 = iter.next().unwrap(); 1,627 ( 0.00%) assert!(iter.next().is_none()); 347,033 ( 0.00%) f(&[t0, t1]) . } 34,040 ( 0.00%) _ => f(&iter.collect::>()), . } 2,470,955 ( 0.00%) } . } . . impl<'a, T, R> InternIteratorElement for &'a T . where . T: Clone + 'a, . { . type Output = R; 116,889 ( 0.00%) fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { . // This code isn't hot. 116 ( 0.00%) f(&iter.cloned().collect::>()) 93,616 ( 0.00%) } . } . . impl InternIteratorElement for Result { . type Output = Result; 5,864,615 ( 0.01%) fn intern_with, F: FnOnce(&[T]) -> R>( . mut iter: I, . f: F, . ) -> Self::Output { . // This code is hot enough that it's worth specializing for the most . // common length lists, to avoid the overhead of `SmallVec` creation. . // Lengths 0, 1, and 2 typically account for ~95% of cases. If . // `size_hint` is incorrect a panic will occur via an `unwrap` or an . // `assert`, unless a failure happens first, in which case the result . // will be an error anyway. 4,636,584 ( 0.01%) Ok(match iter.size_hint() { . (0, Some(0)) => { . assert!(iter.next().is_none()); . f(&[]) . } . (1, Some(1)) => { 23,148 ( 0.00%) let t0 = iter.next().unwrap()?; . assert!(iter.next().is_none()); 318,582 ( 0.00%) f(&[t0]) . } . (2, Some(2)) => { 1,396 ( 0.00%) let t0 = iter.next().unwrap()?; 1,396 ( 0.00%) let t1 = iter.next().unwrap()?; 2,094 ( 0.00%) assert!(iter.next().is_none()); 552,994 ( 0.00%) f(&[t0, t1]) . } 44,980 ( 0.00%) _ => f(&iter.collect::, _>>()?), . }) 6,592,903 ( 0.01%) } . } . . // We are comparing types with different invariant lifetimes, so `ptr::eq` . // won't work for us. . fn ptr_eq(t: *const T, u: *const U) -> bool { 1,662,438 ( 0.00%) t as *const () == u as *const () . } . . pub fn provide(providers: &mut ty::query::Providers) { 2 ( 0.00%) providers.in_scope_traits_map = . |tcx, id| tcx.hir_crate(()).owners[id].as_ref().map(|owner_info| &owner_info.trait_map); 3 ( 0.00%) providers.resolutions = |tcx, ()| &tcx.untracked_resolutions; 2 ( 0.00%) providers.module_reexports = . 
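// Illustrative sketch (not part of the profiled source): the
// InternIteratorElement::intern_with impls above specialize on
// Iterator::size_hint() so that the very common 0-, 1- and 2-element cases
// never allocate a SmallVec. The same idea in isolation; intern_small is an
// invented name, and unlike the original this version does not assert that
// the size hint was exact.
fn intern_small<T, R>(mut iter: impl Iterator<Item = T>, f: impl FnOnce(&[T]) -> R) -> R {
    match iter.size_hint() {
        (0, Some(0)) => f(&[]),
        (1, Some(1)) => {
            let t0 = iter.next().unwrap();
            f(&[t0])
        }
        (2, Some(2)) => {
            let t0 = iter.next().unwrap();
            let t1 = iter.next().unwrap();
            f(&[t0, t1])
        }
        // Fall back to a heap allocation only for longer inputs.
        _ => f(&iter.collect::<Vec<T>>()),
    }
}

fn main() {
    let joined = intern_small(["a", "b"].into_iter(), |xs| xs.concat());
    assert_eq!(joined, "ab");
}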
|tcx, id| tcx.resolutions(()).reexport_map.get(&id).map(|v| &v[..]); 2 ( 0.00%) providers.crate_name = |tcx, id| { 1 ( 0.00%) assert_eq!(id, LOCAL_CRATE); 1 ( 0.00%) tcx.crate_name . }; 2 ( 0.00%) providers.maybe_unused_trait_import = . |tcx, id| tcx.resolutions(()).maybe_unused_trait_imports.contains(&id); 2 ( 0.00%) providers.maybe_unused_extern_crates = . |tcx, ()| &tcx.resolutions(()).maybe_unused_extern_crates[..]; 2 ( 0.00%) providers.names_imported_by_glob_use = |tcx, id| { . tcx.arena.alloc(tcx.resolutions(()).glob_map.get(&id).cloned().unwrap_or_default()) . }; . 92 ( 0.00%) providers.lookup_stability = |tcx, id| tcx.stability().local_stability(id.expect_local()); 2 ( 0.00%) providers.lookup_const_stability = 1,143 ( 0.00%) |tcx, id| tcx.stability().local_const_stability(id.expect_local()); 2 ( 0.00%) providers.lookup_deprecation_entry = 164,792 ( 0.00%) |tcx, id| tcx.stability().local_deprecation_entry(id.expect_local()); 2 ( 0.00%) providers.extern_mod_stmt_cnum = . |tcx, id| tcx.resolutions(()).extern_crate_map.get(&id).cloned(); 3 ( 0.00%) providers.output_filenames = |tcx, ()| tcx.output_filenames.clone(); 3 ( 0.00%) providers.features_query = |tcx, ()| tcx.sess.features_untracked(); 2 ( 0.00%) providers.is_panic_runtime = |tcx, cnum| { 1 ( 0.00%) assert_eq!(cnum, LOCAL_CRATE); 7 ( 0.00%) tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::panic_runtime) . }; 2 ( 0.00%) providers.is_compiler_builtins = |tcx, cnum| { 1 ( 0.00%) assert_eq!(cnum, LOCAL_CRATE); 7 ( 0.00%) tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::compiler_builtins) . }; 2 ( 0.00%) providers.has_panic_handler = |tcx, cnum| { 1 ( 0.00%) assert_eq!(cnum, LOCAL_CRATE); . // We want to check if the panic handler was defined in this crate 3 ( 0.00%) tcx.lang_items().panic_impl().map_or(false, |did| did.is_local()) . }; . } 13,983,681 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs -------------------------------------------------------------------------------- Ir -- line 111 ---------------------------------------- . const EMPTY: u8 = 0b1111_1111; . . /// Control byte value for a deleted bucket. . const DELETED: u8 = 0b1000_0000; . . /// Checks whether a control byte represents a full bucket (top bit is clear). . #[inline] . fn is_full(ctrl: u8) -> bool { 23,234,536 ( 0.04%) ctrl & 0x80 == 0 . } . . /// Checks whether a control byte represents a special value (top bit is set). . #[inline] . fn is_special(ctrl: u8) -> bool { . ctrl & 0x80 != 0 . } . . /// Checks whether a special control value is EMPTY (just check 1 bit). . #[inline] . fn special_is_empty(ctrl: u8) -> bool { . debug_assert!(is_special(ctrl)); 1,764,342 ( 0.00%) ctrl & 0x01 != 0 . } . . /// Primary hash function, used to select the initial bucket to probe from. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h1(hash: u64) -> usize { . // On 32-bit platforms we simply ignore the higher hash bits. . hash as usize -- line 140 ---------------------------------------- -- line 143 ---------------------------------------- . /// Secondary hash function, saved in the low 7 bits of the control byte. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h2(hash: u64) -> u8 { . // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit . // value, some hash functions (such as FxHash) produce a usize result . 
// instead, which means that the top 32 bits are 0 on 32-bit platforms. . let hash_len = usize::min(mem::size_of::(), mem::size_of::()); 573,226,728 ( 1.08%) let top7 = hash >> (hash_len * 8 - 7); . (top7 & 0x7f) as u8 // truncation . } . . /// Probe sequence based on triangular numbers, which is guaranteed (since our . /// table size is a power of two) to visit every group of elements exactly once. . /// . /// A triangular probe has us jump by 1 more group every time. So first we . /// jump by 1 group (meaning we just continue our linear scan), then 2 groups -- line 159 ---------------------------------------- -- line 170 ---------------------------------------- . #[inline] . fn move_next(&mut self, bucket_mask: usize) { . // We should have found an empty bucket by now and ended the probe. . debug_assert!( . self.stride <= bucket_mask, . "Went past end of probe sequence" . ); . 3,880,026 ( 0.01%) self.stride += Group::WIDTH; 3,880,026 ( 0.01%) self.pos += self.stride; 3,461,868 ( 0.01%) self.pos &= bucket_mask; . } . } . . /// Returns the number of buckets needed to hold the given number of items, . /// taking the maximum load factor into account. . /// . /// Returns `None` if an overflow occurs. . // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258 . #[cfg_attr(target_os = "emscripten", inline(never))] . #[cfg_attr(not(target_os = "emscripten"), inline)] . fn capacity_to_buckets(cap: usize) -> Option { . debug_assert_ne!(cap, 0); . . // For small tables we require at least 1 empty bucket so that lookups are . // guaranteed to terminate if an element doesn't exist in the table. 2,207,854 ( 0.00%) if cap < 8 { . // We don't bother with a table size of 2 buckets since that can only . // hold a single element. Instead we skip directly to a 4 bucket table . // which can hold 3 elements. 4,769,725 ( 0.01%) return Some(if cap < 4 { 4 } else { 8 }); . } . . // Otherwise require 1/8 buckets to be empty (87.5% load) . // . // Be careful when modifying this, calculate_layout relies on the . // overflow check here. 899,892 ( 0.00%) let adjusted_cap = cap.checked_mul(8)? / 7; . . // Any overflows will have been caught by the checked_mul. Also, any . // rounding errors from the division above will be cleaned up by . // next_power_of_two (which can't overflow because of the previous division). . Some(adjusted_cap.next_power_of_two()) . } . . /// Returns the maximum effective capacity for the given bucket mask, taking . /// the maximum load factor into account. . #[inline] . fn bucket_mask_to_capacity(bucket_mask: usize) -> usize { 5,888,848 ( 0.01%) if bucket_mask < 8 { . // For tables with 1/2/4/8 buckets, we always reserve one empty slot. . // Keep in mind that the bucket mask is one less than the bucket count. . bucket_mask . } else { . // For larger tables we reserve 12.5% of the slots as empty. 1,115,405 ( 0.00%) ((bucket_mask + 1) / 8) * 7 . } . } . . /// Helper which allows the max calculation for ctrl_align to be statically computed for each T . /// while keeping the rest of `calculate_layout_for` independent of `T` . #[derive(Copy, Clone)] . struct TableLayout { . size: usize, -- line 233 ---------------------------------------- -- line 246 ---------------------------------------- . . #[inline] . fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> { . debug_assert!(buckets.is_power_of_two()); . . let TableLayout { size, ctrl_align } = self; . // Manual layout calculation since Layout methods are not yet stable. . 
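// Illustrative sketch (not part of the profiled source):
// capacity_to_buckets / bucket_mask_to_capacity above encode hashbrown's
// maximum load factor of 7/8 -- the bucket count is the next power of two
// that keeps at least 1/8 of the slots empty, and the usable capacity of a
// table is 7/8 of its buckets, with special cases for very small tables.
// The same arithmetic restated as stand-alone functions:
fn capacity_to_buckets(cap: usize) -> Option<usize> {
    debug_assert_ne!(cap, 0);
    if cap < 8 {
        // Small tables keep at least one empty slot so probing terminates.
        return Some(if cap < 4 { 4 } else { 8 });
    }
    // cap * 8 / 7, rounded up to the next power of two.
    let adjusted = cap.checked_mul(8)? / 7;
    Some(adjusted.next_power_of_two())
}

fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
    if bucket_mask < 8 {
        bucket_mask // tables of 1/2/4/8 buckets reserve a single empty slot
    } else {
        ((bucket_mask + 1) / 8) * 7 // 12.5% of the slots stay empty
    }
}

fn main() {
    assert_eq!(capacity_to_buckets(3), Some(4));
    assert_eq!(capacity_to_buckets(12), Some(16));
    assert_eq!(bucket_mask_to_capacity(16 - 1), 14);
}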
let ctrl_offset = 4,488,729 ( 0.01%) size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1); 6,011,765 ( 0.01%) let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?; . . Some(( . unsafe { Layout::from_size_align_unchecked(len, ctrl_align) }, . ctrl_offset, . )) . } . } . -- line 263 ---------------------------------------- -- line 337 ---------------------------------------- . } . } . #[cfg_attr(feature = "inline-more", inline)] . pub unsafe fn drop(&self) { . self.as_ptr().drop_in_place(); . } . #[inline] . pub unsafe fn read(&self) -> T { 17,662 ( 0.00%) self.as_ptr().read() . } . #[inline] . pub unsafe fn write(&self, val: T) { . self.as_ptr().write(val); . } . #[inline] . pub unsafe fn as_ref<'a>(&self) -> &'a T { . &*self.as_ptr() -- line 353 ---------------------------------------- -- line 422 ---------------------------------------- . /// Creates a new empty hash table without allocating any memory, using the . /// given allocator. . /// . /// In effect this returns a table with exactly 1 bucket. However we can . /// leave the data pointer dangling since that bucket is never written to . /// due to our load factor forcing us to always have at least 1 free bucket. . #[inline] . pub fn new_in(alloc: A) -> Self { 34,215 ( 0.00%) Self { . table: RawTableInner::new_in(alloc), . marker: PhantomData, . } . } . . /// Allocates a new hash table with the given number of buckets. . /// . /// The control bytes are left uninitialized. -- line 438 ---------------------------------------- -- line 440 ---------------------------------------- . unsafe fn new_uninitialized( . alloc: A, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . Ok(Self { 3,940 ( 0.00%) table: RawTableInner::new_uninitialized( . alloc, . TableLayout::new::(), . buckets, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 456 ---------------------------------------- -- line 458 ---------------------------------------- . /// Attempts to allocate a new hash table with at least enough capacity . /// for inserting the given number of elements without reallocating. . fn fallible_with_capacity( . alloc: A, . capacity: usize, . fallibility: Fallibility, . ) -> Result { . Ok(Self { 110,673 ( 0.00%) table: RawTableInner::fallible_with_capacity( . alloc, . TableLayout::new::(), . capacity, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 474 ---------------------------------------- -- line 527 ---------------------------------------- . debug_assert_ne!(self.table.bucket_mask, 0); . debug_assert!(index < self.buckets()); . Bucket::from_base_index(self.data_end(), index) . } . . /// Erases an element from the table without dropping it. . #[cfg_attr(feature = "inline-more", inline)] . #[deprecated(since = "0.8.1", note = "use erase or remove instead")] 359,950 ( 0.00%) pub unsafe fn erase_no_drop(&mut self, item: &Bucket) { 359,950 ( 0.00%) let index = self.bucket_index(item); . self.table.erase(index); 719,900 ( 0.00%) } . . /// Erases an element from the table, dropping it in place. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn erase(&mut self, item: Bucket) { . // Erase the element from the table first since drop might panic. 270,324 ( 0.00%) self.erase_no_drop(&item); . item.drop(); . } . . /// Finds and erases an element from the table, dropping it in place. . /// Returns true if an element was found. . #[cfg(feature = "raw")] . 
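// Illustrative sketch (not part of the profiled source): the
// calculate_layout_for code above lays out the data array first, rounds its
// end up to the control-byte alignment, and then appends one control byte
// per bucket plus a replicated trailing group. The same arithmetic with
// example numbers; layout_len and its parameters are invented for this
// sketch.
fn layout_len(size: usize, ctrl_align: usize, width: usize, buckets: usize) -> Option<(usize, usize)> {
    // Round the end of the data array up to the control alignment.
    let ctrl_offset =
        size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1);
    // Control bytes: one per bucket plus a replicated trailing group.
    let len = ctrl_offset.checked_add(buckets + width)?;
    Some((len, ctrl_offset))
}

fn main() {
    // 16 buckets of 12-byte entries, 16-byte control alignment, group width
    // 16: the data ends at 192, control bytes start at 192, and the total
    // allocation is 192 + 16 + 16 = 224 bytes.
    assert_eq!(layout_len(12, 16, 16, 16), Some((224, 192)));
}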
#[cfg_attr(feature = "inline-more", inline)] . pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool { -- line 554 ---------------------------------------- -- line 563 ---------------------------------------- . } . } . . /// Removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn remove(&mut self, item: Bucket) -> T { 449,576 ( 0.00%) self.erase_no_drop(&item); 3,876 ( 0.00%) item.read() . } . . /// Finds and removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] 9,330,012 ( 0.02%) pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option { . // Avoid `Option::map` because it bloats LLVM IR. 27,518 ( 0.00%) match self.find(hash, eq) { 86,312 ( 0.00%) Some(bucket) => Some(unsafe { self.remove(bucket) }), 2,449,860 ( 0.00%) None => None, . } 12,830,824 ( 0.02%) } . . /// Marks all table buckets as empty without dropping their contents. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear_no_drop(&mut self) { . self.table.clear_no_drop(); . } . . /// Removes all elements from the table without freeing the backing memory. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear(&mut self) { . // Ensure that the table is reset even if one of the drops panic . let mut self_ = guard(self, |self_| self_.clear_no_drop()); . unsafe { 1 ( 0.00%) self_.drop_elements(); . } . } . 7 ( 0.00%) unsafe fn drop_elements(&mut self) { 256,276 ( 0.00%) if mem::needs_drop::() && !self.is_empty() { . for item in self.iter() { . item.drop(); . } . } 8 ( 0.00%) } . . /// Shrinks the table to fit `max(self.len(), min_size)` elements. . #[cfg_attr(feature = "inline-more", inline)] . pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) { . // Calculate the minimal number of elements that we need to reserve . // space for. . let min_size = usize::max(self.table.items, min_size); . if min_size == 0 { -- line 615 ---------------------------------------- -- line 642 ---------------------------------------- . } . } . } . . /// Ensures that at least `additional` items can be inserted into the table . /// without reallocation. . #[cfg_attr(feature = "inline-more", inline)] . pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) { 5,137,342 ( 0.01%) if additional > self.table.growth_left { . // Avoid `Result::unwrap_or_else` because it bloats LLVM IR. 3,833,900 ( 0.01%) if self . .reserve_rehash(additional, hasher, Fallibility::Infallible) . .is_err() . { . unsafe { hint::unreachable_unchecked() } . } . } . } . -- line 660 ---------------------------------------- -- line 671 ---------------------------------------- . } else { . Ok(()) . } . } . . /// Out-of-line slow path for `reserve` and `try_reserve`. . #[cold] . #[inline(never)] 8,838,086 ( 0.02%) fn reserve_rehash( . &mut self, . additional: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, . ) -> Result<(), TryReserveError> { . unsafe { . self.table.reserve_rehash_inner( . additional, -- line 687 ---------------------------------------- -- line 690 ---------------------------------------- . TableLayout::new::(), . if mem::needs_drop::() { . Some(mem::transmute(ptr::drop_in_place:: as unsafe fn(*mut T))) . } else { . None . }, . ) . } 6,354,656 ( 0.01%) } . . /// Allocates a new table of a different size and moves the contents of the . /// current table into it. . fn resize( . &mut self, . 
capacity: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, -- line 706 ---------------------------------------- -- line 714 ---------------------------------------- . ) . } . } . . /// Inserts a new element into the table, and returns its raw bucket. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 33,098,166 ( 0.06%) pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket { . unsafe { . let mut index = self.table.find_insert_slot(hash); . . // We can avoid growing the table once we have reached our load . // factor if we are replacing a tombstone. This works since the . // number of EMPTY slots does not change in this case. 86,259 ( 0.00%) let old_ctrl = *self.table.ctrl(index); 22,299,164 ( 0.04%) if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) { . self.reserve(1, hasher); . index = self.table.find_insert_slot(hash); . } . . self.table.record_item_insert_at(index, old_ctrl, hash); . . let bucket = self.bucket(index); 4 ( 0.00%) bucket.write(value); . bucket . } 24,464,858 ( 0.05%) } . . /// Attempts to insert a new element without growing the table and return its raw bucket. . /// . /// Returns an `Err` containing the given element if inserting it would require growing the . /// table. . /// . /// This does not check if the given element already exists in the table. . #[cfg(feature = "raw")] -- line 749 ---------------------------------------- -- line 760 ---------------------------------------- . } . } . } . . /// Inserts a new element into the table, and returns a mutable reference to it. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 5,665,944 ( 0.01%) pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T { 4,173 ( 0.00%) unsafe { self.insert(hash, value, hasher).as_mut() } 4,249,458 ( 0.01%) } . . /// Inserts a new element into the table, without growing the table. . /// . /// There must be enough space in the table to insert the new element. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] . #[cfg(any(feature = "raw", feature = "rustc-internal-api"))] 207,052 ( 0.00%) pub unsafe fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket { 4,580,098 ( 0.01%) let (index, old_ctrl) = self.table.prepare_insert_slot(hash); 190,702 ( 0.00%) let bucket = self.table.bucket(index); . . // If we are replacing a DELETED entry then we don't need to update . // the load counter. 9,972,901 ( 0.02%) self.table.growth_left -= special_is_empty(old_ctrl) as usize; . . bucket.write(value); 7,974,062 ( 0.02%) self.table.items += 1; . bucket 309,279 ( 0.00%) } . . /// Temporary removes a bucket, applying the given function to the removed . /// element and optionally put back the returned value in the same bucket. . /// . /// Returns `true` if the bucket still contains an element . /// . /// This does not check if the given bucket is actually occupied. . #[cfg_attr(feature = "inline-more", inline)] -- line 798 ---------------------------------------- -- line 813 ---------------------------------------- . true . } else { . false . } . } . . /// Searches for an element in the table. . 
#[inline] 217,826 ( 0.00%) pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option> { 117,368 ( 0.00%) let result = self.table.find_inner(hash, &mut |index| unsafe { 635,908 ( 0.00%) eq(self.bucket(index).as_ref()) 46,926 ( 0.00%) }); . . // Avoid `Option::map` because it bloats LLVM IR. . match result { 14,888 ( 0.00%) Some(index) => Some(unsafe { self.bucket(index) }), . None => None, . } 241,000 ( 0.00%) } . . /// Gets a reference to an element in the table. . #[inline] . pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> { . // Avoid `Option::map` because it bloats LLVM IR. 382,500 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_ref() }), . None => None, . } . } . . /// Gets a mutable reference to an element in the table. . #[inline] 17,514 ( 0.00%) pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> { . // Avoid `Option::map` because it bloats LLVM IR. 121,126 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_mut() }), . None => None, . } 20,016 ( 0.00%) } . . /// Attempts to get mutable references to `N` entries in the table at once. . /// . /// Returns an array of length `N` with the results of each query. . /// . /// At most one mutable reference will be returned to any entry. `None` will be returned if any . /// of the hashes are duplicates. `None` will be returned if the hash is not found. . /// -- line 859 ---------------------------------------- -- line 920 ---------------------------------------- . #[inline] . pub fn len(&self) -> usize { . self.table.items . } . . /// Returns `true` if the table contains no elements. . #[inline] . pub fn is_empty(&self) -> bool { 16,063,051 ( 0.03%) self.len() == 0 . } . . /// Returns the number of buckets in the table. . #[inline] . pub fn buckets(&self) -> usize { . self.table.bucket_mask + 1 . } . -- line 936 ---------------------------------------- -- line 938 ---------------------------------------- . /// the caller to ensure that the `RawTable` outlives the `RawIter`. . /// Because we cannot make the `next` method unsafe on the `RawIter` . /// struct, we have to make the `iter` method unsafe. . #[inline] . pub unsafe fn iter(&self) -> RawIter { . let data = Bucket::from_base_index(self.data_end(), 0); . RawIter { . iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()), 2,029,063 ( 0.00%) items: self.table.items, . } . } . . /// Returns an iterator over occupied buckets that could match a given hash. . /// . /// `RawTable` only stores 7 bits of the hash value, so this iterator may . /// return items that have a hash value different than the one provided. You . /// should always validate the returned values before using them. -- line 954 ---------------------------------------- -- line 995 ---------------------------------------- . /// Iteration starts at the provided iterator's current location. . /// . /// It is up to the caller to ensure that the iterator is valid for this . /// `RawTable` and covers all items that remain in the table. . pub unsafe fn into_iter_from(self, iter: RawIter) -> RawIntoIter { . debug_assert_eq!(iter.len(), self.len()); . . let alloc = self.table.alloc.clone(); 114,916 ( 0.00%) let allocation = self.into_allocation(); 86,187 ( 0.00%) RawIntoIter { 143,645 ( 0.00%) iter, . allocation, . marker: PhantomData, . alloc, . } . } . . /// Converts the table into a raw allocation. The contents of the table . 
/// should be dropped using a `RawIter` before freeing the allocation. . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn into_allocation(self) -> Option<(NonNull, Layout)> { 59,042 ( 0.00%) let alloc = if self.table.is_empty_singleton() { . None . } else { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match calculate_layout::(self.table.buckets()) { . Some(lco) => lco, . None => unsafe { hint::unreachable_unchecked() }, . }; . Some(( 7,733 ( 0.00%) unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) }, . layout, . )) . }; . mem::forget(self); . alloc . } . } . -- line 1033 ---------------------------------------- -- line 1042 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . impl RawTableInner { . #[inline] . const fn new_in(alloc: A) -> Self { 12,328,199 ( 0.02%) Self { . // Be careful to cast the entire slice to a raw pointer. . ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) }, . bucket_mask: 0, . items: 0, . growth_left: 0, . alloc, . } . } . } . . impl RawTableInner { . #[cfg_attr(feature = "inline-more", inline)] 8,058,904 ( 0.02%) unsafe fn new_uninitialized( . alloc: A, . table_layout: TableLayout, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . // Avoid `Option::ok_or_else` because it bloats LLVM IR. -- line 1071 ---------------------------------------- -- line 1078 ---------------------------------------- . // exceed `isize::MAX`. We can skip this check on 64-bit systems since . // such allocations will never succeed anyways. . // . // This mirrors what Vec does in the standard library. . if mem::size_of::() < 8 && layout.size() > isize::MAX as usize { . return Err(fallibility.capacity_overflow()); . } . 1,881,268 ( 0.00%) let ptr: NonNull = match do_alloc(&alloc, layout) { . Ok(block) => block.cast(), . Err(_) => return Err(fallibility.alloc_err(layout)), . }; . . let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset)); 4,443,222 ( 0.01%) Ok(Self { . ctrl, 1,845,252 ( 0.00%) bucket_mask: buckets - 1, . items: 0, . growth_left: bucket_mask_to_capacity(buckets - 1), . alloc, . }) 5,866,294 ( 0.01%) } . . #[inline] 397,299 ( 0.00%) fn fallible_with_capacity( . alloc: A, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result { 104,412 ( 0.00%) if capacity == 0 { 39,088 ( 0.00%) Ok(Self::new_in(alloc)) . } else { . unsafe { . let buckets = . capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?; . 3,647,129 ( 0.01%) let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?; . result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes()); . 258,191 ( 0.00%) Ok(result) . } . } 397,299 ( 0.00%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element and sets the hash for that slot. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] 1,635,910 ( 0.00%) unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) { . let index = self.find_insert_slot(hash); 1,635,910 ( 0.00%) let old_ctrl = *self.ctrl(index); . self.set_ctrl_h2(index, hash); . (index, old_ctrl) 3,271,820 ( 0.01%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] . fn find_insert_slot(&self, hash: u64) -> usize { . 
let mut probe_seq = self.probe_seq(hash); . loop { . unsafe { . let group = Group::load(self.ctrl(probe_seq.pos)); 15,059,113 ( 0.03%) if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() { 44,178,252 ( 0.08%) let result = (probe_seq.pos + bit) & self.bucket_mask; . . // In tables smaller than the group width, trailing control . // bytes outside the range of the table are filled with . // EMPTY entries. These will unfortunately trigger a . // match, but once masked may point to a full bucket that . // is already occupied. We detect this situation here and . // perform a second scan starting at the beginning of the . // table. This second scan is guaranteed to find an empty . // slot (due to the load factor) before hitting the trailing . // control bytes (containing EMPTY). 20,295,834 ( 0.04%) if unlikely(is_full(*self.ctrl(result))) { . debug_assert!(self.bucket_mask < Group::WIDTH); . debug_assert_ne!(probe_seq.pos, 0); . return Group::load_aligned(self.ctrl(0)) . .match_empty_or_deleted() . .lowest_set_bit_nonzero(); . } . . return result; -- line 1165 ---------------------------------------- -- line 1171 ---------------------------------------- . . /// Searches for an element in the table. This uses dynamic dispatch to reduce the amount of . /// code generated, but it is eliminated by LLVM optimizations. . #[inline] . fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option { . let h2_hash = h2(hash); . let mut probe_seq = self.probe_seq(hash); . 2,081,240 ( 0.00%) loop { . let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) }; . 63,649,081 ( 0.12%) for bit in group.match_byte(h2_hash) { 128,197,179 ( 0.24%) let index = (probe_seq.pos + bit) & self.bucket_mask; . 83,001,254 ( 0.16%) if likely(eq(index)) { . return Some(index); . } . } . 17,462,922 ( 0.03%) if likely(group.match_empty().any_bit_set()) { . return None; . } . . probe_seq.move_next(self.bucket_mask); . } . } . . #[allow(clippy::mut_mut)] . #[inline] . unsafe fn prepare_rehash_in_place(&mut self) { . // Bulk convert all full control bytes to DELETED, and all DELETED . // control bytes to EMPTY. This effectively frees up all buckets . // containing a DELETED entry. 57 ( 0.00%) for i in (0..self.buckets()).step_by(Group::WIDTH) { . let group = Group::load_aligned(self.ctrl(i)); . let group = group.convert_special_to_empty_and_full_to_deleted(); . group.store_aligned(self.ctrl(i)); . } . . // Fix up the trailing control bytes. See the comments in set_ctrl . // for the handling of tables smaller than the group width. 57 ( 0.00%) if self.buckets() < Group::WIDTH { . self.ctrl(0) . .copy_to(self.ctrl(Group::WIDTH), self.buckets()); . } else { . self.ctrl(0) . .copy_to(self.ctrl(self.buckets()), Group::WIDTH); . } . } . -- line 1220 ---------------------------------------- -- line 1225 ---------------------------------------- . Bucket::from_base_index(self.data_end(), index) . } . . #[inline] . unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 { . debug_assert_ne!(self.bucket_mask, 0); . debug_assert!(index < self.buckets()); . let base: *mut u8 = self.data_end().as_ptr(); 53,448,624 ( 0.10%) base.sub((index + 1) * size_of) . } . . #[inline] . unsafe fn data_end(&self) -> NonNull { . NonNull::new_unchecked(self.ctrl.as_ptr().cast()) . } . . /// Returns an iterator-like object for a probe sequence on the table. . /// . /// This iterator never terminates, but is guaranteed to visit each bucket . /// group exactly once. The loop using `probe_seq` must terminate upon . 
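// Illustrative sketch (not part of the profiled source): the probe sequence
// used by find_insert_slot / find_inner above advances by one extra group
// width each step (a triangular-number sequence), which for a power-of-two
// table visits every group exactly once before repeating. A quick
// demonstration with a toy group width of 1; all names are invented here.
fn main() {
    let buckets = 16usize;          // power of two, as in RawTable
    let mask = buckets - 1;
    let mut pos = 5 & mask;         // h1(hash) & bucket_mask
    let mut stride = 0;
    let mut seen = std::collections::HashSet::new();
    for _ in 0..buckets {
        seen.insert(pos);
        stride += 1;                // Group::WIDTH == 1 in this toy model
        pos = (pos + stride) & mask;
    }
    // Triangular probing covered all 16 positions exactly once.
    assert_eq!(seen.len(), buckets);
}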
/// reaching a group containing an empty bucket. . #[inline] . fn probe_seq(&self, hash: u64) -> ProbeSeq { . ProbeSeq { 328,510,331 ( 0.62%) pos: h1(hash) & self.bucket_mask, . stride: 0, . } . } . . /// Returns the index of a bucket for which a value must be inserted if there is enough rooom . /// in the table, otherwise returns error . #[cfg(feature = "raw")] . #[inline] -- line 1257 ---------------------------------------- -- line 1263 ---------------------------------------- . } else { . self.record_item_insert_at(index, old_ctrl, hash); . Ok(index) . } . } . . #[inline] . unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) { 27,859,285 ( 0.05%) self.growth_left -= special_is_empty(old_ctrl) as usize; . self.set_ctrl_h2(index, hash); 22,287,388 ( 0.04%) self.items += 1; . } . . #[inline] . fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool { . let probe_seq_pos = self.probe_seq(hash).pos; . let probe_index = . |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH; 567 ( 0.00%) probe_index(i) == probe_index(new_i) . } . . /// Sets a control byte to the hash, and possibly also the replicated control byte at . /// the end of the array. . #[inline] . unsafe fn set_ctrl_h2(&self, index: usize, hash: u64) { . self.set_ctrl(index, h2(hash)); . } -- line 1289 ---------------------------------------- -- line 1312 ---------------------------------------- . // replicate the buckets at the end of the trailing group. For example . // with 2 buckets and a group size of 4, the control bytes will look . // like this: . // . // Real | Replicated . // --------------------------------------------- . // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] | . // --------------------------------------------- 42,060,037 ( 0.08%) let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH; . 14,017,267 ( 0.03%) *self.ctrl(index) = ctrl; 14,028,913 ( 0.03%) *self.ctrl(index2) = ctrl; . } . . /// Returns a pointer to a control byte. . #[inline] . unsafe fn ctrl(&self, index: usize) -> *mut u8 { . debug_assert!(index < self.num_ctrl_bytes()); . self.ctrl.as_ptr().add(index) . } . . #[inline] . fn buckets(&self) -> usize { 6,096,136 ( 0.01%) self.bucket_mask + 1 . } . . #[inline] . fn num_ctrl_bytes(&self) -> usize { 4,704,975 ( 0.01%) self.bucket_mask + 1 + Group::WIDTH . } . . #[inline] . fn is_empty_singleton(&self) -> bool { 22,263,181 ( 0.04%) self.bucket_mask == 0 . } . . #[allow(clippy::mut_mut)] . #[inline] . unsafe fn prepare_resize( . &self, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result, TryReserveError> { . debug_assert!(self.items <= capacity); . . // Allocate and initialize the new table. 68,526 ( 0.00%) let mut new_table = RawTableInner::fallible_with_capacity( . self.alloc.clone(), . table_layout, . capacity, . fallibility, . )?; 2,437,370 ( 0.00%) new_table.growth_left -= self.items; . new_table.items = self.items; . . // The hash function may panic, in which case we simply free the new . // table without dropping any elements that may have been copied into . // it. . // . // This guard is also used to free the old table on success, see . // the comment at the bottom of this function. . Ok(guard(new_table, move |self_| { 1,083,737 ( 0.00%) if !self_.is_empty_singleton() { . self_.free_buckets(table_layout); . } . })) . } . . /// Reserves or rehashes to make room for `additional` more elements. . /// . 
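// Illustrative sketch (not part of the profiled source): set_ctrl above
// mirrors the first Group::WIDTH control bytes into the trailing area past
// the end of the table, so unaligned group loads near the wrap-around point
// see the same data. The index arithmetic in isolation; mirror_index and
// the example sizes are invented for this sketch.
fn mirror_index(index: usize, bucket_mask: usize, width: usize) -> usize {
    ((index.wrapping_sub(width)) & bucket_mask) + width
}

fn main() {
    let (buckets, width) = (16usize, 8usize);
    let mask = buckets - 1;
    // Bytes in the first group are replicated into the trailing area...
    assert_eq!(mirror_index(3, mask, width), 19); // 3 -> buckets + 3
    // ...while bytes past the first group simply map onto themselves.
    assert_eq!(mirror_index(11, mask, width), 11);
}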
/// This uses dynamic dispatch to reduce the amount of -- line 1383 ---------------------------------------- -- line 1388 ---------------------------------------- . &mut self, . additional: usize, . hasher: &dyn Fn(&mut Self, usize) -> u64, . fallibility: Fallibility, . layout: TableLayout, . drop: Option, . ) -> Result<(), TryReserveError> { . // Avoid `Option::ok_or_else` because it bloats LLVM IR. 2,167,526 ( 0.00%) let new_items = match self.items.checked_add(additional) { . Some(new_items) => new_items, . None => return Err(fallibility.capacity_overflow()), . }; 2,167,512 ( 0.00%) let full_capacity = bucket_mask_to_capacity(self.bucket_mask); 4,587,829 ( 0.01%) if new_items <= full_capacity / 2 { . // Rehash in-place without re-allocating if we have plenty of spare . // capacity that is locked up due to DELETED entries. . self.rehash_in_place(hasher, layout.size, drop); 38 ( 0.00%) Ok(()) . } else { . // Otherwise, conservatively resize to at least the next size up . // to avoid churning deletes into frequent rehashes. . self.resize_inner( 1,083,737 ( 0.00%) usize::max(new_items, full_capacity + 1), . hasher, . fallibility, . layout, . ) . } . } . . /// Allocates a new table of a different size and moves the contents of the -- line 1418 ---------------------------------------- -- line 1424 ---------------------------------------- . #[inline(always)] . unsafe fn resize_inner( . &mut self, . capacity: usize, . hasher: &dyn Fn(&mut Self, usize) -> u64, . fallibility: Fallibility, . layout: TableLayout, . ) -> Result<(), TryReserveError> { 169,088 ( 0.00%) let mut new_table = self.prepare_resize(layout, capacity, fallibility)?; . . // Copy all elements to the new table. . for i in 0..self.buckets() { 8,510,549 ( 0.02%) if !is_full(*self.ctrl(i)) { . continue; . } . . // This may panic. . let hash = hasher(self, i); . . // We can use a simpler version of insert() here since: . // - there are no DELETED entries. -- line 1444 ---------------------------------------- -- line 1454 ---------------------------------------- . } . . // We successfully copied all elements without panicking. Now replace . // self with the new table. The old table will have its memory freed but . // the items will not be dropped (since they have been moved into the . // new table). . mem::swap(self, &mut new_table); . 1,083,737 ( 0.00%) Ok(()) . } . . /// Rehashes the contents of the table in place (i.e. without changing the . /// allocation). . /// . /// If `hasher` panics then some the table's contents may be lost. . /// . /// This uses dynamic dispatch to reduce the amount of -- line 1470 ---------------------------------------- -- line 1496 ---------------------------------------- . } . self_.growth_left = bucket_mask_to_capacity(self_.bucket_mask) - self_.items; . }); . . // At this point, DELETED elements are elements that we haven't . // rehashed yet. Find them and re-insert them at their ideal . // position. . 'outer: for i in 0..guard.buckets() { 1,216 ( 0.00%) if *guard.ctrl(i) != DELETED { . continue; . } . . let i_p = guard.bucket_ptr(i, size_of); . . 'inner: loop { . // Hash the current item . let hash = hasher(*guard, i); -- line 1512 ---------------------------------------- -- line 1515 ---------------------------------------- . let new_i = guard.find_insert_slot(hash); . let new_i_p = guard.bucket_ptr(new_i, size_of); . . // Probing works by scanning through all of the control . // bytes in groups, which may not be aligned to the group . // size. If both the new and old position fall within the . 
// same unaligned group, then there is no benefit in moving . // it and we can just continue to the next item. 189 ( 0.00%) if likely(guard.is_in_same_group(i, new_i, hash)) { . guard.set_ctrl_h2(i, hash); . continue 'outer; . } . . // We are moving the current item to a new position. Write . // our H2 to the control byte of the new position. . let prev_ctrl = guard.replace_ctrl_h2(new_i, hash); . if prev_ctrl == EMPTY { -- line 1531 ---------------------------------------- -- line 1541 ---------------------------------------- . // swapped into the old slot. . debug_assert_eq!(prev_ctrl, DELETED); . ptr::swap_nonoverlapping(i_p, new_i_p, size_of); . continue 'inner; . } . } . } . 57 ( 0.00%) guard.growth_left = bucket_mask_to_capacity(guard.bucket_mask) - guard.items; . . mem::forget(guard); . } . . #[inline] . unsafe fn free_buckets(&mut self, table_layout: TableLayout) { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) { . Some(lco) => lco, . None => hint::unreachable_unchecked(), . }; . self.alloc.deallocate( 738,727 ( 0.00%) NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)), . layout, . ); . } . . /// Marks all table buckets as empty without dropping their contents. . #[inline] . fn clear_no_drop(&mut self) { 215,521 ( 0.00%) if !self.is_empty_singleton() { . unsafe { . self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes()); . } . } 267,630 ( 0.00%) self.items = 0; 215,559 ( 0.00%) self.growth_left = bucket_mask_to_capacity(self.bucket_mask); . } . . #[inline] . unsafe fn erase(&mut self, index: usize) { . debug_assert!(is_full(*self.ctrl(index))); 1,297,685 ( 0.00%) let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask; . let empty_before = Group::load(self.ctrl(index_before)).match_empty(); . let empty_after = Group::load(self.ctrl(index)).match_empty(); . . // If we are inside a continuous block of Group::WIDTH full or deleted . // cells then a probe window may have seen a full block when trying to . // insert. We therefore need to keep that block non-empty so that . // lookups will continue searching to the next probe window. . // . // Note that in this context `leading_zeros` refers to the bytes at the . // end of a group, while `trailing_zeros` refers to the bytes at the . // beginning of a group. 5,190,740 ( 0.01%) let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH { . DELETED . } else { 6,327,715 ( 0.01%) self.growth_left += 1; . EMPTY . }; . self.set_ctrl(index, ctrl); 5,190,740 ( 0.01%) self.items -= 1; . } . } . . impl Clone for RawTable { 91,256 ( 0.00%) fn clone(&self) -> Self { 12,193 ( 0.00%) if self.table.is_empty_singleton() { . Self::new_in(self.table.alloc.clone()) . } else { . unsafe { . let mut new_table = ManuallyDrop::new( . // Avoid `Result::ok_or_else` because it bloats LLVM IR. . match Self::new_uninitialized( . self.table.alloc.clone(), . self.table.buckets(), -- line 1615 ---------------------------------------- -- line 1624 ---------------------------------------- . // We need to free the memory allocated for the new table. . new_table.free_buckets(); . }); . . // Return the newly created table. . ManuallyDrop::into_inner(new_table) . } . } 102,663 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . if source.table.is_empty_singleton() { . *self = Self::new_in(self.table.alloc.clone()); . } else { . unsafe { . // First, drop all our elements without clearing the control bytes. . 
self.drop_elements(); -- line 1640 ---------------------------------------- -- line 1687 ---------------------------------------- . .table . .ctrl(0) . .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes()); . source . .data_start() . .copy_to_nonoverlapping(self.data_start(), self.table.buckets()); . . self.table.items = source.table.items; 1,576 ( 0.00%) self.table.growth_left = source.table.growth_left; . } . } . . impl RawTable { . /// Common code for clone and clone_from. Assumes `self.buckets() == source.buckets()`. . #[cfg_attr(feature = "inline-more", inline)] . unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) { . // Copy the control bytes unchanged. We do this in a single pass -- line 1703 ---------------------------------------- -- line 1790 ---------------------------------------- . fn default() -> Self { . Self::new_in(Default::default()) . } . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] 10,351,443 ( 0.02%) fn drop(&mut self) { 9,632,980 ( 0.02%) if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); . self.free_buckets(); . } . } 10,962,786 ( 0.02%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); -- line 1813 ---------------------------------------- -- line 1817 ---------------------------------------- . } . } . . impl IntoIterator for RawTable { . type Item = T; . type IntoIter = RawIntoIter; . . #[cfg_attr(feature = "inline-more", inline)] 114,916 ( 0.00%) fn into_iter(self) -> RawIntoIter { . unsafe { . let iter = self.iter(); . self.into_iter_from(iter) . } 143,645 ( 0.00%) } . } . . /// Iterator over a sub-range of a table. Unlike `RawIter` this iterator does . /// not track an item count. . pub(crate) struct RawIterRange { . // Mask of full buckets in the current group. Bits are cleared from this . // mask as each element is processed. . current_group: BitMask, -- line 1838 ---------------------------------------- -- line 1934 ---------------------------------------- . . impl Iterator for RawIterRange { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option> { . unsafe { . loop { 4,989,721 ( 0.01%) if let Some(index) = self.current_group.lowest_set_bit() { 754,607 ( 0.00%) self.current_group = self.current_group.remove_lowest_bit(); 1,248,484 ( 0.00%) return Some(self.data.next_n(index)); . } . 4,785,969 ( 0.01%) if self.next_ctrl >= self.end { . return None; . } . . // We might read past self.end up to the next group boundary, . // but this is fine because it only occurs on tables smaller . // than the group size where the trailing control bytes are all . // EMPTY. On larger tables self.end is guaranteed to be aligned . // to the group size (since tables are power-of-two sized). 102,188 ( 0.00%) self.current_group = Group::load_aligned(self.next_ctrl).match_full(); 120,028 ( 0.00%) self.data = self.data.next_n(Group::WIDTH); 168,666 ( 0.00%) self.next_ctrl = self.next_ctrl.add(Group::WIDTH); . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . // We don't have an item count, so just guess based on the range size. . ( -- line 1966 ---------------------------------------- -- line 2102 ---------------------------------------- . } . } else { . 
// We must have already iterated past the removed item. . } . } . } . . unsafe fn drop_elements(&mut self) { 26,895 ( 0.00%) if mem::needs_drop::() && self.len() != 0 { . for item in self { . item.drop(); . } . } . } . } . . impl Clone for RawIter { -- line 2118 ---------------------------------------- -- line 2124 ---------------------------------------- . } . } . } . . impl Iterator for RawIter { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] 1,033,183 ( 0.00%) fn next(&mut self) -> Option> { 2,626,164 ( 0.00%) if let Some(b) = self.iter.next() { 7,387,676 ( 0.01%) self.items -= 1; . Some(b) . } else { . // We don't check against items == 0 here to allow the . // compiler to optimize away the item count entirely if the . // iterator length is never queried. . debug_assert_eq!(self.items, 0); . None . } 2,066,366 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (self.items, Some(self.items)) . } . } . . impl ExactSizeIterator for RawIter {} -- line 2151 ---------------------------------------- -- line 2177 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] 40,688 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); . . // Free the table 278,426 ( 0.00%) if let Some((ptr, layout)) = self.allocation { . self.alloc.deallocate(ptr, layout); . } . } 7,376 ( 0.00%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); -- line 2203 ---------------------------------------- -- line 2209 ---------------------------------------- . } . } . } . . impl Iterator for RawIntoIter { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] 30,908 ( 0.00%) fn next(&mut self) -> Option { 35,529 ( 0.00%) unsafe { Some(self.iter.next()?.read()) } 66,449 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 5 ( 0.00%) self.iter.size_hint() . } . } . . impl ExactSizeIterator for RawIntoIter {} . impl FusedIterator for RawIntoIter {} . . /// Iterator which consumes elements without freeing the table storage. . pub struct RawDrain<'a, T, A: Allocator + Clone = Global> { -- line 2231 ---------------------------------------- -- line 2259 ---------------------------------------- . where . T: Sync, . A: Sync, . { . } . . impl Drop for RawDrain<'_, T, A> { . #[cfg_attr(feature = "inline-more", inline)] 30,744 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements. Note that this may panic. . self.iter.drop_elements(); . . // Reset the contents of the table now that all elements have been . // dropped. . self.table.clear_no_drop(); . . // Move the now empty table back to its original location. 3,843 ( 0.00%) self.orig_table . .as_ptr() . .copy_from_nonoverlapping(&*self.table, 1); . } 30,744 ( 0.00%) } . } . . impl Iterator for RawDrain<'_, T, A> { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option { . 
unsafe { -- line 2289 ---------------------------------------- 180,547,900 ( 0.34%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/terminator.rs -------------------------------------------------------------------------------- Ir -- line 12 ---------------------------------------- . use rustc_span::Span; . use std::borrow::Cow; . use std::fmt::{self, Debug, Formatter, Write}; . use std::iter; . use std::slice; . . pub use super::query::*; . 392,219 ( 0.00%) #[derive(Debug, Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq, PartialOrd)] . pub struct SwitchTargets { . /// Possible values. The locations to branch to in each case . /// are found in the corresponding indices from the `targets` vector. . values: SmallVec<[u128; 1]>, . . /// Possible branch sites. The last element of this vector is used . /// for the otherwise branch, so targets.len() == values.len() + 1 . /// should hold. -- line 28 ---------------------------------------- -- line 38 ---------------------------------------- . targets: SmallVec<[BasicBlock; 2]>, . } . . impl SwitchTargets { . /// Creates switch targets from an iterator of values and target blocks. . /// . /// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to . /// `goto otherwise;`. 204,034 ( 0.00%) pub fn new(targets: impl Iterator, otherwise: BasicBlock) -> Self { 306,832 ( 0.00%) let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip(); . targets.push(otherwise); 306,832 ( 0.00%) Self { values, targets } 204,034 ( 0.00%) } . . /// Builds a switch targets definition that jumps to `then` if the tested value equals `value`, . /// and to `else_` if not. 14 ( 0.00%) pub fn static_if(value: u128, then: BasicBlock, else_: BasicBlock) -> Self { 105 ( 0.00%) Self { values: smallvec![value], targets: smallvec![then, else_] } 14 ( 0.00%) } . . /// Returns the fallback target that is jumped to when none of the values match the operand. 75,768 ( 0.00%) pub fn otherwise(&self) -> BasicBlock { 75,865 ( 0.00%) *self.targets.last().unwrap() 151,536 ( 0.00%) } . . /// Returns an iterator over the switch targets. . /// . /// The iterator will yield tuples containing the value and corresponding target to jump to, not . /// including the `otherwise` fallback target. . /// . /// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory. 75,856 ( 0.00%) pub fn iter(&self) -> SwitchTargetsIter<'_> { 530,992 ( 0.00%) SwitchTargetsIter { inner: iter::zip(&self.values, &self.targets) } 75,856 ( 0.00%) } . . /// Returns a slice with all possible jump targets (including the fallback target). 140,374 ( 0.00%) pub fn all_targets(&self) -> &[BasicBlock] { . &self.targets 140,374 ( 0.00%) } . 1 ( 0.00%) pub fn all_targets_mut(&mut self) -> &mut [BasicBlock] { . &mut self.targets 1 ( 0.00%) } . . /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the . /// specific value. This cannot fail, as it'll return the `otherwise` . /// branch if there's not a specific match for the value. 1,770 ( 0.00%) pub fn target_for_value(&self, value: u128) -> BasicBlock { 1,416 ( 0.00%) self.iter().find_map(|(v, t)| (v == value).then_some(t)).unwrap_or_else(|| self.otherwise()) 2,124 ( 0.00%) } . } . . pub struct SwitchTargetsIter<'a> { . inner: iter::Zip, slice::Iter<'a, BasicBlock>>, . } . . impl<'a> Iterator for SwitchTargetsIter<'a> { . type Item = (u128, BasicBlock); . . 
fn next(&mut self) -> Option { 234,832 ( 0.00%) self.inner.next().map(|(val, bb)| (*val, *bb)) 173,467 ( 0.00%) } . 4,081 ( 0.00%) fn size_hint(&self) -> (usize, Option) { 4,081 ( 0.00%) self.inner.size_hint() 12,243 ( 0.00%) } . } . . impl<'a> ExactSizeIterator for SwitchTargetsIter<'a> {} . 5,766,028 ( 0.01%) #[derive(Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)] . pub enum TerminatorKind<'tcx> { . /// Block should have one successor in the graph; we jump there. 569,139 ( 0.00%) Goto { target: BasicBlock }, . . /// Operand evaluates to an integer; jump depending on its value . /// to one of the targets, and otherwise fallback to `otherwise`. . SwitchInt { . /// The discriminant value being tested. . discr: Operand<'tcx>, . . /// The type of value being tested. . /// This is always the same as the type of `discr`. . /// FIXME: remove this redundant information. Currently, it is relied on by pretty-printing. 17,556 ( 0.00%) switch_ty: Ty<'tcx>, . . targets: SwitchTargets, . }, . . /// Indicates that the landing pad is finished and unwinding should . /// continue. Emitted by `build::scope::diverge_cleanup`. . Resume, . -- line 130 ---------------------------------------- -- line 136 ---------------------------------------- . /// been filled in before this executes. This can occur multiple times . /// in different basic blocks. . Return, . . /// Indicates a terminator that can never be reached. . Unreachable, . . /// Drop the `Place`. 983,830 ( 0.00%) Drop { place: Place<'tcx>, target: BasicBlock, unwind: Option }, . . /// Drop the `Place` and assign the new value over it. This ensures . /// that the assignment to `P` occurs *even if* the destructor for . /// place unwinds. Its semantics are best explained by the . /// elaboration: . /// . /// ``` . /// BB0 { -- line 152 ---------------------------------------- -- line 167 ---------------------------------------- . /// BB2 { . /// // P is now uninitialized -- its dtor panicked . /// P <- V . /// } . /// ``` . /// . /// Note that DropAndReplace is eliminated as part of the `ElaborateDrops` pass. . DropAndReplace { 1,071 ( 0.00%) place: Place<'tcx>, . value: Operand<'tcx>, 714 ( 0.00%) target: BasicBlock, 357 ( 0.00%) unwind: Option, . }, . . /// Block ends with a call of a function. . Call { . /// The function that’s being called. . func: Operand<'tcx>, . /// Arguments the function is called with. . /// These are owned by the callee, which is free to modify them. . /// This allows the memory occupied by "by-value" arguments to be . /// reused across function calls without duplicating the contents. 85,938 ( 0.00%) args: Vec>, . /// Destination for the return value. If some, the call is converging. . destination: Option<(Place<'tcx>, BasicBlock)>, . /// Cleanups to be done if the call unwinds. 57,292 ( 0.00%) cleanup: Option, . /// `true` if this is from a call in HIR rather than from an overloaded . /// operator. True for overloaded function call. . from_hir_call: bool, . /// This `Span` is the span of the function, without the dot and receiver . /// (e.g. `foo(a, b)` in `x.foo(a, b)` . fn_span: Span, . }, . . /// Jump to the target if the condition has the expected value, . /// otherwise panic with a message and a cleanup target. . Assert { . cond: Operand<'tcx>, 10 ( 0.00%) expected: bool, . msg: AssertMessage<'tcx>, 20 ( 0.00%) target: BasicBlock, 20 ( 0.00%) cleanup: Option, . }, . . /// A suspend point. . Yield { . /// The value to return. . value: Operand<'tcx>, . /// Where to resume to. . 
resume: BasicBlock, -- line 217 ---------------------------------------- -- line 223 ---------------------------------------- . . /// Indicates the end of the dropping of a generator. . GeneratorDrop, . . /// A block where control flow only ever takes one real path, but borrowck . /// needs to be more conservative. . FalseEdge { . /// The target normal control flow will take. 17,500 ( 0.00%) real_target: BasicBlock, . /// A block control flow could conceptually jump to, but won't in . /// practice. 26,250 ( 0.00%) imaginary_target: BasicBlock, . }, . /// A terminator for blocks that only take one path in reality, but where we . /// reserve the right to unwind in borrowck, even if it won't happen in practice. . /// This can arise in infinite loops with no function calls for example. . FalseUnwind { . /// The target normal control flow will take. 538 ( 0.00%) real_target: BasicBlock, . /// The imaginary cleanup block link. This particular path will never be taken . /// in practice, but in order to avoid fragility we want to always . /// consider it in borrowck. We don't want to accept programs which . /// pass borrowck only when `panic=abort` or some assertions are disabled . /// due to release vs. debug mode builds. This needs to be an `Option` because . /// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes. 538 ( 0.00%) unwind: Option, . }, . . /// Block ends with an inline assembly block. This is a terminator since . /// inline assembly is allowed to diverge. . InlineAsm { . /// The template for the inline assembly, with placeholders. . template: &'tcx [InlineAsmTemplatePiece], . -- line 256 ---------------------------------------- -- line 268 ---------------------------------------- . /// diverging (InlineAsmOptions::NORETURN). . destination: Option, . . /// Cleanup to be done if the inline assembly unwinds. This is present . /// if and only if InlineAsmOptions::MAY_UNWIND is set. . cleanup: Option, . }, . } 5,475,314 ( 0.01%) #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable)] . pub struct Terminator<'tcx> { . pub source_info: SourceInfo, . pub kind: TerminatorKind<'tcx>, . } . . impl<'tcx> Terminator<'tcx> { 3,183,826 ( 0.01%) pub fn successors(&self) -> Successors<'_> { 6,611,692 ( 0.01%) self.kind.successors() 4,775,739 ( 0.01%) } . 2,880,118 ( 0.01%) pub fn successors_mut(&mut self) -> SuccessorsMut<'_> { 1,440,059 ( 0.00%) self.kind.successors_mut() 4,320,177 ( 0.01%) } . . pub fn unwind(&self) -> Option<&Option> { . self.kind.unwind() . } . . pub fn unwind_mut(&mut self) -> Option<&mut Option> { . self.kind.unwind_mut() 228,749 ( 0.00%) } . } . . impl<'tcx> TerminatorKind<'tcx> { 5,735 ( 0.00%) pub fn if_( . tcx: TyCtxt<'tcx>, . cond: Operand<'tcx>, . t: BasicBlock, . f: BasicBlock, . ) -> TerminatorKind<'tcx> { 57,350 ( 0.00%) TerminatorKind::SwitchInt { 11,470 ( 0.00%) discr: cond, 5,735 ( 0.00%) switch_ty: tcx.types.bool, . targets: SwitchTargets::static_if(0, f, t), . } 5,735 ( 0.00%) } . 4,948,276 ( 0.01%) pub fn successors(&self) -> Successors<'_> { . use self::TerminatorKind::*; 32,597,576 ( 0.06%) match *self { . Resume . | Abort . | GeneratorDrop . | Return . | Unreachable . | Call { destination: None, cleanup: None, .. } . | InlineAsm { destination: None, cleanup: None, .. } => None.into_iter().chain(&[]), 2,957,523 ( 0.01%) Goto { target: ref t } . | Call { destination: None, cleanup: Some(ref t), .. } . | Call { destination: Some((_, ref t)), cleanup: None, .. } . | Yield { resume: ref t, drop: None, .. } . 
| DropAndReplace { target: ref t, unwind: None, .. } . | Drop { target: ref t, unwind: None, .. } . | Assert { target: ref t, cleanup: None, .. } . | FalseUnwind { real_target: ref t, unwind: None } . | InlineAsm { destination: Some(ref t), cleanup: None, .. } -- line 332 ---------------------------------------- -- line 338 ---------------------------------------- . | DropAndReplace { target: ref t, unwind: Some(ref u), .. } . | Drop { target: ref t, unwind: Some(ref u), .. } . | Assert { target: ref t, cleanup: Some(ref u), .. } . | FalseUnwind { real_target: ref t, unwind: Some(ref u) } . | InlineAsm { destination: Some(ref t), cleanup: Some(ref u), .. } => { . Some(t).into_iter().chain(slice::from_ref(u)) . } . SwitchInt { ref targets, .. } => None.into_iter().chain(&targets.targets), 295,802 ( 0.00%) FalseEdge { ref real_target, ref imaginary_target } => { . Some(real_target).into_iter().chain(slice::from_ref(imaginary_target)) . } . } 4,948,276 ( 0.01%) } . 1,440,059 ( 0.00%) pub fn successors_mut(&mut self) -> SuccessorsMut<'_> { . use self::TerminatorKind::*; 9,530,397 ( 0.02%) match *self { . Resume . | Abort . | GeneratorDrop . | Return . | Unreachable . | Call { destination: None, cleanup: None, .. } . | InlineAsm { destination: None, cleanup: None, .. } => None.into_iter().chain(&mut []), 1,132,893 ( 0.00%) Goto { target: ref mut t } . | Call { destination: None, cleanup: Some(ref mut t), .. } . | Call { destination: Some((_, ref mut t)), cleanup: None, .. } . | Yield { resume: ref mut t, drop: None, .. } . | DropAndReplace { target: ref mut t, unwind: None, .. } . | Drop { target: ref mut t, unwind: None, .. } . | Assert { target: ref mut t, cleanup: None, .. } . | FalseUnwind { real_target: ref mut t, unwind: None } . | InlineAsm { destination: Some(ref mut t), cleanup: None, .. } -- line 370 ---------------------------------------- -- line 376 ---------------------------------------- . | DropAndReplace { target: ref mut t, unwind: Some(ref mut u), .. } . | Drop { target: ref mut t, unwind: Some(ref mut u), .. } . | Assert { target: ref mut t, cleanup: Some(ref mut u), .. } . | FalseUnwind { real_target: ref mut t, unwind: Some(ref mut u) } . | InlineAsm { destination: Some(ref mut t), cleanup: Some(ref mut u), .. } => { . Some(t).into_iter().chain(slice::from_mut(u)) . } . SwitchInt { ref mut targets, .. } => None.into_iter().chain(&mut targets.targets), 59,636 ( 0.00%) FalseEdge { ref mut real_target, ref mut imaginary_target } => { . Some(real_target).into_iter().chain(slice::from_mut(imaginary_target)) . } . } 1,440,059 ( 0.00%) } . . pub fn unwind(&self) -> Option<&Option> { . match *self { . TerminatorKind::Goto { .. } . | TerminatorKind::Resume . | TerminatorKind::Abort . | TerminatorKind::Return . | TerminatorKind::Unreachable -- line 396 ---------------------------------------- -- line 403 ---------------------------------------- . | TerminatorKind::DropAndReplace { ref unwind, .. } . | TerminatorKind::Drop { ref unwind, .. } . | TerminatorKind::FalseUnwind { ref unwind, .. } . | TerminatorKind::InlineAsm { cleanup: ref unwind, .. } => Some(unwind), . } . } . . pub fn unwind_mut(&mut self) -> Option<&mut Option> { 1,372,494 ( 0.00%) match *self { . TerminatorKind::Goto { .. } . | TerminatorKind::Resume . | TerminatorKind::Abort . | TerminatorKind::Return . | TerminatorKind::Unreachable . | TerminatorKind::GeneratorDrop . | TerminatorKind::Yield { .. } . | TerminatorKind::SwitchInt { .. } . | TerminatorKind::FalseEdge { .. 
} => None, 83,510 ( 0.00%) TerminatorKind::Call { cleanup: ref mut unwind, .. } . | TerminatorKind::Assert { cleanup: ref mut unwind, .. } . | TerminatorKind::DropAndReplace { ref mut unwind, .. } . | TerminatorKind::Drop { ref mut unwind, .. } . | TerminatorKind::FalseUnwind { ref mut unwind, .. } . | TerminatorKind::InlineAsm { cleanup: ref mut unwind, .. } => Some(unwind), . } . } . -- line 429 ---------------------------------------- 18,458,972 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs -------------------------------------------------------------------------------- Ir -- line 108 ---------------------------------------- . suppress_errors: bool, . }, . } . . impl RegionckMode { . /// Indicates that the MIR borrowck will repeat these region . /// checks, so we should ignore errors if NLL is (unconditionally) . /// enabled. 6,783 ( 0.00%) pub fn for_item_body(tcx: TyCtxt<'_>) -> Self { . // FIXME(Centril): Once we actually remove `::Migrate` also make . // this always `true` and then proceed to eliminate the dead code. 6,783 ( 0.00%) match tcx.borrowck_mode() { . // If we're on Migrate mode, report AST region errors . BorrowckMode::Migrate => RegionckMode::Erase { suppress_errors: false }, . . // If we're on MIR, don't report AST region errors as they should be reported by NLL . BorrowckMode::Mir => RegionckMode::Erase { suppress_errors: true }, . } 13,566 ( 0.00%) } . } . . /// This type contains all the things within `InferCtxt` that sit within a . /// `RefCell` and are involved with taking/rolling back snapshots. Snapshot . /// operations are hot enough that we want only one call to `borrow_mut` per . /// call to `start_snapshot` and `rollback_to`. . pub struct InferCtxtInner<'tcx> { . /// Cache for projections. This cache is snapshotted along with the infcx. -- line 134 ---------------------------------------- -- line 202 ---------------------------------------- . /// type instantiations (`ty::Infer`) to the actual opaque . /// type (`ty::Opaque`). Used during fallback to map unconstrained . /// opaque type inference variables to their corresponding . /// opaque type. . pub opaque_types_vars: FxHashMap, Ty<'tcx>>, . } . . impl<'tcx> InferCtxtInner<'tcx> { 889,480 ( 0.00%) fn new() -> InferCtxtInner<'tcx> { 6,048,464 ( 0.01%) InferCtxtInner { . projection_cache: Default::default(), . type_variable_storage: type_variable::TypeVariableStorage::new(), . undo_log: InferCtxtUndoLogs::default(), . const_unification_storage: ut::UnificationTableStorage::new(), . int_unification_storage: ut::UnificationTableStorage::new(), . float_unification_storage: ut::UnificationTableStorage::new(), 533,688 ( 0.00%) region_constraint_storage: Some(RegionConstraintStorage::new()), . region_obligations: vec![], . opaque_types: Default::default(), . opaque_types_vars: Default::default(), . } 1,067,376 ( 0.00%) } . . #[inline] . pub fn region_obligations(&self) -> &[(hir::HirId, RegionObligation<'tcx>)] { . &self.region_obligations . } . . #[inline] . pub fn projection_cache(&mut self) -> traits::ProjectionCache<'_, 'tcx> { 177,534 ( 0.00%) self.projection_cache.with_log(&mut self.undo_log) . } . . #[inline] . fn type_variables(&mut self) -> type_variable::TypeVariableTable<'_, 'tcx> { 7,052,773 ( 0.01%) self.type_variable_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn int_unification_table( . &mut self, . ) -> ut::UnificationTable< . 
ut::InPlace< . ty::IntVid, . &mut ut::UnificationStorage, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 35,627 ( 0.00%) self.int_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn float_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::FloatVid, -- line 258 ---------------------------------------- -- line 268 ---------------------------------------- . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::ConstVid<'tcx>, . &mut ut::UnificationStorage>, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 27,049 ( 0.00%) self.const_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . pub fn unwrap_region_constraints(&mut self) -> RegionConstraintCollector<'_, 'tcx> { 1,031,776 ( 0.00%) self.region_constraint_storage . .as_mut() . .expect("region constraints already solved") 1,345,730 ( 0.00%) .with_log(&mut self.undo_log) . } . } . . pub struct InferCtxt<'a, 'tcx> { . pub tcx: TyCtxt<'tcx>, . . /// The `DefId` of the item in whose context we are performing inference or typeck. . /// It is used to check whether an opaque type use is a defining use. -- line 292 ---------------------------------------- -- line 361 ---------------------------------------- . /// item we are type-checking, and just consider those names as . /// part of the root universe. So this would only get incremented . /// when we enter into a higher-ranked (`for<..>`) type or trait . /// bound. . universe: Cell, . } . . /// See the `error_reporting` module for more details. 2,394,160 ( 0.00%) #[derive(Clone, Copy, Debug, PartialEq, Eq, TypeFoldable)] . pub enum ValuePairs<'tcx> { . Types(ExpectedFound>), . Regions(ExpectedFound>), . Consts(ExpectedFound<&'tcx ty::Const<'tcx>>), . TraitRefs(ExpectedFound>), . PolyTraitRefs(ExpectedFound>), . } . -- line 377 ---------------------------------------- -- line 383 ---------------------------------------- . pub struct TypeTrace<'tcx> { . cause: ObligationCause<'tcx>, . values: ValuePairs<'tcx>, . } . . /// The origin of a `r1 <= r2` constraint. . /// . /// See `error_reporting` module for more details 1,976,842 ( 0.00%) #[derive(Clone, Debug)] . pub enum SubregionOrigin<'tcx> { . /// Arose from a subtyping relation 115,740 ( 0.00%) Subtype(Box>), . . /// When casting `&'a T` to an `&'b Trait` object, . /// relating `'a` to `'b` . RelateObjectBound(Span), . . /// Some type parameter was instantiated with the given type, . /// and that type must outlive some region. 14,437 ( 0.00%) RelateParamBound(Span, Ty<'tcx>, Option), . . /// The given region parameter was instantiated with a region . /// that must outlive some other region. . RelateRegionParamBound(Span), . . /// Creating a pointer `b` to contents of another reference . Reborrow(Span), . . /// Creating a pointer `b` to contents of an upvar . ReborrowUpvar(Span, ty::UpvarId), . . /// Data with type `Ty<'tcx>` was borrowed 14,298 ( 0.00%) DataBorrowed(Ty<'tcx>, Span), . . /// (&'a &'b T) where a >= b 9,883 ( 0.00%) ReferenceOutlivesReferent(Ty<'tcx>, Span), . . /// Comparing the signature and requirements of an impl method against . /// the containing trait. . CompareImplMethodObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, . . /// Comparing the signature and requirements of an impl associated type . /// against the containing trait . CompareImplTypeObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, -- line 426 ---------------------------------------- -- line 554 ---------------------------------------- . 
defining_use_anchor: Option, . } . . pub trait TyCtxtInferExt<'tcx> { . fn infer_ctxt(self) -> InferCtxtBuilder<'tcx>; . } . . impl<'tcx> TyCtxtInferExt<'tcx> for TyCtxt<'tcx> { 177,896 ( 0.00%) fn infer_ctxt(self) -> InferCtxtBuilder<'tcx> { 533,688 ( 0.00%) InferCtxtBuilder { tcx: self, defining_use_anchor: None, fresh_typeck_results: None } 177,896 ( 0.00%) } . } . . impl<'tcx> InferCtxtBuilder<'tcx> { . /// Used only by `rustc_typeck` during body type-checking/inference, . /// will initialize `in_progress_typeck_results` with fresh `TypeckResults`. . /// Will also change the scope for opaque type defining use checks to the given owner. 173,295 ( 0.00%) pub fn with_fresh_in_progress_typeck_results(mut self, table_owner: LocalDefId) -> Self { 211,805 ( 0.00%) self.fresh_typeck_results = Some(RefCell::new(ty::TypeckResults::new(table_owner))); 96,275 ( 0.00%) self.with_opaque_type_inference(table_owner) 134,785 ( 0.00%) } . . /// Whenever the `InferCtxt` should be able to handle defining uses of opaque types, . /// you need to call this function. Otherwise the opaque type will be treated opaquely. . /// . /// It is only meant to be called in two places, for typeck . /// (via `with_fresh_in_progress_typeck_results`) and for the inference context used . /// in mir borrowck. 13,624 ( 0.00%) pub fn with_opaque_type_inference(mut self, defining_use_anchor: LocalDefId) -> Self { 6,812 ( 0.00%) self.defining_use_anchor = Some(defining_use_anchor); 52,134 ( 0.00%) self 20,436 ( 0.00%) } . . /// Given a canonical value `C` as a starting point, create an . /// inference context that contains each of the bound values . /// within instantiated as a fresh variable. The `f` closure is . /// invoked with the new infcx, along with the instantiated value . /// `V` and a substitution `S`. This substitution `S` maps from . /// the bound values in `C` to their instantiated values in `V` . /// (in other words, `S(C) = V`). 251,919 ( 0.00%) pub fn enter_with_canonical( . &mut self, . span: Span, . canonical: &Canonical<'tcx, T>, . f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>, T, CanonicalVarValues<'tcx>) -> R, . ) -> R . where . T: TypeFoldable<'tcx>, . { . self.enter(|infcx| { 354,728 ( 0.00%) let (value, subst) = 28,666 ( 0.00%) infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical); 496,765 ( 0.00%) f(infcx, value, subst) . }) 273,491 ( 0.00%) } . 1,043,914 ( 0.00%) pub fn enter(&mut self, f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>) -> R) -> R { 501,475 ( 0.00%) let InferCtxtBuilder { tcx, defining_use_anchor, ref fresh_typeck_results } = *self; . let in_progress_typeck_results = fresh_typeck_results.as_ref(); 7,632,482 ( 0.01%) f(InferCtxt { . tcx, . defining_use_anchor, . in_progress_typeck_results, 177,896 ( 0.00%) inner: RefCell::new(InferCtxtInner::new()), . lexical_region_resolutions: RefCell::new(None), . selection_cache: Default::default(), . evaluation_cache: Default::default(), . reported_trait_errors: Default::default(), . reported_closure_mismatch: Default::default(), . tainted_by_errors_flag: Cell::new(false), 177,896 ( 0.00%) err_count_on_creation: tcx.sess.err_count(), . in_snapshot: Cell::new(false), . skip_leak_check: Cell::new(false), . universe: Cell::new(ty::UniverseIndex::ROOT), . }) 1,132,830 ( 0.00%) } . } . . impl<'tcx, T> InferOk<'tcx, T> { . pub fn unit(self) -> InferOk<'tcx, ()> { . InferOk { value: (), obligations: self.obligations } . } . . /// Extracts `value`, registering any obligations into `fulfill_cx`. . pub fn into_value_registering_obligations( . self, . 
infcx: &InferCtxt<'_, 'tcx>, . fulfill_cx: &mut dyn TraitEngine<'tcx>, . ) -> T { 10,961 ( 0.00%) let InferOk { value, obligations } = self; 38,638 ( 0.00%) for obligation in obligations { . fulfill_cx.register_predicate_obligation(infcx, obligation); . } . value . } . } . . impl<'tcx> InferOk<'tcx, ()> { 259,721 ( 0.00%) pub fn into_obligations(self) -> PredicateObligations<'tcx> { 1,038,884 ( 0.00%) self.obligations 259,721 ( 0.00%) } . } . . #[must_use = "once you start a snapshot, you should always consume it"] . pub struct CombinedSnapshot<'a, 'tcx> { . undo_snapshot: Snapshot<'tcx>, . region_constraints_snapshot: RegionSnapshot, . universe: ty::UniverseIndex, . was_in_snapshot: bool, -- line 662 ---------------------------------------- -- line 674 ---------------------------------------- . let canonical = self.canonicalize_query((a, b), &mut OriginalQueryValues::default()); . debug!("canonical consts: {:?}", &canonical.value); . . self.tcx.try_unify_abstract_consts(canonical.value) . } . . pub fn is_in_snapshot(&self) -> bool { . self.in_snapshot.get() 361,434 ( 0.00%) } . 2,590,624 ( 0.00%) pub fn freshen>(&self, t: T) -> T { 2,914,452 ( 0.01%) t.fold_with(&mut self.freshener()) 2,914,452 ( 0.01%) } . . /// Returns the origin of the type variable identified by `vid`, or `None` . /// if this is not a type variable. . /// . /// No attempt is made to resolve `ty`. 8,302 ( 0.00%) pub fn type_var_origin(&'a self, ty: Ty<'tcx>) -> Option { 16,604 ( 0.00%) match *ty.kind() { 4,151 ( 0.00%) ty::Infer(ty::TyVar(vid)) => { 16,604 ( 0.00%) Some(*self.inner.borrow_mut().type_variables().var_origin(vid)) . } . _ => None, . } 16,604 ( 0.00%) } . 323,828 ( 0.00%) pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, false) 323,828 ( 0.00%) } . . /// Like `freshener`, but does not replace `'static` regions. 1,140,622 ( 0.00%) pub fn freshener_keep_static<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, true) 1,140,622 ( 0.00%) } . 30,276 ( 0.00%) pub fn unsolved_variables(&self) -> Vec> { 15,138 ( 0.00%) let mut inner = self.inner.borrow_mut(); 15,138 ( 0.00%) let mut vars: Vec> = inner . .type_variables() . .unsolved_variables() . .into_iter() 8,302 ( 0.00%) .map(|t| self.tcx.mk_ty_var(t)) . .collect(); . vars.extend( . (0..inner.int_unification_table().len()) . .map(|i| ty::IntVid { index: i as u32 }) 3,560 ( 0.00%) .filter(|&vid| inner.int_unification_table().probe_value(vid).is_none()) . .map(|v| self.tcx.mk_int_var(v)), . ); . vars.extend( . (0..inner.float_unification_table().len()) . .map(|i| ty::FloatVid { index: i as u32 }) . .filter(|&vid| inner.float_unification_table().probe_value(vid).is_none()) . .map(|v| self.tcx.mk_float_var(v)), . ); . vars 52,983 ( 0.00%) } . 613,684 ( 0.00%) fn combine_fields( . &'a self, . trace: TypeTrace<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> CombineFields<'a, 'tcx> { 2,464,436 ( 0.00%) CombineFields { . infcx: self, 6,161,090 ( 0.01%) trace, . cause: None, . param_env, . obligations: PredicateObligations::new(), . } 613,684 ( 0.00%) } . . /// Clear the "currently in a snapshot" flag, invoke the closure, . /// then restore the flag to its original value. This flag is a . /// debugging measure designed to detect cases where we start a . /// snapshot, create type variables, and register obligations . /// which may involve those type variables in the fulfillment cx, . /// potentially leaving "dangling type variables" behind. . 
/// In such cases, an assertion will fail when attempting to -- line 753 ---------------------------------------- -- line 755 ---------------------------------------- . /// better than grovelling through megabytes of `RUSTC_LOG` output. . /// . /// HOWEVER, in some cases the flag is unhelpful. In particular, we . /// sometimes create a "mini-fulfilment-cx" in which we enroll . /// obligations. As long as this fulfillment cx is fully drained . /// before we return, this is not a problem, as there won't be any . /// escaping obligations in the main cx. In those cases, you can . /// use this function. 16 ( 0.00%) pub fn save_and_restore_in_snapshot_flag(&self, func: F) -> R . where . F: FnOnce(&Self) -> R, . { . let flag = self.in_snapshot.replace(false); 153,948 ( 0.00%) let result = func(self); . self.in_snapshot.set(flag); . result 18 ( 0.00%) } . 3,214,856 ( 0.01%) fn start_snapshot(&self) -> CombinedSnapshot<'a, 'tcx> { . debug!("start_snapshot()"); . . let in_snapshot = self.in_snapshot.replace(true); . . let mut inner = self.inner.borrow_mut(); . 9,644,568 ( 0.02%) CombinedSnapshot { . undo_snapshot: inner.undo_log.start_snapshot(), . region_constraints_snapshot: inner.unwrap_region_constraints().start_snapshot(), . universe: self.universe(), . was_in_snapshot: in_snapshot, . // Borrow typeck results "in progress" (i.e., during typeck) . // to ban writes from within a snapshot to them. 1,607,428 ( 0.00%) _in_progress_typeck_results: self . .in_progress_typeck_results . .map(|typeck_results| typeck_results.borrow()), . } 6,429,712 ( 0.01%) } . 4,280,078 ( 0.01%) #[instrument(skip(self, snapshot), level = "debug")] . fn rollback_to(&self, cause: &str, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 389,098 ( 0.00%) undo_snapshot, 389,098 ( 0.00%) region_constraints_snapshot, 389,098 ( 0.00%) universe, 389,098 ( 0.00%) was_in_snapshot, 778,196 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . self.universe.set(universe); . . let mut inner = self.inner.borrow_mut(); 389,098 ( 0.00%) inner.rollback_to(undo_snapshot); . inner.unwrap_region_constraints().rollback_to(region_constraints_snapshot); . } . 18,274,950 ( 0.03%) #[instrument(skip(self, snapshot), level = "debug")] . fn commit_from(&self, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 1,218,330 ( 0.00%) undo_snapshot, . region_constraints_snapshot: _, . universe: _, 1,218,330 ( 0.00%) was_in_snapshot, 2,436,660 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . . self.inner.borrow_mut().commit(undo_snapshot); . } . . /// Executes `f` and commit the bindings. 805,603 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 1,029,582 ( 0.00%) pub fn commit_unconditionally(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 93,702 ( 0.00%) let snapshot = self.start_snapshot(); 369,525 ( 0.00%) let r = f(&snapshot); 655,154 ( 0.00%) self.commit_from(snapshot); 784,893 ( 0.00%) r . } . . /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`. 7,475,641 ( 0.01%) #[instrument(skip(self, f), level = "debug")] 9,205,061 ( 0.02%) pub fn commit_if_ok(&self, f: F) -> Result . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result, . { 1,444,619 ( 0.00%) let snapshot = self.start_snapshot(); 4,249,730 ( 0.01%) let r = f(&snapshot); . debug!("commit_if_ok() -- r.is_ok() = {}", r.is_ok()); 1,256,201 ( 0.00%) match r { . Ok(_) => { 8,196,532 ( 0.02%) self.commit_from(snapshot); . 
} . Err(_) => { 1,449,579 ( 0.00%) self.rollback_to("commit_if_ok -- error", snapshot); . } . } 7,461,615 ( 0.01%) r . } . . /// Execute `f` then unroll any bindings it creates. 1,512,310 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 1,737,787 ( 0.00%) pub fn probe(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 476,352 ( 0.00%) let snapshot = self.start_snapshot(); 663,891 ( 0.00%) let r = f(&snapshot); 2,218,868 ( 0.00%) self.rollback_to("probe", snapshot); 502,396 ( 0.00%) r . } . . /// If `should_skip` is true, then execute `f` then unroll any bindings it creates. 468 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 624 ( 0.00%) pub fn probe_maybe_skip_leak_check(&self, should_skip: bool, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 104 ( 0.00%) let snapshot = self.start_snapshot(); 52 ( 0.00%) let was_skip_leak_check = self.skip_leak_check.get(); 104 ( 0.00%) if should_skip { . self.skip_leak_check.set(true); . } 208 ( 0.00%) let r = f(&snapshot); 468 ( 0.00%) self.rollback_to("probe", snapshot); . self.skip_leak_check.set(was_skip_leak_check); 624 ( 0.00%) r . } . . /// Scan the constraints produced since `snapshot` began and returns: . /// . /// - `None` -- if none of them involve "region outlives" constraints . /// - `Some(true)` -- if there are `'a: 'b` constraints where `'a` or `'b` is a placeholder . /// - `Some(false)` -- if there are `'a: 'b` constraints but none involve placeholders 158,726 ( 0.00%) pub fn region_constraints_added_in_snapshot( . &self, . snapshot: &CombinedSnapshot<'a, 'tcx>, . ) -> Option { 317,452 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() . .region_constraints_added_in_snapshot(&snapshot.undo_snapshot) 238,089 ( 0.00%) } . . pub fn add_given(&self, sub: ty::Region<'tcx>, sup: ty::RegionVid) { . self.inner.borrow_mut().unwrap_region_constraints().add_given(sub, sup); . } . 71,940 ( 0.00%) pub fn can_sub(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).sub(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 53,955 ( 0.00%) } . 88,932 ( 0.00%) pub fn can_eq(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).eq(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 66,699 ( 0.00%) } . 543,360 ( 0.00%) #[instrument(skip(self), level = "debug")] . pub fn sub_regions( . &self, . origin: SubregionOrigin<'tcx>, . a: ty::Region<'tcx>, . b: ty::Region<'tcx>, . ) { 489,024 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().make_subregion(origin, a, b); . } . . /// Require that the region `r` be equal to one of the regions in . /// the set `regions`. . #[instrument(skip(self), level = "debug")] . pub fn member_constraint( . &self, . opaque_type_def_id: DefId, -- line 947 ---------------------------------------- -- line 969 ---------------------------------------- . /// to `subtype_predicate` -- that is, "coercing" `a` to `b` winds up . /// actually requiring `a <: b`. This is of course a valid coercion, . /// but it's not as flexible as `FnCtxt::coerce` would be. . /// . 
/// (We may refactor this in the future, but there are a number of . /// practical obstacles. Among other things, `FnCtxt::coerce` presently . /// records adjustments that are required on the HIR in order to perform . /// the coercion, and we don't currently have a way to manage that.) 3,612 ( 0.00%) pub fn coerce_predicate( . &self, . cause: &ObligationCause<'tcx>, . param_env: ty::ParamEnv<'tcx>, . predicate: ty::PolyCoercePredicate<'tcx>, . ) -> Option> { 2,408 ( 0.00%) let subtype_predicate = predicate.map_bound(|p| ty::SubtypePredicate { . a_is_expected: false, // when coercing from `a` to `b`, `b` is expected . a: p.a, . b: p.b, . }); 6,020 ( 0.00%) self.subtype_predicate(cause, param_env, subtype_predicate) 4,816 ( 0.00%) } . 124,824 ( 0.00%) pub fn subtype_predicate( . &self, . cause: &ObligationCause<'tcx>, . param_env: ty::ParamEnv<'tcx>, . predicate: ty::PolySubtypePredicate<'tcx>, . ) -> Option> { . // Check for two unresolved inference variables, in which case we can . // make no progress. This is partly a micro-optimization, but it's . // also an opportunity to "sub-unify" the variables. This isn't -- line 999 ---------------------------------------- -- line 1002 ---------------------------------------- . // earlier that they are sub-unified). . // . // Note that we can just skip the binders here because . // type variables can't (at present, at . // least) capture any of the things bound by this binder. . // . // Note that this sub here is not just for diagnostics - it has semantic . // effects as well. 10,402 ( 0.00%) let r_a = self.shallow_resolve(predicate.skip_binder().a); 10,402 ( 0.00%) let r_b = self.shallow_resolve(predicate.skip_binder().b); 73,720 ( 0.00%) match (r_a.kind(), r_b.kind()) { 16,058 ( 0.00%) (&ty::Infer(ty::TyVar(a_vid)), &ty::Infer(ty::TyVar(b_vid))) => { . self.inner.borrow_mut().type_variables().sub(a_vid, b_vid); 16,058 ( 0.00%) return None; . } . _ => {} . } . . Some(self.commit_if_ok(|_snapshot| { 2,373 ( 0.00%) let ty::SubtypePredicate { a_is_expected, a, b } = . self.replace_bound_vars_with_placeholders(predicate); . 4,746 ( 0.00%) let ok = self.at(cause, param_env).sub_exp(a_is_expected, a, b)?; . . Ok(ok.unit()) . })) 93,618 ( 0.00%) } . 149,820 ( 0.00%) pub fn region_outlives_predicate( . &self, . cause: &traits::ObligationCause<'tcx>, . predicate: ty::PolyRegionOutlivesPredicate<'tcx>, . ) -> UnitResult<'tcx> { . self.commit_if_ok(|_snapshot| { . let ty::OutlivesPredicate(r_a, r_b) = . self.replace_bound_vars_with_placeholders(predicate); . let origin = SubregionOrigin::from_obligation_cause(cause, || { . RelateRegionParamBound(cause.span) . }); 124,850 ( 0.00%) self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b` . Ok(()) . }) 99,880 ( 0.00%) } . . /// Number of type variables created so far. 786 ( 0.00%) pub fn num_ty_vars(&self) -> usize { . self.inner.borrow_mut().type_variables().num_vars() 1,572 ( 0.00%) } . 269,912 ( 0.00%) pub fn next_ty_var_id(&self, origin: TypeVariableOrigin) -> TyVid { 1,349,560 ( 0.00%) self.inner.borrow_mut().type_variables().new_var(self.universe(), origin) 404,868 ( 0.00%) } . 231,420 ( 0.00%) pub fn next_ty_var(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { 925,446 ( 0.00%) self.tcx.mk_ty_var(self.next_ty_var_id(origin)) 347,130 ( 0.00%) } . 19,582 ( 0.00%) pub fn next_ty_var_in_universe( . &self, . origin: TypeVariableOrigin, . universe: ty::UniverseIndex, . 
) -> Ty<'tcx> { 107,701 ( 0.00%) let vid = self.inner.borrow_mut().type_variables().new_var(universe, origin); 9,791 ( 0.00%) self.tcx.mk_ty_var(vid) 29,373 ( 0.00%) } . . pub fn next_const_var( . &self, . ty: Ty<'tcx>, . origin: ConstVariableOrigin, . ) -> &'tcx ty::Const<'tcx> { . self.tcx.mk_const_var(self.next_const_var_id(origin), ty) . } -- line 1074 ---------------------------------------- -- line 1090 ---------------------------------------- . pub fn next_const_var_id(&self, origin: ConstVariableOrigin) -> ConstVid<'tcx> { . self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { . origin, . val: ConstVariableValue::Unknown { universe: self.universe() }, . }) . } . . fn next_int_var_id(&self) -> IntVid { 4,475 ( 0.00%) self.inner.borrow_mut().int_unification_table().new_key(None) . } . 2,685 ( 0.00%) pub fn next_int_var(&self) -> Ty<'tcx> { . self.tcx.mk_int_var(self.next_int_var_id()) 3,580 ( 0.00%) } . . fn next_float_var_id(&self) -> FloatVid { . self.inner.borrow_mut().float_unification_table().new_key(None) . } . . pub fn next_float_var(&self) -> Ty<'tcx> { . self.tcx.mk_float_var(self.next_float_var_id()) . } . . /// Creates a fresh region variable with the next available index. . /// The variable will be created in the maximum universe created . /// thus far, allowing it to name any region created thus far. 95,085 ( 0.00%) pub fn next_region_var(&self, origin: RegionVariableOrigin) -> ty::Region<'tcx> { 2,150,358 ( 0.00%) self.next_region_var_in_universe(origin, self.universe()) 190,170 ( 0.00%) } . . /// Creates a fresh region variable with the next available index . /// in the given universe; typically, you can use . /// `next_region_var` and just use the maximal universe. 912,512 ( 0.00%) pub fn next_region_var_in_universe( . &self, . origin: RegionVariableOrigin, . universe: ty::UniverseIndex, . ) -> ty::Region<'tcx> { . let region_var = 5,931,328 ( 0.01%) self.inner.borrow_mut().unwrap_region_constraints().new_region_var(universe, origin); 2,281,280 ( 0.00%) self.tcx.mk_region(ty::ReVar(region_var)) 1,368,768 ( 0.00%) } . . /// Return the universe that the region `r` was created in. For . /// most regions (e.g., `'static`, named regions from the user, . /// etc) this is the root universe U0. For inference variables or . /// placeholders, however, it will return the universe which which . /// they are associated. 147,824 ( 0.00%) pub fn universe_of_region(&self, r: ty::Region<'tcx>) -> ty::UniverseIndex { . self.inner.borrow_mut().unwrap_region_constraints().universe(r) 221,736 ( 0.00%) } . . /// Number of region variables created so far. 54,554 ( 0.00%) pub fn num_region_vars(&self) -> usize { . self.inner.borrow_mut().unwrap_region_constraints().num_region_vars() 81,831 ( 0.00%) } . . /// Just a convenient wrapper of `next_region_var` for using during NLL. 186,883 ( 0.00%) pub fn next_nll_region_var(&self, origin: NllRegionVariableOrigin) -> ty::Region<'tcx> { . self.next_region_var(RegionVariableOrigin::Nll(origin)) 373,766 ( 0.00%) } . . /// Just a convenient wrapper of `next_region_var` for using during NLL. 203 ( 0.00%) pub fn next_nll_region_var_in_universe( . &self, . origin: NllRegionVariableOrigin, . universe: ty::UniverseIndex, . ) -> ty::Region<'tcx> { 5,145 ( 0.00%) self.next_region_var_in_universe(RegionVariableOrigin::Nll(origin), universe) 406 ( 0.00%) } . 1,564,008 ( 0.00%) pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> GenericArg<'tcx> { 839,175 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => { . 
// Create a region inference variable for the given . // region parameter definition. 69,165 ( 0.00%) self.next_region_var(EarlyBoundRegion(span, param.name)).into() . } . GenericParamDefKind::Type { .. } => { . // Create a type inference variable for the given . // type parameter definition. The substitutions are . // for actual parameters that may be referred to by . // the default of this type parameter, if it exists. . // e.g., `struct Foo(...);` when . // used in a path such as `Foo::::new()` will . // use an inference variable for `C` with `[T, U]` . // as the substitutions for the default, `(T, U)`. 504,500 ( 0.00%) let ty_var_id = self.inner.borrow_mut().type_variables().new_var( . self.universe(), 630,625 ( 0.00%) TypeVariableOrigin { . kind: TypeVariableOriginKind::TypeParameterDefinition( 126,125 ( 0.00%) param.name, 126,125 ( 0.00%) Some(param.def_id), . ), . span, . }, . ); . 126,125 ( 0.00%) self.tcx.mk_ty_var(ty_var_id).into() . } . GenericParamDefKind::Const { .. } => { . let origin = ConstVariableOrigin { . kind: ConstVariableOriginKind::ConstParameterDefinition( . param.name, . param.def_id, . ), . span, . }; . let const_var_id = 2,321 ( 0.00%) self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { . origin, . val: ConstVariableValue::Unknown { universe: self.universe() }, . }); 211 ( 0.00%) self.tcx.mk_const_var(const_var_id, self.tcx.type_of(param.def_id)).into() . } . } 1,688 ( 0.00%) } . . /// Given a set of generics defined on a type or impl, returns a substitution mapping each . /// type/region parameter to a fresh inference variable. 250,947 ( 0.00%) pub fn fresh_substs_for_item(&self, span: Span, def_id: DefId) -> SubstsRef<'tcx> { 1,218,254 ( 0.00%) InternalSubsts::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param)) 167,298 ( 0.00%) } . . /// Returns `true` if errors have been reported since this infcx was . /// created. This is sometimes used as a heuristic to skip . /// reporting errors that often occur as a result of earlier . /// errors, but where it's hard to be 100% sure (e.g., unresolved . /// inference variables, regionck errors). 21,868 ( 0.00%) pub fn is_tainted_by_errors(&self) -> bool { . debug!( . "is_tainted_by_errors(err_count={}, err_count_on_creation={}, \ . tainted_by_errors_flag={})", . self.tcx.sess.err_count(), . self.err_count_on_creation, . self.tainted_by_errors_flag.get() . ); . 478,520 ( 0.00%) if self.tcx.sess.err_count() > self.err_count_on_creation { . return true; // errors reported since this infcx was made . } . self.tainted_by_errors_flag.get() 32,802 ( 0.00%) } . . /// Set the "tainted by errors" flag to true. We call this when we . /// observe an error from a prior pass. . pub fn set_tainted_by_errors(&self) { . debug!("set_tainted_by_errors()"); . self.tainted_by_errors_flag.set(true) . } . . /// Process the region constraints and return any any errors that . /// result. After this, no more unification operations should be . /// done -- or the compiler will panic -- but it is legal to use . /// `resolve_vars_if_possible` as well as `fully_resolve`. 468,979 ( 0.00%) pub fn resolve_regions( . &self, . region_context: DefId, . outlives_env: &OutlivesEnvironment<'tcx>, . mode: RegionckMode, . ) -> Vec> { 1,272,943 ( 0.00%) let (var_infos, data) = { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; 66,997 ( 0.00%) assert!( 200,991 ( 0.00%) self.is_tainted_by_errors() || inner.region_obligations.is_empty(), . "region_obligations not empty: {:#?}", . inner.region_obligations . 
); . inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) . .into_infos_and_data() 66,997 ( 0.00%) }; . . let region_rels = 66,997 ( 0.00%) &RegionRelations::new(self.tcx, region_context, outlives_env.free_region_map()); . 602,973 ( 0.00%) let (lexical_region_resolutions, errors) = 1,607,928 ( 0.00%) lexical_region_resolve::resolve(region_rels, var_infos, data, mode); . 267,988 ( 0.00%) let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions)); 66,997 ( 0.00%) assert!(old_value.is_none()); . . errors 602,973 ( 0.00%) } . . /// Process the region constraints and report any errors that . /// result. After this, no more unification operations should be . /// done -- or the compiler will panic -- but it is legal to use . /// `resolve_vars_if_possible` as well as `fully_resolve`. 669,970 ( 0.00%) pub fn resolve_regions_and_report_errors( . &self, . region_context: DefId, . outlives_env: &OutlivesEnvironment<'tcx>, . mode: RegionckMode, . ) { 133,994 ( 0.00%) let errors = self.resolve_regions(region_context, outlives_env, mode); . 200,991 ( 0.00%) if !self.is_tainted_by_errors() { . // As a heuristic, just skip reporting region errors . // altogether if other errors have been reported while . // this infcx was in use. This is totally hokey but . // otherwise we have a hard time separating legit region . // errors from silly ones. 133,994 ( 0.00%) self.report_region_errors(&errors); . } 334,985 ( 0.00%) } . . /// Obtains (and clears) the current set of region . /// constraints. The inference context is still usable: further . /// unifications will simply add new constraints. . /// . /// This method is not meant to be used with normal lexical region . /// resolution. Rather, it is used in the NLL mode as a kind of . /// interim hack: basically we run normal type-check and generate -- line 1307 ---------------------------------------- -- line 1319 ---------------------------------------- . } . . /// Gives temporary access to the region constraint data. . pub fn with_region_constraints( . &self, . op: impl FnOnce(&RegionConstraintData<'tcx>) -> R, . ) -> R { . let mut inner = self.inner.borrow_mut(); 114,616 ( 0.00%) op(inner.unwrap_region_constraints().data()) . } . . pub fn region_var_origin(&self, vid: ty::RegionVid) -> RegionVariableOrigin { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; . inner . .region_constraint_storage . .as_mut() -- line 1335 ---------------------------------------- -- line 1338 ---------------------------------------- . .var_origin(vid) . } . . /// Takes ownership of the list of variable regions. This implies . /// that all the region constraints have already been taken, and . /// hence that `resolve_regions_and_report_errors` can never be . /// called. This is used only during NLL processing to "hand off" ownership . /// of the set of region variables into the NLL region context. 34,060 ( 0.00%) pub fn take_region_var_origins(&self) -> VarInfos { . let mut inner = self.inner.borrow_mut(); 129,428 ( 0.00%) let (var_infos, data) = inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) 6,812 ( 0.00%) .into_infos_and_data(); 6,812 ( 0.00%) assert!(data.is_empty()); . var_infos 54,496 ( 0.00%) } . . pub fn ty_to_string(&self, t: Ty<'tcx>) -> String { . self.resolve_vars_if_possible(t).to_string() . } . . /// If `TyVar(vid)` resolves to a type, return that type. Else, return the . 
/// universe index of `TyVar(vid)`. 155,504 ( 0.00%) pub fn probe_ty_var(&self, vid: TyVid) -> Result, ty::UniverseIndex> { . use self::type_variable::TypeVariableValue; . 466,512 ( 0.00%) match self.inner.borrow_mut().type_variables().probe(vid) { . TypeVariableValue::Known { value } => Ok(value), . TypeVariableValue::Unknown { universe } => Err(universe), . } 544,264 ( 0.00%) } . . /// Resolve any type variables found in `value` -- but only one . /// level. So, if the variable `?X` is bound to some type . /// `Foo`, then this would return `Foo` (but `?Y` may . /// itself be bound to a type). . /// . /// Useful when you only need to inspect the outermost level of . /// the type and don't care about nested types (or perhaps you . /// will be resolving them as well, e.g. in a loop). . pub fn shallow_resolve(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 2,563,074 ( 0.00%) value.fold_with(&mut ShallowResolver { infcx: self }) . } . 187,004 ( 0.00%) pub fn root_var(&self, var: ty::TyVid) -> ty::TyVid { . self.inner.borrow_mut().type_variables().root_var(var) 280,506 ( 0.00%) } . . /// Where possible, replaces type/const variables in . /// `value` with their final value. Note that region variables . /// are unaffected. If a type/const variable has not been unified, it . /// is left as is. This is an idempotent operation that does . /// not affect inference state in any way and so you can do it . /// at will. 1,772 ( 0.00%) pub fn resolve_vars_if_possible(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 2,405,683 ( 0.00%) if !value.needs_infer() { 1,842,524 ( 0.00%) return value; // Avoid duplicated subst-folding. . } 2,566,271 ( 0.00%) let mut r = resolve::OpportunisticVarResolver::new(self); 3,151,180 ( 0.01%) value.fold_with(&mut r) 1,851 ( 0.00%) } . . /// Returns the first unresolved variable contained in `T`. In the . /// process of visiting `T`, this will resolve (where possible) . /// type variables in `T`, but it never constructs the final, . /// resolved type, so it's more efficient than . /// `resolve_vars_if_possible()`. . pub fn unresolved_type_vars(&self, value: &T) -> Option<(Ty<'tcx>, Option)> . where -- line 1415 ---------------------------------------- -- line 1490 ---------------------------------------- . expected: &'tcx ty::Const<'tcx>, . actual: &'tcx ty::Const<'tcx>, . err: TypeError<'tcx>, . ) -> DiagnosticBuilder<'tcx> { . let trace = TypeTrace::consts(cause, true, expected, actual); . self.report_and_explain_type_error(trace, &err) . } . 249,750 ( 0.00%) pub fn replace_bound_vars_with_fresh_vars( . &self, . span: Span, . lbrct: LateBoundRegionConversionTime, . value: ty::Binder<'tcx, T>, . ) -> (T, BTreeMap>) . where . T: TypeFoldable<'tcx>, . { . let fld_r = 861,528 ( 0.00%) |br: ty::BoundRegion| self.next_region_var(LateBoundRegion(span, br.kind, lbrct)); . let fld_t = |_| { . self.next_ty_var(TypeVariableOrigin { . kind: TypeVariableOriginKind::MiscVariable, . span, . }) . }; . let fld_c = |_, ty| { . self.next_const_var( . ty, . ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span }, . ) . }; 2,855,490 ( 0.01%) self.tcx.replace_bound_vars(value, fld_r, fld_t, fld_c) 166,500 ( 0.00%) } . . /// See the [`region_constraints::RegionConstraintCollector::verify_generic_bound`] method. 71,016 ( 0.00%) pub fn verify_generic_bound( . &self, . origin: SubregionOrigin<'tcx>, . kind: GenericKind<'tcx>, . a: ty::Region<'tcx>, . bound: VerifyBound<'tcx>, . ) { . debug!("verify_generic_bound({:?}, {:?} <: {:?})", kind, a, bound); . 
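// Editorial aside on the resolution helpers annotated above: `shallow_resolve`
// chases only the outermost inference variable, while `resolve_vars_if_possible`
// rewrites the whole value. A minimal standalone analogue over a toy type
// (all names here are invented for illustration; this is not compiler code):
#[derive(Clone, Debug, PartialEq)]
enum Toy {
    Var(usize),      // an inference variable
    Int,             // a concrete type
    Ref(Box<Toy>),   // a type constructor with one argument
}

// `bindings[v]` is `Some(t)` once variable `v` has been unified with `t`.
fn shallow(bindings: &[Option<Toy>], t: &Toy) -> Toy {
    match t {
        // Variables may chain to other variables, so keep following the head.
        Toy::Var(v) => match &bindings[*v] {
            Some(known) => shallow(bindings, known),
            None => t.clone(),
        },
        // Crucially, do not descend into `Ref`: one level only.
        _ => t.clone(),
    }
}

fn deep(bindings: &[Option<Toy>], t: &Toy) -> Toy {
    match shallow(bindings, t) {
        Toy::Ref(inner) => Toy::Ref(Box::new(deep(bindings, &inner))),
        other => other,
    }
}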
26,631 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() 150,909 ( 0.00%) .verify_generic_bound(origin, kind, a, bound); 62,139 ( 0.00%) } . . /// Obtains the latest type of the given closure; this may be a . /// closure in the current function, in which case its . /// `ClosureKind` may not yet be known. 350 ( 0.00%) pub fn closure_kind(&self, closure_substs: SubstsRef<'tcx>) -> Option { 700 ( 0.00%) let closure_kind_ty = closure_substs.as_closure().kind_ty(); . let closure_kind_ty = self.shallow_resolve(closure_kind_ty); 525 ( 0.00%) closure_kind_ty.to_opt_closure_kind() . } . . /// Clears the selection, evaluation, and projection caches. This is useful when . /// repeatedly attempting to select an `Obligation` while changing only . /// its `ParamEnv`, since `FulfillmentContext` doesn't use probing. . pub fn clear_caches(&self) { . self.selection_cache.clear(); . self.evaluation_cache.clear(); . self.inner.borrow_mut().projection_cache().clear(); . } . . pub fn universe(&self) -> ty::UniverseIndex { 4,067,026 ( 0.01%) self.universe.get() 1,393,461 ( 0.00%) } . . /// Creates and return a fresh universe that extends all previous . /// universes. Updates `self.universe` to that new universe. 1,400 ( 0.00%) pub fn create_next_universe(&self) -> ty::UniverseIndex { 1,568 ( 0.00%) let u = self.universe.get().next_universe(); . self.universe.set(u); . u 1,400 ( 0.00%) } . . /// Resolves and evaluates a constant. . /// . /// The constant can be located on a trait like `::C`, in which case the given . /// substitutions and environment are used to resolve the constant. Alternatively if the . /// constant has generic parameters in scope the substitutions are used to evaluate the value of . /// the constant. For example in `fn foo() { let _ = [0; bar::()]; }` the repeat count . /// constant `bar::()` requires a substitution for `T`, if the substitution for `T` is still -- line 1576 ---------------------------------------- -- line 1606 ---------------------------------------- . // variables, thus we don't need to substitute back the original values. . self.tcx.const_eval_resolve(param_env_erased, unevaluated, span) . } . . /// If `typ` is a type variable of some kind, resolve it one level . /// (but do not resolve types found in the result). If `typ` is . /// not a type variable, just return it unmodified. . // FIXME(eddyb) inline into `ShallowResolver::visit_ty`. 28,293,392 ( 0.05%) fn shallow_resolve_ty(&self, typ: Ty<'tcx>) -> Ty<'tcx> { 15,323,009 ( 0.03%) match *typ.kind() { . ty::Infer(ty::TyVar(v)) => { . // Not entirely obvious: if `typ` is a type variable, . // it can be resolved to an int/float variable, which . // can then be recursively resolved, hence the . // recursion. Note though that we prevent type . // variables from unifying to other type variables . // directly (though they may be embedded . // structurally), and we prevent cycles in any case, . // so this recursion should always be of very limited . // depth. . // . // Note: if these two lines are combined into one we get . // dynamic borrow errors on `self.inner`. 8,174,192 ( 0.02%) let known = self.inner.borrow_mut().type_variables().probe(v).known(); . known.map_or(typ, |t| self.shallow_resolve_ty(t)) . } . 75,420 ( 0.00%) ty::Infer(ty::IntVar(v)) => self . .inner . .borrow_mut() . .int_unification_table() . .probe_value(v) 21,756 ( 0.00%) .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . ty::Infer(ty::FloatVar(v)) => self . .inner . .borrow_mut() . .float_unification_table() . .probe_value(v) . 
.map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . _ => typ, . } 31,830,066 ( 0.06%) } . . /// `ty_or_const_infer_var_changed` is equivalent to one of these two: . /// * `shallow_resolve(ty) != ty` (where `ty.kind = ty::Infer(_)`) . /// * `shallow_resolve(ct) != ct` (where `ct.kind = ty::ConstKind::Infer(_)`) . /// . /// However, `ty_or_const_infer_var_changed` is more efficient. It's always . /// inlined, despite being large, because it has only two call sites that . /// are extremely hot (both in `traits::fulfill`'s checking of `stalled_on` -- line 1659 ---------------------------------------- -- line 1662 ---------------------------------------- . #[inline(always)] . pub fn ty_or_const_infer_var_changed(&self, infer_var: TyOrConstInferVar<'tcx>) -> bool { . match infer_var { . TyOrConstInferVar::Ty(v) => { . use self::type_variable::TypeVariableValue; . . // If `inlined_probe` returns a `Known` value, it never equals . // `ty::Infer(ty::TyVar(v))`. 14,786,368 ( 0.03%) match self.inner.borrow_mut().type_variables().inlined_probe(v) { . TypeVariableValue::Unknown { .. } => false, . TypeVariableValue::Known { .. } => true, . } . } . . TyOrConstInferVar::TyInt(v) => { . // If `inlined_probe_value` returns a value it's always a . // `ty::Int(_)` or `ty::UInt(_)`, which never matches a . // `ty::Infer(_)`. 1,192 ( 0.00%) self.inner.borrow_mut().int_unification_table().inlined_probe_value(v).is_some() . } . . TyOrConstInferVar::TyFloat(v) => { . // If `probe_value` returns a value it's always a . // `ty::Float(_)`, which never matches a `ty::Infer(_)`. . // . // Not `inlined_probe_value(v)` because this call site is colder. . self.inner.borrow_mut().float_unification_table().probe_value(v).is_some() -- line 1688 ---------------------------------------- -- line 1716 ---------------------------------------- . /// Equivalent to `ty::ConstKind::Infer(ty::InferConst::Var(_))`. . Const(ConstVid<'tcx>), . } . . impl<'tcx> TyOrConstInferVar<'tcx> { . /// Tries to extract an inference variable from a type or a constant, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`) and . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). 101,904 ( 0.00%) pub fn maybe_from_generic_arg(arg: GenericArg<'tcx>) -> Option { . match arg.unpack() { . GenericArgKind::Type(ty) => Self::maybe_from_ty(ty), . GenericArgKind::Const(ct) => Self::maybe_from_const(ct), . GenericArgKind::Lifetime(_) => None, . } 101,904 ( 0.00%) } . . /// Tries to extract an inference variable from a type, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`). 16,042 ( 0.00%) pub fn maybe_from_ty(ty: Ty<'tcx>) -> Option { 1,059,290 ( 0.00%) match *ty.kind() { 235,128 ( 0.00%) ty::Infer(ty::TyVar(v)) => Some(TyOrConstInferVar::Ty(v)), 180 ( 0.00%) ty::Infer(ty::IntVar(v)) => Some(TyOrConstInferVar::TyInt(v)), . ty::Infer(ty::FloatVar(v)) => Some(TyOrConstInferVar::TyFloat(v)), . _ => None, . } 16,042 ( 0.00%) } . . /// Tries to extract an inference variable from a constant, returns `None` . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). . pub fn maybe_from_const(ct: &'tcx ty::Const<'tcx>) -> Option { . match ct.val { . ty::ConstKind::Infer(InferConst::Var(v)) => Some(TyOrConstInferVar::Const(v)), . _ => None, . } -- line 1749 ---------------------------------------- -- line 1755 ---------------------------------------- . } . . impl<'a, 'tcx> TypeFolder<'tcx> for ShallowResolver<'a, 'tcx> { . 
fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { . self.infcx.tcx . } . . fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { 5,356,261 ( 0.01%) self.infcx.shallow_resolve_ty(ty) . } . 6,656 ( 0.00%) fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { 3,824 ( 0.00%) if let ty::Const { val: ty::ConstKind::Infer(InferConst::Var(vid)), .. } = ct { 1,240 ( 0.00%) self.infcx . .inner . .borrow_mut() . .const_unification_table() 744 ( 0.00%) .probe_value(*vid) . .val . .known() . .unwrap_or(ct) . } else { . ct . } 8,320 ( 0.00%) } . } . . impl<'tcx> TypeTrace<'tcx> { . pub fn span(&self) -> Span { 336 ( 0.00%) self.cause.span . } . . pub fn types( . cause: &ObligationCause<'tcx>, . a_is_expected: bool, . a: Ty<'tcx>, . b: Ty<'tcx>, . ) -> TypeTrace<'tcx> { -- line 1792 ---------------------------------------- -- line 1800 ---------------------------------------- . b: &'tcx ty::Const<'tcx>, . ) -> TypeTrace<'tcx> { . TypeTrace { cause: cause.clone(), values: Consts(ExpectedFound::new(a_is_expected, a, b)) } . } . } . . impl<'tcx> SubregionOrigin<'tcx> { . pub fn span(&self) -> Span { 840 ( 0.00%) match *self { 168 ( 0.00%) Subtype(ref a) => a.span(), . RelateObjectBound(a) => a, . RelateParamBound(a, ..) => a, . RelateRegionParamBound(a) => a, . Reborrow(a) => a, . ReborrowUpvar(a, _) => a, . DataBorrowed(_, a) => a, . ReferenceOutlivesReferent(_, a) => a, . CompareImplMethodObligation { span, .. } => span, -- line 1817 ---------------------------------------- -- line 1818 ---------------------------------------- . CompareImplTypeObligation { span, .. } => span, . } . } . . pub fn from_obligation_cause(cause: &traits::ObligationCause<'tcx>, default: F) -> Self . where . F: FnOnce() -> Self, . { 178,076 ( 0.00%) match *cause.code() { 24,347 ( 0.00%) traits::ObligationCauseCode::ReferenceOutlivesReferent(ref_type) => { 121,735 ( 0.00%) SubregionOrigin::ReferenceOutlivesReferent(ref_type, cause.span) . } . . traits::ObligationCauseCode::CompareImplMethodObligation { . impl_item_def_id, . trait_item_def_id, . } => SubregionOrigin::CompareImplMethodObligation { . span: cause.span, . impl_item_def_id, -- line 1836 ---------------------------------------- 30,167,952 ( 0.06%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/btree/search.rs -------------------------------------------------------------------------------- Ir -- line 40 ---------------------------------------- . . impl NodeRef { . /// Looks up a given key in a (sub)tree headed by the node, recursively. . /// Returns a `Found` with the handle of the matching KV, if any. Otherwise, . /// returns a `GoDown` with the handle of the leaf edge where the key belongs. . /// . /// The result is meaningful only if the tree is ordered by key, like the tree . /// in a `BTreeMap` is. 21,459,286 ( 0.04%) pub fn search_tree( . mut self, . key: &Q, . ) -> SearchResult . where . Q: Ord, . K: Borrow, . { . loop { . self = match self.search_node(key) { . Found(handle) => return Found(handle), 4,531,753 ( 0.01%) GoDown(handle) => match handle.force() { . Leaf(leaf) => return GoDown(leaf), . Internal(internal) => internal.descend(), . }, . } . } 8,847,309 ( 0.02%) } . . /// Descends to the nearest node where the edge matching the lower bound . /// of the range is different from the edge matching the upper bound, i.e., . /// the nearest node that has at least one key contained in the range. . /// . 
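// Editorial aside, not part of the profiled source: `search_tree` above and
// `find_key_index` further down implement the usual B-tree lookup -- scan the
// keys of a node in order until one compares greater or equal, then either
// stop (Equal) or descend into the child edge at that index. A flat
// standalone model with invented names:
use std::cmp::Ordering;

enum NodeSearch {
    Found(usize),  // the key sits at this index within the node
    GoDown(usize), // descend into the child edge at this index
}

fn search_node<K: Ord>(keys: &[K], key: &K) -> NodeSearch {
    for (i, k) in keys.iter().enumerate() {
        match key.cmp(k) {
            Ordering::Greater => {}
            Ordering::Equal => return NodeSearch::Found(i),
            Ordering::Less => return NodeSearch::GoDown(i),
        }
    }
    // Larger than every key in the node: take the rightmost edge.
    NodeSearch::GoDown(keys.len())
}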
/// If found, returns an `Ok` with that node, the strictly ascending pair of . /// edge indices in the node delimiting the range, and the corresponding . /// pair of bounds for continuing the search in the child nodes, in case -- line 73 ---------------------------------------- -- line 202 ---------------------------------------- . unsafe fn find_key_index(&self, key: &Q, start_index: usize) -> IndexResult . where . Q: Ord, . K: Borrow, . { . let node = self.reborrow(); . let keys = node.keys(); . debug_assert!(start_index <= keys.len()); 45,591,919 ( 0.09%) for (offset, k) in unsafe { keys.get_unchecked(start_index..) }.iter().enumerate() { 186,995,902 ( 0.35%) match key.cmp(k.borrow()) { . Ordering::Greater => {} . Ordering::Equal => return IndexResult::KV(start_index + offset), . Ordering::Less => return IndexResult::Edge(start_index + offset), . } . } . IndexResult::Edge(keys.len()) . } . -- line 219 ---------------------------------------- 8,885,150 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/scoped_cell.rs -------------------------------------------------------------------------------- Ir -- line 39 ---------------------------------------- . pub const fn new(value: >::Out) -> Self { . ScopedCell(Cell::new(value)) . } . . /// Sets the value in `self` to `replacement` while . /// running `f`, which gets the old value, mutably. . /// The old value will be restored after `f` exits, even . /// by panic, including modifications made to it by `f`. 33,223,228 ( 0.06%) pub fn replace<'a, R>( . &self, . replacement: >::Out, . f: impl for<'b, 'c> FnOnce(RefMutL<'b, 'c, T>) -> R, . ) -> R { . /// Wrapper that ensures that the cell always gets filled . /// (with the original state, optionally changed by `f`), . /// even if `f` had panicked. . struct PutBackOnDrop<'a, T: LambdaL> { . cell: &'a ScopedCell, . value: Option<>::Out>, . } . . impl<'a, T: LambdaL> Drop for PutBackOnDrop<'a, T> { . fn drop(&mut self) { 4,529,092 ( 0.01%) self.cell.0.set(self.value.take().unwrap()); . } . } . 9,058,183 ( 0.02%) let mut put_back_on_drop = PutBackOnDrop { . cell: self, . value: Some(self.0.replace(unsafe { . let erased = mem::transmute_copy(&replacement); . mem::forget(replacement); . erased . })), . }; . 2,589,276 ( 0.00%) f(RefMutL(put_back_on_drop.value.as_mut().unwrap())) 44,097,986 ( 0.08%) } . . /// Sets the value in `self` to `value` while running `f`. . pub fn set(&self, value: >::Out, f: impl FnOnce() -> R) -> R { . self.replace(value, |_| f()) . } . } 2,408,080 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/hash/sip.rs -------------------------------------------------------------------------------- Ir -- line 124 ---------------------------------------- . /// sizes and avoid calling `memcpy`, which is good for speed. . /// . /// Unsafe because: unchecked indexing at start..start+len . #[inline] . unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 { . debug_assert!(len < 8); . let mut i = 0; // current byte index (from LSB) in the output u64 . let mut out = 0; 18,194,040 ( 0.03%) if i + 3 < len { . // SAFETY: `i` cannot be greater than `len`, and the caller must guarantee . // that the index start..start+len is in bounds. . out = unsafe { load_int_le!(buf, start + i, u32) } as u64; . i += 4; . 
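// Editorial aside: this function (`u8to64_le`) assembles the trailing
// `len < 8` bytes of the input into a little-endian word without calling
// `memcpy`, using the 4/2/1-byte loads above and below. A safe, slower
// standalone equivalent (illustrative only):
fn tail_to_u64_le(buf: &[u8], start: usize, len: usize) -> u64 {
    debug_assert!(len < 8);
    let mut bytes = [0u8; 8];
    bytes[..len].copy_from_slice(&buf[start..start + len]);
    u64::from_le_bytes(bytes)
}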
} 37,859,003 ( 0.07%) if i + 1 < len { . // SAFETY: same as above. 18,929,474 ( 0.04%) out |= (unsafe { load_int_le!(buf, start + i, u16) } as u64) << (i * 8); 9,464,524 ( 0.02%) i += 2 . } 21,081,952 ( 0.04%) if i < len { . // SAFETY: same as above. 30,970,633 ( 0.06%) out |= (unsafe { *buf.get_unchecked(start + i) } as u64) << (i * 8); . i += 1; . } . debug_assert_eq!(i, len); . out . } . . impl SipHasher { . /// Creates a new `SipHasher` with the two initial keys set to 0. -- line 153 ---------------------------------------- -- line 190 ---------------------------------------- . /// Creates a `SipHasher13` that is keyed off the provided keys. . #[inline] . #[unstable(feature = "hashmap_internals", issue = "none")] . #[rustc_deprecated( . since = "1.13.0", . reason = "use `std::collections::hash_map::DefaultHasher` instead" . )] . pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 { 3,626,023 ( 0.01%) SipHasher13 { hasher: Hasher::new_with_keys(key0, key1) } . } . } . . impl Hasher { . #[inline] . fn new_with_keys(key0: u64, key1: u64) -> Hasher { 9 ( 0.00%) let mut state = Hasher { . k0: key0, . k1: key1, . length: 0, . state: State { v0: 0, v1: 0, v2: 0, v3: 0 }, . tail: 0, . ntail: 0, . _marker: PhantomData, . }; . state.reset(); . state . } . . #[inline] . fn reset(&mut self) { . self.length = 0; 5,439,030 ( 0.01%) self.state.v0 = self.k0 ^ 0x736f6d6570736575; 5,806,758 ( 0.01%) self.state.v1 = self.k1 ^ 0x646f72616e646f6d; . self.state.v2 = self.k0 ^ 0x6c7967656e657261; . self.state.v3 = self.k1 ^ 0x7465646279746573; . self.ntail = 0; . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl super::Hasher for SipHasher { -- line 230 ---------------------------------------- -- line 258 ---------------------------------------- . // in librustc_data_structures/sip128.rs, and add `write_u*`/`write_i*` . // methods to `SipHasher`, `SipHasher13`, and `DefaultHasher`. This would . // greatly speed up integer hashing by those hashers, at the cost of . // slightly slowing down compile speeds on some benchmarks. See #69152 for . // details. . #[inline] . fn write(&mut self, msg: &[u8]) { . let length = msg.len(); 23,321,978 ( 0.04%) self.length += length; . . let mut needed = 0; . 22,954,239 ( 0.04%) if self.ntail != 0 { 11,676,662 ( 0.02%) needed = 8 - self.ntail; . // SAFETY: `cmp::min(length, needed)` is guaranteed to not be over `length` 58,383,270 ( 0.11%) self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail); 11,676,654 ( 0.02%) if length < needed { 8,050,016 ( 0.02%) self.ntail += length; . return; . } else { 1,813,319 ( 0.00%) self.state.v3 ^= self.tail; . S::c_rounds(&mut self.state); 3,626,638 ( 0.01%) self.state.v0 ^= self.tail; 1,813,319 ( 0.00%) self.ntail = 0; . } . } . . // Buffered tail is now flushed, process new input. 6,517,343 ( 0.01%) let len = length - needed; 6,517,354 ( 0.01%) let left = len & 0x7; // len % 8 . . let mut i = needed; 9,776,361 ( 0.02%) while i < len - left { . // SAFETY: because `len - left` is the biggest multiple of 8 under . // `len`, and because `i` starts at `needed` where `len` is `length - needed`, . // `i + 8` is guaranteed to be less than or equal to `length`. . let mi = unsafe { load_int_le!(msg, i, u64) }; . 165 ( 0.00%) self.state.v3 ^= mi; . S::c_rounds(&mut self.state); 165 ( 0.00%) self.state.v0 ^= mi; . 330 ( 0.00%) i += 8; . } . . // SAFETY: `i` is now `needed + len.div_euclid(8) * 8`, . // so `i + left` = `needed + len` = `length`, which is by . // definition equal to `msg.len()`. 
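// Editorial aside: the `tail`/`ntail` handling at the top of `write` is a
// staging buffer -- callers hand over fragments of any length, but the SipHash
// rounds only consume whole 8-byte words, so partial words are parked in
// `tail` until enough bytes arrive. A stripped-down standalone model
// (invented names, not the real hasher):
#[derive(Default)]
struct Staging {
    word: u64,     // partially filled little-endian word
    filled: usize, // number of valid bytes in `word`, always in 0..8
}

impl Staging {
    // Returns `Some(word)` whenever a full 8-byte word becomes available.
    fn push(&mut self, byte: u8) -> Option<u64> {
        self.word |= (byte as u64) << (8 * self.filled);
        self.filled += 1;
        if self.filled == 8 {
            let full = std::mem::take(&mut self.word);
            self.filled = 0;
            Some(full)
        } else {
            None
        }
    }
}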
6,885,071 ( 0.01%) self.tail = unsafe { u8to64_le(msg, i, left) }; . self.ntail = left; . } . . #[inline] . fn finish(&self) -> u64 { 1,813,013 ( 0.00%) let mut state = self.state; . 9,065,065 ( 0.02%) let b: u64 = ((self.length as u64 & 0xff) << 56) | self.tail; . 1,813,013 ( 0.00%) state.v3 ^= b; . S::c_rounds(&mut state); 1,813,013 ( 0.00%) state.v0 ^= b; . 1,813,013 ( 0.00%) state.v2 ^= 0xff; . S::d_rounds(&mut state); . 3,626,026 ( 0.01%) state.v0 ^ state.v1 ^ state.v2 ^ state.v3 . } . } . . impl Clone for Hasher { . #[inline] . fn clone(&self) -> Hasher { . Hasher { . k0: self.k0, -- line 331 ---------------------------------------- -- line 354 ---------------------------------------- . } . . #[derive(Debug, Clone, Default)] . struct Sip13Rounds; . . impl Sip for Sip13Rounds { . #[inline] . fn c_rounds(state: &mut State) { 23,572,821 ( 0.04%) compress!(state); . } . . #[inline] . fn d_rounds(state: &mut State) { 7,252,052 ( 0.01%) compress!(state); 7,252,052 ( 0.01%) compress!(state); 5,439,039 ( 0.01%) compress!(state); . } . } . . #[derive(Debug, Clone, Default)] . struct Sip24Rounds; . . impl Sip for Sip24Rounds { . #[inline] -- line 377 ---------------------------------------- 54,359,142 ( 0.10%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs -------------------------------------------------------------------------------- Ir -- line 84 ---------------------------------------- . pub struct SessionGlobals { . symbol_interner: symbol::Interner, . span_interner: Lock, . hygiene_data: Lock, . source_map: Lock>>, . } . . impl SessionGlobals { 12 ( 0.00%) pub fn new(edition: Edition) -> SessionGlobals { 50 ( 0.00%) SessionGlobals { 2 ( 0.00%) symbol_interner: symbol::Interner::fresh(), . span_interner: Lock::new(span_encoding::SpanInterner::default()), 6 ( 0.00%) hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), . source_map: Lock::new(None), . } 10 ( 0.00%) } . } . . #[inline] . pub fn create_session_globals_then(edition: Edition, f: impl FnOnce() -> R) -> R { 1 ( 0.00%) assert!( . !SESSION_GLOBALS.is_set(), . "SESSION_GLOBALS should never be overwritten! \ . Use another thread if you need another SessionGlobals" . ); 3 ( 0.00%) let session_globals = SessionGlobals::new(edition); 7 ( 0.00%) SESSION_GLOBALS.set(&session_globals, f) 1 ( 0.00%) } . . #[inline] . pub fn set_session_globals_then(session_globals: &SessionGlobals, f: impl FnOnce() -> R) -> R { . assert!( . !SESSION_GLOBALS.is_set(), . "SESSION_GLOBALS should never be overwritten! \ . Use another thread if you need another SessionGlobals" . ); -- line 119 ---------------------------------------- -- line 120 ---------------------------------------- . SESSION_GLOBALS.set(session_globals, f) . } . . #[inline] . pub fn create_default_session_if_not_set_then(f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 4 ( 0.00%) create_session_if_not_set_then(edition::DEFAULT_EDITION, f) . } . . #[inline] . pub fn create_session_if_not_set_then(edition: Edition, f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 1 ( 0.00%) if !SESSION_GLOBALS.is_set() { 3 ( 0.00%) let session_globals = SessionGlobals::new(edition); 11 ( 0.00%) SESSION_GLOBALS.set(&session_globals, || SESSION_GLOBALS.with(f)) 1 ( 0.00%) } else { . SESSION_GLOBALS.with(f) . } . } . . #[inline] . pub fn with_session_globals(f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . 
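// Editorial aside: `ScopedCell::replace` (annotated earlier in this output) and
// the `SESSION_GLOBALS.set(...)` calls in this file share one idiom -- install a
// value for the duration of a closure and put the old value back afterwards,
// even if the closure panics, by performing the restore in a guard's `Drop`.
// A std-only sketch of that idiom (invented helper, not the compiler's API):
use std::cell::Cell;

fn with_replaced<T, R>(slot: &Cell<T>, new: T, f: impl FnOnce() -> R) -> R {
    struct Restore<'a, T> {
        slot: &'a Cell<T>,
        old: Option<T>,
    }
    impl<T> Drop for Restore<'_, T> {
        fn drop(&mut self) {
            // Runs on normal return and on unwind alike.
            self.slot.set(self.old.take().unwrap());
        }
    }
    let _guard = Restore { slot, old: Some(slot.replace(new)) };
    f()
}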
{ 16,494,307 ( 0.03%) SESSION_GLOBALS.with(f) . } . . #[inline] . pub fn create_default_session_globals_then(f: impl FnOnce() -> R) -> R { . create_session_globals_then(edition::DEFAULT_EDITION, f) . } . . // If this ever becomes non thread-local, `decode_syntax_context` . // and `decode_expn_id` will need to be updated to handle concurrent . // deserialization. . scoped_tls::scoped_thread_local!(static SESSION_GLOBALS: SessionGlobals); . . // FIXME: We should use this enum or something like it to get rid of the . // use of magic `/rust/1.x/...` paths across the board. 13,877 ( 0.00%) #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd)] 1,164 ( 0.00%) #[derive(Decodable)] . pub enum RealFileName { 2,762 ( 0.00%) LocalPath(PathBuf), . /// For remapped paths (namely paths into libstd that have been mapped . /// to the appropriate spot on the local host's file system, and local file . /// system paths that have been remapped with `FilePathMapping`), . Remapped { . /// `local_path` is the (host-dependent) local path to the file. This is . /// None if the file was imported from another crate . local_path: Option, . /// `virtual_name` is the stable path rustc will store internally within -- line 175 ---------------------------------------- -- line 179 ---------------------------------------- . } . . impl Hash for RealFileName { . fn hash(&self, state: &mut H) { . // To prevent #70924 from happening again we should only hash the . // remapped (virtualized) path if that exists. This is because . // virtualized paths to sysroot crates (/rust/$hash or /rust/$version) . // remain stable even if the corresponding local_path changes 3,448 ( 0.00%) self.remapped_path_if_available().hash(state) . } . } . . // This is functionally identical to #[derive(Encodable)], with the exception of . // an added assert statement . impl Encodable for RealFileName { . fn encode(&self, encoder: &mut S) -> Result<(), S::Error> { 56 ( 0.00%) encoder.emit_enum(|encoder| match *self { . RealFileName::LocalPath(ref local_path) => { 224 ( 0.00%) encoder.emit_enum_variant("LocalPath", 0, 1, |encoder| { . encoder.emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?; . Ok(()) . }) . } . . RealFileName::Remapped { ref local_path, ref virtual_name } => encoder . .emit_enum_variant("Remapped", 1, 2, |encoder| { . // For privacy and build reproducibility, we must not embed host-dependant path in artifacts -- line 205 ---------------------------------------- -- line 213 ---------------------------------------- . } . } . . impl RealFileName { . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. . pub fn local_path(&self) -> Option<&Path> { 2 ( 0.00%) match self { . RealFileName::LocalPath(p) => Some(p), . RealFileName::Remapped { local_path: p, virtual_name: _ } => { . p.as_ref().map(PathBuf::as_path) . } . } . } . . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. 2 ( 0.00%) pub fn into_local_path(self) -> Option { 2 ( 0.00%) match self { . RealFileName::LocalPath(p) => Some(p), . RealFileName::Remapped { local_path: p, virtual_name: _ } => p, . } 3 ( 0.00%) } . . /// Returns the path suitable for embedding into build artifacts. This would still . /// be a local path if it has not been remapped. 
A remapped path will not correspond . /// to a valid file system path: see `local_path_if_available()` for something that . /// is more likely to return paths into the local host file system. . pub fn remapped_path_if_available(&self) -> &Path { 2,762 ( 0.00%) match self { . RealFileName::LocalPath(p) . | RealFileName::Remapped { local_path: _, virtual_name: p } => &p, . } 29 ( 0.00%) } . . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. Otherwise returns the remapped name. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. . pub fn local_path_if_available(&self) -> &Path { 28 ( 0.00%) match self { . RealFileName::LocalPath(path) . | RealFileName::Remapped { local_path: None, virtual_name: path } . | RealFileName::Remapped { local_path: Some(path), virtual_name: _ } => path, . } . } . . pub fn to_string_lossy(&self, display_pref: FileNameDisplayPreference) -> Cow<'_, str> { 75 ( 0.00%) match display_pref { . FileNameDisplayPreference::Local => self.local_path_if_available().to_string_lossy(), . FileNameDisplayPreference::Remapped => { . self.remapped_path_if_available().to_string_lossy() . } . } . } . } . . /// Differentiates between real files and common virtual files. 78,034 ( 0.00%) #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)] 7,388 ( 0.00%) #[derive(Decodable, Encodable)] . pub enum FileName { . Real(RealFileName), . /// Call to `quote!`. 1 ( 0.00%) QuoteExpansion(u64), . /// Command line. . Anon(u64), . /// Hack in `src/librustc_ast/parse.rs`. . // FIXME(jseyfried) . MacroExpansion(u64), 1,599 ( 0.00%) ProcMacroSourceCode(u64), . /// Strings provided as `--cfg [cfgspec]` stored in a `crate_cfg`. . CfgSpec(u64), . /// Strings provided as crate attributes in the CLI. . CliCrateAttr(u64), . /// Custom sources for explicit parser calls from plugins and drivers. . Custom(String), . DocTest(PathBuf, isize), . /// Post-substitution inline assembly from LLVM. . InlineAsm(u64), . } . . impl From for FileName { 196 ( 0.00%) fn from(p: PathBuf) -> Self { 56 ( 0.00%) assert!(!p.to_string_lossy().ends_with('>')); 168 ( 0.00%) FileName::Real(RealFileName::LocalPath(p)) 196 ( 0.00%) } . } . 34 ( 0.00%) #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] . pub enum FileNameDisplayPreference { . Remapped, . Local, . } . . pub struct FileNameDisplay<'a> { . inner: &'a FileName, . display_pref: FileNameDisplayPreference, . } . . impl fmt::Display for FileNameDisplay<'_> { 112 ( 0.00%) fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { . use FileName::*; 168 ( 0.00%) match *self.inner { . Real(ref name) => { 168 ( 0.00%) write!(fmt, "{}", name.to_string_lossy(self.display_pref)) . } . QuoteExpansion(_) => write!(fmt, ""), . MacroExpansion(_) => write!(fmt, ""), . Anon(_) => write!(fmt, ""), . ProcMacroSourceCode(_) => write!(fmt, ""), . CfgSpec(_) => write!(fmt, ""), . CliCrateAttr(_) => write!(fmt, ""), . Custom(ref s) => write!(fmt, "<{}>", s), . DocTest(ref path, _) => write!(fmt, "{}", path.display()), . InlineAsm(_) => write!(fmt, ""), . } 140 ( 0.00%) } . } . . impl FileNameDisplay<'_> { 188 ( 0.00%) pub fn to_string_lossy(&self) -> Cow<'_, str> { 141 ( 0.00%) match self.inner { 47 ( 0.00%) FileName::Real(ref inner) => inner.to_string_lossy(self.display_pref), . _ => Cow::from(format!("{}", self)), . } 188 ( 0.00%) } . } . . impl FileName { . pub fn is_real(&self) -> bool { . use FileName::*; 1,226 ( 0.00%) match *self { . Real(_) => true, . Anon(_) . 
| MacroExpansion(_) . | ProcMacroSourceCode(_) . | CfgSpec(_) . | CliCrateAttr(_) . | Custom(_) . | QuoteExpansion(_) . | DocTest(_, _) . | InlineAsm(_) => false, . } . } . 47 ( 0.00%) pub fn prefer_remapped(&self) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped } 94 ( 0.00%) } . . // This may include transient local filesystem information. . // Must not be embedded in build outputs. 28 ( 0.00%) pub fn prefer_local(&self) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Local } 56 ( 0.00%) } . . pub fn display(&self, display_pref: FileNameDisplayPreference) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref } . } . . pub fn macro_expansion_source_code(src: &str) -> FileName { . let mut hasher = StableHasher::new(); . src.hash(&mut hasher); -- line 375 ---------------------------------------- -- line 377 ---------------------------------------- . } . . pub fn anon_source_code(src: &str) -> FileName { . let mut hasher = StableHasher::new(); . src.hash(&mut hasher); . FileName::Anon(hasher.finish()) . } . 7,995 ( 0.00%) pub fn proc_macro_source_code(src: &str) -> FileName { . let mut hasher = StableHasher::new(); . src.hash(&mut hasher); 3,198 ( 0.00%) FileName::ProcMacroSourceCode(hasher.finish()) 7,995 ( 0.00%) } . 5 ( 0.00%) pub fn cfg_spec_source_code(src: &str) -> FileName { . let mut hasher = StableHasher::new(); . src.hash(&mut hasher); 2 ( 0.00%) FileName::QuoteExpansion(hasher.finish()) 5 ( 0.00%) } . . pub fn cli_crate_attr_source_code(src: &str) -> FileName { . let mut hasher = StableHasher::new(); . src.hash(&mut hasher); . FileName::CliCrateAttr(hasher.finish()) . } . . pub fn doc_test_source_code(path: PathBuf, line: isize) -> FileName { -- line 403 ---------------------------------------- -- line 423 ---------------------------------------- . /// that the length of the span is equal to `span.hi - span.lo`; there may be space in the . /// [`BytePos`] range between files. . /// . /// `SpanData` is public because `Span` uses a thread-local interner and can't be . /// sent to other threads, but some pieces of performance infra run in a separate thread. . /// Using `Span` is generally preferred. . #[derive(Clone, Copy, Hash, PartialEq, Eq)] . pub struct SpanData { 14 ( 0.00%) pub lo: BytePos, 14 ( 0.00%) pub hi: BytePos, . /// Information about where the macro came from, if this piece of . /// code was created by a macro expansion. 42 ( 0.00%) pub ctxt: SyntaxContext, 14 ( 0.00%) pub parent: Option, . } . . // Order spans by position in the file. . impl Ord for SpanData { . fn cmp(&self, other: &Self) -> Ordering { . let SpanData { . lo: s_lo, . hi: s_hi, -- line 444 ---------------------------------------- -- line 485 ---------------------------------------- . } . #[inline] . pub fn with_parent(&self, parent: Option) -> Span { . Span::new(self.lo, self.hi, self.ctxt, parent) . } . /// Returns `true` if this is a dummy span with any hygienic context. . #[inline] . pub fn is_dummy(self) -> bool { 4,734,273 ( 0.01%) self.lo.0 == 0 && self.hi.0 == 0 . } . /// Returns `true` if `self` fully encloses `other`. . pub fn contains(self, other: Self) -> bool { 26,481 ( 0.00%) self.lo <= other.lo && other.hi <= self.hi . } . } . . // The interner is pointed to by a thread local value which is only set on the main thread . // with parallelization is disabled. So we don't allow `Span` to transfer between threads . 
// to avoid panics and other errors, even though it would be memory safe to do so. . #[cfg(not(parallel_compiler))] . impl !Send for Span {} . #[cfg(not(parallel_compiler))] . impl !Sync for Span {} . . impl PartialOrd for Span { 200,480 ( 0.00%) fn partial_cmp(&self, rhs: &Self) -> Option { 150,360 ( 0.00%) PartialOrd::partial_cmp(&self.data(), &rhs.data()) 200,480 ( 0.00%) } . } . impl Ord for Span { . fn cmp(&self, rhs: &Self) -> Ordering { . Ord::cmp(&self.data(), &rhs.data()) . } . } . . /// A collection of `Span`s. -- line 520 ---------------------------------------- -- line 532 ---------------------------------------- . } . . impl Span { . #[inline] . pub fn lo(self) -> BytePos { . self.data().lo . } . #[inline] 1,058,454 ( 0.00%) pub fn with_lo(self, lo: BytePos) -> Span { . self.data().with_lo(lo) 705,636 ( 0.00%) } . #[inline] 115,912 ( 0.00%) pub fn hi(self) -> BytePos { . self.data().hi 115,912 ( 0.00%) } . #[inline] 543,132 ( 0.00%) pub fn with_hi(self, hi: BytePos) -> Span { . self.data().with_hi(hi) 362,088 ( 0.00%) } . #[inline] . pub fn ctxt(self) -> SyntaxContext { . self.data_untracked().ctxt . } . #[inline] 142,965 ( 0.00%) pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span { . self.data_untracked().with_ctxt(ctxt) 85,779 ( 0.00%) } . #[inline] . pub fn parent(self) -> Option { . self.data().parent . } . #[inline] . pub fn with_parent(self, ctxt: Option) -> Span { . self.data().with_parent(ctxt) . } -- line 566 ---------------------------------------- -- line 568 ---------------------------------------- . /// Returns `true` if this is a dummy span with any hygienic context. . #[inline] . pub fn is_dummy(self) -> bool { . self.data_untracked().is_dummy() . } . . /// Returns `true` if this span comes from a macro or desugaring. . #[inline] 18 ( 0.00%) pub fn from_expansion(self) -> bool { . self.ctxt() != SyntaxContext::root() 12 ( 0.00%) } . . /// Returns `true` if `span` originates in a derive-macro's expansion. . pub fn in_derive_expansion(self) -> bool { . matches!(self.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Derive, _)) . } . . /// Gate suggestions that would not be appropriate in a context the user didn't write. . pub fn can_be_used_for_suggestions(self) -> bool { -- line 586 ---------------------------------------- -- line 600 ---------------------------------------- . /// Returns a new span representing an empty span at the beginning of this span. . #[inline] . pub fn shrink_to_lo(self) -> Span { . let span = self.data_untracked(); . span.with_hi(span.lo) . } . /// Returns a new span representing an empty span at the end of this span. . #[inline] 2,163 ( 0.00%) pub fn shrink_to_hi(self) -> Span { . let span = self.data_untracked(); . span.with_lo(span.hi) 1,442 ( 0.00%) } . . #[inline] . /// Returns `true` if `hi == lo`. . pub fn is_empty(self) -> bool { . let span = self.data_untracked(); . span.hi == span.lo . } . . /// Returns `self` if `self` is not the dummy span, and `other` otherwise. . pub fn substitute_dummy(self, other: Span) -> Span { . if self.is_dummy() { other } else { self } . } . . /// Returns `true` if `self` fully encloses `other`. 79,443 ( 0.00%) pub fn contains(self, other: Span) -> bool { . let span = self.data(); . let other = other.data(); . span.contains(other) 52,962 ( 0.00%) } . . /// Returns `true` if `self` touches `other`. . pub fn overlaps(self, other: Span) -> bool { . let span = self.data(); . let other = other.data(); . span.lo < other.hi && other.lo < span.hi . } . 
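// Editorial aside: the two predicates just above are plain half-open interval
// tests on byte positions; nothing about hygiene or files is involved.
// Standalone, with spans modelled as (lo, hi) pairs:
fn span_contains(a: (u32, u32), b: (u32, u32)) -> bool {
    a.0 <= b.0 && b.1 <= a.1
}
fn span_overlaps(a: (u32, u32), b: (u32, u32)) -> bool {
    a.0 < b.1 && b.0 < a.1
}
// Example: span_contains((10, 20), (12, 15)) is true, while
// span_overlaps((10, 20), (20, 30)) is false because the ranges are
// half-open and byte 20 belongs only to the second span.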
-- line 638 ---------------------------------------- -- line 663 ---------------------------------------- . /// The `Span` for the tokens in the previous macro expansion from which `self` was generated, . /// if any. . pub fn parent_callsite(self) -> Option { . let expn_data = self.ctxt().outer_expn_data(); . if !expn_data.is_root() { Some(expn_data.call_site) } else { None } . } . . /// Walk down the expansion ancestors to find a span that's contained within `outer`. 37,660 ( 0.00%) pub fn find_ancestor_inside(mut self, outer: Span) -> Option { 18,830 ( 0.00%) while !outer.contains(self) { . self = self.parent_callsite()?; . } . Some(self) 41,426 ( 0.00%) } . . /// Edition of the crate from which this span came. 263,493 ( 0.00%) pub fn edition(self) -> edition::Edition { . self.ctxt().edition() 175,662 ( 0.00%) } . . #[inline] . pub fn rust_2015(self) -> bool { 77,699 ( 0.00%) self.edition() == edition::Edition::Edition2015 . } . . #[inline] . pub fn rust_2018(self) -> bool { 3,968 ( 0.00%) self.edition() >= edition::Edition::Edition2018 . } . . #[inline] . pub fn rust_2021(self) -> bool { 8,861 ( 0.00%) self.edition() >= edition::Edition::Edition2021 . } . . /// Returns the source callee. . /// . /// Returns `None` if the supplied span has no expansion trace, . /// else returns the `ExpnData` for the macro definition . /// corresponding to the source callsite. . pub fn source_callee(self) -> Option { -- line 703 ---------------------------------------- -- line 707 ---------------------------------------- . } . let expn_data = self.ctxt().outer_expn_data(); . if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None } . } . . /// Checks if a span is "internal" to a macro in which `#[unstable]` . /// items can be used (that is, a macro marked with . /// `#[allow_internal_unstable]`). 11,640 ( 0.00%) pub fn allows_unstable(self, feature: Symbol) -> bool { 2,328 ( 0.00%) self.ctxt() . .outer_expn_data() . .allow_internal_unstable . .map_or(false, |features| features.iter().any(|&f| f == feature)) 9,312 ( 0.00%) } . . /// Checks if this span arises from a compiler desugaring of kind `kind`. 351,239 ( 0.00%) pub fn is_desugaring(self, kind: DesugaringKind) -> bool { 301,062 ( 0.00%) match self.ctxt().outer_expn_data().kind { . ExpnKind::Desugaring(k) => k == kind, . _ => false, . } 250,885 ( 0.00%) } . . /// Returns the compiler desugaring that created this span, or `None` . /// if this span is not from a desugaring. 5,060 ( 0.00%) pub fn desugaring_kind(self) -> Option { 6,072 ( 0.00%) match self.ctxt().outer_expn_data().kind { . ExpnKind::Desugaring(k) => Some(k), . _ => None, . } 4,048 ( 0.00%) } . . /// Checks if a span is "internal" to a macro in which `unsafe` . /// can be used without triggering the `unsafe_code` lint. . // (that is, a macro marked with `#[allow_internal_unsafe]`). 4 ( 0.00%) pub fn allows_unsafe(self) -> bool { 1 ( 0.00%) self.ctxt().outer_expn_data().allow_internal_unsafe 4 ( 0.00%) } . . pub fn macro_backtrace(mut self) -> impl Iterator { . let mut prev_span = DUMMY_SP; . std::iter::from_fn(move || { . loop { . let expn_data = self.ctxt().outer_expn_data(); . if expn_data.is_root() { . return None; -- line 752 ---------------------------------------- -- line 767 ---------------------------------------- . . /// Returns a `Span` that would enclose both `self` and `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^^^^^^^^ . /// ``` 7,806,271 ( 0.01%) pub fn to(self, end: Span) -> Span { . let span_data = self.data(); . 
let end_data = end.data(); . // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). . // Return the macro span on its own to avoid weird diagnostic output. It is preferable to . // have an incomplete span than a completely nonsensical one. 1,394,859 ( 0.00%) if span_data.ctxt != end_data.ctxt { 24,463 ( 0.00%) if span_data.ctxt == SyntaxContext::root() { . return end; 17,923 ( 0.00%) } else if end_data.ctxt == SyntaxContext::root() { . return self; . } . // Both spans fall within a macro. . // FIXME(estebank): check if it is the *same* macro. . } . Span::new( . cmp::min(span_data.lo, end_data.lo), . cmp::max(span_data.hi, end_data.hi), . if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, 4,826,871 ( 0.01%) if span_data.parent == end_data.parent { span_data.parent } else { None }, . ) 6,386,949 ( 0.01%) } . . /// Returns a `Span` between the end of `self` to the beginning of `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^ . /// ``` 18,997 ( 0.00%) pub fn between(self, end: Span) -> Span { . let span = self.data(); . let end = end.data(); . Span::new( . span.hi, . end.lo, . if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt }, 12,089 ( 0.00%) if span.parent == end.parent { span.parent } else { None }, . ) 13,816 ( 0.00%) } . . /// Returns a `Span` from the beginning of `self` until the beginning of `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^^^^^ . /// ``` 43,384 ( 0.00%) pub fn until(self, end: Span) -> Span { . // Most of this function's body is copied from `to`. . // We can't just do `self.to(end.shrink_to_lo())`, . // because to also does some magic where it uses min/max so . // it can handle overlapping spans. Some advanced mis-use of . // `until` with different ctxts makes this visible. . let span_data = self.data(); . let end_data = end.data(); . // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). . // Return the macro span on its own to avoid weird diagnostic output. It is preferable to . // have an incomplete span than a completely nonsensical one. 7,888 ( 0.00%) if span_data.ctxt != end_data.ctxt { . if span_data.ctxt == SyntaxContext::root() { . return end; . } else if end_data.ctxt == SyntaxContext::root() { . return self; . } . // Both spans fall within a macro. . // FIXME(estebank): check if it is the *same* macro. . } . Span::new( . span_data.lo, . end_data.lo, . if end_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, 27,608 ( 0.00%) if span_data.parent == end_data.parent { span_data.parent } else { None }, . ) 35,496 ( 0.00%) } . 374 ( 0.00%) pub fn from_inner(self, inner: InnerSpan) -> Span { . let span = self.data(); . Span::new( . span.lo + BytePos::from_usize(inner.start), . span.lo + BytePos::from_usize(inner.end), . span.ctxt, . span.parent, . ) 238 ( 0.00%) } . . /// Equivalent of `Span::def_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span { 19,966 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::Opaque) . } . . /// Equivalent of `Span::call_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_call_site_ctxt(self, expn_id: ExpnId) -> Span { 1,322 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::Transparent) . } . . 
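// Editorial aside: ignoring the hygiene-context special cases handled above,
// `to`, `between`, and `until` are simple combinators over (lo, hi) byte
// positions. Standalone sketch with invented names:
fn join_to(a: (u32, u32), b: (u32, u32)) -> (u32, u32) {
    (a.0.min(b.0), a.1.max(b.1)) // smallest span enclosing both
}
fn join_between(a: (u32, u32), b: (u32, u32)) -> (u32, u32) {
    (a.1, b.0) // the gap from the end of `a` to the start of `b`
}
fn join_until(a: (u32, u32), b: (u32, u32)) -> (u32, u32) {
    (a.0, b.0) // from the start of `a` up to (not including) `b`
}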
/// Equivalent of `Span::mixed_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_mixed_site_ctxt(self, expn_id: ExpnId) -> Span { 1,320 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::SemiTransparent) . } . . /// Produces a span with the same location as `self` and context produced by a macro with the . /// given ID and transparency, assuming that macro was defined directly and not produced by . /// some other macro (which is the case for built-in and procedural macros). 135,648 ( 0.00%) pub fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { . self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency)) 79,128 ( 0.00%) } . . #[inline] . pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { . let span = self.data(); 1,281,681 ( 0.00%) span.with_ctxt(span.ctxt.apply_mark(expn_id, transparency)) . } . . #[inline] . pub fn remove_mark(&mut self) -> ExpnId { 37,900 ( 0.00%) let mut span = self.data(); 37,900 ( 0.00%) let mark = span.ctxt.remove_mark(); 151,600 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] . pub fn adjust(&mut self, expn_id: ExpnId) -> Option { . let mut span = self.data(); . let mark = span.ctxt.adjust(expn_id); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] 1,449,675 ( 0.00%) pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option { 966,450 ( 0.00%) let mut span = self.data(); 792,547 ( 0.00%) let mark = span.ctxt.normalize_to_macros_2_0_and_adjust(expn_id); 1,449,675 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark 1,597,922 ( 0.00%) } . . #[inline] . pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option> { 11,688 ( 0.00%) let mut span = self.data(); 17,532 ( 0.00%) let mark = span.ctxt.glob_adjust(expn_id, glob_span); 46,752 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] 21,186 ( 0.00%) pub fn reverse_glob_adjust( . &mut self, . expn_id: ExpnId, . glob_span: Span, . ) -> Option> { 14,688 ( 0.00%) let mut span = self.data(); 12,898 ( 0.00%) let mark = span.ctxt.reverse_glob_adjust(expn_id, glob_span); 23,442 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark 23,540 ( 0.00%) } . . #[inline] 1,576,696 ( 0.00%) pub fn normalize_to_macros_2_0(self) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.normalize_to_macros_2_0()) 1,379,609 ( 0.00%) } . . #[inline] . pub fn normalize_to_macro_rules(self) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.normalize_to_macro_rules()) . } . } . -- line 948 ---------------------------------------- -- line 958 ---------------------------------------- . . /// What label should we attach to this span (if any)? . pub label: Option, . } . . impl Default for Span { . fn default() -> Self { . DUMMY_SP 2 ( 0.00%) } . } . . impl Encodable for Span { . default fn encode(&self, s: &mut E) -> Result<(), E::Error> { . let span = self.data(); . s.emit_struct(false, |s| { . s.emit_struct_field("lo", true, |s| span.lo.encode(s))?; . s.emit_struct_field("hi", false, |s| span.hi.encode(s)) -- line 974 ---------------------------------------- -- line 990 ---------------------------------------- . /// any spans that are debug-printed during the closure's execution. . /// . /// Normally, the global `TyCtxt` is used to retrieve the `SourceMap` . 
/// (see `rustc_interface::callbacks::span_debug1`). However, some parts . /// of the compiler (e.g. `rustc_parse`) may debug-print `Span`s before . /// a `TyCtxt` is available. In this case, we fall back to . /// the `SourceMap` provided to this function. If that is not available, . /// we fall back to printing the raw `Span` field values. 9 ( 0.00%) pub fn with_source_map T>(source_map: Lrc, f: F) -> T { . with_session_globals(|session_globals| { 2 ( 0.00%) *session_globals.source_map.borrow_mut() = Some(source_map); . }); . struct ClearSourceMap; . impl Drop for ClearSourceMap { . fn drop(&mut self) { . with_session_globals(|session_globals| { 1 ( 0.00%) session_globals.source_map.borrow_mut().take(); . }); . } . } . . let _guard = ClearSourceMap; 4 ( 0.00%) f() 8 ( 0.00%) } . . pub fn debug_with_source_map( . span: Span, . f: &mut fmt::Formatter<'_>, . source_map: &SourceMap, . ) -> fmt::Result { . write!(f, "{} ({:?})", source_map.span_to_diagnostic_string(span), span.ctxt()) . } -- line 1021 ---------------------------------------- -- line 1048 ---------------------------------------- . . impl MultiSpan { . #[inline] . pub fn new() -> MultiSpan { . MultiSpan { primary_spans: vec![], span_labels: vec![] } . } . . pub fn from_span(primary_span: Span) -> MultiSpan { 188,075 ( 0.00%) MultiSpan { primary_spans: vec![primary_span], span_labels: vec![] } . } . . pub fn from_spans(mut vec: Vec) -> MultiSpan { . vec.sort(); 5,965 ( 0.00%) MultiSpan { primary_spans: vec, span_labels: vec![] } . } . . pub fn push_span_label(&mut self, span: Span, label: String) { . self.span_labels.push((span, label)); . } . . /// Selects the first primary span (if any). . pub fn primary_span(&self) -> Option { . self.primary_spans.first().cloned() 9,337 ( 0.00%) } . . /// Returns all primary spans. . pub fn primary_spans(&self) -> &[Span] { . &self.primary_spans 1 ( 0.00%) } . . /// Returns `true` if any of the primary spans are displayable. . pub fn has_primary_spans(&self) -> bool { . self.primary_spans.iter().any(|sp| !sp.is_dummy()) . } . . /// Returns `true` if this contains only a dummy primary span with any hygienic context. . pub fn is_dummy(&self) -> bool { -- line 1084 ---------------------------------------- -- line 1139 ---------------------------------------- . . /// Returns `true` if any of the span labels is displayable. . pub fn has_span_labels(&self) -> bool { . self.span_labels.iter().any(|(sp, _)| !sp.is_dummy()) . } . } . . impl From for MultiSpan { 188,075 ( 0.00%) fn from(span: Span) -> MultiSpan { . MultiSpan::from_span(span) 188,075 ( 0.00%) } . } . . impl From> for MultiSpan { 5,965 ( 0.00%) fn from(spans: Vec) -> MultiSpan { 4,772 ( 0.00%) MultiSpan::from_spans(spans) 5,965 ( 0.00%) } . } . . /// Identifies an offset of a multi-byte character in a `SourceFile`. 88 ( 0.00%) #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub struct MultiByteChar { . /// The absolute offset of the character in the `SourceMap`. . pub pos: BytePos, . /// The number of bytes, `>= 2`. . pub bytes: u8, . } . . /// Identifies an offset of a non-narrow character in a `SourceFile`. 325 ( 0.00%) #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub enum NonNarrowChar { . /// Represents a zero-width character. . ZeroWidth(BytePos), . /// Represents a wide (full-width) character. . Wide(BytePos), . /// Represents a tab character, represented visually with a width of 4 characters. . Tab(BytePos), . } . . impl NonNarrowChar { . 
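// Editorial aside: the widths recorded below (0 for zero-width, 2 for wide,
// 4 for tab) let a display column be computed without re-scanning the line:
// every character not listed counts as one column. Illustrative helper with
// an invented name:
fn display_columns(chars_on_line: usize, recorded_widths: &[usize]) -> usize {
    // Narrow characters contribute 1 column each; the recorded exceptions
    // contribute their stored width instead.
    chars_on_line - recorded_widths.len() + recorded_widths.iter().sum::<usize>()
}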
fn new(pos: BytePos, width: usize) -> Self { 2 ( 0.00%) match width { . 0 => NonNarrowChar::ZeroWidth(pos), . 2 => NonNarrowChar::Wide(pos), . 4 => NonNarrowChar::Tab(pos), . _ => panic!("width {} given for non-narrow character", width), . } . } . . /// Returns the absolute offset of the character in the `SourceMap`. -- line 1188 ---------------------------------------- -- line 1201 ---------------------------------------- . } . } . } . . impl Add for NonNarrowChar { . type Output = Self; . . fn add(self, rhs: BytePos) -> Self { 544 ( 0.00%) match self { . NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs), . NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs), . NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos + rhs), . } . } . } . . impl Sub for NonNarrowChar { . type Output = Self; . 158 ( 0.00%) fn sub(self, rhs: BytePos) -> Self { 544 ( 0.00%) match self { . NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs), . NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs), . NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos - rhs), . } 474 ( 0.00%) } . } . . /// Identifies an offset of a character that was normalized away from `SourceFile`. . #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub struct NormalizedPos { . /// The absolute offset of the character in the `SourceMap`. . pub pos: BytePos, . /// The difference between original and normalized string at position. . pub diff: u32, . } . 84 ( 0.00%) #[derive(PartialEq, Eq, Clone, Debug)] . pub enum ExternalSource { . /// No external source has to be loaded, since the `SourceFile` represents a local crate. . Unneeded, . Foreign { . kind: ExternalSourceKind, . /// This SourceFile's byte-offset within the source_map of its original crate. . original_start_pos: BytePos, . /// The end of this SourceFile within the source_map of its original crate. -- line 1246 ---------------------------------------- -- line 1257 ---------------------------------------- . AbsentOk, . /// A failed attempt has been made to load the external source. . AbsentErr, . Unneeded, . } . . impl ExternalSource { . pub fn get_source(&self) -> Option<&Lrc> { 796 ( 0.00%) match self { . ExternalSource::Foreign { kind: ExternalSourceKind::Present(ref src), .. } => Some(src), . _ => None, . } . } . } . . #[derive(Debug)] . pub struct OffsetOverflowError; . 1,180 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] . pub enum SourceFileHashAlgorithm { . Md5, . Sha1, . Sha256, . } . . impl FromStr for SourceFileHashAlgorithm { . type Err = (); -- line 1283 ---------------------------------------- -- line 1290 ---------------------------------------- . _ => Err(()), . } . } . } . . rustc_data_structures::impl_stable_hash_via_hash!(SourceFileHashAlgorithm); . . /// The hash of the on-disk source file used for debug info. 168 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Debug)] 2,360 ( 0.00%) #[derive(HashStable_Generic, Encodable, Decodable)] . pub struct SourceFileHash { . pub kind: SourceFileHashAlgorithm, . value: [u8; 32], . } . . impl SourceFileHash { . pub fn new(kind: SourceFileHashAlgorithm, src: &str) -> SourceFileHash { 1 ( 0.00%) let mut hash = SourceFileHash { kind, value: Default::default() }; . let len = hash.hash_len(); . let value = &mut hash.value[..len]; . let data = src.as_bytes(); . match kind { . SourceFileHashAlgorithm::Md5 => { 126 ( 0.00%) value.copy_from_slice(&Md5::digest(data)); . } . SourceFileHashAlgorithm::Sha1 => { . 
value.copy_from_slice(&Sha1::digest(data)); . } . SourceFileHashAlgorithm::Sha256 => { . value.copy_from_slice(&Sha256::digest(data)); . } . } . hash . } . . /// Check if the stored hash matches the hash of the string. . pub fn matches(&self, src: &str) -> bool { 1 ( 0.00%) Self::new(self.kind, src) == *self . } . . /// The bytes of the hash. . pub fn hash_bytes(&self) -> &[u8] { . let len = self.hash_len(); . &self.value[..len] . } . . fn hash_len(&self) -> usize { 125 ( 0.00%) match self.kind { . SourceFileHashAlgorithm::Md5 => 16, . SourceFileHashAlgorithm::Sha1 => 20, . SourceFileHashAlgorithm::Sha256 => 32, . } . } . } . . /// A single source in the [`SourceMap`]. 1,624 ( 0.00%) #[derive(Clone)] . pub struct SourceFile { . /// The name of the file that the source came from. Source that doesn't . /// originate from files has names between angle brackets by convention . /// (e.g., ``). . pub name: FileName, . /// The complete source code. 28 ( 0.00%) pub src: Option>, . /// The source code's hash. . pub src_hash: SourceFileHash, . /// The external source code (used for external crates, which will have a `None` . /// value as `self.src`. . pub external_src: Lock, . /// The start position of this source in the `SourceMap`. . pub start_pos: BytePos, . /// The end position of this source in the `SourceMap`. -- line 1361 ---------------------------------------- -- line 1364 ---------------------------------------- . pub lines: Vec, . /// Locations of multi-byte characters in the source code. . pub multibyte_chars: Vec, . /// Width of characters that are not narrow in the source code. . pub non_narrow_chars: Vec, . /// Locations of characters removed during normalization. . pub normalized_pos: Vec, . /// A hash of the filename, used for speeding up hashing in incremental compilation. 28 ( 0.00%) pub name_hash: u128, . /// Indicates which crate this `SourceFile` was imported from. 28 ( 0.00%) pub cnum: CrateNum, . } . . impl Encodable for SourceFile { . fn encode(&self, s: &mut S) -> Result<(), S::Error> { . s.emit_struct(false, |s| { . s.emit_struct_field("name", true, |s| self.name.encode(s))?; . s.emit_struct_field("src_hash", false, |s| self.src_hash.encode(s))?; . s.emit_struct_field("start_pos", false, |s| self.start_pos.encode(s))?; . s.emit_struct_field("end_pos", false, |s| self.end_pos.encode(s))?; . s.emit_struct_field("lines", false, |s| { . let lines = &self.lines[..]; . // Store the length. . s.emit_u32(lines.len() as u32)?; . 112 ( 0.00%) if !lines.is_empty() { . // In order to preserve some space, we exploit the fact that . // the lines list is sorted and individual lines are . // probably not that long. Because of that we can store lines . // as a difference list, using as little space as possible . // for the differences. . let max_line_length = if lines.len() == 1 { . 0 . } else { -- line 1397 ---------------------------------------- -- line 1399 ---------------------------------------- . .array_windows() . .map(|&[fst, snd]| snd - fst) . .map(|bp| bp.to_usize()) . .max() . .unwrap() . }; . . let bytes_per_diff: u8 = match max_line_length { 56 ( 0.00%) 0..=0xFF => 1, 4 ( 0.00%) 0x100..=0xFFFF => 2, . _ => 4, . }; . . // Encode the number of bytes used per diff. . bytes_per_diff.encode(s)?; . . // Encode the first element. . lines[0].encode(s)?; . . let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst); . 58 ( 0.00%) match bytes_per_diff { . 1 => { . for diff in diff_iter { . (diff.0 as u8).encode(s)? . } . } . 2 => { . for diff in diff_iter { . 
(diff.0 as u16).encode(s)? -- line 1428 ---------------------------------------- -- line 1436 ---------------------------------------- . _ => unreachable!(), . } . } . . Ok(()) . })?; . s.emit_struct_field("multibyte_chars", false, |s| self.multibyte_chars.encode(s))?; . s.emit_struct_field("non_narrow_chars", false, |s| self.non_narrow_chars.encode(s))?; 84 ( 0.00%) s.emit_struct_field("name_hash", false, |s| self.name_hash.encode(s))?; . s.emit_struct_field("normalized_pos", false, |s| self.normalized_pos.encode(s))?; 308 ( 0.00%) s.emit_struct_field("cnum", false, |s| self.cnum.encode(s)) . }) . } . } . . impl Decodable for SourceFile { 5,310 ( 0.00%) fn decode(d: &mut D) -> SourceFile { . d.read_struct(|d| { . let name: FileName = d.read_struct_field("name", |d| Decodable::decode(d)); . let src_hash: SourceFileHash = . d.read_struct_field("src_hash", |d| Decodable::decode(d)); . let start_pos: BytePos = d.read_struct_field("start_pos", |d| Decodable::decode(d)); . let end_pos: BytePos = d.read_struct_field("end_pos", |d| Decodable::decode(d)); . let lines: Vec = d.read_struct_field("lines", |d| { . let num_lines: u32 = Decodable::decode(d); 590 ( 0.00%) let mut lines = Vec::with_capacity(num_lines as usize); . . if num_lines > 0 { . // Read the number of bytes used per diff. . let bytes_per_diff: u8 = Decodable::decode(d); . . // Read the first element. . let mut line_start: BytePos = Decodable::decode(d); . lines.push(line_start); . . for _ in 1..num_lines { 965,948 ( 0.00%) let diff = match bytes_per_diff { . 1 => d.read_u8() as u32, . 2 => d.read_u16() as u32, . 4 => d.read_u32(), . _ => unreachable!(), . }; . . line_start = line_start + BytePos(diff); . -- line 1480 ---------------------------------------- -- line 1483 ---------------------------------------- . } . . lines . }); . let multibyte_chars: Vec = . d.read_struct_field("multibyte_chars", |d| Decodable::decode(d)); . let non_narrow_chars: Vec = . d.read_struct_field("non_narrow_chars", |d| Decodable::decode(d)); 1,180 ( 0.00%) let name_hash: u128 = d.read_struct_field("name_hash", |d| Decodable::decode(d)); . let normalized_pos: Vec = . d.read_struct_field("normalized_pos", |d| Decodable::decode(d)); . let cnum: CrateNum = d.read_struct_field("cnum", |d| Decodable::decode(d)); 6,490 ( 0.00%) SourceFile { 4,720 ( 0.00%) name, . start_pos, . end_pos, . src: None, 2,360 ( 0.00%) src_hash, . // Unused - the metadata decoder will construct . // a new SourceFile, filling in `external_src` properly . external_src: Lock::new(ExternalSource::Unneeded), 2,360 ( 0.00%) lines, 2,360 ( 0.00%) multibyte_chars, 2,360 ( 0.00%) non_narrow_chars, 2,360 ( 0.00%) normalized_pos, . name_hash, . cnum, . } . }) 5,310 ( 0.00%) } . } . . impl fmt::Debug for SourceFile { . fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(fmt, "SourceFile({:?})", self.name) . } . } . . impl SourceFile { 451 ( 0.00%) pub fn new( . name: FileName, . mut src: String, . start_pos: BytePos, . hash_kind: SourceFileHashAlgorithm, . ) -> Self { . // Compute the file hash before any normalization. . let src_hash = SourceFileHash::new(hash_kind, &src); 123 ( 0.00%) let normalized_pos = normalize_src(&mut src, start_pos); . . let name_hash = { . let mut hasher: StableHasher = StableHasher::new(); 82 ( 0.00%) name.hash(&mut hasher); . hasher.finish::() . }; 82 ( 0.00%) let end_pos = start_pos.to_usize() + src.len(); 123 ( 0.00%) assert!(end_pos <= u32::MAX as usize); . 
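The `lines` table in the listing above is serialized as a difference list: the first absolute offset is written out, and every following line start is stored as a delta whose width (1, 2, or 4 bytes) is picked from the largest line length, then re-accumulated on decode. A minimal self-contained sketch of that scheme, using plain `u32` offsets and a `Vec<u8>` in place of the real encoder and decoder (names and wire layout here are illustrative only, not the rustc API):

use std::convert::TryInto;

// Encode sorted line-start offsets as [bytes_per_diff: u8][first: u32 LE][diffs ...].
fn encode_lines(lines: &[u32]) -> Vec<u8> {
    let mut out = Vec::new();
    if lines.is_empty() {
        return out;
    }
    let max_diff = lines.windows(2).map(|w| w[1] - w[0]).max().unwrap_or(0);
    let bytes_per_diff: usize = match max_diff {
        0..=0xFF => 1,
        0x100..=0xFFFF => 2,
        _ => 4,
    };
    out.push(bytes_per_diff as u8);
    out.extend_from_slice(&lines[0].to_le_bytes());
    for w in lines.windows(2) {
        let diff = w[1] - w[0];
        // Little-endian, so the low-order bytes carry the whole (small) diff.
        out.extend_from_slice(&diff.to_le_bytes()[..bytes_per_diff]);
    }
    out
}

// Decode by accumulating the diffs back onto the first absolute offset.
fn decode_lines(buf: &[u8], num_lines: usize) -> Vec<u32> {
    if num_lines == 0 {
        return Vec::new();
    }
    let bytes_per_diff = buf[0] as usize;
    let mut pos = u32::from_le_bytes(buf[1..5].try_into().unwrap());
    let mut lines = Vec::with_capacity(num_lines);
    lines.push(pos);
    let mut off = 5;
    for _ in 1..num_lines {
        let mut diff = [0u8; 4];
        diff[..bytes_per_diff].copy_from_slice(&buf[off..off + bytes_per_diff]);
        pos += u32::from_le_bytes(diff);
        lines.push(pos);
        off += bytes_per_diff;
    }
    lines
}

fn main() {
    let lines = vec![0u32, 17, 40, 41, 300];
    let encoded = encode_lines(&lines);
    assert_eq!(decode_lines(&encoded, lines.len()), lines);
    println!("{} line starts in {} bytes", lines.len(), encoded.len());
}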
492 ( 0.00%) let (lines, multibyte_chars, non_narrow_chars) = 82 ( 0.00%) analyze_source_file::analyze_source_file(&src, start_pos); . 697 ( 0.00%) SourceFile { . name, . src: Some(Lrc::new(src)), 451 ( 0.00%) src_hash, . external_src: Lock::new(ExternalSource::Unneeded), . start_pos, . end_pos: Pos::from_usize(end_pos), 164 ( 0.00%) lines, 164 ( 0.00%) multibyte_chars, 164 ( 0.00%) non_narrow_chars, 164 ( 0.00%) normalized_pos, . name_hash, . cnum: LOCAL_CRATE, . } 369 ( 0.00%) } . . /// Returns the `BytePos` of the beginning of the current line. . pub fn line_begin_pos(&self, pos: BytePos) -> BytePos { . let line_index = self.lookup_line(pos).unwrap(); . self.lines[line_index] . } . . /// Add externally loaded source. . /// If the hash of the input doesn't match or no input is supplied via None, . /// it is interpreted as an error and the corresponding enum variant is set. . /// The return value signifies whether some kind of source is present. 101,892 ( 0.00%) pub fn add_external_src(&self, get_src: F) -> bool . where . F: FnOnce() -> Option, . { 14,672 ( 0.00%) if matches!( 29,112 ( 0.00%) *self.external_src.borrow(), . ExternalSource::Foreign { kind: ExternalSourceKind::AbsentOk, .. } . ) { 1 ( 0.00%) let src = get_src(); . let mut external_src = self.external_src.borrow_mut(); . // Check that no-one else have provided the source while we were getting it 3 ( 0.00%) if let ExternalSource::Foreign { . kind: src_kind @ ExternalSourceKind::AbsentOk, .. . } = &mut *external_src . { 5 ( 0.00%) if let Some(mut src) = src { . // The src_hash needs to be computed on the pre-normalized src. 1 ( 0.00%) if self.src_hash.matches(&src) { 3 ( 0.00%) normalize_src(&mut src, BytePos::from_usize(0)); 8 ( 0.00%) *src_kind = ExternalSourceKind::Present(Lrc::new(src)); . return true; . } . } else { . *src_kind = ExternalSourceKind::AbsentErr; . } . . false . } else { . self.src.is_some() || external_src.get_source().is_some() . } . } else { 14,555 ( 0.00%) self.src.is_some() || self.external_src.borrow().get_source().is_some() . } 131,004 ( 0.00%) } . . /// Gets a line from the list of pre-computed line-beginnings. . /// The line number here is 0-based. . pub fn get_line(&self, line_number: usize) -> Option> { . fn get_until_newline(src: &str, begin: usize) -> &str { . // We can't use `lines.get(line_number+1)` because we might . // be parsing when we call this function and thus the current . // line is the last one we have line info for. -- line 1610 ---------------------------------------- -- line 1627 ---------------------------------------- . Some(Cow::Owned(String::from(get_until_newline(src, begin)))) . } else { . None . } . } . . pub fn is_real_file(&self) -> bool { . self.name.is_real() 613 ( 0.00%) } . . pub fn is_imported(&self) -> bool { . self.src.is_none() 661,976 ( 0.00%) } . . pub fn count_lines(&self) -> usize { . self.lines.len() . } . . /// Finds the line containing the given position. The return value is the . /// index into the `lines` array of this `SourceFile`, not the 1-based line . /// number. If the source_file is empty or the position is located before the . /// first line, `None` is returned. . pub fn lookup_line(&self, pos: BytePos) -> Option { 72,790 ( 0.00%) match self.lines.binary_search(&pos) { . Ok(idx) => Some(idx), . Err(0) => None, . Err(idx) => Some(idx - 1), . } . } . . pub fn line_bounds(&self, line_index: usize) -> Range { 188,457 ( 0.00%) if self.is_empty() { . return self.start_pos..self.end_pos; . } . 
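`lookup_line` in the listing above resolves a byte offset to a line index by binary-searching the sorted table of line-start positions and stepping back one slot on a miss. A tiny standalone model of that lookup (plain `u32` offsets, hypothetical names):

/// Returns the 0-based index of the line containing `pos`, or `None` if `pos`
/// precedes the first recorded line start.
fn lookup_line(line_starts: &[u32], pos: u32) -> Option<usize> {
    match line_starts.binary_search(&pos) {
        // `pos` is exactly a line start: that line contains it.
        Ok(idx) => Some(idx),
        // `pos` sorts before the first line start.
        Err(0) => None,
        // Otherwise it belongs to the previous line.
        Err(idx) => Some(idx - 1),
    }
}

fn main() {
    let line_starts = [0u32, 10, 25]; // three lines starting at bytes 0, 10, 25
    assert_eq!(lookup_line(&line_starts, 0), Some(0));
    assert_eq!(lookup_line(&line_starts, 9), Some(0));
    assert_eq!(lookup_line(&line_starts, 10), Some(1));
    assert_eq!(lookup_line(&line_starts, 999), Some(2));
}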
124,582 ( 0.00%) assert!(line_index < self.lines.len()); 184,443 ( 0.00%) if line_index == (self.lines.len() - 1) { 162 ( 0.00%) self.lines[line_index]..self.end_pos . } else { 122,830 ( 0.00%) self.lines[line_index]..self.lines[line_index + 1] . } . } . . /// Returns whether or not the file contains the given `SourceMap` byte . /// position. The position one past the end of the file is considered to be . /// contained by the file. This implies that files for which `is_empty` . /// returns true still contain one byte position according to this function. . #[inline] -- line 1674 ---------------------------------------- -- line 1692 ---------------------------------------- . Err(i) if i == 0 => 0, . Err(i) => self.normalized_pos[i - 1].diff, . }; . . BytePos::from_u32(pos.0 - self.start_pos.0 + diff) . } . . /// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`. 11,144 ( 0.00%) pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { . // The number of extra bytes due to multibyte chars in the `SourceFile`. . let mut total_extra_bytes = 0; . 33,432 ( 0.00%) for mbc in self.multibyte_chars.iter() { . debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); 3,368 ( 0.00%) if mbc.pos < bpos { . // Every character is at least one byte, so we only . // count the actual extra bytes. 8,208 ( 0.00%) total_extra_bytes += mbc.bytes as u32 - 1; . // We should never see a byte position in the middle of a . // character. 6,840 ( 0.00%) assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); . } else { . break; . } . } . 66,864 ( 0.00%) assert!(self.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); 55,720 ( 0.00%) CharPos(bpos.to_usize() - self.start_pos.to_usize() - total_extra_bytes as usize) 22,288 ( 0.00%) } . . /// Looks up the file's (1-based) line number and (0-based `CharPos`) column offset, for a . /// given `BytePos`. 39,004 ( 0.00%) pub fn lookup_file_pos(&self, pos: BytePos) -> (usize, CharPos) { 11,144 ( 0.00%) let chpos = self.bytepos_to_file_charpos(pos); 5,561 ( 0.00%) match self.lookup_line(pos) { . Some(a) => { . let line = a + 1; // Line numbers start at 1 5,572 ( 0.00%) let linebpos = self.lines[a]; 11,144 ( 0.00%) let linechpos = self.bytepos_to_file_charpos(linebpos); . let col = chpos - linechpos; . debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); . debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos); . debug!("byte is on line: {}", line); 5,572 ( 0.00%) assert!(chpos >= linechpos); . (line, col) . } . None => (0, chpos), . } 44,576 ( 0.00%) } . . /// Looks up the file's (1-based) line number, (0-based `CharPos`) column offset, and (0-based) . /// column offset when displayed, for a given `BytePos`. 44,576 ( 0.00%) pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) { 16,716 ( 0.00%) let (line, col_or_chpos) = self.lookup_file_pos(pos); 11,144 ( 0.00%) if line > 0 { . let col = col_or_chpos; 5,572 ( 0.00%) let linebpos = self.lines[line - 1]; . let col_display = { . let start_width_idx = self . .non_narrow_chars . .binary_search_by_key(&linebpos, |x| x.pos()) . .unwrap_or_else(|x| x); . let end_width_idx = self . .non_narrow_chars . .binary_search_by_key(&pos, |x| x.pos()) . .unwrap_or_else(|x| x); 672 ( 0.00%) let special_chars = end_width_idx - start_width_idx; . let non_narrow: usize = self.non_narrow_chars[start_width_idx..end_width_idx] . .iter() . .map(|x| x.width()) . .sum(); 27,860 ( 0.00%) col.0 - special_chars + non_narrow . }; . 
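`bytepos_to_file_charpos` above turns an absolute byte offset into a character offset by subtracting the surplus bytes of every multi-byte UTF-8 character that precedes it. A simplified, file-relative sketch of that adjustment (the `(byte_pos, byte_len)` pairs stand in for the real `MultiByteChar` records, and the file's `start_pos` handling is omitted):

/// `multibyte` lists (byte offset, encoded length in bytes) for every character
/// wider than one byte, sorted by offset.
fn byte_to_char_pos(bpos: u32, multibyte: &[(u32, u8)]) -> u32 {
    let mut extra = 0u32;
    for &(pos, bytes) in multibyte {
        if pos < bpos {
            // Every char is at least one byte wide, so only the surplus counts.
            extra += bytes as u32 - 1;
        } else {
            break; // the list is sorted, nothing further can precede `bpos`
        }
    }
    bpos - extra
}

fn main() {
    // "héllo": 'é' occupies 2 bytes at byte offset 1.
    let multibyte = [(1u32, 2u8)];
    assert_eq!(byte_to_char_pos(0, &multibyte), 0); // 'h'
    assert_eq!(byte_to_char_pos(3, &multibyte), 2); // first 'l'
    assert_eq!(byte_to_char_pos(6, &multibyte), 5); // one past the end
}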
(line, col, col_display) . } else { . let chpos = col_or_chpos; . let col_display = { . let end_width_idx = self . .non_narrow_chars . .binary_search_by_key(&pos, |x| x.pos()) . .unwrap_or_else(|x| x); . let non_narrow: usize = . self.non_narrow_chars[0..end_width_idx].iter().map(|x| x.width()).sum(); . chpos.0 - end_width_idx + non_narrow . }; . (0, chpos, col_display) . } 39,004 ( 0.00%) } . } . . /// Normalizes the source code and records the normalizations. 420 ( 0.00%) fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec { . let mut normalized_pos = vec![]; . remove_bom(src, &mut normalized_pos); . normalize_newlines(src, &mut normalized_pos); . . // Offset all the positions by start_pos to match the final file positions. . for np in &mut normalized_pos { . np.pos.0 += start_pos.0; . } . . normalized_pos 378 ( 0.00%) } . . /// Removes UTF-8 BOM, if any. . fn remove_bom(src: &mut String, normalized_pos: &mut Vec) { 40 ( 0.00%) if src.starts_with('\u{feff}') { . src.drain(..3); . normalized_pos.push(NormalizedPos { pos: BytePos(0), diff: 3 }); . } . } . . /// Replaces `\r\n` with `\n` in-place in `src`. . /// . /// Returns error if there's a lone `\r` in the string. . fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec) { 42 ( 0.00%) if !src.as_bytes().contains(&b'\r') { . return; . } . . // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding. . // While we *can* call `as_mut_vec` and do surgery on the live string . // directly, let's rather steal the contents of `src`. This makes the code . // safe even if a panic occurs. . -- line 1816 ---------------------------------------- -- line 1877 ---------------------------------------- . ( . $( . $(#[$attr:meta])* . $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty); . )* . ) => { . $( . $(#[$attr])* 561,185 ( 0.00%) $vis struct $ident($inner_vis $inner_ty); . . impl Pos for $ident { . #[inline(always)] . fn from_usize(n: usize) -> $ident { 6,002 ( 0.00%) $ident(n as $inner_ty) . } . . #[inline(always)] . fn to_usize(&self) -> usize { 135,453 ( 0.00%) self.0 as usize . } . . #[inline(always)] . fn from_u32(n: u32) -> $ident { . $ident(n as $inner_ty) . } . . #[inline(always)] -- line 1903 ---------------------------------------- -- line 1906 ---------------------------------------- . } . } . . impl Add for $ident { . type Output = $ident; . . #[inline(always)] . fn add(self, rhs: $ident) -> $ident { 2,193,346 ( 0.00%) $ident(self.0 + rhs.0) . } . } . . impl Sub for $ident { . type Output = $ident; . . #[inline(always)] . fn sub(self, rhs: $ident) -> $ident { 11,129,000 ( 0.02%) $ident(self.0 - rhs.0) . } . } . )* . }; . } . . impl_pos! { . /// A byte offset. -- line 1931 ---------------------------------------- -- line 1939 ---------------------------------------- . /// Because of multibyte UTF-8 characters, a byte offset . /// is not equivalent to a character offset. The [`SourceMap`] will convert [`BytePos`] . /// values to `CharPos` values as necessary. . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)] . pub struct CharPos(pub usize); . } . . impl Encodable for BytePos { 5 ( 0.00%) fn encode(&self, s: &mut S) -> Result<(), S::Error> { . s.emit_u32(self.0) 6 ( 0.00%) } . } . . impl Decodable for BytePos { . fn decode(d: &mut D) -> BytePos { 358,618 ( 0.00%) BytePos(d.read_u32()) . } . } . . // _____________________________________________________________________________ . // Loc, SourceFileAndLine, SourceFileAndBytePos . // . . 
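`normalize_src` above rewrites the source in place (BOM removal, `\r\n` to `\n`) and records each removal as a `NormalizedPos { pos, diff }` so that original byte offsets can be recovered later. A minimal sketch of the newline half of that bookkeeping; it copies into a new string rather than doing the in-place surgery the real code performs, and the `pos`/`diff` semantics described in the comments are the simplified ones of this sketch:

#[derive(Debug, PartialEq)]
struct NormalizedPos {
    pos: u32,  // offset in the *normalized* text where bytes were removed
    diff: u32, // cumulative number of bytes removed up to this point
}

/// Replaces "\r\n" with "\n", recording the running byte deficit at each removal.
fn normalize_newlines(src: &str) -> (String, Vec<NormalizedPos>) {
    let mut out = String::with_capacity(src.len());
    let mut removed = Vec::new();
    let mut diff = 0u32;
    let mut chars = src.chars().peekable();
    while let Some(c) = chars.next() {
        if c == '\r' && chars.peek() == Some(&'\n') {
            chars.next(); // drop the '\r', keep the '\n'
            out.push('\n');
            diff += 1;
            removed.push(NormalizedPos { pos: out.len() as u32 - 1, diff });
        } else {
            out.push(c);
        }
    }
    (out, removed)
}

fn main() {
    let (text, removed) = normalize_newlines("a\r\nb\r\nc");
    assert_eq!(text, "a\nb\nc");
    assert_eq!(removed.len(), 2);
    // Original offset = normalized offset + diff of the last removal at or before it.
    println!("{:?}", removed);
}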
/// A source code location used for error reporting. -- line 1962 ---------------------------------------- -- line 2042 ---------------------------------------- . /// Range inside of a `Span` used for diagnostics when we only have access to relative positions. . #[derive(Copy, Clone, PartialEq, Eq, Debug)] . pub struct InnerSpan { . pub start: usize, . pub end: usize, . } . . impl InnerSpan { 172 ( 0.00%) pub fn new(start: usize, end: usize) -> InnerSpan { . InnerSpan { start, end } 86 ( 0.00%) } . } . . /// Requirements for a `StableHashingContext` to be used in this crate. . /// . /// This is a hack to allow using the [`HashStable_Generic`] derive macro . /// instead of implementing everything in rustc_middle. . pub trait HashStableContext { . fn def_path_hash(&self, def_id: DefId) -> DefPathHash; -- line 2060 ---------------------------------------- -- line 2079 ---------------------------------------- . /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column) . /// triple, which stays the same even if the containing `SourceFile` has moved . /// within the `SourceMap`. . /// . /// Also note that we are hashing byte offsets for the column, not unicode . /// codepoint offsets. For the purpose of the hash that's sufficient. . /// Also, hashing filenames is expensive so we avoid doing it twice when the . /// span starts and ends in the same file, which is almost always the case. 8,939,392 ( 0.02%) fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { . const TAG_VALID_SPAN: u8 = 0; . const TAG_INVALID_SPAN: u8 = 1; . const TAG_RELATIVE_SPAN: u8 = 2; . 1,642,895 ( 0.00%) if !ctx.hash_spans() { . return; . } . 7,296,497 ( 0.01%) let span = self.data_untracked(); 2,733,659 ( 0.01%) span.ctxt.hash_stable(ctx, hasher); 3,831,168 ( 0.01%) span.parent.hash_stable(ctx, hasher); . 2,569,368 ( 0.00%) if span.is_dummy() { . Hash::hash(&TAG_INVALID_SPAN, hasher); . return; . } . 2,523,600 ( 0.00%) if let Some(parent) = span.parent { . let def_span = ctx.def_span(parent).data_untracked(); . if def_span.contains(span) { . // This span is enclosed in a definition: only hash the relative position. . Hash::hash(&TAG_RELATIVE_SPAN, hasher); . (span.lo - def_span.lo).to_u32().hash_stable(ctx, hasher); . (span.hi - def_span.lo).to_u32().hash_stable(ctx, hasher); . return; . } . } . . // If this is not an empty or invalid span, we want to hash the last . // position that belongs to it, as opposed to hashing the first . // position past it. 5,047,200 ( 0.01%) let (file, line_lo, col_lo, line_hi, col_hi) = match ctx.span_data_to_lines_and_cols(&span) . { 2,523,600 ( 0.00%) Some(pos) => pos, . None => { . Hash::hash(&TAG_INVALID_SPAN, hasher); . return; . } . }; . . Hash::hash(&TAG_VALID_SPAN, hasher); . // We truncate the stable ID hash and line and column numbers. The chances -- line 2129 ---------------------------------------- -- line 2134 ---------------------------------------- . // hash only the length, for example, then two otherwise equal spans with . // different end locations will have the same hash. This can cause a problem . // during incremental compilation wherein a previous result for a query that . // depends on the end location of a span will be incorrectly reused when the . // end location of the span it depends on has changed (see issue #74890). A . // similar analysis applies if some query depends specifically on the length . // of the span, but we only hash the end location. So hash both. . 1,261,800 ( 0.00%) let col_lo_trunc = (col_lo.0 as u64) & 0xFF; . 
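The span-hashing code above and below truncates the start/end line and column numbers, packs them into a single 64-bit value, and also hashes the span length, so that two spans with the same start but different ends cannot collide. A small illustration of that idea, using std's `DefaultHasher` as a stand-in for the stable hasher and an 8/24/8/24-bit layout that is illustrative rather than bit-for-bit identical to the listing:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

/// Packs truncated (line, col) pairs for both span endpoints into one u64:
/// bits 0..8 start column, 8..32 start line, 32..40 end column, 40..64 end line.
/// The length is hashed separately so spans differing only in length differ too.
fn hash_span(line_lo: u32, col_lo: u32, line_hi: u32, col_hi: u32, len: u32) -> u64 {
    let packed = (col_lo as u64 & 0xFF)
        | ((line_lo as u64 & 0xFF_FF_FF) << 8)
        | ((col_hi as u64 & 0xFF) << 32)
        | ((line_hi as u64 & 0xFF_FF_FF) << 40);
    let mut hasher = DefaultHasher::new();
    packed.hash(&mut hasher);
    len.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    // Two spans that start at the same place but end differently hash differently.
    let a = hash_span(10, 4, 10, 9, 5);
    let b = hash_span(10, 4, 12, 2, 40);
    assert_ne!(a, b);
}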
let line_lo_trunc = ((line_lo as u64) & 0xFF_FF_FF) << 8; . let col_hi_trunc = (col_hi.0 as u64) & 0xFF << 32; . let line_hi_trunc = ((line_hi as u64) & 0xFF_FF_FF) << 40; . let col_line = col_lo_trunc | line_lo_trunc | col_hi_trunc | line_hi_trunc; 1,261,800 ( 0.00%) let len = (span.hi - span.lo).0; . Hash::hash(&col_line, hasher); . Hash::hash(&len, hasher); 10,216,448 ( 0.02%) } . } 18,810,182 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/buffer.rs -------------------------------------------------------------------------------- Ir -- line 21 ---------------------------------------- . fn default() -> Self { . Self::from(vec![]) . } . } . . impl Deref for Buffer { . type Target = [T]; . fn deref(&self) -> &[T] { 10,251,767 ( 0.02%) unsafe { slice::from_raw_parts(self.data as *const T, self.len) } . } . } . . impl DerefMut for Buffer { . fn deref_mut(&mut self) -> &mut [T] { . unsafe { slice::from_raw_parts_mut(self.data, self.len) } . } . } . . impl Buffer { . pub(super) fn new() -> Self { . Self::default() . } . . pub(super) fn clear(&mut self) { 21,545,821 ( 0.04%) self.len = 0; . } . . pub(super) fn take(&mut self) -> Self { . mem::take(self) . } . . // We have the array method separate from extending from a slice. This is . // because in the case of small arrays, codegen can be more efficient . // (avoiding a memmove call). With extend_from_slice, LLVM at least . // currently is not able to make that optimization. . pub(super) fn extend_from_array(&mut self, xs: &[T; N]) { 47,748,907 ( 0.09%) if xs.len() > (self.capacity - self.len) { . let b = self.take(); 27,720 ( 0.00%) *self = (b.reserve)(b, xs.len()); . } . unsafe { 8,224,616 ( 0.02%) xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len()); 24,907,080 ( 0.05%) self.len += xs.len(); . } . } . 1,423,152 ( 0.00%) pub(super) fn extend_from_slice(&mut self, xs: &[T]) { 2,193,192 ( 0.00%) if xs.len() > (self.capacity - self.len) { . let b = self.take(); 28,161 ( 0.00%) *self = (b.reserve)(b, xs.len()); . } . unsafe { 1,341 ( 0.00%) xs.as_ptr().copy_to_nonoverlapping(self.data.add(self.len), xs.len()); 1,096,596 ( 0.00%) self.len += xs.len(); . } 1,660,344 ( 0.00%) } . . pub(super) fn push(&mut self, v: T) { . // The code here is taken from Vec::push, and we know that reserve() . // will panic if we're exceeding isize::MAX bytes and so there's no need . // to check for overflow. 53,683,797 ( 0.10%) if self.len == self.capacity { . let b = self.take(); . *self = (b.reserve)(b, 1); . } . unsafe { 34,753,629 ( 0.07%) *self.data.add(self.len) = v; 31,157,371 ( 0.06%) self.len += 1; . } . } . } . . impl Write for Buffer { . fn write(&mut self, xs: &[u8]) -> io::Result { . self.extend_from_slice(xs); . Ok(xs.len()) . } . 770,040 ( 0.00%) fn write_all(&mut self, xs: &[u8]) -> io::Result<()> { 711,576 ( 0.00%) self.extend_from_slice(xs); . Ok(()) 1,155,060 ( 0.00%) } . . fn flush(&mut self) -> io::Result<()> { . Ok(()) . } . } . . impl Drop for Buffer { . fn drop(&mut self) { . let b = self.take(); 54,357,012 ( 0.10%) (b.drop)(b); . } . } . . impl From> for Buffer { . fn from(mut v: Vec) -> Self { . let (data, len, capacity) = (v.as_mut_ptr(), v.len(), v.capacity()); . mem::forget(v); . -- line 120 ---------------------------------------- -- line 123 ---------------------------------------- . fn to_vec(b: Buffer) -> Vec { . unsafe { . let Buffer { data, len, capacity, .. } = b; . 
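The bridge `Buffer` in the listing above grows only when the remaining capacity is too small for the incoming bytes, and it grows by handing the allocation back to a callback supplied by the side that owns the allocator. A safe, same-process sketch of that pattern (the real buffer carries raw `(data, len, capacity)` plus `extern "C"` function pointers; here a plain `Vec<u8>` and an ordinary `fn` stand in):

/// Minimal model of the bridge buffer: bytes plus a growth callback.
struct Buffer {
    bytes: Vec<u8>,
    reserve: fn(Vec<u8>, usize) -> Vec<u8>,
}

fn reserve_impl(mut v: Vec<u8>, additional: usize) -> Vec<u8> {
    v.reserve(additional); // only the side that owns the allocator reallocates
    v
}

impl Buffer {
    fn new() -> Self {
        Buffer { bytes: Vec::new(), reserve: reserve_impl }
    }

    /// Mirrors `extend_from_slice` above: grow through the callback only when
    /// the remaining capacity is too small, then copy.
    fn extend_from_slice(&mut self, xs: &[u8]) {
        if xs.len() > self.bytes.capacity() - self.bytes.len() {
            let taken = std::mem::take(&mut self.bytes);
            self.bytes = (self.reserve)(taken, xs.len());
        }
        self.bytes.extend_from_slice(xs);
    }
}

fn main() {
    let mut b = Buffer::new();
    b.extend_from_slice(b"hello ");
    b.extend_from_slice(b"world");
    assert_eq!(b.bytes, b"hello world".to_vec());
}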
mem::forget(b); . Vec::from_raw_parts(data, len, capacity) . } . } . 10,644 ( 0.00%) extern "C" fn reserve(b: Buffer, additional: usize) -> Buffer { 7,983 ( 0.00%) let mut v = to_vec(b); . v.reserve(additional); 7,983 ( 0.00%) Buffer::from(v) 10,644 ( 0.00%) } . . extern "C" fn drop(b: Buffer) { 4,530,411 ( 0.01%) mem::drop(to_vec(b)); 4,529,091 ( 0.01%) } . 23,907 ( 0.00%) Buffer { data, len, capacity, reserve, drop } . } . } 21,192,749 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/proc_macro/src/bridge/server.rs -------------------------------------------------------------------------------- Ir -- line 28 ---------------------------------------- . (type SourceFile: 'static + Clone;); . (type MultiSpan) => . (type MultiSpan: 'static;); . (type Diagnostic) => . (type Diagnostic: 'static;); . (type Span) => . (type Span: 'static + Copy + Eq + Hash;); . (fn drop(&mut self, $arg:ident: $arg_ty:ty)) => 403,380 ( 0.00%) (fn drop(&mut self, $arg: $arg_ty) { mem::drop($arg) }); . (fn clone(&mut self, $arg:ident: $arg_ty:ty) -> $ret_ty:ty) => 34,481 ( 0.00%) (fn clone(&mut self, $arg: $arg_ty) -> $ret_ty { $arg.clone() }); . ($($item:tt)*) => ($($item)*;) . } . . macro_rules! declare_server_traits { . ($($name:ident { . $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)* . }),* $(,)?) => { . pub trait Types { -- line 46 ---------------------------------------- -- line 63 ---------------------------------------- . ($($name:ident { . $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)* . }),* $(,)?) => { . impl Types for MarkedTypes { . $(type $name = Marked;)* . } . . $(impl $name for MarkedTypes { 9,594 ( 0.00%) $(fn $method(&mut self, $($arg: $arg_ty),*) $(-> $ret_ty)? { 18,646,408 ( 0.04%) <_>::mark($name::$method(&mut self.0, $($arg.unmark()),*)) 9,594 ( 0.00%) })* . })* . } . } . with_api!(Self, self_, define_mark_types_impls); . . struct Dispatcher { . handle_store: HandleStore, . server: S, -- line 81 ---------------------------------------- -- line 89 ---------------------------------------- . pub trait DispatcherTrait { . // HACK(eddyb) these are here to allow `Self::$name` to work below. . $(type $name;)* . fn dispatch(&mut self, b: Buffer) -> Buffer; . } . . impl DispatcherTrait for Dispatcher> { . $(type $name = as Types>::$name;)* 45,290,910 ( 0.09%) fn dispatch(&mut self, mut b: Buffer) -> Buffer { . let Dispatcher { handle_store, server } = self; . 13,587,273 ( 0.03%) let mut reader = &b[..]; 31,703,637 ( 0.06%) match api_tags::Method::decode(&mut reader, &mut ()) { 21,782,226 ( 0.04%) $(api_tags::Method::$name(m) => match m { . $(api_tags::$name::$method => { . let mut call_method = || { 14,518,296 ( 0.03%) reverse_decode!(reader, handle_store; $($arg: $arg_ty),*); 4,797 ( 0.00%) $name::$method(server, $($arg),*) . }; . // HACK(eddyb) don't use `panic::catch_unwind` in a panic. . // If client and server happen to use the same `libstd`, . // `catch_unwind` asserts that the panic counter was 0, . // even when the closure passed to it didn't panic. . let r = if thread::panicking() { . Ok(call_method()) . } else { 20,278,007 ( 0.04%) panic::catch_unwind(panic::AssertUnwindSafe(call_method)) . .map_err(PanicMessage::from) . }; . . b.clear(); 49,969,631 ( 0.09%) r.encode(&mut b, handle_store); . })* . }),* . } 27,174,546 ( 0.05%) b 40,761,819 ( 0.08%) } . } . } . } . with_api!(Self, self_, define_dispatcher_impl); . . 
pub trait ExecutionStrategy { . fn run_bridge_and_client( . &self, -- line 133 ---------------------------------------- -- line 145 ---------------------------------------- . fn run_bridge_and_client( . &self, . dispatcher: &mut impl DispatcherTrait, . input: Buffer, . run_client: extern "C" fn(Bridge<'_>, D) -> Buffer, . client_data: D, . force_show_panics: bool, . ) -> Buffer { 40,762,479 ( 0.08%) let mut dispatch = |b| dispatcher.dispatch(b); . 6,600 ( 0.00%) run_client( 7,260 ( 0.00%) Bridge { cached_buffer: input, dispatch: (&mut dispatch).into(), force_show_panics }, . client_data, . ) . } . } . . // NOTE(eddyb) Two implementations are provided, the second one is a bit . // faster but neither is anywhere near as fast as same-thread execution. . -- line 164 ---------------------------------------- -- line 263 ---------------------------------------- . *state.lock().unwrap() = State::Res(b); . join_handle.thread().unpark(); . } . . join_handle.join().unwrap() . } . } . 8,580 ( 0.00%) fn run_server< . S: Server, . I: Encode>>, . O: for<'a, 's> DecodeMut<'a, 's, HandleStore>>, . D: Copy + Send + 'static, . >( . strategy: &impl ExecutionStrategy, . handle_counters: &'static client::HandleCounters, . server: S, . input: I, . run_client: extern "C" fn(Bridge<'_>, D) -> Buffer, . client_data: D, . force_show_panics: bool, . ) -> Result { . let mut dispatcher = 11,220 ( 0.00%) Dispatcher { handle_store: HandleStore::new(handle_counters), server: MarkedTypes(server) }; . . let mut b = Buffer::new(); 1,980 ( 0.00%) input.encode(&mut b, &mut dispatcher.handle_store); . 3,960 ( 0.00%) b = strategy.run_bridge_and_client( . &mut dispatcher, . b, . run_client, . client_data, . force_show_panics, . ); . 3,300 ( 0.00%) Result::decode(&mut &b[..], &mut dispatcher.handle_store) 6,600 ( 0.00%) } . . impl client::Client crate::TokenStream> { . pub fn run( . &self, . strategy: &impl ExecutionStrategy, . server: S, . input: S::TokenStream, . force_show_panics: bool, . ) -> Result { 1,320 ( 0.00%) let client::Client { get_handle_counters, run, f } = *self; 7,260 ( 0.00%) run_server( . strategy, 660 ( 0.00%) get_handle_counters(), 6,600 ( 0.00%) server, . as Types>::TokenStream::mark(input), . run, . f, . force_show_panics, . ) . .map( as Types>::TokenStream::unmark) . } . } -- line 322 ---------------------------------------- 10,253,290 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/type_variable.rs -------------------------------------------------------------------------------- Ir -- line 44 ---------------------------------------- . /// Convert from a specific kind of undo to the more general UndoLog . impl<'tcx> From for UndoLog<'tcx> { . fn from(l: Instantiate) -> Self { . UndoLog::Values(sv::UndoLog::Other(l)) . } . } . . impl<'tcx> Rollback> for TypeVariableStorage<'tcx> { 759,325 ( 0.00%) fn reverse(&mut self, undo: UndoLog<'tcx>) { 3,193,073 ( 0.01%) match undo { 603,552 ( 0.00%) UndoLog::EqRelation(undo) => self.eq_relations.reverse(undo), 331,222 ( 0.00%) UndoLog::SubRelation(undo) => self.sub_relations.reverse(undo), 583,876 ( 0.00%) UndoLog::Values(undo) => self.values.reverse(undo), . } 1,518,650 ( 0.00%) } . } . . pub struct TypeVariableStorage<'tcx> { . values: sv::SnapshotVecStorage, . . /// Two variables are unified in `eq_relations` when we have a . /// constraint `?X == ?Y`. This table also stores, for each key, . /// the known value. 
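The dispatcher above runs one request per round trip: it decodes a method tag and its arguments from the buffer the client filled, invokes the corresponding server method, then clears the buffer and encodes the result back into it. A toy model of that cycle with an invented tag set and wire format (nothing here is the real bridge API):

use std::convert::TryInto;

#[derive(Debug)]
enum Request {
    Add(u32, u32),
    Negate(i32),
}

/// Client side: serialize a method tag plus its arguments into the buffer.
fn encode_request(req: &Request, buf: &mut Vec<u8>) {
    buf.clear();
    match req {
        Request::Add(a, b) => {
            buf.push(0); // method tag
            buf.extend_from_slice(&a.to_le_bytes());
            buf.extend_from_slice(&b.to_le_bytes());
        }
        Request::Negate(x) => {
            buf.push(1);
            buf.extend_from_slice(&x.to_le_bytes());
        }
    }
}

/// Server side: decode the tag and arguments, run the method, and overwrite
/// the same buffer with the encoded result.
fn dispatch(buf: &mut Vec<u8>) {
    let tag = buf[0];
    let result: i64 = match tag {
        0 => {
            let a = u32::from_le_bytes(buf[1..5].try_into().unwrap());
            let b = u32::from_le_bytes(buf[5..9].try_into().unwrap());
            (a + b) as i64
        }
        1 => {
            let x = i32::from_le_bytes(buf[1..5].try_into().unwrap());
            -(x as i64)
        }
        _ => unreachable!("unknown method tag"),
    };
    buf.clear();
    buf.extend_from_slice(&result.to_le_bytes());
}

fn main() {
    let mut buf = Vec::new();
    encode_request(&Request::Add(2, 40), &mut buf);
    dispatch(&mut buf);
    assert_eq!(i64::from_le_bytes(buf[..8].try_into().unwrap()), 42);
}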
-- line 66 ---------------------------------------- -- line 136 ---------------------------------------- . DynReturnFn, . LatticeVariable, . } . . pub(crate) struct TypeVariableData { . origin: TypeVariableOrigin, . } . 7,230,028 ( 0.01%) #[derive(Copy, Clone, Debug)] . pub enum TypeVariableValue<'tcx> { . Known { value: Ty<'tcx> }, . Unknown { universe: ty::UniverseIndex }, . } . . impl<'tcx> TypeVariableValue<'tcx> { . /// If this value is known, returns the type it is known to be. . /// Otherwise, `None`. . pub fn known(&self) -> Option> { 4,687,276 ( 0.01%) match *self { . TypeVariableValue::Unknown { .. } => None, . TypeVariableValue::Known { value } => Some(value), . } . } . . pub fn is_unknown(&self) -> bool { . match *self { . TypeVariableValue::Unknown { .. } => true, -- line 162 ---------------------------------------- -- line 166 ---------------------------------------- . } . . pub(crate) struct Instantiate; . . pub(crate) struct Delegate; . . impl<'tcx> TypeVariableStorage<'tcx> { . pub fn new() -> TypeVariableStorage<'tcx> { 533,688 ( 0.00%) TypeVariableStorage { . values: sv::SnapshotVecStorage::new(), . eq_relations: ut::UnificationTableStorage::new(), . sub_relations: ut::UnificationTableStorage::new(), . } . } . . #[inline] . pub(crate) fn with_log<'a>( -- line 182 ---------------------------------------- -- line 188 ---------------------------------------- . } . . impl<'tcx> TypeVariableTable<'_, 'tcx> { . /// Returns the origin that was given when `vid` was created. . /// . /// Note that this function does not return care whether . /// `vid` has been unified with something else or not. . pub fn var_origin(&self, vid: ty::TyVid) -> &TypeVariableOrigin { 14,021 ( 0.00%) &self.storage.values.get(vid.as_usize()).origin . } . . /// Records that `a == b`, depending on `dir`. . /// . /// Precondition: neither `a` nor `b` are known. . pub fn equate(&mut self, a: ty::TyVid, b: ty::TyVid) { . debug_assert!(self.probe(a).is_unknown()); . debug_assert!(self.probe(b).is_unknown()); 79,275 ( 0.00%) self.eq_relations().union(a, b); 67,950 ( 0.00%) self.sub_relations().union(a, b); . } . . /// Records that `a <: b`, depending on `dir`. . /// . /// Precondition: neither `a` nor `b` are known. . pub fn sub(&mut self, a: ty::TyVid, b: ty::TyVid) { . debug_assert!(self.probe(a).is_unknown()); . debug_assert!(self.probe(b).is_unknown()); 81,466 ( 0.00%) self.sub_relations().union(a, b); . } . . /// Instantiates `vid` with the type `ty`. . /// . /// Precondition: `vid` must not have been previously instantiated. 1,421,765 ( 0.00%) pub fn instantiate(&mut self, vid: ty::TyVid, ty: Ty<'tcx>) { . let vid = self.root_var(vid); . debug_assert!(self.probe(vid).is_unknown()); . debug_assert!( . self.eq_relations().probe_value(vid).is_unknown(), . "instantiating type variable `{:?}` twice: new-value = {:?}, old-value={:?}", . vid, . ty, . self.eq_relations().probe_value(vid) . ); 1,990,471 ( 0.00%) self.eq_relations().union_value(vid, TypeVariableValue::Known { value: ty }); . . // Hack: we only need this so that `types_escaping_snapshot` . // can see what has been unified; see the Delegate impl for . // more details. . self.undo_log.push(Instantiate); 1,421,765 ( 0.00%) } . . /// Creates a new type variable. . /// . /// - `diverging`: indicates if this is a "diverging" type . /// variable, e.g., one created as the type of a `return` . /// expression. The code in this module doesn't care if a . /// variable is diverging, but the main Rust type-checker will . 
/// sometimes "unify" such variables with the `!` or `()` types. . /// - `origin`: indicates *why* the type variable was created. . /// The code in this module doesn't care, but it can be useful . /// for improving error messages. 1,684,452 ( 0.00%) pub fn new_var( . &mut self, . universe: ty::UniverseIndex, . origin: TypeVariableOrigin, . ) -> ty::TyVid { 2,526,678 ( 0.00%) let eq_key = self.eq_relations().new_key(TypeVariableValue::Unknown { universe }); . 1,403,710 ( 0.00%) let sub_key = self.sub_relations().new_key(()); 280,742 ( 0.00%) assert_eq!(eq_key.vid, sub_key); . 2,526,678 ( 0.00%) let index = self.values().push(TypeVariableData { origin }); 1,122,968 ( 0.00%) assert_eq!(eq_key.vid.as_u32(), index as u32); . . debug!("new_var(index={:?}, universe={:?}, origin={:?}", eq_key.vid, universe, origin,); . . eq_key.vid 1,965,194 ( 0.00%) } . . /// Returns the number of type variables created thus far. . pub fn num_vars(&self) -> usize { 25,657 ( 0.00%) self.storage.values.len() . } . . /// Returns the "root" variable of `vid` in the `eq_relations` . /// equivalence table. All type variables that have been equated . /// will yield the same root variable (per the union-find . /// algorithm), so `root_var(a) == root_var(b)` implies that `a == . /// b` (transitively). . pub fn root_var(&mut self, vid: ty::TyVid) -> ty::TyVid { 1,193,247 ( 0.00%) self.eq_relations().find(vid).vid . } . . /// Returns the "root" variable of `vid` in the `sub_relations` . /// equivalence table. All type variables that have been are . /// related via equality or subtyping will yield the same root . /// variable (per the union-find algorithm), so `sub_root_var(a) . /// == sub_root_var(b)` implies that: . /// . /// exists X. (a <: X || X <: a) && (b <: X || X <: b) 5,502 ( 0.00%) pub fn sub_root_var(&mut self, vid: ty::TyVid) -> ty::TyVid { 912,741 ( 0.00%) self.sub_relations().find(vid) 11,004 ( 0.00%) } . . /// Returns `true` if `a` and `b` have same "sub-root" (i.e., exists some . /// type X such that `forall i in {a, b}. (i <: X || X <: i)`. . pub fn sub_unified(&mut self, a: ty::TyVid, b: ty::TyVid) -> bool { . self.sub_root_var(a) == self.sub_root_var(b) . } . . /// Retrieves the type to which `vid` has been instantiated, if . /// any. 16,567,565 ( 0.03%) pub fn probe(&mut self, vid: ty::TyVid) -> TypeVariableValue<'tcx> { . self.inlined_probe(vid) 16,567,565 ( 0.03%) } . . /// An always-inlined variant of `probe`, for very hot call sites. . #[inline(always)] . pub fn inlined_probe(&mut self, vid: ty::TyVid) -> TypeVariableValue<'tcx> { 12,500,166 ( 0.02%) self.eq_relations().inlined_probe_value(vid) . } . . /// If `t` is a type-inference variable, and it has been . /// instantiated, then return the with which it was . /// instantiated. Otherwise, returns `t`. . pub fn replace_if_possible(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { 2,968,722 ( 0.01%) match *t.kind() { 2,442,638 ( 0.00%) ty::Infer(ty::TyVar(v)) => match self.probe(v) { . TypeVariableValue::Unknown { .. } => t, . TypeVariableValue::Known { value } => value, . }, . _ => t, . } . } . . #[inline] -- line 323 ---------------------------------------- -- line 324 ---------------------------------------- . fn values( . &mut self, . ) -> sv::SnapshotVec, &mut InferCtxtUndoLogs<'tcx>> { . self.storage.values.with_log(self.undo_log) . } . . #[inline] . fn eq_relations(&mut self) -> super::UnificationTable<'_, 'tcx, TyVidEqKey<'tcx>> { 14,697,115 ( 0.03%) self.storage.eq_relations.with_log(self.undo_log) . } . . #[inline] . 
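The table above keeps inference variables in snapshot-able union-find structures: equating two unknown variables merges their equivalence classes, instantiating one stores the known type at the class root, and `probe` reads through to that root. A stripped-down model without snapshots or universes, using a string as a stand-in for `Ty` (names invented for illustration):

#[derive(Clone, Debug, PartialEq)]
enum Value {
    Known(String),
    Unknown,
}

struct TypeVariableTable {
    parent: Vec<usize>,
    value: Vec<Value>,
}

impl TypeVariableTable {
    fn new() -> Self {
        TypeVariableTable { parent: Vec::new(), value: Vec::new() }
    }

    fn new_var(&mut self) -> usize {
        self.parent.push(self.parent.len());
        self.value.push(Value::Unknown);
        self.parent.len() - 1
    }

    /// Root of the equivalence class, with path compression.
    fn root(&mut self, v: usize) -> usize {
        if self.parent[v] != v {
            let r = self.root(self.parent[v]);
            self.parent[v] = r;
        }
        self.parent[v]
    }

    /// `?a == ?b`: both must still be unknown, as in `equate` above.
    fn equate(&mut self, a: usize, b: usize) {
        let (ra, rb) = (self.root(a), self.root(b));
        assert_eq!(self.value[ra], Value::Unknown);
        assert_eq!(self.value[rb], Value::Unknown);
        self.parent[ra] = rb;
    }

    /// Instantiate a still-unknown variable with a concrete type.
    fn instantiate(&mut self, v: usize, ty: &str) {
        let r = self.root(v);
        assert_eq!(self.value[r], Value::Unknown);
        self.value[r] = Value::Known(ty.to_string());
    }

    fn probe(&mut self, v: usize) -> Value {
        let r = self.root(v);
        self.value[r].clone()
    }
}

fn main() {
    let mut table = TypeVariableTable::new();
    let a = table.new_var();
    let b = table.new_var();
    table.equate(a, b);
    table.instantiate(b, "i32");
    assert_eq!(table.probe(a), Value::Known("i32".to_string()));
}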
fn sub_relations(&mut self) -> super::UnificationTable<'_, 'tcx, ty::TyVid> { 644,957 ( 0.00%) self.storage.sub_relations.with_log(self.undo_log) . } . . /// Returns a range of the type variables created during the snapshot. 128,285 ( 0.00%) pub fn vars_since_snapshot( . &mut self, . value_count: usize, . ) -> (Range, Vec) { . let range = TyVid::from_usize(value_count)..TyVid::from_usize(self.num_vars()); 153,942 ( 0.00%) ( . range.start..range.end, . (range.start.as_usize()..range.end.as_usize()) . .map(|index| self.storage.values.get(index).origin) . .collect(), . ) 153,942 ( 0.00%) } . . /// Returns indices of all variables that are not yet . /// instantiated. . pub fn unsolved_variables(&mut self) -> Vec { . (0..self.storage.values.len()) . .filter_map(|i| { . let vid = ty::TyVid::from_usize(i); 868,456 ( 0.00%) match self.probe(vid) { . TypeVariableValue::Unknown { .. } => Some(vid), . TypeVariableValue::Known { .. } => None, . } . }) . .collect() . } . } . -- line 368 ---------------------------------------- -- line 391 ---------------------------------------- . /// These structs (a newtyped TyVid) are used as the unification key . /// for the `eq_relations`; they carry a `TypeVariableValue` along . /// with them. . #[derive(Copy, Clone, Debug, PartialEq, Eq)] . pub(crate) struct TyVidEqKey<'tcx> { . vid: ty::TyVid, . . // in the table, we map each ty-vid to one of these: 13,346,483 ( 0.03%) phantom: PhantomData>, . } . . impl<'tcx> From for TyVidEqKey<'tcx> { . #[inline] // make this function eligible for inlining - it is quite hot. . fn from(vid: ty::TyVid) -> Self { . TyVidEqKey { vid, phantom: PhantomData } . } . } -- line 407 ---------------------------------------- -- line 419 ---------------------------------------- . "TyVidEqKey" . } . } . . impl<'tcx> ut::UnifyValue for TypeVariableValue<'tcx> { . type Error = ut::NoError; . . fn unify_values(value1: &Self, value2: &Self) -> Result { 1,194,037 ( 0.00%) match (value1, value2) { . // We never equate two type variables, both of which . // have known types. Instead, we recursively equate . // those types. . (&TypeVariableValue::Known { .. }, &TypeVariableValue::Known { .. }) => { . bug!("equating two type variables, both of which have known types") . } . . // If one side is known, prefer that one. . (&TypeVariableValue::Known { .. }, &TypeVariableValue::Unknown { .. }) => Ok(*value1), . (&TypeVariableValue::Unknown { .. }, &TypeVariableValue::Known { .. }) => Ok(*value2), . . // If both sides are *unknown*, it hardly matters, does it? . ( 11,325 ( 0.00%) &TypeVariableValue::Unknown { universe: universe1 }, 11,325 ( 0.00%) &TypeVariableValue::Unknown { universe: universe2 }, . ) => { . // If we unify two unbound variables, ?T and ?U, then whatever . // value they wind up taking (which must be the same value) must . // be nameable by both universes. Therefore, the resulting . // universe is the minimum of the two universes, because that is . // the one which contains the fewest names in scope. . let universe = cmp::min(universe1, universe2); 45,300 ( 0.00%) Ok(TypeVariableValue::Unknown { universe }) . } . } . } . } 851,716 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_transform/src/simplify.rs -------------------------------------------------------------------------------- Ir -- line 38 ---------------------------------------- . use std::convert::TryInto; . . pub struct SimplifyCfg { . 
label: String, . } . . impl SimplifyCfg { . pub fn new(label: &str) -> Self { 337,235 ( 0.00%) SimplifyCfg { label: format!("SimplifyCfg-{}", label) } . } . } . 404,390 ( 0.00%) pub fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { 161,756 ( 0.00%) CfgSimplifier::new(body).simplify(); 121,317 ( 0.00%) remove_dead_blocks(tcx, body); . . // FIXME: Should probably be moved into some kind of pass manager . body.basic_blocks_mut().raw.shrink_to_fit(); 123,680 ( 0.00%) } . . impl<'tcx> MirPass<'tcx> for SimplifyCfg { 28,897 ( 0.00%) fn name(&self) -> Cow<'_, str> { 86,691 ( 0.00%) Cow::Borrowed(&self.label) 28,897 ( 0.00%) } . 28,897 ( 0.00%) fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { . debug!("SimplifyCfg({:?}) - simplifying {:?}", self.label, body.source); 80,706 ( 0.00%) simplify_cfg(tcx, body); . } . } . . pub struct CfgSimplifier<'a, 'tcx> { . basic_blocks: &'a mut IndexVec>, . pred_count: IndexVec, . } . . impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { 283,073 ( 0.00%) pub fn new(body: &'a mut Body<'tcx>) -> Self { 80,878 ( 0.00%) let mut pred_count = IndexVec::from_elem(0u32, body.basic_blocks()); . . // we can't use mir.predecessors() here because that counts . // dead blocks, which we don't want to. 121,317 ( 0.00%) pred_count[START_BLOCK] = 1; . 3,327,164 ( 0.01%) for (_, data) in traversal::preorder(body) { 2,010,105 ( 0.00%) if let Some(ref term) = data.terminator { 2,930,304 ( 0.01%) for &tgt in term.successors() { 4,600,995 ( 0.01%) pred_count[tgt] += 1; . } . } . } . . let basic_blocks = body.basic_blocks_mut(); . 161,756 ( 0.00%) CfgSimplifier { basic_blocks, pred_count } 323,512 ( 0.00%) } . 323,512 ( 0.00%) pub fn simplify(mut self) { 40,439 ( 0.00%) self.strip_nops(); . . // Vec of the blocks that should be merged. We store the indices here, instead of the . // statements itself to avoid moving the (relatively) large statements twice. . // We do not push the statements directly into the target block (`bb`) as that is slower . // due to additional reallocations . let mut merged_blocks = Vec::new(); . loop { . let mut changed = false; . 93,826 ( 0.00%) for bb in self.basic_blocks.indices() { 2,265,308 ( 0.00%) if self.pred_count[bb] == 0 { . continue; . } . . debug!("simplifying {:?}", bb); . . let mut terminator = . self.basic_blocks[bb].terminator.take().expect("invalid terminator state"); . 2,451,810 ( 0.00%) for successor in terminator.successors_mut() { . self.collapse_goto_chain(successor, &mut changed); . } . . let mut inner_changed = true; . merged_blocks.clear(); 2,579,902 ( 0.00%) while inner_changed { . inner_changed = false; . inner_changed |= self.simplify_branch(&mut terminator); 881,316 ( 0.00%) inner_changed |= self.merge_successor(&mut merged_blocks, &mut terminator); 2,643,948 ( 0.00%) changed |= inner_changed; . } . . let statements_to_merge = 941,938 ( 0.00%) merged_blocks.iter().map(|&i| self.basic_blocks[i].statements.len()).sum(); . 96,000 ( 0.00%) if statements_to_merge > 0 { . let mut statements = std::mem::take(&mut self.basic_blocks[bb].statements); . statements.reserve(statements_to_merge); 54,128 ( 0.00%) for &from in &merged_blocks { 108,256 ( 0.00%) statements.append(&mut self.basic_blocks[from].statements); . } 358,146 ( 0.00%) self.basic_blocks[bb].statements = statements; . } . 25,335,370 ( 0.05%) self.basic_blocks[bb].terminator = Some(terminator); . } . 93,826 ( 0.00%) if !changed { . break; . } . } 323,512 ( 0.00%) } . . /// This function will return `None` if . /// * the block has statements . 
/// * the block has a terminator other than `goto` . /// * the block has no terminator (meaning some other part of the current optimization stole it) . fn take_terminator_if_simple_goto(&mut self, bb: BasicBlock) -> Option> { 8,405,523 ( 0.02%) match self.basic_blocks[bb] { . BasicBlockData { . ref statements, . terminator: . ref mut terminator @ Some(Terminator { kind: TerminatorKind::Goto { .. }, .. }), . .. 311,562 ( 0.00%) } if statements.is_empty() => terminator.take(), . // if `terminator` is None, this means we are in a loop. In that . // case, let all the loop collapse to its entry. . _ => None, . } . } . . /// Collapse a goto chain starting from `start` . fn collapse_goto_chain(&mut self, start: &mut BasicBlock, changed: &mut bool) { . // Using `SmallVec` here, because in some logs on libcore oli-obk saw many single-element . // goto chains. We should probably benchmark different sizes. . let mut terminators: SmallVec<[_; 1]> = Default::default(); 3,460,110 ( 0.01%) let mut current = *start; 4,361,071 ( 0.01%) while let Some(terminator) = self.take_terminator_if_simple_goto(current) { 94,838 ( 0.00%) let target = match terminator { 47,419 ( 0.00%) Terminator { kind: TerminatorKind::Goto { target }, .. } => target, . _ => unreachable!(), . }; 900,961 ( 0.00%) terminators.push((current, terminator)); . current = target; . } . let last = current; 1,153,370 ( 0.00%) *start = last; 711,285 ( 0.00%) while let Some((current, mut terminator)) = terminators.pop() { 94,838 ( 0.00%) let target = match terminator { . Terminator { kind: TerminatorKind::Goto { ref mut target }, .. } => target, . _ => unreachable!(), . }; 94,838 ( 0.00%) *changed |= *target != last; 94,838 ( 0.00%) *target = last; . debug!("collapsing goto chain from {:?} to {:?}", current, target); . 94,838 ( 0.00%) if self.pred_count[current] == 1 { . // This is the last reference to current, so the pred-count to . // to target is moved into the current block. . self.pred_count[current] = 0; . } else { 33,296 ( 0.00%) self.pred_count[*target] += 1; 24,972 ( 0.00%) self.pred_count[current] -= 1; . } 1,564,827 ( 0.00%) self.basic_blocks[current].terminator = Some(terminator); . } . } . . // merge a block with 1 `goto` predecessor to its parent . fn merge_successor( . &mut self, . merged_blocks: &mut Vec, . terminator: &mut Terminator<'tcx>, . ) -> bool { 2,321,370 ( 0.00%) let target = match terminator.kind { 971,920 ( 0.00%) TerminatorKind::Goto { target } if self.pred_count[target] == 1 => target, . _ => return false, . }; . . debug!("merging block {:?} into {:?}", target, terminator); 1,433,682 ( 0.00%) *terminator = match self.basic_blocks[target].terminator.take() { . Some(terminator) => terminator, . None => { . // unreachable loop - this should not be possible, as we . // don't strand blocks, but handle it correctly. . return false; . } . }; . . merged_blocks.push(target); 187,002 ( 0.00%) self.pred_count[target] = 0; . . true . } . . // turn a branch with all successors identical to a goto . fn simplify_branch(&mut self, terminator: &mut Terminator<'tcx>) -> bool { 2,643,948 ( 0.00%) match terminator.kind { . TerminatorKind::SwitchInt { .. } => {} . _ => return false, . }; . . let first_succ = { 1,012,446 ( 0.00%) if let Some(&first_succ) = terminator.successors().next() { 506,223 ( 0.00%) if terminator.successors().all(|s| *s == first_succ) { 32,586 ( 0.00%) let count = terminator.successors().count(); 65,172 ( 0.00%) self.pred_count[first_succ] -= (count - 1) as u32; . first_succ . } else { . return false; . } . 
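`collapse_goto_chain` above walks through blocks that consist of nothing but a `goto`, so that every edge entering such a chain is redirected to the chain's final destination (the real pass also fixes up predecessor counts and avoids loops by temporarily taking terminators). A miniature version of the chain walk itself, with an invented block representation:

#[derive(Clone, Debug, PartialEq)]
enum Terminator {
    Goto(usize),
    Return,
}

/// Follow `Goto` terminators from `start` until a non-goto block (or a cycle)
/// is reached; the result is where all edges into the chain should point.
fn collapse_goto_chain(blocks: &[Terminator], start: usize) -> usize {
    let mut visited = vec![false; blocks.len()];
    let mut current = start;
    while let Terminator::Goto(target) = blocks[current] {
        if visited[current] {
            break; // a goto cycle: stop wherever we are
        }
        visited[current] = true;
        current = target;
    }
    current
}

fn main() {
    // bb0 -> bb1 -> bb2 -> bb3 (Return); the chain 0 -> 1 -> 2 collapses onto bb3.
    let blocks = vec![
        Terminator::Goto(1),
        Terminator::Goto(2),
        Terminator::Goto(3),
        Terminator::Return,
    ];
    assert_eq!(collapse_goto_chain(&blocks, 0), 3);
}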
} else { . return false; . } . }; . . debug!("simplifying branch {:?}", terminator); 65,172 ( 0.00%) terminator.kind = TerminatorKind::Goto { target: first_succ }; . true . } . . fn strip_nops(&mut self) { . for blk in self.basic_blocks.iter_mut() { 3,166,271 ( 0.01%) blk.statements.retain(|stmt| !matches!(stmt.kind, StatementKind::Nop)) . } . } . } . 468,200 ( 0.00%) pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { 46,820 ( 0.00%) let reachable = traversal::reachable_as_bitset(body); . let num_blocks = body.basic_blocks().len(); 93,640 ( 0.00%) if num_blocks == reachable.count() { . return; . } . . let basic_blocks = body.basic_blocks_mut(); . let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect(); . let mut used_blocks = 0; 429,480 ( 0.00%) for alive_index in reachable.iter() { . let alive_index = alive_index.index(); 429,480 ( 0.00%) replacements[alive_index] = BasicBlock::new(used_blocks); 858,960 ( 0.00%) if alive_index != used_blocks { . // Swap the next alive block data with the current available slot. Since . // alive_index is non-decreasing this is a valid operation. . basic_blocks.raw.swap(alive_index, used_blocks); . } 429,480 ( 0.00%) used_blocks += 1; . } . 103,316 ( 0.00%) if tcx.sess.instrument_coverage() { . save_unreachable_coverage(basic_blocks, used_blocks); . } . . basic_blocks.raw.truncate(used_blocks); . . for block in basic_blocks { 858,960 ( 0.00%) for target in block.terminator_mut().successors_mut() { 2,353,508 ( 0.00%) *target = replacements[target.index()]; . } . } 374,560 ( 0.00%) } . . /// Some MIR transforms can determine at compile time that a sequences of . /// statements will never be executed, so they can be dropped from the MIR. . /// For example, an `if` or `else` block that is guaranteed to never be executed . /// because its condition can be evaluated at compile time, such as by const . /// evaluation: `if false { ... }`. . /// . /// Those statements are bypassed by redirecting paths in the CFG around the -- line 305 ---------------------------------------- -- line 363 ---------------------------------------- . })), . }) . } . } . . pub struct SimplifyLocals; . . impl<'tcx> MirPass<'tcx> for SimplifyLocals { 11,456 ( 0.00%) fn is_enabled(&self, sess: &rustc_session::Session) -> bool { 17,184 ( 0.00%) sess.mir_opt_level() > 0 11,456 ( 0.00%) } . 5,728 ( 0.00%) fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { . trace!("running SimplifyLocals on {:?}", body.source); 5,728 ( 0.00%) simplify_locals(body, tcx); . } . } . 40,096 ( 0.00%) pub fn simplify_locals<'tcx>(body: &mut Body<'tcx>, tcx: TyCtxt<'tcx>) { . // First, we're going to get a count of *actual* uses for every `Local`. . let mut used_locals = UsedLocals::new(body); . . // Next, we're going to remove any `Local` with zero actual uses. When we remove those . // `Locals`, we're also going to subtract any uses of other `Locals` from the `used_locals` . // count. For example, if we removed `_2 = discriminant(_1)`, then we'll subtract one from . // `use_counts[_1]`. That in turn might make `_1` unused, so we loop until we hit a . // fixedpoint where there are no more unused locals. -- line 389 ---------------------------------------- -- line 390 ---------------------------------------- . remove_unused_definitions(&mut used_locals, body); . . // Finally, we'll actually do the work of shrinking `body.local_decls` and remapping the `Local`s. . let map = make_local_map(&mut body.local_decls, &used_locals); . . 
// Only bother running the `LocalUpdater` if we actually found locals to remove. . if map.iter().any(Option::is_none) { . // Update references to all vars and tmps now 7,728 ( 0.00%) let mut updater = LocalUpdater { map, tcx }; . updater.visit_body(body); . . body.local_decls.shrink_to_fit(); . } 45,824 ( 0.00%) } . . /// Construct the mapping while swapping out unused stuff out from the `vec`. . fn make_local_map( . local_decls: &mut IndexVec, . used_locals: &UsedLocals, . ) -> IndexVec> { 11,456 ( 0.00%) let mut map: IndexVec> = IndexVec::from_elem(None, &*local_decls); . let mut used = Local::new(0); . 5,728 ( 0.00%) for alive_index in local_decls.indices() { . // `is_used` treats the `RETURN_PLACE` and arguments as used. 119,123 ( 0.00%) if !used_locals.is_used(alive_index) { . continue; . } . 101,330 ( 0.00%) map[alive_index] = Some(used); 101,330 ( 0.00%) if alive_index != used { . local_decls.swap(alive_index, used); . } . used.increment_by(1); . } . local_decls.truncate(used.index()); . map . } . -- line 428 ---------------------------------------- -- line 431 ---------------------------------------- . increment: bool, . arg_count: u32, . use_count: IndexVec, . } . . impl UsedLocals { . /// Determines which locals are used & unused in the given body. . fn new(body: &Body<'_>) -> Self { 40,096 ( 0.00%) let mut this = Self { . increment: true, 5,728 ( 0.00%) arg_count: body.arg_count.try_into().unwrap(), 17,184 ( 0.00%) use_count: IndexVec::from_elem(0, &body.local_decls), . }; . this.visit_body(body); . this . } . . /// Checks if local is used. . /// . /// Return place and arguments are always considered used. . fn is_used(&self, local: Local) -> bool { . trace!("is_used({:?}): use_count: {:?}", local, self.use_count[local]); 1,313,463 ( 0.00%) local.as_u32() <= self.arg_count || self.use_count[local] != 0 . } . . /// Updates the use counts to reflect the removal of given statement. . fn statement_removed(&mut self, statement: &Statement<'_>) { 30,763 ( 0.00%) self.increment = false; . . // The location of the statement is irrelevant. . let location = Location { block: START_BLOCK, statement_index: 0 }; 153,815 ( 0.00%) self.visit_statement(statement, location); . } . . /// Visits a left-hand side of an assignment. 722,120 ( 0.00%) fn visit_lhs(&mut self, place: &Place<'_>, location: Location) { 577,696 ( 0.00%) if place.is_indirect() { . // A use, not a definition. . self.visit_place(place, PlaceContext::MutatingUse(MutatingUseContext::Store), location); . } else { . // A definition. The base local itself is not visited, so this occurrence is not counted . // toward its use count. There might be other locals still, used in an indexing . // projection. . self.super_projection( . place.as_ref(), . PlaceContext::MutatingUse(MutatingUseContext::Projection), . location, . ); . } 577,696 ( 0.00%) } . } . . impl<'tcx> Visitor<'tcx> for UsedLocals { 1,650,935 ( 0.00%) fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) { 1,650,935 ( 0.00%) match statement.kind { . StatementKind::CopyNonOverlapping(..) . | StatementKind::Retag(..) . | StatementKind::Coverage(..) . | StatementKind::FakeRead(..) . | StatementKind::AscribeUserType(..) => { . self.super_statement(statement, location); . } . . StatementKind::Nop => {} . . StatementKind::StorageLive(_local) | StatementKind::StorageDead(_local) => {} . 128,128 ( 0.00%) StatementKind::Assign(box (ref place, ref rvalue)) => { 384,384 ( 0.00%) self.visit_lhs(place, location); . self.visit_rvalue(rvalue, location); . } . . 
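`simplify_locals` above counts how often every local is actually used, deletes definitions of locals with zero uses, and repeats until nothing changes, since removing one definition can strip the last use of another local; the survivors are then renumbered densely. A scaled-down model of that fixed-point loop over toy "dest = uses" statements (all names here are invented):

#[derive(Clone, Debug)]
struct Assign {
    dest: usize,
    uses: Vec<usize>,
}

/// Returns the surviving statements and a dense renumbering map for locals.
fn simplify_locals(
    num_locals: usize,
    mut stmts: Vec<Assign>,
    num_args: usize,
) -> (Vec<Assign>, Vec<Option<usize>>) {
    loop {
        // Count reads of every local (return place / arguments always count as used).
        let mut use_count = vec![0usize; num_locals];
        for s in &stmts {
            for &u in &s.uses {
                use_count[u] += 1;
            }
        }
        let is_used = |l: usize| l < num_args || use_count[l] > 0;
        let before = stmts.len();
        stmts.retain(|s| is_used(s.dest));
        if stmts.len() == before {
            // Fixed point reached: build the dense renumbering map.
            let mut map = vec![None; num_locals];
            let mut next = 0;
            for l in 0..num_locals {
                if is_used(l) {
                    map[l] = Some(next);
                    next += 1;
                }
            }
            return (stmts, map);
        }
    }
}

fn main() {
    // _0 = f(_1); _2 = g(_1); _3 = h(_2);  nothing reads _3, and then _2 dies too.
    let stmts = vec![
        Assign { dest: 0, uses: vec![1] },
        Assign { dest: 2, uses: vec![1] },
        Assign { dest: 3, uses: vec![2] },
    ];
    let (kept, map) = simplify_locals(4, stmts, 2); // locals 0 and 1 are return place / arg
    assert_eq!(kept.len(), 1);
    assert_eq!(map, vec![Some(0), Some(1), None, None]);
}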
StatementKind::SetDiscriminant { ref place, variant_index: _ } => { 114,072 ( 0.00%) self.visit_lhs(place, location); . } . } 1,159,030 ( 0.00%) } . . fn visit_local(&mut self, local: &Local, _ctx: PlaceContext, _location: Location) { 476,183 ( 0.00%) if self.increment { 541,956 ( 0.00%) self.use_count[*local] += 1; . } else { 32,682 ( 0.00%) assert_ne!(self.use_count[*local], 0); 10,894 ( 0.00%) self.use_count[*local] -= 1; . } . } . } . . /// Removes unused definitions. Updates the used locals to reflect the changes made. . fn remove_unused_definitions(used_locals: &mut UsedLocals, body: &mut Body<'_>) { . // The use counts are updated as we remove the statements. A local might become unused . // during the retain operation, leading to a temporary inconsistency (storage statements or . // definitions referencing the local might remain). For correctness it is crucial that this . // computation reaches a fixed point. . . let mut modified = true; 90,956 ( 0.00%) while modified { 6,987 ( 0.00%) modified = false; . . for data in body.basic_blocks_mut() { . // Remove unnecessary StorageLive and StorageDead annotations. 440,744 ( 0.00%) data.statements.retain(|statement| { 2,267,132 ( 0.00%) let keep = match &statement.kind { . StatementKind::StorageLive(local) | StatementKind::StorageDead(local) => { 276,933 ( 0.00%) used_locals.is_used(*local) . } 324,392 ( 0.00%) StatementKind::Assign(box (place, _)) => used_locals.is_used(place.local), . . StatementKind::SetDiscriminant { ref place, .. } => { 46,556 ( 0.00%) used_locals.is_used(place.local) . } . _ => true, . }; . 429,620 ( 0.00%) if !keep { . trace!("removing statement {:?}", statement); 30,763 ( 0.00%) modified = true; . used_locals.statement_removed(statement); . } . . keep . }); . } . } . } -- line 555 ---------------------------------------- -- line 560 ---------------------------------------- . } . . impl<'tcx> MutVisitor<'tcx> for LocalUpdater<'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { . self.tcx . } . . fn visit_local(&mut self, l: &mut Local, _: PlaceContext, _: Location) { 677,892 ( 0.00%) *l = self.map[*l].unwrap(); . } . } 11,882,678 ( 0.02%) -------------------------------------------------------------------------------- The following files chosen for auto-annotation could not be found: -------------------------------------------------------------------------------- ./elf/../elf/dl-tls.c ./elf/../sysdeps/x86_64/tls_get_addr.S ./malloc/malloc.c ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/strcmp-avx2.S ./string/../sysdeps/x86_64/multiarch/strlen-avx2.S /tmp/gcc-build/x86_64-unknown-linux-gnu/libstdc++-v3/libsupc++/../../../../gcc-5.5.0/libstdc++-v3/libsupc++/new_op.cc -------------------------------------------------------------------------------- Ir -------------------------------------------------------------------------------- 11,769,889,480 (22.22%) events annotated