--------------------------------------------------------------------------------
I1 cache: 65536 B, 64 B, 4-way associative
D1 cache: 32768 B, 64 B, 8-way associative
LL cache: 67108864 B, 64 B, 64-way associative
Command: /usr/home/liquid/.rustup/toolchains/w-profiling/bin/rustc --crate-name unindent --edition=2018 src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata -C embed-bitcode=no -C debuginfo=2 -C metadata=24a3c4e2e92ca9e5 -C extra-filename=-24a3c4e2e92ca9e5 --out-dir /usr/home/liquid/tmp/.tmpc4SI4t/target/debug/deps -L dependency=/usr/home/liquid/tmp/.tmpc4SI4t/target/debug/deps -Adeprecated -Aunknown-lints -Zincremental-verify-ich
Data file: results/cgout-w-profiling-unindent-0.1.7-Check-Full
Events recorded: Ir
Events shown: Ir
Event sort order: Ir
Thresholds: 0.1
Include dirs:
User annotated:
Auto-annotation: on
--------------------------------------------------------------------------------
Ir
--------------------------------------------------------------------------------
101,009,952 (100.0%) PROGRAM TOTALS
--------------------------------------------------------------------------------
Ir file:function
--------------------------------------------------------------------------------
10,305,124 (10.20%) ./elf/dl-lookup.c:_dl_lookup_symbol_x
4,835,679 ( 4.79%) ./elf/dl-lookup.c:do_lookup_x
4,141,271 ( 4.10%) ./elf/../sysdeps/x86_64/dl-machine.h:_dl_relocate_object
3,244,803 ( 3.21%) ./malloc/malloc.c:_int_malloc
2,793,830 ( 2.77%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/opaque.rs:>::decode
2,522,222 ( 2.50%) ./malloc/malloc.c:_int_free
2,475,668 ( 2.45%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs:>::decode
2,328,030 ( 2.30%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:>::decode
2,139,033 ( 2.12%) ./elf/do-rel.h:_dl_relocate_object
1,727,379 ( 1.71%) ./malloc/malloc.c:malloc
1,395,246 ( 1.38%) ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_avx_unaligned_erms
1,362,217 ( 1.35%) ???:SetImpliedBits(llvm::FeatureBitset&, llvm::FeatureBitset const&, llvm::ArrayRef)
1,071,721 ( 1.06%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}> as core::iter::traits::iterator::Iterator>::fold::<(), core::iter::traits::iterator::Iterator::for_each::call as alloc::vec::spec_extend::SpecExtend, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}>>>::spec_extend::{closure#0}>::{closure#0}>
1,067,761 ( 1.06%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:::new_imported_source_file
1,064,836 ( 1.05%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:::new_imported_source_file
864,856 ( 0.86%) ./elf/dl-lookup.c:check_match
778,908 ( 0.77%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_metadata/src/rmeta/decoder.rs:, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}> as core::iter::traits::iterator::Iterator>::fold::<(), core::iter::traits::iterator::Iterator::for_each::call as alloc::vec::spec_extend::SpecExtend, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}>>>::spec_extend::{closure#0}>::{closure#0}>
710,656 ( 0.70%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}> as core::iter::traits::iterator::Iterator>::fold::<(), core::iter::traits::iterator::Iterator::for_each::call as alloc::vec::spec_extend::SpecExtend, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}>>>::spec_extend::{closure#0}>::{closure#0}>
698,003 ( 0.69%) ./malloc/malloc.c:free
529,040 ( 0.52%) ./string/../sysdeps/x86_64/strcmp.S:strcmp
480,219 ( 0.48%) ./malloc/malloc.c:malloc_consolidate
459,321 ( 0.45%) ???:llvm::StringMapImpl::LookupBucketFor(llvm::StringRef)
436,730 ( 0.43%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/leb128.rs:::read_u32
377,113 ( 0.37%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
376,381 ( 0.37%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/source_map.rs:::new_imported_source_file
353,216 ( 0.35%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cmp.rs:>::decode
353,216 ( 0.35%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs:>::decode
353,216 ( 0.35%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/iter/range.rs:>::decode
316,905 ( 0.31%) ./malloc/malloc.c:unlink_chunk.constprop.0
287,846 ( 0.28%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/leb128.rs:::read_u32
267,326 ( 0.26%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_metadata/src/rmeta/decoder.rs:>::decode
256,424 ( 0.25%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/rustc-hash-1.1.0/src/lib.rs:<&str as core::hash::Hash>::hash::
241,385 ( 0.24%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/leb128.rs:::read_usize
221,269 ( 0.22%) ./stdlib/cxa_finalize.c:__cxa_finalize
221,153 ( 0.22%) ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S:__memcmp_avx2_movbe
217,155 ( 0.21%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>::insert::>::{closure#0}>
211,432 ( 0.21%) ./malloc/malloc.c:realloc
202,347 ( 0.20%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::hash::
201,047 ( 0.20%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/indexmap-1.8.0/src/map/core.rs:, alloc::vec::Vec>>::or_default
194,663 ( 0.19%) /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs:__rdl_alloc
183,855 ( 0.18%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:)>>::remove_entry::>::{closure#0}>
180,623 ( 0.18%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs:>::decode
177,005 ( 0.18%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::intern_ty
176,641 ( 0.17%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
175,644 ( 0.17%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/indexmap-1.8.0/src/map/core/raw.rs:, alloc::vec::Vec>>::entry
172,365 ( 0.17%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.11.2/src/raw/mod.rs:, alloc::vec::Vec>>::or_default
166,610 ( 0.16%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::eq
165,920 ( 0.16%) ???:llvm::StringMapImpl::RehashTable(unsigned int)
163,793 ( 0.16%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:>, (), core::hash::BuildHasherDefault>>::from_hash::>>::{closure#0}>
154,451 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::_intern_substs
152,786 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/library/std/src/path.rs:::hash::
148,302 ( 0.15%) ./malloc/malloc.c:_int_realloc
147,004 ( 0.15%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.11.2/src/raw/mod.rs:>::reserve_rehash::, alloc::vec::Vec>::{closure#0}>
145,266 ( 0.14%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:::_intern_substs
143,767 ( 0.14%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:>>::insert
137,775 ( 0.14%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::, rustc_middle::ty::context::Interned>::{closure#0}>
136,435 ( 0.14%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>, (), core::hash::BuildHasherDefault>>::from_hash::>>::{closure#0}>
133,285 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cmp.rs:::hash::
130,640 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/opaque.rs:::read_usize
128,393 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::hash::
127,631 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/leb128.rs:::read_usize
123,152 ( 0.12%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::from_key_hashed_nocheck::
122,360 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:::shallow_resolve_ty
121,225 ( 0.12%) ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_erms
118,428 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/raw_vec.rs:>::reserve_for_push
117,047 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs:::short_write_process_buffer::
113,836 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/symbol.rs:::intern
113,246 ( 0.11%) ./stdio-common/vfscanf-internal.c:__vfscanf_internal
111,005 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/raw_vec.rs:alloc::raw_vec::finish_grow::
109,081 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/trait_def.rs:rustc_middle::ty::trait_def::trait_impls_of_provider
109,027 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs:::hash::
108,229 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/leb128.rs:>::decode
104,711 ( 0.10%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/fast_reject.rs: as rustc_serialize::serialize::Decodable>::decode
104,066 ( 0.10%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:::intern
101,991 ( 0.10%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, ())>>::reserve_rehash::, rustc_middle::ty::context::Interned, (), core::hash::BuildHasherDefault>::{closure#0}>
101,040 ( 0.10%)
/usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>::prepare_insert_slot -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs -------------------------------------------------------------------------------- Ir -- line 141 ---------------------------------------- . /// ``` . /// let a = [1, 2, 3]; . /// assert!(!a.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.39.0")] . #[inline] . pub const fn is_empty(&self) -> bool { 15,727 ( 0.02%) self.len() == 0 . } . . /// Returns the first element of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 157 ---------------------------------------- -- line 159 ---------------------------------------- . /// . /// let w: &[i32] = &[]; . /// assert_eq!(None, w.first()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn first(&self) -> Option<&T> { 51 ( 0.00%) if let [first, ..] = self { Some(first) } else { None } . } . . /// Returns a mutable pointer to the first element of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 175 ---------------------------------------- -- line 178 ---------------------------------------- . /// *first = 5; . /// } . /// assert_eq!(x, &[5, 1, 2]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")] . #[inline] . pub const fn first_mut(&mut self) -> Option<&mut T> { 24 ( 0.00%) if let [first, ..] = self { Some(first) } else { None } . } . . /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &[0, 1, 2]; -- line 194 ---------------------------------------- -- line 237 ---------------------------------------- . /// assert_eq!(last, &2); . /// assert_eq!(elements, &[0, 1]); . /// } . /// ``` . #[stable(feature = "slice_splits", since = "1.5.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn split_last(&self) -> Option<(&T, &[T])> { 222 ( 0.00%) if let [init @ .., last] = self { Some((last, init)) } else { None } . } . . /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 253 ---------------------------------------- -- line 276 ---------------------------------------- . /// . /// let w: &[i32] = &[]; . /// assert_eq!(None, w.last()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn last(&self) -> Option<&T> { 5,708 ( 0.01%) if let [.., last] = self { Some(last) } else { None } . } . . /// Returns a mutable pointer to the last item in the slice. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 292 ---------------------------------------- -- line 295 ---------------------------------------- . /// *last = 10; . /// } . /// assert_eq!(x, &[0, 1, 10]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . 
#[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")] . #[inline] . pub const fn last_mut(&mut self) -> Option<&mut T> { 9,247 ( 0.01%) if let [.., last] = self { Some(last) } else { None } . } . . /// Returns a reference to an element or subslice depending on the type of . /// index. . /// . /// - If given a position, returns a reference to the element at that . /// position or `None` if out of bounds. . /// - If given a range, returns the subslice corresponding to that range, -- line 311 ---------------------------------------- -- line 448 ---------------------------------------- . /// } . /// ``` . /// . /// [`as_mut_ptr`]: slice::as_mut_ptr . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")] . #[inline] . pub const fn as_ptr(&self) -> *const T { 17,048 ( 0.02%) self as *const [T] as *const T . } . . /// Returns an unsafe mutable pointer to the slice's buffer. . /// . /// The caller must ensure that the slice outlives the pointer this . /// function returns, or else it will end up pointing to garbage. . /// . /// Modifying the container referenced by this slice may cause its buffer -- line 464 ---------------------------------------- -- line 476 ---------------------------------------- . /// } . /// } . /// assert_eq!(x, &[3, 4, 6]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")] . #[inline] . pub const fn as_mut_ptr(&mut self) -> *mut T { 1 ( 0.00%) self as *mut [T] as *mut T . } . . /// Returns the two raw pointers spanning the slice. . /// . /// The returned range is half-open, which means that the end pointer . /// points *one past* the last element of the slice. This way, an empty . /// slice is represented by two equal pointers, and the difference between . /// the two pointers represents the size of the slice. -- line 492 ---------------------------------------- -- line 582 ---------------------------------------- . /// v.swap(2, 4); . /// assert!(v == ["a", "b", "e", "d", "c"]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_swap", issue = "83163")] . #[inline] . #[track_caller] . pub const fn swap(&mut self, a: usize, b: usize) { 92 ( 0.00%) let _ = &self[a]; 440 ( 0.00%) let _ = &self[b]; . . // SAFETY: we just checked that both `a` and `b` are in bounds . unsafe { self.swap_unchecked(a, b) } . } . . /// Swaps two elements in the slice, without doing bounds checking. . /// . /// For a safe alternative see [`swap`]. -- line 599 ---------------------------------------- -- line 677 ---------------------------------------- . . // Because this function is first compiled in isolation, . // this check tells LLVM that the indexing below is . // in-bounds. Then after inlining -- once the actual . // lengths of the slices are known -- it's removed. . let (a, b) = (&mut a[..n], &mut b[..n]); . . for i in 0..n { 88 ( 0.00%) mem::swap(&mut a[i], &mut b[n - 1 - i]); . } . } . } . . /// Returns an iterator over the slice. . /// . /// # Examples . /// -- line 693 ---------------------------------------- -- line 1499 ---------------------------------------- . /// assert_eq!(left, [1, 2, 3, 4, 5, 6]); . /// assert_eq!(right, []); . /// } . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . #[track_caller] . pub fn split_at(&self, mid: usize) -> (&[T], &[T]) { 18 ( 0.00%) assert!(mid <= self.len()); . 
// SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which . // fulfills the requirements of `from_raw_parts_mut`. . unsafe { self.split_at_unchecked(mid) } . } . . /// Divides one mutable slice into two at an index. . /// . /// The first will contain all indices from `[0, mid)` (excluding -- line 1515 ---------------------------------------- -- line 1530 ---------------------------------------- . /// left[1] = 2; . /// right[1] = 4; . /// assert_eq!(v, [1, 2, 3, 4, 5, 6]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . #[track_caller] . pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) { 924 ( 0.00%) assert!(mid <= self.len()); . // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which . // fulfills the requirements of `from_raw_parts_mut`. . unsafe { self.split_at_mut_unchecked(mid) } . } . . /// Divides one slice into two at an index, without doing bounds checking. . /// . /// The first will contain all indices from `[0, mid)` (excluding -- line 1546 ---------------------------------------- -- line 1628 ---------------------------------------- . pub unsafe fn split_at_mut_unchecked(&mut self, mid: usize) -> (&mut [T], &mut [T]) { . let len = self.len(); . let ptr = self.as_mut_ptr(); . . // SAFETY: Caller has to check that `0 <= mid <= self.len()`. . // . // `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference . // is fine. 134 ( 0.00%) unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) } . } . . /// Divides one slice into an array and a remainder slice at an index. . /// . /// The array will contain all indices from `[0, N)` (excluding . /// the index `N` itself) and the slice will contain all . /// indices from `[N, len)` (excluding the index `len` itself). . /// -- line 1644 ---------------------------------------- -- line 2113 ---------------------------------------- . /// assert!(!v.iter().any(|e| e == "hi")); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . pub fn contains(&self, x: &T) -> bool . where . T: PartialEq, . { 6 ( 0.00%) cmp::SliceContains::slice_contains(x, self) . } . . /// Returns `true` if `needle` is a prefix of the slice. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 2129 ---------------------------------------- -- line 2142 ---------------------------------------- . /// assert!(v.starts_with(&[])); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn starts_with(&self, needle: &[T]) -> bool . where . T: PartialEq, . { . let n = needle.len(); 5,125 ( 0.01%) self.len() >= n && needle == &self[..n] . } . . /// Returns `true` if `needle` is a suffix of the slice. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 2158 ---------------------------------------- -- line 2171 ---------------------------------------- . /// assert!(v.ends_with(&[])); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn ends_with(&self, needle: &[T]) -> bool . where . T: PartialEq, . { . let (m, n) = (self.len(), needle.len()); 2,208 ( 0.00%) m >= n && needle == &self[m - n..] . } . . /// Returns a subslice with the prefix removed. . /// . /// If the slice starts with `prefix`, returns the subslice after the prefix, wrapped in `Some`. . /// If `prefix` is empty, simply returns the original slice. . /// . /// If the slice does not start with `prefix`, returns `None`. 
-- line 2187 ---------------------------------------- -- line 2293 ---------------------------------------- . /// s.insert(idx, num); . /// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn binary_search(&self, x: &T) -> Result . where . T: Ord, . { 3 ( 0.00%) self.binary_search_by(|p| p.cmp(x)) . } . . /// Binary searches this sorted slice with a comparator function. . /// . /// The comparator function should implement an order consistent . /// with the sort order of the underlying slice, returning an . /// order code that indicates whether its argument is `Less`, . /// `Equal` or `Greater` the desired target. -- line 2309 ---------------------------------------- -- line 2345 ---------------------------------------- . #[inline] . pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result . where . F: FnMut(&'a T) -> Ordering, . { . let mut size = self.len(); . let mut left = 0; . let mut right = size; 50,055 ( 0.05%) while left < right { 64,339 ( 0.06%) let mid = left + size / 2; . . // SAFETY: the call is made safe by the following invariants: . // - `mid >= 0` . // - `mid < size`: `mid` is limited by `[left; right)` bound. 30,103 ( 0.03%) let cmp = f(unsafe { self.get_unchecked(mid) }); . . // The reason why we use if/else control flow rather than match . // is because match reorders comparison operations, which is perf sensitive. . // This is x86 asm for u8: https://rust.godbolt.org/z/8Y8Pra. 19,892 ( 0.02%) if cmp == Less { 21,068 ( 0.02%) left = mid + 1; 10,709 ( 0.01%) } else if cmp == Greater { . right = mid; . } else { . // SAFETY: same as the `get_unchecked` above . unsafe { crate::intrinsics::assume(mid < self.len()) }; . return Ok(mid); . } . 57,369 ( 0.06%) size = right - left; . } . Err(left) . } . . /// Binary searches this sorted slice with a key extraction function. . /// . /// Assumes that the slice is sorted by the key, for instance with . /// [`sort_by_key`] using the same key extraction function. -- line 2382 ---------------------------------------- -- line 3203 ---------------------------------------- . #[track_caller] . fn len_mismatch_fail(dst_len: usize, src_len: usize) -> ! { . panic!( . "source slice length ({}) does not match destination slice length ({})", . src_len, dst_len, . ); . } . 1,858 ( 0.00%) if self.len() != src.len() { . len_mismatch_fail(self.len(), src.len()); . } . . // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was . // checked to have the same length. The slices cannot overlap because . // mutable references are exclusive. . unsafe { . ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len()); -- line 3219 ---------------------------------------- -- line 3382 ---------------------------------------- . } . let gcd: usize = gcd(mem::size_of::(), mem::size_of::()); . let ts: usize = mem::size_of::() / gcd; . let us: usize = mem::size_of::() / gcd; . . // Armed with this knowledge, we can find how many `U`s we can fit! . let us_len = self.len() / ts * us; . // And how many `T`s will be in the trailing slice! 100 ( 0.00%) let ts_len = self.len() % ts; . (us_len, ts_len) . } . . /// Transmute the slice to a slice of another type, ensuring alignment of the types is . /// maintained. . /// . /// This method splits the slice into three distinct slices: prefix, correctly aligned middle . /// slice of a new type, and the suffix slice. 
The method may make the middle slice the greatest -- line 3398 ---------------------------------------- -- line 3429 ---------------------------------------- . return (self, &[], &[]); . } . . // First, find at what point do we split between the first and 2nd slice. Easy with . // ptr.align_offset. . let ptr = self.as_ptr(); . // SAFETY: See the `align_to_mut` method for the detailed safety comment. . let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::()) }; 100 ( 0.00%) if offset > self.len() { . (self, &[], &[]) . } else { . let (left, rest) = self.split_at(offset); . let (us_len, ts_len) = rest.align_to_offsets::(); . // SAFETY: now `rest` is definitely aligned, so `from_raw_parts` below is okay, . // since the caller guarantees that we can transmute `T` to `U` safely. . unsafe { . ( . left, . from_raw_parts(rest.as_ptr() as *const U, us_len), 100 ( 0.00%) from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len), . ) . } . } . } . . /// Transmute the slice to a slice of another type, ensuring alignment of the types is . /// maintained. . /// -- line 3456 ---------------------------------------- -- line 3731 ---------------------------------------- . /// let v = [1, 2, 3, 3, 5, 6, 7]; . /// let i = v.partition_point(|&x| x < 5); . /// . /// assert_eq!(i, 4); . /// assert!(v[..i].iter().all(|&x| x < 5)); . /// assert!(v[i..].iter().all(|&x| !(x < 5))); . /// ``` . #[stable(feature = "partition_point", since = "1.52.0")] 2 ( 0.00%) pub fn partition_point

(&self, mut pred: P) -> usize . where . P: FnMut(&T) -> bool, . { . self.binary_search_by(|x| if pred(x) { Less } else { Greater }).unwrap_or_else(|i| i) 4 ( 0.00%) } . . /// Removes the subslice corresponding to the given range . /// and returns a reference to it. . /// . /// Returns `None` and does not modify the slice if the given . /// range is out of bounds. . /// . /// Note that this method only accepts one-sided ranges such as -- line 3752 ---------------------------------------- 33,964 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.11.2/src/raw/mod.rs -------------------------------------------------------------------------------- Ir -- line 114 ---------------------------------------- . const EMPTY: u8 = 0b1111_1111; . . /// Control byte value for a deleted bucket. . const DELETED: u8 = 0b1000_0000; . . /// Checks whether a control byte represents a full bucket (top bit is clear). . #[inline] . fn is_full(ctrl: u8) -> bool { 13,859 ( 0.01%) ctrl & 0x80 == 0 . } . . /// Checks whether a control byte represents a special value (top bit is set). . #[inline] . fn is_special(ctrl: u8) -> bool { . ctrl & 0x80 != 0 . } . -- line 130 ---------------------------------------- -- line 146 ---------------------------------------- . /// Secondary hash function, saved in the low 7 bits of the control byte. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h2(hash: u64) -> u8 { . // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit . // value, some hash functions (such as FxHash) produce a usize result . // instead, which means that the top 32 bits are 0 on 32-bit platforms. . let hash_len = usize::min(mem::size_of::(), mem::size_of::()); 13,339 ( 0.01%) let top7 = hash >> (hash_len * 8 - 7); . (top7 & 0x7f) as u8 // truncation . } . . /// Probe sequence based on triangular numbers, which is guaranteed (since our . /// table size is a power of two) to visit every group of elements exactly once. . /// . /// A triangular probe has us jump by 1 more group every time. So first we . /// jump by 1 group (meaning we just continue our linear scan), then 2 groups -- line 162 ---------------------------------------- -- line 173 ---------------------------------------- . #[inline] . fn move_next(&mut self, bucket_mask: usize) { . // We should have found an empty bucket by now and ended the probe. . debug_assert!( . self.stride <= bucket_mask, . "Went past end of probe sequence" . ); . 1,192 ( 0.00%) self.stride += Group::WIDTH; 1,192 ( 0.00%) self.pos += self.stride; 596 ( 0.00%) self.pos &= bucket_mask; . } . } . . /// Returns the number of buckets needed to hold the given number of items, . /// taking the maximum load factor into account. . /// . /// Returns `None` if an overflow occurs. . // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258 . #[cfg_attr(target_os = "emscripten", inline(never))] . #[cfg_attr(not(target_os = "emscripten"), inline)] . fn capacity_to_buckets(cap: usize) -> Option { . debug_assert_ne!(cap, 0); . . // For small tables we require at least 1 empty bucket so that lookups are . // guaranteed to terminate if an element doesn't exist in the table. 540 ( 0.00%) if cap < 8 { . // We don't bother with a table size of 2 buckets since that can only . // hold a single element. Instead we skip directly to a 4 bucket table . // which can hold 3 elements. 
820 ( 0.00%) return Some(if cap < 4 { 4 } else { 8 }); . } . . // Otherwise require 1/8 buckets to be empty (87.5% load) . // . // Be careful when modifying this, calculate_layout relies on the . // overflow check here. 636 ( 0.00%) let adjusted_cap = cap.checked_mul(8)? / 7; . . // Any overflows will have been caught by the checked_mul. Also, any . // rounding errors from the division above will be cleaned up by . // next_power_of_two (which can't overflow because of the previous divison). . Some(adjusted_cap.next_power_of_two()) . } . . /// Returns the maximum effective capacity for the given bucket mask, taking . /// the maximum load factor into account. . #[inline] . fn bucket_mask_to_capacity(bucket_mask: usize) -> usize { 2,129 ( 0.00%) if bucket_mask < 8 { . // For tables with 1/2/4/8 buckets, we always reserve one empty slot. . // Keep in mind that the bucket mask is one less than the bucket count. . bucket_mask . } else { . // For larger tables we reserve 12.5% of the slots as empty. 456 ( 0.00%) ((bucket_mask + 1) / 8) * 7 . } . } . . /// Helper which allows the max calculation for ctrl_align to be statically computed for each T . /// while keeping the rest of `calculate_layout_for` independent of `T` . #[derive(Copy, Clone)] . struct TableLayout { . size: usize, -- line 236 ---------------------------------------- -- line 249 ---------------------------------------- . . #[inline] . fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> { . debug_assert!(buckets.is_power_of_two()); . . let TableLayout { size, ctrl_align } = self; . // Manual layout calculation since Layout methods are not yet stable. . let ctrl_offset = 1,132 ( 0.00%) size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1); 1,402 ( 0.00%) let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?; . . Some(( . unsafe { Layout::from_size_align_unchecked(len, ctrl_align) }, . ctrl_offset, . )) . } . } . -- line 266 ---------------------------------------- -- line 636 ---------------------------------------- . } . } . } . . /// Ensures that at least `additional` items can be inserted into the table . /// without reallocation. . #[cfg_attr(feature = "inline-more", inline)] . pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) { 13 ( 0.00%) if additional > self.table.growth_left { . // Avoid `Result::unwrap_or_else` because it bloats LLVM IR. 1,359 ( 0.00%) if self . .reserve_rehash(additional, hasher, Fallibility::Infallible) . .is_err() . { . unsafe { hint::unreachable_unchecked() } . } . } . } . -- line 654 ---------------------------------------- -- line 665 ---------------------------------------- . } else { . Ok(()) . } . } . . /// Out-of-line slow path for `reserve` and `try_reserve`. . #[cold] . #[inline(never)] 2,269 ( 0.00%) fn reserve_rehash( . &mut self, . additional: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, . ) -> Result<(), TryReserveError> { . // Avoid `Option::ok_or_else` because it bloats LLVM IR. 540 ( 0.00%) let new_items = match self.table.items.checked_add(additional) { . Some(new_items) => new_items, . None => return Err(fallibility.capacity_overflow()), . }; 540 ( 0.00%) let full_capacity = bucket_mask_to_capacity(self.table.bucket_mask); 1,080 ( 0.00%) if new_items <= full_capacity / 2 { . // Rehash in-place without re-allocating if we have plenty of spare . // capacity that is locked up due to DELETED entries. . self.rehash_in_place(hasher); . Ok(()) . } else { . 
// Otherwise, conservatively resize to at least the next size up . // to avoid churning deletes into frequent rehashes. . self.resize( 270 ( 0.00%) usize::max(new_items, full_capacity + 1), . hasher, . fallibility, . ) . } 2,430 ( 0.00%) } . . /// Rehashes the contents of the table in place (i.e. without changing the . /// allocation). . /// . /// If `hasher` panics then some the table's contents may be lost. . fn rehash_in_place(&mut self, hasher: impl Fn(&T) -> u64) { . unsafe { . // If the hash function panics then properly clean up any elements -- line 707 ---------------------------------------- -- line 780 ---------------------------------------- . fn resize( . &mut self, . capacity: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, . ) -> Result<(), TryReserveError> { . unsafe { . let mut new_table = 2,171 ( 0.00%) self.table . .prepare_resize(TableLayout::new::(), capacity, fallibility)?; . . // Copy all elements to the new table. . for item in self.iter() { . // This may panic. 7,454 ( 0.01%) let hash = hasher(item.as_ref()); . . // We can use a simpler version of insert() here since: . // - there are no DELETED entries. . // - we know there is enough space in the table. . // - all elements are unique. . let (index, _) = new_table.prepare_insert_slot(hash); 7,454 ( 0.01%) new_table.bucket(index).copy_from_nonoverlapping(&item); . } . . // We successfully copied all elements without panicking. Now replace . // self with the new table. The old table will have its memory freed but . // the items will not be dropped (since they have been moved into the . // new table). . mem::swap(&mut self.table, &mut new_table); . 270 ( 0.00%) Ok(()) . } . } . . /// Inserts a new element into the table, and returns its raw bucket. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] . pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket { . unsafe { . let mut index = self.table.find_insert_slot(hash); . . // We can avoid growing the table once we have reached our load . // factor if we are replacing a tombstone. This works since the . // number of EMPTY slots does not change in this case. 34 ( 0.00%) let old_ctrl = *self.table.ctrl(index); 22,140 ( 0.02%) if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) { . self.reserve(1, hasher); . index = self.table.find_insert_slot(hash); . } . . self.table.record_item_insert_at(index, old_ctrl, hash); . 5,543 ( 0.01%) let bucket = self.bucket(index); . bucket.write(value); . bucket . } . } . . /// Attempts to insert a new element without growing the table and return its raw bucket. . /// . /// Returns an `Err` containing the given element if inserting it would require growing the -- line 841 ---------------------------------------- -- line 911 ---------------------------------------- . true . } else { . false . } . } . . /// Searches for an element in the table. . #[inline] 1,842 ( 0.00%) pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option> { . unsafe { 39,919 ( 0.04%) for bucket in self.iter_hash(hash) { . let elm = bucket.as_ref(); 4,419 ( 0.00%) if likely(eq(elm)) { . return Some(bucket); . } . } . None . } 2,456 ( 0.00%) } . . /// Gets a reference to an element in the table. . #[inline] . pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> { . // Avoid `Option::map` because it bloats LLVM IR. 544 ( 0.00%) match self.find(hash, eq) { . 
Some(bucket) => Some(unsafe { bucket.as_ref() }), . None => None, . } . } . . /// Gets a mutable reference to an element in the table. . #[inline] . pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> { -- line 943 ---------------------------------------- -- line 1007 ---------------------------------------- . } . . /// Returns the number of elements the map can hold without reallocating. . /// . /// This number is a lower bound; the table might be able to hold . /// more, but is guaranteed to be able to hold at least this many. . #[cfg_attr(feature = "inline-more", inline)] . pub fn capacity(&self) -> usize { 546 ( 0.00%) self.table.items + self.table.growth_left . } . . /// Returns the number of elements in the table. . #[cfg_attr(feature = "inline-more", inline)] . pub fn len(&self) -> usize { . self.table.items . } . . /// Returns the number of buckets in the table. . #[cfg_attr(feature = "inline-more", inline)] . pub fn buckets(&self) -> usize { 12 ( 0.00%) self.table.bucket_mask + 1 . } . . /// Returns an iterator over every element in the table. It is up to . /// the caller to ensure that the `RawTable` outlives the `RawIter`. . /// Because we cannot make the `next` method unsafe on the `RawIter` . /// struct, we have to make the `iter` method unsafe. . #[cfg_attr(feature = "inline-more", inline)] . pub unsafe fn iter(&self) -> RawIter { 270 ( 0.00%) let data = Bucket::from_base_index(self.data_end(), 0); . RawIter { . iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()), . items: self.table.items, . } . } . . /// Returns an iterator over occupied buckets that could match a given hash. . /// -- line 1044 ---------------------------------------- -- line 1122 ---------------------------------------- . } . . unsafe impl Send for RawTable where T: Send {} . unsafe impl Sync for RawTable where T: Sync {} . . impl RawTableInner { . #[cfg_attr(feature = "inline-more", inline)] . const fn new_in(alloc: A) -> Self { 1,071 ( 0.00%) Self { . // Be careful to cast the entire slice to a raw pointer. 568 ( 0.00%) ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) }, . bucket_mask: 0, . items: 0, . growth_left: 0, . alloc, . } . } . } . -- line 1140 ---------------------------------------- -- line 1149 ---------------------------------------- . debug_assert!(buckets.is_power_of_two()); . . // Avoid `Option::ok_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match table_layout.calculate_layout_for(buckets) { . Some(lco) => lco, . None => return Err(fallibility.capacity_overflow()), . }; . 540 ( 0.00%) let ptr: NonNull = match do_alloc(&alloc, layout) { . Ok(block) => block.cast(), . Err(_) => return Err(fallibility.alloc_err(layout)), . }; . . let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset)); . Ok(Self { . ctrl, 291 ( 0.00%) bucket_mask: buckets - 1, . items: 0, . growth_left: bucket_mask_to_capacity(buckets - 1), . alloc, . }) . } . . #[inline] 1,390 ( 0.00%) fn fallible_with_capacity( . alloc: A, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result { 10 ( 0.00%) if capacity == 0 { . Ok(Self::new_in(alloc)) . } else { . unsafe { . let buckets = . capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?; . . let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?; . result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes()); . 835 ( 0.00%) Ok(result) . } . } 1,120 ( 0.00%) } . . 
/// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element and sets the hash for that slot. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] . unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) { . let index = self.find_insert_slot(hash); -- line 1200 ---------------------------------------- -- line 1208 ---------------------------------------- . /// . /// There must be at least 1 empty bucket in the table. . #[inline] . fn find_insert_slot(&self, hash: u64) -> usize { . let mut probe_seq = self.probe_seq(hash); . loop { . unsafe { . let group = Group::load(self.ctrl(probe_seq.pos)); 13,561 ( 0.01%) if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() { 39,798 ( 0.04%) let result = (probe_seq.pos + bit) & self.bucket_mask; . . // In tables smaller than the group width, trailing control . // bytes outside the range of the table are filled with . // EMPTY entries. These will unfortunately trigger a . // match, but once masked may point to a full bucket that . // is already occupied. We detect this situation here and . // perform a second scan starting at the begining of the . // table. This second scan is guaranteed to find an empty . // slot (due to the load factor) before hitting the trailing . // control bytes (containing EMPTY). 18,809 ( 0.02%) if unlikely(is_full(*self.ctrl(result))) { . debug_assert!(self.bucket_mask < Group::WIDTH); . debug_assert_ne!(probe_seq.pos, 0); . return Group::load_aligned(self.ctrl(0)) . .match_empty_or_deleted() . .lowest_set_bit_nonzero(); . } . . return result; -- line 1236 ---------------------------------------- -- line 1278 ---------------------------------------- . /// Returns an iterator-like object for a probe sequence on the table. . /// . /// This iterator never terminates, but is guaranteed to visit each bucket . /// group exactly once. The loop using `probe_seq` must terminate upon . /// reaching a group containing an empty bucket. . #[inline] . fn probe_seq(&self, hash: u64) -> ProbeSeq { . ProbeSeq { 66,123 ( 0.07%) pos: h1(hash) & self.bucket_mask, . stride: 0, . } . } . . /// Returns the index of a bucket for which a value must be inserted if there is enough rooom . /// in the table, otherwise returns error . #[cfg(feature = "raw")] . #[inline] -- line 1294 ---------------------------------------- -- line 1300 ---------------------------------------- . } else { . self.record_item_insert_at(index, old_ctrl, hash); . Ok(index) . } . } . . #[inline] . unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) { 27,715 ( 0.03%) self.growth_left -= special_is_empty(old_ctrl) as usize; . self.set_ctrl_h2(index, hash); 22,172 ( 0.02%) self.items += 1; . } . . #[inline] . fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool { . let probe_seq_pos = self.probe_seq(hash).pos; . let probe_index = . |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH; . probe_index(i) == probe_index(new_i) -- line 1318 ---------------------------------------- -- line 1349 ---------------------------------------- . // replicate the buckets at the end of the trailing group. For example . // with 2 buckets and a group size of 4, the control bytes will look . // like this: . // . // Real | Replicated . // --------------------------------------------- . // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] | . 
// --------------------------------------------- 38,991 ( 0.04%) let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH; . 12,997 ( 0.01%) *self.ctrl(index) = ctrl; 12,997 ( 0.01%) *self.ctrl(index2) = ctrl; . } . . /// Returns a pointer to a control byte. . #[inline] . unsafe fn ctrl(&self, index: usize) -> *mut u8 { . debug_assert!(index < self.num_ctrl_bytes()); . self.ctrl.as_ptr().add(index) . } . . #[inline] . fn buckets(&self) -> usize { 540 ( 0.00%) self.bucket_mask + 1 . } . . #[inline] . fn num_ctrl_bytes(&self) -> usize { . self.bucket_mask + 1 + Group::WIDTH . } . . #[inline] . fn is_empty_singleton(&self) -> bool { 1,652 ( 0.00%) self.bucket_mask == 0 . } . . #[allow(clippy::mut_mut)] . #[inline] 231 ( 0.00%) unsafe fn prepare_resize( . &self, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result, TryReserveError> { . debug_assert!(self.items <= capacity); . . // Allocate and initialize the new table. 700 ( 0.00%) let mut new_table = RawTableInner::fallible_with_capacity( . self.alloc.clone(), . table_layout, . capacity, . fallibility, . )?; 540 ( 0.00%) new_table.growth_left -= self.items; . new_table.items = self.items; . . // The hash function may panic, in which case we simply free the new . // table without dropping any elements that may have been copied into . // it. . // . // This guard is also used to free the old table on success, see . // the comment at the bottom of this function. 168 ( 0.00%) Ok(guard(new_table, move |self_| { 270 ( 0.00%) if !self_.is_empty_singleton() { . self_.free_buckets(table_layout); . } . })) 168 ( 0.00%) } . . #[inline] . unsafe fn free_buckets(&mut self, table_layout: TableLayout) { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) { . Some(lco) => lco, . None => hint::unreachable_unchecked(), . }; . self.alloc.deallocate( 98 ( 0.00%) NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)), . layout, . ); . } . . /// Marks all table buckets as empty without dropping their contents. . #[inline] . fn clear_no_drop(&mut self) { . if !self.is_empty_singleton() { -- line 1434 ---------------------------------------- -- line 1667 ---------------------------------------- . } . } . } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { 573 ( 0.00%) if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); . self.free_buckets(); . } . } . } . } . -- line 1683 ---------------------------------------- -- line 1798 ---------------------------------------- . . impl Iterator for RawIterRange { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option> { . unsafe { . loop { 849 ( 0.00%) if let Some(index) = self.current_group.lowest_set_bit() { . self.current_group = self.current_group.remove_lowest_bit(); . return Some(self.data.next_n(index)); . } . 1,404 ( 0.00%) if self.next_ctrl >= self.end { . return None; . } . . // We might read past self.end up to the next group boundary, . // but this is fine because it only occurs on tables smaller . // than the group size where the trailing control bytes are all . // EMPTY. On larger tables self.end is guaranteed to be aligned . // to the group size (since tables are power-of-two sized). -- line 1819 ---------------------------------------- -- line 1990 ---------------------------------------- . } . . 
impl Iterator for RawIter { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option> { . if let Some(b) = self.iter.next() { 14,908 ( 0.01%) self.items -= 1; . Some(b) . } else { . // We don't check against items == 0 here to allow the . // compiler to optimize away the item count entirely if the . // iterator length is never queried. . debug_assert_eq!(self.items, 0); . None . } -- line 2006 ---------------------------------------- -- line 2178 ---------------------------------------- . } . } . impl<'a, A: Allocator + Clone> RawIterHashInner<'a, A> { . #[cfg_attr(feature = "inline-more", inline)] . fn new(table: &'a RawTableInner, hash: u64) -> Self { . unsafe { . let h2_hash = h2(hash); . let probe_seq = table.probe_seq(hash); 7,130 ( 0.01%) let group = Group::load(table.ctrl(probe_seq.pos)); . let bitmask = group.match_byte(h2_hash).into_iter(); . . RawIterHashInner { . table, . h2_hash, . probe_seq, . group, . bitmask, -- line 2194 ---------------------------------------- -- line 2195 ---------------------------------------- . } . } . } . } . . impl<'a, T, A: Allocator + Clone> Iterator for RawIterHash<'a, T, A> { . type Item = Bucket; . 216 ( 0.00%) fn next(&mut self) -> Option> { . unsafe { . match self.inner.next() { 108 ( 0.00%) Some(index) => Some(self.inner.table.bucket(index)), . None => None, . } . } 216 ( 0.00%) } . } . . impl<'a, A: Allocator + Clone> Iterator for RawIterHashInner<'a, A> { . type Item = usize; . . fn next(&mut self) -> Option { . unsafe { . loop { . if let Some(bit) = self.bitmask.next() { 5,547 ( 0.01%) let index = (self.probe_seq.pos + bit) & self.table.bucket_mask; . return Some(index); . } 5,988 ( 0.01%) if likely(self.group.match_empty().any_bit_set()) { . return None; . } . self.probe_seq.move_next(self.table.bucket_mask); . self.group = Group::load(self.table.ctrl(self.probe_seq.pos)); 903 ( 0.00%) self.bitmask = self.group.match_byte(self.h2_hash).into_iter(); . } . } . } . } . . #[cfg(test)] . mod test_map { . use super::*; -- line 2236 ---------------------------------------- 59,461 ( 0.06%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/indexmap-1.8.0/src/map/core/raw.rs -------------------------------------------------------------------------------- Ir -- line 30 ---------------------------------------- . bucket.write(i - offset); . } else if i >= start { . self.indices.erase(bucket); . } . } . } . } . 72,804 ( 0.07%) pub(crate) fn entry(&mut self, hash: HashValue, key: K) -> Entry<'_, K, V> . where . K: Eq, . { . let eq = equivalent(&key, &self.entries); 2,390 ( 0.00%) match self.indices.find(hash.get(), eq) { . // SAFETY: The entry is created with a live raw bucket, at the same time . // we have a &mut reference to the map, so it can not be modified further. 4,331 ( 0.00%) Some(raw_bucket) => Entry::Occupied(OccupiedEntry { . map: self, . raw_bucket, 1,112 ( 0.00%) key, . }), 21,964 ( 0.02%) None => Entry::Vacant(VacantEntry { . map: self, . hash, 3,646 ( 0.00%) key, . }), . } 53,409 ( 0.05%) } . . pub(super) fn indices_mut(&mut self) -> impl Iterator { . // SAFETY: we're not letting any of the buckets escape this function, . // only the item references that are appropriately bound to `&mut self`. . unsafe { self.indices.iter().map(|bucket| bucket.as_mut()) } . } . . 
/// Return the raw bucket for the given index -- line 65 ---------------------------------------- -- line 139 ---------------------------------------- . pub fn index(&self) -> usize { . // SAFETY: we have &mut map keep keeping the bucket stable . unsafe { self.raw_bucket.read() } . } . . /// Converts into a mutable reference to the entry's value in the map, . /// with a lifetime bound to the map itself. . pub fn into_mut(self) -> &'a mut V { 716 ( 0.00%) let index = self.index(); 2,148 ( 0.00%) &mut self.map.entries[index].value . } . . /// Remove and return the key, value pair stored in the map for this entry . /// . /// Like `Vec::swap_remove`, the pair is removed by swapping it with the . /// last element of the map and popping it off. **This perturbs . /// the position of what used to be the last element!** . /// -- line 156 ---------------------------------------- 51,933 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/opaque.rs -------------------------------------------------------------------------------- Ir -- line 14 ---------------------------------------- . . pub type EncodeResult = Result<(), !>; . . pub struct Encoder { . pub data: Vec, . } . . impl Encoder { 1 ( 0.00%) pub fn new(data: Vec) -> Encoder { 4 ( 0.00%) Encoder { data } 1 ( 0.00%) } . 1 ( 0.00%) pub fn into_inner(self) -> Vec { 4 ( 0.00%) self.data 1 ( 0.00%) } . . #[inline] . pub fn position(&self) -> usize { 643 ( 0.00%) self.data.len() . } . } . . macro_rules! write_leb128 { . ($enc:expr, $value:expr, $int_ty:ty, $fun:ident) => {{ . const MAX_ENCODED_LEN: usize = max_leb128_len!($int_ty); . let old_len = $enc.data.len(); . -- line 40 ---------------------------------------- -- line 67 ---------------------------------------- . . #[inline] . fn emit_unit(&mut self) -> EncodeResult { . Ok(()) . } . . #[inline] . fn emit_usize(&mut self, v: usize) -> EncodeResult { 9,852 ( 0.01%) write_leb128!(self, v, usize, write_usize_leb128) . } . . #[inline] . fn emit_u128(&mut self, v: u128) -> EncodeResult { 7 ( 0.00%) write_leb128!(self, v, u128, write_u128_leb128) . } . . #[inline] . fn emit_u64(&mut self, v: u64) -> EncodeResult { 224 ( 0.00%) write_leb128!(self, v, u64, write_u64_leb128) . } . . #[inline] . fn emit_u32(&mut self, v: u32) -> EncodeResult { 7,262 ( 0.01%) write_leb128!(self, v, u32, write_u32_leb128) . } . . #[inline] . fn emit_u16(&mut self, v: u16) -> EncodeResult { . self.data.extend_from_slice(&v.to_le_bytes()); . Ok(()) . } . -- line 98 ---------------------------------------- -- line 131 ---------------------------------------- . #[inline] . fn emit_i8(&mut self, v: i8) -> EncodeResult { . let as_u8: u8 = unsafe { std::mem::transmute(v) }; . self.emit_u8(as_u8) . } . . #[inline] . fn emit_bool(&mut self, v: bool) -> EncodeResult { 87 ( 0.00%) self.emit_u8(if v { 1 } else { 0 }) . } . . #[inline] . fn emit_f64(&mut self, v: f64) -> EncodeResult { . let as_u64: u64 = v.to_bits(); . self.emit_u64(as_u64) . } . -- line 147 ---------------------------------------- -- line 549 ---------------------------------------- . #[inline] . pub fn advance(&mut self, bytes: usize) { . self.position += bytes; . } . . #[inline] . pub fn read_raw_bytes(&mut self, bytes: usize) -> &'a [u8] { . let start = self.position; 19 ( 0.00%) self.position += bytes; 19 ( 0.00%) &self.data[start..self.position] . } . } . . macro_rules! read_leb128 { . 
($dec:expr, $fun:ident) => {{ leb128::$fun($dec.data, &mut $dec.position) }}; . } . . impl<'a> serialize::Decoder for Decoder<'a> { . #[inline] . fn read_unit(&mut self) -> () { . () . } . . #[inline] . fn read_u128(&mut self) -> u128 { 526 ( 0.00%) read_leb128!(self, read_u128_leb128) . } . . #[inline] . fn read_u64(&mut self) -> u64 { 752 ( 0.00%) read_leb128!(self, read_u64_leb128) . } . . #[inline] 9,374 ( 0.01%) fn read_u32(&mut self) -> u32 { 47,406 ( 0.05%) read_leb128!(self, read_u32_leb128) 18,748 ( 0.02%) } . . #[inline] . fn read_u16(&mut self) -> u16 { 561,386 ( 0.56%) let bytes = [self.data[self.position], self.data[self.position + 1]]; 80,198 ( 0.08%) let value = u16::from_le_bytes(bytes); 160,396 ( 0.16%) self.position += 2; . value . } . . #[inline] . fn read_u8(&mut self) -> u8 { 1,736,330 ( 1.72%) let value = self.data[self.position]; 322,731 ( 0.32%) self.position += 1; . value . } . . #[inline] 59,613 ( 0.06%) fn read_usize(&mut self) -> usize { 59,613 ( 0.06%) read_leb128!(self, read_usize_leb128) 119,226 ( 0.12%) } . . #[inline] . fn read_i128(&mut self) -> i128 { . read_leb128!(self, read_i128_leb128) . } . . #[inline] . fn read_i64(&mut self) -> i64 { -- line 613 ---------------------------------------- -- line 637 ---------------------------------------- . #[inline] . fn read_isize(&mut self) -> isize { . read_leb128!(self, read_isize_leb128) . } . . #[inline] . fn read_bool(&mut self) -> bool { . let value = self.read_u8(); 2,788 ( 0.00%) value != 0 . } . . #[inline] . fn read_f64(&mut self) -> f64 { . let bits = self.read_u64(); . f64::from_bits(bits) . } . -- line 653 ---------------------------------------- -- line 660 ---------------------------------------- . #[inline] . fn read_char(&mut self) -> char { . let bits = self.read_u32(); . std::char::from_u32(bits).unwrap() . } . . #[inline] . fn read_str(&mut self) -> Cow<'_, str> { 8,071 ( 0.01%) let len = self.read_usize(); 47,760 ( 0.05%) let sentinel = self.data[self.position + len]; 11,940 ( 0.01%) assert!(sentinel == STR_SENTINEL); . let s = unsafe { . std::str::from_utf8_unchecked(&self.data[self.position..self.position + len]) . }; 29,850 ( 0.03%) self.position += len + 1; 6,303 ( 0.01%) Cow::Borrowed(s) . } . . #[inline] . fn read_raw_bytes_into(&mut self, s: &mut [u8]) { 28 ( 0.00%) let start = self.position; 142 ( 0.00%) self.position += s.len(); 28 ( 0.00%) s.copy_from_slice(&self.data[start..self.position]); . } . } . . // Specializations for contiguous byte sequences follow. The default implementations for slices . // encode and decode each element individually. This isn't necessary for `u8` slices when using . // opaque encoders and decoders, because each `u8` is unchanged by encoding and decoding. . // Therefore, we can use more efficient implementations that process the entire sequence at once. . -- line 690 ---------------------------------------- 194,018 ( 0.19%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/rustc-hash-1.1.0/src/lib.rs -------------------------------------------------------------------------------- Ir -- line 71 ---------------------------------------- . fn default() -> FxHasher { . FxHasher { hash: 0 } . } . } . . impl FxHasher { . #[inline] . fn add_to_hash(&mut self, i: usize) { 56,176 ( 0.06%) self.hash = self.hash.rotate_left(5).bitxor(i).wrapping_mul(K); . } . } . . impl Hasher for FxHasher { . 
#[inline] 696 ( 0.00%) fn write(&mut self, mut bytes: &[u8]) { . #[cfg(target_pointer_width = "32")] . let read_usize = |bytes: &[u8]| u32::from_ne_bytes(bytes[..4].try_into().unwrap()); . #[cfg(target_pointer_width = "64")] . let read_usize = |bytes: &[u8]| u64::from_ne_bytes(bytes[..8].try_into().unwrap()); . 14,587 ( 0.01%) let mut hash = FxHasher { hash: self.hash }; . assert!(size_of::() <= 8); 168,864 ( 0.17%) while bytes.len() >= size_of::() { . hash.add_to_hash(read_usize(bytes) as usize); . bytes = &bytes[size_of::()..]; . } 28,034 ( 0.03%) if (size_of::() > 4) && (bytes.len() >= 4) { . hash.add_to_hash(u32::from_ne_bytes(bytes[..4].try_into().unwrap()) as usize); . bytes = &bytes[4..]; . } 28,034 ( 0.03%) if (size_of::() > 2) && bytes.len() >= 2 { . hash.add_to_hash(u16::from_ne_bytes(bytes[..2].try_into().unwrap()) as usize); . bytes = &bytes[2..]; . } 31,170 ( 0.03%) if (size_of::() > 1) && bytes.len() >= 1 { 7,121 ( 0.01%) hash.add_to_hash(bytes[0] as usize); . } 1,270 ( 0.00%) self.hash = hash.hash; 696 ( 0.00%) } . . #[inline] . fn write_u8(&mut self, i: u8) { 13 ( 0.00%) self.add_to_hash(i as usize); . } . . #[inline] . fn write_u16(&mut self, i: u16) { 2 ( 0.00%) self.add_to_hash(i as usize); . } . . #[inline] . fn write_u32(&mut self, i: u32) { 55,844 ( 0.06%) self.add_to_hash(i as usize); . } . . #[cfg(target_pointer_width = "32")] . #[inline] . fn write_u64(&mut self, i: u64) { . self.add_to_hash(i as usize); . self.add_to_hash((i >> 32) as usize); . } -- line 131 ---------------------------------------- 1 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/source_map.rs -------------------------------------------------------------------------------- Ir -- line 72 ---------------------------------------- . fn deref(&self) -> &Self::Target { . &self.0 . } . } . . impl !DerefMut for MonotonicVec {} . } . 371 ( 0.00%) #[derive(Clone, Encodable, Decodable, Debug, Copy, HashStable_Generic)] . pub struct Spanned { . pub node: T, . pub span: Span, . } . . pub fn respan(sp: Span, t: T) -> Spanned { 230 ( 0.00%) Spanned { node: t, span: sp } 634 ( 0.00%) } . . pub fn dummy_spanned(t: T) -> Spanned { . respan(DUMMY_SP, t) . } . . // _____________________________________________________________________________ . // SourceFile, MultiByteChar, FileName, FileLines . // -- line 96 ---------------------------------------- -- line 107 ---------------------------------------- . /// A FileLoader that uses std::fs to load real files. . pub struct RealFileLoader; . . impl FileLoader for RealFileLoader { . fn file_exists(&self, path: &Path) -> bool { . path.exists() . } . 2 ( 0.00%) fn read_file(&self, path: &Path) -> io::Result { . fs::read_to_string(path) 3 ( 0.00%) } . } . . /// This is a [SourceFile] identifier that is used to correlate source files between . /// subsequent compilation sessions (which is something we need to do during . /// incremental compilation). . /// . /// The [StableSourceFileId] also contains the CrateNum of the crate the source . /// file was originally parsed for. This way we get two separate entries in -- line 125 ---------------------------------------- -- line 128 ---------------------------------------- . /// at the point where we discover that there's a local use of the file in . /// addition to the upstream one, we might already have made decisions based on . /// the assumption that it's an upstream file. 
Treating the two files as . /// different has no real downsides. . #[derive(Copy, Clone, PartialEq, Eq, Hash, Encodable, Decodable, Debug)] . pub struct StableSourceFileId { . // A hash of the source file's FileName. This is hash so that it's size . // is more predictable than if we included the actual FileName value. 46 ( 0.00%) pub file_name_hash: u64, . . // The CrateNum of the crate this source file was originally parsed for. . // We cannot include this information in the hash because at the time . // of hashing we don't have the context to map from the CrateNum's numeric . // value to a StableCrateId. . pub cnum: CrateNum, . } . . // FIXME: we need a more globally consistent approach to the problem solved by . // StableSourceFileId, perhaps built atop source_file.name_hash. . impl StableSourceFileId { . pub fn new(source_file: &SourceFile) -> StableSourceFileId { 1,052 ( 0.00%) StableSourceFileId::new_from_name(&source_file.name, source_file.cnum) . } . 2,635 ( 0.00%) fn new_from_name(name: &FileName, cnum: CrateNum) -> StableSourceFileId { . let mut hasher = StableHasher::new(); 527 ( 0.00%) name.hash(&mut hasher); . StableSourceFileId { file_name_hash: hasher.finish(), cnum } 3,162 ( 0.00%) } . } . . // _____________________________________________________________________________ . // SourceMap . // . . #[derive(Default)] . pub(super) struct SourceMapFiles { -- line 164 ---------------------------------------- -- line 184 ---------------------------------------- . pub fn new(path_mapping: FilePathMapping) -> SourceMap { . Self::with_file_loader_and_hash_kind( . Box::new(RealFileLoader), . path_mapping, . SourceFileHashAlgorithm::Md5, . ) . } . 1 ( 0.00%) pub fn with_file_loader_and_hash_kind( . file_loader: Box, . path_mapping: FilePathMapping, . hash_kind: SourceFileHashAlgorithm, . ) -> SourceMap { 5 ( 0.00%) SourceMap { . used_address_space: AtomicU32::new(0), . files: Default::default(), . file_loader, 4 ( 0.00%) path_mapping, . hash_kind, . } 1 ( 0.00%) } . . pub fn path_mapping(&self) -> &FilePathMapping { 3 ( 0.00%) &self.path_mapping 3 ( 0.00%) } . . pub fn file_exists(&self, path: &Path) -> bool { . self.file_loader.file_exists(path) . } . 11 ( 0.00%) pub fn load_file(&self, path: &Path) -> io::Result> { 4 ( 0.00%) let src = self.file_loader.read_file(path)?; . let filename = path.to_owned().into(); 18 ( 0.00%) Ok(self.new_source_file(filename, src)) 9 ( 0.00%) } . . /// Loads source file as a binary blob. . /// . /// Unlike `load_file`, guarantees that no normalization like BOM-removal . /// takes place. . pub fn load_binary_file(&self, path: &Path) -> io::Result> { . // Ideally, this should use `self.file_loader`, but it can't . // deal with binary files yet. -- line 226 ---------------------------------------- -- line 233 ---------------------------------------- . // empty string. . let text = std::str::from_utf8(&bytes).unwrap_or("").to_string(); . self.new_source_file(path.to_owned().into(), text); . Ok(bytes) . } . . // By returning a `MonotonicVec`, we ensure that consumers cannot invalidate . // any existing indices pointing into `files`. 4 ( 0.00%) pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec>> { . ReadGuard::map(self.files.borrow(), |files| &files.source_files) 12 ( 0.00%) } . 4 ( 0.00%) pub fn source_file_by_stable_id( . &self, . stable_id: StableSourceFileId, . ) -> Option> { . self.files.borrow().stable_id_to_source_file.get(&stable_id).cloned() 5 ( 0.00%) } . . 
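The `StableSourceFileId` code above just hashes the file's `FileName` and pairs the hash with the `CrateNum`, so the id keeps a fixed, predictable size and stays stable across compilation sessions. A minimal standalone sketch of that idea, using `std`'s `DefaultHasher` in place of rustc's `StableHasher` and hypothetical stand-in types rather than rustc's own definitions:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Stand-ins for rustc's CrateNum and FileName (assumptions for illustration only).
type CrateNum = u32;
#[derive(Hash)]
struct FileName(String);

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct StableFileId {
    // Hash of the file name, so the id has a predictable size regardless of path length.
    file_name_hash: u64,
    // Crate the file was originally parsed for; kept out of the hash because its
    // numeric value cannot be mapped to a stable id at hashing time.
    cnum: CrateNum,
}

fn stable_file_id(name: &FileName, cnum: CrateNum) -> StableFileId {
    let mut hasher = DefaultHasher::new();
    name.hash(&mut hasher);
    StableFileId { file_name_hash: hasher.finish(), cnum }
}

With this layout the same on-disk file gets two distinct ids when it is seen both as a local file and as an imported one, which is the behaviour the comment above describes.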
fn allocate_address_space(&self, size: usize) -> Result { 527 ( 0.00%) let size = u32::try_from(size).map_err(|_| OffsetOverflowError)?; . . loop { 527 ( 0.00%) let current = self.used_address_space.load(Ordering::Relaxed); 1,581 ( 0.00%) let next = current . .checked_add(size) . // Add one so there is some space between files. This lets us distinguish . // positions in the `SourceMap`, even in the presence of zero-length files. . .and_then(|next| next.checked_add(1)) . .ok_or(OffsetOverflowError)?; . . if self . .used_address_space -- line 265 ---------------------------------------- -- line 269 ---------------------------------------- . return Ok(usize::try_from(current).unwrap()); . } . } . } . . /// Creates a new `SourceFile`. . /// If a file already exists in the `SourceMap` with the same ID, that file is returned . /// unmodified. 8 ( 0.00%) pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc { 12 ( 0.00%) self.try_new_source_file(filename, src).unwrap_or_else(|OffsetOverflowError| { . eprintln!("fatal error: rustc does not support files larger than 4GB"); . crate::fatal_error::FatalError.raise() . }) 9 ( 0.00%) } . . fn try_new_source_file( . &self, . filename: FileName, . src: String, . ) -> Result, OffsetOverflowError> { . // Note that filename may not be a valid path, eg it may be `` etc, . // but this is okay because the directory determined by `path.pop()` will . // be empty, so the working directory will be used. 13 ( 0.00%) let (filename, _) = self.path_mapping.map_filename_prefix(&filename); . 2 ( 0.00%) let file_id = StableSourceFileId::new_from_name(&filename, LOCAL_CRATE); . 6 ( 0.00%) let lrc_sf = match self.source_file_by_stable_id(file_id) { . Some(lrc_sf) => lrc_sf, . None => { . let start_pos = self.allocate_address_space(src.len())?; . 6 ( 0.00%) let source_file = Lrc::new(SourceFile::new( 9 ( 0.00%) filename, 3 ( 0.00%) src, . Pos::from_usize(start_pos), . self.hash_kind, . )); . . // Let's make sure the file_id we generated above actually matches . // the ID we generate for the SourceFile we just created. . debug_assert_eq!(StableSourceFileId::new(&source_file), file_id); . -- line 311 ---------------------------------------- -- line 319 ---------------------------------------- . }; . Ok(lrc_sf) . } . . /// Allocates a new `SourceFile` representing a source file from an external . /// crate. The source code of such an "imported `SourceFile`" is not available, . /// but we still know enough to generate accurate debuginfo location . /// information for things inlined from other crates. 4,208 ( 0.00%) pub fn new_imported_source_file( . &self, . filename: FileName, . src_hash: SourceFileHash, . name_hash: u128, . source_len: usize, . cnum: CrateNum, . mut file_local_lines: Vec, . mut file_local_multibyte_chars: Vec, -- line 335 ---------------------------------------- -- line 341 ---------------------------------------- . let start_pos = self . .allocate_address_space(source_len) . .expect("not enough address space for imported source file"); . . let end_pos = Pos::from_usize(start_pos + source_len); . let start_pos = Pos::from_usize(start_pos); . . for pos in &mut file_local_lines { 353,216 ( 0.35%) *pos = *pos + start_pos; . } . . for mbc in &mut file_local_multibyte_chars { 797 ( 0.00%) mbc.pos = mbc.pos + start_pos; . } . . for swc in &mut file_local_non_narrow_chars { 616 ( 0.00%) *swc = *swc + start_pos; . } . . for nc in &mut file_local_normalized_pos { . nc.pos = nc.pos + start_pos; . } . 
1,578 ( 0.00%) let source_file = Lrc::new(SourceFile { 4,208 ( 0.00%) name: filename, . src: None, . src_hash, . external_src: Lock::new(ExternalSource::Foreign { . kind: ExternalSourceKind::AbsentOk, . original_start_pos, . original_end_pos, . }), . start_pos, -- line 373 ---------------------------------------- -- line 383 ---------------------------------------- . let mut files = self.files.borrow_mut(); . . files.source_files.push(source_file.clone()); . files . .stable_id_to_source_file . .insert(StableSourceFileId::new(&source_file), source_file.clone()); . . source_file 4,734 ( 0.00%) } . . // If there is a doctest offset, applies it to the line. . pub fn doctest_offset_line(&self, file: &FileName, orig: usize) -> usize { . match file { . FileName::DocTest(_, offset) => { . if *offset < 0 { . orig - (-(*offset)) as usize . } else { -- line 399 ---------------------------------------- -- line 400 ---------------------------------------- . orig + *offset as usize . } . } . _ => orig, . } . } . . /// Return the SourceFile that contains the given `BytePos` 3 ( 0.00%) pub fn lookup_source_file(&self, pos: BytePos) -> Lrc { 1 ( 0.00%) let idx = self.lookup_source_file_idx(pos); 1 ( 0.00%) (*self.files.borrow().source_files)[idx].clone() 3 ( 0.00%) } . . /// Looks up source information about a `BytePos`. . pub fn lookup_char_pos(&self, pos: BytePos) -> Loc { 5 ( 0.00%) let sf = self.lookup_source_file(pos); 6 ( 0.00%) let (line, col, col_display) = sf.lookup_file_pos_with_col_display(pos); 3 ( 0.00%) Loc { file: sf, line, col, col_display } . } . . // If the corresponding `SourceFile` is empty, does not return a line number. . pub fn lookup_line(&self, pos: BytePos) -> Result> { . let f = self.lookup_source_file(pos); . . match f.lookup_line(pos) { . Some(line) => Ok(SourceFileAndLine { sf: f, line }), -- line 425 ---------------------------------------- -- line 451 ---------------------------------------- . . /// Format the span location to be printed in diagnostics. Must not be emitted . /// to build artifacts as this may leak local file paths. Use span_to_embeddable_string . /// for string suitable for embedding. . pub fn span_to_diagnostic_string(&self, sp: Span) -> String { . self.span_to_string(sp, self.path_mapping.filename_display_for_diagnostics) . } . 11 ( 0.00%) pub fn span_to_filename(&self, sp: Span) -> FileName { 3 ( 0.00%) self.lookup_char_pos(sp.lo()).file.name.clone() 8 ( 0.00%) } . . pub fn filename_for_diagnostics<'a>(&self, filename: &'a FileName) -> FileNameDisplay<'a> { . filename.display(self.path_mapping.filename_display_for_diagnostics) . } . 209 ( 0.00%) pub fn is_multiline(&self, sp: Span) -> bool { 76 ( 0.00%) let lo = self.lookup_source_file_idx(sp.lo()); 76 ( 0.00%) let hi = self.lookup_source_file_idx(sp.hi()); 38 ( 0.00%) if lo != hi { . return true; . } 38 ( 0.00%) let f = (*self.files.borrow().source_files)[lo].clone(); . f.lookup_line(sp.lo()) != f.lookup_line(sp.hi()) 171 ( 0.00%) } . . #[instrument(skip(self), level = "trace")] . pub fn is_valid_span(&self, sp: Span) -> Result<(Loc, Loc), SpanLinesError> { . let lo = self.lookup_char_pos(sp.lo()); . trace!(?lo); . let hi = self.lookup_char_pos(sp.hi()); . trace!(?hi); . if lo.file.start_pos != hi.file.start_pos { -- line 483 ---------------------------------------- -- line 533 ---------------------------------------- . . /// Extracts the source surrounding the given `Span` using the `extract_source` function. The . 
/// extract function takes three arguments: a string slice containing the source, an index in . /// the slice for the beginning of the span and an index in the slice for the end of the span. . fn span_to_source(&self, sp: Span, extract_source: F) -> Result . where . F: Fn(&str, usize, usize) -> Result, . { 119 ( 0.00%) let local_begin = self.lookup_byte_offset(sp.lo()); 119 ( 0.00%) let local_end = self.lookup_byte_offset(sp.hi()); . 34 ( 0.00%) if local_begin.sf.start_pos != local_end.sf.start_pos { . Err(SpanSnippetError::DistinctSources(DistinctSources { . begin: (local_begin.sf.name.clone(), local_begin.sf.start_pos), . end: (local_end.sf.name.clone(), local_end.sf.start_pos), . })) . } else { . self.ensure_source_file_source_present(local_begin.sf.clone()); . . let start_index = local_begin.pos.to_usize(); . let end_index = local_end.pos.to_usize(); 17 ( 0.00%) let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize(); . 68 ( 0.00%) if start_index > end_index || end_index > source_len { . return Err(SpanSnippetError::MalformedForSourcemap(MalformedSourceMapPositions { . name: local_begin.sf.name.clone(), . source_len, . begin_pos: local_begin.pos, . end_pos: local_end.pos, . })); . } . 51 ( 0.00%) if let Some(ref src) = local_begin.sf.src { . extract_source(src, start_index, end_index) . } else if let Some(src) = local_begin.sf.external_src.borrow().get_source() { . extract_source(src, start_index, end_index) . } else { . Err(SpanSnippetError::SourceNotAvailable { filename: local_begin.sf.name.clone() }) . } . } . } -- line 573 ---------------------------------------- -- line 579 ---------------------------------------- . pub fn is_local_span(&self, sp: Span) -> bool { . let local_begin = self.lookup_byte_offset(sp.lo()); . let local_end = self.lookup_byte_offset(sp.hi()); . // This might be a weird span that covers multiple files . local_begin.sf.src.is_some() && local_end.sf.src.is_some() . } . . /// Returns the source snippet as `String` corresponding to the given `Span`. 204 ( 0.00%) pub fn span_to_snippet(&self, sp: Span) -> Result { . self.span_to_source(sp, |src, start_index, end_index| { . src.get(start_index..end_index) . .map(|s| s.to_string()) . .ok_or(SpanSnippetError::IllFormedSpan(sp)) . }) 153 ( 0.00%) } . . pub fn span_to_margin(&self, sp: Span) -> Option { . Some(self.indentation_before(sp)?.len()) . } . . pub fn indentation_before(&self, sp: Span) -> Option { . self.span_to_source(sp, |src, start_index, _| { . let before = &src[..start_index]; -- line 601 ---------------------------------------- -- line 679 ---------------------------------------- . } . } . . sp . } . . /// Given a `Span`, tries to get a shorter span ending before the first occurrence of `char` . /// `c`. 187 ( 0.00%) pub fn span_until_char(&self, sp: Span, c: char) -> Span { 68 ( 0.00%) match self.span_to_snippet(sp) { 68 ( 0.00%) Ok(snippet) => { . let snippet = snippet.split(c).next().unwrap_or("").trim_end(); 34 ( 0.00%) if !snippet.is_empty() && !snippet.contains('\n') { 85 ( 0.00%) sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32)) . } else { . sp . } . } . _ => sp, . } 153 ( 0.00%) } . . /// Given a `Span`, tries to get a shorter span ending just after the first occurrence of `char` . /// `c`. . pub fn span_through_char(&self, sp: Span, c: char) -> Span { . if let Ok(snippet) = self.span_to_snippet(sp) { . if let Some(offset) = snippet.find(c) { . return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32)); . 
} -- line 707 ---------------------------------------- -- line 750 ---------------------------------------- . /// Given a `Span`, return a span ending in the closest `{`. This is useful when you have a . /// `Span` enclosing a whole item but we need to point at only the head (usually the first . /// line) of that item. . /// . /// *Only suitable for diagnostics.* . pub fn guess_head_span(&self, sp: Span) -> Span { . // FIXME: extend the AST items to have a head span, or replace callers with pointing at . // the item's ident when appropriate. 34 ( 0.00%) self.span_until_char(sp, '{') . } . . /// Returns a new span representing just the first character of the given span. 12 ( 0.00%) pub fn start_point(&self, sp: Span) -> Span { . let width = { . let sp = sp.data(); 5 ( 0.00%) let local_begin = self.lookup_byte_offset(sp.lo); . let start_index = local_begin.pos.to_usize(); . let src = local_begin.sf.external_src.borrow(); . 3 ( 0.00%) let snippet = if let Some(ref src) = local_begin.sf.src { . Some(&src[start_index..]) . } else if let Some(src) = src.get_source() { . Some(&src[start_index..]) . } else { . None . }; . 1 ( 0.00%) match snippet { . None => 1, . Some(snippet) => match snippet.chars().next() { . None => 1, . Some(c) => c.len_utf8(), . }, . } . }; . 12 ( 0.00%) sp.with_hi(BytePos(sp.lo().0 + width as u32)) . } . . /// Returns a new span representing just the last character of this span. 1,440 ( 0.00%) pub fn end_point(&self, sp: Span) -> Span { . let pos = sp.hi().0; . 480 ( 0.00%) let width = self.find_width_of_character_at_span(sp, false); . let corrected_end_position = pos.checked_sub(width).unwrap_or(pos); . . let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0)); 1,200 ( 0.00%) sp.with_lo(end_point) . } . . /// Returns a new span representing the next character after the end-point of this span. 176 ( 0.00%) pub fn next_point(&self, sp: Span) -> Span { 16 ( 0.00%) if sp.is_dummy() { . return sp; . } . let start_of_next_point = sp.hi().0; . 64 ( 0.00%) let width = self.find_width_of_character_at_span(sp.shrink_to_hi(), true); . // If the width is 1, then the next span should point to the same `lo` and `hi`. However, . // in the case of a multibyte character, where the width != 1, the next span should . // span multiple bytes to include the whole character. . let end_of_next_point = 32 ( 0.00%) start_of_next_point.checked_add(width - 1).unwrap_or(start_of_next_point); . 32 ( 0.00%) let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point)); . Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt(), None) 144 ( 0.00%) } . . /// Finds the width of the character, either before or after the end of provided span, . /// depending on the `forwards` parameter. 1,632 ( 0.00%) fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 { . let sp = sp.data(); 136 ( 0.00%) if sp.lo == sp.hi { . debug!("find_width_of_character_at_span: early return empty span"); . return 1; . } . 840 ( 0.00%) let local_begin = self.lookup_byte_offset(sp.lo); 1,080 ( 0.00%) let local_end = self.lookup_byte_offset(sp.hi); . debug!( . "find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`", . local_begin, local_end . ); . 240 ( 0.00%) if local_begin.sf.start_pos != local_end.sf.start_pos { . debug!("find_width_of_character_at_span: begin and end are in different files"); . return 1; . } . . let start_index = local_begin.pos.to_usize(); . let end_index = local_end.pos.to_usize(); . debug!( . 
"find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`", . start_index, end_index . ); . . // Disregard indexes that are at the start or end of their spans, they can't fit bigger . // characters. 480 ( 0.00%) if (!forwards && end_index == usize::MIN) || (forwards && start_index == usize::MAX) { . debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte"); . return 1; . } . 120 ( 0.00%) let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize(); . debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len); . // Ensure indexes are also not malformed. 480 ( 0.00%) if start_index > end_index || end_index > source_len { . debug!("find_width_of_character_at_span: source indexes are malformed"); . return 1; . } . . let src = local_begin.sf.external_src.borrow(); . . // We need to extend the snippet to the end of the src rather than to end_index so when . // searching forwards for boundaries we've got somewhere to search. 360 ( 0.00%) let snippet = if let Some(ref src) = local_begin.sf.src { . &src[start_index..] . } else if let Some(src) = src.get_source() { . &src[start_index..] . } else { . return 1; . }; . debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet); . 720 ( 0.00%) let mut target = if forwards { end_index + 1 } else { end_index - 1 }; . debug!("find_width_of_character_at_span: initial target=`{:?}`", target); . 480 ( 0.00%) while !snippet.is_char_boundary(target - start_index) && target < source_len { . target = if forwards { . target + 1 . } else { . match target.checked_sub(1) { . Some(target) => target, . None => { . break; . } . } . }; . debug!("find_width_of_character_at_span: target=`{:?}`", target); . } . debug!("find_width_of_character_at_span: final target=`{:?}`", target); . 480 ( 0.00%) if forwards { (target - end_index) as u32 } else { (end_index - target) as u32 } 1,224 ( 0.00%) } . . pub fn get_source_file(&self, filename: &FileName) -> Option> { . // Remap filename before lookup . let filename = self.path_mapping().map_filename_prefix(filename).0; . for sf in self.files.borrow().source_files.iter() { . if filename == sf.name { . return Some(sf.clone()); . } . } . None . } . . /// For a global `BytePos`, computes the local offset within the containing `SourceFile`. 1,375 ( 0.00%) pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos { 275 ( 0.00%) let idx = self.lookup_source_file_idx(bpos); 550 ( 0.00%) let sf = (*self.files.borrow().source_files)[idx].clone(); . let offset = bpos - sf.start_pos; . SourceFileAndBytePos { sf, pos: offset } 1,375 ( 0.00%) } . . // Returns the index of the `SourceFile` (in `self.files`) that contains `pos`. . // This index is guaranteed to be valid for the lifetime of this `SourceMap`, . // since `source_files` is a `MonotonicVec` 942 ( 0.00%) pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize { . self.files . .borrow() . .source_files . .binary_search_by_key(&pos, |key| key.start_pos) 626 ( 0.00%) .unwrap_or_else(|p| p - 1) 1,256 ( 0.00%) } . . pub fn count_lines(&self) -> usize { . self.files().iter().fold(0, |a, f| a + f.count_lines()) . } . . pub fn generate_fn_name_span(&self, span: Span) -> Option { . let prev_span = self.span_extend_to_prev_str(span, "fn", true); . if let Ok(snippet) = self.span_to_snippet(prev_span) { -- line 931 ---------------------------------------- -- line 1010 ---------------------------------------- . . return Some((sugg_span, new_snippet)); . } . } . . None . } . 
pub fn ensure_source_file_source_present(&self, source_file: Lrc) -> bool { 119 ( 0.00%) source_file.add_external_src(|| { . match source_file.name { . FileName::Real(ref name) if let Some(local_path) = name.local_path() => { . self.file_loader.read_file(local_path).ok() . } . _ => None, . } . }) . } -- line 1026 ---------------------------------------- -- line 1053 ---------------------------------------- . let span = span.shrink_to_hi().with_hi(BytePos(span.hi().0.checked_add(1)?)); . if self.span_to_snippet(span).as_deref() != Ok(";") { . return None; . } . Some(span) . } . } . 8 ( 0.00%) #[derive(Clone)] . pub struct FilePathMapping { 4 ( 0.00%) mapping: Vec<(PathBuf, PathBuf)>, 2 ( 0.00%) filename_display_for_diagnostics: FileNameDisplayPreference, . } . . impl FilePathMapping { . pub fn empty() -> FilePathMapping { . FilePathMapping::new(Vec::new()) . } . 3 ( 0.00%) pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping { . let filename_display_for_diagnostics = if mapping.is_empty() { . FileNameDisplayPreference::Local . } else { . FileNameDisplayPreference::Remapped . }; . 12 ( 0.00%) FilePathMapping { mapping, filename_display_for_diagnostics } 3 ( 0.00%) } . . /// Applies any path prefix substitution as defined by the mapping. . /// The return value is the remapped path and a boolean indicating whether . /// the path was affected by the mapping. 45 ( 0.00%) pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) { . // NOTE: We are iterating over the mapping entries from last to first . // because entries specified later on the command line should . // take precedence. . for &(ref from, ref to) in self.mapping.iter().rev() { . if let Ok(rest) = path.strip_prefix(from) { . return (to.join(rest), true); . } . } . 25 ( 0.00%) (path, false) 45 ( 0.00%) } . 8 ( 0.00%) fn map_filename_prefix(&self, file: &FileName) -> (FileName, bool) { 2 ( 0.00%) match file { 2 ( 0.00%) FileName::Real(realfile) if let RealFileName::LocalPath(local_path) = realfile => { 14 ( 0.00%) let (mapped_path, mapped) = self.map_prefix(local_path.to_path_buf()); 1 ( 0.00%) let realfile = if mapped { . RealFileName::Remapped { . local_path: Some(local_path.clone()), . virtual_name: mapped_path, . } . } else { . realfile.clone() . }; 10 ( 0.00%) (FileName::Real(realfile), mapped) 4 ( 0.00%) } . FileName::Real(_) => unreachable!("attempted to remap an already remapped filename"), . other => (other.clone(), false), . } . } . } 4,784 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/leb128.rs -------------------------------------------------------------------------------- Ir -- line 17 ---------------------------------------- . #[inline] . pub fn $fn_name( . out: &mut [::std::mem::MaybeUninit; max_leb128_len!($int_ty)], . mut value: $int_ty, . ) -> &[u8] { . let mut i = 0; . . loop { 5,522 ( 0.01%) if value < 0x80 { . unsafe { 2,703 ( 0.00%) *out.get_unchecked_mut(i).as_mut_ptr() = value as u8; . } . 3,732 ( 0.00%) i += 1; . break; . } else { . unsafe { 2,360 ( 0.00%) *out.get_unchecked_mut(i).as_mut_ptr() = ((value & 0x7f) | 0x80) as u8; . } . 971 ( 0.00%) value >>= 7; . i += 1; . } . } . . unsafe { ::std::mem::MaybeUninit::slice_assume_init_ref(&out.get_unchecked(..i)) } . } . }; . } -- line 45 ---------------------------------------- -- line 53 ---------------------------------------- . macro_rules! impl_read_unsigned_leb128 { . ($fn_name:ident, $int_ty:ty) => { . 
#[inline] . pub fn $fn_name(slice: &[u8], position: &mut usize) -> $int_ty { . // The first iteration of this loop is unpeeled. This is a . // performance win because this code is hot and integer values less . // than 128 are very common, typically occurring 50-80% or more of . // the time, even for u64 and u128. 433,188 ( 0.43%) let byte = slice[*position]; 216,594 ( 0.21%) *position += 1; 216,594 ( 0.21%) if (byte & 0x80) == 0 { . return byte as $int_ty; . } 1,052 ( 0.00%) let mut result = (byte & 0x7F) as $int_ty; . let mut shift = 7; . loop { 181,713 ( 0.18%) let byte = slice[*position]; 115,595 ( 0.11%) *position += 1; 121,142 ( 0.12%) if (byte & 0x80) == 0 { 93,913 ( 0.09%) result |= (byte as $int_ty) << shift; . return result; . } else { . result |= ((byte & 0x7F) as $int_ty) << shift; . } . shift += 7; . } . } . }; -- line 80 ---------------------------------------- 195,571 ( 0.19%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/raw_vec.rs -------------------------------------------------------------------------------- Ir -- line 124 ---------------------------------------- . Self { ptr: Unique::dangling(), cap: 0, alloc } . } . . /// Like `with_capacity`, but parameterized over the choice of . /// allocator for the returned `RawVec`. . #[cfg(not(no_global_oom_handling))] . #[inline] . pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { 38 ( 0.00%) Self::allocate_in(capacity, AllocInit::Uninitialized, alloc) . } . . /// Like `with_capacity_zeroed`, but parameterized over the choice . /// of allocator for the returned `RawVec`. . #[cfg(not(no_global_oom_handling))] . #[inline] . pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self { . Self::allocate_in(capacity, AllocInit::Zeroed, alloc) -- line 140 ---------------------------------------- -- line 147 ---------------------------------------- . /// . /// # Safety . /// . /// * `len` must be greater than or equal to the most recently requested capacity, and . /// * `len` must be less than or equal to `self.capacity()`. . /// . /// Note, that the requested capacity and `self.capacity()` could differ, as . /// an allocator could overallocate and return a greater memory block than requested. 15 ( 0.00%) pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit], A> { . // Sanity-check one half of the safety requirement (we cannot check the other half). . debug_assert!( . len <= self.capacity(), . "`len` must be smaller than or equal to `self.capacity()`" . ); . . let me = ManuallyDrop::new(self); . unsafe { . let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit, len); . Box::from_raw_in(slice, ptr::read(&me.alloc)) . } 15 ( 0.00%) } . . #[cfg(not(no_global_oom_handling))] 76 ( 0.00%) fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self { . if mem::size_of::() == 0 { . Self::new_in(alloc) . } else { . // We avoid `unwrap_or_else` here because it bloats the amount of . // LLVM IR generated. . let layout = match Layout::array::(capacity) { . Ok(layout) => layout, . Err(_) => capacity_overflow(), . }; . match alloc_guard(layout.size()) { . Ok(_) => {} . Err(_) => capacity_overflow(), . } 38 ( 0.00%) let result = match init { . AllocInit::Uninitialized => alloc.allocate(layout), . AllocInit::Zeroed => alloc.allocate_zeroed(layout), . }; . let ptr = match result { . Ok(ptr) => ptr, . Err(_) => handle_alloc_error(layout), . }; . 
-- line 192 ---------------------------------------- -- line 194 ---------------------------------------- . // matches the size requested. If that ever changes, the capacity . // here should change to `ptr.len() / mem::size_of::()`. . Self { . ptr: unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) }, . cap: capacity, . alloc, . } . } 95 ( 0.00%) } . . /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator. . /// . /// # Safety . /// . /// The `ptr` must be allocated (via the given allocator `alloc`), and with the given . /// `capacity`. . /// The `capacity` cannot exceed `isize::MAX` for sized types. (only a concern on 32-bit -- line 210 ---------------------------------------- -- line 233 ---------------------------------------- . } . . /// Returns a shared reference to the allocator backing this `RawVec`. . pub fn allocator(&self) -> &A { . &self.alloc . } . . fn current_memory(&self) -> Option<(NonNull, Layout)> { 134,849 ( 0.13%) if mem::size_of::() == 0 || self.cap == 0 { 9,278 ( 0.01%) None . } else { . // We have an allocated chunk of memory, so we can bypass runtime . // checks to get our current layout. . unsafe { . let align = mem::align_of::(); 42,758 ( 0.04%) let size = mem::size_of::() * self.cap; . let layout = Layout::from_size_align_unchecked(size, align); 8,788 ( 0.01%) Some((self.ptr.cast().into(), layout)) . } . } . } . . /// Ensures that the buffer contains at least enough space to hold `len + . /// additional` elements. If it doesn't already have enough capacity, will . /// reallocate enough space plus comfortable slack space to get amortized . /// *O*(1) behavior. Will limit this behavior if it would needlessly cause -- line 258 ---------------------------------------- -- line 274 ---------------------------------------- . #[cfg(not(no_global_oom_handling))] . #[inline] . pub fn reserve(&mut self, len: usize, additional: usize) { . // Callers expect this function to be very cheap when there is already sufficient capacity. . // Therefore, we move all the resizing and error-handling logic from grow_amortized and . // handle_reserve behind a call, while making sure that this function is likely to be . // inlined as just a comparison and a call if the comparison fails. . #[cold] 6,378 ( 0.01%) fn do_reserve_and_handle( . slf: &mut RawVec, . len: usize, . additional: usize, . ) { 189 ( 0.00%) handle_reserve(slf.grow_amortized(len, additional)); 7,901 ( 0.01%) } . 9,491 ( 0.01%) if self.needs_to_grow(len, additional) { 6,276 ( 0.01%) do_reserve_and_handle(self, len, additional); . } . } . . /// A specialized version of `reserve()` used only by the hot and . /// oft-instantiated `Vec::push()`, which does its own capacity check. . #[cfg(not(no_global_oom_handling))] . #[inline(never)] 37,980 ( 0.04%) pub fn reserve_for_push(&mut self, len: usize) { . handle_reserve(self.grow_amortized(len, 1)); 47,218 ( 0.05%) } . . /// The same as `reserve`, but returns on errors instead of panicking or aborting. . pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { . if self.needs_to_grow(len, additional) { . self.grow_amortized(len, additional) . } else { . Ok(()) . } -- line 309 ---------------------------------------- -- line 322 ---------------------------------------- . /// # Panics . /// . /// Panics if the new capacity exceeds `isize::MAX` bytes. . /// . /// # Aborts . /// . /// Aborts on OOM. . #[cfg(not(no_global_oom_handling))] 1,500 ( 0.00%) pub fn reserve_exact(&mut self, len: usize, additional: usize) { . 
handle_reserve(self.try_reserve_exact(len, additional)); 1,500 ( 0.00%) } . . /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting. . pub fn try_reserve_exact( . &mut self, . len: usize, . additional: usize, . ) -> Result<(), TryReserveError> { 764 ( 0.00%) if self.needs_to_grow(len, additional) { self.grow_exact(len, additional) } else { Ok(()) } . } . . /// Shrinks the buffer down to the specified capacity. If the given amount . /// is 0, actually completely deallocates. . /// . /// # Panics . /// . /// Panics if the given amount is *larger* than the current capacity. . /// . /// # Aborts . /// . /// Aborts on OOM. . #[cfg(not(no_global_oom_handling))] 185 ( 0.00%) pub fn shrink_to_fit(&mut self, cap: usize) { . handle_reserve(self.shrink(cap)); 222 ( 0.00%) } . } . . impl RawVec { . /// Returns if the buffer needs to grow to fulfill the needed extra capacity. . /// Mainly used to make inlining reserve-calls possible without inlining `grow`. . fn needs_to_grow(&self, len: usize, additional: usize) -> bool { 19,378 ( 0.02%) additional > self.capacity().wrapping_sub(len) . } . . fn set_ptr_and_cap(&mut self, ptr: NonNull<[u8]>, cap: usize) { . // Allocators currently return a `NonNull<[u8]>` whose length matches . // the size requested. If that ever changes, the capacity here should . // change to `ptr.len() / mem::size_of::()`. 11,538 ( 0.01%) self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) }; 11,577 ( 0.01%) self.cap = cap; . } . . // This method is usually instantiated many times. So we want it to be as . // small as possible, to improve compile times. But we also want as much of . // its contents to be statically computable as possible, to make the . // generated code run faster. Therefore, this method is carefully written . // so that all of the code that depends on `T` is within it, while as much . // of the code that doesn't depend on `T` as possible is in functions that . // are non-generic over `T`. 234 ( 0.00%) fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { . // This is ensured by the calling contexts. . debug_assert!(additional > 0); . . if mem::size_of::() == 0 { . // Since we return a capacity of `usize::MAX` when `elem_size` is . // 0, getting to here necessarily means the `RawVec` is overfull. . return Err(CapacityOverflow.into()); . } . . // Nothing we can really do about these checks, sadly. . let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?; . . // This guarantees exponential growth. The doubling cannot overflow . // because `cap <= isize::MAX` and the type of `cap` is `usize`. 44,896 ( 0.04%) let cap = cmp::max(self.cap * 2, required_cap); . let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap); . . let new_layout = Layout::array::(cap); . . // `finish_grow` is non-generic over `T`. 33,962 ( 0.03%) let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?; . self.set_ptr_and_cap(ptr, cap); . Ok(()) 234 ( 0.00%) } . . // The constraints on this method are much the same as those on . // `grow_amortized`, but this method is usually instantiated less often so . // it's less critical. . fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> { . if mem::size_of::() == 0 { . // Since we return a capacity of `usize::MAX` when the type size is . // 0, getting to here necessarily means the `RawVec` is overfull. . return Err(CapacityOverflow.into()); . } . . let cap = len.checked_add(additional).ok_or(CapacityOverflow)?; . 
let new_layout = Layout::array::(cap); . . // `finish_grow` is non-generic over `T`. 820 ( 0.00%) let ptr = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?; . self.set_ptr_and_cap(ptr, cap); . Ok(()) . } . . fn shrink(&mut self, cap: usize) -> Result<(), TryReserveError> { 111 ( 0.00%) assert!(cap <= self.capacity(), "Tried to shrink to a larger capacity"); . . let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) }; 111 ( 0.00%) let new_size = cap * mem::size_of::(); . . let ptr = unsafe { . let new_layout = Layout::from_size_align_unchecked(new_size, layout.align()); . self.alloc . .shrink(ptr, layout, new_layout) . .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })? . }; . self.set_ptr_and_cap(ptr, cap); -- line 438 ---------------------------------------- -- line 440 ---------------------------------------- . } . } . . // This function is outside `RawVec` to minimize compile times. See the comment . // above `RawVec::grow_amortized` for details. (The `A` parameter isn't . // significant, because the number of different `A` types seen in practice is . // much smaller than the number of `T` types.) . #[inline(never)] 57,485 ( 0.06%) fn finish_grow( . new_layout: Result, . current_memory: Option<(NonNull, Layout)>, . alloc: &mut A, . ) -> Result, TryReserveError> . where . A: Allocator, . { . // Check for the error here to minimize the size of `RawVec::grow_*`. . let new_layout = new_layout.map_err(|_| CapacityOverflow)?; . . alloc_guard(new_layout.size())?; . 36,666 ( 0.04%) let memory = if let Some((ptr, old_layout)) = current_memory { . debug_assert_eq!(old_layout.align(), new_layout.align()); . unsafe { . // The allocator checks for alignment equality . intrinsics::assume(old_layout.align() == new_layout.align()); . alloc.grow(ptr, old_layout, new_layout) . } . } else { . alloc.allocate(new_layout) . }; . . memory.map_err(|_| AllocError { layout: new_layout, non_exhaustive: () }.into()) 45,988 ( 0.05%) } . . unsafe impl<#[may_dangle] T, A: Allocator> Drop for RawVec { . /// Frees the memory owned by the `RawVec` *without* trying to drop its contents. 184 ( 0.00%) fn drop(&mut self) { 46,548 ( 0.05%) if let Some((ptr, layout)) = self.current_memory() { . unsafe { self.alloc.deallocate(ptr, layout) } . } 188 ( 0.00%) } . } . . // Central function for reserve error handling. . #[cfg(not(no_global_oom_handling))] . #[inline] . fn handle_reserve(result: Result<(), TryReserveError>) { . match result.map_err(|e| e.kind()) { . Err(CapacityOverflow) => capacity_overflow(), -- line 489 ---------------------------------------- 191,439 ( 0.19%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/iter/range.rs -------------------------------------------------------------------------------- Ir -- line 198 ---------------------------------------- . } . . #[inline] . #[allow(arithmetic_overflow)] . #[rustc_inherit_overflow_checks] . fn forward(start: Self, n: usize) -> Self { . // In debug builds, trigger a panic on overflow. . // This should optimize completely out in release builds. 3,465 ( 0.00%) if Self::forward_checked(start, n).is_none() { . let _ = Self::MAX + 1; . } . // Do wrapping math to allow e.g. `Step::forward(-128i8, 255)`. . start.wrapping_add(n as Self) . } . . #[inline] . 
#[allow(arithmetic_overflow)] -- line 214 ---------------------------------------- -- line 613 ---------------------------------------- . . if taken < n { Err(taken) } else { Ok(()) } . } . } . . impl RangeIteratorImpl for ops::Range { . #[inline] . fn spec_next(&mut self) -> Option { 543,184 ( 0.54%) if self.start < self.end { . // SAFETY: just checked precondition . let n = unsafe { Step::forward_unchecked(self.start.clone(), 1) }; . Some(mem::replace(&mut self.start, n)) . } else { . None . } . } . -- line 629 ---------------------------------------- -- line 657 ---------------------------------------- . // Otherwise 0 is returned which always safe to use. . self.start = unsafe { Step::forward_unchecked(self.start.clone(), taken) }; . . if taken < n { Err(taken) } else { Ok(()) } . } . . #[inline] . fn spec_next_back(&mut self) -> Option { 7,020 ( 0.01%) if self.start < self.end { . // SAFETY: just checked precondition . self.end = unsafe { Step::backward_unchecked(self.end.clone(), 1) }; . Some(self.end.clone()) . } else { . None . } . } . -- line 673 ---------------------------------------- -- line 708 ---------------------------------------- . . #[inline] . fn next(&mut self) -> Option { . self.spec_next() . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 8,850 ( 0.01%) if self.start < self.end { . let hint = Step::steps_between(&self.start, &self.end); . (hint.unwrap_or(usize::MAX), hint) . } else { . (0, Some(0)) . } . } . . #[inline] -- line 724 ---------------------------------------- -- line 857 ---------------------------------------- . impl FusedIterator for ops::Range {} . . #[stable(feature = "rust1", since = "1.0.0")] . impl Iterator for ops::RangeFrom { . type Item = A; . . #[inline] . fn next(&mut self) -> Option { 552 ( 0.00%) let n = Step::forward(self.start.clone(), 1); . Some(mem::replace(&mut self.start, n)) . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (usize::MAX, None) . } . -- line 873 ---------------------------------------- -- line 998 ---------------------------------------- . . try { accum } . } . } . . impl RangeInclusiveIteratorImpl for ops::RangeInclusive { . #[inline] . fn spec_next(&mut self) -> Option { 137 ( 0.00%) if self.is_empty() { . return None; . } . let is_iterating = self.start < self.end; 86 ( 0.00%) Some(if is_iterating { . // SAFETY: just checked precondition . let n = unsafe { Step::forward_unchecked(self.start.clone(), 1) }; . mem::replace(&mut self.start, n) . } else { . self.exhausted = true; . self.start.clone() . }) . } -- line 1018 ---------------------------------------- -- line 1025 ---------------------------------------- . R: Try, . { . if self.is_empty() { . return try { init }; . } . . let mut accum = init; . 568 ( 0.00%) while self.start < self.end { . // SAFETY: just checked precondition . let n = unsafe { Step::forward_unchecked(self.start.clone(), 1) }; . let n = mem::replace(&mut self.start, n); . accum = f(accum, n)?; . } . . self.exhausted = true; . 22 ( 0.00%) if self.start == self.end { . accum = f(accum, self.start.clone())?; . } . . try { accum } . } . . #[inline] . fn spec_next_back(&mut self) -> Option { -- line 1050 ---------------------------------------- -- line 1098 ---------------------------------------- . . #[inline] . fn next(&mut self) -> Option { . self.spec_next() . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 456 ( 0.00%) if self.is_empty() { . return (0, Some(0)); . } . . match Step::steps_between(&self.start, &self.end) { . 
Some(hint) => (hint.saturating_add(1), hint.checked_add(1)), . None => (usize::MAX, None), . } . } -- line 1114 ---------------------------------------- 73,534 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs -------------------------------------------------------------------------------- Ir -- line 180 ---------------------------------------- . /// assert_eq!(v, &[0.into()]); . /// . /// // Ensure that the last item was dropped. . /// assert!(weak.upgrade().is_none()); . /// ``` . #[stable(feature = "drop_in_place", since = "1.8.0")] . #[lang = "drop_in_place"] . #[allow(unconditional_recursion)] 560,410 ( 0.55%) pub unsafe fn drop_in_place(to_drop: *mut T) { . // Code here does not matter - this is replaced by the . // real drop glue by the compiler. . . // SAFETY: see comment above . unsafe { drop_in_place(to_drop) } . } . . /// Creates a null raw pointer. -- line 196 ---------------------------------------- -- line 366 ---------------------------------------- . let mut tmp = MaybeUninit::::uninit(); . . // Perform the swap . // SAFETY: the caller must guarantee that `x` and `y` are . // valid for writes and properly aligned. `tmp` cannot be . // overlapping either `x` or `y` because `tmp` was just allocated . // on the stack as a separate allocated object. . unsafe { 6 ( 0.00%) copy_nonoverlapping(x, tmp.as_mut_ptr(), 1); 6 ( 0.00%) copy(y, x, 1); // `x` and `y` may overlap . copy_nonoverlapping(tmp.as_ptr(), y, 1); . } . } . . /// Swaps `count * size_of::()` bytes between the two regions of memory . /// beginning at `x` and `y`. The two regions must *not* overlap. . /// . /// # Safety -- line 383 ---------------------------------------- -- line 448 ---------------------------------------- . return; . } . } . . // Direct swapping, for the cases not going through the block optimization. . // SAFETY: the caller must guarantee that `x` and `y` are valid . // for writes, properly aligned, and non-overlapping. . unsafe { 8 ( 0.00%) let z = read(x); . copy_nonoverlapping(y, x, 1); . write(y, z); . } . } . . #[inline] . #[rustc_const_unstable(feature = "const_swap", issue = "83163")] . const unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) { -- line 464 ---------------------------------------- -- line 693 ---------------------------------------- . let mut tmp = MaybeUninit::::uninit(); . // SAFETY: the caller must guarantee that `src` is valid for reads. . // `src` cannot overlap `tmp` because `tmp` was just allocated on . // the stack as a separate allocated object. . // . // Also, since we just wrote a valid value into `tmp`, it is guaranteed . // to be properly initialized. . unsafe { 12,691 ( 0.01%) copy_nonoverlapping(src, tmp.as_mut_ptr(), 1); 87,672 ( 0.09%) tmp.assume_init() . } . } . . /// Reads the value from `src` without moving it. This leaves the . /// memory in `src` unchanged. . /// . /// Unlike [`read`], `read_unaligned` works with unaligned pointers. . /// -- line 710 ---------------------------------------- -- line 884 ---------------------------------------- . #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")] . fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize); . } . . // SAFETY: the caller must guarantee that `dst` is valid for writes. . // `dst` cannot overlap `src` because the caller has mutable access . // to `dst` while `src` is owned by this function. . 
unsafe { 1,266,280 ( 1.25%) copy_nonoverlapping(&src as *const T, dst, 1); . intrinsics::forget(src); . } . } . . /// Overwrites a memory location with the given value without reading or . /// dropping the old value. . /// . /// Unlike [`write()`], the pointer may be unaligned. -- line 900 ---------------------------------------- -- line 1206 ---------------------------------------- . if stride == 1 { . // `stride == 1` case can be computed more simply through `-p (mod a)`, but doing so . // inhibits LLVM's ability to select instructions like `lea`. Instead we compute . // . // round_up_to_next_alignment(p, a) - p . // . // which distributes operations around the load-bearing, but pessimizing `and` sufficiently . // for LLVM to be able to utilize the various optimizations it knows about. 50 ( 0.00%) return wrapping_sub( 348 ( 0.00%) wrapping_add(p as usize, a_minus_one) & wrapping_sub(0, a), . p as usize, . ); . } . . let pmoda = p as usize & a_minus_one; . if pmoda == 0 { . // Already aligned. Yay! . return 0; -- line 1223 ---------------------------------------- -- line 1348 ---------------------------------------- . /// assert!(std::ptr::eq( . /// &wrapper as &dyn Trait as *const dyn Trait as *const u8, . /// &wrapper.member as &dyn Trait as *const dyn Trait as *const u8, . /// )); . /// ``` . #[stable(feature = "ptr_eq", since = "1.17.0")] . #[inline] . pub fn eq(a: *const T, b: *const T) -> bool { 24,670 ( 0.02%) a == b . } . . /// Hash a raw pointer. . /// . /// This can be used to hash a `&T` reference (which coerces to `*const T` implicitly) . /// by its address rather than the value it points to . /// (which is what the `Hash for &T` implementation does). . /// -- line 1364 ---------------------------------------- 78,874 ( 0.08%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs -------------------------------------------------------------------------------- Ir -- line 70 ---------------------------------------- . . impl<'a, T> $name<'a, T> { . // Helper function for creating a slice from the iterator. . #[inline(always)] . fn make_slice(&self) -> &'a [T] { . // SAFETY: the iterator was created from a slice with pointer . // `self.ptr` and length `len!(self)`. This guarantees that all . // the prerequisites for `from_raw_parts` are fulfilled. 1,271 ( 0.00%) unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) } . } . . // Helper function for moving the start of the iterator forwards by `offset` elements, . // returning the old start. . // Unsafe because the offset must not exceed `self.len()`. . #[inline(always)] . unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T { . if mem::size_of::() == 0 { . zst_shrink!(self, offset); . self.ptr.as_ptr() . } else { . let old = self.ptr.as_ptr(); . // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, . // so this new pointer is inside `self` and thus guaranteed to be non-null. 10,264 ( 0.01%) self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().offset(offset)) }; . old . } . } . . // Helper function for moving the end of the iterator backwards by `offset` elements, . // returning the new end. . // Unsafe because the offset must not exceed `self.len()`. . #[inline(always)] -- line 101 ---------------------------------------- -- line 102 ---------------------------------------- . unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T { . 
if mem::size_of::() == 0 { . zst_shrink!(self, offset); . self.ptr.as_ptr() . } else { . // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, . // which is guaranteed to not overflow an `isize`. Also, the resulting pointer . // is in bounds of `slice`, which fulfills the other requirements for `offset`. 19 ( 0.00%) self.end = unsafe { self.end.offset(-offset) }; . self.end . } . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl ExactSizeIterator for $name<'_, T> { . #[inline(always)] . fn len(&self) -> usize { 4,972 ( 0.00%) len!(self) . } . . #[inline(always)] . fn is_empty(&self) -> bool { . is_empty!(self) . } . } . -- line 128 ---------------------------------------- -- line 134 ---------------------------------------- . fn next(&mut self) -> Option<$elem> { . // could be implemented with slices, but this avoids bounds checks . . // SAFETY: `assume` calls are safe since a slice's start pointer . // must be non-null, and slices over non-ZSTs must also have a . // non-null end pointer. The call to `next_unchecked!` is safe . // since we check if the iterator is empty first. . unsafe { 4,080 ( 0.00%) assume(!self.ptr.as_ptr().is_null()); . if mem::size_of::() != 0 { 1,451 ( 0.00%) assume(!self.end.is_null()); . } 2,854,353 ( 2.83%) if is_empty!(self) { . None . } else { . Some(next_unchecked!(self)) . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 20,563 ( 0.02%) let exact = len!(self); . (exact, Some(exact)) . } . . #[inline] . fn count(self) -> usize { . len!(self) . } . . #[inline] . fn nth(&mut self, n: usize) -> Option<$elem> { 771 ( 0.00%) if n >= len!(self) { . // This iterator is now empty. . if mem::size_of::() == 0 { . // We have to do it this way as `ptr` may never be 0, but `end` . // could be (due to wrapping). . self.end = self.ptr.as_ptr(); . } else { . // SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr . unsafe { -- line 175 ---------------------------------------- -- line 203 ---------------------------------------- . // faster to compile. . #[inline] . fn for_each(mut self, mut f: F) . where . Self: Sized, . F: FnMut(Self::Item), . { . while let Some(x) = self.next() { 92 ( 0.00%) f(x); . } . } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn all(&mut self, mut f: F) -> bool . where . Self: Sized, . F: FnMut(Self::Item) -> bool, . { . while let Some(x) = self.next() { 2,452 ( 0.00%) if !f(x) { . return false; . } . } . true . } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn any(&mut self, mut f: F) -> bool . where . Self: Sized, . F: FnMut(Self::Item) -> bool, . { 8,420 ( 0.01%) while let Some(x) = self.next() { 13,849 ( 0.01%) if f(x) { . return true; . } . } . false . } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn find
(&mut self, mut predicate: P) -> Option . where . Self: Sized, . P: FnMut(&Self::Item) -> bool, . { 430 ( 0.00%) while let Some(x) = self.next() { 4,210 ( 0.00%) if predicate(&x) { . return Some(x); . } . } . None 304 ( 0.00%) } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn find_map(&mut self, mut f: F) -> Option . where . Self: Sized, . F: FnMut(Self::Item) -> Option, . { 1,215 ( 0.00%) while let Some(x) = self.next() { 3,190 ( 0.00%) if let Some(y) = f(x) { 547 ( 0.00%) return Some(y); . } . } . None 165 ( 0.00%) } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. Also, the `assume` avoids a bounds check. . #[inline] . #[rustc_inherit_overflow_checks] . fn position
(&mut self, mut predicate: P) -> Option where . Self: Sized, . P: FnMut(Self::Item) -> bool, . { . let n = len!(self); . let mut i = 0; . while let Some(x) = self.next() { 8,065 ( 0.01%) if predicate(x) { . // SAFETY: we are guaranteed to be in bounds by the loop invariant: . // when `i >= n`, `self.next()` returns `None` and the loop breaks. . unsafe { assume(i < n) }; . return Some(i); . } . i += 1; . } . None -- line 303 ---------------------------------------- -- line 308 ---------------------------------------- . // faster to compile. Also, the `assume` avoids a bounds check. . #[inline] . fn rposition
<P>
(&mut self, mut predicate: P) -> Option where . P: FnMut(Self::Item) -> bool, . Self: Sized + ExactSizeIterator + DoubleEndedIterator . { . let n = len!(self); . let mut i = n; 375 ( 0.00%) while let Some(x) = self.next_back() { 5,662 ( 0.01%) i -= 1; 5,152 ( 0.01%) if predicate(x) { . // SAFETY: `i` must be lower than `n` since it starts at `n` . // and is only decreasing. . unsafe { assume(i < n) }; . return Some(i); . } . } . None . } -- line 326 ---------------------------------------- -- line 332 ---------------------------------------- . // the returned references is guaranteed to refer to an element . // of the slice and thus guaranteed to be valid. . // . // Also note that the caller also guarantees that we're never . // called with the same index again, and that no other methods . // that will access this subslice are called, so it is valid . // for the returned reference to be mutable in the case of . // `IterMut` 2,036 ( 0.00%) unsafe { & $( $mut_ )? * self.ptr.as_ptr().add(idx) } . } . . $($extra)* . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T> DoubleEndedIterator for $name<'a, T> { . #[inline] -- line 348 ---------------------------------------- -- line 351 ---------------------------------------- . . // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null, . // and slices over non-ZSTs must also have a non-null end pointer. . // The call to `next_back_unchecked!` is safe since we check if the iterator is . // empty first. . unsafe { . assume(!self.ptr.as_ptr().is_null()); . if mem::size_of::() != 0 { 20 ( 0.00%) assume(!self.end.is_null()); . } 16,851 ( 0.02%) if is_empty!(self) { . None . } else { . Some(next_back_unchecked!(self)) . } . } . } . . #[inline] -- line 369 ---------------------------------------- 146,438 ( 0.14%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/trait_def.rs -------------------------------------------------------------------------------- Ir -- line 47 ---------------------------------------- . . /// List of functions from `#[rustc_must_implement_one_of]` attribute one of which . /// must be implemented. . pub must_implement_one_of: Option>, . } . . /// Whether this trait is treated specially by the standard library . /// specialization lint. 24 ( 0.00%) #[derive(HashStable, PartialEq, Clone, Copy, TyEncodable, TyDecodable)] . pub enum TraitSpecializationKind { . /// The default. Specializing on this trait is not allowed. . None, . /// Specializing on this trait is allowed because it doesn't have any . /// methods. For example `Sized` or `FusedIterator`. . /// Applies to traits with the `rustc_unsafe_specialization_marker` . /// attribute. . Marker, . /// Specializing on this trait is allowed because all of the impls of this . /// trait are "always applicable". Always applicable means that if . /// `X<'x>: T<'y>` for any lifetimes, then `for<'a, 'b> X<'a>: T<'b>`. . /// Applies to traits with the `rustc_specialization_trait` attribute. . AlwaysApplicable, . } . 200 ( 0.00%) #[derive(Default, Debug, HashStable)] . pub struct TraitImpls { . blanket_impls: Vec, . /// Impls indexed by their simplified self type, for fast lookup. . non_blanket_impls: FxIndexMap>, . } . . impl TraitImpls { . pub fn blanket_impls(&self) -> &[DefId] { . self.blanket_impls.as_slice() . } . } . . impl<'tcx> TraitDef { 84 ( 0.00%) pub fn new( . def_id: DefId, . unsafety: hir::Unsafety, . 
paren_sugar: bool, . has_auto_impl: bool, . is_marker: bool, . skip_array_during_method_dispatch: bool, . specialization_kind: TraitSpecializationKind, . def_path_hash: DefPathHash, . must_implement_one_of: Option>, . ) -> TraitDef { 140 ( 0.00%) TraitDef { . def_id, . unsafety, . paren_sugar, . has_auto_impl, . is_marker, . skip_array_during_method_dispatch, . specialization_kind, . def_path_hash, . must_implement_one_of, . } 14 ( 0.00%) } . 148 ( 0.00%) pub fn ancestors( . &self, . tcx: TyCtxt<'tcx>, . of_impl: DefId, . ) -> Result, ErrorReported> { 185 ( 0.00%) specialization_graph::ancestors(tcx, self.def_id, of_impl) 111 ( 0.00%) } . } . . impl<'tcx> TyCtxt<'tcx> { . pub fn for_each_impl(self, def_id: DefId, mut f: F) { . let impls = self.trait_impls_of(def_id); . . for &impl_def_id in impls.blanket_impls.iter() { . f(impl_def_id); -- line 123 ---------------------------------------- -- line 127 ---------------------------------------- . for &impl_def_id in v { . f(impl_def_id); . } . } . } . . /// Iterate over every impl that could possibly match the . /// self type `self_ty`. 1,088 ( 0.00%) pub fn for_each_relevant_impl( . self, . def_id: DefId, . self_ty: Ty<'tcx>, . mut f: F, . ) { . let _: Option<()> = self.find_map_relevant_impl(def_id, self_ty, |did| { . f(did); . None . }); 1,088 ( 0.00%) } . . /// Applies function to every impl that could possibly match the self type `self_ty` and returns . /// the first non-none value. . pub fn find_map_relevant_impl Option>( . self, . def_id: DefId, . self_ty: Ty<'tcx>, . mut f: F, -- line 153 ---------------------------------------- -- line 154 ---------------------------------------- . ) -> Option { . // FIXME: This depends on the set of all impls for the trait. That is . // unfortunate wrt. incremental compilation. . // . // If we want to be faster, we could have separate queries for . // blanket and non-blanket impls, and compare them separately. . let impls = self.trait_impls_of(def_id); . 42 ( 0.00%) for &impl_def_id in impls.blanket_impls.iter() { . if let result @ Some(_) = f(impl_def_id) { . return result; . } . } . . // Note that we're using `SimplifyParams::Yes` to query `non_blanket_impls` while using . // `SimplifyParams::No` while actually adding them. . // . // This way, when searching for some impl for `T: Trait`, we do not look at any impls . // whose outer level is not a parameter or projection. Especially for things like . // `T: Clone` this is incredibly useful as we would otherwise look at all the impls . // of `Clone` for `Option`, `Vec`, `ConcreteType` and so on. 1,676 ( 0.00%) if let Some(simp) = 1,114 ( 0.00%) fast_reject::simplify_type(self, self_ty, SimplifyParams::Yes, StripReferences::No) . { 154 ( 0.00%) if let Some(impls) = impls.non_blanket_impls.get(&simp) { 170 ( 0.00%) for &impl_def_id in impls { 14 ( 0.00%) if let result @ Some(_) = f(impl_def_id) { . return result; . } . } . } . } else { . for &impl_def_id in impls.non_blanket_impls.values().flatten() { . if let result @ Some(_) = f(impl_def_id) { . return result; . } . } . } . . None 126 ( 0.00%) } . . /// Returns an iterator containing all impls 49 ( 0.00%) pub fn all_impls(self, def_id: DefId) -> impl Iterator + 'tcx { . let TraitImpls { blanket_impls, non_blanket_impls } = self.trait_impls_of(def_id); . . blanket_impls.iter().chain(non_blanket_impls.iter().map(|(_, v)| v).flatten()).cloned() 63 ( 0.00%) } . } . . // Query provider for `trait_impls_of`. 
220 ( 0.00%) pub(super) fn trait_impls_of_provider(tcx: TyCtxt<'_>, trait_id: DefId) -> TraitImpls { . let mut impls = TraitImpls::default(); . . // Traits defined in the current crate can't have impls in upstream . // crates, so we don't bother querying the cstore. 20 ( 0.00%) if !trait_id.is_local() { 684 ( 0.00%) for &cnum in tcx.crates(()).iter() { 58,480 ( 0.06%) for &(impl_def_id, simplified_self_ty) in 342 ( 0.00%) tcx.implementations_of_trait((cnum, trait_id)).iter() . { 4,874 ( 0.00%) if let Some(simplified_self_ty) = simplified_self_ty { 34,111 ( 0.03%) impls . .non_blanket_impls . .entry(simplified_self_ty) . .or_default() . .push(impl_def_id); . } else { . impls.blanket_impls.push(impl_def_id); . } . } . } . } . 126 ( 0.00%) for &impl_def_id in tcx.hir().trait_impls(trait_id) { . let impl_def_id = impl_def_id.to_def_id(); . . let impl_self_ty = tcx.type_of(impl_def_id); 6 ( 0.00%) if impl_self_ty.references_error() { . continue; . } . 12 ( 0.00%) if let Some(simplified_self_ty) = 24 ( 0.00%) fast_reject::simplify_type(tcx, impl_self_ty, SimplifyParams::No, StripReferences::No) . { 12 ( 0.00%) impls.non_blanket_impls.entry(simplified_self_ty).or_default().push(impl_def_id); . } else { . impls.blanket_impls.push(impl_def_id); . } . } . . impls 160 ( 0.00%) } 9,832 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs -------------------------------------------------------------------------------- Ir -- line 417 ---------------------------------------- . /// # #![allow(unused_mut)] . /// let mut vec: Vec = Vec::new(); . /// ``` . #[inline] . #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub const fn new() -> Self { 91,514 ( 0.09%) Vec { buf: RawVec::NEW, len: 0 } 43 ( 0.00%) } . . /// Constructs a new, empty `Vec` with the specified capacity. . /// . /// The vector will be able to hold exactly `capacity` elements without . /// reallocating. If `capacity` is 0, the vector will not allocate. . /// . /// It is important to note that although the returned vector has the . /// *capacity* specified, the vector will have a zero *length*. For an -- line 434 ---------------------------------------- -- line 601 ---------------------------------------- . /// vec.push(11); . /// assert_eq!(vec.len(), 11); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { 34,406 ( 0.03%) Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 } . } . . /// Creates a `Vec` directly from the raw components of another vector. . /// . /// # Safety . /// . /// This is highly unsafe, due to the number of invariants that aren't . /// checked: -- line 617 ---------------------------------------- -- line 677 ---------------------------------------- . /// // Put everything back together into a Vec . /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone()); . /// assert_eq!(rebuilt, [4, 5, 6]); . /// } . /// ``` . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self { 1,539 ( 0.00%) unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } } . } . . 
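Note on the `trait_impls_of_provider` annotation above: most of its cost sits in the loop that buckets upstream impls by their simplified self type, so that later lookups (`for_each_relevant_impl`, `find_map_relevant_impl`) only scan one bucket plus the blanket impls instead of every impl of the trait. The following is a minimal sketch of that indexing pattern only; `SimplifiedType` and `ImplId` here are hypothetical stand-ins, not rustc's real types.

    use std::collections::HashMap;

    // Hypothetical stand-ins for rustc's SimplifiedType / DefId.
    #[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]
    enum SimplifiedType { Uint, Str, Vec }

    type ImplId = u32;

    #[derive(Default)]
    struct TraitImpls {
        blanket_impls: Vec<ImplId>,
        non_blanket_impls: HashMap<SimplifiedType, Vec<ImplId>>,
    }

    impl TraitImpls {
        // Record an impl either under its simplified self type or as a blanket impl.
        fn insert(&mut self, simplified: Option<SimplifiedType>, impl_id: ImplId) {
            match simplified {
                Some(ty) => self.non_blanket_impls.entry(ty).or_default().push(impl_id),
                None => self.blanket_impls.push(impl_id),
            }
        }

        // Lookup walks only the blanket impls plus the single matching bucket.
        fn relevant_impls(&self, self_ty: SimplifiedType) -> impl Iterator<Item = ImplId> + '_ {
            self.blanket_impls
                .iter()
                .chain(self.non_blanket_impls.get(&self_ty).into_iter().flatten())
                .copied()
        }
    }

    fn main() {
        let mut impls = TraitImpls::default();
        impls.insert(Some(SimplifiedType::Uint), 1);
        impls.insert(Some(SimplifiedType::Str), 2);
        impls.insert(Some(SimplifiedType::Vec), 3);
        impls.insert(None, 4); // blanket impl: considered for every self type

        // Lookup for a Vec-shaped self type scans its own bucket plus blanket impls.
        let found: Vec<ImplId> = impls.relevant_impls(SimplifiedType::Vec).collect();
        assert_eq!(found, vec![4, 3]);
    }
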
/// Decomposes a `Vec` into its raw components. . /// . /// Returns the raw pointer to the underlying data, the length of . /// the vector (in elements), and the allocated capacity of the . /// data (in elements). These are the same arguments in the same . /// order as the arguments to [`from_raw_parts`]. -- line 693 ---------------------------------------- -- line 778 ---------------------------------------- . /// . /// ``` . /// let vec: Vec = Vec::with_capacity(10); . /// assert_eq!(vec.capacity(), 10); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn capacity(&self) -> usize { 8,255 ( 0.01%) self.buf.capacity() . } . . /// Reserves capacity for at least `additional` more elements to be inserted . /// in the given `Vec`. The collection may reserve more space to avoid . /// frequent reallocations. After calling `reserve`, capacity will be . /// greater than or equal to `self.len() + additional`. Does nothing if . /// capacity is already sufficient. . /// -- line 794 ---------------------------------------- -- line 801 ---------------------------------------- . /// ``` . /// let mut vec = vec![1]; . /// vec.reserve(10); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve(&mut self, additional: usize) { 6,766 ( 0.01%) self.buf.reserve(self.len, additional); . } . . /// Reserves the minimum capacity for exactly `additional` more elements to . /// be inserted in the given `Vec`. After calling `reserve_exact`, . /// capacity will be greater than or equal to `self.len() + additional`. . /// Does nothing if the capacity is already sufficient. . /// . /// Note that the allocator may give the collection more space than it -- line 817 ---------------------------------------- -- line 829 ---------------------------------------- . /// ``` . /// let mut vec = vec![1]; . /// vec.reserve_exact(10); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve_exact(&mut self, additional: usize) { 625 ( 0.00%) self.buf.reserve_exact(self.len, additional); . } . . /// Tries to reserve capacity for at least `additional` more elements to be inserted . /// in the given `Vec`. The collection may reserve more space to avoid . /// frequent reallocations. After calling `try_reserve`, capacity will be . /// greater than or equal to `self.len() + additional`. Does nothing if . /// capacity is already sufficient. . /// -- line 845 ---------------------------------------- -- line 930 ---------------------------------------- . /// assert!(vec.capacity() >= 3); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn shrink_to_fit(&mut self) { . // The capacity is never less than the length, and there's nothing to do when . // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit` . // by only calling it with a greater capacity. 726 ( 0.00%) if self.capacity() > self.len { 129 ( 0.00%) self.buf.shrink_to_fit(self.len); . } . } . . /// Shrinks the capacity of the vector with a lower bound. . /// . /// The capacity will remain at least as large as both the length . /// and the supplied value. . /// -- line 947 ---------------------------------------- -- line 990 ---------------------------------------- . /// let slice = vec.into_boxed_slice(); . /// assert_eq!(slice.into_vec().capacity(), 3); . /// ``` . #[cfg(not(no_global_oom_handling))] . 
#[stable(feature = "rust1", since = "1.0.0")] . pub fn into_boxed_slice(mut self) -> Box<[T], A> { . unsafe { . self.shrink_to_fit(); 188 ( 0.00%) let me = ManuallyDrop::new(self); . let buf = ptr::read(&me.buf); . let len = me.len(); . buf.into_box(len).assume_init() . } . } . . /// Shortens the vector, keeping the first `len` elements and dropping . /// the rest. -- line 1006 ---------------------------------------- -- line 1040 ---------------------------------------- . /// let mut vec = vec![1, 2, 3]; . /// vec.truncate(0); . /// assert_eq!(vec, []); . /// ``` . /// . /// [`clear`]: Vec::clear . /// [`drain`]: Vec::drain . #[stable(feature = "rust1", since = "1.0.0")] 3,587 ( 0.00%) pub fn truncate(&mut self, len: usize) { . // This is safe because: . // . // * the slice passed to `drop_in_place` is valid; the `len > self.len` . // case avoids creating an invalid slice, and . // * the `len` of the vector is shrunk before calling `drop_in_place`, . // such that no value will be dropped twice in case `drop_in_place` . // were to panic once (if it panics twice, the program aborts). . unsafe { . // Note: It's intentional that this is `>` and not `>=`. . // Changing it to `>=` has negative performance . // implications in some cases. See #78884 for more. 3,743 ( 0.00%) if len > self.len { . return; . } . let remaining_len = self.len - len; . let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len); 4,453 ( 0.00%) self.len = len; 222 ( 0.00%) ptr::drop_in_place(s); . } 4,304 ( 0.00%) } . . /// Extracts a slice containing the entire vector. . /// . /// Equivalent to `&s[..]`. . /// . /// # Examples . /// . /// ``` -- line 1076 ---------------------------------------- -- line 1126 ---------------------------------------- . /// ``` . /// . /// [`as_mut_ptr`]: Vec::as_mut_ptr . #[stable(feature = "vec_as_ptr", since = "1.37.0")] . #[inline] . pub fn as_ptr(&self) -> *const T { . // We shadow the slice method of the same name to avoid going through . // `deref`, which creates an intermediate reference. 93,707 ( 0.09%) let ptr = self.buf.ptr(); . unsafe { . assume(!ptr.is_null()); . } . ptr . } . . /// Returns an unsafe mutable pointer to the vector's buffer. . /// -- line 1142 ---------------------------------------- -- line 1162 ---------------------------------------- . /// } . /// assert_eq!(&*x, &[0, 1, 2, 3]); . /// ``` . #[stable(feature = "vec_as_ptr", since = "1.37.0")] . #[inline] . pub fn as_mut_ptr(&mut self) -> *mut T { . // We shadow the slice method of the same name to avoid going through . // `deref_mut`, which creates an intermediate reference. 427,889 ( 0.42%) let ptr = self.buf.ptr(); . unsafe { . assume(!ptr.is_null()); . } . ptr . } . . /// Returns a reference to the underlying allocator. . #[unstable(feature = "allocator_api", issue = "32838")] -- line 1178 ---------------------------------------- -- line 1259 ---------------------------------------- . /// . /// Normally, here, one would use [`clear`] instead to correctly drop . /// the contents and thus not leak memory. . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub unsafe fn set_len(&mut self, new_len: usize) { . debug_assert!(new_len <= self.capacity()); . 13,205 ( 0.01%) self.len = new_len; 76 ( 0.00%) } . . /// Removes an element from the vector and returns it. . /// . /// The removed element is replaced by the last element of the vector. . /// . /// This does not preserve ordering, but is *O*(1). . /// If you need to preserve the element order, use [`remove`] instead. . 
/// -- line 1276 ---------------------------------------- -- line 1329 ---------------------------------------- . /// let mut vec = vec![1, 2, 3]; . /// vec.insert(1, 4); . /// assert_eq!(vec, [1, 4, 2, 3]); . /// vec.insert(4, 5); . /// assert_eq!(vec, [1, 4, 2, 3, 5]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 196 ( 0.00%) pub fn insert(&mut self, index: usize, element: T) { . #[cold] . #[inline(never)] . fn assert_failed(index: usize, len: usize) -> ! { . panic!("insertion index (is {}) should be <= len (is {})", index, len); . } . 28 ( 0.00%) let len = self.len(); 56 ( 0.00%) if index > len { . assert_failed(index, len); . } . . // space for the new element 62 ( 0.00%) if len == self.buf.capacity() { . self.reserve(1); . } . . unsafe { . // infallible . // The spot to put the new value . { . let p = self.as_mut_ptr().add(index); . // Shift everything over to make space. (Duplicating the . // `index`th element into two consecutive places.) 93 ( 0.00%) ptr::copy(p, p.offset(1), len - index); . // Write it in, overwriting the first copy of the `index`th . // element. . ptr::write(p, element); . } 62 ( 0.00%) self.set_len(len + 1); . } 168 ( 0.00%) } . . /// Removes and returns the element at position `index` within the vector, . /// shifting all elements after it to the left. . /// . /// Note: Because this shifts over the remaining elements, it has a . /// worst-case performance of *O*(*n*). If you don't need the order of elements . /// to be preserved, use [`swap_remove`] instead. If you'd like to remove . /// elements from the beginning of the `Vec`, consider using -- line 1376 ---------------------------------------- -- line 1395 ---------------------------------------- . pub fn remove(&mut self, index: usize) -> T { . #[cold] . #[inline(never)] . #[track_caller] . fn assert_failed(index: usize, len: usize) -> ! { . panic!("removal index (is {}) should be < len (is {})", index, len); . } . 41 ( 0.00%) let len = self.len(); 164 ( 0.00%) if index >= len { . assert_failed(index, len); . } . unsafe { . // infallible . let ret; . { . // the place we are taking from. . let ptr = self.as_mut_ptr().add(index); . // copy it out, unsafely having a copy of the value on . // the stack and in the vector at the same time. . ret = ptr::read(ptr); . . // Shift everything down to fill in that spot. 148 ( 0.00%) ptr::copy(ptr.offset(1), ptr, len - index - 1); . } 44 ( 0.00%) self.set_len(len - 1); . ret . } . } . . /// Retains only the elements specified by the predicate. . /// . /// In other words, remove all elements `e` such that `f(&e)` returns `false`. . /// This method operates in place, visiting each element exactly once in the -- line 1428 ---------------------------------------- -- line 1442 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2, 3, 4, 5]; . /// let keep = [false, true, true, false, true]; . /// let mut iter = keep.iter(); . /// vec.retain(|_| *iter.next().unwrap()); . /// assert_eq!(vec, [2, 3, 5]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 5,383 ( 0.01%) pub fn retain(&mut self, mut f: F) . where . F: FnMut(&T) -> bool, . { 448 ( 0.00%) self.retain_mut(|elem| f(elem)); 5,192 ( 0.01%) } . . /// Retains only the elements specified by the predicate, passing a mutable reference to it. . /// . /// In other words, remove all elements `e` such that `f(&mut e)` returns `false`. . /// This method operates in place, visiting each element exactly once in the . 
/// original order, and preserves the order of the retained elements. . /// . /// # Examples -- line 1463 ---------------------------------------- -- line 1474 ---------------------------------------- . /// }); . /// assert_eq!(vec, [2, 3, 4]); . /// ``` . #[unstable(feature = "vec_retain_mut", issue = "90829")] . pub fn retain_mut(&mut self, mut f: F) . where . F: FnMut(&mut T) -> bool, . { 726 ( 0.00%) let original_len = self.len(); . // Avoid double drop if the drop guard is not executed, . // since we may make some holes during the process. . unsafe { self.set_len(0) }; . . // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked] . // |<- processed len ->| ^- next to check . // |<- deleted cnt ->| . // |<- original_len ->| -- line 1490 ---------------------------------------- -- line 1499 ---------------------------------------- . v: &'a mut Vec, . processed_len: usize, . deleted_cnt: usize, . original_len: usize, . } . . impl Drop for BackshiftOnDrop<'_, T, A> { . fn drop(&mut self) { 500 ( 0.00%) if self.deleted_cnt > 0 { . // SAFETY: Trailing unchecked items must be valid since we never touch them. . unsafe { . ptr::copy( . self.v.as_ptr().add(self.processed_len), 286 ( 0.00%) self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt), . self.original_len - self.processed_len, . ); . } . } . // SAFETY: After filling holes, all items are in contiguous memory. . unsafe { 2 ( 0.00%) self.v.set_len(self.original_len - self.deleted_cnt); . } . } . } . . let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len }; . . fn process_loop( . original_len: usize, . f: &mut F, . g: &mut BackshiftOnDrop<'_, T, A>, . ) where . F: FnMut(&mut T) -> bool, . { 7,786 ( 0.01%) while g.processed_len != original_len { . // SAFETY: Unchecked element must be valid. . let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) }; 2,087 ( 0.00%) if !f(cur) { . // Advance early to avoid double drop if `drop_in_place` panicked. 32 ( 0.00%) g.processed_len += 1; 12 ( 0.00%) g.deleted_cnt += 1; . // SAFETY: We never touch this element again after dropped. . unsafe { ptr::drop_in_place(cur) }; . // We already advanced the counter. . if DELETED { . continue; . } else { . break; . } . } . if DELETED { . // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element. . // We use copy for move, and never touch this element again. . unsafe { 36 ( 0.00%) let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt); . ptr::copy_nonoverlapping(cur, hole_slot, 1); . } . } 24 ( 0.00%) g.processed_len += 1; . } . } . . // Stage 1: Nothing was deleted. . process_loop::(original_len, &mut f, &mut g); . . // Stage 2: Some elements were deleted. . process_loop::(original_len, &mut f, &mut g); -- line 1565 ---------------------------------------- -- line 1606 ---------------------------------------- . /// ``` . /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; . /// . /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); . /// . /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); . /// ``` . #[stable(feature = "dedup_by", since = "1.16.0")] 7 ( 0.00%) pub fn dedup_by(&mut self, mut same_bucket: F) . where . F: FnMut(&mut T, &mut T) -> bool, . { 1 ( 0.00%) let len = self.len(); 70 ( 0.00%) if len <= 1 { . return; . } . . /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ . struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> { . /* Offset of the element we want to check if it is duplicate */ . read: usize, . 
-- line 1627 ---------------------------------------- -- line 1670 ---------------------------------------- . let ptr = gap.vec.as_mut_ptr(); . . /* Drop items while going through Vec, it should be more efficient than . * doing slice partition_dedup + truncate */ . . /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr . * are always in-bounds and read_ptr never aliases prev_ptr */ . unsafe { 12 ( 0.00%) while gap.read < len { . let read_ptr = ptr.add(gap.read); . let prev_ptr = ptr.add(gap.write.wrapping_sub(1)); . 16 ( 0.00%) if same_bucket(&mut *read_ptr, &mut *prev_ptr) { . // Increase `gap.read` now since the drop may panic. . gap.read += 1; . /* We have found duplicate, drop it in-place */ . ptr::drop_in_place(read_ptr); . } else { . let write_ptr = ptr.add(gap.write); . . /* Because `read_ptr` can be equal to `write_ptr`, we either . * have to use `copy` or conditional `copy_nonoverlapping`. . * Looks like the first option is faster. */ . ptr::copy(read_ptr, write_ptr, 1); . . /* We have filled that place, so go further */ 4 ( 0.00%) gap.write += 1; . gap.read += 1; . } . } . . /* Technically we could let `gap` clean up with its Drop, but . * when `same_bucket` is guaranteed to not panic, this bloats a little . * the codegen, so we just do it manually */ . gap.vec.set_len(gap.write); . mem::forget(gap); . } 8 ( 0.00%) } . . /// Appends an element to the back of a collection. . /// . /// # Panics . /// . /// Panics if the new capacity exceeds `isize::MAX` bytes. . /// . /// # Examples -- line 1715 ---------------------------------------- -- line 1717 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2]; . /// vec.push(3); . /// assert_eq!(vec, [1, 2, 3]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] 10 ( 0.00%) pub fn push(&mut self, value: T) { . // This will panic or abort if we would allocate > isize::MAX bytes . // or if the length increment would overflow for zero-sized types. 1,197,785 ( 1.19%) if self.len == self.buf.capacity() { 24,983 ( 0.02%) self.buf.reserve_for_push(self.len); . } . unsafe { 9,585 ( 0.01%) let end = self.as_mut_ptr().add(self.len); . ptr::write(end, value); 1,229,057 ( 1.22%) self.len += 1; . } 8 ( 0.00%) } . . /// Removes the last element from a vector and returns it, or [`None`] if it . /// is empty. . /// . /// If you'd like to pop the first element, consider using . /// [`VecDeque::pop_front`] instead. . /// . /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front -- line 1744 ---------------------------------------- -- line 1748 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2, 3]; . /// assert_eq!(vec.pop(), Some(3)); . /// assert_eq!(vec, [1, 2]); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn pop(&mut self) -> Option { 21,679 ( 0.02%) if self.len == 0 { 2 ( 0.00%) None . } else { . unsafe { 16,964 ( 0.02%) self.len -= 1; 2,851 ( 0.00%) Some(ptr::read(self.as_ptr().add(self.len()))) . } . } . } . . /// Moves all the elements of `other` into `Self`, leaving `other` empty. . /// . /// # Panics . /// -- line 1769 ---------------------------------------- -- line 1776 ---------------------------------------- . /// let mut vec2 = vec![4, 5, 6]; . /// vec.append(&mut vec2); . /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]); . /// assert_eq!(vec2, []); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . 
#[stable(feature = "append", since = "1.4.0")] 42 ( 0.00%) pub fn append(&mut self, other: &mut Self) { . unsafe { . self.append_elements(other.as_slice() as _); . other.set_len(0); . } 36 ( 0.00%) } . . /// Appends elements to `Self` from other buffer. . #[cfg(not(no_global_oom_handling))] . #[inline] . unsafe fn append_elements(&mut self, other: *const [T]) { . let count = unsafe { (*other).len() }; . self.reserve(count); 788 ( 0.00%) let len = self.len(); . unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; 8,613 ( 0.01%) self.len += count; . } . . /// Creates a draining iterator that removes the specified range in the vector . /// and yields the removed items. . /// . /// When the iterator **is** dropped, all elements in the range are removed . /// from the vector, even if the iterator was not fully consumed. If the . /// iterator **is not** dropped (with [`mem::forget`] for example), it is -- line 1807 ---------------------------------------- -- line 1834 ---------------------------------------- . // When the Drain is first created, it shortens the length of . // the source vector to make sure no uninitialized or moved-from elements . // are accessible at all if the Drain's destructor never gets to run. . // . // Drain will ptr::read out the values to remove. . // When finished, remaining tail of the vec is copied back to cover . // the hole, and the vector length is restored to the new length. . // 530 ( 0.00%) let len = self.len(); . let Range { start, end } = slice::range(range, ..len); . . unsafe { . // set self.vec length's to start, to be safe in case Drain is leaked . self.set_len(start); . // Use the borrow in the IterMut to indicate borrowing behavior of the . // whole Drain iterator (like &mut T). 6 ( 0.00%) let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start); 27 ( 0.00%) Drain { . tail_start: end, 6 ( 0.00%) tail_len: len - end, . iter: range_slice.iter(), . vec: NonNull::from(self), . } . } . } . . /// Clears the vector, removing all values. . /// -- line 1861 ---------------------------------------- -- line 1869 ---------------------------------------- . /// . /// v.clear(); . /// . /// assert!(v.is_empty()); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn clear(&mut self) { 6,419 ( 0.01%) self.truncate(0) . } . . /// Returns the number of elements in the vector, also referred to . /// as its 'length'. . /// . /// # Examples . /// . /// ``` -- line 1885 ---------------------------------------- -- line 1900 ---------------------------------------- . /// let mut v = Vec::new(); . /// assert!(v.is_empty()); . /// . /// v.push(1); . /// assert!(!v.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn is_empty(&self) -> bool { 10,060 ( 0.01%) self.len() == 0 . } . . /// Splits the collection into two at the given index. . /// . /// Returns a newly allocated vector containing the elements in the range . /// `[at, len)`. After the call, the original vector will be left containing . /// the elements `[0, at)` with its previous capacity unchanged. . /// -- line 1916 ---------------------------------------- -- line 1935 ---------------------------------------- . A: Clone, . { . #[cold] . #[inline(never)] . fn assert_failed(at: usize, len: usize) -> ! { . panic!("`at` split index (is {}) should be <= len (is {})", at, len); . } . 60 ( 0.00%) if at > self.len() { . assert_failed(at, self.len()); . } . 48 ( 0.00%) if at == 0 { . 
// the new vector can take over the original buffer and avoid the copy . return mem::replace( . self, . Vec::with_capacity_in(self.capacity(), self.allocator().clone()), . ); . } . . let other_len = self.len - at; -- line 1955 ---------------------------------------- -- line 1988 ---------------------------------------- . /// . /// let mut vec = vec![]; . /// let mut p = 1; . /// vec.resize_with(4, || { p *= 2; p }); . /// assert_eq!(vec, [2, 4, 8, 16]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "vec_resize_with", since = "1.33.0")] 749 ( 0.00%) pub fn resize_with(&mut self, new_len: usize, f: F) . where . F: FnMut() -> T, . { 97 ( 0.00%) let len = self.len(); 194 ( 0.00%) if new_len > len { 776 ( 0.00%) self.extend_with(new_len - len, ExtendFunc(f)); . } else { . self.truncate(new_len); . } 652 ( 0.00%) } . . /// Consumes and leaks the `Vec`, returning a mutable reference to the contents, . /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime . /// `'a`. If the type has only static references, or none at all, then this . /// may be chosen to be `'static`. . /// . /// As of Rust 1.57, this method does not reallocate or shrink the `Vec`, . /// so the leaked allocation may include unused capacity that is not part -- line 2014 ---------------------------------------- -- line 2070 ---------------------------------------- . #[stable(feature = "vec_spare_capacity", since = "1.60.0")] . #[inline] . pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit] { . // Note: . // This method is not implemented in terms of `split_at_spare_mut`, . // to prevent invalidation of pointers to the buffer. . unsafe { . slice::from_raw_parts_mut( 2 ( 0.00%) self.as_mut_ptr().add(self.len) as *mut MaybeUninit, 18 ( 0.00%) self.buf.capacity() - self.len, . ) . } . } . . /// Returns vector content as a slice of `T`, along with the remaining spare . /// capacity of the vector as a slice of `MaybeUninit`. . /// . /// The returned spare capacity slice can be used to fill the vector with data -- line 2087 ---------------------------------------- -- line 2193 ---------------------------------------- . /// assert_eq!(vec, [1, 2]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "vec_resize", since = "1.5.0")] . pub fn resize(&mut self, new_len: usize, value: T) { . let len = self.len(); . . if new_len > len { 126 ( 0.00%) self.extend_with(new_len - len, ExtendElement(value)) . } else { . self.truncate(new_len); . } . } . . /// Clones and appends all elements in a slice to the `Vec`. . /// . /// Iterates over the slice `other`, clones each element, and then appends -- line 2209 ---------------------------------------- -- line 2291 ---------------------------------------- . fn last(mut self) -> T { . (self.0)() . } . } . . impl Vec { . #[cfg(not(no_global_oom_handling))] . /// Extend the vector by `n` values, using the given generator. 3,349 ( 0.00%) fn extend_with>(&mut self, n: usize, mut value: E) { . self.reserve(n); . . unsafe { 262 ( 0.00%) let mut ptr = self.as_mut_ptr().add(self.len()); . // Use SetLenOnDrop to work around bug where compiler . // might not realize the store through `ptr` through self.set_len() . // don't alias. . let mut local_len = SetLenOnDrop::new(&mut self.len); . . // Write all elements except the last one . for _ in 1..n { . ptr::write(ptr, value.next()); . ptr = ptr.offset(1); . // Increment the length in every step in case next() panics . local_len.increment_len(1); . } . 6,039 ( 0.01%) if n > 0 { . 
// We can write the last element directly without cloning needlessly . ptr::write(ptr, value.last()); . local_len.increment_len(1); . } . . // len set by scope guard . } 2,655 ( 0.00%) } . } . . impl Vec { . /// Removes consecutive repeated elements in the vector according to the . /// [`PartialEq`] trait implementation. . /// . /// If the vector is sorted, this removes all duplicates. . /// -- line 2333 ---------------------------------------- -- line 2338 ---------------------------------------- . /// . /// vec.dedup(); . /// . /// assert_eq!(vec, [1, 2, 3, 2]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . pub fn dedup(&mut self) { 2 ( 0.00%) self.dedup_by(|a, b| a == b) . } . } . . //////////////////////////////////////////////////////////////////////////////// . // Internal methods and functions . //////////////////////////////////////////////////////////////////////////////// . . #[doc(hidden)] . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 779 ( 0.00%) pub fn from_elem(elem: T, n: usize) -> Vec { 5,492 ( 0.01%) ::from_elem(elem, n, Global) 973 ( 0.00%) } . . #[doc(hidden)] . #[cfg(not(no_global_oom_handling))] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { . ::from_elem(elem, n, alloc) . } . -- line 2367 ---------------------------------------- -- line 2424 ---------------------------------------- . // Common trait implementations for Vec . //////////////////////////////////////////////////////////////////////////////// . . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::Deref for Vec { . type Target = [T]; . . fn deref(&self) -> &[T] { 127,901 ( 0.13%) unsafe { slice::from_raw_parts(self.as_ptr(), self.len) } 3 ( 0.00%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::DerefMut for Vec { . fn deref_mut(&mut self) -> &mut [T] { 29,021 ( 0.03%) unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } . } . } . . #[cfg(not(no_global_oom_handling))] . trait SpecCloneFrom { . fn clone_from(this: &mut Self, other: &Self); . } . -- line 2447 ---------------------------------------- -- line 2468 ---------------------------------------- . this.extend_from_slice(other); . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Clone for Vec { . #[cfg(not(test))] 4,492 ( 0.00%) fn clone(&self) -> Self { . let alloc = self.allocator().clone(); 2 ( 0.00%) <[T]>::to_vec_in(&**self, alloc) 5,854 ( 0.01%) } . . // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is . // required for this method definition, is not available. Instead use the . // `slice::to_vec` function which is only available with cfg(test) . // NB see the slice::hack module in slice.rs for more information . #[cfg(test)] . fn clone(&self) -> Self { . let alloc = self.allocator().clone(); -- line 2487 ---------------------------------------- -- line 2518 ---------------------------------------- . message = "vector indices are of type `usize` or ranges of `usize`", . label = "vector indices are of type `usize` or ranges of `usize`" . )] . impl, A: Allocator> Index for Vec { . type Output = I::Output; . . #[inline] . fn index(&self, index: I) -> &Self::Output { 4,284 ( 0.00%) Index::index(&**self, index) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_on_unimplemented( . message = "vector indices are of type `usize` or ranges of `usize`", . 
label = "vector indices are of type `usize` or ranges of `usize`" . )] . impl, A: Allocator> IndexMut for Vec { . #[inline] . fn index_mut(&mut self, index: I) -> &mut Self::Output { 12 ( 0.00%) IndexMut::index_mut(&mut **self, index) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl FromIterator for Vec { . #[inline] . fn from_iter>(iter: I) -> Vec { 43,939 ( 0.04%) >::from_iter(iter.into_iter()) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl IntoIterator for Vec { . type Item = T; . type IntoIter = IntoIter; . -- line 2555 ---------------------------------------- -- line 2564 ---------------------------------------- . /// for s in v.into_iter() { . /// // s has type String, not &String . /// println!("{}", s); . /// } . /// ``` . #[inline] . fn into_iter(self) -> IntoIter { . unsafe { 17,623 ( 0.02%) let mut me = ManuallyDrop::new(self); . let alloc = ptr::read(me.allocator()); . let begin = me.as_mut_ptr(); . let end = if mem::size_of::() == 0 { . arith_offset(begin as *const i8, me.len() as isize) as *const T . } else { . begin.add(me.len()) as *const T . }; . let cap = me.buf.capacity(); 7,258 ( 0.01%) IntoIter { . buf: NonNull::new_unchecked(begin), . phantom: PhantomData, . cap, . alloc, . ptr: begin, . end, . } . } -- line 2589 ---------------------------------------- -- line 2601 ---------------------------------------- . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { . type Item = &'a mut T; . type IntoIter = slice::IterMut<'a, T>; . . fn into_iter(self) -> slice::IterMut<'a, T> { 1,052 ( 0.00%) self.iter_mut() . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Extend for Vec { . #[inline] 232 ( 0.00%) fn extend>(&mut self, iter: I) { 18,665 ( 0.02%) >::spec_extend(self, iter.into_iter()) 232 ( 0.00%) } . . #[inline] . fn extend_one(&mut self, item: T) { . self.push(item); . } . . #[inline] . fn extend_reserve(&mut self, additional: usize) { -- line 2627 ---------------------------------------- -- line 2636 ---------------------------------------- . fn extend_desugared>(&mut self, mut iterator: I) { . // This is the case for a general iterator. . // . // This function should be the moral equivalent of: . // . // for item in iterator { . // self.push(item); . // } 4,463 ( 0.00%) while let Some(element) = iterator.next() { 611 ( 0.00%) let len = self.len(); 6,682 ( 0.01%) if len == self.capacity() { 98 ( 0.00%) let (lower, _) = iterator.size_hint(); . self.reserve(lower.saturating_add(1)); . } . unsafe { . ptr::write(self.as_mut_ptr().add(len), element); . // Since next() executes user code which can panic we have to bump the length . // after each step. . // NB can't overflow since we would have had to alloc the address space 5,491 ( 0.01%) self.set_len(len + 1); . } . } 50 ( 0.00%) } . . /// Creates a splicing iterator that replaces the specified range in the vector . /// with the given `replace_with` iterator and yields the removed items. . /// `replace_with` does not need to be the same length as `range`. . /// . /// `range` is removed even if the iterator is not consumed until the end. . /// . /// It is unspecified how many elements are removed from the vector -- line 2666 ---------------------------------------- -- line 2745 ---------------------------------------- . /// assert_eq!(evens, vec![2, 4, 6, 8, 14]); . /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); . /// ``` . 
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] . pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A> . where . F: FnMut(&mut T) -> bool, . { 158 ( 0.00%) let old_len = self.len(); . . // Guard against us getting leaked (leak amplification) . unsafe { . self.set_len(0); . } . 720 ( 0.00%) DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false } . } . } . . /// Extend implementation that copies elements out of references before pushing them onto the Vec. . /// . /// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to . /// append the entire slice at once. . /// -- line 2768 ---------------------------------------- -- line 2803 ---------------------------------------- . #[inline] . fn cmp(&self, other: &Self) -> Ordering { . Ord::cmp(&**self, &**other) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { 22,247 ( 0.02%) fn drop(&mut self) { . unsafe { . // use drop for [T] . // use a raw slice to refer to the elements of the vector as weakest necessary type; . // could avoid questions of validity in certain cases 9,830 ( 0.01%) ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len)) . } . // RawVec handles deallocation 26,168 ( 0.03%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] . impl const Default for Vec { . /// Creates an empty `Vec`. . fn default() -> Vec { . Vec::new() -- line 2827 ---------------------------------------- -- line 2976 ---------------------------------------- . /// newly-allocated buffer with exactly the right capacity. . /// . /// # Examples . /// . /// ``` . /// assert_eq!(Box::from(vec![1, 2, 3]), vec![1, 2, 3].into_boxed_slice()); . /// ``` . fn from(v: Vec) -> Self { 4 ( 0.00%) v.into_boxed_slice() . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl From<&str> for Vec { . /// Allocate a `Vec` and fill it with a UTF-8 string. . /// -- line 2992 ---------------------------------------- 38,900 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/std/src/path.rs -------------------------------------------------------------------------------- Ir -- line 316 ---------------------------------------- . . //////////////////////////////////////////////////////////////////////////////// . // Cross-platform, iterator-independent parsing . //////////////////////////////////////////////////////////////////////////////// . . /// Says whether the first byte after the prefix is a separator. . fn has_physical_root(s: &[u8], prefix: Option>) -> bool { . let path = if let Some(p) = prefix { &s[p.len()..] } else { s }; 86 ( 0.00%) !path.is_empty() && is_sep_byte(path[0]) . } . . // basic workhorse for splitting stem and extension . fn rsplit_file_at_dot(file: &OsStr) -> (Option<&OsStr>, Option<&OsStr>) { . if os_str_as_u8_slice(file) == b".." { . return (Some(file), None); . } . . // The unsafety here stems from converting between &OsStr and &[u8] . // and back. This is safe to do because (1) we only look at ASCII . // contents of the encoding and (2) new &OsStr values are produced . // only from ASCII-bounded slices of existing &OsStr values. 75 ( 0.00%) let mut iter = os_str_as_u8_slice(file).rsplitn(2, |b| *b == b'.'); . 
let after = iter.next(); . let before = iter.next(); . if before == Some(b"") { . (Some(file), None) . } else { . unsafe { (before.map(|s| u8_slice_as_os_str(s)), after.map(|s| u8_slice_as_os_str(s))) } . } . } -- line 345 ---------------------------------------- -- line 368 ---------------------------------------- . //////////////////////////////////////////////////////////////////////////////// . . /// Component parsing works by a double-ended state machine; the cursors at the . /// front and back of the path each keep track of what parts of the path have . /// been consumed so far. . /// . /// Going front to back, a path is made up of a prefix, a starting . /// directory component, and a body (of normal components) 103 ( 0.00%) #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] . enum State { . Prefix = 0, // c: . StartDir = 1, // / or . or nothing . Body = 2, // foo/bar/baz . Done = 3, . } . . /// A structure wrapping a Windows path prefix as well as its unparsed string -- line 384 ---------------------------------------- -- line 493 ---------------------------------------- . /// let components = path.components().collect::>(); . /// assert_eq!(&components, &[ . /// Component::RootDir, . /// Component::Normal("tmp".as_ref()), . /// Component::Normal("foo".as_ref()), . /// Component::Normal("bar.txt".as_ref()), . /// ]); . /// ``` 2 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] . #[stable(feature = "rust1", since = "1.0.0")] . pub enum Component<'a> { . /// A Windows path prefix, e.g., `C:` or `\\server\share`. . /// . /// There is a large variety of prefix types, see [`Prefix`]'s documentation . /// for more. . /// . /// Does not occur on Unix. -- line 509 ---------------------------------------- -- line 586 ---------------------------------------- . /// let path = Path::new("/tmp/foo/bar.txt"); . /// . /// for component in path.components() { . /// println!("{:?}", component); . /// } . /// ``` . /// . /// [`components`]: Path::components 72 ( 0.00%) #[derive(Clone)] . #[must_use = "iterators are lazy and do nothing unless consumed"] . #[stable(feature = "rust1", since = "1.0.0")] . pub struct Components<'a> { . // The path left to parse components from 12 ( 0.00%) path: &'a [u8], . . // The prefix as it was originally parsed, if any . prefix: Option>, . . // true if path *physically* has a root separator; for most Windows . // prefixes, it may have a "logical" root separator for the purposes of . // normalization, e.g., \\server\share == \\server\share\. . has_physical_root: bool, . . // The iterator is double-ended, and these two states keep track of what has . // been produced from either end 6 ( 0.00%) front: State, 6 ( 0.00%) back: State, . } . . /// An iterator over the [`Component`]s of a [`Path`], as [`OsStr`] slices. . /// . /// This `struct` is created by the [`iter`] method on [`Path`]. . /// See its documentation for more. . /// . /// [`iter`]: Path::iter -- line 620 ---------------------------------------- -- line 650 ---------------------------------------- . #[inline] . fn prefix_verbatim(&self) -> bool { . self.prefix.as_ref().map(Prefix::is_verbatim).unwrap_or(false) . } . . /// how much of the prefix is left from the point of view of iteration? . #[inline] . fn prefix_remaining(&self) -> usize { 91 ( 0.00%) if self.front == State::Prefix { self.prefix_len() } else { 0 } . } . . // Given the iteration so far, how much of the pre-State::Body path is left? . 
#[inline] 637 ( 0.00%) fn len_before_body(&self) -> usize { 364 ( 0.00%) let root = if self.front <= State::StartDir && self.has_physical_root { 1 } else { 0 }; 103 ( 0.00%) let cur_dir = if self.front <= State::StartDir && self.include_cur_dir() { 1 } else { 0 }; 364 ( 0.00%) self.prefix_remaining() + root + cur_dir 819 ( 0.00%) } . . // is the iteration complete? . #[inline] . fn finished(&self) -> bool { . self.front == State::Done || self.back == State::Done || self.front > self.back . } . . #[inline] -- line 675 ---------------------------------------- -- line 687 ---------------------------------------- . /// let mut components = Path::new("/tmp/foo/bar.txt").components(); . /// components.next(); . /// components.next(); . /// . /// assert_eq!(Path::new("foo/bar.txt"), components.as_path()); . /// ``` . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] 36 ( 0.00%) pub fn as_path(&self) -> &'a Path { . let mut comps = self.clone(); 6 ( 0.00%) if comps.front == State::Body { . comps.trim_left(); . } 6 ( 0.00%) if comps.back == State::Body { . comps.trim_right(); . } . unsafe { Path::from_u8_slice(comps.path) } 54 ( 0.00%) } . . /// Is the *original* path rooted? . fn has_root(&self) -> bool { 182 ( 0.00%) if self.has_physical_root { . return true; . } 15 ( 0.00%) if let Some(p) = self.prefix { . if p.has_implicit_root() { . return true; . } . } . false . } . . /// Should the normalized path include a leading . ? . fn include_cur_dir(&self) -> bool { 6 ( 0.00%) if self.has_root() { . return false; . } 11 ( 0.00%) let mut iter = self.path[self.prefix_len()..].iter(); 4 ( 0.00%) match (iter.next(), iter.next()) { . (Some(&b'.'), None) => true, . (Some(&b'.'), Some(&b)) => self.is_sep_byte(b), . _ => false, . } . } . . // parse a given byte sequence into the corresponding path component . fn parse_single_component<'b>(&self, comp: &'b [u8]) -> Option> { . match comp { 270 ( 0.00%) b"." if self.prefix_verbatim() => Some(Component::CurDir), . b"." => None, // . components are normalized away, except at . // the beginning of a path, which is treated . // separately via `include_cur_dir` . b".." => Some(Component::ParentDir), . b"" => None, . _ => Some(Component::Normal(unsafe { u8_slice_as_os_str(comp) })), . } . } -- line 743 ---------------------------------------- -- line 750 ---------------------------------------- . None => (0, self.path), . Some(i) => (1, &self.path[..i]), . }; . (comp.len() + extra, self.parse_single_component(comp)) . } . . // parse a component from the right, saying how many bytes to consume to . // remove the component 225 ( 0.00%) fn parse_next_component_back(&self) -> (usize, Option>) { . debug_assert!(self.back == State::Body); 90 ( 0.00%) let start = self.len_before_body(); 45 ( 0.00%) let (extra, comp) = match self.path[start..].iter().rposition(|b| self.is_sep_byte(*b)) { . None => (0, &self.path[start..]), 176 ( 0.00%) Some(i) => (1, &self.path[start + i + 1..]), . }; 270 ( 0.00%) (comp.len() + extra, self.parse_single_component(comp)) 180 ( 0.00%) } . . // trim away repeated separators (i.e., empty components) on the left . fn trim_left(&mut self) { . while !self.path.is_empty() { . let (size, comp) = self.parse_next_component(); . if comp.is_some() { . return; . } else { . self.path = &self.path[size..]; . } . } . } . . // trim away repeated separators (i.e., empty components) on the right . 
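Note on the `Components` parsing annotated above: the double-ended state machine normalizes away repeated separators and interior `.` components as it parses, and `parse_next_component_back` serves iteration from the back. A short illustration of that behavior through the public `std::path` API (standard library semantics, not specific to this build):

    use std::path::{Component, Path};

    fn main() {
        // Repeated separators and interior `.` components are normalized away.
        let path = Path::new("/tmp//foo/./bar.txt");
        let comps: Vec<_> = path.components().collect();
        assert_eq!(comps.len(), 4); // RootDir, "tmp", "foo", "bar.txt"
        assert_eq!(comps[0], Component::RootDir);
        assert_eq!(comps[3], Component::Normal("bar.txt".as_ref()));

        // The iterator is double ended; methods such as `file_name`
        // take the last component via `next_back`.
        assert_eq!(
            path.components().next_back(),
            Some(Component::Normal("bar.txt".as_ref()))
        );
        assert_eq!(path.file_name(), Some("bar.txt".as_ref()));
    }
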
fn trim_right(&mut self) { 18 ( 0.00%) while self.path.len() > self.len_before_body() { 18 ( 0.00%) let (size, comp) = self.parse_next_component_back(); 6 ( 0.00%) if comp.is_some() { . return; . } else { . self.path = &self.path[..self.path.len() - size]; . } . } . } . } . -- line 792 ---------------------------------------- -- line 930 ---------------------------------------- . } . } . None . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a> DoubleEndedIterator for Components<'a> { 400 ( 0.00%) fn next_back(&mut self) -> Option> { 202 ( 0.00%) while !self.finished() { 90 ( 0.00%) match self.back { 200 ( 0.00%) State::Body if self.path.len() > self.len_before_body() => { 468 ( 0.00%) let (size, comp) = self.parse_next_component_back(); 156 ( 0.00%) self.path = &self.path[..self.path.len() - size]; 39 ( 0.00%) if comp.is_some() { 312 ( 0.00%) return comp; . } . } . State::Body => { 3 ( 0.00%) self.back = State::StartDir; . } . State::StartDir => { 2 ( 0.00%) self.back = State::Prefix; 40 ( 0.00%) if self.has_physical_root { . self.path = &self.path[..self.path.len() - 1]; . return Some(Component::RootDir); 12 ( 0.00%) } else if let Some(p) = self.prefix { . if p.has_implicit_root() && !p.is_verbatim() { . return Some(Component::RootDir); . } . } else if self.include_cur_dir() { . self.path = &self.path[..self.path.len() - 1]; . return Some(Component::CurDir); . } . } -- line 964 ---------------------------------------- -- line 965 ---------------------------------------- . State::Prefix if self.prefix_len() > 0 => { . self.back = State::Done; . return Some(Component::Prefix(PrefixComponent { . raw: unsafe { u8_slice_as_os_str(self.path) }, . parsed: self.prefix.unwrap(), . })); . } . State::Prefix => { 1 ( 0.00%) self.back = State::Done; . return None; . } . State::Done => unreachable!(), . } . } . None 360 ( 0.00%) } . } . . #[stable(feature = "fused", since = "1.26.0")] . impl FusedIterator for Components<'_> {} . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a> cmp::PartialEq for Components<'a> { . #[inline] -- line 988 ---------------------------------------- -- line 1001 ---------------------------------------- . // possible future improvement: this could bail out earlier if there were a . // reverse memcmp/bcmp comparing back to front . if self.path == other.path { . return true; . } . } . . // compare back to front since absolute paths often share long prefixes 1 ( 0.00%) Iterator::eq(self.clone().rev(), other.clone().rev()) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl cmp::Eq for Components<'_> {} . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a> cmp::PartialOrd for Components<'a> { -- line 1017 ---------------------------------------- -- line 1179 ---------------------------------------- . /// use std::path::PathBuf; . /// . /// let path = PathBuf::new(); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . #[inline] . pub fn new() -> PathBuf { 36 ( 0.00%) PathBuf { inner: OsString::new() } . } . . /// Creates a new `PathBuf` with a given capacity used to create the . /// internal [`OsString`]. See [`with_capacity`] defined on [`OsString`]. . /// . /// # Examples . /// . /// ``` -- line 1195 ---------------------------------------- -- line 1260 ---------------------------------------- . /// use std::path::PathBuf; . /// . /// let mut path = PathBuf::from("/tmp"); . /// path.push("/etc"); . /// assert_eq!(path, PathBuf::from("/etc")); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . 
pub fn push>(&mut self, path: P) { 184 ( 0.00%) self._push(path.as_ref()) . } . 368 ( 0.00%) fn _push(&mut self, path: &Path) { . // in general, a separator is needed if the rightmost byte is not a separator . let mut need_sep = self.as_mut_vec().last().map(|c| !is_sep_byte(*c)).unwrap_or(false); . . // in the special case of `C:` on Windows, do *not* add a separator . let comps = self.components(); . . if comps.prefix_len() > 0 . && comps.prefix_len() == comps.path.len() -- line 1279 ---------------------------------------- -- line 1327 ---------------------------------------- . return; . . // `path` has a root but no prefix, e.g., `\windows` (Windows only) . } else if path.has_root() { . let prefix_len = self.components().prefix_remaining(); . self.as_mut_vec().truncate(prefix_len); . . // `path` is a pure relative path 45 ( 0.00%) } else if need_sep { . self.inner.push(MAIN_SEP_STR); . } . . self.inner.push(path); 276 ( 0.00%) } . . /// Truncates `self` to [`self.parent`]. . /// . /// Returns `false` and does nothing if [`self.parent`] is [`None`]. . /// Otherwise, returns `true`. . /// . /// [`self.parent`]: Path::parent . /// -- line 1348 ---------------------------------------- -- line 1354 ---------------------------------------- . /// let mut p = PathBuf::from("/spirited/away.rs"); . /// . /// p.pop(); . /// assert_eq!(Path::new("/spirited"), p); . /// p.pop(); . /// assert_eq!(Path::new("/"), p); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 16 ( 0.00%) pub fn pop(&mut self) -> bool { 4 ( 0.00%) match self.parent().map(|p| p.as_u8_slice().len()) { . Some(len) => { . self.as_mut_vec().truncate(len); . true . } . None => false, . } 16 ( 0.00%) } . . /// Updates [`self.file_name`] to `file_name`. . /// . /// If [`self.file_name`] was [`None`], this is equivalent to pushing . /// `file_name`. . /// . /// Otherwise it is equivalent to calling [`pop`] and then pushing . /// `file_name`. The new path will be a sibling of the original path. -- line 1378 ---------------------------------------- -- line 1428 ---------------------------------------- . /// p.set_extension("force"); . /// assert_eq!(Path::new("/feel/the.force"), p.as_path()); . /// . /// p.set_extension("dark_side"); . /// assert_eq!(Path::new("/feel/the.dark_side"), p.as_path()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn set_extension>(&mut self, extension: S) -> bool { 12 ( 0.00%) self._set_extension(extension.as_ref()) . } . 30 ( 0.00%) fn _set_extension(&mut self, extension: &OsStr) -> bool { 24 ( 0.00%) let file_stem = match self.file_stem() { . None => return false, . Some(f) => os_str_as_u8_slice(f), . }; . . // truncate until right after the file stem . let end_file_stem = file_stem[file_stem.len()..].as_ptr() as usize; . let start = os_str_as_u8_slice(&self.inner).as_ptr() as usize; . let v = self.as_mut_vec(); . v.truncate(end_file_stem.wrapping_sub(start)); . . // add the new extension, if any . let new = os_str_as_u8_slice(extension); 3 ( 0.00%) if !new.is_empty() { 9 ( 0.00%) v.reserve_exact(new.len() + 1); . v.push(b'.'); . v.extend_from_slice(new); . } . . true 24 ( 0.00%) } . . /// Consumes the `PathBuf`, yielding its internal [`OsString`] storage. . /// . /// # Examples . /// . /// ``` . /// use std::path::PathBuf; . /// -- line 1468 ---------------------------------------- -- line 1558 ---------------------------------------- . self.inner.shrink_to(min_capacity) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl Clone for PathBuf { . #[inline] . 
fn clone(&self) -> Self { 6 ( 0.00%) PathBuf { inner: self.inner.clone() } . } . . #[inline] . fn clone_from(&mut self, source: &Self) { . self.inner.clone_from(&source.inner) . } . } . -- line 1574 ---------------------------------------- -- line 1642 ---------------------------------------- . . #[stable(feature = "rust1", since = "1.0.0")] . impl From for PathBuf { . /// Converts an [`OsString`] into a [`PathBuf`] . /// . /// This conversion does not allocate or copy memory. . #[inline] . fn from(s: OsString) -> PathBuf { 141 ( 0.00%) PathBuf { inner: s } . } . } . . #[stable(feature = "from_path_buf_for_os_string", since = "1.14.0")] . impl From for OsString { . /// Converts a [`PathBuf`] into an [`OsString`] . /// . /// This conversion does not allocate or copy memory. -- line 1658 ---------------------------------------- -- line 1825 ---------------------------------------- . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl ToOwned for Path { . type Owned = PathBuf; . #[inline] . fn to_owned(&self) -> PathBuf { 5 ( 0.00%) self.to_path_buf() . } . #[inline] . fn clone_into(&self, target: &mut PathBuf) { . self.inner.clone_into(&mut target.inner); . } . } . . #[stable(feature = "rust1", since = "1.0.0")] -- line 1841 ---------------------------------------- -- line 1843 ---------------------------------------- . #[inline] . fn eq(&self, other: &PathBuf) -> bool { . self.components() == other.components() . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl Hash for PathBuf { 220 ( 0.00%) fn hash(&self, h: &mut H) { . self.as_path().hash(h) 176 ( 0.00%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl cmp::Eq for PathBuf {} . . #[stable(feature = "rust1", since = "1.0.0")] . impl cmp::PartialOrd for PathBuf { . #[inline] -- line 1861 ---------------------------------------- -- line 1936 ---------------------------------------- . . impl Path { . // The following (private!) function allows construction of a path from a u8 . // slice, which is only safe when it is known to follow the OsStr encoding. . unsafe fn from_u8_slice(s: &[u8]) -> &Path { . unsafe { Path::new(u8_slice_as_os_str(s)) } . } . // The following (private!) function reveals the byte encoding used for OsStr. 1,136 ( 0.00%) fn as_u8_slice(&self) -> &[u8] { . os_str_as_u8_slice(&self.inner) 568 ( 0.00%) } . . /// Directly wraps a string slice as a `Path` slice. . /// . /// This is a cost-free conversion. . /// . /// # Examples . /// . /// ``` -- line 1954 ---------------------------------------- -- line 1964 ---------------------------------------- . /// . /// let string = String::from("foo.txt"); . /// let from_string = Path::new(&string); . /// let from_path = Path::new(&from_string); . /// assert_eq!(from_string, from_path); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn new + ?Sized>(s: &S) -> &Path { 4 ( 0.00%) unsafe { &*(s.as_ref() as *const OsStr as *const Path) } . } . . /// Yields the underlying [`OsStr`] slice. . /// . /// # Examples . /// . /// ``` . /// use std::path::Path; -- line 1980 ---------------------------------------- -- line 2050 ---------------------------------------- . /// . /// let path_buf = Path::new("foo.txt").to_path_buf(); . /// assert_eq!(path_buf, std::path::PathBuf::from("foo.txt")); . /// ``` . #[rustc_conversion_suggestion] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[stable(feature = "rust1", since = "1.0.0")] 40 ( 0.00%) pub fn to_path_buf(&self) -> PathBuf { . 
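The conversions annotated above differ in cost: `From<OsString> for PathBuf` just moves the underlying buffer, while `Path::to_path_buf` (and `ToOwned::to_owned`, which delegates to it) allocates a fresh copy. A brief illustration, assuming nothing beyond the public std API:

    use std::ffi::OsString;
    use std::path::{Path, PathBuf};

    fn main() {
        // Move of the existing buffer; no copy, matching the From<OsString> impl.
        let owned = PathBuf::from(OsString::from("/etc/passwd"));
        // to_path_buf allocates: it clones the inner OsString.
        let copied: PathBuf = Path::new("/etc/passwd").to_path_buf();
        assert_eq!(owned, copied);
    }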
PathBuf::from(self.inner.to_os_string()) 35 ( 0.00%) } . . /// Returns `true` if the `Path` is absolute, i.e., if it is independent of . /// the current directory. . /// . /// * On Unix, a path is absolute if it starts with the root, so . /// `is_absolute` and [`has_root`] are equivalent. . /// . /// * On Windows, a path is absolute if it has a prefix and starts with the -- line 2068 ---------------------------------------- -- line 2082 ---------------------------------------- . #[allow(deprecated)] . pub fn is_absolute(&self) -> bool { . if cfg!(target_os = "redox") { . // FIXME: Allow Redox prefixes . self.has_root() || has_redox_scheme(self.as_u8_slice()) . } else { . self.has_root() && (cfg!(any(unix, target_os = "wasi")) || self.prefix().is_some()) . } 1 ( 0.00%) } . . /// Returns `true` if the `Path` is relative, i.e., not absolute. . /// . /// See [`is_absolute`]'s documentation for more details. . /// . /// # Examples . /// . /// ``` -- line 2098 ---------------------------------------- -- line 2150 ---------------------------------------- . /// assert_eq!(parent, Path::new("/foo")); . /// . /// let grand_parent = parent.parent().unwrap(); . /// assert_eq!(grand_parent, Path::new("/")); . /// assert_eq!(grand_parent.parent(), None); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] 2 ( 0.00%) pub fn parent(&self) -> Option<&Path> { . let mut comps = self.components(); 6 ( 0.00%) let comp = comps.next_back(); 8 ( 0.00%) comp.and_then(|p| match p { . Component::Normal(_) | Component::CurDir | Component::ParentDir => { 10 ( 0.00%) Some(comps.as_path()) . } . _ => None, . }) 4 ( 0.00%) } . . /// Produces an iterator over `Path` and its ancestors. . /// . /// The iterator will yield the `Path` that is returned if the [`parent`] method is used zero . /// or more times. That means, the iterator will yield `&self`, `&self.parent().unwrap()`, . /// `&self.parent().unwrap().parent().unwrap()` and so on. If the [`parent`] method returns . /// [`None`], the iterator will do likewise. The iterator will always yield at least one value, . /// namely `&self`. -- line 2175 ---------------------------------------- -- line 2217 ---------------------------------------- . /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("tmp/foo.txt").file_name()); . /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("foo.txt/.").file_name()); . /// assert_eq!(Some(OsStr::new("foo.txt")), Path::new("foo.txt/.//").file_name()); . /// assert_eq!(None, Path::new("foo.txt/..").file_name()); . /// assert_eq!(None, Path::new("/").file_name()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] 29 ( 0.00%) pub fn file_name(&self) -> Option<&OsStr> { 96 ( 0.00%) self.components().next_back().and_then(|p| match p { . Component::Normal(p) => Some(p), . _ => None, . }) 58 ( 0.00%) } . . /// Returns a path that, when joined onto `base`, yields `self`. . /// . /// # Errors . /// . /// If `base` is not a prefix of `self` (i.e., [`starts_with`] . /// returns `false`), returns [`Err`]. . /// -- line 2238 ---------------------------------------- -- line 2354 ---------------------------------------- . /// # See Also . /// This method is similar to [`Path::file_prefix`], which extracts the portion of the file name . /// before the *first* `.` . /// . /// [`Path::file_prefix`]: Path::file_prefix . /// . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] 3 ( 0.00%) pub fn file_stem(&self) -> Option<&OsStr> { . 
self.file_name().map(rsplit_file_at_dot).and_then(|(before, after)| before.or(after)) 6 ( 0.00%) } . . /// Extracts the prefix of [`self.file_name`]. . /// . /// The prefix is: . /// . /// * [`None`], if there is no file name; . /// * The entire file name if there is no embedded `.`; . /// * The portion of the file name before the first non-beginning `.`; -- line 2372 ---------------------------------------- -- line 2431 ---------------------------------------- . /// ``` . /// use std::path::{Path, PathBuf}; . /// . /// assert_eq!(Path::new("/etc").join("passwd"), PathBuf::from("/etc/passwd")); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub fn join>(&self, path: P) -> PathBuf { 40 ( 0.00%) self._join(path.as_ref()) . } . 132 ( 0.00%) fn _join(&self, path: &Path) -> PathBuf { . let mut buf = self.to_path_buf(); . buf.push(path); . buf 99 ( 0.00%) } . . /// Creates an owned [`PathBuf`] like `self` but with the given file name. . /// . /// See [`PathBuf::set_file_name`] for more details. . /// . /// # Examples . /// . /// ``` -- line 2454 ---------------------------------------- -- line 2531 ---------------------------------------- . /// assert_eq!(components.next(), Some(Component::Normal(OsStr::new("foo.txt")))); . /// assert_eq!(components.next(), None) . /// ``` . /// . /// [`CurDir`]: Component::CurDir . #[stable(feature = "rust1", since = "1.0.0")] . pub fn components(&self) -> Components<'_> { . let prefix = parse_prefix(self.as_os_str()); 266 ( 0.00%) Components { . path: self.as_u8_slice(), . prefix, 85 ( 0.00%) has_physical_root: has_physical_root(self.as_u8_slice(), prefix) . || has_redox_scheme(self.as_u8_slice()), . front: State::Prefix, . back: State::Body, . } . } . . /// Produces an iterator over the path's components viewed as [`OsStr`] . /// slices. -- line 2550 ---------------------------------------- -- line 2775 ---------------------------------------- . /// . /// When the goal is simply to read from (or write to) the source, the most . /// reliable way to test the source can be read (or written to) is to open . /// it. Only using `is_file` can break workflows like `diff <( prog_a )` on . /// a Unix-like system for example. See [`fs::File::open`] or . /// [`fs::OpenOptions::open`] for more information. . #[stable(feature = "path_ext", since = "1.5.0")] . #[must_use] 5 ( 0.00%) pub fn is_file(&self) -> bool { . fs::metadata(self).map(|m| m.is_file()).unwrap_or(false) 3 ( 0.00%) } . . /// Returns `true` if the path exists on disk and is pointing at a directory. . /// . /// This function will traverse symbolic links to query information about the . /// destination file. . /// . /// If you cannot access the metadata of the file, e.g. because of a . /// permission error or broken symbolic links, this will return `false`. -- line 2793 ---------------------------------------- -- line 2802 ---------------------------------------- . /// . /// # See Also . /// . /// This is a convenience function that coerces errors to false. If you want to . /// check errors, call [`fs::metadata`] and handle its [`Result`]. Then call . /// [`fs::Metadata::is_dir`] if it was [`Ok`]. . #[stable(feature = "path_ext", since = "1.5.0")] . #[must_use] 10 ( 0.00%) pub fn is_dir(&self) -> bool { . fs::metadata(self).map(|m| m.is_dir()).unwrap_or(false) 6 ( 0.00%) } . . /// Returns `true` if the path exists on disk and is pointing at a symbolic link. . /// . /// This function will not traverse symbolic links. . /// In case of a broken symbolic link this will also return true. 
. /// . /// If you cannot access the directory containing the file, e.g., because of a . /// permission error, this will return false. -- line 2820 ---------------------------------------- -- line 2898 ---------------------------------------- . impl fmt::Debug for Display<'_> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Debug::fmt(&self.path, f) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl fmt::Display for Display<'_> { 4 ( 0.00%) fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 4 ( 0.00%) self.path.inner.display(f) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl cmp::PartialEq for Path { . #[inline] . fn eq(&self, other: &Path) -> bool { . self.components() == other.components() . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl Hash for Path { 4,192 ( 0.00%) fn hash(&self, h: &mut H) { 612 ( 0.00%) let bytes = self.as_u8_slice(); . let prefix_len = match parse_prefix(&self.inner) { . Some(prefix) => { . prefix.hash(h); . prefix.len() . } . None => 0, . }; . let bytes = &bytes[prefix_len..]; . . let mut component_start = 0; . let mut bytes_hashed = 0; . . for i in 0..bytes.len() { 56,945 ( 0.06%) if is_sep_byte(bytes[i]) { 19,647 ( 0.02%) if i > component_start { . let to_hash = &bytes[component_start..i]; 8 ( 0.00%) h.write(to_hash); 11,966 ( 0.01%) bytes_hashed += to_hash.len(); . } . . // skip over separator and optionally a following CurDir item . // since components() would normalize these away 26,640 ( 0.03%) component_start = i + match bytes[i..] { 13,098 ( 0.01%) [_, b'.', b'/', ..] | [_, b'.'] => 2, . _ => 1, . }; . } . } . 1,136 ( 0.00%) if component_start < bytes.len() { . let to_hash = &bytes[component_start..]; 3 ( 0.00%) h.write(to_hash); 1,136 ( 0.00%) bytes_hashed += to_hash.len(); . } . . h.write_usize(bytes_hashed); 4,184 ( 0.00%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl cmp::Eq for Path {} . . #[stable(feature = "rust1", since = "1.0.0")] . impl cmp::PartialOrd for Path { . #[inline] -- line 2967 ---------------------------------------- 28,835 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs -------------------------------------------------------------------------------- Ir -- line 91 ---------------------------------------- . // maximum of number bytes needed to fill an 8-byte-sized element on which . // SipHash operates. Note that for variable-sized copies which are known to be . // less than 8 bytes, this function will perform more work than necessary unless . // the compiler is able to optimize the extra work away. . #[inline] . unsafe fn copy_nonoverlapping_small(src: *const u8, dst: *mut u8, count: usize) { . debug_assert!(count <= 8); . 6,640 ( 0.01%) if count == 8 { . ptr::copy_nonoverlapping(src, dst, 8); . return; . } . . let mut i = 0; 11,850 ( 0.01%) if i + 3 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 4); . i += 4; . } . 25,513 ( 0.03%) if i + 1 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 2); 3,645 ( 0.00%) i += 2 . } . 11,850 ( 0.01%) if i < count { 5,794 ( 0.01%) *dst.add(i) = *src.add(i); . i += 1; . } . . debug_assert_eq!(i, count); . } . . // # Implementation . // -- line 124 ---------------------------------------- -- line 201 ---------------------------------------- . . hasher . } . . // A specialized write function for values with size <= 8. . #[inline] . 
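The `Hash for Path` impl annotated above (visible with nonzero counts in this profile) skips separator bytes and interior `./` components so that paths which compare equal component-wise also hash equally. A quick consistency check using only public APIs (`DefaultHasher` is used purely for illustration; it is not the hasher this profile was taken with):

    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};
    use std::path::Path;

    fn path_hash(p: &Path) -> u64 {
        let mut h = DefaultHasher::new();
        p.hash(&mut h);
        h.finish()
    }

    fn main() {
        // Repeated separators and interior `.` components are normalized away
        // both by comparison and by hashing, so the Eq/Hash contract holds.
        assert_eq!(Path::new("foo/bar"), Path::new("foo//./bar"));
        assert_eq!(path_hash(Path::new("foo/bar")),
                   path_hash(Path::new("foo//./bar")));
    }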
fn short_write(&mut self, x: T) { . let size = mem::size_of::(); 11,505 ( 0.01%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . 73,945 ( 0.07%) if nbuf + size < BUFFER_SIZE { . unsafe { . // The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . } . 22,436 ( 0.02%) self.nbuf = nbuf + size; . . return; . } . 3,185 ( 0.00%) unsafe { self.short_write_process_buffer(x) } . } . . // A specialized write function for values with size <= 8 that should only . // be called when the write would cause the buffer to fill. . // . // SAFETY: the write of `x` into `self.buf` starting at byte offset . // `self.nbuf` must cause `self.buf` to become fully initialized (and not . // overflow) if it wasn't already. . #[inline(never)] 947 ( 0.00%) unsafe fn short_write_process_buffer(&mut self, x: T) { . let size = mem::size_of::(); 947 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size >= BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . . // Copy first part of input into end of buffer, possibly into spill . // element. The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . . // Process buffer. . for i in 0..BUFFER_CAPACITY { 9,470 ( 0.01%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 7,576 ( 0.01%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 8,523 ( 0.01%) self.state.v0 ^= elem; . } . . // Copy remaining input into start of buffer by copying size - 1 . // elements from spill (at most size - 1 bytes could have overflowed . // into the spill). The memcpy call is optimized away because the size . // is known. And the whole copy is optimized away for size == 1. . let src = self.buf.get_unchecked(BUFFER_SPILL_INDEX) as *const _ as *const u8; . ptr::copy_nonoverlapping(src, self.buf.as_mut_ptr() as *mut u8, size - 1); . . // This function should only be called when the write fills the buffer. . // Therefore, when size == 1, the new `self.nbuf` must be zero. The size . // is statically known, so the branch is optimized away. 4,187 ( 0.00%) self.nbuf = if size == 1 { 0 } else { nbuf + size - BUFFER_SIZE }; 3,788 ( 0.00%) self.processed += BUFFER_SIZE; 1,894 ( 0.00%) } . . // A write function for byte slices. . #[inline] . fn slice_write(&mut self, msg: &[u8]) { . let length = msg.len(); 6,039 ( 0.01%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . 32,019 ( 0.03%) if nbuf + length < BUFFER_SIZE { . unsafe { . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . 11,658 ( 0.01%) if length <= 8 { . copy_nonoverlapping_small(msg.as_ptr(), dst, length); . } else { . // This memcpy is *not* optimized away. . ptr::copy_nonoverlapping(msg.as_ptr(), dst, length); . } . } . 5,844 ( 0.01%) self.nbuf = nbuf + length; . . return; . } . 3,053 ( 0.00%) unsafe { self.slice_write_process_buffer(msg) } . } . . // A write function for byte slices that should only be called when the . // write would cause the buffer to fill. . // . // SAFETY: `self.buf` must be initialized up to the byte offset `self.nbuf`, . // and `msg` must contain enough bytes to initialize the rest of the element . // containing the byte offset `self.nbuf`. . 
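`short_write` and `slice_write` above follow the same scheme: bytes are copied into a small buffer of 8-byte elements, and the SipHash compression rounds only run once an element (or the whole buffer) is full, keeping the common case branch-light. A minimal model of that buffering idea in plain Rust (illustrative only; this is not the `SipHasher128` type and it omits the spill element and the real compression function):

    use std::convert::TryInto;

    struct BufferedHasher {
        buf: [u8; 64],      // stand-in for the element buffer
        nbuf: usize,        // bytes currently buffered
        processed: usize,   // bytes already fed to the compression rounds
    }

    impl BufferedHasher {
        fn new() -> Self {
            BufferedHasher { buf: [0; 64], nbuf: 0, processed: 0 }
        }

        fn compress(&mut self, _elem: u64) {
            // Stand-in for Sip24Rounds::c_rounds on the real state.
        }

        fn write(&mut self, msg: &[u8]) {
            for &byte in msg {
                self.buf[self.nbuf] = byte;
                self.nbuf += 1;
                if self.nbuf == self.buf.len() {
                    // Only now do the compression rounds run, one 8-byte
                    // element at a time, as in short_write_process_buffer above.
                    for chunk in self.buf.chunks_exact(8) {
                        let elem = u64::from_le_bytes(chunk.try_into().unwrap());
                        self.compress(elem);
                    }
                    self.processed += self.nbuf;
                    self.nbuf = 0;
                }
            }
        }
    }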
#[inline(never)] 2,785 ( 0.00%) unsafe fn slice_write_process_buffer(&mut self, msg: &[u8]) { . let length = msg.len(); 557 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + length >= BUFFER_SIZE); . . // Always copy first part of input into current element of buffer. . // This function should only be called when the write fills the buffer, . // so we know that there is enough input to fill the current element. 1,671 ( 0.00%) let valid_in_elem = nbuf % ELEM_SIZE; 557 ( 0.00%) let needed_in_elem = ELEM_SIZE - valid_in_elem; . . let src = msg.as_ptr(); . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . copy_nonoverlapping_small(src, dst, needed_in_elem); . . // Process buffer. . . // Using `nbuf / ELEM_SIZE + 1` rather than `(nbuf + needed_in_elem) / . // ELEM_SIZE` to show the compiler that this loop's upper bound is > 0. . // We know that is true, because last step ensured we have a full . // element in the buffer. 1,114 ( 0.00%) let last = nbuf / ELEM_SIZE + 1; . . for i in 0..last { 4,402 ( 0.00%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 4,959 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 8,804 ( 0.01%) self.state.v0 ^= elem; . } . . // Process the remaining element-sized chunks of input. . let mut processed = needed_in_elem; 1,114 ( 0.00%) let input_left = length - processed; 303 ( 0.00%) let elems_left = input_left / ELEM_SIZE; . let extra_bytes_left = input_left % ELEM_SIZE; . . for _ in 0..elems_left { 135 ( 0.00%) let elem = (msg.as_ptr().add(processed) as *const u64).read_unaligned().to_le(); 135 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 135 ( 0.00%) self.state.v0 ^= elem; 270 ( 0.00%) processed += ELEM_SIZE; . } . . // Copy remaining input into start of buffer. . let src = msg.as_ptr().add(processed); . let dst = self.buf.as_mut_ptr() as *mut u8; . copy_nonoverlapping_small(src, dst, extra_bytes_left); . 557 ( 0.00%) self.nbuf = extra_bytes_left; 2,785 ( 0.00%) self.processed += nbuf + processed; 3,342 ( 0.00%) } . . #[inline] . pub fn finish128(mut self) -> (u64, u64) { . debug_assert!(self.nbuf < BUFFER_SIZE); . . // Process full elements in buffer. 2,217 ( 0.00%) let last = self.nbuf / ELEM_SIZE; . . // Since we're consuming self, avoid updating members for a potential . // performance gain. 2,956 ( 0.00%) let mut state = self.state; . . for i in 0..last { 1,957 ( 0.00%) let elem = unsafe { self.buf.get_unchecked(i).assume_init().to_le() }; 1,957 ( 0.00%) state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut state); 1,957 ( 0.00%) state.v0 ^= elem; . } . . // Get remaining partial element. 1,478 ( 0.00%) let elem = if self.nbuf % ELEM_SIZE != 0 { . unsafe { . // Ensure element is initialized by writing zero bytes. At most . // `ELEM_SIZE - 1` are required given the above check. It's safe . // to write this many because we have the spill and we maintain . // `self.nbuf` such that this write will start before the spill. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(self.nbuf); . ptr::write_bytes(dst, 0, ELEM_SIZE - 1); 638 ( 0.00%) self.buf.get_unchecked(last).assume_init().to_le() . } . } else { . 0 . }; . . // Finalize the hash. 2,116 ( 0.00%) let length = self.processed + self.nbuf; 1,476 ( 0.00%) let b: u64 = ((length as u64 & 0xff) << 56) | elem; . 738 ( 0.00%) state.v3 ^= b; . Sip24Rounds::c_rounds(&mut state); 738 ( 0.00%) state.v0 ^= b; . 738 ( 0.00%) state.v2 ^= 0xee; . 
Sip24Rounds::d_rounds(&mut state); 1,792 ( 0.00%) let _0 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . 314 ( 0.00%) state.v1 ^= 0xdd; . Sip24Rounds::d_rounds(&mut state); 314 ( 0.00%) let _1 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . . (_0, _1) . } . } . . impl Hasher for SipHasher128 { . #[inline] . fn write_u8(&mut self, i: u8) { -- line 414 ---------------------------------------- -- line 471 ---------------------------------------- . } . . #[derive(Debug, Clone, Default)] . struct Sip24Rounds; . . impl Sip24Rounds { . #[inline] . fn c_rounds(state: &mut State) { 61,850 ( 0.06%) compress!(state); 75,279 ( 0.07%) compress!(state); . } . . #[inline] . fn d_rounds(state: &mut State) { 3,580 ( 0.00%) compress!(state); 3,580 ( 0.00%) compress!(state); 3,580 ( 0.00%) compress!(state); 2,843 ( 0.00%) compress!(state); . } . } 10,301 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs -------------------------------------------------------------------------------- Ir -- line 57 ---------------------------------------- . /// # Examples . /// . /// Basic usage: . /// . /// ``` . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::from_str_radix(\"A\", 16), Ok(10));")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 12 ( 0.00%) pub fn from_str_radix(src: &str, radix: u32) -> Result { 6 ( 0.00%) from_str_radix(src, radix) 18 ( 0.00%) } . . /// Returns the number of ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// . /// ``` -- line 75 ---------------------------------------- -- line 80 ---------------------------------------- . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[doc(alias = "popcount")] . #[doc(alias = "popcnt")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn count_ones(self) -> u32 { 2,085 ( 0.00%) intrinsics::ctpop(self as $ActualT) as u32 . } . . /// Returns the number of zeros in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 96 ---------------------------------------- -- line 118 ---------------------------------------- . /// assert_eq!(n.leading_zeros(), 2); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn leading_zeros(self) -> u32 { 24,452 ( 0.02%) intrinsics::ctlz(self as $ActualT) as u32 . } . . /// Returns the number of trailing zeros in the binary representation . /// of `self`. . /// . /// # Examples . /// . /// Basic usage: -- line 134 ---------------------------------------- -- line 139 ---------------------------------------- . /// assert_eq!(n.trailing_zeros(), 3); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn trailing_zeros(self) -> u32 { 20,289 ( 0.02%) intrinsics::cttz(self) as u32 . } . . /// Returns the number of leading ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . 
/// -- line 155 ---------------------------------------- -- line 204 ---------------------------------------- . #[doc = concat!("assert_eq!(n.rotate_left(", $rot, "), m);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn rotate_left(self, n: u32) -> Self { 375,789 ( 0.37%) intrinsics::rotate_left(self, n as $SelfT) . } . . /// Shifts the bits to the right by a specified amount, `n`, . /// wrapping the truncated bits to the beginning of the resulting . /// integer. . /// . /// Please note this isn't the same operation as the `>>` shifting operator! . /// -- line 220 ---------------------------------------- -- line 430 ---------------------------------------- . #[doc = concat!("assert_eq!((", stringify!($SelfT), "::MAX - 2).checked_add(3), None);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_checked_int_methods", since = "1.47.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline] . pub const fn checked_add(self, rhs: Self) -> Option { 12 ( 0.00%) let (a, b) = self.overflowing_add(rhs); . if unlikely!(b) {None} else {Some(a)} . } . . /// Unchecked integer addition. Computes `self + rhs`, assuming overflow . /// cannot occur. . /// . /// # Safety . /// -- line 446 ---------------------------------------- -- line 456 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_add`. 56,954 ( 0.06%) unsafe { intrinsics::unchecked_add(self, rhs) } . } . . /// Checked addition with a signed integer. Computes `self + rhs`, . /// returning `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 472 ---------------------------------------- -- line 525 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_sub`. 1,152 ( 0.00%) unsafe { intrinsics::unchecked_sub(self, rhs) } . } . . /// Checked integer multiplication. Computes `self * rhs`, returning . /// `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 541 ---------------------------------------- -- line 1035 ---------------------------------------- . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.saturating_add(127), ", stringify!($SelfT), "::MAX);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_add(self, rhs: Self) -> Self { 3,805 ( 0.00%) intrinsics::saturating_add(self, rhs) . } . . /// Saturating addition with a signed integer. Computes `self + rhs`, . 
/// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1051 ---------------------------------------- -- line 1084 ---------------------------------------- . #[doc = concat!("assert_eq!(13", stringify!($SelfT), ".saturating_sub(127), 0);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_sub(self, rhs: Self) -> Self { 244 ( 0.00%) intrinsics::saturating_sub(self, rhs) . } . . /// Saturating integer multiplication. Computes `self * rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1100 ---------------------------------------- -- line 1175 ---------------------------------------- . #[doc = concat!("assert_eq!(200", stringify!($SelfT), ".wrapping_add(", stringify!($SelfT), "::MAX), 199);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_add(self, rhs: Self) -> Self { 160,231 ( 0.16%) intrinsics::wrapping_add(self, rhs) . } . . /// Wrapping (modular) addition with a signed integer. Computes . /// `self + rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1191 ---------------------------------------- -- line 1217 ---------------------------------------- . #[doc = concat!("assert_eq!(100", stringify!($SelfT), ".wrapping_sub(", stringify!($SelfT), "::MAX), 101);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_sub(self, rhs: Self) -> Self { 94,413 ( 0.09%) intrinsics::wrapping_sub(self, rhs) . } . . /// Wrapping (modular) multiplication. Computes `self * . /// rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1233 ---------------------------------------- -- line 1240 ---------------------------------------- . /// assert_eq!(25u8.wrapping_mul(12), 44); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_mul(self, rhs: Self) -> Self { 275,878 ( 0.27%) intrinsics::wrapping_mul(self, rhs) . } . . /// Wrapping (modular) division. Computes `self / rhs`. . /// Wrapped division on unsigned types is just normal division. . /// There's no way wrapping could ever happen. . /// This function exists, so that all operations . /// are accounted for in the wrapping operations. . /// -- line 1256 ---------------------------------------- -- line 1492 ---------------------------------------- . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.overflowing_add(1), (0, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . 
without modifying the original"] . #[inline(always)] . pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) { 42,063 ( 0.04%) let (a, b) = intrinsics::add_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) . } . . /// Calculates `self + rhs + carry` without the ability to overflow. . /// . /// Performs "ternary addition" which takes in an extra bit to add, and may return an . /// additional bit of overflow. This allows for chaining together multiple additions . /// to create "big integers" which represent larger values. -- line 1508 ---------------------------------------- -- line 1588 ---------------------------------------- . #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".overflowing_sub(1), (", stringify!($SelfT), "::MAX, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) { 12,834 ( 0.01%) let (a, b) = intrinsics::sub_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) . } . . /// Calculates `self - rhs - borrow` without the ability to overflow. . /// . /// Performs "ternary subtraction" which takes in an extra bit to subtract, and may return . /// an additional bit of overflow. This allows for chaining together multiple subtractions . /// to create "big integers" which represent larger values. -- line 1604 ---------------------------------------- -- line 1674 ---------------------------------------- . /// assert_eq!(1_000_000_000u32.overflowing_mul(10), (1410065408, true)); . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn overflowing_mul(self, rhs: Self) -> (Self, bool) { 62,683 ( 0.06%) let (a, b) = intrinsics::mul_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) . } . . /// Calculates the divisor when `self` is divided by `rhs`. . /// . /// Returns a tuple of the divisor along with a boolean indicating . /// whether an arithmetic overflow would occur. Note that for unsigned . /// integers overflow never occurs, so the second value is always -- line 1690 ---------------------------------------- -- line 2132 ---------------------------------------- . #[doc = concat!("assert!(16", stringify!($SelfT), ".is_power_of_two());")] . #[doc = concat!("assert!(!10", stringify!($SelfT), ".is_power_of_two());")] . /// ``` . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_is_power_of_two", since = "1.32.0")] . #[inline(always)] . pub const fn is_power_of_two(self) -> bool { 132 ( 0.00%) self.count_ones() == 1 . } . . // Returns one less than next power of two. . // (For 8u8 next power of two is 8u8 and for 6u8 it is 8u8) . // . // 8u8.one_less_than_next_power_of_two() == 7 . // 6u8.one_less_than_next_power_of_two() == 7 . // . // This method cannot overflow, as in the `next_power_of_two` . // overflow cases it instead ends up returning the maximum value . // of the type, and can return 0 for 0. . #[inline] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . const fn one_less_than_next_power_of_two(self) -> Self { 146 ( 0.00%) if self <= 1 { return 0; } . 
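The `one_less_than_next_power_of_two` helper whose body continues below relies on the `ctlz` trick: for `self > 1`, shifting `MAX` right by the number of leading zeros of `self - 1` yields a mask of all ones up to the highest set bit, i.e. one less than the next power of two. A quick check of that identity through the public `u32` API (illustrative only):

    fn one_less_than_next_power_of_two(x: u32) -> u32 {
        if x <= 1 {
            return 0;
        }
        // leading_zeros lowers to the same ctlz intrinsic annotated earlier.
        u32::MAX >> (x - 1).leading_zeros()
    }

    fn main() {
        assert_eq!(one_less_than_next_power_of_two(6), 7);
        assert_eq!(one_less_than_next_power_of_two(8), 7);
        assert_eq!(6u32.next_power_of_two(), 8);
        assert_eq!(8u32.next_power_of_two(), 8);
    }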
879 ( 0.00%) let p = self - 1; . // SAFETY: Because `p > 0`, it cannot consist entirely of leading zeros. . // That means the shift is always in-bounds, and some processors . // (such as intel pre-haswell) have more efficient ctlz . // intrinsics when the argument is non-zero. 2,637 ( 0.00%) let z = unsafe { intrinsics::ctlz_nonzero(p) }; 879 ( 0.00%) <$SelfT>::MAX >> z . } . . /// Returns the smallest power of two greater than or equal to `self`. . /// . /// When return value overflows (i.e., `self > (1 << (N-1))` for type . /// `uN`), it panics in debug mode and the return value is wrapped to 0 in . /// release mode (the only situation in which method can return 0). . /// -- line 2171 ---------------------------------------- -- line 2179 ---------------------------------------- . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline] . #[rustc_inherit_overflow_checks] . pub const fn next_power_of_two(self) -> Self { 1,612 ( 0.00%) self.one_less_than_next_power_of_two() + 1 . } . . /// Returns the smallest power of two greater than or equal to `n`. If . /// the next power of two is greater than the type's maximum value, . /// `None` is returned, otherwise the power of two is wrapped in `Some`. . /// . /// # Examples . /// -- line 2195 ---------------------------------------- 3,998 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs -------------------------------------------------------------------------------- Ir -- line 108 ---------------------------------------- . suppress_errors: bool, . }, . } . . impl RegionckMode { . /// Indicates that the MIR borrowck will repeat these region . /// checks, so we should ignore errors if NLL is (unconditionally) . /// enabled. 10 ( 0.00%) pub fn for_item_body(tcx: TyCtxt<'_>) -> Self { . // FIXME(Centril): Once we actually remove `::Migrate` also make . // this always `true` and then proceed to eliminate the dead code. 10 ( 0.00%) match tcx.borrowck_mode() { . // If we're on Migrate mode, report AST region errors . BorrowckMode::Migrate => RegionckMode::Erase { suppress_errors: false }, . . // If we're on MIR, don't report AST region errors as they should be reported by NLL . BorrowckMode::Mir => RegionckMode::Erase { suppress_errors: true }, . } 20 ( 0.00%) } . } . . /// This type contains all the things within `InferCtxt` that sit within a . /// `RefCell` and are involved with taking/rolling back snapshots. Snapshot . /// operations are hot enough that we want only one call to `borrow_mut` per . /// call to `start_snapshot` and `rollback_to`. . pub struct InferCtxtInner<'tcx> { . /// Cache for projections. This cache is snapshotted along with the infcx. -- line 134 ---------------------------------------- -- line 202 ---------------------------------------- . /// type instantiations (`ty::Infer`) to the actual opaque . /// type (`ty::Opaque`). Used during fallback to map unconstrained . /// opaque type inference variables to their corresponding . /// opaque type. . pub opaque_types_vars: FxHashMap, Ty<'tcx>>, . } . . impl<'tcx> InferCtxtInner<'tcx> { 2,755 ( 0.00%) fn new() -> InferCtxtInner<'tcx> { 18,734 ( 0.02%) InferCtxtInner { . projection_cache: Default::default(), . 
type_variable_storage: type_variable::TypeVariableStorage::new(), . undo_log: InferCtxtUndoLogs::default(), . const_unification_storage: ut::UnificationTableStorage::new(), . int_unification_storage: ut::UnificationTableStorage::new(), . float_unification_storage: ut::UnificationTableStorage::new(), 1,653 ( 0.00%) region_constraint_storage: Some(RegionConstraintStorage::new()), . region_obligations: vec![], . opaque_types: Default::default(), . opaque_types_vars: Default::default(), . } 3,306 ( 0.00%) } . . #[inline] . pub fn region_obligations(&self) -> &[(hir::HirId, RegionObligation<'tcx>)] { . &self.region_obligations . } . . #[inline] . pub fn projection_cache(&mut self) -> traits::ProjectionCache<'_, 'tcx> { 274 ( 0.00%) self.projection_cache.with_log(&mut self.undo_log) . } . . #[inline] . fn type_variables(&mut self) -> type_variable::TypeVariableTable<'_, 'tcx> { 7,079 ( 0.01%) self.type_variable_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn int_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::IntVid, . &mut ut::UnificationStorage, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 59 ( 0.00%) self.int_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn float_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::FloatVid, -- line 258 ---------------------------------------- -- line 268 ---------------------------------------- . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::ConstVid<'tcx>, . &mut ut::UnificationStorage>, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 26 ( 0.00%) self.const_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . pub fn unwrap_region_constraints(&mut self) -> RegionConstraintCollector<'_, 'tcx> { 2,070 ( 0.00%) self.region_constraint_storage . .as_mut() . .expect("region constraints already solved") 2,781 ( 0.00%) .with_log(&mut self.undo_log) . } . } . . pub struct InferCtxt<'a, 'tcx> { . pub tcx: TyCtxt<'tcx>, . . /// The `DefId` of the item in whose context we are performing inference or typeck. . /// It is used to check whether an opaque type use is a defining use. -- line 292 ---------------------------------------- -- line 361 ---------------------------------------- . /// item we are type-checking, and just consider those names as . /// part of the root universe. So this would only get incremented . /// when we enter into a higher-ranked (`for<..>`) type or trait . /// bound. . universe: Cell, . } . . /// See the `error_reporting` module for more details. 3,496 ( 0.00%) #[derive(Clone, Copy, Debug, PartialEq, Eq, TypeFoldable)] . pub enum ValuePairs<'tcx> { . Types(ExpectedFound>), . Regions(ExpectedFound>), . Consts(ExpectedFound<&'tcx ty::Const<'tcx>>), . TraitRefs(ExpectedFound>), . PolyTraitRefs(ExpectedFound>), . } . -- line 377 ---------------------------------------- -- line 383 ---------------------------------------- . pub struct TypeTrace<'tcx> { . cause: ObligationCause<'tcx>, . values: ValuePairs<'tcx>, . } . . /// The origin of a `r1 <= r2` constraint. . /// . /// See `error_reporting` module for more details 3,374 ( 0.00%) #[derive(Clone, Debug)] . pub enum SubregionOrigin<'tcx> { . /// Arose from a subtyping relation 211 ( 0.00%) Subtype(Box>), . . /// When casting `&'a T` to an `&'b Trait` object, . /// relating `'a` to `'b` . RelateObjectBound(Span), . . /// Some type parameter was instantiated with the given type, . /// and that type must outlive some region. 9 ( 0.00%) RelateParamBound(Span, Ty<'tcx>, Option), . . 
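The `SubregionOrigin` variants listed here record why a particular `'a: 'b` constraint was introduced, purely so diagnostics can point back at the user code. For illustration, ordinary source constructs that give rise to such constraints (the mapping to specific variants is approximate):

    // A `T: 'a` bound: the type parameter must outlive the region
    // (a RelateParamBound-style origin when the bound is instantiated).
    struct Wrapper<'a, T: 'a>(&'a T);

    // `&'a &'b u32` requires `'b: 'a` to hand out the inner reference
    // (a ReferenceOutlivesReferent-style origin).
    fn inner<'a, 'b: 'a>(x: &'a &'b u32) -> &'a u32 {
        *x
    }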
/// The given region parameter was instantiated with a region . /// that must outlive some other region. . RelateRegionParamBound(Span), . . /// Creating a pointer `b` to contents of another reference . Reborrow(Span), . . /// Creating a pointer `b` to contents of an upvar . ReborrowUpvar(Span, ty::UpvarId), . . /// Data with type `Ty<'tcx>` was borrowed 9 ( 0.00%) DataBorrowed(Ty<'tcx>, Span), . . /// (&'a &'b T) where a >= b 16 ( 0.00%) ReferenceOutlivesReferent(Ty<'tcx>, Span), . . /// Comparing the signature and requirements of an impl method against . /// the containing trait. . CompareImplMethodObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, . . /// Comparing the signature and requirements of an impl associated type . /// against the containing trait . CompareImplTypeObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, -- line 426 ---------------------------------------- -- line 554 ---------------------------------------- . defining_use_anchor: Option, . } . . pub trait TyCtxtInferExt<'tcx> { . fn infer_ctxt(self) -> InferCtxtBuilder<'tcx>; . } . . impl<'tcx> TyCtxtInferExt<'tcx> for TyCtxt<'tcx> { 551 ( 0.00%) fn infer_ctxt(self) -> InferCtxtBuilder<'tcx> { 1,653 ( 0.00%) InferCtxtBuilder { tcx: self, defining_use_anchor: None, fresh_typeck_results: None } 551 ( 0.00%) } . } . . impl<'tcx> InferCtxtBuilder<'tcx> { . /// Used only by `rustc_typeck` during body type-checking/inference, . /// will initialize `in_progress_typeck_results` with fresh `TypeckResults`. . /// Will also change the scope for opaque type defining use checks to the given owner. 333 ( 0.00%) pub fn with_fresh_in_progress_typeck_results(mut self, table_owner: LocalDefId) -> Self { 407 ( 0.00%) self.fresh_typeck_results = Some(RefCell::new(ty::TypeckResults::new(table_owner))); 185 ( 0.00%) self.with_opaque_type_inference(table_owner) 259 ( 0.00%) } . . /// Whenever the `InferCtxt` should be able to handle defining uses of opaque types, . /// you need to call this function. Otherwise the opaque type will be treated opaquely. . /// . /// It is only meant to be called in two places, for typeck . /// (via `with_fresh_in_progress_typeck_results`) and for the inference context used . /// in mir borrowck. 20 ( 0.00%) pub fn with_opaque_type_inference(mut self, defining_use_anchor: LocalDefId) -> Self { 10 ( 0.00%) self.defining_use_anchor = Some(defining_use_anchor); 94 ( 0.00%) self 30 ( 0.00%) } . . /// Given a canonical value `C` as a starting point, create an . /// inference context that contains each of the bound values . /// within instantiated as a fresh variable. The `f` closure is . /// invoked with the new infcx, along with the instantiated value . /// `V` and a substitution `S`. This substitution `S` maps from . /// the bound values in `C` to their instantiated values in `V` . /// (in other words, `S(C) = V`). 1,418 ( 0.00%) pub fn enter_with_canonical( . &mut self, . span: Span, . canonical: &Canonical<'tcx, T>, . f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>, T, CanonicalVarValues<'tcx>) -> R, . ) -> R . where . T: TypeFoldable<'tcx>, . { . self.enter(|infcx| { 1,840 ( 0.00%) let (value, subst) = 120 ( 0.00%) infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical); 2,785 ( 0.00%) f(infcx, value, subst) . }) 1,528 ( 0.00%) } . 2,273 ( 0.00%) pub fn enter(&mut self, f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>) -> R) -> R { 1,560 ( 0.00%) let InferCtxtBuilder { tcx, defining_use_anchor, ref fresh_typeck_results } = *self; . 
let in_progress_typeck_results = fresh_typeck_results.as_ref(); 22,875 ( 0.02%) f(InferCtxt { . tcx, . defining_use_anchor, . in_progress_typeck_results, 551 ( 0.00%) inner: RefCell::new(InferCtxtInner::new()), . lexical_region_resolutions: RefCell::new(None), . selection_cache: Default::default(), . evaluation_cache: Default::default(), . reported_trait_errors: Default::default(), . reported_closure_mismatch: Default::default(), . tainted_by_errors_flag: Cell::new(false), 551 ( 0.00%) err_count_on_creation: tcx.sess.err_count(), . in_snapshot: Cell::new(false), . skip_leak_check: Cell::new(false), . universe: Cell::new(ty::UniverseIndex::ROOT), . }) 2,419 ( 0.00%) } . } . . impl<'tcx, T> InferOk<'tcx, T> { . pub fn unit(self) -> InferOk<'tcx, ()> { . InferOk { value: (), obligations: self.obligations } . } . . /// Extracts `value`, registering any obligations into `fulfill_cx`. . pub fn into_value_registering_obligations( . self, . infcx: &InferCtxt<'_, 'tcx>, . fulfill_cx: &mut dyn TraitEngine<'tcx>, . ) -> T { 5 ( 0.00%) let InferOk { value, obligations } = self; 22 ( 0.00%) for obligation in obligations { . fulfill_cx.register_predicate_obligation(infcx, obligation); . } . value . } . } . . impl<'tcx> InferOk<'tcx, ()> { 337 ( 0.00%) pub fn into_obligations(self) -> PredicateObligations<'tcx> { 1,348 ( 0.00%) self.obligations 337 ( 0.00%) } . } . . #[must_use = "once you start a snapshot, you should always consume it"] . pub struct CombinedSnapshot<'a, 'tcx> { . undo_snapshot: Snapshot<'tcx>, . region_constraints_snapshot: RegionSnapshot, . universe: ty::UniverseIndex, . was_in_snapshot: bool, -- line 662 ---------------------------------------- -- line 674 ---------------------------------------- . let canonical = self.canonicalize_query((a, b), &mut OriginalQueryValues::default()); . debug!("canonical consts: {:?}", &canonical.value); . . self.tcx.try_unify_abstract_consts(canonical.value) . } . . pub fn is_in_snapshot(&self) -> bool { . self.in_snapshot.get() 687 ( 0.00%) } . 4,760 ( 0.00%) pub fn freshen>(&self, t: T) -> T { 5,355 ( 0.01%) t.fold_with(&mut self.freshener()) 5,355 ( 0.01%) } . . /// Returns the origin of the type variable identified by `vid`, or `None` . /// if this is not a type variable. . /// . /// No attempt is made to resolve `ty`. 4 ( 0.00%) pub fn type_var_origin(&'a self, ty: Ty<'tcx>) -> Option { 8 ( 0.00%) match *ty.kind() { 2 ( 0.00%) ty::Infer(ty::TyVar(vid)) => { 8 ( 0.00%) Some(*self.inner.borrow_mut().type_variables().var_origin(vid)) . } . _ => None, . } 8 ( 0.00%) } . 595 ( 0.00%) pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, false) 595 ( 0.00%) } . . /// Like `freshener`, but does not replace `'static` regions. 2,051 ( 0.00%) pub fn freshener_keep_static<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, true) 2,051 ( 0.00%) } . 48 ( 0.00%) pub fn unsolved_variables(&self) -> Vec> { 24 ( 0.00%) let mut inner = self.inner.borrow_mut(); 24 ( 0.00%) let mut vars: Vec> = inner . .type_variables() . .unsolved_variables() . .into_iter() 4 ( 0.00%) .map(|t| self.tcx.mk_ty_var(t)) . .collect(); . vars.extend( . (0..inner.int_unification_table().len()) . .map(|i| ty::IntVid { index: i as u32 }) 4 ( 0.00%) .filter(|&vid| inner.int_unification_table().probe_value(vid).is_none()) . .map(|v| self.tcx.mk_int_var(v)), . ); . vars.extend( . (0..inner.float_unification_table().len()) . .map(|i| ty::FloatVid { index: i as u32 }) . 
.filter(|&vid| inner.float_unification_table().probe_value(vid).is_none()) . .map(|v| self.tcx.mk_float_var(v)), . ); . vars 84 ( 0.00%) } . 977 ( 0.00%) fn combine_fields( . &'a self, . trace: TypeTrace<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> CombineFields<'a, 'tcx> { 3,912 ( 0.00%) CombineFields { . infcx: self, 9,780 ( 0.01%) trace, . cause: None, . param_env, . obligations: PredicateObligations::new(), . } 977 ( 0.00%) } . . /// Clear the "currently in a snapshot" flag, invoke the closure, . /// then restore the flag to its original value. This flag is a . /// debugging measure designed to detect cases where we start a . /// snapshot, create type variables, and register obligations . /// which may involve those type variables in the fulfillment cx, . /// potentially leaving "dangling type variables" behind. . /// In such cases, an assertion will fail when attempting to -- line 753 ---------------------------------------- -- line 760 ---------------------------------------- . /// before we return, this is not a problem, as there won't be any . /// escaping obligations in the main cx. In those cases, you can . /// use this function. . pub fn save_and_restore_in_snapshot_flag(&self, func: F) -> R . where . F: FnOnce(&Self) -> R, . { . let flag = self.in_snapshot.replace(false); 156 ( 0.00%) let result = func(self); . self.in_snapshot.set(flag); . result . } . 5,396 ( 0.01%) fn start_snapshot(&self) -> CombinedSnapshot<'a, 'tcx> { . debug!("start_snapshot()"); . . let in_snapshot = self.in_snapshot.replace(true); . . let mut inner = self.inner.borrow_mut(); . 16,188 ( 0.02%) CombinedSnapshot { . undo_snapshot: inner.undo_log.start_snapshot(), . region_constraints_snapshot: inner.unwrap_region_constraints().start_snapshot(), . universe: self.universe(), . was_in_snapshot: in_snapshot, . // Borrow typeck results "in progress" (i.e., during typeck) . // to ban writes from within a snapshot to them. 2,698 ( 0.00%) _in_progress_typeck_results: self . .in_progress_typeck_results . .map(|typeck_results| typeck_results.borrow()), . } 10,792 ( 0.01%) } . 7,612 ( 0.01%) #[instrument(skip(self, snapshot), level = "debug")] . fn rollback_to(&self, cause: &str, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 692 ( 0.00%) undo_snapshot, 692 ( 0.00%) region_constraints_snapshot, 692 ( 0.00%) universe, 692 ( 0.00%) was_in_snapshot, 1,384 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . self.universe.set(universe); . . let mut inner = self.inner.borrow_mut(); 692 ( 0.00%) inner.rollback_to(undo_snapshot); . inner.unwrap_region_constraints().rollback_to(region_constraints_snapshot); . } . 30,090 ( 0.03%) #[instrument(skip(self, snapshot), level = "debug")] . fn commit_from(&self, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 2,006 ( 0.00%) undo_snapshot, . region_constraints_snapshot: _, . universe: _, 2,006 ( 0.00%) was_in_snapshot, 4,012 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . . self.inner.borrow_mut().commit(undo_snapshot); . } . . /// Executes `f` and commit the bindings. 2,142 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 2,640 ( 0.00%) pub fn commit_unconditionally(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 240 ( 0.00%) let snapshot = self.start_snapshot(); 798 ( 0.00%) let r = f(&snapshot); 1,680 ( 0.00%) self.commit_from(snapshot); 1,454 ( 0.00%) r . } . . 
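`start_snapshot`, `rollback_to`, and `commit_from` above (and the `commit_if_ok`/`probe` wrappers that follow) all sit on top of a single undo log: a snapshot is essentially the current length of that log, rolling back pops and reverses the recorded changes, and committing just forgets the snapshot. A minimal model of the pattern in plain Rust (illustrative only, not the rustc types):

    struct InferenceTable {
        values: Vec<Option<u32>>,            // stand-in for unification storage
        undo_log: Vec<(usize, Option<u32>)>, // (slot, previous value)
    }

    struct Snapshot(usize); // just a position in the undo log

    impl InferenceTable {
        fn new(slots: usize) -> Self {
            InferenceTable { values: vec![None; slots], undo_log: Vec::new() }
        }

        fn assign(&mut self, slot: usize, value: u32) {
            self.undo_log.push((slot, self.values[slot]));
            self.values[slot] = Some(value);
        }

        fn start_snapshot(&self) -> Snapshot {
            Snapshot(self.undo_log.len())
        }

        fn rollback_to(&mut self, snapshot: Snapshot) {
            while self.undo_log.len() > snapshot.0 {
                let (slot, previous) = self.undo_log.pop().unwrap();
                self.values[slot] = previous;
            }
        }

        fn commit(&mut self, _snapshot: Snapshot) {
            // Nothing to undo; the changes simply stay in place.
        }

        // The commit_if_ok pattern: keep the bindings on Ok, undo them on Err.
        fn commit_if_ok<T, E>(
            &mut self,
            f: impl FnOnce(&mut Self) -> Result<T, E>,
        ) -> Result<T, E> {
            let snapshot = self.start_snapshot();
            match f(self) {
                Ok(value) => {
                    self.commit(snapshot);
                    Ok(value)
                }
                Err(error) => {
                    self.rollback_to(snapshot);
                    Err(error)
                }
            }
        }
    }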
/// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`. 11,473 ( 0.01%) #[instrument(skip(self, f), level = "debug")] 14,121 ( 0.01%) pub fn commit_if_ok(&self, f: F) -> Result . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result, . { 2,131 ( 0.00%) let snapshot = self.start_snapshot(); 6,559 ( 0.01%) let r = f(&snapshot); . debug!("commit_if_ok() -- r.is_ok() = {}", r.is_ok()); 1,878 ( 0.00%) match r { . Ok(_) => { 12,931 ( 0.01%) self.commit_from(snapshot); . } . Err(_) => { 1,491 ( 0.00%) self.rollback_to("commit_if_ok -- error", snapshot); . } . } 10,431 ( 0.01%) r . } . . /// Execute `f` then unroll any bindings it creates. 4,039 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 4,641 ( 0.00%) pub fn probe(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 1,048 ( 0.00%) let snapshot = self.start_snapshot(); 1,508 ( 0.00%) let r = f(&snapshot); 4,892 ( 0.00%) self.rollback_to("probe", snapshot); 626 ( 0.00%) r . } . . /// If `should_skip` is true, then execute `f` then unroll any bindings it creates. . #[instrument(skip(self, f), level = "debug")] . pub fn probe_maybe_skip_leak_check(&self, should_skip: bool, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { -- line 875 ---------------------------------------- -- line 884 ---------------------------------------- . r . } . . /// Scan the constraints produced since `snapshot` began and returns: . /// . /// - `None` -- if none of them involve "region outlives" constraints . /// - `Some(true)` -- if there are `'a: 'b` constraints where `'a` or `'b` is a placeholder . /// - `Some(false)` -- if there are `'a: 'b` constraints but none involve placeholders 442 ( 0.00%) pub fn region_constraints_added_in_snapshot( . &self, . snapshot: &CombinedSnapshot<'a, 'tcx>, . ) -> Option { 884 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() . .region_constraints_added_in_snapshot(&snapshot.undo_snapshot) 663 ( 0.00%) } . . pub fn add_given(&self, sub: ty::Region<'tcx>, sup: ty::RegionVid) { . self.inner.borrow_mut().unwrap_region_constraints().add_given(sub, sup); . } . 12 ( 0.00%) pub fn can_sub(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).sub(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 9 ( 0.00%) } . 192 ( 0.00%) pub fn can_eq(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).eq(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 144 ( 0.00%) } . 1,810 ( 0.00%) #[instrument(skip(self), level = "debug")] . pub fn sub_regions( . &self, . origin: SubregionOrigin<'tcx>, . a: ty::Region<'tcx>, . b: ty::Region<'tcx>, . ) { 1,629 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().make_subregion(origin, a, b); . } . . /// Require that the region `r` be equal to one of the regions in . /// the set `regions`. . #[instrument(skip(self), level = "debug")] . pub fn member_constraint( . &self, . opaque_type_def_id: DefId, -- line 947 ---------------------------------------- -- line 983 ---------------------------------------- . let subtype_predicate = predicate.map_bound(|p| ty::SubtypePredicate { . 
a_is_expected: false, // when coercing from `a` to `b`, `b` is expected . a: p.a, . b: p.b, . }); . self.subtype_predicate(cause, param_env, subtype_predicate) . } . 96 ( 0.00%) pub fn subtype_predicate( . &self, . cause: &ObligationCause<'tcx>, . param_env: ty::ParamEnv<'tcx>, . predicate: ty::PolySubtypePredicate<'tcx>, . ) -> Option> { . // Check for two unresolved inference variables, in which case we can . // make no progress. This is partly a micro-optimization, but it's . // also an opportunity to "sub-unify" the variables. This isn't -- line 999 ---------------------------------------- -- line 1002 ---------------------------------------- . // earlier that they are sub-unified). . // . // Note that we can just skip the binders here because . // type variables can't (at present, at . // least) capture any of the things bound by this binder. . // . // Note that this sub here is not just for diagnostics - it has semantic . // effects as well. 8 ( 0.00%) let r_a = self.shallow_resolve(predicate.skip_binder().a); 8 ( 0.00%) let r_b = self.shallow_resolve(predicate.skip_binder().b); 60 ( 0.00%) match (r_a.kind(), r_b.kind()) { 14 ( 0.00%) (&ty::Infer(ty::TyVar(a_vid)), &ty::Infer(ty::TyVar(b_vid))) => { . self.inner.borrow_mut().type_variables().sub(a_vid, b_vid); 14 ( 0.00%) return None; . } . _ => {} . } . . Some(self.commit_if_ok(|_snapshot| { 1 ( 0.00%) let ty::SubtypePredicate { a_is_expected, a, b } = . self.replace_bound_vars_with_placeholders(predicate); . 2 ( 0.00%) let ok = self.at(cause, param_env).sub_exp(a_is_expected, a, b)?; . . Ok(ok.unit()) . })) 72 ( 0.00%) } . 1,536 ( 0.00%) pub fn region_outlives_predicate( . &self, . cause: &traits::ObligationCause<'tcx>, . predicate: ty::PolyRegionOutlivesPredicate<'tcx>, . ) -> UnitResult<'tcx> { . self.commit_if_ok(|_snapshot| { . let ty::OutlivesPredicate(r_a, r_b) = . self.replace_bound_vars_with_placeholders(predicate); . let origin = SubregionOrigin::from_obligation_cause(cause, || { . RelateRegionParamBound(cause.span) . }); 1,280 ( 0.00%) self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b` . Ok(()) . }) 1,024 ( 0.00%) } . . /// Number of type variables created so far. 2 ( 0.00%) pub fn num_ty_vars(&self) -> usize { . self.inner.borrow_mut().type_variables().num_vars() 4 ( 0.00%) } . 346 ( 0.00%) pub fn next_ty_var_id(&self, origin: TypeVariableOrigin) -> TyVid { 1,730 ( 0.00%) self.inner.borrow_mut().type_variables().new_var(self.universe(), origin) 519 ( 0.00%) } . 328 ( 0.00%) pub fn next_ty_var(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { 1,202 ( 0.00%) self.tcx.mk_ty_var(self.next_ty_var_id(origin)) 492 ( 0.00%) } . 38 ( 0.00%) pub fn next_ty_var_in_universe( . &self, . origin: TypeVariableOrigin, . universe: ty::UniverseIndex, . ) -> Ty<'tcx> { 209 ( 0.00%) let vid = self.inner.borrow_mut().type_variables().new_var(universe, origin); 19 ( 0.00%) self.tcx.mk_ty_var(vid) 57 ( 0.00%) } . . pub fn next_const_var( . &self, . ty: Ty<'tcx>, . origin: ConstVariableOrigin, . ) -> &'tcx ty::Const<'tcx> { . self.tcx.mk_const_var(self.next_const_var_id(origin), ty) . } -- line 1074 ---------------------------------------- -- line 1090 ---------------------------------------- . pub fn next_const_var_id(&self, origin: ConstVariableOrigin) -> ConstVid<'tcx> { . self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { . origin, . val: ConstVariableValue::Unknown { universe: self.universe() }, . }) . } . . 
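The `next_*_var` functions above and below hand out fresh inference variables: each call appends an unresolved slot to the relevant unification table and returns its index, which is later unified with a concrete type. A toy model of that lifecycle (illustrative only; the real tables live in `InferCtxtInner` and participate in the snapshot machinery sketched earlier):

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct TyVid(u32);

    #[derive(Default)]
    struct TypeVariableTable {
        // None = still unresolved; String stands in for a real Ty<'tcx>.
        values: Vec<Option<String>>,
    }

    impl TypeVariableTable {
        fn new_var(&mut self) -> TyVid {
            self.values.push(None);
            TyVid(self.values.len() as u32 - 1)
        }

        fn instantiate(&mut self, vid: TyVid, ty: &str) {
            let slot = &mut self.values[vid.0 as usize];
            assert!(slot.is_none(), "inference variable resolved twice");
            *slot = Some(ty.to_string());
        }

        fn probe(&self, vid: TyVid) -> Option<&str> {
            self.values[vid.0 as usize].as_deref()
        }
    }

    fn main() {
        let mut table = TypeVariableTable::default();
        let v0 = table.new_var();            // e.g. the `_` in `let v: Vec<_> = ...`
        assert_eq!(table.probe(v0), None);   // unsolved until unification happens
        table.instantiate(v0, "u32");
        assert_eq!(table.probe(v0), Some("u32"));
    }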
fn next_int_var_id(&self) -> IntVid { 5 ( 0.00%) self.inner.borrow_mut().int_unification_table().new_key(None) . } . 3 ( 0.00%) pub fn next_int_var(&self) -> Ty<'tcx> { . self.tcx.mk_int_var(self.next_int_var_id()) 4 ( 0.00%) } . . fn next_float_var_id(&self) -> FloatVid { . self.inner.borrow_mut().float_unification_table().new_key(None) . } . . pub fn next_float_var(&self) -> Ty<'tcx> { . self.tcx.mk_float_var(self.next_float_var_id()) . } . . /// Creates a fresh region variable with the next available index. . /// The variable will be created in the maximum universe created . /// thus far, allowing it to name any region created thus far. 191 ( 0.00%) pub fn next_region_var(&self, origin: RegionVariableOrigin) -> ty::Region<'tcx> { 3,252 ( 0.00%) self.next_region_var_in_universe(origin, self.universe()) 382 ( 0.00%) } . . /// Creates a fresh region variable with the next available index . /// in the given universe; typically, you can use . /// `next_region_var` and just use the maximal universe. 1,674 ( 0.00%) pub fn next_region_var_in_universe( . &self, . origin: RegionVariableOrigin, . universe: ty::UniverseIndex, . ) -> ty::Region<'tcx> { . let region_var = 10,881 ( 0.01%) self.inner.borrow_mut().unwrap_region_constraints().new_region_var(universe, origin); 4,185 ( 0.00%) self.tcx.mk_region(ty::ReVar(region_var)) 2,511 ( 0.00%) } . . /// Return the universe that the region `r` was created in. For . /// most regions (e.g., `'static`, named regions from the user, . /// etc) this is the root universe U0. For inference variables or . /// placeholders, however, it will return the universe which which . /// they are associated. 216 ( 0.00%) pub fn universe_of_region(&self, r: ty::Region<'tcx>) -> ty::UniverseIndex { . self.inner.borrow_mut().unwrap_region_constraints().universe(r) 324 ( 0.00%) } . . /// Number of region variables created so far. 80 ( 0.00%) pub fn num_region_vars(&self) -> usize { . self.inner.borrow_mut().unwrap_region_constraints().num_region_vars() 120 ( 0.00%) } . . /// Just a convenient wrapper of `next_region_var` for using during NLL. 206 ( 0.00%) pub fn next_nll_region_var(&self, origin: NllRegionVariableOrigin) -> ty::Region<'tcx> { . self.next_region_var(RegionVariableOrigin::Nll(origin)) 412 ( 0.00%) } . . /// Just a convenient wrapper of `next_region_var` for using during NLL. 22 ( 0.00%) pub fn next_nll_region_var_in_universe( . &self, . origin: NllRegionVariableOrigin, . universe: ty::UniverseIndex, . ) -> ty::Region<'tcx> { 154 ( 0.00%) self.next_region_var_in_universe(RegionVariableOrigin::Nll(origin), universe) 44 ( 0.00%) } . 3,600 ( 0.00%) pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> GenericArg<'tcx> { 1,982 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => { . // Create a region inference variable for the given . // region parameter definition. 134 ( 0.00%) self.next_region_var(EarlyBoundRegion(span, param.name)).into() . } . GenericParamDefKind::Type { .. } => { . // Create a type inference variable for the given . // type parameter definition. The substitutions are . // for actual parameters that may be referred to by . // the default of this type parameter, if it exists. . // e.g., `struct Foo(...);` when . // used in a path such as `Foo::::new()` will . // use an inference variable for `C` with `[T, U]` . // as the substitutions for the default, `(T, U)`. 1,264 ( 0.00%) let ty_var_id = self.inner.borrow_mut().type_variables().new_var( . self.universe(), 1,580 ( 0.00%) TypeVariableOrigin { . 
kind: TypeVariableOriginKind::TypeParameterDefinition( 316 ( 0.00%) param.name, 316 ( 0.00%) Some(param.def_id), . ), . span, . }, . ); . 316 ( 0.00%) self.tcx.mk_ty_var(ty_var_id).into() . } . GenericParamDefKind::Const { .. } => { . let origin = ConstVariableOrigin { . kind: ConstVariableOriginKind::ConstParameterDefinition( . param.name, . param.def_id, . ), . span, -- line 1196 ---------------------------------------- -- line 1202 ---------------------------------------- . }); . self.tcx.mk_const_var(const_var_id, self.tcx.type_of(param.def_id)).into() . } . } . } . . /// Given a set of generics defined on a type or impl, returns a substitution mapping each . /// type/region parameter to a fresh inference variable. 978 ( 0.00%) pub fn fresh_substs_for_item(&self, span: Span, def_id: DefId) -> SubstsRef<'tcx> { 4,116 ( 0.00%) InternalSubsts::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param)) 652 ( 0.00%) } . . /// Returns `true` if errors have been reported since this infcx was . /// created. This is sometimes used as a heuristic to skip . /// reporting errors that often occur as a result of earlier . /// errors, but where it's hard to be 100% sure (e.g., unresolved . /// inference variables, regionck errors). 24 ( 0.00%) pub fn is_tainted_by_errors(&self) -> bool { . debug!( . "is_tainted_by_errors(err_count={}, err_count_on_creation={}, \ . tainted_by_errors_flag={})", . self.tcx.sess.err_count(), . self.err_count_on_creation, . self.tainted_by_errors_flag.get() . ); . 1,188 ( 0.00%) if self.tcx.sess.err_count() > self.err_count_on_creation { . return true; // errors reported since this infcx was made . } . self.tainted_by_errors_flag.get() 36 ( 0.00%) } . . /// Set the "tainted by errors" flag to true. We call this when we . /// observe an error from a prior pass. . pub fn set_tainted_by_errors(&self) { . debug!("set_tainted_by_errors()"); . self.tainted_by_errors_flag.set(true) . } . . /// Process the region constraints and return any any errors that . /// result. After this, no more unification operations should be . /// done -- or the compiler will panic -- but it is legal to use . /// `resolve_vars_if_possible` as well as `fully_resolve`. 1,288 ( 0.00%) pub fn resolve_regions( . &self, . region_context: DefId, . outlives_env: &OutlivesEnvironment<'tcx>, . mode: RegionckMode, . ) -> Vec> { 3,496 ( 0.00%) let (var_infos, data) = { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; 184 ( 0.00%) assert!( 552 ( 0.00%) self.is_tainted_by_errors() || inner.region_obligations.is_empty(), . "region_obligations not empty: {:#?}", . inner.region_obligations . ); . inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) . .into_infos_and_data() 184 ( 0.00%) }; . . let region_rels = 184 ( 0.00%) &RegionRelations::new(self.tcx, region_context, outlives_env.free_region_map()); . 1,656 ( 0.00%) let (lexical_region_resolutions, errors) = 4,416 ( 0.00%) lexical_region_resolve::resolve(region_rels, var_infos, data, mode); . 736 ( 0.00%) let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions)); 184 ( 0.00%) assert!(old_value.is_none()); . . errors 1,656 ( 0.00%) } . . /// Process the region constraints and report any errors that . /// result. After this, no more unification operations should be . /// done -- or the compiler will panic -- but it is legal to use . /// `resolve_vars_if_possible` as well as `fully_resolve`. 
1,840 ( 0.00%) pub fn resolve_regions_and_report_errors( . &self, . region_context: DefId, . outlives_env: &OutlivesEnvironment<'tcx>, . mode: RegionckMode, . ) { 368 ( 0.00%) let errors = self.resolve_regions(region_context, outlives_env, mode); . 552 ( 0.00%) if !self.is_tainted_by_errors() { . // As a heuristic, just skip reporting region errors . // altogether if other errors have been reported while . // this infcx was in use. This is totally hokey but . // otherwise we have a hard time separating legit region . // errors from silly ones. 368 ( 0.00%) self.report_region_errors(&errors); . } 920 ( 0.00%) } . . /// Obtains (and clears) the current set of region . /// constraints. The inference context is still usable: further . /// unifications will simply add new constraints. . /// . /// This method is not meant to be used with normal lexical region . /// resolution. Rather, it is used in the NLL mode as a kind of . /// interim hack: basically we run normal type-check and generate -- line 1307 ---------------------------------------- -- line 1319 ---------------------------------------- . } . . /// Gives temporary access to the region constraint data. . pub fn with_region_constraints( . &self, . op: impl FnOnce(&RegionConstraintData<'tcx>) -> R, . ) -> R { . let mut inner = self.inner.borrow_mut(); 560 ( 0.00%) op(inner.unwrap_region_constraints().data()) . } . . pub fn region_var_origin(&self, vid: ty::RegionVid) -> RegionVariableOrigin { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; . inner . .region_constraint_storage . .as_mut() -- line 1335 ---------------------------------------- -- line 1338 ---------------------------------------- . .var_origin(vid) . } . . /// Takes ownership of the list of variable regions. This implies . /// that all the region constraints have already been taken, and . /// hence that `resolve_regions_and_report_errors` can never be . /// called. This is used only during NLL processing to "hand off" ownership . /// of the set of region variables into the NLL region context. 50 ( 0.00%) pub fn take_region_var_origins(&self) -> VarInfos { . let mut inner = self.inner.borrow_mut(); 190 ( 0.00%) let (var_infos, data) = inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) 10 ( 0.00%) .into_infos_and_data(); 10 ( 0.00%) assert!(data.is_empty()); . var_infos 80 ( 0.00%) } . . pub fn ty_to_string(&self, t: Ty<'tcx>) -> String { . self.resolve_vars_if_possible(t).to_string() . } . . /// If `TyVar(vid)` resolves to a type, return that type. Else, return the . /// universe index of `TyVar(vid)`. 138 ( 0.00%) pub fn probe_ty_var(&self, vid: TyVid) -> Result, ty::UniverseIndex> { . use self::type_variable::TypeVariableValue; . 414 ( 0.00%) match self.inner.borrow_mut().type_variables().probe(vid) { . TypeVariableValue::Known { value } => Ok(value), . TypeVariableValue::Unknown { universe } => Err(universe), . } 483 ( 0.00%) } . . /// Resolve any type variables found in `value` -- but only one . /// level. So, if the variable `?X` is bound to some type . /// `Foo`, then this would return `Foo` (but `?Y` may . /// itself be bound to a type). . /// . /// Useful when you only need to inspect the outermost level of . /// the type and don't care about nested types (or perhaps you . /// will be resolving them as well, e.g. in a loop). . pub fn shallow_resolve(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 3,231 ( 0.00%) value.fold_with(&mut ShallowResolver { infcx: self }) . } . 
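Editorial note: `shallow_resolve` (annotated just above) and `resolve_vars_if_possible` (a little further below) differ only in how far they chase inference-variable bindings. Here is a minimal stand-alone model of that difference, with an invented `Ty` enum and `Bindings` table rather than the real rustc types.

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Int,
    Vec(Box<Ty>),
    // An unresolved inference variable `?N`.
    Infer(u32),
}

#[derive(Default)]
struct Bindings {
    known: HashMap<u32, Ty>,
}

impl Bindings {
    // Follow a top-level `Infer` to its current value, but do not recurse
    // into the structure of the result (cf. `shallow_resolve`).
    fn shallow_resolve(&self, ty: &Ty) -> Ty {
        match ty {
            Ty::Infer(v) => match self.known.get(v) {
                Some(t) => self.shallow_resolve(t), // a var may point at another var
                None => ty.clone(),
            },
            _ => ty.clone(),
        }
    }

    // Replace every known variable, wherever it occurs
    // (cf. `resolve_vars_if_possible`).
    fn resolve_fully(&self, ty: &Ty) -> Ty {
        match self.shallow_resolve(ty) {
            Ty::Vec(inner) => Ty::Vec(Box::new(self.resolve_fully(&inner))),
            other => other,
        }
    }
}

fn main() {
    let mut b = Bindings::default();
    b.known.insert(0, Ty::Vec(Box::new(Ty::Infer(1)))); // ?0 := Vec<?1>
    b.known.insert(1, Ty::Int);                         // ?1 := Int

    // One level only: ?0 becomes Vec<?1>, the nested variable stays.
    assert_eq!(
        b.shallow_resolve(&Ty::Infer(0)),
        Ty::Vec(Box::new(Ty::Infer(1)))
    );
    // Full resolution rebuilds the whole type: Vec<Int>.
    assert_eq!(b.resolve_fully(&Ty::Infer(0)), Ty::Vec(Box::new(Ty::Int)));
}

The shallow form is enough whenever only the outermost constructor matters (as in the `match (r_a.kind(), r_b.kind())` in `subtype_predicate` earlier); full resolution is what rebuilds the final type.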
134 ( 0.00%) pub fn root_var(&self, var: ty::TyVid) -> ty::TyVid { . self.inner.borrow_mut().type_variables().root_var(var) 201 ( 0.00%) } . . /// Where possible, replaces type/const variables in . /// `value` with their final value. Note that region variables . /// are unaffected. If a type/const variable has not been unified, it . /// is left as is. This is an idempotent operation that does . /// not affect inference state in any way and so you can do it . /// at will. . pub fn resolve_vars_if_possible(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 3,470 ( 0.00%) if !value.needs_infer() { 3,725 ( 0.00%) return value; // Avoid duplicated subst-folding. . } 3,668 ( 0.00%) let mut r = resolve::OpportunisticVarResolver::new(self); 6,305 ( 0.01%) value.fold_with(&mut r) . } . . /// Returns the first unresolved variable contained in `T`. In the . /// process of visiting `T`, this will resolve (where possible) . /// type variables in `T`, but it never constructs the final, . /// resolved type, so it's more efficient than . /// `resolve_vars_if_possible()`. . pub fn unresolved_type_vars(&self, value: &T) -> Option<(Ty<'tcx>, Option)> -- line 1414 ---------------------------------------- -- line 1490 ---------------------------------------- . expected: &'tcx ty::Const<'tcx>, . actual: &'tcx ty::Const<'tcx>, . err: TypeError<'tcx>, . ) -> DiagnosticBuilder<'tcx> { . let trace = TypeTrace::consts(cause, true, expected, actual); . self.report_and_explain_type_error(trace, &err) . } . 207 ( 0.00%) pub fn replace_bound_vars_with_fresh_vars( . &self, . span: Span, . lbrct: LateBoundRegionConversionTime, . value: ty::Binder<'tcx, T>, . ) -> (T, BTreeMap>) . where . T: TypeFoldable<'tcx>, . { . let fld_r = 2,635 ( 0.00%) |br: ty::BoundRegion| self.next_region_var(LateBoundRegion(span, br.kind, lbrct)); . let fld_t = |_| { . self.next_ty_var(TypeVariableOrigin { . kind: TypeVariableOriginKind::MiscVariable, . span, . }) . }; . let fld_c = |_, ty| { . self.next_const_var( . ty, . ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span }, . ) . }; 3,385 ( 0.00%) self.tcx.replace_bound_vars(value, fld_r, fld_t, fld_c) 138 ( 0.00%) } . . /// See the [`region_constraints::RegionConstraintCollector::verify_generic_bound`] method. 56 ( 0.00%) pub fn verify_generic_bound( . &self, . origin: SubregionOrigin<'tcx>, . kind: GenericKind<'tcx>, . a: ty::Region<'tcx>, . bound: VerifyBound<'tcx>, . ) { . debug!("verify_generic_bound({:?}, {:?} <: {:?})", kind, a, bound); . 21 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() 119 ( 0.00%) .verify_generic_bound(origin, kind, a, bound); 49 ( 0.00%) } . . /// Obtains the latest type of the given closure; this may be a . /// closure in the current function, in which case its . /// `ClosureKind` may not yet be known. . pub fn closure_kind(&self, closure_substs: SubstsRef<'tcx>) -> Option { . let closure_kind_ty = closure_substs.as_closure().kind_ty(); . let closure_kind_ty = self.shallow_resolve(closure_kind_ty); . closure_kind_ty.to_opt_closure_kind() -- line 1546 ---------------------------------------- -- line 1551 ---------------------------------------- . /// its `ParamEnv`, since `FulfillmentContext` doesn't use probing. . pub fn clear_caches(&self) { . self.selection_cache.clear(); . self.evaluation_cache.clear(); . self.inner.borrow_mut().projection_cache().clear(); . } . . pub fn universe(&self) -> ty::UniverseIndex { 6,514 ( 0.01%) self.universe.get() 1,907 ( 0.00%) } . . 
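Editorial note: `replace_bound_vars_with_fresh_vars`, annotated above, instantiates a binder by handing each late-bound variable a fresh inference variable. Below is a simplified sketch of that substitution, using an invented `Binder`/`Ty` representation with plain indices instead of rustc's bound-variable machinery; it is not the rustc implementation.

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    // A variable bound by the enclosing binder, e.g. the `'a`/`T` in `for<'a> ...`.
    Bound(usize),
    // A fresh inference variable `?N`.
    Infer(u32),
    Fn(Vec<Ty>, Box<Ty>),
    Unit,
}

// `Binder { bound_vars, value }` stands for `for<x0 .. x{bound_vars-1}> value`.
struct Binder {
    bound_vars: usize,
    value: Ty,
}

fn instantiate(binder: &Binder, next_var: &mut u32) -> Ty {
    // Hand out one fresh inference variable per bound variable...
    let fresh: Vec<Ty> = (0..binder.bound_vars)
        .map(|_| {
            let v = *next_var;
            *next_var += 1;
            Ty::Infer(v)
        })
        .collect();
    // ...and substitute them into the body of the binder.
    subst(&binder.value, &fresh)
}

fn subst(ty: &Ty, fresh: &[Ty]) -> Ty {
    match ty {
        Ty::Bound(i) => fresh[*i].clone(),
        Ty::Fn(params, ret) => Ty::Fn(
            params.iter().map(|p| subst(p, fresh)).collect(),
            Box::new(subst(ret, fresh)),
        ),
        other => other.clone(),
    }
}

fn main() {
    // for<X> fn(X, X) -> ()
    let sig = Binder {
        bound_vars: 1,
        value: Ty::Fn(vec![Ty::Bound(0), Ty::Bound(0)], Box::new(Ty::Unit)),
    };
    let mut next_var = 0;
    // Both occurrences of the bound variable map to the SAME fresh variable ?0.
    assert_eq!(
        instantiate(&sig, &mut next_var),
        Ty::Fn(vec![Ty::Infer(0), Ty::Infer(0)], Box::new(Ty::Unit))
    );
}

The property the sketch is meant to preserve is that every occurrence of the same bound variable maps to the same fresh variable.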
/// Creates and return a fresh universe that extends all previous . /// universes. Updates `self.universe` to that new universe. 184 ( 0.00%) pub fn create_next_universe(&self) -> ty::UniverseIndex { 184 ( 0.00%) let u = self.universe.get().next_universe(); . self.universe.set(u); . u 184 ( 0.00%) } . . /// Resolves and evaluates a constant. . /// . /// The constant can be located on a trait like `::C`, in which case the given . /// substitutions and environment are used to resolve the constant. Alternatively if the . /// constant has generic parameters in scope the substitutions are used to evaluate the value of . /// the constant. For example in `fn foo() { let _ = [0; bar::()]; }` the repeat count . /// constant `bar::()` requires a substitution for `T`, if the substitution for `T` is still -- line 1576 ---------------------------------------- -- line 1606 ---------------------------------------- . // variables, thus we don't need to substitute back the original values. . self.tcx.const_eval_resolve(param_env_erased, unevaluated, span) . } . . /// If `typ` is a type variable of some kind, resolve it one level . /// (but do not resolve types found in the result). If `typ` is . /// not a type variable, just return it unmodified. . // FIXME(eddyb) inline into `ShallowResolver::visit_ty`. 35,288 ( 0.03%) fn shallow_resolve_ty(&self, typ: Ty<'tcx>) -> Ty<'tcx> { 18,496 ( 0.02%) match *typ.kind() { . ty::Infer(ty::TyVar(v)) => { . // Not entirely obvious: if `typ` is a type variable, . // it can be resolved to an int/float variable, which . // can then be recursively resolved, hence the . // recursion. Note though that we prevent type . // variables from unifying to other type variables . // directly (though they may be embedded . // structurally), and we prevent cycles in any case, . // so this recursion should always be of very limited . // depth. . // . // Note: if these two lines are combined into one we get . // dynamic borrow errors on `self.inner`. 8,824 ( 0.01%) let known = self.inner.borrow_mut().type_variables().probe(v).known(); . known.map_or(typ, |t| self.shallow_resolve_ty(t)) . } . 100 ( 0.00%) ty::Infer(ty::IntVar(v)) => self . .inner . .borrow_mut() . .int_unification_table() . .probe_value(v) 30 ( 0.00%) .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . ty::Infer(ty::FloatVar(v)) => self . .inner . .borrow_mut() . .float_unification_table() . .probe_value(v) . .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . _ => typ, . } 39,699 ( 0.04%) } . . /// `ty_or_const_infer_var_changed` is equivalent to one of these two: . /// * `shallow_resolve(ty) != ty` (where `ty.kind = ty::Infer(_)`) . /// * `shallow_resolve(ct) != ct` (where `ct.kind = ty::ConstKind::Infer(_)`) . /// . /// However, `ty_or_const_infer_var_changed` is more efficient. It's always . /// inlined, despite being large, because it has only two call sites that . /// are extremely hot (both in `traits::fulfill`'s checking of `stalled_on` -- line 1659 ---------------------------------------- -- line 1662 ---------------------------------------- . #[inline(always)] . pub fn ty_or_const_infer_var_changed(&self, infer_var: TyOrConstInferVar<'tcx>) -> bool { . match infer_var { . TyOrConstInferVar::Ty(v) => { . use self::type_variable::TypeVariableValue; . . // If `inlined_probe` returns a `Known` value, it never equals . // `ty::Infer(ty::TyVar(v))`. 3,713 ( 0.00%) match self.inner.borrow_mut().type_variables().inlined_probe(v) { . TypeVariableValue::Unknown { .. } => false, . TypeVariableValue::Known { .. 
} => true, . } . } . . TyOrConstInferVar::TyInt(v) => { . // If `inlined_probe_value` returns a value it's always a . // `ty::Int(_)` or `ty::UInt(_)`, which never matches a -- line 1678 ---------------------------------------- -- line 1716 ---------------------------------------- . /// Equivalent to `ty::ConstKind::Infer(ty::InferConst::Var(_))`. . Const(ConstVid<'tcx>), . } . . impl<'tcx> TyOrConstInferVar<'tcx> { . /// Tries to extract an inference variable from a type or a constant, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`) and . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). 68 ( 0.00%) pub fn maybe_from_generic_arg(arg: GenericArg<'tcx>) -> Option { . match arg.unpack() { . GenericArgKind::Type(ty) => Self::maybe_from_ty(ty), . GenericArgKind::Const(ct) => Self::maybe_from_const(ct), . GenericArgKind::Lifetime(_) => None, . } 68 ( 0.00%) } . . /// Tries to extract an inference variable from a type, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`). 12 ( 0.00%) pub fn maybe_from_ty(ty: Ty<'tcx>) -> Option { 720 ( 0.00%) match *ty.kind() { 160 ( 0.00%) ty::Infer(ty::TyVar(v)) => Some(TyOrConstInferVar::Ty(v)), . ty::Infer(ty::IntVar(v)) => Some(TyOrConstInferVar::TyInt(v)), . ty::Infer(ty::FloatVar(v)) => Some(TyOrConstInferVar::TyFloat(v)), . _ => None, . } 12 ( 0.00%) } . . /// Tries to extract an inference variable from a constant, returns `None` . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). . pub fn maybe_from_const(ct: &'tcx ty::Const<'tcx>) -> Option { . match ct.val { . ty::ConstKind::Infer(InferConst::Var(v)) => Some(TyOrConstInferVar::Const(v)), . _ => None, . } -- line 1749 ---------------------------------------- -- line 1755 ---------------------------------------- . } . . impl<'a, 'tcx> TypeFolder<'tcx> for ShallowResolver<'a, 'tcx> { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { . self.infcx.tcx . } . . fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { 6,734 ( 0.01%) self.infcx.shallow_resolve_ty(ty) . } . . fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { . if let ty::Const { val: ty::ConstKind::Infer(InferConst::Var(vid)), .. } = ct { . self.infcx . .inner . .borrow_mut() . .const_unification_table() -- line 1771 ---------------------------------------- -- line 1776 ---------------------------------------- . } else { . ct . } . } . } . . impl<'tcx> TypeTrace<'tcx> { . pub fn span(&self) -> Span { 2 ( 0.00%) self.cause.span . } . . pub fn types( . cause: &ObligationCause<'tcx>, . a_is_expected: bool, . a: Ty<'tcx>, . b: Ty<'tcx>, . ) -> TypeTrace<'tcx> { -- line 1792 ---------------------------------------- -- line 1800 ---------------------------------------- . b: &'tcx ty::Const<'tcx>, . ) -> TypeTrace<'tcx> { . TypeTrace { cause: cause.clone(), values: Consts(ExpectedFound::new(a_is_expected, a, b)) } . } . } . . impl<'tcx> SubregionOrigin<'tcx> { . pub fn span(&self) -> Span { 5 ( 0.00%) match *self { 1 ( 0.00%) Subtype(ref a) => a.span(), . RelateObjectBound(a) => a, . RelateParamBound(a, ..) => a, . RelateRegionParamBound(a) => a, . Reborrow(a) => a, . ReborrowUpvar(a, _) => a, . DataBorrowed(_, a) => a, . ReferenceOutlivesReferent(_, a) => a, . CompareImplMethodObligation { span, .. } => span, -- line 1817 ---------------------------------------- -- line 1818 ---------------------------------------- . CompareImplTypeObligation { span, .. } => span, . } . } . . 
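Editorial note: `ShallowResolver`, whose `TypeFolder` impl is annotated just below, is one instance of the folder pattern: the folder overrides `fold_ty` for the nodes it cares about and defers to a shared structural recursion for everything else. A compact model of that pattern follows; the `Ty`, `TypeFolder`, and `ReplaceInfer` names are invented stand-ins, not the rustc types.

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Infer(u32),
    Ref(Box<Ty>),
    Tuple(Vec<Ty>),
    Int,
}

trait TypeFolder {
    // Called for every node; the default just recurses structurally.
    fn fold_ty(&mut self, ty: &Ty) -> Ty {
        super_fold_ty(self, ty)
    }
}

// The structural recursion shared by all folders.
fn super_fold_ty<F: TypeFolder + ?Sized>(folder: &mut F, ty: &Ty) -> Ty {
    match ty {
        Ty::Ref(inner) => Ty::Ref(Box::new(folder.fold_ty(inner))),
        Ty::Tuple(parts) => Ty::Tuple(parts.iter().map(|t| folder.fold_ty(t)).collect()),
        leaf => leaf.clone(),
    }
}

// A folder that replaces one inference variable with a concrete type,
// roughly what a resolver does for every variable it knows about.
struct ReplaceInfer {
    var: u32,
    with: Ty,
}

impl TypeFolder for ReplaceInfer {
    fn fold_ty(&mut self, ty: &Ty) -> Ty {
        match ty {
            Ty::Infer(v) if *v == self.var => self.with.clone(),
            other => super_fold_ty(self, other),
        }
    }
}

fn main() {
    let ty = Ty::Tuple(vec![Ty::Infer(0), Ty::Ref(Box::new(Ty::Infer(0))), Ty::Int]);
    let mut folder = ReplaceInfer { var: 0, with: Ty::Int };
    assert_eq!(
        folder.fold_ty(&ty),
        Ty::Tuple(vec![Ty::Int, Ty::Ref(Box::new(Ty::Int)), Ty::Int])
    );
}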
pub fn from_obligation_cause(cause: &traits::ObligationCause<'tcx>, default: F) -> Self . where . F: FnOnce() -> Self, . { 1,267 ( 0.00%) match *cause.code() { 91 ( 0.00%) traits::ObligationCauseCode::ReferenceOutlivesReferent(ref_type) => { 455 ( 0.00%) SubregionOrigin::ReferenceOutlivesReferent(ref_type, cause.span) . } . . traits::ObligationCauseCode::CompareImplMethodObligation { . impl_item_def_id, . trait_item_def_id, . } => SubregionOrigin::CompareImplMethodObligation { . span: cause.span, . impl_item_def_id, -- line 1836 ---------------------------------------- 28,007 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs -------------------------------------------------------------------------------- Ir -- line 118 ---------------------------------------- . stability: InternedSet<'tcx, attr::Stability>, . . /// `#[rustc_const_stable]` and `#[rustc_const_unstable]` attributes . const_stability: InternedSet<'tcx, attr::ConstStability>, . } . . impl<'tcx> CtxtInterners<'tcx> { . fn new(arena: &'tcx WorkerLocal>) -> CtxtInterners<'tcx> { 17 ( 0.00%) CtxtInterners { . arena, . type_: Default::default(), . type_list: Default::default(), . substs: Default::default(), . region: Default::default(), . poly_existential_predicates: Default::default(), . canonical_var_infos: Default::default(), . predicate: Default::default(), -- line 134 ---------------------------------------- -- line 143 ---------------------------------------- . stability: Default::default(), . const_stability: Default::default(), . } . } . . /// Interns a type. . #[allow(rustc::usage_of_ty_tykind)] . #[inline(never)] 59,552 ( 0.06%) fn intern_ty(&self, kind: TyKind<'tcx>) -> Ty<'tcx> { . self.type_ 37,220 ( 0.04%) .intern(kind, |kind| { 3,782 ( 0.00%) let flags = super::flags::FlagComputation::for_kind(&kind); . . let ty_struct = TyS { 7,564 ( 0.01%) kind, . flags: flags.flags, . outer_exclusive_binder: flags.outer_exclusive_binder, . }; . . Interned(self.arena.alloc(ty_struct)) . }) . .0 66,996 ( 0.07%) } . . #[inline(never)] 20,632 ( 0.02%) fn intern_predicate( . &self, . kind: Binder<'tcx, PredicateKind<'tcx>>, . ) -> &'tcx PredicateInner<'tcx> { . self.predicate 18,053 ( 0.02%) .intern(kind, |kind| { 6,756 ( 0.01%) let flags = super::flags::FlagComputation::for_predicate(kind); . . let predicate_struct = PredicateInner { . kind, . flags: flags.flags, . outer_exclusive_binder: flags.outer_exclusive_binder, . }; . . Interned(self.arena.alloc(predicate_struct)) . }) . .0 23,211 ( 0.02%) } . } . . pub struct CommonTypes<'tcx> { . pub unit: Ty<'tcx>, . pub bool: Ty<'tcx>, . pub char: Ty<'tcx>, . pub isize: Ty<'tcx>, . pub i8: Ty<'tcx>, -- line 193 ---------------------------------------- -- line 237 ---------------------------------------- . /// safely used as a key in the maps of a TypeckResults. For that to be . /// the case, the HirId must have the same `owner` as all the other IDs in . /// this table (signified by `hir_owner`). Otherwise the HirId . /// would be in a different frame of reference and using its `local_id` . /// would result in lookup errors, or worse, in silently wrong data being . /// stored/returned. . #[inline] . fn validate_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) { 4,693 ( 0.00%) if hir_id.owner != hir_owner { . invalid_hir_id_for_typeck_results(hir_owner, hir_id); . } . } . . #[cold] . #[inline(never)] . 
fn invalid_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) { . ty::tls::with(|tcx| { -- line 253 ---------------------------------------- -- line 261 ---------------------------------------- . } . . impl<'a, V> LocalTableInContext<'a, V> { . pub fn contains_key(&self, id: hir::HirId) -> bool { . validate_hir_id_for_typeck_results(self.hir_owner, id); . self.data.contains_key(&id.local_id) . } . 3,354 ( 0.00%) pub fn get(&self, id: hir::HirId) -> Option<&V> { 678 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); 678 ( 0.00%) self.data.get(&id.local_id) 2,686 ( 0.00%) } . . pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> { . self.data.iter() . } . } . . impl<'a, V> ::std::ops::Index for LocalTableInContext<'a, V> { . type Output = V; . 60 ( 0.00%) fn index(&self, key: hir::HirId) -> &V { . self.get(key).expect("LocalTableInContext: key not found") 40 ( 0.00%) } . } . . pub struct LocalTableInContextMut<'a, V> { . hir_owner: LocalDefId, . data: &'a mut ItemLocalMap, . } . . impl<'a, V> LocalTableInContextMut<'a, V> { -- line 292 ---------------------------------------- -- line 507 ---------------------------------------- . pub treat_byte_string_as_slice: ItemLocalSet, . . /// Contains the data for evaluating the effect of feature `capture_disjoint_fields` . /// on closure size. . pub closure_size_eval: FxHashMap>, . } . . impl<'tcx> TypeckResults<'tcx> { 282 ( 0.00%) pub fn new(hir_owner: LocalDefId) -> TypeckResults<'tcx> { 3,337 ( 0.00%) TypeckResults { . hir_owner, . type_dependent_defs: Default::default(), . field_indices: Default::default(), . user_provided_types: Default::default(), . user_provided_sigs: Default::default(), . node_types: Default::default(), . node_substs: Default::default(), . adjustments: Default::default(), -- line 524 ---------------------------------------- -- line 532 ---------------------------------------- . tainted_by_errors: None, . concrete_opaque_types: Default::default(), . closure_min_captures: Default::default(), . closure_fake_reads: Default::default(), . generator_interior_types: ty::Binder::dummy(Default::default()), . treat_byte_string_as_slice: Default::default(), . closure_size_eval: Default::default(), . } 282 ( 0.00%) } . . /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node. 636 ( 0.00%) pub fn qpath_res(&self, qpath: &hir::QPath<'_>, id: hir::HirId) -> Res { 424 ( 0.00%) match *qpath { 1,008 ( 0.00%) hir::QPath::Resolved(_, ref path) => path.res, . hir::QPath::TypeRelative(..) | hir::QPath::LangItem(..) => self . .type_dependent_def(id) 264 ( 0.00%) .map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)), . } 1,060 ( 0.00%) } . 2 ( 0.00%) pub fn type_dependent_defs( . &self, . ) -> LocalTableInContext<'_, Result<(DefKind, DefId), ErrorReported>> { 44 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.type_dependent_defs } 2 ( 0.00%) } . 94 ( 0.00%) pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> { 219 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.type_dependent_defs.get(&id.local_id).cloned().and_then(|r| r.ok()) 188 ( 0.00%) } . 324 ( 0.00%) pub fn type_dependent_def_id(&self, id: HirId) -> Option { . self.type_dependent_def(id).map(|(_, def_id)| def_id) 324 ( 0.00%) } . 348 ( 0.00%) pub fn type_dependent_defs_mut( . &mut self, . 
) -> LocalTableInContextMut<'_, Result<(DefKind, DefId), ErrorReported>> { 348 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.type_dependent_defs } 348 ( 0.00%) } . . pub fn field_indices(&self) -> LocalTableInContext<'_, usize> { 26 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.field_indices } 4 ( 0.00%) } . . pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> { 42 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.field_indices } 21 ( 0.00%) } . . pub fn user_provided_types(&self) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> { 142 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.user_provided_types } 71 ( 0.00%) } . . pub fn user_provided_types_mut( . &mut self, . ) -> LocalTableInContextMut<'_, CanonicalUserType<'tcx>> { 4 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.user_provided_types } 2 ( 0.00%) } . . pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> { 646 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.node_types } 323 ( 0.00%) } . . pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> { 1,116 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_types } 558 ( 0.00%) } . 4,896 ( 0.00%) pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> { 816 ( 0.00%) self.node_type_opt(id).unwrap_or_else(|| { . bug!("node_type: no type for node `{}`", tls::with(|tcx| tcx.hir().node_to_string(id))) . }) 4,080 ( 0.00%) } . 1,039 ( 0.00%) pub fn node_type_opt(&self, id: hir::HirId) -> Option> { 1,039 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_types.get(&id.local_id).cloned() 2,078 ( 0.00%) } . . pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, SubstsRef<'tcx>> { 182 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_substs } 91 ( 0.00%) } . 363 ( 0.00%) pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> { 363 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| InternalSubsts::empty()) 726 ( 0.00%) } . 247 ( 0.00%) pub fn node_substs_opt(&self, id: hir::HirId) -> Option> { 247 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_substs.get(&id.local_id).cloned() 494 ( 0.00%) } . . // Returns the type of a pattern as a monotype. Like @expr_ty, this function . // doesn't provide type parameter substitutions. . pub fn pat_ty(&self, pat: &hir::Pat<'_>) -> Ty<'tcx> { 112 ( 0.00%) self.node_type(pat.hir_id) . } . . // Returns the type of an expression as a monotype. . // . // NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in . // some cases, we insert `Adjustment` annotations such as auto-deref or . // auto-ref. The type returned by this function does not consider such . // adjustments. See `expr_ty_adjusted()` instead. . // . // NB (2): This type doesn't provide type parameter substitutions; e.g., if you . // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize" . // instead of "fn(ty) -> T with T = isize". . pub fn expr_ty(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> { 1,023 ( 0.00%) self.node_type(expr.hir_id) . } . . pub fn expr_ty_opt(&self, expr: &hir::Expr<'_>) -> Option> { 854 ( 0.00%) self.node_type_opt(expr.hir_id) . } . . 
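Editorial note: the `expr_ty`/`expr_ty_adjusted` pair above distinguishes the pre-adjustment type of an expression from the target of its last recorded adjustment (auto-deref, auto-ref, and similar coercions). The toy model below captures just that lookup; `ExprRecord` and the example types are invented for illustration and do not come from the profiled source.

struct ExprRecord {
    // Type of the expression as written (cf. `expr_ty` / the node type).
    node_type: &'static str,
    // Target types of the implicit coercions applied afterwards, in order
    // (cf. the `adjustments` table).
    adjustment_targets: Vec<&'static str>,
}

impl ExprRecord {
    // Mirrors `expr_ty_adjusted`: the target of the last adjustment,
    // falling back to the node type when no adjustments were recorded.
    fn adjusted_type(&self) -> &'static str {
        self.adjustment_targets.last().copied().unwrap_or(self.node_type)
    }
}

fn main() {
    // Receiver of `s.len()` where `s: String`: auto-deref to `str`,
    // then auto-ref to `&str`, so it matches `str::len(&self)`.
    let receiver = ExprRecord {
        node_type: "String",
        adjustment_targets: vec!["str", "&str"],
    };
    assert_eq!(receiver.node_type, "String");     // pre-adjustment type
    assert_eq!(receiver.adjusted_type(), "&str"); // post-adjustment type

    // An expression with no adjustments keeps its node type.
    let literal = ExprRecord { node_type: "i32", adjustment_targets: vec![] };
    assert_eq!(literal.adjusted_type(), "i32");
}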
pub fn adjustments(&self) -> LocalTableInContext<'_, Vec>> { 456 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.adjustments } 228 ( 0.00%) } . . pub fn adjustments_mut( . &mut self, . ) -> LocalTableInContextMut<'_, Vec>> { 760 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.adjustments } 380 ( 0.00%) } . 402 ( 0.00%) pub fn expr_adjustments(&self, expr: &hir::Expr<'_>) -> &[ty::adjustment::Adjustment<'tcx>] { 804 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, expr.hir_id); . self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..]) 804 ( 0.00%) } . . /// Returns the type of `expr`, considering any `Adjustment` . /// entry recorded for that expression. 35 ( 0.00%) pub fn expr_ty_adjusted(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> { 7 ( 0.00%) self.expr_adjustments(expr).last().map_or_else(|| self.expr_ty(expr), |adj| adj.target) 8 ( 0.00%) } . 40 ( 0.00%) pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr<'_>) -> Option> { 8 ( 0.00%) self.expr_adjustments(expr).last().map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr)) 24 ( 0.00%) } . 42 ( 0.00%) pub fn is_method_call(&self, expr: &hir::Expr<'_>) -> bool { . // Only paths and method calls/overloaded operators have . // entries in type_dependent_defs, ignore the former here. 84 ( 0.00%) if let hir::ExprKind::Path(_) = expr.kind { . return false; . } . 108 ( 0.00%) matches!(self.type_dependent_defs().get(expr.hir_id), Some(Ok((DefKind::AssocFn, _)))) 84 ( 0.00%) } . 207 ( 0.00%) pub fn extract_binding_mode(&self, s: &Session, id: HirId, sp: Span) -> Option { . self.pat_binding_modes().get(id).copied().or_else(|| { . s.delay_span_bug(sp, "missing binding mode"); . None . }) 276 ( 0.00%) } . . pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> { 171 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_binding_modes } 51 ( 0.00%) } . . pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> { 92 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_binding_modes } 46 ( 0.00%) } . . pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec>> { 200 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_adjustments } 100 ( 0.00%) } . . pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { 62 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments } 31 ( 0.00%) } . . /// For a given closure, returns the iterator of `ty::CapturedPlace`s that are captured . /// by the closure. 40 ( 0.00%) pub fn closure_min_captures_flattened( . &self, . closure_def_id: DefId, . ) -> impl Iterator> { . self.closure_min_captures . .get(&closure_def_id) . .map(|closure_min_captures| closure_min_captures.values().flat_map(|v| v.iter())) . .into_iter() . .flatten() 50 ( 0.00%) } . . pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, HirPlace<'tcx>)> { 20 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.closure_kind_origins } 10 ( 0.00%) } . . pub fn closure_kind_origins_mut( . &mut self, . ) -> LocalTableInContextMut<'_, (Span, HirPlace<'tcx>)> { . LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.closure_kind_origins } . } . . pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> { 60 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.liberated_fn_sigs } 30 ( 0.00%) } . . 
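Editorial note: the accessor methods above all wrap their maps in `LocalTableInContext`, whose `get` first checks that the queried `HirId` belongs to the owner the table was built for (see `validate_hir_id_for_typeck_results` earlier). Storing only the item-local part of the ID keeps the maps compact but makes that check essential. A self-contained illustration of the pattern, with simplified `HirId`, `Owner`, and `LocalTable` stand-ins rather than the rustc types:

use std::collections::HashMap;

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct Owner(u32);

#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
struct HirId {
    owner: Owner,
    local_id: u32,
}

struct LocalTable<V> {
    owner: Owner,
    // Data is stored only under the local part of the ID.
    data: HashMap<u32, V>,
}

impl<V> LocalTable<V> {
    fn get(&self, id: HirId) -> Option<&V> {
        // Using an ID from another owner would silently hit the wrong entry,
        // so the mismatch is turned into a hard error instead.
        assert_eq!(
            id.owner, self.owner,
            "HirId from a different owner used with this table"
        );
        self.data.get(&id.local_id)
    }
}

fn main() {
    let mut data = HashMap::new();
    data.insert(3, "u32");
    let table = LocalTable { owner: Owner(7), data };

    let ok = HirId { owner: Owner(7), local_id: 3 };
    assert_eq!(table.get(ok), Some(&"u32"));

    // A lookup with an ID from another owner would panic here:
    // let bad = HirId { owner: Owner(9), local_id: 3 };
    // table.get(bad);
}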
pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> { 40 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.liberated_fn_sigs } 20 ( 0.00%) } . . pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec>> { 20 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.fru_field_types } 10 ( 0.00%) } . . pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { . LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.fru_field_types } . } . 3 ( 0.00%) pub fn is_coercion_cast(&self, hir_id: hir::HirId) -> bool { 1 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, hir_id); . self.coercion_casts.contains(&hir_id.local_id) 2 ( 0.00%) } . . pub fn set_coercion_cast(&mut self, id: ItemLocalId) { . self.coercion_casts.insert(id); . } . . pub fn coercion_casts(&self) -> &ItemLocalSet { 10 ( 0.00%) &self.coercion_casts 10 ( 0.00%) } . } . . impl<'a, 'tcx> HashStable> for TypeckResults<'tcx> { . fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { . let ty::TypeckResults { . hir_owner, . ref type_dependent_defs, . ref field_indices, -- line 764 ---------------------------------------- -- line 819 ---------------------------------------- . const START_INDEX = 0, . } . } . . /// Mapping of type annotation indices to canonical user type annotations. . pub type CanonicalUserTypeAnnotations<'tcx> = . IndexVec>; . 2 ( 0.00%) #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)] . pub struct CanonicalUserTypeAnnotation<'tcx> { . pub user_ty: CanonicalUserType<'tcx>, . pub span: Span, 1 ( 0.00%) pub inferred_ty: Ty<'tcx>, . } . . /// Canonicalized user type annotation. . pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>; . . impl<'tcx> CanonicalUserType<'tcx> { . /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`, . /// i.e., each thing is mapped to a canonical variable with the same index. 17 ( 0.00%) pub fn is_identity(&self) -> bool { 34 ( 0.00%) match self.value { . UserType::Ty(_) => false, . UserType::TypeOf(_, user_substs) => { 17 ( 0.00%) if user_substs.user_self_ty.is_some() { . return false; . } . . iter::zip(user_substs.substs, BoundVar::new(0)..).all(|(kind, cvar)| { . match kind.unpack() { 36 ( 0.00%) GenericArgKind::Type(ty) => match ty.kind() { . ty::Bound(debruijn, b) => { . // We only allow a `ty::INNERMOST` index in substitutions. 18 ( 0.00%) assert_eq!(*debruijn, ty::INNERMOST); . cvar == b.var . } . _ => false, . }, . 2 ( 0.00%) GenericArgKind::Lifetime(r) => match r { . ty::ReLateBound(debruijn, br) => { . // We only allow a `ty::INNERMOST` index in substitutions. . assert_eq!(*debruijn, ty::INNERMOST); . cvar == br.var . } . _ => false, . }, . -- line 867 ---------------------------------------- -- line 872 ---------------------------------------- . cvar == b . } . _ => false, . }, . } . }) . } . } 34 ( 0.00%) } . } . . /// A user-given type annotation attached to a constant. These arise . /// from constants that are named via paths, like `Foo::::new` and . /// so forth. 6 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable)] 165 ( 0.00%) #[derive(HashStable, TypeFoldable, Lift)] . pub enum UserType<'tcx> { . Ty(Ty<'tcx>), . . /// The canonical type is the result of `type_of(def_id)` with the . /// given substitutions applied. . TypeOf(DefId, UserSubsts<'tcx>), . } . . impl<'tcx> CommonTypes<'tcx> { . 
fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { 111 ( 0.00%) let mk = |ty| interners.intern_ty(ty); . . CommonTypes { . unit: mk(Tuple(List::empty())), . bool: mk(Bool), . char: mk(Char), . never: mk(Never), . isize: mk(Int(ty::IntTy::Isize)), . i8: mk(Int(ty::IntTy::I8)), -- line 906 ---------------------------------------- -- line 921 ---------------------------------------- . . trait_object_dummy_self: mk(Infer(ty::FreshTy(0))), . } . } . } . . impl<'tcx> CommonLifetimes<'tcx> { . fn new(interners: &CtxtInterners<'tcx>) -> CommonLifetimes<'tcx> { 67 ( 0.00%) let mk = |r| interners.region.intern(r, |r| Interned(interners.arena.alloc(r))).0; . . CommonLifetimes { 5 ( 0.00%) re_root_empty: mk(RegionKind::ReEmpty(ty::UniverseIndex::ROOT)), 5 ( 0.00%) re_static: mk(RegionKind::ReStatic), 5 ( 0.00%) re_erased: mk(RegionKind::ReErased), . } . } . } . . impl<'tcx> CommonConsts<'tcx> { . fn new(interners: &CtxtInterners<'tcx>, types: &CommonTypes<'tcx>) -> CommonConsts<'tcx> { 6 ( 0.00%) let mk_const = |c| interners.const_.intern(c, |c| Interned(interners.arena.alloc(c))).0; . . CommonConsts { . unit: mk_const(ty::Const { . val: ty::ConstKind::Value(ConstValue::Scalar(Scalar::ZST)), . ty: types.unit, . }), . } . } -- line 949 ---------------------------------------- -- line 1045 ---------------------------------------- . . /// Stores memory for globals (statics/consts). . pub(crate) alloc_map: Lock>, . . output_filenames: Arc, . } . . impl<'tcx> TyCtxt<'tcx> { 400 ( 0.00%) pub fn typeck_opt_const_arg( . self, . def: ty::WithOptConstParam, . ) -> &'tcx TypeckResults<'tcx> { 160 ( 0.00%) if let Some(param_did) = def.const_param_did { . self.typeck_const_arg((def.did, param_did)) . } else { . self.typeck(def.did) . } 320 ( 0.00%) } . 30 ( 0.00%) pub fn alloc_steal_thir(self, thir: Thir<'tcx>) -> &'tcx Steal> { 10 ( 0.00%) self.arena.alloc(Steal::new(thir)) 40 ( 0.00%) } . 150 ( 0.00%) pub fn alloc_steal_mir(self, mir: Body<'tcx>) -> &'tcx Steal> { 30 ( 0.00%) self.arena.alloc(Steal::new(mir)) 210 ( 0.00%) } . 40 ( 0.00%) pub fn alloc_steal_promoted( . self, . promoted: IndexVec>, . ) -> &'tcx Steal>> { 10 ( 0.00%) self.arena.alloc(Steal::new(promoted)) 50 ( 0.00%) } . 171 ( 0.00%) pub fn alloc_adt_def( . self, . did: DefId, . kind: AdtKind, . variants: IndexVec, . repr: ReprOptions, . ) -> &'tcx ty::AdtDef { 304 ( 0.00%) self.intern_adt_def(ty::AdtDef::new(self, did, kind, variants, repr)) 76 ( 0.00%) } . . /// Allocates a read-only byte or string literal for `mir::interpret`. 10 ( 0.00%) pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId { . // Create an allocation that just contains these bytes. 2 ( 0.00%) let alloc = interpret::Allocation::from_bytes_byte_aligned_immutable(bytes); 26 ( 0.00%) let alloc = self.intern_const_alloc(alloc); . self.create_memory_alloc(alloc) 10 ( 0.00%) } . . /// Returns a range of the start/end indices specified with the . /// `rustc_layout_scalar_valid_range` attribute. . // FIXME(eddyb) this is an awkward spot for this method, maybe move it? 80 ( 0.00%) pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound, Bound) { 40 ( 0.00%) let attrs = self.get_attrs(def_id); 170 ( 0.00%) let get = |name| { 40 ( 0.00%) let attr = match attrs.iter().find(|a| a.has_name(name)) { . Some(attr) => attr, . None => return Bound::Unbounded, . }; . debug!("layout_scalar_valid_range: attr={:?}", attr); . if let Some( . &[ . ast::NestedMetaItem::Literal(ast::Lit { . kind: ast::LitKind::Int(a, _), .. 
-- line 1111 ---------------------------------------- -- line 1114 ---------------------------------------- . ) = attr.meta_item_list().as_deref() . { . Bound::Included(a) . } else { . self.sess . .delay_span_bug(attr.span, "invalid rustc_layout_scalar_valid_range attribute"); . Bound::Unbounded . } 200 ( 0.00%) }; 60 ( 0.00%) ( 60 ( 0.00%) get(sym::rustc_layout_scalar_valid_range_start), 30 ( 0.00%) get(sym::rustc_layout_scalar_valid_range_end), . ) 80 ( 0.00%) } . . pub fn lift>(self, value: T) -> Option { . value.lift_to_tcx(self) . } . . /// Creates a type context and call the closure with a `TyCtxt` reference . /// to the context. The closure enforces that the type context and any interned . /// value (types, substs, etc.) can only be used while `ty::tls` has a valid . /// reference to the context, to allow formatting values that need it. 23 ( 0.00%) pub fn create_global_ctxt( . s: &'tcx Session, . lint_store: Lrc, . arena: &'tcx WorkerLocal>, . resolutions: ty::ResolverOutputs, . krate: &'tcx hir::Crate<'tcx>, . dep_graph: DepGraph, . on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>, . queries: &'tcx dyn query::QueryEngine<'tcx>, . query_kinds: &'tcx [DepKindStruct], . crate_name: &str, . output_filenames: OutputFilenames, . ) -> GlobalCtxt<'tcx> { 1 ( 0.00%) let data_layout = TargetDataLayout::parse(&s.target).unwrap_or_else(|err| { . s.fatal(&err); . }); . let interners = CtxtInterners::new(arena); . let common_types = CommonTypes::new(&interners); . let common_lifetimes = CommonLifetimes::new(&interners); . let common_consts = CommonConsts::new(&interners, &common_types); . 127 ( 0.00%) GlobalCtxt { . sess: s, 6 ( 0.00%) lint_store, . arena, 2 ( 0.00%) interners, 5 ( 0.00%) dep_graph, 4 ( 0.00%) untracked_resolutions: resolutions, 2 ( 0.00%) prof: s.prof.clone(), . types: common_types, . lifetimes: common_lifetimes, . consts: common_consts, . untracked_crate: krate, . on_disk_cache, . queries, . query_caches: query::QueryCaches::default(), . query_kinds, . ty_rcache: Default::default(), . pred_rcache: Default::default(), . selection_cache: Default::default(), . evaluation_cache: Default::default(), 1 ( 0.00%) crate_name: Symbol::intern(crate_name), . data_layout, . alloc_map: Lock::new(interpret::AllocMap::new()), . output_filenames: Arc::new(output_filenames), . } 9 ( 0.00%) } . . crate fn query_kind(self, k: DepKind) -> &'tcx DepKindStruct { . &self.query_kinds[k as usize] . } . . /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used. . #[track_caller] . pub fn ty_error(self) -> Ty<'tcx> { -- line 1191 ---------------------------------------- -- line 1217 ---------------------------------------- . ty: Ty<'tcx>, . span: S, . msg: &str, . ) -> &'tcx Const<'tcx> { . self.sess.delay_span_bug(span, msg); . self.mk_const(ty::Const { val: ty::ConstKind::Error(DelaySpanBugEmitted(())), ty }) . } . 7 ( 0.00%) pub fn consider_optimizing String>(self, msg: T) -> bool { 1 ( 0.00%) let cname = self.crate_name(LOCAL_CRATE); 8 ( 0.00%) self.sess.consider_optimizing(cname.as_str(), msg) 8 ( 0.00%) } . . /// Obtain all lang items of this crate and all dependencies (recursively) 19,495 ( 0.02%) pub fn lang_items(self) -> &'tcx rustc_hir::lang_items::LanguageItems { . self.get_lang_items(()) 25,065 ( 0.02%) } . . /// Obtain the given diagnostic item's `DefId`. Use `is_diagnostic_item` if you just want to . /// compare against another `DefId`, since `is_diagnostic_item` is cheaper. . pub fn get_diagnostic_item(self, name: Symbol) -> Option { . 
self.all_diagnostic_items(()).name_to_id.get(&name).copied() . } . . /// Obtain the diagnostic item's name 322 ( 0.00%) pub fn get_diagnostic_name(self, id: DefId) -> Option { . self.diagnostic_items(id.krate).id_to_name.get(&id).copied() 368 ( 0.00%) } . . /// Check whether the diagnostic item with the given `name` has the given `DefId`. 882 ( 0.00%) pub fn is_diagnostic_item(self, name: Symbol, did: DefId) -> bool { . self.diagnostic_items(did.krate).name_to_id.get(&name) == Some(&did) 1,008 ( 0.00%) } . 224 ( 0.00%) pub fn stability(self) -> &'tcx stability::Index<'tcx> { . self.stability_index(()) 288 ( 0.00%) } . 5,474 ( 0.01%) pub fn features(self) -> &'tcx rustc_feature::Features { . self.features_query(()) 6,256 ( 0.01%) } . . pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey { . // Accessing the DefKey is ok, since it is part of DefPathHash. 340 ( 0.00%) if let Some(id) = id.as_local() { . self.untracked_resolutions.definitions.def_key(id) . } else { 161 ( 0.00%) self.untracked_resolutions.cstore.def_key(id) . } . } . . /// Converts a `DefId` into its fully expanded `DefPath` (every . /// `DefId` is really just an interned `DefPath`). . /// . /// Note that if `id` is not local to this crate, the result will . /// be a non-local `DefPath`. -- line 1272 ---------------------------------------- -- line 1277 ---------------------------------------- . } else { . self.untracked_resolutions.cstore.def_path(id) . } . } . . #[inline] . pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash { . // Accessing the DefPathHash is ok, it is incr. comp. stable. 137 ( 0.00%) if let Some(def_id) = def_id.as_local() { . self.untracked_resolutions.definitions.def_path_hash(def_id) . } else { 20 ( 0.00%) self.untracked_resolutions.cstore.def_path_hash(def_id) . } . } . . #[inline] . pub fn stable_crate_id(self, crate_num: CrateNum) -> StableCrateId { . if crate_num == LOCAL_CRATE { . self.sess.local_stable_crate_id() . } else { -- line 1296 ---------------------------------------- -- line 1353 ---------------------------------------- . &(format!("{:08x}", stable_crate_id.to_u64()))[..4], . self.def_path(def_id).to_string_no_crate_verbose() . ) . } . . /// Note that this is *untracked* and should only be used within the query . /// system if the result is otherwise tracked through queries . pub fn cstore_untracked(self) -> &'tcx ty::CrateStoreDyn { 6,246 ( 0.01%) &*self.untracked_resolutions.cstore 3,123 ( 0.00%) } . . /// Note that this is *untracked* and should only be used within the query . /// system if the result is otherwise tracked through queries . pub fn definitions_untracked(self) -> &'tcx hir::definitions::Definitions { . &self.untracked_resolutions.definitions . } . . #[inline(always)] . pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> { 1 ( 0.00%) let resolutions = &self.gcx.untracked_resolutions; 4 ( 0.00%) StableHashingContext::new(self.sess, &resolutions.definitions, &*resolutions.cstore) . } . . #[inline(always)] . pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> { . let resolutions = &self.gcx.untracked_resolutions; . StableHashingContext::ignore_spans( . self.sess, . &resolutions.definitions, -- line 1381 ---------------------------------------- -- line 1390 ---------------------------------------- . /// If `true`, we should use the MIR-based borrowck, but also . /// fall back on the AST borrowck if the MIR-based one errors. . pub fn migrate_borrowck(self) -> bool { . 
self.borrowck_mode().migrate() . } . . /// What mode(s) of borrowck should we run? AST? MIR? both? . /// (Also considers the `#![feature(nll)]` setting.) 20 ( 0.00%) pub fn borrowck_mode(self) -> BorrowckMode { . // Here are the main constraints we need to deal with: . // . // 1. An opts.borrowck_mode of `BorrowckMode::Migrate` is . // synonymous with no `-Z borrowck=...` flag at all. . // . // 2. We want to allow developers on the Nightly channel . // to opt back into the "hard error" mode for NLL, . // (which they can do via specifying `#![feature(nll)]` -- line 1406 ---------------------------------------- -- line 1413 ---------------------------------------- . // errors. (To simplify the code here, it now even overrides . // a user's attempt to specify `-Z borrowck=compare`, which . // we arguably do not need anymore and should remove.) . // . // * Otherwise, if no `-Z borrowck=...` then use migrate mode . // . // * Otherwise, use the behavior requested via `-Z borrowck=...` . 30 ( 0.00%) if self.features().nll { . return BorrowckMode::Mir; . } . 10 ( 0.00%) self.sess.opts.borrowck_mode 40 ( 0.00%) } . . /// If `true`, we should use lazy normalization for constants, otherwise . /// we still evaluate them eagerly. . #[inline] . pub fn lazy_normalization(self) -> bool { . let features = self.features(); . // Note: We only use lazy normalization for generic const expressions. . features.generic_const_exprs -- line 1434 ---------------------------------------- -- line 1554 ---------------------------------------- . self.mk_imm_ref( . self.lifetimes.re_static, . self.type_of(self.require_lang_item(LangItem::PanicLocation, None)) . .subst(self, self.mk_substs([self.lifetimes.re_static.into()].iter())), . ) . } . . /// Returns a displayable description and article for the given `def_id` (e.g. `("a", "struct")`). 88 ( 0.00%) pub fn article_and_description(self, def_id: DefId) -> (&'static str, &'static str) { 64 ( 0.00%) match self.def_kind(def_id) { . DefKind::Generator => match self.generator_kind(def_id).unwrap() { . rustc_hir::GeneratorKind::Async(..) => ("an", "async closure"), . rustc_hir::GeneratorKind::Gen => ("a", "generator"), . }, 144 ( 0.00%) def_kind => (def_kind.article(), def_kind.descr(def_id)), . } 72 ( 0.00%) } . . pub fn type_length_limit(self) -> Limit { . self.limits(()).type_length_limit . } . 7,588 ( 0.01%) pub fn recursion_limit(self) -> Limit { . self.limits(()).recursion_limit 8,672 ( 0.01%) } . . pub fn move_size_limit(self) -> Limit { . self.limits(()).move_size_limit . } . . pub fn const_eval_limit(self) -> Limit { . self.limits(()).const_eval_limit . } -- line 1586 ---------------------------------------- -- line 1684 ---------------------------------------- . /// This is the implicit state of rustc. It contains the current . /// `TyCtxt` and query. It is updated when creating a local interner or . /// executing a new query. Whenever there's a `TyCtxt` value available . /// you should also have access to an `ImplicitCtxt` through the functions . /// in this module. . #[derive(Clone)] . pub struct ImplicitCtxt<'a, 'tcx> { . /// The current `TyCtxt`. 1 ( 0.00%) pub tcx: TyCtxt<'tcx>, . . /// The current query job, if any. This is updated by `JobOwner::start` in . /// `ty::query::plumbing` when executing a query. . pub query: Option>, . . /// Where to store diagnostics for the current query job, if any. . /// This is updated by `JobOwner::start` in `ty::query::plumbing` when executing a query. . 
pub diagnostics: Option<&'a Lock>>, -- line 1700 ---------------------------------------- -- line 1703 ---------------------------------------- . pub layout_depth: usize, . . /// The current dep graph task. This is used to add dependencies to queries . /// when executing them. . pub task_deps: TaskDepsRef<'a>, . } . . impl<'a, 'tcx> ImplicitCtxt<'a, 'tcx> { 5 ( 0.00%) pub fn new(gcx: &'tcx GlobalCtxt<'tcx>) -> Self { . let tcx = TyCtxt { gcx }; 25 ( 0.00%) ImplicitCtxt { . tcx, . query: None, . diagnostics: None, . layout_depth: 0, . task_deps: TaskDepsRef::Ignore, . } 5 ( 0.00%) } . } . . /// Sets Rayon's thread-local variable, which is preserved for Rayon jobs . /// to `value` during the call to `f`. It is restored to its previous value after. . /// This is used to set the pointer to the new `ImplicitCtxt`. . #[cfg(parallel_compiler)] . #[inline] . fn set_tlv R, R>(value: usize, f: F) -> R { -- line 1728 ---------------------------------------- -- line 1745 ---------------------------------------- . . /// Sets TLV to `value` during the call to `f`. . /// It is restored to its previous value after. . /// This is used to set the pointer to the new `ImplicitCtxt`. . #[cfg(not(parallel_compiler))] . #[inline] . fn set_tlv R, R>(value: usize, f: F) -> R { . let old = get_tlv(); 4,956 ( 0.00%) let _reset = rustc_data_structures::OnDrop(move || TLV.with(|tlv| tlv.set(old))); . TLV.with(|tlv| tlv.set(value)); . f() . } . . /// Gets the pointer to the current `ImplicitCtxt`. . #[cfg(not(parallel_compiler))] . #[inline] . fn get_tlv() -> usize { -- line 1761 ---------------------------------------- -- line 1763 ---------------------------------------- . } . . /// Sets `context` as the new current `ImplicitCtxt` for the duration of the function `f`. . #[inline] . pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R . where . F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, . { 1 ( 0.00%) set_tlv(context as *const _ as usize, || f(&context)) . } . . /// Allows access to the current `ImplicitCtxt` in a closure if one is available. . #[inline] . pub fn with_context_opt(f: F) -> R . where . F: for<'a, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'tcx>>) -> R, . { . let context = get_tlv(); 20,431 ( 0.02%) if context == 0 { . f(None) . } else { . // We could get an `ImplicitCtxt` pointer from another thread. . // Ensure that `ImplicitCtxt` is `Sync`. . sync::assert_sync::>(); . . unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) } . } -- line 1789 ---------------------------------------- -- line 1805 ---------------------------------------- . /// This will panic if you pass it a `TyCtxt` which is different from the current . /// `ImplicitCtxt`'s `tcx` field. . #[inline] . pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R . where . F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R, . { . with_context(|context| unsafe { 10,107 ( 0.01%) assert!(ptr_eq(context.tcx.gcx, tcx.gcx)); . let context: &ImplicitCtxt<'_, '_> = mem::transmute(context); 10,104 ( 0.01%) f(context) . }) . } . . /// Allows access to the `TyCtxt` in the current `ImplicitCtxt`. . /// Panics if there is no `ImplicitCtxt` available. . #[inline] . pub fn with(f: F) -> R . where -- line 1823 ---------------------------------------- -- line 1981 ---------------------------------------- . fn into_pointer(&self) -> *const () { . self.0 as *const _ as *const () . } . } . . #[allow(rustc::usage_of_ty_tykind)] . impl<'tcx> Borrow> for Interned<'tcx, TyS<'tcx>> { . 
fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> { 5,770 ( 0.01%) &self.0.kind() . } . } . . impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { . fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0.kind() == other.0.kind() -- line 1997 ---------------------------------------- -- line 1998 ---------------------------------------- . } . } . . impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {} . . impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 10,740 ( 0.01%) self.0.kind().hash(s) . } . } . . impl<'tcx> Borrow>> for Interned<'tcx, PredicateInner<'tcx>> { . fn borrow<'a>(&'a self) -> &'a Binder<'tcx, PredicateKind<'tcx>> { 1,543 ( 0.00%) &self.0.kind . } . } . . impl<'tcx> PartialEq for Interned<'tcx, PredicateInner<'tcx>> { . fn eq(&self, other: &Interned<'tcx, PredicateInner<'tcx>>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0.kind == other.0.kind -- line 2020 ---------------------------------------- -- line 2021 ---------------------------------------- . } . } . . impl<'tcx> Eq for Interned<'tcx, PredicateInner<'tcx>> {} . . impl<'tcx> Hash for Interned<'tcx, PredicateInner<'tcx>> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 1,788 ( 0.00%) self.0.kind.hash(s) . } . } . . impl<'tcx, T> Borrow<[T]> for Interned<'tcx, List> { . fn borrow<'a>(&'a self) -> &'a [T] { 7,454 ( 0.01%) &self.0[..] . } . } . . impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List> { . fn eq(&self, other: &Interned<'tcx, List>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0[..] == other.0[..] -- line 2043 ---------------------------------------- -- line 2044 ---------------------------------------- . } . } . . impl<'tcx, T: Eq> Eq for Interned<'tcx, List> {} . . impl<'tcx, T: Hash> Hash for Interned<'tcx, List> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 2,318 ( 0.00%) self.0[..].hash(s) . } . } . . macro_rules! direct_interners { . ($($name:ident: $method:ident($ty:ty),)+) => { . $(impl<'tcx> Borrow<$ty> for Interned<'tcx, $ty> { . fn borrow<'a>(&'a self) -> &'a $ty { 2,718 ( 0.00%) &self.0 . } . } . . impl<'tcx> PartialEq for Interned<'tcx, $ty> { . fn eq(&self, other: &Self) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` . // equals `x == y`. . self.0 == other.0 -- line 2068 ---------------------------------------- -- line 2070 ---------------------------------------- . } . . impl<'tcx> Eq for Interned<'tcx, $ty> {} . . impl<'tcx> Hash for Interned<'tcx, $ty> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == . // x.hash(s)`. 3,051 ( 0.00%) self.0.hash(s) . } . } . . impl<'tcx> TyCtxt<'tcx> { 25,620 ( 0.03%) pub fn $method(self, v: $ty) -> &'tcx $ty { 12,875 ( 0.01%) self.interners.$name.intern(v, |v| { 556 ( 0.00%) Interned(self.interners.arena.alloc(v)) . }).0 28,683 ( 0.03%) } . })+ . } . } . . direct_interners! { . region: mk_region(RegionKind), . const_: mk_const(Const<'tcx>), . const_allocation: intern_const_alloc(Allocation), -- line 2095 ---------------------------------------- -- line 2097 ---------------------------------------- . adt_def: intern_adt_def(AdtDef), . 
stability: intern_stability(attr::Stability), . const_stability: intern_const_stability(attr::ConstStability), . } . . macro_rules! slice_interners { . ($($field:ident: $method:ident($ty:ty)),+ $(,)?) => ( . impl<'tcx> TyCtxt<'tcx> { 93,585 ( 0.09%) $(pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> { . self.interners.$field.intern_ref(v, || { . Interned(List::from_arena(&*self.arena, v)) . }).0 83,826 ( 0.08%) })+ . } . ); . } . . slice_interners!( . type_list: _intern_type_list(Ty<'tcx>), . substs: _intern_substs(GenericArg<'tcx>), . canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo<'tcx>), -- line 2117 ---------------------------------------- -- line 2129 ---------------------------------------- . /// unsafe. . pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> { . assert_eq!(sig.unsafety(), hir::Unsafety::Normal); . self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { unsafety: hir::Unsafety::Unsafe, ..sig })) . } . . /// Given the def_id of a Trait `trait_def_id` and the name of an associated item `assoc_name` . /// returns true if the `trait_def_id` defines an associated item of name `assoc_name`. 24 ( 0.00%) pub fn trait_may_define_assoc_type(self, trait_def_id: DefId, assoc_name: Ident) -> bool { . self.super_traits_of(trait_def_id).any(|trait_did| { . self.associated_items(trait_did) . .find_by_name_and_kind(self, assoc_name, ty::AssocKind::Type, trait_did) . .is_some() . }) 18 ( 0.00%) } . . /// Computes the def-ids of the transitive supertraits of `trait_def_id`. This (intentionally) . /// does not compute the full elaborated super-predicates but just the set of def-ids. It is used . /// to identify which traits may define a given associated type to help avoid cycle errors. . /// Returns a `DefId` iterator. . fn super_traits_of(self, trait_def_id: DefId) -> impl Iterator + 'tcx { . let mut set = FxHashSet::default(); 4 ( 0.00%) let mut stack = vec![trait_def_id]; . . set.insert(trait_def_id); . 18 ( 0.00%) iter::from_fn(move || -> Option { 4 ( 0.00%) let trait_did = stack.pop()?; 2 ( 0.00%) let generic_predicates = self.super_predicates_of(trait_did); . . for (predicate, _) in generic_predicates.predicates { . if let ty::PredicateKind::Trait(data) = predicate.kind().skip_binder() { . if set.insert(data.def_id()) { . stack.push(data.def_id()); . } . } . } -- line 2165 ---------------------------------------- -- line 2188 ---------------------------------------- . self.mk_fn_sig(params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust) . }) . } . . /// Same a `self.mk_region(kind)`, but avoids accessing the interners if . /// `*r == kind`. . #[inline] . pub fn reuse_or_mk_region(self, r: Region<'tcx>, kind: RegionKind) -> Region<'tcx> { 4,124 ( 0.00%) if *r == kind { r } else { self.mk_region(kind) } . } . . #[allow(rustc::usage_of_ty_tykind)] . #[inline] . pub fn mk_ty(self, st: TyKind<'tcx>) -> Ty<'tcx> { 53,511 ( 0.05%) self.interners.intern_ty(st) . } . . #[inline] . pub fn mk_predicate(self, binder: Binder<'tcx, PredicateKind<'tcx>>) -> Predicate<'tcx> { 27,166 ( 0.03%) let inner = self.interners.intern_predicate(binder); . Predicate { inner } . } . . #[inline] 19,288 ( 0.02%) pub fn reuse_or_mk_predicate( . self, . pred: Predicate<'tcx>, . binder: Binder<'tcx, PredicateKind<'tcx>>, . ) -> Predicate<'tcx> { 952 ( 0.00%) if pred.kind() != binder { self.mk_predicate(binder) } else { pred } 19,288 ( 0.02%) } . . pub fn mk_mach_int(self, tm: IntTy) -> Ty<'tcx> { . match tm { . IntTy::Isize => self.types.isize, . IntTy::I8 => self.types.i8, . 
IntTy::I16 => self.types.i16, . IntTy::I32 => self.types.i32, . IntTy::I64 => self.types.i64, . IntTy::I128 => self.types.i128, . } . } . . pub fn mk_mach_uint(self, tm: UintTy) -> Ty<'tcx> { 43 ( 0.00%) match tm { . UintTy::Usize => self.types.usize, . UintTy::U8 => self.types.u8, . UintTy::U16 => self.types.u16, . UintTy::U32 => self.types.u32, . UintTy::U64 => self.types.u64, . UintTy::U128 => self.types.u128, . } 43 ( 0.00%) } . . pub fn mk_mach_float(self, tm: FloatTy) -> Ty<'tcx> { . match tm { . FloatTy::F32 => self.types.f32, . FloatTy::F64 => self.types.f64, . } . } . -- line 2248 ---------------------------------------- -- line 2329 ---------------------------------------- . } . . #[inline] . pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { . self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Not }) . } . . #[inline] 21 ( 0.00%) pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> { . self.mk_ty(Array(ty, ty::Const::from_usize(self, n))) 24 ( 0.00%) } . . #[inline] . pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> { . self.mk_ty(Slice(ty)) . } . . #[inline] . pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> { . let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect(); 195 ( 0.00%) self.mk_ty(Tuple(self.intern_substs(&kinds))) . } . . pub fn mk_tup], Ty<'tcx>>>(self, iter: I) -> I::Output { 810 ( 0.00%) iter.intern_with(|ts| { 186 ( 0.00%) let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect(); 520 ( 0.00%) self.mk_ty(Tuple(self.intern_substs(&kinds))) 729 ( 0.00%) }) . } . . #[inline] . pub fn mk_unit(self) -> Ty<'tcx> { 16 ( 0.00%) self.types.unit . } . . #[inline] . pub fn mk_diverging_default(self) -> Ty<'tcx> { . if self.features().never_type_fallback { self.types.never } else { self.types.unit } . } . . #[inline] . pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { . self.mk_ty(FnDef(def_id, substs)) . } . . #[inline] . pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> { 104 ( 0.00%) self.mk_ty(FnPtr(fty)) . } . . #[inline] . pub fn mk_dynamic( . self, . obj: &'tcx List>>, . reg: ty::Region<'tcx>, . ) -> Ty<'tcx> { -- line 2384 ---------------------------------------- -- line 2445 ---------------------------------------- . self.mk_ty(Param(ParamTy { index, name })) . } . . #[inline] . pub fn mk_const_param(self, index: u32, name: Symbol, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { . self.mk_const(ty::Const { val: ty::ConstKind::Param(ParamConst { index, name }), ty }) . } . 364 ( 0.00%) pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> { 224 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => { 108 ( 0.00%) self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into() . } 68 ( 0.00%) GenericParamDefKind::Type { .. } => self.mk_ty_param(param.index, param.name).into(), . GenericParamDefKind::Const { .. } => { . self.mk_const_param(param.index, param.name, self.type_of(param.def_id)).into() . } . } 364 ( 0.00%) } . . #[inline] . pub fn mk_opaque(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { . self.mk_ty(Opaque(def_id, substs)) . } . . pub fn mk_place_field(self, place: Place<'tcx>, f: Field, ty: Ty<'tcx>) -> Place<'tcx> { . self.mk_place_elem(place, PlaceElem::Field(f, ty)) -- line 2471 ---------------------------------------- -- line 2521 ---------------------------------------- . ); . self._intern_poly_existential_predicates(eps) . } . . pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List> { . 
// FIXME consider asking the input slice to be sorted to avoid . // re-interning permutations, in which case that would be asserted . // here. 289 ( 0.00%) if preds.is_empty() { . // The macro-generated method below asserts we don't intern an empty slice. . List::empty() . } else { 157 ( 0.00%) self._intern_predicates(preds) . } 264 ( 0.00%) } . . pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List> { 1,078 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_type_list(ts) } 40 ( 0.00%) } . . pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List> { 15,954 ( 0.02%) if ts.is_empty() { List::empty() } else { self._intern_substs(ts) } 468 ( 0.00%) } . . pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List { . if ps.is_empty() { List::empty() } else { self._intern_projs(ps) } . } . . pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List> { 128 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_place_elems(ts) } 68 ( 0.00%) } . . pub fn intern_canonical_var_infos( . self, . ts: &[CanonicalVarInfo<'tcx>], . ) -> CanonicalVarInfos<'tcx> { 799 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_canonical_var_infos(ts) } 6 ( 0.00%) } . . pub fn intern_bound_variable_kinds( . self, . ts: &[ty::BoundVariableKind], . ) -> &'tcx List { 404 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_bound_variable_kinds(ts) } 574 ( 0.00%) } . . pub fn mk_fn_sig( . self, . inputs: I, . output: I::Item, . c_variadic: bool, . unsafety: hir::Unsafety, . abi: abi::Abi, . ) -> , ty::FnSig<'tcx>>>::Output . where . I: Iterator, ty::FnSig<'tcx>>>, . { . inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig { 125 ( 0.00%) inputs_and_output: self.intern_type_list(xs), 17 ( 0.00%) c_variadic, 34 ( 0.00%) unsafety, . abi, . }) . } . . pub fn mk_poly_existential_predicates< . I: InternAs< . [ty::Binder<'tcx, ExistentialPredicate<'tcx>>], . &'tcx List>>, -- line 2589 ---------------------------------------- -- line 2594 ---------------------------------------- . ) -> I::Output { . iter.intern_with(|xs| self.intern_poly_existential_predicates(xs)) . } . . pub fn mk_predicates], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { 4 ( 0.00%) iter.intern_with(|xs| self.intern_predicates(xs)) . } . . pub fn mk_type_list], &'tcx List>>>(self, iter: I) -> I::Output { 332 ( 0.00%) iter.intern_with(|xs| self.intern_type_list(xs)) . } . . pub fn mk_substs], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { 15,164 ( 0.02%) iter.intern_with(|xs| self.intern_substs(xs)) . } . . pub fn mk_place_elems], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { . iter.intern_with(|xs| self.intern_place_elems(xs)) . } . 457 ( 0.00%) pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> { . self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned())) 914 ( 0.00%) } . . pub fn mk_bound_variable_kinds< . I: InternAs<[ty::BoundVariableKind], &'tcx List>, . >( . self, . iter: I, . ) -> I::Output { 2,321 ( 0.00%) iter.intern_with(|xs| self.intern_bound_variable_kinds(xs)) . } . . /// Walks upwards from `id` to find a node which might change lint levels with attributes. . /// It stops at `bound` and just returns it if reached. 2,870 ( 0.00%) pub fn maybe_lint_level_root_bounded(self, mut id: HirId, bound: HirId) -> HirId { 2,870 ( 0.00%) let hir = self.hir(); . loop { 6,942 ( 0.01%) if id == bound { . return bound; . } . 
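The loop annotated around this point (`maybe_lint_level_root_bounded`) walks a `HirId` up its parent chain until it either reaches `bound` or finds a node carrying lint-level attributes. A minimal standalone sketch of that walk-up pattern follows; `NodeId`, the parent map, and `has_lint_attrs` are illustrative stand-ins, not rustc's real types or API.

    // Sketch of the walk-up-to-bound pattern used by `maybe_lint_level_root_bounded`,
    // over a toy parent map. Names here are hypothetical stand-ins.
    use std::collections::HashMap;

    type NodeId = u32;

    fn lint_root_bounded(
        parents: &HashMap<NodeId, NodeId>,
        has_lint_attrs: &dyn Fn(NodeId) -> bool,
        mut id: NodeId,
        bound: NodeId,
    ) -> NodeId {
        loop {
            if id == bound {
                return bound;
            }
            if has_lint_attrs(id) {
                return id;
            }
            // The crate root is its own parent; reaching it without hitting
            // `bound` would be a traversal bug in the real compiler.
            let next = *parents.get(&id).unwrap_or(&id);
            if next == id {
                panic!("lint traversal reached the root of the crate");
            }
            id = next;
        }
    }

    fn main() {
        // Toy tree: 3 -> 2 -> 1 -> 0 (root); lint attributes only on node 1.
        let parents: HashMap<NodeId, NodeId> =
            [(3, 2), (2, 1), (1, 0), (0, 0)].into_iter().collect();
        let has_attrs = |id: NodeId| id == 1;
        assert_eq!(lint_root_bounded(&parents, &has_attrs, 3, 0), 1);
    }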
9,520 ( 0.01%) if hir.attrs(id).iter().any(|attr| Level::from_symbol(attr.name_or_empty()).is_some()) { . return id; . } . let next = hir.get_parent_node(id); 5,712 ( 0.01%) if next == id { . bug!("lint traversal reached the root of the crate"); . } . id = next; . } 4,100 ( 0.00%) } . 208 ( 0.00%) pub fn lint_level_at_node( . self, . lint: &'static Lint, . mut id: hir::HirId, . ) -> (Level, LintLevelSource) { . let sets = self.lint_levels(()); . loop { 796 ( 0.00%) if let Some(pair) = sets.level_and_source(lint, id, self.sess) { . return pair; . } 48 ( 0.00%) let next = self.hir().get_parent_node(id); 144 ( 0.00%) if next == id { . bug!("lint traversal reached the root of the crate"); . } . id = next; . } 208 ( 0.00%) } . 28 ( 0.00%) pub fn struct_span_lint_hir( . self, . lint: &'static Lint, . hir_id: HirId, . span: impl Into, . decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), . ) { 76 ( 0.00%) let (level, src) = self.lint_level_at_node(lint, hir_id); 27 ( 0.00%) struct_lint_level(self.sess, lint, level, src, Some(span.into()), decorate); 16 ( 0.00%) } . . pub fn struct_lint_node( . self, . lint: &'static Lint, . id: HirId, . decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), . ) { . let (level, src) = self.lint_level_at_node(lint, id); . struct_lint_level(self.sess, lint, level, src, None, decorate); . } . 196 ( 0.00%) pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx [TraitCandidate]> { 56 ( 0.00%) let map = self.in_scope_traits_map(id.owner)?; . let candidates = map.get(&id.local_id)?; 56 ( 0.00%) Some(&*candidates) 224 ( 0.00%) } . 392 ( 0.00%) pub fn named_region(self, id: HirId) -> Option { . debug!(?id, "named_region"); . self.named_region_map(id.owner).and_then(|map| map.get(&id.local_id).cloned()) 504 ( 0.00%) } . 54 ( 0.00%) pub fn is_late_bound(self, id: HirId) -> bool { . self.is_late_bound_map(id.owner) 3 ( 0.00%) .map_or(false, |(owner, set)| owner == id.owner && set.contains(&id.local_id)) 48 ( 0.00%) } . 259 ( 0.00%) pub fn object_lifetime_defaults(self, id: HirId) -> Option> { . self.object_lifetime_defaults_map(id.owner) 333 ( 0.00%) } . 243 ( 0.00%) pub fn late_bound_vars(self, id: HirId) -> &'tcx List { . self.mk_bound_variable_kinds( . self.late_bound_vars_map(id.owner) . .and_then(|map| map.get(&id.local_id).cloned()) . .unwrap_or_else(|| { . bug!("No bound vars found for {:?} ({:?})", self.hir().node_to_string(id), id) . }) . .iter(), . ) 243 ( 0.00%) } . . pub fn lifetime_scope(self, id: HirId) -> Option { . self.lifetime_scope_map(id.owner).and_then(|mut map| map.remove(&id.local_id)) . } . . /// Whether the `def_id` counts as const fn in the current crate, considering all active . /// feature gates 50 ( 0.00%) pub fn is_const_fn(self, def_id: DefId) -> bool { 5 ( 0.00%) if self.is_const_fn_raw(def_id) { . match self.lookup_const_stability(def_id) { . Some(stability) if stability.level.is_unstable() => { . // has a `rustc_const_unstable` attribute, check whether the user enabled the . // corresponding feature gate. . self.features() . .declared_lib_features . .iter() . .any(|&(sym, _)| sym == stability.feature) -- line 2741 ---------------------------------------- -- line 2743 ---------------------------------------- . // functions without const stability are either stable user written . // const fn or the user is using feature gates and we thus don't . // care what they do . _ => true, . } . } else { . false . } 45 ( 0.00%) } . } . . impl<'tcx> TyCtxtAt<'tcx> { . 
/// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used. . #[track_caller] . pub fn ty_error(self) -> Ty<'tcx> { . self.tcx.ty_error_with_message(self.span, "TyKind::Error constructed but no error reported") . } -- line 2759 ---------------------------------------- -- line 2774 ---------------------------------------- . } . . impl InternAs<[T], R> for I . where . E: InternIteratorElement, . I: Iterator, . { . type Output = E::Output; 8,656 ( 0.01%) fn intern_with(self, f: F) -> Self::Output . where . F: FnOnce(&[T]) -> R, . { 31,287 ( 0.03%) E::intern_with(self, f) 9,225 ( 0.01%) } . } . . pub trait InternIteratorElement: Sized { . type Output; . fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; . } . . impl InternIteratorElement for T { . type Output = R; 5,410 ( 0.01%) fn intern_with, F: FnOnce(&[T]) -> R>( . mut iter: I, . f: F, . ) -> Self::Output { . // This code is hot enough that it's worth specializing for the most . // common length lists, to avoid the overhead of `SmallVec` creation. . // Lengths 0, 1, and 2 typically account for ~95% of cases. If . // `size_hint` is incorrect a panic will occur via an `unwrap` or an . // `assert`. 9,320 ( 0.01%) match iter.size_hint() { . (0, Some(0)) => { . assert!(iter.next().is_none()); . f(&[]) . } . (1, Some(1)) => { . let t0 = iter.next().unwrap(); . assert!(iter.next().is_none()); 1,309 ( 0.00%) f(&[t0]) . } . (2, Some(2)) => { . let t0 = iter.next().unwrap(); . let t1 = iter.next().unwrap(); 17 ( 0.00%) assert!(iter.next().is_none()); 1,045 ( 0.00%) f(&[t0, t1]) . } . _ => f(&iter.collect::>()), . } 4,408 ( 0.00%) } . } . . impl<'a, T, R> InternIteratorElement for &'a T . where . T: Clone + 'a, . { . type Output = R; 135 ( 0.00%) fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { . // This code isn't hot. . f(&iter.cloned().collect::>()) 108 ( 0.00%) } . } . . impl InternIteratorElement for Result { . type Output = Result; 8,582 ( 0.01%) fn intern_with, F: FnOnce(&[T]) -> R>( . mut iter: I, . f: F, . ) -> Self::Output { . // This code is hot enough that it's worth specializing for the most . // common length lists, to avoid the overhead of `SmallVec` creation. . // Lengths 0, 1, and 2 typically account for ~95% of cases. If . // `size_hint` is incorrect a panic will occur via an `unwrap` or an . // `assert`, unless a failure happens first, in which case the result . // will be an error anyway. 7,132 ( 0.01%) Ok(match iter.size_hint() { . (0, Some(0)) => { . assert!(iter.next().is_none()); . f(&[]) . } . (1, Some(1)) => { . let t0 = iter.next().unwrap()?; . assert!(iter.next().is_none()); 873 ( 0.00%) f(&[t0]) . } . (2, Some(2)) => { 76 ( 0.00%) let t0 = iter.next().unwrap()?; 76 ( 0.00%) let t1 = iter.next().unwrap()?; 114 ( 0.00%) assert!(iter.next().is_none()); 582 ( 0.00%) f(&[t0, t1]) . } 220 ( 0.00%) _ => f(&iter.collect::, _>>()?), . }) 9,691 ( 0.01%) } . } . . // We are comparing types with different invariant lifetimes, so `ptr::eq` . // won't work for us. . fn ptr_eq(t: *const T, u: *const U) -> bool { 10,107 ( 0.01%) t as *const () == u as *const () . } . . pub fn provide(providers: &mut ty::query::Providers) { 2 ( 0.00%) providers.in_scope_traits_map = . |tcx, id| tcx.hir_crate(()).owners[id].as_ref().map(|owner_info| &owner_info.trait_map); 3 ( 0.00%) providers.resolutions = |tcx, ()| &tcx.untracked_resolutions; 2 ( 0.00%) providers.module_reexports = . 
|tcx, id| tcx.resolutions(()).reexport_map.get(&id).map(|v| &v[..]); 2 ( 0.00%) providers.crate_name = |tcx, id| { 1 ( 0.00%) assert_eq!(id, LOCAL_CRATE); 1 ( 0.00%) tcx.crate_name . }; 2 ( 0.00%) providers.maybe_unused_trait_import = . |tcx, id| tcx.resolutions(()).maybe_unused_trait_imports.contains(&id); 2 ( 0.00%) providers.maybe_unused_extern_crates = . |tcx, ()| &tcx.resolutions(()).maybe_unused_extern_crates[..]; 2 ( 0.00%) providers.names_imported_by_glob_use = |tcx, id| { . tcx.arena.alloc(tcx.resolutions(()).glob_map.get(&id).cloned().unwrap_or_default()) . }; . 11 ( 0.00%) providers.lookup_stability = |tcx, id| tcx.stability().local_stability(id.expect_local()); 2 ( 0.00%) providers.lookup_const_stability = . |tcx, id| tcx.stability().local_const_stability(id.expect_local()); 2 ( 0.00%) providers.lookup_deprecation_entry = 240 ( 0.00%) |tcx, id| tcx.stability().local_deprecation_entry(id.expect_local()); 2 ( 0.00%) providers.extern_mod_stmt_cnum = . |tcx, id| tcx.resolutions(()).extern_crate_map.get(&id).cloned(); 3 ( 0.00%) providers.output_filenames = |tcx, ()| tcx.output_filenames.clone(); 3 ( 0.00%) providers.features_query = |tcx, ()| tcx.sess.features_untracked(); 2 ( 0.00%) providers.is_panic_runtime = |tcx, cnum| { . assert_eq!(cnum, LOCAL_CRATE); . tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::panic_runtime) . }; 2 ( 0.00%) providers.is_compiler_builtins = |tcx, cnum| { . assert_eq!(cnum, LOCAL_CRATE); . tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::compiler_builtins) . }; 2 ( 0.00%) providers.has_panic_handler = |tcx, cnum| { 1 ( 0.00%) assert_eq!(cnum, LOCAL_CRATE); . // We want to check if the panic handler was defined in this crate 3 ( 0.00%) tcx.lang_items().panic_impl().map_or(false, |did| did.is_local()) . }; . } 35,015 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs -------------------------------------------------------------------------------- Ir -- line 186 ---------------------------------------- . /// // use the values stored in map . /// ``` . pub struct HashMap { . pub(crate) hash_builder: S, . pub(crate) table: RawTable<(K, V), A>, . } . . impl Clone for HashMap { 16 ( 0.00%) fn clone(&self) -> Self { 30 ( 0.00%) HashMap { . hash_builder: self.hash_builder.clone(), 5 ( 0.00%) table: self.table.clone(), . } 18 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . self.table.clone_from(&source.table); . . // Update hash_builder only if we successfully cloned all elements. . self.hash_builder.clone_from(&source.hash_builder); . } . } -- line 207 ---------------------------------------- -- line 210 ---------------------------------------- . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_hasher(hash_builder: &S) -> impl Fn(&(Q, V)) -> u64 + '_ . where . K: Borrow, . Q: Hash, . S: BuildHasher, . { 4,109 ( 0.00%) move |val| make_hash::(hash_builder, &val.0) . } . . /// Ensures that a single closure type across uses of this which, in turn prevents multiple . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . fn equivalent_key(k: &Q) -> impl Fn(&(K, V)) -> bool + '_ . where . K: Borrow, . Q: ?Sized + Eq, . { 7,250 ( 0.01%) move |x| k.eq(x.0.borrow()) . } . . 
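The two helpers above (`make_hash` over the borrowed form and `equivalent_key`) exist so that a map keyed by `K` can be probed with any `Q` where `K: Borrow<Q>`, provided `Hash` and `Eq` agree between the owned and borrowed forms, and so that one closure type is shared across call sites to limit monomorphization. A small sketch of that Borrow-based lookup contract using plain std types, nothing hashbrown-specific:

    // `String: Borrow<str>` plus matching Hash/Eq lets a `&str` probe a map
    // keyed by `String` without allocating.
    use std::collections::hash_map::DefaultHasher;
    use std::collections::HashMap;
    use std::hash::{Hash, Hasher};

    fn hash_of<T: Hash + ?Sized>(v: &T) -> u64 {
        let mut h = DefaultHasher::new();
        v.hash(&mut h);
        h.finish()
    }

    fn main() {
        let mut map: HashMap<String, u32> = HashMap::new();
        map.insert("intern_ty".to_owned(), 1);

        // Lookup hashes the borrowed form and compares via K::borrow().
        assert_eq!(map.get("intern_ty"), Some(&1));

        // The contract the helpers rely on: owned and borrowed forms hash alike.
        assert_eq!(hash_of("intern_ty"), hash_of(&"intern_ty".to_owned()));
    }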
/// Ensures that a single closure type across uses of this which, in turn prevents multiple . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . fn equivalent(k: &Q) -> impl Fn(&K) -> bool + '_ . where . K: Borrow, . Q: ?Sized + Eq, . { 11,310 ( 0.01%) move |x| k.eq(x.borrow()) . } . . #[cfg(not(feature = "nightly"))] . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64 . where . K: Borrow, . Q: Hash + ?Sized, -- line 248 ---------------------------------------- -- line 251 ---------------------------------------- . use core::hash::Hasher; . let mut state = hash_builder.build_hasher(); . val.hash(&mut state); . state.finish() . } . . #[cfg(feature = "nightly")] . #[cfg_attr(feature = "inline-more", inline)] 2 ( 0.00%) pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64 . where . K: Borrow, . Q: Hash + ?Sized, . S: BuildHasher, . { . hash_builder.hash_one(val) 4 ( 0.00%) } . . #[cfg(not(feature = "nightly"))] . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_insert_hash(hash_builder: &S, val: &K) -> u64 . where . K: Hash, . S: BuildHasher, . { -- line 274 ---------------------------------------- -- line 367 ---------------------------------------- . /// let s = DefaultHashBuilder::default(); . /// let mut map = HashMap::with_hasher(s); . /// map.insert(1, 2); . /// ``` . /// . /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html . #[cfg_attr(feature = "inline-more", inline)] . pub const fn with_hasher(hash_builder: S) -> Self { 4,991 ( 0.00%) Self { . hash_builder, . table: RawTable::new(), . } . } . . /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` . /// to hash the keys. . /// -- line 383 ---------------------------------------- -- line 437 ---------------------------------------- . /// use hashbrown::hash_map::DefaultHashBuilder; . /// . /// let s = DefaultHashBuilder::default(); . /// let mut map = HashMap::with_hasher(s); . /// map.insert(1, 2); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self { 61 ( 0.00%) Self { . hash_builder, . table: RawTable::new_in(alloc), . } . } . . /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` . /// to hash the keys. It will be allocated with the given allocator. . /// -- line 453 ---------------------------------------- -- line 663 ---------------------------------------- . /// . /// let mut a = HashMap::new(); . /// assert_eq!(a.len(), 0); . /// a.insert(1, "a"); . /// assert_eq!(a.len(), 1); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn len(&self) -> usize { 386 ( 0.00%) self.table.len() . } . . /// Returns `true` if the map contains no elements. . /// . /// # Examples . /// . /// ``` . /// use hashbrown::HashMap; -- line 679 ---------------------------------------- -- line 680 ---------------------------------------- . /// . /// let mut a = HashMap::new(); . /// assert!(a.is_empty()); . /// a.insert(1, "a"); . /// assert!(!a.is_empty()); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn is_empty(&self) -> bool { 2,036 ( 0.00%) self.len() == 0 . } . . /// Clears the map, returning all key-value pairs as an iterator. Keeps the . /// allocated memory for reuse. . /// . /// # Examples . /// . /// ``` -- line 696 ---------------------------------------- -- line 790 ---------------------------------------- . /// use hashbrown::HashMap; . 
/// . /// let mut a = HashMap::new(); . /// a.insert(1, "a"); . /// a.clear(); . /// assert!(a.is_empty()); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 6 ( 0.00%) pub fn clear(&mut self) { . self.table.clear(); 6 ( 0.00%) } . . /// Creates a consuming iterator visiting all the keys in arbitrary order. . /// The map cannot be used after calling this. . /// The iterator element type is `K`. . /// . /// # Examples . /// . /// ``` -- line 808 ---------------------------------------- -- line 963 ---------------------------------------- . /// } . /// . /// assert_eq!(letters[&'s'], 2); . /// assert_eq!(letters[&'t'], 3); . /// assert_eq!(letters[&'u'], 1); . /// assert_eq!(letters.get(&'y'), None); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 120 ( 0.00%) pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> { . let hash = make_insert_hash::(&self.hash_builder, &key); . if let Some(elem) = self.table.find(hash, equivalent_key(&key)) { 10 ( 0.00%) Entry::Occupied(OccupiedEntry { . hash, . key: Some(key), . elem, . table: self, . }) . } else { 140 ( 0.00%) Entry::Vacant(VacantEntry { . hash, . key, . table: self, . }) . } 150 ( 0.00%) } . . /// Gets the given key's corresponding entry by reference in the map for in-place manipulation. . /// . /// # Examples . /// . /// ``` . /// use hashbrown::HashMap; . /// -- line 995 ---------------------------------------- -- line 1047 ---------------------------------------- . /// ``` . #[inline] . pub fn get(&self, k: &Q) -> Option<&V> . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 13,364 ( 0.01%) match self.get_inner(k) { . Some(&(_, ref v)) => Some(v), . None => None, . } . } . . /// Returns the key-value pair corresponding to the supplied key. . /// . /// The supplied key may be any borrowed form of the map's key type, but -- line 1063 ---------------------------------------- -- line 1091 ---------------------------------------- . } . . #[inline] . fn get_inner(&self, k: &Q) -> Option<&(K, V)> . where . K: Borrow, . Q: Hash + Eq, . { 23,221 ( 0.02%) if self.table.is_empty() { . None . } else { 1 ( 0.00%) let hash = make_hash::(&self.hash_builder, k); . self.table.get(hash, equivalent_key(k)) . } . } . . /// Returns the key-value pair corresponding to the supplied key, with a mutable reference to value. . /// . /// The supplied key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for -- line 1110 ---------------------------------------- -- line 1155 ---------------------------------------- . /// use hashbrown::HashMap; . /// . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// assert_eq!(map.contains_key(&1), true); . /// assert_eq!(map.contains_key(&2), false); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 9,656 ( 0.01%) pub fn contains_key(&self, k: &Q) -> bool . where . K: Borrow, . Q: Hash + Eq, . { . self.get_inner(k).is_some() 11,698 ( 0.01%) } . . /// Returns a mutable reference to the value corresponding to the key. . /// . /// The key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for . /// the key type. . /// . /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html -- line 1177 ---------------------------------------- -- line 1185 ---------------------------------------- . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// if let Some(x) = map.get_mut(&1) { . /// *x = "b"; . /// } . 
/// assert_eq!(map[&1], "b"); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 17 ( 0.00%) pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 51 ( 0.00%) match self.get_inner_mut(k) { . Some(&mut (_, ref mut v)) => Some(v), . None => None, . } 34 ( 0.00%) } . . #[inline] . fn get_inner_mut(&mut self, k: &Q) -> Option<&mut (K, V)> . where . K: Borrow, . Q: Hash + Eq, . { 17 ( 0.00%) if self.table.is_empty() { . None . } else { . let hash = make_hash::(&self.hash_builder, k); . self.table.get_mut(hash, equivalent_key(k)) . } . } . . /// Attempts to get mutable references to `N` values in the map at once. -- line 1219 ---------------------------------------- -- line 1495 ---------------------------------------- . /// assert_eq!(map.insert(37, "a"), None); . /// assert_eq!(map.is_empty(), false); . /// . /// map.insert(37, "b"); . /// assert_eq!(map.insert(37, "c"), Some("b")); . /// assert_eq!(map[&37], "c"); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 160,216 ( 0.16%) pub fn insert(&mut self, k: K, v: V) -> Option { . let hash = make_insert_hash::(&self.hash_builder, &k); 14 ( 0.00%) if let Some((_, item)) = self.table.get_mut(hash, equivalent_key(&k)) { . Some(mem::replace(item, v)) . } else { 90,095 ( 0.09%) self.table 86,177 ( 0.09%) .insert(hash, (k, v), make_hasher::(&self.hash_builder)); 6,480 ( 0.01%) None . } 141,929 ( 0.14%) } . . /// Insert a key-value pair into the map without checking . /// if the key already exists in the map. . /// . /// Returns a reference to the key and value just inserted. . /// . /// This operation is safe if a key does not exist in the map. . /// -- line 1520 ---------------------------------------- -- line 1592 ---------------------------------------- . /// use hashbrown::HashMap; . /// . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// assert_eq!(map.remove(&1), Some("a")); . /// assert_eq!(map.remove(&1), None); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 1,355 ( 0.00%) pub fn remove(&mut self, k: &Q) -> Option . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 33,592 ( 0.03%) match self.remove_entry(k) { 432 ( 0.00%) Some((_, v)) => Some(v), 993 ( 0.00%) None => None, . } 3,046 ( 0.00%) } . . /// Removes a key from the map, returning the stored key and value if the . /// key was previously in the map. . /// . /// The key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for . /// the key type. . /// -- line 1618 ---------------------------------------- -- line 1631 ---------------------------------------- . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn remove_entry(&mut self, k: &Q) -> Option<(K, V)> . where . K: Borrow, . Q: Hash + Eq, . { . let hash = make_hash::(&self.hash_builder, k); 8,888 ( 0.01%) self.table.remove_entry(hash, equivalent_key(k)) . } . } . . impl HashMap { . /// Creates a raw entry builder for the HashMap. . /// . /// Raw entries provide the lowest level of control for searching and . /// manipulating a map. They must be manually initialized with a hash and -- line 1647 ---------------------------------------- -- line 2209 ---------------------------------------- . /// Creates a `RawEntryMut` from the given key and its hash. . #[inline] . #[allow(clippy::wrong_self_convention)] . 
pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A> . where . K: Borrow, . Q: Eq, . { 63,684 ( 0.06%) self.from_hash(hash, equivalent(k)) . } . } . . impl<'a, K, V, S, A: Allocator + Clone> RawEntryBuilderMut<'a, K, V, S, A> { . /// Creates a `RawEntryMut` from the given hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] 159,081 ( 0.16%) pub fn from_hash(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A> . where . for<'b> F: FnMut(&'b K) -> bool, . { . self.search(hash, is_match) 172,724 ( 0.17%) } . . #[cfg_attr(feature = "inline-more", inline)] . fn search(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A> . where . for<'b> F: FnMut(&'b K) -> bool, . { 16,998 ( 0.02%) match self.map.table.find(hash, |(k, _)| is_match(k)) { 85,447 ( 0.08%) Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut { . elem, . table: &mut self.map.table, . hash_builder: &self.map.hash_builder, . }), 19,710 ( 0.02%) None => RawEntryMut::Vacant(RawVacantEntryMut { . table: &mut self.map.table, . hash_builder: &self.map.hash_builder, . }), . } . } . } . . impl<'a, K, V, S, A: Allocator + Clone> RawEntryBuilder<'a, K, V, S, A> { -- line 2251 ---------------------------------------- -- line 2260 ---------------------------------------- . { . let hash = make_hash::(&self.map.hash_builder, k); . self.from_key_hashed_nocheck(hash, k) . } . . /// Access an entry by a key and its hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] 28,564 ( 0.03%) pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)> . where . K: Borrow, . Q: Eq, . { 35,186 ( 0.03%) self.from_hash(hash, equivalent(k)) 51,394 ( 0.05%) } . . #[cfg_attr(feature = "inline-more", inline)] . fn search(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)> . where . F: FnMut(&K) -> bool, . { 49,375 ( 0.05%) match self.map.table.get(hash, |(k, _)| is_match(k)) { . Some(&(ref key, ref value)) => Some((key, value)), . None => None, . } . } . . /// Access an entry by hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] -- line 2289 ---------------------------------------- -- line 2624 ---------------------------------------- . /// and returns a mutable reference to it. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::shadow_unrelated)] . pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V) . where . K: Hash, . S: BuildHasher, . { 32,252 ( 0.03%) let &mut (ref mut k, ref mut v) = self.table.insert_entry( . hash, . (key, value), . make_hasher::(self.hash_builder), . ); . (k, v) . } . . /// Set the value of an entry with a custom hasher function. -- line 2640 ---------------------------------------- -- line 2974 ---------------------------------------- . /// map.insert("a", 1); . /// map.insert("b", 2); . /// map.insert("c", 3); . /// . /// // Not possible with .iter() . /// let vec: Vec<(&str, i32)> = map.into_iter().collect(); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 58 ( 0.00%) fn into_iter(self) -> IntoIter { 1,685 ( 0.00%) IntoIter { 1,258 ( 0.00%) inner: self.table.into_iter(), . } 174 ( 0.00%) } . } . . impl<'a, K, V> Iterator for Iter<'a, K, V> { . type Item = (&'a K, &'a V); . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option<(&'a K, &'a V)> { . // Avoid `Option::map` because it bloats LLVM IR. 2,530 ( 0.00%) match self.inner.next() { . 
Some(x) => unsafe { . let r = x.as_ref(); 16 ( 0.00%) Some((&r.0, &r.1)) . }, . None => None, . } . } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { . self.inner.size_hint() . } -- line 3006 ---------------------------------------- -- line 3051 ---------------------------------------- . } . } . . impl Iterator for IntoIter { . type Item = (K, V); . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option<(K, V)> { 40 ( 0.00%) self.inner.next() . } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { . self.inner.size_hint() . } . } . impl ExactSizeIterator for IntoIter { . #[cfg_attr(feature = "inline-more", inline)] -- line 3067 ---------------------------------------- -- line 3076 ---------------------------------------- . f.debug_list().entries(self.iter()).finish() . } . } . . impl<'a, K, V> Iterator for Keys<'a, K, V> { . type Item = &'a K; . . #[cfg_attr(feature = "inline-more", inline)] 3 ( 0.00%) fn next(&mut self) -> Option<&'a K> { . // Avoid `Option::map` because it bloats LLVM IR. . match self.inner.next() { . Some((k, _)) => Some(k), . None => None, . } 6 ( 0.00%) } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { . self.inner.size_hint() . } . } . impl ExactSizeIterator for Keys<'_, K, V> { . #[cfg_attr(feature = "inline-more", inline)] . fn len(&self) -> usize { -- line 3098 ---------------------------------------- -- line 3819 ---------------------------------------- . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn insert(self, value: V) -> &'a mut V . where . K: Hash, . S: BuildHasher, . { . let table = &mut self.table.table; 8 ( 0.00%) let entry = table.insert_entry( . self.hash, . (self.key, value), . make_hasher::(&self.table.hash_builder), . ); . &mut entry.1 . } . . #[cfg_attr(feature = "inline-more", inline)] -- line 3835 ---------------------------------------- -- line 4557 ---------------------------------------- . /// keys with new values returned from the iterator. . impl Extend<(K, V)> for HashMap . where . K: Eq + Hash, . S: BuildHasher, . A: Allocator + Clone, . { . #[cfg_attr(feature = "inline-more", inline)] 973 ( 0.00%) fn extend>(&mut self, iter: T) { . // Keys may be already present or show multiple times in the iterator. . // Reserve the entire hint lower bound if the map is empty. . // Otherwise reserve half the hint (rounded up), so the map . // will only resize twice in the worst case. 785 ( 0.00%) let iter = iter.into_iter(); 426 ( 0.00%) let reserve = if self.is_empty() { . iter.size_hint().0 . } else { 3 ( 0.00%) (iter.size_hint().0 + 1) / 2 . }; . self.reserve(reserve); . iter.for_each(move |(k, v)| { 14,509 ( 0.01%) self.insert(k, v); . }); 608 ( 0.00%) } . . #[inline] . #[cfg(feature = "nightly")] . fn extend_one(&mut self, (k, v): (K, V)) { . self.insert(k, v); . } . . #[inline] -- line 4588 ---------------------------------------- 69,016 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs -------------------------------------------------------------------------------- Ir -- line 111 ---------------------------------------- . const EMPTY: u8 = 0b1111_1111; . . /// Control byte value for a deleted bucket. . const DELETED: u8 = 0b1000_0000; . . /// Checks whether a control byte represents a full bucket (top bit is clear). . 
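For orientation while reading the counts in this part of the file: every slot in the raw table carries a one-byte control tag. Full slots store the top seven bits of the hash (high bit clear); the EMPTY and DELETED sentinels defined above have the high bit set. The following is a consolidated sketch of that encoding, assuming a 64-bit hash as on this machine (the annotated `is_full`, `special_is_empty`, and `h2` bodies nearby handle the general case, including 32-bit targets):

    // Control-byte encoding used by the swiss-table layout profiled here.
    const EMPTY: u8 = 0b1111_1111;
    const DELETED: u8 = 0b1000_0000;

    fn h2(hash: u64) -> u8 {
        // Keep the top 7 bits of the 64-bit hash as the per-slot tag.
        ((hash >> 57) & 0x7f) as u8
    }

    fn is_full(ctrl: u8) -> bool {
        ctrl & 0x80 == 0
    }

    fn special_is_empty(ctrl: u8) -> bool {
        // Only meaningful for non-full bytes: EMPTY has bit 0 set, DELETED does not.
        ctrl & 0x01 != 0
    }

    fn main() {
        let tag = h2(0xdead_beef_dead_beef_u64);
        assert!(is_full(tag));
        assert!(!is_full(EMPTY) && !is_full(DELETED));
        assert!(special_is_empty(EMPTY) && !special_is_empty(DELETED));
    }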
#[inline] . fn is_full(ctrl: u8) -> bool { 100,300 ( 0.10%) ctrl & 0x80 == 0 . } . . /// Checks whether a control byte represents a special value (top bit is set). . #[inline] . fn is_special(ctrl: u8) -> bool { . ctrl & 0x80 != 0 . } . . /// Checks whether a special control value is EMPTY (just check 1 bit). . #[inline] . fn special_is_empty(ctrl: u8) -> bool { . debug_assert!(is_special(ctrl)); 7,612 ( 0.01%) ctrl & 0x01 != 0 . } . . /// Primary hash function, used to select the initial bucket to probe from. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h1(hash: u64) -> usize { . // On 32-bit platforms we simply ignore the higher hash bits. . hash as usize -- line 140 ---------------------------------------- -- line 143 ---------------------------------------- . /// Secondary hash function, saved in the low 7 bits of the control byte. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h2(hash: u64) -> u8 { . // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit . // value, some hash functions (such as FxHash) produce a usize result . // instead, which means that the top 32 bits are 0 on 32-bit platforms. . let hash_len = usize::min(mem::size_of::(), mem::size_of::()); 985,225 ( 0.98%) let top7 = hash >> (hash_len * 8 - 7); . (top7 & 0x7f) as u8 // truncation . } . . /// Probe sequence based on triangular numbers, which is guaranteed (since our . /// table size is a power of two) to visit every group of elements exactly once. . /// . /// A triangular probe has us jump by 1 more group every time. So first we . /// jump by 1 group (meaning we just continue our linear scan), then 2 groups -- line 159 ---------------------------------------- -- line 170 ---------------------------------------- . #[inline] . fn move_next(&mut self, bucket_mask: usize) { . // We should have found an empty bucket by now and ended the probe. . debug_assert!( . self.stride <= bucket_mask, . "Went past end of probe sequence" . ); . 9,158 ( 0.01%) self.stride += Group::WIDTH; 9,158 ( 0.01%) self.pos += self.stride; 7,406 ( 0.01%) self.pos &= bucket_mask; . } . } . . /// Returns the number of buckets needed to hold the given number of items, . /// taking the maximum load factor into account. . /// . /// Returns `None` if an overflow occurs. . // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258 . #[cfg_attr(target_os = "emscripten", inline(never))] . #[cfg_attr(not(target_os = "emscripten"), inline)] . fn capacity_to_buckets(cap: usize) -> Option { . debug_assert_ne!(cap, 0); . . // For small tables we require at least 1 empty bucket so that lookups are . // guaranteed to terminate if an element doesn't exist in the table. 6,376 ( 0.01%) if cap < 8 { . // We don't bother with a table size of 2 buckets since that can only . // hold a single element. Instead we skip directly to a 4 bucket table . // which can hold 3 elements. 12,665 ( 0.01%) return Some(if cap < 4 { 4 } else { 8 }); . } . . // Otherwise require 1/8 buckets to be empty (87.5% load) . // . // Be careful when modifying this, calculate_layout relies on the . // overflow check here. 3,930 ( 0.00%) let adjusted_cap = cap.checked_mul(8)? / 7; . . // Any overflows will have been caught by the checked_mul. Also, any . // rounding errors from the division above will be cleaned up by . // next_power_of_two (which can't overflow because of the previous division). . Some(adjusted_cap.next_power_of_two()) . } . . /// Returns the maximum effective capacity for the given bucket mask, taking . 
/// the maximum load factor into account. . #[inline] . fn bucket_mask_to_capacity(bucket_mask: usize) -> usize { 16,563 ( 0.02%) if bucket_mask < 8 { . // For tables with 1/2/4/8 buckets, we always reserve one empty slot. . // Keep in mind that the bucket mask is one less than the bucket count. . bucket_mask . } else { . // For larger tables we reserve 12.5% of the slots as empty. 4,992 ( 0.00%) ((bucket_mask + 1) / 8) * 7 . } . } . . /// Helper which allows the max calculation for ctrl_align to be statically computed for each T . /// while keeping the rest of `calculate_layout_for` independent of `T` . #[derive(Copy, Clone)] . struct TableLayout { . size: usize, -- line 233 ---------------------------------------- -- line 246 ---------------------------------------- . . #[inline] . fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> { . debug_assert!(buckets.is_power_of_two()); . . let TableLayout { size, ctrl_align } = self; . // Manual layout calculation since Layout methods are not yet stable. . let ctrl_offset = 12,945 ( 0.01%) size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1); 17,458 ( 0.02%) let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?; . . Some(( . unsafe { Layout::from_size_align_unchecked(len, ctrl_align) }, . ctrl_offset, . )) . } . } . -- line 263 ---------------------------------------- -- line 337 ---------------------------------------- . } . } . #[cfg_attr(feature = "inline-more", inline)] . pub unsafe fn drop(&self) { . self.as_ptr().drop_in_place(); . } . #[inline] . pub unsafe fn read(&self) -> T { 14 ( 0.00%) self.as_ptr().read() . } . #[inline] . pub unsafe fn write(&self, val: T) { . self.as_ptr().write(val); . } . #[inline] . pub unsafe fn as_ref<'a>(&self) -> &'a T { . &*self.as_ptr() -- line 353 ---------------------------------------- -- line 422 ---------------------------------------- . /// Creates a new empty hash table without allocating any memory, using the . /// given allocator. . /// . /// In effect this returns a table with exactly 1 bucket. However we can . /// leave the data pointer dangling since that bucket is never written to . /// due to our load factor forcing us to always have at least 1 free bucket. . #[inline] . pub fn new_in(alloc: A) -> Self { 12 ( 0.00%) Self { . table: RawTableInner::new_in(alloc), . marker: PhantomData, . } . } . . /// Allocates a new hash table with the given number of buckets. . /// . /// The control bytes are left uninitialized. -- line 438 ---------------------------------------- -- line 440 ---------------------------------------- . unsafe fn new_uninitialized( . alloc: A, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . Ok(Self { 20 ( 0.00%) table: RawTableInner::new_uninitialized( . alloc, . TableLayout::new::(), . buckets, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 456 ---------------------------------------- -- line 458 ---------------------------------------- . /// Attempts to allocate a new hash table with at least enough capacity . /// for inserting the given number of elements without reallocating. . fn fallible_with_capacity( . alloc: A, . capacity: usize, . fallibility: Fallibility, . ) -> Result { . Ok(Self { 522 ( 0.00%) table: RawTableInner::fallible_with_capacity( . alloc, . TableLayout::new::(), . capacity, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 474 ---------------------------------------- -- line 527 ---------------------------------------- . 
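`bucket_mask_to_capacity` above and `capacity_to_buckets` a little earlier encode the table's load factor: bucket counts are powers of two, at most 7/8 of the slots may be occupied, and very small tables simply keep one slot free. A standalone restatement of that arithmetic (not hashbrown's code, but it mirrors the annotated bodies):

    fn capacity_to_buckets(cap: usize) -> Option<usize> {
        debug_assert_ne!(cap, 0);
        if cap < 8 {
            // Skip 2-bucket tables; jump straight to 4 or 8.
            return Some(if cap < 4 { 4 } else { 8 });
        }
        // Require 1/8 of the buckets to stay empty (87.5% load).
        let adjusted = cap.checked_mul(8)? / 7;
        Some(adjusted.next_power_of_two())
    }

    fn bucket_mask_to_capacity(bucket_mask: usize) -> usize {
        if bucket_mask < 8 {
            bucket_mask // 1/2/4/8 buckets: keep one slot free
        } else {
            ((bucket_mask + 1) / 8) * 7
        }
    }

    fn main() {
        // Asking for 12 elements rounds up to 16 buckets, which in turn hold 14.
        let buckets = capacity_to_buckets(12).unwrap();
        assert_eq!(buckets, 16);
        assert_eq!(bucket_mask_to_capacity(buckets - 1), 14);
    }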
debug_assert_ne!(self.table.bucket_mask, 0); . debug_assert!(index < self.buckets()); . Bucket::from_base_index(self.data_end(), index) . } . . /// Erases an element from the table without dropping it. . #[cfg_attr(feature = "inline-more", inline)] . #[deprecated(since = "0.8.1", note = "use erase or remove instead")] 827 ( 0.00%) pub unsafe fn erase_no_drop(&mut self, item: &Bucket) { 827 ( 0.00%) let index = self.bucket_index(item); . self.table.erase(index); 1,654 ( 0.00%) } . . /// Erases an element from the table, dropping it in place. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn erase(&mut self, item: Bucket) { . // Erase the element from the table first since drop might panic. 202 ( 0.00%) self.erase_no_drop(&item); . item.drop(); . } . . /// Finds and erases an element from the table, dropping it in place. . /// Returns true if an element was found. . #[cfg(feature = "raw")] . #[cfg_attr(feature = "inline-more", inline)] . pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool { -- line 554 ---------------------------------------- -- line 563 ---------------------------------------- . } . } . . /// Removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn remove(&mut self, item: Bucket) -> T { 1,452 ( 0.00%) self.erase_no_drop(&item); . item.read() . } . . /// Finds and removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] 27,801 ( 0.03%) pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option { . // Avoid `Option::map` because it bloats LLVM IR. 8 ( 0.00%) match self.find(hash, eq) { 279 ( 0.00%) Some(bucket) => Some(unsafe { self.remove(bucket) }), 3,258 ( 0.00%) None => None, . } 39,742 ( 0.04%) } . . /// Marks all table buckets as empty without dropping their contents. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear_no_drop(&mut self) { . self.table.clear_no_drop(); . } . . /// Removes all elements from the table without freeing the backing memory. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear(&mut self) { . // Ensure that the table is reset even if one of the drops panic . let mut self_ = guard(self, |self_| self_.clear_no_drop()); . unsafe { 1 ( 0.00%) self_.drop_elements(); . } . } . 7 ( 0.00%) unsafe fn drop_elements(&mut self) { 636 ( 0.00%) if mem::needs_drop::() && !self.is_empty() { . for item in self.iter() { . item.drop(); . } . } 8 ( 0.00%) } . . /// Shrinks the table to fit `max(self.len(), min_size)` elements. . #[cfg_attr(feature = "inline-more", inline)] . pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) { . // Calculate the minimal number of elements that we need to reserve . // space for. . let min_size = usize::max(self.table.items, min_size); . if min_size == 0 { -- line 615 ---------------------------------------- -- line 642 ---------------------------------------- . } . } . } . . /// Ensures that at least `additional` items can be inserted into the table . /// without reallocation. . #[cfg_attr(feature = "inline-more", inline)] . pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) { 18,368 ( 0.02%) if additional > self.table.growth_left { . // Avoid `Result::unwrap_or_else` because it bloats LLVM IR. 10,858 ( 0.01%) if self . .reserve_rehash(additional, hasher, Fallibility::Infallible) . 
.is_err() . { . unsafe { hint::unreachable_unchecked() } . } . } . } . -- line 660 ---------------------------------------- -- line 671 ---------------------------------------- . } else { . Ok(()) . } . } . . /// Out-of-line slow path for `reserve` and `try_reserve`. . #[cold] . #[inline(never)] 25,557 ( 0.03%) fn reserve_rehash( . &mut self, . additional: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, . ) -> Result<(), TryReserveError> { . unsafe { . self.table.reserve_rehash_inner( . additional, -- line 687 ---------------------------------------- -- line 690 ---------------------------------------- . TableLayout::new::(), . if mem::needs_drop::() { . Some(mem::transmute(ptr::drop_in_place:: as unsafe fn(*mut T))) . } else { . None . }, . ) . } 16,080 ( 0.02%) } . . /// Allocates a new table of a different size and moves the contents of the . /// current table into it. . fn resize( . &mut self, . capacity: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, -- line 706 ---------------------------------------- -- line 714 ---------------------------------------- . ) . } . } . . /// Inserts a new element into the table, and returns its raw bucket. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 159,964 ( 0.16%) pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket { . unsafe { . let mut index = self.table.find_insert_slot(hash); . . // We can avoid growing the table once we have reached our load . // factor if we are replacing a tombstone. This works since the . // number of EMPTY slots does not change in this case. 426 ( 0.00%) let old_ctrl = *self.table.ctrl(index); 102,901 ( 0.10%) if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) { . self.reserve(1, hasher); . index = self.table.find_insert_slot(hash); . } . . self.table.record_item_insert_at(index, old_ctrl, hash); . . let bucket = self.bucket(index); 2 ( 0.00%) bucket.write(value); . bucket . } 117,789 ( 0.12%) } . . /// Attempts to insert a new element without growing the table and return its raw bucket. . /// . /// Returns an `Err` containing the given element if inserting it would require growing the . /// table. . /// . /// This does not check if the given element already exists in the table. . #[cfg(feature = "raw")] -- line 749 ---------------------------------------- -- line 760 ---------------------------------------- . } . } . } . . /// Inserts a new element into the table, and returns a mutable reference to it. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 46,000 ( 0.05%) pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T { 48 ( 0.00%) unsafe { self.insert(hash, value, hasher).as_mut() } 34,500 ( 0.03%) } . . /// Inserts a new element into the table, without growing the table. . /// . /// There must be enough space in the table to insert the new element. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] . #[cfg(any(feature = "raw", feature = "rustc-internal-api"))] 39 ( 0.00%) pub unsafe fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket { 22,470 ( 0.02%) let (index, old_ctrl) = self.table.prepare_insert_slot(hash); 275 ( 0.00%) let bucket = self.table.bucket(index); . . // If we are replacing a DELETED entry then we don't need to update . 
// the load counter. 45,943 ( 0.05%) self.table.growth_left -= special_is_empty(old_ctrl) as usize; . . bucket.write(value); 36,558 ( 0.04%) self.table.items += 1; . bucket 73 ( 0.00%) } . . /// Temporary removes a bucket, applying the given function to the removed . /// element and optionally put back the returned value in the same bucket. . /// . /// Returns `true` if the bucket still contains an element . /// . /// This does not check if the given bucket is actually occupied. . #[cfg_attr(feature = "inline-more", inline)] -- line 798 ---------------------------------------- -- line 813 ---------------------------------------- . true . } else { . false . } . } . . /// Searches for an element in the table. . #[inline] 112 ( 0.00%) pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option> { 122 ( 0.00%) let result = self.table.find_inner(hash, &mut |index| unsafe { 253 ( 0.00%) eq(self.bucket(index).as_ref()) 25 ( 0.00%) }); . . // Avoid `Option::map` because it bloats LLVM IR. . match result { 7 ( 0.00%) Some(index) => Some(unsafe { self.bucket(index) }), . None => None, . } 112 ( 0.00%) } . . /// Gets a reference to an element in the table. . #[inline] . pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> { . // Avoid `Option::map` because it bloats LLVM IR. 229 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_ref() }), . None => None, . } . } . . /// Gets a mutable reference to an element in the table. . #[inline] . pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> { . // Avoid `Option::map` because it bloats LLVM IR. 79 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_mut() }), . None => None, . } . } . . /// Attempts to get mutable references to `N` entries in the table at once. . /// . /// Returns an array of length `N` with the results of each query. -- line 855 ---------------------------------------- -- line 920 ---------------------------------------- . #[inline] . pub fn len(&self) -> usize { . self.table.items . } . . /// Returns `true` if the table contains no elements. . #[inline] . pub fn is_empty(&self) -> bool { 23,998 ( 0.02%) self.len() == 0 . } . . /// Returns the number of buckets in the table. . #[inline] . pub fn buckets(&self) -> usize { . self.table.bucket_mask + 1 . } . -- line 936 ---------------------------------------- -- line 938 ---------------------------------------- . /// the caller to ensure that the `RawTable` outlives the `RawIter`. . /// Because we cannot make the `next` method unsafe on the `RawIter` . /// struct, we have to make the `iter` method unsafe. . #[inline] . pub unsafe fn iter(&self) -> RawIter { . let data = Bucket::from_base_index(self.data_end(), 0); . RawIter { . iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()), 3,218 ( 0.00%) items: self.table.items, . } . } . . /// Returns an iterator over occupied buckets that could match a given hash. . /// . /// `RawTable` only stores 7 bits of the hash value, so this iterator may . /// return items that have a hash value different than the one provided. You . /// should always validate the returned values before using them. -- line 954 ---------------------------------------- -- line 995 ---------------------------------------- . /// Iteration starts at the provided iterator's current location. . /// . /// It is up to the caller to ensure that the iterator is valid for this . /// `RawTable` and covers all items that remain in the table. . 
pub unsafe fn into_iter_from(self, iter: RawIter) -> RawIntoIter { . debug_assert_eq!(iter.len(), self.len()); . . let alloc = self.table.alloc.clone(); 588 ( 0.00%) let allocation = self.into_allocation(); 441 ( 0.00%) RawIntoIter { 735 ( 0.00%) iter, . allocation, . marker: PhantomData, . alloc, . } . } . . /// Converts the table into a raw allocation. The contents of the table . /// should be dropped using a `RawIter` before freeing the allocation. . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn into_allocation(self) -> Option<(NonNull, Layout)> { 205 ( 0.00%) let alloc = if self.table.is_empty_singleton() { . None . } else { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match calculate_layout::(self.table.buckets()) { . Some(lco) => lco, . None => unsafe { hint::unreachable_unchecked() }, . }; . Some(( 87 ( 0.00%) unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) }, . layout, . )) . }; . mem::forget(self); . alloc . } . } . -- line 1033 ---------------------------------------- -- line 1042 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . impl RawTableInner { . #[inline] . const fn new_in(alloc: A) -> Self { 24,167 ( 0.02%) Self { . // Be careful to cast the entire slice to a raw pointer. . ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) }, . bucket_mask: 0, . items: 0, . growth_left: 0, . alloc, . } . } . } . . impl RawTableInner { . #[cfg_attr(feature = "inline-more", inline)] 20,902 ( 0.02%) unsafe fn new_uninitialized( . alloc: A, . table_layout: TableLayout, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . // Avoid `Option::ok_or_else` because it bloats LLVM IR. -- line 1071 ---------------------------------------- -- line 1078 ---------------------------------------- . // exceed `isize::MAX`. We can skip this check on 64-bit systems since . // such allocations will never succeed anyways. . // . // This mirrors what Vec does in the standard library. . if mem::size_of::() < 8 && layout.size() > isize::MAX as usize { . return Err(fallibility.capacity_overflow()); . } . 5,204 ( 0.01%) let ptr: NonNull = match do_alloc(&alloc, layout) { . Ok(block) => block.cast(), . Err(_) => return Err(fallibility.alloc_err(layout)), . }; . . let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset)); 12,540 ( 0.01%) Ok(Self { . ctrl, 5,282 ( 0.01%) bucket_mask: buckets - 1, . items: 0, . growth_left: bucket_mask_to_capacity(buckets - 1), . alloc, . }) 15,328 ( 0.02%) } . . #[inline] 2,442 ( 0.00%) fn fallible_with_capacity( . alloc: A, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result { 618 ( 0.00%) if capacity == 0 { 370 ( 0.00%) Ok(Self::new_in(alloc)) . } else { . unsafe { . let buckets = . capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?; . 9,648 ( 0.01%) let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?; . result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes()); . 1,398 ( 0.00%) Ok(result) . } . } 2,442 ( 0.00%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element and sets the hash for that slot. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] 7,578 ( 0.01%) unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) { . 
let index = self.find_insert_slot(hash); 7,578 ( 0.01%) let old_ctrl = *self.ctrl(index); . self.set_ctrl_h2(index, hash); . (index, old_ctrl) 15,156 ( 0.02%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] . fn find_insert_slot(&self, hash: u64) -> usize { . let mut probe_seq = self.probe_seq(hash); . loop { . unsafe { . let group = Group::load(self.ctrl(probe_seq.pos)); 66,376 ( 0.07%) if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() { 194,272 ( 0.19%) let result = (probe_seq.pos + bit) & self.bucket_mask; . . // In tables smaller than the group width, trailing control . // bytes outside the range of the table are filled with . // EMPTY entries. These will unfortunately trigger a . // match, but once masked may point to a full bucket that . // is already occupied. We detect this situation here and . // perform a second scan starting at the beginning of the . // table. This second scan is guaranteed to find an empty . // slot (due to the load factor) before hitting the trailing . // control bytes (containing EMPTY). 90,461 ( 0.09%) if unlikely(is_full(*self.ctrl(result))) { . debug_assert!(self.bucket_mask < Group::WIDTH); . debug_assert_ne!(probe_seq.pos, 0); . return Group::load_aligned(self.ctrl(0)) . .match_empty_or_deleted() . .lowest_set_bit_nonzero(); . } . . return result; -- line 1165 ---------------------------------------- -- line 1171 ---------------------------------------- . . /// Searches for an element in the table. This uses dynamic dispatch to reduce the amount of . /// code generated, but it is eliminated by LLVM optimizations. . #[inline] . fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option { . let h2_hash = h2(hash); . let mut probe_seq = self.probe_seq(hash); . 2,744 ( 0.00%) loop { . let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) }; . 118,185 ( 0.12%) for bit in group.match_byte(h2_hash) { 186,969 ( 0.19%) let index = (probe_seq.pos + bit) & self.bucket_mask; . 118,340 ( 0.12%) if likely(eq(index)) { . return Some(index); . } . } . 52,080 ( 0.05%) if likely(group.match_empty().any_bit_set()) { . return None; . } . . probe_seq.move_next(self.bucket_mask); . } . } . . #[allow(clippy::mut_mut)] -- line 1198 ---------------------------------------- -- line 1225 ---------------------------------------- . Bucket::from_base_index(self.data_end(), index) . } . . #[inline] . unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 { . debug_assert_ne!(self.bucket_mask, 0); . debug_assert!(index < self.buckets()); . let base: *mut u8 = self.data_end().as_ptr(); 232,864 ( 0.23%) base.sub((index + 1) * size_of) . } . . #[inline] . unsafe fn data_end(&self) -> NonNull { . NonNull::new_unchecked(self.ctrl.as_ptr().cast()) . } . . /// Returns an iterator-like object for a probe sequence on the table. . /// . /// This iterator never terminates, but is guaranteed to visit each bucket . /// group exactly once. The loop using `probe_seq` must terminate upon . /// reaching a group containing an empty bucket. . #[inline] . fn probe_seq(&self, hash: u64) -> ProbeSeq { . ProbeSeq { 836,888 ( 0.83%) pos: h1(hash) & self.bucket_mask, . stride: 0, . } . } . . /// Returns the index of a bucket for which a value must be inserted if there is enough rooom . /// in the table, otherwise returns error . #[cfg(feature = "raw")] . 
#[inline] -- line 1257 ---------------------------------------- -- line 1263 ---------------------------------------- . } else { . self.record_item_insert_at(index, old_ctrl, hash); . Ok(index) . } . } . . #[inline] . unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) { 128,615 ( 0.13%) self.growth_left -= special_is_empty(old_ctrl) as usize; . self.set_ctrl_h2(index, hash); 102,892 ( 0.10%) self.items += 1; . } . . #[inline] . fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool { . let probe_seq_pos = self.probe_seq(hash).pos; . let probe_index = . |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH; . probe_index(i) == probe_index(new_i) -- line 1281 ---------------------------------------- -- line 1312 ---------------------------------------- . // replicate the buckets at the end of the trailing group. For example . // with 2 buckets and a group size of 4, the control bytes will look . // like this: . // . // Real | Replicated . // --------------------------------------------- . // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] | . // --------------------------------------------- 187,329 ( 0.19%) let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH; . 62,443 ( 0.06%) *self.ctrl(index) = ctrl; 62,446 ( 0.06%) *self.ctrl(index2) = ctrl; . } . . /// Returns a pointer to a control byte. . #[inline] . unsafe fn ctrl(&self, index: usize) -> *mut u8 { . debug_assert!(index < self.num_ctrl_bytes()); . self.ctrl.as_ptr().add(index) . } . . #[inline] . fn buckets(&self) -> usize { 12,163 ( 0.01%) self.bucket_mask + 1 . } . . #[inline] . fn num_ctrl_bytes(&self) -> usize { 11,694 ( 0.01%) self.bucket_mask + 1 + Group::WIDTH . } . . #[inline] . fn is_empty_singleton(&self) -> bool { 48,399 ( 0.05%) self.bucket_mask == 0 . } . . #[allow(clippy::mut_mut)] . #[inline] . unsafe fn prepare_resize( . &self, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result, TryReserveError> { . debug_assert!(self.items <= capacity); . . // Allocate and initialize the new table. 528 ( 0.00%) let mut new_table = RawTableInner::fallible_with_capacity( . self.alloc.clone(), . table_layout, . capacity, . fallibility, . )?; 6,706 ( 0.01%) new_table.growth_left -= self.items; . new_table.items = self.items; . . // The hash function may panic, in which case we simply free the new . // table without dropping any elements that may have been copied into . // it. . // . // This guard is also used to free the old table on success, see . // the comment at the bottom of this function. . Ok(guard(new_table, move |self_| { 3,131 ( 0.00%) if !self_.is_empty_singleton() { . self_.free_buckets(table_layout); . } . })) . } . . /// Reserves or rehashes to make room for `additional` more elements. . /// . /// This uses dynamic dispatch to reduce the amount of -- line 1383 ---------------------------------------- -- line 1388 ---------------------------------------- . &mut self, . additional: usize, . hasher: &dyn Fn(&mut Self, usize) -> u64, . fallibility: Fallibility, . layout: TableLayout, . drop: Option, . ) -> Result<(), TryReserveError> { . // Avoid `Option::ok_or_else` because it bloats LLVM IR. 6,262 ( 0.01%) let new_items = match self.items.checked_add(additional) { . Some(new_items) => new_items, . None => return Err(fallibility.capacity_overflow()), . }; 6,262 ( 0.01%) let full_capacity = bucket_mask_to_capacity(self.bucket_mask); 13,220 ( 0.01%) if new_items <= full_capacity / 2 { . 
// Rehash in-place without re-allocating if we have plenty of spare . // capacity that is locked up due to DELETED entries. . self.rehash_in_place(hasher, layout.size, drop); . Ok(()) . } else { . // Otherwise, conservatively resize to at least the next size up . // to avoid churning deletes into frequent rehashes. . self.resize_inner( 3,131 ( 0.00%) usize::max(new_items, full_capacity + 1), . hasher, . fallibility, . layout, . ) . } . } . . /// Allocates a new table of a different size and moves the contents of the -- line 1418 ---------------------------------------- -- line 1424 ---------------------------------------- . #[inline(always)] . unsafe fn resize_inner( . &mut self, . capacity: usize, . hasher: &dyn Fn(&mut Self, usize) -> u64, . fallibility: Fallibility, . layout: TableLayout, . ) -> Result<(), TryReserveError> { 1,320 ( 0.00%) let mut new_table = self.prepare_resize(layout, capacity, fallibility)?; . . // Copy all elements to the new table. . for i in 0..self.buckets() { 35,562 ( 0.04%) if !is_full(*self.ctrl(i)) { . continue; . } . . // This may panic. . let hash = hasher(self, i); . . // We can use a simpler version of insert() here since: . // - there are no DELETED entries. -- line 1444 ---------------------------------------- -- line 1454 ---------------------------------------- . } . . // We successfully copied all elements without panicking. Now replace . // self with the new table. The old table will have its memory freed but . // the items will not be dropped (since they have been moved into the . // new table). . mem::swap(self, &mut new_table); . 3,131 ( 0.00%) Ok(()) . } . . /// Rehashes the contents of the table in place (i.e. without changing the . /// allocation). . /// . /// If `hasher` panics then some the table's contents may be lost. . /// . /// This uses dynamic dispatch to reduce the amount of -- line 1470 ---------------------------------------- -- line 1554 ---------------------------------------- . #[inline] . unsafe fn free_buckets(&mut self, table_layout: TableLayout) { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) { . Some(lco) => lco, . None => hint::unreachable_unchecked(), . }; . self.alloc.deallocate( 1,889 ( 0.00%) NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)), . layout, . ); . } . . /// Marks all table buckets as empty without dropping their contents. . #[inline] . fn clear_no_drop(&mut self) { 201 ( 0.00%) if !self.is_empty_singleton() { . unsafe { . self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes()); . } . } 229 ( 0.00%) self.items = 0; 201 ( 0.00%) self.growth_left = bucket_mask_to_capacity(self.bucket_mask); . } . . #[inline] . unsafe fn erase(&mut self, index: usize) { . debug_assert!(is_full(*self.ctrl(index))); 6,076 ( 0.01%) let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask; . let empty_before = Group::load(self.ctrl(index_before)).match_empty(); . let empty_after = Group::load(self.ctrl(index)).match_empty(); . . // If we are inside a continuous block of Group::WIDTH full or deleted . // cells then a probe window may have seen a full block when trying to . // insert. We therefore need to keep that block non-empty so that . // lookups will continue searching to the next probe window. . // . // Note that in this context `leading_zeros` refers to the bytes at the . // end of a group, while `trailing_zeros` refers to the bytes at the . // beginning of a group. 
24,304 ( 0.02%) let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH { . DELETED . } else { 30,380 ( 0.03%) self.growth_left += 1; . EMPTY . }; . self.set_ctrl(index, ctrl); 24,304 ( 0.02%) self.items -= 1; . } . } . . impl Clone for RawTable { 48 ( 0.00%) fn clone(&self) -> Self { 8 ( 0.00%) if self.table.is_empty_singleton() { . Self::new_in(self.table.alloc.clone()) . } else { . unsafe { . let mut new_table = ManuallyDrop::new( . // Avoid `Result::ok_or_else` because it bloats LLVM IR. . match Self::new_uninitialized( . self.table.alloc.clone(), . self.table.buckets(), -- line 1615 ---------------------------------------- -- line 1624 ---------------------------------------- . // We need to free the memory allocated for the new table. . new_table.free_buckets(); . }); . . // Return the newly created table. . ManuallyDrop::into_inner(new_table) . } . } 54 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . if source.table.is_empty_singleton() { . *self = Self::new_in(self.table.alloc.clone()); . } else { . unsafe { . // First, drop all our elements without clearing the control bytes. . self.drop_elements(); -- line 1640 ---------------------------------------- -- line 1687 ---------------------------------------- . .table . .ctrl(0) . .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes()); . source . .data_start() . .copy_to_nonoverlapping(self.data_start(), self.table.buckets()); . . self.table.items = source.table.items; 8 ( 0.00%) self.table.growth_left = source.table.growth_left; . } . } . . impl RawTable { . /// Common code for clone and clone_from. Assumes `self.buckets() == source.buckets()`. . #[cfg_attr(feature = "inline-more", inline)] . unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) { . // Copy the control bytes unchanged. We do this in a single pass -- line 1703 ---------------------------------------- -- line 1790 ---------------------------------------- . fn default() -> Self { . Self::new_in(Default::default()) . } . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] 24,095 ( 0.02%) fn drop(&mut self) { 20,239 ( 0.02%) if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); . self.free_buckets(); . } . } 25,336 ( 0.03%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); -- line 1813 ---------------------------------------- -- line 1817 ---------------------------------------- . } . } . . impl IntoIterator for RawTable { . type Item = T; . type IntoIter = RawIntoIter; . . #[cfg_attr(feature = "inline-more", inline)] 588 ( 0.00%) fn into_iter(self) -> RawIntoIter { . unsafe { . let iter = self.iter(); . self.into_iter_from(iter) . } 735 ( 0.00%) } . } . . /// Iterator over a sub-range of a table. Unlike `RawIter` this iterator does . /// not track an item count. . pub(crate) struct RawIterRange { . // Mask of full buckets in the current group. Bits are cleared from this . // mask as each element is processed. . current_group: BitMask, -- line 1838 ---------------------------------------- -- line 1934 ---------------------------------------- . . impl Iterator for RawIterRange { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option> { . unsafe { . 
loop { 8,468 ( 0.01%) if let Some(index) = self.current_group.lowest_set_bit() { 784 ( 0.00%) self.current_group = self.current_group.remove_lowest_bit(); 1,191 ( 0.00%) return Some(self.data.next_n(index)); . } . 6,500 ( 0.01%) if self.next_ctrl >= self.end { . return None; . } . . // We might read past self.end up to the next group boundary, . // but this is fine because it only occurs on tables smaller . // than the group size where the trailing control bytes are all . // EMPTY. On larger tables self.end is guaranteed to be aligned . // to the group size (since tables are power-of-two sized). 243 ( 0.00%) self.current_group = Group::load_aligned(self.next_ctrl).match_full(); 74 ( 0.00%) self.data = self.data.next_n(Group::WIDTH); 542 ( 0.00%) self.next_ctrl = self.next_ctrl.add(Group::WIDTH); . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . // We don't have an item count, so just guess based on the range size. . ( -- line 1966 ---------------------------------------- -- line 2102 ---------------------------------------- . } . } else { . // We must have already iterated past the removed item. . } . } . } . . unsafe fn drop_elements(&mut self) { 272 ( 0.00%) if mem::needs_drop::() && self.len() != 0 { . for item in self { . item.drop(); . } . } . } . } . . impl Clone for RawIter { -- line 2118 ---------------------------------------- -- line 2124 ---------------------------------------- . } . } . } . . impl Iterator for RawIter { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] 888 ( 0.00%) fn next(&mut self) -> Option> { 3,250 ( 0.00%) if let Some(b) = self.iter.next() { 10,236 ( 0.01%) self.items -= 1; . Some(b) . } else { . // We don't check against items == 0 here to allow the . // compiler to optimize away the item count entirely if the . // iterator length is never queried. . debug_assert_eq!(self.items, 0); . None . } 1,776 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (self.items, Some(self.items)) . } . } . . impl ExactSizeIterator for RawIter {} -- line 2151 ---------------------------------------- -- line 2177 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] 534 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); . . // Free the table 934 ( 0.00%) if let Some((ptr, layout)) = self.allocation { . self.alloc.deallocate(ptr, layout); . } . } 150 ( 0.00%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); -- line 2203 ---------------------------------------- -- line 2209 ---------------------------------------- . } . } . } . . impl Iterator for RawIntoIter { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] 21 ( 0.00%) fn next(&mut self) -> Option { 6 ( 0.00%) unsafe { Some(self.iter.next()?.read()) } 42 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 2 ( 0.00%) self.iter.size_hint() . } . } . . impl ExactSizeIterator for RawIntoIter {} . impl FusedIterator for RawIntoIter {} . . /// Iterator which consumes elements without freeing the table storage. . 
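--------------------------------------------------------------------------------
Editor's note (added commentary, not part of the cachegrind output): the hottest
hashbrown lines above (`pos: h1(hash) & self.bucket_mask` at 0.83% of Ir, plus
`find_insert_slot`, `find_inner`, and the mirrored `set_ctrl` writes) implement
a SwissTable-style layout: one control byte per bucket, with `h1(hash)` picking
the starting position and a 7-bit `h2(hash)` tag compared against a whole group
of control bytes per probe step. A scalar sketch of that lookup, assuming a
group width of 1 and probing one bucket at a time; the constants and helper
names are illustrative rather than hashbrown's internals.

// Scalar analogue of the probing the annotated code performs (the real table
// is SIMD: one Group of control bytes is matched per step, and the trailing
// bytes are mirrored so group loads can wrap around).
const EMPTY: u8 = 0xFF; // slot never occupied; ends a probe sequence

fn h1(hash: u64, bucket_mask: usize) -> usize {
    (hash as usize) & bucket_mask // starting bucket
}

fn h2(hash: u64) -> u8 {
    ((hash >> 57) & 0x7f) as u8 // 7-bit tag; high bit clear means "full"
}

// Walk buckets from h1(hash); compare the cheap 7-bit tag first and only run
// the caller's equality check on tag matches. An EMPTY control byte ends the
// search (the load factor guarantees one exists). Tombstones (DELETED, 0x80)
// simply fail the tag comparison and are probed past.
fn find(ctrl: &[u8], hash: u64, mut eq: impl FnMut(usize) -> bool) -> Option<usize> {
    let bucket_mask = ctrl.len() - 1; // number of buckets is a power of two
    let mut pos = h1(hash, bucket_mask);
    loop {
        let c = ctrl[pos];
        if c == EMPTY {
            return None;
        }
        if c == h2(hash) && eq(pos) {
            return Some(pos);
        }
        pos = (pos + 1) & bucket_mask; // linear step; hashbrown strides group-wise
    }
}

fn main() {
    // Four buckets, identity "hash", key 17 stored in its home slot.
    let mut ctrl = [EMPTY; 4];
    let mut keys = [0u64; 4];
    let hash = 17u64;
    let slot = h1(hash, 3);
    ctrl[slot] = h2(hash);
    keys[slot] = 17;
    assert_eq!(find(&ctrl, hash, |i| keys[i] == 17), Some(slot));
    assert_eq!(find(&ctrl, 21, |i| keys[i] == 21), None);
}
--------------------------------------------------------------------------------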
pub struct RawDrain<'a, T, A: Allocator + Clone = Global> { -- line 2231 ---------------------------------------- -- line 2259 ---------------------------------------- . where . T: Sync, . A: Sync, . { . } . . impl Drop for RawDrain<'_, T, A> { . #[cfg_attr(feature = "inline-more", inline)] 8 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements. Note that this may panic. . self.iter.drop_elements(); . . // Reset the contents of the table now that all elements have been . // dropped. . self.table.clear_no_drop(); . . // Move the now empty table back to its original location. 1 ( 0.00%) self.orig_table . .as_ptr() . .copy_from_nonoverlapping(&*self.table, 1); . } 8 ( 0.00%) } . } . . impl Iterator for RawDrain<'_, T, A> { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option { . unsafe { -- line 2289 ---------------------------------------- 471,604 ( 0.47%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs -------------------------------------------------------------------------------- Ir -- line 5 ---------------------------------------- . #[stable(feature = "alloc_system_type", since = "1.28.0")] . unsafe impl GlobalAlloc for System { . #[inline] . unsafe fn alloc(&self, layout: Layout) -> *mut u8 { . // jemalloc provides alignment less than MIN_ALIGN for small allocations. . // So only rely on MIN_ALIGN if size >= align. . // Also see and . // . 111,236 ( 0.11%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 83,427 ( 0.08%) libc::malloc(layout.size()) as *mut u8 . } else { . #[cfg(target_os = "macos")] . { . if layout.align() > (1 << 31) { . return ptr::null_mut(); . } . } . aligned_malloc(&layout) . } . } . . #[inline] . unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { . // See the comment above in `alloc` for why this check looks the way it does. 1,648 ( 0.00%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 2,472 ( 0.00%) libc::calloc(layout.size(), 1) as *mut u8 . } else { . let ptr = self.alloc(layout); . if !ptr.is_null() { . ptr::write_bytes(ptr, 0, layout.size()); . } . ptr . } . } . . #[inline] . unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { 28,210 ( 0.03%) libc::free(ptr as *mut libc::c_void) . } . . #[inline] . unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { 9,004 ( 0.01%) if layout.align() <= MIN_ALIGN && layout.align() <= new_size { 18,008 ( 0.02%) libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8 . } else { . realloc_fallback(self, ptr, layout, new_size) . } . } . } . . cfg_if::cfg_if! { . if #[cfg(any( -- line 56 ---------------------------------------- -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_metadata/src/rmeta/decoder.rs -------------------------------------------------------------------------------- Ir -- line 176 ---------------------------------------- . } . fn sess(self) -> Option<&'tcx Session> { . None . } . fn tcx(self) -> Option> { . None . } . 765 ( 0.00%) fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> { 255 ( 0.00%) let tcx = self.tcx(); 86,410 ( 0.09%) DecodeContext { 8,998 ( 0.01%) opaque: opaque::Decoder::new(self.blob(), pos), 255 ( 0.00%) cdata: self.cdata(), . 
blob: self.blob(), 1,498 ( 0.00%) sess: self.sess().or(tcx.map(|tcx| tcx.sess)), . tcx, . last_source_file_index: 0, . lazy_state: LazyState::NoNode, . alloc_decoding_session: self . .cdata() 8,153 ( 0.01%) .map(|cdata| cdata.cdata.alloc_decoding_state.new_decoding_session()), . } 765 ( 0.00%) } . } . . impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob { . #[inline] . fn blob(self) -> &'a MetadataBlob { . self . } . } -- line 206 ---------------------------------------- -- line 255 ---------------------------------------- . } . #[inline] . fn tcx(self) -> Option> { . Some(self.1) . } . } . . impl<'a, 'tcx, T: Decodable>> Lazy { 6,176 ( 0.01%) fn decode>(self, metadata: M) -> T { 3,282 ( 0.00%) let mut dcx = metadata.decoder(self.position.get()); 26,240 ( 0.03%) dcx.lazy_state = LazyState::NodeStart(self.position); 11,097 ( 0.01%) T::decode(&mut dcx) 7,944 ( 0.01%) } . } . . impl<'a: 'x, 'tcx: 'x, 'x, T: Decodable>> Lazy<[T]> { . fn decode>( . self, . metadata: M, . ) -> impl ExactSizeIterator + Captures<'a> + Captures<'tcx> + 'x { 21 ( 0.00%) let mut dcx = metadata.decoder(self.position.get()); 6 ( 0.00%) dcx.lazy_state = LazyState::NodeStart(self.position); 7,181 ( 0.01%) (0..self.meta).map(move |_| T::decode(&mut dcx)) . } . } . . impl<'a, 'tcx> DecodeContext<'a, 'tcx> { . #[inline] . fn tcx(&self) -> TyCtxt<'tcx> { . debug_assert!(self.tcx.is_some(), "missing TyCtxt in DecodeContext"); . self.tcx.unwrap() -- line 285 ---------------------------------------- -- line 292 ---------------------------------------- . . #[inline] . pub fn cdata(&self) -> CrateMetadataRef<'a> { . debug_assert!(self.cdata.is_some(), "missing CrateMetadata in DecodeContext"); . self.cdata.unwrap() . } . . fn map_encoded_cnum_to_current(&self, cnum: CrateNum) -> CrateNum { 26,880 ( 0.03%) if cnum == LOCAL_CRATE { self.cdata().cnum } else { self.cdata().cnum_map[cnum] } . } . 22,085 ( 0.02%) fn read_lazy_with_meta(&mut self, meta: T::Meta) -> Lazy { . let distance = self.read_usize(); 20,603 ( 0.02%) let position = match self.lazy_state { . LazyState::NoNode => bug!("read_lazy_with_meta: outside of a metadata node"), 2,136 ( 0.00%) LazyState::NodeStart(start) => { . let start = start.get(); 4,272 ( 0.00%) assert!(distance <= start); . start - distance . } 8,517 ( 0.01%) LazyState::Previous(last_pos) => last_pos.get() + distance, . }; 9,950 ( 0.01%) self.lazy_state = LazyState::Previous(NonZeroUsize::new(position).unwrap()); . Lazy::from_position_and_meta(NonZeroUsize::new(position).unwrap(), meta) 22,085 ( 0.02%) } . . #[inline] . pub fn read_raw_bytes(&mut self, len: usize) -> &'a [u8] { . self.opaque.read_raw_bytes(len) . } . } . . impl<'a, 'tcx> TyDecoder<'tcx> for DecodeContext<'a, 'tcx> { -- line 324 ---------------------------------------- -- line 326 ---------------------------------------- . . #[inline] . fn tcx(&self) -> TyCtxt<'tcx> { . self.tcx.expect("missing TyCtxt in DecodeContext") . } . . #[inline] . fn peek_byte(&self) -> u8 { 11,160 ( 0.01%) self.opaque.data[self.opaque.position()] . } . . #[inline] . fn position(&self) -> usize { . self.opaque.position() . } . 9,786 ( 0.01%) fn cached_ty_for_shorthand(&mut self, shorthand: usize, or_insert_with: F) -> Ty<'tcx> . where . F: FnOnce(&mut Self) -> Ty<'tcx>, . { 1,398 ( 0.00%) let tcx = self.tcx(); . 1,398 ( 0.00%) let key = ty::CReaderCacheKey { cnum: Some(self.cdata().cnum), pos: shorthand }; . 3,056 ( 0.00%) if let Some(&ty) = tcx.ty_rcache.borrow().get(&key) { . return ty; . } . 634 ( 0.00%) let ty = or_insert_with(self); . 
tcx.ty_rcache.borrow_mut().insert(key, ty); . ty 12,582 ( 0.01%) } . . fn with_position(&mut self, pos: usize, f: F) -> R . where . F: FnOnce(&mut Self) -> R, . { . let new_opaque = opaque::Decoder::new(self.opaque.data, pos); . let old_opaque = mem::replace(&mut self.opaque, new_opaque); . let old_state = mem::replace(&mut self.lazy_state, LazyState::NoNode); . let r = f(self); 3,575 ( 0.00%) self.opaque = old_opaque; 715 ( 0.00%) self.lazy_state = old_state; . r . } . . fn decode_alloc_id(&mut self) -> rustc_middle::mir::interpret::AllocId { . if let Some(alloc_decoding_session) = self.alloc_decoding_session { . alloc_decoding_session.decode_alloc_id(self) . } else { . bug!("Attempting to decode interpret::AllocId without CrateMetadata") . } . } . } . . impl<'a, 'tcx> Decodable> for CrateNum { . fn decode(d: &mut DecodeContext<'a, 'tcx>) -> CrateNum { 13,195 ( 0.01%) let cnum = CrateNum::from_u32(d.read_u32()); . d.map_encoded_cnum_to_current(cnum) . } . } . . impl<'a, 'tcx> Decodable> for DefIndex { 132 ( 0.00%) fn decode(d: &mut DecodeContext<'a, 'tcx>) -> DefIndex { 34,359 ( 0.03%) DefIndex::from_u32(d.read_u32()) 264 ( 0.00%) } . } . . impl<'a, 'tcx> Decodable> for ExpnIndex { . fn decode(d: &mut DecodeContext<'a, 'tcx>) -> ExpnIndex { . ExpnIndex::from_u32(d.read_u32()) . } . } . . impl<'a, 'tcx> Decodable> for SyntaxContext { 18,648 ( 0.02%) fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> SyntaxContext { 4,662 ( 0.00%) let cdata = decoder.cdata(); 4,662 ( 0.00%) let sess = decoder.sess.unwrap(); 9,324 ( 0.01%) let cname = cdata.root.name; . rustc_span::hygiene::decode_syntax_context(decoder, &cdata.hygiene_context, |_, id| { . debug!("SpecializedDecoder: decoding {}", id); 16 ( 0.00%) cdata . .root . .syntax_contexts . .get(cdata, id) . .unwrap_or_else(|| panic!("Missing SyntaxContext {:?} for crate {:?}", id, cname)) . .decode((cdata, sess)) . }) 23,310 ( 0.02%) } . } . . impl<'a, 'tcx> Decodable> for ExpnId { 1,736 ( 0.00%) fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> ExpnId { 248 ( 0.00%) let local_cdata = decoder.cdata(); 248 ( 0.00%) let sess = decoder.sess.unwrap(); . . let cnum = CrateNum::decode(decoder); . let index = u32::decode(decoder); . . let expn_id = rustc_span::hygiene::decode_expn_id(cnum, index, |expn_id| { . let ExpnId { krate: cnum, local_id: index } = expn_id; . // Lookup local `ExpnData`s in our own crate data. Foreign `ExpnData`s . // are stored in the owning crate, to avoid duplication. . debug_assert_ne!(cnum, LOCAL_CRATE); 10 ( 0.00%) let crate_data = if cnum == local_cdata.cnum { . local_cdata . } else { . local_cdata.cstore.get_crate_data(cnum) . }; 50 ( 0.00%) let expn_data = crate_data . .root . .expn_data . .get(crate_data, index) . .unwrap() . .decode((crate_data, sess)); 50 ( 0.00%) let expn_hash = crate_data . .root . .expn_hashes . .get(crate_data, index) . .unwrap() . .decode((crate_data, sess)); 100 ( 0.00%) (expn_data, expn_hash) . }); . expn_id 1,984 ( 0.00%) } . } . . impl<'a, 'tcx> Decodable> for Span { 37,200 ( 0.04%) fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Span { 4,650 ( 0.00%) let ctxt = SyntaxContext::decode(decoder); . let tag = u8::decode(decoder); . 9,300 ( 0.01%) if tag == TAG_PARTIAL_SPAN { . return DUMMY_SP.with_ctxt(ctxt); . } . . debug_assert!(tag == TAG_VALID_SPAN_LOCAL || tag == TAG_VALID_SPAN_FOREIGN); . . let lo = BytePos::decode(decoder); . let len = BytePos::decode(decoder); . let hi = lo + len; . 13,950 ( 0.01%) let Some(sess) = decoder.sess else { . bug!("Cannot decode Span without Session.") . 
}; . . // There are two possibilities here: . // 1. This is a 'local span', which is located inside a `SourceFile` . // that came from this crate. In this case, we use the source map data . // encoded in this crate. This branch should be taken nearly all of the time. . // 2. This is a 'foreign span', which is located inside a `SourceFile` -- line 476 ---------------------------------------- -- line 492 ---------------------------------------- . // a transitive dependency. . // . // When we encode a foreign span, we adjust its 'lo' and 'high' values . // to be based on the *foreign* crate (e.g. crate C), not the crate . // we are writing metadata for (e.g. crate B). This allows us to . // treat the 'local' and 'foreign' cases almost identically during deserialization: . // we can call `imported_source_files` for the proper crate, and binary search . // through the returned slice using our span. 11,045 ( 0.01%) let imported_source_files = if tag == TAG_VALID_SPAN_LOCAL { . decoder.cdata().imported_source_files(sess) . } else { . // When we encode a proc-macro crate, all `Span`s should be encoded . // with `TAG_VALID_SPAN_LOCAL` 3 ( 0.00%) if decoder.cdata().root.is_proc_macro_crate() { . // Decode `CrateNum` as u32 - using `CrateNum::decode` will ICE . // since we don't have `cnum_map` populated. . let cnum = u32::decode(decoder); . panic!( . "Decoding of crate {:?} tried to access proc-macro dep {:?}", . decoder.cdata().root.name, . cnum . ); -- line 513 ---------------------------------------- -- line 518 ---------------------------------------- . "SpecializedDecoder::specialized_decode: loading source files from cnum {:?}", . cnum . ); . . // Decoding 'foreign' spans should be rare enough that it's . // not worth it to maintain a per-CrateNum cache for `last_source_file_index`. . // We just set it to 0, to ensure that we don't try to access something out . // of bounds for our initial 'guess' 6 ( 0.00%) decoder.last_source_file_index = 0; . 3 ( 0.00%) let foreign_data = decoder.cdata().cstore.get_crate_data(cnum); . foreign_data.imported_source_files(sess) . }; . . let source_file = { . // Optimize for the case that most spans within a translated item . // originate from the same source_file. 13,950 ( 0.01%) let last_source_file = &imported_source_files[decoder.last_source_file_index]; . 18,514 ( 0.02%) if lo >= last_source_file.original_start_pos && lo <= last_source_file.original_end_pos . { . last_source_file . } else { . let index = imported_source_files . .binary_search_by_key(&lo, |source_file| source_file.original_start_pos) 3,488 ( 0.00%) .unwrap_or_else(|index| index - 1); . . // Don't try to cache the index for foreign spans, . // as this would require a map from CrateNums to indices 1,745 ( 0.00%) if tag == TAG_VALID_SPAN_LOCAL { 1,743 ( 0.00%) decoder.last_source_file_index = index; . } 3,490 ( 0.00%) &imported_source_files[index] . } . }; . . // Make sure our binary search above is correct. . debug_assert!( . lo >= source_file.original_start_pos && lo <= source_file.original_end_pos, . "Bad binary search: lo={:?} source_file.original_start_pos={:?} source_file.original_end_pos={:?}", . lo, -- line 558 ---------------------------------------- -- line 565 ---------------------------------------- . hi >= source_file.original_start_pos && hi <= source_file.original_end_pos, . "Bad binary search: hi={:?} source_file.original_start_pos={:?} source_file.original_end_pos={:?}", . hi, . source_file.original_start_pos, . source_file.original_end_pos . ); . . 
let lo = 18,600 ( 0.02%) (lo + source_file.translated_source_file.start_pos) - source_file.original_start_pos; . let hi = . (hi + source_file.translated_source_file.start_pos) - source_file.original_start_pos; . . // Do not try to decode parent for foreign spans. . Span::new(lo, hi, ctxt, None) 41,850 ( 0.04%) } . } . . impl<'a, 'tcx> Decodable> for &'tcx [thir::abstract_const::Node<'tcx>] { . fn decode(d: &mut DecodeContext<'a, 'tcx>) -> Self { . ty::codec::RefDecodable::decode(d) . } . } . -- line 587 ---------------------------------------- -- line 590 ---------------------------------------- . ty::codec::RefDecodable::decode(d) . } . } . . impl<'a, 'tcx, T: Decodable>> Decodable> . for Lazy . { . fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Self { 2,928 ( 0.00%) decoder.read_lazy_with_meta(()) . } . } . . impl<'a, 'tcx, T: Decodable>> Decodable> . for Lazy<[T]> . { . fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Self { . let len = decoder.read_usize(); 12,432 ( 0.01%) if len == 0 { Lazy::empty() } else { decoder.read_lazy_with_meta(len) } . } . } . . impl<'a, 'tcx, I: Idx, T: Decodable>> Decodable> . for Lazy> . where . Option: FixedSizeEncoding, . { . fn decode(decoder: &mut DecodeContext<'a, 'tcx>) -> Self { . let len = decoder.read_usize(); 9,802 ( 0.01%) decoder.read_lazy_with_meta(len) . } . } . . implement_ty_decoder!(DecodeContext<'a, 'tcx>); . . impl<'tcx> MetadataBlob { . crate fn new(metadata_ref: MetadataRef) -> MetadataBlob { . MetadataBlob(Lrc::new(metadata_ref)) -- line 626 ---------------------------------------- -- line 630 ---------------------------------------- . self.blob().starts_with(METADATA_HEADER) . } . . crate fn get_rustc_version(&self) -> String { . Lazy::::from_position(NonZeroUsize::new(METADATA_HEADER.len() + 4).unwrap()) . .decode(self) . } . 174 ( 0.00%) crate fn get_root(&self) -> CrateRoot<'tcx> { 58 ( 0.00%) let slice = &self.blob()[..]; . let offset = METADATA_HEADER.len(); 290 ( 0.00%) let pos = (((slice[offset + 0] as u32) << 24) 116 ( 0.00%) | ((slice[offset + 1] as u32) << 16) 232 ( 0.00%) | ((slice[offset + 2] as u32) << 8) 174 ( 0.00%) | ((slice[offset + 3] as u32) << 0)) as usize; . Lazy::>::from_position(NonZeroUsize::new(pos).unwrap()).decode(self) 290 ( 0.00%) } . . crate fn list_crate_metadata(&self, out: &mut dyn io::Write) -> io::Result<()> { . let root = self.get_root(); . writeln!(out, "Crate info:")?; . writeln!(out, "name {}{}", root.name, root.extra_filename)?; . writeln!(out, "hash {} stable_crate_id {:?}", root.hash, root.stable_crate_id)?; . writeln!(out, "proc_macro {:?}", root.proc_macro_data.is_some())?; . writeln!(out, "=External Dependencies=")?; -- line 654 ---------------------------------------- -- line 666 ---------------------------------------- . } . write!(out, "\n")?; . Ok(()) . } . } . . impl CrateRoot<'_> { . crate fn is_proc_macro_crate(&self) -> bool { 38 ( 0.00%) self.proc_macro_data.is_some() . } . . crate fn name(&self) -> Symbol { 2 ( 0.00%) self.name . } . . crate fn hash(&self) -> Svh { 19 ( 0.00%) self.hash . } . . crate fn stable_crate_id(&self) -> StableCrateId { 38 ( 0.00%) self.stable_crate_id . } . . crate fn triple(&self) -> &TargetTriple { . &self.triple . } . . crate fn decode_crate_deps<'a>( . &self, . metadata: &'a MetadataBlob, . ) -> impl ExactSizeIterator + Captures<'a> { 38 ( 0.00%) self.crate_deps.decode(metadata) . } . } . . impl<'a, 'tcx> CrateMetadataRef<'a> { . fn raw_proc_macro(self, id: DefIndex) -> &'a ProcMacro { . 
// DefIndex's in root.proc_macro_data have a one-to-one correspondence . // with items in 'raw_proc_macros'. . let pos = self -- line 705 ---------------------------------------- -- line 709 ---------------------------------------- . .unwrap() . .macros . .decode(self) . .position(|i| i == id) . .unwrap(); . &self.raw_proc_macros.unwrap()[pos] . } . 13,035 ( 0.01%) fn opt_item_ident(self, item_index: DefIndex, sess: &Session) -> Option { 9,800 ( 0.01%) let name = self.def_key(item_index).disambiguated_data.data.get_opt_name()?; 6,916 ( 0.01%) let span = match self.root.tables.ident_span.get(self, item_index) { . Some(lazy_span) => lazy_span.decode((self, sess)), . None => { . // FIXME: this weird case of a name with no span is specific to `extern crate` . // items, which are supposed to be treated like `use` items and only be encoded . // to metadata as `Export`s, return `None` because that's what all the callers . // expect in this case. 45 ( 0.00%) assert_eq!(self.def_kind(item_index), DefKind::ExternCrate); . return None; . } . }; . Some(Ident::new(name, span)) 16,718 ( 0.02%) } . 126 ( 0.00%) fn item_ident(self, item_index: DefIndex, sess: &Session) -> Ident { 3,589 ( 0.00%) self.opt_item_ident(item_index, sess).expect("no encoded ident for item") 252 ( 0.00%) } . . fn maybe_kind(self, item_id: DefIndex) -> Option { 3,072 ( 0.00%) self.root.tables.kind.get(self, item_id).map(|k| k.decode(self)) . } . 12,288 ( 0.01%) fn kind(self, item_id: DefIndex) -> EntryKind { . self.maybe_kind(item_id).unwrap_or_else(|| { . bug!( . "CrateMetadata::kind({:?}): id not found, in crate {:?} with number {}", . item_id, . self.root.name, . self.cnum, . ) . }) 9,216 ( 0.01%) } . 3,108 ( 0.00%) fn def_kind(self, item_id: DefIndex) -> DefKind { 2,366 ( 0.00%) self.root.tables.def_kind.get(self, item_id).map(|k| k.decode(self)).unwrap_or_else(|| { . bug!( . "CrateMetadata::def_kind({:?}): id not found, in crate {:?} with number {}", . item_id, . self.root.name, . self.cnum, . ) . }) 1,776 ( 0.00%) } . 1,134 ( 0.00%) fn get_span(self, index: DefIndex, sess: &Session) -> Span { 2,082 ( 0.00%) self.root . .tables . .span . .get(self, index) . .unwrap_or_else(|| panic!("Missing span for {:?}", index)) . .decode((self, sess)) 630 ( 0.00%) } . . fn load_proc_macro(self, id: DefIndex, sess: &Session) -> SyntaxExtension { . let (name, kind, helper_attrs) = match *self.raw_proc_macro(id) { . ProcMacro::CustomDerive { trait_name, attributes, client } => { . let helper_attrs = . attributes.iter().cloned().map(Symbol::intern).collect::>(); . ( . trait_name, -- line 778 ---------------------------------------- -- line 795 ---------------------------------------- . self.get_span(id, sess), . helper_attrs, . self.root.edition, . Symbol::intern(name), . &attrs, . ) . } . 156 ( 0.00%) fn get_trait_def(self, item_id: DefIndex, sess: &Session) -> ty::TraitDef { 48 ( 0.00%) match self.kind(item_id) { 12 ( 0.00%) EntryKind::Trait(data) => { 72 ( 0.00%) let data = data.decode((self, sess)); 192 ( 0.00%) ty::TraitDef::new( . self.local_def_id(item_id), 24 ( 0.00%) data.unsafety, 24 ( 0.00%) data.paren_sugar, 24 ( 0.00%) data.has_auto_impl, 24 ( 0.00%) data.is_marker, 12 ( 0.00%) data.skip_array_during_method_dispatch, 12 ( 0.00%) data.specialization_kind, 48 ( 0.00%) self.def_path_hash(item_id), 12 ( 0.00%) data.must_implement_one_of, . ) . } . EntryKind::TraitAlias => ty::TraitDef::new( . self.local_def_id(item_id), . hir::Unsafety::Normal, . false, . false, . false, . false, . ty::trait_def::TraitSpecializationKind::None, . 
self.def_path_hash(item_id), . None, . ), . _ => bug!("def-index does not refer to trait or trait alias"), . } 108 ( 0.00%) } . 180 ( 0.00%) fn get_variant( . self, . kind: &EntryKind, . index: DefIndex, . parent_did: DefId, . sess: &Session, . ) -> ty::VariantDef { 80 ( 0.00%) let data = match kind { . EntryKind::Variant(data) | EntryKind::Struct(data, _) | EntryKind::Union(data, _) => { 100 ( 0.00%) data.decode(self) . } . _ => bug!(), . }; . 72 ( 0.00%) let adt_kind = match kind { . EntryKind::Variant(_) => ty::AdtKind::Enum, . EntryKind::Struct(..) => ty::AdtKind::Struct, . EntryKind::Union(..) => ty::AdtKind::Union, . _ => bug!(), . }; . . let variant_did = . if adt_kind == ty::AdtKind::Enum { Some(self.local_def_id(index)) } else { None }; 40 ( 0.00%) let ctor_did = data.ctor.map(|index| self.local_def_id(index)); . 440 ( 0.00%) ty::VariantDef::new( . self.item_ident(index, sess).name, . variant_did, . ctor_did, 80 ( 0.00%) data.discr, 120 ( 0.00%) self.root . .tables . .children . .get(self, index) . .unwrap_or_else(Lazy::empty) . .decode(self) . .map(|index| ty::FieldDef { 31 ( 0.00%) did: self.local_def_id(index), 62 ( 0.00%) name: self.item_ident(index, sess).name, 62 ( 0.00%) vis: self.get_visibility(index), . }) . .collect(), . data.ctor_kind, . adt_kind, . parent_did, . false, 20 ( 0.00%) data.is_non_exhaustive, . ) 180 ( 0.00%) } . 270 ( 0.00%) fn get_adt_def(self, item_id: DefIndex, tcx: TyCtxt<'tcx>) -> &'tcx ty::AdtDef { 72 ( 0.00%) let kind = self.kind(item_id); 36 ( 0.00%) let did = self.local_def_id(item_id); . 86 ( 0.00%) let (adt_kind, repr) = match kind { . EntryKind::Enum(repr) => (ty::AdtKind::Enum, repr), . EntryKind::Struct(_, repr) => (ty::AdtKind::Struct, repr), . EntryKind::Union(_, repr) => (ty::AdtKind::Union, repr), . _ => bug!("get_adt_def called on a non-ADT {:?}", did), . }; . . let variants = if let ty::AdtKind::Enum = adt_kind { 10 ( 0.00%) self.root . .tables . .children . .get(self, item_id) . .unwrap_or_else(Lazy::empty) . .decode(self) 68 ( 0.00%) .map(|index| self.get_variant(&self.kind(index), index, did, tcx.sess)) . .collect() . } else { 128 ( 0.00%) std::iter::once(self.get_variant(&kind, item_id, did, tcx.sess)).collect() . }; . 262 ( 0.00%) tcx.alloc_adt_def(did, adt_kind, variants, repr) 126 ( 0.00%) } . . fn get_explicit_predicates( . self, . item_id: DefIndex, . tcx: TyCtxt<'tcx>, . ) -> ty::GenericPredicates<'tcx> { 510 ( 0.00%) self.root.tables.explicit_predicates.get(self, item_id).unwrap().decode((self, tcx)) . } . . fn get_inferred_outlives( . self, . item_id: DefIndex, . tcx: TyCtxt<'tcx>, . ) -> &'tcx [(ty::Predicate<'tcx>, Span)] { 510 ( 0.00%) self.root . .tables . .inferred_outlives . .get(self, item_id) 8 ( 0.00%) .map(|predicates| tcx.arena.alloc_from_iter(predicates.decode((self, tcx)))) . .unwrap_or_default() . } . . fn get_super_predicates( . self, . item_id: DefIndex, . tcx: TyCtxt<'tcx>, . ) -> ty::GenericPredicates<'tcx> { 70 ( 0.00%) self.root.tables.super_predicates.get(self, item_id).unwrap().decode((self, tcx)) . } . . fn get_explicit_item_bounds( . self, . item_id: DefIndex, . tcx: TyCtxt<'tcx>, . ) -> &'tcx [(ty::Predicate<'tcx>, Span)] { 5 ( 0.00%) self.root . .tables . .explicit_item_bounds . .get(self, item_id) 4 ( 0.00%) .map(|bounds| tcx.arena.alloc_from_iter(bounds.decode((self, tcx)))) . .unwrap_or_default() . } . . fn get_generics(self, item_id: DefIndex, sess: &Session) -> ty::Generics { 601 ( 0.00%) self.root.tables.generics.get(self, item_id).unwrap().decode((self, sess)) . } . . 
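--------------------------------------------------------------------------------
Editor's note (added commentary, not part of the cachegrind output): nearly
every accessor in this `CrateMetadataRef` impl has the same shape: fetch a
`Lazy` value out of a per-item table keyed by `DefIndex`, then decode it on
demand with a `DecodeContext` positioned inside the metadata blob, as the
`Lazy::decode` annotation earlier in this file shows. A minimal sketch of the
"store a position, decode when asked" idea; `Lazy`, `Blob`, and `Decode` below
are simplified stand-ins, not the rustc_metadata types.

use std::convert::TryInto;
use std::marker::PhantomData;

// Stand-in for rustc's Lazy<T>: an absolute byte position in the metadata
// blob plus the type to decode there; nothing is read until decode() runs.
struct Lazy<T> {
    position: usize,
    _marker: PhantomData<fn() -> T>,
}

struct Blob(Vec<u8>);

trait Decode: Sized {
    fn decode(bytes: &[u8]) -> Self;
}

impl Decode for u32 {
    fn decode(bytes: &[u8]) -> Self {
        u32::from_le_bytes(bytes[..4].try_into().unwrap())
    }
}

impl<T: Decode> Lazy<T> {
    fn decode(&self, blob: &Blob) -> T {
        // The real DecodeContext also carries a LazyState so nested Lazy
        // values can be encoded as distances relative to this position.
        T::decode(&blob.0[self.position..])
    }
}

fn main() {
    let blob = Blob(vec![0, 0, 0, 0, 42, 0, 0, 0]);
    let field: Lazy<u32> = Lazy { position: 4, _marker: PhantomData };
    assert_eq!(field.decode(&blob), 42);
}
--------------------------------------------------------------------------------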
fn get_type(self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 525 ( 0.00%) self.root . .tables . .ty . .get(self, id) . .unwrap_or_else(|| panic!("Not a type: {:?}", id)) . .decode((self, tcx)) . } . . fn get_stability(self, id: DefIndex) -> Option { 175 ( 0.00%) self.root.tables.stability.get(self, id).map(|stab| stab.decode(self)) . } . . fn get_const_stability(self, id: DefIndex) -> Option { . self.root.tables.const_stability.get(self, id).map(|stab| stab.decode(self)) . } . . fn get_deprecation(self, id: DefIndex) -> Option { 165 ( 0.00%) self.root.tables.deprecation.get(self, id).map(|depr| depr.decode(self)) . } . 36 ( 0.00%) fn get_visibility(self, id: DefIndex) -> ty::Visibility { 3,607 ( 0.00%) self.root.tables.visibility.get(self, id).unwrap().decode(self) 24 ( 0.00%) } . . fn get_impl_data(self, id: DefIndex) -> ImplData { 3,036 ( 0.00%) match self.kind(id) { 2,530 ( 0.00%) EntryKind::Impl(data) => data.decode(self), . _ => bug!(), . } . } . . fn get_parent_impl(self, id: DefIndex) -> Option { 1,401 ( 0.00%) self.get_impl_data(id).parent_impl . } . . fn get_impl_polarity(self, id: DefIndex) -> ty::ImplPolarity { 32 ( 0.00%) self.get_impl_data(id).polarity . } . . fn get_impl_defaultness(self, id: DefIndex) -> hir::Defaultness { 5 ( 0.00%) self.get_impl_data(id).defaultness . } . . fn get_impl_constness(self, id: DefIndex) -> hir::Constness { 2 ( 0.00%) self.get_impl_data(id).constness . } . . fn get_trait_item_def_id(self, id: DefIndex) -> Option { 3,095 ( 0.00%) self.root.tables.trait_item_def_id.get(self, id).map(|d| d.decode(self)) . } . . fn get_coerce_unsized_info(self, id: DefIndex) -> Option { . self.get_impl_data(id).coerce_unsized_info . } . . fn get_impl_trait(self, id: DefIndex, tcx: TyCtxt<'tcx>) -> Option> { 5,963 ( 0.01%) self.root.tables.impl_trait_ref.get(self, id).map(|tr| tr.decode((self, tcx))) . } . . fn get_expn_that_defined(self, id: DefIndex, sess: &Session) -> ExpnId { 1,170 ( 0.00%) self.root.tables.expn_that_defined.get(self, id).unwrap().decode((self, sess)) . } . . fn get_const_param_default( . self, . tcx: TyCtxt<'tcx>, . id: DefIndex, . ) -> rustc_middle::ty::Const<'tcx> { . self.root.tables.const_defaults.get(self, id).unwrap().decode((self, tcx)) -- line 1026 ---------------------------------------- -- line 1028 ---------------------------------------- . . /// Iterates over all the stability attributes in the given crate. . fn get_lib_features(self, tcx: TyCtxt<'tcx>) -> &'tcx [(Symbol, Option)] { . tcx.arena.alloc_from_iter(self.root.lib_features.decode(self)) . } . . /// Iterates over the language items in the given crate. . fn get_lang_items(self, tcx: TyCtxt<'tcx>) -> &'tcx [(DefId, usize)] { 57 ( 0.00%) tcx.arena.alloc_from_iter( 38 ( 0.00%) self.root . .lang_items . .decode(self) 564 ( 0.00%) .map(|(def_index, index)| (self.local_def_id(def_index), index)), . ) . } . . /// Iterates over the diagnostic items in the given crate. . fn get_diagnostic_items(self) -> DiagnosticItems { . let mut id_to_name = FxHashMap::default(); 4 ( 0.00%) let name_to_id = self . .root . .diagnostic_items . .decode(self) . .map(|(name, def_index)| { 106 ( 0.00%) let id = self.local_def_id(def_index); 106 ( 0.00%) id_to_name.insert(id, name); . (name, id) . }) . .collect(); 18 ( 0.00%) DiagnosticItems { id_to_name, name_to_id } . } . . /// Iterates over all named children of the given module, . /// including both proper items and reexports. . /// Module here is understood in name resolution sense - it can be a `mod` item, . 
/// or a crate root, or an enum, or a trait. . fn for_each_module_child( . self, . id: DefIndex, . mut callback: impl FnMut(ModChild), . sess: &Session, . ) { 124 ( 0.00%) if let Some(data) = &self.root.proc_macro_data { . // If we are loading as a proc macro, we want to return . // the view of this crate as a proc macro crate. . if id == CRATE_DEF_INDEX { . for def_index in data.macros.decode(self) { . let raw_macro = self.raw_proc_macro(def_index); . let res = Res::Def( . DefKind::Macro(macro_kind(raw_macro)), . self.local_def_id(def_index), -- line 1078 ---------------------------------------- -- line 1085 ---------------------------------------- . span: ident.span, . }); . } . } . return; . } . . // Iterate over all children. 248 ( 0.00%) if let Some(children) = self.root.tables.children.get(self, id) { 496 ( 0.00%) for child_index in children.decode((self, sess)) { 2,748 ( 0.00%) if let Some(ident) = self.opt_item_ident(child_index, sess) { 672 ( 0.00%) let kind = self.def_kind(child_index); 448 ( 0.00%) if matches!(kind, DefKind::Macro(..)) { . // FIXME: Macros are currently encoded twice, once as items and once as . // reexports. We ignore the items here and only use the reexports. . continue; . } . let def_id = self.local_def_id(child_index); . let res = Res::Def(kind, def_id); . let vis = self.get_visibility(child_index); . let span = self.get_span(child_index, sess); . . callback(ModChild { ident, res, vis, span }); . . // For non-re-export structs and variants add their constructors to children. . // Re-export lists automatically contain constructors when necessary. 896 ( 0.00%) match kind { . DefKind::Struct => { . if let Some((ctor_def_id, ctor_kind)) = . self.get_ctor_def_id_and_kind(child_index) . { . let ctor_res = . Res::Def(DefKind::Ctor(CtorOf::Struct, ctor_kind), ctor_def_id); . let vis = self.get_visibility(ctor_def_id.index); . callback(ModChild { ident, res: ctor_res, vis, span }); -- line 1119 ---------------------------------------- -- line 1144 ---------------------------------------- . callback(ModChild { ident, res: ctor_res, vis, span }); . } . _ => {} . } . } . } . } . 331 ( 0.00%) match self.kind(id) { 10 ( 0.00%) EntryKind::Mod(exports) => { 632 ( 0.00%) for exp in exports.decode((self, sess)) { . callback(exp); . } . } . EntryKind::Enum(..) | EntryKind::Trait(..) => {} . _ => bug!("`for_each_module_child` is called on a non-module: {:?}", self.def_kind(id)), . } . } . -- line 1162 ---------------------------------------- -- line 1164 ---------------------------------------- . self.root.tables.mir_for_ctfe.get(self, id).is_some() . } . . fn is_item_mir_available(self, id: DefIndex) -> bool { . self.root.tables.mir.get(self, id).is_some() . } . . fn module_expansion(self, id: DefIndex, sess: &Session) -> ExpnId { 1,505 ( 0.00%) match self.kind(id) { . EntryKind::Mod(_) | EntryKind::Enum(_) | EntryKind::Trait(_) => { . self.get_expn_that_defined(id, sess) . } . _ => panic!("Expected module, found {:?}", self.local_def_id(id)), . } . } . . fn get_optimized_mir(self, tcx: TyCtxt<'tcx>, id: DefIndex) -> Body<'tcx> { -- line 1180 ---------------------------------------- -- line 1242 ---------------------------------------- . qualif, . _, . ) => qualif, . _ => bug!("mir_const_qualif: unexpected kind"), . } . } . . fn get_fn_has_self_parameter(self, id: DefIndex) -> bool { 550 ( 0.00%) match self.kind(id) { 440 ( 0.00%) EntryKind::AssocFn(data) => data.decode(self).has_self, . _ => false, . } . } . . 
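--------------------------------------------------------------------------------
Editor's note (added commentary, not part of the cachegrind output): accessors
such as `get_trait_def`, `get_variant`, and `get_fn_has_self_parameter` above
all decode the item's `EntryKind` first and then match on it, calling `bug!`
when the question cannot apply to that kind of item, or falling back to a
default when it merely might not. A reduced sketch of that dispatch; the enum
and its payloads here are invented for illustration.

// Illustrative stand-ins for EntryKind and a per-item kind table.
#[allow(dead_code)]
enum EntryKind {
    Fn { has_self: bool },
    Struct { field_count: usize },
}

struct Metadata {
    entries: Vec<EntryKind>,
}

impl Metadata {
    fn kind(&self, index: usize) -> &EntryKind {
        self.entries
            .get(index)
            .unwrap_or_else(|| panic!("kind({}): id not found", index)) // mirrors bug!()
    }

    // Like get_fn_has_self_parameter above: the question only makes sense for
    // one kind of entry; every other kind gets a conservative default.
    fn fn_has_self_parameter(&self, index: usize) -> bool {
        match self.kind(index) {
            EntryKind::Fn { has_self } => *has_self,
            _ => false,
        }
    }
}

fn main() {
    let meta = Metadata {
        entries: vec![
            EntryKind::Fn { has_self: true },
            EntryKind::Struct { field_count: 2 },
        ],
    };
    assert!(meta.fn_has_self_parameter(0));
    assert!(!meta.fn_has_self_parameter(1));
}
--------------------------------------------------------------------------------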
fn get_associated_item_def_ids(self, tcx: TyCtxt<'tcx>, id: DefIndex) -> &'tcx [DefId] { 385 ( 0.00%) if let Some(children) = self.root.tables.children.get(self, id) { 165 ( 0.00%) tcx.arena.alloc_from_iter( 55 ( 0.00%) children.decode((self, tcx.sess)).map(|child_index| self.local_def_id(child_index)), . ) . } else { . &[] . } . } . 6,588 ( 0.01%) fn get_associated_item(self, id: DefIndex, sess: &Session) -> ty::AssocItem { 3,843 ( 0.00%) let def_key = self.def_key(id); . let parent = self.local_def_id(def_key.parent.unwrap()); . let ident = self.item_ident(id, sess); . 3,883 ( 0.00%) let (kind, container, has_self) = match self.kind(id) { . EntryKind::AssocConst(container, _, _) => (ty::AssocKind::Const, container, false), 529 ( 0.00%) EntryKind::AssocFn(data) => { . let data = data.decode(self); 3,703 ( 0.00%) (ty::AssocKind::Fn, data.container, data.has_self) . } 40 ( 0.00%) EntryKind::AssocType(container) => (ty::AssocKind::Type, container, false), . _ => bug!("cannot get associated-item of `{:?}`", def_key), . }; . 9,882 ( 0.01%) ty::AssocItem { . name: ident.name, . kind, . vis: self.get_visibility(id), . defaultness: container.defaultness(), . def_id: self.local_def_id(id), . trait_item_def_id: self.get_trait_item_def_id(id), . container: container.with_def_id(parent), . fn_has_self_parameter: has_self, . } 4,941 ( 0.00%) } . . fn get_item_variances(self, id: DefIndex) -> impl Iterator + 'a { 65 ( 0.00%) self.root.tables.variances.get(self, id).unwrap_or_else(Lazy::empty).decode(self) . } . . fn get_ctor_def_id_and_kind(self, node_id: DefIndex) -> Option<(DefId, CtorKind)> { 448 ( 0.00%) match self.kind(node_id) { . EntryKind::Struct(data, _) | EntryKind::Variant(data) => { 192 ( 0.00%) let vdata = data.decode(self); 84 ( 0.00%) vdata.ctor.map(|index| (self.local_def_id(index), vdata.ctor_kind)) . } . _ => None, . } 320 ( 0.00%) } . 12 ( 0.00%) fn get_item_attrs( . self, . id: DefIndex, . sess: &'a Session, . ) -> impl Iterator + 'a { 255 ( 0.00%) self.root . .tables . .attributes . .get(self, id) . .unwrap_or_else(|| { . // Structure and variant constructors don't have any attributes encoded for them, . // but we assume that someone passing a constructor ID actually wants to look at . // the attributes on the corresponding struct or variant. . let def_key = self.def_key(id); -- line 1320 ---------------------------------------- -- line 1322 ---------------------------------------- . let parent_id = def_key.parent.expect("no parent for a constructor"); . self.root . .tables . .attributes . .get(self, parent_id) . .expect("no encoded attributes for a structure or variant") . }) . .decode((self, sess)) 9 ( 0.00%) } . . fn get_struct_field_names( . self, . id: DefIndex, . sess: &'a Session, . ) -> impl Iterator> + 'a { 320 ( 0.00%) self.root . .tables . .children . .get(self, id) . .unwrap_or_else(Lazy::empty) . .decode(self) 1,260 ( 0.00%) .map(move |index| respan(self.get_span(index, sess), self.item_ident(index, sess).name)) . } . . fn get_struct_field_visibilities(self, id: DefIndex) -> impl Iterator + 'a { 25 ( 0.00%) self.root . .tables . .children . .get(self, id) . .unwrap_or_else(Lazy::empty) . .decode(self) 12 ( 0.00%) .map(move |field_index| self.get_visibility(field_index)) . } . . fn get_inherent_implementations_for_type( . self, . tcx: TyCtxt<'tcx>, . id: DefIndex, . ) -> &'tcx [DefId] { 36 ( 0.00%) tcx.arena.alloc_from_iter( 45 ( 0.00%) self.root . .tables . .inherent_impls . .get(self, id) . .unwrap_or_else(Lazy::empty) . 
.decode(self) 571 ( 0.00%) .map(|index| self.local_def_id(index)), . ) . } . . fn get_traits(self) -> impl Iterator + 'a { . self.root.traits.decode(self).map(move |index| self.local_def_id(index)) . } . . fn get_trait_impls(self) -> impl Iterator)> + 'a { -- line 1376 ---------------------------------------- -- line 1380 ---------------------------------------- . index: *trait_index, . }; . impls.decode(self).map(move |(impl_index, simplified_self_ty)| { . (trait_def_id, self.local_def_id(impl_index), simplified_self_ty) . }) . }) . } . 3,078 ( 0.00%) fn get_implementations_of_trait( . self, . tcx: TyCtxt<'tcx>, . trait_def_id: DefId, . ) -> &'tcx [(DefId, Option)] { 342 ( 0.00%) if self.trait_impls.is_empty() { . return &[]; . } . . // Do a reverse lookup beforehand to avoid touching the crate_num . // hash map in the loop below. 576 ( 0.00%) let key = match self.reverse_translate_def_id(trait_def_id) { . Some(def_id) => (def_id.krate.as_u32(), def_id.index), . None => return &[], . }; . . if let Some(impls) = self.trait_impls.get(&key) { 408 ( 0.00%) tcx.arena.alloc_from_iter( 204 ( 0.00%) impls . .decode(self) 4,874 ( 0.00%) .map(|(idx, simplified_self_ty)| (self.local_def_id(idx), simplified_self_ty)), . ) . } else { . &[] . } 2,736 ( 0.00%) } . . fn get_trait_of_item(self, id: DefIndex) -> Option { 92 ( 0.00%) let def_key = self.def_key(id); 115 ( 0.00%) match def_key.disambiguated_data.data { . DefPathData::TypeNs(..) | DefPathData::ValueNs(..) => (), . // Not an associated item . _ => return None, . } 207 ( 0.00%) def_key.parent.and_then(|parent_index| match self.kind(parent_index) { . EntryKind::Trait(_) | EntryKind::TraitAlias => Some(self.local_def_id(parent_index)), . _ => None, . }) . } . . fn get_native_libraries(self, sess: &'a Session) -> impl Iterator + 'a { 38 ( 0.00%) self.root.native_libraries.decode((self, sess)) . } . . fn get_proc_macro_quoted_span(self, index: usize, sess: &Session) -> Span { . self.root . .tables . .proc_macro_quoted_spans . .get(self, index) . .unwrap_or_else(|| panic!("Missing proc macro quoted span: {:?}", index)) -- line 1437 ---------------------------------------- -- line 1450 ---------------------------------------- . self.root.dylib_dependency_formats.decode(self).enumerate().flat_map(|(i, link)| { . let cnum = CrateNum::new(i + 1); . link.map(|link| (self.cnum_map[cnum], link)) . }), . ) . } . . fn get_missing_lang_items(self, tcx: TyCtxt<'tcx>) -> &'tcx [lang_items::LangItem] { 95 ( 0.00%) tcx.arena.alloc_from_iter(self.root.lang_items_missing.decode(self)) . } . . fn get_fn_param_names(self, tcx: TyCtxt<'tcx>, id: DefIndex) -> &'tcx [Ident] { . let param_names = match self.kind(id) { . EntryKind::Fn(data) | EntryKind::ForeignFn(data) => data.decode(self).param_names, . EntryKind::AssocFn(data) => data.decode(self).fn_data.param_names, . _ => Lazy::empty(), . }; -- line 1466 ---------------------------------------- -- line 1479 ---------------------------------------- . EntryKind::AnonConst(_, data) . | EntryKind::Const(_, data) . | EntryKind::AssocConst(_, _, data) => data.decode(self).0, . _ => bug!(), . } . } . . fn get_macro(self, id: DefIndex, sess: &Session) -> MacroDef { 6 ( 0.00%) match self.kind(id) { 1 ( 0.00%) EntryKind::MacroDef(macro_def) => macro_def.decode((self, sess)), . _ => bug!(), . } . } . . // This replicates some of the logic of the crate-local `is_const_fn_raw` query, because we . // don't serialize constness for tuple variant and tuple struct constructors. . 
fn is_const_fn_raw(self, id: DefIndex) -> bool { 27 ( 0.00%) let constness = match self.kind(id) { 6 ( 0.00%) EntryKind::AssocFn(data) => data.decode(self).fn_data.constness, . EntryKind::Fn(data) => data.decode(self).constness, . EntryKind::ForeignFn(data) => data.decode(self).constness, . EntryKind::Variant(..) | EntryKind::Struct(..) => hir::Constness::Const, . _ => hir::Constness::NotConst, . }; . constness == hir::Constness::Const . } . -- line 1505 ---------------------------------------- -- line 1532 ---------------------------------------- . fn generator_kind(self, id: DefIndex) -> Option { . match self.kind(id) { . EntryKind::Generator(data) => Some(data), . _ => None, . } . } . . fn fn_sig(self, id: DefIndex, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> { 155 ( 0.00%) self.root.tables.fn_sig.get(self, id).unwrap().decode((self, tcx)) . } . . #[inline] 14,252 ( 0.01%) fn def_key(self, index: DefIndex) -> DefKey { 4,072 ( 0.00%) *self . .def_key_cache . .lock() . .entry(index) 5,930 ( 0.01%) .or_insert_with(|| self.root.tables.def_keys.get(self, index).unwrap().decode(self)) 16,288 ( 0.02%) } . . // Returns the path leading to the thing with this `id`. . fn def_path(self, id: DefIndex) -> DefPath { . debug!("def_path(cnum={:?}, id={:?})", self.cnum, id); . DefPath::make(self.cnum, id, |parent| self.def_key(parent)) . } . . fn def_path_hash_unlocked( . self, . index: DefIndex, . def_path_hashes: &mut FxHashMap, . ) -> DefPathHash { 196 ( 0.00%) *def_path_hashes.entry(index).or_insert_with(|| { 140 ( 0.00%) self.root.tables.def_path_hashes.get(self, index).unwrap().decode(self) . }) . } . . #[inline] 686 ( 0.00%) fn def_path_hash(self, index: DefIndex) -> DefPathHash { . let mut def_path_hashes = self.def_path_hash_cache.lock(); . self.def_path_hash_unlocked(index, &mut def_path_hashes) 784 ( 0.00%) } . . #[inline] . fn def_path_hash_to_def_index(self, hash: DefPathHash) -> DefIndex { . self.def_path_hash_map.def_path_hash_to_def_index(&hash) . } . . fn expn_hash_to_expn_id(self, sess: &Session, index_guess: u32, hash: ExpnHash) -> ExpnId { . debug_assert_eq!(ExpnId::from_hash(hash), None); -- line 1580 ---------------------------------------- -- line 1635 ---------------------------------------- . /// Proc macro crates don't currently export spans, so this function does not have . /// to work for them. . fn imported_source_files(self, sess: &Session) -> &'a [ImportedSourceFile] { . // Translate the virtual `/rustc/$hash` prefix back to a real directory . // that should hold actual sources, where possible. . // . // NOTE: if you update this, you might need to also update bootstrap's code for generating . // the `rust-src` component in `Src::run` in `src/bootstrap/dist.rs`. 9,300 ( 0.01%) let virtual_rust_source_base_dir = option_env!("CFG_VIRTUAL_RUST_SOURCE_BASE_DIR") . .map(Path::new) . .filter(|_| { . // Only spend time on further checks if we have what to translate *to*. . sess.opts.real_rust_source_base_dir.is_some() . }) . .filter(|virtual_dir| { . // Don't translate away `/rustc/$hash` if we're still remapping to it, . // since that means we're still building `std`/`rustc` that need it, . // and we don't want the real path to leak into codegen/debuginfo. . !sess.opts.remap_path_prefix.iter().any(|(_from, to)| to == virtual_dir) . }); 9,300 ( 0.01%) let try_to_translate_virtual_to_real = |name: &mut rustc_span::FileName| { . debug!( . "try_to_translate_virtual_to_real(name={:?}): \ . virtual_rust_source_base_dir={:?}, real_rust_source_base_dir={:?}", . 
name, virtual_rust_source_base_dir, sess.opts.real_rust_source_base_dir, . ); . 2,630 ( 0.00%) if let Some(virtual_dir) = virtual_rust_source_base_dir { . if let Some(real_dir) = &sess.opts.real_rust_source_base_dir { . if let rustc_span::FileName::Real(old_name) = name { . if let rustc_span::RealFileName::Remapped { local_path: _, virtual_name } = . old_name . { . if let Ok(rest) = virtual_name.strip_prefix(virtual_dir) { . let virtual_name = virtual_name.clone(); . -- line 1670 ---------------------------------------- -- line 1709 ---------------------------------------- . *old_name = new_name; . } . } . } . } . } . }; . 9,297 ( 0.01%) self.cdata.source_map_import_info.get_or_init(|| { 18 ( 0.00%) let external_source_map = self.root.source_map.decode(self); . . external_source_map . .map(|source_file_to_import| { . // We can't reuse an existing SourceFile, so allocate a new one . // containing the information we need. . let rustc_span::SourceFile { 11,046 ( 0.01%) mut name, 3,156 ( 0.00%) src_hash, 526 ( 0.00%) start_pos, 1,052 ( 0.00%) end_pos, 2,630 ( 0.00%) mut lines, 2,630 ( 0.00%) mut multibyte_chars, 3,156 ( 0.00%) mut non_narrow_chars, 2,630 ( 0.00%) mut normalized_pos, 3,156 ( 0.00%) name_hash, . .. . } = source_file_to_import; . . // If this file is under $sysroot/lib/rustlib/src/ but has not been remapped . // during rust bootstrapping by `remap-debuginfo = true`, and the user . // wish to simulate that behaviour by -Z simulate-remapped-rust-src-base, . // then we change `name` to a similar state as if the rust was bootstrapped . // with `remap-debuginfo = true`. . // This is useful for testing so that tests about the effects of . // `try_to_translate_virtual_to_real` don't have to worry about how the . // compiler is bootstrapped. 1,052 ( 0.00%) if let Some(virtual_dir) = . &sess.opts.debugging_opts.simulate_remapped_rust_src_base . { . if let Some(real_dir) = &sess.opts.real_rust_source_base_dir { . if let rustc_span::FileName::Real(ref mut old_name) = name { . if let rustc_span::RealFileName::LocalPath(local) = old_name { . if let Ok(rest) = local.strip_prefix(real_dir) { . *old_name = rustc_span::RealFileName::Remapped { . local_path: None, -- line 1753 ---------------------------------------- -- line 1767 ---------------------------------------- . let source_length = (end_pos - start_pos).to_usize(); . . // Translate line-start positions and multibyte character . // position into frame of reference local to file. . // `SourceMap::new_imported_source_file()` will then translate those . // coordinates to their new global frame of reference when the . // offset of the SourceFile is known. . for pos in &mut lines { 706,432 ( 0.70%) *pos = *pos - start_pos; . } . for mbc in &mut multibyte_chars { 1,594 ( 0.00%) mbc.pos = mbc.pos - start_pos; . } . for swc in &mut non_narrow_chars { 924 ( 0.00%) *swc = *swc - start_pos; . } . for np in &mut normalized_pos { . np.pos = np.pos - start_pos; . } . 9,468 ( 0.01%) let local_version = sess.source_map().new_imported_source_file( 6,838 ( 0.01%) name, 3,682 ( 0.00%) src_hash, . name_hash, . source_length, 1,052 ( 0.00%) self.cnum, 2,630 ( 0.00%) lines, 3,156 ( 0.00%) multibyte_chars, 2,630 ( 0.00%) non_narrow_chars, 2,104 ( 0.00%) normalized_pos, . start_pos, . end_pos, . ); . debug!( . "CrateMetaData::imported_source_files alloc \ . source_file {:?} original (start_pos {:?} end_pos {:?}) \ . translated (start_pos {:?} end_pos {:?})", . 
local_version.name, -- line 1804 ---------------------------------------- -- line 1808 ---------------------------------------- . local_version.end_pos . ); . . ImportedSourceFile { . original_start_pos: start_pos, . original_end_pos: end_pos, . translated_source_file: local_version, . } 1,578 ( 0.00%) }) . .collect() . }) . } . } . . impl CrateMetadata { 228 ( 0.00%) crate fn new( . sess: &Session, . blob: MetadataBlob, . root: CrateRoot<'static>, . raw_proc_macros: Option<&'static [ProcMacro]>, . cnum: CrateNum, . cnum_map: CrateNumMap, . dep_kind: CrateDepKind, . source: CrateSource, . private_dep: bool, . host_hash: Option, . ) -> CrateMetadata { 57 ( 0.00%) let trait_impls = root . .impls . .decode((&blob, sess)) . .map(|trait_impls| (trait_impls.trait_id, trait_impls.impls)) . .collect(); . let alloc_decoding_state = 57 ( 0.00%) AllocDecodingState::new(root.interpret_alloc_index.decode(&blob).collect()); . let dependencies = Lock::new(cnum_map.iter().cloned().collect()); . . // Pre-decode the DefPathHash->DefIndex table. This is a cheap operation . // that does not copy any data. It just does some data verification. 19 ( 0.00%) let def_path_hash_map = root.def_path_hash_map.decode(&blob); . 437 ( 0.00%) CrateMetadata { 19 ( 0.00%) blob, 57 ( 0.00%) root, 76 ( 0.00%) trait_impls, . raw_proc_macros, . source_map_import_info: OnceCell::new(), 76 ( 0.00%) def_path_hash_map, . expn_hash_map: Default::default(), 133 ( 0.00%) alloc_decoding_state, . cnum, 76 ( 0.00%) cnum_map, 95 ( 0.00%) dependencies, . dep_kind: Lock::new(dep_kind), 228 ( 0.00%) source, . private_dep, . host_hash, . extern_crate: Lock::new(None), . hygiene_context: Default::default(), . def_key_cache: Default::default(), . def_path_hash_cache: Default::default(), . } 171 ( 0.00%) } . . crate fn dependencies(&self) -> LockGuard<'_, Vec> { . self.dependencies.borrow() . } . . crate fn add_dependency(&self, cnum: CrateNum) { . self.dependencies.borrow_mut().push(cnum); . } . . crate fn update_extern_crate(&self, new_extern_crate: ExternCrate) -> bool { . let mut extern_crate = self.extern_crate.borrow_mut(); . let update = Some(new_extern_crate.rank()) > extern_crate.as_ref().map(ExternCrate::rank); . if update { 95 ( 0.00%) *extern_crate = Some(new_extern_crate); . } . update . } . . crate fn source(&self) -> &CrateSource { 32 ( 0.00%) &self.source . } . . crate fn dep_kind(&self) -> CrateDepKind { . *self.dep_kind.lock() . } . . crate fn update_dep_kind(&self, f: impl FnOnce(CrateDepKind) -> CrateDepKind) { 76 ( 0.00%) self.dep_kind.with_lock(|dep_kind| *dep_kind = f(*dep_kind)) . } . . crate fn panic_strategy(&self) -> PanicStrategy { . self.root.panic_strategy . } . . crate fn needs_panic_runtime(&self) -> bool { . self.root.needs_panic_runtime -- line 1905 ---------------------------------------- -- line 1909 ---------------------------------------- . self.root.panic_runtime . } . . crate fn is_profiler_runtime(&self) -> bool { . self.root.profiler_runtime . } . . crate fn needs_allocator(&self) -> bool { 5 ( 0.00%) self.root.needs_allocator . } . . crate fn has_global_allocator(&self) -> bool { . self.root.has_global_allocator . } . . crate fn has_default_lib_allocator(&self) -> bool { . self.root.has_default_lib_allocator . } . . crate fn is_proc_macro_crate(&self) -> bool { . self.root.is_proc_macro_crate() . } . . crate fn name(&self) -> Symbol { 8 ( 0.00%) self.root.name . } . . crate fn stable_crate_id(&self) -> StableCrateId { . self.root.stable_crate_id . } . . crate fn hash(&self) -> Svh { . self.root.hash . 
} . . fn num_def_ids(&self) -> usize { . self.root.tables.def_keys.size() . } . . fn local_def_id(&self, index: DefIndex) -> DefId { 8,588 ( 0.01%) DefId { krate: self.cnum, index } . } . . // Translate a DefId from the current compilation environment to a DefId . // for an external crate. . fn reverse_translate_def_id(&self, did: DefId) -> Option { . for (local, &global) in self.cnum_map.iter_enumerated() { 1,098 ( 0.00%) if global == did.krate { . return Some(DefId { krate: local, index: did.index }); . } . } . . None . } . } . -- line 1964 ---------------------------------------- 112,009 ( 0.11%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/fast_reject.rs -------------------------------------------------------------------------------- Ir -- line 12 ---------------------------------------- . pub type SimplifiedType = SimplifiedTypeGen; . . /// See `simplify_type` . /// . /// Note that we keep this type generic over the type of identifier it uses . /// because we sometimes need to use SimplifiedTypeGen values as stable sorting . /// keys (in which case we use a DefPathHash as id-type) but in the general case . /// the non-stable but fast to construct DefId-version is the better choice. 106,058 ( 0.10%) #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, TyEncodable, TyDecodable)] . pub enum SimplifiedTypeGen . where . D: Copy + Debug + Eq, . { . BoolSimplifiedType, . CharSimplifiedType, . IntSimplifiedType(ty::IntTy), . UintSimplifiedType(ty::UintTy), -- line 28 ---------------------------------------- -- line 78 ---------------------------------------- . /// . /// ¹ meaning that if two outermost layers are different, then the whole types are also different. . /// ² FIXME(@lcnr): this seems like it can actually end up being unsound with the way it's used during . /// candidate selection. We do not consider non blanket impls for `<_ as Trait>::Assoc` even . /// though `_` can be inferred to a concrete type later at which point a concrete impl . /// could actually apply. After experimenting for about an hour I wasn't able to cause any issues . /// this way so I am not going to change this until we actually find an issue as I am really . /// interesting in getting an actual test for this. 5,564 ( 0.01%) pub fn simplify_type( . tcx: TyCtxt<'_>, . ty: Ty<'_>, . can_simplify_params: SimplifyParams, . strip_references: StripReferences, . ) -> Option { 8,346 ( 0.01%) match *ty.kind() { . ty::Bool => Some(BoolSimplifiedType), . ty::Char => Some(CharSimplifiedType), . ty::Int(int_type) => Some(IntSimplifiedType(int_type)), 328 ( 0.00%) ty::Uint(uint_type) => Some(UintSimplifiedType(uint_type)), . ty::Float(float_type) => Some(FloatSimplifiedType(float_type)), 8,690 ( 0.01%) ty::Adt(def, _) => Some(AdtSimplifiedType(def.did)), . ty::Str => Some(StrSimplifiedType), . ty::Array(..) => Some(ArraySimplifiedType), . ty::Slice(..) => Some(SliceSimplifiedType), 36 ( 0.00%) ty::RawPtr(ptr) => Some(PtrSimplifiedType(ptr.mutbl)), . ty::Dynamic(ref trait_info, ..) => match trait_info.principal_def_id() { . Some(principal_def_id) if !tcx.trait_is_auto(principal_def_id) => { . Some(TraitSimplifiedType(principal_def_id)) . } . _ => Some(MarkerTraitObjectSimplifiedType), . }, 900 ( 0.00%) ty::Ref(_, ty, mutbl) => { 450 ( 0.00%) if strip_references == StripReferences::Yes { . // For diagnostics, when recommending similar impls we want to . 
// recommend impls even when there is a reference mismatch, . // so we treat &T and T equivalently in that case. . simplify_type(tcx, ty, can_simplify_params, strip_references) . } else { . Some(RefSimplifiedType(mutbl)) . } . } 21 ( 0.00%) ty::FnDef(def_id, _) | ty::Closure(def_id, _) => Some(ClosureSimplifiedType(def_id)), . ty::Generator(def_id, _, _) => Some(GeneratorSimplifiedType(def_id)), . ty::GeneratorWitness(ref tys) => { . Some(GeneratorWitnessSimplifiedType(tys.skip_binder().len())) . } . ty::Never => Some(NeverSimplifiedType), 90 ( 0.00%) ty::Tuple(ref tys) => Some(TupleSimplifiedType(tys.len())), 12 ( 0.00%) ty::FnPtr(ref f) => Some(FunctionSimplifiedType(f.skip_binder().inputs().len())), . ty::Projection(_) | ty::Param(_) => { 117 ( 0.00%) if can_simplify_params == SimplifyParams::Yes { . // In normalized types, projections don't unify with . // anything. when lazy normalization happens, this . // will change. It would still be nice to have a way . // to deal with known-not-to-unify-with-anything . // projections (e.g., the likes of <__S as Encoder>::Error). . Some(ParameterSimplifiedType) . } else { . None . } . } . ty::Opaque(def_id, _) => Some(OpaqueSimplifiedType(def_id)), . ty::Foreign(def_id) => Some(ForeignSimplifiedType(def_id)), . ty::Placeholder(..) | ty::Bound(..) | ty::Infer(_) | ty::Error(_) => None, . } 12,519 ( 0.01%) } . . impl SimplifiedTypeGen { . pub fn def(self) -> Option { . match self { . AdtSimplifiedType(d) . | ForeignSimplifiedType(d) . | TraitSimplifiedType(d) . | ClosureSimplifiedType(d) -- line 151 ---------------------------------------- 104,882 ( 0.10%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/symbol.rs -------------------------------------------------------------------------------- Ir -- line 15 ---------------------------------------- . use std::str; . . use crate::{with_session_globals, Edition, Span, DUMMY_SP}; . . #[cfg(test)] . mod tests; . . // The proc macro code for this is in `compiler/rustc_macros/src/symbols.rs`. 8 ( 0.00%) symbols! { . // After modifying this list adjust `is_special`, `is_used_keyword`/`is_unused_keyword`, . // this should be rarely necessary though if the keywords are kept in alphabetic order. . Keywords { . // Special reserved identifiers used internally for elided lifetimes, . // unnamed method parameters, crate root module, error recovery etc. . Empty: "", . PathRoot: "{{root}}", . DollarCrate: "$crate", -- line 31 ---------------------------------------- -- line 1498 ---------------------------------------- . write_str, . writeln_macro, . x87_reg, . xer, . xmm_reg, . ymm_reg, . zmm_reg, . } 10 ( 0.00%) } . 1,852 ( 0.00%) #[derive(Copy, Clone, Eq, HashStable_Generic, Encodable, Decodable)] . pub struct Ident { . pub name: Symbol, . pub span: Span, . } . . impl Ident { . #[inline] . /// Constructs a new identifier from a symbol and a span. . pub const fn new(name: Symbol, span: Span) -> Ident { . Ident { name, span } 8,257 ( 0.01%) } . . /// Constructs a new identifier with a dummy span. . #[inline] . pub const fn with_dummy_span(name: Symbol) -> Ident { . Ident::new(name, DUMMY_SP) . } . . #[inline] . pub fn empty() -> Ident { . Ident::with_dummy_span(kw::Empty) . } . . /// Maps a string to an identifier with a dummy span. 2 ( 0.00%) pub fn from_str(string: &str) -> Ident { 2 ( 0.00%) Ident::with_dummy_span(Symbol::intern(string)) 8 ( 0.00%) } . . 
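Note on the interner traffic visible in this file: `Ident::from_str` just above, and `Symbol::decode` further down, both funnel through `Symbol::intern`, and `Symbol::as_str` takes the interner lock on every call, which is why these small functions accumulate counts while crate metadata is decoded. As a rough standalone sketch of the idea (not rustc's actual `Interner`, which uses a `Lock`, a `DroplessArena`, and `FxHashMap`; the `TinyInterner` name below is made up), interning maps each distinct string to a small id so that later equality and hashing operate on the id:

    use std::collections::HashMap;

    /// Minimal interner sketch: structurally equal strings get the same id.
    #[derive(Default)]
    struct TinyInterner {
        names: HashMap<String, u32>,
        strings: Vec<String>,
    }

    impl TinyInterner {
        fn intern(&mut self, s: &str) -> u32 {
            if let Some(&id) = self.names.get(s) {
                return id; // already interned: one hash lookup
            }
            let id = self.strings.len() as u32;
            self.strings.push(s.to_string());
            self.names.insert(s.to_string(), id);
            id
        }

        fn get(&self, id: u32) -> &str {
            &self.strings[id as usize]
        }
    }

    fn main() {
        let mut interner = TinyInterner::default();
        let a = interner.intern("imported_source_files");
        let b = interner.intern("imported_source_files");
        assert_eq!(a, b); // same string, same id
        assert_eq!(interner.get(a), "imported_source_files");
    }

The trade-off is that producing a symbol or reading its text back always pays for a hash lookup (and, in rustc, a lock acquisition), which is what the `intern`/`as_str` counts in this section reflect.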
/// Maps a string and a span to an identifier. . pub fn from_str_and_span(string: &str, span: Span) -> Ident { . Ident::new(Symbol::intern(string), span) . } . . /// Replaces `lo` and `hi` with those from `span`, but keep hygiene context. 24 ( 0.00%) pub fn with_span_pos(self, span: Span) -> Ident { . Ident::new(self.name, span.with_ctxt(self.span.ctxt())) 15 ( 0.00%) } . 56 ( 0.00%) pub fn without_first_quote(self) -> Ident { 56 ( 0.00%) Ident::new(Symbol::intern(self.as_str().trim_start_matches('\'')), self.span) 35 ( 0.00%) } . . /// "Normalize" ident for use in comparisons using "item hygiene". . /// Identifiers with same string value become same if they came from the same macro 2.0 macro . /// (e.g., `macro` item, but not `macro_rules` item) and stay different if they came from . /// different macro 2.0 macros. . /// Technically, this operation strips all non-opaque marks from ident's syntactic context. 7,704 ( 0.01%) pub fn normalize_to_macros_2_0(self) -> Ident { . Ident::new(self.name, self.span.normalize_to_macros_2_0()) 5,992 ( 0.01%) } . . /// "Normalize" ident for use in comparisons using "local variable hygiene". . /// Identifiers with same string value become same if they came from the same non-transparent . /// macro (e.g., `macro` or `macro_rules!` items) and stay different if they came from different . /// non-transparent macros. . /// Technically, this operation strips all transparent marks from ident's syntactic context. 234 ( 0.00%) pub fn normalize_to_macro_rules(self) -> Ident { . Ident::new(self.name, self.span.normalize_to_macro_rules()) 182 ( 0.00%) } . . /// Access the underlying string. This is a slowish operation because it . /// requires locking the symbol interner. . /// . /// Note that the lifetime of the return value is a lie. See . /// `Symbol::as_str()` for details. . pub fn as_str(&self) -> &str { 11 ( 0.00%) self.name.as_str() . } . } . . impl PartialEq for Ident { 876 ( 0.00%) fn eq(&self, rhs: &Self) -> bool { 2,162 ( 0.00%) self.name == rhs.name && self.span.ctxt() == rhs.span.ctxt() 1,168 ( 0.00%) } . } . . impl Hash for Ident { . fn hash(&self, state: &mut H) { . self.name.hash(state); 621 ( 0.00%) self.span.ctxt().hash(state); . } . } . . impl fmt::Debug for Ident { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Display::fmt(self, f)?; . fmt::Debug::fmt(&self.span.ctxt(), f) . } . } . . /// This implementation is supposed to be used in error messages, so it's expected to be identical . /// to printing the original identifier token written in source code (`token_to_string`), . /// except that AST identifiers don't keep the rawness flag, so we have to guess it. . impl fmt::Display for Ident { 16 ( 0.00%) fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 23 ( 0.00%) fmt::Display::fmt(&IdentPrinter::new(self.name, self.is_raw_guess(), None), f) 16 ( 0.00%) } . } . . /// This is the most general way to print identifiers. . /// AST pretty-printer is used as a fallback for turning AST structures into token streams for . /// proc macros. Additionally, proc macros may stringify their input and expect it survive the . /// stringification (especially true for proc macro derives written between Rust 1.15 and 1.30). . /// So we need to somehow pretty-print `$crate` in a way preserving at least some of its . /// hygiene data, most importantly name of the crate it refers to. -- line 1613 ---------------------------------------- -- line 1622 ---------------------------------------- . is_raw: bool, . 
/// Span used for retrieving the crate name to which `$crate` refers to, . /// if this field is `None` then the `$crate` conversion doesn't happen. . convert_dollar_crate: Option, . } . . impl IdentPrinter { . /// The most general `IdentPrinter` constructor. Do not use this. 2 ( 0.00%) pub fn new(symbol: Symbol, is_raw: bool, convert_dollar_crate: Option) -> IdentPrinter { 58 ( 0.00%) IdentPrinter { symbol, is_raw, convert_dollar_crate } 2 ( 0.00%) } . . /// This implementation is supposed to be used when printing identifiers . /// as a part of pretty-printing for larger AST pieces. . /// Do not use this either. 6 ( 0.00%) pub fn for_ast_ident(ident: Ident, is_raw: bool) -> IdentPrinter { 6 ( 0.00%) IdentPrinter::new(ident.name, is_raw, Some(ident.span)) 3 ( 0.00%) } . } . . impl fmt::Display for IdentPrinter { 60 ( 0.00%) fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 24 ( 0.00%) if self.is_raw { . f.write_str("r#")?; 12 ( 0.00%) } else if self.symbol == kw::DollarCrate { . if let Some(span) = self.convert_dollar_crate { . let converted = span.ctxt().dollar_crate_name(); . if !converted.is_path_segment_keyword() { . f.write_str("::")?; . } . return fmt::Display::fmt(&converted, f); . } . } -- line 1654 ---------------------------------------- -- line 1656 ---------------------------------------- . } . } . . /// An newtype around `Ident` that calls [Ident::normalize_to_macro_rules] on . /// construction. . // FIXME(matthewj, petrochenkov) Use this more often, add a similar . // `ModernIdent` struct and use that as well. . #[derive(Copy, Clone, Eq, PartialEq, Hash)] 10 ( 0.00%) pub struct MacroRulesNormalizedIdent(Ident); . . impl MacroRulesNormalizedIdent { . pub fn new(ident: Ident) -> Self { 5 ( 0.00%) Self(ident.normalize_to_macro_rules()) . } . } . . impl fmt::Debug for MacroRulesNormalizedIdent { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Debug::fmt(&self.0, f) . } . } -- line 1676 ---------------------------------------- -- line 1686 ---------------------------------------- . /// Internally, a `Symbol` is implemented as an index, and all operations . /// (including hashing, equality, and ordering) operate on that index. The use . /// of `rustc_index::newtype_index!` means that `Option` only takes up 4 bytes, . /// because `rustc_index::newtype_index!` reserves the last 256 values for tagging purposes. . /// . /// Note that `Symbol` cannot directly be a `rustc_index::newtype_index!` because it . /// implements `fmt::Debug`, `Encodable`, and `Decodable` in special ways. . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] 3,045 ( 0.00%) pub struct Symbol(SymbolIndex); . . rustc_index::newtype_index! { . struct SymbolIndex { .. } . } . . impl Symbol { . const fn new(n: u32) -> Self { . Symbol(SymbolIndex::from_u32(n)) 4 ( 0.00%) } . . /// Maps a string to its interned representation. 50,382 ( 0.05%) pub fn intern(string: &str) -> Self { . with_session_globals(|session_globals| session_globals.symbol_interner.intern(string)) 50,382 ( 0.05%) } . . /// Access the underlying string. This is a slowish operation because it . /// requires locking the symbol interner. . /// . /// Note that the lifetime of the return value is a lie. It's not the same . /// as `&self`, but actually tied to the lifetime of the underlying . /// interner. Interners are long-lived, and there are very few of them, and . /// this function is typically used for short-lived things, so in practice . /// it works out ok. 
3,180 ( 0.00%) pub fn as_str(&self) -> &str { 1,060 ( 0.00%) with_session_globals(|session_globals| unsafe { . std::mem::transmute::<&str, &str>(session_globals.symbol_interner.get(*self)) . }) 4,240 ( 0.00%) } . . pub fn as_u32(self) -> u32 { . self.0.as_u32() . } . . pub fn is_empty(self) -> bool { . self == kw::Empty . } . . /// This method is supposed to be used in error messages, so it's expected to be . /// identical to printing the original identifier token written in source code . /// (`token_to_string`, `Ident::to_string`), except that symbols don't keep the rawness flag . /// or edition, so we have to guess the rawness using the global edition. 18 ( 0.00%) pub fn to_ident_string(self) -> String { . Ident::with_dummy_span(self).to_string() 18 ( 0.00%) } . } . . impl fmt::Debug for Symbol { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Debug::fmt(self.as_str(), f) . } . } . . impl fmt::Display for Symbol { 168 ( 0.00%) fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 612 ( 0.00%) fmt::Display::fmt(self.as_str(), f) . } . } . . impl Encodable for Symbol { . fn encode(&self, s: &mut S) -> Result<(), S::Error> { 1,185 ( 0.00%) s.emit_str(self.as_str()) . } . } . . impl Decodable for Symbol { . #[inline] 4,624 ( 0.00%) fn decode(d: &mut D) -> Symbol { 12,362 ( 0.01%) Symbol::intern(&d.read_str()) . } . } . . impl HashStable for Symbol { . #[inline] 6 ( 0.00%) fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) { 611 ( 0.00%) self.as_str().hash_stable(hcx, hasher); . } . } . . impl ToStableHashKey for Symbol { . type KeyType = String; . #[inline] . fn to_stable_hash_key(&self, _: &CTX) -> String { . self.as_str().to_string() -- line 1777 ---------------------------------------- -- line 1785 ---------------------------------------- . // . // The `FxHashMap`+`Vec` pair could be replaced by `FxIndexSet`, but #75278 . // found that to regress performance up to 2% in some cases. This might be . // revisited after further improvements to `indexmap`. . // . // This type is private to prevent accidentally constructing more than one . // `Interner` on the same thread, which makes it easy to mixup `Symbol`s . // between `Interner`s. 2 ( 0.00%) #[derive(Default)] . struct InternerInner { . arena: DroplessArena, . names: FxHashMap<&'static str, Symbol>, . strings: Vec<&'static str>, . } . . impl Interner { . fn prefill(init: &[&'static str]) -> Self { 30 ( 0.00%) Interner(Lock::new(InternerInner { . strings: init.into(), . names: init.iter().copied().zip((0..).map(Symbol::new)).collect(), . ..Default::default() . })) . } . . #[inline] . fn intern(&self, string: &str) -> Symbol { . let mut inner = self.0.lock(); 7,448 ( 0.01%) if let Some(&name) = inner.names.get(string) { . return name; . } . 1,874 ( 0.00%) let name = Symbol::new(inner.strings.len() as u32); . . // SAFETY: we convert from `&str` to `&[u8]`, clone it into the arena, . // and immediately convert the clone back to `&[u8], all because there . // is no `inner.arena.alloc_str()` method. This is clearly safe. . let string: &str = . unsafe { str::from_utf8_unchecked(inner.arena.alloc_slice(string.as_bytes())) }; . . // SAFETY: we can extend the arena allocation to `'static` because we -- line 1824 ---------------------------------------- -- line 1831 ---------------------------------------- . // #91445 for details. . inner.names.insert(string, name); . name . } . . // Get the symbol as a string. `Symbol::as_str()` should be used in . // preference to this function. . 
fn get(&self, symbol: Symbol) -> &str { 3,180 ( 0.00%) self.0.lock().strings[symbol.0.as_usize()] . } . } . . // This module has a very short name because it's used a lot. . /// This module contains all the defined keyword `Symbol`s. . /// . /// Given that `kw` is imported, use them like `kw::keyword_name`. . /// For example `kw::Loop` or `kw::Break`. -- line 1847 ---------------------------------------- -- line 1863 ---------------------------------------- . . // Used from a macro in `librustc_feature/accepted.rs` . pub use super::kw::MacroRules as macro_rules; . . /// Get the symbol for an integer. . /// . /// The first few non-negative integers each have a static symbol and therefore . /// are fast. 6 ( 0.00%) pub fn integer + Copy + ToString>(n: N) -> Symbol { 6 ( 0.00%) if let Result::Ok(idx) = n.try_into() { . if idx < 10 { 8 ( 0.00%) return Symbol::new(super::SYMBOL_DIGITS_BASE + idx as u32); . } . } . Symbol::intern(&n.to_string()) . } . } . . impl Symbol { . fn is_special(self) -> bool { . self <= kw::Underscore . } . . fn is_used_keyword_always(self) -> bool { 6 ( 0.00%) self >= kw::As && self <= kw::While . } . . fn is_used_keyword_conditional(self, edition: impl FnOnce() -> Edition) -> bool { 843 ( 0.00%) (self >= kw::Async && self <= kw::Dyn) && edition() >= Edition::Edition2018 . } . . fn is_unused_keyword_always(self) -> bool { 6 ( 0.00%) self >= kw::Abstract && self <= kw::Yield . } . . fn is_unused_keyword_conditional(self, edition: impl FnOnce() -> Edition) -> bool { 2 ( 0.00%) self == kw::Try && edition() >= Edition::Edition2018 . } . . pub fn is_reserved(self, edition: impl Copy + FnOnce() -> Edition) -> bool { 1,495 ( 0.00%) self.is_special() . || self.is_used_keyword_always() . || self.is_unused_keyword_always() . || self.is_used_keyword_conditional(edition) . || self.is_unused_keyword_conditional(edition) . } . . /// A keyword or reserved identifier that can be used as a path segment. . pub fn is_path_segment_keyword(self) -> bool { 4,018 ( 0.00%) self == kw::Super . || self == kw::SelfLower . || self == kw::SelfUpper . || self == kw::Crate . || self == kw::PathRoot . || self == kw::DollarCrate . } . . /// Returns `true` if the symbol is `true` or `false`. . pub fn is_bool_lit(self) -> bool { 84 ( 0.00%) self == kw::True || self == kw::False 84 ( 0.00%) } . . /// Returns `true` if this symbol can be a raw identifier. . pub fn can_be_raw(self) -> bool { 20 ( 0.00%) self != kw::Empty && self != kw::Underscore && !self.is_path_segment_keyword() . } . } . . impl Ident { . // Returns `true` for reserved identifiers used internally for elided lifetimes, . // unnamed method parameters, crate root module, error recovery etc. . pub fn is_special(self) -> bool { . self.name.is_special() 2 ( 0.00%) } . . /// Returns `true` if the token is a keyword used in the language. 2 ( 0.00%) pub fn is_used_keyword(self) -> bool { . // Note: `span.edition()` is relatively expensive, don't call it unless necessary. 2 ( 0.00%) self.name.is_used_keyword_always() . || self.name.is_used_keyword_conditional(|| self.span.edition()) 4 ( 0.00%) } . . /// Returns `true` if the token is a keyword reserved for possible future use. 2 ( 0.00%) pub fn is_unused_keyword(self) -> bool { . // Note: `span.edition()` is relatively expensive, don't call it unless necessary. 2 ( 0.00%) self.name.is_unused_keyword_always() . || self.name.is_unused_keyword_conditional(|| self.span.edition()) 4 ( 0.00%) } . . /// Returns `true` if the token is either a special identifier or a keyword. 
289 ( 0.00%) pub fn is_reserved(self) -> bool { . // Note: `span.edition()` is relatively expensive, don't call it unless necessary. . self.name.is_reserved(|| self.span.edition()) 578 ( 0.00%) } . . /// A keyword or reserved identifier that can be used as a path segment. . pub fn is_path_segment_keyword(self) -> bool { . self.name.is_path_segment_keyword() 574 ( 0.00%) } . . /// We see this identifier in a normal identifier position, like variable name or a type. . /// How was it written originally? Did it use the raw form? Let's try to guess. 3 ( 0.00%) pub fn is_raw_guess(self) -> bool { . self.name.can_be_raw() && self.is_reserved() 6 ( 0.00%) } . } 6,873 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/indexmap-1.8.0/src/map/core.rs -------------------------------------------------------------------------------- Ir -- line 26 ---------------------------------------- . /// indices mapping from the entry hash to its index. . indices: RawTable, . /// entries is a dense vec of entries in their order. . entries: Vec>, . } . . #[inline(always)] . fn get_hash(entries: &[Bucket]) -> impl Fn(&usize) -> u64 + '_ { 36,562 ( 0.04%) move |&i| entries[i].hash.get() . } . . #[inline] . fn equivalent<'a, K, V, Q: ?Sized + Equivalent>( . key: &'a Q, . entries: &'a [Bucket], . ) -> impl Fn(&usize) -> bool + 'a { 4,457 ( 0.00%) move |&i| Q::equivalent(key, &entries[i].key) . } . . #[inline] . fn erase_index(table: &mut RawTable, hash: HashValue, index: usize) { . table.erase_entry(hash.get(), move |&i| i == index); . } . . #[inline] -- line 50 ---------------------------------------- -- line 132 ---------------------------------------- . IndexMapCore { . indices: RawTable::with_capacity(n), . entries: Vec::with_capacity(n), . } . } . . #[inline] . pub(crate) fn len(&self) -> usize { 157 ( 0.00%) self.indices.len() . } . . #[inline] . pub(crate) fn capacity(&self) -> usize { . cmp::min(self.indices.capacity(), self.entries.capacity()) . } . . pub(crate) fn clear(&mut self) { -- line 148 ---------------------------------------- -- line 152 ---------------------------------------- . . pub(crate) fn truncate(&mut self, len: usize) { . if len < self.len() { . self.erase_indices(len, self.entries.len()); . self.entries.truncate(len); . } . } . 27 ( 0.00%) pub(crate) fn drain(&mut self, range: R) -> Drain<'_, Bucket> . where . R: RangeBounds, . { 9 ( 0.00%) let range = simplify_range(range, self.entries.len()); . self.erase_indices(range.start, range.end); . self.entries.drain(range) 24 ( 0.00%) } . . #[cfg(feature = "rayon")] . pub(crate) fn par_drain(&mut self, range: R) -> rayon::vec::Drain<'_, Bucket> . where . K: Send, . V: Send, . R: RangeBounds, . { -- line 175 ---------------------------------------- -- line 194 ---------------------------------------- . /// Reserve capacity for `additional` more key-value pairs. . pub(crate) fn reserve(&mut self, additional: usize) { . self.indices.reserve(additional, get_hash(&self.entries)); . self.reserve_entries(); . } . . /// Reserve entries capacity to match the indices . fn reserve_entries(&mut self) { 1,090 ( 0.00%) let additional = self.indices.capacity() - self.entries.len(); . self.entries.reserve_exact(additional); . } . . /// Shrink the capacity of the map as much as possible. . pub(crate) fn shrink_to_fit(&mut self) { . self.indices.shrink_to(0, get_hash(&self.entries)); . self.entries.shrink_to_fit(); . 
} -- line 210 ---------------------------------------- -- line 218 ---------------------------------------- . } else { . None . } . } . . /// Append a key-value pair, *without* checking whether it already exists, . /// and return the pair's new index. . fn push(&mut self, hash: HashValue, key: K, value: V) -> usize { 5,518 ( 0.01%) let i = self.entries.len(); . self.indices.insert(hash.get(), i, get_hash(&self.entries)); 11,096 ( 0.01%) if i == self.entries.capacity() { . // Reserve our own capacity synced to the indices, . // rather than letting `Vec::push` just double it. . self.reserve_entries(); . } 33,817 ( 0.03%) self.entries.push(Bucket { hash, key, value }); . i . } . . /// Return the index in `entries` where an equivalent key can be found 512 ( 0.00%) pub(crate) fn get_index_of(&self, hash: HashValue, key: &Q) -> Option . where . Q: ?Sized + Equivalent, . { 50 ( 0.00%) let eq = equivalent(key, &self.entries); . self.indices.get(hash.get(), eq).copied() 519 ( 0.00%) } . 273 ( 0.00%) pub(crate) fn insert_full(&mut self, hash: HashValue, key: K, value: V) -> (usize, Option) . where . K: Eq, . { 15 ( 0.00%) match self.get_index_of(hash, &key) { . Some(i) => (i, Some(replace(&mut self.entries[i].value, value))), 40 ( 0.00%) None => (self.push(hash, key, value), None), . } 242 ( 0.00%) } . . /// Remove an entry by shifting all entries that follow it . pub(crate) fn shift_remove_full(&mut self, hash: HashValue, key: &Q) -> Option<(usize, K, V)> . where . Q: ?Sized + Equivalent, . { . let eq = equivalent(key, &self.entries); . match self.indices.remove_entry(hash.get(), eq) { -- line 262 ---------------------------------------- -- line 361 ---------------------------------------- . let (init, shifted_entries) = self.entries.split_at(end); . let (start_entries, erased_entries) = init.split_at(start); . . let erased = erased_entries.len(); . let shifted = shifted_entries.len(); . let half_capacity = self.indices.buckets() / 2; . . // Use a heuristic between different strategies 6 ( 0.00%) if erased == 0 { . // Degenerate case, nothing to do . } else if start + shifted < half_capacity && start < erased { . // Reinsert everything, as there are few kept indices . self.indices.clear(); . . // Reinsert stable indices . for (i, entry) in enumerate(start_entries) { . self.indices.insert_no_grow(entry.hash.get(), i); -- line 377 ---------------------------------------- -- line 468 ---------------------------------------- . Entry::Vacant(entry) => entry.insert(default), . } . } . . /// Inserts the result of the `call` function in the entry if it is vacant and returns a mutable . /// reference to it. Otherwise a mutable reference to an already existent value is returned. . /// . /// Computes in **O(1)** time (amortized average). 5,992 ( 0.01%) pub fn or_insert_with(self, call: F) -> &'a mut V . where . F: FnOnce() -> V, . { 1,498 ( 0.00%) match self { 156 ( 0.00%) Entry::Occupied(entry) => entry.into_mut(), 685 ( 0.00%) Entry::Vacant(entry) => entry.insert(call()), . } 5,992 ( 0.01%) } . . /// Inserts the result of the `call` function with a reference to the entry's key if it is . /// vacant, and returns a mutable reference to the new value. Otherwise a mutable reference to . /// an already existent value is returned. . /// . /// Computes in **O(1)** time (amortized average). . pub fn or_insert_with_key(self, call: F) -> &'a mut V . where -- line 492 ---------------------------------------- -- line 531 ---------------------------------------- . x => x, . } . } . . 
/// Inserts a default-constructed value in the entry if it is vacant and returns a mutable . /// reference to it. Otherwise a mutable reference to an already existent value is returned. . /// . /// Computes in **O(1)** time (amortized average). 37,499 ( 0.04%) pub fn or_default(self) -> &'a mut V . where . V: Default, . { 10,714 ( 0.01%) match self { 560 ( 0.00%) Entry::Occupied(entry) => entry.into_mut(), 14,391 ( 0.01%) Entry::Vacant(entry) => entry.insert(V::default()), . } 42,856 ( 0.04%) } . } . . impl fmt::Debug for Entry<'_, K, V> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . match *self { . Entry::Vacant(ref v) => f.debug_tuple(stringify!(Entry)).field(v).finish(), . Entry::Occupied(ref o) => f.debug_tuple(stringify!(Entry)).field(o).finish(), . } -- line 555 ---------------------------------------- -- line 634 ---------------------------------------- . . /// Return the index where the key-value pair will be inserted. . pub fn index(&self) -> usize { . self.map.len() . } . . /// Inserts the entry's key and the given value into the map, and returns a mutable reference . /// to the value. 989 ( 0.00%) pub fn insert(self, value: V) -> &'a mut V { 14,771 ( 0.01%) let i = self.map.push(self.hash, self.key, value); 56 ( 0.00%) &mut self.map.entries[i].value 1,098 ( 0.00%) } . } . . impl fmt::Debug for VacantEntry<'_, K, V> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . f.debug_tuple(stringify!(VacantEntry)) . .field(self.key()) . .finish() . } -- line 653 ---------------------------------------- 36,030 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs -------------------------------------------------------------------------------- Ir -- line 23 ---------------------------------------- . use std::cmp::Ordering; . use std::marker::PhantomData; . use std::ops::Range; . use ty::util::IntTypeExt; . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable, TypeFoldable, Lift)] . pub struct TypeAndMut<'tcx> { 5 ( 0.00%) pub ty: Ty<'tcx>, 25 ( 0.00%) pub mutbl: hir::Mutability, . } . . #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)] . #[derive(HashStable)] . /// A "free" region `fr` can be interpreted as "some region . /// at least as big as the scope `fr.scope`". . pub struct FreeRegion { 397 ( 0.00%) pub scope: DefId, 843 ( 0.00%) pub bound_region: BoundRegionKind, . } . 12,818 ( 0.01%) #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)] . #[derive(HashStable)] . pub enum BoundRegionKind { . /// An anonymous region parameter for a given fn (&T) 2,499 ( 0.00%) BrAnon(u32), . . /// Named region parameters for functions (a in &'a T) . /// . /// The `DefId` is needed to distinguish free regions in . /// the event of shadowing. . BrNamed(DefId, Symbol), . . /// Anonymous region for the implicit env pointer parameter . /// to a closure . BrEnv, . } . 48 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)] . #[derive(HashStable)] . pub struct BoundRegion { 1,668 ( 0.00%) pub var: BoundVar, 1,560 ( 0.00%) pub kind: BoundRegionKind, . } . . impl BoundRegionKind { . pub fn is_named(&self) -> bool { . match *self { . BoundRegionKind::BrNamed(_, name) => name != kw::UnderscoreLifetime, . _ => false, . } . } . } . . 
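For orientation, `BoundRegionKind::BrAnon` above numbers the anonymous (elided) lifetimes introduced under a single binder, while `BrNamed` records the `DefId` and name of an explicitly written one. A small surface-level illustration of what such a binder corresponds to (the function below is made up for the example):

    // Both `&str` parameters elide their lifetimes.
    fn shorter(x: &str, y: &str) -> bool {
        x.len() < y.len()
    }

    fn main() {
        // The fully spelled-out higher-ranked type of `shorter`: one binder
        // introducing two anonymous regions, conceptually BrAnon(0) and BrAnon(1).
        let f: for<'a, 'b> fn(&'a str, &'b str) -> bool = shorter;
        assert!(f("fn", "region"));
    }
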
/// Defines the kinds of types. . /// . /// N.B., if you change this, you'll probably want to change the corresponding . /// AST structure in `rustc_ast/src/ast.rs` as well. 256,759 ( 0.25%) #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable, Debug)] . #[derive(HashStable)] . #[rustc_diagnostic_item = "TyKind"] . pub enum TyKind<'tcx> { . /// The primitive boolean type. Written as `bool`. . Bool, . . /// The primitive character type; holds a Unicode scalar value . /// (a non-surrogate code point). Written as `char`. -- line 89 ---------------------------------------- -- line 99 ---------------------------------------- . Float(ty::FloatTy), . . /// Algebraic data types (ADT). For example: structures, enumerations and unions. . /// . /// InternalSubsts here, possibly against intuition, *may* contain `Param`s. . /// That is, even after substitution it is possible that there are type . /// variables. This happens when the `Adt` corresponds to an ADT . /// definition and not a concrete use of it. 31,358 ( 0.03%) Adt(&'tcx AdtDef, SubstsRef<'tcx>), . . /// An unsized FFI type that is opaque to Rust. Written as `extern type T`. . Foreign(DefId), . . /// The pointee of a string slice. Written as `str`. . Str, . . /// An array with the given length. Written as `[T; n]`. 164 ( 0.00%) Array(Ty<'tcx>, &'tcx ty::Const<'tcx>), . . /// The pointee of an array slice. Written as `[T]`. . Slice(Ty<'tcx>), . . /// A raw pointer. Written as `*mut T` or `*const T` . RawPtr(TypeAndMut<'tcx>), . . /// A reference; a pointer with an associated lifetime. Written as . /// `&'a mut T` or `&'a T`. 20,346 ( 0.02%) Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability), . . /// The anonymous type of a function declaration/definition. Each . /// function has a unique type, which is output (for a function . /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`. . /// . /// For example the type of `bar` here: . /// . /// ```rust -- line 134 ---------------------------------------- -- line 180 ---------------------------------------- . /// The substitutions are for the generics of the function in question. . /// After typeck, the concrete type can be found in the `types` map. . Opaque(DefId, SubstsRef<'tcx>), . . /// A type parameter; for example, `T` in `fn f(x: T) {}`. . Param(ParamTy), . . /// Bound type variable, used only when preparing a trait query. 47 ( 0.00%) Bound(ty::DebruijnIndex, BoundTy), . . /// A placeholder type - universally quantified higher-ranked type. . Placeholder(ty::PlaceholderType), . . /// A type variable used during type checking. . Infer(InferTy), . . /// A placeholder for a type which could not be computed; this is -- line 196 ---------------------------------------- -- line 886 ---------------------------------------- . /// T: Foo . /// . /// This would be represented by a trait-reference where the `DefId` is the . /// `DefId` for the trait `Foo` and the substs define `T` as parameter 0, . /// and `U` as parameter 1. . /// . /// Trait references also appear in object types like `Foo`, but in . /// that case the `Self` parameter is absent from the substitutions. 1,508 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 16,403 ( 0.02%) #[derive(HashStable, TypeFoldable)] . pub struct TraitRef<'tcx> { 70 ( 0.00%) pub def_id: DefId, 12,291 ( 0.01%) pub substs: SubstsRef<'tcx>, . } . . impl<'tcx> TraitRef<'tcx> { 43 ( 0.00%) pub fn new(def_id: DefId, substs: SubstsRef<'tcx>) -> TraitRef<'tcx> { . 
TraitRef { def_id, substs } 172 ( 0.00%) } . . /// Returns a `TraitRef` of the form `P0: Foo` where `Pi` . /// are the parameters defined on trait. 117 ( 0.00%) pub fn identity(tcx: TyCtxt<'tcx>, def_id: DefId) -> Binder<'tcx, TraitRef<'tcx>> { 52 ( 0.00%) ty::Binder::dummy(TraitRef { . def_id, 78 ( 0.00%) substs: InternalSubsts::identity_for_item(tcx, def_id), . }) 104 ( 0.00%) } . . #[inline] . pub fn self_ty(&self) -> Ty<'tcx> { . self.substs.type_at(0) . } . 120 ( 0.00%) pub fn from_method( . tcx: TyCtxt<'tcx>, . trait_id: DefId, . substs: SubstsRef<'tcx>, . ) -> ty::TraitRef<'tcx> { . let defs = tcx.generics_of(trait_id); . 15 ( 0.00%) ty::TraitRef { def_id: trait_id, substs: tcx.intern_substs(&substs[..defs.params.len()]) } 165 ( 0.00%) } . } . . pub type PolyTraitRef<'tcx> = Binder<'tcx, TraitRef<'tcx>>; . . impl<'tcx> PolyTraitRef<'tcx> { . pub fn self_ty(&self) -> Binder<'tcx, Ty<'tcx>> { . self.map_bound_ref(|tr| tr.self_ty()) . } . . pub fn def_id(&self) -> DefId { 120 ( 0.00%) self.skip_binder().def_id 40 ( 0.00%) } . 126 ( 0.00%) pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> { 252 ( 0.00%) self.map_bound(|trait_ref| ty::TraitPredicate { . trait_ref, . constness: ty::BoundConstness::NotConst, . polarity: ty::ImplPolarity::Positive, . }) 126 ( 0.00%) } . } . . /// An existential reference to a trait, where `Self` is erased. . /// For example, the trait object `Trait<'a, 'b, X, Y>` is: . /// . /// exists T. T: Trait<'a, 'b, X, Y> . /// . /// The substitutions don't include the erased `Self`, only trait -- line 956 ---------------------------------------- -- line 999 ---------------------------------------- . /// we convert the principal trait-ref into a normal trait-ref, . /// you must give *some* self type. A common choice is `mk_err()` . /// or some placeholder type. . pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::PolyTraitRef<'tcx> { . self.map_bound(|trait_ref| trait_ref.with_self_ty(tcx, self_ty)) . } . } . 1,713 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub enum BoundVariableKind { . Ty(BoundTyKind), . Region(BoundRegionKind), . Const, . } . . /// Binder is a binder for higher-ranked lifetimes or types. It is part of the -- line 1015 ---------------------------------------- -- line 1016 ---------------------------------------- . /// compiler's representation for things like `for<'a> Fn(&'a isize)` . /// (which would be represented by the type `PolyTraitRef == . /// Binder<'tcx, TraitRef>`). Note that when we instantiate, . /// erase, or otherwise "discharge" these bound vars, we change the . /// type from `Binder<'tcx, T>` to just `T` (see . /// e.g., `liberate_late_bound_regions`). . /// . /// `Decodable` and `Encodable` are implemented for `Binder` using the `impl_binder_encode_decode!` macro. 40 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] 10,961 ( 0.01%) pub struct Binder<'tcx, T>(T, &'tcx List); . . impl<'tcx, T> Binder<'tcx, T> . where . T: TypeFoldable<'tcx>, . { . /// Wraps `value` in a binder, asserting that `value` does not . /// contain any bound vars that would be bound by the . /// binder. This is commonly used to 'inject' a value T into a . /// different binding level. 610 ( 0.00%) pub fn dummy(value: T) -> Binder<'tcx, T> { 2,303 ( 0.00%) assert!(!value.has_escaping_bound_vars()); 5,830 ( 0.01%) Binder(value, ty::List::empty()) 610 ( 0.00%) } . . 
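The `TraitRef` documented above pairs a trait's `DefId` with the substitutions for its generic parameters, with `Self` always occupying position 0, which is why `self_ty()` simply reads `substs.type_at(0)`. A rough surface-level correspondence (the function and values below are made up for illustration):

    use std::fmt::Display;

    // The bound `T: Display` below is what the compiler models as a TraitRef:
    // def_id names the `Display` trait, and the substs are `[T]` (Self is
    // parameter 0).
    fn describe<T: Display>(value: T) -> String {
        format!("value = {}", value)
    }

    fn main() {
        // Each concrete instantiation fixes the substs: here [i32], then [&str].
        assert_eq!(describe(3), "value = 3");
        assert_eq!(describe("simplify"), "value = simplify");
    }

Associated-type equalities such as `Iterator<Item = u32>` are not part of the `TraitRef` itself; they become separate projection predicates, as the `ProjectionTy` machinery later in this file suggests.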
pub fn bind_with_vars(value: T, vars: &'tcx List) -> Binder<'tcx, T> { . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(vars); . value.visit_with(&mut validator); . } 1,113 ( 0.00%) Binder(value, vars) . } . } . . impl<'tcx, T> Binder<'tcx, T> { . /// Skips the binder and returns the "bound" value. This is a . /// risky thing to do because it's easy to get confused about . /// De Bruijn indices and the like. It is usually better to . /// discharge the binder using `no_bound_vars` or -- line 1053 ---------------------------------------- -- line 1059 ---------------------------------------- . /// accounting. . /// . /// Some examples where `skip_binder` is reasonable: . /// . /// - extracting the `DefId` from a PolyTraitRef; . /// - comparing the self type of a PolyTraitRef to see if it is equal to . /// a type parameter `X`, since the type `X` does not reference any regions . pub fn skip_binder(self) -> T { 25,097 ( 0.02%) self.0 . } . . pub fn bound_vars(&self) -> &'tcx List { 16 ( 0.00%) self.1 . } . . pub fn as_ref(&self) -> Binder<'tcx, &T> { 92 ( 0.00%) Binder(&self.0, self.1) . } . . pub fn map_bound_ref_unchecked(&self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(&T) -> U, . { . let value = f(&self.0); . Binder(value, self.1) -- line 1083 ---------------------------------------- -- line 1089 ---------------------------------------- . { . self.as_ref().map_bound(f) . } . . pub fn map_bound>(self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(T) -> U, . { 1,343 ( 0.00%) let value = f(self.0); . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.1); . value.visit_with(&mut validator); . } 5,087 ( 0.01%) Binder(value, self.1) . } . . pub fn try_map_bound, E>(self, f: F) -> Result, E> . where . F: FnOnce(T) -> Result, . { . let value = f(self.0)?; . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.1); . value.visit_with(&mut validator); . } 235 ( 0.00%) Ok(Binder(value, self.1)) . } . . /// Wraps a `value` in a binder, using the same bound variables as the . /// current `Binder`. This should not be used if the new value *changes* . /// the bound variables. Note: the (old or new) value itself does not . /// necessarily need to *name* all the bound variables. . /// . /// This currently doesn't do anything different than `bind`, because we -- line 1122 ---------------------------------------- -- line 1126 ---------------------------------------- . pub fn rebind(&self, value: U) -> Binder<'tcx, U> . where . U: TypeFoldable<'tcx>, . { . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.bound_vars()); . value.visit_with(&mut validator); . } 390 ( 0.00%) Binder(value, self.1) . } . . /// Unwraps and returns the value within, but only if it contains . /// no bound vars at all. (In other words, if this binder -- . /// and indeed any enclosing binder -- doesn't bind anything at . /// all.) Otherwise, returns `None`. . /// . /// (One could imagine having a method that just unwraps a single -- line 1142 ---------------------------------------- -- line 1143 ---------------------------------------- . /// binder, but permits late-bound vars bound by enclosing . /// binders, but that would require adjusting the debruijn . /// indices, and given the shallow binding structure we often use, . /// would not be that useful.) . pub fn no_bound_vars(self) -> Option . where . T: TypeFoldable<'tcx>, . { 4,236 ( 0.00%) if self.0.has_escaping_bound_vars() { None } else { Some(self.skip_binder()) } . } . . 
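`Binder` pairs a value with the list of variables it binds; `skip_binder` above discards that pairing without adjusting anything, which is only safe for uses that never inspect bound variables, while `no_bound_vars` hands the value back only when it mentions no bound variable at all. A toy model of that last check, using a plain `usize` index in place of rustc's `DebruijnIndex`/`BoundVar` and a single binder level (all names below are hypothetical):

    // Toy "type" with one kind of bound variable, de-Bruijn style.
    #[derive(Debug, PartialEq)]
    enum Term {
        Unit,
        Bound(usize),
        Fn(Box<Term>, Box<Term>),
    }

    struct Binder(Term);

    impl Binder {
        // Analogue of `no_bound_vars`: return the value only if it never
        // mentions a bound variable, so dropping the binder cannot change
        // its meaning.
        fn no_bound_vars(self) -> Option<Term> {
            fn mentions_bound(t: &Term) -> bool {
                match t {
                    Term::Unit => false,
                    Term::Bound(_) => true,
                    Term::Fn(a, b) => mentions_bound(a) || mentions_bound(b),
                }
            }
            if mentions_bound(&self.0) { None } else { Some(self.0) }
        }
    }

    fn main() {
        // "for<X> fn(X) -> ()" mentions its bound variable: keep the binder.
        let poly = Binder(Term::Fn(Box::new(Term::Bound(0)), Box::new(Term::Unit)));
        assert_eq!(poly.no_bound_vars(), None);

        // "for<X> fn(()) -> ()" never uses X: the binder can be discarded.
        let trivial = Binder(Term::Fn(Box::new(Term::Unit), Box::new(Term::Unit)));
        assert!(trivial.no_bound_vars().is_some());
    }

In rustc the corresponding check is cheap because bound-variable information is precomputed and cached on each type, rather than recomputed by traversal as in this sketch.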
/// Splits the contents into two things that share the same binder . /// level as the original, returning two distinct binders. . /// . /// `f` should consider bound regions at depth 1 to be free, and . /// anything it produces with bound regions at depth 1 will be . /// bound in the resulting return values. -- line 1159 ---------------------------------------- -- line 1170 ---------------------------------------- . pub fn transpose(self) -> Option> { . let bound_vars = self.1; . self.0.map(|v| Binder(v, bound_vars)) . } . } . . /// Represents the projection of an associated type. In explicit UFCS . /// form this would be written `>::N`. 130 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 1,463 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct ProjectionTy<'tcx> { . /// The parameters of the associated item. 958 ( 0.00%) pub substs: SubstsRef<'tcx>, . . /// The `DefId` of the `TraitItem` for the associated type `N`. . /// . /// Note that this is not the `DefId` of the `TraitRef` containing this . /// associated type, which is in `tcx.associated_item(item_def_id).container`. 880 ( 0.00%) pub item_def_id: DefId, . } . . impl<'tcx> ProjectionTy<'tcx> { 777 ( 0.00%) pub fn trait_def_id(&self, tcx: TyCtxt<'tcx>) -> DefId { 222 ( 0.00%) tcx.associated_item(self.item_def_id).container.id() 888 ( 0.00%) } . . /// Extracts the underlying trait reference and own substs from this projection. . /// For example, if this is a projection of `::Item<'a>`, . /// then this function would return a `T: Iterator` trait reference and `['a]` as the own substs 266 ( 0.00%) pub fn trait_ref_and_own_substs( . &self, . tcx: TyCtxt<'tcx>, . ) -> (ty::TraitRef<'tcx>, &'tcx [ty::GenericArg<'tcx>]) { 114 ( 0.00%) let def_id = tcx.associated_item(self.item_def_id).container.id(); . let trait_generics = tcx.generics_of(def_id); 190 ( 0.00%) ( . ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, trait_generics) }, . &self.substs[trait_generics.count()..], . ) 342 ( 0.00%) } . . /// Extracts the underlying trait reference from this projection. . /// For example, if this is a projection of `::Item`, . /// then this function would return a `T: Iterator` trait reference. . /// . /// WARNING: This will drop the substs for generic associated types . /// consider calling [Self::trait_ref_and_own_substs] to get those . /// as well. 999 ( 0.00%) pub fn trait_ref(&self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx> { 111 ( 0.00%) let def_id = self.trait_def_id(tcx); 111 ( 0.00%) ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, tcx.generics_of(def_id)) } 1,221 ( 0.00%) } . 244 ( 0.00%) pub fn self_ty(&self) -> Ty<'tcx> { 732 ( 0.00%) self.substs.type_at(0) 488 ( 0.00%) } . } . . #[derive(Copy, Clone, Debug, TypeFoldable)] . pub struct GenSig<'tcx> { . pub resume_ty: Ty<'tcx>, . pub yield_ty: Ty<'tcx>, . pub return_ty: Ty<'tcx>, . } -- line 1233 ---------------------------------------- -- line 1235 ---------------------------------------- . pub type PolyGenSig<'tcx> = Binder<'tcx, GenSig<'tcx>>; . . /// Signature of a function type, which we have arbitrarily . /// decided to use to refer to the input/output types. . /// . /// - `inputs`: is the list of arguments and their modes. . /// - `output`: is the return type. . /// - `c_variadic`: indicates whether this is a C-variadic function. 976 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 712 ( 0.00%) #[derive(HashStable, TypeFoldable)] . 
pub struct FnSig<'tcx> { 95 ( 0.00%) pub inputs_and_output: &'tcx List>, 1,003 ( 0.00%) pub c_variadic: bool, 228 ( 0.00%) pub unsafety: hir::Unsafety, 936 ( 0.00%) pub abi: abi::Abi, . } . . impl<'tcx> FnSig<'tcx> { 686 ( 0.00%) pub fn inputs(&self) -> &'tcx [Ty<'tcx>] { 2,924 ( 0.00%) &self.inputs_and_output[..self.inputs_and_output.len() - 1] 1,372 ( 0.00%) } . 460 ( 0.00%) pub fn output(&self) -> Ty<'tcx> { 3,394 ( 0.00%) self.inputs_and_output[self.inputs_and_output.len() - 1] 920 ( 0.00%) } . . // Creates a minimal `FnSig` to be used when encountering a `TyKind::Error` in a fallible . // method. . fn fake() -> FnSig<'tcx> { . FnSig { . inputs_and_output: List::empty(), . c_variadic: false, . unsafety: hir::Unsafety::Normal, -- line 1267 ---------------------------------------- -- line 1270 ---------------------------------------- . } . } . . pub type PolyFnSig<'tcx> = Binder<'tcx, FnSig<'tcx>>; . . impl<'tcx> PolyFnSig<'tcx> { . #[inline] . pub fn inputs(&self) -> Binder<'tcx, &'tcx [Ty<'tcx>]> { 61 ( 0.00%) self.map_bound_ref_unchecked(|fn_sig| fn_sig.inputs()) . } . #[inline] . pub fn input(&self, index: usize) -> ty::Binder<'tcx, Ty<'tcx>> { 70 ( 0.00%) self.map_bound_ref(|fn_sig| fn_sig.inputs()[index]) . } . pub fn inputs_and_output(&self) -> ty::Binder<'tcx, &'tcx List>> { . self.map_bound_ref(|fn_sig| fn_sig.inputs_and_output) 10 ( 0.00%) } . #[inline] . pub fn output(&self) -> ty::Binder<'tcx, Ty<'tcx>> { 87 ( 0.00%) self.map_bound_ref(|fn_sig| fn_sig.output()) . } . pub fn c_variadic(&self) -> bool { 20 ( 0.00%) self.skip_binder().c_variadic 10 ( 0.00%) } . pub fn unsafety(&self) -> hir::Unsafety { 84 ( 0.00%) self.skip_binder().unsafety 42 ( 0.00%) } . pub fn abi(&self) -> abi::Abi { 192 ( 0.00%) self.skip_binder().abi 48 ( 0.00%) } . } . . pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder<'tcx, FnSig<'tcx>>>; . 8 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub struct ParamTy { 2 ( 0.00%) pub index: u32, . pub name: Symbol, . } . . impl<'tcx> ParamTy { 2 ( 0.00%) pub fn new(index: u32, name: Symbol) -> ParamTy { . ParamTy { index, name } 1 ( 0.00%) } . . pub fn for_def(def: &ty::GenericParamDef) -> ParamTy { 2 ( 0.00%) ParamTy::new(def.index, def.name) 1 ( 0.00%) } . . #[inline] . pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { . tcx.mk_ty_param(self.index, self.name) . } . } . . #[derive(Copy, Clone, Hash, TyEncodable, TyDecodable, Eq, PartialEq, Ord, PartialOrd)] . #[derive(HashStable)] . pub struct ParamConst { 12 ( 0.00%) pub index: u32, 12 ( 0.00%) pub name: Symbol, . } . . impl ParamConst { . pub fn new(index: u32, name: Symbol) -> ParamConst { . ParamConst { index, name } . } . . pub fn for_def(def: &ty::GenericParamDef) -> ParamConst { -- line 1338 ---------------------------------------- -- line 1440 ---------------------------------------- . /// the inference variable is supposed to satisfy the relation . /// *for every value of the placeholder region*. To ensure that doesn't . /// happen, you can use `leak_check`. This is more clearly explained . /// by the [rustc dev guide]. . /// . /// [1]: https://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/ . /// [2]: https://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/ . /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html 92,628 ( 0.09%) #[derive(Clone, PartialEq, Eq, Hash, Copy, TyEncodable, TyDecodable, PartialOrd, Ord)] . 
pub enum RegionKind { . /// Region bound in a type or fn declaration which will be . /// substituted 'early' -- that is, at the same time when type . /// parameters are substituted. . ReEarlyBound(EarlyBoundRegion), . . /// Region bound in a function scope, which will be substituted when the . /// function is called. 3,120 ( 0.00%) ReLateBound(ty::DebruijnIndex, BoundRegion), . . /// When checking a function body, the types of all arguments and so forth . /// that refer to bound region parameters are modified to refer to free . /// region parameters. . ReFree(FreeRegion), . . /// Static data that has an "infinite" lifetime. Top in the region lattice. . ReStatic, -- line 1465 ---------------------------------------- -- line 1478 ---------------------------------------- . /// regions visible from `U`, but not less than regions not visible . /// from `U`. . ReEmpty(ty::UniverseIndex), . . /// Erased region, used by trait selection, in MIR and during codegen. . ReErased, . } . 2,108 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)] . pub struct EarlyBoundRegion { 324 ( 0.00%) pub def_id: DefId, 1,434 ( 0.00%) pub index: u32, 1,058 ( 0.00%) pub name: Symbol, . } . . /// A **`const`** **v**ariable **ID**. . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] . pub struct ConstVid<'tcx> { 19 ( 0.00%) pub index: u32, . pub phantom: PhantomData<&'tcx ()>, . } . . rustc_index::newtype_index! { . /// A **region** (lifetime) **v**ariable **ID**. . pub struct RegionVid { . DEBUG_FORMAT = custom, . } -- line 1504 ---------------------------------------- -- line 1513 ---------------------------------------- . rustc_index::newtype_index! { . pub struct BoundVar { .. } . } . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub struct BoundTy { . pub var: BoundVar, 47 ( 0.00%) pub kind: BoundTyKind, . } . 844 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub enum BoundTyKind { . Anon, . Param(Symbol), . } . . impl From for BoundTy { . fn from(var: BoundVar) -> Self { -- line 1532 ---------------------------------------- -- line 1616 ---------------------------------------- . RegionKind::RePlaceholder(placeholder) => placeholder.name.is_named(), . RegionKind::ReEmpty(_) => false, . RegionKind::ReErased => false, . } . } . . #[inline] . pub fn is_late_bound(&self) -> bool { 36 ( 0.00%) matches!(*self, ty::ReLateBound(..)) . } . . #[inline] . pub fn is_placeholder(&self) -> bool { . matches!(*self, ty::RePlaceholder(..)) . } . . #[inline] . pub fn bound_at_or_above_binder(&self, index: ty::DebruijnIndex) -> bool { 2,916 ( 0.00%) match *self { . ty::ReLateBound(debruijn, _) => debruijn >= index, . _ => false, . } . } . . pub fn type_flags(&self) -> TypeFlags { . let mut flags = TypeFlags::empty(); . 4,915 ( 0.00%) match *self { . ty::ReVar(..) => { . flags = flags | TypeFlags::HAS_FREE_REGIONS; . flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; . flags = flags | TypeFlags::HAS_RE_INFER; . } . ty::RePlaceholder(..) => { . flags = flags | TypeFlags::HAS_FREE_REGIONS; . flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; -- line 1651 ---------------------------------------- -- line 1669 ---------------------------------------- . ty::ReErased => { . flags = flags | TypeFlags::HAS_RE_ERASED; . } . } . . debug!("type_flags({:?}) = {:?}", self, flags); . . 
flags 129 ( 0.00%) } . . /// Given an early-bound or free region, returns the `DefId` where it was bound. . /// For example, consider the regions in this snippet of code: . /// . /// ``` . /// impl<'a> Foo { . /// ^^ -- early bound, declared on an impl . /// -- line 1685 ---------------------------------------- -- line 1713 ---------------------------------------- . . #[inline(always)] . pub fn flags(&self) -> TypeFlags { . self.flags . } . . #[inline] . pub fn is_unit(&self) -> bool { 68 ( 0.00%) match self.kind() { 18 ( 0.00%) Tuple(ref tys) => tys.is_empty(), . _ => false, . } . } . . #[inline] . pub fn is_never(&self) -> bool { 641 ( 0.00%) matches!(self.kind(), Never) . } . . #[inline] . pub fn is_primitive(&self) -> bool { . self.kind().is_primitive() . } . . #[inline] -- line 1737 ---------------------------------------- -- line 1741 ---------------------------------------- . . #[inline] . pub fn is_ref(&self) -> bool { . matches!(self.kind(), Ref(..)) . } . . #[inline] . pub fn is_ty_var(&self) -> bool { 987 ( 0.00%) matches!(self.kind(), Infer(TyVar(_))) . } . . #[inline] . pub fn ty_vid(&self) -> Option { 780 ( 0.00%) match self.kind() { 79 ( 0.00%) &Infer(TyVar(vid)) => Some(vid), . _ => None, . } . } . . #[inline] . pub fn is_ty_infer(&self) -> bool { . matches!(self.kind(), Infer(_)) . } -- line 1763 ---------------------------------------- -- line 1775 ---------------------------------------- . /// Returns `true` if this type is a `str`. . #[inline] . pub fn is_str(&self) -> bool { . *self.kind() == Str . } . . #[inline] . pub fn is_param(&self, index: u32) -> bool { 5 ( 0.00%) match self.kind() { . ty::Param(ref data) => data.index == index, . _ => false, . } . } . . #[inline] . pub fn is_slice(&self) -> bool { . match self.kind() { . RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => matches!(ty.kind(), Slice(_) | Str), . _ => false, . } . } . . #[inline] . pub fn is_array(&self) -> bool { 10 ( 0.00%) matches!(self.kind(), Array(..)) . } . . #[inline] . pub fn is_simd(&self) -> bool { . match self.kind() { . Adt(def, _) => def.repr.simd(), . _ => false, . } -- line 1807 ---------------------------------------- -- line 1861 ---------------------------------------- . match self.kind() { . Ref(_, _, mutability) => Some(*mutability), . _ => None, . } . } . . #[inline] . pub fn is_unsafe_ptr(&self) -> bool { 36 ( 0.00%) matches!(self.kind(), RawPtr(_)) . } . . /// Tests if this is any kind of primitive pointer type (reference, raw pointer, fn pointer). . #[inline] . pub fn is_any_ptr(&self) -> bool { . self.is_region_ptr() || self.is_unsafe_ptr() || self.is_fn_ptr() . } . . #[inline] . pub fn is_box(&self) -> bool { 782 ( 0.00%) match self.kind() { 95 ( 0.00%) Adt(def, _) => def.is_box(), . _ => false, . } . } . . /// Panics if called on any type other than `Box`. . pub fn boxed_ty(&self) -> Ty<'tcx> { . match self.kind() { . Adt(def, substs) if def.is_box() => substs.type_at(0), -- line 1889 ---------------------------------------- -- line 1891 ---------------------------------------- . } . } . . /// A scalar type is one that denotes an atomic datum, with no sub-components. . /// (A RawPtr is scalar because it represents a non-managed pointer, so its . /// contents are abstract to rustc.) . #[inline] . pub fn is_scalar(&self) -> bool { 39 ( 0.00%) matches!( 92 ( 0.00%) self.kind(), . Bool | Char . | Int(_) . | Float(_) . | Uint(_) . | FnDef(..) . | FnPtr(_) . | RawPtr(_) . | Infer(IntVar(_) | FloatVar(_)) . ) . } . . /// Returns `true` if this type is a floating point type. . 
#[inline] . pub fn is_floating_point(&self) -> bool { 1 ( 0.00%) matches!(self.kind(), Float(_) | Infer(FloatVar(_))) . } . . #[inline] . pub fn is_trait(&self) -> bool { . matches!(self.kind(), Dynamic(..)) . } . . #[inline] . pub fn is_enum(&self) -> bool { . matches!(self.kind(), Adt(adt_def, _) if adt_def.is_enum()) . } . . #[inline] . pub fn is_union(&self) -> bool { 73 ( 0.00%) matches!(self.kind(), Adt(adt_def, _) if adt_def.is_union()) . } . . #[inline] . pub fn is_closure(&self) -> bool { 8 ( 0.00%) matches!(self.kind(), Closure(..)) . } . . #[inline] . pub fn is_generator(&self) -> bool { 12 ( 0.00%) matches!(self.kind(), Generator(..)) . } . . #[inline] . pub fn is_integral(&self) -> bool { 149 ( 0.00%) matches!(self.kind(), Infer(IntVar(_)) | Int(_) | Uint(_)) . } . . #[inline] . pub fn is_fresh_ty(&self) -> bool { . matches!(self.kind(), Infer(FreshTy(_))) . } . . #[inline] . pub fn is_fresh(&self) -> bool { 145 ( 0.00%) matches!(self.kind(), Infer(FreshTy(_) | FreshIntTy(_) | FreshFloatTy(_))) . } . . #[inline] . pub fn is_char(&self) -> bool { . matches!(self.kind(), Char) . } . . #[inline] -- line 1963 ---------------------------------------- -- line 1967 ---------------------------------------- . . #[inline] . pub fn is_signed(&self) -> bool { . matches!(self.kind(), Int(_)) . } . . #[inline] . pub fn is_ptr_sized_integral(&self) -> bool { 18 ( 0.00%) matches!(self.kind(), Int(ty::IntTy::Isize) | Uint(ty::UintTy::Usize)) . } . . #[inline] . pub fn has_concrete_skeleton(&self) -> bool { . !matches!(self.kind(), Param(_) | Infer(_) | Error(_)) . } . . /// Returns the type and mutability of `*ty`. . /// . /// The parameter `explicit` indicates if this is an *explicit* dereference. . /// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly. 136 ( 0.00%) pub fn builtin_deref(&self, explicit: bool) -> Option> { 737 ( 0.00%) match self.kind() { 24 ( 0.00%) Adt(def, _) if def.is_box() => { . Some(TypeAndMut { ty: self.boxed_ty(), mutbl: hir::Mutability::Not }) . } 412 ( 0.00%) Ref(_, ty, mutbl) => Some(TypeAndMut { ty, mutbl: *mutbl }), . RawPtr(mt) if explicit => Some(*mt), . _ => None, . } 272 ( 0.00%) } . . /// Returns the type of `ty[i]`. . pub fn builtin_index(&self) -> Option> { 8 ( 0.00%) match self.kind() { . Array(ty, _) | Slice(ty) => Some(ty), . _ => None, . } 2 ( 0.00%) } . 1,368 ( 0.00%) pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { 684 ( 0.00%) match self.kind() { 366 ( 0.00%) FnDef(def_id, substs) => tcx.fn_sig(*def_id).subst(tcx, substs), 245 ( 0.00%) FnPtr(f) => *f, . Error(_) => { . // ignore errors (#54954) . ty::Binder::dummy(FnSig::fake()) . } . Closure(..) => bug!( . "to get the signature of a closure, use `substs.as_closure().sig()` not `fn_sig()`", . ), . _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self), . } 1,539 ( 0.00%) } . . #[inline] . pub fn is_fn(&self) -> bool { 3 ( 0.00%) matches!(self.kind(), FnDef(..) | FnPtr(_)) . } . . #[inline] . pub fn is_fn_ptr(&self) -> bool { . matches!(self.kind(), FnPtr(_)) . } . . #[inline] . pub fn is_impl_trait(&self) -> bool { . matches!(self.kind(), Opaque(..)) . } . . #[inline] . pub fn ty_adt_def(&self) -> Option<&'tcx AdtDef> { 26 ( 0.00%) match self.kind() { . Adt(adt, _) => Some(adt), . _ => None, . } . } . . /// Iterates over tuple fields. . /// Panics when called on anything but a tuple. . pub fn tuple_fields(&self) -> impl DoubleEndedIterator> { 16 ( 0.00%) match self.kind() { 8 ( 0.00%) Tuple(substs) => substs.iter().map(|field| field.expect_ty()), . 
_ => bug!("tuple_fields called on non-tuple"), . } . } . . /// Get the `i`-th element of a tuple. . /// Panics when called on anything but a tuple. . pub fn tuple_element_ty(&self, i: usize) -> Option> { . match self.kind() { -- line 2056 ---------------------------------------- -- line 2095 ---------------------------------------- . TyKind::Generator(def_id, substs, _) => { . Some(substs.as_generator().discriminant_for_variant(*def_id, tcx, variant_index)) . } . _ => None, . } . } . . /// Returns the type of the discriminant of this type. 60 ( 0.00%) pub fn discriminant_ty(&'tcx self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 30 ( 0.00%) match self.kind() { 48 ( 0.00%) ty::Adt(adt, _) if adt.is_enum() => adt.repr.discr_type().to_ty(tcx), . ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx), . . ty::Param(_) | ty::Projection(_) | ty::Opaque(..) | ty::Infer(ty::TyVar(_)) => { . let assoc_items = tcx.associated_item_def_ids( . tcx.require_lang_item(hir::LangItem::DiscriminantKind, None), . ); . tcx.mk_projection(assoc_items[0], tcx.intern_substs(&[self.into()])) . } -- line 2113 ---------------------------------------- -- line 2135 ---------------------------------------- . | ty::Infer(IntVar(_) | FloatVar(_)) => tcx.types.u8, . . ty::Bound(..) . | ty::Placeholder(_) . | ty::Infer(FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`discriminant_ty` applied to unexpected type: {:?}", self) . } . } 48 ( 0.00%) } . . /// Returns the type of metadata for (potentially fat) pointers to this type. . pub fn ptr_metadata_ty( . &'tcx self, . tcx: TyCtxt<'tcx>, . normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, . ) -> Ty<'tcx> { . let tail = tcx.struct_tail_with_normalize(self, normalize); -- line 2151 ---------------------------------------- -- line 2229 ---------------------------------------- . /// Returning true means the type is known to be sized. Returning . /// `false` means nothing -- could be sized, might not be. . /// . /// Note that we could never rely on the fact that a type such as `[_]` is . /// trivially `!Sized` because we could be in a type environment with a . /// bound such as `[_]: Copy`. A function with such a bound obviously never . /// can be called, but that doesn't mean it shouldn't typecheck. This is why . /// this method doesn't return `Option`. 2,367 ( 0.00%) pub fn is_trivially_sized(&self, tcx: TyCtxt<'tcx>) -> bool { 1,578 ( 0.00%) match self.kind() { . ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) . | ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::RawPtr(..) -- line 2246 ---------------------------------------- -- line 2250 ---------------------------------------- . | ty::GeneratorWitness(..) . | ty::Array(..) . | ty::Closure(..) . | ty::Never . | ty::Error(_) => true, . . ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false, . 20 ( 0.00%) ty::Tuple(tys) => tys.iter().all(|ty| ty.expect_ty().is_trivially_sized(tcx)), . 144 ( 0.00%) ty::Adt(def, _substs) => def.sized_constraint(tcx).is_empty(), . . ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => false, . . ty::Infer(ty::TyVar(_)) => false, . . ty::Bound(..) . | ty::Placeholder(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`is_trivially_sized` applied to unexpected type: {:?}", self) . } . } 2,367 ( 0.00%) } . } . . /// Extra information about why we ended up with a particular variance. . /// This is only used to add more information to error messages, and . /// has no effect on soundness. 
While choosing the 'wrong' `VarianceDiagInfo` . /// may lead to confusing notes in error messages, it will never cause . /// a miscompilation or unsoundness. . /// -- line 2280 ---------------------------------------- -- line 2295 ---------------------------------------- . /// (e.g. `0` for `*mut T`, `1` for `MyStruct<'CovariantParam, 'InvariantParam>`) . param_index: u32, . }, . } . . impl<'tcx> VarianceDiagInfo<'tcx> { . /// Mirrors `Variance::xform` - used to 'combine' the existing . /// and new `VarianceDiagInfo`s when our variance changes. 676 ( 0.00%) pub fn xform(self, other: VarianceDiagInfo<'tcx>) -> VarianceDiagInfo<'tcx> { . // For now, just use the first `VarianceDiagInfo::Invariant` that we see 1,014 ( 0.00%) match self { . VarianceDiagInfo::None => other, . VarianceDiagInfo::Invariant { .. } => self, . } 338 ( 0.00%) } . } 332,926 ( 0.33%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs -------------------------------------------------------------------------------- Ir -- line 84 ---------------------------------------- . pub struct SessionGlobals { . symbol_interner: symbol::Interner, . span_interner: Lock, . hygiene_data: Lock, . source_map: Lock>>, . } . . impl SessionGlobals { 12 ( 0.00%) pub fn new(edition: Edition) -> SessionGlobals { 50 ( 0.00%) SessionGlobals { 2 ( 0.00%) symbol_interner: symbol::Interner::fresh(), . span_interner: Lock::new(span_encoding::SpanInterner::default()), 6 ( 0.00%) hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), . source_map: Lock::new(None), . } 10 ( 0.00%) } . } . . #[inline] . pub fn create_session_globals_then(edition: Edition, f: impl FnOnce() -> R) -> R { 1 ( 0.00%) assert!( . !SESSION_GLOBALS.is_set(), . "SESSION_GLOBALS should never be overwritten! \ . Use another thread if you need another SessionGlobals" . ); 3 ( 0.00%) let session_globals = SessionGlobals::new(edition); 7 ( 0.00%) SESSION_GLOBALS.set(&session_globals, f) 1 ( 0.00%) } . . #[inline] . pub fn set_session_globals_then(session_globals: &SessionGlobals, f: impl FnOnce() -> R) -> R { . assert!( . !SESSION_GLOBALS.is_set(), . "SESSION_GLOBALS should never be overwritten! \ . Use another thread if you need another SessionGlobals" . ); -- line 119 ---------------------------------------- -- line 120 ---------------------------------------- . SESSION_GLOBALS.set(session_globals, f) . } . . #[inline] . pub fn create_default_session_if_not_set_then(f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 4 ( 0.00%) create_session_if_not_set_then(edition::DEFAULT_EDITION, f) . } . . #[inline] . pub fn create_session_if_not_set_then(edition: Edition, f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 1 ( 0.00%) if !SESSION_GLOBALS.is_set() { 3 ( 0.00%) let session_globals = SessionGlobals::new(edition); 11 ( 0.00%) SESSION_GLOBALS.set(&session_globals, || SESSION_GLOBALS.with(f)) 1 ( 0.00%) } else { . SESSION_GLOBALS.with(f) . } . } . . #[inline] . pub fn with_session_globals(f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 12,202 ( 0.01%) SESSION_GLOBALS.with(f) . } . . #[inline] . pub fn create_default_session_globals_then(f: impl FnOnce() -> R) -> R { . create_session_globals_then(edition::DEFAULT_EDITION, f) . } . . // If this ever becomes non thread-local, `decode_syntax_context` . // and `decode_expn_id` will need to be updated to handle concurrent . // deserialization. . 
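// Added orientation note: `SESSION_GLOBALS` below is only set for the duration
// of one of the `create_*session_globals_then` / `set_session_globals_then`
// calls above, and `with_session_globals` is how callers reach the symbol and
// span interners, the hygiene data, and the optional source map stored in it.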
scoped_tls::scoped_thread_local!(static SESSION_GLOBALS: SessionGlobals); . . // FIXME: We should use this enum or something like it to get rid of the . // use of magic `/rust/1.x/...` paths across the board. 13 ( 0.00%) #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd)] 1,040 ( 0.00%) #[derive(Decodable)] . pub enum RealFileName { . LocalPath(PathBuf), . /// For remapped paths (namely paths into libstd that have been mapped . /// to the appropriate spot on the local host's file system, and local file . /// system paths that have been remapped with `FilePathMapping`), . Remapped { . /// `local_path` is the (host-dependent) local path to the file. This is . /// None if the file was imported from another crate -- line 173 ---------------------------------------- -- line 179 ---------------------------------------- . } . . impl Hash for RealFileName { . fn hash(&self, state: &mut H) { . // To prevent #70924 from happening again we should only hash the . // remapped (virtualized) path if that exists. This is because . // virtualized paths to sysroot crates (/rust/$hash or /rust/$version) . // remain stable even if the corresponding local_path changes 2,625 ( 0.00%) self.remapped_path_if_available().hash(state) . } . } . . // This is functionally identical to #[derive(Encodable)], with the exception of . // an added assert statement . impl Encodable for RealFileName { . fn encode(&self, encoder: &mut S) -> Result<(), S::Error> { 2 ( 0.00%) encoder.emit_enum(|encoder| match *self { . RealFileName::LocalPath(ref local_path) => { 8 ( 0.00%) encoder.emit_enum_variant("LocalPath", 0, 1, |encoder| { . encoder.emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?; . Ok(()) . }) . } . . RealFileName::Remapped { ref local_path, ref virtual_name } => encoder . .emit_enum_variant("Remapped", 1, 2, |encoder| { . // For privacy and build reproducibility, we must not embed host-dependant path in artifacts -- line 205 ---------------------------------------- -- line 224 ---------------------------------------- . p.as_ref().map(PathBuf::as_path) . } . } . } . . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. 2 ( 0.00%) pub fn into_local_path(self) -> Option { 2 ( 0.00%) match self { . RealFileName::LocalPath(p) => Some(p), . RealFileName::Remapped { local_path: p, virtual_name: _ } => p, . } 3 ( 0.00%) } . . /// Returns the path suitable for embedding into build artifacts. This would still . /// be a local path if it has not been remapped. A remapped path will not correspond . /// to a valid file system path: see `local_path_if_available()` for something that . /// is more likely to return paths into the local host file system. . pub fn remapped_path_if_available(&self) -> &Path { 2,096 ( 0.00%) match self { . RealFileName::LocalPath(p) . | RealFileName::Remapped { local_path: _, virtual_name: p } => &p, . } 2 ( 0.00%) } . . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. Otherwise returns the remapped name. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. . pub fn local_path_if_available(&self) -> &Path { 1 ( 0.00%) match self { . RealFileName::LocalPath(path) . | RealFileName::Remapped { local_path: None, virtual_name: path } . | RealFileName::Remapped { local_path: Some(path), virtual_name: _ } => path, . } . } . . 
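// A minimal illustration of the two path accessors above; the paths are
// invented and only show which field each accessor picks:
//
//     let name = RealFileName::Remapped {
//         local_path: Some(PathBuf::from("/home/user/rust/library/std/src/lib.rs")),
//         virtual_name: PathBuf::from("/rust/1.x/library/std/src/lib.rs"),
//     };
//     assert_eq!(name.local_path_if_available(),
//                Path::new("/home/user/rust/library/std/src/lib.rs"));
//     assert_eq!(name.remapped_path_if_available(),
//                Path::new("/rust/1.x/library/std/src/lib.rs"));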
pub fn to_string_lossy(&self, display_pref: FileNameDisplayPreference) -> Cow<'_, str> { 1 ( 0.00%) match display_pref { . FileNameDisplayPreference::Local => self.local_path_if_available().to_string_lossy(), . FileNameDisplayPreference::Remapped => { . self.remapped_path_if_available().to_string_lossy() . } . } . } . } . . /// Differentiates between real files and common virtual files. 5,357 ( 0.01%) #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)] 6,283 ( 0.01%) #[derive(Decodable, Encodable)] . pub enum FileName { . Real(RealFileName), . /// Call to `quote!`. . QuoteExpansion(u64), . /// Command line. . Anon(u64), . /// Hack in `src/librustc_ast/parse.rs`. . // FIXME(jseyfried) -- line 281 ---------------------------------------- -- line 288 ---------------------------------------- . /// Custom sources for explicit parser calls from plugins and drivers. . Custom(String), . DocTest(PathBuf, isize), . /// Post-substitution inline assembly from LLVM. . InlineAsm(u64), . } . . impl From for FileName { 7 ( 0.00%) fn from(p: PathBuf) -> Self { 2 ( 0.00%) assert!(!p.to_string_lossy().ends_with('>')); 6 ( 0.00%) FileName::Real(RealFileName::LocalPath(p)) 7 ( 0.00%) } . } . 2 ( 0.00%) #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] . pub enum FileNameDisplayPreference { . Remapped, . Local, . } . . pub struct FileNameDisplay<'a> { . inner: &'a FileName, . display_pref: FileNameDisplayPreference, . } . . impl fmt::Display for FileNameDisplay<'_> { 4 ( 0.00%) fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { . use FileName::*; 6 ( 0.00%) match *self.inner { . Real(ref name) => { 6 ( 0.00%) write!(fmt, "{}", name.to_string_lossy(self.display_pref)) . } . QuoteExpansion(_) => write!(fmt, ""), . MacroExpansion(_) => write!(fmt, ""), . Anon(_) => write!(fmt, ""), . ProcMacroSourceCode(_) => write!(fmt, ""), . CfgSpec(_) => write!(fmt, ""), . CliCrateAttr(_) => write!(fmt, ""), . Custom(ref s) => write!(fmt, "<{}>", s), . DocTest(ref path, _) => write!(fmt, "{}", path.display()), . InlineAsm(_) => write!(fmt, ""), . } 5 ( 0.00%) } . } . . impl FileNameDisplay<'_> { . pub fn to_string_lossy(&self) -> Cow<'_, str> { . match self.inner { . FileName::Real(ref inner) => inner.to_string_lossy(self.display_pref), . _ => Cow::from(format!("{}", self)), . } . } . } . . impl FileName { . pub fn is_real(&self) -> bool { . use FileName::*; 1,054 ( 0.00%) match *self { . Real(_) => true, . Anon(_) . | MacroExpansion(_) . | ProcMacroSourceCode(_) . | CfgSpec(_) . | CliCrateAttr(_) . | Custom(_) . | QuoteExpansion(_) -- line 353 ---------------------------------------- -- line 357 ---------------------------------------- . } . . pub fn prefer_remapped(&self) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped } . } . . // This may include transient local filesystem information. . // Must not be embedded in build outputs. 1 ( 0.00%) pub fn prefer_local(&self) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Local } 2 ( 0.00%) } . . pub fn display(&self, display_pref: FileNameDisplayPreference) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref } . } . . pub fn macro_expansion_source_code(src: &str) -> FileName { . let mut hasher = StableHasher::new(); . src.hash(&mut hasher); -- line 375 ---------------------------------------- -- line 423 ---------------------------------------- . 
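A rough sketch of how the `FileName` display preferences annotated above are meant to be consumed. It assumes the `rustc_span` crate can be linked against (it is a compiler-internal crate), and the path literal is invented:

    use std::path::PathBuf;
    use rustc_span::{FileName, FileNameDisplayPreference};

    fn show(name: &FileName) {
        // `prefer_local` may expose host-dependent paths, so it is meant for
        // user-facing diagnostics; `prefer_remapped` yields the form that is
        // safe to embed in build artifacts.
        println!("local:    {}", name.prefer_local());
        println!("remapped: {}", name.prefer_remapped());
        println!("explicit: {}", name.display(FileNameDisplayPreference::Remapped));
    }

    fn main() {
        let name = FileName::from(PathBuf::from("src/lib.rs"));
        show(&name);
    }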
/// that the length of the span is equal to `span.hi - span.lo`; there may be space in the . /// [`BytePos`] range between files. . /// . /// `SpanData` is public because `Span` uses a thread-local interner and can't be . /// sent to other threads, but some pieces of performance infra run in a separate thread. . /// Using `Span` is generally preferred. . #[derive(Clone, Copy, Hash, PartialEq, Eq)] . pub struct SpanData { 1 ( 0.00%) pub lo: BytePos, 1 ( 0.00%) pub hi: BytePos, . /// Information about where the macro came from, if this piece of . /// code was created by a macro expansion. 3 ( 0.00%) pub ctxt: SyntaxContext, 1 ( 0.00%) pub parent: Option, . } . . // Order spans by position in the file. . impl Ord for SpanData { . fn cmp(&self, other: &Self) -> Ordering { . let SpanData { . lo: s_lo, . hi: s_hi, -- line 444 ---------------------------------------- -- line 485 ---------------------------------------- . } . #[inline] . pub fn with_parent(&self, parent: Option) -> Span { . Span::new(self.lo, self.hi, self.ctxt, parent) . } . /// Returns `true` if this is a dummy span with any hygienic context. . #[inline] . pub fn is_dummy(self) -> bool { 3,882 ( 0.00%) self.lo.0 == 0 && self.hi.0 == 0 . } . /// Returns `true` if `self` fully encloses `other`. . pub fn contains(self, other: Self) -> bool { 15 ( 0.00%) self.lo <= other.lo && other.hi <= self.hi . } . } . . // The interner is pointed to by a thread local value which is only set on the main thread . // with parallelization is disabled. So we don't allow `Span` to transfer between threads . // to avoid panics and other errors, even though it would be memory safe to do so. . #[cfg(not(parallel_compiler))] . impl !Send for Span {} . #[cfg(not(parallel_compiler))] . impl !Sync for Span {} . . impl PartialOrd for Span { 4,184 ( 0.00%) fn partial_cmp(&self, rhs: &Self) -> Option { 3,138 ( 0.00%) PartialOrd::partial_cmp(&self.data(), &rhs.data()) 4,184 ( 0.00%) } . } . impl Ord for Span { . fn cmp(&self, rhs: &Self) -> Ordering { . Ord::cmp(&self.data(), &rhs.data()) . } . } . . /// A collection of `Span`s. -- line 520 ---------------------------------------- -- line 532 ---------------------------------------- . } . . impl Span { . #[inline] . pub fn lo(self) -> BytePos { . self.data().lo . } . #[inline] 1,242 ( 0.00%) pub fn with_lo(self, lo: BytePos) -> Span { . self.data().with_lo(lo) 828 ( 0.00%) } . #[inline] 248 ( 0.00%) pub fn hi(self) -> BytePos { . self.data().hi 248 ( 0.00%) } . #[inline] 756 ( 0.00%) pub fn with_hi(self, hi: BytePos) -> Span { . self.data().with_hi(hi) 504 ( 0.00%) } . #[inline] . pub fn ctxt(self) -> SyntaxContext { . self.data_untracked().ctxt . } . #[inline] 25 ( 0.00%) pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span { . self.data_untracked().with_ctxt(ctxt) 15 ( 0.00%) } . #[inline] . pub fn parent(self) -> Option { . self.data().parent . } . #[inline] . pub fn with_parent(self, ctxt: Option) -> Span { . self.data().with_parent(ctxt) . } -- line 566 ---------------------------------------- -- line 600 ---------------------------------------- . /// Returns a new span representing an empty span at the beginning of this span. . #[inline] . pub fn shrink_to_lo(self) -> Span { . let span = self.data_untracked(); . span.with_hi(span.lo) . } . /// Returns a new span representing an empty span at the end of this span. . #[inline] 9 ( 0.00%) pub fn shrink_to_hi(self) -> Span { . let span = self.data_untracked(); . span.with_lo(span.hi) 6 ( 0.00%) } . . #[inline] . /// Returns `true` if `hi == lo`. . 
pub fn is_empty(self) -> bool { . let span = self.data_untracked(); . span.hi == span.lo . } . . /// Returns `self` if `self` is not the dummy span, and `other` otherwise. . pub fn substitute_dummy(self, other: Span) -> Span { . if self.is_dummy() { other } else { self } . } . . /// Returns `true` if `self` fully encloses `other`. 45 ( 0.00%) pub fn contains(self, other: Span) -> bool { . let span = self.data(); . let other = other.data(); . span.contains(other) 30 ( 0.00%) } . . /// Returns `true` if `self` touches `other`. . pub fn overlaps(self, other: Span) -> bool { . let span = self.data(); . let other = other.data(); . span.lo < other.hi && other.lo < span.hi . } . -- line 638 ---------------------------------------- -- line 663 ---------------------------------------- . /// The `Span` for the tokens in the previous macro expansion from which `self` was generated, . /// if any. . pub fn parent_callsite(self) -> Option { . let expn_data = self.ctxt().outer_expn_data(); . if !expn_data.is_root() { Some(expn_data.call_site) } else { None } . } . . /// Walk down the expansion ancestors to find a span that's contained within `outer`. 30 ( 0.00%) pub fn find_ancestor_inside(mut self, outer: Span) -> Option { 15 ( 0.00%) while !outer.contains(self) { . self = self.parent_callsite()?; . } . Some(self) 33 ( 0.00%) } . . /// Edition of the crate from which this span came. 399 ( 0.00%) pub fn edition(self) -> edition::Edition { . self.ctxt().edition() 266 ( 0.00%) } . . #[inline] . pub fn rust_2015(self) -> bool { 74 ( 0.00%) self.edition() == edition::Edition::Edition2015 . } . . #[inline] . pub fn rust_2018(self) -> bool { 56 ( 0.00%) self.edition() >= edition::Edition::Edition2018 . } . . #[inline] . pub fn rust_2021(self) -> bool { 82 ( 0.00%) self.edition() >= edition::Edition::Edition2021 . } . . /// Returns the source callee. . /// . /// Returns `None` if the supplied span has no expansion trace, . /// else returns the `ExpnData` for the macro definition . /// corresponding to the source callsite. . pub fn source_callee(self) -> Option { -- line 703 ---------------------------------------- -- line 707 ---------------------------------------- . } . let expn_data = self.ctxt().outer_expn_data(); . if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None } . } . . /// Checks if a span is "internal" to a macro in which `#[unstable]` . /// items can be used (that is, a macro marked with . /// `#[allow_internal_unstable]`). 5 ( 0.00%) pub fn allows_unstable(self, feature: Symbol) -> bool { 1 ( 0.00%) self.ctxt() . .outer_expn_data() . .allow_internal_unstable . .map_or(false, |features| features.iter().any(|&f| f == feature)) 4 ( 0.00%) } . . /// Checks if this span arises from a compiler desugaring of kind `kind`. 182 ( 0.00%) pub fn is_desugaring(self, kind: DesugaringKind) -> bool { 156 ( 0.00%) match self.ctxt().outer_expn_data().kind { . ExpnKind::Desugaring(k) => k == kind, . _ => false, . } 130 ( 0.00%) } . . /// Returns the compiler desugaring that created this span, or `None` . /// if this span is not from a desugaring. 15 ( 0.00%) pub fn desugaring_kind(self) -> Option { 18 ( 0.00%) match self.ctxt().outer_expn_data().kind { . ExpnKind::Desugaring(k) => Some(k), . _ => None, . } 12 ( 0.00%) } . . /// Checks if a span is "internal" to a macro in which `unsafe` . /// can be used without triggering the `unsafe_code` lint. . // (that is, a macro marked with `#[allow_internal_unsafe]`). . pub fn allows_unsafe(self) -> bool { . 
self.ctxt().outer_expn_data().allow_internal_unsafe . } . -- line 745 ---------------------------------------- -- line 767 ---------------------------------------- . . /// Returns a `Span` that would enclose both `self` and `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^^^^^^^^ . /// ``` 7,733 ( 0.01%) pub fn to(self, end: Span) -> Span { . let span_data = self.data(); . let end_data = end.data(); . // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). . // Return the macro span on its own to avoid weird diagnostic output. It is preferable to . // have an incomplete span than a completely nonsensical one. 1,404 ( 0.00%) if span_data.ctxt != end_data.ctxt { 2 ( 0.00%) if span_data.ctxt == SyntaxContext::root() { . return end; 2 ( 0.00%) } else if end_data.ctxt == SyntaxContext::root() { . return self; . } . // Both spans fall within a macro. . // FIXME(estebank): check if it is the *same* macro. . } . Span::new( . cmp::min(span_data.lo, end_data.lo), . cmp::max(span_data.hi, end_data.hi), . if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, 4,921 ( 0.00%) if span_data.parent == end_data.parent { span_data.parent } else { None }, . ) 6,327 ( 0.01%) } . . /// Returns a `Span` between the end of `self` to the beginning of `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^ . /// ``` 66 ( 0.00%) pub fn between(self, end: Span) -> Span { . let span = self.data(); . let end = end.data(); . Span::new( . span.hi, . end.lo, . if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt }, 42 ( 0.00%) if span.parent == end.parent { span.parent } else { None }, . ) 48 ( 0.00%) } . . /// Returns a `Span` from the beginning of `self` until the beginning of `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^^^^^ . /// ``` 66 ( 0.00%) pub fn until(self, end: Span) -> Span { . // Most of this function's body is copied from `to`. . // We can't just do `self.to(end.shrink_to_lo())`, . // because to also does some magic where it uses min/max so . // it can handle overlapping spans. Some advanced mis-use of . // `until` with different ctxts makes this visible. . let span_data = self.data(); . let end_data = end.data(); . // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). . // Return the macro span on its own to avoid weird diagnostic output. It is preferable to . // have an incomplete span than a completely nonsensical one. 12 ( 0.00%) if span_data.ctxt != end_data.ctxt { . if span_data.ctxt == SyntaxContext::root() { . return end; . } else if end_data.ctxt == SyntaxContext::root() { . return self; . } . // Both spans fall within a macro. . // FIXME(estebank): check if it is the *same* macro. . } . Span::new( . span_data.lo, . end_data.lo, . if end_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, 42 ( 0.00%) if span_data.parent == end_data.parent { span_data.parent } else { None }, . ) 54 ( 0.00%) } . . pub fn from_inner(self, inner: InnerSpan) -> Span { . let span = self.data(); . Span::new( . span.lo + BytePos::from_usize(inner.start), . span.lo + BytePos::from_usize(inner.end), . span.ctxt, . span.parent, . ) . } . . /// Equivalent of `Span::def_site` from the proc macro API, . /// except that the location is taken from the `self` span. . 
pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span { 2 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::Opaque) . } . . /// Equivalent of `Span::call_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_call_site_ctxt(self, expn_id: ExpnId) -> Span { 2 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::Transparent) . } . . /// Equivalent of `Span::mixed_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_mixed_site_ctxt(self, expn_id: ExpnId) -> Span { . self.with_ctxt_from_mark(expn_id, Transparency::SemiTransparent) . } . . /// Produces a span with the same location as `self` and context produced by a macro with the . /// given ID and transparency, assuming that macro was defined directly and not produced by . /// some other macro (which is the case for built-in and procedural macros). 24 ( 0.00%) pub fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { . self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency)) 14 ( 0.00%) } . . #[inline] . pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.apply_mark(expn_id, transparency)) . } . . #[inline] -- line 892 ---------------------------------------- -- line 901 ---------------------------------------- . pub fn adjust(&mut self, expn_id: ExpnId) -> Option { . let mut span = self.data(); . let mark = span.ctxt.adjust(expn_id); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] 810 ( 0.00%) pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option { 540 ( 0.00%) let mut span = self.data(); 428 ( 0.00%) let mark = span.ctxt.normalize_to_macros_2_0_and_adjust(expn_id); 810 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark 878 ( 0.00%) } . . #[inline] . pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option> { . let mut span = self.data(); . let mark = span.ctxt.glob_adjust(expn_id, glob_span); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } -- line 922 ---------------------------------------- -- line 929 ---------------------------------------- . ) -> Option> { . let mut span = self.data(); . let mark = span.ctxt.reverse_glob_adjust(expn_id, glob_span); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] 1,120 ( 0.00%) pub fn normalize_to_macros_2_0(self) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.normalize_to_macros_2_0()) 980 ( 0.00%) } . . #[inline] . pub fn normalize_to_macro_rules(self) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.normalize_to_macro_rules()) . } . } . -- line 948 ---------------------------------------- -- line 958 ---------------------------------------- . . /// What label should we attach to this span (if any)? . pub label: Option, . } . . impl Default for Span { . fn default() -> Self { . DUMMY_SP 2 ( 0.00%) } . } . . impl Encodable for Span { . default fn encode(&self, s: &mut E) -> Result<(), E::Error> { . let span = self.data(); . s.emit_struct(false, |s| { . s.emit_struct_field("lo", true, |s| span.lo.encode(s))?; . s.emit_struct_field("hi", false, |s| span.hi.encode(s)) -- line 974 ---------------------------------------- -- line 990 ---------------------------------------- . /// any spans that are debug-printed during the closure's execution. . /// . 
/// Normally, the global `TyCtxt` is used to retrieve the `SourceMap` . /// (see `rustc_interface::callbacks::span_debug1`). However, some parts . /// of the compiler (e.g. `rustc_parse`) may debug-print `Span`s before . /// a `TyCtxt` is available. In this case, we fall back to . /// the `SourceMap` provided to this function. If that is not available, . /// we fall back to printing the raw `Span` field values. 9 ( 0.00%) pub fn with_source_map T>(source_map: Lrc, f: F) -> T { . with_session_globals(|session_globals| { 2 ( 0.00%) *session_globals.source_map.borrow_mut() = Some(source_map); . }); . struct ClearSourceMap; . impl Drop for ClearSourceMap { . fn drop(&mut self) { . with_session_globals(|session_globals| { 1 ( 0.00%) session_globals.source_map.borrow_mut().take(); . }); . } . } . . let _guard = ClearSourceMap; 4 ( 0.00%) f() 8 ( 0.00%) } . . pub fn debug_with_source_map( . span: Span, . f: &mut fmt::Formatter<'_>, . source_map: &SourceMap, . ) -> fmt::Result { . write!(f, "{} ({:?})", source_map.span_to_diagnostic_string(span), span.ctxt()) . } -- line 1021 ---------------------------------------- -- line 1048 ---------------------------------------- . . impl MultiSpan { . #[inline] . pub fn new() -> MultiSpan { . MultiSpan { primary_spans: vec![], span_labels: vec![] } . } . . pub fn from_span(primary_span: Span) -> MultiSpan { 45 ( 0.00%) MultiSpan { primary_spans: vec![primary_span], span_labels: vec![] } . } . . pub fn from_spans(mut vec: Vec) -> MultiSpan { . vec.sort(); 10 ( 0.00%) MultiSpan { primary_spans: vec, span_labels: vec![] } . } . . pub fn push_span_label(&mut self, span: Span, label: String) { . self.span_labels.push((span, label)); . } . . /// Selects the first primary span (if any). . pub fn primary_span(&self) -> Option { . self.primary_spans.first().cloned() 2 ( 0.00%) } . . /// Returns all primary spans. . pub fn primary_spans(&self) -> &[Span] { . &self.primary_spans . } . . /// Returns `true` if any of the primary spans are displayable. . pub fn has_primary_spans(&self) -> bool { -- line 1079 ---------------------------------------- -- line 1139 ---------------------------------------- . . /// Returns `true` if any of the span labels is displayable. . pub fn has_span_labels(&self) -> bool { . self.span_labels.iter().any(|(sp, _)| !sp.is_dummy()) . } . } . . impl From for MultiSpan { 45 ( 0.00%) fn from(span: Span) -> MultiSpan { . MultiSpan::from_span(span) 45 ( 0.00%) } . } . . impl From> for MultiSpan { 10 ( 0.00%) fn from(spans: Vec) -> MultiSpan { 8 ( 0.00%) MultiSpan::from_spans(spans) 10 ( 0.00%) } . } . . /// Identifies an offset of a multi-byte character in a `SourceFile`. . #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub struct MultiByteChar { . /// The absolute offset of the character in the `SourceMap`. . pub pos: BytePos, . /// The number of bytes, `>= 2`. . pub bytes: u8, . } . . /// Identifies an offset of a non-narrow character in a `SourceFile`. 308 ( 0.00%) #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub enum NonNarrowChar { . /// Represents a zero-width character. . ZeroWidth(BytePos), . /// Represents a wide (full-width) character. . Wide(BytePos), . /// Represents a tab character, represented visually with a width of 4 characters. . Tab(BytePos), . } -- line 1176 ---------------------------------------- -- line 1201 ---------------------------------------- . } . } . } . . impl Add for NonNarrowChar { . type Output = Self; . . 
fn add(self, rhs: BytePos) -> Self { 532 ( 0.00%) match self { . NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs), . NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs), . NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos + rhs), . } . } . } . . impl Sub for NonNarrowChar { . type Output = Self; . 154 ( 0.00%) fn sub(self, rhs: BytePos) -> Self { 532 ( 0.00%) match self { . NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs), . NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs), . NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos - rhs), . } 462 ( 0.00%) } . } . . /// Identifies an offset of a character that was normalized away from `SourceFile`. . #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub struct NormalizedPos { . /// The absolute offset of the character in the `SourceMap`. . pub pos: BytePos, . /// The difference between original and normalized string at position. . pub diff: u32, . } . 3 ( 0.00%) #[derive(PartialEq, Eq, Clone, Debug)] . pub enum ExternalSource { . /// No external source has to be loaded, since the `SourceFile` represents a local crate. . Unneeded, . Foreign { . kind: ExternalSourceKind, . /// This SourceFile's byte-offset within the source_map of its original crate. . original_start_pos: BytePos, . /// The end of this SourceFile within the source_map of its original crate. -- line 1246 ---------------------------------------- -- line 1267 ---------------------------------------- . _ => None, . } . } . } . . #[derive(Debug)] . pub struct OffsetOverflowError; . 1,052 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] . pub enum SourceFileHashAlgorithm { . Md5, . Sha1, . Sha256, . } . . impl FromStr for SourceFileHashAlgorithm { . type Err = (); -- line 1283 ---------------------------------------- -- line 1290 ---------------------------------------- . _ => Err(()), . } . } . } . . rustc_data_structures::impl_stable_hash_via_hash!(SourceFileHashAlgorithm); . . /// The hash of the on-disk source file used for debug info. 6 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Debug)] 2,104 ( 0.00%) #[derive(HashStable_Generic, Encodable, Decodable)] . pub struct SourceFileHash { . pub kind: SourceFileHashAlgorithm, . value: [u8; 32], . } . . impl SourceFileHash { . pub fn new(kind: SourceFileHashAlgorithm, src: &str) -> SourceFileHash { . let mut hash = SourceFileHash { kind, value: Default::default() }; . let len = hash.hash_len(); . let value = &mut hash.value[..len]; . let data = src.as_bytes(); . match kind { . SourceFileHashAlgorithm::Md5 => { 3 ( 0.00%) value.copy_from_slice(&Md5::digest(data)); . } . SourceFileHashAlgorithm::Sha1 => { . value.copy_from_slice(&Sha1::digest(data)); . } . SourceFileHashAlgorithm::Sha256 => { . value.copy_from_slice(&Sha256::digest(data)); . } . } -- line 1321 ---------------------------------------- -- line 1329 ---------------------------------------- . . /// The bytes of the hash. . pub fn hash_bytes(&self) -> &[u8] { . let len = self.hash_len(); . &self.value[..len] . } . . fn hash_len(&self) -> usize { 3 ( 0.00%) match self.kind { . SourceFileHashAlgorithm::Md5 => 16, . SourceFileHashAlgorithm::Sha1 => 20, . SourceFileHashAlgorithm::Sha256 => 32, . } . } . } . . /// A single source in the [`SourceMap`]. 58 ( 0.00%) #[derive(Clone)] . pub struct SourceFile { . /// The name of the file that the source came from. Source that doesn't . /// originate from files has names between angle brackets by convention . 
/// (e.g., ``). . pub name: FileName, . /// The complete source code. 1 ( 0.00%) pub src: Option>, . /// The source code's hash. . pub src_hash: SourceFileHash, . /// The external source code (used for external crates, which will have a `None` . /// value as `self.src`. . pub external_src: Lock, . /// The start position of this source in the `SourceMap`. . pub start_pos: BytePos, . /// The end position of this source in the `SourceMap`. -- line 1361 ---------------------------------------- -- line 1364 ---------------------------------------- . pub lines: Vec, . /// Locations of multi-byte characters in the source code. . pub multibyte_chars: Vec, . /// Width of characters that are not narrow in the source code. . pub non_narrow_chars: Vec, . /// Locations of characters removed during normalization. . pub normalized_pos: Vec, . /// A hash of the filename, used for speeding up hashing in incremental compilation. 1 ( 0.00%) pub name_hash: u128, . /// Indicates which crate this `SourceFile` was imported from. 1 ( 0.00%) pub cnum: CrateNum, . } . . impl Encodable for SourceFile { . fn encode(&self, s: &mut S) -> Result<(), S::Error> { . s.emit_struct(false, |s| { . s.emit_struct_field("name", true, |s| self.name.encode(s))?; . s.emit_struct_field("src_hash", false, |s| self.src_hash.encode(s))?; . s.emit_struct_field("start_pos", false, |s| self.start_pos.encode(s))?; . s.emit_struct_field("end_pos", false, |s| self.end_pos.encode(s))?; . s.emit_struct_field("lines", false, |s| { . let lines = &self.lines[..]; . // Store the length. . s.emit_u32(lines.len() as u32)?; . 4 ( 0.00%) if !lines.is_empty() { . // In order to preserve some space, we exploit the fact that . // the lines list is sorted and individual lines are . // probably not that long. Because of that we can store lines . // as a difference list, using as little space as possible . // for the differences. . let max_line_length = if lines.len() == 1 { . 0 . } else { -- line 1397 ---------------------------------------- -- line 1399 ---------------------------------------- . .array_windows() . .map(|&[fst, snd]| snd - fst) . .map(|bp| bp.to_usize()) . .max() . .unwrap() . }; . . let bytes_per_diff: u8 = match max_line_length { 2 ( 0.00%) 0..=0xFF => 1, 4 ( 0.00%) 0x100..=0xFFFF => 2, . _ => 4, . }; . . // Encode the number of bytes used per diff. . bytes_per_diff.encode(s)?; . . // Encode the first element. . lines[0].encode(s)?; . . let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst); . 4 ( 0.00%) match bytes_per_diff { . 1 => { . for diff in diff_iter { . (diff.0 as u8).encode(s)? . } . } . 2 => { . for diff in diff_iter { . (diff.0 as u16).encode(s)? -- line 1428 ---------------------------------------- -- line 1436 ---------------------------------------- . _ => unreachable!(), . } . } . . Ok(()) . })?; . s.emit_struct_field("multibyte_chars", false, |s| self.multibyte_chars.encode(s))?; . s.emit_struct_field("non_narrow_chars", false, |s| self.non_narrow_chars.encode(s))?; 3 ( 0.00%) s.emit_struct_field("name_hash", false, |s| self.name_hash.encode(s))?; . s.emit_struct_field("normalized_pos", false, |s| self.normalized_pos.encode(s))?; 11 ( 0.00%) s.emit_struct_field("cnum", false, |s| self.cnum.encode(s)) . }) . } . } . . impl Decodable for SourceFile { 4,734 ( 0.00%) fn decode(d: &mut D) -> SourceFile { . d.read_struct(|d| { . let name: FileName = d.read_struct_field("name", |d| Decodable::decode(d)); . let src_hash: SourceFileHash = . d.read_struct_field("src_hash", |d| Decodable::decode(d)); . 
let start_pos: BytePos = d.read_struct_field("start_pos", |d| Decodable::decode(d)); . let end_pos: BytePos = d.read_struct_field("end_pos", |d| Decodable::decode(d)); . let lines: Vec = d.read_struct_field("lines", |d| { . let num_lines: u32 = Decodable::decode(d); 526 ( 0.00%) let mut lines = Vec::with_capacity(num_lines as usize); . . if num_lines > 0 { . // Read the number of bytes used per diff. . let bytes_per_diff: u8 = Decodable::decode(d); . . // Read the first element. . let mut line_start: BytePos = Decodable::decode(d); . lines.push(line_start); . . for _ in 1..num_lines { 865,776 ( 0.86%) let diff = match bytes_per_diff { . 1 => d.read_u8() as u32, . 2 => d.read_u16() as u32, . 4 => d.read_u32(), . _ => unreachable!(), . }; . . line_start = line_start + BytePos(diff); . -- line 1480 ---------------------------------------- -- line 1483 ---------------------------------------- . } . . lines . }); . let multibyte_chars: Vec = . d.read_struct_field("multibyte_chars", |d| Decodable::decode(d)); . let non_narrow_chars: Vec = . d.read_struct_field("non_narrow_chars", |d| Decodable::decode(d)); 1,052 ( 0.00%) let name_hash: u128 = d.read_struct_field("name_hash", |d| Decodable::decode(d)); . let normalized_pos: Vec = . d.read_struct_field("normalized_pos", |d| Decodable::decode(d)); . let cnum: CrateNum = d.read_struct_field("cnum", |d| Decodable::decode(d)); 5,786 ( 0.01%) SourceFile { 4,208 ( 0.00%) name, . start_pos, . end_pos, . src: None, 2,104 ( 0.00%) src_hash, . // Unused - the metadata decoder will construct . // a new SourceFile, filling in `external_src` properly . external_src: Lock::new(ExternalSource::Unneeded), 2,104 ( 0.00%) lines, 2,104 ( 0.00%) multibyte_chars, 2,104 ( 0.00%) non_narrow_chars, 2,104 ( 0.00%) normalized_pos, . name_hash, . cnum, . } . }) 4,734 ( 0.00%) } . } . . impl fmt::Debug for SourceFile { . fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(fmt, "SourceFile({:?})", self.name) . } . } . . impl SourceFile { 11 ( 0.00%) pub fn new( . name: FileName, . mut src: String, . start_pos: BytePos, . hash_kind: SourceFileHashAlgorithm, . ) -> Self { . // Compute the file hash before any normalization. . let src_hash = SourceFileHash::new(hash_kind, &src); 3 ( 0.00%) let normalized_pos = normalize_src(&mut src, start_pos); . . let name_hash = { . let mut hasher: StableHasher = StableHasher::new(); 2 ( 0.00%) name.hash(&mut hasher); . hasher.finish::() . }; 2 ( 0.00%) let end_pos = start_pos.to_usize() + src.len(); 3 ( 0.00%) assert!(end_pos <= u32::MAX as usize); . 12 ( 0.00%) let (lines, multibyte_chars, non_narrow_chars) = 2 ( 0.00%) analyze_source_file::analyze_source_file(&src, start_pos); . 17 ( 0.00%) SourceFile { . name, . src: Some(Lrc::new(src)), 11 ( 0.00%) src_hash, . external_src: Lock::new(ExternalSource::Unneeded), . start_pos, . end_pos: Pos::from_usize(end_pos), 4 ( 0.00%) lines, 4 ( 0.00%) multibyte_chars, 4 ( 0.00%) non_narrow_chars, 4 ( 0.00%) normalized_pos, . name_hash, . cnum: LOCAL_CRATE, . } 9 ( 0.00%) } . . /// Returns the `BytePos` of the beginning of the current line. . pub fn line_begin_pos(&self, pos: BytePos) -> BytePos { . let line_index = self.lookup_line(pos).unwrap(); . self.lines[line_index] . } . . /// Add externally loaded source. . /// If the hash of the input doesn't match or no input is supplied via None, . /// it is interpreted as an error and the corresponding enum variant is set. . /// The return value signifies whether some kind of source is present. 
119 ( 0.00%) pub fn add_external_src(&self, get_src: F) -> bool . where . F: FnOnce() -> Option, . { 17 ( 0.00%) if matches!( 34 ( 0.00%) *self.external_src.borrow(), . ExternalSource::Foreign { kind: ExternalSourceKind::AbsentOk, .. } . ) { . let src = get_src(); . let mut external_src = self.external_src.borrow_mut(); . // Check that no-one else have provided the source while we were getting it . if let ExternalSource::Foreign { . kind: src_kind @ ExternalSourceKind::AbsentOk, .. . } = &mut *external_src -- line 1582 ---------------------------------------- -- line 1592 ---------------------------------------- . *src_kind = ExternalSourceKind::AbsentErr; . } . . false . } else { . self.src.is_some() || external_src.get_source().is_some() . } . } else { 17 ( 0.00%) self.src.is_some() || self.external_src.borrow().get_source().is_some() . } 153 ( 0.00%) } . . /// Gets a line from the list of pre-computed line-beginnings. . /// The line number here is 0-based. . pub fn get_line(&self, line_number: usize) -> Option> { . fn get_until_newline(src: &str, begin: usize) -> &str { . // We can't use `lines.get(line_number+1)` because we might . // be parsing when we call this function and thus the current . // line is the last one we have line info for. -- line 1610 ---------------------------------------- -- line 1627 ---------------------------------------- . Some(Cow::Owned(String::from(get_until_newline(src, begin)))) . } else { . None . } . } . . pub fn is_real_file(&self) -> bool { . self.name.is_real() 527 ( 0.00%) } . . pub fn is_imported(&self) -> bool { . self.src.is_none() 671 ( 0.00%) } . . pub fn count_lines(&self) -> usize { . self.lines.len() . } . . /// Finds the line containing the given position. The return value is the . /// index into the `lines` array of this `SourceFile`, not the 1-based line . /// number. If the source_file is empty or the position is located before the . /// first line, `None` is returned. . pub fn lookup_line(&self, pos: BytePos) -> Option { 322 ( 0.00%) match self.lines.binary_search(&pos) { . Ok(idx) => Some(idx), . Err(0) => None, . Err(idx) => Some(idx - 1), . } . } . . pub fn line_bounds(&self, line_index: usize) -> Range { 621 ( 0.00%) if self.is_empty() { . return self.start_pos..self.end_pos; . } . 450 ( 0.00%) assert!(line_index < self.lines.len()); 600 ( 0.00%) if line_index == (self.lines.len() - 1) { 7 ( 0.00%) self.lines[line_index]..self.end_pos . } else { 394 ( 0.00%) self.lines[line_index]..self.lines[line_index + 1] . } . } . . /// Returns whether or not the file contains the given `SourceMap` byte . /// position. The position one past the end of the file is considered to be . /// contained by the file. This implies that files for which `is_empty` . /// returns true still contain one byte position according to this function. . #[inline] -- line 1674 ---------------------------------------- -- line 1692 ---------------------------------------- . Err(i) if i == 0 => 0, . Err(i) => self.normalized_pos[i - 1].diff, . }; . . BytePos::from_u32(pos.0 - self.start_pos.0 + diff) . } . . /// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`. 2 ( 0.00%) pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { . // The number of extra bytes due to multibyte chars in the `SourceFile`. . let mut total_extra_bytes = 0; . 6 ( 0.00%) for mbc in self.multibyte_chars.iter() { . debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); . if mbc.pos < bpos { . // Every character is at least one byte, so we only . 
// count the actual extra bytes. . total_extra_bytes += mbc.bytes as u32 - 1; . // We should never see a byte position in the middle of a . // character. . assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); . } else { . break; . } . } . 12 ( 0.00%) assert!(self.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); 10 ( 0.00%) CharPos(bpos.to_usize() - self.start_pos.to_usize() - total_extra_bytes as usize) 4 ( 0.00%) } . . /// Looks up the file's (1-based) line number and (0-based `CharPos`) column offset, for a . /// given `BytePos`. 7 ( 0.00%) pub fn lookup_file_pos(&self, pos: BytePos) -> (usize, CharPos) { 2 ( 0.00%) let chpos = self.bytepos_to_file_charpos(pos); . match self.lookup_line(pos) { . Some(a) => { . let line = a + 1; // Line numbers start at 1 1 ( 0.00%) let linebpos = self.lines[a]; 2 ( 0.00%) let linechpos = self.bytepos_to_file_charpos(linebpos); . let col = chpos - linechpos; . debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); . debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos); . debug!("byte is on line: {}", line); 1 ( 0.00%) assert!(chpos >= linechpos); . (line, col) . } . None => (0, chpos), . } 8 ( 0.00%) } . . /// Looks up the file's (1-based) line number, (0-based `CharPos`) column offset, and (0-based) . /// column offset when displayed, for a given `BytePos`. 8 ( 0.00%) pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) { 3 ( 0.00%) let (line, col_or_chpos) = self.lookup_file_pos(pos); 2 ( 0.00%) if line > 0 { . let col = col_or_chpos; 1 ( 0.00%) let linebpos = self.lines[line - 1]; . let col_display = { . let start_width_idx = self . .non_narrow_chars . .binary_search_by_key(&linebpos, |x| x.pos()) . .unwrap_or_else(|x| x); . let end_width_idx = self . .non_narrow_chars . .binary_search_by_key(&pos, |x| x.pos()) . .unwrap_or_else(|x| x); . let special_chars = end_width_idx - start_width_idx; . let non_narrow: usize = self.non_narrow_chars[start_width_idx..end_width_idx] . .iter() . .map(|x| x.width()) . .sum(); 5 ( 0.00%) col.0 - special_chars + non_narrow . }; . (line, col, col_display) . } else { . let chpos = col_or_chpos; . let col_display = { . let end_width_idx = self . .non_narrow_chars . .binary_search_by_key(&pos, |x| x.pos()) . .unwrap_or_else(|x| x); . let non_narrow: usize = . self.non_narrow_chars[0..end_width_idx].iter().map(|x| x.width()).sum(); . chpos.0 - end_width_idx + non_narrow . }; . (0, chpos, col_display) . } 7 ( 0.00%) } . } . . /// Normalizes the source code and records the normalizations. 10 ( 0.00%) fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec { . let mut normalized_pos = vec![]; . remove_bom(src, &mut normalized_pos); . normalize_newlines(src, &mut normalized_pos); . . // Offset all the positions by start_pos to match the final file positions. . for np in &mut normalized_pos { . np.pos.0 += start_pos.0; . } . . normalized_pos 9 ( 0.00%) } . . /// Removes UTF-8 BOM, if any. . fn remove_bom(src: &mut String, normalized_pos: &mut Vec) { 1 ( 0.00%) if src.starts_with('\u{feff}') { . src.drain(..3); . normalized_pos.push(NormalizedPos { pos: BytePos(0), diff: 3 }); . } . } . . /// Replaces `\r\n` with `\n` in-place in `src`. . /// . /// Returns error if there's a lone `\r` in the string. . fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec) { 1 ( 0.00%) if !src.as_bytes().contains(&b'\r') { . return; . } . . // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding. . 
// While we *can* call `as_mut_vec` and do surgery on the live string . // directly, let's rather steal the contents of `src`. This makes the code . // safe even if a panic occurs. . -- line 1816 ---------------------------------------- -- line 1877 ---------------------------------------- . ( . $( . $(#[$attr:meta])* . $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty); . )* . ) => { . $( . $(#[$attr])* 1,342 ( 0.00%) $vis struct $ident($inner_vis $inner_ty); . . impl Pos for $ident { . #[inline(always)] . fn from_usize(n: usize) -> $ident { 123 ( 0.00%) $ident(n as $inner_ty) . } . . #[inline(always)] . fn to_usize(&self) -> usize { 2,356 ( 0.00%) self.0 as usize . } . . #[inline(always)] . fn from_u32(n: u32) -> $ident { . $ident(n as $inner_ty) . } . . #[inline(always)] -- line 1903 ---------------------------------------- -- line 1906 ---------------------------------------- . } . } . . impl Add for $ident { . type Output = $ident; . . #[inline(always)] . fn add(self, rhs: $ident) -> $ident { 1,446,664 ( 1.43%) $ident(self.0 + rhs.0) . } . } . . impl Sub for $ident { . type Output = $ident; . . #[inline(always)] . fn sub(self, rhs: $ident) -> $ident { 746,106 ( 0.74%) $ident(self.0 - rhs.0) . } . } . )* . }; . } . . impl_pos! { . /// A byte offset. -- line 1931 ---------------------------------------- -- line 1946 ---------------------------------------- . impl Encodable for BytePos { . fn encode(&self, s: &mut S) -> Result<(), S::Error> { . s.emit_u32(self.0) . } . } . . impl Decodable for BytePos { . fn decode(d: &mut D) -> BytePos { 32,650 ( 0.03%) BytePos(d.read_u32()) . } . } . . // _____________________________________________________________________________ . // Loc, SourceFileAndLine, SourceFileAndBytePos . // . . /// A source code location used for error reporting. -- line 1962 ---------------------------------------- -- line 2079 ---------------------------------------- . /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column) . /// triple, which stays the same even if the containing `SourceFile` has moved . /// within the `SourceMap`. . /// . /// Also note that we are hashing byte offsets for the column, not unicode . /// codepoint offsets. For the purpose of the hash that's sufficient. . /// Also, hashing filenames is expensive so we avoid doing it twice when the . /// span starts and ends in the same file, which is almost always the case. 9,611 ( 0.01%) fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { . const TAG_VALID_SPAN: u8 = 0; . const TAG_INVALID_SPAN: u8 = 1; . const TAG_RELATIVE_SPAN: u8 = 2; . 1,831 ( 0.00%) if !ctx.hash_spans() { . return; . } . 7,780 ( 0.01%) let span = self.data_untracked(); 2,745 ( 0.00%) span.ctxt.hash_stable(ctx, hasher); 4,119 ( 0.00%) span.parent.hash_stable(ctx, hasher); . 2,798 ( 0.00%) if span.is_dummy() { . Hash::hash(&TAG_INVALID_SPAN, hasher); . return; . } . 2,642 ( 0.00%) if let Some(parent) = span.parent { . let def_span = ctx.def_span(parent).data_untracked(); . if def_span.contains(span) { . // This span is enclosed in a definition: only hash the relative position. . Hash::hash(&TAG_RELATIVE_SPAN, hasher); . (span.lo - def_span.lo).to_u32().hash_stable(ctx, hasher); . (span.hi - def_span.lo).to_u32().hash_stable(ctx, hasher); . return; . } . } . . // If this is not an empty or invalid span, we want to hash the last . // position that belongs to it, as opposed to hashing the first . // position past it. 
5,284 ( 0.01%) let (file, line_lo, col_lo, line_hi, col_hi) = match ctx.span_data_to_lines_and_cols(&span) . { 2,642 ( 0.00%) Some(pos) => pos, . None => { . Hash::hash(&TAG_INVALID_SPAN, hasher); . return; . } . }; . . Hash::hash(&TAG_VALID_SPAN, hasher); . // We truncate the stable ID hash and line and column numbers. The chances -- line 2129 ---------------------------------------- -- line 2134 ---------------------------------------- . // hash only the length, for example, then two otherwise equal spans with . // different end locations will have the same hash. This can cause a problem . // during incremental compilation wherein a previous result for a query that . // depends on the end location of a span will be incorrectly reused when the . // end location of the span it depends on has changed (see issue #74890). A . // similar analysis applies if some query depends specifically on the length . // of the span, but we only hash the end location. So hash both. . 1,321 ( 0.00%) let col_lo_trunc = (col_lo.0 as u64) & 0xFF; . let line_lo_trunc = ((line_lo as u64) & 0xFF_FF_FF) << 8; . let col_hi_trunc = (col_hi.0 as u64) & 0xFF << 32; . let line_hi_trunc = ((line_hi as u64) & 0xFF_FF_FF) << 40; . let col_line = col_lo_trunc | line_lo_trunc | col_hi_trunc | line_hi_trunc; 1,321 ( 0.00%) let len = (span.hi - span.lo).0; . Hash::hash(&col_line, hasher); . Hash::hash(&len, hasher); 10,984 ( 0.01%) } . } 1,105,673 ( 1.09%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cmp.rs -------------------------------------------------------------------------------- Ir -- line 223 ---------------------------------------- . fn eq(&self, other: &Rhs) -> bool; . . /// This method tests for `!=`. . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn ne(&self, other: &Rhs) -> bool { 930 ( 0.00%) !self.eq(other) . } . } . . /// Derive macro generating an impl of the trait `PartialEq`. . #[rustc_builtin_macro] . #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] . #[allow_internal_unstable(core_intrinsics, structural_match)] . pub macro PartialEq($item:item) { -- line 239 ---------------------------------------- -- line 569 ---------------------------------------- . /// let result = x.0.cmp(&y.0).then_with(|| x.1.cmp(&y.1)).then_with(|| x.2.cmp(&y.2)); . /// . /// assert_eq!(result, Ordering::Less); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "ordering_chaining", since = "1.17.0")] . pub fn then_with Ordering>(self, f: F) -> Ordering { 1 ( 0.00%) match self { . Equal => f(), . _ => self, . } . } . } . . /// A helper struct for reverse ordering. . /// -- line 585 ---------------------------------------- -- line 792 ---------------------------------------- . /// ``` . #[stable(feature = "ord_max_min", since = "1.21.0")] . #[inline] . #[must_use] . fn max(self, other: Self) -> Self . where . Self: Sized, . { 1,902 ( 0.00%) max_by(self, other, Ord::cmp) . } . . /// Compares and returns the minimum of two values. . /// . /// Returns the first argument if the comparison determines them to be equal. . /// . /// # Examples . /// -- line 808 ---------------------------------------- -- line 812 ---------------------------------------- . /// ``` . #[stable(feature = "ord_max_min", since = "1.21.0")] . #[inline] . #[must_use] . fn min(self, other: Self) -> Self . where . Self: Sized, . 
{ 136 ( 0.00%) min_by(self, other, Ord::cmp) . } . . /// Restrict a value to a certain interval. . /// . /// Returns `max` if `self` is greater than `max`, and `min` if `self` is . /// less than `min`. Otherwise this returns `self`. . /// . /// # Panics -- line 828 ---------------------------------------- -- line 1097 ---------------------------------------- . /// let result = 2.0 < 1.0; . /// assert_eq!(result, false); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn lt(&self, other: &Rhs) -> bool { 2,493 ( 0.00%) matches!(self.partial_cmp(other), Some(Less)) . } . . /// This method tests less than or equal to (for `self` and `other`) and is used by the `<=` . /// operator. . /// . /// # Examples . /// . /// ``` -- line 1113 ---------------------------------------- -- line 1121 ---------------------------------------- . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn le(&self, other: &Rhs) -> bool { . // Pattern `Some(Less | Eq)` optimizes worse than negating `None | Some(Greater)`. . // FIXME: The root cause was fixed upstream in LLVM with: . // https://github.com/llvm/llvm-project/commit/9bad7de9a3fb844f1ca2965f35d0c2a3d1e11775 . // Revert this workaround once support for LLVM 12 gets dropped. 12,704 ( 0.01%) !matches!(self.partial_cmp(other), None | Some(Greater)) . } . . /// This method tests greater than (for `self` and `other`) and is used by the `>` operator. . /// . /// # Examples . /// . /// ``` . /// let result = 1.0 > 2.0; -- line 1137 ---------------------------------------- -- line 1140 ---------------------------------------- . /// let result = 2.0 > 2.0; . /// assert_eq!(result, false); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn gt(&self, other: &Rhs) -> bool { 24,053 ( 0.02%) matches!(self.partial_cmp(other), Some(Greater)) . } . . /// This method tests greater than or equal to (for `self` and `other`) and is used by the `>=` . /// operator. . /// . /// # Examples . /// . /// ``` -- line 1156 ---------------------------------------- -- line 1160 ---------------------------------------- . /// let result = 2.0 >= 2.0; . /// assert_eq!(result, true); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn ge(&self, other: &Rhs) -> bool { 5 ( 0.00%) matches!(self.partial_cmp(other), Some(Greater | Equal)) . } . } . . /// Derive macro generating an impl of the trait `PartialOrd`. . #[rustc_builtin_macro] . #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] . #[allow_internal_unstable(core_intrinsics)] . pub macro PartialOrd($item:item) { -- line 1176 ---------------------------------------- -- line 1210 ---------------------------------------- . /// . /// assert_eq!(cmp::min_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 1); . /// assert_eq!(cmp::min_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "cmp_min_max_by", since = "1.53.0")] . pub fn min_by Ordering>(v1: T, v2: T, compare: F) -> T { 6,321 ( 0.01%) match compare(&v1, &v2) { . Ordering::Less | Ordering::Equal => v1, . Ordering::Greater => v2, . } . } . . /// Returns the element that gives the minimum value from the specified function. . /// . /// Returns the first argument if the comparison determines them to be equal. 
-- line 1226 ---------------------------------------- -- line 1273 ---------------------------------------- . /// . /// assert_eq!(cmp::max_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2); . /// assert_eq!(cmp::max_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 2); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "cmp_min_max_by", since = "1.53.0")] . pub fn max_by Ordering>(v1: T, v2: T, compare: F) -> T { 19,832 ( 0.02%) match compare(&v1, &v2) { 80 ( 0.00%) Ordering::Less | Ordering::Equal => v2, . Ordering::Greater => v1, . } . } . . /// Returns the element that gives the maximum value from the specified function. . /// . /// Returns the second argument if the comparison determines them to be equal. . /// -- line 1290 ---------------------------------------- -- line 1308 ---------------------------------------- . use crate::cmp::Ordering::{self, Equal, Greater, Less}; . use crate::hint::unreachable_unchecked; . . macro_rules! partial_eq_impl { . ($($t:ty)*) => ($( . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq for $t { . #[inline] 409 ( 0.00%) fn eq(&self, other: &$t) -> bool { (*self) == (*other) } . #[inline] 3,447 ( 0.00%) fn ne(&self, other: &$t) -> bool { (*self) != (*other) } . } . )*) . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq for () { . #[inline] . fn eq(&self, _other: &()) -> bool { -- line 1326 ---------------------------------------- -- line 1392 ---------------------------------------- . ($($t:ty)*) => ($( . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialOrd for $t { . #[inline] . fn partial_cmp(&self, other: &$t) -> Option { . Some(self.cmp(other)) . } . #[inline] 667,511 ( 0.66%) fn lt(&self, other: &$t) -> bool { (*self) < (*other) } . #[inline] 870 ( 0.00%) fn le(&self, other: &$t) -> bool { (*self) <= (*other) } . #[inline] . fn ge(&self, other: &$t) -> bool { (*self) >= (*other) } . #[inline] . fn gt(&self, other: &$t) -> bool { (*self) > (*other) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl Ord for $t { . #[inline] . fn cmp(&self, other: &$t) -> Ordering { . // The order here is important to generate more optimal assembly. . // See for more info. 53,740 ( 0.05%) if *self < *other { Less } . else if *self == *other { Equal } . else { Greater } . } . } . )*) . } . . #[stable(feature = "rust1", since = "1.0.0")] -- line 1423 ---------------------------------------- -- line 1430 ---------------------------------------- . . #[stable(feature = "rust1", since = "1.0.0")] . impl Ord for bool { . #[inline] . fn cmp(&self, other: &bool) -> Ordering { . // Casting to i8's and converting the difference to an Ordering generates . // more optimal assembly. . // See for more info. 532 ( 0.00%) match (*self as i8) - (*other as i8) { . -1 => Less, . 0 => Equal, . 1 => Greater, . // SAFETY: bool as i8 returns 0 or 1, so the difference can't be anything else . _ => unsafe { unreachable_unchecked() }, . } . } . } -- line 1446 ---------------------------------------- -- line 1474 ---------------------------------------- . // & pointers . . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq<&B> for &A . where . A: PartialEq, . { . #[inline] 48 ( 0.00%) fn eq(&self, other: &&B) -> bool { 20,861 ( 0.02%) PartialEq::eq(*self, *other) 2,473 ( 0.00%) } . #[inline] . fn ne(&self, other: &&B) -> bool { 57 ( 0.00%) PartialEq::ne(*self, *other) 993 ( 0.00%) } . } . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialOrd<&B> for &A . where . A: PartialOrd, . { . #[inline] . 
fn partial_cmp(&self, other: &&B) -> Option { -- line 1496 ---------------------------------------- -- line 1516 ---------------------------------------- . #[stable(feature = "rust1", since = "1.0.0")] . impl Ord for &A . where . A: Ord, . { . #[inline] . fn cmp(&self, other: &Self) -> Ordering { . Ord::cmp(*self, *other) 386 ( 0.00%) } . } . #[stable(feature = "rust1", since = "1.0.0")] . impl Eq for &A where A: Eq {} . . // &mut pointers . . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq<&mut B> for &mut A -- line 1532 ---------------------------------------- 77,773 ( 0.08%) -------------------------------------------------------------------------------- The following files chosen for auto-annotation could not be found: -------------------------------------------------------------------------------- ./elf/../sysdeps/x86_64/dl-machine.h ./elf/dl-lookup.c ./elf/do-rel.h ./malloc/malloc.c ./stdio-common/vfscanf-internal.c ./stdlib/cxa_finalize.c ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S ./string/../sysdeps/x86_64/strcmp.S -------------------------------------------------------------------------------- Ir -------------------------------------------------------------------------------- 32,920,506 (32.59%) events annotated
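--------------------------------------------------------------------------------
The annotated rustc_span source above turns an absolute byte offset into a character column in SourceFile::bytepos_to_file_charpos by subtracting the file's start position and one "extra" byte for every byte beyond the first of each preceding multibyte character. The standalone sketch below restates that conversion with simplified stand-ins for rustc's BytePos, CharPos, and MultiByteChar types; it illustrates the idea only and is not the compiler's actual implementation.

#[derive(Clone, Copy)]
struct BytePos(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct CharPos(usize);

/// A multibyte character: its byte position and its length in bytes (2..=4).
struct MultiByteChar {
    pos: BytePos,
    bytes: u8,
}

/// Convert an absolute byte position into a 0-based char offset within the file
/// by subtracting the file's start position and the extra bytes contributed by
/// every multibyte character that precedes `bpos`.
fn bytepos_to_charpos(start_pos: BytePos, multibyte_chars: &[MultiByteChar], bpos: BytePos) -> CharPos {
    let mut total_extra_bytes: u32 = 0;
    for mbc in multibyte_chars {
        if mbc.pos.0 < bpos.0 {
            // Every byte beyond the first of a multibyte char is an "extra" byte.
            total_extra_bytes += mbc.bytes as u32 - 1;
        } else {
            // multibyte_chars is sorted by position, so we can stop early.
            break;
        }
    }
    CharPos((bpos.0 - start_pos.0 - total_extra_bytes) as usize)
}

fn main() {
    // "aé b": 'é' is a 2-byte character at byte offset 1.
    let mbcs = [MultiByteChar { pos: BytePos(1), bytes: 2 }];
    // Byte offset 4 (the 'b') corresponds to char offset 3.
    assert_eq!(bytepos_to_charpos(BytePos(0), &mbcs, BytePos(4)), CharPos(3));
}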
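The Span::hash_stable code annotated above truncates line numbers to 24 bits and columns to 8 bits, packs the four values into a single u64 so the stable hasher only processes one word per span, and additionally hashes the span's length so a change in end position still changes the hash (per the comment referencing issue #74890). The sketch below reproduces only the packing idea, with explicit parentheses around each mask and shift; the field layout and masking in rustc's own expression may differ, so treat this as an illustration rather than a copy.

/// Pack truncated (line, column) pairs for the start and end of a span into one u64:
/// col_lo in bits 0..8, line_lo in bits 8..32, col_hi in bits 32..40, line_hi in bits 40..64.
fn pack_span_pos(line_lo: usize, col_lo: usize, line_hi: usize, col_hi: usize) -> u64 {
    let col_lo_trunc = (col_lo as u64) & 0xFF;
    let line_lo_trunc = ((line_lo as u64) & 0xFF_FF_FF) << 8;
    let col_hi_trunc = ((col_hi as u64) & 0xFF) << 32;
    let line_hi_trunc = ((line_hi as u64) & 0xFF_FF_FF) << 40;
    col_lo_trunc | line_lo_trunc | col_hi_trunc | line_hi_trunc
}

fn main() {
    let packed = pack_span_pos(10, 4, 12, 1);
    assert_eq!(packed & 0xFF, 4); // col_lo
    assert_eq!((packed >> 8) & 0xFF_FF_FF, 10); // line_lo
    assert_eq!((packed >> 32) & 0xFF, 1); // col_hi
    assert_eq!((packed >> 40) & 0xFF_FF_FF, 12); // line_hi
    println!("0x{:016x}", packed);
}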
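normalize_newlines in the annotation above rewrites "\r\n" to "\n" in place and records each removal as a NormalizedPos so positions in the normalized text can later be mapped back to the original file; the adjacent comment notes that it steals the contents of `src` so the string stays valid even if a panic occurs. Below is a minimal sketch of that scheme using a simplified stand-in for rustc's NormalizedPos; the exact meaning of rustc's pos/diff fields and its handling of a lone '\r' may differ.

/// Simplified stand-in for rustc's NormalizedPos.
struct NormalizedPos {
    pos: u32,  // offset in the normalized text where a removal took effect
    diff: u32, // cumulative number of bytes removed up to this point
}

fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec<NormalizedPos>) {
    if !src.as_bytes().contains(&b'\r') {
        return; // fast path: nothing to normalize
    }
    // Steal the contents of `src` and rebuild them without the '\r' of every
    // "\r\n" pair. Dropping those bytes cannot break UTF-8, so converting back
    // at the end is safe.
    let bytes = std::mem::take(src).into_bytes();
    let mut out = Vec::with_capacity(bytes.len());
    let mut diff = 0u32;
    let mut i = 0;
    while i < bytes.len() {
        if bytes[i] == b'\r' && bytes.get(i + 1) == Some(&b'\n') {
            diff += 1;
            // Record where the removal took effect and the running total removed.
            normalized_pos.push(NormalizedPos { pos: out.len() as u32, diff });
            i += 1; // skip the '\r'; the '\n' is copied on the next iteration
        } else {
            out.push(bytes[i]);
            i += 1;
        }
    }
    *src = String::from_utf8(out).expect("removing '\\r' bytes keeps UTF-8 valid");
}

fn main() {
    let mut src = String::from("fn main() {\r\n    println!(\"hi\");\r\n}\r\n");
    let mut positions = Vec::new();
    normalize_newlines(&mut src, &mut positions);
    assert!(!src.contains('\r'));
    assert_eq!(positions.len(), 3);
    assert_eq!(positions[0].pos, 11); // first '\n' lands right after "fn main() {"
    assert_eq!(positions.last().map(|p| p.diff), Some(3));
}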