--------------------------------------------------------------------------------
I1 cache:         65536 B, 64 B, 4-way associative
D1 cache:         32768 B, 64 B, 8-way associative
LL cache:         67108864 B, 64 B, 64-way associative
Command:          /usr/home/liquid/.rustup/toolchains/w-profiling/bin/rustc --crate-name wasmparser --edition=2018 src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata -C embed-bitcode=no -C debuginfo=2 -C metadata=6c6293133f8f1de3 -C extra-filename=-6c6293133f8f1de3 --out-dir /usr/home/liquid/tmp/.tmpngImYa/target/debug/deps -L dependency=/usr/home/liquid/tmp/.tmpngImYa/target/debug/deps -Adeprecated -Aunknown-lints -Zincremental-verify-ich
Data file:        results/cgout-w-profiling-wasmparser-0.82.0-Check-Full
Events recorded:  Ir
Events shown:     Ir
Event sort order: Ir
Thresholds:       0.1
Include dirs:
User annotated:
Auto-annotation:  on
--------------------------------------------------------------------------------
Ir
--------------------------------------------------------------------------------
6,026,958,768 (100.0%)  PROGRAM TOTALS
--------------------------------------------------------------------------------
Ir  file:function
--------------------------------------------------------------------------------
155,255,326 ( 2.58%)  ./malloc/malloc.c:_int_free
132,825,818 ( 2.20%)  ./malloc/malloc.c:_int_malloc
131,553,530 ( 2.18%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_trait_selection/src/traits/fulfill.rs:>::process_obligations::>
124,949,521 ( 2.07%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_avx_unaligned_erms
96,892,543 ( 1.61%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/obligation_forest/mod.rs:>::process_obligations::>
94,005,526 ( 1.56%)  ./malloc/malloc.c:malloc
84,198,336 ( 1.40%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:>::process_obligations::>
68,562,383 ( 1.14%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs:>::process_obligations::>
56,096,682 ( 0.93%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cell.rs:>::process_obligations::>
56,038,881 ( 0.93%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/type_variable.rs:>::process_obligations::>
46,747,235 ( 0.78%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_trait_selection/src/traits/select/mod.rs:>::process_obligations::>
46,706,976 ( 0.77%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/ena-0.14.0/src/snapshot_vec.rs:>::process_obligations::>
44,940,028 ( 0.75%)  ./malloc/malloc.c:free
40,668,937 ( 0.67%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
33,817,494 ( 0.56%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/smallvec-1.7.0/src/lib.rs:>::try_reserve
25,820,990 ( 0.43%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_index/src/bit_set.rs:>::union::>
25,685,852 ( 0.43%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs:::attrs
25,626,019 ( 0.43%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/smallvec-1.7.0/src/lib.rs: as core::iter::traits::collect::Extend<&rustc_mir_build::thir::pattern::deconstruct_pat::DeconstructedPat>>::extend::>
25,133,006 ( 0.42%)
/usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/smallvec-1.7.0/src/lib.rs:>::insert_from_slice 25,067,609 ( 0.42%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs:rustc_mir_build::thir::pattern::usefulness::is_useful 24,302,164 ( 0.40%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/smallvec-1.7.0/src/lib.rs:rustc_mir_build::thir::pattern::usefulness::is_useful 23,541,166 ( 0.39%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/obligation_forest/mod.rs:>::compress::<>::process_obligations>::{closure#0}> 23,222,655 ( 0.39%) ./malloc/malloc.c:malloc_consolidate 22,432,130 ( 0.37%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs:::short_write_process_buffer:: 21,824,238 ( 0.36%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/btree/search.rs:>::search_tree:: 21,769,835 ( 0.36%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:>::process_obligations::> 21,735,244 ( 0.36%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs:rustc_mir_build::thir::pattern::usefulness::is_useful 21,571,622 ( 0.36%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>::apply_rewrites 21,496,193 ( 0.36%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:::shallow_resolve_ty 20,766,060 ( 0.34%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/intrinsics.rs:>::compress::<>::process_obligations>::{closure#0}> 20,191,416 ( 0.34%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/obligation_forest/mod.rs:>::apply_rewrites 20,053,136 ( 0.33%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::eq 19,126,341 ( 0.32%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::from_key_hashed_nocheck:: 19,121,175 ( 0.32%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs:>::process_obligations::> 18,956,609 ( 0.31%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::hash:: 18,828,024 ( 0.31%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}> 18,764,694 ( 0.31%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/ena-0.14.0/src/unify/mod.rs:>::process_obligations::> 18,378,237 ( 0.30%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::intern_ty 15,808,688 ( 0.26%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/region_constraints/mod.rs:>::search_tree:: 15,160,467 ( 0.25%) ./malloc/malloc.c:unlink_chunk.constprop.0 15,093,930 ( 0.25%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs:::push 15,047,640 ( 0.25%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs:::short_write_process_buffer:: 14,908,308 ( 0.25%) ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S:__memset_avx2_erms 14,540,654 ( 0.24%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs:::span_data_to_lines_and_cols 14,345,590 ( 0.24%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:>, (), 
core::hash::BuildHasherDefault>>::from_hash::>>::{closure#0}> 14,238,534 ( 0.24%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::, rustc_middle::ty::context::Interned>::{closure#0}> 14,190,422 ( 0.24%) ./malloc/malloc.c:realloc 13,807,689 ( 0.23%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs:::specialize 13,424,628 ( 0.22%) /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs:__rdl_alloc 13,306,346 ( 0.22%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::_intern_substs 13,201,704 ( 0.22%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/type_variable.rs:::probe 12,546,786 ( 0.21%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:::_intern_substs 12,512,935 ( 0.21%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_borrowck/src/lib.rs:::access_place 11,857,854 ( 0.20%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>, (), core::hash::BuildHasherDefault>>::from_hash::>>::{closure#0}> 11,369,661 ( 0.19%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::hash:: 11,306,388 ( 0.19%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:>::hash_stable 10,305,124 ( 0.17%) ./elf/dl-lookup.c:_dl_lookup_symbol_x 9,986,403 ( 0.17%) ./malloc/malloc.c:_int_realloc 9,976,861 ( 0.17%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/index.rs:>::compress::<>::process_obligations>::{closure#0}> 9,841,173 ( 0.16%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/index.rs:rustc_mir_build::thir::pattern::usefulness::is_useful 9,839,040 ( 0.16%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}> 9,674,747 ( 0.16%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs:rustc_mir_dataflow::drop_flag_effects::on_all_children_bits::is_terminal_path 9,580,309 ( 0.16%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/index.rs:>::process_obligations::> 9,342,381 ( 0.16%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_index/src/vec.rs:>::process_obligations::> 9,311,488 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cmp.rs:>::search_tree:: 9,286,928 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs:::lookup_source_file_idx 9,244,324 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/traversal.rs:::traverse_successor 9,219,731 ( 0.15%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_privacy/src/lib.rs: as rustc_middle::ty::fold::TypeVisitor>::visit_ty 8,546,816 ( 0.14%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/hir/map/mod.rs:::attrs 8,501,732 ( 0.14%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/hir/map/mod.rs:::find_parent_node 8,411,043 ( 0.14%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs:>::compress::<>::process_obligations>::{closure#0}> 8,242,156 ( 0.14%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/iter/adapters/zip.rs:>::union::> 7,965,657 ( 0.13%) 
/usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs:>::hash_stable 7,886,093 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::eq 7,754,540 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/iter/range.rs:>::compress::<>::process_obligations>::{closure#0}> 7,738,205 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/graph/scc/mod.rs:, rustc_borrowck::constraints::ConstraintSccIndex>>::start_walk_from 7,631,373 ( 0.13%) /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/set_len_on_drop.rs:>::compress::<>::process_obligations>::{closure#0}> 7,407,978 ( 0.12%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:>::apply_rewrites 7,131,693 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/relate.rs:rustc_middle::ty::relate::super_relate_tys:: 7,090,693 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/mod.rs:::hash:: 7,005,667 ( 0.12%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:>>::from_key_hashed_nocheck:: 6,957,626 ( 0.12%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::intern_predicate 6,892,158 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_transform/src/simplify.rs:::simplify 6,870,256 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/fold.rs:<&rustc_middle::ty::TyS as rustc_middle::ty::fold::TypeFoldable>::super_fold_with:: 6,867,519 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/graph/scc/mod.rs:, rustc_borrowck::constraints::ConstraintSccIndex>>::inspect_node 6,554,400 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_index/src/bit_set.rs:::is_local_ever_initialized 6,510,668 ( 0.11%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}> 6,495,578 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/region_constraints/mod.rs:::make_subregion 6,489,150 ( 0.11%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, ())>>::reserve_rehash::, rustc_middle::ty::context::Interned, (), core::hash::BuildHasherDefault>::{closure#0}> 6,383,306 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/subst.rs:::fold_ty 6,368,584 ( 0.11%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/bitmask.rs:>::apply_rewrites 6,365,765 ( 0.11%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/combine.rs:::tys 6,318,969 ( 0.10%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/undo_log.rs:::rollback_to 6,275,707 ( 0.10%) /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ops/bit.rs:::hash:: 6,223,498 ( 0.10%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:, (), core::hash::BuildHasherDefault>>::from_hash::, rustc_middle::ty::context::Interned>::{closure#0}> 6,213,174 ( 0.10%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/terminator.rs:::successors 6,118,807 ( 0.10%) /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/walk.rs:::next 6,112,596 ( 0.10%) 
/usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/mod.rs:::next_token 6,089,818 ( 0.10%) /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::get:: -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/graph/scc/mod.rs -------------------------------------------------------------------------------- Ir -- line 35 ---------------------------------------- . . /// Contains the successors for all the Sccs, concatenated. The . /// range of indices corresponding to a given SCC is found in its . /// SccData. . all_successors: Vec, . } . . impl Sccs { 7,488 ( 0.00%) pub fn new(graph: &(impl DirectedGraph + WithNumNodes + WithSuccessors)) -> Self { . SccsConstruction::construct(graph) 7,506 ( 0.00%) } . . /// Returns the number of SCCs in the graph. . pub fn num_sccs(&self) -> usize { . self.scc_data.len() . } . . /// Returns an iterator over the SCCs in the graph. . /// -- line 53 ---------------------------------------- -- line 56 ---------------------------------------- . /// This is convenient when the edges represent dependencies: when you visit . /// `S1`, the value for `S2` will already have been computed. . pub fn all_sccs(&self) -> impl Iterator { . (0..self.scc_data.len()).map(S::new) . } . . /// Returns the SCC to which a node `r` belongs. . pub fn scc(&self, r: N) -> S { 113,375 ( 0.00%) self.scc_indices[r] . } . . /// Returns the successors of the given SCC. . pub fn successors(&self, scc: S) -> &[S] { . self.scc_data.successors(scc) . } . . /// Construct the reverse graph of the SCC graph. . pub fn reverse(&self) -> VecGraph { 20 ( 0.00%) VecGraph::new( . self.num_sccs(), . self.all_sccs() . .flat_map(|source| { . self.successors(source).iter().map(move |&target| (target, source)) . }) . .collect(), . ) . } -- line 82 ---------------------------------------- -- line 116 ---------------------------------------- . self.ranges.len() . } . . /// Returns the successors of the given SCC. . fn successors(&self, scc: S) -> &[S] { . // Annoyingly, `range` does not implement `Copy`, so we have . // to do `range.start..range.end`: . let range = &self.ranges[scc]; 287,504 ( 0.00%) &self.all_successors[range.start..range.end] . } . . /// Creates a new SCC with `successors` as its successors and . /// returns the resulting index. . fn create_scc(&mut self, successors: impl IntoIterator) -> S { . // Store the successors on `scc_successors_vec`, remembering . // the range of indices. 35,923 ( 0.00%) let all_successors_start = self.all_successors.len(); . self.all_successors.extend(successors); . let all_successors_end = self.all_successors.len(); . . debug!( . "create_scc({:?}) successors={:?}", . self.ranges.len(), . &self.all_successors[all_successors_start..all_successors_end], . ); -- line 140 ---------------------------------------- -- line 164 ---------------------------------------- . /// into the successors_stack, we sometimes get duplicate entries. . /// We use this set to remove those -- we also keep its storage . /// around between successors to amortize memory allocation costs. . duplicate_set: FxHashSet, . . scc_data: SccData, . } . 4,170 ( 0.00%) #[derive(Copy, Clone, Debug)] . enum NodeState { . /// This node has not yet been visited as part of the DFS. . /// . /// After SCC construction is complete, this state ought to be . /// impossible. . NotVisited, . . 
/// This node is currently being walk as part of our DFS. It is on -- line 180 ---------------------------------------- -- line 216 ---------------------------------------- . /// N with depth D can reach some other node N' with lower depth . /// D' (i.e., D' < D), we know that N, N', and all nodes in . /// between them on the stack are part of an SCC. . /// . /// [wikipedia]: https://bit.ly/2EZIx84 . fn construct(graph: &'c G) -> Sccs { . let num_nodes = graph.num_nodes(); . 9,174 ( 0.00%) let mut this = Self { . graph, . node_states: IndexVec::from_elem_n(NodeState::NotVisited, num_nodes), . node_stack: Vec::with_capacity(num_nodes), . successors_stack: Vec::new(), . scc_data: SccData { ranges: IndexVec::new(), all_successors: Vec::new() }, . duplicate_set: FxHashSet::default(), . }; . . let scc_indices = (0..num_nodes) . .map(G::Node::new) 328,008 ( 0.01%) .map(|node| match this.start_walk_from(node) { . WalkReturn::Complete { scc_index } => scc_index, . WalkReturn::Cycle { min_depth } => panic!( . "`start_walk_node({:?})` returned cycle with depth {:?}", . node, min_depth . ), . }) . .collect(); . 5,004 ( 0.00%) Sccs { scc_indices, scc_data: this.scc_data } . } . 492,012 ( 0.01%) fn start_walk_from(&mut self, node: G::Node) -> WalkReturn { 218,672 ( 0.00%) if let Some(result) = self.inspect_node(node) { . result . } else { . self.walk_unvisited_node(node) . } 546,680 ( 0.01%) } . . /// Inspect a node during the DFS. We first examine its current . /// state -- if it is not yet visited (`NotVisited`), return `None` so . /// that the caller might push it onto the stack and start walking its . /// successors. . /// . /// If it is already on the DFS stack it will be in the state . /// `BeingVisited`. In that case, we have found a cycle and we . /// return the depth from the stack. . /// . /// Otherwise, we are looking at a node that has already been . /// completely visited. We therefore return `WalkReturn::Complete` . /// with its associated SCC index. 720,788 ( 0.01%) fn inspect_node(&mut self, node: G::Node) -> Option> { 1,441,576 ( 0.02%) Some(match self.find_state(node) { 320,439 ( 0.01%) NodeState::InCycle { scc_index } => WalkReturn::Complete { scc_index }, . 56,148 ( 0.00%) NodeState::BeingVisited { depth: min_depth } => WalkReturn::Cycle { min_depth }, . . NodeState::NotVisited => return None, . . NodeState::InCycleWith { parent } => panic!( . "`find_state` returned `InCycleWith({:?})`, which ought to be impossible", . parent . ), . }) 1,261,379 ( 0.02%) } . . /// Fetches the state of the node `r`. If `r` is recorded as being . /// in a cycle with some other node `r2`, then fetches the state . /// of `r2` (and updates `r` to reflect current result). This is . /// basically the "find" part of a standard union-find algorithm . /// (with path compression). . fn find_state(&mut self, mut node: G::Node) -> NodeState { . // To avoid recursion we temporarily reuse the `parent` of each -- line 288 ---------------------------------------- -- line 299 ---------------------------------------- . // following links downwards is then simply as soon as we have . // found the initial self-loop. . let mut previous_node = node; . . // Ultimately assigned by the parent when following . // `InCycleWith` upwards. . let node_state = loop { . 
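. // Editor's note (descriptive comment, not in the original source): the loop
. // below is the "find" half of the union-find lookup described in the doc
. // comment above, written iteratively. It follows `InCycleWith` links upward
. // until it hits a settled state (InCycle, BeingVisited, or NotVisited),
. // reversing each link through `previous_node` so that the second loop
. // further down can walk back along the chain and apply path compression to
. // every node visited here.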
debug!("find_state(r = {:?} in state {:?})", node, self.node_states[node]); 1,411,964 ( 0.02%) match self.node_states[node] { 427,252 ( 0.01%) NodeState::InCycle { scc_index } => break NodeState::InCycle { scc_index }, 74,864 ( 0.00%) NodeState::BeingVisited { depth } => break NodeState::BeingVisited { depth }, 54,668 ( 0.00%) NodeState::NotVisited => break NodeState::NotVisited, 19,931 ( 0.00%) NodeState::InCycleWith { parent } => { . // We test this, to be extremely sure that we never . // ever break our termination condition for the . // reverse iteration loop. 19,931 ( 0.00%) assert!(node != parent, "Node can not be in cycle with itself"); . // Store the previous node as an inverted list link 99,655 ( 0.00%) self.node_states[node] = NodeState::InCycleWith { parent: previous_node }; . // Update to parent node. . previous_node = node; . node = parent; . } . } . }; . . // The states form a graph where up to one outgoing link is stored at -- line 325 ---------------------------------------- -- line 355 ---------------------------------------- . // We can then walk backwards, starting from `previous_node`, and assign . // each node in the list with the updated state. The loop terminates . // when we reach the self-cycle. . . // Move backwards until we found the node where we started. We . // will know when we hit the state where previous_node == node. . loop { . // Back at the beginning, we can return. 272,772 ( 0.00%) if previous_node == node { . return node_state; . } . // Update to previous node in the link. 39,862 ( 0.00%) match self.node_states[previous_node] { 19,931 ( 0.00%) NodeState::InCycleWith { parent: previous } => { . node = previous_node; . previous_node = previous; . } . // Only InCycleWith nodes were added to the reverse linked list. . other => panic!("Invalid previous link while compressing cycle: {:?}", other), . } . . debug!("find_state: parent_state = {:?}", node_state); . . // Update the node state from the parent state. The assigned . // state is actually a loop invariant but it will only be . // evaluated if there is at least one backlink to follow. . // Fully trusting llvm here to find this loop optimization. 79,724 ( 0.00%) match node_state { . // Path compression, make current node point to the same root. . NodeState::InCycle { .. } => { 37,490 ( 0.00%) self.node_states[node] = node_state; . } . // Still visiting nodes, compress to cycle to the node . // at that depth. 1,186 ( 0.00%) NodeState::BeingVisited { depth } => { 2,372 ( 0.00%) self.node_states[node] = 1,186 ( 0.00%) NodeState::InCycleWith { parent: self.node_stack[depth] }; . } . // These are never allowed as parent nodes. InCycleWith . // should have been followed to a real parent and . // NotVisited can not be part of a cycle since it should . // have instead gotten explored. . NodeState::NotVisited | NodeState::InCycleWith { .. } => { . panic!("invalid parent state: {:?}", node_state) . } . } . } 360,394 ( 0.01%) } . . /// Walks a node that has never been visited before. . /// . /// Call this method when `inspect_node` has returned `None`. Having the . /// caller decide avoids mutual recursion between the two methods and allows . /// us to maintain an allocated stack for nodes on the path between calls. . #[instrument(skip(self, initial), level = "debug")] . fn walk_unvisited_node(&mut self, initial: G::Node) -> WalkReturn { -- line 410 ---------------------------------------- -- line 418 ---------------------------------------- . successor_node: G::Node, . } . . // Move the stack to a local variable. 
We want to utilize the existing allocation and . // mutably borrow it without borrowing self at the same time. . let mut successors_stack = core::mem::take(&mut self.successors_stack); . debug_assert_eq!(successors_stack.len(), 0); . 5,845 ( 0.00%) let mut stack: Vec> = vec![VisitingNodeFrame { . node: initial, . depth: 0, . min_depth: 0, . iter: None, . successors_len: 0, . min_cycle_root: initial, . successor_node: initial, . }]; . . let mut return_value = None; . 54,668 ( 0.00%) 'recurse: while let Some(frame) = stack.last_mut() { . let VisitingNodeFrame { 216,964 ( 0.00%) node, . depth, . iter, 217,002 ( 0.00%) successors_len, . min_depth, . min_cycle_root, . successor_node, . } = frame; . 108,501 ( 0.00%) let node = *node; 217,040 ( 0.00%) let depth = *depth; . 325,503 ( 0.01%) let successors = match iter { . Some(iter) => iter, . None => { . // This None marks that we still have the initialize this node's frame. . debug!(?depth, ?node); . . debug_assert!(matches!(self.node_states[node], NodeState::NotVisited)); . . // Push `node` onto the stack. 218,672 ( 0.00%) self.node_states[node] = NodeState::BeingVisited { depth }; . self.node_stack.push(node); . . // Walk each successor of the node, looking to see if any of . // them can reach a node that is presently on the stack. If . // so, that means they can also reach us. 109,336 ( 0.00%) *successors_len = successors_stack.len(); . // Set and return a reference, this is currently empty. 218,648 ( 0.00%) iter.get_or_insert(self.graph.successors(node)) . } . }; . . // Now that iter is initialized, this is a constant for this frame. 217,002 ( 0.00%) let successors_len = *successors_len; . . // Construct iterators for the nodes and walk results. There are two cases: . // * The walk of a successor node returned. . // * The remaining successor nodes. . let returned_walk = . return_value.take().into_iter().map(|walk| (*successor_node, Some(walk))); . . let successor_walk = successors.by_ref().map(|successor_node| { . debug!(?node, ?successor_node); 376,587 ( 0.01%) (successor_node, self.inspect_node(successor_node)) . }); . 468,098 ( 0.01%) for (successor_node, walk) in returned_walk.chain(successor_walk) { 642,526 ( 0.01%) match walk { . Some(WalkReturn::Cycle { min_depth: successor_min_depth }) => { . // Track the minimum depth we can reach. 74,922 ( 0.00%) assert!(successor_min_depth <= depth); 93,943 ( 0.00%) if successor_min_depth < *min_depth { . debug!(?node, ?successor_min_depth); 57,049 ( 0.00%) *min_depth = successor_min_depth; 19,021 ( 0.00%) *min_cycle_root = successor_node; . } . } . . Some(WalkReturn::Complete { scc_index: successor_scc_index }) => { . // Push the completed SCC indices onto . // the `successors_stack` for later. . debug!(?node, ?successor_scc_index); . successors_stack.push(successor_scc_index); . } . . None => { 107,666 ( 0.00%) let depth = depth + 1; . debug!(?depth, ?successor_node); . // Remember which node the return value will come from. 53,833 ( 0.00%) frame.successor_node = successor_node; . // Start a new stack frame the step into it. . stack.push(VisitingNodeFrame { . node: successor_node, . depth, . iter: None, . successors_len: 0, . min_depth: depth, . min_cycle_root: successor_node, -- line 518 ---------------------------------------- -- line 531 ---------------------------------------- . let frame = stack.pop().unwrap(); . . // If `min_depth == depth`, then we are the root of the . // cycle: we can't reach anyone further down the stack. . . // Pass the 'return value' down the stack. . 
// We return one frame at a time so there can't be another return value. . debug_assert!(return_value.is_none()); 109,336 ( 0.00%) return_value = Some(if frame.min_depth == depth { . // Note that successor stack may have duplicates, so we . // want to remove those: . let deduplicated_successors = { . let duplicate_set = &mut self.duplicate_set; . duplicate_set.clear(); . successors_stack . .drain(successors_len..) 88,068 ( 0.00%) .filter(move |&i| duplicate_set.insert(i)) . }; . let scc_index = self.scc_data.create_scc(deduplicated_successors); 215,538 ( 0.00%) self.node_states[node] = NodeState::InCycle { scc_index }; . WalkReturn::Complete { scc_index } . } else { . // We are not the head of the cycle. Return back to our . // caller. They will take ownership of the . // `self.successors` data that we pushed. 112,470 ( 0.00%) self.node_states[node] = NodeState::InCycleWith { parent: frame.min_cycle_root }; . WalkReturn::Cycle { min_depth: frame.min_depth } . }); . } . . // Keep the allocation we used for successors_stack. 7,515 ( 0.00%) self.successors_stack = successors_stack; . debug_assert_eq!(self.successors_stack.len(), 0); . . return_value.unwrap() . } . } 2,457,327 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cell.rs -------------------------------------------------------------------------------- Ir -- line 346 ---------------------------------------- . /// . /// let c = Cell::new(5); . /// . /// c.set(10); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn set(&self, val: T) { 1,456 ( 0.00%) let old = self.replace(val); 3,308 ( 0.00%) drop(old); . } . . /// Swaps the values of two `Cell`s. . /// Difference with `std::mem::swap` is that this function doesn't require `&mut` reference. . /// . /// # Examples . /// . /// ``` -- line 363 ---------------------------------------- -- line 434 ---------------------------------------- . /// . /// let five = c.get(); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn get(&self) -> T { . // SAFETY: This can cause data races if called from a separate thread, . // but `Cell` is `!Sync` so this won't happen. 520,743 ( 0.01%) unsafe { *self.value.get() } . } . . /// Updates the contained value using a function and returns the new value. . /// . /// # Examples . /// . /// ``` . /// #![feature(cell_update)] -- line 450 ---------------------------------------- -- line 689 ---------------------------------------- . . #[inline(always)] . fn is_writing(x: BorrowFlag) -> bool { . x < UNUSED . } . . #[inline(always)] . fn is_reading(x: BorrowFlag) -> bool { 1,149,523 ( 0.02%) x > UNUSED . } . . impl RefCell { . /// Creates a new `RefCell` containing `value`. . /// . /// # Examples . /// . /// ``` -- line 705 ---------------------------------------- -- line 706 ---------------------------------------- . /// use std::cell::RefCell; . /// . /// let c = RefCell::new(5); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_refcell_new", since = "1.24.0")] . #[inline] . pub const fn new(value: T) -> RefCell { 116,683 ( 0.00%) RefCell { 98,184 ( 0.00%) value: UnsafeCell::new(value), . borrow: Cell::new(UNUSED), . #[cfg(feature = "debug_refcell")] . borrowed_at: Cell::new(None), . } . } . . /// Consumes the `RefCell`, returning the wrapped value. . 
/// -- line 723 ---------------------------------------- -- line 731 ---------------------------------------- . /// let five = c.into_inner(); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_cell_into_inner", issue = "78729")] . #[inline] . pub const fn into_inner(self) -> T { . // Since this function takes `self` (the `RefCell`) by value, the . // compiler statically verifies that it is not currently borrowed. 6,332 ( 0.00%) self.value.into_inner() . } . . /// Replaces the wrapped value with a new one, returning the old value, . /// without deinitializing either one. . /// . /// This function corresponds to [`std::mem::replace`](../mem/fn.replace.html). . /// . /// # Panics -- line 747 ---------------------------------------- -- line 845 ---------------------------------------- . /// let c = RefCell::new(5); . /// . /// let m = c.borrow_mut(); . /// let b = c.borrow(); // this causes a panic . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . #[track_caller] 28,703 ( 0.00%) pub fn borrow(&self) -> Ref<'_, T> { . self.try_borrow().expect("already mutably borrowed") 86,109 ( 0.00%) } . . /// Immutably borrows the wrapped value, returning an error if the value is currently mutably . /// borrowed. . /// . /// The borrow lasts until the returned `Ref` exits scope. Multiple immutable borrows can be . /// taken out at the same time. . /// . /// This is the non-panicking variant of [`borrow`](#method.borrow). -- line 863 ---------------------------------------- -- line 937 ---------------------------------------- . /// let c = RefCell::new(5); . /// let m = c.borrow(); . /// . /// let b = c.borrow_mut(); // this causes a panic . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . #[track_caller] 70,229 ( 0.00%) pub fn borrow_mut(&self) -> RefMut<'_, T> { . self.try_borrow_mut().expect("already borrowed") 210,687 ( 0.00%) } . . /// Mutably borrows the wrapped value, returning an error if the value is currently borrowed. . /// . /// The borrow lasts until the returned `RefMut` or all `RefMut`s derived . /// from it exit scope. The value cannot be borrowed while this borrow is . /// active. . /// . /// This is the non-panicking variant of [`borrow_mut`](#method.borrow_mut). -- line 955 ---------------------------------------- -- line 975 ---------------------------------------- . match BorrowRefMut::new(&self.borrow) { . Some(b) => { . #[cfg(feature = "debug_refcell")] . { . self.borrowed_at.set(Some(crate::panic::Location::caller())); . } . . // SAFETY: `BorrowRef` guarantees unique access. 22 ( 0.00%) Ok(RefMut { value: unsafe { &mut *self.value.get() }, borrow: b }) . } . None => Err(BorrowMutError { . // If a borrow occurred, then we must already have an outstanding borrow, . // so `borrowed_at` will be `Some` . #[cfg(feature = "debug_refcell")] . location: self.borrowed_at.get().unwrap(), . }), . } -- line 991 ---------------------------------------- -- line 1254 ---------------------------------------- . . struct BorrowRef<'b> { . borrow: &'b Cell, . } . . impl<'b> BorrowRef<'b> { . #[inline] . fn new(borrow: &'b Cell) -> Option> { 1,522,765 ( 0.03%) let b = borrow.get().wrapping_add(1); 940,686 ( 0.02%) if !is_reading(b) { . // Incrementing borrow can result in a non-reading value (<= 0) in these cases: . // 1. It was < 0, i.e. there are writing borrows, so we can't allow a read borrow . // due to Rust's reference aliasing rules . // 2. 
It was isize::MAX (the max amount of reading borrows) and it overflowed . // into isize::MIN (the max amount of writing borrows) so we can't allow . // an additional read borrow because isize can't represent so many read borrows . // (this can only happen if you mem::forget more than a small constant amount of . // `Ref`s, which is not good practice) -- line 1271 ---------------------------------------- -- line 1279 ---------------------------------------- . Some(BorrowRef { borrow }) . } . } . } . . impl Drop for BorrowRef<'_> { . #[inline] . fn drop(&mut self) { 634,937 ( 0.01%) let borrow = self.borrow.get(); . debug_assert!(is_reading(borrow)); 1,260,305 ( 0.02%) self.borrow.set(borrow - 1); . } . } . . impl Clone for BorrowRef<'_> { . #[inline] . fn clone(&self) -> Self { . // Since this Ref exists, we know the borrow flag . // is a reading borrow. -- line 1297 ---------------------------------------- -- line 1645 ---------------------------------------- . . struct BorrowRefMut<'b> { . borrow: &'b Cell, . } . . impl Drop for BorrowRefMut<'_> { . #[inline] . fn drop(&mut self) { 18,568,898 ( 0.31%) let borrow = self.borrow.get(); . debug_assert!(is_writing(borrow)); 36,756,480 ( 0.61%) self.borrow.set(borrow + 1); . } . } . . impl<'b> BorrowRefMut<'b> { . #[inline] . fn new(borrow: &'b Cell) -> Option> { . // NOTE: Unlike BorrowRefMut::clone, new is called to create the initial . // mutable reference, and so there must currently be no existing . // references. Thus, while clone increments the mutable refcount, here . // we explicitly only allow going from UNUSED to UNUSED - 1. 37,522,571 ( 0.62%) match borrow.get() { . UNUSED => { . borrow.set(UNUSED - 1); . Some(BorrowRefMut { borrow }) . } . _ => None, . } . } . -- line 1674 ---------------------------------------- -- line 1711 ---------------------------------------- . self.value . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl DerefMut for RefMut<'_, T> { . #[inline] . fn deref_mut(&mut self) -> &mut T { 1,799 ( 0.00%) self.value . } . } . . #[unstable(feature = "coerce_unsized", issue = "27732")] . impl<'b, T: ?Sized + Unsize, U: ?Sized> CoerceUnsized> for RefMut<'b, T> {} . . #[stable(feature = "std_guard_impls", since = "1.20.0")] . impl fmt::Display for RefMut<'_, T> { -- line 1727 ---------------------------------------- -- line 1909 ---------------------------------------- . /// ``` . #[inline(always)] . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_unsafecell_get", since = "1.32.0")] . pub const fn get(&self) -> *mut T { . // We can just cast the pointer from `UnsafeCell` to `T` because of . // #[repr(transparent)]. This exploits libstd's special status, there is . // no guarantee for user code that this will work in future versions of the compiler! 6,366,317 ( 0.11%) self as *const UnsafeCell as *const T as *mut T . } . . /// Returns a mutable reference to the underlying data. . /// . /// This call borrows the `UnsafeCell` mutably (at compile-time) which . /// guarantees that we possess the only reference. . /// . /// # Examples -- line 1925 ---------------------------------------- 23,067,846 ( 0.38%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_index/src/vec.rs -------------------------------------------------------------------------------- Ir -- line 13 ---------------------------------------- . /// . 
/// Purpose: avoid mixing indexes for different bitvector domains. . pub trait Idx: Copy + 'static + Eq + PartialEq + Debug + Hash { . fn new(idx: usize) -> Self; . . fn index(self) -> usize; . . fn increment_by(&mut self, amount: usize) { 108,128 ( 0.00%) *self = self.plus(amount); . } . . fn plus(self, amount: usize) -> Self { 108,328 ( 0.00%) Self::new(self.index() + amount) . } . } . . impl Idx for usize { . #[inline] . fn new(idx: usize) -> Self { . idx . } -- line 33 ---------------------------------------- -- line 35 ---------------------------------------- . fn index(self) -> usize { . self . } . } . . impl Idx for u32 { . #[inline] . fn new(idx: usize) -> Self { 9,421 ( 0.00%) assert!(idx <= u32::MAX as usize); . idx as u32 . } . #[inline] . fn index(self) -> usize { 389 ( 0.00%) self as usize . } . } . . /// Creates a struct type `S` that can be used as an index with . /// `IndexVec` and so on. . /// . /// There are two ways of interacting with these indices: . /// -- line 56 ---------------------------------------- -- line 71 ---------------------------------------- . . // Use default constants . ($(#[$attrs:meta])* $v:vis struct $name:ident { .. }) => ( . $crate::newtype_index!( . // Leave out derives marker so we can use its absence to ensure it comes first . @attrs [$(#[$attrs])*] . @type [$name] . // shave off 256 indices at the end to allow space for packing these indices into enums 161,557 ( 0.00%) @max [0xFFFF_FF00] . @vis [$v] . @debug_format ["{}"]); . ); . . // Define any constants . ($(#[$attrs:meta])* $v:vis struct $name:ident { $($tokens:tt)+ }) => ( . $crate::newtype_index!( . // Leave out derives marker so we can use its absence to ensure it comes first . @attrs [$(#[$attrs])*] . @type [$name] . // shave off 256 indices at the end to allow space for packing these indices into enums 864,162 ( 0.01%) @max [0xFFFF_FF00] . @vis [$v] . @debug_format ["{}"] . $($tokens)+); . ); . . // ---- private rules ---- . . // Base case, user-defined constants (if any) have already been defined -- line 99 ---------------------------------------- -- line 102 ---------------------------------------- . @type [$type:ident] . @max [$max:expr] . @vis [$v:vis] . @debug_format [$debug_format:tt]) => ( . $(#[$attrs])* . #[derive(Copy, PartialEq, Eq, Hash, PartialOrd, Ord, $($derives),*)] . #[rustc_layout_scalar_valid_range_end($max)] . $v struct $type { 29,863,854 ( 0.50%) private: u32 . } . . impl Clone for $type { . #[inline] . fn clone(&self) -> Self { . *self . } . } -- line 118 ---------------------------------------- -- line 126 ---------------------------------------- . . /// Creates a new index from a given `usize`. . /// . /// # Panics . /// . /// Will panic if `value` exceeds `MAX`. . #[inline] . $v const fn from_usize(value: usize) -> Self { 9,665,132 ( 0.16%) assert!(value <= ($max as usize)); . // SAFETY: We just checked that `value <= max`. . unsafe { 45 ( 0.00%) Self::from_u32_unchecked(value as u32) . } . } . . /// Creates a new index from a given `u32`. . /// . /// # Panics . /// . /// Will panic if `value` exceeds `MAX`. . #[inline] . $v const fn from_u32(value: u32) -> Self { 1,025,757 ( 0.02%) assert!(value <= $max); . // SAFETY: We just checked that `value <= max`. . unsafe { . Self::from_u32_unchecked(value) . } . } . . /// Creates a new index from a given `u32`. . /// -- line 156 ---------------------------------------- -- line 175 ---------------------------------------- . #[inline] . $v const fn as_u32(self) -> u32 { . self.private . } . . 
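[Editorial aside] The `Idx` trait and the `newtype_index!` expansion shown in this file boil down to a `u32` wrapper that can be used to index an `IndexVec<I, T>`, so indices from different domains cannot be mixed up, which is what the "avoid mixing indexes for different bitvector domains" doc comment above is getting at. A minimal free-standing sketch of the same idea follows; the `BlockId` and `TypedVec` names are made up for illustration and are not part of rustc_index:

use std::marker::PhantomData;

// Hypothetical typed index: a plain u32 that only works with `TypedVec<BlockId, _>`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct BlockId(u32);

// Minimal stand-in for `IndexVec`: a Vec indexed by `I` instead of `usize`.
struct TypedVec<I, T> {
    raw: Vec<T>,
    _marker: PhantomData<I>,
}

impl<T> TypedVec<BlockId, T> {
    fn new() -> Self {
        TypedVec { raw: Vec::new(), _marker: PhantomData }
    }

    // Push a value and hand back the typed index it landed at.
    fn push(&mut self, value: T) -> BlockId {
        let idx = BlockId(self.raw.len() as u32);
        self.raw.push(value);
        idx
    }

    fn get(&self, idx: BlockId) -> &T {
        &self.raw[idx.0 as usize]
    }
}

fn main() {
    let mut blocks = TypedVec::new();
    let entry = blocks.push("entry block");
    // Indexing with a plain usize (or an index from another domain) would not
    // compile; only `BlockId` values returned by `push` can be used here.
    assert_eq!(*blocks.get(entry), "entry block");
}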
/// Extracts the value of this index as a `usize`. . #[inline] . $v const fn as_usize(self) -> usize { 5,532,309 ( 0.09%) self.as_u32() as usize . } . } . . impl std::ops::Add for $type { . type Output = Self; . 34 ( 0.00%) fn add(self, other: usize) -> Self { 13,248 ( 0.00%) Self::from_usize(self.index() + other) 68 ( 0.00%) } . } . . impl $crate::vec::Idx for $type { . #[inline] . fn new(value: usize) -> Self { . Self::from_usize(value) . } . -- line 200 ---------------------------------------- -- line 390 ---------------------------------------- . @vis [$v] . @debug_format [$debug_format] . $($tokens)*); . $crate::newtype_index!(@serializable $type); . ); . . (@serializable $type:ident) => ( . impl ::rustc_serialize::Decodable for $type { 538 ( 0.00%) fn decode(d: &mut D) -> Self { 1,530 ( 0.00%) Self::from_u32(d.read_u32()) 1,076 ( 0.00%) } . } . impl ::rustc_serialize::Encodable for $type { 123,735 ( 0.00%) fn encode(&self, e: &mut E) -> Result<(), E::Error> { . e.emit_u32(self.private) 148,482 ( 0.00%) } . } . ); . . // Rewrite final without comma to one that includes comma . (@derives [$($derives:ident,)*] . @attrs [$(#[$attrs:meta])*] . @type [$type:ident] . @max [$max:expr] -- line 413 ---------------------------------------- -- line 499 ---------------------------------------- . @type [$type] . @max [$max] . @vis [$v] . @debug_format [$debug_format] . $($tokens)*); . ); . } . 23,639 ( 0.00%) #[derive(Clone, PartialEq, Eq, Hash)] . pub struct IndexVec { 5,500 ( 0.00%) pub raw: Vec, . _marker: PhantomData, . } . . // Whether `IndexVec` is `Send` depends only on the data, . // not the phantom data. . unsafe impl Send for IndexVec where T: Send {} . . impl> Encodable for IndexVec { -- line 517 ---------------------------------------- -- line 523 ---------------------------------------- . impl> Encodable for &IndexVec { . fn encode(&self, s: &mut S) -> Result<(), S::Error> { . Encodable::encode(&self.raw, s) . } . } . . impl> Decodable for IndexVec { . fn decode(d: &mut D) -> Self { 91 ( 0.00%) IndexVec { raw: Decodable::decode(d), _marker: PhantomData } . } . } . . impl fmt::Debug for IndexVec { . fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Debug::fmt(&self.raw, fmt) . } . } . . impl IndexVec { . #[inline] . pub fn new() -> Self { 10,463 ( 0.00%) IndexVec { raw: Vec::new(), _marker: PhantomData } . } . . #[inline] . pub fn from_raw(raw: Vec) -> Self { 46,909 ( 0.00%) IndexVec { raw, _marker: PhantomData } . } . . #[inline] . pub fn with_capacity(capacity: usize) -> Self { 9,770 ( 0.00%) IndexVec { raw: Vec::with_capacity(capacity), _marker: PhantomData } . } . . #[inline] . pub fn from_elem(elem: T, universe: &IndexVec) -> Self . where . T: Clone, . { 28,765 ( 0.00%) IndexVec { raw: vec![elem; universe.len()], _marker: PhantomData } . } . . #[inline] . pub fn from_elem_n(elem: T, n: usize) -> Self . where . T: Clone, . { 356,538 ( 0.01%) IndexVec { raw: vec![elem; n], _marker: PhantomData } . } . . /// Create an `IndexVec` with `n` elements, where the value of each . /// element is the result of `func(i)`. (The underlying vector will . /// be allocated only once, with a capacity of at least `n`.) . #[inline] . pub fn from_fn_n(func: impl FnMut(I) -> T, n: usize) -> Self { . let indices = (0..n).map(I::new); . Self::from_raw(indices.map(func).collect()) . } . . #[inline] 90 ( 0.00%) pub fn push(&mut self, d: T) -> I { . let idx = I::new(self.len()); 2,277,818 ( 0.04%) self.raw.push(d); . idx 108 ( 0.00%) } . . #[inline] . pub fn pop(&mut self) -> Option { . 
self.raw.pop() . } . . #[inline] . pub fn len(&self) -> usize { 1,770,840 ( 0.03%) self.raw.len() . } . . /// Gives the next index that will be assigned when `push` is . /// called. . #[inline] . pub fn next_index(&self) -> I { . I::new(self.len()) . } -- line 604 ---------------------------------------- -- line 612 ---------------------------------------- . pub fn into_iter(self) -> vec::IntoIter { . self.raw.into_iter() . } . . #[inline] . pub fn into_iter_enumerated( . self, . ) -> impl DoubleEndedIterator + ExactSizeIterator { 5,616 ( 0.00%) self.raw.into_iter().enumerate().map(|(n, t)| (I::new(n), t)) . } . . #[inline] . pub fn iter(&self) -> slice::Iter<'_, T> { . self.raw.iter() . } . . #[inline] -- line 628 ---------------------------------------- -- line 692 ---------------------------------------- . . #[inline] . pub fn get_mut(&mut self, index: I) -> Option<&mut T> { . self.raw.get_mut(index.index()) . } . . /// Returns mutable references to two distinct elements, a and b. Panics if a == b. . #[inline] 73,284 ( 0.00%) pub fn pick2_mut(&mut self, a: I, b: I) -> (&mut T, &mut T) { . let (ai, bi) = (a.index(), b.index()); 146,568 ( 0.00%) assert!(ai != bi); . 73,284 ( 0.00%) if ai < bi { . let (c1, c2) = self.raw.split_at_mut(bi); 299,219 ( 0.00%) (&mut c1[ai], &mut c2[0]) . } else { . let (c2, c1) = self.pick2_mut(b, a); . (c1, c2) . } 219,852 ( 0.00%) } . . /// Returns mutable references to three distinct elements or panics otherwise. . #[inline] . pub fn pick3_mut(&mut self, a: I, b: I, c: I) -> (&mut T, &mut T, &mut T) { . let (ai, bi, ci) = (a.index(), b.index(), c.index()); . assert!(ai != bi && bi != ci && ci != ai); . let len = self.raw.len(); . assert!(ai < len && bi < len && ci < len); -- line 719 ---------------------------------------- -- line 726 ---------------------------------------- . } . . /// Grows the index vector so that it contains an entry for . /// `elem`; if that is already true, then has no . /// effect. Otherwise, inserts new values as needed by invoking . /// `fill_value`. . #[inline] . pub fn ensure_contains_elem(&mut self, elem: I, fill_value: impl FnMut() -> T) { 14,080 ( 0.00%) let min_new_len = elem.index() + 1; 792,918 ( 0.01%) if self.len() < min_new_len { 27,460 ( 0.00%) self.raw.resize_with(min_new_len, fill_value); . } . } . . #[inline] . pub fn resize_to_elem(&mut self, elem: I, fill_value: impl FnMut() -> T) { . let min_new_len = elem.index() + 1; . self.raw.resize_with(min_new_len, fill_value); . } -- line 744 ---------------------------------------- -- line 823 ---------------------------------------- . } . . impl FromIterator for IndexVec { . #[inline] . fn from_iter(iter: J) -> Self . where . J: IntoIterator, . { 245,793 ( 0.00%) IndexVec { raw: FromIterator::from_iter(iter), _marker: PhantomData } . } . } . . impl IntoIterator for IndexVec { . type Item = T; . type IntoIter = vec::IntoIter; . . #[inline] -- line 839 ---------------------------------------- 4,004,515 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs -------------------------------------------------------------------------------- Ir -- line 23 ---------------------------------------- . use std::cmp::Ordering; . use std::marker::PhantomData; . use std::ops::Range; . use ty::util::IntTypeExt; . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . 
#[derive(HashStable, TypeFoldable, Lift)] . pub struct TypeAndMut<'tcx> { 262 ( 0.00%) pub ty: Ty<'tcx>, 1,310 ( 0.00%) pub mutbl: hir::Mutability, . } . . #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)] . #[derive(HashStable)] . /// A "free" region `fr` can be interpreted as "some region . /// at least as big as the scope `fr.scope`". . pub struct FreeRegion { 193,684 ( 0.00%) pub scope: DefId, 173,088 ( 0.00%) pub bound_region: BoundRegionKind, . } . 900,187 ( 0.01%) #[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, TyEncodable, TyDecodable, Copy)] . #[derive(HashStable)] . pub enum BoundRegionKind { . /// An anonymous region parameter for a given fn (&T) 16,226 ( 0.00%) BrAnon(u32), . . /// Named region parameters for functions (a in &'a T) . /// . /// The `DefId` is needed to distinguish free regions in . /// the event of shadowing. 1,643 ( 0.00%) BrNamed(DefId, Symbol), . . /// Anonymous region for the implicit env pointer parameter . /// to a closure . BrEnv, . } . 2,537 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)] . #[derive(HashStable)] . pub struct BoundRegion { 149,068 ( 0.00%) pub var: BoundVar, 124,746 ( 0.00%) pub kind: BoundRegionKind, . } . . impl BoundRegionKind { . pub fn is_named(&self) -> bool { . match *self { . BoundRegionKind::BrNamed(_, name) => name != kw::UnderscoreLifetime, . _ => false, . } . } . } . . /// Defines the kinds of types. . /// . /// N.B., if you change this, you'll probably want to change the corresponding . /// AST structure in `rustc_ast/src/ast.rs` as well. 24,989,536 ( 0.41%) #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable, Debug)] . #[derive(HashStable)] . #[rustc_diagnostic_item = "TyKind"] . pub enum TyKind<'tcx> { . /// The primitive boolean type. Written as `bool`. . Bool, . . /// The primitive character type; holds a Unicode scalar value . /// (a non-surrogate code point). Written as `char`. -- line 89 ---------------------------------------- -- line 99 ---------------------------------------- . Float(ty::FloatTy), . . /// Algebraic data types (ADT). For example: structures, enumerations and unions. . /// . /// InternalSubsts here, possibly against intuition, *may* contain `Param`s. . /// That is, even after substitution it is possible that there are type . /// variables. This happens when the `Adt` corresponds to an ADT . /// definition and not a concrete use of it. 4,060,319 ( 0.07%) Adt(&'tcx AdtDef, SubstsRef<'tcx>), . . /// An unsized FFI type that is opaque to Rust. Written as `extern type T`. . Foreign(DefId), . . /// The pointee of a string slice. Written as `str`. . Str, . . /// An array with the given length. Written as `[T; n]`. 18,052 ( 0.00%) Array(Ty<'tcx>, &'tcx ty::Const<'tcx>), . . /// The pointee of an array slice. Written as `[T]`. . Slice(Ty<'tcx>), . . /// A raw pointer. Written as `*mut T` or `*const T` . RawPtr(TypeAndMut<'tcx>), . . /// A reference; a pointer with an associated lifetime. Written as . /// `&'a mut T` or `&'a T`. 2,282,624 ( 0.04%) Ref(Region<'tcx>, Ty<'tcx>, hir::Mutability), . . /// The anonymous type of a function declaration/definition. Each . /// function has a unique type, which is output (for a function . /// named `foo` returning an `i32`) as `fn() -> i32 {foo}`. . /// . /// For example the type of `bar` here: . /// . /// ```rust -- line 134 ---------------------------------------- -- line 143 ---------------------------------------- . /// . /// ```rust . 
/// fn foo() -> i32 { 1 } . /// let bar: fn() -> i32 = foo; . /// ``` . FnPtr(PolyFnSig<'tcx>), . . /// A trait object. Written as `dyn for<'b> Trait<'b, Assoc = u32> + Send + 'a`. 75,770 ( 0.00%) Dynamic(&'tcx List>>, ty::Region<'tcx>), . . /// The anonymous type of a closure. Used to represent the type of . /// `|a| a`. . Closure(DefId, SubstsRef<'tcx>), . . /// The anonymous type of a generator. Used to represent the type of . /// `|a| yield a`. . Generator(DefId, SubstsRef<'tcx>, hir::Movability), -- line 159 ---------------------------------------- -- line 180 ---------------------------------------- . /// The substitutions are for the generics of the function in question. . /// After typeck, the concrete type can be found in the `types` map. . Opaque(DefId, SubstsRef<'tcx>), . . /// A type parameter; for example, `T` in `fn f(x: T) {}`. . Param(ParamTy), . . /// Bound type variable, used only when preparing a trait query. 10,641 ( 0.00%) Bound(ty::DebruijnIndex, BoundTy), . . /// A placeholder type - universally quantified higher-ranked type. . Placeholder(ty::PlaceholderType), . . /// A type variable used during type checking. . Infer(InferTy), . . /// A placeholder for a type which could not be computed; this is -- line 196 ---------------------------------------- -- line 330 ---------------------------------------- . pub closure_kind_ty: T, . pub closure_sig_as_fn_ptr_ty: T, . pub tupled_upvars_ty: T, . } . . impl<'tcx> ClosureSubsts<'tcx> { . /// Construct `ClosureSubsts` from `ClosureSubstsParts`, containing `Substs` . /// for the closure parent, alongside additional closure-specific components. 45 ( 0.00%) pub fn new( . tcx: TyCtxt<'tcx>, . parts: ClosureSubstsParts<'tcx, Ty<'tcx>>, . ) -> ClosureSubsts<'tcx> { . ClosureSubsts { . substs: tcx.mk_substs( 90 ( 0.00%) parts.parent_substs.iter().copied().chain( 180 ( 0.00%) [parts.closure_kind_ty, parts.closure_sig_as_fn_ptr_ty, parts.tupled_upvars_ty] . .iter() . .map(|&ty| ty.into()), . ), . ), . } 90 ( 0.00%) } . . /// Divides the closure substs into their respective components. . /// The ordering assumed here must match that used by `ClosureSubsts::new` above. 813 ( 0.00%) fn split(self) -> ClosureSubstsParts<'tcx, GenericArg<'tcx>> { . match self.substs[..] { 6,980 ( 0.00%) [ 1,939 ( 0.00%) ref parent_substs @ .., 1,923 ( 0.00%) closure_kind_ty, 1,004 ( 0.00%) closure_sig_as_fn_ptr_ty, 1,380 ( 0.00%) tupled_upvars_ty, 3,252 ( 0.00%) ] => ClosureSubstsParts { . parent_substs, . closure_kind_ty, . closure_sig_as_fn_ptr_ty, . tupled_upvars_ty, . }, . _ => bug!("closure substs missing synthetics"), . } 2,439 ( 0.00%) } . . /// Returns `true` only if enough of the synthetic types are known to . /// allow using all of the methods on `ClosureSubsts` without panicking. . /// . /// Used primarily by `ty::print::pretty` to be able to handle closure . /// types that haven't had their synthetic types substituted in. 4 ( 0.00%) pub fn is_valid(self) -> bool { 8 ( 0.00%) self.substs.len() >= 3 8 ( 0.00%) && matches!(self.split().tupled_upvars_ty.expect_ty().kind(), Tuple(_)) 8 ( 0.00%) } . . /// Returns the substitutions of the closure's parent. . pub fn parent_substs(self) -> &'tcx [GenericArg<'tcx>] { . self.split().parent_substs . } . . /// Returns an iterator over the list of types of captured paths by the closure. . /// In case there was a type error in figuring out the types of the captured path, an . /// empty iterator is returned. . 
#[inline] 168 ( 0.00%) pub fn upvar_tys(self) -> impl Iterator> + 'tcx { 297 ( 0.00%) match self.tupled_upvars_ty().kind() { . TyKind::Error(_) => None, 202 ( 0.00%) TyKind::Tuple(..) => Some(self.tupled_upvars_ty().tuple_fields()), . TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"), . ty => bug!("Unexpected representation of upvar types tuple {:?}", ty), . } . .into_iter() . .flatten() 112 ( 0.00%) } . . /// Returns the tuple type representing the upvars for this closure. . #[inline] . pub fn tupled_upvars_ty(self) -> Ty<'tcx> { 3,636 ( 0.00%) self.split().tupled_upvars_ty.expect_ty() . } . . /// Returns the closure kind for this closure; may return a type . /// variable during inference. To get the closure kind during . /// inference, use `infcx.closure_kind(substs)`. 435 ( 0.00%) pub fn kind_ty(self) -> Ty<'tcx> { . self.split().closure_kind_ty.expect_ty() 870 ( 0.00%) } . . /// Returns the `fn` pointer type representing the closure signature for this . /// closure. . // FIXME(eddyb) this should be unnecessary, as the shallowly resolved . // type is known at the time of the creation of `ClosureSubsts`, . // see `rustc_typeck::check::closure`. 2 ( 0.00%) pub fn sig_as_fn_ptr_ty(self) -> Ty<'tcx> { . self.split().closure_sig_as_fn_ptr_ty.expect_ty() 4 ( 0.00%) } . . /// Returns the closure kind for this closure; only usable outside . /// of an inference context, because in that context we know that . /// there are no type variables. . /// . /// If you have an inference context, use `infcx.closure_kind()`. 22 ( 0.00%) pub fn kind(self) -> ty::ClosureKind { 22 ( 0.00%) self.kind_ty().to_opt_closure_kind().unwrap() 44 ( 0.00%) } . . /// Extracts the signature from the closure. 439 ( 0.00%) pub fn sig(self) -> ty::PolyFnSig<'tcx> { . let ty = self.sig_as_fn_ptr_ty(); 878 ( 0.00%) match ty.kind() { 1,756 ( 0.00%) ty::FnPtr(sig) => *sig, . _ => bug!("closure_sig_as_fn_ptr_ty is not a fn-ptr: {:?}", ty.kind()), . } 1,317 ( 0.00%) } . } . . /// Similar to `ClosureSubsts`; see the above documentation for more. . #[derive(Copy, Clone, Debug, TypeFoldable)] . pub struct GeneratorSubsts<'tcx> { . pub substs: SubstsRef<'tcx>, . } . -- line 448 ---------------------------------------- -- line 676 ---------------------------------------- . } . . impl<'tcx> UpvarSubsts<'tcx> { . /// Returns an iterator over the list of types of captured paths by the closure/generator. . /// In case there was a type error in figuring out the types of the captured path, an . /// empty iterator is returned. . #[inline] . pub fn upvar_tys(self) -> impl Iterator> + 'tcx { 134 ( 0.00%) let tupled_tys = match self { 179 ( 0.00%) UpvarSubsts::Closure(substs) => substs.as_closure().tupled_upvars_ty(), . UpvarSubsts::Generator(substs) => substs.as_generator().tupled_upvars_ty(), . }; . 291 ( 0.00%) match tupled_tys.kind() { . TyKind::Error(_) => None, 246 ( 0.00%) TyKind::Tuple(..) => Some(self.tupled_upvars_ty().tuple_fields()), . TyKind::Infer(_) => bug!("upvar_tys called before capture types are inferred"), . ty => bug!("Unexpected representation of upvar types tuple {:?}", ty), . } . .into_iter() . .flatten() . } . . #[inline] . pub fn tupled_upvars_ty(self) -> Ty<'tcx> { 224 ( 0.00%) match self { 262 ( 0.00%) UpvarSubsts::Closure(substs) => substs.as_closure().tupled_upvars_ty(), . UpvarSubsts::Generator(substs) => substs.as_generator().tupled_upvars_ty(), . } . } . } . . /// An inline const is modeled like . /// . 
/// const InlineConst<'l0...'li, T0...Tj, R>: R; -- line 710 ---------------------------------------- -- line 760 ---------------------------------------- . } . . /// Returns the type of this inline const. . pub fn ty(self) -> Ty<'tcx> { . self.split().ty.expect_ty() . } . } . 54,816 ( 0.00%) #[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, TyEncodable, TyDecodable)] 62,632 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub enum ExistentialPredicate<'tcx> { . /// E.g., `Iterator`. . Trait(ExistentialTraitRef<'tcx>), . /// E.g., `Iterator::Item = T`. . Projection(ExistentialProjection<'tcx>), . /// E.g., `Send`. 6 ( 0.00%) AutoTrait(DefId), . } . . impl<'tcx> ExistentialPredicate<'tcx> { . /// Compares via an ordering that will not change if modules are reordered or other changes are . /// made to the tree. In particular, this ordering is preserved across incremental compilations. 30 ( 0.00%) pub fn stable_cmp(&self, tcx: TyCtxt<'tcx>, other: &Self) -> Ordering { . use self::ExistentialPredicate::*; 32 ( 0.00%) match (*self, *other) { . (Trait(_), Trait(_)) => Ordering::Equal, . (Projection(ref a), Projection(ref b)) => { . tcx.def_path_hash(a.item_def_id).cmp(&tcx.def_path_hash(b.item_def_id)) . } . (AutoTrait(ref a), AutoTrait(ref b)) => { . tcx.def_path_hash(*a).cmp(&tcx.def_path_hash(*b)) . } . (Trait(_), _) => Ordering::Less, . (Projection(_), Trait(_)) => Ordering::Greater, . (Projection(_), _) => Ordering::Less, . (AutoTrait(_), _) => Ordering::Greater, . } 36 ( 0.00%) } . } . . impl<'tcx> Binder<'tcx, ExistentialPredicate<'tcx>> { 7,308 ( 0.00%) pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::Predicate<'tcx> { . use crate::ty::ToPredicate; 4,060 ( 0.00%) match self.skip_binder() { . ExistentialPredicate::Trait(tr) => { . self.rebind(tr).with_self_ty(tcx, self_ty).without_const().to_predicate(tcx) . } . ExistentialPredicate::Projection(p) => { . self.rebind(p.with_self_ty(tcx, self_ty)).to_predicate(tcx) . } . ExistentialPredicate::AutoTrait(did) => { . let trait_ref = self.rebind(ty::TraitRef { . def_id: did, . substs: tcx.mk_substs_trait(self_ty, &[]), . }); . trait_ref.without_const().to_predicate(tcx) . } . } 6,496 ( 0.00%) } . } . . impl<'tcx> List>> { . /// Returns the "principal `DefId`" of this set of existential predicates. . /// . /// A Rust trait object type consists (in addition to a lifetime bound) . /// of a set of trait bounds, which are separated into any number . /// of auto-trait bounds, and at most one non-auto-trait bound. The -- line 826 ---------------------------------------- -- line 840 ---------------------------------------- . /// . /// It is also possible to have a "trivial" trait object that . /// consists only of auto traits, with no principal - for example, . /// `dyn Send + Sync`. In that case, the set of auto-trait bounds . /// is `{Send, Sync}`, while there is no principal. These trait objects . /// have a "trivial" vtable consisting of just the size, alignment, . /// and destructor. . pub fn principal(&self) -> Option>> { 2,442 ( 0.00%) self[0] 2,442 ( 0.00%) .map_bound(|this| match this { . ExistentialPredicate::Trait(tr) => Some(tr), . _ => None, . }) . .transpose() . } . 812 ( 0.00%) pub fn principal_def_id(&self) -> Option { . self.principal().map(|trait_ref| trait_ref.skip_binder().def_id) 1,624 ( 0.00%) } . . #[inline] . pub fn projection_bounds<'a>( . &'a self, . ) -> impl Iterator>> + 'a { . self.iter().filter_map(|predicate| { . predicate . 
.map_bound(|pred| match pred { -- line 866 ---------------------------------------- -- line 886 ---------------------------------------- . /// T: Foo . /// . /// This would be represented by a trait-reference where the `DefId` is the . /// `DefId` for the trait `Foo` and the substs define `T` as parameter 0, . /// and `U` as parameter 1. . /// . /// Trait references also appear in object types like `Foo`, but in . /// that case the `Self` parameter is absent from the substitutions. 27,209 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 2,077,399 ( 0.03%) #[derive(HashStable, TypeFoldable)] . pub struct TraitRef<'tcx> { 775 ( 0.00%) pub def_id: DefId, 1,310,091 ( 0.02%) pub substs: SubstsRef<'tcx>, . } . . impl<'tcx> TraitRef<'tcx> { 2,398 ( 0.00%) pub fn new(def_id: DefId, substs: SubstsRef<'tcx>) -> TraitRef<'tcx> { . TraitRef { def_id, substs } 9,592 ( 0.00%) } . . /// Returns a `TraitRef` of the form `P0: Foo` where `Pi` . /// are the parameters defined on trait. 567 ( 0.00%) pub fn identity(tcx: TyCtxt<'tcx>, def_id: DefId) -> Binder<'tcx, TraitRef<'tcx>> { 252 ( 0.00%) ty::Binder::dummy(TraitRef { . def_id, 378 ( 0.00%) substs: InternalSubsts::identity_for_item(tcx, def_id), . }) 504 ( 0.00%) } . . #[inline] . pub fn self_ty(&self) -> Ty<'tcx> { . self.substs.type_at(0) . } . 1,648 ( 0.00%) pub fn from_method( . tcx: TyCtxt<'tcx>, . trait_id: DefId, . substs: SubstsRef<'tcx>, . ) -> ty::TraitRef<'tcx> { . let defs = tcx.generics_of(trait_id); . 206 ( 0.00%) ty::TraitRef { def_id: trait_id, substs: tcx.intern_substs(&substs[..defs.params.len()]) } 2,266 ( 0.00%) } . } . . pub type PolyTraitRef<'tcx> = Binder<'tcx, TraitRef<'tcx>>; . . impl<'tcx> PolyTraitRef<'tcx> { . pub fn self_ty(&self) -> Binder<'tcx, Ty<'tcx>> { . self.map_bound_ref(|tr| tr.self_ty()) . } . . pub fn def_id(&self) -> DefId { 3,465 ( 0.00%) self.skip_binder().def_id 1,155 ( 0.00%) } . 5,136 ( 0.00%) pub fn to_poly_trait_predicate(&self) -> ty::PolyTraitPredicate<'tcx> { 10,272 ( 0.00%) self.map_bound(|trait_ref| ty::TraitPredicate { . trait_ref, . constness: ty::BoundConstness::NotConst, . polarity: ty::ImplPolarity::Positive, . }) 5,136 ( 0.00%) } . } . . /// An existential reference to a trait, where `Self` is erased. . /// For example, the trait object `Trait<'a, 'b, X, Y>` is: . /// . /// exists T. T: Trait<'a, 'b, X, Y> . /// . /// The substitutions don't include the erased `Self`, only trait . /// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above). 40 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 31,465 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct ExistentialTraitRef<'tcx> { 3,832 ( 0.00%) pub def_id: DefId, 34,508 ( 0.00%) pub substs: SubstsRef<'tcx>, . } . . impl<'tcx> ExistentialTraitRef<'tcx> { 4 ( 0.00%) pub fn erase_self_ty( . tcx: TyCtxt<'tcx>, . trait_ref: ty::TraitRef<'tcx>, . ) -> ty::ExistentialTraitRef<'tcx> { . // Assert there is a Self. . trait_ref.substs.type_at(0); . . ty::ExistentialTraitRef { . def_id: trait_ref.def_id, . substs: tcx.intern_substs(&trait_ref.substs[1..]), . } 4 ( 0.00%) } . . /// Object types don't have a self type specified. Therefore, when . /// we convert the principal trait-ref into a normal trait-ref, . /// you must give *some* self type. A common choice is `mk_err()` . /// or some placeholder type. . pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::TraitRef<'tcx> { . 
// otherwise the escaping vars would be captured by the binder . // debug_assert!(!self_ty.has_escaping_bound_vars()); -- line 985 ---------------------------------------- -- line 999 ---------------------------------------- . /// we convert the principal trait-ref into a normal trait-ref, . /// you must give *some* self type. A common choice is `mk_err()` . /// or some placeholder type. . pub fn with_self_ty(&self, tcx: TyCtxt<'tcx>, self_ty: Ty<'tcx>) -> ty::PolyTraitRef<'tcx> { . self.map_bound(|trait_ref| trait_ref.with_self_ty(tcx, self_ty)) . } . } . 39,248 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub enum BoundVariableKind { . Ty(BoundTyKind), . Region(BoundRegionKind), . Const, . } . . /// Binder is a binder for higher-ranked lifetimes or types. It is part of the -- line 1015 ---------------------------------------- -- line 1016 ---------------------------------------- . /// compiler's representation for things like `for<'a> Fn(&'a isize)` . /// (which would be represented by the type `PolyTraitRef == . /// Binder<'tcx, TraitRef>`). Note that when we instantiate, . /// erase, or otherwise "discharge" these bound vars, we change the . /// type from `Binder<'tcx, T>` to just `T` (see . /// e.g., `liberate_late_bound_regions`). . /// . /// `Decodable` and `Encodable` are implemented for `Binder` using the `impl_binder_encode_decode!` macro. 76,941 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] 887,563 ( 0.01%) pub struct Binder<'tcx, T>(T, &'tcx List); . . impl<'tcx, T> Binder<'tcx, T> . where . T: TypeFoldable<'tcx>, . { . /// Wraps `value` in a binder, asserting that `value` does not . /// contain any bound vars that would be bound by the . /// binder. This is commonly used to 'inject' a value T into a . /// different binding level. 58,905 ( 0.00%) pub fn dummy(value: T) -> Binder<'tcx, T> { 319,544 ( 0.01%) assert!(!value.has_escaping_bound_vars()); 716,454 ( 0.01%) Binder(value, ty::List::empty()) 58,491 ( 0.00%) } . . pub fn bind_with_vars(value: T, vars: &'tcx List) -> Binder<'tcx, T> { . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(vars); . value.visit_with(&mut validator); . } 14,936 ( 0.00%) Binder(value, vars) . } . } . . impl<'tcx, T> Binder<'tcx, T> { . /// Skips the binder and returns the "bound" value. This is a . /// risky thing to do because it's easy to get confused about . /// De Bruijn indices and the like. It is usually better to . /// discharge the binder using `no_bound_vars` or -- line 1053 ---------------------------------------- -- line 1059 ---------------------------------------- . /// accounting. . /// . /// Some examples where `skip_binder` is reasonable: . /// . /// - extracting the `DefId` from a PolyTraitRef; . /// - comparing the self type of a PolyTraitRef to see if it is equal to . /// a type parameter `X`, since the type `X` does not reference any regions . pub fn skip_binder(self) -> T { 2,216,239 ( 0.04%) self.0 . } . . pub fn bound_vars(&self) -> &'tcx List { 598 ( 0.00%) self.1 . } . . pub fn as_ref(&self) -> Binder<'tcx, &T> { 5,663 ( 0.00%) Binder(&self.0, self.1) . } . . pub fn map_bound_ref_unchecked(&self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(&T) -> U, . { . let value = f(&self.0); . Binder(value, self.1) -- line 1083 ---------------------------------------- -- line 1085 ---------------------------------------- . . pub fn map_bound_ref>(&self, f: F) -> Binder<'tcx, U> . where . 
F: FnOnce(&T) -> U, . { . self.as_ref().map_bound(f) . } . 360 ( 0.00%) pub fn map_bound>(self, f: F) -> Binder<'tcx, U> . where . F: FnOnce(T) -> U, . { 33,223 ( 0.00%) let value = f(self.0); . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.1); . value.visit_with(&mut validator); . } 442,104 ( 0.01%) Binder(value, self.1) 360 ( 0.00%) } . . pub fn try_map_bound, E>(self, f: F) -> Result, E> . where . F: FnOnce(T) -> Result, . { . let value = f(self.0)?; . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.1); . value.visit_with(&mut validator); . } 7,622 ( 0.00%) Ok(Binder(value, self.1)) . } . . /// Wraps a `value` in a binder, using the same bound variables as the . /// current `Binder`. This should not be used if the new value *changes* . /// the bound variables. Note: the (old or new) value itself does not . /// necessarily need to *name* all the bound variables. . /// . /// This currently doesn't do anything different than `bind`, because we -- line 1122 ---------------------------------------- -- line 1126 ---------------------------------------- . pub fn rebind(&self, value: U) -> Binder<'tcx, U> . where . U: TypeFoldable<'tcx>, . { . if cfg!(debug_assertions) { . let mut validator = ValidateBoundVars::new(self.bound_vars()); . value.visit_with(&mut validator); . } 15,567 ( 0.00%) Binder(value, self.1) . } . . /// Unwraps and returns the value within, but only if it contains . /// no bound vars at all. (In other words, if this binder -- . /// and indeed any enclosing binder -- doesn't bind anything at . /// all.) Otherwise, returns `None`. . /// . /// (One could imagine having a method that just unwraps a single -- line 1142 ---------------------------------------- -- line 1143 ---------------------------------------- . /// binder, but permits late-bound vars bound by enclosing . /// binders, but that would require adjusting the debruijn . /// indices, and given the shallow binding structure we often use, . /// would not be that useful.) . pub fn no_bound_vars(self) -> Option . where . T: TypeFoldable<'tcx>, . { 393,825 ( 0.01%) if self.0.has_escaping_bound_vars() { None } else { Some(self.skip_binder()) } 955 ( 0.00%) } . . /// Splits the contents into two things that share the same binder . /// level as the original, returning two distinct binders. . /// . /// `f` should consider bound regions at depth 1 to be free, and . /// anything it produces with bound regions at depth 1 will be . /// bound in the resulting return values. . pub fn split(self, f: F) -> (Binder<'tcx, U>, Binder<'tcx, V>) -- line 1160 ---------------------------------------- -- line 1170 ---------------------------------------- . pub fn transpose(self) -> Option> { . let bound_vars = self.1; . self.0.map(|v| Binder(v, bound_vars)) . } . } . . /// Represents the projection of an associated type. In explicit UFCS . /// form this would be written `>::N`. 1,630 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 112,075 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct ProjectionTy<'tcx> { . /// The parameters of the associated item. 74,109 ( 0.00%) pub substs: SubstsRef<'tcx>, . . /// The `DefId` of the `TraitItem` for the associated type `N`. . /// . /// Note that this is not the `DefId` of the `TraitRef` containing this . /// associated type, which is in `tcx.associated_item(item_def_id).container`. 69,709 ( 0.00%) pub item_def_id: DefId, . } . . 
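
Not part of the profiled rustc source: a minimal, self-contained sketch of the map_bound pattern used by the Binder combinators above, where the payload is transformed while the attached bound-variable list is carried over unchanged. ToyBinder and its fields are invented names for illustration only.

    // Toy stand-in for ty::Binder<'tcx, T>: a value plus the list of
    // variables bound by the enclosing binder.
    #[derive(Debug, Clone)]
    struct ToyBinder<T> {
        value: T,
        bound_vars: Vec<String>,
    }

    impl<T> ToyBinder<T> {
        // Analogue of Binder::dummy: wrap a value that binds nothing.
        fn dummy(value: T) -> Self {
            ToyBinder { value, bound_vars: Vec::new() }
        }

        // Analogue of Binder::map_bound: transform the payload while the
        // bound-variable list is carried over unchanged.
        fn map_bound<U>(self, f: impl FnOnce(T) -> U) -> ToyBinder<U> {
            ToyBinder { value: f(self.value), bound_vars: self.bound_vars }
        }
    }

    fn main() {
        let unit = ToyBinder::dummy(42u32);
        assert!(unit.bound_vars.is_empty());

        let sig = ToyBinder { value: ("i32", "bool"), bound_vars: vec!["'a".to_string()] };
        let output = sig.map_bound(|(_, ret)| ret);
        assert_eq!(output.value, "bool");
        assert_eq!(output.bound_vars, vec!["'a".to_string()]);
    }
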
impl<'tcx> ProjectionTy<'tcx> { 36,302 ( 0.00%) pub fn trait_def_id(&self, tcx: TyCtxt<'tcx>) -> DefId { 10,372 ( 0.00%) tcx.associated_item(self.item_def_id).container.id() 41,488 ( 0.00%) } . . /// Extracts the underlying trait reference and own substs from this projection. . /// For example, if this is a projection of `::Item<'a>`, . /// then this function would return a `T: Iterator` trait reference and `['a]` as the own substs 25,368 ( 0.00%) pub fn trait_ref_and_own_substs( . &self, . tcx: TyCtxt<'tcx>, . ) -> (ty::TraitRef<'tcx>, &'tcx [ty::GenericArg<'tcx>]) { 10,872 ( 0.00%) let def_id = tcx.associated_item(self.item_def_id).container.id(); . let trait_generics = tcx.generics_of(def_id); 18,120 ( 0.00%) ( . ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, trait_generics) }, . &self.substs[trait_generics.count()..], . ) 32,616 ( 0.00%) } . . /// Extracts the underlying trait reference from this projection. . /// For example, if this is a projection of `::Item`, . /// then this function would return a `T: Iterator` trait reference. . /// . /// WARNING: This will drop the substs for generic associated types . /// consider calling [Self::trait_ref_and_own_substs] to get those . /// as well. 46,278 ( 0.00%) pub fn trait_ref(&self, tcx: TyCtxt<'tcx>) -> ty::TraitRef<'tcx> { 5,142 ( 0.00%) let def_id = self.trait_def_id(tcx); 5,142 ( 0.00%) ty::TraitRef { def_id, substs: self.substs.truncate_to(tcx, tcx.generics_of(def_id)) } 56,562 ( 0.00%) } . 10,046 ( 0.00%) pub fn self_ty(&self) -> Ty<'tcx> { 30,138 ( 0.00%) self.substs.type_at(0) 20,092 ( 0.00%) } . } . . #[derive(Copy, Clone, Debug, TypeFoldable)] . pub struct GenSig<'tcx> { . pub resume_ty: Ty<'tcx>, . pub yield_ty: Ty<'tcx>, . pub return_ty: Ty<'tcx>, . } -- line 1233 ---------------------------------------- -- line 1235 ---------------------------------------- . pub type PolyGenSig<'tcx> = Binder<'tcx, GenSig<'tcx>>; . . /// Signature of a function type, which we have arbitrarily . /// decided to use to refer to the input/output types. . /// . /// - `inputs`: is the list of arguments and their modes. . /// - `output`: is the return type. . /// - `c_variadic`: indicates whether this is a C-variadic function. 94,290 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] 56,176 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct FnSig<'tcx> { 6,444 ( 0.00%) pub inputs_and_output: &'tcx List>, 59,438 ( 0.00%) pub c_variadic: bool, 10,875 ( 0.00%) pub unsafety: hir::Unsafety, 67,995 ( 0.00%) pub abi: abi::Abi, . } . . impl<'tcx> FnSig<'tcx> { 61,764 ( 0.00%) pub fn inputs(&self) -> &'tcx [Ty<'tcx>] { 262,434 ( 0.00%) &self.inputs_and_output[..self.inputs_and_output.len() - 1] 123,528 ( 0.00%) } . 38,140 ( 0.00%) pub fn output(&self) -> Ty<'tcx> { 279,160 ( 0.00%) self.inputs_and_output[self.inputs_and_output.len() - 1] 76,280 ( 0.00%) } . . // Creates a minimal `FnSig` to be used when encountering a `TyKind::Error` in a fallible . // method. . fn fake() -> FnSig<'tcx> { . FnSig { . inputs_and_output: List::empty(), . c_variadic: false, . unsafety: hir::Unsafety::Normal, -- line 1267 ---------------------------------------- -- line 1270 ---------------------------------------- . } . } . . pub type PolyFnSig<'tcx> = Binder<'tcx, FnSig<'tcx>>; . . impl<'tcx> PolyFnSig<'tcx> { . #[inline] . pub fn inputs(&self) -> Binder<'tcx, &'tcx [Ty<'tcx>]> { 8,452 ( 0.00%) self.map_bound_ref_unchecked(|fn_sig| fn_sig.inputs()) . } . #[inline] . 
pub fn input(&self, index: usize) -> ty::Binder<'tcx, Ty<'tcx>> { 32,561 ( 0.00%) self.map_bound_ref(|fn_sig| fn_sig.inputs()[index]) . } . pub fn inputs_and_output(&self) -> ty::Binder<'tcx, &'tcx List>> { . self.map_bound_ref(|fn_sig| fn_sig.inputs_and_output) 791 ( 0.00%) } . #[inline] . pub fn output(&self) -> ty::Binder<'tcx, Ty<'tcx>> { 17,223 ( 0.00%) self.map_bound_ref(|fn_sig| fn_sig.output()) . } . pub fn c_variadic(&self) -> bool { 1,580 ( 0.00%) self.skip_binder().c_variadic 790 ( 0.00%) } . pub fn unsafety(&self) -> hir::Unsafety { 13,510 ( 0.00%) self.skip_binder().unsafety 6,755 ( 0.00%) } . pub fn abi(&self) -> abi::Abi { 7,200 ( 0.00%) self.skip_binder().abi 1,800 ( 0.00%) } . } . . pub type CanonicalPolyFnSig<'tcx> = Canonical<'tcx, Binder<'tcx, FnSig<'tcx>>>; . 56 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub struct ParamTy { 394 ( 0.00%) pub index: u32, . pub name: Symbol, . } . . impl<'tcx> ParamTy { 158 ( 0.00%) pub fn new(index: u32, name: Symbol) -> ParamTy { . ParamTy { index, name } 79 ( 0.00%) } . . pub fn for_def(def: &ty::GenericParamDef) -> ParamTy { 60 ( 0.00%) ParamTy::new(def.index, def.name) 30 ( 0.00%) } . . #[inline] . pub fn to_ty(self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { . tcx.mk_ty_param(self.index, self.name) . } . } . . #[derive(Copy, Clone, Hash, TyEncodable, TyDecodable, Eq, PartialEq, Ord, PartialOrd)] . #[derive(HashStable)] . pub struct ParamConst { 102 ( 0.00%) pub index: u32, 102 ( 0.00%) pub name: Symbol, . } . . impl ParamConst { . pub fn new(index: u32, name: Symbol) -> ParamConst { . ParamConst { index, name } . } . . pub fn for_def(def: &ty::GenericParamDef) -> ParamConst { -- line 1338 ---------------------------------------- -- line 1440 ---------------------------------------- . /// the inference variable is supposed to satisfy the relation . /// *for every value of the placeholder region*. To ensure that doesn't . /// happen, you can use `leak_check`. This is more clearly explained . /// by the [rustc dev guide]. . /// . /// [1]: https://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/ . /// [2]: https://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/ . /// [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/traits/hrtb.html 9,312,075 ( 0.15%) #[derive(Clone, PartialEq, Eq, Hash, Copy, TyEncodable, TyDecodable, PartialOrd, Ord)] 18 ( 0.00%) pub enum RegionKind { . /// Region bound in a type or fn declaration which will be . /// substituted 'early' -- that is, at the same time when type . /// parameters are substituted. . ReEarlyBound(EarlyBoundRegion), . . /// Region bound in a function scope, which will be substituted when the . /// function is called. 248,368 ( 0.00%) ReLateBound(ty::DebruijnIndex, BoundRegion), . . /// When checking a function body, the types of all arguments and so forth . /// that refer to bound region parameters are modified to refer to free . /// region parameters. . ReFree(FreeRegion), . . /// Static data that has an "infinite" lifetime. Top in the region lattice. . ReStatic, -- line 1465 ---------------------------------------- -- line 1478 ---------------------------------------- . /// regions visible from `U`, but not less than regions not visible . /// from `U`. . ReEmpty(ty::UniverseIndex), . . /// Erased region, used by trait selection, in MIR and during codegen. . ReErased, . } . 
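
Illustrative only, not taken from the profiled source: FnSig above packs the argument types and the return type into a single inputs_and_output list, with the return type stored last. A standalone sketch of the same slicing over plain strings; ToyFnSig is an invented name.

    // Inputs and output packed into one slice; the final element is the
    // return type, mirroring FnSig::inputs()/output() above.
    struct ToyFnSig<'a> {
        inputs_and_output: &'a [&'a str],
    }

    impl<'a> ToyFnSig<'a> {
        fn inputs(&self) -> &'a [&'a str] {
            &self.inputs_and_output[..self.inputs_and_output.len() - 1]
        }

        fn output(&self) -> &'a str {
            self.inputs_and_output[self.inputs_and_output.len() - 1]
        }
    }

    fn main() {
        let sig = ToyFnSig { inputs_and_output: &["u32", "bool", "String"] };
        assert_eq!(sig.inputs(), &["u32", "bool"][..]);
        assert_eq!(sig.output(), "String");
    }
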
29,336 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable, Debug, PartialOrd, Ord)] . pub struct EarlyBoundRegion { 194,367 ( 0.00%) pub def_id: DefId, 184,692 ( 0.00%) pub index: u32, 175,546 ( 0.00%) pub name: Symbol, . } . . /// A **`const`** **v**ariable **ID**. . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] . pub struct ConstVid<'tcx> { 5,403 ( 0.00%) pub index: u32, 569 ( 0.00%) pub phantom: PhantomData<&'tcx ()>, . } . . rustc_index::newtype_index! { . /// A **region** (lifetime) **v**ariable **ID**. . pub struct RegionVid { . DEBUG_FORMAT = custom, . } . } -- line 1505 ---------------------------------------- -- line 1513 ---------------------------------------- . rustc_index::newtype_index! { . pub struct BoundVar { .. } . } . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub struct BoundTy { . pub var: BoundVar, 10,641 ( 0.00%) pub kind: BoundTyKind, . } . 181,020 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub enum BoundTyKind { . Anon, . Param(Symbol), . } . . impl From for BoundTy { . fn from(var: BoundVar) -> Self { -- line 1532 ---------------------------------------- -- line 1616 ---------------------------------------- . RegionKind::RePlaceholder(placeholder) => placeholder.name.is_named(), . RegionKind::ReEmpty(_) => false, . RegionKind::ReErased => false, . } . } . . #[inline] . pub fn is_late_bound(&self) -> bool { 13,402 ( 0.00%) matches!(*self, ty::ReLateBound(..)) . } . . #[inline] . pub fn is_placeholder(&self) -> bool { . matches!(*self, ty::RePlaceholder(..)) . } . . #[inline] . pub fn bound_at_or_above_binder(&self, index: ty::DebruijnIndex) -> bool { 297,789 ( 0.00%) match *self { . ty::ReLateBound(debruijn, _) => debruijn >= index, . _ => false, . } . } . . pub fn type_flags(&self) -> TypeFlags { . let mut flags = TypeFlags::empty(); . 535,803 ( 0.01%) match *self { . ty::ReVar(..) => { . flags = flags | TypeFlags::HAS_FREE_REGIONS; . flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; . flags = flags | TypeFlags::HAS_RE_INFER; . } . ty::RePlaceholder(..) => { . flags = flags | TypeFlags::HAS_FREE_REGIONS; . flags = flags | TypeFlags::HAS_FREE_LOCAL_REGIONS; -- line 1651 ---------------------------------------- -- line 1669 ---------------------------------------- . ty::ReErased => { . flags = flags | TypeFlags::HAS_RE_ERASED; . } . } . . debug!("type_flags({:?}) = {:?}", self, flags); . . flags 29,514 ( 0.00%) } . . /// Given an early-bound or free region, returns the `DefId` where it was bound. . /// For example, consider the regions in this snippet of code: . /// . /// ``` . /// impl<'a> Foo { . /// ^^ -- early bound, declared on an impl . /// -- line 1685 ---------------------------------------- -- line 1713 ---------------------------------------- . . #[inline(always)] . pub fn flags(&self) -> TypeFlags { . self.flags . } . . #[inline] . pub fn is_unit(&self) -> bool { 6,964 ( 0.00%) match self.kind() { 1,386 ( 0.00%) Tuple(ref tys) => tys.is_empty(), . _ => false, . } . } . . #[inline] . pub fn is_never(&self) -> bool { 120,440 ( 0.00%) matches!(self.kind(), Never) . } . . #[inline] . pub fn is_primitive(&self) -> bool { . self.kind().is_primitive() . } . . #[inline] -- line 1737 ---------------------------------------- -- line 1741 ---------------------------------------- . . #[inline] . pub fn is_ref(&self) -> bool { . 
matches!(self.kind(), Ref(..)) . } . . #[inline] . pub fn is_ty_var(&self) -> bool { 156,195 ( 0.00%) matches!(self.kind(), Infer(TyVar(_))) . } . . #[inline] . pub fn ty_vid(&self) -> Option { 137,754 ( 0.00%) match self.kind() { 30,104 ( 0.00%) &Infer(TyVar(vid)) => Some(vid), . _ => None, . } . } . . #[inline] . pub fn is_ty_infer(&self) -> bool { 135 ( 0.00%) matches!(self.kind(), Infer(_)) . } . . #[inline] . pub fn is_phantom_data(&self) -> bool { . if let Adt(def, _) = self.kind() { def.is_phantom_data() } else { false } . } . . #[inline] . pub fn is_bool(&self) -> bool { 24 ( 0.00%) *self.kind() == Bool . } . . /// Returns `true` if this type is a `str`. . #[inline] . pub fn is_str(&self) -> bool { . *self.kind() == Str . } . . #[inline] . pub fn is_param(&self, index: u32) -> bool { 50 ( 0.00%) match self.kind() { . ty::Param(ref data) => data.index == index, . _ => false, . } . } . . #[inline] . pub fn is_slice(&self) -> bool { . match self.kind() { . RawPtr(TypeAndMut { ty, .. }) | Ref(_, ty, _) => matches!(ty.kind(), Slice(_) | Str), . _ => false, . } . } . . #[inline] . pub fn is_array(&self) -> bool { 165 ( 0.00%) matches!(self.kind(), Array(..)) . } . . #[inline] . pub fn is_simd(&self) -> bool { . match self.kind() { . Adt(def, _) => def.repr.simd(), . _ => false, . } . } . 190 ( 0.00%) pub fn sequence_element_type(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 380 ( 0.00%) match self.kind() { . Array(ty, _) | Slice(ty) => ty, . Str => tcx.mk_mach_uint(ty::UintTy::U8), . _ => bug!("`sequence_element_type` called on non-sequence value: {}", self), . } 190 ( 0.00%) } . . pub fn simd_size_and_type(&self, tcx: TyCtxt<'tcx>) -> (u64, Ty<'tcx>) { . match self.kind() { . Adt(def, substs) => { . assert!(def.repr.simd(), "`simd_size_and_type` called on non-SIMD type"); . let variant = def.non_enum_variant(); . let f0_ty = variant.fields[0].ty(tcx, substs); . -- line 1824 ---------------------------------------- -- line 1838 ---------------------------------------- . } . } . _ => bug!("`simd_size_and_type` called on invalid type"), . } . } . . #[inline] . pub fn is_region_ptr(&self) -> bool { 96 ( 0.00%) matches!(self.kind(), Ref(..)) . } . . #[inline] . pub fn is_mutable_ptr(&self) -> bool { . matches!( . self.kind(), . RawPtr(TypeAndMut { mutbl: hir::Mutability::Mut, .. }) . | Ref(_, _, hir::Mutability::Mut) -- line 1854 ---------------------------------------- -- line 1861 ---------------------------------------- . match self.kind() { . Ref(_, _, mutability) => Some(*mutability), . _ => None, . } . } . . #[inline] . pub fn is_unsafe_ptr(&self) -> bool { 8,356 ( 0.00%) matches!(self.kind(), RawPtr(_)) . } . . /// Tests if this is any kind of primitive pointer type (reference, raw pointer, fn pointer). . #[inline] . pub fn is_any_ptr(&self) -> bool { . self.is_region_ptr() || self.is_unsafe_ptr() || self.is_fn_ptr() . } . . #[inline] . pub fn is_box(&self) -> bool { 155,596 ( 0.00%) match self.kind() { 43,045 ( 0.00%) Adt(def, _) => def.is_box(), . _ => false, . } . } . . /// Panics if called on any type other than `Box`. 682 ( 0.00%) pub fn boxed_ty(&self) -> Ty<'tcx> { 682 ( 0.00%) match self.kind() { 1,705 ( 0.00%) Adt(def, substs) if def.is_box() => substs.type_at(0), . _ => bug!("`boxed_ty` is called on non-box type {:?}", self), . } 682 ( 0.00%) } . . /// A scalar type is one that denotes an atomic datum, with no sub-components. . /// (A RawPtr is scalar because it represents a non-managed pointer, so its . /// contents are abstract to rustc.) . #[inline] . 
pub fn is_scalar(&self) -> bool { 1,413 ( 0.00%) matches!( 3,165 ( 0.00%) self.kind(), . Bool | Char . | Int(_) . | Float(_) . | Uint(_) . | FnDef(..) . | FnPtr(_) . | RawPtr(_) . | Infer(IntVar(_) | FloatVar(_)) . ) . } . . /// Returns `true` if this type is a floating point type. . #[inline] . pub fn is_floating_point(&self) -> bool { 1 ( 0.00%) matches!(self.kind(), Float(_) | Infer(FloatVar(_))) . } . . #[inline] . pub fn is_trait(&self) -> bool { . matches!(self.kind(), Dynamic(..)) . } . . #[inline] . pub fn is_enum(&self) -> bool { . matches!(self.kind(), Adt(adt_def, _) if adt_def.is_enum()) . } . . #[inline] . pub fn is_union(&self) -> bool { 32,875 ( 0.00%) matches!(self.kind(), Adt(adt_def, _) if adt_def.is_union()) . } . . #[inline] . pub fn is_closure(&self) -> bool { 6,392 ( 0.00%) matches!(self.kind(), Closure(..)) . } . . #[inline] . pub fn is_generator(&self) -> bool { 9,492 ( 0.00%) matches!(self.kind(), Generator(..)) . } . . #[inline] . pub fn is_integral(&self) -> bool { 16,857 ( 0.00%) matches!(self.kind(), Infer(IntVar(_)) | Int(_) | Uint(_)) . } . . #[inline] . pub fn is_fresh_ty(&self) -> bool { . matches!(self.kind(), Infer(FreshTy(_))) . } . . #[inline] . pub fn is_fresh(&self) -> bool { 3,389 ( 0.00%) matches!(self.kind(), Infer(FreshTy(_) | FreshIntTy(_) | FreshFloatTy(_))) . } . . #[inline] . pub fn is_char(&self) -> bool { . matches!(self.kind(), Char) . } . . #[inline] -- line 1963 ---------------------------------------- -- line 1967 ---------------------------------------- . . #[inline] . pub fn is_signed(&self) -> bool { . matches!(self.kind(), Int(_)) . } . . #[inline] . pub fn is_ptr_sized_integral(&self) -> bool { 6,210 ( 0.00%) matches!(self.kind(), Int(ty::IntTy::Isize) | Uint(ty::UintTy::Usize)) . } . . #[inline] . pub fn has_concrete_skeleton(&self) -> bool { . !matches!(self.kind(), Param(_) | Infer(_) | Error(_)) . } . . /// Returns the type and mutability of `*ty`. . /// . /// The parameter `explicit` indicates if this is an *explicit* dereference. . /// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly. 27,037 ( 0.00%) pub fn builtin_deref(&self, explicit: bool) -> Option> { 159,193 ( 0.00%) match self.kind() { 1,192 ( 0.00%) Adt(def, _) if def.is_box() => { 921 ( 0.00%) Some(TypeAndMut { ty: self.boxed_ty(), mutbl: hir::Mutability::Not }) . } 103,066 ( 0.00%) Ref(_, ty, mutbl) => Some(TypeAndMut { ty, mutbl: *mutbl }), 20 ( 0.00%) RawPtr(mt) if explicit => Some(*mt), . _ => None, . } 54,074 ( 0.00%) } . . /// Returns the type of `ty[i]`. . pub fn builtin_index(&self) -> Option> { 282 ( 0.00%) match self.kind() { . Array(ty, _) | Slice(ty) => Some(ty), . _ => None, . } 66 ( 0.00%) } . 134,416 ( 0.00%) pub fn fn_sig(&self, tcx: TyCtxt<'tcx>) -> PolyFnSig<'tcx> { 67,208 ( 0.00%) match self.kind() { 50,394 ( 0.00%) FnDef(def_id, substs) => tcx.fn_sig(*def_id).subst(tcx, substs), 20 ( 0.00%) FnPtr(f) => *f, . Error(_) => { . // ignore errors (#54954) . ty::Binder::dummy(FnSig::fake()) . } . Closure(..) => bug!( . "to get the signature of a closure, use `substs.as_closure().sig()` not `fn_sig()`", . ), . _ => bug!("Ty::fn_sig() called on non-fn type: {:?}", self), . } 151,218 ( 0.00%) } . . #[inline] . pub fn is_fn(&self) -> bool { 750 ( 0.00%) matches!(self.kind(), FnDef(..) | FnPtr(_)) . } . . #[inline] . pub fn is_fn_ptr(&self) -> bool { . matches!(self.kind(), FnPtr(_)) . } . . #[inline] . pub fn is_impl_trait(&self) -> bool { . matches!(self.kind(), Opaque(..)) . } . . #[inline] . 
pub fn ty_adt_def(&self) -> Option<&'tcx AdtDef> { 10,909 ( 0.00%) match self.kind() { . Adt(adt, _) => Some(adt), . _ => None, . } . } . . /// Iterates over tuple fields. . /// Panics when called on anything but a tuple. 169 ( 0.00%) pub fn tuple_fields(&self) -> impl DoubleEndedIterator> { 2,796 ( 0.00%) match self.kind() { 1,622 ( 0.00%) Tuple(substs) => substs.iter().map(|field| field.expect_ty()), . _ => bug!("tuple_fields called on non-tuple"), . } 338 ( 0.00%) } . . /// Get the `i`-th element of a tuple. . /// Panics when called on anything but a tuple. 104 ( 0.00%) pub fn tuple_element_ty(&self, i: usize) -> Option> { 208 ( 0.00%) match self.kind() { 208 ( 0.00%) Tuple(substs) => substs.iter().nth(i).map(|field| field.expect_ty()), . _ => bug!("tuple_fields called on non-tuple"), . } 208 ( 0.00%) } . . /// If the type contains variants, returns the valid range of variant indices. . // . // FIXME: This requires the optimized MIR in the case of generators. . #[inline] . pub fn variant_range(&self, tcx: TyCtxt<'tcx>) -> Option> { . match self.kind() { . TyKind::Adt(adt, _) => Some(adt.variant_range()), -- line 2068 ---------------------------------------- -- line 2095 ---------------------------------------- . TyKind::Generator(def_id, substs, _) => { . Some(substs.as_generator().discriminant_for_variant(*def_id, tcx, variant_index)) . } . _ => None, . } . } . . /// Returns the type of the discriminant of this type. 26,540 ( 0.00%) pub fn discriminant_ty(&'tcx self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 13,270 ( 0.00%) match self.kind() { 21,232 ( 0.00%) ty::Adt(adt, _) if adt.is_enum() => adt.repr.discr_type().to_ty(tcx), . ty::Generator(_, substs, _) => substs.as_generator().discr_ty(tcx), . . ty::Param(_) | ty::Projection(_) | ty::Opaque(..) | ty::Infer(ty::TyVar(_)) => { . let assoc_items = tcx.associated_item_def_ids( . tcx.require_lang_item(hir::LangItem::DiscriminantKind, None), . ); . tcx.mk_projection(assoc_items[0], tcx.intern_substs(&[self.into()])) . } -- line 2113 ---------------------------------------- -- line 2135 ---------------------------------------- . | ty::Infer(IntVar(_) | FloatVar(_)) => tcx.types.u8, . . ty::Bound(..) . | ty::Placeholder(_) . | ty::Infer(FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`discriminant_ty` applied to unexpected type: {:?}", self) . } . } 21,232 ( 0.00%) } . . /// Returns the type of metadata for (potentially fat) pointers to this type. . pub fn ptr_metadata_ty( . &'tcx self, . tcx: TyCtxt<'tcx>, . normalize: impl FnMut(Ty<'tcx>) -> Ty<'tcx>, . ) -> Ty<'tcx> { . let tail = tcx.struct_tail_with_normalize(self, normalize); -- line 2151 ---------------------------------------- -- line 2200 ---------------------------------------- . /// function converts such a special type into the closure . /// kind. To go the other way, use . /// `tcx.closure_kind_ty(closure_kind)`. . /// . /// Note that during type checking, we use an inference variable . /// to represent the closure kind, because it has not yet been . /// inferred. Once upvar inference (in `rustc_typeck/src/check/upvar.rs`) . /// is complete, that type variable will be unified. 1,092 ( 0.00%) pub fn to_opt_closure_kind(&self) -> Option { 2,184 ( 0.00%) match self.kind() { 2,152 ( 0.00%) Int(int_ty) => match int_ty { . ty::IntTy::I8 => Some(ty::ClosureKind::Fn), . ty::IntTy::I16 => Some(ty::ClosureKind::FnMut), . ty::IntTy::I32 => Some(ty::ClosureKind::FnOnce), . _ => bug!("cannot convert type `{:?}` to a closure kind", self), . }, . . 
// "Bound" types appear in canonical queries when the . // closure type is not yet known . Bound(..) | Infer(_) => None, . . Error(_) => Some(ty::ClosureKind::Fn), . . _ => bug!("cannot convert type `{:?}` to a closure kind", self), . } 1,092 ( 0.00%) } . . /// Fast path helper for testing if a type is `Sized`. . /// . /// Returning true means the type is known to be sized. Returning . /// `false` means nothing -- could be sized, might not be. . /// . /// Note that we could never rely on the fact that a type such as `[_]` is . /// trivially `!Sized` because we could be in a type environment with a . /// bound such as `[_]: Copy`. A function with such a bound obviously never . /// can be called, but that doesn't mean it shouldn't typecheck. This is why . /// this method doesn't return `Option`. 465,516 ( 0.01%) pub fn is_trivially_sized(&self, tcx: TyCtxt<'tcx>) -> bool { 310,344 ( 0.01%) match self.kind() { . ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) . | ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::RawPtr(..) -- line 2246 ---------------------------------------- -- line 2250 ---------------------------------------- . | ty::GeneratorWitness(..) . | ty::Array(..) . | ty::Closure(..) . | ty::Never . | ty::Error(_) => true, . . ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false, . 5,159 ( 0.00%) ty::Tuple(tys) => tys.iter().all(|ty| ty.expect_ty().is_trivially_sized(tcx)), . 50,427 ( 0.00%) ty::Adt(def, _substs) => def.sized_constraint(tcx).is_empty(), . . ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => false, . . ty::Infer(ty::TyVar(_)) => false, . . ty::Bound(..) . | ty::Placeholder(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("`is_trivially_sized` applied to unexpected type: {:?}", self) . } . } 465,516 ( 0.01%) } . } . . /// Extra information about why we ended up with a particular variance. . /// This is only used to add more information to error messages, and . /// has no effect on soundness. While choosing the 'wrong' `VarianceDiagInfo` . /// may lead to confusing notes in error messages, it will never cause . /// a miscompilation or unsoundness. . /// . /// When in doubt, use `VarianceDiagInfo::default()` 135 ( 0.00%) #[derive(Copy, Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)] . pub enum VarianceDiagInfo<'tcx> { . /// No additional information - this is the default. . /// We will not add any additional information to error messages. . #[default] . None, . /// We switched our variance because a generic argument occurs inside . /// the invariant generic argument of another type. . Invariant { -- line 2290 ---------------------------------------- -- line 2295 ---------------------------------------- . /// (e.g. `0` for `*mut T`, `1` for `MyStruct<'CovariantParam, 'InvariantParam>`) . param_index: u32, . }, . } . . impl<'tcx> VarianceDiagInfo<'tcx> { . /// Mirrors `Variance::xform` - used to 'combine' the existing . /// and new `VarianceDiagInfo`s when our variance changes. 121,946 ( 0.00%) pub fn xform(self, other: VarianceDiagInfo<'tcx>) -> VarianceDiagInfo<'tcx> { . // For now, just use the first `VarianceDiagInfo::Invariant` that we see 182,919 ( 0.00%) match self { . VarianceDiagInfo::None => other, . VarianceDiagInfo::Invariant { .. } => self, . } 60,973 ( 0.00%) } . 
} 34,636,756 ( 0.57%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cmp.rs -------------------------------------------------------------------------------- Ir -- line 223 ---------------------------------------- . fn eq(&self, other: &Rhs) -> bool; . . /// This method tests for `!=`. . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn ne(&self, other: &Rhs) -> bool { 56,859 ( 0.00%) !self.eq(other) . } . } . . /// Derive macro generating an impl of the trait `PartialEq`. . #[rustc_builtin_macro] . #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] . #[allow_internal_unstable(core_intrinsics, structural_match)] . pub macro PartialEq($item:item) { -- line 239 ---------------------------------------- -- line 328 ---------------------------------------- . /// assert_eq!(Ordering::Less, result); . /// . /// let result = 1.cmp(&1); . /// assert_eq!(Ordering::Equal, result); . /// . /// let result = 2.cmp(&1); . /// assert_eq!(Ordering::Greater, result); . /// ``` 5,122 ( 0.00%) #[derive(Clone, Copy, PartialEq, Debug, Hash)] . #[stable(feature = "rust1", since = "1.0.0")] . #[repr(i8)] . pub enum Ordering { . /// An ordering where a compared value is less than another. . #[stable(feature = "rust1", since = "1.0.0")] . Less = -1, . /// An ordering where a compared value is equal to another. . #[stable(feature = "rust1", since = "1.0.0")] -- line 344 ---------------------------------------- -- line 569 ---------------------------------------- . /// let result = x.0.cmp(&y.0).then_with(|| x.1.cmp(&y.1)).then_with(|| x.2.cmp(&y.2)); . /// . /// assert_eq!(result, Ordering::Less); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "ordering_chaining", since = "1.17.0")] . pub fn then_with Ordering>(self, f: F) -> Ordering { 5,207 ( 0.00%) match self { . Equal => f(), . _ => self, . } . } . } . . /// A helper struct for reverse ordering. . /// -- line 585 ---------------------------------------- -- line 792 ---------------------------------------- . /// ``` . #[stable(feature = "ord_max_min", since = "1.21.0")] . #[inline] . #[must_use] . fn max(self, other: Self) -> Self . where . Self: Sized, . { 362,770 ( 0.01%) max_by(self, other, Ord::cmp) . } . . /// Compares and returns the minimum of two values. . /// . /// Returns the first argument if the comparison determines them to be equal. . /// . /// # Examples . /// -- line 808 ---------------------------------------- -- line 812 ---------------------------------------- . /// ``` . #[stable(feature = "ord_max_min", since = "1.21.0")] . #[inline] . #[must_use] . fn min(self, other: Self) -> Self . where . Self: Sized, . { 67,242 ( 0.00%) min_by(self, other, Ord::cmp) . } . . /// Restrict a value to a certain interval. . /// . /// Returns `max` if `self` is greater than `max`, and `min` if `self` is . /// less than `min`. Otherwise this returns `self`. . /// . /// # Panics -- line 828 ---------------------------------------- -- line 1097 ---------------------------------------- . /// let result = 2.0 < 1.0; . /// assert_eq!(result, false); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn lt(&self, other: &Rhs) -> bool { 127,590 ( 0.00%) matches!(self.partial_cmp(other), Some(Less)) . } . . /// This method tests less than or equal to (for `self` and `other`) and is used by the `<=` . 
/// operator. . /// . /// # Examples . /// . /// ``` -- line 1113 ---------------------------------------- -- line 1121 ---------------------------------------- . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn le(&self, other: &Rhs) -> bool { . // Pattern `Some(Less | Eq)` optimizes worse than negating `None | Some(Greater)`. . // FIXME: The root cause was fixed upstream in LLVM with: . // https://github.com/llvm/llvm-project/commit/9bad7de9a3fb844f1ca2965f35d0c2a3d1e11775 . // Revert this workaround once support for LLVM 12 gets dropped. 1,182,149 ( 0.02%) !matches!(self.partial_cmp(other), None | Some(Greater)) . } . . /// This method tests greater than (for `self` and `other`) and is used by the `>` operator. . /// . /// # Examples . /// . /// ``` . /// let result = 1.0 > 2.0; -- line 1137 ---------------------------------------- -- line 1140 ---------------------------------------- . /// let result = 2.0 > 2.0; . /// assert_eq!(result, false); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn gt(&self, other: &Rhs) -> bool { 2,172,172 ( 0.04%) matches!(self.partial_cmp(other), Some(Greater)) . } . . /// This method tests greater than or equal to (for `self` and `other`) and is used by the `>=` . /// operator. . /// . /// # Examples . /// . /// ``` -- line 1156 ---------------------------------------- -- line 1160 ---------------------------------------- . /// let result = 2.0 >= 2.0; . /// assert_eq!(result, true); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[default_method_body_is_const] . fn ge(&self, other: &Rhs) -> bool { 265 ( 0.00%) matches!(self.partial_cmp(other), Some(Greater | Equal)) . } . } . . /// Derive macro generating an impl of the trait `PartialOrd`. . #[rustc_builtin_macro] . #[stable(feature = "builtin_macro_prelude", since = "1.38.0")] . #[allow_internal_unstable(core_intrinsics)] . pub macro PartialOrd($item:item) { -- line 1176 ---------------------------------------- -- line 1210 ---------------------------------------- . /// . /// assert_eq!(cmp::min_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 1); . /// assert_eq!(cmp::min_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "cmp_min_max_by", since = "1.53.0")] . pub fn min_by Ordering>(v1: T, v2: T, compare: F) -> T { 533,797 ( 0.01%) match compare(&v1, &v2) { . Ordering::Less | Ordering::Equal => v1, . Ordering::Greater => v2, . } 4 ( 0.00%) } . . /// Returns the element that gives the minimum value from the specified function. . /// . /// Returns the first argument if the comparison determines them to be equal. . /// . /// # Examples . /// . /// ``` -- line 1230 ---------------------------------------- -- line 1231 ---------------------------------------- . /// use std::cmp; . /// . /// assert_eq!(cmp::min_by_key(-2, 1, |x: &i32| x.abs()), 1); . /// assert_eq!(cmp::min_by_key(-2, 2, |x: &i32| x.abs()), -2); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "cmp_min_max_by", since = "1.53.0")] 24 ( 0.00%) pub fn min_by_key K, K: Ord>(v1: T, v2: T, mut f: F) -> T { 16 ( 0.00%) min_by(v1, v2, |v1, v2| f(v1).cmp(&f(v2))) 16 ( 0.00%) } . . /// Compares and returns the maximum of two values. . /// . /// Returns the second argument if the comparison determines them to be equal. . /// . /// Internally uses an alias to [`Ord::max`]. . /// . 
/// # Examples -- line 1249 ---------------------------------------- -- line 1273 ---------------------------------------- . /// . /// assert_eq!(cmp::max_by(-2, 1, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), -2); . /// assert_eq!(cmp::max_by(-2, 2, |x: &i32, y: &i32| x.abs().cmp(&y.abs())), 2); . /// ``` . #[inline] . #[must_use] . #[stable(feature = "cmp_min_max_by", since = "1.53.0")] . pub fn max_by Ordering>(v1: T, v2: T, compare: F) -> T { 1,224,895 ( 0.02%) match compare(&v1, &v2) { 11,450 ( 0.00%) Ordering::Less | Ordering::Equal => v2, . Ordering::Greater => v1, . } . } . . /// Returns the element that gives the maximum value from the specified function. . /// . /// Returns the second argument if the comparison determines them to be equal. . /// -- line 1290 ---------------------------------------- -- line 1308 ---------------------------------------- . use crate::cmp::Ordering::{self, Equal, Greater, Less}; . use crate::hint::unreachable_unchecked; . . macro_rules! partial_eq_impl { . ($($t:ty)*) => ($( . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq for $t { . #[inline] 25,866 ( 0.00%) fn eq(&self, other: &$t) -> bool { (*self) == (*other) } . #[inline] 68,308 ( 0.00%) fn ne(&self, other: &$t) -> bool { (*self) != (*other) } . } . )*) . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq for () { . #[inline] . fn eq(&self, _other: &()) -> bool { -- line 1326 ---------------------------------------- -- line 1392 ---------------------------------------- . ($($t:ty)*) => ($( . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialOrd for $t { . #[inline] . fn partial_cmp(&self, other: &$t) -> Option { . Some(self.cmp(other)) . } . #[inline] 15,214,605 ( 0.25%) fn lt(&self, other: &$t) -> bool { (*self) < (*other) } . #[inline] 157,908 ( 0.00%) fn le(&self, other: &$t) -> bool { (*self) <= (*other) } . #[inline] 12 ( 0.00%) fn ge(&self, other: &$t) -> bool { (*self) >= (*other) } . #[inline] . fn gt(&self, other: &$t) -> bool { (*self) > (*other) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl Ord for $t { . #[inline] . fn cmp(&self, other: &$t) -> Ordering { . // The order here is important to generate more optimal assembly. . // See for more info. 13,634,622 ( 0.23%) if *self < *other { Less } . else if *self == *other { Equal } . else { Greater } . } . } . )*) . } . . #[stable(feature = "rust1", since = "1.0.0")] -- line 1423 ---------------------------------------- -- line 1430 ---------------------------------------- . . #[stable(feature = "rust1", since = "1.0.0")] . impl Ord for bool { . #[inline] . fn cmp(&self, other: &bool) -> Ordering { . // Casting to i8's and converting the difference to an Ordering generates . // more optimal assembly. . // See for more info. 811 ( 0.00%) match (*self as i8) - (*other as i8) { . -1 => Less, . 0 => Equal, . 1 => Greater, . // SAFETY: bool as i8 returns 0 or 1, so the difference can't be anything else . _ => unsafe { unreachable_unchecked() }, . } . } . } -- line 1446 ---------------------------------------- -- line 1474 ---------------------------------------- . // & pointers . . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq<&B> for &A . where . A: PartialEq, . { . #[inline] 5,400 ( 0.00%) fn eq(&self, other: &&B) -> bool { 2,416,923 ( 0.04%) PartialEq::eq(*self, *other) 311,947 ( 0.01%) } . #[inline] . fn ne(&self, other: &&B) -> bool { 57 ( 0.00%) PartialEq::ne(*self, *other) 109,749 ( 0.00%) } . } . #[stable(feature = "rust1", since = "1.0.0")] . 
impl PartialOrd<&B> for &A . where . A: PartialOrd, . { . #[inline] . fn partial_cmp(&self, other: &&B) -> Option { 4 ( 0.00%) PartialOrd::partial_cmp(*self, *other) . } . #[inline] . fn lt(&self, other: &&B) -> bool { . PartialOrd::lt(*self, *other) . } . #[inline] . fn le(&self, other: &&B) -> bool { . PartialOrd::le(*self, *other) -- line 1505 ---------------------------------------- -- line 1516 ---------------------------------------- . #[stable(feature = "rust1", since = "1.0.0")] . impl Ord for &A . where . A: Ord, . { . #[inline] . fn cmp(&self, other: &Self) -> Ordering { . Ord::cmp(*self, *other) 147,112 ( 0.00%) } . } . #[stable(feature = "rust1", since = "1.0.0")] . impl Eq for &A where A: Eq {} . . // &mut pointers . . #[stable(feature = "rust1", since = "1.0.0")] . impl PartialEq<&mut B> for &mut A -- line 1532 ---------------------------------------- 4,522,687 ( 0.08%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ops/bit.rs -------------------------------------------------------------------------------- Ir -- line 267 ---------------------------------------- . macro_rules! bitor_impl { . ($($t:ty)*) => ($( . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_ops", issue = "90080")] . impl const BitOr for $t { . type Output = $t; . . #[inline] 109,926 ( 0.00%) fn bitor(self, rhs: $t) -> $t { self | rhs } . } . . forward_ref_binop! { impl const BitOr, bitor for $t, $t } . )*) . } . . bitor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } . -- line 283 ---------------------------------------- -- line 368 ---------------------------------------- . macro_rules! bitxor_impl { . ($($t:ty)*) => ($( . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_ops", issue = "90080")] . impl const BitXor for $t { . type Output = $t; . . #[inline] 10,420,456 ( 0.17%) fn bitxor(self, other: $t) -> $t { self ^ other } . } . . forward_ref_binop! { impl const BitXor, bitxor for $t, $t } . )*) . } . . bitxor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 } . -- line 384 ---------------------------------------- -- line 468 ---------------------------------------- . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_ops", issue = "90080")] . impl const Shl<$f> for $t { . type Output = $t; . . #[inline] . #[rustc_inherit_overflow_checks] . fn shl(self, other: $f) -> $t { 36 ( 0.00%) self << other . } . } . . forward_ref_binop! { impl const Shl, shl for $t, $f } . }; . } . . macro_rules! shl_impl_all { -- line 484 ---------------------------------------- 2,874,659 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/intrinsics.rs -------------------------------------------------------------------------------- Ir -- line 2075 ---------------------------------------- . // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached. . // Therefore, compiletime_check and runtime_check are observably equivalent. . unsafe { . const_eval_select((src, dst, count), compiletime_check, runtime_check); . } . . // SAFETY: the safety contract for `copy_nonoverlapping` must be . // upheld by the caller. 46,519,547 ( 0.77%) unsafe { copy_nonoverlapping(src, dst, count) } . } . . 
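
A small, self-contained demonstration (not from the profiled source) of the non-overlap contract documented for the copy_nonoverlapping wrapper above, copying between two distinct fixed-size buffers through the stable std::ptr API.

    fn main() {
        let src = [1u8, 2, 3, 4];
        let mut dst = [0u8; 4];

        // SAFETY: both pointers are valid for 4 elements and the buffers
        // are separate local arrays, so they cannot overlap, which is the
        // precondition copy_nonoverlapping requires.
        unsafe {
            std::ptr::copy_nonoverlapping(src.as_ptr(), dst.as_mut_ptr(), src.len());
        }
        assert_eq!(dst, src);
    }
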
/// Copies `count * size_of::()` bytes from `src` to `dst`. The source . /// and destination may overlap. . /// . /// If the source and destination will *never* overlap, . /// [`copy_nonoverlapping`] can be used instead. . /// -- line 2091 ---------------------------------------- -- line 2165 ---------------------------------------- . #[cfg(debug_assertions)] . // SAFETY: As per our safety precondition, we may assume that the `abort` above is never reached. . // Therefore, compiletime_check and runtime_check are observably equivalent. . unsafe { . const_eval_select((src, dst), compiletime_check, runtime_check); . } . . // SAFETY: the safety contract for `copy` must be upheld by the caller. 10,333,484 ( 0.17%) unsafe { copy(src, dst, count) } . } . . /// Sets `count * size_of::()` bytes of memory starting at `dst` to . /// `val`. . /// . /// `write_bytes` is similar to C's [`memset`], but sets `count * . /// size_of::()` bytes to `val`. . /// -- line 2181 ---------------------------------------- -- line 2266 ---------------------------------------- . #[cfg(debug_assertions)] . // SAFETY: runtime debug-assertions are a best-effort basis; it's fine to . // not do them during compile time . unsafe { . const_eval_select((dst,), compiletime_check, runtime_check); . } . . // SAFETY: the safety contract for `write_bytes` must be upheld by the caller. 852,771 ( 0.01%) unsafe { write_bytes(dst, val, count) } . } . . /// Selects which function to call depending on the context. . /// . /// If this function is evaluated at compile-time, then a call to this . /// intrinsic will be replaced with a call to `called_in_const`. It gets . /// replaced with a call to `called_at_rt` otherwise. . /// -- line 2282 ---------------------------------------- 234,129 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_trait_selection/src/traits/fulfill.rs -------------------------------------------------------------------------------- Ir -- line 33 ---------------------------------------- . . impl<'tcx> ForestObligation for PendingPredicateObligation<'tcx> { . /// Note that we include both the `ParamEnv` and the `Predicate`, . /// as the `ParamEnv` can influence whether fulfillment succeeds . /// or fails. . type CacheKey = ty::ParamEnvAnd<'tcx, ty::Predicate<'tcx>>; . . fn as_cache_key(&self) -> Self::CacheKey { 659,418 ( 0.01%) self.obligation.param_env.and(self.obligation.predicate) . } . } . . /// The fulfillment context is used to drive trait resolution. It . /// consists of a list of obligations that must be (eventually) . /// satisfied. The job is to track which are satisfied, which yielded . /// errors, and which are still pending. At any point, users can call . /// `select_where_possible`, and the fulfillment context will try to do -- line 49 ---------------------------------------- -- line 95 ---------------------------------------- . } . . // `PendingPredicateObligation` is used a lot. Make sure it doesn't unintentionally get bigger. . #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] . static_assert_size!(PendingPredicateObligation<'_>, 72); . . impl<'a, 'tcx> FulfillmentContext<'tcx> { . /// Creates a new fulfillment context. 8,431 ( 0.00%) pub fn new() -> FulfillmentContext<'tcx> { 109,020 ( 0.00%) FulfillmentContext { 16,160 ( 0.00%) predicates: ObligationForest::new(), . relationships: FxHashMap::default(), . register_region_obligations: true, . 
usable_in_snapshot: false, . } 8,431 ( 0.00%) } . . pub fn new_in_snapshot() -> FulfillmentContext<'tcx> { 342 ( 0.00%) FulfillmentContext { 114 ( 0.00%) predicates: ObligationForest::new(), . relationships: FxHashMap::default(), . register_region_obligations: true, . usable_in_snapshot: true, . } . } . . pub fn new_ignoring_regions() -> FulfillmentContext<'tcx> { 24,300 ( 0.00%) FulfillmentContext { 6,075 ( 0.00%) predicates: ObligationForest::new(), . relationships: FxHashMap::default(), . register_region_obligations: false, . usable_in_snapshot: false, . } . } . . /// Attempts to select obligations using `selcx`. . fn select(&mut self, selcx: &mut SelectionContext<'a, 'tcx>) -> Vec> { 140,088 ( 0.00%) let span = debug_span!("select", obligation_forest_size = ?self.predicates.len()); . let _enter = span.enter(); . . let mut errors = Vec::new(); . . loop { . debug!("select: starting another iteration"); . . // Process pending obligations. . let outcome: Outcome<_, _> = 616,574 ( 0.01%) self.predicates.process_obligations(&mut FulfillProcessor { . selcx, 88,082 ( 0.00%) register_region_obligations: self.register_region_obligations, . }); . debug!("select: outcome={:#?}", outcome); . . // FIXME: if we kept the original cache key, we could mark projection . // obligations as complete for the projection cache here. . . errors.extend(outcome.errors.into_iter().map(to_fulfillment_error)); . . // If nothing new was added, no need to keep looping. 176,164 ( 0.00%) if outcome.stalled { . break; . } . } . . debug!( . "select({} predicates remaining, {} errors) done", . self.predicates.len(), . errors.len() -- line 162 ---------------------------------------- -- line 169 ---------------------------------------- . impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> { . /// "Normalize" a projection type `::X` by . /// creating a fresh type variable `$0` as well as a projection . /// predicate `::X == $0`. When the . /// inference engine runs, it will attempt to find an impl of . /// `SomeTrait` or a where-clause that lets us unify `$0` with . /// something concrete. If this fails, we'll unify `$0` with . /// `projection_ty` again. 2,508 ( 0.00%) #[tracing::instrument(level = "debug", skip(self, infcx, param_env, cause))] . fn normalize_projection_type( . &mut self, . infcx: &InferCtxt<'_, 'tcx>, . param_env: ty::ParamEnv<'tcx>, . projection_ty: ty::ProjectionTy<'tcx>, . cause: ObligationCause<'tcx>, . ) -> Ty<'tcx> { . debug_assert!(!projection_ty.has_escaping_bound_vars()); . . // FIXME(#20304) -- cache . . let mut selcx = SelectionContext::new(infcx); . let mut obligations = vec![]; 1,026 ( 0.00%) let normalized_ty = project::normalize_projection_type( . &mut selcx, . param_env, . projection_ty, 570 ( 0.00%) cause, . 0, . &mut obligations, . ); . self.register_predicate_obligations(infcx, obligations); . . debug!(?normalized_ty); . . normalized_ty . } . 426,036 ( 0.01%) fn register_predicate_obligation( . &mut self, . infcx: &InferCtxt<'_, 'tcx>, . obligation: PredicateObligation<'tcx>, . ) { . // this helps to reduce duplicate errors, as well as making . // debug output much nicer to read and so on. 213,018 ( 0.00%) let obligation = infcx.resolve_vars_if_possible(obligation); . . debug!(?obligation, "register_predicate_obligation"); . 426,036 ( 0.01%) assert!(!infcx.is_in_snapshot() || self.usable_in_snapshot); . 213,018 ( 0.00%) super::relationships::update(self, infcx, &obligation); . . self.predicates . 
.register_obligation(PendingPredicateObligation { obligation, stalled_on: vec![] }); 355,030 ( 0.01%) } . 220,832 ( 0.00%) fn select_all_or_error(&mut self, infcx: &InferCtxt<'_, 'tcx>) -> Vec> { . { 27,604 ( 0.00%) let errors = self.select_where_possible(infcx); 27,604 ( 0.00%) if !errors.is_empty() { . return errors; . } . } . 138,020 ( 0.00%) self.predicates.to_errors(CodeAmbiguity).into_iter().map(to_fulfillment_error).collect() 193,228 ( 0.00%) } . 700,440 ( 0.01%) fn select_where_possible( . &mut self, . infcx: &InferCtxt<'_, 'tcx>, . ) -> Vec> { . let mut selcx = SelectionContext::new(infcx); . self.select(&mut selcx) 630,396 ( 0.01%) } . 561 ( 0.00%) fn pending_obligations(&self) -> Vec> { . self.predicates.map_pending_obligations(|o| o.obligation.clone()) 748 ( 0.00%) } . . fn relationships(&mut self) -> &mut FxHashMap { 97 ( 0.00%) &mut self.relationships 97 ( 0.00%) } . } . . struct FulfillProcessor<'a, 'b, 'tcx> { . selcx: &'a mut SelectionContext<'b, 'tcx>, . register_region_obligations: bool, . } . . fn mk_pending(os: Vec>) -> Vec> { -- line 258 ---------------------------------------- -- line 275 ---------------------------------------- . #[inline(always)] . fn process_obligation( . &mut self, . pending_obligation: &mut Self::Obligation, . ) -> ProcessResult { . // If we were stalled on some unresolved variables, first check whether . // any of them have been resolved; if not, don't bother doing more work . // yet. 56,208,273 ( 0.93%) let change = match pending_obligation.stalled_on.len() { . // Match arms are in order of frequency, which matters because this . // code is so hot. 1 and 0 dominate; 2+ is fairly rare. . 1 => { 27,927,564 ( 0.46%) let infer_var = pending_obligation.stalled_on[0]; 9,309,188 ( 0.15%) self.selcx.infcx().ty_or_const_infer_var_changed(infer_var) . } . 0 => { . // In this case we haven't changed, but wish to make a change. . true . } . _ => { . // This `for` loop was once a call to `all()`, but this lower-level . // form was a perf win. See #64545 for details. 62,142 ( 0.00%) (|| { 120,777 ( 0.00%) for &infer_var in &pending_obligation.stalled_on { 4,977 ( 0.00%) if self.selcx.infcx().ty_or_const_infer_var_changed(infer_var) { . return true; . } . } . false . })() . } . }; . 5,255 ( 0.00%) if !change { . debug!( . "process_predicate: pending obligation {:?} still stalled on {:?}", . self.selcx.infcx().resolve_vars_if_possible(pending_obligation.obligation.clone()), . pending_obligation.stalled_on . ); . return ProcessResult::Unchanged; . } . 297,873 ( 0.00%) self.progress_changed_obligations(pending_obligation) . } . . fn process_backedge<'c, I>( . &mut self, . cycle: I, . _marker: PhantomData<&'c PendingPredicateObligation<'tcx>>, . ) where . I: Clone + Iterator>, -- line 325 ---------------------------------------- -- line 333 ---------------------------------------- . } . } . . impl<'a, 'b, 'tcx> FulfillProcessor<'a, 'b, 'tcx> { . // The code calling this method is extremely hot and only rarely . // actually uses this, so move this part of the code . // out of that loop. . #[inline(never)] 992,910 ( 0.02%) fn progress_changed_obligations( . &mut self, . pending_obligation: &mut PendingPredicateObligation<'tcx>, . ) -> ProcessResult, FulfillmentErrorCode<'tcx>> { . pending_obligation.stalled_on.truncate(0); . . let obligation = &mut pending_obligation.obligation; . 
297,873 ( 0.00%) if obligation.predicate.has_infer_types_or_consts() { 56,445 ( 0.00%) obligation.predicate = 169,335 ( 0.00%) self.selcx.infcx().resolve_vars_if_possible(obligation.predicate); . } . . debug!(?obligation, ?obligation.cause, "process_obligation"); . . let infcx = self.selcx.infcx(); . 85,692 ( 0.00%) let binder = obligation.predicate.kind(); 695,037 ( 0.01%) match binder.no_bound_vars() { 1,342 ( 0.00%) None => match binder.skip_binder() { . // Evaluation will discard candidates using the leak check. . // This means we need to pass it the bound version of our . // predicate. . ty::PredicateKind::Trait(trait_ref) => { . let trait_obligation = obligation.with(binder.rebind(trait_ref)); . 244 ( 0.00%) self.process_trait_obligation( . obligation, 732 ( 0.00%) trait_obligation, . &mut pending_obligation.stalled_on, . ) . } . ty::PredicateKind::Projection(data) => { . let project_obligation = obligation.with(binder.rebind(data)); . 244 ( 0.00%) self.process_projection_obligation( . obligation, 732 ( 0.00%) project_obligation, . &mut pending_obligation.stalled_on, . ) . } . ty::PredicateKind::RegionOutlives(_) . | ty::PredicateKind::TypeOutlives(_) . | ty::PredicateKind::WellFormed(_) . | ty::PredicateKind::ObjectSafe(_) . | ty::PredicateKind::ClosureKind(..) -- line 386 ---------------------------------------- -- line 397 ---------------------------------------- . ty::PredicateKind::TypeWellFormedFromEnv(..) => { . bug!("TypeWellFormedFromEnv is only used for Chalk") . } . }, . Some(pred) => match pred { . ty::PredicateKind::Trait(data) => { . let trait_obligation = obligation.with(Binder::dummy(data)); . 242,325 ( 0.00%) self.process_trait_obligation( . obligation, 1,066,230 ( 0.02%) trait_obligation, . &mut pending_obligation.stalled_on, . ) . } . . ty::PredicateKind::RegionOutlives(data) => { 29,436 ( 0.00%) match infcx.region_outlives_predicate(&obligation.cause, Binder::dummy(data)) { 19,624 ( 0.00%) Ok(()) => ProcessResult::Changed(vec![]), . Err(_) => ProcessResult::Error(CodeSelectionError(Unimplemented)), . } . } . . ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(t_a, r_b)) => { 17,576 ( 0.00%) if self.register_region_obligations { 43,940 ( 0.00%) self.selcx.infcx().register_region_obligation_with_cause( . t_a, . r_b, . &obligation.cause, . ); . } . ProcessResult::Changed(vec![]) . } . . ty::PredicateKind::Projection(ref data) => { . let project_obligation = obligation.with(Binder::dummy(*data)); . 37,930 ( 0.00%) self.process_projection_obligation( . obligation, 113,790 ( 0.00%) project_obligation, . &mut pending_obligation.stalled_on, . ) . } . . ty::PredicateKind::ObjectSafe(trait_def_id) => { 204 ( 0.00%) if !self.selcx.tcx().is_object_safe(trait_def_id) { . ProcessResult::Error(CodeSelectionError(Unimplemented)) . } else { . ProcessResult::Changed(vec![]) . } . } . . ty::PredicateKind::ClosureKind(_, closure_substs, kind) => { 507 ( 0.00%) match self.selcx.infcx().closure_kind(closure_substs) { . Some(closure_kind) => { 1,014 ( 0.00%) if closure_kind.extends(kind) { . ProcessResult::Changed(vec![]) . } else { . ProcessResult::Error(CodeSelectionError(Unimplemented)) . } . } . None => ProcessResult::Unchanged, . } . } . . ty::PredicateKind::WellFormed(arg) => { 186,809 ( 0.00%) match wf::obligations( . self.selcx.infcx(), 53,374 ( 0.00%) obligation.param_env, 53,374 ( 0.00%) obligation.cause.body_id, 80,061 ( 0.00%) obligation.recursion_depth + 1, . arg, . obligation.cause.span, . ) { . 
None => { 24,852 ( 0.00%) pending_obligation.stalled_on = 43,491 ( 0.00%) vec![TyOrConstInferVar::maybe_from_generic_arg(arg).unwrap()]; 12,426 ( 0.00%) ProcessResult::Unchanged . } 61,422 ( 0.00%) Some(os) => ProcessResult::Changed(mk_pending(os)), . } . } . . ty::PredicateKind::Subtype(subtype) => { 20,016 ( 0.00%) match self.selcx.infcx().subtype_predicate( . &obligation.cause, . obligation.param_env, . Binder::dummy(subtype), . ) { . None => { . // None means that both are unresolved. 12,205 ( 0.00%) pending_obligation.stalled_on = vec![ 2,441 ( 0.00%) TyOrConstInferVar::maybe_from_ty(subtype.a).unwrap(), 4,882 ( 0.00%) TyOrConstInferVar::maybe_from_ty(subtype.b).unwrap(), . ]; . ProcessResult::Unchanged . } . Some(Ok(ok)) => ProcessResult::Changed(mk_pending(ok.obligations)), . Some(Err(err)) => { . let expected_found = . ExpectedFound::new(subtype.a_is_expected, subtype.a, subtype.b); . ProcessResult::Error(FulfillmentErrorCode::CodeSubtypeError( -- line 497 ---------------------------------------- -- line 498 ---------------------------------------- . expected_found, . err, . )) . } . } . } . . ty::PredicateKind::Coerce(coerce) => { 120 ( 0.00%) match self.selcx.infcx().coerce_predicate( . &obligation.cause, . obligation.param_env, . Binder::dummy(coerce), . ) { . None => { . // None means that both are unresolved. 20 ( 0.00%) pending_obligation.stalled_on = vec![ 4 ( 0.00%) TyOrConstInferVar::maybe_from_ty(coerce.a).unwrap(), 8 ( 0.00%) TyOrConstInferVar::maybe_from_ty(coerce.b).unwrap(), . ]; . ProcessResult::Unchanged . } . Some(Ok(ok)) => ProcessResult::Changed(mk_pending(ok.obligations)), . Some(Err(err)) => { . let expected_found = ExpectedFound::new(false, coerce.a, coerce.b); . ProcessResult::Error(FulfillmentErrorCode::CodeSubtypeError( . expected_found, -- line 523 ---------------------------------------- -- line 639 ---------------------------------------- . } . } . } . ty::PredicateKind::TypeWellFormedFromEnv(..) => { . bug!("TypeWellFormedFromEnv is only used for Chalk") . } . }, . } 893,619 ( 0.01%) } . 921,994 ( 0.02%) #[instrument(level = "debug", skip(self, obligation, stalled_on))] . fn process_trait_obligation( . &mut self, . obligation: &PredicateObligation<'tcx>, . trait_obligation: TraitObligation<'tcx>, . stalled_on: &mut Vec>, . ) -> ProcessResult, FulfillmentErrorCode<'tcx>> { 48,526 ( 0.00%) let infcx = self.selcx.infcx(); 145,578 ( 0.00%) if obligation.predicate.is_global() { . // no type variables present, can use evaluation for better caching. . // FIXME: consider caching errors too. 41,084 ( 0.00%) if infcx.predicate_must_hold_considering_regions(obligation) { . debug!( . "selecting trait at depth {} evaluated to holds", . obligation.recursion_depth . ); 41,036 ( 0.00%) return ProcessResult::Changed(vec![]); . } . } . 196,056 ( 0.00%) match self.selcx.select(&trait_obligation) { . Ok(Some(impl_source)) => { . debug!("selecting trait at depth {} yielded Ok(Some)", obligation.recursion_depth); 271,224 ( 0.00%) ProcessResult::Changed(mk_pending(impl_source.nested_obligations())) . } . Ok(None) => { . debug!("selecting trait at depth {} yielded Ok(None)", obligation.recursion_depth); . . // This is a bit subtle: for the most part, the . // only reason we can fail to make progress on . // trait selection is because we don't have enough . // information about the types in the trait. . stalled_on.clear(); . stalled_on.extend(substs_infer_vars( . self.selcx, 12,940 ( 0.00%) trait_obligation.predicate.map_bound(|pred| pred.trait_ref.substs), . )); . . debug!( . 
"process_predicate: pending obligation {:?} now stalled on {:?}", . infcx.resolve_vars_if_possible(obligation.clone()), . stalled_on . ); . 12,940 ( 0.00%) ProcessResult::Unchanged . } . Err(selection_err) => { . debug!("selecting trait at depth {} yielded Err", obligation.recursion_depth); . . ProcessResult::Error(CodeSelectionError(selection_err)) . } . } . } . 84,117 ( 0.00%) fn process_projection_obligation( . &mut self, . obligation: &PredicateObligation<'tcx>, . project_obligation: PolyProjectionObligation<'tcx>, . stalled_on: &mut Vec>, . ) -> ProcessResult, FulfillmentErrorCode<'tcx>> { 7,647 ( 0.00%) let tcx = self.selcx.tcx(); . 22,941 ( 0.00%) if obligation.predicate.is_global() { . // no type variables present, can use evaluation for better caching. . // FIXME: consider caching errors too. 188 ( 0.00%) if self.selcx.infcx().predicate_must_hold_considering_regions(obligation) { 540 ( 0.00%) if let Some(key) = ProjectionCacheKey::from_poly_projection_predicate( . &mut self.selcx, 630 ( 0.00%) project_obligation.predicate, . ) { . // If `predicate_must_hold_considering_regions` succeeds, then we've . // evaluated all sub-obligations. We can therefore mark the 'root' . // obligation as complete, and skip evaluating sub-obligations. 540 ( 0.00%) self.selcx . .infcx() . .inner . .borrow_mut() . .projection_cache() . .complete(key, EvaluationResult::EvaluatedToOk); . } 180 ( 0.00%) return ProcessResult::Changed(vec![]); . } else { . tracing::debug!("Does NOT hold: {:?}", obligation); . } . } . 52,899 ( 0.00%) match project::poly_project_and_unify_type(self.selcx, &project_obligation) { . Ok(Ok(Some(os))) => ProcessResult::Changed(mk_pending(os)), . Ok(Ok(None)) => { . stalled_on.clear(); . stalled_on.extend(substs_infer_vars( . self.selcx, 3,789 ( 0.00%) project_obligation.predicate.map_bound(|pred| pred.projection_ty.substs), . )); 3,789 ( 0.00%) ProcessResult::Unchanged . } . // Let the caller handle the recursion . Ok(Err(project::InProgress)) => ProcessResult::Changed(mk_pending(vec![ . project_obligation.with(project_obligation.predicate.to_predicate(tcx)), . ])), . Err(e) => ProcessResult::Error(CodeProjectionError(e)), . } 61,176 ( 0.00%) } . } . . /// Returns the set of inference variables contained in `substs`. . fn substs_infer_vars<'a, 'tcx>( . selcx: &mut SelectionContext<'a, 'tcx>, . substs: ty::Binder<'tcx, SubstsRef<'tcx>>, . ) -> impl Iterator> { . selcx . .infcx() . .resolve_vars_if_possible(substs) . .skip_binder() // ok because this check doesn't care about regions . .iter() . .filter(|arg| arg.has_infer_types_or_consts()) . .flat_map(|arg| { 95,270 ( 0.00%) let mut walker = arg.walk(); 171,736 ( 0.00%) while let Some(c) = walker.next() { 19,104 ( 0.00%) if !c.has_infer_types_or_consts() { . walker.visited.remove(&c); 48 ( 0.00%) walker.skip_current_subtree(); . } . } . walker.visited.into_iter() . }) . .filter_map(TyOrConstInferVar::maybe_from_generic_arg) . } . . fn to_fulfillment_error<'tcx>( -- line 778 ---------------------------------------- 38,435,064 ( 0.64%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/type_variable.rs -------------------------------------------------------------------------------- Ir -- line 44 ---------------------------------------- . /// Convert from a specific kind of undo to the more general UndoLog . impl<'tcx> From for UndoLog<'tcx> { . fn from(l: Instantiate) -> Self { . 
UndoLog::Values(sv::UndoLog::Other(l)) . } . } . . impl<'tcx> Rollback> for TypeVariableStorage<'tcx> { 159,713 ( 0.00%) fn reverse(&mut self, undo: UndoLog<'tcx>) { 674,833 ( 0.01%) match undo { 123,732 ( 0.00%) UndoLog::EqRelation(undo) => self.eq_relations.reverse(undo), 73,582 ( 0.00%) UndoLog::SubRelation(undo) => self.sub_relations.reverse(undo), 122,112 ( 0.00%) UndoLog::Values(undo) => self.values.reverse(undo), . } 319,426 ( 0.01%) } . } . . pub struct TypeVariableStorage<'tcx> { . values: sv::SnapshotVecStorage, . . /// Two variables are unified in `eq_relations` when we have a . /// constraint `?X == ?Y`. This table also stores, for each key, . /// the known value. -- line 66 ---------------------------------------- -- line 136 ---------------------------------------- . DynReturnFn, . LatticeVariable, . } . . pub(crate) struct TypeVariableData { . origin: TypeVariableOrigin, . } . 1,600,280 ( 0.03%) #[derive(Copy, Clone, Debug)] . pub enum TypeVariableValue<'tcx> { . Known { value: Ty<'tcx> }, . Unknown { universe: ty::UniverseIndex }, . } . . impl<'tcx> TypeVariableValue<'tcx> { . /// If this value is known, returns the type it is known to be. . /// Otherwise, `None`. . pub fn known(&self) -> Option> { 1,028,174 ( 0.02%) match *self { . TypeVariableValue::Unknown { .. } => None, . TypeVariableValue::Known { value } => Some(value), . } . } . . pub fn is_unknown(&self) -> bool { . match *self { . TypeVariableValue::Unknown { .. } => true, -- line 162 ---------------------------------------- -- line 166 ---------------------------------------- . } . . pub(crate) struct Instantiate; . . pub(crate) struct Delegate; . . impl<'tcx> TypeVariableStorage<'tcx> { . pub fn new() -> TypeVariableStorage<'tcx> { 67,305 ( 0.00%) TypeVariableStorage { . values: sv::SnapshotVecStorage::new(), . eq_relations: ut::UnificationTableStorage::new(), . sub_relations: ut::UnificationTableStorage::new(), . } . } . . #[inline] . pub(crate) fn with_log<'a>( -- line 182 ---------------------------------------- -- line 188 ---------------------------------------- . } . . impl<'tcx> TypeVariableTable<'_, 'tcx> { . /// Returns the origin that was given when `vid` was created. . /// . /// Note that this function does not return care whether . /// `vid` has been unified with something else or not. . pub fn var_origin(&self, vid: ty::TyVid) -> &TypeVariableOrigin { 2,761 ( 0.00%) &self.storage.values.get(vid.as_usize()).origin . } . . /// Records that `a == b`, depending on `dir`. . /// . /// Precondition: neither `a` nor `b` are known. . pub fn equate(&mut self, a: ty::TyVid, b: ty::TyVid) { . debug_assert!(self.probe(a).is_unknown()); . debug_assert!(self.probe(b).is_unknown()); 4,396 ( 0.00%) self.eq_relations().union(a, b); 3,768 ( 0.00%) self.sub_relations().union(a, b); . } . . /// Records that `a <: b`, depending on `dir`. . /// . /// Precondition: neither `a` nor `b` are known. . pub fn sub(&mut self, a: ty::TyVid, b: ty::TyVid) { . debug_assert!(self.probe(a).is_unknown()); . debug_assert!(self.probe(b).is_unknown()); 22,804 ( 0.00%) self.sub_relations().union(a, b); . } . . /// Instantiates `vid` with the type `ty`. . /// . /// Precondition: `vid` must not have been previously instantiated. 329,175 ( 0.01%) pub fn instantiate(&mut self, vid: ty::TyVid, ty: Ty<'tcx>) { . let vid = self.root_var(vid); . debug_assert!(self.probe(vid).is_unknown()); . debug_assert!( . self.eq_relations().probe_value(vid).is_unknown(), . "instantiating type variable `{:?}` twice: new-value = {:?}, old-value={:?}", . vid, . 
ty, . self.eq_relations().probe_value(vid) . ); 460,845 ( 0.01%) self.eq_relations().union_value(vid, TypeVariableValue::Known { value: ty }); . . // Hack: we only need this so that `types_escaping_snapshot` . // can see what has been unified; see the Delegate impl for . // more details. . self.undo_log.push(Instantiate); 329,175 ( 0.01%) } . . /// Creates a new type variable. . /// . /// - `diverging`: indicates if this is a "diverging" type . /// variable, e.g., one created as the type of a `return` . /// expression. The code in this module doesn't care if a . /// variable is diverging, but the main Rust type-checker will . /// sometimes "unify" such variables with the `!` or `()` types. . /// - `origin`: indicates *why* the type variable was created. . /// The code in this module doesn't care, but it can be useful . /// for improving error messages. 400,836 ( 0.01%) pub fn new_var( . &mut self, . universe: ty::UniverseIndex, . origin: TypeVariableOrigin, . ) -> ty::TyVid { 601,254 ( 0.01%) let eq_key = self.eq_relations().new_key(TypeVariableValue::Unknown { universe }); . 334,030 ( 0.01%) let sub_key = self.sub_relations().new_key(()); 66,806 ( 0.00%) assert_eq!(eq_key.vid, sub_key); . 601,254 ( 0.01%) let index = self.values().push(TypeVariableData { origin }); 267,224 ( 0.00%) assert_eq!(eq_key.vid.as_u32(), index as u32); . . debug!("new_var(index={:?}, universe={:?}, origin={:?}", eq_key.vid, universe, origin,); . . eq_key.vid 467,642 ( 0.01%) } . . /// Returns the number of type variables created thus far. . pub fn num_vars(&self) -> usize { 7,028 ( 0.00%) self.storage.values.len() . } . . /// Returns the "root" variable of `vid` in the `eq_relations` . /// equivalence table. All type variables that have been equated . /// will yield the same root variable (per the union-find . /// algorithm), so `root_var(a) == root_var(b)` implies that `a == . /// b` (transitively). . pub fn root_var(&mut self, vid: ty::TyVid) -> ty::TyVid { 290,568 ( 0.00%) self.eq_relations().find(vid).vid . } . . /// Returns the "root" variable of `vid` in the `sub_relations` . /// equivalence table. All type variables that have been are . /// related via equality or subtyping will yield the same root . /// variable (per the union-find algorithm), so `sub_root_var(a) . /// == sub_root_var(b)` implies that: . /// . /// exists X. (a <: X || X <: a) && (b <: X || X <: b) 379 ( 0.00%) pub fn sub_root_var(&mut self, vid: ty::TyVid) -> ty::TyVid { 212,877 ( 0.00%) self.sub_relations().find(vid) 758 ( 0.00%) } . . /// Returns `true` if `a` and `b` have same "sub-root" (i.e., exists some . /// type X such that `forall i in {a, b}. (i <: X || X <: i)`. . pub fn sub_unified(&mut self, a: ty::TyVid, b: ty::TyVid) -> bool { . self.sub_root_var(a) == self.sub_root_var(b) . } . . /// Retrieves the type to which `vid` has been instantiated, if . /// any. 3,667,140 ( 0.06%) pub fn probe(&mut self, vid: ty::TyVid) -> TypeVariableValue<'tcx> { . self.inlined_probe(vid) 3,667,140 ( 0.06%) } . . /// An always-inlined variant of `probe`, for very hot call sites. . #[inline(always)] . pub fn inlined_probe(&mut self, vid: ty::TyVid) -> TypeVariableValue<'tcx> { 20,145,456 ( 0.33%) self.eq_relations().inlined_probe_value(vid) . } . . /// If `t` is a type-inference variable, and it has been . /// instantiated, then return the with which it was . /// instantiated. Otherwise, returns `t`. . 
pub fn replace_if_possible(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { 768,384 ( 0.01%) match *t.kind() { 560,981 ( 0.01%) ty::Infer(ty::TyVar(v)) => match self.probe(v) { . TypeVariableValue::Unknown { .. } => t, . TypeVariableValue::Known { value } => value, . }, . _ => t, . } . } . . #[inline] -- line 323 ---------------------------------------- -- line 324 ---------------------------------------- . fn values( . &mut self, . ) -> sv::SnapshotVec, &mut InferCtxtUndoLogs<'tcx>> { . self.storage.values.with_log(self.undo_log) . } . . #[inline] . fn eq_relations(&mut self) -> super::UnificationTable<'_, 'tcx, TyVidEqKey<'tcx>> { 11,969,156 ( 0.20%) self.storage.eq_relations.with_log(self.undo_log) . } . . #[inline] . fn sub_relations(&mut self) -> super::UnificationTable<'_, 'tcx, ty::TyVid> { 149,402 ( 0.00%) self.storage.sub_relations.with_log(self.undo_log) . } . . /// Returns a range of the type variables created during the snapshot. 35,140 ( 0.00%) pub fn vars_since_snapshot( . &mut self, . value_count: usize, . ) -> (Range, Vec) { . let range = TyVid::from_usize(value_count)..TyVid::from_usize(self.num_vars()); 42,168 ( 0.00%) ( . range.start..range.end, . (range.start.as_usize()..range.end.as_usize()) . .map(|index| self.storage.values.get(index).origin) . .collect(), . ) 42,168 ( 0.00%) } . . /// Returns indices of all variables that are not yet . /// instantiated. . pub fn unsolved_variables(&mut self) -> Vec { . (0..self.storage.values.len()) . .filter_map(|i| { . let vid = ty::TyVid::from_usize(i); 201,912 ( 0.00%) match self.probe(vid) { . TypeVariableValue::Unknown { .. } => Some(vid), . TypeVariableValue::Known { .. } => None, . } . }) . .collect() . } . } . -- line 368 ---------------------------------------- -- line 391 ---------------------------------------- . /// These structs (a newtyped TyVid) are used as the unification key . /// for the `eq_relations`; they carry a `TypeVariableValue` along . /// with them. . #[derive(Copy, Clone, Debug, PartialEq, Eq)] . pub(crate) struct TyVidEqKey<'tcx> { . vid: ty::TyVid, . . // in the table, we map each ty-vid to one of these: 28,933,803 ( 0.48%) phantom: PhantomData>, . } . . impl<'tcx> From for TyVidEqKey<'tcx> { . #[inline] // make this function eligible for inlining - it is quite hot. . fn from(vid: ty::TyVid) -> Self { . TyVidEqKey { vid, phantom: PhantomData } . } . } -- line 407 ---------------------------------------- -- line 419 ---------------------------------------- . "TyVidEqKey" . } . } . . impl<'tcx> ut::UnifyValue for TypeVariableValue<'tcx> { . type Error = ut::NoError; . . fn unify_values(value1: &Self, value2: &Self) -> Result { 266,480 ( 0.00%) match (value1, value2) { . // We never equate two type variables, both of which . // have known types. Instead, we recursively equate . // those types. . (&TypeVariableValue::Known { .. }, &TypeVariableValue::Known { .. }) => { . bug!("equating two type variables, both of which have known types") . } . . // If one side is known, prefer that one. . (&TypeVariableValue::Known { .. }, &TypeVariableValue::Unknown { .. }) => Ok(*value1), . (&TypeVariableValue::Unknown { .. }, &TypeVariableValue::Known { .. }) => Ok(*value2), . . // If both sides are *unknown*, it hardly matters, does it? . ( 628 ( 0.00%) &TypeVariableValue::Unknown { universe: universe1 }, 628 ( 0.00%) &TypeVariableValue::Unknown { universe: universe2 }, . ) => { . // If we unify two unbound variables, ?T and ?U, then whatever . // value they wind up taking (which must be the same value) must . 
// be nameable by both universes. Therefore, the resulting . // universe is the minimum of the two universes, because that is . // the one which contains the fewest names in scope. . let universe = cmp::min(universe1, universe2); 2,512 ( 0.00%) Ok(TypeVariableValue::Unknown { universe }) . } . } . } . } 186,941 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/thir/pattern/usefulness.rs -------------------------------------------------------------------------------- Ir -- line 308 ---------------------------------------- . /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty . /// outside its module and should not be matchable with an empty match statement. . crate module: DefId, . crate param_env: ty::ParamEnv<'tcx>, . crate pattern_arena: &'p TypedArena>, . } . . impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> { 110 ( 0.00%) pub(super) fn is_uninhabited(&self, ty: Ty<'tcx>) -> bool { 66,755 ( 0.00%) if self.tcx.features().exhaustive_patterns { . self.tcx.is_ty_uninhabited_from(self.module, ty, self.param_env) . } else { . false . } 110 ( 0.00%) } . . /// Returns whether the given type is an enum from another crate declared `#[non_exhaustive]`. . pub(super) fn is_foreign_non_exhaustive_enum(&self, ty: Ty<'tcx>) -> bool { 60,936 ( 0.00%) match ty.kind() { . ty::Adt(def, ..) => { 56,920 ( 0.00%) def.is_enum() && def.is_variant_list_non_exhaustive() && !def.did.is_local() . } . _ => false, . } . } . } . . #[derive(Copy, Clone)] . pub(super) struct PatCtxt<'a, 'p, 'tcx> { -- line 336 ---------------------------------------- -- line 349 ---------------------------------------- . impl<'a, 'p, 'tcx> fmt::Debug for PatCtxt<'a, 'p, 'tcx> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . f.debug_struct("PatCtxt").field("ty", &self.ty).finish() . } . } . . /// A row of a matrix. Rows of len 1 are very common, which is why `SmallVec[_; 2]` . /// works well. 47,104 ( 0.00%) #[derive(Clone)] . struct PatStack<'p, 'tcx> { . pats: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>, . } . . impl<'p, 'tcx> PatStack<'p, 'tcx> { 7,660 ( 0.00%) fn from_pattern(pat: &'p DeconstructedPat<'p, 'tcx>) -> Self { . Self::from_vec(smallvec![pat]) 7,660 ( 0.00%) } . . fn from_vec(vec: SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]>) -> Self { 24,028 ( 0.00%) PatStack { pats: vec } . } . . fn is_empty(&self) -> bool { 1,233,232 ( 0.02%) self.pats.is_empty() . } . . fn len(&self) -> usize { . self.pats.len() . } . . fn head(&self) -> &'p DeconstructedPat<'p, 'tcx> { 8,816,160 ( 0.15%) self.pats[0] . } . . fn iter(&self) -> impl Iterator> { . self.pats.iter().copied() . } . . // Recursively expand the first pattern into its subpatterns. Only useful if the pattern is an . // or-pattern. Panics if `self` is empty. . fn expand_or_pat<'a>(&'a self) -> impl Iterator> + Captures<'a> { . self.head().iter_fields().map(move |pat| { 1,437 ( 0.00%) let mut new_patstack = PatStack::from_pattern(pat); . new_patstack.pats.extend_from_slice(&self.pats[1..]); . new_patstack . }) . } . . /// This computes `S(self.head().ctor(), self)`. See top of the file for explanations. . /// . /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing -- line 399 ---------------------------------------- -- line 402 ---------------------------------------- . /// This is roughly the inverse of `Constructor::apply`. . fn pop_head_constructor( . &self, . 
cx: &MatchCheckCtxt<'p, 'tcx>, . ctor: &Constructor<'tcx>, . ) -> PatStack<'p, 'tcx> { . // We pop the head pattern and push the new fields extracted from the arguments of . // `self.head()`. 2,389,716 ( 0.04%) let mut new_fields: SmallVec<[_; 2]> = self.head().specialize(cx, ctor); . new_fields.extend_from_slice(&self.pats[1..]); 2,389,716 ( 0.04%) PatStack::from_vec(new_fields) . } . } . . /// Pretty-printing for matrix row. . impl<'p, 'tcx> fmt::Debug for PatStack<'p, 'tcx> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(f, "+")?; . for pat in self.iter() { . write!(f, " {:?} +", pat)?; . } . Ok(()) . } . } . . /// A 2D matrix. 296 ( 0.00%) #[derive(Clone)] . pub(super) struct Matrix<'p, 'tcx> { 222 ( 0.00%) patterns: Vec>, . } . . impl<'p, 'tcx> Matrix<'p, 'tcx> { . fn empty() -> Self { 75,902 ( 0.00%) Matrix { patterns: vec![] } . } . . /// Number of columns of this matrix. `None` is the matrix is empty. . pub(super) fn column_count(&self) -> Option { . self.patterns.get(0).map(|r| r.len()) . } . . /// Pushes a new row to the matrix. If the row starts with an or-pattern, this recursively . /// expands it. 5,229,630 ( 0.09%) fn push(&mut self, row: PatStack<'p, 'tcx>) { 1,155,268 ( 0.02%) if !row.is_empty() && row.head().is_or_pat() { . self.patterns.extend(row.expand_or_pat()); . } else { 2,323,988 ( 0.04%) self.patterns.push(row); . } 4,648,560 ( 0.08%) } . . /// Iterate over the first component of each row . fn heads<'a>( . &'a self, . ) -> impl Iterator> + Clone + Captures<'a> { . self.patterns.iter().map(|r| r.head()) . } . -- line 459 ---------------------------------------- -- line 460 ---------------------------------------- . /// This computes `S(constructor, self)`. See top of the file for explanations. . fn specialize_constructor( . &self, . pcx: PatCtxt<'_, 'p, 'tcx>, . ctor: &Constructor<'tcx>, . ) -> Matrix<'p, 'tcx> { . let mut matrix = Matrix::empty(); . for row in &self.patterns { 1,273,447 ( 0.02%) if ctor.is_covered_by(pcx, row.head().ctor()) { . let new_row = row.pop_head_constructor(pcx.cx, ctor); 3,440,508 ( 0.06%) matrix.push(new_row); . } . } . matrix . } . } . . /// Pretty-printer for matrices of patterns, example: . /// -- line 478 ---------------------------------------- -- line 522 ---------------------------------------- . NoWitnesses { useful: bool }, . /// Carries a list of witnesses of non-exhaustiveness. If empty, indicates that the whole . /// pattern is unreachable. . WithWitnesses(Vec>), . } . . impl<'p, 'tcx> Usefulness<'p, 'tcx> { . fn new_useful(preference: ArmType) -> Self { 15,226 ( 0.00%) match preference { . // A single (empty) witness of reachability. 39 ( 0.00%) FakeExtraWildcard => WithWitnesses(vec![Witness(vec![])]), . RealArm => NoWitnesses { useful: true }, . } . } . . fn new_not_useful(preference: ArmType) -> Self { 55,866 ( 0.00%) match preference { 14,664 ( 0.00%) FakeExtraWildcard => WithWitnesses(vec![]), 62,260 ( 0.00%) RealArm => NoWitnesses { useful: false }, . } . } . . fn is_useful(&self) -> bool { 63,192 ( 0.00%) match self { 13,732 ( 0.00%) Usefulness::NoWitnesses { useful } => *useful, . Usefulness::WithWitnesses(witnesses) => !witnesses.is_empty(), . } . } . . /// Combine usefulnesses from two branches. This is an associative operation. 
171,472 ( 0.00%) fn extend(&mut self, other: Self) { 269,456 ( 0.00%) match (&mut *self, other) { 61,992 ( 0.00%) (WithWitnesses(_), WithWitnesses(o)) if o.is_empty() => {} 195 ( 0.00%) (WithWitnesses(s), WithWitnesses(o)) if s.is_empty() => *self = WithWitnesses(o), . (WithWitnesses(s), WithWitnesses(o)) => s.extend(o), . (NoWitnesses { useful: s_useful }, NoWitnesses { useful: o_useful }) => { 70,770 ( 0.00%) *s_useful = *s_useful || o_useful . } . _ => unreachable!(), . } 195,968 ( 0.00%) } . . /// After calculating usefulness after a specialization, call this to reconstruct a usefulness . /// that makes sense for the matrix pre-specialization. This new usefulness can then be merged . /// with the results of specializing with the other constructors. . fn apply_constructor( . self, . pcx: PatCtxt<'_, 'p, 'tcx>, . matrix: &Matrix<'p, 'tcx>, // used to compute missing ctors . ctor: &Constructor<'tcx>, . ) -> Self { 48,022 ( 0.00%) match self { 136,690 ( 0.00%) NoWitnesses { .. } => self, 113,612 ( 0.00%) WithWitnesses(ref witnesses) if witnesses.is_empty() => self, 45 ( 0.00%) WithWitnesses(witnesses) => { 30 ( 0.00%) let new_witnesses = if let Constructor::Missing { .. } = ctor { . // We got the special `Missing` constructor, so each of the missing constructors . // gives a new pattern that is not caught by the match. We list those patterns. 26 ( 0.00%) let new_patterns = if pcx.is_non_exhaustive { . // Here we don't want the user to try to list all variants, we want them to add . // a wildcard, so we only suggest that. . vec![DeconstructedPat::wildcard(pcx.ty)] . } else { 104 ( 0.00%) let mut split_wildcard = SplitWildcard::new(pcx); 104 ( 0.00%) split_wildcard.split(pcx, matrix.heads().map(DeconstructedPat::ctor)); . . // This lets us know if we skipped any variants because they are marked . // `doc(hidden)` or they are unstable feature gate (only stdlib types). 13 ( 0.00%) let mut hide_variant_show_wild = false; . // Construct for each missing constructor a "wild" version of this . // constructor, that matches everything that can be built with . // it. For example, if `ctor` is a `Constructor::Variant` for . // `Option::Some`, we get the pattern `Some(_)`. . let mut new: Vec> = split_wildcard . .iter_missing(pcx) . .filter_map(|missing_ctor| { . // Check if this variant is marked `doc(hidden)` 2,735 ( 0.00%) if missing_ctor.is_doc_hidden_variant(pcx) 3,829 ( 0.00%) || missing_ctor.is_unstable_variant(pcx) . { . hide_variant_show_wild = true; . return None; . } 6,017 ( 0.00%) Some(DeconstructedPat::wild_from_ctor(pcx, missing_ctor.clone())) . }) . .collect(); . 26 ( 0.00%) if hide_variant_show_wild { . new.push(DeconstructedPat::wildcard(pcx.ty)); . } . 52 ( 0.00%) new . }; . . witnesses . .into_iter() . .flat_map(|witness| { . new_patterns.iter().map(move |pat| { . Witness( . witness -- line 621 ---------------------------------------- -- line 626 ---------------------------------------- . .collect(), . ) . }) . }) . .collect() . } else { . witnesses . .into_iter() 24 ( 0.00%) .map(|witness| witness.apply_constructor(pcx, &ctor)) . .collect() . }; 90 ( 0.00%) WithWitnesses(new_witnesses) . } . } . } . } . . #[derive(Copy, Clone, Debug)] . enum ArmType { . FakeExtraWildcard, -- line 645 ---------------------------------------- -- line 679 ---------------------------------------- . /// `Witness(vec![Pair(Some(_), true)])` . /// . /// The final `Pair(Some(_), true)` is then the resulting witness. . #[derive(Debug)] . crate struct Witness<'p, 'tcx>(Vec>); . . 
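
Context for the annotated `usefulness.rs` code above: `is_useful` drives both reachability and exhaustiveness checking, and a `Witness` is a concrete pattern (such as the `Pair(Some(_), true)` in the doc comment) that no arm of a match covers. A minimal illustrative sketch, not from the profiled source, of the kind of match this analysis runs on; `classify` and its types are hypothetical:

fn classify(pair: (Option<u32>, bool)) -> &'static str {
    match pair {
        // With all three arms the match is exhaustive and every arm is reachable.
        (None, _) => "no value",
        (Some(_), false) => "value, flag off",
        // If this arm were removed, the usefulness algorithm would find the
        // witness `(Some(_), true)` and rustc would reject the match as
        // non-exhaustive (error E0004).
        (Some(_), true) => "value, flag on",
    }
}

fn main() {
    assert_eq!(classify((Some(7), true)), "value, flag on");
}
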
impl<'p, 'tcx> Witness<'p, 'tcx> { . /// Asserts that the witness contains a single pattern, and returns it. 2,188 ( 0.00%) fn single_pattern(self) -> DeconstructedPat<'p, 'tcx> { 2,188 ( 0.00%) assert_eq!(self.0.len(), 1); . self.0.into_iter().next().unwrap() 2,735 ( 0.00%) } . . /// Constructs a partial witness for a pattern given a list of . /// patterns expanded by the specialization step. . /// . /// When a pattern P is discovered to be useful, this function is used bottom-up . /// to reconstruct a complete witness, e.g., a pattern P' that covers a subset . /// of values, V, where each value in that set is not covered by any previously . /// used patterns and is covered by the pattern P'. Examples: . /// . /// left_ty: tuple of 3 elements . /// pats: [10, 20, _] => (10, 20, _) . /// . /// left_ty: struct X { a: (bool, &'static str), b: usize} . /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 } 22 ( 0.00%) fn apply_constructor(mut self, pcx: PatCtxt<'_, 'p, 'tcx>, ctor: &Constructor<'tcx>) -> Self { . let pat = { 2 ( 0.00%) let len = self.0.len(); 14 ( 0.00%) let arity = ctor.arity(pcx); 6 ( 0.00%) let pats = self.0.drain((len - arity)..).rev(); 2 ( 0.00%) let fields = Fields::from_iter(pcx.cx, pats); 2 ( 0.00%) DeconstructedPat::new(ctor.clone(), fields, pcx.ty, DUMMY_SP) . }; . . self.0.push(pat); . 6 ( 0.00%) self 16 ( 0.00%) } . } . . /// Report that a match of a `non_exhaustive` enum marked with `non_exhaustive_omitted_patterns` . /// is not exhaustive enough. . /// . /// NB: The partner lint for structs lives in `compiler/rustc_typeck/src/check/pat.rs`. . fn lint_non_exhaustive_omitted_patterns<'p, 'tcx>( . cx: &MatchCheckCtxt<'p, 'tcx>, -- line 725 ---------------------------------------- -- line 760 ---------------------------------------- . /// This is used both for reachability checking (if a pattern isn't useful in . /// relation to preceding patterns, it is not reachable) and exhaustiveness . /// checking (if a wildcard pattern is useful in relation to a matrix, the . /// matrix isn't exhaustive). . /// . /// `is_under_guard` is used to inform if the pattern has a guard. If it . /// has one it must not be inserted into the matrix. This shouldn't be . /// relied on for soundness. 604,282 ( 0.01%) #[instrument( . level = "debug", . skip(cx, matrix, witness_preference, hir_id, is_under_guard, is_top_level) 284,368 ( 0.00%) )] . fn is_useful<'p, 'tcx>( . cx: &MatchCheckCtxt<'p, 'tcx>, . matrix: &Matrix<'p, 'tcx>, . v: &PatStack<'p, 'tcx>, . witness_preference: ArmType, . hir_id: HirId, . is_under_guard: bool, . is_top_level: bool, -- line 779 ---------------------------------------- -- line 781 ---------------------------------------- . debug!("matrix,v={:?}{:?}", matrix, v); . let Matrix { patterns: rows, .. } = matrix; . . // The base case. We are pattern-matching on () and the return value is . // based on whether our matrix has a row or not. . // NOTE: This could potentially be optimized by checking rows.is_empty() . // first and then, if v is non-empty, the return value is based on whether . // the type of the tuple we're checking is inhabited or not. 35,546 ( 0.00%) if v.is_empty() { 14,482 ( 0.00%) let ret = if rows.is_empty() { . Usefulness::new_useful(witness_preference) . } else { . Usefulness::new_not_useful(witness_preference) . }; . debug!(?ret); 101,374 ( 0.00%) return ret; . } . . debug_assert!(rows.iter().all(|r| r.len() == v.len())); . . let ty = v.head().ty(); . 
let is_non_exhaustive = cx.is_foreign_non_exhaustive_enum(ty); 105,320 ( 0.00%) let pcx = PatCtxt { cx, ty, span: v.head().span(), is_top_level, is_non_exhaustive }; . . // If the first pattern is an or-pattern, expand it. . let mut ret = Usefulness::new_not_useful(witness_preference); 21,064 ( 0.00%) if v.head().is_or_pat() { . debug!("expanding or-pattern"); . // We try each or-pattern branch in turn. . let mut matrix = matrix.clone(); 970 ( 0.00%) for v in v.expand_or_pat() { . let usefulness = ensure_sufficient_stack(|| { 7,760 ( 0.00%) is_useful(cx, &matrix, &v, witness_preference, hir_id, is_under_guard, false) . }); 7,275 ( 0.00%) ret.extend(usefulness); . // If pattern has a guard don't add it to the matrix. 970 ( 0.00%) if !is_under_guard { . // We push the already-seen patterns into the matrix in order to detect redundant . // branches like `Some(_) | Some(0)`. 3,353 ( 0.00%) matrix.push(v); . } . } . } else { . let v_ctor = v.head().ctor(); 42,599 ( 0.00%) if let Constructor::IntRange(ctor_range) = &v_ctor { . // Lint on likely incorrect range patterns (#63987) 3,095 ( 0.00%) ctor_range.lint_overlapping_range_endpoints( 3,095 ( 0.00%) pcx, . matrix.heads(), . matrix.column_count().unwrap_or(0), . hir_id, . ) . } . // We split the head constructor of `v`. 167,920 ( 0.00%) let split_ctors = v_ctor.split(pcx, matrix.heads().map(DeconstructedPat::ctor)); 41,980 ( 0.00%) let is_non_exhaustive_and_wild = is_non_exhaustive && v_ctor.is_wildcard(); . // For each constructor, we compute whether there's a value that starts with it that would . // witness the usefulness of `v`. . let start_matrix = &matrix; 644,962 ( 0.01%) for ctor in split_ctors { . debug!("specialize({:?})", ctor); . // We cache the result of `Fields::wildcards` because it is used a lot. 120,055 ( 0.00%) let spec_matrix = start_matrix.specialize_constructor(pcx, &ctor); . let v = v.pop_head_constructor(cx, &ctor); . let usefulness = ensure_sufficient_stack(|| { 336,154 ( 0.01%) is_useful(cx, &spec_matrix, &v, witness_preference, hir_id, is_under_guard, false) . }); 264,121 ( 0.00%) let usefulness = usefulness.apply_constructor(pcx, start_matrix, &ctor); . . // When all the conditions are met we have a match with a `non_exhaustive` enum . // that has the potential to trigger the `non_exhaustive_omitted_patterns` lint. . // To understand the workings checkout `Constructor::split` and `SplitWildcard::new/into_ctors` 72,018 ( 0.00%) if is_non_exhaustive_and_wild . // We check that the match has a wildcard pattern and that that wildcard is useful, . // meaning there are variants that are covered by the wildcard. Without the check . // for `witness_preference` the lint would trigger on `if let NonExhaustiveEnum::A = foo {}` . && usefulness.is_useful() && matches!(witness_preference, RealArm) . && matches!( . &ctor, . Constructor::Missing { nonexhaustive_enum_missing_real_variants: true } . ) -- line 861 ---------------------------------------- -- line 877 ---------------------------------------- . .cloned() . .map(|missing_ctor| DeconstructedPat::wild_from_ctor(pcx, missing_ctor)) . .collect::>() . }; . . lint_non_exhaustive_omitted_patterns(pcx.cx, pcx.ty, pcx.span, hir_id, patterns); . } . 168,077 ( 0.00%) ret.extend(usefulness); . } . } . 28,381 ( 0.00%) if ret.is_useful() { . v.head().set_reachable(); . } . . debug!(?ret); 84,256 ( 0.00%) ret . } . . /// The arm of a match expression. . #[derive(Clone, Copy)] . crate struct MatchArm<'p, 'tcx> { . 
/// The pattern must have been lowered through `check_match::MatchVisitor::lower_pattern`. . crate pat: &'p DeconstructedPat<'p, 'tcx>, . crate hir_id: HirId, -- line 902 ---------------------------------------- -- line 923 ---------------------------------------- . crate non_exhaustiveness_witnesses: Vec>, . } . . /// The entrypoint for the usefulness algorithm. Computes whether a match is exhaustive and which . /// of its arms are reachable. . /// . /// Note: the input patterns must have been lowered through . /// `check_match::MatchVisitor::lower_pattern`. 42,559 ( 0.00%) crate fn compute_match_usefulness<'p, 'tcx>( . cx: &MatchCheckCtxt<'p, 'tcx>, . arms: &[MatchArm<'p, 'tcx>], . scrut_hir_id: HirId, . scrut_ty: Ty<'tcx>, . ) -> UsefulnessReport<'p, 'tcx> { . let mut matrix = Matrix::empty(); . let arm_usefulness: Vec<_> = arms . .iter() . .copied() . .map(|arm| { 21,543 ( 0.00%) let v = PatStack::from_pattern(arm.pat); 107,715 ( 0.00%) is_useful(cx, &matrix, &v, RealArm, arm.hir_id, arm.has_guard, true); 21,543 ( 0.00%) if !arm.has_guard { 50,211 ( 0.00%) matrix.push(v); . } 7,181 ( 0.00%) let reachability = if arm.pat.is_reachable() { 21,543 ( 0.00%) Reachability::Reachable(arm.pat.unreachable_spans()) . } else { . Reachability::Unreachable . }; 14,362 ( 0.00%) (arm, reachability) 14,354 ( 0.00%) }) . .collect(); . 3,869 ( 0.00%) let wild_pattern = cx.pattern_arena.alloc(DeconstructedPat::wildcard(scrut_ty)); . let v = PatStack::from_pattern(wild_pattern); 46,428 ( 0.00%) let usefulness = is_useful(cx, &matrix, &v, FakeExtraWildcard, scrut_hir_id, false, true); 7,738 ( 0.00%) let non_exhaustiveness_witnesses = match usefulness { 3,829 ( 0.00%) WithWitnesses(pats) => pats.into_iter().map(|w| w.single_pattern()).collect(), . NoWitnesses { .. } => bug!(), . }; 30,952 ( 0.00%) UsefulnessReport { arm_usefulness, non_exhaustiveness_witnesses } 34,821 ( 0.00%) } 117,520 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/fold.rs -------------------------------------------------------------------------------- Ir -- line 46 ---------------------------------------- . /// . /// To implement this conveniently, use the derive macro located in `rustc_macros`. . pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { . /// Consumers may find this more convenient to use with infallible folders than . /// [`try_super_fold_with`][`TypeFoldable::try_super_fold_with`], to which the . /// provided default definition delegates. Implementors **should not** override . /// this provided default definition, to ensure that the two methods are coherent . /// (provide a definition of `try_super_fold_with` instead). 8,311,462 ( 0.14%) fn super_fold_with>(self, folder: &mut F) -> Self { 238,275 ( 0.00%) self.try_super_fold_with(folder).into_ok() 8,528,257 ( 0.14%) } . /// Consumers may find this more convenient to use with infallible folders than . /// [`try_fold_with`][`TypeFoldable::try_fold_with`], to which the provided . /// default definition delegates. Implementors **should not** override this . /// provided default definition, to ensure that the two methods are coherent . /// (provide a definition of `try_fold_with` instead). 844,805 ( 0.01%) fn fold_with>(self, folder: &mut F) -> Self { 316,096 ( 0.01%) self.try_fold_with(folder).into_ok() 777,375 ( 0.01%) } . . fn try_super_fold_with>( . self, . folder: &mut F, . ) -> Result; . 
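
The `TypeFoldable` trait above splits structural recursion (`super_fold_with`, which rebuilds a value from its folded components) from the folder's per-node entry point (`fold_with`/`try_fold_with`, which a `TypeFolder` overrides). A minimal illustrative sketch of that split on a toy expression type, not from the profiled source; `Expr`, `Folder`, and `DoubleLits` are hypothetical names:

use std::rc::Rc;

#[derive(Debug, Clone)]
enum Expr {
    Lit(i64),
    Add(Rc<Expr>, Rc<Expr>),
}

trait Folder {
    // Per-node hook: decide what to do with one node (default: just recurse).
    fn fold_expr(&mut self, e: &Expr) -> Expr {
        e.super_fold(self)
    }
}

impl Expr {
    // Structural recursion: rebuild this node from its folded children.
    fn super_fold<F: Folder + ?Sized>(&self, f: &mut F) -> Expr {
        match self {
            Expr::Lit(n) => Expr::Lit(*n),
            Expr::Add(a, b) => Expr::Add(Rc::new(f.fold_expr(a)), Rc::new(f.fold_expr(b))),
        }
    }
}

struct DoubleLits;

impl Folder for DoubleLits {
    fn fold_expr(&mut self, e: &Expr) -> Expr {
        match e {
            Expr::Lit(n) => Expr::Lit(n * 2),
            // For every other node, fall back to the structural recursion.
            _ => e.super_fold(self),
        }
    }
}

fn main() {
    let e = Expr::Add(Rc::new(Expr::Lit(1)), Rc::new(Expr::Lit(2)));
    let mut folder = DoubleLits;
    println!("{:?}", folder.fold_expr(&e)); // Add(Lit(2), Lit(4))
}
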
10,620,959 ( 0.18%) fn try_fold_with>(self, folder: &mut F) -> Result { 2,588,810 ( 0.04%) self.try_super_fold_with(folder) 10,519,796 ( 0.17%) } . . fn super_visit_with>(&self, visitor: &mut V) -> ControlFlow; 1,238,896 ( 0.02%) fn visit_with>(&self, visitor: &mut V) -> ControlFlow { 797,645 ( 0.01%) self.super_visit_with(visitor) 791,002 ( 0.01%) } . . /// Returns `true` if `self` has any late-bound regions that are either . /// bound by `binder` or bound by some binder outside of `binder`. . /// If `binder` is `ty::INNERMOST`, this indicates whether . /// there are any late-bound regions that appear free. . fn has_vars_bound_at_or_above(&self, binder: ty::DebruijnIndex) -> bool { 506,644 ( 0.01%) self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder }).is_break() . } . . /// Returns `true` if this `self` has any regions that escape `binder` (and . /// hence are not bound by it). . fn has_vars_bound_above(&self, binder: ty::DebruijnIndex) -> bool { 132 ( 0.00%) self.has_vars_bound_at_or_above(binder.shifted_in(1)) . } . 1,537,806 ( 0.03%) fn has_escaping_bound_vars(&self) -> bool { 166,241 ( 0.00%) self.has_vars_bound_at_or_above(ty::INNERMOST) 1,544,908 ( 0.03%) } . 81,495 ( 0.00%) #[instrument(level = "trace")] . fn has_type_flags(&self, flags: TypeFlags) -> bool { 63,945 ( 0.00%) self.visit_with(&mut HasTypeFlagsVisitor { flags }).break_value() == Some(FoundFlags) . } . fn has_projections(&self) -> bool { 16,834 ( 0.00%) self.has_type_flags(TypeFlags::HAS_PROJECTION) . } . fn has_opaque_types(&self) -> bool { . self.has_type_flags(TypeFlags::HAS_TY_OPAQUE) . } 43,208 ( 0.00%) fn references_error(&self) -> bool { . self.has_type_flags(TypeFlags::HAS_ERROR) 43,208 ( 0.00%) } . fn has_param_types_or_consts(&self) -> bool { 546 ( 0.00%) self.has_type_flags(TypeFlags::HAS_TY_PARAM | TypeFlags::HAS_CT_PARAM) . } . fn has_infer_regions(&self) -> bool { . self.has_type_flags(TypeFlags::HAS_RE_INFER) . } . fn has_infer_types(&self) -> bool { . self.has_type_flags(TypeFlags::HAS_TY_INFER) . } . fn has_infer_types_or_consts(&self) -> bool { 399,915 ( 0.01%) self.has_type_flags(TypeFlags::HAS_TY_INFER | TypeFlags::HAS_CT_INFER) . } 47,828 ( 0.00%) fn needs_infer(&self) -> bool { 218,994 ( 0.00%) self.has_type_flags(TypeFlags::NEEDS_INFER) 48,432 ( 0.00%) } . fn has_placeholders(&self) -> bool { . self.has_type_flags( . TypeFlags::HAS_RE_PLACEHOLDER . | TypeFlags::HAS_TY_PLACEHOLDER . | TypeFlags::HAS_CT_PLACEHOLDER, . ) . } . fn needs_subst(&self) -> bool { 24 ( 0.00%) self.has_type_flags(TypeFlags::NEEDS_SUBST) . } . /// "Free" regions in this context means that it has any region . /// that is not (a) erased or (b) late-bound. . fn has_free_regions(&self) -> bool { . self.has_type_flags(TypeFlags::HAS_FREE_REGIONS) . } . . fn has_erased_regions(&self) -> bool { -- line 142 ---------------------------------------- -- line 147 ---------------------------------------- . fn has_erasable_regions(&self) -> bool { . self.has_type_flags(TypeFlags::HAS_FREE_REGIONS) . } . . /// Indicates whether this value references only 'global' . /// generic parameters that are the same regardless of what fn we are . /// in. This is used for caching. . fn is_global(&self) -> bool { 112,927 ( 0.00%) !self.has_type_flags(TypeFlags::HAS_FREE_LOCAL_NAMES) . } . . /// True if there are any late-bound regions . fn has_late_bound_regions(&self) -> bool { . self.has_type_flags(TypeFlags::HAS_RE_LATE_BOUND) . } . . 
/// Indicates whether this value still has parameters/placeholders/inference variables -- line 163 ---------------------------------------- -- line 194 ---------------------------------------- . . fn tcx<'a>(&'a self) -> TyCtxt<'tcx>; . . fn fold_binder(&mut self, t: Binder<'tcx, T>) -> Binder<'tcx, T> . where . T: TypeFoldable<'tcx>, . Self: TypeFolder<'tcx, Error = !>, . { 2,153,445 ( 0.04%) t.super_fold_with(self) . } . . fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> . where . Self: TypeFolder<'tcx, Error = !>, . { 288,039 ( 0.00%) t.super_fold_with(self) . } . . fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> . where . Self: TypeFolder<'tcx, Error = !>, . { . r.super_fold_with(self) . } . . fn fold_const(&mut self, c: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> . where . Self: TypeFolder<'tcx, Error = !>, . { 19,919 ( 0.00%) c.super_fold_with(self) . } . . fn fold_predicate(&mut self, p: ty::Predicate<'tcx>) -> ty::Predicate<'tcx> . where . Self: TypeFolder<'tcx, Error = !>, . { 13,122 ( 0.00%) p.super_fold_with(self) . } . . fn fold_mir_const(&mut self, c: mir::ConstantKind<'tcx>) -> mir::ConstantKind<'tcx> . where . Self: TypeFolder<'tcx, Error = !>, . { . bug!("most type folders should not be folding MIR datastructures: {:?}", c) . } -- line 238 ---------------------------------------- -- line 247 ---------------------------------------- . /// A blanket implementation of this trait (that defers to the relevant . /// method of [`TypeFolder`]) is provided for all infallible folders in . /// order to ensure the two APIs are coherent. . pub trait FallibleTypeFolder<'tcx>: TypeFolder<'tcx> { . fn try_fold_binder(&mut self, t: Binder<'tcx, T>) -> Result, Self::Error> . where . T: TypeFoldable<'tcx>, . { 13,031 ( 0.00%) t.try_super_fold_with(self) . } . . fn try_fold_ty(&mut self, t: Ty<'tcx>) -> Result, Self::Error> { . t.try_super_fold_with(self) . } . . fn try_fold_region(&mut self, r: ty::Region<'tcx>) -> Result, Self::Error> { . r.try_super_fold_with(self) -- line 263 ---------------------------------------- -- line 269 ---------------------------------------- . ) -> Result<&'tcx ty::Const<'tcx>, Self::Error> { . c.try_super_fold_with(self) . } . . fn try_fold_predicate( . &mut self, . p: ty::Predicate<'tcx>, . ) -> Result, Self::Error> { 3,543 ( 0.00%) p.try_super_fold_with(self) . } . . fn try_fold_mir_const( . &mut self, . c: mir::ConstantKind<'tcx>, . ) -> Result, Self::Error> { . bug!("most type folders should not be folding MIR datastructures: {:?}", c) . } -- line 285 ---------------------------------------- -- line 286 ---------------------------------------- . } . . // Blanket implementation of fallible trait for infallible folders . // delegates to infallible methods to prevent incoherence . impl<'tcx, F> FallibleTypeFolder<'tcx> for F . where . F: TypeFolder<'tcx, Error = !>, . { 284,794 ( 0.00%) fn try_fold_binder(&mut self, t: Binder<'tcx, T>) -> Result, Self::Error> . where . T: TypeFoldable<'tcx>, . { 523,285 ( 0.01%) Ok(self.fold_binder(t)) 425,079 ( 0.01%) } . 61,586 ( 0.00%) fn try_fold_ty(&mut self, t: Ty<'tcx>) -> Result, Self::Error> { 3,910,634 ( 0.06%) Ok(self.fold_ty(t)) 46,458 ( 0.00%) } . 71,120 ( 0.00%) fn try_fold_region(&mut self, r: ty::Region<'tcx>) -> Result, Self::Error> { 820,945 ( 0.01%) Ok(self.fold_region(r)) 71,120 ( 0.00%) } . 2,444 ( 0.00%) fn try_fold_const( . &mut self, . c: &'tcx ty::Const<'tcx>, . ) -> Result<&'tcx ty::Const<'tcx>, Self::Error> { 17,550 ( 0.00%) Ok(self.fold_const(c)) 1,924 ( 0.00%) } . . 
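
The blanket impl above gives every infallible folder (one whose `Error` is the never type `!`) the fallible `FallibleTypeFolder` interface for free by wrapping each result in `Ok`. A small sketch of the same pattern, not from the profiled source, using `std::convert::Infallible` on stable Rust in place of the unstable `!` type; `Transform`, `TryTransform`, and `AddOne` are hypothetical names:

use std::convert::Infallible;

// Fallible interface: the one callers are written against.
trait TryTransform {
    type Error;
    fn try_transform(&mut self, x: i64) -> Result<i64, Self::Error>;
}

// Infallible interface: simpler to implement when nothing can fail.
trait Transform {
    fn transform(&mut self, x: i64) -> i64;
}

// Blanket impl: an infallible transformer automatically satisfies the
// fallible interface, with `Infallible` standing in for rustc's `!`.
impl<T: Transform> TryTransform for T {
    type Error = Infallible;
    fn try_transform(&mut self, x: i64) -> Result<i64, Infallible> {
        Ok(self.transform(x))
    }
}

struct AddOne;

impl Transform for AddOne {
    fn transform(&mut self, x: i64) -> i64 {
        x + 1
    }
}

fn main() {
    let mut t = AddOne;
    // The error arm is statically unreachable because `Infallible` has no values.
    match t.try_transform(41) {
        Ok(v) => println!("{}", v), // 42
        Err(e) => match e {},
    }
}
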
fn try_fold_predicate( . &mut self, . p: ty::Predicate<'tcx>, . ) -> Result, Self::Error> { . Ok(self.fold_predicate(p)) . } . -- line 322 ---------------------------------------- -- line 330 ---------------------------------------- . . pub trait TypeVisitor<'tcx>: Sized { . type BreakTy = !; . . fn visit_binder>( . &mut self, . t: &Binder<'tcx, T>, . ) -> ControlFlow { 2,166 ( 0.00%) t.super_visit_with(self) . } . . fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow { 4,620 ( 0.00%) t.super_visit_with(self) . } . . fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow { . r.super_visit_with(self) . } . 24 ( 0.00%) fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> ControlFlow { 3 ( 0.00%) c.super_visit_with(self) 21 ( 0.00%) } . . fn visit_unevaluated_const(&mut self, uv: ty::Unevaluated<'tcx>) -> ControlFlow { . uv.super_visit_with(self) . } . . fn visit_predicate(&mut self, p: ty::Predicate<'tcx>) -> ControlFlow { . p.super_visit_with(self) . } -- line 359 ---------------------------------------- -- line 376 ---------------------------------------- . . impl<'tcx, F, G, H> TypeFolder<'tcx> for BottomUpFolder<'tcx, F, G, H> . where . F: FnMut(Ty<'tcx>) -> Ty<'tcx>, . G: FnMut(ty::Region<'tcx>) -> ty::Region<'tcx>, . H: FnMut(&'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx>, . { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { 177 ( 0.00%) self.tcx . } . . fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { 5,381 ( 0.00%) let t = ty.super_fold_with(self); . (self.ty_op)(t) . } . . fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { . let r = r.super_fold_with(self); . (self.lt_op)(r) . } . . fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { 176 ( 0.00%) let ct = ct.super_fold_with(self); . (self.ct_op)(ct) . } . } . . /////////////////////////////////////////////////////////////////////////// . // Region folder . . impl<'tcx> TyCtxt<'tcx> { -- line 406 ---------------------------------------- -- line 415 ---------------------------------------- . ) -> T . where . T: TypeFoldable<'tcx>, . { . value.fold_with(&mut RegionFolder::new(self, skipped_regions, &mut f)) . } . . /// Invoke `callback` on every region appearing free in `value`. 19,670 ( 0.00%) pub fn for_each_free_region( . self, . value: &impl TypeFoldable<'tcx>, . mut callback: impl FnMut(ty::Region<'tcx>), . ) { . self.any_free_region_meets(value, |r| { 93,920 ( 0.00%) callback(r); . false . }); 17,190 ( 0.00%) } . . /// Returns `true` if `callback` returns true for every region appearing free in `value`. . pub fn all_free_regions_meet( . self, . value: &impl TypeFoldable<'tcx>, . mut callback: impl FnMut(ty::Region<'tcx>) -> bool, . ) -> bool { 13,476 ( 0.00%) !self.any_free_region_meets(value, |r| !callback(r)) . } . . /// Returns `true` if `callback` returns true for some region appearing free in `value`. . pub fn any_free_region_meets( . self, . value: &impl TypeFoldable<'tcx>, . callback: impl FnMut(ty::Region<'tcx>) -> bool, . ) -> bool { -- line 448 ---------------------------------------- -- line 473 ---------------------------------------- . F: FnMut(ty::Region<'tcx>) -> bool, . { . type BreakTy = (); . . fn visit_binder>( . &mut self, . t: &Binder<'tcx, T>, . ) -> ControlFlow { 2,958 ( 0.00%) self.outer_index.shift_in(1); 2,888 ( 0.00%) let result = t.as_ref().skip_binder().visit_with(self); 2,166 ( 0.00%) self.outer_index.shift_out(1); . result . } . 
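
Helpers such as `any_free_region_meets` and `has_type_flags` above walk a value with a `TypeVisitor` and return `ControlFlow::BREAK` as soon as the callback is satisfied, so the traversal stops without visiting the rest of the value. A minimal sketch of that early-exit visitor shape on a toy tree, not from the profiled source; `Tree`, `visit_leaves`, and `any_leaf` are hypothetical names:

use std::ops::ControlFlow;

enum Tree {
    Leaf(i64),
    Node(Vec<Tree>),
}

// Visit every leaf, stopping as soon as `f` asks to break.
fn visit_leaves(t: &Tree, f: &mut impl FnMut(i64) -> ControlFlow<()>) -> ControlFlow<()> {
    match t {
        Tree::Leaf(v) => f(*v),
        Tree::Node(children) => {
            for child in children {
                // `?` propagates a Break immediately, skipping the remaining children.
                visit_leaves(child, f)?;
            }
            ControlFlow::Continue(())
        }
    }
}

// Returns true if any leaf satisfies the predicate, short-circuiting on the first hit.
fn any_leaf(t: &Tree, mut pred: impl FnMut(i64) -> bool) -> bool {
    visit_leaves(t, &mut |v| {
        if pred(v) { ControlFlow::Break(()) } else { ControlFlow::Continue(()) }
    })
    .is_break()
}

fn main() {
    let t = Tree::Node(vec![Tree::Leaf(1), Tree::Node(vec![Tree::Leaf(7), Tree::Leaf(3)])]);
    assert!(any_leaf(&t, |v| v == 7));
    assert!(!any_leaf(&t, |v| v > 10));
}
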
80,710 ( 0.00%) fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow { 133,337 ( 0.00%) match *r { 195 ( 0.00%) ty::ReLateBound(debruijn, _) if debruijn < self.outer_index => { . ControlFlow::CONTINUE . } . _ => { . if (self.callback)(r) { . ControlFlow::BREAK . } else { . ControlFlow::CONTINUE . } . } . } 112,994 ( 0.00%) } . . fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow { . // We're only interested in types involving regions 79,373 ( 0.00%) if ty.flags().intersects(TypeFlags::HAS_FREE_REGIONS) { 90,431 ( 0.00%) ty.super_visit_with(self) . } else { . ControlFlow::CONTINUE . } . } . } . 192,473 ( 0.00%) value.visit_with(&mut RegionVisitor { outer_index: ty::INNERMOST, callback }).is_break() . } . } . . /// Folds over the substructure of a type, visiting its component . /// types and all regions that occur *free* within it. . /// . /// That is, `Ty` can contain function or method types that bind . /// regions at the call site (`ReLateBound`), and occurrences of -- line 520 ---------------------------------------- -- line 540 ---------------------------------------- . . impl<'a, 'tcx> RegionFolder<'a, 'tcx> { . #[inline] . pub fn new( . tcx: TyCtxt<'tcx>, . skipped_regions: &'a mut bool, . fold_region_fn: &'a mut dyn FnMut(ty::Region<'tcx>, ty::DebruijnIndex) -> ty::Region<'tcx>, . ) -> RegionFolder<'a, 'tcx> { 504,160 ( 0.01%) RegionFolder { tcx, skipped_regions, current_index: ty::INNERMOST, fold_region_fn } . } . } . . impl<'a, 'tcx> TypeFolder<'tcx> for RegionFolder<'a, 'tcx> { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { 113,120 ( 0.00%) self.tcx 113,120 ( 0.00%) } . . fn fold_binder>( . &mut self, . t: ty::Binder<'tcx, T>, . ) -> ty::Binder<'tcx, T> { 5,956 ( 0.00%) self.current_index.shift_in(1); 9,386 ( 0.00%) let t = t.super_fold_with(self); 5,568 ( 0.00%) self.current_index.shift_out(1); . t . } . 2,638 ( 0.00%) #[instrument(skip(self), level = "debug")] . fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { 88,760 ( 0.00%) match *r { 2,638 ( 0.00%) ty::ReLateBound(debruijn, _) if debruijn < self.current_index => { . debug!(?self.current_index, "skipped bound region"); 2,638 ( 0.00%) *self.skipped_regions = true; . r . } . _ => { . debug!(?self.current_index, "folding free region"); 258,366 ( 0.00%) (self.fold_region_fn)(r, self.current_index) . } . } . } . } . . /////////////////////////////////////////////////////////////////////////// . // Bound vars replacer . -- line 585 ---------------------------------------- -- line 592 ---------------------------------------- . current_index: ty::DebruijnIndex, . . fld_r: Option<&'a mut (dyn FnMut(ty::BoundRegion) -> ty::Region<'tcx> + 'a)>, . fld_t: Option<&'a mut (dyn FnMut(ty::BoundTy) -> Ty<'tcx> + 'a)>, . fld_c: Option<&'a mut (dyn FnMut(ty::BoundVar, Ty<'tcx>) -> &'tcx ty::Const<'tcx> + 'a)>, . } . . impl<'a, 'tcx> BoundVarReplacer<'a, 'tcx> { 204,022 ( 0.00%) fn new( . tcx: TyCtxt<'tcx>, . fld_r: Option<&'a mut (dyn FnMut(ty::BoundRegion) -> ty::Region<'tcx> + 'a)>, . fld_t: Option<&'a mut (dyn FnMut(ty::BoundTy) -> Ty<'tcx> + 'a)>, . fld_c: Option<&'a mut (dyn FnMut(ty::BoundVar, Ty<'tcx>) -> &'tcx ty::Const<'tcx> + 'a)>, . ) -> Self { 714,622 ( 0.01%) BoundVarReplacer { tcx, current_index: ty::INNERMOST, fld_r, fld_t, fld_c } 102,011 ( 0.00%) } . } . . impl<'a, 'tcx> TypeFolder<'tcx> for BoundVarReplacer<'a, 'tcx> { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { 92,367 ( 0.00%) self.tcx 24,207 ( 0.00%) } . . fn fold_binder>( . &mut self, . t: ty::Binder<'tcx, T>, . 
) -> ty::Binder<'tcx, T> { 129,154 ( 0.00%) self.current_index.shift_in(1); 26,660 ( 0.00%) let t = t.super_fold_with(self); 100,164 ( 0.00%) self.current_index.shift_out(1); . t . } . 328,480 ( 0.01%) fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { 210,276 ( 0.00%) match *t.kind() { 4,462 ( 0.00%) ty::Bound(debruijn, bound_ty) if debruijn == self.current_index => { 895 ( 0.00%) if let Some(fld_t) = self.fld_t.as_mut() { . let ty = fld_t(bound_ty); 895 ( 0.00%) return ty::fold::shift_vars(self.tcx, &ty, self.current_index.as_u32()); . } . } 208,486 ( 0.00%) _ if t.has_vars_bound_at_or_above(self.current_index) => { 333,631 ( 0.01%) return t.super_fold_with(self); . } . _ => {} . } . t 148,068 ( 0.00%) } . 389,056 ( 0.01%) fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { 291,514 ( 0.00%) match *r { 289,892 ( 0.00%) ty::ReLateBound(debruijn, br) if debruijn == self.current_index => { 144,720 ( 0.00%) if let Some(fld_r) = self.fld_r.as_mut() { . let region = fld_r(br); 289,788 ( 0.00%) return if let ty::ReLateBound(debruijn1, br) = *region { . // If the callback returns a late-bound region, . // that region should always use the INNERMOST . // debruijn index. Then we adjust it to the . // correct depth. 174 ( 0.00%) assert_eq!(debruijn1, ty::INNERMOST); 1,718 ( 0.00%) self.tcx.mk_region(ty::ReLateBound(debruijn, br)) . } else { . region . }; . } . } . _ => {} . } . r 389,056 ( 0.01%) } . . fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { 81 ( 0.00%) match *ct { . ty::Const { val: ty::ConstKind::Bound(debruijn, bound_const), ty } . if debruijn == self.current_index => . { . if let Some(fld_c) = self.fld_c.as_mut() { . let ct = fld_c(bound_const, ty); . return ty::fold::shift_vars(self.tcx, &ct, self.current_index.as_u32()); . } . } 27 ( 0.00%) _ if ct.has_vars_bound_at_or_above(self.current_index) => { . return ct.super_fold_with(self); . } . _ => {} . } . ct . } . } . -- line 681 ---------------------------------------- -- line 697 ---------------------------------------- . mut fld_r: F, . ) -> (T, BTreeMap>) . where . F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, . T: TypeFoldable<'tcx>, . { . let mut region_map = BTreeMap::new(); . let mut real_fld_r = 178,392 ( 0.00%) |br: ty::BoundRegion| *region_map.entry(br).or_insert_with(|| fld_r(br)); 5,775 ( 0.00%) let value = value.skip_binder(); 18,354 ( 0.00%) let value = if !value.has_escaping_bound_vars() { 29,610 ( 0.00%) value . } else { 38,111 ( 0.00%) let mut replacer = BoundVarReplacer::new(self, Some(&mut real_fld_r), None, None); 715 ( 0.00%) value.fold_with(&mut replacer) . }; 96,084 ( 0.00%) (value, region_map) . } . . /// Replaces all escaping bound vars. The `fld_r` closure replaces escaping . /// bound regions; the `fld_t` closure replaces escaping bound types and the `fld_c` . /// closure replaces escaping bound consts. 793,720 ( 0.01%) pub fn replace_escaping_bound_vars( . self, . value: T, . mut fld_r: F, . mut fld_t: G, . mut fld_c: H, . ) -> T . where . F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, . G: FnMut(ty::BoundTy) -> Ty<'tcx>, . H: FnMut(ty::BoundVar, Ty<'tcx>) -> &'tcx ty::Const<'tcx>, . T: TypeFoldable<'tcx>, . { 103,244 ( 0.00%) if !value.has_escaping_bound_vars() { 115,505 ( 0.00%) value . } else { . let mut replacer = 1,175,389 ( 0.02%) BoundVarReplacer::new(self, Some(&mut fld_r), Some(&mut fld_t), Some(&mut fld_c)); 47,782 ( 0.00%) value.fold_with(&mut replacer) . } 545,300 ( 0.01%) } . . /// Replaces all types or regions bound by the given `Binder`. 
The `fld_r` . /// closure replaces bound regions while the `fld_t` closure replaces bound . /// types. 502,190 ( 0.01%) pub fn replace_bound_vars( . self, . value: Binder<'tcx, T>, . mut fld_r: F, . fld_t: G, . fld_c: H, . ) -> (T, BTreeMap>) . where . F: FnMut(ty::BoundRegion) -> ty::Region<'tcx>, . G: FnMut(ty::BoundTy) -> Ty<'tcx>, . H: FnMut(ty::BoundVar, Ty<'tcx>) -> &'tcx ty::Const<'tcx>, . T: TypeFoldable<'tcx>, . { . let mut region_map = BTreeMap::new(); 410,195 ( 0.01%) let real_fld_r = |br: ty::BoundRegion| *region_map.entry(br).or_insert_with(|| fld_r(br)); 513,381 ( 0.01%) let value = self.replace_escaping_bound_vars(value.skip_binder(), real_fld_r, fld_t, fld_c); 558,996 ( 0.01%) (value, region_map) 351,912 ( 0.01%) } . . /// Replaces any late-bound regions bound in `value` with . /// free variants attached to `all_outlive_scope`. 29,150 ( 0.00%) pub fn liberate_late_bound_regions( . self, . all_outlive_scope: DefId, . value: ty::Binder<'tcx, T>, . ) -> T . where . T: TypeFoldable<'tcx>, . { . self.replace_late_bound_regions(value, |br| { 42,744 ( 0.00%) self.mk_region(ty::ReFree(ty::FreeRegion { 6,576 ( 0.00%) scope: all_outlive_scope, . bound_region: br.kind, . })) . }) . .0 15,900 ( 0.00%) } . . pub fn shift_bound_var_indices(self, bound_vars: usize, value: T) -> T . where . T: TypeFoldable<'tcx>, . { . self.replace_escaping_bound_vars( 8,970 ( 0.00%) value, . |r| { . self.mk_region(ty::ReLateBound( . ty::INNERMOST, . ty::BoundRegion { . var: ty::BoundVar::from_usize(r.var.as_usize() + bound_vars), . kind: r.kind, . }, . )) -- line 795 ---------------------------------------- -- line 843 ---------------------------------------- . fn collect_late_bound_regions( . self, . value: &Binder<'tcx, T>, . just_constraint: bool, . ) -> FxHashSet . where . T: TypeFoldable<'tcx>, . { 3,136 ( 0.00%) let mut collector = LateBoundRegionsCollector::new(just_constraint); . let result = value.as_ref().skip_binder().visit_with(&mut collector); . assert!(result.is_continue()); // should never have stopped early 6,232 ( 0.00%) collector.regions . } . . /// Replaces any late-bound regions bound in `value` with `'erased`. Useful in codegen but also . /// method lookup and a few other places where precise region relationships are not required. . pub fn erase_late_bound_regions(self, value: Binder<'tcx, T>) -> T . where . T: TypeFoldable<'tcx>, . { 9,264 ( 0.00%) self.replace_late_bound_regions(value, |_| self.lifetimes.re_erased).0 . } . . /// Rewrite any late-bound regions so that they are anonymous. Region numbers are . /// assigned starting at 0 and increasing monotonically in the order traversed . /// by the fold operation. . /// . /// The chief purpose of this function is to canonicalize regions so that two . /// `FnSig`s or `TraitRef`s which are equivalent up to region naming will become . /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and . /// `for<'a, 'b> fn(&'b isize, &'a isize)` will become identical after anonymization. 36,011 ( 0.00%) pub fn anonymize_late_bound_regions(self, sig: Binder<'tcx, T>) -> Binder<'tcx, T> . where . T: TypeFoldable<'tcx>, . { 26,000 ( 0.00%) let mut counter = 0; 1,323 ( 0.00%) let inner = self . .replace_late_bound_regions(sig, |_| { . let br = ty::BoundRegion { 348 ( 0.00%) var: ty::BoundVar::from_u32(counter), . kind: ty::BrAnon(counter), . }; 1,392 ( 0.00%) let r = self.mk_region(ty::ReLateBound(ty::INNERMOST, br)); 870 ( 0.00%) counter += 1; . r . }) . 
.0; 5,200 ( 0.00%) let bound_vars = self.mk_bound_variable_kinds( 15,600 ( 0.00%) (0..counter).map(|i| ty::BoundVariableKind::Region(ty::BrAnon(i))), . ); 30,000 ( 0.00%) Binder::bind_with_vars(inner, bound_vars) 30,948 ( 0.00%) } . } . . pub struct ValidateBoundVars<'tcx> { . bound_vars: &'tcx ty::List, . binder_index: ty::DebruijnIndex, . // We may encounter the same variable at different levels of binding, so . // this can't just be `Ty` . visited: SsoHashSet<(ty::DebruijnIndex, Ty<'tcx>)>, -- line 902 ---------------------------------------- -- line 1004 ---------------------------------------- . tcx: TyCtxt<'tcx>, . current_index: ty::DebruijnIndex, . amount: u32, . } . . impl<'tcx> Shifter<'tcx> { . pub fn new(tcx: TyCtxt<'tcx>, amount: u32) -> Self { . Shifter { tcx, current_index: ty::INNERMOST, amount } 1,808 ( 0.00%) } . } . . impl<'tcx> TypeFolder<'tcx> for Shifter<'tcx> { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { 27 ( 0.00%) self.tcx . } . . fn fold_binder>( . &mut self, . t: ty::Binder<'tcx, T>, . ) -> ty::Binder<'tcx, T> { . self.current_index.shift_in(1); . let t = t.super_fold_with(self); . self.current_index.shift_out(1); . t . } . . fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { 26 ( 0.00%) match *r { . ty::ReLateBound(debruijn, br) => { 36 ( 0.00%) if self.amount == 0 || debruijn < self.current_index { . r . } else { 9 ( 0.00%) let debruijn = debruijn.shifted_in(self.amount); . let shifted = ty::ReLateBound(debruijn, br); 90 ( 0.00%) self.tcx.mk_region(shifted) . } . } . _ => r, . } . } . . fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { 1,864 ( 0.00%) match *ty.kind() { . ty::Bound(debruijn, bound_ty) => { . if self.amount == 0 || debruijn < self.current_index { . ty . } else { . let debruijn = debruijn.shifted_in(self.amount); . self.tcx.mk_ty(ty::Bound(debruijn, bound_ty)) . } . } . 2,835 ( 0.00%) _ => ty.super_fold_with(self), . } . } . . fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { . if let ty::Const { val: ty::ConstKind::Bound(debruijn, bound_ct), ty } = *ct { . if self.amount == 0 || debruijn < self.current_index { . ct . } else { -- line 1064 ---------------------------------------- -- line 1073 ---------------------------------------- . . pub fn shift_region<'tcx>( . tcx: TyCtxt<'tcx>, . region: ty::Region<'tcx>, . amount: u32, . ) -> ty::Region<'tcx> { . match region { . ty::ReLateBound(debruijn, br) if amount > 0 => { 1,274 ( 0.00%) tcx.mk_region(ty::ReLateBound(debruijn.shifted_in(amount), *br)) . } . _ => region, . } . } . . pub fn shift_vars<'tcx, T>(tcx: TyCtxt<'tcx>, value: T, amount: u32) -> T . where . T: TypeFoldable<'tcx>, . { . debug!("shift_vars(value={:?}, amount={})", value, amount); . 1,860 ( 0.00%) value.fold_with(&mut Shifter::new(tcx, amount)) . } . . #[derive(Debug, PartialEq, Eq, Copy, Clone)] . struct FoundEscapingVars; . . /// An "escaping var" is a bound var whose binder is not part of `t`. A bound var can be a . /// bound region or a bound type. . /// -- line 1101 ---------------------------------------- -- line 1128 ---------------------------------------- . . impl<'tcx> TypeVisitor<'tcx> for HasEscapingVarsVisitor { . type BreakTy = FoundEscapingVars; . . fn visit_binder>( . &mut self, . t: &Binder<'tcx, T>, . ) -> ControlFlow { 56,296 ( 0.00%) self.outer_index.shift_in(1); . let result = t.super_visit_with(self); 79,302 ( 0.00%) self.outer_index.shift_out(1); . result . } . . #[inline] . fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow { . 
// If the outer-exclusive-binder is *strictly greater* than . // `outer_index`, that means that `t` contains some content . // bound at `outer_index` or above (because -- line 1146 ---------------------------------------- -- line 1160 ---------------------------------------- . // visited. . if r.bound_at_or_above_binder(self.outer_index) { . ControlFlow::Break(FoundEscapingVars) . } else { . ControlFlow::CONTINUE . } . } . 10 ( 0.00%) fn visit_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> ControlFlow { . // we don't have a `visit_infer_const` callback, so we have to . // hook in here to catch this case (annoying...), but . // otherwise we do want to remember to visit the rest of the . // const, as it has types/regions embedded in a lot of other . // places. 21 ( 0.00%) match ct.val { . ty::ConstKind::Bound(debruijn, _) if debruijn >= self.outer_index => { . ControlFlow::Break(FoundEscapingVars) . } 9 ( 0.00%) _ => ct.super_visit_with(self), . } 10 ( 0.00%) } . . #[inline] . fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ControlFlow { . if predicate.inner.outer_exclusive_binder > self.outer_index { . ControlFlow::Break(FoundEscapingVars) . } else { . ControlFlow::CONTINUE . } -- line 1188 ---------------------------------------- -- line 1210 ---------------------------------------- . #[instrument(level = "trace")] . fn visit_ty(&mut self, t: Ty<'_>) -> ControlFlow { . debug!( . "HasTypeFlagsVisitor: t={:?} t.flags={:?} self.flags={:?}", . t, . t.flags(), . self.flags . ); 525,882 ( 0.01%) if t.flags().intersects(self.flags) { . ControlFlow::Break(FoundFlags) . } else { . ControlFlow::CONTINUE . } . } . . #[inline] . #[instrument(skip(self), level = "trace")] . fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow { 59,014 ( 0.00%) let flags = r.type_flags(); . trace!(r.flags=?flags); . if flags.intersects(self.flags) { . ControlFlow::Break(FoundFlags) . } else { . ControlFlow::CONTINUE . } . } . . #[inline] . #[instrument(level = "trace")] . fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> ControlFlow { 2,044 ( 0.00%) let flags = FlagComputation::for_const(c); . trace!(r.flags=?flags); 8,950 ( 0.00%) if flags.intersects(self.flags) { . ControlFlow::Break(FoundFlags) . } else { . ControlFlow::CONTINUE . } . } . . #[inline] . #[instrument(level = "trace")] . fn visit_unevaluated_const(&mut self, uv: ty::Unevaluated<'tcx>) -> ControlFlow { 366 ( 0.00%) let flags = FlagComputation::for_unevaluated_const(uv); . trace!(r.flags=?flags); . if flags.intersects(self.flags) { . ControlFlow::Break(FoundFlags) . } else { . ControlFlow::CONTINUE . } . } . -- line 1260 ---------------------------------------- -- line 1285 ---------------------------------------- . /// them constraints `'a == 'b`. But if you have `<&'a u32 as . /// Trait>::Foo` and `<&'b u32 as Trait>::Foo`, normalizing those . /// types may mean that `'a` and `'b` don't appear in the results, . /// so they are not considered *constrained*. . just_constrained: bool, . } . . impl LateBoundRegionsCollector { 1,558 ( 0.00%) fn new(just_constrained: bool) -> Self { 10,906 ( 0.00%) LateBoundRegionsCollector { . current_index: ty::INNERMOST, . regions: Default::default(), . just_constrained, . } 1,558 ( 0.00%) } . } . . impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector { . fn visit_binder>( . &mut self, . t: &Binder<'tcx, T>, . ) -> ControlFlow { 8 ( 0.00%) self.current_index.shift_in(1); . let result = t.super_visit_with(self); . self.current_index.shift_out(1); . result . } . 
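Shifter, HasEscapingVarsVisitor and the shift_in/shift_out calls above are all de Bruijn-index bookkeeping: a bound variable "escapes" when its index reaches past the binders contained in the value, and shifting moves every escaping index up by a fixed amount. The following is a minimal sketch of the same idea on a toy term language; Var/Lambda/App are stand-ins, not rustc types.

#[derive(Debug, PartialEq)]
enum Term {
    Var(u32),            // de Bruijn index: 0 = innermost binder
    Lambda(Box<Term>),   // one binder
    App(Box<Term>, Box<Term>),
}

// Does the term contain a variable not bound by a binder inside it?
fn has_escaping_vars(t: &Term, depth: u32) -> bool {
    match t {
        Term::Var(i) => *i >= depth,                               // bound at or above the outer index
        Term::Lambda(body) => has_escaping_vars(body, depth + 1),  // shift_in(1)
        Term::App(f, a) => has_escaping_vars(f, depth) || has_escaping_vars(a, depth),
    }
}

// Shift every escaping variable by `amount`, leaving locally bound ones alone.
fn shift(t: &Term, depth: u32, amount: u32) -> Term {
    match t {
        Term::Var(i) if *i >= depth => Term::Var(*i + amount),
        Term::Var(i) => Term::Var(*i),
        Term::Lambda(body) => Term::Lambda(Box::new(shift(body, depth + 1, amount))),
        Term::App(f, a) => Term::App(
            Box::new(shift(f, depth, amount)),
            Box::new(shift(a, depth, amount)),
        ),
    }
}

fn main() {
    // \x. (x y) where y is free, i.e. it escapes the lambda.
    let t = Term::Lambda(Box::new(Term::App(
        Box::new(Term::Var(0)),
        Box::new(Term::Var(1)),
    )));
    assert!(has_escaping_vars(&t, 0));
    // Moving the term under one more binder: only the free variable shifts.
    let shifted = shift(&t, 0, 1);
    assert_eq!(
        shifted,
        Term::Lambda(Box::new(Term::App(
            Box::new(Term::Var(0)),
            Box::new(Term::Var(2)),
        )))
    );
}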
5,715 ( 0.00%) fn visit_ty(&mut self, t: Ty<'tcx>) -> ControlFlow { . // if we are only looking for "constrained" region, we have to . // ignore the inputs to a projection, as they may not appear . // in the normalized form 5,562 ( 0.00%) if self.just_constrained { 4,781 ( 0.00%) if let ty::Projection(..) | ty::Opaque(..) = t.kind() { . return ControlFlow::CONTINUE; . } . } . 6,411 ( 0.00%) t.super_visit_with(self) 3,812 ( 0.00%) } . . fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> ControlFlow { . // if we are only looking for "constrained" region, we have to . // ignore the inputs of an unevaluated const, as they may not appear . // in the normalized form 2 ( 0.00%) if self.just_constrained { . if let ty::ConstKind::Unevaluated(..) = c.val { . return ControlFlow::CONTINUE; . } . } . . c.super_visit_with(self) . } . . fn visit_region(&mut self, r: ty::Region<'tcx>) -> ControlFlow { 6,223 ( 0.00%) if let ty::ReLateBound(debruijn, br) = *r { 1,003 ( 0.00%) if debruijn == self.current_index { . self.regions.insert(br.kind); . } . } . ControlFlow::CONTINUE . } . } 599,037 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs -------------------------------------------------------------------------------- Ir -- line 84 ---------------------------------------- . pub struct SessionGlobals { . symbol_interner: symbol::Interner, . span_interner: Lock, . hygiene_data: Lock, . source_map: Lock>>, . } . . impl SessionGlobals { 12 ( 0.00%) pub fn new(edition: Edition) -> SessionGlobals { 50 ( 0.00%) SessionGlobals { 2 ( 0.00%) symbol_interner: symbol::Interner::fresh(), . span_interner: Lock::new(span_encoding::SpanInterner::default()), 6 ( 0.00%) hygiene_data: Lock::new(hygiene::HygieneData::new(edition)), . source_map: Lock::new(None), . } 10 ( 0.00%) } . } . . #[inline] . pub fn create_session_globals_then(edition: Edition, f: impl FnOnce() -> R) -> R { 1 ( 0.00%) assert!( . !SESSION_GLOBALS.is_set(), . "SESSION_GLOBALS should never be overwritten! \ . Use another thread if you need another SessionGlobals" . ); 3 ( 0.00%) let session_globals = SessionGlobals::new(edition); 7 ( 0.00%) SESSION_GLOBALS.set(&session_globals, f) 1 ( 0.00%) } . . #[inline] . pub fn set_session_globals_then(session_globals: &SessionGlobals, f: impl FnOnce() -> R) -> R { . assert!( . !SESSION_GLOBALS.is_set(), . "SESSION_GLOBALS should never be overwritten! \ . Use another thread if you need another SessionGlobals" . ); -- line 119 ---------------------------------------- -- line 120 ---------------------------------------- . SESSION_GLOBALS.set(session_globals, f) . } . . #[inline] . pub fn create_default_session_if_not_set_then(f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 4 ( 0.00%) create_session_if_not_set_then(edition::DEFAULT_EDITION, f) . } . . #[inline] . pub fn create_session_if_not_set_then(edition: Edition, f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 1 ( 0.00%) if !SESSION_GLOBALS.is_set() { 3 ( 0.00%) let session_globals = SessionGlobals::new(edition); 11 ( 0.00%) SESSION_GLOBALS.set(&session_globals, || SESSION_GLOBALS.with(f)) 1 ( 0.00%) } else { . SESSION_GLOBALS.with(f) . } . } . . #[inline] . pub fn with_session_globals(f: F) -> R . where . F: FnOnce(&SessionGlobals) -> R, . { 1,873,886 ( 0.03%) SESSION_GLOBALS.with(f) . } . . #[inline] . pub fn create_default_session_globals_then(f: impl FnOnce() -> R) -> R { . 
create_session_globals_then(edition::DEFAULT_EDITION, f) . } . . // If this ever becomes non thread-local, `decode_syntax_context` . // and `decode_expn_id` will need to be updated to handle concurrent . // deserialization. . scoped_tls::scoped_thread_local!(static SESSION_GLOBALS: SessionGlobals); . . // FIXME: We should use this enum or something like it to get rid of the . // use of magic `/rust/1.x/...` paths across the board. 73 ( 0.00%) #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd)] 1,060 ( 0.00%) #[derive(Decodable)] . pub enum RealFileName { . LocalPath(PathBuf), . /// For remapped paths (namely paths into libstd that have been mapped . /// to the appropriate spot on the local host's file system, and local file . /// system paths that have been remapped with `FilePathMapping`), . Remapped { . /// `local_path` is the (host-dependent) local path to the file. This is . /// None if the file was imported from another crate -- line 173 ---------------------------------------- -- line 179 ---------------------------------------- . } . . impl Hash for RealFileName { . fn hash(&self, state: &mut H) { . // To prevent #70924 from happening again we should only hash the . // remapped (virtualized) path if that exists. This is because . // virtualized paths to sysroot crates (/rust/$hash or /rust/$version) . // remain stable even if the corresponding local_path changes 3,245 ( 0.00%) self.remapped_path_if_available().hash(state) . } . } . . // This is functionally identical to #[derive(Encodable)], with the exception of . // an added assert statement . impl Encodable for RealFileName { . fn encode(&self, encoder: &mut S) -> Result<(), S::Error> { 62 ( 0.00%) encoder.emit_enum(|encoder| match *self { . RealFileName::LocalPath(ref local_path) => { 248 ( 0.00%) encoder.emit_enum_variant("LocalPath", 0, 1, |encoder| { . encoder.emit_enum_variant_arg(true, |encoder| local_path.encode(encoder))?; . Ok(()) . }) . } . . RealFileName::Remapped { ref local_path, ref virtual_name } => encoder . .emit_enum_variant("Remapped", 1, 2, |encoder| { . // For privacy and build reproducibility, we must not embed host-dependant path in artifacts -- line 205 ---------------------------------------- -- line 224 ---------------------------------------- . p.as_ref().map(PathBuf::as_path) . } . } . } . . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. 2 ( 0.00%) pub fn into_local_path(self) -> Option { 2 ( 0.00%) match self { . RealFileName::LocalPath(p) => Some(p), . RealFileName::Remapped { local_path: p, virtual_name: _ } => p, . } 3 ( 0.00%) } . . /// Returns the path suitable for embedding into build artifacts. This would still . /// be a local path if it has not been remapped. A remapped path will not correspond . /// to a valid file system path: see `local_path_if_available()` for something that . /// is more likely to return paths into the local host file system. . pub fn remapped_path_if_available(&self) -> &Path { 2,496 ( 0.00%) match self { . RealFileName::LocalPath(p) . | RealFileName::Remapped { local_path: _, virtual_name: p } => &p, . } 32 ( 0.00%) } . . /// Returns the path suitable for reading from the file system on the local host, . /// if this information exists. Otherwise returns the remapped name. . /// Avoid embedding this in build artifacts; see `remapped_path_if_available()` for that. . 
pub fn local_path_if_available(&self) -> &Path { 31 ( 0.00%) match self { . RealFileName::LocalPath(path) . | RealFileName::Remapped { local_path: None, virtual_name: path } . | RealFileName::Remapped { local_path: Some(path), virtual_name: _ } => path, . } . } . . pub fn to_string_lossy(&self, display_pref: FileNameDisplayPreference) -> Cow<'_, str> { 31 ( 0.00%) match display_pref { . FileNameDisplayPreference::Local => self.local_path_if_available().to_string_lossy(), . FileNameDisplayPreference::Remapped => { . self.remapped_path_if_available().to_string_lossy() . } . } . } . } . . /// Differentiates between real files and common virtual files. 6,761 ( 0.00%) #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)] 6,798 ( 0.00%) #[derive(Decodable, Encodable)] . pub enum FileName { . Real(RealFileName), . /// Call to `quote!`. . QuoteExpansion(u64), . /// Command line. . Anon(u64), . /// Hack in `src/librustc_ast/parse.rs`. . // FIXME(jseyfried) -- line 281 ---------------------------------------- -- line 288 ---------------------------------------- . /// Custom sources for explicit parser calls from plugins and drivers. . Custom(String), . DocTest(PathBuf, isize), . /// Post-substitution inline assembly from LLVM. . InlineAsm(u64), . } . . impl From for FileName { 217 ( 0.00%) fn from(p: PathBuf) -> Self { 62 ( 0.00%) assert!(!p.to_string_lossy().ends_with('>')); 186 ( 0.00%) FileName::Real(RealFileName::LocalPath(p)) 217 ( 0.00%) } . } . 2 ( 0.00%) #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] . pub enum FileNameDisplayPreference { . Remapped, . Local, . } . . pub struct FileNameDisplay<'a> { . inner: &'a FileName, . display_pref: FileNameDisplayPreference, . } . . impl fmt::Display for FileNameDisplay<'_> { 124 ( 0.00%) fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { . use FileName::*; 186 ( 0.00%) match *self.inner { . Real(ref name) => { 186 ( 0.00%) write!(fmt, "{}", name.to_string_lossy(self.display_pref)) . } . QuoteExpansion(_) => write!(fmt, ""), . MacroExpansion(_) => write!(fmt, ""), . Anon(_) => write!(fmt, ""), . ProcMacroSourceCode(_) => write!(fmt, ""), . CfgSpec(_) => write!(fmt, ""), . CliCrateAttr(_) => write!(fmt, ""), . Custom(ref s) => write!(fmt, "<{}>", s), . DocTest(ref path, _) => write!(fmt, "{}", path.display()), . InlineAsm(_) => write!(fmt, ""), . } 155 ( 0.00%) } . } . . impl FileNameDisplay<'_> { . pub fn to_string_lossy(&self) -> Cow<'_, str> { . match self.inner { . FileName::Real(ref inner) => inner.to_string_lossy(self.display_pref), . _ => Cow::from(format!("{}", self)), . } . } . } . . impl FileName { . pub fn is_real(&self) -> bool { . use FileName::*; 1,114 ( 0.00%) match *self { . Real(_) => true, . Anon(_) . | MacroExpansion(_) . | ProcMacroSourceCode(_) . | CfgSpec(_) . | CliCrateAttr(_) . | Custom(_) . | QuoteExpansion(_) -- line 353 ---------------------------------------- -- line 357 ---------------------------------------- . } . . pub fn prefer_remapped(&self) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Remapped } . } . . // This may include transient local filesystem information. . // Must not be embedded in build outputs. 31 ( 0.00%) pub fn prefer_local(&self) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref: FileNameDisplayPreference::Local } 62 ( 0.00%) } . . pub fn display(&self, display_pref: FileNameDisplayPreference) -> FileNameDisplay<'_> { . FileNameDisplay { inner: self, display_pref } . } . . 
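As the Hash impl further up notes, only the remapped (virtualized) path is hashed or embedded in artifacts, while the local path is what the compiler actually reads from disk; this keeps build outputs reproducible across machines. Below is a simplified standalone sketch of that split; the names loosely mirror RealFileName but are not the real definitions.

use std::path::{Path, PathBuf};

enum FileName {
    LocalPath(PathBuf),
    Remapped { local_path: Option<PathBuf>, virtual_name: PathBuf },
}

impl FileName {
    // What goes into build artifacts and stable hashes: stable across hosts.
    fn remapped_path_if_available(&self) -> &Path {
        match self {
            FileName::LocalPath(p)
            | FileName::Remapped { virtual_name: p, .. } => p,
        }
    }

    // What we try to open on the local host, if we still know it.
    fn local_path_if_available(&self) -> &Path {
        match self {
            FileName::LocalPath(p)
            | FileName::Remapped { local_path: Some(p), .. }
            | FileName::Remapped { local_path: None, virtual_name: p } => p,
        }
    }
}

fn main() {
    let f = FileName::Remapped {
        local_path: Some(PathBuf::from("/home/user/rust/library/core/src/lib.rs")),
        virtual_name: PathBuf::from("/rustc/abc123/library/core/src/lib.rs"),
    };
    assert_eq!(
        f.remapped_path_if_available(),
        Path::new("/rustc/abc123/library/core/src/lib.rs")
    );
    assert_eq!(
        f.local_path_if_available(),
        Path::new("/home/user/rust/library/core/src/lib.rs")
    );
}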
pub fn macro_expansion_source_code(src: &str) -> FileName { . let mut hasher = StableHasher::new(); . src.hash(&mut hasher); -- line 375 ---------------------------------------- -- line 423 ---------------------------------------- . /// that the length of the span is equal to `span.hi - span.lo`; there may be space in the . /// [`BytePos`] range between files. . /// . /// `SpanData` is public because `Span` uses a thread-local interner and can't be . /// sent to other threads, but some pieces of performance infra run in a separate thread. . /// Using `Span` is generally preferred. . #[derive(Clone, Copy, Hash, PartialEq, Eq)] . pub struct SpanData { 13 ( 0.00%) pub lo: BytePos, 13 ( 0.00%) pub hi: BytePos, . /// Information about where the macro came from, if this piece of . /// code was created by a macro expansion. 39 ( 0.00%) pub ctxt: SyntaxContext, 13 ( 0.00%) pub parent: Option, . } . . // Order spans by position in the file. . impl Ord for SpanData { . fn cmp(&self, other: &Self) -> Ordering { . let SpanData { . lo: s_lo, . hi: s_hi, -- line 444 ---------------------------------------- -- line 485 ---------------------------------------- . } . #[inline] . pub fn with_parent(&self, parent: Option) -> Span { . Span::new(self.lo, self.hi, self.ctxt, parent) . } . /// Returns `true` if this is a dummy span with any hygienic context. . #[inline] . pub fn is_dummy(self) -> bool { 576,510 ( 0.01%) self.lo.0 == 0 && self.hi.0 == 0 . } . /// Returns `true` if `self` fully encloses `other`. . pub fn contains(self, other: Self) -> bool { 10,011 ( 0.00%) self.lo <= other.lo && other.hi <= self.hi . } . } . . // The interner is pointed to by a thread local value which is only set on the main thread . // with parallelization is disabled. So we don't allow `Span` to transfer between threads . // to avoid panics and other errors, even though it would be memory safe to do so. . #[cfg(not(parallel_compiler))] . impl !Send for Span {} . #[cfg(not(parallel_compiler))] . impl !Sync for Span {} . . impl PartialOrd for Span { 119,560 ( 0.00%) fn partial_cmp(&self, rhs: &Self) -> Option { 89,670 ( 0.00%) PartialOrd::partial_cmp(&self.data(), &rhs.data()) 119,560 ( 0.00%) } . } . impl Ord for Span { . fn cmp(&self, rhs: &Self) -> Ordering { . Ord::cmp(&self.data(), &rhs.data()) . } . } . . /// A collection of `Span`s. -- line 520 ---------------------------------------- -- line 532 ---------------------------------------- . } . . impl Span { . #[inline] . pub fn lo(self) -> BytePos { . self.data().lo . } . #[inline] 273,168 ( 0.00%) pub fn with_lo(self, lo: BytePos) -> Span { . self.data().with_lo(lo) 182,112 ( 0.00%) } . #[inline] 21,632 ( 0.00%) pub fn hi(self) -> BytePos { . self.data().hi 21,632 ( 0.00%) } . #[inline] 88,353 ( 0.00%) pub fn with_hi(self, hi: BytePos) -> Span { . self.data().with_hi(hi) 58,902 ( 0.00%) } . #[inline] . pub fn ctxt(self) -> SyntaxContext { . self.data_untracked().ctxt . } . #[inline] 19,945 ( 0.00%) pub fn with_ctxt(self, ctxt: SyntaxContext) -> Span { . self.data_untracked().with_ctxt(ctxt) 11,967 ( 0.00%) } . #[inline] . pub fn parent(self) -> Option { . self.data().parent . } . #[inline] 135 ( 0.00%) pub fn with_parent(self, ctxt: Option) -> Span { . self.data().with_parent(ctxt) 90 ( 0.00%) } . . /// Returns `true` if this is a dummy span with any hygienic context. . #[inline] . pub fn is_dummy(self) -> bool { . self.data_untracked().is_dummy() . } . . /// Returns `true` if this span comes from a macro or desugaring. . 
#[inline] 18 ( 0.00%) pub fn from_expansion(self) -> bool { . self.ctxt() != SyntaxContext::root() 12 ( 0.00%) } . . /// Returns `true` if `span` originates in a derive-macro's expansion. . pub fn in_derive_expansion(self) -> bool { . matches!(self.ctxt().outer_expn_data().kind, ExpnKind::Macro(MacroKind::Derive, _)) . } . . /// Gate suggestions that would not be appropriate in a context the user didn't write. . pub fn can_be_used_for_suggestions(self) -> bool { -- line 586 ---------------------------------------- -- line 600 ---------------------------------------- . /// Returns a new span representing an empty span at the beginning of this span. . #[inline] . pub fn shrink_to_lo(self) -> Span { . let span = self.data_untracked(); . span.with_hi(span.lo) . } . /// Returns a new span representing an empty span at the end of this span. . #[inline] 507 ( 0.00%) pub fn shrink_to_hi(self) -> Span { . let span = self.data_untracked(); . span.with_lo(span.hi) 338 ( 0.00%) } . . #[inline] . /// Returns `true` if `hi == lo`. . pub fn is_empty(self) -> bool { . let span = self.data_untracked(); . span.hi == span.lo . } . . /// Returns `self` if `self` is not the dummy span, and `other` otherwise. . pub fn substitute_dummy(self, other: Span) -> Span { . if self.is_dummy() { other } else { self } . } . . /// Returns `true` if `self` fully encloses `other`. 30,033 ( 0.00%) pub fn contains(self, other: Span) -> bool { . let span = self.data(); . let other = other.data(); . span.contains(other) 20,022 ( 0.00%) } . . /// Returns `true` if `self` touches `other`. . pub fn overlaps(self, other: Span) -> bool { . let span = self.data(); . let other = other.data(); . span.lo < other.hi && other.lo < span.hi . } . -- line 638 ---------------------------------------- -- line 663 ---------------------------------------- . /// The `Span` for the tokens in the previous macro expansion from which `self` was generated, . /// if any. . pub fn parent_callsite(self) -> Option { . let expn_data = self.ctxt().outer_expn_data(); . if !expn_data.is_root() { Some(expn_data.call_site) } else { None } . } . . /// Walk down the expansion ancestors to find a span that's contained within `outer`. 25,500 ( 0.00%) pub fn find_ancestor_inside(mut self, outer: Span) -> Option { 12,750 ( 0.00%) while !outer.contains(self) { . self = self.parent_callsite()?; . } . Some(self) 28,050 ( 0.00%) } . . /// Edition of the crate from which this span came. 54,960 ( 0.00%) pub fn edition(self) -> edition::Edition { . self.ctxt().edition() 36,640 ( 0.00%) } . . #[inline] . pub fn rust_2015(self) -> bool { 11,257 ( 0.00%) self.edition() == edition::Edition::Edition2015 . } . . #[inline] . pub fn rust_2018(self) -> bool { 6,272 ( 0.00%) self.edition() >= edition::Edition::Edition2018 . } . . #[inline] . pub fn rust_2021(self) -> bool { 9,102 ( 0.00%) self.edition() >= edition::Edition::Edition2021 . } . . /// Returns the source callee. . /// . /// Returns `None` if the supplied span has no expansion trace, . /// else returns the `ExpnData` for the macro definition . /// corresponding to the source callsite. . pub fn source_callee(self) -> Option { -- line 703 ---------------------------------------- -- line 707 ---------------------------------------- . } . let expn_data = self.ctxt().outer_expn_data(); . if !expn_data.is_root() { Some(source_callee(expn_data)) } else { None } . } . . /// Checks if a span is "internal" to a macro in which `#[unstable]` . /// items can be used (that is, a macro marked with . /// `#[allow_internal_unstable]`). 
2,580 ( 0.00%) pub fn allows_unstable(self, feature: Symbol) -> bool { 516 ( 0.00%) self.ctxt() . .outer_expn_data() . .allow_internal_unstable . .map_or(false, |features| features.iter().any(|&f| f == feature)) 2,064 ( 0.00%) } . . /// Checks if this span arises from a compiler desugaring of kind `kind`. 73,073 ( 0.00%) pub fn is_desugaring(self, kind: DesugaringKind) -> bool { 62,634 ( 0.00%) match self.ctxt().outer_expn_data().kind { . ExpnKind::Desugaring(k) => k == kind, . _ => false, . } 52,195 ( 0.00%) } . . /// Returns the compiler desugaring that created this span, or `None` . /// if this span is not from a desugaring. 315 ( 0.00%) pub fn desugaring_kind(self) -> Option { 378 ( 0.00%) match self.ctxt().outer_expn_data().kind { . ExpnKind::Desugaring(k) => Some(k), . _ => None, . } 252 ( 0.00%) } . . /// Checks if a span is "internal" to a macro in which `unsafe` . /// can be used without triggering the `unsafe_code` lint. . // (that is, a macro marked with `#[allow_internal_unsafe]`). 4 ( 0.00%) pub fn allows_unsafe(self) -> bool { 1 ( 0.00%) self.ctxt().outer_expn_data().allow_internal_unsafe 4 ( 0.00%) } . . pub fn macro_backtrace(mut self) -> impl Iterator { . let mut prev_span = DUMMY_SP; . std::iter::from_fn(move || { . loop { . let expn_data = self.ctxt().outer_expn_data(); . if expn_data.is_root() { . return None; -- line 752 ---------------------------------------- -- line 767 ---------------------------------------- . . /// Returns a `Span` that would enclose both `self` and `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^^^^^^^^ . /// ``` 818,840 ( 0.01%) pub fn to(self, end: Span) -> Span { . let span_data = self.data(); . let end_data = end.data(); . // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). . // Return the macro span on its own to avoid weird diagnostic output. It is preferable to . // have an incomplete span than a completely nonsensical one. 143,217 ( 0.00%) if span_data.ctxt != end_data.ctxt { 5,662 ( 0.00%) if span_data.ctxt == SyntaxContext::root() { . return end; 4,303 ( 0.00%) } else if end_data.ctxt == SyntaxContext::root() { . return self; . } . // Both spans fall within a macro. . // FIXME(estebank): check if it is the *same* macro. . } . Span::new( . cmp::min(span_data.lo, end_data.lo), . cmp::max(span_data.hi, end_data.hi), . if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, 481,754 ( 0.01%) if span_data.parent == end_data.parent { span_data.parent } else { None }, . ) 669,960 ( 0.01%) } . . /// Returns a `Span` between the end of `self` to the beginning of `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^ . /// ``` 1,716 ( 0.00%) pub fn between(self, end: Span) -> Span { . let span = self.data(); . let end = end.data(); . Span::new( . span.hi, . end.lo, . if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt }, 1,092 ( 0.00%) if span.parent == end.parent { span.parent } else { None }, . ) 1,248 ( 0.00%) } . . /// Returns a `Span` from the beginning of `self` until the beginning of `end`. . /// . /// ```text . /// ____ ___ . /// self lorem ipsum end . /// ^^^^^^^^^^^^^^^^^ . /// ``` 3,586 ( 0.00%) pub fn until(self, end: Span) -> Span { . // Most of this function's body is copied from `to`. . // We can't just do `self.to(end.shrink_to_lo())`, . // because to also does some magic where it uses min/max so . // it can handle overlapping spans. Some advanced mis-use of . 
// `until` with different ctxts makes this visible. . let span_data = self.data(); . let end_data = end.data(); . // FIXME(jseyfried): `self.ctxt` should always equal `end.ctxt` here (cf. issue #23480). . // Return the macro span on its own to avoid weird diagnostic output. It is preferable to . // have an incomplete span than a completely nonsensical one. 652 ( 0.00%) if span_data.ctxt != end_data.ctxt { . if span_data.ctxt == SyntaxContext::root() { . return end; . } else if end_data.ctxt == SyntaxContext::root() { . return self; . } . // Both spans fall within a macro. . // FIXME(estebank): check if it is the *same* macro. . } . Span::new( . span_data.lo, . end_data.lo, . if end_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt }, 2,282 ( 0.00%) if span_data.parent == end_data.parent { span_data.parent } else { None }, . ) 2,934 ( 0.00%) } . 462 ( 0.00%) pub fn from_inner(self, inner: InnerSpan) -> Span { . let span = self.data(); . Span::new( . span.lo + BytePos::from_usize(inner.start), . span.lo + BytePos::from_usize(inner.end), . span.ctxt, . span.parent, . ) 294 ( 0.00%) } . . /// Equivalent of `Span::def_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_def_site_ctxt(self, expn_id: ExpnId) -> Span { 7,968 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::Opaque) . } . . /// Equivalent of `Span::call_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_call_site_ctxt(self, expn_id: ExpnId) -> Span { 28 ( 0.00%) self.with_ctxt_from_mark(expn_id, Transparency::Transparent) . } . . /// Equivalent of `Span::mixed_site` from the proc macro API, . /// except that the location is taken from the `self` span. . pub fn with_mixed_site_ctxt(self, expn_id: ExpnId) -> Span { . self.with_ctxt_from_mark(expn_id, Transparency::SemiTransparent) . } . . /// Produces a span with the same location as `self` and context produced by a macro with the . /// given ID and transparency, assuming that macro was defined directly and not produced by . /// some other macro (which is the case for built-in and procedural macros). 47,976 ( 0.00%) pub fn with_ctxt_from_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { . self.with_ctxt(SyntaxContext::root().apply_mark(expn_id, transparency)) 27,986 ( 0.00%) } . . #[inline] . pub fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> Span { . let span = self.data(); 16,686 ( 0.00%) span.with_ctxt(span.ctxt.apply_mark(expn_id, transparency)) . } . . #[inline] . pub fn remove_mark(&mut self) -> ExpnId { 32 ( 0.00%) let mut span = self.data(); 32 ( 0.00%) let mark = span.ctxt.remove_mark(); 128 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] . pub fn adjust(&mut self, expn_id: ExpnId) -> Option { . let mut span = self.data(); . let mark = span.ctxt.adjust(expn_id); . *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] 214,749 ( 0.00%) pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option { 143,166 ( 0.00%) let mut span = self.data(); 118,347 ( 0.00%) let mark = span.ctxt.normalize_to_macros_2_0_and_adjust(expn_id); 214,749 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark 237,652 ( 0.00%) } . . #[inline] . 
pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option> { 24,968 ( 0.00%) let mut span = self.data(); 37,452 ( 0.00%) let mark = span.ctxt.glob_adjust(expn_id, glob_span); 99,872 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark . } . . #[inline] 3,411 ( 0.00%) pub fn reverse_glob_adjust( . &mut self, . expn_id: ExpnId, . glob_span: Span, . ) -> Option> { 2,758 ( 0.00%) let mut span = self.data(); 2,863 ( 0.00%) let mark = span.ctxt.reverse_glob_adjust(expn_id, glob_span); 5,347 ( 0.00%) *self = Span::new(span.lo, span.hi, span.ctxt, span.parent); . mark 3,790 ( 0.00%) } . . #[inline] 171,608 ( 0.00%) pub fn normalize_to_macros_2_0(self) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.normalize_to_macros_2_0()) 150,157 ( 0.00%) } . . #[inline] . pub fn normalize_to_macro_rules(self) -> Span { . let span = self.data(); . span.with_ctxt(span.ctxt.normalize_to_macro_rules()) . } . } . -- line 948 ---------------------------------------- -- line 958 ---------------------------------------- . . /// What label should we attach to this span (if any)? . pub label: Option, . } . . impl Default for Span { . fn default() -> Self { . DUMMY_SP 2 ( 0.00%) } . } . . impl Encodable for Span { . default fn encode(&self, s: &mut E) -> Result<(), E::Error> { . let span = self.data(); . s.emit_struct(false, |s| { . s.emit_struct_field("lo", true, |s| span.lo.encode(s))?; . s.emit_struct_field("hi", false, |s| span.hi.encode(s)) -- line 974 ---------------------------------------- -- line 990 ---------------------------------------- . /// any spans that are debug-printed during the closure's execution. . /// . /// Normally, the global `TyCtxt` is used to retrieve the `SourceMap` . /// (see `rustc_interface::callbacks::span_debug1`). However, some parts . /// of the compiler (e.g. `rustc_parse`) may debug-print `Span`s before . /// a `TyCtxt` is available. In this case, we fall back to . /// the `SourceMap` provided to this function. If that is not available, . /// we fall back to printing the raw `Span` field values. 9 ( 0.00%) pub fn with_source_map T>(source_map: Lrc, f: F) -> T { . with_session_globals(|session_globals| { 2 ( 0.00%) *session_globals.source_map.borrow_mut() = Some(source_map); . }); . struct ClearSourceMap; . impl Drop for ClearSourceMap { . fn drop(&mut self) { . with_session_globals(|session_globals| { 1 ( 0.00%) session_globals.source_map.borrow_mut().take(); . }); . } . } . . let _guard = ClearSourceMap; 4 ( 0.00%) f() 8 ( 0.00%) } . . pub fn debug_with_source_map( . span: Span, . f: &mut fmt::Formatter<'_>, . source_map: &SourceMap, . ) -> fmt::Result { . write!(f, "{} ({:?})", source_map.span_to_diagnostic_string(span), span.ctxt()) . } -- line 1021 ---------------------------------------- -- line 1048 ---------------------------------------- . . impl MultiSpan { . #[inline] . pub fn new() -> MultiSpan { . MultiSpan { primary_spans: vec![], span_labels: vec![] } . } . . pub fn from_span(primary_span: Span) -> MultiSpan { 10,120 ( 0.00%) MultiSpan { primary_spans: vec![primary_span], span_labels: vec![] } . } . . pub fn from_spans(mut vec: Vec) -> MultiSpan { . vec.sort(); 315 ( 0.00%) MultiSpan { primary_spans: vec, span_labels: vec![] } . } . . pub fn push_span_label(&mut self, span: Span, label: String) { . self.span_labels.push((span, label)); . } . . /// Selects the first primary span (if any). . pub fn primary_span(&self) -> Option { . self.primary_spans.first().cloned() 452 ( 0.00%) } . . /// Returns all primary spans. 
. pub fn primary_spans(&self) -> &[Span] { . &self.primary_spans . } . . /// Returns `true` if any of the primary spans are displayable. . pub fn has_primary_spans(&self) -> bool { -- line 1079 ---------------------------------------- -- line 1139 ---------------------------------------- . . /// Returns `true` if any of the span labels is displayable. . pub fn has_span_labels(&self) -> bool { . self.span_labels.iter().any(|(sp, _)| !sp.is_dummy()) . } . } . . impl From for MultiSpan { 10,120 ( 0.00%) fn from(span: Span) -> MultiSpan { . MultiSpan::from_span(span) 10,120 ( 0.00%) } . } . . impl From> for MultiSpan { 315 ( 0.00%) fn from(spans: Vec) -> MultiSpan { 252 ( 0.00%) MultiSpan::from_spans(spans) 315 ( 0.00%) } . } . . /// Identifies an offset of a multi-byte character in a `SourceFile`. . #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub struct MultiByteChar { . /// The absolute offset of the character in the `SourceMap`. . pub pos: BytePos, . /// The number of bytes, `>= 2`. . pub bytes: u8, . } . . /// Identifies an offset of a non-narrow character in a `SourceFile`. 312 ( 0.00%) #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub enum NonNarrowChar { . /// Represents a zero-width character. . ZeroWidth(BytePos), . /// Represents a wide (full-width) character. . Wide(BytePos), . /// Represents a tab character, represented visually with a width of 4 characters. . Tab(BytePos), . } -- line 1176 ---------------------------------------- -- line 1201 ---------------------------------------- . } . } . } . . impl Add for NonNarrowChar { . type Output = Self; . . fn add(self, rhs: BytePos) -> Self { 540 ( 0.00%) match self { . NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos + rhs), . NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos + rhs), . NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos + rhs), . } . } . } . . impl Sub for NonNarrowChar { . type Output = Self; . 156 ( 0.00%) fn sub(self, rhs: BytePos) -> Self { 540 ( 0.00%) match self { . NonNarrowChar::ZeroWidth(pos) => NonNarrowChar::ZeroWidth(pos - rhs), . NonNarrowChar::Wide(pos) => NonNarrowChar::Wide(pos - rhs), . NonNarrowChar::Tab(pos) => NonNarrowChar::Tab(pos - rhs), . } 468 ( 0.00%) } . } . . /// Identifies an offset of a character that was normalized away from `SourceFile`. . #[derive(Copy, Clone, Encodable, Decodable, Eq, PartialEq, Debug)] . pub struct NormalizedPos { . /// The absolute offset of the character in the `SourceMap`. . pub pos: BytePos, . /// The difference between original and normalized string at position. . pub diff: u32, . } . 93 ( 0.00%) #[derive(PartialEq, Eq, Clone, Debug)] . pub enum ExternalSource { . /// No external source has to be loaded, since the `SourceFile` represents a local crate. . Unneeded, . Foreign { . kind: ExternalSourceKind, . /// This SourceFile's byte-offset within the source_map of its original crate. . original_start_pos: BytePos, . /// The end of this SourceFile within the source_map of its original crate. -- line 1246 ---------------------------------------- -- line 1257 ---------------------------------------- . AbsentOk, . /// A failed attempt has been made to load the external source. . AbsentErr, . Unneeded, . } . . impl ExternalSource { . pub fn get_source(&self) -> Option<&Lrc> { 708 ( 0.00%) match self { . ExternalSource::Foreign { kind: ExternalSourceKind::Present(ref src), .. } => Some(src), . _ => None, . } . } . } . . #[derive(Debug)] . pub struct OffsetOverflowError; . 
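NonNarrowChar records the only characters whose display width is not 1 (zero-width, wide, and tab characters), so that column numbers can later be fixed up into display columns. A rough standalone sketch of that fix-up follows; the width rules here are deliberately crude, and rustc uses proper Unicode width data rather than a hard-coded range.

// Display width of a single character under the simplified rules above.
fn display_width(c: char) -> usize {
    match c {
        '\t' => 4,                        // tabs are rendered as 4 columns
        '\u{200B}' | '\u{FEFF}' => 0,     // examples of zero-width characters
        // Crude stand-in for "wide" (full-width) detection.
        c if ('\u{1100}'..='\u{9FFF}').contains(&c) => 2,
        _ => 1,
    }
}

// Display column of the character at char-offset `col` within `line`.
fn col_display(line: &str, col: usize) -> usize {
    line.chars().take(col).map(display_width).sum()
}

fn main() {
    // A tab, some ASCII, then a full-width character.
    let line = "\tfn 宽()";
    // The full-width char is at char offset 4: tab(4) + 'f'(1) + 'n'(1) + ' '(1) = 7.
    assert_eq!(col_display(line, 4), 7);
}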
1,074 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Encodable, Decodable)] . pub enum SourceFileHashAlgorithm { . Md5, . Sha1, . Sha256, . } . . impl FromStr for SourceFileHashAlgorithm { . type Err = (); -- line 1283 ---------------------------------------- -- line 1290 ---------------------------------------- . _ => Err(()), . } . } . } . . rustc_data_structures::impl_stable_hash_via_hash!(SourceFileHashAlgorithm); . . /// The hash of the on-disk source file used for debug info. 186 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Debug)] 2,148 ( 0.00%) #[derive(HashStable_Generic, Encodable, Decodable)] . pub struct SourceFileHash { . pub kind: SourceFileHashAlgorithm, . value: [u8; 32], . } . . impl SourceFileHash { . pub fn new(kind: SourceFileHashAlgorithm, src: &str) -> SourceFileHash { . let mut hash = SourceFileHash { kind, value: Default::default() }; . let len = hash.hash_len(); . let value = &mut hash.value[..len]; . let data = src.as_bytes(); . match kind { . SourceFileHashAlgorithm::Md5 => { 93 ( 0.00%) value.copy_from_slice(&Md5::digest(data)); . } . SourceFileHashAlgorithm::Sha1 => { . value.copy_from_slice(&Sha1::digest(data)); . } . SourceFileHashAlgorithm::Sha256 => { . value.copy_from_slice(&Sha256::digest(data)); . } . } -- line 1321 ---------------------------------------- -- line 1329 ---------------------------------------- . . /// The bytes of the hash. . pub fn hash_bytes(&self) -> &[u8] { . let len = self.hash_len(); . &self.value[..len] . } . . fn hash_len(&self) -> usize { 93 ( 0.00%) match self.kind { . SourceFileHashAlgorithm::Md5 => 16, . SourceFileHashAlgorithm::Sha1 => 20, . SourceFileHashAlgorithm::Sha256 => 32, . } . } . } . . /// A single source in the [`SourceMap`]. 1,798 ( 0.00%) #[derive(Clone)] . pub struct SourceFile { . /// The name of the file that the source came from. Source that doesn't . /// originate from files has names between angle brackets by convention . /// (e.g., ``). . pub name: FileName, . /// The complete source code. 31 ( 0.00%) pub src: Option>, . /// The source code's hash. . pub src_hash: SourceFileHash, . /// The external source code (used for external crates, which will have a `None` . /// value as `self.src`. . pub external_src: Lock, . /// The start position of this source in the `SourceMap`. . pub start_pos: BytePos, . /// The end position of this source in the `SourceMap`. -- line 1361 ---------------------------------------- -- line 1364 ---------------------------------------- . pub lines: Vec, . /// Locations of multi-byte characters in the source code. . pub multibyte_chars: Vec, . /// Width of characters that are not narrow in the source code. . pub non_narrow_chars: Vec, . /// Locations of characters removed during normalization. . pub normalized_pos: Vec, . /// A hash of the filename, used for speeding up hashing in incremental compilation. 31 ( 0.00%) pub name_hash: u128, . /// Indicates which crate this `SourceFile` was imported from. 31 ( 0.00%) pub cnum: CrateNum, . } . . impl Encodable for SourceFile { . fn encode(&self, s: &mut S) -> Result<(), S::Error> { . s.emit_struct(false, |s| { . s.emit_struct_field("name", true, |s| self.name.encode(s))?; . s.emit_struct_field("src_hash", false, |s| self.src_hash.encode(s))?; . s.emit_struct_field("start_pos", false, |s| self.start_pos.encode(s))?; . s.emit_struct_field("end_pos", false, |s| self.end_pos.encode(s))?; . s.emit_struct_field("lines", false, |s| { . let lines = &self.lines[..]; . // Store the length. . 
s.emit_u32(lines.len() as u32)?; . 124 ( 0.00%) if !lines.is_empty() { . // In order to preserve some space, we exploit the fact that . // the lines list is sorted and individual lines are . // probably not that long. Because of that we can store lines . // as a difference list, using as little space as possible . // for the differences. . let max_line_length = if lines.len() == 1 { . 0 . } else { -- line 1397 ---------------------------------------- -- line 1399 ---------------------------------------- . .array_windows() . .map(|&[fst, snd]| snd - fst) . .map(|bp| bp.to_usize()) . .max() . .unwrap() . }; . . let bytes_per_diff: u8 = match max_line_length { 62 ( 0.00%) 0..=0xFF => 1, . 0x100..=0xFFFF => 2, . _ => 4, . }; . . // Encode the number of bytes used per diff. . bytes_per_diff.encode(s)?; . . // Encode the first element. . lines[0].encode(s)?; . . let diff_iter = lines.array_windows().map(|&[fst, snd]| snd - fst); . 62 ( 0.00%) match bytes_per_diff { . 1 => { . for diff in diff_iter { . (diff.0 as u8).encode(s)? . } . } . 2 => { . for diff in diff_iter { . (diff.0 as u16).encode(s)? -- line 1428 ---------------------------------------- -- line 1436 ---------------------------------------- . _ => unreachable!(), . } . } . . Ok(()) . })?; . s.emit_struct_field("multibyte_chars", false, |s| self.multibyte_chars.encode(s))?; . s.emit_struct_field("non_narrow_chars", false, |s| self.non_narrow_chars.encode(s))?; 93 ( 0.00%) s.emit_struct_field("name_hash", false, |s| self.name_hash.encode(s))?; . s.emit_struct_field("normalized_pos", false, |s| self.normalized_pos.encode(s))?; 341 ( 0.00%) s.emit_struct_field("cnum", false, |s| self.cnum.encode(s)) . }) . } . } . . impl Decodable for SourceFile { 4,833 ( 0.00%) fn decode(d: &mut D) -> SourceFile { . d.read_struct(|d| { . let name: FileName = d.read_struct_field("name", |d| Decodable::decode(d)); . let src_hash: SourceFileHash = . d.read_struct_field("src_hash", |d| Decodable::decode(d)); . let start_pos: BytePos = d.read_struct_field("start_pos", |d| Decodable::decode(d)); . let end_pos: BytePos = d.read_struct_field("end_pos", |d| Decodable::decode(d)); . let lines: Vec = d.read_struct_field("lines", |d| { . let num_lines: u32 = Decodable::decode(d); 537 ( 0.00%) let mut lines = Vec::with_capacity(num_lines as usize); . . if num_lines > 0 { . // Read the number of bytes used per diff. . let bytes_per_diff: u8 = Decodable::decode(d); . . // Read the first element. . let mut line_start: BytePos = Decodable::decode(d); . lines.push(line_start); . . for _ in 1..num_lines { 890,610 ( 0.01%) let diff = match bytes_per_diff { . 1 => d.read_u8() as u32, . 2 => d.read_u16() as u32, . 4 => d.read_u32(), . _ => unreachable!(), . }; . . line_start = line_start + BytePos(diff); . -- line 1480 ---------------------------------------- -- line 1483 ---------------------------------------- . } . . lines . }); . let multibyte_chars: Vec = . d.read_struct_field("multibyte_chars", |d| Decodable::decode(d)); . let non_narrow_chars: Vec = . d.read_struct_field("non_narrow_chars", |d| Decodable::decode(d)); 1,074 ( 0.00%) let name_hash: u128 = d.read_struct_field("name_hash", |d| Decodable::decode(d)); . let normalized_pos: Vec = . d.read_struct_field("normalized_pos", |d| Decodable::decode(d)); . let cnum: CrateNum = d.read_struct_field("cnum", |d| Decodable::decode(d)); 5,907 ( 0.00%) SourceFile { 4,296 ( 0.00%) name, . start_pos, . end_pos, . src: None, 2,148 ( 0.00%) src_hash, . // Unused - the metadata decoder will construct . 
// a new SourceFile, filling in `external_src` properly . external_src: Lock::new(ExternalSource::Unneeded), 2,148 ( 0.00%) lines, 2,148 ( 0.00%) multibyte_chars, 2,148 ( 0.00%) non_narrow_chars, 2,148 ( 0.00%) normalized_pos, . name_hash, . cnum, . } . }) 4,833 ( 0.00%) } . } . . impl fmt::Debug for SourceFile { . fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(fmt, "SourceFile({:?})", self.name) . } . } . . impl SourceFile { 341 ( 0.00%) pub fn new( . name: FileName, . mut src: String, . start_pos: BytePos, . hash_kind: SourceFileHashAlgorithm, . ) -> Self { . // Compute the file hash before any normalization. . let src_hash = SourceFileHash::new(hash_kind, &src); 93 ( 0.00%) let normalized_pos = normalize_src(&mut src, start_pos); . . let name_hash = { . let mut hasher: StableHasher = StableHasher::new(); 62 ( 0.00%) name.hash(&mut hasher); . hasher.finish::() . }; 62 ( 0.00%) let end_pos = start_pos.to_usize() + src.len(); 93 ( 0.00%) assert!(end_pos <= u32::MAX as usize); . 372 ( 0.00%) let (lines, multibyte_chars, non_narrow_chars) = 62 ( 0.00%) analyze_source_file::analyze_source_file(&src, start_pos); . 527 ( 0.00%) SourceFile { . name, . src: Some(Lrc::new(src)), 341 ( 0.00%) src_hash, . external_src: Lock::new(ExternalSource::Unneeded), . start_pos, . end_pos: Pos::from_usize(end_pos), 124 ( 0.00%) lines, 124 ( 0.00%) multibyte_chars, 124 ( 0.00%) non_narrow_chars, 124 ( 0.00%) normalized_pos, . name_hash, . cnum: LOCAL_CRATE, . } 279 ( 0.00%) } . . /// Returns the `BytePos` of the beginning of the current line. . pub fn line_begin_pos(&self, pos: BytePos) -> BytePos { . let line_index = self.lookup_line(pos).unwrap(); . self.lines[line_index] . } . . /// Add externally loaded source. . /// If the hash of the input doesn't match or no input is supplied via None, . /// it is interpreted as an error and the corresponding enum variant is set. . /// The return value signifies whether some kind of source is present. 13,188 ( 0.00%) pub fn add_external_src(&self, get_src: F) -> bool . where . F: FnOnce() -> Option, . { 1,884 ( 0.00%) if matches!( 3,768 ( 0.00%) *self.external_src.borrow(), . ExternalSource::Foreign { kind: ExternalSourceKind::AbsentOk, .. } . ) { . let src = get_src(); . let mut external_src = self.external_src.borrow_mut(); . // Check that no-one else have provided the source while we were getting it . if let ExternalSource::Foreign { . kind: src_kind @ ExternalSourceKind::AbsentOk, .. . } = &mut *external_src -- line 1582 ---------------------------------------- -- line 1592 ---------------------------------------- . *src_kind = ExternalSourceKind::AbsentErr; . } . . false . } else { . self.src.is_some() || external_src.get_source().is_some() . } . } else { 1,884 ( 0.00%) self.src.is_some() || self.external_src.borrow().get_source().is_some() . } 16,956 ( 0.00%) } . . /// Gets a line from the list of pre-computed line-beginnings. . /// The line number here is 0-based. . pub fn get_line(&self, line_number: usize) -> Option> { . fn get_until_newline(src: &str, begin: usize) -> &str { . // We can't use `lines.get(line_number+1)` because we might . // be parsing when we call this function and thus the current . // line is the last one we have line info for. -- line 1610 ---------------------------------------- -- line 1627 ---------------------------------------- . Some(Cow::Owned(String::from(get_until_newline(src, begin)))) . } else { . None . } . } . . pub fn is_real_file(&self) -> bool { . self.name.is_real() 557 ( 0.00%) } . . 
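SourceFile stores the byte offset at which every line starts (computed once by analyze_source_file), and lookup_line/line_bounds below answer position queries with a binary search over that table. Here is a standalone sketch of the technique using plain usize offsets instead of BytePos; the real table stores absolute positions offset by the file's start_pos.

// Byte offsets at which each line of `src` begins.
fn line_starts(src: &str) -> Vec<usize> {
    let mut starts = vec![0];
    for (i, b) in src.bytes().enumerate() {
        if b == b'\n' {
            starts.push(i + 1); // the next line begins after the newline
        }
    }
    starts
}

// 0-based line index containing `pos`, or None if `pos` precedes the first line.
fn lookup_line(starts: &[usize], pos: usize) -> Option<usize> {
    match starts.binary_search(&pos) {
        Ok(idx) => Some(idx),      // pos is exactly at a line start
        Err(0) => None,            // before the first recorded start
        Err(idx) => Some(idx - 1), // falls inside the previous line
    }
}

fn main() {
    let src = "fn main() {\n    println!(\"hi\");\n}\n";
    let starts = line_starts(src);
    assert_eq!(starts, vec![0, 12, 32, 34]);
    // Byte 16 lies inside the `println!` line.
    assert_eq!(lookup_line(&starts, 16), Some(1));
}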
pub fn is_imported(&self) -> bool { . self.src.is_none() 14,803 ( 0.00%) } . . pub fn count_lines(&self) -> usize { . self.lines.len() . } . . /// Finds the line containing the given position. The return value is the . /// index into the `lines` array of this `SourceFile`, not the 1-based line . /// number. If the source_file is empty or the position is located before the . /// first line, `None` is returned. . pub fn lookup_line(&self, pos: BytePos) -> Option { 34,112 ( 0.00%) match self.lines.binary_search(&pos) { . Ok(idx) => Some(idx), . Err(0) => None, . Err(idx) => Some(idx - 1), . } . } . . pub fn line_bounds(&self, line_index: usize) -> Range { 79,591 ( 0.00%) if self.is_empty() { . return self.start_pos..self.end_pos; . } . 54,683 ( 0.00%) assert!(line_index < self.lines.len()); 77,136 ( 0.00%) if line_index == (self.lines.len() - 1) { 210 ( 0.00%) self.lines[line_index]..self.end_pos . } else { 51,248 ( 0.00%) self.lines[line_index]..self.lines[line_index + 1] . } . } . . /// Returns whether or not the file contains the given `SourceMap` byte . /// position. The position one past the end of the file is considered to be . /// contained by the file. This implies that files for which `is_empty` . /// returns true still contain one byte position according to this function. . #[inline] -- line 1674 ---------------------------------------- -- line 1692 ---------------------------------------- . Err(i) if i == 0 => 0, . Err(i) => self.normalized_pos[i - 1].diff, . }; . . BytePos::from_u32(pos.0 - self.start_pos.0 + diff) . } . . /// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`. 2 ( 0.00%) pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos { . // The number of extra bytes due to multibyte chars in the `SourceFile`. . let mut total_extra_bytes = 0; . 6 ( 0.00%) for mbc in self.multibyte_chars.iter() { . debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos); . if mbc.pos < bpos { . // Every character is at least one byte, so we only . // count the actual extra bytes. . total_extra_bytes += mbc.bytes as u32 - 1; . // We should never see a byte position in the middle of a . // character. . assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32); . } else { . break; . } . } . 12 ( 0.00%) assert!(self.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32()); 10 ( 0.00%) CharPos(bpos.to_usize() - self.start_pos.to_usize() - total_extra_bytes as usize) 4 ( 0.00%) } . . /// Looks up the file's (1-based) line number and (0-based `CharPos`) column offset, for a . /// given `BytePos`. 7 ( 0.00%) pub fn lookup_file_pos(&self, pos: BytePos) -> (usize, CharPos) { 2 ( 0.00%) let chpos = self.bytepos_to_file_charpos(pos); . match self.lookup_line(pos) { . Some(a) => { . let line = a + 1; // Line numbers start at 1 1 ( 0.00%) let linebpos = self.lines[a]; 2 ( 0.00%) let linechpos = self.bytepos_to_file_charpos(linebpos); . let col = chpos - linechpos; . debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); . debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos); . debug!("byte is on line: {}", line); 1 ( 0.00%) assert!(chpos >= linechpos); . (line, col) . } . None => (0, chpos), . } 8 ( 0.00%) } . . /// Looks up the file's (1-based) line number, (0-based `CharPos`) column offset, and (0-based) . /// column offset when displayed, for a given `BytePos`. 
8 ( 0.00%) pub fn lookup_file_pos_with_col_display(&self, pos: BytePos) -> (usize, CharPos, usize) { 3 ( 0.00%) let (line, col_or_chpos) = self.lookup_file_pos(pos); 2 ( 0.00%) if line > 0 { . let col = col_or_chpos; 1 ( 0.00%) let linebpos = self.lines[line - 1]; . let col_display = { . let start_width_idx = self . .non_narrow_chars . .binary_search_by_key(&linebpos, |x| x.pos()) . .unwrap_or_else(|x| x); . let end_width_idx = self . .non_narrow_chars . .binary_search_by_key(&pos, |x| x.pos()) . .unwrap_or_else(|x| x); . let special_chars = end_width_idx - start_width_idx; . let non_narrow: usize = self.non_narrow_chars[start_width_idx..end_width_idx] . .iter() . .map(|x| x.width()) . .sum(); 5 ( 0.00%) col.0 - special_chars + non_narrow . }; . (line, col, col_display) . } else { . let chpos = col_or_chpos; . let col_display = { . let end_width_idx = self . .non_narrow_chars . .binary_search_by_key(&pos, |x| x.pos()) . .unwrap_or_else(|x| x); . let non_narrow: usize = . self.non_narrow_chars[0..end_width_idx].iter().map(|x| x.width()).sum(); . chpos.0 - end_width_idx + non_narrow . }; . (0, chpos, col_display) . } 7 ( 0.00%) } . } . . /// Normalizes the source code and records the normalizations. 310 ( 0.00%) fn normalize_src(src: &mut String, start_pos: BytePos) -> Vec { . let mut normalized_pos = vec![]; . remove_bom(src, &mut normalized_pos); . normalize_newlines(src, &mut normalized_pos); . . // Offset all the positions by start_pos to match the final file positions. . for np in &mut normalized_pos { . np.pos.0 += start_pos.0; . } . . normalized_pos 279 ( 0.00%) } . . /// Removes UTF-8 BOM, if any. . fn remove_bom(src: &mut String, normalized_pos: &mut Vec) { 31 ( 0.00%) if src.starts_with('\u{feff}') { . src.drain(..3); . normalized_pos.push(NormalizedPos { pos: BytePos(0), diff: 3 }); . } . } . . /// Replaces `\r\n` with `\n` in-place in `src`. . /// . /// Returns error if there's a lone `\r` in the string. . fn normalize_newlines(src: &mut String, normalized_pos: &mut Vec) { 31 ( 0.00%) if !src.as_bytes().contains(&b'\r') { . return; . } . . // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding. . // While we *can* call `as_mut_vec` and do surgery on the live string . // directly, let's rather steal the contents of `src`. This makes the code . // safe even if a panic occurs. . -- line 1816 ---------------------------------------- -- line 1877 ---------------------------------------- . ( . $( . $(#[$attr:meta])* . $vis:vis struct $ident:ident($inner_vis:vis $inner_ty:ty); . )* . ) => { . $( . $(#[$attr])* 180,608 ( 0.00%) $vis struct $ident($inner_vis $inner_ty); . . impl Pos for $ident { . #[inline(always)] . fn from_usize(n: usize) -> $ident { 10,398 ( 0.00%) $ident(n as $inner_ty) . } . . #[inline(always)] . fn to_usize(&self) -> usize { 201,784 ( 0.00%) self.0 as usize . } . . #[inline(always)] . fn from_u32(n: u32) -> $ident { . $ident(n as $inner_ty) . } . . #[inline(always)] -- line 1903 ---------------------------------------- -- line 1906 ---------------------------------------- . } . } . . impl Add for $ident { . type Output = $ident; . . #[inline(always)] . fn add(self, rhs: $ident) -> $ident { 1,891,549 ( 0.03%) $ident(self.0 + rhs.0) . } . } . . impl Sub for $ident { . type Output = $ident; . . #[inline(always)] . fn sub(self, rhs: $ident) -> $ident { 2,940,584 ( 0.05%) $ident(self.0 - rhs.0) . } . } . )* . }; . } . . impl_pos! { . /// A byte offset. 
-- line 1931 ---------------------------------------- -- line 1946 ---------------------------------------- . impl Encodable for BytePos { . fn encode(&self, s: &mut S) -> Result<(), S::Error> { . s.emit_u32(self.0) . } . } . . impl Decodable for BytePos { . fn decode(d: &mut D) -> BytePos { 172,102 ( 0.00%) BytePos(d.read_u32()) . } . } . . // _____________________________________________________________________________ . // Loc, SourceFileAndLine, SourceFileAndBytePos . // . . /// A source code location used for error reporting. -- line 1962 ---------------------------------------- -- line 2042 ---------------------------------------- . /// Range inside of a `Span` used for diagnostics when we only have access to relative positions. . #[derive(Copy, Clone, PartialEq, Eq, Debug)] . pub struct InnerSpan { . pub start: usize, . pub end: usize, . } . . impl InnerSpan { 260 ( 0.00%) pub fn new(start: usize, end: usize) -> InnerSpan { . InnerSpan { start, end } 130 ( 0.00%) } . } . . /// Requirements for a `StableHashingContext` to be used in this crate. . /// . /// This is a hack to allow using the [`HashStable_Generic`] derive macro . /// instead of implementing everything in rustc_middle. . pub trait HashStableContext { . fn def_path_hash(&self, def_id: DefId) -> DefPathHash; -- line 2060 ---------------------------------------- -- line 2079 ---------------------------------------- . /// offsets into the `SourceMap`). Instead, we hash the (file name, line, column) . /// triple, which stays the same even if the containing `SourceFile` has moved . /// within the `SourceMap`. . /// . /// Also note that we are hashing byte offsets for the column, not unicode . /// codepoint offsets. For the purpose of the hash that's sufficient. . /// Also, hashing filenames is expensive so we avoid doing it twice when the . /// span starts and ends in the same file, which is almost always the case. 1,488,921 ( 0.02%) fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { . const TAG_VALID_SPAN: u8 = 0; . const TAG_INVALID_SPAN: u8 = 1; . const TAG_RELATIVE_SPAN: u8 = 2; . 252,791 ( 0.00%) if !ctx.hash_spans() { . return; . } . 1,236,130 ( 0.02%) let span = self.data_untracked(); 517,881 ( 0.01%) span.ctxt.hash_stable(ctx, hasher); 638,109 ( 0.01%) span.parent.hash_stable(ctx, hasher); . 434,708 ( 0.01%) if span.is_dummy() { . Hash::hash(&TAG_INVALID_SPAN, hasher); . return; . } . 406,802 ( 0.01%) if let Some(parent) = span.parent { . let def_span = ctx.def_span(parent).data_untracked(); . if def_span.contains(span) { . // This span is enclosed in a definition: only hash the relative position. . Hash::hash(&TAG_RELATIVE_SPAN, hasher); . (span.lo - def_span.lo).to_u32().hash_stable(ctx, hasher); . (span.hi - def_span.lo).to_u32().hash_stable(ctx, hasher); . return; . } . } . . // If this is not an empty or invalid span, we want to hash the last . // position that belongs to it, as opposed to hashing the first . // position past it. 813,604 ( 0.01%) let (file, line_lo, col_lo, line_hi, col_hi) = match ctx.span_data_to_lines_and_cols(&span) . { 406,802 ( 0.01%) Some(pos) => pos, . None => { . Hash::hash(&TAG_INVALID_SPAN, hasher); . return; . } . }; . . Hash::hash(&TAG_VALID_SPAN, hasher); . // We truncate the stable ID hash and line and column numbers. The chances -- line 2129 ---------------------------------------- -- line 2134 ---------------------------------------- . // hash only the length, for example, then two otherwise equal spans with . // different end locations will have the same hash. 
This can cause a problem . // during incremental compilation wherein a previous result for a query that . // depends on the end location of a span will be incorrectly reused when the . // end location of the span it depends on has changed (see issue #74890). A . // similar analysis applies if some query depends specifically on the length . // of the span, but we only hash the end location. So hash both. . 203,401 ( 0.00%) let col_lo_trunc = (col_lo.0 as u64) & 0xFF; . let line_lo_trunc = ((line_lo as u64) & 0xFF_FF_FF) << 8; . let col_hi_trunc = (col_hi.0 as u64) & 0xFF << 32; . let line_hi_trunc = ((line_hi as u64) & 0xFF_FF_FF) << 40; . let col_line = col_lo_trunc | line_lo_trunc | col_hi_trunc | line_hi_trunc; 203,401 ( 0.00%) let len = (span.hi - span.lo).0; . Hash::hash(&col_line, hasher); . Hash::hash(&len, hasher); 1,701,624 ( 0.03%) } . } 4,150,506 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_trait_selection/src/traits/select/mod.rs -------------------------------------------------------------------------------- Ir -- line 208 ---------------------------------------- . /// There is no built-in impl. There may be some other . /// candidate (a where-clause or user-defined impl). . None, . /// It is unknown whether there is an impl. . Ambiguous, . } . . impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { 520,854 ( 0.01%) pub fn new(infcx: &'cx InferCtxt<'cx, 'tcx>) -> SelectionContext<'cx, 'tcx> { 2,556,986 ( 0.04%) SelectionContext { . infcx, 348,733 ( 0.01%) freshener: infcx.freshener_keep_static(), . intercrate: false, . intercrate_ambiguity_causes: None, . allow_negative_impls: false, . query_mode: TraitQueryMode::Standard, . } 434,045 ( 0.01%) } . . pub fn intercrate(infcx: &'cx InferCtxt<'cx, 'tcx>) -> SelectionContext<'cx, 'tcx> { 368 ( 0.00%) SelectionContext { . infcx, 46 ( 0.00%) freshener: infcx.freshener_keep_static(), . intercrate: true, . intercrate_ambiguity_causes: None, . allow_negative_impls: false, . query_mode: TraitQueryMode::Standard, . } . } . . pub fn with_negative( -- line 238 ---------------------------------------- -- line 245 ---------------------------------------- . freshener: infcx.freshener_keep_static(), . intercrate: false, . intercrate_ambiguity_causes: None, . allow_negative_impls, . query_mode: TraitQueryMode::Standard, . } . } . 20,048 ( 0.00%) pub fn with_query_mode( . infcx: &'cx InferCtxt<'cx, 'tcx>, . query_mode: TraitQueryMode, . ) -> SelectionContext<'cx, 'tcx> { . debug!(?query_mode, "with_query_mode"); 40,096 ( 0.00%) SelectionContext { . infcx, 2,506 ( 0.00%) freshener: infcx.freshener_keep_static(), . intercrate: false, . intercrate_ambiguity_causes: None, . allow_negative_impls: false, . query_mode, . } 15,036 ( 0.00%) } . . /// Enables tracking of intercrate ambiguity causes. These are . /// used in coherence to give improved diagnostics. We don't do . /// this until we detect a coherence error because it can lead to . /// false overflow results (#47139) and because it costs . /// computation time. . pub fn enable_tracking_intercrate_ambiguity_causes(&mut self) { . assert!(self.intercrate); -- line 274 ---------------------------------------- -- line 281 ---------------------------------------- . /// was enabled and disables tracking at the same time. If . /// tracking is not enabled, just returns an empty vector. . pub fn take_intercrate_ambiguity_causes(&mut self) -> Vec { . assert!(self.intercrate); . 
self.intercrate_ambiguity_causes.take().unwrap_or_default() . } . . pub fn infcx(&self) -> &'cx InferCtxt<'cx, 'tcx> { 47,399,436 ( 0.79%) self.infcx 78,272 ( 0.00%) } . . pub fn tcx(&self) -> TyCtxt<'tcx> { 661,507 ( 0.01%) self.infcx.tcx . } . . pub fn is_intercrate(&self) -> bool { 74,773 ( 0.00%) self.intercrate . } . . /////////////////////////////////////////////////////////////////////////// . // Selection . // . // The selection phase tries to identify *how* an obligation will . // be resolved. For example, it will identify which impl or . // parameter bound is to be used. The process can be inconclusive -- line 305 ---------------------------------------- -- line 309 ---------------------------------------- . // 1. If no applicable impl or parameter bound can be found. . // 2. If the output type parameters in the obligation do not match . // those specified by the impl/bound. For example, if the obligation . // is `Vec: Iterable`, but the impl specifies . // `impl Iterable for Vec`, than an error would result. . . /// Attempts to satisfy the obligation. If successful, this will affect the surrounding . /// type environment by performing unification. 334,915 ( 0.01%) #[instrument(level = "debug", skip(self))] 430,605 ( 0.01%) pub fn select( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> SelectionResult<'tcx, Selection<'tcx>> { 353,381 ( 0.01%) let candidate = match self.select_from_obligation(obligation) { . Err(SelectionError::Overflow) => { . // In standard mode, overflow must have been caught and reported . // earlier. . assert!(self.query_mode == TraitQueryMode::Canonical); . return Err(SelectionError::Overflow); . } . Err(SelectionError::Ambiguous(_)) => { . return Ok(None); . } . Err(e) => { 121,912 ( 0.00%) return Err(e); . } . Ok(None) => { 46,173 ( 0.00%) return Ok(None); . } 139,326 ( 0.00%) Ok(Some(candidate)) => candidate, . }; . 235,322 ( 0.00%) match self.confirm_candidate(obligation, candidate) { . Err(SelectionError::Overflow) => { . assert!(self.query_mode == TraitQueryMode::Canonical); . Err(SelectionError::Overflow) . } 20,228 ( 0.00%) Err(e) => Err(e), . Ok(candidate) => { . debug!(?candidate, "confirmed"); 259,980 ( 0.00%) Ok(Some(candidate)) . } . } . } . 478,450 ( 0.01%) crate fn select_from_obligation( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { . debug_assert!(!obligation.predicate.has_escaping_bound_vars()); . . let pec = &ProvisionalEvaluationCache::default(); . let stack = self.push_stack(TraitObligationStackList::empty(pec), obligation); . 95,690 ( 0.00%) self.candidate_from_obligation(&stack) 430,605 ( 0.01%) } . . /////////////////////////////////////////////////////////////////////////// . // EVALUATION . // . // Tests whether an obligation can be selected or whether an impl . // can be applied to particular types. It skips the "confirmation" . // step and hence completely ignores output type parameters. . // -- line 372 ---------------------------------------- -- line 374 ---------------------------------------- . // we can be sure it does not. . . /// Evaluates whether the obligation `obligation` can be satisfied (by any means). . pub fn predicate_may_hold_fatal(&mut self, obligation: &PredicateObligation<'tcx>) -> bool { . debug!(?obligation, "predicate_may_hold_fatal"); . . // This fatal query is a stopgap that should only be used in standard mode, . // where we do not expect overflow to be propagated. 46 ( 0.00%) assert!(self.query_mode == TraitQueryMode::Standard); . 
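evaluate_root_obligation and evaluate_predicates_recursively, annotated just below, combine the results of nested obligations by taking a maximum over an ordered set of outcomes and returning early on a definite failure (the captured code calls EvaluatedToErr the top of the lattice). A toy version of that fold, using a hypothetical three-value lattice in place of rustc's EvaluationResult:

    #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
    enum Eval {
        Ok,             // definitely holds
        AmbigOrRegions, // holds modulo regions or unresolved inference
        Err,            // definitely does not hold
    }

    fn evaluate_all(results: impl IntoIterator<Item = Eval>) -> Eval {
        let mut acc = Eval::Ok;
        for r in results {
            if r == Eval::Err {
                return Eval::Err; // top of the lattice: no need to look further
            }
            acc = acc.max(r);
        }
        acc
    }

    fn main() {
        assert_eq!(evaluate_all([Eval::Ok, Eval::AmbigOrRegions]), Eval::AmbigOrRegions);
        assert_eq!(evaluate_all([Eval::Ok, Eval::Err, Eval::Ok]), Eval::Err);
    }

The early return is only a fast path; folding with max alone would produce the same answer.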
230 ( 0.00%) self.evaluate_root_obligation(obligation) . .expect("Overflow should be caught earlier in standard query mode") . .may_apply() . } . . /// Evaluates whether the obligation `obligation` can be satisfied . /// and returns an `EvaluationResult`. This is meant for the . /// *initial* call. 5,012 ( 0.00%) pub fn evaluate_root_obligation( . &mut self, . obligation: &PredicateObligation<'tcx>, . ) -> Result { . self.evaluation_probe(|this| { 7,656 ( 0.00%) this.evaluate_predicate_recursively( . TraitObligationStackList::empty(&ProvisionalEvaluationCache::default()), . obligation.clone(), . ) . }) 5,012 ( 0.00%) } . . fn evaluation_probe( . &mut self, . op: impl FnOnce(&mut Self) -> Result, . ) -> Result { 49,021 ( 0.00%) self.infcx.probe(|snapshot| -> Result { 5,104 ( 0.00%) let result = op(self)?; . 47,138 ( 0.00%) match self.infcx.leak_check(true, snapshot) { . Ok(()) => {} . Err(_) => return Ok(EvaluatedToErr), . } . 33,670 ( 0.00%) match self.infcx.region_constraints_added_in_snapshot(snapshot) { . None => Ok(result), . Some(_) => Ok(result.max(EvaluatedToOkModuloRegions)), . } . }) . } . . /// Evaluates the predicates in `predicates` recursively. Note that . /// this applies projections in the predicates, and therefore . /// is run within an inference probe. 73,600 ( 0.00%) #[instrument(skip(self, stack), level = "debug")] . fn evaluate_predicates_recursively<'o, I>( . &mut self, . stack: TraitObligationStackList<'o, 'tcx>, . predicates: I, . ) -> Result . where . I: IntoIterator> + std::fmt::Debug, . { . let mut result = EvaluatedToOk; 34,937 ( 0.00%) for obligation in predicates { 7,504 ( 0.00%) let eval = self.evaluate_predicate_recursively(stack, obligation.clone())?; 3,752 ( 0.00%) if let EvaluatedToErr = eval { . // fast-path - EvaluatedToErr is the top of the lattice, . // so we don't need to look on the other predicates. . return Ok(EvaluatedToErr); . } else { . result = cmp::max(result, eval); . } . } . Ok(result) . } . 44,280 ( 0.00%) #[instrument( . level = "debug", . skip(self, previous_stack), . fields(previous_stack = ?previous_stack.head()) 39,852 ( 0.00%) )] . fn evaluate_predicate_recursively<'o>( . &mut self, . previous_stack: TraitObligationStackList<'o, 'tcx>, . obligation: PredicateObligation<'tcx>, . ) -> Result { . // `previous_stack` stores a `TraitObligation`, while `obligation` is . // a `PredicateObligation`. These are distinct types, so we can't . // use any `Option` combinator method that would force them to be . // the same. 8,856 ( 0.00%) match previous_stack.head() { 3,738 ( 0.00%) Some(h) => self.check_recursion_limit(&obligation, h.obligation)?, 2,559 ( 0.00%) None => self.check_recursion_limit(&obligation, &obligation)?, . } . 26,568 ( 0.00%) let result = ensure_sufficient_stack(|| { 4,428 ( 0.00%) let bound_predicate = obligation.predicate.kind(); 44,280 ( 0.00%) match bound_predicate.skip_binder() { . ty::PredicateKind::Trait(t) => { . let t = bound_predicate.rebind(t); . debug_assert!(!t.has_escaping_bound_vars()); . let obligation = obligation.with(t); 65,340 ( 0.00%) self.evaluate_trait_predicate_recursively(previous_stack, obligation) . } . . ty::PredicateKind::Subtype(p) => { . let p = bound_predicate.rebind(p); . // Does this code ever run? 90 ( 0.00%) match self.infcx.subtype_predicate(&obligation.cause, obligation.param_env, p) { . Some(Ok(InferOk { mut obligations, .. })) => { . self.add_depth(obligations.iter_mut(), obligation.recursion_depth); . self.evaluate_predicates_recursively( . previous_stack, . obligations.into_iter(), . 
) . } . Some(Err(_)) => Ok(EvaluatedToErr), -- line 489 ---------------------------------------- -- line 522 ---------------------------------------- . None => Ok(EvaluatedToAmbig), . }, . . ty::PredicateKind::TypeOutlives(pred) => { . // A global type with no late-bound regions can only . // contain the "'static" lifetime (any other lifetime . // would either be late-bound or local), so it is guaranteed . // to outlive any other lifetime 12 ( 0.00%) if pred.0.is_global() && !pred.0.has_late_bound_regions() { . Ok(EvaluatedToOk) . } else { . Ok(EvaluatedToOkModuloRegions) . } . } . . ty::PredicateKind::RegionOutlives(..) => { . // We do not consider region relationships when evaluating trait matches. -- line 538 ---------------------------------------- -- line 545 ---------------------------------------- . } else { . Ok(EvaluatedToErr) . } . } . . ty::PredicateKind::Projection(data) => { . let data = bound_predicate.rebind(data); . let project_obligation = obligation.with(data); 252 ( 0.00%) match project::poly_project_and_unify_type(self, &project_obligation) { 175 ( 0.00%) Ok(Ok(Some(mut subobligations))) => { . 'compute_res: { . // If we've previously marked this projection as 'complete', thne . // use the final cached result (either `EvaluatedToOk` or . // `EvaluatedToOkModuloRegions`), and skip re-evaluating the . // sub-obligations. 140 ( 0.00%) if let Some(key) = 315 ( 0.00%) ProjectionCacheKey::from_poly_projection_predicate(self, data) . { 248 ( 0.00%) if let Some(cached_res) = self . .infcx . .inner . .borrow_mut() . .projection_cache() . .is_complete(key) . { . break 'compute_res Ok(cached_res); . } . } . 70 ( 0.00%) self.add_depth( . subobligations.iter_mut(), 70 ( 0.00%) obligation.recursion_depth, . ); 70 ( 0.00%) let res = self.evaluate_predicates_recursively( 105 ( 0.00%) previous_stack, 175 ( 0.00%) subobligations, . ); 140 ( 0.00%) if let Ok(res) = res { 70 ( 0.00%) if res == EvaluatedToOk || res == EvaluatedToOkModuloRegions { 132 ( 0.00%) if let Some(key) = 33 ( 0.00%) ProjectionCacheKey::from_poly_projection_predicate( 231 ( 0.00%) self, data, . ) . { . // If the result is something that we can cache, then mark this . // entry as 'complete'. This will allow us to skip evaluating the . // suboligations at all the next time we evaluate the projection . // predicate. 203 ( 0.00%) self.infcx . .inner . .borrow_mut() . .projection_cache() . .complete(key, res); . } . } . } . res -- line 601 ---------------------------------------- -- line 603 ---------------------------------------- . } . Ok(Ok(None)) => Ok(EvaluatedToAmbig), . Ok(Err(project::InProgress)) => Ok(EvaluatedToRecur), . Err(_) => Ok(EvaluatedToErr), . } . } . . ty::PredicateKind::ClosureKind(_, closure_substs, kind) => { 95 ( 0.00%) match self.infcx.closure_kind(closure_substs) { . Some(closure_kind) => { 57 ( 0.00%) if closure_kind.extends(kind) { . Ok(EvaluatedToOk) . } else { . Ok(EvaluatedToErr) . } . } . None => Ok(EvaluatedToAmbig), . } . } -- line 621 ---------------------------------------- -- line 693 ---------------------------------------- . } . } . } . } . ty::PredicateKind::TypeWellFormedFromEnv(..) => { . bug!("TypeWellFormedFromEnv is only used for chalk") . } . } 4,428 ( 0.00%) }); . . debug!("finished: {:?} from {:?}", result, obligation); . . result . } . 91,476 ( 0.00%) #[instrument(skip(self, previous_stack), level = "debug")] . fn evaluate_trait_predicate_recursively<'o>( . &mut self, . previous_stack: TraitObligationStackList<'o, 'tcx>, . mut obligation: TraitObligation<'tcx>, . 
) -> Result { 8,712 ( 0.00%) if !self.intercrate . && obligation.is_global() . && obligation.param_env.caller_bounds().iter().all(|bound| bound.needs_subst()) . { . // If a param env has no global bounds, global obligations do not . // depend on its particular value in order to work, so we can clear . // out the param env and get better caching. . debug!("in global"); 4,002 ( 0.00%) obligation.param_env = obligation.param_env.without_caller_bounds(); . } . . let stack = self.push_stack(previous_stack, &obligation); . let mut fresh_trait_pred = stack.fresh_trait_pred; 8,712 ( 0.00%) let mut param_env = obligation.param_env; . . fresh_trait_pred = fresh_trait_pred.map_bound(|mut pred| { 4,356 ( 0.00%) pred.remap_constness(self.tcx(), &mut param_env); 52,272 ( 0.00%) pred . }); . . debug!(?fresh_trait_pred); . 5,846 ( 0.00%) if let Some(result) = self.check_evaluation_cache(param_env, fresh_trait_pred) { . debug!(?result, "CACHE HIT"); . return Ok(result); . } . . if let Some(result) = stack.cache().get_provisional(fresh_trait_pred) { . debug!(?result, "PROVISIONAL CACHE HIT"); . stack.update_reached_depth(result.reached_depth); . return Ok(result.result); -- line 744 ---------------------------------------- -- line 748 ---------------------------------------- . // stack. If so, we don't want to insert the result into the . // main cache (it is cycle dependent) nor the provisional . // cache (which is meant for things that have completed but . // for a "backedge" -- this result *is* the backedge). . if let Some(cycle_result) = self.check_evaluation_cycle(&stack) { . return Ok(cycle_result); . } . 26,514 ( 0.00%) let (result, dep_node) = self.in_task(|this| this.evaluate_stack(&stack)); 2,946 ( 0.00%) let result = result?; . 14,730 ( 0.00%) if !result.must_apply_modulo_regions() { . stack.cache().on_failure(stack.dfn); . } . 2,946 ( 0.00%) let reached_depth = stack.reached_depth.get(); 5,892 ( 0.00%) if reached_depth >= stack.depth { . debug!(?result, "CACHE MISS"); 55,974 ( 0.00%) self.insert_evaluation_cache(param_env, fresh_trait_pred, dep_node, result); . . stack.cache().on_completion( 5,892 ( 0.00%) stack.dfn, . |fresh_trait_pred, provisional_result, provisional_dep_node| { . // Create a new `DepNode` that has dependencies on: . // * The `DepNode` for the original evaluation that resulted in a provisional cache . // entry being crated . // * The `DepNode` for the *current* evaluation, which resulted in us completing . // provisional caches entries and inserting them into the evaluation cache . // . // This ensures that when a query reads this entry from the evaluation cache, -- line 777 ---------------------------------------- -- line 835 ---------------------------------------- . /// `self.freshener`, we can be sure that (a) this will not . /// affect the inferencer state and (b) that if we see two . /// fresh regions with the same index, they refer to the same . /// unbound type variable. . fn check_evaluation_cycle( . &mut self, . stack: &TraitObligationStack<'_, 'tcx>, . ) -> Option { 8 ( 0.00%) if let Some(cycle_depth) = stack . .iter() . .skip(1) // Skip top-most frame. . .find(|prev| { 1,474 ( 0.00%) stack.obligation.param_env == prev.obligation.param_env . && stack.fresh_trait_pred == prev.fresh_trait_pred . }) . .map(|stack| stack.depth) . { . debug!("evaluate_stack --> recursive at depth {}", cycle_depth); . . // If we have a stack like `A B C D E A`, where the top of . // the stack is the final `A`, then this will iterate over . 
// `A, E, D, C, B` -- i.e., all the participants apart . // from the cycle head. We mark them as participating in a . // cycle. This suppresses caching for those nodes. See . // `in_cycle` field for more details. 8 ( 0.00%) stack.update_reached_depth(cycle_depth); . . // Subtle: when checking for a coinductive cycle, we do . // not compare using the "freshened trait refs" (which . // have erased regions) but rather the fully explicit . // trait refs. This is important because it's only a cycle . // if the regions match exactly. 4 ( 0.00%) let cycle = stack.iter().skip(1).take_while(|s| s.depth >= cycle_depth); 4 ( 0.00%) let tcx = self.tcx(); 28 ( 0.00%) let cycle = cycle.map(|stack| stack.obligation.predicate.to_predicate(tcx)); 44 ( 0.00%) if self.coinductive_match(cycle) { . debug!("evaluate_stack --> recursive, coinductive"); . Some(EvaluatedToOk) . } else { . debug!("evaluate_stack --> recursive, inductive"); . Some(EvaluatedToRecur) . } . } else { . None . } . } . 26,514 ( 0.00%) fn evaluate_stack<'o>( . &mut self, . stack: &TraitObligationStack<'o, 'tcx>, . ) -> Result { . // In intercrate mode, whenever any of the generics are unbound, . // there can always be an impl. Even if there are no impls in . // this crate, perhaps the type would be unified with . // something from another crate that does provide an impl. . // -- line 890 ---------------------------------------- -- line 903 ---------------------------------------- . // imagine, this is just where we started. To avoid that, we . // check for unbound variables and return an ambiguous (hence possible) . // match if we've seen this trait before. . // . // This suffices to allow chains like `FnMut` implemented in . // terms of `Fn` etc, but we could probably make this more . // precise still. . let unbound_input_types = 2,946 ( 0.00%) stack.fresh_trait_pred.skip_binder().trait_ref.substs.types().any(|ty| ty.is_fresh()); . 8,838 ( 0.00%) if stack.obligation.polarity() != ty::ImplPolarity::Negative { . // This check was an imperfect workaround for a bug in the old . // intercrate mode; it should be removed when that goes away. 14,730 ( 0.00%) if unbound_input_types && self.intercrate { . debug!("evaluate_stack --> unbound argument, intercrate --> ambiguous",); . // Heuristics: show the diagnostics when there are no candidates in crate. . if self.intercrate_ambiguity_causes.is_some() { . debug!("evaluate_stack: intercrate_ambiguity_causes is some"); . if let Ok(candidate_set) = self.assemble_candidates(stack) { . if !candidate_set.ambiguous && candidate_set.vec.is_empty() { . let trait_ref = stack.obligation.predicate.skip_binder().trait_ref; . let self_ty = trait_ref.self_ty(); -- line 924 ---------------------------------------- -- line 937 ---------------------------------------- . self.intercrate_ambiguity_causes.as_mut().unwrap().push(cause); . } . } . } . return Ok(EvaluatedToAmbig); . } . } . 5,892 ( 0.00%) if unbound_input_types . && stack.iter().skip(1).any(|prev| { 132 ( 0.00%) stack.obligation.param_env == prev.obligation.param_env . && self.match_fresh_trait_refs( 176 ( 0.00%) stack.fresh_trait_pred, 176 ( 0.00%) prev.fresh_trait_pred, . prev.obligation.param_env, . ) . }) . { . debug!("evaluate_stack --> unbound argument, recursive --> giving up",); . return Ok(EvaluatedToUnknown); . } . 29,344 ( 0.00%) match self.candidate_from_obligation(stack) { 13,110 ( 0.00%) Ok(Some(c)) => self.evaluate_candidate(stack, &c), . Err(SelectionError::Ambiguous(_)) => Ok(EvaluatedToAmbig), . Ok(None) => Ok(EvaluatedToAmbig), . 
Err(Overflow) => Err(OverflowError::Canonical), . Err(ErrorReporting) => Err(OverflowError::ErrorReporting), . Err(..) => Ok(EvaluatedToErr), . } 29,460 ( 0.00%) } . . /// For defaulted traits, we use a co-inductive strategy to solve, so . /// that recursion is ok. This routine returns `true` if the top of the . /// stack (`cycle[0]`): . /// . /// - is a defaulted trait, . /// - it also appears in the backtrace at some position `X`, . /// - all the predicates at positions `X..` between `X` and the top are -- line 975 ---------------------------------------- -- line 977 ---------------------------------------- . pub fn coinductive_match(&mut self, mut cycle: I) -> bool . where . I: Iterator>, . { . cycle.all(|predicate| self.coinductive_predicate(predicate)) . } . . fn coinductive_predicate(&self, predicate: ty::Predicate<'tcx>) -> bool { 8 ( 0.00%) let result = match predicate.kind().skip_binder() { 36 ( 0.00%) ty::PredicateKind::Trait(ref data) => self.tcx().trait_is_auto(data.def_id()), . _ => false, . }; . debug!(?predicate, ?result, "coinductive_predicate"); . result . } . . /// Further evaluates `candidate` to decide whether all type parameters match and whether nested . /// obligations are met. Returns whether `candidate` remains viable after this further -- line 994 ---------------------------------------- -- line 998 ---------------------------------------- . skip(self, stack), . fields(depth = stack.obligation.recursion_depth) . )] . fn evaluate_candidate<'o>( . &mut self, . stack: &TraitObligationStack<'o, 'tcx>, . candidate: &SelectionCandidate<'tcx>, . ) -> Result { 7,078 ( 0.00%) let mut result = self.evaluation_probe(|this| { . let candidate = (*candidate).clone(); 56,699 ( 0.00%) match this.confirm_candidate(stack.obligation, candidate) { . Ok(selection) => { . debug!(?selection); 14,156 ( 0.00%) this.evaluate_predicates_recursively( . stack.list(), 46,007 ( 0.00%) selection.nested_obligations().into_iter(), . ) . } . Err(..) => Ok(EvaluatedToErr), . } . })?; . . // If we erased any lifetimes, then we want to use . // `EvaluatedToOkModuloRegions` instead of `EvaluatedToOk` -- line 1021 ---------------------------------------- -- line 1037 ---------------------------------------- . &self, . param_env: ty::ParamEnv<'tcx>, . trait_pred: ty::PolyTraitPredicate<'tcx>, . ) -> Option { . // Neither the global nor local cache is aware of intercrate . // mode, so don't do any caching. In particular, we might . // re-use the same `InferCtxt` with both an intercrate . // and non-intercrate `SelectionContext` 8,712 ( 0.00%) if self.intercrate { . return None; . } . . let tcx = self.tcx(); . if self.can_use_global_caches(param_env) { 27,209 ( 0.00%) if let Some(res) = tcx.evaluation_cache.get(¶m_env.and(trait_pred), tcx) { . return Some(res); . } . } 11,692 ( 0.00%) self.infcx.evaluation_cache.get(¶m_env.and(trait_pred), tcx) . } . 35,352 ( 0.00%) fn insert_evaluation_cache( . &mut self, . param_env: ty::ParamEnv<'tcx>, . trait_pred: ty::PolyTraitPredicate<'tcx>, . dep_node: DepNodeIndex, . result: EvaluationResult, . ) { . // Avoid caching results that depend on more than just the trait-ref . // - the stack can create recursion. 11,784 ( 0.00%) if result.is_stack_dependent() { . return; . } . . // Neither the global nor local cache is aware of intercrate . // mode, so don't do any caching. In particular, we might . // re-use the same `InferCtxt` with both an intercrate . // and non-intercrate `SelectionContext` 5,892 ( 0.00%) if self.intercrate { . return; . } . . 
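check_evaluation_cache above and the insert_evaluation_cache body that continues below split results between a cache shared across inference contexts and a cache owned by the current one: only keys free of inference variables may enter the shared cache, and intercrate mode skips caching entirely. A minimal sketch of that two-level pattern, collapsing rustc's separate param-env and predicate checks into one hypothetical has_infer_vars flag:

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct Key {
        predicate: &'static str,
        has_infer_vars: bool, // stands in for `needs_infer()` in rustc
    }

    #[derive(Default)]
    struct TwoLevelCache {
        global: HashMap<Key, bool>, // shared across inference contexts
        local: HashMap<Key, bool>,  // owned by one inference context
    }

    impl TwoLevelCache {
        fn get(&self, key: &Key) -> Option<bool> {
            if !key.has_infer_vars {
                if let Some(&r) = self.global.get(key) {
                    return Some(r);
                }
            }
            self.local.get(key).copied()
        }

        fn insert(&mut self, key: Key, result: bool) {
            if key.has_infer_vars {
                // Keys mentioning inference variables are only meaningful locally.
                self.local.insert(key, result);
            } else {
                self.global.insert(key, result);
            }
        }
    }

    fn main() {
        let mut cache = TwoLevelCache::default();
        cache.insert(Key { predicate: "u32: Copy", has_infer_vars: false }, true);
        cache.insert(Key { predicate: "?T: Copy", has_infer_vars: true }, false);
        assert_eq!(cache.get(&Key { predicate: "u32: Copy", has_infer_vars: false }), Some(true));
        assert_eq!(cache.get(&Key { predicate: "?T: Copy", has_infer_vars: true }), Some(false));
    }

Keeping inference-dependent results out of the shared map is what makes the global cache safe to reuse across compilations of unrelated items.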
if self.can_use_global_caches(param_env) { . if !trait_pred.needs_infer() { . debug!(?trait_pred, ?result, "insert_evaluation_cache global"); . // This may overwrite the cache with the same value . // FIXME: Due to #50507 this overwrites the different values . // This should be changed to use HashMapExt::insert_same . // when that is fixed 2,496 ( 0.00%) self.tcx().evaluation_cache.insert(param_env.and(trait_pred), dep_node, result); . return; . } . } . . debug!(?trait_pred, ?result, "insert_evaluation_cache"); 1,616 ( 0.00%) self.infcx.evaluation_cache.insert(param_env.and(trait_pred), dep_node, result); 23,568 ( 0.00%) } . . /// For various reasons, it's possible for a subobligation . /// to have a *lower* recursion_depth than the obligation used to create it. . /// Projection sub-obligations may be returned from the projection cache, . /// which results in obligations with an 'old' `recursion_depth`. . /// Additionally, methods like `InferCtxt.subtype_predicate` produce . /// subobligations without taking in a 'parent' depth, causing the . /// generated subobligations to have a `recursion_depth` of `0`. . /// . /// To ensure that obligation_depth never decreases, we force all subobligations . /// to have at least the depth of the original obligation. 35 ( 0.00%) fn add_depth>>( . &self, . it: I, . min_depth: usize, . ) { 56 ( 0.00%) it.for_each(|o| o.recursion_depth = cmp::max(min_depth, o.recursion_depth) + 1); 70 ( 0.00%) } . . fn check_recursion_depth>( . &self, . depth: usize, . error_obligation: &Obligation<'tcx, T>, . ) -> Result<(), OverflowError> { 220,876 ( 0.00%) if !self.infcx.tcx.recursion_limit().value_within_limit(depth) { . match self.query_mode { . TraitQueryMode::Standard => { . if self.infcx.is_tainted_by_errors() { . return Err(OverflowError::ErrorReporting); . } . self.infcx.report_overflow_error(error_obligation, true); . } . TraitQueryMode::Canonical => { -- line 1126 ---------------------------------------- -- line 1143 ---------------------------------------- . ) -> Result<(), OverflowError> { . self.check_recursion_depth(obligation.recursion_depth, error_obligation) . } . . fn in_task(&mut self, op: OP) -> (R, DepNodeIndex) . where . OP: FnOnce(&mut Self) -> R, . { 80,327 ( 0.00%) let (result, dep_node) = 60,642 ( 0.00%) self.tcx().dep_graph.with_anon_task(self.tcx(), DepKind::TraitSelect, || op(self)); . self.tcx().dep_graph.read_index(dep_node); 74,148 ( 0.00%) (result, dep_node) . } . . /// filter_impls filters constant trait obligations and candidates that have a positive impl . /// for a negative goal and a negative impl for a positive goal . #[instrument(level = "debug", skip(self))] . fn filter_impls( . &mut self, . candidates: Vec>, . obligation: &TraitObligation<'tcx>, . ) -> Vec> { . let tcx = self.tcx(); . let mut result = Vec::with_capacity(candidates.len()); . 90,742 ( 0.00%) for candidate in candidates { . // Respect const trait obligations 16,761 ( 0.00%) if obligation.is_const() { . match candidate { . // const impl . ImplCandidate(def_id) . if tcx.impl_constness(def_id) == hir::Constness::Const => {} . // const param . ParamCandidate(trait_pred) . if trait_pred.skip_binder().constness . == ty::BoundConstness::ConstIfConst => {} -- line 1178 ---------------------------------------- -- line 1186 ---------------------------------------- . ConstDropCandidate(_) => {} . _ => { . // reject all other types of candidates . continue; . } . } . } . 
11,174 ( 0.00%) if let ImplCandidate(def_id) = candidate { 7,288 ( 0.00%) if ty::ImplPolarity::Reservation == tcx.impl_polarity(def_id) 3,644 ( 0.00%) || obligation.polarity() == tcx.impl_polarity(def_id) . || self.allow_negative_impls . { . result.push(candidate); . } . } else { . result.push(candidate); . } . } . 23,428 ( 0.00%) result . } . . /// filter_reservation_impls filter reservation impl for any goal as ambiguous 73,525 ( 0.00%) #[instrument(level = "debug", skip(self))] . fn filter_reservation_impls( . &mut self, . candidate: SelectionCandidate<'tcx>, . obligation: &TraitObligation<'tcx>, . ) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { . let tcx = self.tcx(); . // Treat reservation impls as ambiguity. 13,414 ( 0.00%) if let ImplCandidate(def_id) = candidate { 4,764 ( 0.00%) if let ty::ImplPolarity::Reservation = tcx.impl_polarity(def_id) { . if let Some(intercrate_ambiguity_clauses) = &mut self.intercrate_ambiguity_causes { . let attrs = tcx.get_attrs(def_id); . let attr = tcx.sess.find_by_name(&attrs, sym::rustc_reservation_impl); . let value = attr.and_then(|a| a.value_str()); . if let Some(value) = value { . debug!( . "filter_reservation_impls: \ . reservation impl ambiguity on {:?}", -- line 1227 ---------------------------------------- -- line 1232 ---------------------------------------- . message: value.to_string(), . }, . ); . } . } . return Ok(None); . } . } 25,950 ( 0.00%) Ok(Some(candidate)) . } . . fn is_knowable<'o>(&mut self, stack: &TraitObligationStack<'o, 'tcx>) -> Option { . debug!("is_knowable(intercrate={:?})", self.intercrate); . 18,675 ( 0.00%) if !self.intercrate || stack.obligation.polarity() == ty::ImplPolarity::Negative { . return None; . } . . let obligation = &stack.obligation; 138 ( 0.00%) let predicate = self.infcx().resolve_vars_if_possible(obligation.predicate); . . // Okay to skip binder because of the nature of the . // trait-ref-is-knowable check, which does not care about . // bound regions. . let trait_ref = predicate.skip_binder().trait_ref; . 138 ( 0.00%) coherence::trait_ref_is_knowable(self.tcx(), trait_ref) . } . . /// Returns `true` if the global caches can be used. . fn can_use_global_caches(&self, param_env: ty::ParamEnv<'tcx>) -> bool { . // If there are any inference variables in the `ParamEnv`, then we . // always use a cache local to this particular scope. Otherwise, we . // switch to a global cache. . if param_env.needs_infer() { -- line 1266 ---------------------------------------- -- line 1286 ---------------------------------------- . &mut self, . mut param_env: ty::ParamEnv<'tcx>, . cache_fresh_trait_pred: ty::PolyTraitPredicate<'tcx>, . ) -> Option>> { . // Neither the global nor local cache is aware of intercrate . // mode, so don't do any caching. In particular, we might . // re-use the same `InferCtxt` with both an intercrate . // and non-intercrate `SelectionContext` 101,582 ( 0.00%) if self.intercrate { 46 ( 0.00%) return None; . } . let tcx = self.tcx(); . let mut pred = cache_fresh_trait_pred.skip_binder(); 101,490 ( 0.00%) pred.remap_constness(tcx, &mut param_env); . 50,745 ( 0.00%) if self.can_use_global_caches(param_env) { 502,130 ( 0.01%) if let Some(res) = tcx.selection_cache.get(¶m_env.and(pred), tcx) { 579,956 ( 0.01%) return Some(res); . } . } 66,931 ( 0.00%) self.infcx.selection_cache.get(¶m_env.and(pred), tcx) . } . . /// Determines whether can we safely cache the result . /// of selecting an obligation. This is almost always `true`, . /// except when dealing with certain `ParamCandidate`s. . /// . 
/// Ordinarily, a `ParamCandidate` will contain no inference variables, . /// since it was usually produced directly from a `DefId`. However, -- line 1314 ---------------------------------------- -- line 1325 ---------------------------------------- . fn can_cache_candidate( . &self, . result: &SelectionResult<'tcx, SelectionCandidate<'tcx>>, . ) -> bool { . // Neither the global nor local cache is aware of intercrate . // mode, so don't do any caching. In particular, we might . // re-use the same `InferCtxt` with both an intercrate . // and non-intercrate `SelectionContext` 12,358 ( 0.00%) if self.intercrate { . return false; . } 18,399 ( 0.00%) match result { . Ok(Some(SelectionCandidate::ParamCandidate(trait_ref))) => !trait_ref.needs_infer(), . _ => true, . } . } . . fn insert_candidate_cache( . &mut self, . mut param_env: ty::ParamEnv<'tcx>, . cache_fresh_trait_pred: ty::PolyTraitPredicate<'tcx>, . dep_node: DepNodeIndex, . candidate: SelectionResult<'tcx, SelectionCandidate<'tcx>>, . ) { . let tcx = self.tcx(); . let mut pred = cache_fresh_trait_pred.skip_binder(); . 12,358 ( 0.00%) pred.remap_constness(tcx, &mut param_env); . . if !self.can_cache_candidate(&candidate) { . debug!(?pred, ?candidate, "insert_candidate_cache - candidate is not cacheable"); . return; . } . 6,128 ( 0.00%) if self.can_use_global_caches(param_env) { 14,064 ( 0.00%) if let Err(Overflow) = candidate { . // Don't cache overflow globally; we only produce this in certain modes. . } else if !pred.needs_infer() { 22,404 ( 0.00%) if !candidate.needs_infer() { . debug!(?pred, ?candidate, "insert_candidate_cache global"); . // This may overwrite the cache with the same value. 151,227 ( 0.00%) tcx.selection_cache.insert(param_env.and(pred), dep_node, candidate); . return; . } . } . } . . debug!(?pred, ?candidate, "insert_candidate_cache local"); 12,121 ( 0.00%) self.infcx.selection_cache.insert(param_env.and(pred), dep_node, candidate); . } . . /// Matches a predicate against the bounds of its self type. . /// . /// Given an obligation like `::Bar: Baz` where the self type is . /// a projection, look at the bounds of `T::Bar`, see if we can find a . /// `Baz` bound. We return indexes into the list returned by . /// `tcx.item_bounds` for any applicable bounds. 513 ( 0.00%) fn match_projection_obligation_against_definition_bounds( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> smallvec::SmallVec<[usize; 2]> { 741 ( 0.00%) let poly_trait_predicate = self.infcx().resolve_vars_if_possible(obligation.predicate); . let placeholder_trait_predicate = 969 ( 0.00%) self.infcx().replace_bound_vars_with_placeholders(poly_trait_predicate); . debug!( . ?placeholder_trait_predicate, . "match_projection_obligation_against_definition_bounds" . ); . 57 ( 0.00%) let tcx = self.infcx.tcx; 418 ( 0.00%) let (def_id, substs) = match *placeholder_trait_predicate.trait_ref.self_ty().kind() { 57 ( 0.00%) ty::Projection(ref data) => (data.item_def_id, data.substs), 190 ( 0.00%) ty::Opaque(def_id, substs) => (def_id, substs), . _ => { . span_bug!( . obligation.cause.span, . "match_projection_obligation_against_definition_bounds() called \ . but self-ty is not a projection: {:?}", . placeholder_trait_predicate.trait_ref.self_ty() . ); . } -- line 1405 ---------------------------------------- -- line 1411 ---------------------------------------- . // unnecessary ambiguity. . let mut distinct_normalized_bounds = FxHashSet::default(); . . let matching_bounds = bounds . .iter() . .enumerate() . .filter_map(|(idx, bound)| { . 
let bound_predicate = bound.kind(); 822 ( 0.00%) if let ty::PredicateKind::Trait(pred) = bound_predicate.skip_binder() { . let bound = bound_predicate.rebind(pred.trait_ref); 2,640 ( 0.00%) if self.infcx.probe(|_| { 1,030 ( 0.00%) match self.match_normalize_trait_ref( . obligation, 704 ( 0.00%) bound, 704 ( 0.00%) placeholder_trait_predicate.trait_ref, . ) { . Ok(None) => true, . Ok(Some(normalized_trait)) . if distinct_normalized_bounds.insert(normalized_trait) => . { . true . } . _ => false, -- line 1433 ---------------------------------------- -- line 1437 ---------------------------------------- . } . } . None . }) . .collect(); . . debug!(?matching_bounds, "match_projection_obligation_against_definition_bounds"); . matching_bounds 513 ( 0.00%) } . . /// Equates the trait in `obligation` with trait bound. If the two traits . /// can be equated and the normalized trait bound doesn't contain inference . /// variables or placeholders, the normalized bound is returned. 2,288 ( 0.00%) fn match_normalize_trait_ref( . &mut self, . obligation: &TraitObligation<'tcx>, . trait_bound: ty::PolyTraitRef<'tcx>, . placeholder_trait_ref: ty::TraitRef<'tcx>, . ) -> Result>, ()> { . debug_assert!(!placeholder_trait_ref.has_escaping_bound_vars()); 592 ( 0.00%) if placeholder_trait_ref.def_id != trait_bound.def_id() { . // Avoid unnecessary normalization 288 ( 0.00%) return Err(()); . } . 384 ( 0.00%) let Normalized { value: trait_bound, obligations: _ } = ensure_sufficient_stack(|| { . project::normalize_with_depth( . self, 32 ( 0.00%) obligation.param_env, . obligation.cause.clone(), 96 ( 0.00%) obligation.recursion_depth + 1, 128 ( 0.00%) trait_bound, . ) . }); 96 ( 0.00%) self.infcx 64 ( 0.00%) .at(&obligation.cause, obligation.param_env) . .sup(ty::Binder::dummy(placeholder_trait_ref), trait_bound) . .map(|InferOk { obligations: _, value: () }| { . // This method is called within a probe, so we can't have . // inference variables and placeholders escape. . if !trait_bound.needs_infer() && !trait_bound.has_placeholders() { 95 ( 0.00%) Some(trait_bound) . } else { . None . } . }) . .map_err(|_| ()) 1,584 ( 0.00%) } . . fn evaluate_where_clause<'o>( . &mut self, . stack: &TraitObligationStack<'o, 'tcx>, . where_clause_trait_ref: ty::PolyTraitRef<'tcx>, . ) -> Result { . self.evaluation_probe(|this| { 7,073 ( 0.00%) match this.match_where_clause_trait_ref(stack.obligation, where_clause_trait_ref) { 848 ( 0.00%) Ok(obligations) => this.evaluate_predicates_recursively(stack.list(), obligations), . Err(()) => Ok(EvaluatedToErr), . } . }) . } . 1,512 ( 0.00%) pub(super) fn match_projection_projections( . &mut self, . obligation: &ProjectionTyObligation<'tcx>, . env_predicate: PolyProjectionPredicate<'tcx>, . potentially_unnormalized_candidates: bool, . ) -> bool { . let mut nested_obligations = Vec::new(); 840 ( 0.00%) let (infer_predicate, _) = self.infcx.replace_bound_vars_with_fresh_vars( 504 ( 0.00%) obligation.cause.span, . LateBoundRegionConversionTime::HigherRankedType, . env_predicate, . ); 336 ( 0.00%) let infer_projection = if potentially_unnormalized_candidates { 256 ( 0.00%) ensure_sufficient_stack(|| { 128 ( 0.00%) project::normalize_with_depth_to( . self, 32 ( 0.00%) obligation.param_env, . obligation.cause.clone(), 96 ( 0.00%) obligation.recursion_depth + 1, 64 ( 0.00%) infer_predicate.projection_ty, . &mut nested_obligations, . ) . }) . } else { 272 ( 0.00%) infer_predicate.projection_ty . }; . 
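Both the probe wrapped around match_normalize_trait_ref above and evaluate_where_clause rely on the snapshot discipline of infcx.probe: take a snapshot, attempt a unification, and roll the inference state back so evaluation leaves no lasting bindings. A minimal sketch of that snapshot-and-undo pattern over a toy binding table (hypothetical types, not rustc's InferCtxt or its undo log):

    struct Bindings {
        vars: Vec<Option<&'static str>>, // index = inference variable id
        undo_log: Vec<usize>,            // variables bound since the last snapshot
    }

    impl Bindings {
        fn new(n: usize) -> Self {
            Bindings { vars: vec![None; n], undo_log: Vec::new() }
        }

        fn bind(&mut self, var: usize, value: &'static str) {
            if self.vars[var].is_none() {
                self.vars[var] = Some(value);
                self.undo_log.push(var);
            }
        }

        /// Run `op` against a snapshot and undo every binding it made.
        fn probe<R>(&mut self, op: impl FnOnce(&mut Self) -> R) -> R {
            let snapshot = self.undo_log.len();
            let result = op(self);
            while self.undo_log.len() > snapshot {
                let var = self.undo_log.pop().unwrap();
                self.vars[var] = None;
            }
            result
        }
    }

    fn main() {
        let mut b = Bindings::new(2);
        let applies = b.probe(|b| {
            b.bind(0, "u32");
            b.vars[0] == Some("u32") // pretend this is "does the candidate apply?"
        });
        assert!(applies);
        assert_eq!(b.vars[0], None); // the tentative binding was rolled back
    }

The rollback is what lets selection try many candidates against the same obligation without the failed attempts contaminating later inference.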
168 ( 0.00%) self.infcx 336 ( 0.00%) .at(&obligation.cause, obligation.param_env) 336 ( 0.00%) .sup(obligation.predicate, infer_projection) 1,221 ( 0.00%) .map_or(false, |InferOk { obligations, value: () }| { . self.evaluate_predicates_recursively( . TraitObligationStackList::empty(&ProvisionalEvaluationCache::default()), . nested_obligations.into_iter().chain(obligations), . ) 254 ( 0.00%) .map_or(false, |res| res.may_apply()) . }) 1,512 ( 0.00%) } . . /////////////////////////////////////////////////////////////////////////// . // WINNOW . // . // Winnowing is the process of attempting to resolve ambiguity by . // probing further. During the winnowing process, we unify all . // type variables and then we also attempt to evaluate recursive . // bounds to see if they are satisfied. -- line 1544 ---------------------------------------- -- line 1550 ---------------------------------------- . /// See the comment for "SelectionCandidate" for more details. . fn candidate_should_be_dropped_in_favor_of( . &mut self, . sized_predicate: bool, . victim: &EvaluatedCandidate<'tcx>, . other: &EvaluatedCandidate<'tcx>, . needs_infer: bool, . ) -> bool { 2,145 ( 0.00%) if victim.candidate == other.candidate { . return true; . } . . // Check if a bound would previously have been removed when normalizing . // the param_env so that it can be given the lowest priority. See . // #50825 for the motivation for this. . let is_global = |cand: &ty::PolyTraitPredicate<'tcx>| { . cand.is_global() && !cand.has_late_bound_regions() . }; . . // (*) Prefer `BuiltinCandidate { has_nested: false }`, `PointeeCandidate`, . // `DiscriminantKindCandidate`, and `ConstDropCandidate` to anything else. . // . // This is a fix for #53123 and prevents winnowing from accidentally extending the . // lifetime of a variable. 38,790 ( 0.00%) match (&other.candidate, &victim.candidate) { . (_, AutoImplCandidate(..)) | (AutoImplCandidate(..), _) => { . bug!( . "default implementations shouldn't be recorded \ . when there are other valid candidates" . ); . } . . // (*) -- line 1582 ---------------------------------------- -- line 1674 ---------------------------------------- . } . (ObjectCandidate(_), ProjectionCandidate(_)) . | (ProjectionCandidate(_), ObjectCandidate(_)) => { . bug!("Have both object and projection candidate") . } . . // Arbitrarily give projection and object candidates priority. . ( 4,290 ( 0.00%) ObjectCandidate(_) | ProjectionCandidate(_), . ImplCandidate(..) . | ClosureCandidate . | GeneratorCandidate . | FnPointerCandidate { .. } . | BuiltinObjectCandidate . | BuiltinUnsizeCandidate . | TraitUpcastingUnsizeCandidate(_) . | BuiltinCandidate { .. } -- line 1690 ---------------------------------------- -- line 1696 ---------------------------------------- . | ClosureCandidate . | GeneratorCandidate . | FnPointerCandidate { .. } . | BuiltinObjectCandidate . | BuiltinUnsizeCandidate . | TraitUpcastingUnsizeCandidate(_) . | BuiltinCandidate { .. } . | TraitAliasCandidate(..), 8,580 ( 0.00%) ObjectCandidate(_) | ProjectionCandidate(_), . ) => false, . 21,450 ( 0.00%) (&ImplCandidate(other_def), &ImplCandidate(victim_def)) => { . // See if we can toss out `victim` based on specialization. . // This requires us to know *for sure* that the `other` impl applies . // i.e., `EvaluatedToOk`. . // . // FIXME(@lcnr): Using `modulo_regions` here seems kind of scary . // to me but is required for `std` to compile, so I didn't change it . // for now. . 
let tcx = self.tcx(); 8,580 ( 0.00%) if other.evaluation.must_apply_modulo_regions() { 17,008 ( 0.00%) if tcx.specializes((other_def, victim_def)) { . return true; . } . } . 10,720 ( 0.00%) if other.evaluation.must_apply_considering_regions() { 8,464 ( 0.00%) match tcx.impls_are_allowed_to_overlap(other_def, victim_def) { . Some(ty::ImplOverlapKind::Permitted { marker: true }) => { . // Subtle: If the predicate we are evaluating has inference . // variables, do *not* allow discarding candidates due to . // marker trait impls. . // . // Without this restriction, we could end up accidentally . // constrainting inference variables based on an arbitrarily . // chosen trait impl. -- line 1731 ---------------------------------------- -- line 1790 ---------------------------------------- . | BuiltinUnsizeCandidate . | TraitUpcastingUnsizeCandidate(_) . | BuiltinCandidate { has_nested: true } . | TraitAliasCandidate(..), . ) => false, . } . } . 15,720 ( 0.00%) fn sized_conditions( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> BuiltinImplConditions<'tcx> { . use self::BuiltinImplConditions::{Ambiguous, None, Where}; . . // NOTE: binder moved to (*) 4,716 ( 0.00%) let self_ty = self.infcx.shallow_resolve(obligation.predicate.skip_binder().self_ty()); . 7,884 ( 0.00%) match self_ty.kind() { . ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) . | ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::RawPtr(..) -- line 1815 ---------------------------------------- -- line 1817 ---------------------------------------- . | ty::Ref(..) . | ty::Generator(..) . | ty::GeneratorWitness(..) . | ty::Array(..) . | ty::Closure(..) . | ty::Never . | ty::Error(_) => { . // safe for everything 532 ( 0.00%) Where(ty::Binder::dummy(Vec::new())) . } . . ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => None, . . ty::Tuple(tys) => Where( . obligation . .predicate 393 ( 0.00%) .rebind(tys.last().into_iter().map(|k| k.expect_ty()).collect()), . ), . . ty::Adt(def, substs) => { 1,520 ( 0.00%) let sized_crit = def.sized_constraint(self.tcx()); . // (*) binder moved here 3,800 ( 0.00%) Where( . obligation.predicate.rebind({ 760 ( 0.00%) sized_crit.iter().map(|ty| ty.subst(self.tcx(), substs)).collect() . }), . ) . } . . ty::Projection(_) | ty::Param(_) | ty::Opaque(..) => None, . ty::Infer(ty::TyVar(_)) => Ambiguous, . . ty::Placeholder(..) . | ty::Bound(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("asked to assemble builtin bounds of unexpected type: {:?}", self_ty); . } . } 12,576 ( 0.00%) } . 8,104 ( 0.00%) fn copy_clone_conditions( . &mut self, . obligation: &TraitObligation<'tcx>, . ) -> BuiltinImplConditions<'tcx> { . // NOTE: binder moved to (*) 2,026 ( 0.00%) let self_ty = self.infcx.shallow_resolve(obligation.predicate.skip_binder().self_ty()); . . use self::BuiltinImplConditions::{Ambiguous, None, Where}; . 5,239 ( 0.00%) match *self_ty.kind() { . ty::Infer(ty::IntVar(_)) . | ty::Infer(ty::FloatVar(_)) . | ty::FnDef(..) . | ty::FnPtr(_) 92 ( 0.00%) | ty::Error(_) => Where(ty::Binder::dummy(Vec::new())), . . ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::Char . | ty::RawPtr(..) . | ty::Never -- line 1879 ---------------------------------------- -- line 1886 ---------------------------------------- . ty::Dynamic(..) . | ty::Str . | ty::Slice(..) . | ty::Generator(..) . | ty::GeneratorWitness(..) . | ty::Foreign(..) . | ty::Ref(_, _, hir::Mutability::Mut) => None, . 
29 ( 0.00%) ty::Tuple(tys) => { . // (*) binder moved here 158 ( 0.00%) Where(obligation.predicate.rebind(tys.iter().map(|k| k.expect_ty()).collect())) . } . 67 ( 0.00%) ty::Closure(_, substs) => { . // (*) binder moved here 201 ( 0.00%) let ty = self.infcx.shallow_resolve(substs.as_closure().tupled_upvars_ty()); 134 ( 0.00%) if let ty::Infer(ty::TyVar(_)) = ty.kind() { . // Not yet resolved. . Ambiguous . } else { 469 ( 0.00%) Where(obligation.predicate.rebind(substs.as_closure().upvar_tys().collect())) . } . } . . ty::Adt(..) | ty::Projection(..) | ty::Param(..) | ty::Opaque(..) => { . // Fallback to whatever user-defined impls exist in this case. . None . } . -- line 1914 ---------------------------------------- -- line 1920 ---------------------------------------- . } . . ty::Placeholder(..) . | ty::Bound(..) . | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => { . bug!("asked to assemble builtin bounds of unexpected type: {:?}", self_ty); . } . } 6,078 ( 0.00%) } . . /// For default impls, we need to break apart a type into its . /// "constituent types" -- meaning, the types that it contains. . /// . /// Here are some (simple) examples: . /// . /// ``` . /// (i32, u32) -> [i32, u32] -- line 1936 ---------------------------------------- -- line 1937 ---------------------------------------- . /// Foo where struct Foo { x: i32, y: u32 } -> [i32, u32] . /// Bar where struct Bar { x: T, y: u32 } -> [i32, u32] . /// Zed where enum Zed { A(T), B(u32) } -> [i32, u32] . /// ``` . fn constituent_types_for_ty( . &self, . t: ty::Binder<'tcx, Ty<'tcx>>, . ) -> ty::Binder<'tcx, Vec>> { 20 ( 0.00%) match *t.skip_binder().kind() { . ty::Uint(_) . | ty::Int(_) . | ty::Bool . | ty::Float(_) . | ty::FnDef(..) . | ty::FnPtr(_) . | ty::Str . | ty::Error(_) -- line 1953 ---------------------------------------- -- line 1968 ---------------------------------------- . ty::RawPtr(ty::TypeAndMut { ty: element_ty, .. }) | ty::Ref(_, element_ty, _) => { . t.rebind(vec![element_ty]) . } . . ty::Array(element_ty, _) | ty::Slice(element_ty) => t.rebind(vec![element_ty]), . . ty::Tuple(ref tys) => { . // (T1, ..., Tn) -- meets any bound that all of T1...Tn meet 2 ( 0.00%) t.rebind(tys.iter().map(|k| k.expect_ty()).collect()) . } . . ty::Closure(_, ref substs) => { . let ty = self.infcx.shallow_resolve(substs.as_closure().tupled_upvars_ty()); . t.rebind(vec![ty]) . } . . ty::Generator(_, ref substs, _) => { -- line 1984 ---------------------------------------- -- line 2003 ---------------------------------------- . // We can resolve the `impl Trait` to its concrete type, . // which enforces a DAG between the functions requiring . // the auto trait bounds in question. . t.rebind(vec![self.tcx().type_of(def_id).subst(self.tcx(), substs)]) . } . } . } . 2,016 ( 0.00%) fn collect_predicates_for_types( . &mut self, . param_env: ty::ParamEnv<'tcx>, . cause: ObligationCause<'tcx>, . recursion_depth: usize, . trait_def_id: DefId, . types: ty::Binder<'tcx, Vec>>, . ) -> Vec> { . // Because the types were potentially derived from -- line 2019 ---------------------------------------- -- line 2030 ---------------------------------------- . // 2. Produce something like `&'0 i32 : Copy` . // 3. Re-bind the regions back to `for<'a> &'a i32 : Copy` . . types . .as_ref() . .skip_binder() // binder moved -\ . .iter() . .flat_map(|ty| { 546 ( 0.00%) let ty: ty::Binder<'tcx, Ty<'tcx>> = types.rebind(ty); // <----/ . 
2,184 ( 0.00%) self.infcx.commit_unconditionally(|_| { 546 ( 0.00%) let placeholder_ty = self.infcx.replace_bound_vars_with_placeholders(ty); 910 ( 0.00%) let Normalized { value: normalized_ty, mut obligations } = 1,820 ( 0.00%) ensure_sufficient_stack(|| { . project::normalize_with_depth( . self, 182 ( 0.00%) param_env, . cause.clone(), 182 ( 0.00%) recursion_depth, 182 ( 0.00%) placeholder_ty, . ) . }); 2,002 ( 0.00%) let placeholder_obligation = predicate_for_trait_def( . self.tcx(), 182 ( 0.00%) param_env, . cause.clone(), 364 ( 0.00%) trait_def_id, . recursion_depth, . normalized_ty, . &[], . ); 1,092 ( 0.00%) obligations.push(placeholder_obligation); . obligations . }) . }) . .collect() 1,008 ( 0.00%) } . . /////////////////////////////////////////////////////////////////////////// . // Matching . // . // Matching is a common path used for both evaluation and . // confirmation. It basically unifies types that appear in impls . // and traits. This does affect the surrounding environment; . // therefore, when used during evaluation, match routines must be -- line 2074 ---------------------------------------- -- line 2075 ---------------------------------------- . // run inside of a `probe()` so that their side-effects are . // contained. . . fn rematch_impl( . &mut self, . impl_def_id: DefId, . obligation: &TraitObligation<'tcx>, . ) -> Normalized<'tcx, SubstsRef<'tcx>> { 56,840 ( 0.00%) match self.match_impl(impl_def_id, obligation) { 45,472 ( 0.00%) Ok(substs) => substs, . Err(()) => { . bug!( . "Impl {:?} was matchable against {:?} but now is not", . impl_def_id, . obligation . ); . } . } . } . 415,188 ( 0.01%) #[tracing::instrument(level = "debug", skip(self))] . fn match_impl( . &mut self, . impl_def_id: DefId, . obligation: &TraitObligation<'tcx>, . ) -> Result>, ()> { . let impl_trait_ref = self.tcx().impl_trait_ref(impl_def_id).unwrap(); . . // Before we create the substitutions and everything, first . // consider a "quick reject". This avoids creating more types . // and so forth that we need to. . if self.fast_reject_trait_refs(obligation, &impl_trait_ref) { 2,370 ( 0.00%) return Err(()); . } . . let placeholder_obligation = 144,669 ( 0.00%) self.infcx().replace_bound_vars_with_placeholders(obligation.predicate); 62,001 ( 0.00%) let placeholder_obligation_trait_ref = placeholder_obligation.trait_ref; . 103,335 ( 0.00%) let impl_substs = self.infcx.fresh_substs_for_item(obligation.cause.span, impl_def_id); . 41,334 ( 0.00%) let impl_trait_ref = impl_trait_ref.subst(self.tcx(), impl_substs); . . debug!(?impl_trait_ref); . 124,002 ( 0.00%) let Normalized { value: impl_trait_ref, obligations: mut nested_obligations } = 144,669 ( 0.00%) ensure_sufficient_stack(|| { . project::normalize_with_depth( . self, 20,667 ( 0.00%) obligation.param_env, . obligation.cause.clone(), 62,001 ( 0.00%) obligation.recursion_depth + 1, 41,334 ( 0.00%) impl_trait_ref, . ) . }); . . debug!(?impl_trait_ref, ?placeholder_obligation_trait_ref); . . let cause = ObligationCause::new( 20,667 ( 0.00%) obligation.cause.span, 41,334 ( 0.00%) obligation.cause.body_id, 186,003 ( 0.00%) ObligationCauseCode::MatchImpl(obligation.cause.clone(), impl_def_id), . ); . 41,334 ( 0.00%) let InferOk { obligations, .. } = self . .infcx 20,667 ( 0.00%) .at(&cause, obligation.param_env) . .eq(placeholder_obligation_trait_ref, impl_trait_ref) . .map_err(|e| debug!("match_impl: failed eq_trait_refs due to `{}`", e))?; . nested_obligations.extend(obligations); . 45,048 ( 0.00%) if !self.intercrate . 
&& self.tcx().impl_polarity(impl_def_id) == ty::ImplPolarity::Reservation . { . debug!("match_impl: reservation impls only apply in intercrate mode"); . return Err(()); . } . . debug!(?impl_substs, ?nested_obligations, "match_impl: success"); 90,072 ( 0.00%) Ok(Normalized { value: impl_substs, obligations: nested_obligations }) . } . . fn fast_reject_trait_refs( . &mut self, . obligation: &TraitObligation<'_>, . impl_trait_ref: &ty::TraitRef<'_>, . ) -> bool { . // We can avoid creating type variables and doing the full . // substitution if we find that any of the input types, when . // simplified, do not match. . 65,556 ( 0.00%) iter::zip(obligation.predicate.skip_binder().trait_ref.substs, impl_trait_ref.substs).any( . |(obligation_arg, impl_arg)| { 214,914 ( 0.00%) match (obligation_arg.unpack(), impl_arg.unpack()) { . (GenericArgKind::Type(obligation_ty), GenericArgKind::Type(impl_ty)) => { . // Note, we simplify parameters for the obligation but not the . // impl so that we do not reject a blanket impl but do reject . // more concrete impls if we're searching for `T: Trait`. 286,488 ( 0.00%) let simplified_obligation_ty = fast_reject::simplify_type( . self.tcx(), . obligation_ty, . SimplifyParams::Yes, . StripReferences::No, . ); 214,866 ( 0.00%) let simplified_impl_ty = fast_reject::simplify_type( . self.tcx(), . impl_ty, . SimplifyParams::No, . StripReferences::No, . ); . 107,433 ( 0.00%) simplified_obligation_ty.is_some() . && simplified_impl_ty.is_some() . && simplified_obligation_ty != simplified_impl_ty . } . (GenericArgKind::Lifetime(_), GenericArgKind::Lifetime(_)) => { . // Lifetimes can never cause a rejection. . false . } . (GenericArgKind::Const(_), GenericArgKind::Const(_)) => { -- line 2194 ---------------------------------------- -- line 2201 ---------------------------------------- . } . }, . ) . } . . /// Normalize `where_clause_trait_ref` and try to match it against . /// `obligation`. If successful, return any predicates that . /// result from the normalization. 5,787 ( 0.00%) fn match_where_clause_trait_ref( . &mut self, . obligation: &TraitObligation<'tcx>, . where_clause_trait_ref: ty::PolyTraitRef<'tcx>, . ) -> Result>, ()> { . self.match_poly_trait_ref(obligation, where_clause_trait_ref) 5,144 ( 0.00%) } . . /// Returns `Ok` if `poly_trait_ref` being true implies that the . /// obligation is satisfied. . #[instrument(skip(self), level = "debug")] . fn match_poly_trait_ref( . &mut self, . obligation: &TraitObligation<'tcx>, . poly_trait_ref: ty::PolyTraitRef<'tcx>, . ) -> Result>, ()> { 1,691 ( 0.00%) self.infcx 643 ( 0.00%) .at(&obligation.cause, obligation.param_env) 5,073 ( 0.00%) .sup(obligation.predicate.to_poly_trait_ref(), poly_trait_ref) . .map(|InferOk { obligations, .. }| obligations) . .map_err(|_| ()) . } . . /////////////////////////////////////////////////////////////////////////// . // Miscellany . . fn match_fresh_trait_refs( . &self, . previous: ty::PolyTraitPredicate<'tcx>, . current: ty::PolyTraitPredicate<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> bool { 132 ( 0.00%) let mut matcher = ty::_match::Match::new(self.tcx(), param_env); . matcher.relate(previous, current).is_ok() . } . . fn push_stack<'o>( . &mut self, . previous_stack: TraitObligationStackList<'o, 'tcx>, . obligation: &'o TraitObligation<'tcx>, . ) -> TraitObligationStack<'o, 'tcx> { 321,918 ( 0.01%) let fresh_trait_pred = obligation.predicate.fold_with(&mut self.freshener); . . 
let dfn = previous_stack.cache.next_dfn(); 8,712 ( 0.00%) let depth = previous_stack.depth() + 1; 456,741 ( 0.01%) TraitObligationStack { . obligation, 265,361 ( 0.00%) fresh_trait_pred, . reached_depth: Cell::new(depth), . previous: previous_stack, . dfn, . depth, . } . } . . #[instrument(skip(self), level = "debug")] . fn closure_trait_ref_unnormalized( . &mut self, . obligation: &TraitObligation<'tcx>, . substs: SubstsRef<'tcx>, . ) -> ty::PolyTraitRef<'tcx> { 693 ( 0.00%) let closure_sig = substs.as_closure().sig(); . . debug!(?closure_sig); . . // (1) Feels icky to skip the binder here, but OTOH we know . // that the self-type is an unboxed closure type and hence is . // in fact unparameterized (or at least does not reference any . // regions bound in the obligation). Still probably some . // refactoring could make this nicer. 2,310 ( 0.00%) closure_trait_ref_and_return_type( . self.tcx(), 1,386 ( 0.00%) obligation.predicate.def_id(), 231 ( 0.00%) obligation.predicate.skip_binder().self_ty(), // (1) 924 ( 0.00%) closure_sig, . util::TupleArgumentsFlag::No, . ) . .map_bound(|(trait_ref, _)| trait_ref) . } . . fn generator_trait_ref_unnormalized( . &mut self, . obligation: &TraitObligation<'tcx>, -- line 2291 ---------------------------------------- -- line 2307 ---------------------------------------- . ) . .map_bound(|(trait_ref, ..)| trait_ref) . } . . /// Returns the obligations that are implied by instantiating an . /// impl or trait. The obligations are substituted and fully . /// normalized. This is used when confirming an impl or default . /// impl. 261,556 ( 0.00%) #[tracing::instrument(level = "debug", skip(self, cause, param_env))] . fn impl_or_trait_obligations( . &mut self, . cause: ObligationCause<'tcx>, . recursion_depth: usize, . param_env: ty::ParamEnv<'tcx>, . def_id: DefId, // of impl or trait . substs: SubstsRef<'tcx>, // for impl or trait . ) -> Vec> { -- line 2323 ---------------------------------------- -- line 2334 ---------------------------------------- . // V: Iterator, V: Sized, . // ::Item: Copy . // When we substitute, say, `V => IntoIter, U => $0`, the last . // obligation will normalize to `<$0 as Iterator>::Item = $1` and . // `$1: Copy`, so we must ensure the obligations are emitted in . // that order. . let predicates = tcx.predicates_of(def_id); . debug!(?predicates); 11,372 ( 0.00%) assert_eq!(predicates.parent, None); 34,116 ( 0.00%) let mut obligations = Vec::with_capacity(predicates.predicates.len()); . for (predicate, _) in predicates.predicates { . debug!(?predicate); 78,620 ( 0.00%) let predicate = normalize_with_depth_to( . self, . param_env, . cause.clone(), . recursion_depth, 15,724 ( 0.00%) predicate.subst(tcx, substs), . &mut obligations, . ); 157,240 ( 0.00%) obligations.push(Obligation { . cause: cause.clone(), . recursion_depth, . param_env, . predicate, . }); . } . . // We are performing deduplication here to avoid exponential blowups -- line 2362 ---------------------------------------- -- line 2363 ---------------------------------------- . // (#38528) from happening, but the real cause of the duplication is . // unknown. What we know is that the deduplication avoids exponential . // amount of predicates being propagated when processing deeply nested . // types. . // . // This code is hot enough that it's worth avoiding the allocation . // required for the FxHashSet when possible. Special-casing lengths 0, . // 1 and 2 covers roughly 75-80% of the cases. 23,076 ( 0.00%) if obligations.len() <= 1 { . // No possibility of duplicates. 
5,388 ( 0.00%) } else if obligations.len() == 2 { . // Only two elements. Drop the second if they are equal. . if obligations[0] == obligations[1] { . obligations.truncate(1); . } . } else { . // Three or more elements. Use a general deduplication process. . let mut seen = FxHashSet::default(); 1,490 ( 0.00%) obligations.retain(|i| seen.insert(i.clone())); . } . 45,488 ( 0.00%) obligations . } . } . . trait TraitObligationExt<'tcx> { . fn derived_cause( . &self, . variant: fn(DerivedObligationCause<'tcx>) -> ObligationCauseCode<'tcx>, . ) -> ObligationCause<'tcx>; -- line 2392 ---------------------------------------- -- line 2408 ---------------------------------------- . */ . . let obligation = self; . . // NOTE(flaper87): As of now, it keeps track of the whole error . // chain. Ideally, we should have a way to configure this either . // by using -Z verbose or just a CLI argument. . let derived_cause = DerivedObligationCause { 46,160 ( 0.00%) parent_trait_pred: obligation.predicate, 22,916 ( 0.00%) parent_code: obligation.cause.clone_code(), . }; . let derived_code = variant(derived_cause); 138,308 ( 0.00%) ObligationCause::new(obligation.cause.span, obligation.cause.body_id, derived_code) . } . } . . impl<'o, 'tcx> TraitObligationStack<'o, 'tcx> { . fn list(&'o self) -> TraitObligationStackList<'o, 'tcx> { . TraitObligationStackList::with(self) . } . . fn cache(&self) -> &'o ProvisionalEvaluationCache<'tcx> { 10,587 ( 0.00%) self.previous.cache . } . . fn iter(&'o self) -> TraitObligationStackList<'o, 'tcx> { . self.list() . } . . /// Indicates that attempting to evaluate this stack entry . /// required accessing something from the stack at depth `reached_depth`. 8 ( 0.00%) fn update_reached_depth(&self, reached_depth: usize) { 4 ( 0.00%) assert!( 4 ( 0.00%) self.depth >= reached_depth, . "invoked `update_reached_depth` with something under this stack: \ . self.depth={} reached_depth={}", . self.depth, . reached_depth, . ); . debug!(reached_depth, "update_reached_depth"); . let mut p = self; 12 ( 0.00%) while reached_depth < p.depth { . debug!(?p.fresh_trait_pred, "update_reached_depth: marking as cycle participant"); 4 ( 0.00%) p.reached_depth.set(p.reached_depth.get().min(reached_depth)); 4 ( 0.00%) p = p.previous.head.unwrap(); . } 8 ( 0.00%) } . } . . /// The "provisional evaluation cache" is used to store intermediate cache results . /// when solving auto traits. Auto traits are unusual in that they can support . /// cycles. So, for example, a "proof tree" like this would be ok: . /// . /// - `Foo: Send` :- . /// - `Bar: Send` :- -- line 2462 ---------------------------------------- -- line 2540 ---------------------------------------- . /// evaluation. When we create an entry in the evaluation cache using this provisional . /// cache entry (see `on_completion`), we use this `dep_node` to ensure that future reads from . /// the cache will have all of the necessary incr comp dependencies tracked. . dep_node: DepNodeIndex, . } . . impl<'tcx> Default for ProvisionalEvaluationCache<'tcx> { . fn default() -> Self { 58,561 ( 0.00%) Self { dfn: Cell::new(0), map: Default::default() } . } . } . . impl<'tcx> ProvisionalEvaluationCache<'tcx> { . /// Get the next DFN in sequence (basically a counter). . fn next_dfn(&self) -> usize { 4,356 ( 0.00%) let result = self.dfn.get(); 17,424 ( 0.00%) self.dfn.set(result + 1); . result . } . . /// Check the provisional cache for any result for . /// `fresh_trait_ref`. If there is a hit, then you must consider . 
/// it an access to the stack slots at depth . /// `reached_depth` (from the returned value). . fn get_provisional( -- line 2564 ---------------------------------------- -- line 2663 ---------------------------------------- . fn on_completion( . &self, . dfn: usize, . mut op: impl FnMut(ty::PolyTraitPredicate<'tcx>, EvaluationResult, DepNodeIndex), . ) { . debug!(?dfn, "on_completion"); . . for (fresh_trait_pred, eval) in 20,622 ( 0.00%) self.map.borrow_mut().drain_filter(|_k, eval| eval.from_dfn >= dfn) . { . debug!(?fresh_trait_pred, ?eval, "on_completion"); . . op(fresh_trait_pred, eval.result, eval.dep_node); . } . } . } . -- line 2679 ---------------------------------------- -- line 2692 ---------------------------------------- . TraitObligationStackList { cache: r.cache(), head: Some(r) } . } . . fn head(&self) -> Option<&'o TraitObligationStack<'o, 'tcx>> { . self.head . } . . fn depth(&self) -> usize { 10,559 ( 0.00%) if let Some(head) = self.head { head.depth } else { 0 } . } . } . . impl<'o, 'tcx> Iterator for TraitObligationStackList<'o, 'tcx> { . type Item = &'o TraitObligationStack<'o, 'tcx>; . . fn next(&mut self) -> Option<&'o TraitObligationStack<'o, 'tcx>> { 10,796 ( 0.00%) let o = self.head?; 989 ( 0.00%) *self = o.previous; . Some(o) . } . } . . impl<'o, 'tcx> fmt::Debug for TraitObligationStack<'o, 'tcx> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(f, "TraitObligationStack({:?})", self.obligation) . } -- line 2717 ---------------------------------------- 470,400 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/traversal.rs -------------------------------------------------------------------------------- Ir -- line 23 ---------------------------------------- . pub struct Preorder<'a, 'tcx> { . body: &'a Body<'tcx>, . visited: BitSet, . worklist: Vec, . root_is_start_block: bool, . } . . impl<'a, 'tcx> Preorder<'a, 'tcx> { 42,950 ( 0.00%) pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> Preorder<'a, 'tcx> { 4,295 ( 0.00%) let worklist = vec![root]; . 12,885 ( 0.00%) Preorder { . body, . visited: BitSet::new_empty(body.basic_blocks().len()), 17,180 ( 0.00%) worklist, . root_is_start_block: root == START_BLOCK, . } 38,655 ( 0.00%) } . } . 5,086 ( 0.00%) pub fn preorder<'a, 'tcx>(body: &'a Body<'tcx>) -> Preorder<'a, 'tcx> { 15,598 ( 0.00%) Preorder::new(body, START_BLOCK) 7,629 ( 0.00%) } . . impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> { . type Item = (BasicBlock, &'a BasicBlockData<'tcx>); . 1,620,476 ( 0.03%) fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { 231,346 ( 0.00%) while let Some(idx) = self.worklist.pop() { 231,346 ( 0.00%) if !self.visited.insert(idx) { . continue; . } . 143,021 ( 0.00%) let data = &self.body[idx]; . 715,105 ( 0.01%) if let Some(ref term) = data.terminator { . self.worklist.extend(term.successors()); . } . . return Some((idx, data)); . } . . None 1,325,844 ( 0.02%) } . . fn size_hint(&self) -> (usize, Option) { . // All the blocks, minus the number of blocks we've visited. . let upper = self.body.basic_blocks().len() - self.visited.count(); . . let lower = if self.root_is_start_block { . // We will visit all remaining blocks exactly once. . upper -- line 74 ---------------------------------------- -- line 101 ---------------------------------------- . pub struct Postorder<'a, 'tcx> { . body: &'a Body<'tcx>, . visited: BitSet, . 
visit_stack: Vec<(BasicBlock, Successors<'a>)>, . root_is_start_block: bool, . } . . impl<'a, 'tcx> Postorder<'a, 'tcx> { 41,615 ( 0.00%) pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> Postorder<'a, 'tcx> { . let mut po = Postorder { . body, . visited: BitSet::new_empty(body.basic_blocks().len()), . visit_stack: Vec::new(), . root_is_start_block: root == START_BLOCK, . }; . . let data = &po.body[root]; . 41,615 ( 0.00%) if let Some(ref term) = data.terminator { . po.visited.insert(root); 23,780 ( 0.00%) po.visit_stack.push((root, term.successors())); 11,890 ( 0.00%) po.traverse_successor(); . } . . po 53,505 ( 0.00%) } . 1,408,498 ( 0.02%) fn traverse_successor(&mut self) { . // This is quite a complex loop due to 1. the borrow checker not liking it much . // and 2. what exactly is going on is not clear . // . // It does the actual traversal of the graph, while the `next` method on the iterator . // just pops off of the stack. `visit_stack` is a stack containing pairs of nodes and . // iterators over the successors of those nodes. Each iteration attempts to get the next . // node from the top of the stack, then pushes that node and an iterator over the . // successors to the top of the stack. This loop only grows `visit_stack`, stopping when -- line 136 ---------------------------------------- -- line 169 ---------------------------------------- . // . // Now that the top of the stack has no successors we can traverse, each item will . // be popped off during iteration until we get back to `A`. This yields [E, D, B]. . // . // When we yield `B` and call `traverse_successor`, we push `C` to the stack, but . // since we've already visited `E`, that child isn't added to the stack. The last . // two iterations yield `C` and finally `A` for a final traversal of [E, D, B, C, A] . loop { 1,530,692 ( 0.03%) let bb = if let Some(&mut (_, ref mut iter)) = self.visit_stack.last_mut() { 938,940 ( 0.02%) if let Some(&bb) = iter.next() { . bb . } else { . break; . } . } else { . break; . }; . 312,980 ( 0.01%) if self.visited.insert(bb) { 1,325,268 ( 0.02%) if let Some(term) = &self.body[bb].terminator { 757,296 ( 0.01%) self.visit_stack.push((bb, term.successors())); . } . } . } 1,609,712 ( 0.03%) } . } . 68 ( 0.00%) pub fn postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> Postorder<'a, 'tcx> { 68 ( 0.00%) Postorder::new(body, START_BLOCK) 102 ( 0.00%) } . . impl<'a, 'tcx> Iterator for Postorder<'a, 'tcx> { . type Item = (BasicBlock, &'a BasicBlockData<'tcx>); . 225 ( 0.00%) fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { . let next = self.visit_stack.pop(); 195,269 ( 0.00%) if next.is_some() { 384,627 ( 0.01%) self.traverse_successor(); . } . 195,269 ( 0.00%) next.map(|(bb, _)| (bb, &self.body[bb])) 375 ( 0.00%) } . 170 ( 0.00%) fn size_hint(&self) -> (usize, Option) { . // All the blocks, minus the number of blocks we've visited. 17,720 ( 0.00%) let upper = self.body.basic_blocks().len() - self.visited.count(); . 17,720 ( 0.00%) let lower = if self.root_is_start_block { . // We will visit all remaining blocks exactly once. . upper . } else { . self.visit_stack.len() . }; . 102 ( 0.00%) (lower, Some(upper)) 238 ( 0.00%) } . } . . /// Reverse postorder traversal of a graph . /// . /// Reverse postorder is the reverse order of a postorder traversal. . /// This is different to a preorder traversal and represents a natural . /// linearization of control-flow. . /// -- line 232 ---------------------------------------- -- line 253 ---------------------------------------- . 
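Note: the Postorder iterator annotated above avoids recursion by keeping an explicit `visit_stack` of (block, successor-iterator) pairs, as the long comment in `traverse_successor` describes. Below is a minimal standalone sketch of that same technique, using plain `usize` node ids, a `Vec<Vec<usize>>` adjacency list, and an index cursor in place of the compiler's `BasicBlock`/`Successors` types; all names here are illustrative, not rustc's.

```
fn postorder(graph: &[Vec<usize>], root: usize) -> Vec<usize> {
    let mut visited = vec![false; graph.len()];
    // Explicit DFS stack of (node, index of the next successor to try).
    let mut stack: Vec<(usize, usize)> = Vec::new();
    let mut order = Vec::new();

    visited[root] = true;
    stack.push((root, 0));

    while let Some(&(node, next)) = stack.last() {
        if next < graph[node].len() {
            // Advance this node's successor cursor, then descend if unvisited.
            stack.last_mut().unwrap().1 += 1;
            let succ = graph[node][next];
            if !visited[succ] {
                visited[succ] = true;
                stack.push((succ, 0));
            }
        } else {
            // All successors handled: emit the node (children before parent).
            stack.pop();
            order.push(node);
        }
    }
    order
}

fn main() {
    // 0 -> {1, 2}, 1 -> {3, 2}; postorder yields children before parents.
    let graph = vec![vec![1, 2], vec![3, 2], vec![], vec![]];
    assert_eq!(postorder(&graph, 0), vec![3, 2, 1, 0]);
}
```

Reversing the collected `order` gives the reverse postorder that the `ReversePostorder` iterator annotated next hands out.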
pub struct ReversePostorder<'a, 'tcx> { . body: &'a Body<'tcx>, . blocks: Vec, . idx: usize, . } . . impl<'a, 'tcx> ReversePostorder<'a, 'tcx> { . pub fn new(body: &'a Body<'tcx>, root: BasicBlock) -> ReversePostorder<'a, 'tcx> { 11,822 ( 0.00%) let blocks: Vec<_> = Postorder::new(body, root).map(|(bb, _)| bb).collect(); . 5,911 ( 0.00%) let len = blocks.len(); . 29,555 ( 0.00%) ReversePostorder { body, blocks, idx: len } . } . } . 35,466 ( 0.00%) pub fn reverse_postorder<'a, 'tcx>(body: &'a Body<'tcx>) -> ReversePostorder<'a, 'tcx> { . ReversePostorder::new(body, START_BLOCK) 29,555 ( 0.00%) } . . impl<'a, 'tcx> Iterator for ReversePostorder<'a, 'tcx> { . type Item = (BasicBlock, &'a BasicBlockData<'tcx>); . 201,139 ( 0.00%) fn next(&mut self) -> Option<(BasicBlock, &'a BasicBlockData<'tcx>)> { 804,556 ( 0.01%) if self.idx == 0 { . return None; . } 390,456 ( 0.01%) self.idx -= 1; . . self.blocks.get(self.idx).map(|&bb| (bb, &self.body[bb])) 402,278 ( 0.01%) } . . fn size_hint(&self) -> (usize, Option) { . (self.idx, Some(self.idx)) . } . } . . impl<'a, 'tcx> ExactSizeIterator for ReversePostorder<'a, 'tcx> {} . -- line 291 ---------------------------------------- -- line 295 ---------------------------------------- . /// This is clearer than writing `preorder` in cases where the order doesn't matter. . pub fn reachable<'a, 'tcx>( . body: &'a Body<'tcx>, . ) -> impl 'a + Iterator)> { . preorder(body) . } . . /// Returns a `BitSet` containing all basic blocks reachable from the `START_BLOCK`. 10,512 ( 0.00%) pub fn reachable_as_bitset<'tcx>(body: &Body<'tcx>) -> BitSet { . let mut iter = preorder(body); . (&mut iter).for_each(drop); 7,008 ( 0.00%) iter.visited 10,512 ( 0.00%) } 1,976,320 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/subst.rs -------------------------------------------------------------------------------- Ir -- line 22 ---------------------------------------- . . /// An entity in the Rust type system, which can be one of . /// several kinds (types, lifetimes, and consts). . /// To reduce memory usage, a `GenericArg` is an interned pointer, . /// with the lowest 2 bits being reserved for a tag to . /// indicate the type (`Ty`, `Region`, or `Const`) it points to. . #[derive(Copy, Clone, PartialEq, Eq, Hash)] . pub struct GenericArg<'tcx> { 851,978 ( 0.01%) ptr: NonZeroUsize, 2,351,071 ( 0.04%) marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>, &'tcx ty::Const<'tcx>)>, . } . . const TAG_MASK: usize = 0b11; . const TYPE_TAG: usize = 0b00; . const REGION_TAG: usize = 0b01; . const CONST_TAG: usize = 0b10; . 66,042 ( 0.00%) #[derive(Debug, TyEncodable, TyDecodable, PartialEq, Eq, PartialOrd, Ord, HashStable)] . pub enum GenericArgKind<'tcx> { . Lifetime(ty::Region<'tcx>), . Type(Ty<'tcx>), . Const(&'tcx ty::Const<'tcx>), . } . . impl<'tcx> GenericArgKind<'tcx> { 16,318 ( 0.00%) fn pack(self) -> GenericArg<'tcx> { 16,318 ( 0.00%) let (tag, ptr) = match self { . GenericArgKind::Lifetime(lt) => { . // Ensure we can use the tag bits. . assert_eq!(mem::align_of_val(lt) & TAG_MASK, 0); . (REGION_TAG, lt as *const _ as usize) . } . GenericArgKind::Type(ty) => { . // Ensure we can use the tag bits. . assert_eq!(mem::align_of_val(ty) & TAG_MASK, 0); -- line 56 ---------------------------------------- -- line 58 ---------------------------------------- . } . GenericArgKind::Const(ct) => { . // Ensure we can use the tag bits. . 
assert_eq!(mem::align_of_val(ct) & TAG_MASK, 0); . (CONST_TAG, ct as *const _ as usize) . } . }; . 583,220 ( 0.01%) GenericArg { ptr: unsafe { NonZeroUsize::new_unchecked(ptr | tag) }, marker: PhantomData } 16,318 ( 0.00%) } . } . . impl<'tcx> fmt::Debug for GenericArg<'tcx> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . match self.unpack() { . GenericArgKind::Lifetime(lt) => lt.fmt(f), . GenericArgKind::Type(ty) => ty.fmt(f), . GenericArgKind::Const(ct) => ct.fmt(f), -- line 75 ---------------------------------------- -- line 85 ---------------------------------------- . . impl<'tcx> PartialOrd for GenericArg<'tcx> { . fn partial_cmp(&self, other: &GenericArg<'_>) -> Option { . Some(self.cmp(&other)) . } . } . . impl<'tcx> From> for GenericArg<'tcx> { 407,684 ( 0.01%) fn from(r: ty::Region<'tcx>) -> GenericArg<'tcx> { . GenericArgKind::Lifetime(r).pack() 407,684 ( 0.01%) } . } . . impl<'tcx> From> for GenericArg<'tcx> { 1,074,996 ( 0.02%) fn from(ty: Ty<'tcx>) -> GenericArg<'tcx> { . GenericArgKind::Type(ty).pack() 1,074,996 ( 0.02%) } . } . . impl<'tcx> From<&'tcx ty::Const<'tcx>> for GenericArg<'tcx> { 547 ( 0.00%) fn from(c: &'tcx ty::Const<'tcx>) -> GenericArg<'tcx> { . GenericArgKind::Const(c).pack() 547 ( 0.00%) } . } . . impl<'tcx> GenericArg<'tcx> { . #[inline] . pub fn unpack(self) -> GenericArgKind<'tcx> { . let ptr = self.ptr.get(); . unsafe { 15,818,111 ( 0.26%) match ptr & TAG_MASK { 327,299 ( 0.01%) REGION_TAG => GenericArgKind::Lifetime(&*((ptr & !TAG_MASK) as *const _)), 3,524,627 ( 0.06%) TYPE_TAG => GenericArgKind::Type(&*((ptr & !TAG_MASK) as *const _)), 723 ( 0.00%) CONST_TAG => GenericArgKind::Const(&*((ptr & !TAG_MASK) as *const _)), . _ => intrinsics::unreachable(), . } . } . } . . /// Unpack the `GenericArg` as a type when it is known certainly to be a type. . /// This is true in cases where `Substs` is used in places where the kinds are known . /// to be limited (e.g. in tuples, where the only parameters are type parameters). . pub fn expect_ty(self) -> Ty<'tcx> { . match self.unpack() { . GenericArgKind::Type(ty) => ty, . _ => bug!("expected a type, but found another kind"), . } 33,234 ( 0.00%) } . . /// Unpack the `GenericArg` as a const when it is known certainly to be a const. . pub fn expect_const(self) -> &'tcx ty::Const<'tcx> { . match self.unpack() { . GenericArgKind::Const(c) => c, . _ => bug!("expected a const, but found another kind"), . } . } -- line 140 ---------------------------------------- -- line 170 ---------------------------------------- . GenericArgKind::Type(ty) => ty.visit_with(visitor), . GenericArgKind::Const(ct) => ct.visit_with(visitor), . } . } . } . . impl<'tcx, E: TyEncoder<'tcx>> Encodable for GenericArg<'tcx> { . fn encode(&self, e: &mut E) -> Result<(), E::Error> { 7,188 ( 0.00%) self.unpack().encode(e) . } . } . . impl<'tcx, D: TyDecoder<'tcx>> Decodable for GenericArg<'tcx> { 81,590 ( 0.00%) fn decode(d: &mut D) -> GenericArg<'tcx> { 81,590 ( 0.00%) GenericArgKind::decode(d).pack() . } . } . . /// A substitution mapping generic parameters to new values. . pub type InternalSubsts<'tcx> = List>; . . pub type SubstsRef<'tcx> = &'tcx InternalSubsts<'tcx>; . . impl<'a, 'tcx> InternalSubsts<'tcx> { . /// Interpret these substitutions as the substitutions of a closure type. . /// Closure substitutions have a particular structure controlled by the . /// compiler that encodes information like the signature and closure kind; . /// see `ty::ClosureSubsts` struct for more comments. 
1,616 ( 0.00%) pub fn as_closure(&'a self) -> ClosureSubsts<'a> { . ClosureSubsts { substs: self } 1,616 ( 0.00%) } . . /// Interpret these substitutions as the substitutions of a generator type. . /// Generator substitutions have a particular structure controlled by the . /// compiler that encodes information like the signature and generator kind; . /// see `ty::GeneratorSubsts` struct for more comments. . pub fn as_generator(&'tcx self) -> GeneratorSubsts<'tcx> { . GeneratorSubsts { substs: self } . } -- line 208 ---------------------------------------- -- line 211 ---------------------------------------- . /// Inline const substitutions have a particular structure controlled by the . /// compiler that encodes information like the inferred type; . /// see `ty::InlineConstSubsts` struct for more comments. . pub fn as_inline_const(&'tcx self) -> InlineConstSubsts<'tcx> { . InlineConstSubsts { substs: self } . } . . /// Creates an `InternalSubsts` that maps each generic parameter to itself. 26,180 ( 0.00%) pub fn identity_for_item(tcx: TyCtxt<'tcx>, def_id: DefId) -> SubstsRef<'tcx> { 7,764 ( 0.00%) Self::for_item(tcx, def_id, |param, _| tcx.mk_param_from_def(param)) 23,562 ( 0.00%) } . . /// Creates an `InternalSubsts` for generic parameter definitions, . /// by calling closures to obtain each kind. . /// The closures get to observe the `InternalSubsts` as they're . /// being built, which can be used to correctly . /// substitute defaults of generic parameters. 315,004 ( 0.01%) pub fn for_item(tcx: TyCtxt<'tcx>, def_id: DefId, mut mk_kind: F) -> SubstsRef<'tcx> . where . F: FnMut(&ty::GenericParamDef, &[GenericArg<'tcx>]) -> GenericArg<'tcx>, . { . let defs = tcx.generics_of(def_id); . let count = defs.count(); . let mut substs = SmallVec::with_capacity(count); 114,447 ( 0.00%) Self::fill_item(&mut substs, tcx, defs, &mut mk_kind); 70,426 ( 0.00%) tcx.intern_substs(&substs) 316,971 ( 0.01%) } . . pub fn extend_to(&self, tcx: TyCtxt<'tcx>, def_id: DefId, mut mk_kind: F) -> SubstsRef<'tcx> . where . F: FnMut(&ty::GenericParamDef, &[GenericArg<'tcx>]) -> GenericArg<'tcx>, . { . Self::for_item(tcx, def_id, |param, substs| { . self.get(param.index as usize).cloned().unwrap_or_else(|| mk_kind(param, substs)) . }) . } . 463,436 ( 0.01%) pub fn fill_item( . substs: &mut SmallVec<[GenericArg<'tcx>; 8]>, . tcx: TyCtxt<'tcx>, . defs: &ty::Generics, . mk_kind: &mut F, . ) where . F: FnMut(&ty::GenericParamDef, &[GenericArg<'tcx>]) -> GenericArg<'tcx>, . { 196,366 ( 0.00%) if let Some(def_id) = defs.parent { . let parent_defs = tcx.generics_of(def_id); 41,113 ( 0.00%) Self::fill_item(substs, tcx, parent_defs, mk_kind); . } . Self::fill_single(substs, defs, mk_kind) 369,128 ( 0.01%) } . 656 ( 0.00%) pub fn fill_single( . substs: &mut SmallVec<[GenericArg<'tcx>; 8]>, . defs: &ty::Generics, . mk_kind: &mut F, . ) where . F: FnMut(&ty::GenericParamDef, &[GenericArg<'tcx>]) -> GenericArg<'tcx>, . { . substs.reserve(defs.params.len()); . for param in &defs.params { . let kind = mk_kind(param, substs); 273,149 ( 0.00%) assert_eq!(param.index as usize, substs.len()); . substs.push(kind); . } 656 ( 0.00%) } . . #[inline] . pub fn types(&'a self) -> impl DoubleEndedIterator> + 'a { . self.iter() . .filter_map(|k| if let GenericArgKind::Type(ty) = k.unpack() { Some(ty) } else { None }) . } . . #[inline] -- line 284 ---------------------------------------- -- line 302 ---------------------------------------- . self.iter().filter_map(|k| match k.unpack() { . GenericArgKind::Lifetime(_) => None, . 
generic => Some(generic), . }) . } . . #[inline] . pub fn type_at(&self, i: usize) -> Ty<'tcx> { 381,765 ( 0.01%) if let GenericArgKind::Type(ty) = self[i].unpack() { . ty . } else { . bug!("expected type for param #{} in {:?}", i, self); . } . } . . #[inline] . pub fn region_at(&self, i: usize) -> ty::Region<'tcx> { -- line 318 ---------------------------------------- -- line 350 ---------------------------------------- . /// impl X for U { fn f() {} } . /// ``` . /// . /// * If `self` is `[Self, S, T]`: the identity substs of `f` in the trait. . /// * If `source_ancestor` is the def_id of the trait. . /// * If `target_substs` is `[U]`, the substs for the impl. . /// * Then we will return `[U, T]`, the subst for `f` in the impl that . /// are needed for it to match the trait. 18,039 ( 0.00%) pub fn rebase_onto( . &self, . tcx: TyCtxt<'tcx>, . source_ancestor: DefId, . target_substs: SubstsRef<'tcx>, . ) -> SubstsRef<'tcx> { . let defs = tcx.generics_of(source_ancestor); 5,154 ( 0.00%) tcx.mk_substs(target_substs.iter().chain(self.iter().skip(defs.params.len()))) 20,616 ( 0.00%) } . . pub fn truncate_to(&self, tcx: TyCtxt<'tcx>, generics: &ty::Generics) -> SubstsRef<'tcx> { . tcx.mk_substs(self.iter().take(generics.count())) . } . } . . impl<'tcx> TypeFoldable<'tcx> for SubstsRef<'tcx> { 69,328 ( 0.00%) fn try_super_fold_with>( . self, . folder: &mut F, . ) -> Result { . // This code is hot enough that it's worth specializing for the most . // common length lists, to avoid the overhead of `SmallVec` creation. . // The match arms are in order of frequency. The 1, 2, and 0 cases are . // typically hit in 90--99.99% of cases. When folding doesn't change . // the substs, it's faster to reuse the existing substs rather than . // calling `intern_substs`. 3,912,122 ( 0.06%) match self.len() { . 1 => { 679,006 ( 0.01%) let param0 = self[0].try_fold_with(folder)?; 2,270,172 ( 0.04%) if param0 == self[0] { Ok(self) } else { Ok(folder.tcx().intern_substs(&[param0])) } . } . 2 => { 555,029 ( 0.01%) let param0 = self[0].try_fold_with(folder)?; 1,118,930 ( 0.02%) let param1 = self[1].try_fold_with(folder)?; 1,065,328 ( 0.02%) if param0 == self[0] && param1 == self[1] { . Ok(self) . } else { 843,787 ( 0.01%) Ok(folder.tcx().intern_substs(&[param0, param1])) . } . } . 0 => Ok(self), . _ => { . let params: SmallVec<[_; 8]> = 17,174 ( 0.00%) self.iter().map(|k| k.try_fold_with(folder)).collect::>()?; 1,579 ( 0.00%) if params[..] == self[..] { . Ok(self) . } else { 3,690 ( 0.00%) Ok(folder.tcx().intern_substs(¶ms)) . } . } . } 210,941 ( 0.00%) } . . fn super_visit_with>(&self, visitor: &mut V) -> ControlFlow { 568,273 ( 0.01%) self.iter().try_for_each(|t| t.visit_with(visitor)) . } . } . . /////////////////////////////////////////////////////////////////////////// . // Public trait `Subst` . // . // Just call `foo.subst(tcx, substs)` to perform a substitution across . // `foo`. Or use `foo.subst_spanned(tcx, substs, Some(span))` when -- line 420 ---------------------------------------- -- line 435 ---------------------------------------- . . impl<'tcx, T: TypeFoldable<'tcx>> Subst<'tcx> for T { . fn subst_spanned( . self, . tcx: TyCtxt<'tcx>, . substs: &[GenericArg<'tcx>], . span: Option, . ) -> T { 1,187,756 ( 0.02%) let mut folder = SubstFolder { tcx, substs, span, binders_passed: 0 }; . self.fold_with(&mut folder) . } . } . . /////////////////////////////////////////////////////////////////////////// . // The actual substitution engine itself is a type folder. . . 
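Note: the `GenericArg` annotations above describe packing an interned pointer together with a 2-bit kind tag in its low bits (`pack`/`unpack` with `TAG_MASK`), which is why the hot `unpack` calls show up as cheap mask-and-branch code in this profile. The following is a minimal standalone sketch of that pointer-tagging idea with made-up `Kind`/`Packed` types and only two variants rather than rustc's Ty/Region/Const; it is illustrative, not the compiler's implementation.

```
use std::marker::PhantomData;
use std::num::NonZeroUsize;

const TAG_MASK: usize = 0b11;
const A_TAG: usize = 0b00;
const B_TAG: usize = 0b01;

#[derive(Debug, PartialEq)]
enum Kind<'a> {
    A(&'a u64),
    B(&'a u64),
}

struct Packed<'a> {
    ptr: NonZeroUsize,
    _marker: PhantomData<&'a u64>,
}

fn pack(kind: Kind<'_>) -> Packed<'_> {
    let (tag, addr) = match kind {
        Kind::A(x) => (A_TAG, x as *const u64 as usize),
        Kind::B(x) => (B_TAG, x as *const u64 as usize),
    };
    // References to u64 are at least 4-byte aligned, so the low two bits
    // of the address are always zero and can carry the tag.
    assert_eq!(addr & TAG_MASK, 0);
    Packed { ptr: NonZeroUsize::new(addr | tag).unwrap(), _marker: PhantomData }
}

fn unpack<'a>(packed: &Packed<'a>) -> Kind<'a> {
    let raw = packed.ptr.get();
    let addr = (raw & !TAG_MASK) as *const u64;
    // Safety (sketch only): `addr` is the untagged pointer stored by `pack`,
    // which came from a reference valid for 'a.
    match raw & TAG_MASK {
        A_TAG => Kind::A(unsafe { &*addr }),
        B_TAG => Kind::B(unsafe { &*addr }),
        _ => unreachable!(),
    }
}

fn main() {
    let value: u64 = 7;
    let packed = pack(Kind::B(&value));
    assert_eq!(unpack(&packed), Kind::B(&value));
}
```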
struct SubstFolder<'a, 'tcx> { -- line 451 ---------------------------------------- -- line 456 ---------------------------------------- . span: Option, . . /// Number of region binders we have passed through while doing the substitution . binders_passed: u32, . } . . impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { 276,155 ( 0.00%) self.tcx . } . . fn fold_binder>( . &mut self, . t: ty::Binder<'tcx, T>, . ) -> ty::Binder<'tcx, T> { 329,024 ( 0.01%) self.binders_passed += 1; 70,330 ( 0.00%) let t = t.super_fold_with(self); 329,024 ( 0.01%) self.binders_passed -= 1; . t . } . 591,042 ( 0.01%) fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { . // Note: This routine only handles regions that are bound on . // type declarations and other outer declarations, not those . // bound in *fn types*. Region substitution of the bound . // regions that appear in a function signature is done using . // the specialized routine `ty::replace_late_regions()`. 197,014 ( 0.00%) match *r { 162,364 ( 0.00%) ty::ReEarlyBound(data) => { 162,364 ( 0.00%) let rk = self.substs.get(data.index as usize).map(|k| k.unpack()); . match rk { 81,182 ( 0.00%) Some(GenericArgKind::Lifetime(lt)) => self.shift_region_through_binders(lt), . _ => { . let span = self.span.unwrap_or(DUMMY_SP); . let msg = format!( . "Region parameter out of range \ . when substituting in region {} (index={})", . data.name, data.index . ); . span_bug!(span, "{}", msg); . } . } . } . _ => r, . } 689,549 ( 0.01%) } . 1,020,249 ( 0.02%) fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> { 340,083 ( 0.01%) if !t.needs_subst() { . return t; . } . 541,540 ( 0.01%) match *t.kind() { 821,145 ( 0.01%) ty::Param(p) => self.ty_for_param(p, t), 639,246 ( 0.01%) _ => t.super_fold_with(self), . } 1,167,710 ( 0.02%) } . 3,024 ( 0.00%) fn fold_const(&mut self, c: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { 2,839 ( 0.00%) if let ty::ConstKind::Param(p) = c.val { . self.const_for_param(p, c) . } else { 333 ( 0.00%) c.super_fold_with(self) . } 3,160 ( 0.00%) } . . #[inline] . fn fold_mir_const(&mut self, c: mir::ConstantKind<'tcx>) -> mir::ConstantKind<'tcx> { . c.super_fold_with(self) . } . } . . impl<'a, 'tcx> SubstFolder<'a, 'tcx> { . fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> { . // Look up the type in the substitutions. It really should be in there. 328,458 ( 0.01%) let opt_ty = self.substs.get(p.index as usize).map(|k| k.unpack()); . let ty = match opt_ty { . Some(GenericArgKind::Type(ty)) => ty, . Some(kind) => { . let span = self.span.unwrap_or(DUMMY_SP); . span_bug!( . span, . "expected type for `{:?}` ({:?}/{}) but found {:?} \ . when substituting, substs={:?}", -- line 539 ---------------------------------------- -- line 562 ---------------------------------------- . } . . fn const_for_param( . &self, . p: ParamConst, . source_ct: &'tcx ty::Const<'tcx>, . ) -> &'tcx ty::Const<'tcx> { . // Look up the const in the substitutions. It really should be in there. 790 ( 0.00%) let opt_ct = self.substs.get(p.index as usize).map(|k| k.unpack()); . let ct = match opt_ct { . Some(GenericArgKind::Const(ct)) => ct, . Some(kind) => { . let span = self.span.unwrap_or(DUMMY_SP); . span_bug!( . span, . "expected const for `{:?}` ({:?}/{}) but found {:?} \ . when substituting substs={:?}", -- line 578 ---------------------------------------- -- line 645 ---------------------------------------- . fn shift_vars_through_binders>(&self, val: T) -> T { . debug!( . 
"shift_vars(val={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})", . val, . self.binders_passed, . val.has_escaping_bound_vars() . ); . 598,356 ( 0.01%) if self.binders_passed == 0 || !val.has_escaping_bound_vars() { . return val; . } . . let result = ty::fold::shift_vars(self.tcx(), val, self.binders_passed); . debug!("shift_vars: shifted result = {:?}", result); . . result . } . . fn shift_region_through_binders(&self, region: ty::Region<'tcx>) -> ty::Region<'tcx> { 197,585 ( 0.00%) if self.binders_passed == 0 || !region.has_escaping_bound_vars() { . return region; . } . ty::fold::shift_region(self.tcx, region, self.binders_passed) . } . } . . /// Stores the user-given substs to reach some fully qualified path . /// (e.g., `::Item` or `::Item`). . #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] 34,185 ( 0.00%) #[derive(HashStable, TypeFoldable, Lift)] . pub struct UserSubsts<'tcx> { . /// The substitutions for the item as given by the user. . pub substs: SubstsRef<'tcx>, . . /// The self type, in the case of a `::Item` path (when applied . /// to an inherent impl). See `UserSelfTy` below. 3,512 ( 0.00%) pub user_self_ty: Option>, . } . . /// Specifies the user-given self type. In the case of a path that . /// refers to a member in an inherent impl, this self type is . /// sometimes needed to constrain the type parameters on the impl. For . /// example, in this code: . /// . /// ``` -- line 689 ---------------------------------------- -- line 696 ---------------------------------------- . /// self type `Foo`. Then we can instantiate the parameters of . /// the impl (with the substs from `UserSubsts`) and apply those to . /// the self type, giving `Foo`. Finally, we unify that with . /// the self type here, which contains `?A` to be `&'static u32` . #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] . #[derive(HashStable, TypeFoldable, Lift)] . pub struct UserSelfTy<'tcx> { . pub impl_def_id: DefId, 4,845 ( 0.00%) pub self_ty: Ty<'tcx>, . } 6,119,080 ( 0.10%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/undo_log.rs -------------------------------------------------------------------------------- Ir -- line 28 ---------------------------------------- . PushRegionObligation, . } . . macro_rules! impl_from { . ($($ctor: ident ($ty: ty),)*) => { . $( . impl<'tcx> From<$ty> for UndoLog<'tcx> { . fn from(x: $ty) -> Self { 2,697 ( 0.00%) UndoLog::$ctor(x.into()) . } . } . )* . } . } . . // Upcast from a single kind of "undoable action" to the general enum . impl_from! { -- line 44 ---------------------------------------- -- line 57 ---------------------------------------- . ConstUnificationTable(sv::UndoLog>>), . . RegionUnificationTable(sv::UndoLog>>), . ProjectionCache(traits::UndoLog<'tcx>), . } . . /// The Rollback trait defines how to rollback a particular action. . impl<'tcx> Rollback> for InferCtxtInner<'tcx> { 782,598 ( 0.01%) fn reverse(&mut self, undo: UndoLog<'tcx>) { 1,304,330 ( 0.02%) match undo { 1,597,130 ( 0.03%) UndoLog::TypeVariables(undo) => self.type_variable_storage.reverse(undo), 544 ( 0.00%) UndoLog::ConstUnificationTable(undo) => self.const_unification_storage.reverse(undo), 4,410 ( 0.00%) UndoLog::IntUnificationTable(undo) => self.int_unification_storage.reverse(undo), . UndoLog::FloatUnificationTable(undo) => self.float_unification_storage.reverse(undo), . 
UndoLog::RegionConstraintCollector(undo) => { 397,110 ( 0.01%) self.region_constraint_storage.as_mut().unwrap().reverse(undo) . } . UndoLog::RegionUnificationTable(undo) => { 83,474 ( 0.00%) self.region_constraint_storage.as_mut().unwrap().unification_table.reverse(undo) . } . UndoLog::ProjectionCache(undo) => self.projection_cache.reverse(undo), . UndoLog::PushRegionObligation => { . self.region_obligations.pop(); . } . } 1,043,464 ( 0.02%) } . } . . /// The combined undo log for all the various unification tables. For each change to the storage . /// for any kind of inference variable, we record an UndoLog entry in the vector here. . pub(crate) struct InferCtxtUndoLogs<'tcx> { . logs: Vec>, . num_open_snapshots: usize, . } . . impl Default for InferCtxtUndoLogs<'_> { . fn default() -> Self { 44,870 ( 0.00%) Self { logs: Default::default(), num_open_snapshots: Default::default() } . } . } . . /// The UndoLogs trait defines how we undo a particular kind of action (of type T). We can undo any . /// action that is convertable into an UndoLog (per the From impls above). . impl<'tcx, T> UndoLogs for InferCtxtUndoLogs<'tcx> . where . UndoLog<'tcx>: From, -- line 102 ---------------------------------------- -- line 103 ---------------------------------------- . { . #[inline] . fn num_open_snapshots(&self) -> usize { . self.num_open_snapshots . } . . #[inline] . fn push(&mut self, undo: T) { 291,997 ( 0.00%) if self.in_snapshot() { 21,576 ( 0.00%) self.logs.push(undo.into()) . } . } . . fn clear(&mut self) { . self.logs.clear(); . self.num_open_snapshots = 0; . } . -- line 120 ---------------------------------------- -- line 125 ---------------------------------------- . { . if self.in_snapshot() { . self.logs.extend(undos.into_iter().map(UndoLog::from)) . } . } . } . . impl<'tcx> InferCtxtInner<'tcx> { 460,500 ( 0.01%) pub fn rollback_to(&mut self, snapshot: Snapshot<'tcx>) { . debug!("rollback_to({})", snapshot.undo_len); . self.undo_log.assert_open_snapshot(&snapshot); . 936,098 ( 0.02%) while self.undo_log.logs.len() > snapshot.undo_len { . let undo = self.undo_log.logs.pop().unwrap(); 3,130,392 ( 0.05%) self.reverse(undo); . } . 186,609 ( 0.00%) if self.undo_log.num_open_snapshots == 1 { . // The root snapshot. It's safe to clear the undo log because . // there's no snapshot further out that we might need to roll back . // to. 60,956 ( 0.00%) assert!(snapshot.undo_len == 0); . self.undo_log.logs.clear(); . } . 260,728 ( 0.00%) self.undo_log.num_open_snapshots -= 1; 537,250 ( 0.01%) } . . pub fn commit(&mut self, snapshot: Snapshot<'tcx>) { . debug!("commit({})", snapshot.undo_len); . 623,394 ( 0.01%) if self.undo_log.num_open_snapshots == 1 { . // The root snapshot. It's safe to clear the undo log because . // there's no snapshot further out that we might need to roll back . // to. 174,204 ( 0.00%) assert!(snapshot.undo_len == 0); . self.undo_log.logs.clear(); . } . 710,496 ( 0.01%) self.undo_log.num_open_snapshots -= 1; . } . } . . impl<'tcx> InferCtxtUndoLogs<'tcx> { . pub fn start_snapshot(&mut self) -> Snapshot<'tcx> { 1,138,192 ( 0.02%) self.num_open_snapshots += 1; . Snapshot { undo_len: self.logs.len(), _marker: PhantomData } . } . . pub(crate) fn region_constraints_in_snapshot( . &self, . s: &Snapshot<'tcx>, . ) -> impl Iterator> + Clone { 30,023 ( 0.00%) self.logs[s.undo_len..].iter().filter_map(|log| match log { . UndoLog::RegionConstraintCollector(log) => Some(log), . _ => None, . }) . } . . pub(crate) fn region_constraints( . &self, . 
) -> impl Iterator> + Clone { 77 ( 0.00%) self.logs.iter().filter_map(|log| match log { . UndoLog::RegionConstraintCollector(log) => Some(log), . _ => None, . }) . } . . fn assert_open_snapshot(&self, snapshot: &Snapshot<'tcx>) { . // Failures here may indicate a failure to follow a stack discipline. 230,250 ( 0.00%) assert!(self.logs.len() >= snapshot.undo_len); 230,250 ( 0.00%) assert!(self.num_open_snapshots > 0); . } . } . . impl<'tcx> std::ops::Index for InferCtxtUndoLogs<'tcx> { . type Output = UndoLog<'tcx>; . . fn index(&self, key: usize) -> &Self::Output { . &self.logs[key] -- line 204 ---------------------------------------- 380,979 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs -------------------------------------------------------------------------------- Ir -- line 186 ---------------------------------------- . /// // use the values stored in map . /// ``` . pub struct HashMap { . pub(crate) hash_builder: S, . pub(crate) table: RawTable<(K, V), A>, . } . . impl Clone for HashMap { 776 ( 0.00%) fn clone(&self) -> Self { 1,991 ( 0.00%) HashMap { . hash_builder: self.hash_builder.clone(), 363 ( 0.00%) table: self.table.clone(), . } 873 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . self.table.clone_from(&source.table); . . // Update hash_builder only if we successfully cloned all elements. . self.hash_builder.clone_from(&source.hash_builder); . } . } -- line 207 ---------------------------------------- -- line 210 ---------------------------------------- . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_hasher(hash_builder: &S) -> impl Fn(&(Q, V)) -> u64 + '_ . where . K: Borrow, . Q: Hash, . S: BuildHasher, . { 317,076 ( 0.01%) move |val| make_hash::(hash_builder, &val.0) . } . . /// Ensures that a single closure type across uses of this which, in turn prevents multiple . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . fn equivalent_key(k: &Q) -> impl Fn(&(K, V)) -> bool + '_ . where . K: Borrow, . Q: ?Sized + Eq, . { 1,167,035 ( 0.02%) move |x| k.eq(x.0.borrow()) . } . . /// Ensures that a single closure type across uses of this which, in turn prevents multiple . /// instances of any functions like RawTable::reserve from being generated . #[cfg_attr(feature = "inline-more", inline)] . fn equivalent(k: &Q) -> impl Fn(&K) -> bool + '_ . where . K: Borrow, . Q: ?Sized + Eq, . { 1,142,447 ( 0.02%) move |x| k.eq(x.borrow()) . } . . #[cfg(not(feature = "nightly"))] . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64 . where . K: Borrow, . Q: Hash + ?Sized, -- line 248 ---------------------------------------- -- line 251 ---------------------------------------- . use core::hash::Hasher; . let mut state = hash_builder.build_hasher(); . val.hash(&mut state); . state.finish() . } . . #[cfg(feature = "nightly")] . #[cfg_attr(feature = "inline-more", inline)] 2 ( 0.00%) pub(crate) fn make_hash(hash_builder: &S, val: &Q) -> u64 . where . K: Borrow, . Q: Hash + ?Sized, . S: BuildHasher, . { . hash_builder.hash_one(val) 4 ( 0.00%) } . . #[cfg(not(feature = "nightly"))] . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn make_insert_hash(hash_builder: &S, val: &K) -> u64 . where . 
K: Hash, . S: BuildHasher, . { -- line 274 ---------------------------------------- -- line 367 ---------------------------------------- . /// let s = DefaultHashBuilder::default(); . /// let mut map = HashMap::with_hasher(s); . /// map.insert(1, 2); . /// ``` . /// . /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html . #[cfg_attr(feature = "inline-more", inline)] . pub const fn with_hasher(hash_builder: S) -> Self { 661,931 ( 0.01%) Self { . hash_builder, . table: RawTable::new(), . } . } . . /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` . /// to hash the keys. . /// -- line 383 ---------------------------------------- -- line 437 ---------------------------------------- . /// use hashbrown::hash_map::DefaultHashBuilder; . /// . /// let s = DefaultHashBuilder::default(); . /// let mut map = HashMap::with_hasher(s); . /// map.insert(1, 2); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self { 1,738 ( 0.00%) Self { . hash_builder, . table: RawTable::new_in(alloc), . } . } . . /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` . /// to hash the keys. It will be allocated with the given allocator. . /// -- line 453 ---------------------------------------- -- line 663 ---------------------------------------- . /// . /// let mut a = HashMap::new(); . /// assert_eq!(a.len(), 0); . /// a.insert(1, "a"); . /// assert_eq!(a.len(), 1); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn len(&self) -> usize { 70,702 ( 0.00%) self.table.len() . } . . /// Returns `true` if the map contains no elements. . /// . /// # Examples . /// . /// ``` . /// use hashbrown::HashMap; -- line 679 ---------------------------------------- -- line 680 ---------------------------------------- . /// . /// let mut a = HashMap::new(); . /// assert!(a.is_empty()); . /// a.insert(1, "a"); . /// assert!(!a.is_empty()); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn is_empty(&self) -> bool { 286,858 ( 0.00%) self.len() == 0 . } . . /// Clears the map, returning all key-value pairs as an iterator. Keeps the . /// allocated memory for reuse. . /// . /// # Examples . /// . /// ``` -- line 696 ---------------------------------------- -- line 790 ---------------------------------------- . /// use hashbrown::HashMap; . /// . /// let mut a = HashMap::new(); . /// a.insert(1, "a"); . /// a.clear(); . /// assert!(a.is_empty()); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 6 ( 0.00%) pub fn clear(&mut self) { . self.table.clear(); 6 ( 0.00%) } . . /// Creates a consuming iterator visiting all the keys in arbitrary order. . /// The map cannot be used after calling this. . /// The iterator element type is `K`. . /// . /// # Examples . /// . /// ``` -- line 808 ---------------------------------------- -- line 963 ---------------------------------------- . /// } . /// . /// assert_eq!(letters[&'s'], 2); . /// assert_eq!(letters[&'t'], 3); . /// assert_eq!(letters[&'u'], 1); . /// assert_eq!(letters.get(&'y'), None); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 1,056 ( 0.00%) pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> { . let hash = make_insert_hash::(&self.hash_builder, &key); . if let Some(elem) = self.table.find(hash, equivalent_key(&key)) { 350 ( 0.00%) Entry::Occupied(OccupiedEntry { . hash, . key: Some(key), . elem, . table: self, . }) . } else { 970 ( 0.00%) Entry::Vacant(VacantEntry { . hash, . key, . table: self, . }) . 
} 1,320 ( 0.00%) } . . /// Gets the given key's corresponding entry by reference in the map for in-place manipulation. . /// . /// # Examples . /// . /// ``` . /// use hashbrown::HashMap; . /// -- line 995 ---------------------------------------- -- line 1047 ---------------------------------------- . /// ``` . #[inline] . pub fn get(&self, k: &Q) -> Option<&V> . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 2,571,278 ( 0.04%) match self.get_inner(k) { . Some(&(_, ref v)) => Some(v), . None => None, . } . } . . /// Returns the key-value pair corresponding to the supplied key. . /// . /// The supplied key may be any borrowed form of the map's key type, but -- line 1063 ---------------------------------------- -- line 1091 ---------------------------------------- . } . . #[inline] . fn get_inner(&self, k: &Q) -> Option<&(K, V)> . where . K: Borrow, . Q: Hash + Eq, . { 2,824,644 ( 0.05%) if self.table.is_empty() { . None . } else { 1 ( 0.00%) let hash = make_hash::(&self.hash_builder, k); . self.table.get(hash, equivalent_key(k)) . } . } . . /// Returns the key-value pair corresponding to the supplied key, with a mutable reference to value. . /// . /// The supplied key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for -- line 1110 ---------------------------------------- -- line 1155 ---------------------------------------- . /// use hashbrown::HashMap; . /// . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// assert_eq!(map.contains_key(&1), true); . /// assert_eq!(map.contains_key(&2), false); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 1,010,676 ( 0.02%) pub fn contains_key(&self, k: &Q) -> bool . where . K: Borrow, . Q: Hash + Eq, . { . self.get_inner(k).is_some() 1,261,062 ( 0.02%) } . . /// Returns a mutable reference to the value corresponding to the key. . /// . /// The key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for . /// the key type. . /// . /// [`Eq`]: https://doc.rust-lang.org/std/cmp/trait.Eq.html -- line 1177 ---------------------------------------- -- line 1185 ---------------------------------------- . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// if let Some(x) = map.get_mut(&1) { . /// *x = "b"; . /// } . /// assert_eq!(map[&1], "b"); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 785 ( 0.00%) pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 2,355 ( 0.00%) match self.get_inner_mut(k) { . Some(&mut (_, ref mut v)) => Some(v), . None => None, . } 1,570 ( 0.00%) } . . #[inline] . fn get_inner_mut(&mut self, k: &Q) -> Option<&mut (K, V)> . where . K: Borrow, . Q: Hash + Eq, . { 785 ( 0.00%) if self.table.is_empty() { . None . } else { . let hash = make_hash::(&self.hash_builder, k); . self.table.get_mut(hash, equivalent_key(k)) . } . } . . /// Attempts to get mutable references to `N` values in the map at once. -- line 1219 ---------------------------------------- -- line 1495 ---------------------------------------- . /// assert_eq!(map.insert(37, "a"), None); . /// assert_eq!(map.is_empty(), false); . /// . /// map.insert(37, "b"); . /// assert_eq!(map.insert(37, "c"), Some("b")); . /// assert_eq!(map[&37], "c"); . /// ``` . 
#[cfg_attr(feature = "inline-more", inline)] 6,876,008 ( 0.11%) pub fn insert(&mut self, k: K, v: V) -> Option { . let hash = make_insert_hash::(&self.hash_builder, &k); 1,223 ( 0.00%) if let Some((_, item)) = self.table.get_mut(hash, equivalent_key(&k)) { 2 ( 0.00%) Some(mem::replace(item, v)) . } else { 2,837,445 ( 0.05%) self.table 2,240,387 ( 0.04%) .insert(hash, (k, v), make_hasher::(&self.hash_builder)); 267,934 ( 0.00%) None . } 6,582,673 ( 0.11%) } . . /// Insert a key-value pair into the map without checking . /// if the key already exists in the map. . /// . /// Returns a reference to the key and value just inserted. . /// . /// This operation is safe if a key does not exist in the map. . /// -- line 1520 ---------------------------------------- -- line 1592 ---------------------------------------- . /// use hashbrown::HashMap; . /// . /// let mut map = HashMap::new(); . /// map.insert(1, "a"); . /// assert_eq!(map.remove(&1), Some("a")); . /// assert_eq!(map.remove(&1), None); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 281,929 ( 0.00%) pub fn remove(&mut self, k: &Q) -> Option . where . K: Borrow, . Q: Hash + Eq, . { . // Avoid `Option::map` because it bloats LLVM IR. 1,308,915 ( 0.02%) match self.remove_entry(k) { 72,429 ( 0.00%) Some((_, v)) => Some(v), 221,041 ( 0.00%) None => None, . } 646,959 ( 0.01%) } . . /// Removes a key from the map, returning the stored key and value if the . /// key was previously in the map. . /// . /// The key may be any borrowed form of the map's key type, but . /// [`Hash`] and [`Eq`] on the borrowed form *must* match those for . /// the key type. . /// -- line 1618 ---------------------------------------- -- line 1631 ---------------------------------------- . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn remove_entry(&mut self, k: &Q) -> Option<(K, V)> . where . K: Borrow, . Q: Hash + Eq, . { . let hash = make_hash::(&self.hash_builder, k); 564,502 ( 0.01%) self.table.remove_entry(hash, equivalent_key(k)) . } . } . . impl HashMap { . /// Creates a raw entry builder for the HashMap. . /// . /// Raw entries provide the lowest level of control for searching and . /// manipulating a map. They must be manually initialized with a hash and -- line 1647 ---------------------------------------- -- line 2209 ---------------------------------------- . /// Creates a `RawEntryMut` from the given key and its hash. . #[inline] . #[allow(clippy::wrong_self_convention)] . pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A> . where . K: Borrow, . Q: Eq, . { 5,901,840 ( 0.10%) self.from_hash(hash, equivalent(k)) . } . } . . impl<'a, K, V, S, A: Allocator + Clone> RawEntryBuilderMut<'a, K, V, S, A> { . /// Creates a `RawEntryMut` from the given hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] 15,292,484 ( 0.25%) pub fn from_hash(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A> . where . for<'b> F: FnMut(&'b K) -> bool, . { . self.search(hash, is_match) 16,422,500 ( 0.27%) } . . #[cfg_attr(feature = "inline-more", inline)] . fn search(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A> . where . for<'b> F: FnMut(&'b K) -> bool, . { 1,695,058 ( 0.03%) match self.map.table.find(hash, |(k, _)| is_match(k)) { 9,022,892 ( 0.15%) Some(elem) => RawEntryMut::Occupied(RawOccupiedEntryMut { . elem, . table: &mut self.map.table, . hash_builder: &self.map.hash_builder, . }), 1,246,944 ( 0.02%) None => RawEntryMut::Vacant(RawVacantEntryMut { . 
table: &mut self.map.table, . hash_builder: &self.map.hash_builder, . }), . } . } . } . . impl<'a, K, V, S, A: Allocator + Clone> RawEntryBuilder<'a, K, V, S, A> { -- line 2251 ---------------------------------------- -- line 2260 ---------------------------------------- . { . let hash = make_hash::(&self.map.hash_builder, k); . self.from_key_hashed_nocheck(hash, k) . } . . /// Access an entry by a key and its hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] 3,331,696 ( 0.06%) pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> Option<(&'a K, &'a V)> . where . K: Borrow, . Q: Eq, . { 4,098,852 ( 0.07%) self.from_hash(hash, equivalent(k)) 6,057,122 ( 0.10%) } . . #[cfg_attr(feature = "inline-more", inline)] . fn search(self, hash: u64, mut is_match: F) -> Option<(&'a K, &'a V)> . where . F: FnMut(&K) -> bool, . { 5,919,587 ( 0.10%) match self.map.table.get(hash, |(k, _)| is_match(k)) { . Some(&(ref key, ref value)) => Some((key, value)), . None => None, . } . } . . /// Access an entry by hash. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::wrong_self_convention)] -- line 2289 ---------------------------------------- -- line 2624 ---------------------------------------- . /// and returns a mutable reference to it. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::shadow_unrelated)] . pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V) . where . K: Hash, . S: BuildHasher, . { 2,061,981 ( 0.03%) let &mut (ref mut k, ref mut v) = self.table.insert_entry( . hash, . (key, value), . make_hasher::(self.hash_builder), . ); . (k, v) . } . . /// Set the value of an entry with a custom hasher function. -- line 2640 ---------------------------------------- -- line 2974 ---------------------------------------- . /// map.insert("a", 1); . /// map.insert("b", 2); . /// map.insert("c", 3); . /// . /// // Not possible with .iter() . /// let vec: Vec<(&str, i32)> = map.into_iter().collect(); . /// ``` . #[cfg_attr(feature = "inline-more", inline)] 3,404 ( 0.00%) fn into_iter(self) -> IntoIter { 61,653 ( 0.00%) IntoIter { 33,518 ( 0.00%) inner: self.table.into_iter(), . } 10,212 ( 0.00%) } . } . . impl<'a, K, V> Iterator for Iter<'a, K, V> { . type Item = (&'a K, &'a V); . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option<(&'a K, &'a V)> { . // Avoid `Option::map` because it bloats LLVM IR. 490,620 ( 0.01%) match self.inner.next() { . Some(x) => unsafe { . let r = x.as_ref(); 3,471 ( 0.00%) Some((&r.0, &r.1)) . }, . None => None, . } . } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { 2 ( 0.00%) self.inner.size_hint() . } . } . impl ExactSizeIterator for Iter<'_, K, V> { . #[cfg_attr(feature = "inline-more", inline)] . fn len(&self) -> usize { . self.inner.len() . } . } -- line 3013 ---------------------------------------- -- line 3051 ---------------------------------------- . } . } . . impl Iterator for IntoIter { . type Item = (K, V); . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option<(K, V)> { 3,598 ( 0.00%) self.inner.next() . } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { . self.inner.size_hint() . } . } . impl ExactSizeIterator for IntoIter { . #[cfg_attr(feature = "inline-more", inline)] -- line 3067 ---------------------------------------- -- line 3076 ---------------------------------------- . 
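Note: the hottest map operations above go through the raw-entry builders (`from_hash` / `from_key_hashed_nocheck`), which let a caller hash a key once and reuse that hash for both the lookup and a subsequent insert. A small sketch of that pattern follows, assuming hashbrown 0.12 as a dependency; the counter example and the `bump` helper are illustrative, not compiler code.

```
use hashbrown::HashMap;
use hashbrown::hash_map::RawEntryMut;
use std::hash::{BuildHasher, Hash, Hasher};

fn bump(map: &mut HashMap<String, u64>, key: &str) {
    // Hash the key once up front...
    let mut state = map.hasher().build_hasher();
    key.hash(&mut state);
    let hash = state.finish();

    // ...and reuse it for the probe and, on a miss, for the insertion,
    // instead of rehashing inside `insert`.
    match map.raw_entry_mut().from_key_hashed_nocheck(hash, key) {
        RawEntryMut::Occupied(mut occupied) => *occupied.get_mut() += 1,
        RawEntryMut::Vacant(vacant) => {
            vacant.insert_hashed_nocheck(hash, key.to_owned(), 1);
        }
    }
}

fn main() {
    let mut counts = HashMap::new();
    bump(&mut counts, "is_useful");
    bump(&mut counts, "is_useful");
    assert_eq!(counts["is_useful"], 2);
}
```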
f.debug_list().entries(self.iter()).finish() . } . } . . impl<'a, K, V> Iterator for Keys<'a, K, V> { . type Item = &'a K; . . #[cfg_attr(feature = "inline-more", inline)] 824 ( 0.00%) fn next(&mut self) -> Option<&'a K> { . // Avoid `Option::map` because it bloats LLVM IR. . match self.inner.next() { . Some((k, _)) => Some(k), . None => None, . } 1,648 ( 0.00%) } . #[cfg_attr(feature = "inline-more", inline)] . fn size_hint(&self) -> (usize, Option) { . self.inner.size_hint() . } . } . impl ExactSizeIterator for Keys<'_, K, V> { . #[cfg_attr(feature = "inline-more", inline)] . fn len(&self) -> usize { -- line 3098 ---------------------------------------- -- line 3819 ---------------------------------------- . /// ``` . #[cfg_attr(feature = "inline-more", inline)] . pub fn insert(self, value: V) -> &'a mut V . where . K: Hash, . S: BuildHasher, . { . let table = &mut self.table.table; 84 ( 0.00%) let entry = table.insert_entry( . self.hash, . (self.key, value), . make_hasher::(&self.table.hash_builder), . ); . &mut entry.1 . } . . #[cfg_attr(feature = "inline-more", inline)] -- line 3835 ---------------------------------------- -- line 4557 ---------------------------------------- . /// keys with new values returned from the iterator. . impl Extend<(K, V)> for HashMap . where . K: Eq + Hash, . S: BuildHasher, . A: Allocator + Clone, . { . #[cfg_attr(feature = "inline-more", inline)] 95,925 ( 0.00%) fn extend>(&mut self, iter: T) { . // Keys may be already present or show multiple times in the iterator. . // Reserve the entire hint lower bound if the map is empty. . // Otherwise reserve half the hint (rounded up), so the map . // will only resize twice in the worst case. 58,813 ( 0.00%) let iter = iter.into_iter(); 78,157 ( 0.00%) let reserve = if self.is_empty() { . iter.size_hint().0 . } else { 3,445 ( 0.00%) (iter.size_hint().0 + 1) / 2 . }; . self.reserve(reserve); . iter.for_each(move |(k, v)| { 183,484 ( 0.00%) self.insert(k, v); . }); 69,010 ( 0.00%) } . . #[inline] . #[cfg(feature = "nightly")] . fn extend_one(&mut self, (k, v): (K, V)) { . self.insert(k, v); . } . . #[inline] -- line 4588 ---------------------------------------- 13,394,727 ( 0.22%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/thir/pattern/deconstruct_pat.rs -------------------------------------------------------------------------------- Ir -- line 65 ---------------------------------------- . use std::cell::Cell; . use std::cmp::{self, max, min, Ordering}; . use std::fmt; . use std::iter::{once, IntoIterator}; . use std::ops::RangeInclusive; . . /// Recursively expand this pattern into its subpatterns. Only useful for or-patterns. . fn expand_or_pat<'p, 'tcx>(pat: &'p Pat<'tcx>) -> Vec<&'p Pat<'tcx>> { 2,236 ( 0.00%) fn expand<'p, 'tcx>(pat: &'p Pat<'tcx>, vec: &mut Vec<&'p Pat<'tcx>>) { 1,118 ( 0.00%) if let PatKind::Or { pats } = pat.kind.as_ref() { . for pat in pats { 1,455 ( 0.00%) expand(pat, vec); . } . } else { . vec.push(pat) . } 2,236 ( 0.00%) } . . let mut pats = Vec::new(); 148 ( 0.00%) expand(pat, &mut pats); . pats . } . . /// An inclusive interval, used for precise integer exhaustiveness checking. . /// `IntRange`s always store a contiguous range. This means that values are . /// encoded such that `0` encodes the minimum value for the integer, . /// regardless of the signedness. . /// For example, the pattern `-128..=127i8` is encoded as `0..=255`. . 
/// This makes comparisons and arithmetic on interval endpoints much more . /// straightforward. See `signed_bias` for details. . /// . /// `IntRange` is never used to encode an empty range or a "range" that wraps . /// around the (offset) space: i.e., `range.lo <= range.hi`. . #[derive(Clone, PartialEq, Eq)] . pub(super) struct IntRange { 11,312 ( 0.00%) range: RangeInclusive, . /// Keeps the bias used for encoding the range. It depends on the type of the range and . /// possibly the pointer size of the current architecture. The algorithm ensures we never . /// compare `IntRange`s with different types/architectures. 10,371 ( 0.00%) bias: u128, . } . . impl IntRange { . #[inline] . fn is_integral(ty: Ty<'_>) -> bool { 18,295 ( 0.00%) matches!(ty.kind(), ty::Char | ty::Int(_) | ty::Uint(_) | ty::Bool) . } . . fn is_singleton(&self) -> bool { 10,550 ( 0.00%) self.range.start() == self.range.end() . } . . fn boundaries(&self) -> (u128, u128) { 109,286 ( 0.00%) (*self.range.start(), *self.range.end()) . } . . #[inline] . fn integral_size_and_signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> Option<(Size, u128)> { 1,857 ( 0.00%) match *ty.kind() { . ty::Bool => Some((Size::from_bytes(1), 0)), . ty::Char => Some((Size::from_bytes(4), 0)), 60 ( 0.00%) ty::Int(ity) => { . let size = Integer::from_int_ty(&tcx, ity).size(); 130 ( 0.00%) Some((size, 1u128 << (size.bits() as u128 - 1))) . } 3,654 ( 0.00%) ty::Uint(uty) => Some((Integer::from_uint_ty(&tcx, uty).size(), 0)), . _ => None, . } . } . . #[inline] . fn from_const<'tcx>( . tcx: TyCtxt<'tcx>, . param_env: ty::ParamEnv<'tcx>, . value: &Const<'tcx>, . ) -> Option { 619 ( 0.00%) if let Some((target_size, bias)) = Self::integral_size_and_signed_bias(tcx, value.ty) { . let ty = value.ty; . let val = (|| { 4,952 ( 0.00%) if let ty::ConstKind::Value(ConstValue::Scalar(scalar)) = value.val { . // For this specific pattern we can skip a lot of effort and go . // straight to the result, after doing a bit of checking. (We . // could remove this branch and just fall through, which . // is more general but much slower.) . if let Ok(bits) = scalar.to_bits_or_ptr_internal(target_size) { . return Some(bits); . } . } . // This is a more general form of the previous case. . value.try_eval_bits(tcx, param_env, ty) . })()?; 8,047 ( 0.00%) let val = val ^ bias; . Some(IntRange { range: val..=val, bias }) . } else { . None . } . } . . #[inline] 11,963 ( 0.00%) fn from_range<'tcx>( . tcx: TyCtxt<'tcx>, . lo: u128, . hi: u128, . ty: Ty<'tcx>, . end: &RangeEnd, . ) -> Option { 1,709 ( 0.00%) if Self::is_integral(ty) { . // Perform a shift if the underlying types are signed, . // which makes the interval arithmetic simpler. . let bias = IntRange::signed_bias(tcx, ty); 17,090 ( 0.00%) let (lo, hi) = (lo ^ bias, hi ^ bias); . let offset = (*end == RangeEnd::Excluded) as u128; 17,090 ( 0.00%) if lo > hi || (lo == hi && *end == RangeEnd::Excluded) { . // This should have been caught earlier by E0030. . bug!("malformed range pattern: {}..={}", lo, (hi - offset)); . } 17,090 ( 0.00%) Some(IntRange { range: lo..=(hi - offset), bias }) . } else { . None . } 10,254 ( 0.00%) } . . // The return value of `signed_bias` should be XORed with an endpoint to encode/decode it. . fn signed_bias(tcx: TyCtxt<'_>, ty: Ty<'_>) -> u128 { 3,418 ( 0.00%) match *ty.kind() { 348 ( 0.00%) ty::Int(ity) => { . let bits = Integer::from_int_ty(&tcx, ity).size().bits() as u128; 754 ( 0.00%) 1u128 << (bits - 1) . } . _ => 0, . } . } . . fn is_subrange(&self, other: &Self) -> bool { . 
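A concrete illustration of the signed-bias encoding described in the `IntRange` comments above (my own sketch, not part of the profiled source): for `i8` the bias is `1 << 7`, so XOR-ing it flips the sign bit and turns signed order into unsigned order, which is why `-128..=127i8` is encoded as `0..=255`.

fn encode(x: i8) -> u8 {
    // Flip the sign bit: an order-preserving map from i8 onto u8.
    (x as u8) ^ 0x80
}

fn main() {
    assert_eq!(encode(-128), 0);
    assert_eq!(encode(-1), 127);
    assert_eq!(encode(0), 128);
    assert_eq!(encode(127), 255);
    // Order is preserved, so interval endpoints can be compared as unsigned values.
    assert!(encode(-5) < encode(3));
}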
other.range.start() <= self.range.start() && self.range.end() <= other.range.end() . } . . fn intersection(&self, other: &Self) -> Option { . let (lo, hi) = self.boundaries(); . let (other_lo, other_hi) = other.boundaries(); 334,652 ( 0.01%) if lo <= other_hi && other_lo <= hi { 1,237 ( 0.00%) Some(IntRange { range: max(lo, other_lo)..=min(hi, other_hi), bias: self.bias }) . } else { . None . } . } . . fn suspicious_intersection(&self, other: &Self) -> bool { . // `false` in the following cases: . // 1 ---- // 1 ---------- // 1 ---- // 1 ---- -- line 214 ---------------------------------------- -- line 242 ---------------------------------------- . } else { . PatKind::Range(PatRange { lo: lo_const, hi: hi_const, end: RangeEnd::Included }) . }; . . Pat { ty, span: DUMMY_SP, kind: Box::new(kind) } . } . . /// Lint on likely incorrect range patterns (#63987) 2,476 ( 0.00%) pub(super) fn lint_overlapping_range_endpoints<'a, 'p: 'a, 'tcx: 'a>( . &self, . pcx: PatCtxt<'_, 'p, 'tcx>, . pats: impl Iterator>, . column_count: usize, . hir_id: HirId, . ) { 619 ( 0.00%) if self.is_singleton() { . return; . } . . if column_count != 1 { . // FIXME: for now, only check for overlapping ranges on simple range . // patterns. Otherwise with the current logic the following is detected . // as overlapping: . // ``` -- line 265 ---------------------------------------- -- line 295 ---------------------------------------- . ); . } . err.span_label(pcx.span, "... with this range"); . err.note("you likely meant to write mutually exclusive ranges"); . err.emit(); . }, . ); . } 3,095 ( 0.00%) } . . /// See `Constructor::is_covered_by` . fn is_covered_by(&self, other: &Self) -> bool { 106,812 ( 0.00%) if self.intersection(other).is_some() { . // Constructor splitting should ensure that all intersections we encounter are actually . // inclusions. 2,052 ( 0.00%) assert!(self.is_subrange(other)); . true . } else { . false . } . } . } . . /// Note: this is often not what we want: e.g. `false` is converted into the range `0..=0` and -- line 318 ---------------------------------------- -- line 325 ---------------------------------------- . write!(f, "{}", lo)?; . write!(f, "{}", RangeEnd::Included)?; . write!(f, "{}", hi) . } . } . . /// Represents a border between 2 integers. Because the intervals spanning borders must be able to . /// cover every integer, we need to be able to represent 2^128 + 1 such borders. 44,111 ( 0.00%) #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] . enum IntBorder { . JustBefore(u128), . AfterMax, . } . . /// A range of integers that is partitioned into disjoint subranges. This does constructor . /// splitting for integer ranges as explained at the top of the file. . /// -- line 341 ---------------------------------------- -- line 359 ---------------------------------------- . range: IntRange, . /// The borders of ranges we have seen. They are all contained within `range`. This is kept . /// sorted. . borders: Vec, . } . . impl SplitIntRange { . fn new(range: IntRange) -> Self { 13,672 ( 0.00%) SplitIntRange { range, borders: Vec::new() } . } . . /// Internal use . fn to_borders(r: IntRange) -> [IntBorder; 2] { . use IntBorder::*; . let (lo, hi) = r.boundaries(); . let lo = JustBefore(lo); . let hi = match hi.checked_add(1) { -- line 375 ---------------------------------------- -- line 400 ---------------------------------------- . let mut prev_border = self_range[0]; . self.borders . .iter() . .copied() . // End with the end of the range. . .chain(once(self_range[1])) . 
// List pairs of adjacent borders. . .map(move |border| { 6,365 ( 0.00%) let ret = (prev_border, border); 3,819 ( 0.00%) prev_border = border; . ret . }) . // Skip duplicates. . .filter(|(prev_border, border)| prev_border != border) . // Finally, convert to ranges. . .map(move |(prev_border, border)| { 17,868 ( 0.00%) let range = match (prev_border, border) { 17,868 ( 0.00%) (JustBefore(n), JustBefore(m)) if n < m => n..=(m - 1), . (JustBefore(n), AfterMax) => n..=u128::MAX, . _ => unreachable!(), // Ruled out by the sorting and filtering we did . }; 7,225 ( 0.00%) IntRange { range, bias: self.range.bias } . }) . } . } . . #[derive(Copy, Clone, Debug, PartialEq, Eq)] . enum SliceKind { . /// Patterns of length `n` (`[x, y]`). . FixedLen(usize), -- line 429 ---------------------------------------- -- line 434 ---------------------------------------- . /// and everything in between is a wildcard `_`. . VarLen(usize, usize), . } . . impl SliceKind { . fn arity(self) -> usize { . match self { . FixedLen(length) => length, 44 ( 0.00%) VarLen(prefix, suffix) => prefix + suffix, . } . } . . /// Whether this pattern includes patterns of length `other_len`. . fn covers_length(self, other_len: usize) -> bool { . match self { . FixedLen(len) => len == other_len, . VarLen(prefix, suffix) => prefix + suffix <= other_len, -- line 450 ---------------------------------------- -- line 458 ---------------------------------------- . /// `None` if the matched value is a slice, `Some(n)` if it is an array of size `n`. . array_len: Option, . /// The kind of pattern it is: fixed-length `[x, y]` or variable length `[x, .., y]`. . kind: SliceKind, . } . . impl Slice { . fn new(array_len: Option, kind: SliceKind) -> Self { 88 ( 0.00%) let kind = match (array_len, kind) { . // If the middle `..` is empty, we effectively have a fixed-length pattern. 176 ( 0.00%) (Some(len), VarLen(prefix, suffix)) if prefix + suffix >= len => FixedLen(len), . _ => kind, . }; . Slice { array_len, kind } . } . . fn arity(self) -> usize { . self.kind.arity() . } -- line 476 ---------------------------------------- -- line 550 ---------------------------------------- . arity: usize, . /// The smallest slice bigger than any slice seen. `max_slice.arity()` is the length `L` . /// described above. . max_slice: SliceKind, . } . . impl SplitVarLenSlice { . fn new(prefix: usize, suffix: usize, array_len: Option) -> Self { 220 ( 0.00%) SplitVarLenSlice { array_len, arity: prefix + suffix, max_slice: VarLen(prefix, suffix) } . } . . /// Pass a set of slices relative to which to split this one. . fn split(&mut self, slices: impl Iterator) { . let (max_prefix_len, max_suffix_len) = match &mut self.max_slice { . VarLen(prefix, suffix) => (prefix, suffix), . FixedLen(_) => return, // No need to split . }; -- line 566 ---------------------------------------- -- line 576 ---------------------------------------- . VarLen(prefix, suffix) => { . *max_prefix_len = cmp::max(*max_prefix_len, prefix); . *max_suffix_len = cmp::max(*max_suffix_len, suffix); . } . } . } . // We want `L = max(L, max_fixed_len + 1)`, modulo the fact that we keep prefix and . // suffix separate. 154 ( 0.00%) if max_fixed_len + 1 >= *max_prefix_len + *max_suffix_len { . // The subtraction can't overflow thanks to the above check. . // The new `max_prefix_len` is larger than its previous value. 88 ( 0.00%) *max_prefix_len = max_fixed_len + 1 - *max_suffix_len; . } . . // We cap the arity of `max_slice` at the array size. 
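The `SplitIntRange` iterator above partitions a range into disjoint pieces at the collected borders; a simplified, self-contained sketch of that idea using plain tuples (`split_at_borders` is a hypothetical helper of my own, and it ignores the `AfterMax` overflow case the real code handles):

fn split_at_borders(range: (u32, u32), others: &[(u32, u32)]) -> Vec<(u32, u32)> {
    let (lo, hi) = range;
    // A border sits "just before" a value: the start of every other range,
    // and one past its end, kept only when it falls inside the range being split.
    let mut borders = vec![lo, hi + 1];
    for &(olo, ohi) in others {
        if olo > lo && olo <= hi {
            borders.push(olo);
        }
        if ohi >= lo && ohi < hi {
            borders.push(ohi + 1);
        }
    }
    borders.sort_unstable();
    borders.dedup();
    // Adjacent borders delimit the disjoint subranges.
    borders.windows(2).map(|w| (w[0], w[1] - 1)).collect()
}

fn main() {
    // Splitting 0..=10 relative to the seen range 3..=5 yields three pieces.
    assert_eq!(split_at_borders((0, 10), &[(3, 5)]), vec![(0, 2), (3, 5), (6, 10)]);
}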
44 ( 0.00%) match self.array_len { 44 ( 0.00%) Some(len) if self.max_slice.arity() >= len => self.max_slice = FixedLen(len), . _ => {} . } . } . . /// Iterate over the partition of this slice. . fn iter<'a>(&'a self) -> impl Iterator + Captures<'a> { . let smaller_lengths = match self.array_len { . // The only admissible fixed-length slice is one of the array size. Whether `max_slice` -- line 600 ---------------------------------------- -- line 604 ---------------------------------------- . // We cover all arities in the range `(self.arity..infinity)`. We split that range into . // two: lengths smaller than `max_slice.arity()` are treated independently as . // fixed-lengths slices, and lengths above are captured by `max_slice`. . None => self.arity..self.max_slice.arity(), . }; . smaller_lengths . .map(FixedLen) . .chain(once(self.max_slice)) 44 ( 0.00%) .map(move |kind| Slice::new(self.array_len, kind)) . } . } . . /// A value can be decomposed into a constructor applied to some fields. This struct represents . /// the constructor. See also `Fields`. . /// . /// `pat_constructor` retrieves the constructor corresponding to a pattern. . /// `specialize_constructor` returns the list of fields corresponding to a pattern, given a . /// constructor. `Constructor::apply` reconstructs the pattern from a pair of `Constructor` and . /// `Fields`. 55,671 ( 0.00%) #[derive(Clone, Debug, PartialEq)] . pub(super) enum Constructor<'tcx> { . /// The constructor for patterns that have a single constructor, like tuples, struct patterns . /// and fixed-length arrays. . Single, . /// Enum variants. 110,851 ( 0.00%) Variant(VariantIdx), . /// Ranges of integer literal values (`2`, `2..=5` or `2..5`). . IntRange(IntRange), . /// Ranges of floating-point literal values (`2.0..=5.2`). . FloatRange(&'tcx ty::Const<'tcx>, &'tcx ty::Const<'tcx>, RangeEnd), . /// String literals. Strings are not quite the same as `&[u8]` so we treat them separately. . Str(&'tcx ty::Const<'tcx>), . /// Array and slice patterns. . Slice(Slice), -- line 637 ---------------------------------------- -- line 649 ---------------------------------------- . /// Wildcard pattern. . Wildcard, . /// Or-pattern. . Or, . } . . impl<'tcx> Constructor<'tcx> { . pub(super) fn is_wildcard(&self) -> bool { 14,379 ( 0.00%) matches!(self, Wildcard) . } . . pub(super) fn is_non_exhaustive(&self) -> bool { . matches!(self, NonExhaustive) . } . . fn as_int_range(&self) -> Option<&IntRange> { 32,763 ( 0.00%) match self { . IntRange(range) => Some(range), . _ => None, . } . } . . fn as_slice(&self) -> Option { 22 ( 0.00%) match self { . Slice(slice) => Some(*slice), . _ => None, . } . } . . /// Checks if the `Constructor` is a variant and `TyCtxt::eval_stability` returns . /// `EvalResult::Deny { .. }`. . /// . /// This means that the variant has a stdlib unstable feature marking it. 1,094 ( 0.00%) pub(super) fn is_unstable_variant(&self, pcx: PatCtxt<'_, '_, 'tcx>) -> bool { 1,094 ( 0.00%) if let Constructor::Variant(idx) = self { 1,641 ( 0.00%) if let ty::Adt(adt, _) = pcx.ty.kind() { 2,735 ( 0.00%) let variant_def_id = adt.variants[*idx].def_id; . // Filter variants that depend on a disabled unstable feature. 1,094 ( 0.00%) return matches!( 4,376 ( 0.00%) pcx.cx.tcx.eval_stability(variant_def_id, None, DUMMY_SP, None), . EvalResult::Deny { .. } . ); . } . } . false 2,188 ( 0.00%) } . . /// Checks if the `Constructor` is a `Constructor::Variant` with a `#[doc(hidden)]` . /// attribute. . 
pub(super) fn is_doc_hidden_variant(&self, pcx: PatCtxt<'_, '_, 'tcx>) -> bool { 1,641 ( 0.00%) if let Constructor::Variant(idx) = self { 1,094 ( 0.00%) if let ty::Adt(adt, _) = pcx.ty.kind() { 2,735 ( 0.00%) let variant_def_id = adt.variants[*idx].def_id; 1,094 ( 0.00%) return pcx.cx.tcx.is_doc_hidden(variant_def_id); . } . } . false . } . . fn variant_index_for_adt(&self, adt: &'tcx ty::AdtDef) -> VariantIdx { 19,516 ( 0.00%) match *self { 9,648 ( 0.00%) Variant(idx) => idx, . Single => { 220 ( 0.00%) assert!(!adt.is_enum()); . VariantIdx::new(0) . } . _ => bug!("bad constructor {:?} for adt {:?}", self, adt), . } . } . . /// The number of fields for this constructor. This must be kept in sync with . /// `Fields::wildcards`. 14 ( 0.00%) pub(super) fn arity(&self, pcx: PatCtxt<'_, '_, 'tcx>) -> usize { 12 ( 0.00%) match self { 9 ( 0.00%) Single | Variant(_) => match pcx.ty.kind() { 1 ( 0.00%) ty::Tuple(fs) => fs.len(), . ty::Ref(..) => 1, . ty::Adt(adt, ..) => { . if adt.is_box() { . // The only legal patterns of type `Box` (outside `std`) are `_` and box . // patterns. If we're here we can assume this is a box pattern. . 1 . } else { . let variant = &adt.variants[self.variant_index_for_adt(adt)]; -- line 732 ---------------------------------------- -- line 740 ---------------------------------------- . | FloatRange(..) . | IntRange(..) . | NonExhaustive . | Opaque . | Missing { .. } . | Wildcard => 0, . Or => bug!("The `Or` constructor doesn't have a fixed arity"), . } 16 ( 0.00%) } . . /// Some constructors (namely `Wildcard`, `IntRange` and `Slice`) actually stand for a set of actual . /// constructors (like variants, integers or fixed-sized slices). When specializing for these . /// constructors, we want to be specialising for the actual underlying constructors. . /// Naively, we would simply return the list of constructors they correspond to. We instead are . /// more clever: if there are constructors that we know will behave the same wrt the current . /// matrix, we keep them grouped. For example, all slices of a sufficiently large length . /// will either be all useful or all non-useful with a given matrix. . /// . /// See the branches for details on how the splitting is done. . /// . /// This function may discard some irrelevant constructors if this preserves behavior and . /// diagnostics. Eg. for the `_` case, we ignore the constructors already present in the . /// matrix, unless all of them are. 569,550 ( 0.01%) pub(super) fn split<'a>( . &self, . pcx: PatCtxt<'_, '_, 'tcx>, . ctors: impl Iterator> + Clone, . ) -> SmallVec<[Self; 1]> . where . 'tcx: 'a, . { 345,413 ( 0.01%) match self { . Wildcard => { 106,544 ( 0.00%) let mut split_wildcard = SplitWildcard::new(pcx); 119,862 ( 0.00%) split_wildcard.split(pcx, ctors); 253,042 ( 0.00%) split_wildcard.into_ctors(pcx) . } . // Fast-track if the range is trivial. In particular, we don't do the overlapping . // ranges check. 2,328 ( 0.00%) IntRange(ctor_range) if !ctor_range.is_singleton() => { . let mut split_range = SplitIntRange::new(ctor_range.clone()); . let int_ranges = ctors.filter_map(|ctor| ctor.as_int_range()); . split_range.split(int_ranges.cloned()); . split_range.iter().map(IntRange).collect() . } 44 ( 0.00%) &Slice(Slice { kind: VarLen(self_prefix, self_suffix), array_len }) => { . let mut split_self = SplitVarLenSlice::new(self_prefix, self_suffix, array_len); . let slices = ctors.filter_map(|c| c.as_slice()).map(|s| s.kind); . split_self.split(slices); . split_self.iter().map(Slice).collect() . } . 
// Any other constructor can be used unchanged. 670,496 ( 0.01%) _ => smallvec![self.clone()], . } 512,595 ( 0.01%) } . . /// Returns whether `self` is covered by `other`, i.e. whether `self` is a subset of `other`. . /// For the simple cases, this is simply checking for equality. For the "grouped" constructors, . /// this checks for inclusion. . // We inline because this has a single call site in `Matrix::specialize_constructor`. . #[inline] . pub(super) fn is_covered_by<'p>(&self, pcx: PatCtxt<'_, 'p, 'tcx>, other: &Self) -> bool { . // This must be kept in sync with `is_covered_by_any`. 15,674,821 ( 0.26%) match (self, other) { . // Wildcards cover anything . (_, Wildcard) => true, . // The missing ctors are not covered by anything in the matrix except wildcards. . (Missing { .. } | Wildcard, _) => false, . . (Single, Single) => true, 1,269,110 ( 0.02%) (Variant(self_id), Variant(other_id)) => self_id == other_id, . . (IntRange(self_range), IntRange(other_range)) => self_range.is_covered_by(other_range), . ( . FloatRange(self_from, self_to, self_end), . FloatRange(other_from, other_to, other_end), . ) => { . match ( . compare_const_vals(pcx.cx.tcx, self_to, other_to, pcx.cx.param_env, pcx.ty), -- line 818 ---------------------------------------- -- line 848 ---------------------------------------- . other . ), . } . } . . /// Faster version of `is_covered_by` when applied to many constructors. `used_ctors` is . /// assumed to be built from `matrix.head_ctors()` with wildcards filtered out, and `self` is . /// assumed to have been split from a wildcard. 87,405 ( 0.00%) fn is_covered_by_any<'p>( . &self, . pcx: PatCtxt<'_, 'p, 'tcx>, . used_ctors: &[Constructor<'tcx>], . ) -> bool { 17,481 ( 0.00%) if used_ctors.is_empty() { . return false; . } . . // This must be kept in sync with `is_covered_by`. 35,022 ( 0.00%) match self { . // If `self` is `Single`, `used_ctors` cannot contain anything else than `Single`s. . Single => !used_ctors.is_empty(), 853,532 ( 0.01%) Variant(vid) => used_ctors.iter().any(|c| matches!(c, Variant(i) if i == vid)), . IntRange(range) => used_ctors . .iter() . .filter_map(|c| c.as_int_range()) . .any(|other| range.is_covered_by(other)), . Slice(slice) => used_ctors . .iter() . .filter_map(|c| c.as_slice()) . .any(|other| slice.is_covered_by(other)), . // This constructor is never covered by anything else . NonExhaustive => false, . Str(..) | FloatRange(..) | Opaque | Missing { .. } | Wildcard | Or => { . span_bug!(pcx.span, "found unexpected ctor in all_ctors: {:?}", self) . } . } 87,405 ( 0.00%) } . } . . /// A wildcard constructor that we split relative to the constructors in the matrix, as explained . /// at the top of the file. . /// . /// A constructor that is not present in the matrix rows will only be covered by the rows that have . /// wildcards. Thus we can group all of those constructors together; we call them "missing . /// constructors". Splitting a wildcard would therefore list all present constructors individually -- line 892 ---------------------------------------- -- line 903 ---------------------------------------- . pub(super) struct SplitWildcard<'tcx> { . /// Constructors seen in the matrix. . matrix_ctors: Vec>, . /// All the constructors for this type . all_ctors: SmallVec<[Constructor<'tcx>; 1]>, . } . . impl<'tcx> SplitWildcard<'tcx> { 106,648 ( 0.00%) pub(super) fn new<'p>(pcx: PatCtxt<'_, 'p, 'tcx>) -> Self { . debug!("SplitWildcard::new({:?})", pcx.ty); 13,331 ( 0.00%) let cx = pcx.cx; 13,331 ( 0.00%) let make_range = |start, end| { . 
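The hot `is_covered_by` / `is_covered_by_any` checks above boil down to equality for variants and interval inclusion for integer ranges; a toy version with a made-up `Ctor` type (illustration only, not rustc's `Constructor`):

enum Ctor {
    Variant(u32),
    IntRange(u128, u128), // inclusive endpoints, already bias-encoded
    Wildcard,
}

fn is_covered_by(this: &Ctor, other: &Ctor) -> bool {
    match (this, other) {
        // Wildcards cover anything.
        (_, Ctor::Wildcard) => true,
        // Variants cover only themselves.
        (Ctor::Variant(a), Ctor::Variant(b)) => a == b,
        // A range is covered when it is a subrange of the other.
        (Ctor::IntRange(lo, hi), Ctor::IntRange(olo, ohi)) => olo <= lo && hi <= ohi,
        _ => false,
    }
}

fn main() {
    assert!(is_covered_by(&Ctor::IntRange(3, 5), &Ctor::IntRange(0, 10)));
    assert!(!is_covered_by(&Ctor::Variant(0), &Ctor::Variant(1)));
    assert!(is_covered_by(&Ctor::Variant(0), &Ctor::Wildcard));
}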
IntRange( . // `unwrap()` is ok because we know the type is an integer. 13,894 ( 0.00%) IntRange::from_range(cx.tcx, start, end, pcx.ty, &RangeEnd::Included).unwrap(), . ) . }; . // This determines the set of all possible constructors for the type `pcx.ty`. For numbers, . // arrays and slices we use ranges and variable-length slices when appropriate. . // . // If the `exhaustive_patterns` feature is enabled, we make sure to omit constructors that . // are statically impossible. E.g., for `Option`, we do not include `Some(_)` in the . // returned list of constructors. . // Invariant: this is empty if and only if the type is uninhabited (as determined by . // `cx.is_uninhabited()`). 71,978 ( 0.00%) let all_ctors = match pcx.ty.kind() { . ty::Bool => smallvec![make_range(0, 1)], 110 ( 0.00%) ty::Array(sub_ty, len) if len.try_eval_usize(cx.tcx, cx.param_env).is_some() => { 88 ( 0.00%) let len = len.eval_usize(cx.tcx, cx.param_env) as usize; 154 ( 0.00%) if len != 0 && cx.is_uninhabited(sub_ty) { . smallvec![] . } else { . smallvec![Slice(Slice::new(Some(len), VarLen(0, 0)))] . } . } . // Treat arrays of a constant but unknown length like slices. . ty::Array(sub_ty, _) | ty::Slice(sub_ty) => { . let kind = if cx.is_uninhabited(sub_ty) { FixedLen(0) } else { VarLen(0, 0) }; . smallvec![Slice(Slice::new(None, kind))] . } 20,490 ( 0.00%) ty::Adt(def, substs) if def.is_enum() => { . // If the enum is declared as `#[non_exhaustive]`, we treat it as if it had an . // additional "unknown" constructor. . // There is no point in enumerating all possible variants, because the user can't . // actually match against them all themselves. So we always return only the fictitious . // constructor. . // E.g., in an example like: . // . // ``` -- line 951 ---------------------------------------- -- line 954 ---------------------------------------- . // io::ErrorKind::NotFound => {}, . // } . // ``` . // . // we don't want to show every possible IO error, but instead have only `_` as the . // witness. . let is_declared_nonexhaustive = cx.is_foreign_non_exhaustive_enum(pcx.ty); . 18,808 ( 0.00%) let is_exhaustive_pat_feature = cx.tcx.features().exhaustive_patterns; . . // If `exhaustive_patterns` is disabled and our scrutinee is an empty enum, we treat it . // as though it had an "unknown" constructor to avoid exposing its emptiness. The . // exception is if the pattern is at the top level, because we want empty matches to be . // considered exhaustive. . let is_secretly_empty = 18,808 ( 0.00%) def.variants.is_empty() && !is_exhaustive_pat_feature && !pcx.is_top_level; . . let mut ctors: SmallVec<[_; 1]> = def . .variants . .iter_enumerated() 4,702 ( 0.00%) .filter(|(_, v)| { . // If `exhaustive_patterns` is enabled, we exclude variants known to be . // uninhabited. 82,008 ( 0.00%) let is_uninhabited = is_exhaustive_pat_feature . && v.uninhabited_from(cx.tcx, substs, def.adt_kind(), cx.param_env) . .contains(cx.tcx, cx.module); . !is_uninhabited . }) . .map(|(idx, _)| Variant(idx)) . .collect(); . 9,404 ( 0.00%) if is_secretly_empty || is_declared_nonexhaustive { . ctors.push(NonExhaustive); . } . ctors . } . ty::Char => { . smallvec![ . // The valid Unicode Scalar Value ranges. . make_range('\u{0000}' as u128, '\u{D7FF}' as u128), . make_range('\u{E000}' as u128, '\u{10FFFF}' as u128), . ] . } . ty::Int(_) | ty::Uint(_) . if pcx.ty.is_ptr_sized_integral() 2,004 ( 0.00%) && !cx.tcx.features().precise_pointer_size_matching => . { . // `usize`/`isize` are not allowed to be matched exhaustively unless the . 
// `precise_pointer_size_matching` feature is enabled. So we treat those types like . // `#[non_exhaustive]` enums by returning a special unmatcheable constructor. . smallvec![NonExhaustive] . } . &ty::Int(ity) => { . let bits = Integer::from_int_ty(&cx.tcx, ity).size().bits() as u128; 754 ( 0.00%) let min = 1u128 << (bits - 1); 232 ( 0.00%) let max = min - 1; . smallvec![make_range(min, max)] . } . &ty::Uint(uty) => { . let size = Integer::from_uint_ty(&cx.tcx, uty).size(); . let max = size.truncate(u128::MAX); . smallvec![make_range(0, max)] . } . // If `exhaustive_patterns` is disabled and our scrutinee is the never type, we cannot -- line 1017 ---------------------------------------- -- line 1022 ---------------------------------------- . } . ty::Never => smallvec![], . _ if cx.is_uninhabited(pcx.ty) => smallvec![], . ty::Adt(..) | ty::Tuple(..) | ty::Ref(..) => smallvec![Single], . // This type is one for which we cannot list constructors, like `str` or `f64`. . _ => smallvec![NonExhaustive], . }; . 146,641 ( 0.00%) SplitWildcard { matrix_ctors: Vec::new(), all_ctors } 119,979 ( 0.00%) } . . /// Pass a set of constructors relative to which to split this one. Don't call twice, it won't . /// do what you want. 93,317 ( 0.00%) pub(super) fn split<'a>( . &mut self, . pcx: PatCtxt<'_, '_, 'tcx>, . ctors: impl Iterator> + Clone, . ) where . 'tcx: 'a, . { . // Since `all_ctors` never contains wildcards, this won't recurse further. 133,310 ( 0.00%) self.all_ctors = 323,685 ( 0.01%) self.all_ctors.iter().flat_map(|ctor| ctor.split(pcx, ctors.clone())).collect(); 93,317 ( 0.00%) self.matrix_ctors = ctors.filter(|c| !c.is_wildcard()).cloned().collect(); 66,655 ( 0.00%) } . . /// Whether there are any value constructors for this type that are not present in the matrix. . fn any_missing(&self, pcx: PatCtxt<'_, '_, 'tcx>) -> bool { . self.iter_missing(pcx).next().is_some() . } . . /// Iterate over the constructors for this type that are not present in the matrix. . pub(super) fn iter_missing<'a, 'p>( . &'a self, . pcx: PatCtxt<'a, 'p, 'tcx>, . ) -> impl Iterator> + Captures<'p> { 189,517 ( 0.00%) self.all_ctors.iter().filter(move |ctor| !ctor.is_covered_by_any(pcx, &self.matrix_ctors)) . } . . /// Return the set of constructors resulting from splitting the wildcard. As explained at the . /// top of the file, if any constructors are missing we can ignore the present ones. . fn into_ctors(self, pcx: PatCtxt<'_, '_, 'tcx>) -> SmallVec<[Constructor<'tcx>; 1]> { 26,636 ( 0.00%) if self.any_missing(pcx) { . // Some constructors are missing, thus we can specialize with the special `Missing` . // constructor, which stands for those constructors that are not seen in the matrix, . // and matches the same rows as any of them (namely the wildcard rows). See the top of . // the file for details. . // However, when all constructors are missing we can also specialize with the full . // `Wildcard` constructor. The difference will depend on what we want in diagnostics. . . // If some constructors are missing, we typically want to report those constructors, -- line 1072 ---------------------------------------- -- line 1084 ---------------------------------------- . // let (_, _, false) = x; . // ``` . // we don't want to show all 16 possible witnesses `(, , . // true)` - we are satisfied with `(_, _, true)`. So if all constructors are missing we . // prefer to report just a wildcard `_`. . // . // The exception is: if we are at the top-level, for example in an empty match, we . 
// sometimes prefer reporting the list of constructors instead of just `_`. 23,620 ( 0.00%) let report_when_all_missing = pcx.is_top_level && !IntRange::is_integral(pcx.ty); 35,351 ( 0.00%) let ctor = if !self.matrix_ctors.is_empty() || report_when_all_missing { 8,408 ( 0.00%) if pcx.is_non_exhaustive { . Missing { . nonexhaustive_enum_missing_real_variants: self . .iter_missing(pcx) . .any(|c| !(c.is_non_exhaustive() || c.is_unstable_variant(pcx))), . } . } else { . Missing { nonexhaustive_enum_missing_real_variants: false } . } . } else { . Wildcard . }; 35,430 ( 0.00%) return smallvec![ctor]; . } . . // All the constructors are present in the matrix, so we just go through them all. 15,080 ( 0.00%) self.all_ctors . } . } . . /// A value can be decomposed into a constructor applied to some fields. This struct represents . /// those fields, generalized to allow patterns in each field. See also `Constructor`. . /// . /// This is constructed for a constructor using [`Fields::wildcards()`]. The idea is that . /// [`Fields::wildcards()`] constructs a list of fields where all entries are wildcards, and then -- line 1118 ---------------------------------------- -- line 1142 ---------------------------------------- . Fields { fields: &[] } . } . . fn singleton(cx: &MatchCheckCtxt<'p, 'tcx>, field: DeconstructedPat<'p, 'tcx>) -> Self { . let field: &_ = cx.pattern_arena.alloc(field); . Fields { fields: std::slice::from_ref(field) } . } . 46,104 ( 0.00%) pub(super) fn from_iter( . cx: &MatchCheckCtxt<'p, 'tcx>, . fields: impl IntoIterator>, . ) -> Self { 2 ( 0.00%) let fields: &[_] = cx.pattern_arena.alloc_from_iter(fields); . Fields { fields } 51,830 ( 0.00%) } . . fn wildcards_from_tys( . cx: &MatchCheckCtxt<'p, 'tcx>, . tys: impl IntoIterator>, . ) -> Self { . Fields::from_iter(cx, tys.into_iter().map(DeconstructedPat::wildcard)) . } . -- line 1164 ---------------------------------------- -- line 1165 ---------------------------------------- . // In the cases of either a `#[non_exhaustive]` field list or a non-public field, we hide . // uninhabited fields in order not to reveal the uninhabitedness of the whole variant. . // This lists the fields we keep along with their types. . fn list_variant_nonhidden_fields<'a>( . cx: &'a MatchCheckCtxt<'p, 'tcx>, . ty: Ty<'tcx>, . variant: &'a VariantDef, . ) -> impl Iterator)> + Captures<'a> + Captures<'p> { 18,862 ( 0.00%) let (adt, substs) = match ty.kind() { 13,928 ( 0.00%) ty::Adt(adt, substs) => (adt, substs), . _ => bug!(), . }; . // Whether we must not match the fields of this variant exhaustively. 14,365 ( 0.00%) let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did.is_local(); . . variant.fields.iter().enumerate().filter_map(move |(i, field)| { 48,524 ( 0.00%) let ty = field.ty(cx.tcx, substs); . // `field.ty()` doesn't normalize after substituting. 13,864 ( 0.00%) let ty = cx.tcx.normalize_erasing_regions(cx.param_env, ty); 28,244 ( 0.00%) let is_visible = adt.is_enum() || field.vis.is_accessible_from(cx.module, cx.tcx); . let is_uninhabited = cx.is_uninhabited(ty); . . if is_uninhabited && (!is_visible || is_non_exhaustive) { . None . } else { . Some((Field::new(i), ty)) . } . }) . } . . /// Creates a new list of wildcard fields for a given constructor. The result must have a . /// length of `constructor.arity()`. 114,105 ( 0.00%) pub(super) fn wildcards( . cx: &MatchCheckCtxt<'p, 'tcx>, . ty: Ty<'tcx>, . constructor: &Constructor<'tcx>, . 
) -> Self { 159,747 ( 0.00%) let ret = match constructor { 15,930 ( 0.00%) Single | Variant(_) => match ty.kind() { 249 ( 0.00%) ty::Tuple(fs) => Fields::wildcards_from_tys(cx, fs.iter().map(|ty| ty.expect_ty())), 258 ( 0.00%) ty::Ref(_, rty, _) => Fields::wildcards_from_tys(cx, once(*rty)), . ty::Adt(adt, substs) => { 9,868 ( 0.00%) if adt.is_box() { . // The only legal patterns of type `Box` (outside `std`) are `_` and box . // patterns. If we're here we can assume this is a box pattern. . Fields::wildcards_from_tys(cx, once(substs.type_at(0))) . } else { . let variant = &adt.variants[constructor.variant_index_for_adt(adt)]; . let tys = Fields::list_variant_nonhidden_fields(cx, ty, variant) . .map(|(_, ty)| ty); 4,934 ( 0.00%) Fields::wildcards_from_tys(cx, tys) . } . } . _ => bug!("Unexpected type for `Single` constructor: {:?}", ty), . }, . Slice(slice) => match *ty.kind() { . ty::Slice(ty) | ty::Array(ty, _) => { . let arity = slice.arity(); . Fields::wildcards_from_tys(cx, (0..arity).map(|_| ty)) -- line 1223 ---------------------------------------- -- line 1232 ---------------------------------------- . | Missing { .. } . | Wildcard => Fields::empty(), . Or => { . bug!("called `Fields::wildcards` on an `Or` ctor") . } . }; . debug!("Fields::wildcards({:?}, {:?}) = {:#?}", constructor, ty, ret); . ret 114,105 ( 0.00%) } . . /// Returns the list of patterns. . pub(super) fn iter_patterns<'a>( . &'a self, . ) -> impl Iterator> + Captures<'a> { 591,875 ( 0.01%) self.fields.iter() . } . } . . /// Values and patterns can be represented as a constructor applied to some fields. This represents . /// a pattern in this form. . /// This also keeps track of whether the pattern has been found reachable during analysis. For this . /// reason we should be careful not to clone patterns for which we care about that. Use . /// `clone_and_forget_reachability` if you're sure. -- line 1254 ---------------------------------------- -- line 1266 ---------------------------------------- . } . . pub(super) fn new( . ctor: Constructor<'tcx>, . fields: Fields<'p, 'tcx>, . ty: Ty<'tcx>, . span: Span, . ) -> Self { 240,724 ( 0.00%) DeconstructedPat { ctor, fields, ty, span, reachable: Cell::new(false) } . } . . /// Construct a pattern that matches everything that starts with this constructor. . /// For example, if `ctor` is a `Constructor::Variant` for `Option::Some`, we get the pattern . /// `Some(_)`. . pub(super) fn wild_from_ctor(pcx: PatCtxt<'_, 'p, 'tcx>, ctor: Constructor<'tcx>) -> Self { 1,094 ( 0.00%) let fields = Fields::wildcards(pcx.cx, pcx.ty, &ctor); 4,376 ( 0.00%) DeconstructedPat::new(ctor, fields, pcx.ty, DUMMY_SP) . } . . /// Clone this value. This method emphasizes that cloning loses reachability information and . /// should be done carefully. 2,192 ( 0.00%) pub(super) fn clone_and_forget_reachability(&self) -> Self { 1,096 ( 0.00%) DeconstructedPat::new(self.ctor.clone(), self.fields, self.ty, self.span) 2,192 ( 0.00%) } . 136,440 ( 0.00%) pub(crate) fn from_pat(cx: &MatchCheckCtxt<'p, 'tcx>, pat: &Pat<'tcx>) -> Self { 24,040 ( 0.00%) let mkpat = |pat| DeconstructedPat::from_pat(cx, pat); . let ctor; . let fields; 78,892 ( 0.00%) match pat.kind.as_ref() { . PatKind::AscribeUserType { subpattern, .. } => return mkpat(subpattern), . PatKind::Binding { subpattern: Some(subpat), .. } => return mkpat(subpat), . PatKind::Binding { subpattern: None, .. } | PatKind::Wild => { . ctor = Wildcard; . fields = Fields::empty(); . } 2,654 ( 0.00%) PatKind::Deref { subpattern } => { . 
ctor = Single; 1,327 ( 0.00%) fields = Fields::singleton(cx, mkpat(subpattern)); . } 5,726 ( 0.00%) PatKind::Leaf { subpatterns } | PatKind::Variant { subpatterns, .. } => { 25,362 ( 0.00%) match pat.ty.kind() { . ty::Tuple(fs) => { . ctor = Single; 1,229 ( 0.00%) let mut wilds: SmallVec<[_; 2]> = fs . .iter() 1,263 ( 0.00%) .map(|ty| ty.expect_ty()) . .map(DeconstructedPat::wildcard) . .collect(); . for pat in subpatterns { 18,945 ( 0.00%) wilds[pat.field.index()] = mkpat(&pat.pattern); . } 13,519 ( 0.00%) fields = Fields::from_iter(cx, wilds); . } 13,491 ( 0.00%) ty::Adt(adt, substs) if adt.is_box() => { . // The only legal patterns of type `Box` (outside `std`) are `_` and box . // patterns. If we're here we can assume this is a box pattern. . // FIXME(Nadrieril): A `Box` can in theory be matched either with `Box(_, . // _)` or a box pattern. As a hack to avoid an ICE with the former, we . // ignore other fields than the first one. This will trigger an error later . // anyway. . // See https://github.com/rust-lang/rust/issues/82772 , . // explanation: https://github.com/rust-lang/rust/pull/82789#issuecomment-796921977 -- line 1328 ---------------------------------------- -- line 1335 ---------------------------------------- . mkpat(&pat.pattern) . } else { . DeconstructedPat::wildcard(substs.type_at(0)) . }; . ctor = Single; . fields = Fields::singleton(cx, pat); . } . ty::Adt(adt, _) => { 13,711 ( 0.00%) ctor = match pat.kind.as_ref() { . PatKind::Leaf { .. } => Single, 8,774 ( 0.00%) PatKind::Variant { variant_index, .. } => Variant(*variant_index), . _ => bug!(), . }; . let variant = &adt.variants[ctor.variant_index_for_adt(adt)]; . // For each field in the variant, we store the relevant index into `self.fields` if any. . let mut field_id_to_id: Vec> = 13,491 ( 0.00%) (0..variant.fields.len()).map(|_| None).collect(); 4,497 ( 0.00%) let tys = Fields::list_variant_nonhidden_fields(cx, pat.ty, variant) . .enumerate() . .map(|(i, (field, ty))| { 13,624 ( 0.00%) field_id_to_id[field.index()] = Some(i); . ty . }); . let mut wilds: SmallVec<[_; 2]> = . tys.map(DeconstructedPat::wildcard).collect(); . for pat in subpatterns { 16,940 ( 0.00%) if let Some(i) = field_id_to_id[pat.field.index()] { 50,820 ( 0.00%) wilds[i] = mkpat(&pat.pattern); . } . } 31,479 ( 0.00%) fields = Fields::from_iter(cx, wilds); . } . _ => bug!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, pat.ty), . } . } . PatKind::Constant { value } => { 619 ( 0.00%) if let Some(int_range) = IntRange::from_const(cx.tcx, cx.param_env, value) { . ctor = IntRange(int_range); . fields = Fields::empty(); . } else { . match pat.ty.kind() { . ty::Float(_) => { . ctor = FloatRange(value, value, RangeEnd::Included); . fields = Fields::empty(); . } -- line 1379 ---------------------------------------- -- line 1427 ---------------------------------------- . FixedLen(prefix.len() + suffix.len()) . }; . ctor = Slice(Slice::new(array_len, kind)); . fields = Fields::from_iter(cx, prefix.iter().chain(suffix).map(mkpat)); . } . PatKind::Or { .. } => { . ctor = Or; . let pats = expand_or_pat(pat); 740 ( 0.00%) fields = Fields::from_iter(cx, pats.into_iter().map(mkpat)); . } . } 54,576 ( 0.00%) DeconstructedPat::new(ctor, fields, pat.ty, pat.span) 122,796 ( 0.00%) } . . pub(crate) fn to_pat(&self, cx: &MatchCheckCtxt<'p, 'tcx>) -> Pat<'tcx> { . let is_wildcard = |pat: &Pat<'_>| { . matches!(*pat.kind, PatKind::Binding { subpattern: None, .. } | PatKind::Wild) . }; . let mut subpatterns = self.iter_fields().map(|p| p.to_pat(cx)); . 
let pat = match &self.ctor { . Single | Variant(_) => match self.ty.kind() { -- line 1447 ---------------------------------------- -- line 1520 ---------------------------------------- . bug!("can't convert to pattern: {:?}", self) . } . }; . . Pat { ty: self.ty, span: DUMMY_SP, kind: Box::new(pat) } . } . . pub(super) fn is_or_pat(&self) -> bool { 637,390 ( 0.01%) matches!(self.ctor, Or) . } . . pub(super) fn ctor(&self) -> &Constructor<'tcx> { . &self.ctor . } . pub(super) fn ty(&self) -> Ty<'tcx> { 23,478 ( 0.00%) self.ty . } . pub(super) fn span(&self) -> Span { 42,683 ( 0.00%) self.span . } . . pub(super) fn iter_fields<'a>( . &'a self, . ) -> impl Iterator> + Captures<'a> { . self.fields.iter_patterns() . } . . /// Specialize this pattern with a constructor. . /// `other_ctor` can be different from `self.ctor`, but must be covered by it. 4,779,432 ( 0.08%) pub(super) fn specialize<'a>( . &'a self, . cx: &MatchCheckCtxt<'p, 'tcx>, . other_ctor: &Constructor<'tcx>, . ) -> SmallVec<[&'p DeconstructedPat<'p, 'tcx>; 2]> { 2,987,145 ( 0.05%) match (&self.ctor, other_ctor) { . (Wildcard, _) => { . // We return a wildcard for each field of `other_ctor`. 89,096 ( 0.00%) Fields::wildcards(cx, self.ty, other_ctor).iter_patterns().collect() . } . (Slice(self_slice), Slice(other_slice)) . if self_slice.arity() != other_slice.arity() => . { . // The only tricky case: two slices of different arity. Since `self_slice` covers . // `other_slice`, `self_slice` must be `VarLen`, i.e. of the form . // `[prefix, .., suffix]`. Moreover `other_slice` is guaranteed to have a larger . // arity. So we fill the middle part with enough wildcards to reach the length of -- line 1565 ---------------------------------------- -- line 1578 ---------------------------------------- . let extra_wildcards = other_slice.arity() - self_slice.arity(); . let extra_wildcards = (0..extra_wildcards).map(|_| wildcard); . prefix.iter().chain(extra_wildcards).chain(suffix).collect() . } . } . } . _ => self.fields.iter_patterns().collect(), . } 5,376,861 ( 0.09%) } . . /// We keep track for each pattern if it was ever reachable during the analysis. This is used . /// with `unreachable_spans` to report unreachable subpatterns arising from or patterns. . pub(super) fn set_reachable(&self) { . self.reachable.set(true) . } . pub(super) fn is_reachable(&self) -> bool { . self.reachable.get() . } . . /// Report the spans of subpatterns that were not reachable, if any. . pub(super) fn unreachable_spans(&self) -> Vec { . let mut spans = Vec::new(); 21,543 ( 0.00%) self.collect_unreachable_spans(&mut spans); . spans . } . 54,648 ( 0.00%) fn collect_unreachable_spans(&self, spans: &mut Vec) { . // We don't look at subpatterns if we already reported the whole pattern as unreachable. 13,662 ( 0.00%) if !self.is_reachable() { . spans.push(self.span); . } else { . for p in self.iter_fields() { 19,443 ( 0.00%) p.collect_unreachable_spans(spans); . } . } 54,648 ( 0.00%) } . } . . /// This is mostly copied from the `Pat` impl. This is best effort and not good enough for a . /// `Display` impl. . impl<'p, 'tcx> fmt::Debug for DeconstructedPat<'p, 'tcx> { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . // Printing lists is a chore. . 
let mut first = true; -- line 1621 ---------------------------------------- 8,109,557 ( 0.13%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/bitmask.rs -------------------------------------------------------------------------------- Ir -- line 17 ---------------------------------------- . pub struct BitMask(pub BitMaskWord); . . #[allow(clippy::use_self)] . impl BitMask { . /// Returns a new `BitMask` with all bits inverted. . #[inline] . #[must_use] . pub fn invert(self) -> Self { 707,848 ( 0.01%) BitMask(self.0 ^ BITMASK_MASK) . } . . /// Flip the bit in the mask for the entry at the given index. . /// . /// Returns the bit's previous state. . #[inline] . #[allow(clippy::cast_ptr_alignment)] . #[cfg(feature = "raw")] -- line 33 ---------------------------------------- -- line 38 ---------------------------------------- . // The bit was set if the bit is now 0. . self.0 & mask == 0 . } . . /// Returns a new `BitMask` with the lowest bit removed. . #[inline] . #[must_use] . pub fn remove_lowest_bit(self) -> Self { 305,905 ( 0.01%) BitMask(self.0 & (self.0 - 1)) . } . /// Returns whether the `BitMask` has at least one set bit. . #[inline] . pub fn any_bit_set(self) -> bool { 3,721,209 ( 0.06%) self.0 != 0 . } . . /// Returns the first set bit in the `BitMask`, if there is one. . #[inline] . pub fn lowest_set_bit(self) -> Option { 48,204,713 ( 0.80%) if self.0 == 0 { . None . } else { . Some(unsafe { self.lowest_set_bit_nonzero() }) . } . } . . /// Returns the first set bit in the `BitMask`, if there is one. The . /// bitmask must not be empty. . #[inline] . #[cfg(feature = "nightly")] . pub unsafe fn lowest_set_bit_nonzero(self) -> usize { 51,084 ( 0.00%) intrinsics::cttz_nonzero(self.0) as usize / BITMASK_STRIDE . } . #[inline] . #[cfg(not(feature = "nightly"))] . pub unsafe fn lowest_set_bit_nonzero(self) -> usize { . self.trailing_zeros() . } . . /// Returns the number of trailing zeroes in the `BitMask`. -- line 77 ---------------------------------------- -- line 110 ---------------------------------------- . /// bits. . pub struct BitMaskIter(BitMask); . . impl Iterator for BitMaskIter { . type Item = usize; . . #[inline] . fn next(&mut self) -> Option { 7,837,151 ( 0.13%) let bit = self.0.lowest_set_bit()?; . self.0 = self.0.remove_lowest_bit(); . Some(bit) . } . } 175,121 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_transform/src/simplify.rs -------------------------------------------------------------------------------- Ir -- line 38 ---------------------------------------- . use std::convert::TryInto; . . pub struct SimplifyCfg { . label: String, . } . . impl SimplifyCfg { . pub fn new(label: &str) -> Self { 14,603 ( 0.00%) SimplifyCfg { label: format!("SimplifyCfg-{}", label) } . } . } . 17,180 ( 0.00%) pub fn simplify_cfg<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { 6,872 ( 0.00%) CfgSimplifier::new(body).simplify(); 5,154 ( 0.00%) remove_dead_blocks(tcx, body); . . // FIXME: Should probably be moved into some kind of pass manager . body.basic_blocks_mut().raw.shrink_to_fit(); 7,008 ( 0.00%) } . . impl<'tcx> MirPass<'tcx> for SimplifyCfg { 1,718 ( 0.00%) fn name(&self) -> Cow<'_, str> { 5,154 ( 0.00%) Cow::Borrowed(&self.label) 1,718 ( 0.00%) } . 
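The `BitMask` scan annotated a little above (`lowest_set_bit` plus `remove_lowest_bit`, among the hotter lines in bitmask.rs here) is the standard trailing-zeros loop over a probe mask; a standalone sketch of the same pattern:

fn main() {
    // A 16-slot control-byte match mask with hits at positions 4, 10 and 13.
    let mut mask: u16 = 0b0010_0100_0001_0000;
    let mut hits = Vec::new();
    while mask != 0 {
        let bit = mask.trailing_zeros() as usize; // index of the lowest set bit
        hits.push(bit);
        mask &= mask - 1; // clear the lowest set bit, like remove_lowest_bit
    }
    assert_eq!(hits, vec![4, 10, 13]);
}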
1,718 ( 0.00%) fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { . debug!("SimplifyCfg({:?}) - simplifying {:?}", self.label, body.source); 3,436 ( 0.00%) simplify_cfg(tcx, body); . } . } . . pub struct CfgSimplifier<'a, 'tcx> { . basic_blocks: &'a mut IndexVec>, . pred_count: IndexVec, . } . . impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> { 12,026 ( 0.00%) pub fn new(body: &'a mut Body<'tcx>) -> Self { 3,436 ( 0.00%) let mut pred_count = IndexVec::from_elem(0u32, body.basic_blocks()); . . // we can't use mir.predecessors() here because that counts . // dead blocks, which we don't want to. 5,154 ( 0.00%) pred_count[START_BLOCK] = 1; . 264,700 ( 0.00%) for (_, data) in traversal::preorder(body) { 177,909 ( 0.00%) if let Some(ref term) = data.terminator { 270,824 ( 0.00%) for &tgt in term.successors() { 464,575 ( 0.01%) pred_count[tgt] += 1; . } . } . } . . let basic_blocks = body.basic_blocks_mut(); . 6,872 ( 0.00%) CfgSimplifier { basic_blocks, pred_count } 13,744 ( 0.00%) } . 13,744 ( 0.00%) pub fn simplify(mut self) { 1,718 ( 0.00%) self.strip_nops(); . . // Vec of the blocks that should be merged. We store the indices here, instead of the . // statements itself to avoid moving the (relatively) large statements twice. . // We do not push the statements directly into the target block (`bb`) as that is slower . // due to additional reallocations . let mut merged_blocks = Vec::new(); . loop { . let mut changed = false; . 4,054 ( 0.00%) for bb in self.basic_blocks.indices() { 185,238 ( 0.00%) if self.pred_count[bb] == 0 { . continue; . } . . debug!("simplifying {:?}", bb); . . let mut terminator = . self.basic_blocks[bb].terminator.take().expect("invalid terminator state"); . 234,150 ( 0.00%) for successor in terminator.successors_mut() { . self.collapse_goto_chain(successor, &mut changed); . } . . let mut inner_changed = true; . merged_blocks.clear(); 239,788 ( 0.00%) while inner_changed { . inner_changed = false; . inner_changed |= self.simplify_branch(&mut terminator); 80,869 ( 0.00%) inner_changed |= self.merge_successor(&mut merged_blocks, &mut terminator); 242,607 ( 0.00%) changed |= inner_changed; . } . . let statements_to_merge = 83,688 ( 0.00%) merged_blocks.iter().map(|&i| self.basic_blocks[i].statements.len()).sum(); . 5,522 ( 0.00%) if statements_to_merge > 0 { . let mut statements = std::mem::take(&mut self.basic_blocks[bb].statements); . statements.reserve(statements_to_merge); 1,001 ( 0.00%) for &from in &merged_blocks { 2,002 ( 0.00%) statements.append(&mut self.basic_blocks[from].statements); . } 8,487 ( 0.00%) self.basic_blocks[bb].statements = statements; . } . 2,419,550 ( 0.04%) self.basic_blocks[bb].terminator = Some(terminator); . } . 4,054 ( 0.00%) if !changed { . break; . } . } 13,744 ( 0.00%) } . . /// This function will return `None` if . /// * the block has statements . /// * the block has a terminator other than `goto` . /// * the block has no terminator (meaning some other part of the current optimization stole it) . fn take_terminator_if_simple_goto(&mut self, bb: BasicBlock) -> Option> { 859,579 ( 0.01%) match self.basic_blocks[bb] { . BasicBlockData { . ref statements, . terminator: . ref mut terminator @ Some(Terminator { kind: TerminatorKind::Goto { .. }, .. }), . .. 20,833 ( 0.00%) } if statements.is_empty() => terminator.take(), . // if `terminator` is None, this means we are in a loop. In that . // case, let all the loop collapse to its entry. . _ => None, . } . } . . /// Collapse a goto chain starting from `start` . 
fn collapse_goto_chain(&mut self, start: &mut BasicBlock, changed: &mut bool) { . // Using `SmallVec` here, because in some logs on libcore oli-obk saw many single-element . // goto chains. We should probably benchmark different sizes. . let mut terminators: SmallVec<[_; 1]> = Default::default(); 366,075 ( 0.01%) let mut current = *start; 380,743 ( 0.01%) while let Some(terminator) = self.take_terminator_if_simple_goto(current) { 1,544 ( 0.00%) let target = match terminator { 772 ( 0.00%) Terminator { kind: TerminatorKind::Goto { target }, .. } => target, . _ => unreachable!(), . }; 14,668 ( 0.00%) terminators.push((current, terminator)); . current = target; . } . let last = current; 122,025 ( 0.00%) *start = last; 11,580 ( 0.00%) while let Some((current, mut terminator)) = terminators.pop() { 1,544 ( 0.00%) let target = match terminator { . Terminator { kind: TerminatorKind::Goto { ref mut target }, .. } => target, . _ => unreachable!(), . }; 1,544 ( 0.00%) *changed |= *target != last; 1,544 ( 0.00%) *target = last; . debug!("collapsing goto chain from {:?} to {:?}", current, target); . 1,544 ( 0.00%) if self.pred_count[current] == 1 { . // This is the last reference to current, so the pred-count to . // to target is moved into the current block. . self.pred_count[current] = 0; . } else { 400 ( 0.00%) self.pred_count[*target] += 1; 300 ( 0.00%) self.pred_count[current] -= 1; . } 25,476 ( 0.00%) self.basic_blocks[current].terminator = Some(terminator); . } . } . . // merge a block with 1 `goto` predecessor to its parent . fn merge_successor( . &mut self, . merged_blocks: &mut Vec, . terminator: &mut Terminator<'tcx>, . ) -> bool { 186,596 ( 0.00%) let target = match terminator.kind { 60,792 ( 0.00%) TerminatorKind::Goto { target } if self.pred_count[target] == 1 => target, . _ => return false, . }; . . debug!("merging block {:?} into {:?}", target, terminator); 64,837 ( 0.00%) *terminator = match self.basic_blocks[target].terminator.take() { . Some(terminator) => terminator, . None => { . // unreachable loop - this should not be possible, as we . // don't strand blocks, but handle it correctly. . return false; . } . }; . . merged_blocks.push(target); 8,457 ( 0.00%) self.pred_count[target] = 0; . . true . } . . // turn a branch with all successors identical to a goto . fn simplify_branch(&mut self, terminator: &mut Terminator<'tcx>) -> bool { 242,607 ( 0.00%) match terminator.kind { . TerminatorKind::SwitchInt { .. } => {} . _ => return false, . }; . . let first_succ = { 28,980 ( 0.00%) if let Some(&first_succ) = terminator.successors().next() { 14,490 ( 0.00%) if terminator.successors().all(|s| *s == first_succ) { . let count = terminator.successors().count(); . self.pred_count[first_succ] -= (count - 1) as u32; . first_succ . } else { . return false; . } . } else { . return false; -- line 249 ---------------------------------------- -- line 252 ---------------------------------------- . . debug!("simplifying branch {:?}", terminator); . terminator.kind = TerminatorKind::Goto { target: first_succ }; . true . } . . fn strip_nops(&mut self) { . for blk in self.basic_blocks.iter_mut() { 271,108 ( 0.00%) blk.statements.retain(|stmt| !matches!(stmt.kind, StatementKind::Nop)) . } . } . } . 17,180 ( 0.00%) pub fn remove_dead_blocks<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { 1,718 ( 0.00%) let reachable = traversal::reachable_as_bitset(body); . let num_blocks = body.basic_blocks().len(); 3,436 ( 0.00%) if num_blocks == reachable.count() { . return; . } . . 
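`collapse_goto_chain` above walks chains of statement-free `goto`-only blocks to their final destination and rewrites each edge to point there, adjusting predecessor counts along the way; a toy illustration with plain block indices (my own representation, not MIR):

fn main() {
    // block -> Some(target) means "this block is just `goto target`".
    let gotos: Vec<Option<usize>> = vec![Some(1), Some(2), None, Some(2)];

    // Follow the chain until a block that does real work is reached.
    let resolve = |mut b: usize| {
        while let Some(t) = gotos[b] {
            b = t;
        }
        b
    };

    assert_eq!(resolve(0), 2); // 0 -> 1 -> 2
    assert_eq!(resolve(3), 2); // 3 -> 2
    assert_eq!(resolve(2), 2); // already a real block
}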
let basic_blocks = body.basic_blocks_mut(); . let mut replacements: Vec<_> = (0..num_blocks).map(BasicBlock::new).collect(); . let mut used_blocks = 0; 22,559 ( 0.00%) for alive_index in reachable.iter() { . let alive_index = alive_index.index(); 22,559 ( 0.00%) replacements[alive_index] = BasicBlock::new(used_blocks); 45,118 ( 0.00%) if alive_index != used_blocks { . // Swap the next alive block data with the current available slot. Since . // alive_index is non-decreasing this is a valid operation. . basic_blocks.raw.swap(alive_index, used_blocks); . } 22,559 ( 0.00%) used_blocks += 1; . } . 1,580 ( 0.00%) if tcx.sess.instrument_coverage() { . save_unreachable_coverage(basic_blocks, used_blocks); . } . . basic_blocks.raw.truncate(used_blocks); . . for block in basic_blocks { 45,118 ( 0.00%) for target in block.terminator_mut().successors_mut() { 143,344 ( 0.00%) *target = replacements[target.index()]; . } . } 13,744 ( 0.00%) } . . /// Some MIR transforms can determine at compile time that a sequences of . /// statements will never be executed, so they can be dropped from the MIR. . /// For example, an `if` or `else` block that is guaranteed to never be executed . /// because its condition can be evaluated at compile time, such as by const . /// evaluation: `if false { ... }`. . /// . /// Those statements are bypassed by redirecting paths in the CFG around the -- line 305 ---------------------------------------- 883,915 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_index/src/bit_set.rs -------------------------------------------------------------------------------- Ir -- line 30 ---------------------------------------- . // Both start and end are inclusive. . let start = match range.start_bound().cloned() { . Bound::Included(start) => start.index(), . Bound::Excluded(start) => start.index() + 1, . Bound::Unbounded => 0, . }; . let end = match range.end_bound().cloned() { . Bound::Included(end) => end.index(), 15 ( 0.00%) Bound::Excluded(end) => end.index().checked_sub(1)?, . Bound::Unbounded => domain - 1, . }; 30 ( 0.00%) assert!(end < domain); 30 ( 0.00%) if start > end { . return None; . } . Some((start, end)) . } . . macro_rules! bit_relations_inherent_impls { . () => { . /// Sets `self = self | other` and returns `true` if `self` changed . /// (i.e., if new bits were added). 968,768 ( 0.02%) pub fn union(&mut self, other: &Rhs) -> bool . where . Self: BitRelations, . { . >::union(self, other) 782,348 ( 0.01%) } . . /// Sets `self = self - other` and returns `true` if `self` changed. . /// (i.e., if any bits were removed). 278,860 ( 0.00%) pub fn subtract(&mut self, other: &Rhs) -> bool . where . Self: BitRelations, . { . >::subtract(self, other) 278,860 ( 0.00%) } . . /// Sets `self = self & other` and return `true` if `self` changed. . /// (i.e., if any bits were removed). . pub fn intersect(&mut self, other: &Rhs) -> bool . where . Self: BitRelations, . { . >::intersect(self, other) -- line 74 ---------------------------------------- -- line 92 ---------------------------------------- . domain_size: usize, . words: Vec, . marker: PhantomData, . } . . impl BitSet { . /// Gets the domain size. . pub fn domain_size(&self) -> usize { 124 ( 0.00%) self.domain_size . } . } . . impl BitSet { . /// Creates a new, empty bitset with a given `domain_size`. . #[inline] . pub fn new_empty(domain_size: usize) -> BitSet { . 
let num_words = num_words(domain_size); 81,476 ( 0.00%) BitSet { domain_size, words: vec![0; num_words], marker: PhantomData } . } . . /// Creates a new, filled bitset with a given `domain_size`. . #[inline] . pub fn new_filled(domain_size: usize) -> BitSet { . let num_words = num_words(domain_size); 345 ( 0.00%) let mut result = BitSet { domain_size, words: vec![!0; num_words], marker: PhantomData }; . result.clear_excess_bits(); . result . } . . /// Clear all elements. . #[inline] . pub fn clear(&mut self) { . for word in &mut self.words { 7,438 ( 0.00%) *word = 0; . } . } . . /// Clear excess bits in the final word. . fn clear_excess_bits(&mut self) { 859 ( 0.00%) let num_bits_in_final_word = self.domain_size % WORD_BITS; 1,856 ( 0.00%) if num_bits_in_final_word > 0 { 996 ( 0.00%) let mask = (1 << num_bits_in_final_word) - 1; 2,850 ( 0.00%) let final_word_idx = self.words.len() - 1; 927 ( 0.00%) self.words[final_word_idx] &= mask; . } . } . . /// Count the number of set bits in the set. . pub fn count(&self) -> usize { . self.words.iter().map(|e| e.count_ones() as usize).sum() . } . . /// Returns `true` if `self` contains `elem`. . #[inline] . pub fn contains(&self, elem: T) -> bool { 2,368,971 ( 0.04%) assert!(elem.index() < self.domain_size); . let (word_index, mask) = word_index_and_mask(elem); 2,343,813 ( 0.04%) (self.words[word_index] & mask) != 0 . } . . /// Is `self` is a (non-strict) superset of `other`? . #[inline] . pub fn superset(&self, other: &BitSet) -> bool { . assert_eq!(self.domain_size, other.domain_size); . self.words.iter().zip(&other.words).all(|(a, b)| (a & b) == *b) . } . . /// Is the set empty? . #[inline] . pub fn is_empty(&self) -> bool { 43 ( 0.00%) self.words.iter().all(|a| *a == 0) . } . . /// Insert `elem`. Returns whether the set has changed. . #[inline] . pub fn insert(&mut self, elem: T) -> bool { 2,279,193 ( 0.04%) assert!(elem.index() < self.domain_size); . let (word_index, mask) = word_index_and_mask(elem); . let word_ref = &mut self.words[word_index]; 870,404 ( 0.01%) let word = *word_ref; 1,740,808 ( 0.03%) let new_word = word | mask; 1,086,694 ( 0.02%) *word_ref = new_word; 931,949 ( 0.02%) new_word != word . } . . #[inline] . pub fn insert_range(&mut self, elems: impl RangeBounds) { . let Some((start, end)) = inclusive_start_end(elems, self.domain_size) else { . return; . }; . -- line 182 ---------------------------------------- -- line 199 ---------------------------------------- . } else { . self.words[start_word_index] |= end_mask | (end_mask - start_mask); . } . } . . /// Sets all bits to true. . pub fn insert_all(&mut self) { . for word in &mut self.words { 1,718 ( 0.00%) *word = !0; . } . self.clear_excess_bits(); . } . . /// Returns `true` if the set has changed. . #[inline] . pub fn remove(&mut self, elem: T) -> bool { 731,965 ( 0.01%) assert!(elem.index() < self.domain_size); . let (word_index, mask) = word_index_and_mask(elem); . let word_ref = &mut self.words[word_index]; 48,034 ( 0.00%) let word = *word_ref; 749,020 ( 0.01%) let new_word = word & !mask; 374,766 ( 0.01%) *word_ref = new_word; 96,068 ( 0.00%) new_word != word . } . . /// Gets a slice of the underlying words. . pub fn words(&self) -> &[Word] { . &self.words . } . . /// Iterates over the indices of set bits in a sorted order. -- line 229 ---------------------------------------- -- line 273 ---------------------------------------- . not_already |= (self.words[current_index] ^ new_bit_mask) != 0; . // Any bits in the tail? Note `clear_excess_bits` before. . 
not_already |= self.words[current_index + 1..].iter().any(|&x| x != 0); . . not_already . } . . fn last_set_in(&self, range: impl RangeBounds) -> Option { 15 ( 0.00%) let (start, end) = inclusive_start_end(range, self.domain_size)?; . let (start_word_index, _) = word_index_and_mask(start); . let (end_word_index, end_mask) = word_index_and_mask(end); . 75 ( 0.00%) let end_word = self.words[end_word_index] & (end_mask | (end_mask - 1)); 30 ( 0.00%) if end_word != 0 { 36 ( 0.00%) let pos = max_bit(end_word) + WORD_BITS * end_word_index; 24 ( 0.00%) if start <= pos { . return Some(T::new(pos)); . } . } . . // We exclude end_word_index from the range here, because we don't want . // to limit ourselves to *just* the last word: the bits set it in may be . // after `end`, so it may not work out. . if let Some(offset) = 3 ( 0.00%) self.words[start_word_index..end_word_index].iter().rposition(|&w| w != 0) . { 6 ( 0.00%) let word_idx = start_word_index + offset; 3 ( 0.00%) let start_word = self.words[word_idx]; 15 ( 0.00%) let pos = max_bit(start_word) + WORD_BITS * word_idx; 6 ( 0.00%) if start <= pos { . return Some(T::new(pos)); . } . } . . None . } . . bit_relations_inherent_impls! {} . } . . // dense REL dense . impl BitRelations> for BitSet { . fn union(&mut self, other: &BitSet) -> bool { 559,260 ( 0.01%) assert_eq!(self.domain_size, other.domain_size); 3,759,584 ( 0.06%) bitwise(&mut self.words, &other.words, |a, b| a | b) . } . . fn subtract(&mut self, other: &BitSet) -> bool { . assert_eq!(self.domain_size, other.domain_size); 8,904 ( 0.00%) bitwise(&mut self.words, &other.words, |a, b| a & !b) . } . . fn intersect(&mut self, other: &BitSet) -> bool { . assert_eq!(self.domain_size, other.domain_size); . bitwise(&mut self.words, &other.words, |a, b| a & b) . } . } . . // Applies a function to mutate a bitset, and returns true if any . // of the applications return true . fn sequential_update( . mut self_update: impl FnMut(T) -> bool, . it: impl Iterator, . ) -> bool { . let mut changed = false; 85,836 ( 0.00%) for elem in it { 257,508 ( 0.00%) changed |= self_update(elem); . } . changed 222,504 ( 0.00%) } . . // Optimization of intersection for SparseBitSet that's generic . // over the RHS . fn sparse_intersect( . set: &mut SparseBitSet, . other_contains: impl Fn(&T) -> bool, . ) -> bool { . let size = set.elems.len(); -- line 350 ---------------------------------------- -- line 413 ---------------------------------------- . HybridBitSet::Dense(dense) => dense.intersect(other), . } . } . } . . // dense REL hybrid . impl BitRelations> for BitSet { . fn union(&mut self, other: &HybridBitSet) -> bool { 223,088 ( 0.00%) assert_eq!(self.domain_size, other.domain_size()); 111,544 ( 0.00%) match other { . HybridBitSet::Sparse(sparse) => { . sequential_update(|elem| self.insert(elem), sparse.iter().cloned()) . } . HybridBitSet::Dense(dense) => self.union(dense), . } . } . . fn subtract(&mut self, other: &HybridBitSet) -> bool { 223,088 ( 0.00%) assert_eq!(self.domain_size, other.domain_size()); 111,544 ( 0.00%) match other { . HybridBitSet::Sparse(sparse) => { . sequential_update(|elem| self.remove(elem), sparse.iter().cloned()) . } . HybridBitSet::Dense(dense) => self.subtract(dense), . } . } . . fn intersect(&mut self, other: &HybridBitSet) -> bool { -- line 440 ---------------------------------------- -- line 455 ---------------------------------------- . HybridBitSet::Dense(dense) => self.intersect(dense), . } . } . } . . // hybrid REL hybrid . impl BitRelations> for HybridBitSet { . 
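.     // In the sparse-on-the-left arm shown below, a sparse `other` is merged
.     // element by element via `self.insert`, which may densify `self` once it
.     // grows past SPARSE_MAX; a dense `other` is handed to `self.union(dense)`
.     // instead.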
fn union(&mut self, other: &HybridBitSet) -> bool { 36,498 ( 0.00%) assert_eq!(self.domain_size(), other.domain_size()); 12,166 ( 0.00%) match self { . HybridBitSet::Sparse(_) => { 12,166 ( 0.00%) match other { . HybridBitSet::Sparse(other_sparse) => { . // Both sets are sparse. Add the elements in . // `other_sparse` to `self` one at a time. This . // may or may not cause `self` to be densified. . let mut changed = false; . for elem in other_sparse.iter() { 36,792 ( 0.00%) changed |= self.insert(*elem); . } . changed . } . . HybridBitSet::Dense(other_dense) => self.union(other_dense), . } . } . -- line 481 ---------------------------------------- -- line 511 ---------------------------------------- . } . } . . impl Clone for BitSet { . fn clone(&self) -> Self { . BitSet { domain_size: self.domain_size, words: self.words.clone(), marker: PhantomData } . } . 855,414 ( 0.01%) fn clone_from(&mut self, from: &Self) { 645,400 ( 0.01%) if self.domain_size != from.domain_size { . self.words.resize(from.domain_size, 0); . self.domain_size = from.domain_size; . } . . self.words.copy_from_slice(&from.words); . } . } . -- line 528 ---------------------------------------- -- line 599 ---------------------------------------- . } . } . } . . impl<'a, T: Idx> Iterator for BitIter<'a, T> { . type Item = T; . fn next(&mut self) -> Option { . loop { 2,322,043 ( 0.04%) if self.word != 0 { . // Get the position of the next set bit in the current word, . // then clear the bit. . let bit_pos = self.word.trailing_zeros() as usize; . let bit = 1 << bit_pos; . self.word ^= bit; 204,696 ( 0.00%) return Some(T::new(bit_pos + self.offset)); . } . . // Move onto the next word. `wrapping_add()` is needed to handle . // the degenerate initial value given to `offset` in `new()`. . let word = self.iter.next()?; 992,696 ( 0.02%) self.word = *word; . self.offset = self.offset.wrapping_add(WORD_BITS); . } . } . } . . #[inline] . fn bitwise(out_vec: &mut [Word], in_vec: &[Word], op: Op) -> bool . where . Op: Fn(Word, Word) -> Word, . { 746,848 ( 0.01%) assert_eq!(out_vec.len(), in_vec.len()); . let mut changed = 0; 362,954 ( 0.01%) for (out_elem, in_elem) in iter::zip(out_vec, in_vec) { 3,943,598 ( 0.07%) let old_val = *out_elem; 3,497,084 ( 0.06%) let new_val = op(old_val, *in_elem); 3,763,876 ( 0.06%) *out_elem = new_val; . // This is essentially equivalent to a != with changed being a bool, but . // in practice this code gets auto-vectorized by the compiler for most . // operators. Using != here causes us to generate quite poor code as the . // compiler tries to go back to a boolean on each loop iteration. 7,887,392 ( 0.13%) changed |= old_val ^ new_val; . } . changed != 0 . } . . const SPARSE_MAX: usize = 8; . . /// A fixed-size bitset type with a sparse representation and a maximum of . /// `SPARSE_MAX` elements. The elements are stored as a sorted `ArrayVec` with . /// no duplicates. . /// . /// This type is used by `HybridBitSet`; do not use directly. . #[derive(Clone, Debug)] . pub struct SparseBitSet { 101,496 ( 0.00%) domain_size: usize, . elems: ArrayVec, . } . . impl SparseBitSet { . fn new_empty(domain_size: usize) -> Self { . SparseBitSet { domain_size, elems: ArrayVec::new() } . } . . fn len(&self) -> usize { 267,260 ( 0.00%) self.elems.len() . } . . fn is_empty(&self) -> bool { . self.elems.len() == 0 . } . . fn contains(&self, elem: T) -> bool { 5,181 ( 0.00%) assert!(elem.index() < self.domain_size); . self.elems.contains(&elem) . } . . 
fn insert(&mut self, elem: T) -> bool { 266,882 ( 0.00%) assert!(elem.index() < self.domain_size); 108,129 ( 0.00%) let changed = if let Some(i) = self.elems.iter().position(|&e| e.index() >= elem.index()) { 48,504 ( 0.00%) if self.elems[i] == elem { . // `elem` is already in the set. . false . } else { . // `elem` is smaller than one or more existing elements. . self.elems.insert(i, elem); . true . } . } else { . // `elem` is larger than all existing elements. . self.elems.push(elem); . true . }; 266,882 ( 0.00%) assert!(self.len() <= SPARSE_MAX); . changed . } . . fn remove(&mut self, elem: T) -> bool { 184,628 ( 0.00%) assert!(elem.index() < self.domain_size); . if let Some(i) = self.elems.iter().position(|&e| e == elem) { 46,572 ( 0.00%) self.elems.remove(i); . true . } else { . false . } . } . . fn to_dense(&self) -> BitSet { . let mut dense = BitSet::new_empty(self.domain_size); . for elem in self.elems.iter() { 1,464 ( 0.00%) dense.insert(*elem); . } . dense . } . . fn iter(&self) -> slice::Iter<'_, T> { 35,574 ( 0.00%) self.elems.iter() . } . . bit_relations_inherent_impls! {} . } . . impl SparseBitSet { . fn last_set_in(&self, range: impl RangeBounds) -> Option { . let mut last_leq = None; -- line 723 ---------------------------------------- -- line 738 ---------------------------------------- . /// number of elements, but a large `domain_size`, and are cleared frequently. . /// . /// `T` is an index type, typically a newtyped `usize` wrapper, but it can also . /// just be `usize`. . /// . /// All operations that involve an element will panic if the element is equal . /// to or greater than the domain size. All operations that involve two bitsets . /// will panic if the bitsets have differing domain sizes. 913,464 ( 0.02%) #[derive(Clone)] . pub enum HybridBitSet { . Sparse(SparseBitSet), . Dense(BitSet), . } . . impl fmt::Debug for HybridBitSet { . fn fmt(&self, w: &mut fmt::Formatter<'_>) -> fmt::Result { . match self { -- line 754 ---------------------------------------- -- line 767 ---------------------------------------- . match self { . HybridBitSet::Sparse(sparse) => sparse.domain_size, . HybridBitSet::Dense(dense) => dense.domain_size, . } . } . . pub fn clear(&mut self) { . let domain_size = self.domain_size(); 21,980 ( 0.00%) *self = HybridBitSet::new_empty(domain_size); . } . . pub fn contains(&self, elem: T) -> bool { 9,228 ( 0.00%) match self { . HybridBitSet::Sparse(sparse) => sparse.contains(elem), . HybridBitSet::Dense(dense) => dense.contains(elem), . } . } . . pub fn superset(&self, other: &HybridBitSet) -> bool { . match (self, other) { . (HybridBitSet::Dense(self_dense), HybridBitSet::Dense(other_dense)) => { -- line 787 ---------------------------------------- -- line 799 ---------------------------------------- . HybridBitSet::Sparse(sparse) => sparse.is_empty(), . HybridBitSet::Dense(dense) => dense.is_empty(), . } . } . . /// Returns the previous element present in the bitset from `elem`, . /// inclusively of elem. That is, will return `Some(elem)` if elem is in the . /// bitset. 35,604 ( 0.00%) pub fn last_set_in(&self, range: impl RangeBounds) -> Option . where . T: Ord, . { 35,604 ( 0.00%) match self { . HybridBitSet::Sparse(sparse) => sparse.last_set_in(range), . HybridBitSet::Dense(dense) => dense.last_set_in(range), . } 53,406 ( 0.00%) } . 1,218,267 ( 0.02%) pub fn insert(&mut self, elem: T) -> bool { . // No need to check `elem` against `self.domain_size` here because all . // the match cases check it, one way or another. 
270,726 ( 0.00%) match self { 267,260 ( 0.00%) HybridBitSet::Sparse(sparse) if sparse.len() < SPARSE_MAX => { . // The set is sparse and has space for `elem`. . sparse.insert(elem) . } . HybridBitSet::Sparse(sparse) if sparse.contains(elem) => { . // The set is sparse and does not have space for `elem`, but . // that doesn't matter because `elem` is already present. . false . } . HybridBitSet::Sparse(sparse) => { . // The set is sparse and full. Convert to a dense set. . let mut dense = sparse.to_dense(); . let changed = dense.insert(elem); 183 ( 0.00%) assert!(changed); 1,098 ( 0.00%) *self = HybridBitSet::Dense(dense); . changed . } . HybridBitSet::Dense(dense) => dense.insert(elem), . } 1,082,904 ( 0.02%) } . . pub fn insert_range(&mut self, elems: impl RangeBounds) { . // No need to check `elem` against `self.domain_size` here because all . // the match cases check it, one way or another. . let start = match elems.start_bound().cloned() { . Bound::Included(start) => start.index(), . Bound::Excluded(start) => start.index() + 1, . Bound::Unbounded => 0, -- line 848 ---------------------------------------- -- line 879 ---------------------------------------- . match self { . HybridBitSet::Sparse(_) => { . *self = HybridBitSet::Dense(BitSet::new_filled(domain_size)); . } . HybridBitSet::Dense(dense) => dense.insert_all(), . } . } . 91,440 ( 0.00%) pub fn remove(&mut self, elem: T) -> bool { . // Note: we currently don't bother going from Dense back to Sparse. 184,800 ( 0.00%) match self { . HybridBitSet::Sparse(sparse) => sparse.remove(elem), . HybridBitSet::Dense(dense) => dense.remove(elem), . } 182,880 ( 0.00%) } . . /// Converts to a dense set, consuming itself in the process. . pub fn to_dense(self) -> BitSet { . match self { . HybridBitSet::Sparse(sparse) => sparse.to_dense(), . HybridBitSet::Dense(dense) => dense, . } . } . . pub fn iter(&self) -> HybridIter<'_, T> { 8,152 ( 0.00%) match self { 15,715 ( 0.00%) HybridBitSet::Sparse(sparse) => HybridIter::Sparse(sparse.iter()), . HybridBitSet::Dense(dense) => HybridIter::Dense(dense.iter()), . } . } . . bit_relations_inherent_impls! {} . } . . pub enum HybridIter<'a, T: Idx> { -- line 913 ---------------------------------------- -- line 935 ---------------------------------------- . /// to or greater than the domain size. . #[derive(Clone, Debug, PartialEq)] . pub struct GrowableBitSet { . bit_set: BitSet, . } . . impl GrowableBitSet { . /// Ensure that the set can hold at least `min_domain_size` elements. 100,275 ( 0.00%) pub fn ensure(&mut self, min_domain_size: usize) { 28,650 ( 0.00%) if self.bit_set.domain_size < min_domain_size { 86 ( 0.00%) self.bit_set.domain_size = min_domain_size; . } . . let min_num_words = num_words(min_domain_size); 57,300 ( 0.00%) if self.bit_set.words.len() < min_num_words { . self.bit_set.words.resize(min_num_words, 0) . } 85,950 ( 0.00%) } . . pub fn new_empty() -> GrowableBitSet { 9,432 ( 0.00%) GrowableBitSet { bit_set: BitSet::new_empty(0) } . } . . pub fn with_capacity(capacity: usize) -> GrowableBitSet { 4 ( 0.00%) GrowableBitSet { bit_set: BitSet::new_empty(capacity) } . } . . /// Returns `true` if the set has changed. . #[inline] . pub fn insert(&mut self, elem: T) -> bool { 71,291 ( 0.00%) self.ensure(elem.index() + 1); . self.bit_set.insert(elem) . } . . /// Returns `true` if the set has changed. . #[inline] . pub fn remove(&mut self, elem: T) -> bool { 111 ( 0.00%) self.ensure(elem.index() + 1); . self.bit_set.remove(elem) . } . . #[inline] . pub fn is_empty(&self) -> bool { . 
self.bit_set.is_empty() . } . . #[inline] . pub fn contains(&self, elem: T) -> bool { . let (word_index, mask) = word_index_and_mask(elem); 656 ( 0.00%) self.bit_set.words.get(word_index).map_or(false, |word| (word & mask) != 0) . } . } . . /// A fixed-size 2D bit matrix type with a dense representation. . /// . /// `R` and `C` are index types used to identify rows and columns respectively; . /// typically newtyped `usize` wrappers, but they can also just be `usize`. . /// -- line 992 ---------------------------------------- -- line 1001 ---------------------------------------- . } . . impl BitMatrix { . /// Creates a new `rows x columns` matrix, initially empty. . pub fn new(num_rows: usize, num_columns: usize) -> BitMatrix { . // For every element, we need one bit for every other . // element. Round up to an even number of words. . let words_per_row = num_words(num_columns); 10,812 ( 0.00%) BitMatrix { . num_rows, . num_columns, 6,354 ( 0.00%) words: vec![0; num_rows * words_per_row], . marker: PhantomData, . } . } . . /// Creates a new matrix, with `row` used as the value for every row. . pub fn from_row_n(row: &BitSet, num_rows: usize) -> BitMatrix { . let num_columns = row.domain_size(); . let words_per_row = num_words(num_columns); -- line 1020 ---------------------------------------- -- line 1029 ---------------------------------------- . . pub fn rows(&self) -> impl Iterator { . (0..self.num_rows).map(R::new) . } . . /// The range of bits for a given row. . fn range(&self, row: R) -> (usize, usize) { . let words_per_row = num_words(self.num_columns); 148,330 ( 0.00%) let start = row.index() * words_per_row; 125,900 ( 0.00%) (start, start + words_per_row) . } . . /// Sets the cell at `(row, column)` to true. Put another way, insert . /// `column` to the bitset for `row`. . /// . /// Returns `true` if this changed the matrix. . pub fn insert(&mut self, row: R, column: C) -> bool { 79,822 ( 0.00%) assert!(row.index() < self.num_rows && column.index() < self.num_columns); . let (start, _) = self.range(row); . let (word_index, mask) = word_index_and_mask(column); . let words = &mut self.words[..]; 81,040 ( 0.00%) let word = words[start + word_index]; 32,416 ( 0.00%) let new_word = word | mask; 16,208 ( 0.00%) words[start + word_index] = new_word; 32,416 ( 0.00%) word != new_word . } . . /// Do the bits from `row` contain `column`? Put another way, is . /// the matrix cell at `(row, column)` true? Put yet another way, . /// if the matrix represents (transitive) reachability, can . /// `row` reach `column`? 6,222 ( 0.00%) pub fn contains(&self, row: R, column: C) -> bool { 31,110 ( 0.00%) assert!(row.index() < self.num_rows && column.index() < self.num_columns); . let (start, _) = self.range(row); . let (word_index, mask) = word_index_and_mask(column); 31,110 ( 0.00%) (self.words[start + word_index] & mask) != 0 12,444 ( 0.00%) } . . /// Returns those indices that are true in rows `a` and `b`. This . /// is an *O*(*n*) operation where *n* is the number of elements . /// (somewhat independent from the actual size of the . /// intersection, in particular). . pub fn intersect_rows(&self, row1: R, row2: R) -> Vec { . assert!(row1.index() < self.num_rows && row2.index() < self.num_rows); . let (row1_start, row1_end) = self.range(row1); -- line 1073 ---------------------------------------- -- line 1090 ---------------------------------------- . . /// Adds the bits from row `read` to the bits from row `write`, and . /// returns `true` if anything changed. . /// . 
/// This is used when computing transitive reachability because if . /// you have an edge `write -> read`, because in that case . /// `write` can reach everything that `read` can (and . /// potentially more). 14,990 ( 0.00%) pub fn union_rows(&mut self, read: R, write: R) -> bool { 81,040 ( 0.00%) assert!(read.index() < self.num_rows && write.index() < self.num_rows); 16,208 ( 0.00%) let (read_start, read_end) = self.range(read); . let (write_start, write_end) = self.range(write); . let words = &mut self.words[..]; . let mut changed = false; . for (read_index, write_index) in iter::zip(read_start..read_end, write_start..write_end) { 90,784 ( 0.00%) let word = words[write_index]; 48,624 ( 0.00%) let new_word = word | words[read_index]; 16,208 ( 0.00%) words[write_index] = new_word; 82,258 ( 0.00%) changed |= word != new_word; . } . changed 44,970 ( 0.00%) } . . /// Adds the bits from `with` to the bits from row `write`, and . /// returns `true` if anything changed. . pub fn union_row_with(&mut self, with: &BitSet, write: R) -> bool { . assert!(write.index() < self.num_rows); . assert_eq!(with.domain_size(), self.num_columns); . let (write_start, write_end) = self.range(write); . let mut changed = false; -- line 1119 ---------------------------------------- -- line 1209 ---------------------------------------- . /// Creates a new empty sparse bit matrix with no rows or columns. . pub fn new(num_columns: usize) -> Self { . Self { num_columns, rows: IndexVec::new() } . } . . fn ensure_row(&mut self, row: R) -> &mut HybridBitSet { . // Instantiate any missing rows up to and including row `row` with an empty HybridBitSet. . // Then replace row `row` with a full HybridBitSet if necessary. 10,193 ( 0.00%) self.rows.get_or_insert_with(row, || HybridBitSet::new_empty(self.num_columns)) . } . . /// Sets the cell at `(row, column)` to true. Put another way, insert . /// `column` to the bitset for `row`. . /// . /// Returns `true` if this changed the matrix. . pub fn insert(&mut self, row: R, column: C) -> bool { 9,830 ( 0.00%) self.ensure_row(row).insert(column) . } . . /// Sets the cell at `(row, column)` to false. Put another way, delete . /// `column` from the bitset for `row`. Has no effect if `row` does not . /// exist. . /// . /// Returns `true` if this changed the matrix. . pub fn remove(&mut self, row: R, column: C) -> bool { -- line 1233 ---------------------------------------- -- line 1255 ---------------------------------------- . . /// Adds the bits from row `read` to the bits from row `write`, and . /// returns `true` if anything changed. . /// . /// This is used when computing transitive reachability because if . /// you have an edge `write -> read`, because in that case . /// `write` can reach everything that `read` can (and . /// potentially more). 940,912 ( 0.02%) pub fn union_rows(&mut self, read: R, write: R) -> bool { 134,416 ( 0.00%) if read == write || self.row(read).is_none() { . return false; . } . . self.ensure_row(write); 66,913 ( 0.00%) if let (Some(read_row), Some(write_row)) = self.rows.pick2_mut(read, write) { . write_row.union(read_row) . } else { . unreachable!() . } 1,209,744 ( 0.02%) } . . /// Insert all bits in the given row. . pub fn insert_all_into_row(&mut self, row: R) { . self.ensure_row(row).insert_all(); . } . . pub fn rows(&self) -> impl Iterator { . self.rows.indices() -- line 1282 ---------------------------------------- -- line 1284 ---------------------------------------- . . /// Iterates through all the columns set to true in a given row of . 
/// the matrix. . pub fn iter<'a>(&'a self, row: R) -> impl Iterator + 'a { . self.row(row).into_iter().flat_map(|r| r.iter()) . } . . pub fn row(&self, row: R) -> Option<&HybridBitSet> { 302,337 ( 0.01%) if let Some(Some(row)) = self.rows.get(row) { Some(row) } else { None } . } . . /// Interescts `row` with `set`. `set` can be either `BitSet` or . /// `HybridBitSet`. Has no effect if `row` does not exist. . /// . /// Returns true if the row was changed. . pub fn intersect_row(&mut self, row: R, set: &Set) -> bool . where -- line 1300 ---------------------------------------- -- line 1329 ---------------------------------------- . HybridBitSet: BitRelations, . { . self.ensure_row(row).union(set) . } . } . . #[inline] . fn num_words(domain_size: T) -> usize { 440,748 ( 0.01%) (domain_size.index() + WORD_BITS - 1) / WORD_BITS . } . . #[inline] . fn word_index_and_mask(elem: T) -> (usize, Word) { . let elem = elem.index(); 5,128,163 ( 0.09%) let word_index = elem / WORD_BITS; 506,618 ( 0.01%) let mask = 1 << (elem % WORD_BITS); . (word_index, mask) . } . . #[inline] . fn max_bit(word: Word) -> usize { 6 ( 0.00%) WORD_BITS - 1 - word.leading_zeros() as usize . } . . /// Integral type used to represent the bit set. . pub trait FiniteBitSetTy: . BitAnd . + BitAndAssign . + BitOrAssign . + Clone -- line 1358 ---------------------------------------- 2,849,729 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/ena-0.14.0/src/snapshot_vec.rs -------------------------------------------------------------------------------- Ir -- line 42 ---------------------------------------- . . impl Rollback> for SnapshotVecStorage { . fn reverse(&mut self, undo: UndoLog) { . self.values.reverse(undo) . } . } . impl Rollback> for Vec { . fn reverse(&mut self, undo: UndoLog) { 582,732 ( 0.01%) match undo { . NewElem(i) => { . self.pop(); 215,187 ( 0.00%) assert!(Vec::len(self) == i); . } . . SetElem(i, v) => { 185,639 ( 0.00%) self[i] = v; . } . . Other(u) => { . D::reverse(self, u); . } . } . } . } -- line 65 ---------------------------------------- -- line 76 ---------------------------------------- . impl VecLike for Vec . where . D: SnapshotVecDelegate, . { . fn push(&mut self, item: D::Value) { . Vec::push(self, item) . } . fn len(&self) -> usize { 252,665 ( 0.00%) Vec::len(self) . } . fn reserve(&mut self, size: usize) { . Vec::reserve(self, size) . } . } . . impl VecLike for &'_ mut Vec . where . D: SnapshotVecDelegate, . { . fn push(&mut self, item: D::Value) { . Vec::push(self, item) . } . fn len(&self) -> usize { 1,388,723 ( 0.02%) Vec::len(self) . } . fn reserve(&mut self, size: usize) { . Vec::reserve(self, size) . } . } . . #[allow(type_alias_bounds)] . pub type SnapshotVecStorage = -- line 107 ---------------------------------------- -- line 191 ---------------------------------------- . } . . impl, D: SnapshotVecDelegate, U> SnapshotVec { . pub fn len(&self) -> usize { . self.values.len() . } . . pub fn get(&self, index: usize) -> &D::Value { 51,785,190 ( 0.86%) &self.values.as_ref()[index] . } . . /// Returns a mutable pointer into the vec; whatever changes you make here cannot be undone . /// automatically, so you should be sure call `record()` with some sort of suitable undo . /// action. . pub fn get_mut(&mut self, index: usize) -> &mut D::Value { 182,732 ( 0.00%) &mut self.values.as_mut()[index] . } . . /// Reserve space for new values, just like an ordinary vec. . 
pub fn reserve(&mut self, additional: usize) { . // This is not affected by snapshots or anything. . self.values.reserve(additional); . } . } -- line 214 ---------------------------------------- -- line 219 ---------------------------------------- . } . . pub fn record(&mut self, action: D::Undo) { . if self.in_snapshot() { . self.undo_log.push(Other(action)); . } . } . 3,016,839 ( 0.05%) pub fn push(&mut self, elem: D::Value) -> usize { . let len = self.values.len(); . self.values.push(elem); . 410,970 ( 0.01%) if self.in_snapshot() { . self.undo_log.push(NewElem(len)); . } . . len 2,785,424 ( 0.05%) } . . /// Updates the element at the given index. The old value will saved (and perhaps restored) if . /// a snapshot is active. . pub fn set(&mut self, index: usize, new_elem: D::Value) { . let old_elem = mem::replace(&mut self.values.as_mut()[index], new_elem); . if self.undo_log.in_snapshot() { . self.undo_log.push(SetElem(index, old_elem)); . } . } . . /// Updates all elements. Potentially more efficient -- but . /// otherwise equivalent to -- invoking `set` for each element. 21 ( 0.00%) pub fn set_all(&mut self, mut new_elems: impl FnMut(usize) -> D::Value) { 3 ( 0.00%) if !self.undo_log.in_snapshot() { . for (index, slot) in self.values.as_mut().iter_mut().enumerate() { 300 ( 0.00%) *slot = new_elems(index); . } . } else { . for i in 0..self.values.len() { . self.set(i, new_elems(i)); . } . } 24 ( 0.00%) } . 905,251 ( 0.02%) pub fn update(&mut self, index: usize, op: OP) . where . OP: FnOnce(&mut D::Value), . D::Value: Clone, . { 109,853 ( 0.00%) if self.undo_log.in_snapshot() { 397,125 ( 0.01%) let old_elem = self.values.as_mut()[index].clone(); 336 ( 0.00%) self.undo_log.push(SetElem(index, old_elem)); . } 322,046 ( 0.01%) op(&mut self.values.as_mut()[index]); 878,824 ( 0.01%) } . } . . impl SnapshotVec . where . D: SnapshotVecDelegate, . V: VecLike + Rollback>, . L: Snapshots>, . { -- line 279 ---------------------------------------- 342,540 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs -------------------------------------------------------------------------------- Ir -- line 180 ---------------------------------------- . /// assert_eq!(v, &[0.into()]); . /// . /// // Ensure that the last item was dropped. . /// assert!(weak.upgrade().is_none()); . /// ``` . #[stable(feature = "drop_in_place", since = "1.8.0")] . #[lang = "drop_in_place"] . #[allow(unconditional_recursion)] 54,743,198 ( 0.91%) pub unsafe fn drop_in_place(to_drop: *mut T) { . // Code here does not matter - this is replaced by the . // real drop glue by the compiler. . . // SAFETY: see comment above . unsafe { drop_in_place(to_drop) } . } . . /// Creates a null raw pointer. -- line 196 ---------------------------------------- -- line 366 ---------------------------------------- . let mut tmp = MaybeUninit::::uninit(); . . // Perform the swap . // SAFETY: the caller must guarantee that `x` and `y` are . // valid for writes and properly aligned. `tmp` cannot be . // overlapping either `x` or `y` because `tmp` was just allocated . // on the stack as a separate allocated object. . unsafe { 39 ( 0.00%) copy_nonoverlapping(x, tmp.as_mut_ptr(), 1); 39 ( 0.00%) copy(y, x, 1); // `x` and `y` may overlap . copy_nonoverlapping(tmp.as_ptr(), y, 1); . } . } . . /// Swaps `count * size_of::()` bytes between the two regions of memory . /// beginning at `x` and `y`. 
The two regions must *not* overlap. . /// . /// # Safety -- line 383 ---------------------------------------- -- line 448 ---------------------------------------- . return; . } . } . . // Direct swapping, for the cases not going through the block optimization. . // SAFETY: the caller must guarantee that `x` and `y` are valid . // for writes, properly aligned, and non-overlapping. . unsafe { 30 ( 0.00%) let z = read(x); . copy_nonoverlapping(y, x, 1); . write(y, z); . } . } . . #[inline] . #[rustc_const_unstable(feature = "const_swap", issue = "83163")] . const unsafe fn swap_nonoverlapping_bytes(x: *mut u8, y: *mut u8, len: usize) { -- line 464 ---------------------------------------- -- line 693 ---------------------------------------- . let mut tmp = MaybeUninit::::uninit(); . // SAFETY: the caller must guarantee that `src` is valid for reads. . // `src` cannot overlap `tmp` because `tmp` was just allocated on . // the stack as a separate allocated object. . // . // Also, since we just wrote a valid value into `tmp`, it is guaranteed . // to be properly initialized. . unsafe { 1,892,789 ( 0.03%) copy_nonoverlapping(src, tmp.as_mut_ptr(), 1); 6,984,641 ( 0.12%) tmp.assume_init() . } . } . . /// Reads the value from `src` without moving it. This leaves the . /// memory in `src` unchanged. . /// . /// Unlike [`read`], `read_unaligned` works with unaligned pointers. . /// -- line 710 ---------------------------------------- -- line 884 ---------------------------------------- . #[rustc_const_unstable(feature = "const_intrinsic_copy", issue = "80697")] . fn copy_nonoverlapping(src: *const T, dst: *mut T, count: usize); . } . . // SAFETY: the caller must guarantee that `dst` is valid for writes. . // `dst` cannot overlap `src` because the caller has mutable access . // to `dst` while `src` is owned by this function. . unsafe { 99,069,496 ( 1.64%) copy_nonoverlapping(&src as *const T, dst, 1); . intrinsics::forget(src); . } . } . . /// Overwrites a memory location with the given value without reading or . /// dropping the old value. . /// . /// Unlike [`write()`], the pointer may be unaligned. -- line 900 ---------------------------------------- -- line 1206 ---------------------------------------- . if stride == 1 { . // `stride == 1` case can be computed more simply through `-p (mod a)`, but doing so . // inhibits LLVM's ability to select instructions like `lea`. Instead we compute . // . // round_up_to_next_alignment(p, a) - p . // . // which distributes operations around the load-bearing, but pessimizing `and` sufficiently . // for LLVM to be able to utilize the various optimizations it knows about. 4,465 ( 0.00%) return wrapping_sub( 17,744 ( 0.00%) wrapping_add(p as usize, a_minus_one) & wrapping_sub(0, a), . p as usize, . ); . } . . let pmoda = p as usize & a_minus_one; . if pmoda == 0 { . // Already aligned. Yay! . return 0; -- line 1223 ---------------------------------------- -- line 1348 ---------------------------------------- . /// assert!(std::ptr::eq( . /// &wrapper as &dyn Trait as *const dyn Trait as *const u8, . /// &wrapper.member as &dyn Trait as *const dyn Trait as *const u8, . /// )); . /// ``` . #[stable(feature = "ptr_eq", since = "1.17.0")] . #[inline] . pub fn eq(a: *const T, b: *const T) -> bool { 2,866,009 ( 0.05%) a == b . } . . /// Hash a raw pointer. . /// . /// This can be used to hash a `&T` reference (which coerces to `*const T` implicitly) . /// by its address rather than the value it points to . /// (which is what the `Hash for &T` implementation does). . 
/// -- line 1364 ---------------------------------------- 9,355,840 ( 0.16%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/smallvec-1.7.0/src/lib.rs -------------------------------------------------------------------------------- Ir -- line 294 ---------------------------------------- . #[allow(deprecated)] . impl From for CollectionAllocErr { . fn from(_: LayoutErr) -> Self { . CollectionAllocErr::CapacityOverflow . } . } . . fn infallible(result: Result) -> T { 3,055,175 ( 0.05%) match result { . Ok(x) => x, . Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"), . Err(CollectionAllocErr::AllocErr { layout }) => alloc::alloc::handle_alloc_error(layout), . } . } . . /// FIXME: use `Layout::array` when we require a Rust version where it’s stable . /// https://github.com/rust-lang/rust/issues/55724 -- line 310 ---------------------------------------- -- line 422 ---------------------------------------- . #[cfg(feature = "union")] . impl SmallVecData { . #[inline] . unsafe fn inline(&self) -> *const A::Item { . self.inline.as_ptr() as *const A::Item . } . #[inline] . unsafe fn inline_mut(&mut self) -> *mut A::Item { 205,300 ( 0.00%) self.inline.as_mut_ptr() as *mut A::Item . } . #[inline] . fn from_inline(inline: MaybeUninit) -> SmallVecData { . SmallVecData { . inline: core::mem::ManuallyDrop::new(inline), . } . } . #[inline] . unsafe fn into_inline(self) -> MaybeUninit { . core::mem::ManuallyDrop::into_inner(self.inline) . } . #[inline] . unsafe fn heap(&self) -> (*mut A::Item, usize) { 15,786 ( 0.00%) self.heap . } . #[inline] . unsafe fn heap_mut(&mut self) -> &mut (*mut A::Item, usize) { . &mut self.heap . } . #[inline] . fn from_heap(ptr: *mut A::Item, len: usize) -> SmallVecData { . SmallVecData { heap: (ptr, len) } -- line 452 ---------------------------------------- -- line 557 ---------------------------------------- . #[inline] . pub fn new() -> SmallVec { . // Try to detect invalid custom implementations of `Array`. Hopefully, . // this check should be optimized away entirely for valid ones. . assert!( . mem::size_of::() == A::size() * mem::size_of::() . && mem::align_of::() >= mem::align_of::() . ); 1,498,564 ( 0.02%) SmallVec { . capacity: 0, . data: SmallVecData::from_inline(MaybeUninit::uninit()), . } . } . . /// Construct an empty vector with enough capacity pre-allocated to store at least `n` . /// elements. . /// -- line 573 ---------------------------------------- -- line 635 ---------------------------------------- . /// let small_vec: SmallVec<_> = SmallVec::from_buf(buf); . /// . /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]); . /// ``` . #[inline] . pub fn from_buf(buf: A) -> SmallVec { . SmallVec { . capacity: A::size(), 27,629 ( 0.00%) data: SmallVecData::from_inline(MaybeUninit::new(buf)), . } . } . . /// Constructs a new `SmallVec` on the stack from an `A` without . /// copying elements. Also sets the length, which must be less or . /// equal to the size of `buf`. . /// . /// ```rust -- line 651 ---------------------------------------- -- line 653 ---------------------------------------- . /// . /// let buf = [1, 2, 3, 4, 5, 0, 0, 0]; . /// let small_vec: SmallVec<_> = SmallVec::from_buf_and_len(buf, 5); . /// . /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]); . /// ``` . #[inline] . 
pub fn from_buf_and_len(buf: A, len: usize) -> SmallVec { 12 ( 0.00%) assert!(len <= A::size()); 36 ( 0.00%) unsafe { SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), len) } . } . . /// Constructs a new `SmallVec` on the stack from an `A` without . /// copying elements. Also sets the length. The user is responsible . /// for ensuring that `len <= A::size()`. . /// . /// ```rust . /// use smallvec::SmallVec; -- line 670 ---------------------------------------- -- line 674 ---------------------------------------- . /// let small_vec: SmallVec<_> = unsafe { . /// SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), 5) . /// }; . /// . /// assert_eq!(&*small_vec, &[1, 2, 3, 4, 5]); . /// ``` . #[inline] . pub unsafe fn from_buf_and_len_unchecked(buf: MaybeUninit, len: usize) -> SmallVec { 6 ( 0.00%) SmallVec { . capacity: len, . data: SmallVecData::from_inline(buf), . } . } . . /// Sets the length of a vector. . /// . /// This will explicitly set the size of the vector, without actually . /// modifying its buffers, so it is up to the caller to ensure that the . /// vector is actually the specified size. . pub unsafe fn set_len(&mut self, new_len: usize) { . let (_, len_ptr, _) = self.triple_mut(); 826,810 ( 0.01%) *len_ptr = new_len; . } . . /// The maximum number of elements this vector can hold inline . #[inline] . fn inline_capacity() -> usize { . if mem::size_of::() > 0 { 296 ( 0.00%) A::size() . } else { . // For zero-size items code like `ptr.add(offset)` always returns the same pointer. . // Therefore all items are at the same address, . // and any array size has capacity for infinitely many items. . // The capacity is limited by the bit width of the length field. . // . // `Vec` also does this: . // https://github.com/rust-lang/rust/blob/1.44.0/src/liballoc/raw_vec.rs#L186 -- line 710 ---------------------------------------- -- line 725 ---------------------------------------- . #[inline] . pub fn len(&self) -> usize { . self.triple().1 . } . . /// Returns `true` if the vector is empty . #[inline] . pub fn is_empty(&self) -> bool { 755,931 ( 0.01%) self.len() == 0 . } . . /// The number of items the vector can hold without reallocating . #[inline] . pub fn capacity(&self) -> usize { . self.triple().2 . } . . /// Returns a tuple with (data ptr, len, capacity) . /// Useful to get all SmallVec properties with a single check of the current storage variant. . #[inline] . fn triple(&self) -> (*const A::Item, usize, usize) { . unsafe { 25,306,520 ( 0.42%) if self.spilled() { . let (ptr, len) = self.data.heap(); . (ptr, len, self.capacity) . } else { . (self.data.inline(), self.capacity, Self::inline_capacity()) . } . } . } . . /// Returns a tuple with (data ptr, len ptr, capacity) . #[inline] . fn triple_mut(&mut self) -> (*mut A::Item, &mut usize, usize) { . unsafe { 21,768,206 ( 0.36%) if self.spilled() { 106,156 ( 0.00%) let &mut (ptr, ref mut len_ptr) = self.data.heap_mut(); . (ptr, len_ptr, self.capacity) . } else { . ( . self.data.inline_mut(), . &mut self.capacity, . Self::inline_capacity(), . ) . } . } . } . . /// Returns `true` if the data has spilled into a separate heap-allocated buffer. . #[inline] . pub fn spilled(&self) -> bool { 29,069,770 ( 0.48%) self.capacity > Self::inline_capacity() . } . . /// Creates a draining iterator that removes the specified range in the vector . /// and yields the removed items. . /// . /// Note 1: The element range is removed even if the iterator is only . /// partially consumed or not consumed at all. . 
/// -- line 784 ---------------------------------------- -- line 821 ---------------------------------------- . iter: range_slice.iter(), . vec: NonNull::from(self), . } . } . } . . /// Append an item to the vector. . #[inline] 255,455 ( 0.00%) pub fn push(&mut self, value: A::Item) { . unsafe { . let (mut ptr, mut len, cap) = self.triple_mut(); 1,777,523 ( 0.03%) if *len == cap { 358 ( 0.00%) self.reserve(1); 30,843 ( 0.00%) let &mut (heap_ptr, ref mut heap_len) = self.data.heap_mut(); . ptr = heap_ptr; . len = heap_len; . } 26,310 ( 0.00%) ptr::write(ptr.add(*len), value); 3,361,354 ( 0.06%) *len += 1; . } 204,364 ( 0.00%) } . . /// Remove an item from the end of the vector and return it, or None if empty. . #[inline] . pub fn pop(&mut self) -> Option { . unsafe { . let (ptr, len_ptr, _) = self.triple_mut(); 836,845 ( 0.01%) if *len_ptr == 0 { . return None; . } 210,307 ( 0.00%) let last_index = *len_ptr - 1; 210,648 ( 0.00%) *len_ptr = last_index; 206,473 ( 0.00%) Some(ptr::read(ptr.add(last_index))) . } 101 ( 0.00%) } . . /// Moves all the elements of `other` into `self`, leaving `other` empty. . /// . /// # Example . /// . /// ``` . /// # use smallvec::{SmallVec, smallvec}; . /// let mut v0: SmallVec<[u8; 16]> = smallvec![1, 2, 3]; -- line 863 ---------------------------------------- -- line 879 ---------------------------------------- . /// or if the capacity computation overflows `usize`. . pub fn grow(&mut self, new_cap: usize) { . infallible(self.try_grow(new_cap)) . } . . /// Re-allocate to set the capacity to `max(new_cap, inline_size())`. . /// . /// Panics if `new_cap` is less than the vector's length 14,109 ( 0.00%) pub fn try_grow(&mut self, new_cap: usize) -> Result<(), CollectionAllocErr> { . unsafe { 1,403 ( 0.00%) let (ptr, &mut len, cap) = self.triple_mut(); . let unspilled = !self.spilled(); 48,792 ( 0.00%) assert!(new_cap >= len); 48,792 ( 0.00%) if new_cap <= self.inline_size() { . if unspilled { . return Ok(()); . } . self.data = SmallVecData::from_inline(MaybeUninit::uninit()); . ptr::copy_nonoverlapping(ptr, self.data.inline_mut(), len); . self.capacity = len; . deallocate(ptr, cap); 48,792 ( 0.00%) } else if new_cap != cap { 45,818 ( 0.00%) let layout = layout_array::(new_cap)?; . debug_assert!(layout.size() > 0); . let new_alloc; 24,396 ( 0.00%) if unspilled { . new_alloc = NonNull::new(alloc::alloc::alloc(layout)) . .ok_or(CollectionAllocErr::AllocErr { layout })? . .cast() . .as_ptr(); . ptr::copy_nonoverlapping(ptr, new_alloc, len); . } else { . // This should never fail since the same succeeded . // when previously allocating `ptr`. 11,073 ( 0.00%) let old_layout = layout_array::(cap)?; . . let new_ptr = alloc::alloc::realloc(ptr as *mut u8, old_layout, layout.size()); . new_alloc = NonNull::new(new_ptr) . .ok_or(CollectionAllocErr::AllocErr { layout })? . .cast() . .as_ptr(); . } 48,792 ( 0.00%) self.data = SmallVecData::from_heap(new_alloc, len); 67,798 ( 0.00%) self.capacity = new_cap; . } . Ok(()) . } 15,512 ( 0.00%) } . . /// Reserve capacity for `additional` more elements to be inserted. . /// . /// May reserve more space to avoid frequent reallocations. . /// . /// Panics if the capacity computation overflows `usize`. . #[inline] 274,599 ( 0.00%) pub fn reserve(&mut self, additional: usize) { 3,711,608 ( 0.06%) infallible(self.try_reserve(additional)) 311,888 ( 0.01%) } . . /// Reserve capacity for `additional` more elements to be inserted. . /// . /// May reserve more space to avoid frequent reallocations. 
11,139,562 ( 0.18%) pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { . // prefer triple_mut() even if triple() would work . // so that the optimizer removes duplicated calls to it . // from callers like insert() 21,405 ( 0.00%) let (_, &mut len, cap) = self.triple_mut(); 8,707,224 ( 0.14%) if cap - len >= additional { . return Ok(()); . } . let new_cap = len . .checked_add(additional) . .and_then(usize::checked_next_power_of_two) . .ok_or(CollectionAllocErr::CapacityOverflow)?; 3,079 ( 0.00%) self.try_grow(new_cap) 13,741,068 ( 0.23%) } . . /// Reserve the minimum capacity for `additional` more elements to be inserted. . /// . /// Panics if the new capacity overflows `usize`. . pub fn reserve_exact(&mut self, additional: usize) { . infallible(self.try_reserve_exact(additional)) . } . . /// Reserve the minimum capacity for `additional` more elements to be inserted. . pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> { . let (_, &mut len, cap) = self.triple_mut(); 208,325 ( 0.00%) if cap - len >= additional { . return Ok(()); . } . let new_cap = len . .checked_add(additional) . .ok_or(CollectionAllocErr::CapacityOverflow)?; 433 ( 0.00%) self.try_grow(new_cap) 433 ( 0.00%) } . . /// Shrink the capacity of the vector as much as possible. . /// . /// When possible, this will move data from an external heap buffer to the vector's inline . /// storage. . pub fn shrink_to_fit(&mut self) { . if !self.spilled() { . return; -- line 981 ---------------------------------------- -- line 999 ---------------------------------------- . /// If `len` is greater than or equal to the vector's current length, this has no . /// effect. . /// . /// This does not re-allocate. If you want the vector's capacity to shrink, call . /// `shrink_to_fit` after truncating. . pub fn truncate(&mut self, len: usize) { . unsafe { . let (ptr, len_ptr, _) = self.triple_mut(); 116,548 ( 0.00%) while len < *len_ptr { . let last_index = *len_ptr - 1; . *len_ptr = last_index; . ptr::drop_in_place(ptr.add(last_index)); . } . } . } . . /// Extracts a slice containing the entire vector. -- line 1015 ---------------------------------------- -- line 1044 ---------------------------------------- . pub fn clear(&mut self) { . self.truncate(0); . } . . /// Remove and return the element at position `index`, shifting all elements after it to the . /// left. . /// . /// Panics if `index` is out of bounds. 2,476 ( 0.00%) pub fn remove(&mut self, index: usize) -> A::Item { . unsafe { . let (mut ptr, len_ptr, _) = self.triple_mut(); 11,547 ( 0.00%) let len = *len_ptr; 49,656 ( 0.00%) assert!(index < len); 73,660 ( 0.00%) *len_ptr = len - 1; . ptr = ptr.add(index); . let item = ptr::read(ptr); 33,104 ( 0.00%) ptr::copy(ptr.add(1), ptr, len - index - 1); . item . } 3,095 ( 0.00%) } . . /// Insert an element at position `index`, shifting all elements after it to the right. . /// . /// Panics if `index` is out of bounds. 190,199 ( 0.00%) pub fn insert(&mut self, index: usize, element: A::Item) { . self.reserve(1); . . unsafe { . let (mut ptr, len_ptr, _) = self.triple_mut(); 17,349 ( 0.00%) let len = *len_ptr; 34,640 ( 0.00%) assert!(index <= len); 69,280 ( 0.00%) *len_ptr = len + 1; . ptr = ptr.add(index); 34,640 ( 0.00%) ptr::copy(ptr, ptr.add(1), len - index); . ptr::write(ptr, element); . } 121,038 ( 0.00%) } . . /// Insert multiple elements at position `index`, shifting all following elements toward the . /// back. . 
pub fn insert_many>(&mut self, index: usize, iterable: I) { . let mut iter = iterable.into_iter(); . if index == self.len() { . return self.extend(iter); . } -- line 1088 ---------------------------------------- -- line 1201 ---------------------------------------- . } . } . . /// Retains only the elements specified by the predicate. . /// . /// In other words, remove all elements `e` such that `f(&e)` returns `false`. . /// This method operates in place and preserves the order of the retained . /// elements. 263,565 ( 0.00%) pub fn retain bool>(&mut self, mut f: F) { . let mut del = 0; . let len = self.len(); . for i in 0..len { 133,636 ( 0.00%) if !f(&mut self[i]) { 42 ( 0.00%) del += 1; 38,952 ( 0.00%) } else if del > 0 { 45 ( 0.00%) self.swap(i - del, i); . } . } 58,570 ( 0.00%) self.truncate(len - del); 234,280 ( 0.00%) } . . /// Removes consecutive duplicate elements. . pub fn dedup(&mut self) . where . A::Item: PartialEq, . { . self.dedup_by(|a, b| a == b); . } -- line 1228 ---------------------------------------- -- line 1418 ---------------------------------------- . capacity: len, . data: SmallVecData::from_inline(unsafe { . let mut data: MaybeUninit = MaybeUninit::uninit(); . ptr::copy_nonoverlapping( . slice.as_ptr(), . data.as_mut_ptr() as *mut A::Item, . len, . ); 2,531 ( 0.00%) data . }), . } . } else { . let mut b = slice.to_vec(); . let (ptr, cap) = (b.as_mut_ptr(), b.capacity()); . mem::forget(b); . SmallVec { . capacity: cap, -- line 1434 ---------------------------------------- -- line 1436 ---------------------------------------- . } . } . } . . /// Copy elements from a slice into the vector at position `index`, shifting any following . /// elements toward the back. . /// . /// For slices of `Copy` types, this is more efficient than `insert`. 7,621,493 ( 0.13%) pub fn insert_from_slice(&mut self, index: usize, slice: &[A::Item]) { . self.reserve(slice.len()); . . let len = self.len(); 1,942,113 ( 0.03%) assert!(index <= len); . . unsafe { . let slice_ptr = slice.as_ptr(); . let ptr = self.as_mut_ptr().add(index); . ptr::copy(ptr, ptr.add(slice.len()), len - index); . ptr::copy_nonoverlapping(slice_ptr, ptr, slice.len()); 1,294,742 ( 0.02%) self.set_len(len + slice.len()); . } 5,178,968 ( 0.09%) } . . /// Copy elements from a slice and append them to the vector. . /// . /// For slices of `Copy` types, this is more efficient than `extend`. . #[inline] . pub fn extend_from_slice(&mut self, slice: &[A::Item]) { . let len = self.len(); 1,355,271 ( 0.02%) self.insert_from_slice(len, slice); . } . } . . impl SmallVec . where . A::Item: Clone, . { . /// Resizes the vector so that its length is equal to `len`. -- line 1473 ---------------------------------------- -- line 1522 ---------------------------------------- . } . } . } . . impl ops::DerefMut for SmallVec { . #[inline] . fn deref_mut(&mut self) -> &mut [A::Item] { . unsafe { 104,056 ( 0.00%) let (ptr, &mut len, _) = self.triple_mut(); . slice::from_raw_parts_mut(ptr, len) . } . } . } . . impl AsRef<[A::Item]> for SmallVec { . #[inline] . fn as_ref(&self) -> &[A::Item] { -- line 1538 ---------------------------------------- -- line 1716 ---------------------------------------- . SmallVec::extend_from_slice(self, other) . } . } . . impl FromIterator for SmallVec { . #[inline] . fn from_iter>(iterable: I) -> SmallVec { . let mut v = SmallVec::new(); 3,036,781 ( 0.05%) v.extend(iterable); . v . } . } . . 
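.     // The `extend` below reserves the iterator's lower size hint up front,
.     // fills the reserved slots with raw `ptr::write`s while a `SetLenOnDrop`
.     // guard keeps the length consistent, and only falls back to `push`
.     // (which may reallocate) once that reserved capacity is used up.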
impl Extend for SmallVec { 10,072,428 ( 0.17%) fn extend>(&mut self, iterable: I) { 430,616 ( 0.01%) let mut iter = iterable.into_iter(); . let (lower_size_bound, _) = iter.size_hint(); 60,013 ( 0.00%) self.reserve(lower_size_bound); . . unsafe { . let (ptr, len_ptr, cap) = self.triple_mut(); . let mut len = SetLenOnDrop::new(len_ptr); 4,889,617 ( 0.08%) while len.get() < cap { 434,560 ( 0.01%) if let Some(out) = iter.next() { 8 ( 0.00%) ptr::write(ptr.add(len.get()), out); . len.increment_len(1); . } else { . return; . } . } . } . 456,024 ( 0.01%) for elem in iter { 202,645 ( 0.00%) self.push(elem); . } 8,359,999 ( 0.14%) } . } . . impl fmt::Debug for SmallVec . where . A::Item: fmt::Debug, . { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . f.debug_list().entries(self.iter()).finish() -- line 1759 ---------------------------------------- -- line 1764 ---------------------------------------- . #[inline] . fn default() -> SmallVec { . SmallVec::new() . } . } . . #[cfg(feature = "may_dangle")] . unsafe impl<#[may_dangle] A: Array> Drop for SmallVec { 1,328,590 ( 0.02%) fn drop(&mut self) { . unsafe { 4,107,269 ( 0.07%) if self.spilled() { . let (ptr, len) = self.data.heap(); . Vec::from_raw_parts(ptr, len, self.capacity); . } else { . ptr::drop_in_place(&mut self[..]); . } . } 1,489,436 ( 0.02%) } . } . . #[cfg(not(feature = "may_dangle"))] . impl Drop for SmallVec { . fn drop(&mut self) { . unsafe { . if self.spilled() { . let (ptr, len) = self.data.heap(); -- line 1789 ---------------------------------------- -- line 1888 ---------------------------------------- . A::Item: Clone, . { . fn clone(&self) -> IntoIter { . SmallVec::from(self.as_slice()).into_iter() . } . } . . impl Drop for IntoIter { 3,685 ( 0.00%) fn drop(&mut self) { 316,934 ( 0.01%) for _ in self {} 3,416 ( 0.00%) } . } . . impl Iterator for IntoIter { . type Item = A::Item; . . #[inline] . fn next(&mut self) -> Option { 1,235,530 ( 0.02%) if self.current == self.end { . None . } else { . unsafe { . let current = self.current; 633,004 ( 0.01%) self.current += 1; 20,452 ( 0.00%) Some(ptr::read(self.data.as_ptr().add(current))) . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 31,059 ( 0.00%) let size = self.end - self.current; . (size, Some(size)) . } . } . . impl DoubleEndedIterator for IntoIter { . #[inline] . fn next_back(&mut self) -> Option { . if self.current == self.end { -- line 1927 ---------------------------------------- -- line 1955 ---------------------------------------- . impl IntoIterator for SmallVec { . type IntoIter = IntoIter; . type Item = A::Item; . fn into_iter(mut self) -> Self::IntoIter { . unsafe { . // Set SmallVec len to zero as `IntoIter` drop handles dropping of the elements . let len = self.len(); . self.set_len(0); 73,545 ( 0.00%) IntoIter { 685,736 ( 0.01%) data: self, . current: 0, . end: len, . } . } . } . } . . impl<'a, A: Array> IntoIterator for &'a SmallVec { -- line 1972 ---------------------------------------- -- line 2000 ---------------------------------------- . len: &'a mut usize, . local_len: usize, . } . . impl<'a> SetLenOnDrop<'a> { . #[inline] . fn new(len: &'a mut usize) -> Self { . SetLenOnDrop { 1,072,199 ( 0.02%) local_len: *len, . len, . } . } . . #[inline] . fn get(&self) -> usize { . self.local_len . } . . #[inline] . fn increment_len(&mut self, increment: usize) { 863,764 ( 0.01%) self.local_len += increment; . } . } . . impl<'a> Drop for SetLenOnDrop<'a> { . #[inline] . 
fn drop(&mut self) { 1,100,456 ( 0.02%) *self.len = self.local_len; . } . } . . #[cfg(feature = "const_new")] . impl SmallVec<[T; N]> { . /// Construct an empty vector. . /// . /// This is a `const` version of [`SmallVec::new`] that is enabled by the feature `const_new`, with the limitation that it only works for arrays. -- line 2035 ---------------------------------------- 12,937,683 ( 0.21%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs -------------------------------------------------------------------------------- Ir -- line 111 ---------------------------------------- . const EMPTY: u8 = 0b1111_1111; . . /// Control byte value for a deleted bucket. . const DELETED: u8 = 0b1000_0000; . . /// Checks whether a control byte represents a full bucket (top bit is clear). . #[inline] . fn is_full(ctrl: u8) -> bool { 5,403,784 ( 0.09%) ctrl & 0x80 == 0 . } . . /// Checks whether a control byte represents a special value (top bit is set). . #[inline] . fn is_special(ctrl: u8) -> bool { . ctrl & 0x80 != 0 . } . . /// Checks whether a special control value is EMPTY (just check 1 bit). . #[inline] . fn special_is_empty(ctrl: u8) -> bool { . debug_assert!(is_special(ctrl)); 305,303 ( 0.01%) ctrl & 0x01 != 0 . } . . /// Primary hash function, used to select the initial bucket to probe from. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h1(hash: u64) -> usize { . // On 32-bit platforms we simply ignore the higher hash bits. . hash as usize -- line 140 ---------------------------------------- -- line 143 ---------------------------------------- . /// Secondary hash function, saved in the low 7 bits of the control byte. . #[inline] . #[allow(clippy::cast_possible_truncation)] . fn h2(hash: u64) -> u8 { . // Grab the top 7 bits of the hash. While the hash is normally a full 64-bit . // value, some hash functions (such as FxHash) produce a usize result . // instead, which means that the top 32 bits are 0 on 32-bit platforms. . let hash_len = usize::min(mem::size_of::(), mem::size_of::()); 101,315,284 ( 1.68%) let top7 = hash >> (hash_len * 8 - 7); . (top7 & 0x7f) as u8 // truncation . } . . /// Probe sequence based on triangular numbers, which is guaranteed (since our . /// table size is a power of two) to visit every group of elements exactly once. . /// . /// A triangular probe has us jump by 1 more group every time. So first we . /// jump by 1 group (meaning we just continue our linear scan), then 2 groups -- line 159 ---------------------------------------- -- line 170 ---------------------------------------- . #[inline] . fn move_next(&mut self, bucket_mask: usize) { . // We should have found an empty bucket by now and ended the probe. . debug_assert!( . self.stride <= bucket_mask, . "Went past end of probe sequence" . ); . 702,064 ( 0.01%) self.stride += Group::WIDTH; 702,064 ( 0.01%) self.pos += self.stride; 606,406 ( 0.01%) self.pos &= bucket_mask; . } . } . . /// Returns the number of buckets needed to hold the given number of items, . /// taking the maximum load factor into account. . /// . /// Returns `None` if an overflow occurs. . // Workaround for emscripten bug emscripten-core/emscripten-fastcomp#258 . #[cfg_attr(target_os = "emscripten", inline(never))] . #[cfg_attr(not(target_os = "emscripten"), inline)] . fn capacity_to_buckets(cap: usize) -> Option { . debug_assert_ne!(cap, 0); . . 
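.     // The sizing below keeps at most 7/8 of the buckets occupied: the
.     // requested capacity is scaled by 8/7 and rounded up to a power of two.
.     // For example, a requested capacity of 28 becomes 28 * 8 / 7 = 32
.     // buckets, and bucket_mask_to_capacity(31) = ((31 + 1) / 8) * 7 = 28
.     // reports the same 28 usable slots back, preserving the 87.5% load
.     // factor noted further down.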
// For small tables we require at least 1 empty bucket so that lookups are . // guaranteed to terminate if an element doesn't exist in the table. 323,002 ( 0.01%) if cap < 8 { . // We don't bother with a table size of 2 buckets since that can only . // hold a single element. Instead we skip directly to a 4 bucket table . // which can hold 3 elements. 705,870 ( 0.01%) return Some(if cap < 4 { 4 } else { 8 }); . } . . // Otherwise require 1/8 buckets to be empty (87.5% load) . // . // Be careful when modifying this, calculate_layout relies on the . // overflow check here. 121,962 ( 0.00%) let adjusted_cap = cap.checked_mul(8)? / 7; . . // Any overflows will have been caught by the checked_mul. Also, any . // rounding errors from the division above will be cleaned up by . // next_power_of_two (which can't overflow because of the previous division). . Some(adjusted_cap.next_power_of_two()) . } . . /// Returns the maximum effective capacity for the given bucket mask, taking . /// the maximum load factor into account. . #[inline] . fn bucket_mask_to_capacity(bucket_mask: usize) -> usize { 855,070 ( 0.01%) if bucket_mask < 8 { . // For tables with 1/2/4/8 buckets, we always reserve one empty slot. . // Keep in mind that the bucket mask is one less than the bucket count. . bucket_mask . } else { . // For larger tables we reserve 12.5% of the slots as empty. 207,270 ( 0.00%) ((bucket_mask + 1) / 8) * 7 . } . } . . /// Helper which allows the max calculation for ctrl_align to be statically computed for each T . /// while keeping the rest of `calculate_layout_for` independent of `T` . #[derive(Copy, Clone)] . struct TableLayout { . size: usize, -- line 233 ---------------------------------------- -- line 246 ---------------------------------------- . . #[inline] . fn calculate_layout_for(self, buckets: usize) -> Option<(Layout, usize)> { . debug_assert!(buckets.is_power_of_two()); . . let TableLayout { size, ctrl_align } = self; . // Manual layout calculation since Layout methods are not yet stable. . let ctrl_offset = 681,201 ( 0.01%) size.checked_mul(buckets)?.checked_add(ctrl_align - 1)? & !(ctrl_align - 1); 893,407 ( 0.01%) let len = ctrl_offset.checked_add(buckets + Group::WIDTH)?; . . Some(( . unsafe { Layout::from_size_align_unchecked(len, ctrl_align) }, . ctrl_offset, . )) . } . } . -- line 263 ---------------------------------------- -- line 337 ---------------------------------------- . } . } . #[cfg_attr(feature = "inline-more", inline)] . pub unsafe fn drop(&self) { . self.as_ptr().drop_in_place(); . } . #[inline] . pub unsafe fn read(&self) -> T { 10,362 ( 0.00%) self.as_ptr().read() . } . #[inline] . pub unsafe fn write(&self, val: T) { . self.as_ptr().write(val); . } . #[inline] . pub unsafe fn as_ref<'a>(&self) -> &'a T { . &*self.as_ptr() -- line 353 ---------------------------------------- -- line 422 ---------------------------------------- . /// Creates a new empty hash table without allocating any memory, using the . /// given allocator. . /// . /// In effect this returns a table with exactly 1 bucket. However we can . /// leave the data pointer dangling since that bucket is never written to . /// due to our load factor forcing us to always have at least 1 free bucket. . #[inline] . pub fn new_in(alloc: A) -> Self { 1,086 ( 0.00%) Self { . table: RawTableInner::new_in(alloc), . marker: PhantomData, . } . } . . /// Allocates a new hash table with the given number of buckets. . /// . /// The control bytes are left uninitialized. 
-- line 438 ---------------------------------------- -- line 440 ---------------------------------------- . unsafe fn new_uninitialized( . alloc: A, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . Ok(Self { 415 ( 0.00%) table: RawTableInner::new_uninitialized( . alloc, . TableLayout::new::(), . buckets, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 456 ---------------------------------------- -- line 458 ---------------------------------------- . /// Attempts to allocate a new hash table with at least enough capacity . /// for inserting the given number of elements without reallocating. . fn fallible_with_capacity( . alloc: A, . capacity: usize, . fallibility: Fallibility, . ) -> Result { . Ok(Self { 27,976 ( 0.00%) table: RawTableInner::fallible_with_capacity( . alloc, . TableLayout::new::(), . capacity, . fallibility, . )?, . marker: PhantomData, . }) . } -- line 474 ---------------------------------------- -- line 527 ---------------------------------------- . debug_assert_ne!(self.table.bucket_mask, 0); . debug_assert!(index < self.buckets()); . Bucket::from_base_index(self.data_end(), index) . } . . /// Erases an element from the table without dropping it. . #[cfg_attr(feature = "inline-more", inline)] . #[deprecated(since = "0.8.1", note = "use erase or remove instead")] 73,904 ( 0.00%) pub unsafe fn erase_no_drop(&mut self, item: &Bucket) { 73,904 ( 0.00%) let index = self.bucket_index(item); . self.table.erase(index); 147,808 ( 0.00%) } . . /// Erases an element from the table, dropping it in place. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn erase(&mut self, item: Bucket) { . // Erase the element from the table first since drop might panic. 57,874 ( 0.00%) self.erase_no_drop(&item); . item.drop(); . } . . /// Finds and erases an element from the table, dropping it in place. . /// Returns true if an element was found. . #[cfg(feature = "raw")] . #[cfg_attr(feature = "inline-more", inline)] . pub fn erase_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> bool { -- line 554 ---------------------------------------- -- line 563 ---------------------------------------- . } . } . . /// Removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] . #[allow(clippy::needless_pass_by_value)] . #[allow(deprecated)] . pub unsafe fn remove(&mut self, item: Bucket) -> T { 89,934 ( 0.00%) self.erase_no_drop(&item); 360 ( 0.00%) item.read() . } . . /// Finds and removes an element from the table, returning it. . #[cfg_attr(feature = "inline-more", inline)] 1,807,768 ( 0.03%) pub fn remove_entry(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option { . // Avoid `Option::map` because it bloats LLVM IR. 2,086 ( 0.00%) match self.find(hash, eq) { 43,662 ( 0.00%) Some(bucket) => Some(unsafe { self.remove(bucket) }), 515,667 ( 0.01%) None => None, . } 2,483,877 ( 0.04%) } . . /// Marks all table buckets as empty without dropping their contents. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear_no_drop(&mut self) { . self.table.clear_no_drop(); . } . . /// Removes all elements from the table without freeing the backing memory. . #[cfg_attr(feature = "inline-more", inline)] . pub fn clear(&mut self) { . // Ensure that the table is reset even if one of the drops panic . let mut self_ = guard(self, |self_| self_.clear_no_drop()); . unsafe { 1 ( 0.00%) self_.drop_elements(); . } . } . 
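The `clear` method just above wraps the table in `guard(self, |self_| self_.clear_no_drop())` so the control bytes are reset even if dropping an element panics. Below is a minimal, self-contained sketch of that scope-guard pattern, using a hypothetical `Guard` type for illustration rather than the library's actual `guard` helper:

// Minimal scope-guard sketch: runs a closure on drop, so cleanup happens even
// if the code in between panics. Hypothetical type, for illustration only.
struct Guard<T, F: FnMut(&mut T)> {
    value: T,
    on_drop: F,
}

impl<T, F: FnMut(&mut T)> Drop for Guard<T, F> {
    fn drop(&mut self) {
        (self.on_drop)(&mut self.value);
    }
}

fn main() {
    let mut table = vec![1, 2, 3];
    {
        let _g = Guard {
            value: &mut table,
            on_drop: |t: &mut &mut Vec<i32>| t.clear(),
        };
        // Work that might panic would go here; the guard's Drop still runs
        // during unwinding and resets the table.
    }
    assert!(table.is_empty());
}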
7 ( 0.00%) unsafe fn drop_elements(&mut self) { 27,074 ( 0.00%) if mem::needs_drop::() && !self.is_empty() { . for item in self.iter() { . item.drop(); . } . } 8 ( 0.00%) } . . /// Shrinks the table to fit `max(self.len(), min_size)` elements. . #[cfg_attr(feature = "inline-more", inline)] . pub fn shrink_to(&mut self, min_size: usize, hasher: impl Fn(&T) -> u64) { . // Calculate the minimal number of elements that we need to reserve . // space for. . let min_size = usize::max(self.table.items, min_size); . if min_size == 0 { -- line 615 ---------------------------------------- -- line 642 ---------------------------------------- . } . } . } . . /// Ensures that at least `additional` items can be inserted into the table . /// without reallocation. . #[cfg_attr(feature = "inline-more", inline)] . pub fn reserve(&mut self, additional: usize, hasher: impl Fn(&T) -> u64) { 919,404 ( 0.02%) if additional > self.table.growth_left { . // Avoid `Result::unwrap_or_else` because it bloats LLVM IR. 572,753 ( 0.01%) if self . .reserve_rehash(additional, hasher, Fallibility::Infallible) . .is_err() . { . unsafe { hint::unreachable_unchecked() } . } . } . } . -- line 660 ---------------------------------------- -- line 671 ---------------------------------------- . } else { . Ok(()) . } . } . . /// Out-of-line slow path for `reserve` and `try_reserve`. . #[cold] . #[inline(never)] 1,283,497 ( 0.02%) fn reserve_rehash( . &mut self, . additional: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, . ) -> Result<(), TryReserveError> { . unsafe { . self.table.reserve_rehash_inner( . additional, -- line 687 ---------------------------------------- -- line 690 ---------------------------------------- . TableLayout::new::(), . if mem::needs_drop::() { . Some(mem::transmute(ptr::drop_in_place:: as unsafe fn(*mut T))) . } else { . None . }, . ) . } 958,040 ( 0.02%) } . . /// Allocates a new table of a different size and moves the contents of the . /// current table into it. . fn resize( . &mut self, . capacity: usize, . hasher: impl Fn(&T) -> u64, . fallibility: Fallibility, -- line 706 ---------------------------------------- -- line 714 ---------------------------------------- . ) . } . } . . /// Inserts a new element into the table, and returns its raw bucket. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 5,935,538 ( 0.10%) pub fn insert(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> Bucket { . unsafe { . let mut index = self.table.find_insert_slot(hash); . . // We can avoid growing the table once we have reached our load . // factor if we are replacing a tombstone. This works since the . // number of EMPTY slots does not change in this case. 12,547 ( 0.00%) let old_ctrl = *self.table.ctrl(index); 5,127,500 ( 0.09%) if unlikely(self.table.growth_left == 0 && special_is_empty(old_ctrl)) { . self.reserve(1, hasher); . index = self.table.find_insert_slot(hash); . } . . self.table.record_item_insert_at(index, old_ctrl, hash); . . let bucket = self.bucket(index); 2 ( 0.00%) bucket.write(value); . bucket . } 4,371,735 ( 0.07%) } . . /// Attempts to insert a new element without growing the table and return its raw bucket. . /// . /// Returns an `Err` containing the given element if inserting it would require growing the . /// table. . /// . /// This does not check if the given element already exists in the table. . 
#[cfg(feature = "raw")] -- line 749 ---------------------------------------- -- line 760 ---------------------------------------- . } . } . } . . /// Inserts a new element into the table, and returns a mutable reference to it. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] 2,908,856 ( 0.05%) pub fn insert_entry(&mut self, hash: u64, value: T, hasher: impl Fn(&T) -> u64) -> &mut T { 318 ( 0.00%) unsafe { self.insert(hash, value, hasher).as_mut() } 2,181,642 ( 0.04%) } . . /// Inserts a new element into the table, without growing the table. . /// . /// There must be enough space in the table to insert the new element. . /// . /// This does not check if the given element already exists in the table. . #[cfg_attr(feature = "inline-more", inline)] . #[cfg(any(feature = "raw", feature = "rustc-internal-api"))] 18,714 ( 0.00%) pub unsafe fn insert_no_grow(&mut self, hash: u64, value: T) -> Bucket { 776,269 ( 0.01%) let (index, old_ctrl) = self.table.prepare_insert_slot(hash); 40,015 ( 0.00%) let bucket = self.table.bucket(index); . . // If we are replacing a DELETED entry then we don't need to update . // the load counter. 1,709,423 ( 0.03%) self.table.growth_left -= special_is_empty(old_ctrl) as usize; . . bucket.write(value); 1,392,007 ( 0.02%) self.table.items += 1; . bucket 37,343 ( 0.00%) } . . /// Temporary removes a bucket, applying the given function to the removed . /// element and optionally put back the returned value in the same bucket. . /// . /// Returns `true` if the bucket still contains an element . /// . /// This does not check if the given bucket is actually occupied. . #[cfg_attr(feature = "inline-more", inline)] -- line 798 ---------------------------------------- -- line 813 ---------------------------------------- . true . } else { . false . } . } . . /// Searches for an element in the table. . #[inline] 13,425 ( 0.00%) pub fn find(&self, hash: u64, mut eq: impl FnMut(&T) -> bool) -> Option> { 19,536 ( 0.00%) let result = self.table.find_inner(hash, &mut |index| unsafe { 45,636 ( 0.00%) eq(self.bucket(index).as_ref()) 8,533 ( 0.00%) }); . . // Avoid `Option::map` because it bloats LLVM IR. . match result { 2,926 ( 0.00%) Some(index) => Some(unsafe { self.bucket(index) }), . None => None, . } 13,912 ( 0.00%) } . . /// Gets a reference to an element in the table. . #[inline] . pub fn get(&self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&T> { . // Avoid `Option::map` because it bloats LLVM IR. 58,382 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_ref() }), . None => None, . } . } . . /// Gets a mutable reference to an element in the table. . #[inline] . pub fn get_mut(&mut self, hash: u64, eq: impl FnMut(&T) -> bool) -> Option<&mut T> { . // Avoid `Option::map` because it bloats LLVM IR. 6,680 ( 0.00%) match self.find(hash, eq) { . Some(bucket) => Some(unsafe { bucket.as_mut() }), . None => None, . } . } . . /// Attempts to get mutable references to `N` entries in the table at once. . /// . /// Returns an array of length `N` with the results of each query. -- line 855 ---------------------------------------- -- line 920 ---------------------------------------- . #[inline] . pub fn len(&self) -> usize { . self.table.items . } . . /// Returns `true` if the table contains no elements. . #[inline] . pub fn is_empty(&self) -> bool { 2,855,928 ( 0.05%) self.len() == 0 . } . . /// Returns the number of buckets in the table. . #[inline] . 
pub fn buckets(&self) -> usize { . self.table.bucket_mask + 1 . } . . /// Returns an iterator over every element in the table. It is up to . /// the caller to ensure that the `RawTable` outlives the `RawIter`. . /// Because we cannot make the `next` method unsafe on the `RawIter` . /// struct, we have to make the `iter` method unsafe. . #[inline] . pub unsafe fn iter(&self) -> RawIter { 74 ( 0.00%) let data = Bucket::from_base_index(self.data_end(), 0); . RawIter { . iter: RawIterRange::new(self.table.ctrl.as_ptr(), data, self.table.buckets()), 351,843 ( 0.01%) items: self.table.items, . } . } . . /// Returns an iterator over occupied buckets that could match a given hash. . /// . /// `RawTable` only stores 7 bits of the hash value, so this iterator may . /// return items that have a hash value different than the one provided. You . /// should always validate the returned values before using them. -- line 954 ---------------------------------------- -- line 995 ---------------------------------------- . /// Iteration starts at the provided iterator's current location. . /// . /// It is up to the caller to ensure that the iterator is valid for this . /// `RawTable` and covers all items that remain in the table. . pub unsafe fn into_iter_from(self, iter: RawIter) -> RawIntoIter { . debug_assert_eq!(iter.len(), self.len()); . . let alloc = self.table.alloc.clone(); 15,556 ( 0.00%) let allocation = self.into_allocation(); 11,667 ( 0.00%) RawIntoIter { 19,445 ( 0.00%) iter, . allocation, . marker: PhantomData, . alloc, . } . } . . /// Converts the table into a raw allocation. The contents of the table . /// should be dropped using a `RawIter` before freeing the allocation. . #[cfg_attr(feature = "inline-more", inline)] . pub(crate) fn into_allocation(self) -> Option<(NonNull, Layout)> { 7,293 ( 0.00%) let alloc = if self.table.is_empty_singleton() { . None . } else { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match calculate_layout::(self.table.buckets()) { . Some(lco) => lco, . None => unsafe { hint::unreachable_unchecked() }, . }; . Some(( 1,119 ( 0.00%) unsafe { NonNull::new_unchecked(self.table.ctrl.as_ptr().sub(ctrl_offset)) }, . layout, . )) . }; . mem::forget(self); . alloc . } . } . -- line 1033 ---------------------------------------- -- line 1042 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . impl RawTableInner { . #[inline] . const fn new_in(alloc: A) -> Self { 2,049,042 ( 0.03%) Self { . // Be careful to cast the entire slice to a raw pointer. . ctrl: unsafe { NonNull::new_unchecked(Group::static_empty() as *const _ as *mut u8) }, . bucket_mask: 0, . items: 0, . growth_left: 0, . alloc, . } . } . } . . impl RawTableInner { . #[cfg_attr(feature = "inline-more", inline)] 1,259,056 ( 0.02%) unsafe fn new_uninitialized( . alloc: A, . table_layout: TableLayout, . buckets: usize, . fallibility: Fallibility, . ) -> Result { . debug_assert!(buckets.is_power_of_two()); . . // Avoid `Option::ok_or_else` because it bloats LLVM IR. -- line 1071 ---------------------------------------- -- line 1078 ---------------------------------------- . // exceed `isize::MAX`. We can skip this check on 64-bit systems since . // such allocations will never succeed anyways. . // . // This mirrors what Vec does in the standard library. . if mem::size_of::() < 8 && layout.size() > isize::MAX as usize { . return Err(fallibility.capacity_overflow()); . } . 281,142 ( 0.00%) let ptr: NonNull = match do_alloc(&alloc, layout) { . 
Ok(block) => block.cast(), . Err(_) => return Err(fallibility.alloc_err(layout)), . }; . . let ctrl = NonNull::new_unchecked(ptr.as_ptr().add(ctrl_offset)); 691,032 ( 0.01%) Ok(Self { . ctrl, 276,756 ( 0.00%) bucket_mask: buckets - 1, . items: 0, . growth_left: bucket_mask_to_capacity(buckets - 1), . alloc, . }) 916,152 ( 0.02%) } . . #[inline] 78,276 ( 0.00%) fn fallible_with_capacity( . alloc: A, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result { 20,154 ( 0.00%) if capacity == 0 { 12,920 ( 0.00%) Ok(Self::new_in(alloc)) . } else { . unsafe { . let buckets = . capacity_to_buckets(capacity).ok_or_else(|| fallibility.capacity_overflow())?; . 570,623 ( 0.01%) let result = Self::new_uninitialized(alloc, table_layout, buckets, fallibility)?; . result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes()); . 44,026 ( 0.00%) Ok(result) . } . } 78,276 ( 0.00%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element and sets the hash for that slot. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] 286,654 ( 0.00%) unsafe fn prepare_insert_slot(&self, hash: u64) -> (usize, u8) { . let index = self.find_insert_slot(hash); 286,654 ( 0.00%) let old_ctrl = *self.ctrl(index); . self.set_ctrl_h2(index, hash); . (index, old_ctrl) 573,308 ( 0.01%) } . . /// Searches for an empty or deleted bucket which is suitable for inserting . /// a new element. . /// . /// There must be at least 1 empty bucket in the table. . #[inline] . fn find_insert_slot(&self, hash: u64) -> usize { . let mut probe_seq = self.probe_seq(hash); . loop { . unsafe { . let group = Group::load(self.ctrl(probe_seq.pos)); 3,443,642 ( 0.06%) if let Some(bit) = group.match_empty_or_deleted().lowest_set_bit() { 10,071,652 ( 0.17%) let result = (probe_seq.pos + bit) & self.bucket_mask; . . // In tables smaller than the group width, trailing control . // bytes outside the range of the table are filled with . // EMPTY entries. These will unfortunately trigger a . // match, but once masked may point to a full bucket that . // is already occupied. We detect this situation here and . // perform a second scan starting at the beginning of the . // table. This second scan is guaranteed to find an empty . // slot (due to the load factor) before hitting the trailing . // control bytes (containing EMPTY). 4,636,280 ( 0.08%) if unlikely(is_full(*self.ctrl(result))) { . debug_assert!(self.bucket_mask < Group::WIDTH); . debug_assert_ne!(probe_seq.pos, 0); . return Group::load_aligned(self.ctrl(0)) . .match_empty_or_deleted() . .lowest_set_bit_nonzero(); . } . . return result; -- line 1165 ---------------------------------------- -- line 1171 ---------------------------------------- . . /// Searches for an element in the table. This uses dynamic dispatch to reduce the amount of . /// code generated, but it is eliminated by LLVM optimizations. . #[inline] . fn find_inner(&self, hash: u64, eq: &mut dyn FnMut(usize) -> bool) -> Option { . let h2_hash = h2(hash); . let mut probe_seq = self.probe_seq(hash); . 217,540 ( 0.00%) loop { . let group = unsafe { Group::load(self.ctrl(probe_seq.pos)) }; . 10,715,696 ( 0.18%) for bit in group.match_byte(h2_hash) { 19,954,301 ( 0.33%) let index = (probe_seq.pos + bit) & self.bucket_mask; . 12,913,760 ( 0.21%) if likely(eq(index)) { . return Some(index); . } . } . 3,557,918 ( 0.06%) if likely(group.match_empty().any_bit_set()) { . return None; . } . . probe_seq.move_next(self.bucket_mask); . } . } . . 
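The `find_insert_slot` and `find_inner` loops above walk the table with the triangular probe sequence described earlier in this listing. The following is a minimal, standalone sketch of just that index arithmetic, assuming a power-of-two bucket count and an illustrative group width of 16; the control-byte group loads and matches that the real code performs are omitted.

const GROUP_WIDTH: usize = 16;

struct ProbeSeq {
    pos: usize,
    stride: usize,
}

impl ProbeSeq {
    // Start probing at h1(hash) masked into the table, as in the annotated code.
    fn new(h1: usize, bucket_mask: usize) -> Self {
        ProbeSeq { pos: h1 & bucket_mask, stride: 0 }
    }

    // Jump by one more group on every step, so the cumulative offset grows by
    // triangular numbers; with a power-of-two table this visits every group
    // exactly once before the sequence repeats.
    fn move_next(&mut self, bucket_mask: usize) {
        self.stride += GROUP_WIDTH;
        self.pos += self.stride;
        self.pos &= bucket_mask;
    }
}

fn main() {
    let bucket_mask = 63; // 64 buckets
    let mut seq = ProbeSeq::new(0x9e37 & bucket_mask, bucket_mask);
    for _ in 0..4 {
        println!("probe group starting at bucket {}", seq.pos);
        seq.move_next(bucket_mask);
    }
}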
#[allow(clippy::mut_mut)] . #[inline] . unsafe fn prepare_rehash_in_place(&mut self) { . // Bulk convert all full control bytes to DELETED, and all DELETED . // control bytes to EMPTY. This effectively frees up all buckets . // containing a DELETED entry. 9 ( 0.00%) for i in (0..self.buckets()).step_by(Group::WIDTH) { . let group = Group::load_aligned(self.ctrl(i)); . let group = group.convert_special_to_empty_and_full_to_deleted(); . group.store_aligned(self.ctrl(i)); . } . . // Fix up the trailing control bytes. See the comments in set_ctrl . // for the handling of tables smaller than the group width. 9 ( 0.00%) if self.buckets() < Group::WIDTH { . self.ctrl(0) . .copy_to(self.ctrl(Group::WIDTH), self.buckets()); . } else { . self.ctrl(0) . .copy_to(self.ctrl(self.buckets()), Group::WIDTH); . } . } . -- line 1220 ---------------------------------------- -- line 1225 ---------------------------------------- . Bucket::from_base_index(self.data_end(), index) . } . . #[inline] . unsafe fn bucket_ptr(&self, index: usize, size_of: usize) -> *mut u8 { . debug_assert_ne!(self.bucket_mask, 0); . debug_assert!(index < self.buckets()); . let base: *mut u8 = self.data_end().as_ptr(); 13,425,704 ( 0.22%) base.sub((index + 1) * size_of) . } . . #[inline] . unsafe fn data_end(&self) -> NonNull { . NonNull::new_unchecked(self.ctrl.as_ptr().cast()) . } . . /// Returns an iterator-like object for a probe sequence on the table. . /// . /// This iterator never terminates, but is guaranteed to visit each bucket . /// group exactly once. The loop using `probe_seq` must terminate upon . /// reaching a group containing an empty bucket. . #[inline] . fn probe_seq(&self, hash: u64) -> ProbeSeq { . ProbeSeq { 57,156,939 ( 0.95%) pos: h1(hash) & self.bucket_mask, . stride: 0, . } . } . . /// Returns the index of a bucket for which a value must be inserted if there is enough rooom . /// in the table, otherwise returns error . #[cfg(feature = "raw")] . #[inline] -- line 1257 ---------------------------------------- -- line 1263 ---------------------------------------- . } else { . self.record_item_insert_at(index, old_ctrl, hash); . Ok(index) . } . } . . #[inline] . unsafe fn record_item_insert_at(&mut self, index: usize, old_ctrl: u8, hash: u64) { 6,402,565 ( 0.11%) self.growth_left -= special_is_empty(old_ctrl) as usize; . self.set_ctrl_h2(index, hash); 5,122,052 ( 0.08%) self.items += 1; . } . . #[inline] . fn is_in_same_group(&self, i: usize, new_i: usize, hash: u64) -> bool { . let probe_seq_pos = self.probe_seq(hash).pos; . let probe_index = . |pos: usize| (pos.wrapping_sub(probe_seq_pos) & self.bucket_mask) / Group::WIDTH; 84 ( 0.00%) probe_index(i) == probe_index(new_i) . } . . /// Sets a control byte to the hash, and possibly also the replicated control byte at . /// the end of the array. . #[inline] . unsafe fn set_ctrl_h2(&self, index: usize, hash: u64) { . self.set_ctrl(index, h2(hash)); . } -- line 1289 ---------------------------------------- -- line 1312 ---------------------------------------- . // replicate the buckets at the end of the trailing group. For example . // with 2 buckets and a group size of 4, the control bytes will look . // like this: . // . // Real | Replicated . // --------------------------------------------- . // | [A] | [B] | [EMPTY] | [EMPTY] | [A] | [B] | . // --------------------------------------------- 9,792,087 ( 0.16%) let index2 = ((index.wrapping_sub(Group::WIDTH)) & self.bucket_mask) + Group::WIDTH; . 
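// Illustrative annotation for the `index2` computation above, based on the
// diagram in the preceding comment (2 buckets, Group::WIDTH == 4): index 0
// maps to index2 == 4 and index 1 maps to index2 == 5, i.e. the trailing
// replicated control bytes, so both copies stay in sync via the two stores
// below. For tables with at least Group::WIDTH buckets and
// index >= Group::WIDTH, index2 == index and the second store is a harmless
// duplicate of the first.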
3,264,029 ( 0.05%) *self.ctrl(index) = ctrl; 3,265,049 ( 0.05%) *self.ctrl(index2) = ctrl; . } . . /// Returns a pointer to a control byte. . #[inline] . unsafe fn ctrl(&self, index: usize) -> *mut u8 { . debug_assert!(index < self.num_ctrl_bytes()); . self.ctrl.as_ptr().add(index) . } . . #[inline] . fn buckets(&self) -> usize { 964,215 ( 0.02%) self.bucket_mask + 1 . } . . #[inline] . fn num_ctrl_bytes(&self) -> usize { 807,811 ( 0.01%) self.bucket_mask + 1 + Group::WIDTH . } . . #[inline] . fn is_empty_singleton(&self) -> bool { 3,598,104 ( 0.06%) self.bucket_mask == 0 . } . . #[allow(clippy::mut_mut)] . #[inline] . unsafe fn prepare_resize( . &self, . table_layout: TableLayout, . capacity: usize, . fallibility: Fallibility, . ) -> Result, TryReserveError> { . debug_assert!(self.items <= capacity); . . // Allocate and initialize the new table. 8,664 ( 0.00%) let mut new_table = RawTableInner::fallible_with_capacity( . self.alloc.clone(), . table_layout, . capacity, . fallibility, . )?; 359,612 ( 0.01%) new_table.growth_left -= self.items; . new_table.items = self.items; . . // The hash function may panic, in which case we simply free the new . // table without dropping any elements that may have been copied into . // it. . // . // This guard is also used to free the old table on success, see . // the comment at the bottom of this function. . Ok(guard(new_table, move |self_| { 157,048 ( 0.00%) if !self_.is_empty_singleton() { . self_.free_buckets(table_layout); . } . })) . } . . /// Reserves or rehashes to make room for `additional` more elements. . /// . /// This uses dynamic dispatch to reduce the amount of -- line 1383 ---------------------------------------- -- line 1388 ---------------------------------------- . &mut self, . additional: usize, . hasher: &dyn Fn(&mut Self, usize) -> u64, . fallibility: Fallibility, . layout: TableLayout, . drop: Option, . ) -> Result<(), TryReserveError> { . // Avoid `Option::ok_or_else` because it bloats LLVM IR. 314,106 ( 0.01%) let new_items = match self.items.checked_add(additional) { . Some(new_items) => new_items, . None => return Err(fallibility.capacity_overflow()), . }; 314,102 ( 0.01%) let full_capacity = bucket_mask_to_capacity(self.bucket_mask); 674,324 ( 0.01%) if new_items <= full_capacity / 2 { . // Rehash in-place without re-allocating if we have plenty of spare . // capacity that is locked up due to DELETED entries. . self.rehash_in_place(hasher, layout.size, drop); 6 ( 0.00%) Ok(()) . } else { . // Otherwise, conservatively resize to at least the next size up . // to avoid churning deletes into frequent rehashes. . self.resize_inner( 157,048 ( 0.00%) usize::max(new_items, full_capacity + 1), . hasher, . fallibility, . layout, . ) . } . } . . /// Allocates a new table of a different size and moves the contents of the -- line 1418 ---------------------------------------- -- line 1424 ---------------------------------------- . #[inline(always)] . unsafe fn resize_inner( . &mut self, . capacity: usize, . hasher: &dyn Fn(&mut Self, usize) -> u64, . fallibility: Fallibility, . layout: TableLayout, . ) -> Result<(), TryReserveError> { 20,327 ( 0.00%) let mut new_table = self.prepare_resize(layout, capacity, fallibility)?; . . // Copy all elements to the new table. . for i in 0..self.buckets() { 2,048,017 ( 0.03%) if !is_full(*self.ctrl(i)) { . continue; . } . . // This may panic. . let hash = hasher(self, i); . . // We can use a simpler version of insert() here since: . // - there are no DELETED entries. 
-- line 1444 ---------------------------------------- -- line 1454 ---------------------------------------- . } . . // We successfully copied all elements without panicking. Now replace . // self with the new table. The old table will have its memory freed but . // the items will not be dropped (since they have been moved into the . // new table). . mem::swap(self, &mut new_table); . 157,048 ( 0.00%) Ok(()) . } . . /// Rehashes the contents of the table in place (i.e. without changing the . /// allocation). . /// . /// If `hasher` panics then some the table's contents may be lost. . /// . /// This uses dynamic dispatch to reduce the amount of -- line 1470 ---------------------------------------- -- line 1496 ---------------------------------------- . } . self_.growth_left = bucket_mask_to_capacity(self_.bucket_mask) - self_.items; . }); . . // At this point, DELETED elements are elements that we haven't . // rehashed yet. Find them and re-insert them at their ideal . // position. . 'outer: for i in 0..guard.buckets() { 192 ( 0.00%) if *guard.ctrl(i) != DELETED { . continue; . } . . let i_p = guard.bucket_ptr(i, size_of); . . 'inner: loop { . // Hash the current item . let hash = hasher(*guard, i); -- line 1512 ---------------------------------------- -- line 1515 ---------------------------------------- . let new_i = guard.find_insert_slot(hash); . let new_i_p = guard.bucket_ptr(new_i, size_of); . . // Probing works by scanning through all of the control . // bytes in groups, which may not be aligned to the group . // size. If both the new and old position fall within the . // same unaligned group, then there is no benefit in moving . // it and we can just continue to the next item. 28 ( 0.00%) if likely(guard.is_in_same_group(i, new_i, hash)) { . guard.set_ctrl_h2(i, hash); . continue 'outer; . } . . // We are moving the current item to a new position. Write . // our H2 to the control byte of the new position. . let prev_ctrl = guard.replace_ctrl_h2(new_i, hash); . if prev_ctrl == EMPTY { -- line 1531 ---------------------------------------- -- line 1541 ---------------------------------------- . // swapped into the old slot. . debug_assert_eq!(prev_ctrl, DELETED); . ptr::swap_nonoverlapping(i_p, new_i_p, size_of); . continue 'inner; . } . } . } . 9 ( 0.00%) guard.growth_left = bucket_mask_to_capacity(guard.bucket_mask) - guard.items; . . mem::forget(guard); . } . . #[inline] . unsafe fn free_buckets(&mut self, table_layout: TableLayout) { . // Avoid `Option::unwrap_or_else` because it bloats LLVM IR. . let (layout, ctrl_offset) = match table_layout.calculate_layout_for(self.buckets()) { . Some(lco) => lco, . None => hint::unreachable_unchecked(), . }; . self.alloc.deallocate( 115,315 ( 0.00%) NonNull::new_unchecked(self.ctrl.as_ptr().sub(ctrl_offset)), . layout, . ); . } . . /// Marks all table buckets as empty without dropping their contents. . #[inline] . fn clear_no_drop(&mut self) { 36,059 ( 0.00%) if !self.is_empty_singleton() { . unsafe { . self.ctrl(0).write_bytes(EMPTY, self.num_ctrl_bytes()); . } . } 38,743 ( 0.00%) self.items = 0; 36,059 ( 0.00%) self.growth_left = bucket_mask_to_capacity(self.bucket_mask); . } . . #[inline] . unsafe fn erase(&mut self, index: usize) { . debug_assert!(is_full(*self.ctrl(index))); 243,750 ( 0.00%) let index_before = index.wrapping_sub(Group::WIDTH) & self.bucket_mask; . let empty_before = Group::load(self.ctrl(index_before)).match_empty(); . let empty_after = Group::load(self.ctrl(index)).match_empty(); . . 
// If we are inside a continuous block of Group::WIDTH full or deleted . // cells then a probe window may have seen a full block when trying to . // insert. We therefore need to keep that block non-empty so that . // lookups will continue searching to the next probe window. . // . // Note that in this context `leading_zeros` refers to the bytes at the . // end of a group, while `trailing_zeros` refers to the bytes at the . // beginning of a group. 975,000 ( 0.02%) let ctrl = if empty_before.leading_zeros() + empty_after.trailing_zeros() >= Group::WIDTH { . DELETED . } else { 1,147,900 ( 0.02%) self.growth_left += 1; . EMPTY . }; . self.set_ctrl(index, ctrl); 975,000 ( 0.02%) self.items -= 1; . } . } . . impl Clone for RawTable { 2,912 ( 0.00%) fn clone(&self) -> Self { 461 ( 0.00%) if self.table.is_empty_singleton() { . Self::new_in(self.table.alloc.clone()) . } else { . unsafe { . let mut new_table = ManuallyDrop::new( . // Avoid `Result::ok_or_else` because it bloats LLVM IR. . match Self::new_uninitialized( . self.table.alloc.clone(), . self.table.buckets(), -- line 1615 ---------------------------------------- -- line 1624 ---------------------------------------- . // We need to free the memory allocated for the new table. . new_table.free_buckets(); . }); . . // Return the newly created table. . ManuallyDrop::into_inner(new_table) . } . } 3,276 ( 0.00%) } . . fn clone_from(&mut self, source: &Self) { . if source.table.is_empty_singleton() { . *self = Self::new_in(self.table.alloc.clone()); . } else { . unsafe { . // First, drop all our elements without clearing the control bytes. . self.drop_elements(); -- line 1640 ---------------------------------------- -- line 1687 ---------------------------------------- . .table . .ctrl(0) . .copy_to_nonoverlapping(self.table.ctrl(0), self.table.num_ctrl_bytes()); . source . .data_start() . .copy_to_nonoverlapping(self.data_start(), self.table.buckets()); . . self.table.items = source.table.items; 166 ( 0.00%) self.table.growth_left = source.table.growth_left; . } . } . . impl RawTable { . /// Common code for clone and clone_from. Assumes `self.buckets() == source.buckets()`. . #[cfg_attr(feature = "inline-more", inline)] . unsafe fn clone_from_impl(&mut self, source: &Self, mut on_panic: impl FnMut(&mut Self)) { . // Copy the control bytes unchanged. We do this in a single pass -- line 1703 ---------------------------------------- -- line 1790 ---------------------------------------- . fn default() -> Self { . Self::new_in(Default::default()) . } . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] 1,425,028 ( 0.02%) fn drop(&mut self) { 1,547,839 ( 0.03%) if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); . self.free_buckets(); . } . } 1,549,182 ( 0.03%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawTable { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . if !self.table.is_empty_singleton() { . unsafe { . self.drop_elements(); -- line 1813 ---------------------------------------- -- line 1817 ---------------------------------------- . } . } . . impl IntoIterator for RawTable { . type Item = T; . type IntoIter = RawIntoIter; . . #[cfg_attr(feature = "inline-more", inline)] 15,556 ( 0.00%) fn into_iter(self) -> RawIntoIter { . unsafe { . let iter = self.iter(); . self.into_iter_from(iter) . } 19,445 ( 0.00%) } . } . . /// Iterator over a sub-range of a table. 
Unlike `RawIter` this iterator does . /// not track an item count. . pub(crate) struct RawIterRange { . // Mask of full buckets in the current group. Bits are cleared from this . // mask as each element is processed. . current_group: BitMask, -- line 1838 ---------------------------------------- -- line 1934 ---------------------------------------- . . impl Iterator for RawIterRange { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option> { . unsafe { . loop { 1,200,934 ( 0.02%) if let Some(index) = self.current_group.lowest_set_bit() { 124,494 ( 0.00%) self.current_group = self.current_group.remove_lowest_bit(); 210,557 ( 0.00%) return Some(self.data.next_n(index)); . } . 1,673,687 ( 0.03%) if self.next_ctrl >= self.end { . return None; . } . . // We might read past self.end up to the next group boundary, . // but this is fine because it only occurs on tables smaller . // than the group size where the trailing control bytes are all . // EMPTY. On larger tables self.end is guaranteed to be aligned . // to the group size (since tables are power-of-two sized). 14,413 ( 0.00%) self.current_group = Group::load_aligned(self.next_ctrl).match_full(); 21,474 ( 0.00%) self.data = self.data.next_n(Group::WIDTH); 20,346 ( 0.00%) self.next_ctrl = self.next_ctrl.add(Group::WIDTH); . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . // We don't have an item count, so just guess based on the range size. . ( -- line 1966 ---------------------------------------- -- line 2102 ---------------------------------------- . } . } else { . // We must have already iterated past the removed item. . } . } . } . . unsafe fn drop_elements(&mut self) { 2,225 ( 0.00%) if mem::needs_drop::() && self.len() != 0 { . for item in self { . item.drop(); . } . } . } . } . . impl Clone for RawIter { -- line 2118 ---------------------------------------- -- line 2124 ---------------------------------------- . } . } . } . . impl Iterator for RawIter { . type Item = Bucket; . . #[cfg_attr(feature = "inline-more", inline)] 155,453 ( 0.00%) fn next(&mut self) -> Option> { 299,474 ( 0.00%) if let Some(b) = self.iter.next() { 5,719,675 ( 0.09%) self.items -= 1; . Some(b) . } else { . // We don't check against items == 0 here to allow the . // compiler to optimize away the item count entirely if the . // iterator length is never queried. . debug_assert_eq!(self.items, 0); . None . } 310,906 ( 0.01%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (self.items, Some(self.items)) . } . } . . impl ExactSizeIterator for RawIter {} -- line 2151 ---------------------------------------- -- line 2177 ---------------------------------------- . T: Sync, . A: Sync, . { . } . . #[cfg(feature = "nightly")] . unsafe impl<#[may_dangle] T, A: Allocator + Clone> Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] 4,830 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); . . // Free the table 33,893 ( 0.00%) if let Some((ptr, layout)) = self.allocation { . self.alloc.deallocate(ptr, layout); . } . } 590 ( 0.00%) } . } . #[cfg(not(feature = "nightly"))] . impl Drop for RawIntoIter { . #[cfg_attr(feature = "inline-more", inline)] . fn drop(&mut self) { . unsafe { . // Drop all remaining elements . self.iter.drop_elements(); -- line 2203 ---------------------------------------- -- line 2209 ---------------------------------------- . } . } . } . . impl Iterator for RawIntoIter { . type Item = T; . . 
#[cfg_attr(feature = "inline-more", inline)] 1,801 ( 0.00%) fn next(&mut self) -> Option { 666 ( 0.00%) unsafe { Some(self.iter.next()?.read()) } 3,602 ( 0.00%) } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 3 ( 0.00%) self.iter.size_hint() . } . } . . impl ExactSizeIterator for RawIntoIter {} . impl FusedIterator for RawIntoIter {} . . /// Iterator which consumes elements without freeing the table storage. . pub struct RawDrain<'a, T, A: Allocator + Clone = Global> { -- line 2231 ---------------------------------------- -- line 2259 ---------------------------------------- . where . T: Sync, . A: Sync, . { . } . . impl Drop for RawDrain<'_, T, A> { . #[cfg_attr(feature = "inline-more", inline)] 920 ( 0.00%) fn drop(&mut self) { . unsafe { . // Drop all remaining elements. Note that this may panic. . self.iter.drop_elements(); . . // Reset the contents of the table now that all elements have been . // dropped. . self.table.clear_no_drop(); . . // Move the now empty table back to its original location. 115 ( 0.00%) self.orig_table . .as_ptr() . .copy_from_nonoverlapping(&*self.table, 1); . } 920 ( 0.00%) } . } . . impl Iterator for RawDrain<'_, T, A> { . type Item = T; . . #[cfg_attr(feature = "inline-more", inline)] . fn next(&mut self) -> Option { . unsafe { -- line 2289 ---------------------------------------- 45,938,596 ( 0.76%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs -------------------------------------------------------------------------------- Ir -- line 118 ---------------------------------------- . stability: InternedSet<'tcx, attr::Stability>, . . /// `#[rustc_const_stable]` and `#[rustc_const_unstable]` attributes . const_stability: InternedSet<'tcx, attr::ConstStability>, . } . . impl<'tcx> CtxtInterners<'tcx> { . fn new(arena: &'tcx WorkerLocal>) -> CtxtInterners<'tcx> { 17 ( 0.00%) CtxtInterners { . arena, . type_: Default::default(), . type_list: Default::default(), . substs: Default::default(), . region: Default::default(), . poly_existential_predicates: Default::default(), . canonical_var_infos: Default::default(), . predicate: Default::default(), -- line 134 ---------------------------------------- -- line 143 ---------------------------------------- . stability: Default::default(), . const_stability: Default::default(), . } . } . . /// Interns a type. . #[allow(rustc::usage_of_ty_tykind)] . #[inline(never)] 6,361,512 ( 0.11%) fn intern_ty(&self, kind: TyKind<'tcx>) -> Ty<'tcx> { . self.type_ 3,975,945 ( 0.07%) .intern(kind, |kind| { 252,594 ( 0.00%) let flags = super::flags::FlagComputation::for_kind(&kind); . . let ty_struct = TyS { 505,188 ( 0.01%) kind, . flags: flags.flags, . outer_exclusive_binder: flags.outer_exclusive_binder, . }; . . Interned(self.arena.alloc(ty_struct)) . }) . .0 7,156,701 ( 0.12%) } . . #[inline(never)] 2,105,088 ( 0.03%) fn intern_predicate( . &self, . kind: Binder<'tcx, PredicateKind<'tcx>>, . ) -> &'tcx PredicateInner<'tcx> { . self.predicate 1,841,952 ( 0.03%) .intern(kind, |kind| { 550,596 ( 0.01%) let flags = super::flags::FlagComputation::for_predicate(kind); . . let predicate_struct = PredicateInner { . kind, . flags: flags.flags, . outer_exclusive_binder: flags.outer_exclusive_binder, . }; . . Interned(self.arena.alloc(predicate_struct)) . }) . .0 2,368,224 ( 0.04%) } . } . . pub struct CommonTypes<'tcx> { . pub unit: Ty<'tcx>, . pub bool: Ty<'tcx>, . pub char: Ty<'tcx>, . 
pub isize: Ty<'tcx>, . pub i8: Ty<'tcx>, -- line 193 ---------------------------------------- -- line 237 ---------------------------------------- . /// safely used as a key in the maps of a TypeckResults. For that to be . /// the case, the HirId must have the same `owner` as all the other IDs in . /// this table (signified by `hir_owner`). Otherwise the HirId . /// would be in a different frame of reference and using its `local_id` . /// would result in lookup errors, or worse, in silently wrong data being . /// stored/returned. . #[inline] . fn validate_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) { 1,093,525 ( 0.02%) if hir_id.owner != hir_owner { . invalid_hir_id_for_typeck_results(hir_owner, hir_id); . } . } . . #[cold] . #[inline(never)] . fn invalid_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) { . ty::tls::with(|tcx| { -- line 253 ---------------------------------------- -- line 261 ---------------------------------------- . } . . impl<'a, V> LocalTableInContext<'a, V> { . pub fn contains_key(&self, id: hir::HirId) -> bool { . validate_hir_id_for_typeck_results(self.hir_owner, id); . self.data.contains_key(&id.local_id) . } . 848,273 ( 0.01%) pub fn get(&self, id: hir::HirId) -> Option<&V> { 168,366 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); 168,366 ( 0.00%) self.data.get(&id.local_id) 680,698 ( 0.01%) } . . pub fn iter(&self) -> hash_map::Iter<'_, hir::ItemLocalId, V> { . self.data.iter() . } . } . . impl<'a, V> ::std::ops::Index for LocalTableInContext<'a, V> { . type Output = V; . 4,746 ( 0.00%) fn index(&self, key: hir::HirId) -> &V { . self.get(key).expect("LocalTableInContext: key not found") 3,164 ( 0.00%) } . } . . pub struct LocalTableInContextMut<'a, V> { . hir_owner: LocalDefId, . data: &'a mut ItemLocalMap, . } . . impl<'a, V> LocalTableInContextMut<'a, V> { -- line 292 ---------------------------------------- -- line 507 ---------------------------------------- . pub treat_byte_string_as_slice: ItemLocalSet, . . /// Contains the data for evaluating the effect of feature `capture_disjoint_fields` . /// on closure size. . pub closure_size_eval: FxHashMap>, . } . . impl<'tcx> TypeckResults<'tcx> { 17,688 ( 0.00%) pub fn new(hir_owner: LocalDefId) -> TypeckResults<'tcx> { 209,308 ( 0.00%) TypeckResults { . hir_owner, . type_dependent_defs: Default::default(), . field_indices: Default::default(), . user_provided_types: Default::default(), . user_provided_sigs: Default::default(), . node_types: Default::default(), . node_substs: Default::default(), . adjustments: Default::default(), -- line 524 ---------------------------------------- -- line 532 ---------------------------------------- . tainted_by_errors: None, . concrete_opaque_types: Default::default(), . closure_min_captures: Default::default(), . closure_fake_reads: Default::default(), . generator_interior_types: ty::Binder::dummy(Default::default()), . treat_byte_string_as_slice: Default::default(), . closure_size_eval: Default::default(), . } 17,688 ( 0.00%) } . . /// Returns the final resolution of a `QPath` in an `Expr` or `Pat` node. 202,452 ( 0.00%) pub fn qpath_res(&self, qpath: &hir::QPath<'_>, id: hir::HirId) -> Res { 134,968 ( 0.00%) match *qpath { 261,414 ( 0.00%) hir::QPath::Resolved(_, ref path) => path.res, . hir::QPath::TypeRelative(..) | hir::QPath::LangItem(..) => self . .type_dependent_def(id) 143,490 ( 0.00%) .map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)), . } 337,420 ( 0.01%) } . 
1,674 ( 0.00%) pub fn type_dependent_defs( . &self, . ) -> LocalTableInContext<'_, Result<(DefKind, DefId), ErrorReported>> { 10,518 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.type_dependent_defs } 1,674 ( 0.00%) } . 28,215 ( 0.00%) pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> { 57,916 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.type_dependent_defs.get(&id.local_id).cloned().and_then(|r| r.ok()) 56,430 ( 0.00%) } . 23,144 ( 0.00%) pub fn type_dependent_def_id(&self, id: HirId) -> Option { . self.type_dependent_def(id).map(|(_, def_id)| def_id) 23,144 ( 0.00%) } . 69,806 ( 0.00%) pub fn type_dependent_defs_mut( . &mut self, . ) -> LocalTableInContextMut<'_, Result<(DefKind, DefId), ErrorReported>> { 69,806 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.type_dependent_defs } 69,806 ( 0.00%) } . . pub fn field_indices(&self) -> LocalTableInContext<'_, usize> { 25,636 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.field_indices } 4,787 ( 0.00%) } . . pub fn field_indices_mut(&mut self) -> LocalTableInContextMut<'_, usize> { 31,086 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.field_indices } 15,543 ( 0.00%) } . . pub fn user_provided_types(&self) -> LocalTableInContext<'_, CanonicalUserType<'tcx>> { 38,014 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.user_provided_types } 19,007 ( 0.00%) } . . pub fn user_provided_types_mut( . &mut self, . ) -> LocalTableInContextMut<'_, CanonicalUserType<'tcx>> { 7,024 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.user_provided_types } 3,512 ( 0.00%) } . . pub fn node_types(&self) -> LocalTableInContext<'_, Ty<'tcx>> { 137,130 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.node_types } 68,565 ( 0.00%) } . . pub fn node_types_mut(&mut self) -> LocalTableInContextMut<'_, Ty<'tcx>> { 239,580 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_types } 119,790 ( 0.00%) } . 1,186,200 ( 0.02%) pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> { 197,700 ( 0.00%) self.node_type_opt(id).unwrap_or_else(|| { . bug!("node_type: no type for node `{}`", tls::with(|tcx| tcx.hir().node_to_string(id))) . }) 988,500 ( 0.02%) } . 247,435 ( 0.00%) pub fn node_type_opt(&self, id: hir::HirId) -> Option> { 247,435 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_types.get(&id.local_id).cloned() 494,870 ( 0.01%) } . . pub fn node_substs_mut(&mut self) -> LocalTableInContextMut<'_, SubstsRef<'tcx>> { 27,066 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.node_substs } 13,533 ( 0.00%) } . 80,955 ( 0.00%) pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> { 80,955 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| InternalSubsts::empty()) 161,910 ( 0.00%) } . 52,293 ( 0.00%) pub fn node_substs_opt(&self, id: hir::HirId) -> Option> { 52,293 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, id); . self.node_substs.get(&id.local_id).cloned() 104,586 ( 0.00%) } . . // Returns the type of a pattern as a monotype. Like @expr_ty, this function . // doesn't provide type parameter substitutions. . pub fn pat_ty(&self, pat: &hir::Pat<'_>) -> Ty<'tcx> { 41,032 ( 0.00%) self.node_type(pat.hir_id) . } . . // Returns the type of an expression as a monotype. . // . 
// NB (1): This is the PRE-ADJUSTMENT TYPE for the expression. That is, in . // some cases, we insert `Adjustment` annotations such as auto-deref or . // auto-ref. The type returned by this function does not consider such . // adjustments. See `expr_ty_adjusted()` instead. . // . // NB (2): This type doesn't provide type parameter substitutions; e.g., if you . // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize" . // instead of "fn(ty) -> T with T = isize". . pub fn expr_ty(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> { 198,723 ( 0.00%) self.node_type(expr.hir_id) . } . . pub fn expr_ty_opt(&self, expr: &hir::Expr<'_>) -> Option> { 177,820 ( 0.00%) self.node_type_opt(expr.hir_id) . } . . pub fn adjustments(&self) -> LocalTableInContext<'_, Vec>> { 109,700 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.adjustments } 54,850 ( 0.00%) } . . pub fn adjustments_mut( . &mut self, . ) -> LocalTableInContextMut<'_, Vec>> { 149,542 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.adjustments } 74,771 ( 0.00%) } . 81,259 ( 0.00%) pub fn expr_adjustments(&self, expr: &hir::Expr<'_>) -> &[ty::adjustment::Adjustment<'tcx>] { 162,518 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, expr.hir_id); . self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..]) 162,518 ( 0.00%) } . . /// Returns the type of `expr`, considering any `Adjustment` . /// entry recorded for that expression. 13,135 ( 0.00%) pub fn expr_ty_adjusted(&self, expr: &hir::Expr<'_>) -> Ty<'tcx> { 2,627 ( 0.00%) self.expr_adjustments(expr).last().map_or_else(|| self.expr_ty(expr), |adj| adj.target) 3,512 ( 0.00%) } . 1,180 ( 0.00%) pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr<'_>) -> Option> { 236 ( 0.00%) self.expr_adjustments(expr).last().map(|adj| adj.target).or_else(|| self.expr_ty_opt(expr)) 688 ( 0.00%) } . 8,844 ( 0.00%) pub fn is_method_call(&self, expr: &hir::Expr<'_>) -> bool { . // Only paths and method calls/overloaded operators have . // entries in type_dependent_defs, ignore the former here. 17,688 ( 0.00%) if let hir::ExprKind::Path(_) = expr.kind { . return false; . } . 18,620 ( 0.00%) matches!(self.type_dependent_defs().get(expr.hir_id), Some(Ok((DefKind::AssocFn, _)))) 17,688 ( 0.00%) } . 48,327 ( 0.00%) pub fn extract_binding_mode(&self, s: &Session, id: HirId, sp: Span) -> Option { . self.pat_binding_modes().get(id).copied().or_else(|| { . s.delay_span_bug(sp, "missing binding mode"); . None . }) 64,436 ( 0.00%) } . . pub fn pat_binding_modes(&self) -> LocalTableInContext<'_, BindingMode> { 44,503 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_binding_modes } 14,197 ( 0.00%) } . . pub fn pat_binding_modes_mut(&mut self) -> LocalTableInContextMut<'_, BindingMode> { 21,344 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_binding_modes } 10,672 ( 0.00%) } . . pub fn pat_adjustments(&self) -> LocalTableInContext<'_, Vec>> { 91,060 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.pat_adjustments } 45,530 ( 0.00%) } . . pub fn pat_adjustments_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { 27,526 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.pat_adjustments } 13,763 ( 0.00%) } . . /// For a given closure, returns the iterator of `ty::CapturedPlace`s that are captured . /// by the closure. 4,196 ( 0.00%) pub fn closure_min_captures_flattened( . &self, . closure_def_id: DefId, . ) -> impl Iterator> { . 
self.closure_min_captures . .get(&closure_def_id) . .map(|closure_min_captures| closure_min_captures.values().flat_map(|v| v.iter())) . .into_iter() . .flatten() 5,245 ( 0.00%) } . . pub fn closure_kind_origins(&self) -> LocalTableInContext<'_, (Span, HirPlace<'tcx>)> { 1,560 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.closure_kind_origins } 780 ( 0.00%) } . . pub fn closure_kind_origins_mut( . &mut self, . ) -> LocalTableInContextMut<'_, (Span, HirPlace<'tcx>)> { . LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.closure_kind_origins } . } . . pub fn liberated_fn_sigs(&self) -> LocalTableInContext<'_, ty::FnSig<'tcx>> { 4,724 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.liberated_fn_sigs } 2,362 ( 0.00%) } . . pub fn liberated_fn_sigs_mut(&mut self) -> LocalTableInContextMut<'_, ty::FnSig<'tcx>> { 3,164 ( 0.00%) LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.liberated_fn_sigs } 1,582 ( 0.00%) } . . pub fn fru_field_types(&self) -> LocalTableInContext<'_, Vec>> { 1,560 ( 0.00%) LocalTableInContext { hir_owner: self.hir_owner, data: &self.fru_field_types } 780 ( 0.00%) } . . pub fn fru_field_types_mut(&mut self) -> LocalTableInContextMut<'_, Vec>> { . LocalTableInContextMut { hir_owner: self.hir_owner, data: &mut self.fru_field_types } . } . 297 ( 0.00%) pub fn is_coercion_cast(&self, hir_id: hir::HirId) -> bool { 99 ( 0.00%) validate_hir_id_for_typeck_results(self.hir_owner, hir_id); . self.coercion_casts.contains(&hir_id.local_id) 198 ( 0.00%) } . . pub fn set_coercion_cast(&mut self, id: ItemLocalId) { . self.coercion_casts.insert(id); . } . . pub fn coercion_casts(&self) -> &ItemLocalSet { 780 ( 0.00%) &self.coercion_casts 780 ( 0.00%) } . } . . impl<'a, 'tcx> HashStable> for TypeckResults<'tcx> { . fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { . let ty::TypeckResults { . hir_owner, . ref type_dependent_defs, . ref field_indices, -- line 764 ---------------------------------------- -- line 819 ---------------------------------------- . const START_INDEX = 0, . } . } . . /// Mapping of type annotation indices to canonical user type annotations. . pub type CanonicalUserTypeAnnotations<'tcx> = . IndexVec>; . 3,572 ( 0.00%) #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable, TypeFoldable, Lift)] . pub struct CanonicalUserTypeAnnotation<'tcx> { . pub user_ty: CanonicalUserType<'tcx>, . pub span: Span, 1,882 ( 0.00%) pub inferred_ty: Ty<'tcx>, . } . . /// Canonicalized user type annotation. . pub type CanonicalUserType<'tcx> = Canonical<'tcx, UserType<'tcx>>; . . impl<'tcx> CanonicalUserType<'tcx> { . /// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`, . /// i.e., each thing is mapped to a canonical variable with the same index. 8,466 ( 0.00%) pub fn is_identity(&self) -> bool { 16,932 ( 0.00%) match self.value { . UserType::Ty(_) => false, . UserType::TypeOf(_, user_substs) => { 8,466 ( 0.00%) if user_substs.user_self_ty.is_some() { . return false; . } . . iter::zip(user_substs.substs, BoundVar::new(0)..).all(|(kind, cvar)| { . match kind.unpack() { 13,482 ( 0.00%) GenericArgKind::Type(ty) => match ty.kind() { . ty::Bound(debruijn, b) => { . // We only allow a `ty::INNERMOST` index in substitutions. 6,727 ( 0.00%) assert_eq!(*debruijn, ty::INNERMOST); . cvar == b.var . } . _ => false, . }, . 6,022 ( 0.00%) GenericArgKind::Lifetime(r) => match r { . ty::ReLateBound(debruijn, br) => { . 
// We only allow a `ty::INNERMOST` index in substitutions. . assert_eq!(*debruijn, ty::INNERMOST); . cvar == br.var . } . _ => false, . }, . -- line 867 ---------------------------------------- -- line 872 ---------------------------------------- . cvar == b . } . _ => false, . }, . } . }) . } . } 16,932 ( 0.00%) } . } . . /// A user-given type annotation attached to a constant. These arise . /// from constants that are named via paths, like `Foo::::new` and . /// so forth. 11,292 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, TyEncodable, TyDecodable)] 96,808 ( 0.00%) #[derive(HashStable, TypeFoldable, Lift)] . pub enum UserType<'tcx> { . Ty(Ty<'tcx>), . . /// The canonical type is the result of `type_of(def_id)` with the . /// given substitutions applied. . TypeOf(DefId, UserSubsts<'tcx>), . } . . impl<'tcx> CommonTypes<'tcx> { . fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { 111 ( 0.00%) let mk = |ty| interners.intern_ty(ty); . . CommonTypes { . unit: mk(Tuple(List::empty())), . bool: mk(Bool), . char: mk(Char), . never: mk(Never), . isize: mk(Int(ty::IntTy::Isize)), . i8: mk(Int(ty::IntTy::I8)), -- line 906 ---------------------------------------- -- line 921 ---------------------------------------- . . trait_object_dummy_self: mk(Infer(ty::FreshTy(0))), . } . } . } . . impl<'tcx> CommonLifetimes<'tcx> { . fn new(interners: &CtxtInterners<'tcx>) -> CommonLifetimes<'tcx> { 67 ( 0.00%) let mk = |r| interners.region.intern(r, |r| Interned(interners.arena.alloc(r))).0; . . CommonLifetimes { 5 ( 0.00%) re_root_empty: mk(RegionKind::ReEmpty(ty::UniverseIndex::ROOT)), 5 ( 0.00%) re_static: mk(RegionKind::ReStatic), 5 ( 0.00%) re_erased: mk(RegionKind::ReErased), . } . } . } . . impl<'tcx> CommonConsts<'tcx> { . fn new(interners: &CtxtInterners<'tcx>, types: &CommonTypes<'tcx>) -> CommonConsts<'tcx> { 6 ( 0.00%) let mk_const = |c| interners.const_.intern(c, |c| Interned(interners.arena.alloc(c))).0; . . CommonConsts { . unit: mk_const(ty::Const { . val: ty::ConstKind::Value(ConstValue::Scalar(Scalar::ZST)), . ty: types.unit, . }), . } . } -- line 949 ---------------------------------------- -- line 1045 ---------------------------------------- . . /// Stores memory for globals (statics/consts). . pub(crate) alloc_map: Lock>, . . output_filenames: Arc, . } . . impl<'tcx> TyCtxt<'tcx> { 33,520 ( 0.00%) pub fn typeck_opt_const_arg( . self, . def: ty::WithOptConstParam, . ) -> &'tcx TypeckResults<'tcx> { 13,408 ( 0.00%) if let Some(param_did) = def.const_param_did { . self.typeck_const_arg((def.did, param_did)) . } else { . self.typeck(def.did) . } 26,816 ( 0.00%) } . 2,475 ( 0.00%) pub fn alloc_steal_thir(self, thir: Thir<'tcx>) -> &'tcx Steal> { 825 ( 0.00%) self.arena.alloc(Steal::new(thir)) 3,300 ( 0.00%) } . 12,545 ( 0.00%) pub fn alloc_steal_mir(self, mir: Body<'tcx>) -> &'tcx Steal> { 2,509 ( 0.00%) self.arena.alloc(Steal::new(mir)) 17,563 ( 0.00%) } . 3,300 ( 0.00%) pub fn alloc_steal_promoted( . self, . promoted: IndexVec>, . ) -> &'tcx Steal>> { 825 ( 0.00%) self.arena.alloc(Steal::new(promoted)) 4,125 ( 0.00%) } . 1,557 ( 0.00%) pub fn alloc_adt_def( . self, . did: DefId, . kind: AdtKind, . variants: IndexVec, . repr: ReprOptions, . ) -> &'tcx ty::AdtDef { 2,768 ( 0.00%) self.intern_adt_def(ty::AdtDef::new(self, did, kind, variants, repr)) 692 ( 0.00%) } . . /// Allocates a read-only byte or string literal for `mir::interpret`. 5 ( 0.00%) pub fn allocate_bytes(self, bytes: &[u8]) -> interpret::AllocId { . // Create an allocation that just contains these bytes. 
1 ( 0.00%) let alloc = interpret::Allocation::from_bytes_byte_aligned_immutable(bytes); 13 ( 0.00%) let alloc = self.intern_const_alloc(alloc); . self.create_memory_alloc(alloc) 5 ( 0.00%) } . . /// Returns a range of the start/end indices specified with the . /// `rustc_layout_scalar_valid_range` attribute. . // FIXME(eddyb) this is an awkward spot for this method, maybe move it? 31,976 ( 0.00%) pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound, Bound) { 15,988 ( 0.00%) let attrs = self.get_attrs(def_id); 67,949 ( 0.00%) let get = |name| { 15,988 ( 0.00%) let attr = match attrs.iter().find(|a| a.has_name(name)) { . Some(attr) => attr, . None => return Bound::Unbounded, . }; . debug!("layout_scalar_valid_range: attr={:?}", attr); 64 ( 0.00%) if let Some( . &[ . ast::NestedMetaItem::Literal(ast::Lit { 32 ( 0.00%) kind: ast::LitKind::Int(a, _), .. . }), . ], 16 ( 0.00%) ) = attr.meta_item_list().as_deref() . { . Bound::Included(a) . } else { . self.sess . .delay_span_bug(attr.span, "invalid rustc_layout_scalar_valid_range attribute"); . Bound::Unbounded . } 79,940 ( 0.00%) }; 23,982 ( 0.00%) ( 23,982 ( 0.00%) get(sym::rustc_layout_scalar_valid_range_start), 11,991 ( 0.00%) get(sym::rustc_layout_scalar_valid_range_end), . ) 31,976 ( 0.00%) } . . pub fn lift>(self, value: T) -> Option { . value.lift_to_tcx(self) . } . . /// Creates a type context and call the closure with a `TyCtxt` reference . /// to the context. The closure enforces that the type context and any interned . /// value (types, substs, etc.) can only be used while `ty::tls` has a valid . /// reference to the context, to allow formatting values that need it. 23 ( 0.00%) pub fn create_global_ctxt( . s: &'tcx Session, . lint_store: Lrc, . arena: &'tcx WorkerLocal>, . resolutions: ty::ResolverOutputs, . krate: &'tcx hir::Crate<'tcx>, . dep_graph: DepGraph, . on_disk_cache: Option<&'tcx dyn OnDiskCache<'tcx>>, . queries: &'tcx dyn query::QueryEngine<'tcx>, . query_kinds: &'tcx [DepKindStruct], . crate_name: &str, . output_filenames: OutputFilenames, . ) -> GlobalCtxt<'tcx> { 1 ( 0.00%) let data_layout = TargetDataLayout::parse(&s.target).unwrap_or_else(|err| { . s.fatal(&err); . }); . let interners = CtxtInterners::new(arena); . let common_types = CommonTypes::new(&interners); . let common_lifetimes = CommonLifetimes::new(&interners); . let common_consts = CommonConsts::new(&interners, &common_types); . 127 ( 0.00%) GlobalCtxt { . sess: s, 6 ( 0.00%) lint_store, . arena, 2 ( 0.00%) interners, 5 ( 0.00%) dep_graph, 4 ( 0.00%) untracked_resolutions: resolutions, 2 ( 0.00%) prof: s.prof.clone(), . types: common_types, . lifetimes: common_lifetimes, . consts: common_consts, . untracked_crate: krate, . on_disk_cache, . queries, . query_caches: query::QueryCaches::default(), . query_kinds, . ty_rcache: Default::default(), . pred_rcache: Default::default(), . selection_cache: Default::default(), . evaluation_cache: Default::default(), 1 ( 0.00%) crate_name: Symbol::intern(crate_name), . data_layout, . alloc_map: Lock::new(interpret::AllocMap::new()), . output_filenames: Arc::new(output_filenames), . } 9 ( 0.00%) } . . crate fn query_kind(self, k: DepKind) -> &'tcx DepKindStruct { . &self.query_kinds[k as usize] . } . . /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used. . #[track_caller] . pub fn ty_error(self) -> Ty<'tcx> { -- line 1191 ---------------------------------------- -- line 1217 ---------------------------------------- . ty: Ty<'tcx>, . span: S, . msg: &str, . 
) -> &'tcx Const<'tcx> { . self.sess.delay_span_bug(span, msg); . self.mk_const(ty::Const { val: ty::ConstKind::Error(DelaySpanBugEmitted(())), ty }) . } . 770 ( 0.00%) pub fn consider_optimizing String>(self, msg: T) -> bool { 110 ( 0.00%) let cname = self.crate_name(LOCAL_CRATE); 880 ( 0.00%) self.sess.consider_optimizing(cname.as_str(), msg) 880 ( 0.00%) } . . /// Obtain all lang items of this crate and all dependencies (recursively) 1,826,377 ( 0.03%) pub fn lang_items(self) -> &'tcx rustc_hir::lang_items::LanguageItems { . self.get_lang_items(()) 2,348,199 ( 0.04%) } . . /// Obtain the given diagnostic item's `DefId`. Use `is_diagnostic_item` if you just want to . /// compare against another `DefId`, since `is_diagnostic_item` is cheaper. 651 ( 0.00%) pub fn get_diagnostic_item(self, name: Symbol) -> Option { . self.all_diagnostic_items(()).name_to_id.get(&name).copied() 744 ( 0.00%) } . . /// Obtain the diagnostic item's name 110,243 ( 0.00%) pub fn get_diagnostic_name(self, id: DefId) -> Option { . self.diagnostic_items(id.krate).id_to_name.get(&id).copied() 125,992 ( 0.00%) } . . /// Check whether the diagnostic item with the given `name` has the given `DefId`. 74,858 ( 0.00%) pub fn is_diagnostic_item(self, name: Symbol, did: DefId) -> bool { . self.diagnostic_items(did.krate).name_to_id.get(&name) == Some(&did) 85,552 ( 0.00%) } . 22,561 ( 0.00%) pub fn stability(self) -> &'tcx stability::Index<'tcx> { . self.stability_index(()) 29,007 ( 0.00%) } . 930,636 ( 0.02%) pub fn features(self) -> &'tcx rustc_feature::Features { . self.features_query(()) 1,063,584 ( 0.02%) } . 915 ( 0.00%) pub fn def_key(self, id: DefId) -> rustc_hir::definitions::DefKey { . // Accessing the DefKey is ok, since it is part of DefPathHash. 146,902 ( 0.00%) if let Some(id) = id.as_local() { . self.untracked_resolutions.definitions.def_key(id) . } else { 44,987 ( 0.00%) self.untracked_resolutions.cstore.def_key(id) . } 1,830 ( 0.00%) } . . /// Converts a `DefId` into its fully expanded `DefPath` (every . /// `DefId` is really just an interned `DefPath`). . /// . /// Note that if `id` is not local to this crate, the result will . /// be a non-local `DefPath`. . pub fn def_path(self, id: DefId) -> rustc_hir::definitions::DefPath { . // Accessing the DefPath is ok, since it is part of DefPathHash. -- line 1274 ---------------------------------------- -- line 1277 ---------------------------------------- . } else { . self.untracked_resolutions.cstore.def_path(id) . } . } . . #[inline] . pub fn def_path_hash(self, def_id: DefId) -> rustc_hir::definitions::DefPathHash { . // Accessing the DefPathHash is ok, it is incr. comp. stable. 9,593 ( 0.00%) if let Some(def_id) = def_id.as_local() { . self.untracked_resolutions.definitions.def_path_hash(def_id) . } else { 248 ( 0.00%) self.untracked_resolutions.cstore.def_path_hash(def_id) . } . } . . #[inline] . pub fn stable_crate_id(self, crate_num: CrateNum) -> StableCrateId { . if crate_num == LOCAL_CRATE { . self.sess.local_stable_crate_id() . } else { -- line 1296 ---------------------------------------- -- line 1353 ---------------------------------------- . &(format!("{:08x}", stable_crate_id.to_u64()))[..4], . self.def_path(def_id).to_string_no_crate_verbose() . ) . } . . /// Note that this is *untracked* and should only be used within the query . /// system if the result is otherwise tracked through queries . pub fn cstore_untracked(self) -> &'tcx ty::CrateStoreDyn { 63,022 ( 0.00%) &*self.untracked_resolutions.cstore 31,511 ( 0.00%) } . . 
/// Note that this is *untracked* and should only be used within the query . /// system if the result is otherwise tracked through queries . pub fn definitions_untracked(self) -> &'tcx hir::definitions::Definitions { . &self.untracked_resolutions.definitions . } . . #[inline(always)] . pub fn create_stable_hashing_context(self) -> StableHashingContext<'tcx> { 35 ( 0.00%) let resolutions = &self.gcx.untracked_resolutions; 174 ( 0.00%) StableHashingContext::new(self.sess, &resolutions.definitions, &*resolutions.cstore) . } . . #[inline(always)] . pub fn create_no_span_stable_hashing_context(self) -> StableHashingContext<'tcx> { . let resolutions = &self.gcx.untracked_resolutions; . StableHashingContext::ignore_spans( . self.sess, . &resolutions.definitions, -- line 1381 ---------------------------------------- -- line 1390 ---------------------------------------- . /// If `true`, we should use the MIR-based borrowck, but also . /// fall back on the AST borrowck if the MIR-based one errors. . pub fn migrate_borrowck(self) -> bool { . self.borrowck_mode().migrate() . } . . /// What mode(s) of borrowck should we run? AST? MIR? both? . /// (Also considers the `#![feature(nll)]` setting.) 1,560 ( 0.00%) pub fn borrowck_mode(self) -> BorrowckMode { . // Here are the main constraints we need to deal with: . // . // 1. An opts.borrowck_mode of `BorrowckMode::Migrate` is . // synonymous with no `-Z borrowck=...` flag at all. . // . // 2. We want to allow developers on the Nightly channel . // to opt back into the "hard error" mode for NLL, . // (which they can do via specifying `#![feature(nll)]` -- line 1406 ---------------------------------------- -- line 1413 ---------------------------------------- . // errors. (To simplify the code here, it now even overrides . // a user's attempt to specify `-Z borrowck=compare`, which . // we arguably do not need anymore and should remove.) . // . // * Otherwise, if no `-Z borrowck=...` then use migrate mode . // . // * Otherwise, use the behavior requested via `-Z borrowck=...` . 2,340 ( 0.00%) if self.features().nll { . return BorrowckMode::Mir; . } . 780 ( 0.00%) self.sess.opts.borrowck_mode 3,120 ( 0.00%) } . . /// If `true`, we should use lazy normalization for constants, otherwise . /// we still evaluate them eagerly. . #[inline] . pub fn lazy_normalization(self) -> bool { 112 ( 0.00%) let features = self.features(); . // Note: We only use lazy normalization for generic const expressions. 30 ( 0.00%) features.generic_const_exprs . } . . #[inline] . pub fn local_crate_exports_generics(self) -> bool { . debug_assert!(self.sess.opts.share_generics()); . . self.sess.crate_types().iter().any(|crate_type| { . match crate_type { -- line 1442 ---------------------------------------- -- line 1554 ---------------------------------------- . self.mk_imm_ref( . self.lifetimes.re_static, . self.type_of(self.require_lang_item(LangItem::PanicLocation, None)) . .subst(self, self.mk_substs([self.lifetimes.re_static.into()].iter())), . ) . } . . /// Returns a displayable description and article for the given `def_id` (e.g. `("a", "struct")`). 6,017 ( 0.00%) pub fn article_and_description(self, def_id: DefId) -> (&'static str, &'static str) { 4,376 ( 0.00%) match self.def_kind(def_id) { . DefKind::Generator => match self.generator_kind(def_id).unwrap() { . rustc_hir::GeneratorKind::Async(..) => ("an", "async closure"), . rustc_hir::GeneratorKind::Gen => ("a", "generator"), . }, 9,846 ( 0.00%) def_kind => (def_kind.article(), def_kind.descr(def_id)), . } 4,923 ( 0.00%) } . . 
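A minimal usage sketch of the diagnostic-item accessors annotated a little above. It only compiles against rustc's internal crates, and the helper function, its name, and the choice of sym::Option are illustrative assumptions rather than anything taken from this profile; the only TyCtxt call used is the one shown in the annotated source.

    use rustc_middle::ty::TyCtxt;
    use rustc_span::{def_id::DefId, sym};

    // Hypothetical helper: check whether `def_id` refers to core's Option type.
    // The doc comment above recommends is_diagnostic_item over get_diagnostic_item
    // when the goal is just a DefId comparison, since it skips the extra lookup.
    fn is_option_def(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
        tcx.is_diagnostic_item(sym::Option, def_id)
    }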
pub fn type_length_limit(self) -> Limit { . self.limits(()).type_length_limit . } . 563,612 ( 0.01%) pub fn recursion_limit(self) -> Limit { . self.limits(()).recursion_limit 644,128 ( 0.01%) } . . pub fn move_size_limit(self) -> Limit { . self.limits(()).move_size_limit . } . 448 ( 0.00%) pub fn const_eval_limit(self) -> Limit { . self.limits(()).const_eval_limit 512 ( 0.00%) } . . pub fn all_traits(self) -> impl Iterator + 'tcx { . iter::once(LOCAL_CRATE) . .chain(self.crates(()).iter().copied()) . .flat_map(move |cnum| self.traits_in_crate(cnum).iter().copied()) . } . } . -- line 1594 ---------------------------------------- -- line 1684 ---------------------------------------- . /// This is the implicit state of rustc. It contains the current . /// `TyCtxt` and query. It is updated when creating a local interner or . /// executing a new query. Whenever there's a `TyCtxt` value available . /// you should also have access to an `ImplicitCtxt` through the functions . /// in this module. . #[derive(Clone)] . pub struct ImplicitCtxt<'a, 'tcx> { . /// The current `TyCtxt`. 1 ( 0.00%) pub tcx: TyCtxt<'tcx>, . . /// The current query job, if any. This is updated by `JobOwner::start` in . /// `ty::query::plumbing` when executing a query. . pub query: Option>, . . /// Where to store diagnostics for the current query job, if any. . /// This is updated by `JobOwner::start` in `ty::query::plumbing` when executing a query. . pub diagnostics: Option<&'a Lock>>, -- line 1700 ---------------------------------------- -- line 1703 ---------------------------------------- . pub layout_depth: usize, . . /// The current dep graph task. This is used to add dependencies to queries . /// when executing them. . pub task_deps: TaskDepsRef<'a>, . } . . impl<'a, 'tcx> ImplicitCtxt<'a, 'tcx> { 5 ( 0.00%) pub fn new(gcx: &'tcx GlobalCtxt<'tcx>) -> Self { . let tcx = TyCtxt { gcx }; 25 ( 0.00%) ImplicitCtxt { . tcx, . query: None, . diagnostics: None, . layout_depth: 0, . task_deps: TaskDepsRef::Ignore, . } 5 ( 0.00%) } . } . . /// Sets Rayon's thread-local variable, which is preserved for Rayon jobs . /// to `value` during the call to `f`. It is restored to its previous value after. . /// This is used to set the pointer to the new `ImplicitCtxt`. . #[cfg(parallel_compiler)] . #[inline] . fn set_tlv R, R>(value: usize, f: F) -> R { -- line 1728 ---------------------------------------- -- line 1745 ---------------------------------------- . . /// Sets TLV to `value` during the call to `f`. . /// It is restored to its previous value after. . /// This is used to set the pointer to the new `ImplicitCtxt`. . #[cfg(not(parallel_compiler))] . #[inline] . fn set_tlv R, R>(value: usize, f: F) -> R { . let old = get_tlv(); 142,567 ( 0.00%) let _reset = rustc_data_structures::OnDrop(move || TLV.with(|tlv| tlv.set(old))); . TLV.with(|tlv| tlv.set(value)); . f() . } . . /// Gets the pointer to the current `ImplicitCtxt`. . #[cfg(not(parallel_compiler))] . #[inline] . fn get_tlv() -> usize { -- line 1761 ---------------------------------------- -- line 1763 ---------------------------------------- . } . . /// Sets `context` as the new current `ImplicitCtxt` for the duration of the function `f`. . #[inline] . pub fn enter_context<'a, 'tcx, F, R>(context: &ImplicitCtxt<'a, 'tcx>, f: F) -> R . where . F: FnOnce(&ImplicitCtxt<'a, 'tcx>) -> R, . { 1 ( 0.00%) set_tlv(context as *const _ as usize, || f(&context)) . } . . /// Allows access to the current `ImplicitCtxt` in a closure if one is available. . #[inline] . 
pub fn with_context_opt(f: F) -> R . where . F: for<'a, 'tcx> FnOnce(Option<&ImplicitCtxt<'a, 'tcx>>) -> R, . { . let context = get_tlv(); 546,263 ( 0.01%) if context == 0 { . f(None) . } else { . // We could get an `ImplicitCtxt` pointer from another thread. . // Ensure that `ImplicitCtxt` is `Sync`. . sync::assert_sync::>(); . . unsafe { f(Some(&*(context as *const ImplicitCtxt<'_, '_>))) } . } -- line 1789 ---------------------------------------- -- line 1805 ---------------------------------------- . /// This will panic if you pass it a `TyCtxt` which is different from the current . /// `ImplicitCtxt`'s `tcx` field. . #[inline] . pub fn with_related_context<'tcx, F, R>(tcx: TyCtxt<'tcx>, f: F) -> R . where . F: FnOnce(&ImplicitCtxt<'_, 'tcx>) -> R, . { . with_context(|context| unsafe { 269,177 ( 0.00%) assert!(ptr_eq(context.tcx.gcx, tcx.gcx)); . let context: &ImplicitCtxt<'_, '_> = mem::transmute(context); 269,034 ( 0.00%) f(context) . }) . } . . /// Allows access to the `TyCtxt` in the current `ImplicitCtxt`. . /// Panics if there is no `ImplicitCtxt` available. . #[inline] . pub fn with(f: F) -> R . where -- line 1823 ---------------------------------------- -- line 1981 ---------------------------------------- . fn into_pointer(&self) -> *const () { . self.0 as *const _ as *const () . } . } . . #[allow(rustc::usage_of_ty_tykind)] . impl<'tcx> Borrow> for Interned<'tcx, TyS<'tcx>> { . fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> { 687,968 ( 0.01%) &self.0.kind() . } . } . . impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { . fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0.kind() == other.0.kind() -- line 1997 ---------------------------------------- -- line 1998 ---------------------------------------- . } . } . . impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {} . . impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 688,116 ( 0.01%) self.0.kind().hash(s) . } . } . . impl<'tcx> Borrow>> for Interned<'tcx, PredicateInner<'tcx>> { . fn borrow<'a>(&'a self) -> &'a Binder<'tcx, PredicateKind<'tcx>> { 181,469 ( 0.00%) &self.0.kind . } . } . . impl<'tcx> PartialEq for Interned<'tcx, PredicateInner<'tcx>> { . fn eq(&self, other: &Interned<'tcx, PredicateInner<'tcx>>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0.kind == other.0.kind -- line 2020 ---------------------------------------- -- line 2021 ---------------------------------------- . } . } . . impl<'tcx> Eq for Interned<'tcx, PredicateInner<'tcx>> {} . . impl<'tcx> Hash for Interned<'tcx, PredicateInner<'tcx>> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 114,684 ( 0.00%) self.0.kind.hash(s) . } . } . . impl<'tcx, T> Borrow<[T]> for Interned<'tcx, List> { . fn borrow<'a>(&'a self) -> &'a [T] { 719,964 ( 0.01%) &self.0[..] . } . } . . impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, List> { . fn eq(&self, other: &Interned<'tcx, List>) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals . // `x == y`. . self.0[..] == other.0[..] -- line 2043 ---------------------------------------- -- line 2044 ---------------------------------------- . } . } . . impl<'tcx, T: Eq> Eq for Interned<'tcx, List> {} . . impl<'tcx, T: Hash> Hash for Interned<'tcx, List> { . fn hash(&self, s: &mut H) { . 
// The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`. 145,663 ( 0.00%) self.0[..].hash(s) . } . } . . macro_rules! direct_interners { . ($($name:ident: $method:ident($ty:ty),)+) => { . $(impl<'tcx> Borrow<$ty> for Interned<'tcx, $ty> { . fn borrow<'a>(&'a self) -> &'a $ty { 232,928 ( 0.00%) &self.0 . } . } . . impl<'tcx> PartialEq for Interned<'tcx, $ty> { . fn eq(&self, other: &Self) -> bool { . // The `Borrow` trait requires that `x.borrow() == y.borrow()` . // equals `x == y`. . self.0 == other.0 -- line 2068 ---------------------------------------- -- line 2070 ---------------------------------------- . } . . impl<'tcx> Eq for Interned<'tcx, $ty> {} . . impl<'tcx> Hash for Interned<'tcx, $ty> { . fn hash(&self, s: &mut H) { . // The `Borrow` trait requires that `x.borrow().hash(s) == . // x.hash(s)`. 113,651 ( 0.00%) self.0.hash(s) . } . } . . impl<'tcx> TyCtxt<'tcx> { 2,064,703 ( 0.03%) pub fn $method(self, v: $ty) -> &'tcx $ty { 1,065,925 ( 0.02%) self.interners.$name.intern(v, |v| { 29,229 ( 0.00%) Interned(self.interners.arena.alloc(v)) . }).0 2,321,937 ( 0.04%) } . })+ . } . } . . direct_interners! { . region: mk_region(RegionKind), . const_: mk_const(Const<'tcx>), . const_allocation: intern_const_alloc(Allocation), -- line 2095 ---------------------------------------- -- line 2097 ---------------------------------------- . adt_def: intern_adt_def(AdtDef), . stability: intern_stability(attr::Stability), . const_stability: intern_const_stability(attr::ConstStability), . } . . macro_rules! slice_interners { . ($($field:ident: $method:ident($ty:ty)),+ $(,)?) => ( . impl<'tcx> TyCtxt<'tcx> { 8,234,560 ( 0.14%) $(pub fn $method(self, v: &[$ty]) -> &'tcx List<$ty> { . self.interners.$field.intern_ref(v, || { . Interned(List::from_arena(&*self.arena, v)) . }).0 7,355,691 ( 0.12%) })+ . } . ); . } . . slice_interners!( . type_list: _intern_type_list(Ty<'tcx>), . substs: _intern_substs(GenericArg<'tcx>), . canonical_var_infos: _intern_canonical_var_infos(CanonicalVarInfo<'tcx>), -- line 2117 ---------------------------------------- -- line 2129 ---------------------------------------- . /// unsafe. . pub fn safe_to_unsafe_fn_ty(self, sig: PolyFnSig<'tcx>) -> Ty<'tcx> { . assert_eq!(sig.unsafety(), hir::Unsafety::Normal); . self.mk_fn_ptr(sig.map_bound(|sig| ty::FnSig { unsafety: hir::Unsafety::Unsafe, ..sig })) . } . . /// Given the def_id of a Trait `trait_def_id` and the name of an associated item `assoc_name` . /// returns true if the `trait_def_id` defines an associated item of name `assoc_name`. 228 ( 0.00%) pub fn trait_may_define_assoc_type(self, trait_def_id: DefId, assoc_name: Ident) -> bool { . self.super_traits_of(trait_def_id).any(|trait_did| { . self.associated_items(trait_did) . .find_by_name_and_kind(self, assoc_name, ty::AssocKind::Type, trait_did) . .is_some() . }) 171 ( 0.00%) } . . /// Computes the def-ids of the transitive supertraits of `trait_def_id`. This (intentionally) . /// does not compute the full elaborated super-predicates but just the set of def-ids. It is used . /// to identify which traits may define a given associated type to help avoid cycle errors. . /// Returns a `DefId` iterator. . fn super_traits_of(self, trait_def_id: DefId) -> impl Iterator + 'tcx { . let mut set = FxHashSet::default(); 38 ( 0.00%) let mut stack = vec![trait_def_id]; . . set.insert(trait_def_id); . 171 ( 0.00%) iter::from_fn(move || -> Option { 52 ( 0.00%) let trait_did = stack.pop()?; 26 ( 0.00%) let generic_predicates = self.super_predicates_of(trait_did); . . 
for (predicate, _) in generic_predicates.predicates { 21 ( 0.00%) if let ty::PredicateKind::Trait(data) = predicate.kind().skip_binder() { 14 ( 0.00%) if set.insert(data.def_id()) { . stack.push(data.def_id()); . } . } . } . . Some(trait_did) . }) . } -- line 2169 ---------------------------------------- -- line 2188 ---------------------------------------- . self.mk_fn_sig(params_iter, s.output(), s.c_variadic, unsafety, abi::Abi::Rust) . }) . } . . /// Same a `self.mk_region(kind)`, but avoids accessing the interners if . /// `*r == kind`. . #[inline] . pub fn reuse_or_mk_region(self, r: Region<'tcx>, kind: RegionKind) -> Region<'tcx> { 253,316 ( 0.00%) if *r == kind { r } else { self.mk_region(kind) } . } . . #[allow(rustc::usage_of_ty_tykind)] . #[inline] . pub fn mk_ty(self, st: TyKind<'tcx>) -> Ty<'tcx> { 5,442,123 ( 0.09%) self.interners.intern_ty(st) . } . . #[inline] . pub fn mk_predicate(self, binder: Binder<'tcx, PredicateKind<'tcx>>) -> Predicate<'tcx> { 2,883,898 ( 0.05%) let inner = self.interners.intern_predicate(binder); . Predicate { inner } . } . . #[inline] 2,576,888 ( 0.04%) pub fn reuse_or_mk_predicate( . self, . pred: Predicate<'tcx>, . binder: Binder<'tcx, PredicateKind<'tcx>>, . ) -> Predicate<'tcx> { 181,850 ( 0.00%) if pred.kind() != binder { self.mk_predicate(binder) } else { pred } 2,576,888 ( 0.04%) } . . pub fn mk_mach_int(self, tm: IntTy) -> Ty<'tcx> { 1,180 ( 0.00%) match tm { . IntTy::Isize => self.types.isize, . IntTy::I8 => self.types.i8, . IntTy::I16 => self.types.i16, . IntTy::I32 => self.types.i32, . IntTy::I64 => self.types.i64, . IntTy::I128 => self.types.i128, . } 1,180 ( 0.00%) } . . pub fn mk_mach_uint(self, tm: UintTy) -> Ty<'tcx> { 3,308 ( 0.00%) match tm { . UintTy::Usize => self.types.usize, . UintTy::U8 => self.types.u8, . UintTy::U16 => self.types.u16, . UintTy::U32 => self.types.u32, . UintTy::U64 => self.types.u64, . UintTy::U128 => self.types.u128, . } 3,308 ( 0.00%) } . . pub fn mk_mach_float(self, tm: FloatTy) -> Ty<'tcx> { . match tm { . FloatTy::F32 => self.types.f32, . FloatTy::F64 => self.types.f64, . } . } . . #[inline] . pub fn mk_static_str(self) -> Ty<'tcx> { 2,674 ( 0.00%) self.mk_imm_ref(self.lifetimes.re_static, self.types.str_) . } . . #[inline] . pub fn mk_adt(self, def: &'tcx AdtDef, substs: SubstsRef<'tcx>) -> Ty<'tcx> { . // Take a copy of substs so that we own the vectors inside. . self.mk_ty(Adt(def, substs)) . } . . #[inline] . pub fn mk_foreign(self, def_id: DefId) -> Ty<'tcx> { . self.mk_ty(Foreign(def_id)) . } . 48 ( 0.00%) fn mk_generic_adt(self, wrapper_def_id: DefId, ty_param: Ty<'tcx>) -> Ty<'tcx> { . let adt_def = self.adt_def(wrapper_def_id); . let substs = 90 ( 0.00%) InternalSubsts::for_item(self, wrapper_def_id, |param, substs| match param.kind { . GenericParamDefKind::Lifetime | GenericParamDefKind::Const { .. } => bug!(), 6 ( 0.00%) GenericParamDefKind::Type { has_default, .. } => { 24 ( 0.00%) if param.index == 0 { . ty_param.into() . } else { 6 ( 0.00%) assert!(has_default); 18 ( 0.00%) self.type_of(param.def_id).subst(self, substs).into() . } . } . }); 6 ( 0.00%) self.mk_ty(Adt(adt_def, substs)) 48 ( 0.00%) } . . #[inline] . pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> { 30 ( 0.00%) let def_id = self.require_lang_item(LangItem::OwnedBox, None); 66 ( 0.00%) self.mk_generic_adt(def_id, ty) . } . . #[inline] . pub fn mk_lang_item(self, ty: Ty<'tcx>, item: LangItem) -> Option> { . let def_id = self.lang_items().require(item).ok()?; . Some(self.mk_generic_adt(def_id, ty)) . } . 
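Most of the mk_* constructors above bottom out in an interner, which is why the Hash, Eq and Borrow impls for Interned values show up in this profile at all. Below is a minimal standalone sketch of that hash-consing idea, using a plain HashSet over leaked boxes instead of rustc's arena-backed sharded maps; the names and types here are illustrative, not rustc's.

    use std::collections::HashSet;
    use std::hash::Hash;

    // Toy interner: structurally equal values share a single allocation,
    // so later equality checks can be pointer comparisons instead of deep ones.
    struct Interner<T: 'static> {
        set: HashSet<&'static T>,
    }

    impl<T: Eq + Hash + 'static> Interner<T> {
        fn new() -> Self {
            Interner { set: HashSet::new() }
        }

        fn intern(&mut self, value: T) -> &'static T {
            if let Some(&existing) = self.set.get(&value) {
                return existing; // hit: only a hash lookup, no new allocation
            }
            let leaked: &'static T = Box::leak(Box::new(value));
            self.set.insert(leaked);
            leaked
        }
    }

    fn main() {
        let mut types = Interner::new();
        let a = types.intern(String::from("Vec<u8>"));
        let b = types.intern(String::from("Vec<u8>"));
        // Same pointer: the second call hashed the value and reused the first allocation.
        assert!(std::ptr::eq(a, b));
    }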
-- line 2293 ---------------------------------------- -- line 2329 ---------------------------------------- . } . . #[inline] . pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { . self.mk_ptr(TypeAndMut { ty, mutbl: hir::Mutability::Not }) . } . . #[inline] 2,192 ( 0.00%) pub fn mk_array(self, ty: Ty<'tcx>, n: u64) -> Ty<'tcx> { . self.mk_ty(Array(ty, ty::Const::from_usize(self, n))) 2,288 ( 0.00%) } . . #[inline] . pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> { . self.mk_ty(Slice(ty)) . } . . #[inline] . pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> { 1,216 ( 0.00%) let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect(); 2,159 ( 0.00%) self.mk_ty(Tuple(self.intern_substs(&kinds))) . } . . pub fn mk_tup], Ty<'tcx>>>(self, iter: I) -> I::Output { 21,114 ( 0.00%) iter.intern_with(|ts| { 5,185 ( 0.00%) let kinds: Vec<_> = ts.iter().map(|&t| GenericArg::from(t)).collect(); 57,697 ( 0.00%) self.mk_ty(Tuple(self.intern_substs(&kinds))) 18,954 ( 0.00%) }) . } . . #[inline] . pub fn mk_unit(self) -> Ty<'tcx> { 707 ( 0.00%) self.types.unit . } . . #[inline] . pub fn mk_diverging_default(self) -> Ty<'tcx> { . if self.features().never_type_fallback { self.types.never } else { self.types.unit } . } . . #[inline] . pub fn mk_fn_def(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { . self.mk_ty(FnDef(def_id, substs)) . } . . #[inline] . pub fn mk_fn_ptr(self, fty: PolyFnSig<'tcx>) -> Ty<'tcx> { 2,482 ( 0.00%) self.mk_ty(FnPtr(fty)) . } . . #[inline] . pub fn mk_dynamic( . self, . obj: &'tcx List>>, . reg: ty::Region<'tcx>, . ) -> Ty<'tcx> { -- line 2384 ---------------------------------------- -- line 2412 ---------------------------------------- . . #[inline] . pub fn mk_ty_var(self, v: TyVid) -> Ty<'tcx> { . self.mk_ty_infer(TyVar(v)) . } . . #[inline] . pub fn mk_const_var(self, v: ConstVid<'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { 1,560 ( 0.00%) self.mk_const(ty::Const { val: ty::ConstKind::Infer(InferConst::Var(v)), ty }) . } . . #[inline] . pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> { . self.mk_ty_infer(IntVar(v)) . } . . #[inline] -- line 2428 ---------------------------------------- -- line 2445 ---------------------------------------- . self.mk_ty(Param(ParamTy { index, name })) . } . . #[inline] . pub fn mk_const_param(self, index: u32, name: Symbol, ty: Ty<'tcx>) -> &'tcx Const<'tcx> { . self.mk_const(ty::Const { val: ty::ConstKind::Param(ParamConst { index, name }), ty }) . } . 20,972 ( 0.00%) pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> GenericArg<'tcx> { 10,414 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => { 13,698 ( 0.00%) self.mk_region(ty::ReEarlyBound(param.to_early_bound_region_data())).into() . } 1,426 ( 0.00%) GenericParamDefKind::Type { .. } => self.mk_ty_param(param.index, param.name).into(), . GenericParamDefKind::Const { .. } => { . self.mk_const_param(param.index, param.name, self.type_of(param.def_id)).into() . } . } 20,972 ( 0.00%) } . . #[inline] . pub fn mk_opaque(self, def_id: DefId, substs: SubstsRef<'tcx>) -> Ty<'tcx> { . self.mk_ty(Opaque(def_id, substs)) . } . 360 ( 0.00%) pub fn mk_place_field(self, place: Place<'tcx>, f: Field, ty: Ty<'tcx>) -> Place<'tcx> { 360 ( 0.00%) self.mk_place_elem(place, PlaceElem::Field(f, ty)) 720 ( 0.00%) } . 4 ( 0.00%) pub fn mk_place_deref(self, place: Place<'tcx>) -> Place<'tcx> { 12 ( 0.00%) self.mk_place_elem(place, PlaceElem::Deref) 8 ( 0.00%) } . 54 ( 0.00%) pub fn mk_place_downcast( . self, . place: Place<'tcx>, . adt_def: &'tcx AdtDef, . 
variant_index: VariantIdx, . ) -> Place<'tcx> { 54 ( 0.00%) self.mk_place_elem( . place, 108 ( 0.00%) PlaceElem::Downcast(Some(adt_def.variants[variant_index].name), variant_index), . ) 108 ( 0.00%) } . . pub fn mk_place_downcast_unnamed( . self, . place: Place<'tcx>, . variant_index: VariantIdx, . ) -> Place<'tcx> { . self.mk_place_elem(place, PlaceElem::Downcast(None, variant_index)) . } -- line 2496 ---------------------------------------- -- line 2497 ---------------------------------------- . . pub fn mk_place_index(self, place: Place<'tcx>, index: Local) -> Place<'tcx> { . self.mk_place_elem(place, PlaceElem::Index(index)) . } . . /// This method copies `Place`'s projection, add an element and reintern it. Should not be used . /// to build a full `Place` it's just a convenient way to grab a projection and modify it in . /// flight. 3,762 ( 0.00%) pub fn mk_place_elem(self, place: Place<'tcx>, elem: PlaceElem<'tcx>) -> Place<'tcx> { . let mut projection = place.projection.to_vec(); . projection.push(elem); . . Place { local: place.local, projection: self.intern_place_elems(&projection) } 4,180 ( 0.00%) } . 13,454 ( 0.00%) pub fn intern_poly_existential_predicates( . self, . eps: &[ty::Binder<'tcx, ExistentialPredicate<'tcx>>], . ) -> &'tcx List>> { 1,922 ( 0.00%) assert!(!eps.is_empty()); . assert!( . eps.array_windows() 30 ( 0.00%) .all(|[a, b]| a.skip_binder().stable_cmp(self, &b.skip_binder()) . != Ordering::Greater) . ); 7,688 ( 0.00%) self._intern_poly_existential_predicates(eps) 15,376 ( 0.00%) } . . pub fn intern_predicates(self, preds: &[Predicate<'tcx>]) -> &'tcx List> { . // FIXME consider asking the input slice to be sorted to avoid . // re-interning permutations, in which case that would be asserted . // here. 15,977 ( 0.00%) if preds.is_empty() { . // The macro-generated method below asserts we don't intern an empty slice. . List::empty() . } else { 5,410 ( 0.00%) self._intern_predicates(preds) . } 21,232 ( 0.00%) } . . pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List> { 98,273 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_type_list(ts) } 450 ( 0.00%) } . . pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List> { 1,367,244 ( 0.02%) if ts.is_empty() { List::empty() } else { self._intern_substs(ts) } 128,344 ( 0.00%) } . . pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List { . if ps.is_empty() { List::empty() } else { self._intern_projs(ps) } . } . . pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List> { 41,819 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_place_elems(ts) } 23,050 ( 0.00%) } . . pub fn intern_canonical_var_infos( . self, . ts: &[CanonicalVarInfo<'tcx>], . ) -> CanonicalVarInfos<'tcx> { 89,207 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_canonical_var_infos(ts) } 410 ( 0.00%) } . . pub fn intern_bound_variable_kinds( . self, . ts: &[ty::BoundVariableKind], . ) -> &'tcx List { 8,218 ( 0.00%) if ts.is_empty() { List::empty() } else { self._intern_bound_variable_kinds(ts) } 11,162 ( 0.00%) } . . pub fn mk_fn_sig( . self, . inputs: I, . output: I::Item, . c_variadic: bool, . unsafety: hir::Unsafety, . abi: abi::Abi, . ) -> , ty::FnSig<'tcx>>>::Output . where . I: Iterator, ty::FnSig<'tcx>>>, . { . inputs.chain(iter::once(output)).intern_with(|xs| ty::FnSig { 6,342 ( 0.00%) inputs_and_output: self.intern_type_list(xs), 1,216 ( 0.00%) c_variadic, 1,757 ( 0.00%) unsafety, 1,350 ( 0.00%) abi, 440 ( 0.00%) }) . } . . pub fn mk_poly_existential_predicates< . 
I: InternAs< . [ty::Binder<'tcx, ExistentialPredicate<'tcx>>], . &'tcx List>>, . >, . >( . self, . iter: I, . ) -> I::Output { 7,678 ( 0.00%) iter.intern_with(|xs| self.intern_poly_existential_predicates(xs)) . } . . pub fn mk_predicates], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { 16 ( 0.00%) iter.intern_with(|xs| self.intern_predicates(xs)) . } . . pub fn mk_type_list], &'tcx List>>>(self, iter: I) -> I::Output { 6,383 ( 0.00%) iter.intern_with(|xs| self.intern_type_list(xs)) . } . . pub fn mk_substs], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { 786,915 ( 0.01%) iter.intern_with(|xs| self.intern_substs(xs)) . } . . pub fn mk_place_elems], &'tcx List>>>( . self, . iter: I, . ) -> I::Output { 126 ( 0.00%) iter.intern_with(|xs| self.intern_place_elems(xs)) . } . 60,919 ( 0.00%) pub fn mk_substs_trait(self, self_ty: Ty<'tcx>, rest: &[GenericArg<'tcx>]) -> SubstsRef<'tcx> { . self.mk_substs(iter::once(self_ty.into()).chain(rest.iter().cloned())) 121,838 ( 0.00%) } . . pub fn mk_bound_variable_kinds< . I: InternAs<[ty::BoundVariableKind], &'tcx List>, . >( . self, . iter: I, . ) -> I::Output { 38,002 ( 0.00%) iter.intern_with(|xs| self.intern_bound_variable_kinds(xs)) . } . . /// Walks upwards from `id` to find a node which might change lint levels with attributes. . /// It stops at `bound` and just returns it if reached. 576,086 ( 0.01%) pub fn maybe_lint_level_root_bounded(self, mut id: HirId, bound: HirId) -> HirId { 576,086 ( 0.01%) let hir = self.hir(); . loop { 1,155,228 ( 0.02%) if id == bound { . return bound; . } . 1,583,842 ( 0.03%) if hir.attrs(id).iter().any(|attr| Level::from_symbol(attr.name_or_empty()).is_some()) { . return id; . } . let next = hir.get_parent_node(id); 908,334 ( 0.02%) if next == id { . bug!("lint traversal reached the root of the crate"); . } . id = next; . } 822,980 ( 0.01%) } . 23,560 ( 0.00%) pub fn lint_level_at_node( . self, . lint: &'static Lint, . mut id: hir::HirId, . ) -> (Level, LintLevelSource) { . let sets = self.lint_levels(()); . loop { 147,952 ( 0.00%) if let Some(pair) = sets.level_and_source(lint, id, self.sess) { . return pair; . } 11,858 ( 0.00%) let next = self.hir().get_parent_node(id); 35,574 ( 0.00%) if next == id { . bug!("lint traversal reached the root of the crate"); . } . id = next; . } 23,560 ( 0.00%) } . 882 ( 0.00%) pub fn struct_span_lint_hir( . self, . lint: &'static Lint, . hir_id: HirId, . span: impl Into, . decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), . ) { 13,005 ( 0.00%) let (level, src) = self.lint_level_at_node(lint, hir_id); 3,513 ( 0.00%) struct_lint_level(self.sess, lint, level, src, Some(span.into()), decorate); 504 ( 0.00%) } . . pub fn struct_lint_node( . self, . lint: &'static Lint, . id: HirId, . decorate: impl for<'a> FnOnce(LintDiagnosticBuilder<'a>), . ) { . let (level, src) = self.lint_level_at_node(lint, id); . struct_lint_level(self.sess, lint, level, src, None, decorate); . } . 25,039 ( 0.00%) pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx [TraitCandidate]> { 7,154 ( 0.00%) let map = self.in_scope_traits_map(id.owner)?; . let candidates = map.get(&id.local_id)?; 7,154 ( 0.00%) Some(&*candidates) 28,616 ( 0.00%) } . 50,127 ( 0.00%) pub fn named_region(self, id: HirId) -> Option { . debug!(?id, "named_region"); . self.named_region_map(id.owner).and_then(|map| map.get(&id.local_id).cloned()) 64,449 ( 0.00%) } . 6,120 ( 0.00%) pub fn is_late_bound(self, id: HirId) -> bool { . 
self.is_late_bound_map(id.owner) 346 ( 0.00%) .map_or(false, |(owner, set)| owner == id.owner && set.contains(&id.local_id)) 5,440 ( 0.00%) } . 39,270 ( 0.00%) pub fn object_lifetime_defaults(self, id: HirId) -> Option> { . self.object_lifetime_defaults_map(id.owner) 50,490 ( 0.00%) } . 12,186 ( 0.00%) pub fn late_bound_vars(self, id: HirId) -> &'tcx List { . self.mk_bound_variable_kinds( . self.late_bound_vars_map(id.owner) . .and_then(|map| map.get(&id.local_id).cloned()) . .unwrap_or_else(|| { . bug!("No bound vars found for {:?} ({:?})", self.hir().node_to_string(id), id) . }) . .iter(), . ) 12,186 ( 0.00%) } . . pub fn lifetime_scope(self, id: HirId) -> Option { . self.lifetime_scope_map(id.owner).and_then(|mut map| map.remove(&id.local_id)) . } . . /// Whether the `def_id` counts as const fn in the current crate, considering all active . /// feature gates 290 ( 0.00%) pub fn is_const_fn(self, def_id: DefId) -> bool { 29 ( 0.00%) if self.is_const_fn_raw(def_id) { 10 ( 0.00%) match self.lookup_const_stability(def_id) { 20 ( 0.00%) Some(stability) if stability.level.is_unstable() => { . // has a `rustc_const_unstable` attribute, check whether the user enabled the . // corresponding feature gate. 8 ( 0.00%) self.features() . .declared_lib_features . .iter() . .any(|&(sym, _)| sym == stability.feature) . } . // functions without const stability are either stable user written . // const fn or the user is using feature gates and we thus don't . // care what they do . _ => true, . } . } else { . false . } 261 ( 0.00%) } . } . . impl<'tcx> TyCtxtAt<'tcx> { . /// Constructs a `TyKind::Error` type and registers a `delay_span_bug` to ensure it gets used. . #[track_caller] . pub fn ty_error(self) -> Ty<'tcx> { . self.tcx.ty_error_with_message(self.span, "TyKind::Error constructed but no error reported") . } -- line 2759 ---------------------------------------- -- line 2774 ---------------------------------------- . } . . impl InternAs<[T], R> for I . where . E: InternIteratorElement, . I: Iterator, . { . type Output = E::Output; 98,000 ( 0.00%) fn intern_with(self, f: F) -> Self::Output . where . F: FnOnce(&[T]) -> R, . { 3,235,347 ( 0.05%) E::intern_with(self, f) 102,276 ( 0.00%) } . } . . pub trait InternIteratorElement: Sized { . type Output; . fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; . } . . impl InternIteratorElement for T { . type Output = R; 307,628 ( 0.01%) fn intern_with, F: FnOnce(&[T]) -> R>( . mut iter: I, . f: F, . ) -> Self::Output { . // This code is hot enough that it's worth specializing for the most . // common length lists, to avoid the overhead of `SmallVec` creation. . // Lengths 0, 1, and 2 typically account for ~95% of cases. If . // `size_hint` is incorrect a panic will occur via an `unwrap` or an . // `assert`. 419,055 ( 0.01%) match iter.size_hint() { . (0, Some(0)) => { 3 ( 0.00%) assert!(iter.next().is_none()); . f(&[]) . } . (1, Some(1)) => { . let t0 = iter.next().unwrap(); 16 ( 0.00%) assert!(iter.next().is_none()); 66,098 ( 0.00%) f(&[t0]) . } . (2, Some(2)) => { . let t0 = iter.next().unwrap(); . let t1 = iter.next().unwrap(); 591 ( 0.00%) assert!(iter.next().is_none()); 47,812 ( 0.00%) f(&[t0, t1]) . } 2,524 ( 0.00%) _ => f(&iter.collect::>()), . } 358,830 ( 0.01%) } . } . . impl<'a, T, R> InternIteratorElement for &'a T . where . T: Clone + 'a, . { . type Output = R; 7,479 ( 0.00%) fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { . // This code isn't hot. 
176 ( 0.00%) f(&iter.cloned().collect::>()) 6,142 ( 0.00%) } . } . . impl InternIteratorElement for Result { . type Output = Result; 1,298,840 ( 0.02%) fn intern_with, F: FnOnce(&[T]) -> R>( . mut iter: I, . f: F, . ) -> Self::Output { . // This code is hot enough that it's worth specializing for the most . // common length lists, to avoid the overhead of `SmallVec` creation. . // Lengths 0, 1, and 2 typically account for ~95% of cases. If . // `size_hint` is incorrect a panic will occur via an `unwrap` or an . // `assert`, unless a failure happens first, in which case the result . // will be an error anyway. 977,358 ( 0.02%) Ok(match iter.size_hint() { . (0, Some(0)) => { . assert!(iter.next().is_none()); . f(&[]) . } . (1, Some(1)) => { 5,733 ( 0.00%) let t0 = iter.next().unwrap()?; . assert!(iter.next().is_none()); 66,196 ( 0.00%) f(&[t0]) . } . (2, Some(2)) => { 1,004 ( 0.00%) let t0 = iter.next().unwrap()?; 1,004 ( 0.00%) let t1 = iter.next().unwrap()?; 1,506 ( 0.00%) assert!(iter.next().is_none()); 101,904 ( 0.00%) f(&[t0, t1]) . } 4,625 ( 0.00%) _ => f(&iter.collect::, _>>()?), . }) 1,466,945 ( 0.02%) } . } . . // We are comparing types with different invariant lifetimes, so `ptr::eq` . // won't work for us. . fn ptr_eq(t: *const T, u: *const U) -> bool { 269,177 ( 0.00%) t as *const () == u as *const () . } . . pub fn provide(providers: &mut ty::query::Providers) { 2 ( 0.00%) providers.in_scope_traits_map = . |tcx, id| tcx.hir_crate(()).owners[id].as_ref().map(|owner_info| &owner_info.trait_map); 3 ( 0.00%) providers.resolutions = |tcx, ()| &tcx.untracked_resolutions; 2 ( 0.00%) providers.module_reexports = . |tcx, id| tcx.resolutions(()).reexport_map.get(&id).map(|v| &v[..]); 2 ( 0.00%) providers.crate_name = |tcx, id| { 1 ( 0.00%) assert_eq!(id, LOCAL_CRATE); 1 ( 0.00%) tcx.crate_name . }; 2 ( 0.00%) providers.maybe_unused_trait_import = . |tcx, id| tcx.resolutions(()).maybe_unused_trait_imports.contains(&id); 2 ( 0.00%) providers.maybe_unused_extern_crates = . |tcx, ()| &tcx.resolutions(()).maybe_unused_extern_crates[..]; 2 ( 0.00%) providers.names_imported_by_glob_use = |tcx, id| { . tcx.arena.alloc(tcx.resolutions(()).glob_map.get(&id).cloned().unwrap_or_default()) . }; . 11 ( 0.00%) providers.lookup_stability = |tcx, id| tcx.stability().local_stability(id.expect_local()); 2 ( 0.00%) providers.lookup_const_stability = . |tcx, id| tcx.stability().local_const_stability(id.expect_local()); 2 ( 0.00%) providers.lookup_deprecation_entry = 25,768 ( 0.00%) |tcx, id| tcx.stability().local_deprecation_entry(id.expect_local()); 2 ( 0.00%) providers.extern_mod_stmt_cnum = . |tcx, id| tcx.resolutions(()).extern_crate_map.get(&id).cloned(); 3 ( 0.00%) providers.output_filenames = |tcx, ()| tcx.output_filenames.clone(); 3 ( 0.00%) providers.features_query = |tcx, ()| tcx.sess.features_untracked(); 2 ( 0.00%) providers.is_panic_runtime = |tcx, cnum| { . assert_eq!(cnum, LOCAL_CRATE); . tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::panic_runtime) . }; 2 ( 0.00%) providers.is_compiler_builtins = |tcx, cnum| { . assert_eq!(cnum, LOCAL_CRATE); . tcx.sess.contains_name(tcx.hir().krate_attrs(), sym::compiler_builtins) . }; 2 ( 0.00%) providers.has_panic_handler = |tcx, cnum| { 1 ( 0.00%) assert_eq!(cnum, LOCAL_CRATE); . // We want to check if the panic handler was defined in this crate 3 ( 0.00%) tcx.lang_items().panic_impl().map_or(false, |did| did.is_local()) . }; . 
} 3,123,348 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs -------------------------------------------------------------------------------- Ir -- line 141 ---------------------------------------- . /// ``` . /// let a = [1, 2, 3]; . /// assert!(!a.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.39.0")] . #[inline] . pub const fn is_empty(&self) -> bool { 1,784,086 ( 0.03%) self.len() == 0 . } . . /// Returns the first element of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 157 ---------------------------------------- -- line 159 ---------------------------------------- . /// . /// let w: &[i32] = &[]; . /// assert_eq!(None, w.first()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn first(&self) -> Option<&T> { 12,212 ( 0.00%) if let [first, ..] = self { Some(first) } else { None } . } . . /// Returns a mutable pointer to the first element of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 175 ---------------------------------------- -- line 178 ---------------------------------------- . /// *first = 5; . /// } . /// assert_eq!(x, &[5, 1, 2]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")] . #[inline] . pub const fn first_mut(&mut self) -> Option<&mut T> { 20,020 ( 0.00%) if let [first, ..] = self { Some(first) } else { None } . } . . /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &[0, 1, 2]; -- line 194 ---------------------------------------- -- line 197 ---------------------------------------- . /// assert_eq!(first, &0); . /// assert_eq!(elements, &[1, 2]); . /// } . /// ``` . #[stable(feature = "slice_splits", since = "1.5.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn split_first(&self) -> Option<(&T, &[T])> { 18 ( 0.00%) if let [first, tail @ ..] = self { Some((first, tail)) } else { None } . } . . /// Returns the first and all the rest of the elements of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 213 ---------------------------------------- -- line 237 ---------------------------------------- . /// assert_eq!(last, &2); . /// assert_eq!(elements, &[0, 1]); . /// } . /// ``` . #[stable(feature = "slice_splits", since = "1.5.0")] . #[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn split_last(&self) -> Option<(&T, &[T])> { 58,029 ( 0.00%) if let [init @ .., last] = self { Some((last, init)) } else { None } . } . . /// Returns the last and all the rest of the elements of the slice, or `None` if it is empty. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 253 ---------------------------------------- -- line 276 ---------------------------------------- . /// . /// let w: &[i32] = &[]; . /// assert_eq!(None, w.last()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . 
#[rustc_const_stable(feature = "const_slice_first_last_not_mut", since = "1.56.0")] . #[inline] . pub const fn last(&self) -> Option<&T> { 895,285 ( 0.01%) if let [.., last] = self { Some(last) } else { None } . } . . /// Returns a mutable pointer to the last item in the slice. . /// . /// # Examples . /// . /// ``` . /// let x = &mut [0, 1, 2]; -- line 292 ---------------------------------------- -- line 295 ---------------------------------------- . /// *last = 10; . /// } . /// assert_eq!(x, &[0, 1, 10]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")] . #[inline] . pub const fn last_mut(&mut self) -> Option<&mut T> { 2,059,468 ( 0.03%) if let [.., last] = self { Some(last) } else { None } . } . . /// Returns a reference to an element or subslice depending on the type of . /// index. . /// . /// - If given a position, returns a reference to the element at that . /// position or `None` if out of bounds. . /// - If given a range, returns the subslice corresponding to that range, -- line 311 ---------------------------------------- -- line 448 ---------------------------------------- . /// } . /// ``` . /// . /// [`as_mut_ptr`]: slice::as_mut_ptr . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_slice_as_ptr", since = "1.32.0")] . #[inline] . pub const fn as_ptr(&self) -> *const T { 1,916,524 ( 0.03%) self as *const [T] as *const T . } . . /// Returns an unsafe mutable pointer to the slice's buffer. . /// . /// The caller must ensure that the slice outlives the pointer this . /// function returns, or else it will end up pointing to garbage. . /// . /// Modifying the container referenced by this slice may cause its buffer -- line 464 ---------------------------------------- -- line 476 ---------------------------------------- . /// } . /// } . /// assert_eq!(x, &[3, 4, 6]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_ptr_offset", issue = "71499")] . #[inline] . pub const fn as_mut_ptr(&mut self) -> *mut T { 31 ( 0.00%) self as *mut [T] as *mut T . } . . /// Returns the two raw pointers spanning the slice. . /// . /// The returned range is half-open, which means that the end pointer . /// points *one past* the last element of the slice. This way, an empty . /// slice is represented by two equal pointers, and the difference between . /// the two pointers represents the size of the slice. -- line 492 ---------------------------------------- -- line 582 ---------------------------------------- . /// v.swap(2, 4); . /// assert!(v == ["a", "b", "e", "d", "c"]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_swap", issue = "83163")] . #[inline] . #[track_caller] . pub const fn swap(&mut self, a: usize, b: usize) { 40,564 ( 0.00%) let _ = &self[a]; 1,032,930 ( 0.02%) let _ = &self[b]; . . // SAFETY: we just checked that both `a` and `b` are in bounds . unsafe { self.swap_unchecked(a, b) } . } . . /// Swaps two elements in the slice, without doing bounds checking. . /// . /// For a safe alternative see [`swap`]. -- line 599 ---------------------------------------- -- line 677 ---------------------------------------- . . // Because this function is first compiled in isolation, . // this check tells LLVM that the indexing below is . // in-bounds. Then after inlining -- once the actual . // lengths of the slices are known -- it's removed. . 
let (a, b) = (&mut a[..n], &mut b[..n]); . . for i in 0..n { 2,364 ( 0.00%) mem::swap(&mut a[i], &mut b[n - 1 - i]); . } . } . } . . /// Returns an iterator over the slice. . /// . /// # Examples . /// -- line 693 ---------------------------------------- -- line 1499 ---------------------------------------- . /// assert_eq!(left, [1, 2, 3, 4, 5, 6]); . /// assert_eq!(right, []); . /// } . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . #[track_caller] . pub fn split_at(&self, mid: usize) -> (&[T], &[T]) { 18 ( 0.00%) assert!(mid <= self.len()); . // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which . // fulfills the requirements of `from_raw_parts_mut`. . unsafe { self.split_at_unchecked(mid) } . } . . /// Divides one mutable slice into two at an index. . /// . /// The first will contain all indices from `[0, mid)` (excluding -- line 1515 ---------------------------------------- -- line 1530 ---------------------------------------- . /// left[1] = 2; . /// right[1] = 4; . /// assert_eq!(v, [1, 2, 3, 4, 5, 6]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . #[track_caller] . pub fn split_at_mut(&mut self, mid: usize) -> (&mut [T], &mut [T]) { 191,030 ( 0.00%) assert!(mid <= self.len()); . // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which . // fulfills the requirements of `from_raw_parts_mut`. . unsafe { self.split_at_mut_unchecked(mid) } . } . . /// Divides one slice into two at an index, without doing bounds checking. . /// . /// The first will contain all indices from `[0, mid)` (excluding -- line 1546 ---------------------------------------- -- line 1628 ---------------------------------------- . pub unsafe fn split_at_mut_unchecked(&mut self, mid: usize) -> (&mut [T], &mut [T]) { . let len = self.len(); . let ptr = self.as_mut_ptr(); . . // SAFETY: Caller has to check that `0 <= mid <= self.len()`. . // . // `[ptr; mid]` and `[mid; len]` are not overlapping, so returning a mutable reference . // is fine. 19,698 ( 0.00%) unsafe { (from_raw_parts_mut(ptr, mid), from_raw_parts_mut(ptr.add(mid), len - mid)) } . } . . /// Divides one slice into an array and a remainder slice at an index. . /// . /// The array will contain all indices from `[0, N)` (excluding . /// the index `N` itself) and the slice will contain all . /// indices from `[N, len)` (excluding the index `len` itself). . /// -- line 1644 ---------------------------------------- -- line 2113 ---------------------------------------- . /// assert!(!v.iter().any(|e| e == "hi")); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . pub fn contains(&self, x: &T) -> bool . where . T: PartialEq, . { 2,456 ( 0.00%) cmp::SliceContains::slice_contains(x, self) . } . . /// Returns `true` if `needle` is a prefix of the slice. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 2129 ---------------------------------------- -- line 2142 ---------------------------------------- . /// assert!(v.starts_with(&[])); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn starts_with(&self, needle: &[T]) -> bool . where . T: PartialEq, . { . let n = needle.len(); 4,287 ( 0.00%) self.len() >= n && needle == &self[..n] . } . . /// Returns `true` if `needle` is a suffix of the slice. . /// . /// # Examples . /// . /// ``` . /// let v = [10, 40, 30]; -- line 2158 ---------------------------------------- -- line 2171 ---------------------------------------- . /// assert!(v.ends_with(&[])); . /// ``` . 
#[stable(feature = "rust1", since = "1.0.0")] . pub fn ends_with(&self, needle: &[T]) -> bool . where . T: PartialEq, . { . let (m, n) = (self.len(), needle.len()); 2,268 ( 0.00%) m >= n && needle == &self[m - n..] . } . . /// Returns a subslice with the prefix removed. . /// . /// If the slice starts with `prefix`, returns the subslice after the prefix, wrapped in `Some`. . /// If `prefix` is empty, simply returns the original slice. . /// . /// If the slice does not start with `prefix`, returns `None`. -- line 2187 ---------------------------------------- -- line 2293 ---------------------------------------- . /// s.insert(idx, num); . /// assert_eq!(s, [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 42, 55]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn binary_search(&self, x: &T) -> Result . where . T: Ord, . { 3 ( 0.00%) self.binary_search_by(|p| p.cmp(x)) . } . . /// Binary searches this sorted slice with a comparator function. . /// . /// The comparator function should implement an order consistent . /// with the sort order of the underlying slice, returning an . /// order code that indicates whether its argument is `Less`, . /// `Equal` or `Greater` the desired target. -- line 2309 ---------------------------------------- -- line 2345 ---------------------------------------- . #[inline] . pub fn binary_search_by<'a, F>(&'a self, mut f: F) -> Result . where . F: FnMut(&'a T) -> Ordering, . { . let mut size = self.len(); . let mut left = 0; . let mut right = size; 8,434,145 ( 0.14%) while left < right { 11,066,228 ( 0.18%) let mid = left + size / 2; . . // SAFETY: the call is made safe by the following invariants: . // - `mid >= 0` . // - `mid < size`: `mid` is limited by `[left; right)` bound. 771,047 ( 0.01%) let cmp = f(unsafe { self.get_unchecked(mid) }); . . // The reason why we use if/else control flow rather than match . // is because match reorders comparison operations, which is perf sensitive. . // This is x86 asm for u8: https://rust.godbolt.org/z/8Y8Pra. 3,338,558 ( 0.06%) if cmp == Less { 4,395,630 ( 0.07%) left = mid + 1; 1,550,884 ( 0.03%) } else if cmp == Greater { . right = mid; . } else { . // SAFETY: same as the `get_unchecked` above . unsafe { crate::intrinsics::assume(mid < self.len()) }; . return Ok(mid); . } . 10,083,975 ( 0.17%) size = right - left; . } . Err(left) . } . . /// Binary searches this sorted slice with a key extraction function. . /// . /// Assumes that the slice is sorted by the key, for instance with . /// [`sort_by_key`] using the same key extraction function. -- line 2382 ---------------------------------------- -- line 3203 ---------------------------------------- . #[track_caller] . fn len_mismatch_fail(dst_len: usize, src_len: usize) -> ! { . panic!( . "source slice length ({}) does not match destination slice length ({})", . src_len, dst_len, . ); . } . 413,814 ( 0.01%) if self.len() != src.len() { . len_mismatch_fail(self.len(), src.len()); . } . . // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was . // checked to have the same length. The slices cannot overlap because . // mutable references are exclusive. . unsafe { . ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len()); -- line 3219 ---------------------------------------- -- line 3382 ---------------------------------------- . } . let gcd: usize = gcd(mem::size_of::(), mem::size_of::()); . let ts: usize = mem::size_of::() / gcd; . let us: usize = mem::size_of::() / gcd; . . 
// Armed with this knowledge, we can find how many `U`s we can fit! . let us_len = self.len() / ts * us; . // And how many `T`s will be in the trailing slice! 8,930 ( 0.00%) let ts_len = self.len() % ts; . (us_len, ts_len) . } . . /// Transmute the slice to a slice of another type, ensuring alignment of the types is . /// maintained. . /// . /// This method splits the slice into three distinct slices: prefix, correctly aligned middle . /// slice of a new type, and the suffix slice. The method may make the middle slice the greatest -- line 3398 ---------------------------------------- -- line 3429 ---------------------------------------- . return (self, &[], &[]); . } . . // First, find at what point do we split between the first and 2nd slice. Easy with . // ptr.align_offset. . let ptr = self.as_ptr(); . // SAFETY: See the `align_to_mut` method for the detailed safety comment. . let offset = unsafe { crate::ptr::align_offset(ptr, mem::align_of::()) }; 8,930 ( 0.00%) if offset > self.len() { . (self, &[], &[]) . } else { . let (left, rest) = self.split_at(offset); . let (us_len, ts_len) = rest.align_to_offsets::(); . // SAFETY: now `rest` is definitely aligned, so `from_raw_parts` below is okay, . // since the caller guarantees that we can transmute `T` to `U` safely. . unsafe { . ( . left, . from_raw_parts(rest.as_ptr() as *const U, us_len), 8,930 ( 0.00%) from_raw_parts(rest.as_ptr().add(rest.len() - ts_len), ts_len), . ) . } . } . } . . /// Transmute the slice to a slice of another type, ensuring alignment of the types is . /// maintained. . /// -- line 3456 ---------------------------------------- -- line 3731 ---------------------------------------- . /// let v = [1, 2, 3, 3, 5, 6, 7]; . /// let i = v.partition_point(|&x| x < 5); . /// . /// assert_eq!(i, 4); . /// assert!(v[..i].iter().all(|&x| x < 5)); . /// assert!(v[i..].iter().all(|&x| !(x < 5))); . /// ``` . #[stable(feature = "partition_point", since = "1.52.0")] 26 ( 0.00%) pub fn partition_point
<P>
(&self, mut pred: P) -> usize . where . P: FnMut(&T) -> bool, . { . self.binary_search_by(|x| if pred(x) { Less } else { Greater }).unwrap_or_else(|i| i) 52 ( 0.00%) } . . /// Removes the subslice corresponding to the given range . /// and returns a reference to it. . /// . /// Returns `None` and does not modify the slice if the given . /// range is out of bounds. . /// . /// Note that this method only accepts one-sided ranges such as -- line 3752 ---------------------------------------- 7,066,347 ( 0.12%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/mod.rs -------------------------------------------------------------------------------- Ir -- line 26 ---------------------------------------- . pub struct UnmatchedBrace { . pub expected_delim: token::DelimToken, . pub found_delim: Option, . pub found_span: Span, . pub unclosed_span: Option, . pub candidate_span: Option, . } . 434 ( 0.00%) crate fn parse_token_trees<'a>( . sess: &'a ParseSess, . src: &'a str, . start_pos: BytePos, . override_span: Option, . ) -> (PResult<'a, TokenStream>, Vec) { . StringReader { sess, start_pos, pos: start_pos, end_src_index: src.len(), src, override_span } . .into_token_trees() 279 ( 0.00%) } . . struct StringReader<'a> { . sess: &'a ParseSess, . /// Initial position, read-only. . start_pos: BytePos, . /// The absolute offset within the source_map of the current character. . pos: BytePos, . /// Stop reading src at this index. -- line 50 ---------------------------------------- -- line 51 ---------------------------------------- . end_src_index: usize, . /// Source text to tokenize. . src: &'a str, . override_span: Option, . } . . impl<'a> StringReader<'a> { . fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { 91,174 ( 0.00%) self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi)) . } . . /// Returns the next token, and info about preceding whitespace, if any. 471,093 ( 0.01%) fn next_token(&mut self) -> (Spacing, Token) { . let mut spacing = Spacing::Joint; . . // Skip `#!` at the start of the file 134,598 ( 0.00%) let start_src_index = self.src_index(self.pos); 201,897 ( 0.00%) let text: &str = &self.src[start_src_index..self.end_src_index]; . let is_beginning_of_file = self.pos == self.start_pos; 67,299 ( 0.00%) if is_beginning_of_file { 124 ( 0.00%) if let Some(shebang_len) = rustc_lexer::strip_shebang(text) { . self.pos = self.pos + BytePos::from_usize(shebang_len); . spacing = Spacing::Alone; . } . } . . // Skip trivial (whitespace & comments) tokens . loop { 120,693 ( 0.00%) let start_src_index = self.src_index(self.pos); 281,988 ( 0.00%) let text: &str = &self.src[start_src_index..self.end_src_index]; . 93,996 ( 0.00%) if text.is_empty() { . let span = self.mk_sp(self.pos, self.pos); 155 ( 0.00%) return (spacing, Token::new(token::Eof, span)); . } . 281,895 ( 0.00%) let token = rustc_lexer::first_token(text); . 187,930 ( 0.00%) let start = self.pos; 93,965 ( 0.00%) self.pos = self.pos + BytePos::from_usize(token.len); . . debug!("next_token: {:?}({:?})", token.kind, self.str_from(start)); . 756,444 ( 0.01%) match self.cook_lexer_token(token.kind, start) { 538,144 ( 0.01%) Some(kind) => { . let span = self.mk_sp(start, self.pos); 403,608 ( 0.01%) return (spacing, Token::new(kind, span)); . } . None => spacing = Spacing::Alone, . } . } 605,691 ( 0.01%) } . . /// Report a fatal lexical error with a given span. . 
fn fatal_span(&self, sp: Span, m: &str) -> FatalError { . self.sess.span_diagnostic.span_fatal(sp, m) . } . . /// Report a lexical error with a given span. . fn err_span(&self, sp: Span, m: &str) { -- line 110 ---------------------------------------- -- line 130 ---------------------------------------- . ) -> DiagnosticBuilder<'a> { . self.sess . .span_diagnostic . .struct_span_fatal(self.mk_sp(from_pos, to_pos), &format!("{}: {}", m, escaped_char(c))) . } . . /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly . /// complain about it. 2,758 ( 0.00%) fn lint_unicode_text_flow(&self, start: BytePos) { . // Opening delimiter of the length 2 is not included into the comment text. . let content_start = start + BytePos(2); . let content = self.str_from(content_start); . if contains_text_flow_control_chars(content) { . let span = self.mk_sp(start, self.pos); . self.sess.buffer_lint_with_diagnostic( . &TEXT_DIRECTION_CODEPOINT_IN_COMMENT, . span, . ast::CRATE_NODE_ID, . "unicode codepoint changing visible direction of text present in comment", . BuiltinLintDiagnostics::UnicodeTextFlow(span, content.to_string()), . ); . } 3,152 ( 0.00%) } . . /// Turns simple `rustc_lexer::TokenKind` enum into a rich . /// `rustc_ast::TokenKind`. This turns strings into interned . /// symbols and runs additional validation. . fn cook_lexer_token(&self, token: rustc_lexer::TokenKind, start: BytePos) -> Option { 469,825 ( 0.01%) Some(match token { 1,549 ( 0.00%) rustc_lexer::TokenKind::LineComment { doc_style } => { . // Skip non-doc comments 6,641 ( 0.00%) let doc_style = if let Some(doc_style) = doc_style { . doc_style . } else { . self.lint_unicode_text_flow(start); . return None; . }; . . // Opening delimiter of the length 3 is not included into the symbol. . let content_start = start + BytePos(3); . let content = self.str_from(content_start); 5,905 ( 0.00%) self.cook_doc_comment(content_start, content, CommentKind::Line, doc_style) . } 52 ( 0.00%) rustc_lexer::TokenKind::BlockComment { doc_style, terminated } => { 26 ( 0.00%) if !terminated { . let msg = match doc_style { . Some(_) => "unterminated block doc-comment", . None => "unterminated block comment", . }; . let last_bpos = self.pos; . self.sess.span_diagnostic.span_fatal_with_code( . self.mk_sp(start, last_bpos), . msg, . error_code!(E0758), . ); . } . . // Skip non-doc comments 26 ( 0.00%) let doc_style = if let Some(doc_style) = doc_style { . doc_style . } else { . self.lint_unicode_text_flow(start); . return None; . }; . . // Opening delimiter of the length 3 and closing delimiter of the length 2 . // are not included into the symbol. -- line 196 ---------------------------------------- -- line 198 ---------------------------------------- . let content_end = self.pos - BytePos(if terminated { 2 } else { 0 }); . let content = self.str_from_to(content_start, content_end); . self.cook_doc_comment(content_start, content, CommentKind::Block, doc_style) . } . rustc_lexer::TokenKind::Whitespace => return None, . rustc_lexer::TokenKind::Ident . | rustc_lexer::TokenKind::RawIdent . | rustc_lexer::TokenKind::UnknownPrefix => { 95,376 ( 0.00%) let is_raw_ident = token == rustc_lexer::TokenKind::RawIdent; 119,220 ( 0.00%) let is_unknown_prefix = token == rustc_lexer::TokenKind::UnknownPrefix; . let mut ident_start = start; 47,688 ( 0.00%) if is_raw_ident { . ident_start = ident_start + BytePos(2); . } 71,532 ( 0.00%) if is_unknown_prefix { . self.report_unknown_prefix(start); . 
} 119,220 ( 0.00%) let sym = nfc_normalize(self.str_from(ident_start)); . let span = self.mk_sp(start, self.pos); 95,376 ( 0.00%) self.sess.symbol_gallery.insert(sym, span); 47,688 ( 0.00%) if is_raw_ident { . if !sym.can_be_raw() { . self.err_span(span, &format!("`{}` cannot be a raw identifier", sym)); . } . self.sess.raw_identifier_spans.borrow_mut().push(span); . } 166,908 ( 0.00%) token::Ident(sym, is_raw_ident) . } . rustc_lexer::TokenKind::InvalidIdent . // Do not recover an identifier with emoji if the codepoint is a confusable . // with a recoverable substitution token, like `âž–`. . if UNICODE_ARRAY . .iter() . .find(|&&(c, _, _)| { . let sym = self.str_from(start); -- line 232 ---------------------------------------- -- line 234 ---------------------------------------- . }) . .is_none() => . { . let sym = nfc_normalize(self.str_from(start)); . let span = self.mk_sp(start, self.pos); . self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default().push(span); . token::Ident(sym, false) . } 26,112 ( 0.00%) rustc_lexer::TokenKind::Literal { kind, suffix_start } => { . let suffix_start = start + BytePos(suffix_start as u32); . let (kind, symbol) = self.cook_lexer_literal(start, suffix_start, kind); 4,896 ( 0.00%) let suffix = if suffix_start < self.pos { . let string = self.str_from(suffix_start); . if string == "_" { . self.sess . .span_diagnostic . .struct_span_warn( . self.mk_sp(suffix_start, self.pos), . "underscore literal suffix is not allowed", . ) -- line 253 ---------------------------------------- -- line 264 ---------------------------------------- . .emit(); . None . } else { . Some(Symbol::intern(string)) . } . } else { . None . }; 13,056 ( 0.00%) token::Literal(token::Lit { kind, symbol, suffix }) . } 1,376 ( 0.00%) rustc_lexer::TokenKind::Lifetime { starts_with_number } => { . // Include the leading `'` in the real identifier, for macro . // expansion purposes. See #12512 for the gory details of why . // this is necessary. . let lifetime_name = self.str_from(start); 688 ( 0.00%) if starts_with_number { . self.err_span_(start, self.pos, "lifetimes cannot start with a number"); . } 2,064 ( 0.00%) let ident = Symbol::intern(lifetime_name); 2,752 ( 0.00%) token::Lifetime(ident) . } . rustc_lexer::TokenKind::Semi => token::Semi, . rustc_lexer::TokenKind::Comma => token::Comma, . rustc_lexer::TokenKind::Dot => token::Dot, . rustc_lexer::TokenKind::OpenParen => token::OpenDelim(token::Paren), . rustc_lexer::TokenKind::CloseParen => token::CloseDelim(token::Paren), . rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(token::Brace), . rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(token::Brace), -- line 291 ---------------------------------------- -- line 324 ---------------------------------------- . err.help("source files must contain UTF-8 encoded text, unexpected null bytes might occur when a different encoding is used"); . } . err.emit(); . token? . } . }) . } . 14,172 ( 0.00%) fn cook_doc_comment( . &self, . content_start: BytePos, . content: &str, . comment_kind: CommentKind, . doc_style: DocStyle, . ) -> TokenKind { 1,181 ( 0.00%) if content.contains('\r') { . for (idx, _) in content.char_indices().filter(|&(_, c)| c == '\r') { . self.err_span_( . content_start + BytePos(idx as u32), . content_start + BytePos(idx as u32 + 1), . match comment_kind { . CommentKind::Line => "bare CR not allowed in doc-comment", . CommentKind::Block => "bare CR not allowed in block doc-comment", . 
}, -- line 347 ---------------------------------------- -- line 349 ---------------------------------------- . } . } . . let attr_style = match doc_style { . DocStyle::Outer => AttrStyle::Outer, . DocStyle::Inner => AttrStyle::Inner, . }; . 3,543 ( 0.00%) token::DocComment(comment_kind, attr_style, Symbol::intern(content)) 17,715 ( 0.00%) } . . fn cook_lexer_literal( . &self, . start: BytePos, . suffix_start: BytePos, . kind: rustc_lexer::LiteralKind, . ) -> (token::LitKind, Symbol) { . // prefix means `"` or `br"` or `r###"`, ... -- line 366 ---------------------------------------- -- line 370 ---------------------------------------- . self.sess.span_diagnostic.span_fatal_with_code( . self.mk_sp(start, suffix_start), . "unterminated character literal", . error_code!(E0762), . ) . } . (token::Char, Mode::Char, 1, 1) // ' ' . } 2 ( 0.00%) rustc_lexer::LiteralKind::Byte { terminated } => { 2 ( 0.00%) if !terminated { . self.sess.span_diagnostic.span_fatal_with_code( . self.mk_sp(start + BytePos(1), suffix_start), . "unterminated byte constant", . error_code!(E0763), . ) . } . (token::Byte, Mode::Byte, 2, 1) // b' ' . } 337 ( 0.00%) rustc_lexer::LiteralKind::Str { terminated } => { 337 ( 0.00%) if !terminated { . self.sess.span_diagnostic.span_fatal_with_code( . self.mk_sp(start, suffix_start), . "unterminated double quote string", . error_code!(E0765), . ) . } . (token::Str, Mode::Str, 1, 1) // " " . } 11 ( 0.00%) rustc_lexer::LiteralKind::ByteStr { terminated } => { 11 ( 0.00%) if !terminated { . self.sess.span_diagnostic.span_fatal_with_code( . self.mk_sp(start + BytePos(1), suffix_start), . "unterminated double quote byte string", . error_code!(E0766), . ) . } . (token::ByteStr, Mode::ByteStr, 2, 1) // b" " . } -- line 407 ---------------------------------------- -- line 410 ---------------------------------------- . let n = u32::from(n_hashes); . (token::StrRaw(n_hashes), Mode::RawStr, 2 + n, 1 + n) // r##" "## . } . rustc_lexer::LiteralKind::RawByteStr { n_hashes, err } => { . self.report_raw_str_error(start, err); . let n = u32::from(n_hashes); . (token::ByteStrRaw(n_hashes), Mode::RawByteStr, 3 + n, 1 + n) // br##" "## . } 2,564 ( 0.00%) rustc_lexer::LiteralKind::Int { base, empty_int } => { 1,282 ( 0.00%) return if empty_int { . self.sess . .span_diagnostic . .struct_span_err_with_code( . self.mk_sp(start, suffix_start), . "no valid digits found for number", . error_code!(E0768), . ) . .emit(); . (token::Integer, sym::integer(0)) . } else { . self.validate_int_literal(base, start, suffix_start); 3,846 ( 0.00%) (token::Integer, self.symbol_from_to(start, suffix_start)) . }; . } . rustc_lexer::LiteralKind::Float { base, empty_exponent } => { . if empty_exponent { . self.err_span_(start, self.pos, "expected at least one digit in exponent"); . } . . match base { -- line 439 ---------------------------------------- -- line 452 ---------------------------------------- . } . . let id = self.symbol_from_to(start, suffix_start); . return (token::Float, id); . } . }; . let content_start = start + BytePos(prefix_len); . let content_end = suffix_start - BytePos(postfix_len); 1,050 ( 0.00%) let id = self.symbol_from_to(content_start, content_end); . self.validate_literal_escape(mode, content_start, content_end, prefix_len, postfix_len); . (lit_kind, id) . } . . #[inline] . fn src_index(&self, pos: BytePos) -> usize { . (pos - self.start_pos).to_usize() . } . . /// Slice of the source text from `start` up to but excluding `self.pos`, . 
/// meaning the slice does not include the character `self.ch`. . fn str_from(&self, start: BytePos) -> &str { 161,563 ( 0.00%) self.str_from_to(start, self.pos) . } . . /// As symbol_from, with an explicit endpoint. . fn symbol_from_to(&self, start: BytePos, end: BytePos) -> Symbol { . debug!("taking an ident from {:?} to {:?}", start, end); 13,174 ( 0.00%) Symbol::intern(self.str_from_to(start, end)) . } . . /// Slice of the source text spanning from `start` up to but excluding `end`. 28,099 ( 0.00%) fn str_from_to(&self, start: BytePos, end: BytePos) -> &str { . &self.src[self.src_index(start)..self.src_index(end)] 112,396 ( 0.00%) } . . fn report_raw_str_error(&self, start: BytePos, opt_err: Option) { . match opt_err { . Some(RawStrError::InvalidStarter { bad_char }) => { . self.report_non_started_raw_string(start, bad_char) . } . Some(RawStrError::NoTerminator { expected, found, possible_terminator_offset }) => self . .report_unterminated_raw_string(start, expected, possible_terminator_offset, found), -- line 493 ---------------------------------------- -- line 609 ---------------------------------------- . fn validate_literal_escape( . &self, . mode: Mode, . content_start: BytePos, . content_end: BytePos, . prefix_len: u32, . postfix_len: u32, . ) { 2,100 ( 0.00%) let lit_content = self.str_from_to(content_start, content_end); 71,036 ( 0.00%) unescape::unescape_literal(lit_content, mode, &mut |range, result| { . // Here we only check for errors. The actual unescaping is done later. 15,834 ( 0.00%) if let Err(err) = result { . let span_with_quotes = self . .mk_sp(content_start - BytePos(prefix_len), content_end + BytePos(postfix_len)); . let (start, end) = (range.start as u32, range.end as u32); . let lo = content_start + BytePos(start); . let hi = lo + BytePos(end - start); . let span = self.mk_sp(lo, hi); . emit_unescape_error( . &self.sess.span_diagnostic, -- line 628 ---------------------------------------- -- line 629 ---------------------------------------- . lit_content, . span_with_quotes, . span, . mode, . range, . err, . ); . } 71,253 ( 0.00%) }); . } . . fn validate_int_literal(&self, base: Base, content_start: BytePos, content_end: BytePos) { 5,108 ( 0.00%) let base = match base { . Base::Binary => 2, . Base::Octal => 8, . _ => return, . }; 60 ( 0.00%) let s = self.str_from_to(content_start + BytePos(2), content_end); 60 ( 0.00%) for (idx, c) in s.char_indices() { . let idx = idx as u32; 120 ( 0.00%) if c != '_' && c.to_digit(base).is_none() { . let lo = content_start + BytePos(2 + idx); . let hi = content_start + BytePos(2 + idx + c.len_utf8() as u32); . self.err_span_(lo, hi, &format!("invalid digit for a base {} literal", base)); . } . } . } . } . 214,596 ( 0.00%) pub fn nfc_normalize(string: &str) -> Symbol { . use unicode_normalization::{is_nfc_quick, IsNormalized, UnicodeNormalization}; 47,688 ( 0.00%) match is_nfc_quick(string.chars()) { 238,440 ( 0.00%) IsNormalized::Yes => Symbol::intern(string), . _ => { . let normalized_str: String = string.chars().nfc().collect(); . Symbol::intern(&normalized_str) . } . } . } 291,689 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs -------------------------------------------------------------------------------- Ir -- line 57 ---------------------------------------- . /// # Examples . /// . /// Basic usage: . /// . /// ``` . 
#[doc = concat!("assert_eq!(", stringify!($SelfT), "::from_str_radix(\"A\", 16), Ok(10));")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 2,162 ( 0.00%) pub fn from_str_radix(src: &str, radix: u32) -> Result { 1,081 ( 0.00%) from_str_radix(src, radix) 3,243 ( 0.00%) } . . /// Returns the number of ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// . /// ``` -- line 75 ---------------------------------------- -- line 80 ---------------------------------------- . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[doc(alias = "popcount")] . #[doc(alias = "popcnt")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn count_ones(self) -> u32 { 231,450 ( 0.00%) intrinsics::ctpop(self as $ActualT) as u32 . } . . /// Returns the number of zeros in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 96 ---------------------------------------- -- line 118 ---------------------------------------- . /// assert_eq!(n.leading_zeros(), 2); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn leading_zeros(self) -> u32 { 971,974 ( 0.02%) intrinsics::ctlz(self as $ActualT) as u32 . } . . /// Returns the number of trailing zeros in the binary representation . /// of `self`. . /// . /// # Examples . /// . /// Basic usage: -- line 134 ---------------------------------------- -- line 139 ---------------------------------------- . /// assert_eq!(n.trailing_zeros(), 3); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn trailing_zeros(self) -> u32 { 850,936 ( 0.01%) intrinsics::cttz(self) as u32 . } . . /// Returns the number of leading ones in the binary representation of `self`. . /// . /// # Examples . /// . /// Basic usage: . /// -- line 155 ---------------------------------------- -- line 204 ---------------------------------------- . #[doc = concat!("assert_eq!(n.rotate_left(", $rot, "), m);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn rotate_left(self, n: u32) -> Self { 28,524,828 ( 0.47%) intrinsics::rotate_left(self, n as $SelfT) . } . . /// Shifts the bits to the right by a specified amount, `n`, . /// wrapping the truncated bits to the beginning of the resulting . /// integer. . /// . /// Please note this isn't the same operation as the `>>` shifting operator! . /// -- line 220 ---------------------------------------- -- line 430 ---------------------------------------- . #[doc = concat!("assert_eq!((", stringify!($SelfT), "::MAX - 2).checked_add(3), None);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_checked_int_methods", since = "1.47.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline] . 
pub const fn checked_add(self, rhs: Self) -> Option { 62 ( 0.00%) let (a, b) = self.overflowing_add(rhs); . if unlikely!(b) {None} else {Some(a)} . } . . /// Unchecked integer addition. Computes `self + rhs`, assuming overflow . /// cannot occur. . /// . /// # Safety . /// -- line 446 ---------------------------------------- -- line 456 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_add(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_add`. 4,927,143 ( 0.08%) unsafe { intrinsics::unchecked_add(self, rhs) } . } . . /// Checked addition with a signed integer. Computes `self + rhs`, . /// returning `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 472 ---------------------------------------- -- line 525 ---------------------------------------- . )] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")] . #[inline(always)] . pub const unsafe fn unchecked_sub(self, rhs: Self) -> Self { . // SAFETY: the caller must uphold the safety contract for . // `unchecked_sub`. 208,533 ( 0.00%) unsafe { intrinsics::unchecked_sub(self, rhs) } . } . . /// Checked integer multiplication. Computes `self * rhs`, returning . /// `None` if overflow occurred. . /// . /// # Examples . /// . /// Basic usage: -- line 541 ---------------------------------------- -- line 1035 ---------------------------------------- . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.saturating_add(127), ", stringify!($SelfT), "::MAX);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_add(self, rhs: Self) -> Self { 494,525 ( 0.01%) intrinsics::saturating_add(self, rhs) . } . . /// Saturating addition with a signed integer. Computes `self + rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1051 ---------------------------------------- -- line 1084 ---------------------------------------- . #[doc = concat!("assert_eq!(13", stringify!($SelfT), ".saturating_sub(127), 0);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[rustc_const_stable(feature = "const_saturating_int_methods", since = "1.47.0")] . #[inline(always)] . pub const fn saturating_sub(self, rhs: Self) -> Self { 9,292 ( 0.00%) intrinsics::saturating_sub(self, rhs) . } . . /// Saturating integer multiplication. Computes `self * rhs`, . /// saturating at the numeric bounds instead of overflowing. . /// . /// # Examples . /// . /// Basic usage: -- line 1100 ---------------------------------------- -- line 1175 ---------------------------------------- . #[doc = concat!("assert_eq!(200", stringify!($SelfT), ".wrapping_add(", stringify!($SelfT), "::MAX), 199);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . 
#[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_add(self, rhs: Self) -> Self { 15,212,289 ( 0.25%) intrinsics::wrapping_add(self, rhs) . } . . /// Wrapping (modular) addition with a signed integer. Computes . /// `self + rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1191 ---------------------------------------- -- line 1217 ---------------------------------------- . #[doc = concat!("assert_eq!(100", stringify!($SelfT), ".wrapping_sub(", stringify!($SelfT), "::MAX), 101);")] . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_sub(self, rhs: Self) -> Self { 4,959,207 ( 0.08%) intrinsics::wrapping_sub(self, rhs) . } . . /// Wrapping (modular) multiplication. Computes `self * . /// rhs`, wrapping around at the boundary of the type. . /// . /// # Examples . /// . /// Basic usage: -- line 1233 ---------------------------------------- -- line 1240 ---------------------------------------- . /// assert_eq!(25u8.wrapping_mul(12), 44); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_mul(self, rhs: Self) -> Self { 18,976,024 ( 0.31%) intrinsics::wrapping_mul(self, rhs) . } . . /// Wrapping (modular) division. Computes `self / rhs`. . /// Wrapped division on unsigned types is just normal division. . /// There's no way wrapping could ever happen. . /// This function exists, so that all operations . /// are accounted for in the wrapping operations. . /// -- line 1256 ---------------------------------------- -- line 1397 ---------------------------------------- . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn wrapping_shl(self, rhs: u32) -> Self { . // SAFETY: the masking by the bitsize of the type ensures that we do not shift . // out of bounds . unsafe { 9 ( 0.00%) intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT) . } . } . . /// Panic-free bitwise shift-right; yields `self >> mask(rhs)`, . /// where `mask` removes any high-order bits of `rhs` that . /// would cause the shift to exceed the bitwidth of the type. . /// . /// Note that this is *not* the same as a rotate-right; the -- line 1413 ---------------------------------------- -- line 1492 ---------------------------------------- . #[doc = concat!("assert_eq!(", stringify!($SelfT), "::MAX.overflowing_add(1), (0, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn overflowing_add(self, rhs: Self) -> (Self, bool) { 1,932,038 ( 0.03%) let (a, b) = intrinsics::add_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) . } . . /// Calculates `self + rhs + carry` without the ability to overflow. . /// . 
/// Performs "ternary addition" which takes in an extra bit to add, and may return an . /// additional bit of overflow. This allows for chaining together multiple additions . /// to create "big integers" which represent larger values. -- line 1508 ---------------------------------------- -- line 1588 ---------------------------------------- . #[doc = concat!("assert_eq!(0", stringify!($SelfT), ".overflowing_sub(1), (", stringify!($SelfT), "::MAX, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn overflowing_sub(self, rhs: Self) -> (Self, bool) { 895,291 ( 0.01%) let (a, b) = intrinsics::sub_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) . } . . /// Calculates `self - rhs - borrow` without the ability to overflow. . /// . /// Performs "ternary subtraction" which takes in an extra bit to subtract, and may return . /// an additional bit of overflow. This allows for chaining together multiple subtractions . /// to create "big integers" which represent larger values. -- line 1604 ---------------------------------------- -- line 1673 ---------------------------------------- . /// assert_eq!(5u32.overflowing_mul(2), (10, false)); . /// assert_eq!(1_000_000_000u32.overflowing_mul(10), (1410065408, true)); . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] 6 ( 0.00%) pub const fn overflowing_mul(self, rhs: Self) -> (Self, bool) { 3,822,847 ( 0.06%) let (a, b) = intrinsics::mul_with_overflow(self as $ActualT, rhs as $ActualT); . (a as Self, b) 3 ( 0.00%) } . . /// Calculates the divisor when `self` is divided by `rhs`. . /// . /// Returns a tuple of the divisor along with a boolean indicating . /// whether an arithmetic overflow would occur. Note that for unsigned . /// integers overflow never occurs, so the second value is always . /// `false`. . /// -- line 1692 ---------------------------------------- -- line 1838 ---------------------------------------- . #[doc = concat!("assert_eq!(0x1", stringify!($SelfT), ".overflowing_shl(132), (0x10, true));")] . /// ``` . #[stable(feature = "wrapping", since = "1.7.0")] . #[rustc_const_stable(feature = "const_wrapping_math", since = "1.32.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline(always)] . pub const fn overflowing_shl(self, rhs: u32) -> (Self, bool) { 3 ( 0.00%) (self.wrapping_shl(rhs), (rhs > ($BITS - 1))) . } . . /// Shifts self right by `rhs` bits. . /// . /// Returns a tuple of the shifted version of self along with a boolean . /// indicating whether the shift value was larger than or equal to the . /// number of bits. If the shift value is too large, then value is . /// masked (N-1) where N is the number of bits, and this value is then -- line 1854 ---------------------------------------- -- line 2132 ---------------------------------------- . #[doc = concat!("assert!(16", stringify!($SelfT), ".is_power_of_two());")] . #[doc = concat!("assert!(!10", stringify!($SelfT), ".is_power_of_two());")] . /// ``` . #[must_use] . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_is_power_of_two", since = "1.32.0")] . 
#[inline(always)] . pub const fn is_power_of_two(self) -> bool { 174 ( 0.00%) self.count_ones() == 1 . } . . // Returns one less than next power of two. . // (For 8u8 next power of two is 8u8 and for 6u8 it is 8u8) . // . // 8u8.one_less_than_next_power_of_two() == 7 . // 6u8.one_less_than_next_power_of_two() == 7 . // . // This method cannot overflow, as in the `next_power_of_two` . // overflow cases it instead ends up returning the maximum value . // of the type, and can return 0 for 0. . #[inline] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . const fn one_less_than_next_power_of_two(self) -> Self { 48,006 ( 0.00%) if self <= 1 { return 0; } . 51,646 ( 0.00%) let p = self - 1; . // SAFETY: Because `p > 0`, it cannot consist entirely of leading zeros. . // That means the shift is always in-bounds, and some processors . // (such as intel pre-haswell) have more efficient ctlz . // intrinsics when the argument is non-zero. 154,790 ( 0.00%) let z = unsafe { intrinsics::ctlz_nonzero(p) }; 51,722 ( 0.00%) <$SelfT>::MAX >> z . } . . /// Returns the smallest power of two greater than or equal to `self`. . /// . /// When return value overflows (i.e., `self > (1 << (N-1))` for type . /// `uN`), it panics in debug mode and the return value is wrapped to 0 in . /// release mode (the only situation in which method can return 0). . /// -- line 2171 ---------------------------------------- -- line 2179 ---------------------------------------- . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_stable(feature = "const_int_pow", since = "1.50.0")] . #[must_use = "this returns the result of the operation, \ . without modifying the original"] . #[inline] . #[rustc_inherit_overflow_checks] . pub const fn next_power_of_two(self) -> Self { 55,250 ( 0.00%) self.one_less_than_next_power_of_two() + 1 . } . . /// Returns the smallest power of two greater than or equal to `n`. If . /// the next power of two is greater than the type's maximum value, . /// `None` is returned, otherwise the power of two is wrapped in `Some`. . /// . /// # Examples . /// -- line 2195 ---------------------------------------- 303,377 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs -------------------------------------------------------------------------------- Ir -- line 48 ---------------------------------------- . // . // FIXME: we have to do something for moving slice patterns. . fn place_contents_drop_state_cannot_differ<'tcx>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . place: mir::Place<'tcx>, . ) -> bool { . let ty = place.ty(body, tcx).ty; 1,533,222 ( 0.03%) match ty.kind() { . ty::Array(..) => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} => false", . place, ty . ); . false . } . ty::Slice(..) | ty::Ref(..) | ty::RawPtr(..) => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} refd => true", . place, ty . ); . true . } 1,359,927 ( 0.02%) ty::Adt(def, _) if (def.has_dtor(tcx) && !def.is_box()) || def.is_union() => { . debug!( . "place_contents_drop_state_cannot_differ place: {:?} ty: {:?} Drop => true", . place, ty . ); . true . } . _ => false, . } -- line 79 ---------------------------------------- -- line 83 ---------------------------------------- . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . lookup_result: LookupResult, . 
each_child: F, . ) where . F: FnMut(MovePathIndex), . { 31,680 ( 0.00%) match lookup_result { . LookupResult::Parent(..) => { . // access to untracked value - do not touch children . } . LookupResult::Exact(e) => on_all_children_bits(tcx, body, move_data, e, each_child), . } . } . . pub fn on_all_children_bits<'tcx, F>( -- line 99 ---------------------------------------- -- line 100 ---------------------------------------- . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . move_path_index: MovePathIndex, . mut each_child: F, . ) where . F: FnMut(MovePathIndex), . { 2,270,368 ( 0.04%) fn is_terminal_path<'tcx>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . path: MovePathIndex, . ) -> bool { 567,592 ( 0.01%) place_contents_drop_state_cannot_differ(tcx, body, move_data.move_paths[path].place) 2,270,368 ( 0.04%) } . 2,751,579 ( 0.05%) fn on_all_children_bits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . move_path_index: MovePathIndex, . each_child: &mut F, . ) where . F: FnMut(MovePathIndex), . { 67,577 ( 0.00%) each_child(move_path_index); . 1,986,572 ( 0.03%) if is_terminal_path(tcx, body, move_data, move_path_index) { . return; . } . 198,405 ( 0.00%) let mut next_child_index = move_data.move_paths[move_path_index].first_child; 589,538 ( 0.01%) while let Some(child_index) = next_child_index { 260,910 ( 0.00%) on_all_children_bits(tcx, body, move_data, child_index, each_child); 43,485 ( 0.00%) next_child_index = move_data.move_paths[child_index].next_sibling; . } 2,270,256 ( 0.04%) } 1,468,391 ( 0.02%) on_all_children_bits(tcx, body, move_data, move_path_index, &mut each_child); . } . . pub fn on_all_drop_children_bits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . path: MovePathIndex, . mut each_child: F, -- line 146 ---------------------------------------- -- line 156 ---------------------------------------- . if erased_ty.needs_drop(tcx, ctxt.param_env) { . each_child(child); . } else { . debug!("on_all_drop_children_bits - skipping") . } . }) . } . 19,680 ( 0.00%) pub fn drop_flag_effects_for_function_entry<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . mut callback: F, . ) where . F: FnMut(MovePathIndex, DropFlagState), . { . let move_data = &ctxt.move_data; 1,786 ( 0.00%) for arg in body.args_iter() { 4,708 ( 0.00%) let place = mir::Place::from(arg); 7,062 ( 0.00%) let lookup_result = move_data.rev_lookup.find(place.as_ref()); . on_lookup_result_bits(tcx, body, move_data, lookup_result, |mpi| { 2,400 ( 0.00%) callback(mpi, DropFlagState::Present) . }); . } 14,288 ( 0.00%) } . 3,678,444 ( 0.06%) pub fn drop_flag_effects_for_location<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . ctxt: &MoveDataParamEnv<'tcx>, . loc: Location, . mut callback: F, . ) where . F: FnMut(MovePathIndex, DropFlagState), . { . let move_data = &ctxt.move_data; . debug!("drop_flag_effects_for_location({:?})", loc); . . // first, move out of the RHS 306,300 ( 0.01%) for mi in &move_data.loc_map[loc] { 624,619 ( 0.01%) let path = mi.move_path_index(move_data); . debug!("moving out of path {:?}", move_data.move_paths[path]); . 192,486 ( 0.00%) on_all_children_bits(tcx, body, move_data, path, |mpi| callback(mpi, DropFlagState::Absent)) . } . . debug!("drop_flag_effects: assignment for location({:?})", loc); . 67,583 ( 0.00%) for_location_inits(tcx, body, move_data, loc, |mpi| callback(mpi, DropFlagState::Present)); 2,452,928 ( 0.04%) } . . 
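// The counts in this file concentrate in the recursive child walk of
// `on_all_children_bits`. A minimal, self-contained sketch of that
// first-child/next-sibling traversal pattern (hypothetical `PathNode` stand-in,
// not the rustc `MovePath` type) for reference:

struct PathNode {
    first_child: Option<usize>,
    next_sibling: Option<usize>,
}

// Visit `root` and every node in its subtree, in pre-order.
fn on_all_children<F: FnMut(usize)>(paths: &[PathNode], root: usize, each: &mut F) {
    each(root);
    let mut next = paths[root].first_child;
    while let Some(child) = next {
        on_all_children(paths, child, each);
        next = paths[child].next_sibling;
    }
}

fn main() {
    // Tree: 0 -> { 1 -> { 3 }, 2 }
    let paths = vec![
        PathNode { first_child: Some(1), next_sibling: None },
        PathNode { first_child: Some(3), next_sibling: Some(2) },
        PathNode { first_child: None, next_sibling: None },
        PathNode { first_child: None, next_sibling: None },
    ];
    let mut visited = Vec::new();
    on_all_children(&paths, 0, &mut |i| visited.push(i));
    assert_eq!(visited, [0, 1, 3, 2]);
}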
pub fn for_location_inits<'tcx, F>( . tcx: TyCtxt<'tcx>, . body: &Body<'tcx>, . move_data: &MoveData<'tcx>, . loc: Location, . mut callback: F, . ) where . F: FnMut(MovePathIndex), . { 306,300 ( 0.01%) for ii in &move_data.init_loc_map[loc] { 347,180 ( 0.01%) let init = move_data.inits[*ii]; 213,974 ( 0.00%) match init.kind { . InitKind::Deep => { . let path = init.path; . . on_all_children_bits(tcx, body, move_data, path, &mut callback) . } . InitKind::Shallow => { . let mpi = init.path; . callback(mpi); -- line 226 ---------------------------------------- -- line 230 ---------------------------------------- . } . } . . /// Calls `handle_inactive_variant` for each descendant move path of `enum_place` that contains a . /// `Downcast` to a variant besides the `active_variant`. . /// . /// NOTE: If there are no move paths corresponding to an inactive variant, . /// `handle_inactive_variant` will not be called for that variant. 55,680 ( 0.00%) pub(crate) fn on_all_inactive_variants<'tcx>( . tcx: TyCtxt<'tcx>, . body: &mir::Body<'tcx>, . move_data: &MoveData<'tcx>, . enum_place: mir::Place<'tcx>, . active_variant: VariantIdx, . mut handle_inactive_variant: impl FnMut(MovePathIndex), . ) { 23,200 ( 0.00%) let enum_mpi = match move_data.rev_lookup.find(enum_place.as_ref()) { . LookupResult::Exact(mpi) => mpi, . LookupResult::Parent(_) => return, . }; . . let enum_path = &move_data.move_paths[enum_mpi]; 17,720 ( 0.00%) for (variant_mpi, variant_path) in enum_path.children(&move_data.move_paths) { . // Because of the way we build the `MoveData` tree, each child should have exactly one more . // projection than `enum_place`. This additional projection must be a downcast since the . // base is an enum. 2,535 ( 0.00%) let (downcast, base_proj) = variant_path.place.projection.split_last().unwrap(); 10,140 ( 0.00%) assert_eq!(enum_place.projection.len(), base_proj.len()); . 7,605 ( 0.00%) let variant_idx = match *downcast { . mir::ProjectionElem::Downcast(_, idx) => idx, . _ => unreachable!(), . }; . 2,535 ( 0.00%) if variant_idx != active_variant { . on_all_children_bits(tcx, body, move_data, variant_mpi, |mpi| { 2,538 ( 0.00%) handle_inactive_variant(mpi) . }); . } . } 37,120 ( 0.00%) } 2,094,295 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/iter/range.rs -------------------------------------------------------------------------------- Ir -- line 198 ---------------------------------------- . } . . #[inline] . #[allow(arithmetic_overflow)] . #[rustc_inherit_overflow_checks] . fn forward(start: Self, n: usize) -> Self { . // In debug builds, trigger a panic on overflow. . // This should optimize completely out in release builds. 60,168 ( 0.00%) if Self::forward_checked(start, n).is_none() { . let _ = Self::MAX + 1; . } . // Do wrapping math to allow e.g. `Step::forward(-128i8, 255)`. . start.wrapping_add(n as Self) . } . . #[inline] . #[allow(arithmetic_overflow)] -- line 214 ---------------------------------------- -- line 613 ---------------------------------------- . . if taken < n { Err(taken) } else { Ok(()) } . } . } . . impl RangeIteratorImpl for ops::Range { . #[inline] . fn spec_next(&mut self) -> Option { 12,235,043 ( 0.20%) if self.start < self.end { . // SAFETY: just checked precondition . let n = unsafe { Step::forward_unchecked(self.start.clone(), 1) }; . Some(mem::replace(&mut self.start, n)) . } else { . None . } . } . 
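// The hot `spec_next` above reduces to a bounds check plus an increment via
// `mem::replace`. A safe, concrete equivalent for `Range<usize>` (illustrative
// sketch only; the real code is generic over `Step` and uses `forward_unchecked`):

use std::mem;
use std::ops::Range;

fn range_next(r: &mut Range<usize>) -> Option<usize> {
    if r.start < r.end {
        // Advance `start` and hand back its previous value.
        let next = r.start + 1;
        Some(mem::replace(&mut r.start, next))
    } else {
        None
    }
}

fn main() {
    let mut r = 3..6;
    assert_eq!(range_next(&mut r), Some(3));
    assert_eq!(range_next(&mut r), Some(4));
    assert_eq!(range_next(&mut r), Some(5));
    assert_eq!(range_next(&mut r), None);
}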
-- line 629 ---------------------------------------- -- line 657 ---------------------------------------- . // Otherwise 0 is returned which always safe to use. . self.start = unsafe { Step::forward_unchecked(self.start.clone(), taken) }; . . if taken < n { Err(taken) } else { Ok(()) } . } . . #[inline] . fn spec_next_back(&mut self) -> Option { 981,800 ( 0.02%) if self.start < self.end { . // SAFETY: just checked precondition . self.end = unsafe { Step::backward_unchecked(self.end.clone(), 1) }; . Some(self.end.clone()) . } else { . None . } . } . -- line 673 ---------------------------------------- -- line 708 ---------------------------------------- . . #[inline] . fn next(&mut self) -> Option { . self.spec_next() . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 318,739 ( 0.01%) if self.start < self.end { . let hint = Step::steps_between(&self.start, &self.end); . (hint.unwrap_or(usize::MAX), hint) . } else { . (0, Some(0)) . } . } . . #[inline] -- line 724 ---------------------------------------- -- line 857 ---------------------------------------- . impl FusedIterator for ops::Range {} . . #[stable(feature = "rust1", since = "1.0.0")] . impl Iterator for ops::RangeFrom { . type Item = A; . . #[inline] . fn next(&mut self) -> Option { 56,649 ( 0.00%) let n = Step::forward(self.start.clone(), 1); . Some(mem::replace(&mut self.start, n)) . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { . (usize::MAX, None) . } . -- line 873 ---------------------------------------- -- line 998 ---------------------------------------- . . try { accum } . } . } . . impl RangeInclusiveIteratorImpl for ops::RangeInclusive { . #[inline] . fn spec_next(&mut self) -> Option { 143,109 ( 0.00%) if self.is_empty() { . return None; . } . let is_iterating = self.start < self.end; 89,172 ( 0.00%) Some(if is_iterating { . // SAFETY: just checked precondition . let n = unsafe { Step::forward_unchecked(self.start.clone(), 1) }; . mem::replace(&mut self.start, n) . } else { . self.exhausted = true; . self.start.clone() . }) . } -- line 1018 ---------------------------------------- -- line 1025 ---------------------------------------- . R: Try, . { . if self.is_empty() { . return try { init }; . } . . let mut accum = init; . 117,122 ( 0.00%) while self.start < self.end { . // SAFETY: just checked precondition . let n = unsafe { Step::forward_unchecked(self.start.clone(), 1) }; . let n = mem::replace(&mut self.start, n); . accum = f(accum, n)?; . } . . self.exhausted = true; . 10,663 ( 0.00%) if self.start == self.end { . accum = f(accum, self.start.clone())?; . } . . try { accum } . } . . #[inline] . fn spec_next_back(&mut self) -> Option { -- line 1050 ---------------------------------------- -- line 1098 ---------------------------------------- . . #[inline] . fn next(&mut self) -> Option { . self.spec_next() . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 111,852 ( 0.00%) if self.is_empty() { . return (0, Some(0)); . } . . match Step::steps_between(&self.start, &self.end) { . Some(hint) => (hint.saturating_add(1), hint.checked_add(1)), . None => (usize::MAX, None), . } . } -- line 1114 ---------------------------------------- 4,659,479 ( 0.08%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/walk.rs -------------------------------------------------------------------------------- Ir -- line 20 ---------------------------------------- . /// . 
/// It's very easy to produce a deeply . /// nested type tree with a lot of . /// identical subtrees. In order to work efficiently . /// in this situation walker only visits each type once. . /// It maintains a set of visited types and . /// skips any types that are already there. . impl<'tcx> TypeWalker<'tcx> { 83,939 ( 0.00%) pub fn new(root: GenericArg<'tcx>) -> Self { 335,756 ( 0.01%) Self { stack: smallvec![root], last_subtree: 1, visited: SsoHashSet::new() } 83,939 ( 0.00%) } . . /// Skips the subtree corresponding to the last type . /// returned by `next()`. . /// . /// Example: Imagine you are walking `Foo, usize>`. . /// . /// ``` . /// let mut iter: TypeWalker = ...; . /// iter.next(); // yields Foo . /// iter.next(); // yields Bar . /// iter.skip_current_subtree(); // skips i32 . /// iter.next(); // yields usize . /// ``` . pub fn skip_current_subtree(&mut self) { 752 ( 0.00%) self.stack.truncate(self.last_subtree); 376 ( 0.00%) } . } . . impl<'tcx> Iterator for TypeWalker<'tcx> { . type Item = GenericArg<'tcx>; . 2,528,649 ( 0.04%) fn next(&mut self) -> Option> { . debug!("next(): stack={:?}", self.stack); . loop { 412,946 ( 0.01%) let next = self.stack.pop()?; 206,473 ( 0.00%) self.last_subtree = self.stack.len(); 412,946 ( 0.01%) if self.visited.insert(next) { 591,066 ( 0.01%) push_inner(&mut self.stack, next); . debug!("next: stack={:?}", self.stack); . return Some(next); . } . } 1,966,727 ( 0.03%) } . } . . impl<'tcx> GenericArg<'tcx> { . /// Iterator that walks `self` and any types reachable from . /// `self`, in depth-first order. Note that just walks the types . /// that appear in `self`, it does not descend into the fields of . /// structs or variants. For example: . /// . /// ```text . /// isize => { isize } . /// Foo> => { Foo>, Bar, isize } . /// [isize] => { [isize], isize } . /// ``` 80,892 ( 0.00%) pub fn walk(self) -> TypeWalker<'tcx> { 40,446 ( 0.00%) TypeWalker::new(self) 121,338 ( 0.00%) } . . /// Iterator that walks the immediate children of `self`. Hence . /// `Foo, u32>` yields the sequence `[Bar, u32]` . /// (but not `i32`, like `walk`). . /// . /// Iterator only walks items once. . /// It accepts visited set, updates it with all visited types . /// and skips any types that are already there. 146,425 ( 0.00%) pub fn walk_shallow( . self, . visited: &mut SsoHashSet>, . ) -> impl Iterator> { . let mut stack = SmallVec::new(); 29,285 ( 0.00%) push_inner(&mut stack, self); 58,570 ( 0.00%) stack.retain(|a| visited.insert(*a)); 322,135 ( 0.01%) stack.into_iter() 146,425 ( 0.00%) } . } . . impl<'tcx> super::TyS<'tcx> { . /// Iterator that walks `self` and any types reachable from . /// `self`, in depth-first order. Note that just walks the types . /// that appear in `self`, it does not descend into the fields of . /// structs or variants. For example: . /// . /// ```text . /// isize => { isize } . /// Foo> => { Foo>, Bar, isize } . /// [isize] => { [isize], isize } . /// ``` 86,986 ( 0.00%) pub fn walk(&'tcx self) -> TypeWalker<'tcx> { 43,493 ( 0.00%) TypeWalker::new(self.into()) 130,479 ( 0.00%) } . } . . /// We push `GenericArg`s on the stack in reverse order so as to . /// maintain a pre-order traversal. As of the time of this . /// writing, the fact that the traversal is pre-order is not . /// known to be significant to any code, but it seems like the . /// natural order one would expect (basically, the order of the . /// types as they are written). 1,357,842 ( 0.02%) fn push_inner<'tcx>(stack: &mut TypeWalkerStack<'tcx>, parent: GenericArg<'tcx>) { . 
match parent.unpack() { 960,885 ( 0.02%) GenericArgKind::Type(parent_ty) => match *parent_ty.kind() { . ty::Bool . | ty::Char . | ty::Int(_) . | ty::Uint(_) . | ty::Float(_) . | ty::Str . | ty::Infer(_) . | ty::Param(_) . | ty::Never . | ty::Error(_) . | ty::Placeholder(..) . | ty::Bound(..) . | ty::Foreign(..) => {} . 560 ( 0.00%) ty::Array(ty, len) => { 840 ( 0.00%) stack.push(len.into()); . stack.push(ty.into()); . } . ty::Slice(ty) => { . stack.push(ty.into()); . } . ty::RawPtr(mt) => { . stack.push(mt.ty.into()); . } 43,988 ( 0.00%) ty::Ref(lt, ty, _) => { 43,988 ( 0.00%) stack.push(ty.into()); . stack.push(lt.into()); . } 1,176 ( 0.00%) ty::Projection(data) => { . stack.extend(data.substs.iter().rev()); . } 3,596 ( 0.00%) ty::Dynamic(obj, lt) => { 3,596 ( 0.00%) stack.push(lt.into()); 3,596 ( 0.00%) stack.extend(obj.iter().rev().flat_map(|predicate| { . let (substs, opt_ty) = match predicate.skip_binder() { . ty::ExistentialPredicate::Trait(tr) => (tr.substs, None), . ty::ExistentialPredicate::Projection(p) => (p.substs, Some(p.term)), . ty::ExistentialPredicate::AutoTrait(_) => . // Empty iterator . { . (ty::InternalSubsts::empty(), None) . } -- line 165 ---------------------------------------- -- line 170 ---------------------------------------- . ty::Term::Const(ct) => ct.into(), . })) . })); . } . ty::Adt(_, substs) . | ty::Opaque(_, substs) . | ty::Closure(_, substs) . | ty::Generator(_, substs, _) 9,780 ( 0.00%) | ty::Tuple(substs) . | ty::FnDef(_, substs) => { . stack.extend(substs.iter().rev()); . } . ty::GeneratorWitness(ts) => { . stack.extend(ts.skip_binder().iter().rev().map(|ty| ty.into())); . } 5,394 ( 0.00%) ty::FnPtr(sig) => { 18,879 ( 0.00%) stack.push(sig.skip_binder().output().into()); 21,576 ( 0.00%) stack.extend(sig.skip_binder().inputs().iter().copied().rev().map(|ty| ty.into())); . } . }, . GenericArgKind::Lifetime(_) => {} . GenericArgKind::Const(parent_ct) => { . stack.push(parent_ct.ty.into()); 564 ( 0.00%) match parent_ct.val { . ty::ConstKind::Infer(_) . | ty::ConstKind::Param(_) . | ty::ConstKind::Placeholder(_) . | ty::ConstKind::Bound(..) . | ty::ConstKind::Value(_) . | ty::ConstKind::Error(_) => {} . . ty::ConstKind::Unevaluated(ct) => { . stack.extend(ct.substs.iter().rev()); . } . } . } . } 458,860 ( 0.01%) } 1,623,094 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs -------------------------------------------------------------------------------- Ir -- line 70 ---------------------------------------- . . impl<'a, T> $name<'a, T> { . // Helper function for creating a slice from the iterator. . #[inline(always)] . fn make_slice(&self) -> &'a [T] { . // SAFETY: the iterator was created from a slice with pointer . // `self.ptr` and length `len!(self)`. This guarantees that all . // the prerequisites for `from_raw_parts` are fulfilled. 127,772 ( 0.00%) unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) } . } . . // Helper function for moving the start of the iterator forwards by `offset` elements, . // returning the old start. . // Unsafe because the offset must not exceed `self.len()`. . #[inline(always)] . unsafe fn post_inc_start(&mut self, offset: isize) -> * $raw_mut T { . if mem::size_of::() == 0 { . zst_shrink!(self, offset); . self.ptr.as_ptr() . } else { . let old = self.ptr.as_ptr(); . // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, . 
// so this new pointer is inside `self` and thus guaranteed to be non-null. 1,036,978 ( 0.02%) self.ptr = unsafe { NonNull::new_unchecked(self.ptr.as_ptr().offset(offset)) }; . old . } . } . . // Helper function for moving the end of the iterator backwards by `offset` elements, . // returning the new end. . // Unsafe because the offset must not exceed `self.len()`. . #[inline(always)] -- line 101 ---------------------------------------- -- line 102 ---------------------------------------- . unsafe fn pre_dec_end(&mut self, offset: isize) -> * $raw_mut T { . if mem::size_of::() == 0 { . zst_shrink!(self, offset); . self.ptr.as_ptr() . } else { . // SAFETY: the caller guarantees that `offset` doesn't exceed `self.len()`, . // which is guaranteed to not overflow an `isize`. Also, the resulting pointer . // is in bounds of `slice`, which fulfills the other requirements for `offset`. 21 ( 0.00%) self.end = unsafe { self.end.offset(-offset) }; . self.end . } . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl ExactSizeIterator for $name<'_, T> { . #[inline(always)] . fn len(&self) -> usize { 384,425 ( 0.01%) len!(self) . } . . #[inline(always)] . fn is_empty(&self) -> bool { . is_empty!(self) . } . } . -- line 128 ---------------------------------------- -- line 134 ---------------------------------------- . fn next(&mut self) -> Option<$elem> { . // could be implemented with slices, but this avoids bounds checks . . // SAFETY: `assume` calls are safe since a slice's start pointer . // must be non-null, and slices over non-ZSTs must also have a . // non-null end pointer. The call to `next_unchecked!` is safe . // since we check if the iterator is empty first. . unsafe { 528,999 ( 0.01%) assume(!self.ptr.as_ptr().is_null()); . if mem::size_of::() != 0 { 139,073 ( 0.00%) assume(!self.end.is_null()); . } 131,831,709 ( 2.19%) if is_empty!(self) { . None . } else { . Some(next_unchecked!(self)) . } . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 6,638,339 ( 0.11%) let exact = len!(self); . (exact, Some(exact)) . } . . #[inline] . fn count(self) -> usize { . len!(self) . } . . #[inline] . fn nth(&mut self, n: usize) -> Option<$elem> { 60,256 ( 0.00%) if n >= len!(self) { . // This iterator is now empty. . if mem::size_of::() == 0 { . // We have to do it this way as `ptr` may never be 0, but `end` . // could be (due to wrapping). . self.end = self.ptr.as_ptr(); . } else { . // SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr . unsafe { -- line 175 ---------------------------------------- -- line 203 ---------------------------------------- . // faster to compile. . #[inline] . fn for_each(mut self, mut f: F) . where . Self: Sized, . F: FnMut(Self::Item), . { . while let Some(x) = self.next() { 14,423 ( 0.00%) f(x); . } . } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn all(&mut self, mut f: F) -> bool . where . Self: Sized, . F: FnMut(Self::Item) -> bool, . { 13,275 ( 0.00%) while let Some(x) = self.next() { 26,481 ( 0.00%) if !f(x) { . return false; . } . } . true . } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] 8 ( 0.00%) fn any(&mut self, mut f: F) -> bool . where . Self: Sized, . F: FnMut(Self::Item) -> bool, . 
{ 1,248,259 ( 0.02%) while let Some(x) = self.next() { 1,284,642 ( 0.02%) if f(x) { . return true; . } . } . false 8 ( 0.00%) } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn find
<P>
(&mut self, mut predicate: P) -> Option . where . Self: Sized, . P: FnMut(&Self::Item) -> bool, . { 115,146 ( 0.00%) while let Some(x) = self.next() { 1,615,829 ( 0.03%) if predicate(&x) { . return Some(x); . } . } . None 398 ( 0.00%) } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. . #[inline] . fn find_map(&mut self, mut f: F) -> Option . where . Self: Sized, . F: FnMut(Self::Item) -> Option, . { 39,227 ( 0.00%) while let Some(x) = self.next() { 444,420 ( 0.01%) if let Some(y) = f(x) { 50,868 ( 0.00%) return Some(y); . } . } . None 13,473 ( 0.00%) } . . // We override the default implementation, which uses `try_fold`, . // because this simple implementation generates less LLVM IR and is . // faster to compile. Also, the `assume` avoids a bounds check. . #[inline] . #[rustc_inherit_overflow_checks] . fn position
<P>
(&mut self, mut predicate: P) -> Option where . Self: Sized, . P: FnMut(Self::Item) -> bool, . { . let n = len!(self); . let mut i = 0; . while let Some(x) = self.next() { 971,180 ( 0.02%) if predicate(x) { . // SAFETY: we are guaranteed to be in bounds by the loop invariant: . // when `i >= n`, `self.next()` returns `None` and the loop breaks. . unsafe { assume(i < n) }; . return Some(i); . } . i += 1; . } . None -- line 303 ---------------------------------------- -- line 308 ---------------------------------------- . // faster to compile. Also, the `assume` avoids a bounds check. . #[inline] . fn rposition
<P>
(&mut self, mut predicate: P) -> Option where . P: FnMut(Self::Item) -> bool, . Self: Sized + ExactSizeIterator + DoubleEndedIterator . { . let n = len!(self); . let mut i = n; 14,536 ( 0.00%) while let Some(x) = self.next_back() { 235,608 ( 0.00%) i -= 1; 143,114 ( 0.00%) if predicate(x) { . // SAFETY: `i` must be lower than `n` since it starts at `n` . // and is only decreasing. . unsafe { assume(i < n) }; . return Some(i); . } . } . None . } -- line 326 ---------------------------------------- -- line 332 ---------------------------------------- . // the returned references is guaranteed to refer to an element . // of the slice and thus guaranteed to be valid. . // . // Also note that the caller also guarantees that we're never . // called with the same index again, and that no other methods . // that will access this subslice are called, so it is valid . // for the returned reference to be mutable in the case of . // `IterMut` 193,444 ( 0.00%) unsafe { & $( $mut_ )? * self.ptr.as_ptr().add(idx) } . } . . $($extra)* . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T> DoubleEndedIterator for $name<'a, T> { . #[inline] -- line 348 ---------------------------------------- -- line 349 ---------------------------------------- . fn next_back(&mut self) -> Option<$elem> { . // could be implemented with slices, but this avoids bounds checks . . // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null, . // and slices over non-ZSTs must also have a non-null end pointer. . // The call to `next_back_unchecked!` is safe since we check if the iterator is . // empty first. . unsafe { 2 ( 0.00%) assume(!self.ptr.as_ptr().is_null()); . if mem::size_of::() != 0 { 22 ( 0.00%) assume(!self.end.is_null()); . } 2,671,737 ( 0.04%) if is_empty!(self) { . None . } else { . Some(next_back_unchecked!(self)) . } . } . } . . #[inline] -- line 369 ---------------------------------------- 20,186,217 ( 0.33%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/obligation_forest/mod.rs -------------------------------------------------------------------------------- Ir -- line 121 ---------------------------------------- . #[derive(Debug)] . pub enum ProcessResult { . Unchanged, . Changed(Vec), . Error(E), . } . . #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] 113,298 ( 0.00%) struct ObligationTreeId(usize); . . type ObligationTreeIdGenerator = . std::iter::Map, fn(usize) -> ObligationTreeId>; . . pub struct ObligationForest { . /// The list of obligations. In between calls to `process_obligations`, . /// this list only contains nodes in the `Pending` or `Waiting` state. . /// -- line 137 ---------------------------------------- -- line 181 ---------------------------------------- . has_parent: bool, . . /// Identifier of the obligation tree to which this node belongs. . obligation_tree_id: ObligationTreeId, . } . . impl Node { . fn new(parent: Option, obligation: O, obligation_tree_id: ObligationTreeId) -> Node { 1,220,592 ( 0.02%) Node { . obligation, . state: Cell::new(NodeState::Pending), 135,201 ( 0.00%) dependents: if let Some(parent_index) = parent { vec![parent_index] } else { vec![] }, . has_parent: parent.is_some(), . obligation_tree_id, . } . } . } . . /// The state of one node in some tree within the forest. This represents the . 
/// current state of processing for the obligation (of type `O`) associated -- line 200 ---------------------------------------- -- line 223 ---------------------------------------- . /// | compress() . /// v . /// (Removed) . /// ``` . /// The `Error` state can be introduced in several places, via `error_at()`. . /// . /// Outside of `ObligationForest` methods, nodes should be either `Pending` or . /// `Waiting`. 17,186,784 ( 0.29%) #[derive(Debug, Copy, Clone, PartialEq, Eq)] . enum NodeState { . /// This obligation has not yet been selected successfully. Cannot have . /// subobligations. . Pending, . . /// This obligation was selected successfully, but may or may not have . /// subobligations. . Success, -- line 239 ---------------------------------------- -- line 279 ---------------------------------------- . pub stalled: bool, . } . . impl OutcomeTrait for Outcome { . type Error = Error; . type Obligation = O; . . fn new() -> Self { 176,164 ( 0.00%) Self { stalled: true, errors: vec![] } . } . . fn mark_not_stalled(&mut self) { 73,904 ( 0.00%) self.stalled = false; . } . . fn is_stalled(&self) -> bool { 46,934 ( 0.00%) self.stalled . } . . fn record_completed(&mut self, _outcome: &Self::Obligation) { . // do nothing . } . . fn record_error(&mut self, error: Self::Error) { . self.errors.push(error) -- line 303 ---------------------------------------- -- line 306 ---------------------------------------- . . #[derive(Debug, PartialEq, Eq)] . pub struct Error { . pub error: E, . pub backtrace: Vec, . } . . impl ObligationForest { 22,235 ( 0.00%) pub fn new() -> ObligationForest { 266,820 ( 0.00%) ObligationForest { . nodes: vec![], . done_cache: Default::default(), . active_cache: Default::default(), . reused_node_vec: vec![], . obligation_tree_id_generator: (0..).map(ObligationTreeId), . error_cache: Default::default(), . } 22,235 ( 0.00%) } . . /// Returns the total number of nodes in the forest that have not . /// yet been fully resolved. . pub fn len(&self) -> usize { . self.nodes.len() . } . . /// Registers an obligation. . pub fn register_obligation(&mut self, obligation: O) { . // Ignore errors here - there is no guarantee of success. 923,078 ( 0.02%) let _ = self.register_obligation_at(obligation, None); . } . . // Returns Err(()) if we already know this obligation failed. 1,286,615 ( 0.02%) fn register_obligation_at(&mut self, obligation: O, parent: Option) -> Result<(), ()> { 233,930 ( 0.00%) let cache_key = obligation.as_cache_key(); 233,930 ( 0.00%) if self.done_cache.contains(&cache_key) { . debug!("register_obligation_at: ignoring already done obligation: {:?}", obligation); . return Ok(()); . } . 535,806 ( 0.01%) match self.active_cache.entry(cache_key) { . Entry::Occupied(o) => { 26,028 ( 0.00%) let node = &mut self.nodes[*o.get()]; 26,028 ( 0.00%) if let Some(parent_index) = parent { . // If the node is already in `active_cache`, it has already . // had its chance to be marked with a parent. So if it's . // not already present, just dump `parent` into the . // dependents as a non-parent. 18,945 ( 0.00%) if !node.dependents.contains(&parent_index) { . node.dependents.push(parent_index); . } . } 39,042 ( 0.00%) if let NodeState::Error = node.state.get() { Err(()) } else { Ok(()) } . } 228,861 ( 0.00%) Entry::Vacant(v) => { 457,722 ( 0.01%) let obligation_tree_id = match parent { 39,276 ( 0.00%) Some(parent_index) => self.nodes[parent_index].obligation_tree_id, . None => self.obligation_tree_id_generator.next().unwrap(), . }; . . let already_failed = parent.is_some() . 
&& self . .error_cache . .get(&obligation_tree_id) . .map_or(false, |errors| errors.contains(v.key())); . . if already_failed { . Err(()) . } else { 76,287 ( 0.00%) let new_index = self.nodes.len(); . v.insert(new_index); . self.nodes.push(Node::new(parent, obligation, obligation_tree_id)); . Ok(()) . } . } . } 1,052,685 ( 0.02%) } . . /// Converts all remaining obligations to the given error. 193,228 ( 0.00%) pub fn to_errors(&mut self, error: E) -> Vec> { . let errors = self . .nodes . .iter() . .enumerate() . .filter(|(_index, node)| node.state.get() == NodeState::Pending) . .map(|(index, _node)| Error { error: error.clone(), backtrace: self.error_at(index) }) . .collect(); . 55,208 ( 0.00%) self.compress(|_| assert!(false)); . errors 165,624 ( 0.00%) } . . /// Returns the set of obligations that are in a pending state. . pub fn map_pending_obligations(&self, f: F) -> Vec
<P>
. where . F: Fn(&O) -> P, . { 114 ( 0.00%) self.nodes . .iter() . .filter(|node| node.state.get() == NodeState::Pending) . .map(|node| f(&node.obligation)) . .collect() . } . . fn insert_into_error_cache(&mut self, index: usize) { . let node = &self.nodes[index]; -- line 410 ---------------------------------------- -- line 414 ---------------------------------------- . .insert(node.obligation.as_cache_key()); . } . . /// Performs a pass through the obligation list. This must . /// be called in a loop until `outcome.stalled` is false. . /// . /// This _cannot_ be unrolled (presently, at least). . #[inline(never)] 792,738 ( 0.01%) pub fn process_obligations(&mut self, processor: &mut P) -> OUT . where . P: ObligationProcessor, . OUT: OutcomeTrait>, . { . let mut outcome = OUT::new(); . . // Note that the loop body can append new nodes, and those new nodes . // will then be processed by subsequent iterations of the loop. . // . // We can't use an iterator for the loop because `self.nodes` is . // appended to and the borrow checker would complain. We also can't use . // `for index in 0..self.nodes.len() { ... }` because the range would . // be computed with the initial length, and we would miss the appended . // nodes. Therefore we use a `while` loop. . let mut index = 0; 9,531,056 ( 0.16%) while let Some(node) = self.nodes.get_mut(index) { . // `processor.process_obligation` can modify the predicate within . // `node.obligation`, and that predicate is the key used for . // `self.active_cache`. This means that `self.active_cache` can get . // out of sync with `nodes`. It's not very common, but it does . // happen, and code in `compress` has to allow for it. 18,885,948 ( 0.31%) if node.state.get() != NodeState::Pending { 36,785 ( 0.00%) index += 1; . continue; . } . 445,681 ( 0.01%) match processor.process_obligation(&mut node.obligation) { . ProcessResult::Unchanged => { . // No change in state. . } 295,616 ( 0.00%) ProcessResult::Changed(children) => { . // We are not (yet) stalled. . outcome.mark_not_stalled(); . node.state.set(NodeState::Success); . 387,534 ( 0.01%) for child in children { 781,303 ( 0.01%) let st = self.register_obligation_at(child, Some(index)); 91,918 ( 0.00%) if let Err(()) = st { . // Error already reported - propagate it . // to our node. . self.error_at(index); . } . } . } . ProcessResult::Error(err) => { . outcome.mark_not_stalled(); . outcome.record_error(Error { error: err, backtrace: self.error_at(index) }); . } . } 18,812,378 ( 0.31%) index += 1; . } . . // There's no need to perform marking, cycle processing and compression when nothing . // changed. 46,934 ( 0.00%) if !outcome.is_stalled() { . self.mark_successes(); . self.process_cycles(processor); 36,076 ( 0.00%) self.compress(|obl| outcome.record_completed(obl)); . } . . outcome 792,738 ( 0.01%) } . . /// Returns a vector of obligations for `p` and all of its . /// ancestors, putting them into the error state in the process. . fn error_at(&self, mut index: usize) -> Vec { . let mut error_stack: Vec = vec![]; . let mut trace = vec![]; . . loop { -- line 492 ---------------------------------------- -- line 516 ---------------------------------------- . trace . } . . /// Mark all `Waiting` nodes as `Success`, except those that depend on a . /// pending node. . fn mark_successes(&self) { . // Convert all `Waiting` nodes to `Success`. . for node in &self.nodes { 2,766,567 ( 0.05%) if node.state.get() == NodeState::Waiting { . node.state.set(NodeState::Success); . } . } . . 
// Convert `Success` nodes that depend on a pending node back to . // `Waiting`. . for node in &self.nodes { 2,543,791 ( 0.04%) if node.state.get() == NodeState::Pending { . // This call site is hot. . self.inlined_mark_dependents_as_waiting(node); . } . } . } . . // This always-inlined function is for the hot call site. . #[inline(always)] . fn inlined_mark_dependents_as_waiting(&self, node: &Node) { 12,421 ( 0.00%) for &index in node.dependents.iter() { . let node = &self.nodes[index]; 12,421 ( 0.00%) let state = node.state.get(); 12,421 ( 0.00%) if state == NodeState::Success { . // This call site is cold. 31,158 ( 0.00%) self.uninlined_mark_dependents_as_waiting(node); . } else { . debug_assert!(state == NodeState::Waiting || state == NodeState::Error) . } . } . } . . // This never-inlined function is for the cold call site. . #[inline(never)] 72,702 ( 0.00%) fn uninlined_mark_dependents_as_waiting(&self, node: &Node) { . // Mark node Waiting in the cold uninlined code instead of the hot inlined . node.state.set(NodeState::Waiting); . self.inlined_mark_dependents_as_waiting(node) 83,088 ( 0.00%) } . . /// Report cycles between all `Success` nodes, and convert all `Success` . /// nodes to `Done`. This must be called after `mark_successes`. . fn process_cycles
<P>
(&mut self, processor: &mut P) . where . P: ObligationProcessor, . { 18,038 ( 0.00%) let mut stack = std::mem::take(&mut self.reused_node_vec); . for (index, node) in self.nodes.iter().enumerate() { . // For some benchmarks this state test is extremely hot. It's a win . // to handle the no-op cases immediately to avoid the cost of the . // function call. 2,543,791 ( 0.04%) if node.state.get() == NodeState::Success { 359,570 ( 0.01%) self.find_cycles_from_node(&mut stack, processor, index); . } . } . . debug_assert!(stack.is_empty()); 144,304 ( 0.00%) self.reused_node_vec = stack; . } . 866,232 ( 0.01%) fn find_cycles_from_node
<P>
(&self, stack: &mut Vec, processor: &mut P, index: usize) . where . P: ObligationProcessor, . { . let node = &self.nodes[index]; 192,496 ( 0.00%) if node.state.get() == NodeState::Success { 3,996 ( 0.00%) match stack.iter().rposition(|&n| n == index) { . None => { . stack.push(index); 24,334 ( 0.00%) for &dep_index in node.dependents.iter() { 97,336 ( 0.00%) self.find_cycles_from_node(stack, processor, dep_index); . } . stack.pop(); . node.state.set(NodeState::Done); . } . Some(rpos) => { . // Cycle detected. . processor.process_backedge( . stack[rpos..].iter().map(|&i| &self.nodes[i].obligation), . PhantomData, . ); . } . } . } 769,984 ( 0.01%) } . . /// Compresses the vector, removing all popped nodes. This adjusts the . /// indices and hence invalidates any outstanding indices. `process_cycles` . /// must be run beforehand to remove any cycles on `Success` nodes. . #[inline(never)] 337,532 ( 0.01%) fn compress(&mut self, mut outcome_cb: impl FnMut(&O)) { 45,642 ( 0.00%) let orig_nodes_len = self.nodes.len(); . let mut node_rewrites: Vec<_> = std::mem::take(&mut self.reused_node_vec); . debug_assert!(node_rewrites.is_empty()); . node_rewrites.extend(0..orig_nodes_len); . let mut dead_nodes = 0; . . // Move removable nodes to the end, preserving the order of the . // remaining nodes. . // . // LOOP INVARIANT: . // self.nodes[0..index - dead_nodes] are the first remaining nodes . // self.nodes[index - dead_nodes..index] are all dead . // self.nodes[index..] are unchanged . for index in 0..orig_nodes_len { . let node = &self.nodes[index]; 12,791,107 ( 0.21%) match node.state.get() { . NodeState::Pending | NodeState::Waiting => { 4,939,774 ( 0.08%) if dead_nodes > 0 { 1,483,290 ( 0.02%) self.nodes.swap(index, index - dead_nodes); 2,472,150 ( 0.04%) node_rewrites[index] -= dead_nodes; . } . } . NodeState::Done => { . // This lookup can fail because the contents of . // `self.active_cache` are not guaranteed to match those of . // `self.nodes`. See the comment in `process_obligation` . // for more details. 266,679 ( 0.00%) if let Some((predicate, _)) = 221,712 ( 0.00%) self.active_cache.remove_entry(&node.obligation.as_cache_key()) . { . self.done_cache.insert(predicate); . } else { . self.done_cache.insert(node.obligation.as_cache_key().clone()); . } . // Extract the success stories. . outcome_cb(&node.obligation); 147,808 ( 0.00%) node_rewrites[index] = orig_nodes_len; 147,808 ( 0.00%) dead_nodes += 1; . } . NodeState::Error => { . // We *intentionally* remove the node from the cache at this point. Otherwise . // tests must come up with a different type on every type error they . // check against. . self.active_cache.remove(&node.obligation.as_cache_key()); . self.insert_into_error_cache(index); . node_rewrites[index] = orig_nodes_len; -- line 658 ---------------------------------------- -- line 659 ---------------------------------------- . dead_nodes += 1; . } . NodeState::Success => unreachable!(), . } . } . . if dead_nodes > 0 { . // Remove the dead nodes and rewrite indices. 34,796 ( 0.00%) self.nodes.truncate(orig_nodes_len - dead_nodes); 17,398 ( 0.00%) self.apply_rewrites(&node_rewrites); . } . . node_rewrites.truncate(0); 182,568 ( 0.00%) self.reused_node_vec = node_rewrites; 365,136 ( 0.01%) } . . #[inline(never)] 156,582 ( 0.00%) fn apply_rewrites(&mut self, node_rewrites: &[usize]) { . let orig_nodes_len = node_rewrites.len(); . . for node in &mut self.nodes { . 
let mut i = 0; 2,450,964 ( 0.04%) while let Some(dependent) = node.dependents.get_mut(i) { 42,300 ( 0.00%) let new_index = node_rewrites[*dependent]; 21,150 ( 0.00%) if new_index >= orig_nodes_len { . node.dependents.swap_remove(i); . if i == 0 && node.has_parent { . // We just removed the parent. . node.has_parent = false; . } . } else { 10,575 ( 0.00%) *dependent = new_index; 21,150 ( 0.00%) i += 1; . } . } . } . . // This updating of `self.active_cache` is necessary because the . // removal of nodes within `compress` can fail. See above. . self.active_cache.retain(|_predicate, index| { 9,877,304 ( 0.16%) let new_index = node_rewrites[*index]; 4,938,652 ( 0.08%) if new_index >= orig_nodes_len { . false . } else { 2,440,389 ( 0.04%) *index = new_index; . true . } . }); 139,184 ( 0.00%) } . } 21,302,752 ( 0.35%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs -------------------------------------------------------------------------------- Ir -- line 108 ---------------------------------------- . suppress_errors: bool, . }, . } . . impl RegionckMode { . /// Indicates that the MIR borrowck will repeat these region . /// checks, so we should ignore errors if NLL is (unconditionally) . /// enabled. 780 ( 0.00%) pub fn for_item_body(tcx: TyCtxt<'_>) -> Self { . // FIXME(Centril): Once we actually remove `::Migrate` also make . // this always `true` and then proceed to eliminate the dead code. 780 ( 0.00%) match tcx.borrowck_mode() { . // If we're on Migrate mode, report AST region errors . BorrowckMode::Migrate => RegionckMode::Erase { suppress_errors: false }, . . // If we're on MIR, don't report AST region errors as they should be reported by NLL . BorrowckMode::Mir => RegionckMode::Erase { suppress_errors: true }, . } 1,560 ( 0.00%) } . } . . /// This type contains all the things within `InferCtxt` that sit within a . /// `RefCell` and are involved with taking/rolling back snapshots. Snapshot . /// operations are hot enough that we want only one call to `borrow_mut` per . /// call to `start_snapshot` and `rollback_to`. . pub struct InferCtxtInner<'tcx> { . /// Cache for projections. This cache is snapshotted along with the infcx. -- line 134 ---------------------------------------- -- line 202 ---------------------------------------- . /// type instantiations (`ty::Infer`) to the actual opaque . /// type (`ty::Opaque`). Used during fallback to map unconstrained . /// opaque type inference variables to their corresponding . /// opaque type. . pub opaque_types_vars: FxHashMap, Ty<'tcx>>, . } . . impl<'tcx> InferCtxtInner<'tcx> { 112,175 ( 0.00%) fn new() -> InferCtxtInner<'tcx> { 762,790 ( 0.01%) InferCtxtInner { . projection_cache: Default::default(), . type_variable_storage: type_variable::TypeVariableStorage::new(), . undo_log: InferCtxtUndoLogs::default(), . const_unification_storage: ut::UnificationTableStorage::new(), . int_unification_storage: ut::UnificationTableStorage::new(), . float_unification_storage: ut::UnificationTableStorage::new(), 67,305 ( 0.00%) region_constraint_storage: Some(RegionConstraintStorage::new()), . region_obligations: vec![], . opaque_types: Default::default(), . opaque_types_vars: Default::default(), . } 134,610 ( 0.00%) } . . #[inline] . pub fn region_obligations(&self) -> &[(hir::HirId, RegionObligation<'tcx>)] { . &self.region_obligations . } . . #[inline] . 
pub fn projection_cache(&mut self) -> traits::ProjectionCache<'_, 'tcx> { 19,055 ( 0.00%) self.projection_cache.with_log(&mut self.undo_log) . } . . #[inline] . fn type_variables(&mut self) -> type_variable::TypeVariableTable<'_, 'tcx> { 10,299,016 ( 0.17%) self.type_variable_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn int_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::IntVid, . &mut ut::UnificationStorage, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 38,989 ( 0.00%) self.int_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . fn float_unification_table( . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::FloatVid, -- line 258 ---------------------------------------- -- line 268 ---------------------------------------- . &mut self, . ) -> ut::UnificationTable< . ut::InPlace< . ty::ConstVid<'tcx>, . &mut ut::UnificationStorage>, . &mut InferCtxtUndoLogs<'tcx>, . >, . > { 8,556 ( 0.00%) self.const_unification_storage.with_log(&mut self.undo_log) . } . . #[inline] . pub fn unwrap_region_constraints(&mut self) -> RegionConstraintCollector<'_, 'tcx> { 219,022 ( 0.00%) self.region_constraint_storage . .as_mut() . .expect("region constraints already solved") 268,081 ( 0.00%) .with_log(&mut self.undo_log) . } . } . . pub struct InferCtxt<'a, 'tcx> { . pub tcx: TyCtxt<'tcx>, . . /// The `DefId` of the item in whose context we are performing inference or typeck. . /// It is used to check whether an opaque type use is a defining use. -- line 292 ---------------------------------------- -- line 361 ---------------------------------------- . /// item we are type-checking, and just consider those names as . /// part of the root universe. So this would only get incremented . /// when we enter into a higher-ranked (`for<..>`) type or trait . /// bound. . universe: Cell, . } . . /// See the `error_reporting` module for more details. 461,320 ( 0.01%) #[derive(Clone, Copy, Debug, PartialEq, Eq, TypeFoldable)] . pub enum ValuePairs<'tcx> { . Types(ExpectedFound>), . Regions(ExpectedFound>), . Consts(ExpectedFound<&'tcx ty::Const<'tcx>>), . TraitRefs(ExpectedFound>), . PolyTraitRefs(ExpectedFound>), . } . -- line 377 ---------------------------------------- -- line 383 ---------------------------------------- . pub struct TypeTrace<'tcx> { . cause: ObligationCause<'tcx>, . values: ValuePairs<'tcx>, . } . . /// The origin of a `r1 <= r2` constraint. . /// . /// See `error_reporting` module for more details 409,263 ( 0.01%) #[derive(Clone, Debug)] . pub enum SubregionOrigin<'tcx> { . /// Arose from a subtyping relation 21,353 ( 0.00%) Subtype(Box>), . . /// When casting `&'a T` to an `&'b Trait` object, . /// relating `'a` to `'b` . RelateObjectBound(Span), . . /// Some type parameter was instantiated with the given type, . /// and that type must outlive some region. 5,348 ( 0.00%) RelateParamBound(Span, Ty<'tcx>, Option), . . /// The given region parameter was instantiated with a region . /// that must outlive some other region. . RelateRegionParamBound(Span), . . /// Creating a pointer `b` to contents of another reference . Reborrow(Span), . . /// Creating a pointer `b` to contents of an upvar . ReborrowUpvar(Span, ty::UpvarId), . . /// Data with type `Ty<'tcx>` was borrowed 4,811 ( 0.00%) DataBorrowed(Ty<'tcx>, Span), . . /// (&'a &'b T) where a >= b 4,266 ( 0.00%) ReferenceOutlivesReferent(Ty<'tcx>, Span), . . /// Comparing the signature and requirements of an impl method against . /// the containing trait. . 
CompareImplMethodObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, . . /// Comparing the signature and requirements of an impl associated type . /// against the containing trait . CompareImplTypeObligation { span: Span, impl_item_def_id: DefId, trait_item_def_id: DefId }, -- line 426 ---------------------------------------- -- line 554 ---------------------------------------- . defining_use_anchor: Option, . } . . pub trait TyCtxtInferExt<'tcx> { . fn infer_ctxt(self) -> InferCtxtBuilder<'tcx>; . } . . impl<'tcx> TyCtxtInferExt<'tcx> for TyCtxt<'tcx> { 22,435 ( 0.00%) fn infer_ctxt(self) -> InferCtxtBuilder<'tcx> { 67,305 ( 0.00%) InferCtxtBuilder { tcx: self, defining_use_anchor: None, fresh_typeck_results: None } 22,435 ( 0.00%) } . } . . impl<'tcx> InferCtxtBuilder<'tcx> { . /// Used only by `rustc_typeck` during body type-checking/inference, . /// will initialize `in_progress_typeck_results` with fresh `TypeckResults`. . /// Will also change the scope for opaque type defining use checks to the given owner. 19,512 ( 0.00%) pub fn with_fresh_in_progress_typeck_results(mut self, table_owner: LocalDefId) -> Self { 23,848 ( 0.00%) self.fresh_typeck_results = Some(RefCell::new(ty::TypeckResults::new(table_owner))); 10,840 ( 0.00%) self.with_opaque_type_inference(table_owner) 15,176 ( 0.00%) } . . /// Whenever the `InferCtxt` should be able to handle defining uses of opaque types, . /// you need to call this function. Otherwise the opaque type will be treated opaquely. . /// . /// It is only meant to be called in two places, for typeck . /// (via `with_fresh_in_progress_typeck_results`) and for the inference context used . /// in mir borrowck. 1,660 ( 0.00%) pub fn with_opaque_type_inference(mut self, defining_use_anchor: LocalDefId) -> Self { 830 ( 0.00%) self.defining_use_anchor = Some(defining_use_anchor); 5,996 ( 0.00%) self 2,490 ( 0.00%) } . . /// Given a canonical value `C` as a starting point, create an . /// inference context that contains each of the bound values . /// within instantiated as a fresh variable. The `f` closure is . /// invoked with the new infcx, along with the instantiated value . /// `V` and a substitution `S`. This substitution `S` maps from . /// the bound values in `C` to their instantiated values in `V` . /// (in other words, `S(C) = V`). 30,608 ( 0.00%) pub fn enter_with_canonical( . &mut self, . span: Span, . canonical: &Canonical<'tcx, T>, . f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>, T, CanonicalVarValues<'tcx>) -> R, . ) -> R . where . T: TypeFoldable<'tcx>, . { . self.enter(|infcx| { 49,086 ( 0.00%) let (value, subst) = 3,481 ( 0.00%) infcx.instantiate_canonical_with_fresh_inference_vars(span, canonical); 71,452 ( 0.00%) f(infcx, value, subst) . }) 32,917 ( 0.00%) } . 125,582 ( 0.00%) pub fn enter(&mut self, f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>) -> R) -> R { 63,573 ( 0.00%) let InferCtxtBuilder { tcx, defining_use_anchor, ref fresh_typeck_results } = *self; . let in_progress_typeck_results = fresh_typeck_results.as_ref(); 949,453 ( 0.02%) f(InferCtxt { . tcx, . defining_use_anchor, . in_progress_typeck_results, 22,435 ( 0.00%) inner: RefCell::new(InferCtxtInner::new()), . lexical_region_resolutions: RefCell::new(None), . selection_cache: Default::default(), . evaluation_cache: Default::default(), . reported_trait_errors: Default::default(), . reported_closure_mismatch: Default::default(), . tainted_by_errors_flag: Cell::new(false), 22,435 ( 0.00%) err_count_on_creation: tcx.sess.err_count(), . 
in_snapshot: Cell::new(false), . skip_leak_check: Cell::new(false), . universe: Cell::new(ty::UniverseIndex::ROOT), . }) 136,310 ( 0.00%) } . } . . impl<'tcx, T> InferOk<'tcx, T> { . pub fn unit(self) -> InferOk<'tcx, ()> { . InferOk { value: (), obligations: self.obligations } . } . . /// Extracts `value`, registering any obligations into `fulfill_cx`. . pub fn into_value_registering_obligations( . self, . infcx: &InferCtxt<'_, 'tcx>, . fulfill_cx: &mut dyn TraitEngine<'tcx>, . ) -> T { 825 ( 0.00%) let InferOk { value, obligations } = self; 3,636 ( 0.00%) for obligation in obligations { 234 ( 0.00%) fulfill_cx.register_predicate_obligation(infcx, obligation); . } . value . } . } . . impl<'tcx> InferOk<'tcx, ()> { 40,499 ( 0.00%) pub fn into_obligations(self) -> PredicateObligations<'tcx> { 161,996 ( 0.00%) self.obligations 40,499 ( 0.00%) } . } . . #[must_use = "once you start a snapshot, you should always consume it"] . pub struct CombinedSnapshot<'a, 'tcx> { . undo_snapshot: Snapshot<'tcx>, . region_constraints_snapshot: RegionSnapshot, . universe: ty::UniverseIndex, . was_in_snapshot: bool, -- line 662 ---------------------------------------- -- line 674 ---------------------------------------- . let canonical = self.canonicalize_query((a, b), &mut OriginalQueryValues::default()); . debug!("canonical consts: {:?}", &canonical.value); . . self.tcx.try_unify_abstract_consts(canonical.value) . } . . pub fn is_in_snapshot(&self) -> bool { . self.in_snapshot.get() 71,006 ( 0.00%) } . 406,328 ( 0.01%) pub fn freshen>(&self, t: T) -> T { 457,119 ( 0.01%) t.fold_with(&mut self.freshener()) 457,119 ( 0.01%) } . . /// Returns the origin of the type variable identified by `vid`, or `None` . /// if this is not a type variable. . /// . /// No attempt is made to resolve `ty`. 400 ( 0.00%) pub fn type_var_origin(&'a self, ty: Ty<'tcx>) -> Option { 800 ( 0.00%) match *ty.kind() { 161 ( 0.00%) ty::Infer(ty::TyVar(vid)) => { 644 ( 0.00%) Some(*self.inner.borrow_mut().type_variables().var_origin(vid)) . } 39 ( 0.00%) _ => None, . } 800 ( 0.00%) } . 50,791 ( 0.00%) pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, false) 50,791 ( 0.00%) } . . /// Like `freshener`, but does not replace `'static` regions. 205,879 ( 0.00%) pub fn freshener_keep_static<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { . freshen::TypeFreshener::new(self, true) 205,879 ( 0.00%) } . 3,508 ( 0.00%) pub fn unsolved_variables(&self) -> Vec> { 1,754 ( 0.00%) let mut inner = self.inner.borrow_mut(); 1,754 ( 0.00%) let mut vars: Vec> = inner . .type_variables() . .unsolved_variables() . .into_iter() 322 ( 0.00%) .map(|t| self.tcx.mk_ty_var(t)) . .collect(); . vars.extend( . (0..inner.int_unification_table().len()) . .map(|i| ty::IntVid { index: i as u32 }) 1,844 ( 0.00%) .filter(|&vid| inner.int_unification_table().probe_value(vid).is_none()) 39 ( 0.00%) .map(|v| self.tcx.mk_int_var(v)), . ); . vars.extend( . (0..inner.float_unification_table().len()) . .map(|i| ty::FloatVid { index: i as u32 }) . .filter(|&vid| inner.float_unification_table().probe_value(vid).is_none()) . .map(|v| self.tcx.mk_float_var(v)), . ); . vars 6,139 ( 0.00%) } . 117,061 ( 0.00%) fn combine_fields( . &'a self, . trace: TypeTrace<'tcx>, . param_env: ty::ParamEnv<'tcx>, . ) -> CombineFields<'a, 'tcx> { 468,624 ( 0.01%) CombineFields { . infcx: self, 1,171,560 ( 0.02%) trace, . cause: None, . param_env, . obligations: PredicateObligations::new(), . } 117,061 ( 0.00%) } . . 
/// Clear the "currently in a snapshot" flag, invoke the closure, . /// then restore the flag to its original value. This flag is a . /// debugging measure designed to detect cases where we start a . /// snapshot, create type variables, and register obligations . /// which may involve those type variables in the fulfillment cx, . /// potentially leaving "dangling type variables" behind. . /// In such cases, an assertion will fail when attempting to -- line 753 ---------------------------------------- -- line 755 ---------------------------------------- . /// better than grovelling through megabytes of `RUSTC_LOG` output. . /// . /// HOWEVER, in some cases the flag is unhelpful. In particular, we . /// sometimes create a "mini-fulfilment-cx" in which we enroll . /// obligations. As long as this fulfillment cx is fully drained . /// before we return, this is not a problem, as there won't be any . /// escaping obligations in the main cx. In those cases, you can . /// use this function. 8 ( 0.00%) pub fn save_and_restore_in_snapshot_flag(&self, func: F) -> R . where . F: FnOnce(&Self) -> R, . { . let flag = self.in_snapshot.replace(false); 42,171 ( 0.00%) let result = func(self); . self.in_snapshot.set(flag); . result 9 ( 0.00%) } . 569,096 ( 0.01%) fn start_snapshot(&self) -> CombinedSnapshot<'a, 'tcx> { . debug!("start_snapshot()"); . . let in_snapshot = self.in_snapshot.replace(true); . . let mut inner = self.inner.borrow_mut(); . 1,707,288 ( 0.03%) CombinedSnapshot { . undo_snapshot: inner.undo_log.start_snapshot(), . region_constraints_snapshot: inner.unwrap_region_constraints().start_snapshot(), . universe: self.universe(), . was_in_snapshot: in_snapshot, . // Borrow typeck results "in progress" (i.e., during typeck) . // to ban writes from within a snapshot to them. 284,548 ( 0.00%) _in_progress_typeck_results: self . .in_progress_typeck_results . .map(|typeck_results| typeck_results.borrow()), . } 1,138,192 ( 0.02%) } . 844,250 ( 0.01%) #[instrument(skip(self, snapshot), level = "debug")] . fn rollback_to(&self, cause: &str, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 76,750 ( 0.00%) undo_snapshot, 76,750 ( 0.00%) region_constraints_snapshot, 76,750 ( 0.00%) universe, 76,750 ( 0.00%) was_in_snapshot, 153,500 ( 0.00%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . self.universe.set(universe); . . let mut inner = self.inner.borrow_mut(); 76,750 ( 0.00%) inner.rollback_to(undo_snapshot); . inner.unwrap_region_constraints().rollback_to(region_constraints_snapshot); . } . 3,116,970 ( 0.05%) #[instrument(skip(self, snapshot), level = "debug")] . fn commit_from(&self, snapshot: CombinedSnapshot<'a, 'tcx>) { . let CombinedSnapshot { 207,798 ( 0.00%) undo_snapshot, . region_constraints_snapshot: _, . universe: _, 207,798 ( 0.00%) was_in_snapshot, 415,596 ( 0.01%) _in_progress_typeck_results, . } = snapshot; . . self.in_snapshot.set(was_in_snapshot); . . self.inner.borrow_mut().commit(undo_snapshot); . } . . /// Executes `f` and commit the bindings. 106,932 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 131,395 ( 0.00%) pub fn commit_unconditionally(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 11,945 ( 0.00%) let snapshot = self.start_snapshot(); 37,535 ( 0.00%) let r = f(&snapshot); 83,615 ( 0.00%) self.commit_from(snapshot); 74,217 ( 0.00%) r . } . . /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`. 
1,305,464 ( 0.02%) #[instrument(skip(self, f), level = "debug")] 1,610,109 ( 0.03%) pub fn commit_if_ok(&self, f: F) -> Result . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result, . { 249,316 ( 0.00%) let snapshot = self.start_snapshot(); 733,161 ( 0.01%) let r = f(&snapshot); . debug!("commit_if_ok() -- r.is_ok() = {}", r.is_ok()); 258,055 ( 0.00%) match r { . Ok(_) => { 1,387,348 ( 0.02%) self.commit_from(snapshot); . } . Err(_) => { 311,605 ( 0.01%) self.rollback_to("commit_if_ok -- error", snapshot); . } . } 1,399,229 ( 0.02%) r . } . . /// Execute `f` then unroll any bindings it creates. 286,398 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 338,164 ( 0.01%) pub fn probe(&self, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 84,295 ( 0.00%) let snapshot = self.start_snapshot(); 137,291 ( 0.00%) let r = f(&snapshot); 410,421 ( 0.01%) self.rollback_to("probe", snapshot); 136,754 ( 0.00%) r . } . . /// If `should_skip` is true, then execute `f` then unroll any bindings it creates. 207 ( 0.00%) #[instrument(skip(self, f), level = "debug")] 276 ( 0.00%) pub fn probe_maybe_skip_leak_check(&self, should_skip: bool, f: F) -> R . where . F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R, . { 46 ( 0.00%) let snapshot = self.start_snapshot(); 23 ( 0.00%) let was_skip_leak_check = self.skip_leak_check.get(); 46 ( 0.00%) if should_skip { . self.skip_leak_check.set(true); . } 92 ( 0.00%) let r = f(&snapshot); 207 ( 0.00%) self.rollback_to("probe", snapshot); . self.skip_leak_check.set(was_skip_leak_check); 276 ( 0.00%) r . } . . /// Scan the constraints produced since `snapshot` began and returns: . /// . /// - `None` -- if none of them involve "region outlives" constraints . /// - `Some(true)` -- if there are `'a: 'b` constraints where `'a` or `'b` is a placeholder . /// - `Some(false)` -- if there are `'a: 'b` constraints but none involve placeholders 13,468 ( 0.00%) pub fn region_constraints_added_in_snapshot( . &self, . snapshot: &CombinedSnapshot<'a, 'tcx>, . ) -> Option { 26,936 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() . .region_constraints_added_in_snapshot(&snapshot.undo_snapshot) 20,202 ( 0.00%) } . 8 ( 0.00%) pub fn add_given(&self, sub: ty::Region<'tcx>, sup: ty::RegionVid) { 16 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().add_given(sub, sup); 12 ( 0.00%) } . 3,204 ( 0.00%) pub fn can_sub(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).sub(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 2,403 ( 0.00%) } . 16,392 ( 0.00%) pub fn can_eq(&self, param_env: ty::ParamEnv<'tcx>, a: T, b: T) -> UnitResult<'tcx> . where . T: at::ToTrace<'tcx>, . { . let origin = &ObligationCause::dummy(); . self.probe(|_| { . self.at(origin, param_env).eq(a, b).map(|InferOk { obligations: _, .. }| { . // Ignore obligations, since we are unrolling . // everything anyway. . }) . }) 12,294 ( 0.00%) } . 207,980 ( 0.00%) #[instrument(skip(self), level = "debug")] . pub fn sub_regions( . &self, . origin: SubregionOrigin<'tcx>, . a: ty::Region<'tcx>, . b: ty::Region<'tcx>, . ) { 187,182 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().make_subregion(origin, a, b); . } . . /// Require that the region `r` be equal to one of the regions in . /// the set `regions`. 
45 ( 0.00%) #[instrument(skip(self), level = "debug")] . pub fn member_constraint( . &self, . opaque_type_def_id: DefId, . definition_span: Span, . hidden_ty: Ty<'tcx>, . region: ty::Region<'tcx>, . in_regions: &Lrc>>, . ) { 45 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().member_constraint( . opaque_type_def_id, . definition_span, . hidden_ty, . region, . in_regions, . ); . } . -- line 961 ---------------------------------------- -- line 969 ---------------------------------------- . /// to `subtype_predicate` -- that is, "coercing" `a` to `b` winds up . /// actually requiring `a <: b`. This is of course a valid coercion, . /// but it's not as flexible as `FnCtxt::coerce` would be. . /// . /// (We may refactor this in the future, but there are a number of . /// practical obstacles. Among other things, `FnCtxt::coerce` presently . /// records adjustments that are required on the HIR in order to perform . /// the coercion, and we don't currently have a way to manage that.) 45 ( 0.00%) pub fn coerce_predicate( . &self, . cause: &ObligationCause<'tcx>, . param_env: ty::ParamEnv<'tcx>, . predicate: ty::PolyCoercePredicate<'tcx>, . ) -> Option> { 30 ( 0.00%) let subtype_predicate = predicate.map_bound(|p| ty::SubtypePredicate { . a_is_expected: false, // when coercing from `a` to `b`, `b` is expected . a: p.a, . b: p.b, . }); 75 ( 0.00%) self.subtype_predicate(cause, param_env, subtype_predicate) 60 ( 0.00%) } . 30,276 ( 0.00%) pub fn subtype_predicate( . &self, . cause: &ObligationCause<'tcx>, . param_env: ty::ParamEnv<'tcx>, . predicate: ty::PolySubtypePredicate<'tcx>, . ) -> Option> { . // Check for two unresolved inference variables, in which case we can . // make no progress. This is partly a micro-optimization, but it's . // also an opportunity to "sub-unify" the variables. This isn't -- line 999 ---------------------------------------- -- line 1002 ---------------------------------------- . // earlier that they are sub-unified). . // . // Note that we can just skip the binders here because . // type variables can't (at present, at . // least) capture any of the things bound by this binder. . // . // Note that this sub here is not just for diagnostics - it has semantic . // effects as well. 2,523 ( 0.00%) let r_a = self.shallow_resolve(predicate.skip_binder().a); 2,523 ( 0.00%) let r_b = self.shallow_resolve(predicate.skip_binder().b); 19,878 ( 0.00%) match (r_a.kind(), r_b.kind()) { 4,902 ( 0.00%) (&ty::Infer(ty::TyVar(a_vid)), &ty::Infer(ty::TyVar(b_vid))) => { . self.inner.borrow_mut().type_variables().sub(a_vid, b_vid); 4,902 ( 0.00%) return None; . } . _ => {} . } . . Some(self.commit_if_ok(|_snapshot| { 72 ( 0.00%) let ty::SubtypePredicate { a_is_expected, a, b } = . self.replace_bound_vars_with_placeholders(predicate); . 144 ( 0.00%) let ok = self.at(cause, param_env).sub_exp(a_is_expected, a, b)?; . . Ok(ok.unit()) . })) 22,707 ( 0.00%) } . 58,872 ( 0.00%) pub fn region_outlives_predicate( . &self, . cause: &traits::ObligationCause<'tcx>, . predicate: ty::PolyRegionOutlivesPredicate<'tcx>, . ) -> UnitResult<'tcx> { . self.commit_if_ok(|_snapshot| { . let ty::OutlivesPredicate(r_a, r_b) = . self.replace_bound_vars_with_placeholders(predicate); . let origin = SubregionOrigin::from_obligation_cause(cause, || { . RelateRegionParamBound(cause.span) . }); 49,060 ( 0.00%) self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b` . Ok(()) . }) 39,248 ( 0.00%) } . . /// Number of type variables created so far. 97 ( 0.00%) pub fn num_ty_vars(&self) -> usize { . 
self.inner.borrow_mut().type_variables().num_vars() 194 ( 0.00%) } . 61,022 ( 0.00%) pub fn next_ty_var_id(&self, origin: TypeVariableOrigin) -> TyVid { 305,110 ( 0.01%) self.inner.borrow_mut().type_variables().new_var(self.universe(), origin) 91,533 ( 0.00%) } . 46,582 ( 0.00%) pub fn next_ty_var(&self, origin: TypeVariableOrigin) -> Ty<'tcx> { 206,359 ( 0.00%) self.tcx.mk_ty_var(self.next_ty_var_id(origin)) 69,873 ( 0.00%) } . 1,478 ( 0.00%) pub fn next_ty_var_in_universe( . &self, . origin: TypeVariableOrigin, . universe: ty::UniverseIndex, . ) -> Ty<'tcx> { 8,129 ( 0.00%) let vid = self.inner.borrow_mut().type_variables().new_var(universe, origin); 739 ( 0.00%) self.tcx.mk_ty_var(vid) 2,217 ( 0.00%) } . . pub fn next_const_var( . &self, . ty: Ty<'tcx>, . origin: ConstVariableOrigin, . ) -> &'tcx ty::Const<'tcx> { . self.tcx.mk_const_var(self.next_const_var_id(origin), ty) . } -- line 1074 ---------------------------------------- -- line 1090 ---------------------------------------- . pub fn next_const_var_id(&self, origin: ConstVariableOrigin) -> ConstVid<'tcx> { . self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { . origin, . val: ConstVariableValue::Unknown { universe: self.universe() }, . }) . } . . fn next_int_var_id(&self) -> IntVid { 3,965 ( 0.00%) self.inner.borrow_mut().int_unification_table().new_key(None) . } . 2,379 ( 0.00%) pub fn next_int_var(&self) -> Ty<'tcx> { . self.tcx.mk_int_var(self.next_int_var_id()) 3,172 ( 0.00%) } . . fn next_float_var_id(&self) -> FloatVid { . self.inner.borrow_mut().float_unification_table().new_key(None) . } . . pub fn next_float_var(&self) -> Ty<'tcx> { . self.tcx.mk_float_var(self.next_float_var_id()) . } . . /// Creates a fresh region variable with the next available index. . /// The variable will be created in the maximum universe created . /// thus far, allowing it to name any region created thus far. 31,234 ( 0.00%) pub fn next_region_var(&self, origin: RegionVariableOrigin) -> ty::Region<'tcx> { 566,617 ( 0.01%) self.next_region_var_in_universe(origin, self.universe()) 62,468 ( 0.00%) } . . /// Creates a fresh region variable with the next available index . /// in the given universe; typically, you can use . /// `next_region_var` and just use the maximal universe. 229,640 ( 0.00%) pub fn next_region_var_in_universe( . &self, . origin: RegionVariableOrigin, . universe: ty::UniverseIndex, . ) -> ty::Region<'tcx> { . let region_var = 1,492,660 ( 0.02%) self.inner.borrow_mut().unwrap_region_constraints().new_region_var(universe, origin); 574,100 ( 0.01%) self.tcx.mk_region(ty::ReVar(region_var)) 344,460 ( 0.01%) } . . /// Return the universe that the region `r` was created in. For . /// most regions (e.g., `'static`, named regions from the user, . /// etc) this is the root universe U0. For inference variables or . /// placeholders, however, it will return the universe which which . /// they are associated. 37,002 ( 0.00%) pub fn universe_of_region(&self, r: ty::Region<'tcx>) -> ty::UniverseIndex { . self.inner.borrow_mut().unwrap_region_constraints().universe(r) 55,503 ( 0.00%) } . . /// Number of region variables created so far. 6,690 ( 0.00%) pub fn num_region_vars(&self) -> usize { . self.inner.borrow_mut().unwrap_region_constraints().num_region_vars() 10,035 ( 0.00%) } . . /// Just a convenient wrapper of `next_region_var` for using during NLL. 44,475 ( 0.00%) pub fn next_nll_region_var(&self, origin: NllRegionVariableOrigin) -> ty::Region<'tcx> { . 
self.next_region_var(RegionVariableOrigin::Nll(origin)) 88,950 ( 0.00%) } . . /// Just a convenient wrapper of `next_region_var` for using during NLL. 357 ( 0.00%) pub fn next_nll_region_var_in_universe( . &self, . origin: NllRegionVariableOrigin, . universe: ty::UniverseIndex, . ) -> ty::Region<'tcx> { 3,139 ( 0.00%) self.next_region_var_in_universe(RegionVariableOrigin::Nll(origin), universe) 714 ( 0.00%) } . 387,936 ( 0.01%) pub fn var_for_def(&self, span: Span, param: &ty::GenericParamDef) -> GenericArg<'tcx> { 211,908 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => { . // Create a region inference variable for the given . // region parameter definition. 15,276 ( 0.00%) self.next_region_var(EarlyBoundRegion(span, param.name)).into() . } . GenericParamDefKind::Type { .. } => { . // Create a type inference variable for the given . // type parameter definition. The substitutions are . // for actual parameters that may be referred to by . // the default of this type parameter, if it exists. . // e.g., `struct Foo(...);` when . // used in a path such as `Foo::::new()` will . // use an inference variable for `C` with `[T, U]` . // as the substitutions for the default, `(T, U)`. 131,824 ( 0.00%) let ty_var_id = self.inner.borrow_mut().type_variables().new_var( . self.universe(), 164,780 ( 0.00%) TypeVariableOrigin { . kind: TypeVariableOriginKind::TypeParameterDefinition( 32,956 ( 0.00%) param.name, 32,956 ( 0.00%) Some(param.def_id), . ), . span, . }, . ); . 32,956 ( 0.00%) self.tcx.mk_ty_var(ty_var_id).into() . } . GenericParamDefKind::Const { .. } => { . let origin = ConstVariableOrigin { . kind: ConstVariableOriginKind::ConstParameterDefinition( . param.name, . param.def_id, . ), . span, . }; . let const_var_id = 2,860 ( 0.00%) self.inner.borrow_mut().const_unification_table().new_key(ConstVarValue { . origin, . val: ConstVariableValue::Unknown { universe: self.universe() }, . }); 260 ( 0.00%) self.tcx.mk_const_var(const_var_id, self.tcx.type_of(param.def_id)).into() . } . } 2,080 ( 0.00%) } . . /// Given a set of generics defined on a type or impl, returns a substitution mapping each . /// type/region parameter to a fresh inference variable. 86,343 ( 0.00%) pub fn fresh_substs_for_item(&self, span: Span, def_id: DefId) -> SubstsRef<'tcx> { 400,696 ( 0.01%) InternalSubsts::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param)) 57,562 ( 0.00%) } . . /// Returns `true` if errors have been reported since this infcx was . /// created. This is sometimes used as a heuristic to skip . /// reporting errors that often occur as a result of earlier . /// errors, but where it's hard to be 100% sure (e.g., unresolved . /// inference variables, regionck errors). 1,980 ( 0.00%) pub fn is_tainted_by_errors(&self) -> bool { . debug!( . "is_tainted_by_errors(err_count={}, err_count_on_creation={}, \ . tainted_by_errors_flag={})", . self.tcx.sess.err_count(), . self.err_count_on_creation, . self.tainted_by_errors_flag.get() . ); . 59,256 ( 0.00%) if self.tcx.sess.err_count() > self.err_count_on_creation { . return true; // errors reported since this infcx was made . } . self.tainted_by_errors_flag.get() 2,970 ( 0.00%) } . . /// Set the "tainted by errors" flag to true. We call this when we . /// observe an error from a prior pass. . pub fn set_tainted_by_errors(&self) { . debug!("set_tainted_by_errors()"); . self.tainted_by_errors_flag.set(true) . } . . /// Process the region constraints and return any any errors that . /// result. 
After this, no more unification operations should be . /// done -- or the compiler will panic -- but it is legal to use . /// `resolve_vars_if_possible` as well as `fully_resolve`. 61,047 ( 0.00%) pub fn resolve_regions( . &self, . region_context: DefId, . outlives_env: &OutlivesEnvironment<'tcx>, . mode: RegionckMode, . ) -> Vec> { 165,699 ( 0.00%) let (var_infos, data) = { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; 8,721 ( 0.00%) assert!( 26,163 ( 0.00%) self.is_tainted_by_errors() || inner.region_obligations.is_empty(), . "region_obligations not empty: {:#?}", . inner.region_obligations . ); . inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) . .into_infos_and_data() 8,721 ( 0.00%) }; . . let region_rels = 8,721 ( 0.00%) &RegionRelations::new(self.tcx, region_context, outlives_env.free_region_map()); . 78,489 ( 0.00%) let (lexical_region_resolutions, errors) = 209,304 ( 0.00%) lexical_region_resolve::resolve(region_rels, var_infos, data, mode); . 34,884 ( 0.00%) let old_value = self.lexical_region_resolutions.replace(Some(lexical_region_resolutions)); 8,721 ( 0.00%) assert!(old_value.is_none()); . . errors 78,489 ( 0.00%) } . . /// Process the region constraints and report any errors that . /// result. After this, no more unification operations should be . /// done -- or the compiler will panic -- but it is legal to use . /// `resolve_vars_if_possible` as well as `fully_resolve`. 87,210 ( 0.00%) pub fn resolve_regions_and_report_errors( . &self, . region_context: DefId, . outlives_env: &OutlivesEnvironment<'tcx>, . mode: RegionckMode, . ) { 17,442 ( 0.00%) let errors = self.resolve_regions(region_context, outlives_env, mode); . 26,163 ( 0.00%) if !self.is_tainted_by_errors() { . // As a heuristic, just skip reporting region errors . // altogether if other errors have been reported while . // this infcx was in use. This is totally hokey but . // otherwise we have a hard time separating legit region . // errors from silly ones. 17,442 ( 0.00%) self.report_region_errors(&errors); . } 43,605 ( 0.00%) } . . /// Obtains (and clears) the current set of region . /// constraints. The inference context is still usable: further . /// unifications will simply add new constraints. . /// . /// This method is not meant to be used with normal lexical region . /// resolution. Rather, it is used in the NLL mode as a kind of . /// interim hack: basically we run normal type-check and generate . /// region constraints as normal, but then we take them and . /// translate them into the form that the NLL solver . /// understands. See the NLL module for mode details. 30 ( 0.00%) pub fn take_and_reset_region_constraints(&self) -> RegionConstraintData<'tcx> { 10 ( 0.00%) assert!( . self.inner.borrow().region_obligations.is_empty(), . "region_obligations not empty: {:#?}", . self.inner.borrow().region_obligations . ); . 40 ( 0.00%) self.inner.borrow_mut().unwrap_region_constraints().take_and_reset_data() 50 ( 0.00%) } . . /// Gives temporary access to the region constraint data. . pub fn with_region_constraints( . &self, . op: impl FnOnce(&RegionConstraintData<'tcx>) -> R, . ) -> R { . let mut inner = self.inner.borrow_mut(); 17,804 ( 0.00%) op(inner.unwrap_region_constraints().data()) . } . . pub fn region_var_origin(&self, vid: ty::RegionVid) -> RegionVariableOrigin { . let mut inner = self.inner.borrow_mut(); . let inner = &mut *inner; . inner . .region_constraint_storage . 
.as_mut() -- line 1335 ---------------------------------------- -- line 1338 ---------------------------------------- . .var_origin(vid) . } . . /// Takes ownership of the list of variable regions. This implies . /// that all the region constraints have already been taken, and . /// hence that `resolve_regions_and_report_errors` can never be . /// called. This is used only during NLL processing to "hand off" ownership . /// of the set of region variables into the NLL region context. 4,125 ( 0.00%) pub fn take_region_var_origins(&self) -> VarInfos { . let mut inner = self.inner.borrow_mut(); 15,675 ( 0.00%) let (var_infos, data) = inner . .region_constraint_storage . .take() . .expect("regions already resolved") . .with_log(&mut inner.undo_log) 825 ( 0.00%) .into_infos_and_data(); 825 ( 0.00%) assert!(data.is_empty()); . var_infos 6,600 ( 0.00%) } . . pub fn ty_to_string(&self, t: Ty<'tcx>) -> String { . self.resolve_vars_if_possible(t).to_string() . } . . /// If `TyVar(vid)` resolves to a type, return that type. Else, return the . /// universe index of `TyVar(vid)`. 22,562 ( 0.00%) pub fn probe_ty_var(&self, vid: TyVid) -> Result, ty::UniverseIndex> { . use self::type_variable::TypeVariableValue; . 67,686 ( 0.00%) match self.inner.borrow_mut().type_variables().probe(vid) { . TypeVariableValue::Known { value } => Ok(value), . TypeVariableValue::Unknown { universe } => Err(universe), . } 78,967 ( 0.00%) } . . /// Resolve any type variables found in `value` -- but only one . /// level. So, if the variable `?X` is bound to some type . /// `Foo`, then this would return `Foo` (but `?Y` may . /// itself be bound to a type). . /// . /// Useful when you only need to inspect the outermost level of . /// the type and don't care about nested types (or perhaps you . /// will be resolving them as well, e.g. in a loop). . pub fn shallow_resolve(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 494,233 ( 0.01%) value.fold_with(&mut ShallowResolver { infcx: self }) . } . 51,794 ( 0.00%) pub fn root_var(&self, var: ty::TyVid) -> ty::TyVid { . self.inner.borrow_mut().type_variables().root_var(var) 77,691 ( 0.00%) } . . /// Where possible, replaces type/const variables in . /// `value` with their final value. Note that region variables . /// are unaffected. If a type/const variable has not been unified, it . /// is left as is. This is an idempotent operation that does . /// not affect inference state in any way and so you can do it . /// at will. 6,980 ( 0.00%) pub fn resolve_vars_if_possible(&self, value: T) -> T . where . T: TypeFoldable<'tcx>, . { 510,131 ( 0.01%) if !value.needs_infer() { 257,582 ( 0.00%) return value; // Avoid duplicated subst-folding. . } 584,175 ( 0.01%) let mut r = resolve::OpportunisticVarResolver::new(self); 743,807 ( 0.01%) value.fold_with(&mut r) 8,564 ( 0.00%) } . . /// Returns the first unresolved variable contained in `T`. In the . /// process of visiting `T`, this will resolve (where possible) . /// type variables in `T`, but it never constructs the final, . /// resolved type, so it's more efficient than . /// `resolve_vars_if_possible()`. . pub fn unresolved_type_vars(&self, value: &T) -> Option<(Ty<'tcx>, Option)> . where -- line 1415 ---------------------------------------- -- line 1490 ---------------------------------------- . expected: &'tcx ty::Const<'tcx>, . actual: &'tcx ty::Const<'tcx>, . err: TypeError<'tcx>, . ) -> DiagnosticBuilder<'tcx> { . let trace = TypeTrace::consts(cause, true, expected, actual); . self.report_and_explain_type_error(trace, &err) . 
} . 52,578 ( 0.00%) pub fn replace_bound_vars_with_fresh_vars( . &self, . span: Span, . lbrct: LateBoundRegionConversionTime, . value: ty::Binder<'tcx, T>, . ) -> (T, BTreeMap>) . where . T: TypeFoldable<'tcx>, . { . let fld_r = 278,124 ( 0.00%) |br: ty::BoundRegion| self.next_region_var(LateBoundRegion(span, br.kind, lbrct)); . let fld_t = |_| { . self.next_ty_var(TypeVariableOrigin { . kind: TypeVariableOriginKind::MiscVariable, . span, . }) . }; . let fld_c = |_, ty| { . self.next_const_var( . ty, . ConstVariableOrigin { kind: ConstVariableOriginKind::MiscVariable, span }, . ) . }; 273,123 ( 0.00%) self.tcx.replace_bound_vars(value, fld_r, fld_t, fld_c) 35,052 ( 0.00%) } . . /// See the [`region_constraints::RegionConstraintCollector::verify_generic_bound`] method. 6,704 ( 0.00%) pub fn verify_generic_bound( . &self, . origin: SubregionOrigin<'tcx>, . kind: GenericKind<'tcx>, . a: ty::Region<'tcx>, . bound: VerifyBound<'tcx>, . ) { . debug!("verify_generic_bound({:?}, {:?} <: {:?})", kind, a, bound); . 2,514 ( 0.00%) self.inner . .borrow_mut() . .unwrap_region_constraints() 14,246 ( 0.00%) .verify_generic_bound(origin, kind, a, bound); 5,866 ( 0.00%) } . . /// Obtains the latest type of the given closure; this may be a . /// closure in the current function, in which case its . /// `ClosureKind` may not yet be known. 868 ( 0.00%) pub fn closure_kind(&self, closure_substs: SubstsRef<'tcx>) -> Option { 1,736 ( 0.00%) let closure_kind_ty = closure_substs.as_closure().kind_ty(); . let closure_kind_ty = self.shallow_resolve(closure_kind_ty); 1,302 ( 0.00%) closure_kind_ty.to_opt_closure_kind() . } . . /// Clears the selection, evaluation, and projection caches. This is useful when . /// repeatedly attempting to select an `Obligation` while changing only . /// its `ParamEnv`, since `FulfillmentContext` doesn't use probing. . pub fn clear_caches(&self) { . self.selection_cache.clear(); . self.evaluation_cache.clear(); . self.inner.borrow_mut().projection_cache().clear(); . } . . pub fn universe(&self) -> ty::UniverseIndex { 874,254 ( 0.01%) self.universe.get() 330,559 ( 0.01%) } . . /// Creates and return a fresh universe that extends all previous . /// universes. Updates `self.universe` to that new universe. 562 ( 0.00%) pub fn create_next_universe(&self) -> ty::UniverseIndex { 762 ( 0.00%) let u = self.universe.get().next_universe(); . self.universe.set(u); . u 562 ( 0.00%) } . . /// Resolves and evaluates a constant. . /// . /// The constant can be located on a trait like `::C`, in which case the given . /// substitutions and environment are used to resolve the constant. Alternatively if the . /// constant has generic parameters in scope the substitutions are used to evaluate the value of . /// the constant. For example in `fn foo() { let _ = [0; bar::()]; }` the repeat count . /// constant `bar::()` requires a substitution for `T`, if the substitution for `T` is still -- line 1576 ---------------------------------------- -- line 1606 ---------------------------------------- . // variables, thus we don't need to substitute back the original values. . self.tcx.const_eval_resolve(param_env_erased, unevaluated, span) . } . . /// If `typ` is a type variable of some kind, resolve it one level . /// (but do not resolve types found in the result). If `typ` is . /// not a type variable, just return it unmodified. . // FIXME(eddyb) inline into `ShallowResolver::visit_ty`. 5,836,336 ( 0.10%) fn shallow_resolve_ty(&self, typ: Ty<'tcx>) -> Ty<'tcx> { 3,263,125 ( 0.05%) match *typ.kind() { . 
ty::Infer(ty::TyVar(v)) => { . // Not entirely obvious: if `typ` is a type variable, . // it can be resolved to an int/float variable, which . // can then be recursively resolved, hence the . // recursion. Note though that we prevent type . // variables from unifying to other type variables . // directly (though they may be embedded . // structurally), and we prevent cycles in any case, . // so this recursion should always be of very limited . // depth. . // . // Note: if these two lines are combined into one we get . // dynamic borrow errors on `self.inner`. 1,810,348 ( 0.03%) let known = self.inner.borrow_mut().type_variables().probe(v).known(); . known.map_or(typ, |t| self.shallow_resolve_ty(t)) . } . 26,040 ( 0.00%) ty::Infer(ty::IntVar(v)) => self . .inner . .borrow_mut() . .int_unification_table() . .probe_value(v) 9,660 ( 0.00%) .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . ty::Infer(ty::FloatVar(v)) => self . .inner . .borrow_mut() . .float_unification_table() . .probe_value(v) . .map(|v| v.to_type(self.tcx)) . .unwrap_or(typ), . . _ => typ, . } 6,565,878 ( 0.11%) } . . /// `ty_or_const_infer_var_changed` is equivalent to one of these two: . /// * `shallow_resolve(ty) != ty` (where `ty.kind = ty::Infer(_)`) . /// * `shallow_resolve(ct) != ct` (where `ct.kind = ty::ConstKind::Infer(_)`) . /// . /// However, `ty_or_const_infer_var_changed` is more efficient. It's always . /// inlined, despite being large, because it has only two call sites that . /// are extremely hot (both in `traits::fulfill`'s checking of `stalled_on` -- line 1659 ---------------------------------------- -- line 1662 ---------------------------------------- . #[inline(always)] . pub fn ty_or_const_infer_var_changed(&self, infer_var: TyOrConstInferVar<'tcx>) -> bool { . match infer_var { . TyOrConstInferVar::Ty(v) => { . use self::type_variable::TypeVariableValue; . . // If `inlined_probe` returns a `Known` value, it never equals . // `ty::Infer(ty::TyVar(v))`. 46,719,372 ( 0.78%) match self.inner.borrow_mut().type_variables().inlined_probe(v) { . TypeVariableValue::Unknown { .. } => false, . TypeVariableValue::Known { .. } => true, . } . } . . TyOrConstInferVar::TyInt(v) => { . // If `inlined_probe_value` returns a value it's always a . // `ty::Int(_)` or `ty::UInt(_)`, which never matches a . // `ty::Infer(_)`. 20,294 ( 0.00%) self.inner.borrow_mut().int_unification_table().inlined_probe_value(v).is_some() . } . . TyOrConstInferVar::TyFloat(v) => { . // If `probe_value` returns a value it's always a . // `ty::Float(_)`, which never matches a `ty::Infer(_)`. . // . // Not `inlined_probe_value(v)` because this call site is colder. . self.inner.borrow_mut().float_unification_table().probe_value(v).is_some() -- line 1688 ---------------------------------------- -- line 1716 ---------------------------------------- . /// Equivalent to `ty::ConstKind::Infer(ty::InferConst::Var(_))`. . Const(ConstVid<'tcx>), . } . . impl<'tcx> TyOrConstInferVar<'tcx> { . /// Tries to extract an inference variable from a type or a constant, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`) and . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). 25,293 ( 0.00%) pub fn maybe_from_generic_arg(arg: GenericArg<'tcx>) -> Option { . match arg.unpack() { . GenericArgKind::Type(ty) => Self::maybe_from_ty(ty), . GenericArgKind::Const(ct) => Self::maybe_from_const(ct), . GenericArgKind::Lifetime(_) => None, . } 25,293 ( 0.00%) } . . 
/// Tries to extract an inference variable from a type, returns `None` . /// for types other than `ty::Infer(_)` (or `InferTy::Fresh*`). 4,890 ( 0.00%) pub fn maybe_from_ty(ty: Ty<'tcx>) -> Option { 271,021 ( 0.00%) match *ty.kind() { 59,870 ( 0.00%) ty::Infer(ty::TyVar(v)) => Some(TyOrConstInferVar::Ty(v)), 444 ( 0.00%) ty::Infer(ty::IntVar(v)) => Some(TyOrConstInferVar::TyInt(v)), . ty::Infer(ty::FloatVar(v)) => Some(TyOrConstInferVar::TyFloat(v)), . _ => None, . } 4,890 ( 0.00%) } . . /// Tries to extract an inference variable from a constant, returns `None` . /// for constants other than `ty::ConstKind::Infer(_)` (or `InferConst::Fresh`). . pub fn maybe_from_const(ct: &'tcx ty::Const<'tcx>) -> Option { . match ct.val { . ty::ConstKind::Infer(InferConst::Var(v)) => Some(TyOrConstInferVar::Const(v)), . _ => None, . } -- line 1749 ---------------------------------------- -- line 1755 ---------------------------------------- . } . . impl<'a, 'tcx> TypeFolder<'tcx> for ShallowResolver<'a, 'tcx> { . fn tcx<'b>(&'b self) -> TyCtxt<'tcx> { . self.infcx.tcx . } . . fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { 1,066,503 ( 0.02%) self.infcx.shallow_resolve_ty(ty) . } . 3,360 ( 0.00%) fn fold_const(&mut self, ct: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> { 2,448 ( 0.00%) if let ty::Const { val: ty::ConstKind::Infer(InferConst::Var(vid)), .. } = ct { 1,920 ( 0.00%) self.infcx . .inner . .borrow_mut() . .const_unification_table() 1,152 ( 0.00%) .probe_value(*vid) . .val . .known() . .unwrap_or(ct) . } else { . ct . } 4,200 ( 0.00%) } . } . . impl<'tcx> TypeTrace<'tcx> { . pub fn span(&self) -> Span { 2,366 ( 0.00%) self.cause.span . } . . pub fn types( . cause: &ObligationCause<'tcx>, . a_is_expected: bool, . a: Ty<'tcx>, . b: Ty<'tcx>, . ) -> TypeTrace<'tcx> { -- line 1792 ---------------------------------------- -- line 1800 ---------------------------------------- . b: &'tcx ty::Const<'tcx>, . ) -> TypeTrace<'tcx> { . TypeTrace { cause: cause.clone(), values: Consts(ExpectedFound::new(a_is_expected, a, b)) } . } . } . . impl<'tcx> SubregionOrigin<'tcx> { . pub fn span(&self) -> Span { 5,915 ( 0.00%) match *self { 1,183 ( 0.00%) Subtype(ref a) => a.span(), . RelateObjectBound(a) => a, . RelateParamBound(a, ..) => a, . RelateRegionParamBound(a) => a, . Reborrow(a) => a, . ReborrowUpvar(a, _) => a, . DataBorrowed(_, a) => a, . ReferenceOutlivesReferent(_, a) => a, . CompareImplMethodObligation { span, .. } => span, -- line 1817 ---------------------------------------- -- line 1818 ---------------------------------------- . CompareImplTypeObligation { span, .. } => span, . } . } . . pub fn from_obligation_cause(cause: &traits::ObligationCause<'tcx>, default: F) -> Self . where . F: FnOnce() -> Self, . { 62,966 ( 0.00%) match *cause.code() { 8,223 ( 0.00%) traits::ObligationCauseCode::ReferenceOutlivesReferent(ref_type) => { 41,115 ( 0.00%) SubregionOrigin::ReferenceOutlivesReferent(ref_type, cause.span) . } . . traits::ObligationCauseCode::CompareImplMethodObligation { . impl_item_def_id, . trait_item_def_id, . } => SubregionOrigin::CompareImplMethodObligation { . span: cause.span, . 
impl_item_def_id, -- line 1836 ---------------------------------------- 32,703,673 ( 0.54%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/iter/adapters/zip.rs -------------------------------------------------------------------------------- Ir -- line 15 ---------------------------------------- . b: B, . // index, len and a_len are only used by the specialized version of zip . index: usize, . len: usize, . a_len: usize, . } . impl Zip { . pub(in crate::iter) fn new(a: A, b: B) -> Zip { 80 ( 0.00%) ZipImpl::new(a, b) . } . fn super_nth(&mut self, mut n: usize) -> Option<(A::Item, B::Item)> { . while let Some(x) = Iterator::next(self) { . if n == 0 { . return Some(x); . } . n -= 1; . } -- line 31 ---------------------------------------- -- line 63 ---------------------------------------- . /// assert!(iter.next().is_none()); . /// ``` . #[stable(feature = "iter_zip", since = "1.59.0")] . pub fn zip(a: A, b: B) -> Zip . where . A: IntoIterator, . B: IntoIterator, . { 66,227 ( 0.00%) ZipImpl::new(a.into_iter(), b.into_iter()) . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl Iterator for Zip . where . A: Iterator, . B: Iterator, . { -- line 79 ---------------------------------------- -- line 136 ---------------------------------------- . Self: Iterator + TrustedRandomAccessNoCoerce; . } . . // Work around limitations of specialization, requiring `default` impls to be repeated . // in intermediary impls. . macro_rules! zip_impl_general_defaults { . () => { . default fn new(a: A, b: B) -> Self { 160 ( 0.00%) Zip { . a, . b, . index: 0, // unused . len: 0, // unused . a_len: 0, // unused . } . } . . #[inline] . default fn next(&mut self) -> Option<(A::Item, B::Item)> { 108,883 ( 0.00%) let x = self.a.next()?; 61,302 ( 0.00%) let y = self.b.next()?; 32,424 ( 0.00%) Some((x, y)) 19,502 ( 0.00%) } . . #[inline] . default fn nth(&mut self, n: usize) -> Option { . self.super_nth(n) . } . . #[inline] . default fn next_back(&mut self) -> Option<(A::Item, B::Item)> -- line 166 ---------------------------------------- -- line 203 ---------------------------------------- . B: Iterator, . { . type Item = (A::Item, B::Item); . . zip_impl_general_defaults! {} . . #[inline] . default fn size_hint(&self) -> (usize, Option) { 906 ( 0.00%) let (a_lower, a_upper) = self.a.size_hint(); 22 ( 0.00%) let (b_lower, b_upper) = self.b.size_hint(); . . let lower = cmp::min(a_lower, b_lower); . . let upper = match (a_upper, b_upper) { . (Some(x), Some(y)) => Some(cmp::min(x, y)), . (Some(x), None) => Some(x), . (None, Some(y)) => Some(y), . (None, None) => None, -- line 220 ---------------------------------------- -- line 236 ---------------------------------------- . where . A: TrustedRandomAccessNoCoerce + Iterator, . B: TrustedRandomAccessNoCoerce + Iterator, . { . zip_impl_general_defaults! {} . . #[inline] . default fn size_hint(&self) -> (usize, Option) { 3,822 ( 0.00%) let size = cmp::min(self.a.size(), self.b.size()); . (size, Some(size)) . } . . #[inline] . unsafe fn get_unchecked(&mut self, idx: usize) -> ::Item { 156 ( 0.00%) let idx = self.index + idx; . // SAFETY: the caller must uphold the contract for . // `Iterator::__iterator_get_unchecked`. . unsafe { (self.a.__iterator_get_unchecked(idx), self.b.__iterator_get_unchecked(idx)) } . } . } . . #[doc(hidden)] . 
impl ZipImpl for Zip -- line 258 ---------------------------------------- -- line 263 ---------------------------------------- . fn new(a: A, b: B) -> Self { . let a_len = a.size(); . let len = cmp::min(a_len, b.size()); . Zip { a, b, index: 0, len, a_len } . } . . #[inline] . fn next(&mut self) -> Option<(A::Item, B::Item)> { 10,210,588 ( 0.17%) if self.index < self.len { . let i = self.index; . // since get_unchecked executes code which can panic we increment the counters beforehand . // so that the same index won't be accessed twice, as required by TrustedRandomAccess 4,170,244 ( 0.07%) self.index += 1; . // SAFETY: `i` is smaller than `self.len`, thus smaller than `self.a.len()` and `self.b.len()` . unsafe { 2,796 ( 0.00%) Some((self.a.__iterator_get_unchecked(i), self.b.__iterator_get_unchecked(i))) . } 100 ( 0.00%) } else if A::MAY_HAVE_SIDE_EFFECT && self.index < self.a_len { . let i = self.index; . // as above, increment before executing code that may panic . self.index += 1; . self.len += 1; . // match the base implementation's potential side effects . // SAFETY: we just checked that `i` < `self.a.len()` . unsafe { . self.a.__iterator_get_unchecked(i); -- line 288 ---------------------------------------- -- line 290 ---------------------------------------- . None . } else { . None . } . } . . #[inline] . fn size_hint(&self) -> (usize, Option) { 483,570 ( 0.01%) let len = self.len - self.index; . (len, Some(len)) . } . . #[inline] . fn nth(&mut self, n: usize) -> Option { . let delta = cmp::min(n, self.len - self.index); . let end = self.index + delta; . while self.index < end { -- line 306 ---------------------------------------- -- line 535 ---------------------------------------- . #[unstable(feature = "trusted_random_access", issue = "none")] . #[rustc_specialization_trait] . pub unsafe trait TrustedRandomAccessNoCoerce: Sized { . // Convenience method. . fn size(&self) -> usize . where . Self: Iterator, . { 8,944 ( 0.00%) self.size_hint().0 . } . /// `true` if getting an iterator element may have side effects. . /// Remember to take inner iterators into account. . const MAY_HAVE_SIDE_EFFECT: bool; . } . . /// Like `Iterator::__iterator_get_unchecked`, but doesn't require the compiler to . /// know that `U: TrustedRandomAccess`. -- line 551 ---------------------------------------- -- line 555 ---------------------------------------- . /// Same requirements calling `get_unchecked` directly. . #[doc(hidden)] . pub(in crate::iter::adapters) unsafe fn try_get_unchecked(it: &mut I, idx: usize) -> I::Item . where . I: Iterator, . { . // SAFETY: the caller must uphold the contract for . // `Iterator::__iterator_get_unchecked`. 19,439 ( 0.00%) unsafe { it.try_get_unchecked(idx) } . } . . unsafe trait SpecTrustedRandomAccess: Iterator { . /// If `Self: TrustedRandomAccess`, it must be safe to call . /// `Iterator::__iterator_get_unchecked(self, index)`. . unsafe fn try_get_unchecked(&mut self, index: usize) -> Self::Item; . } . -- line 571 ---------------------------------------- 1,719,478 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/relate.rs -------------------------------------------------------------------------------- Ir -- line 28 ---------------------------------------- . . /// Returns a static string we can use for printouts. . fn tag(&self) -> &'static str; . . /// Returns `true` if the value `a` is the "expected" type in the . 
/// relation. Just affects error messages. . fn a_is_expected(&self) -> bool; . 13,122 ( 0.00%) fn with_cause(&mut self, _cause: Cause, f: F) -> R . where . F: FnOnce(&mut Self) -> R, . { . f(self) 11,572 ( 0.00%) } . . /// Generic relation routine suitable for most anything. 880,963 ( 0.01%) fn relate>(&mut self, a: T, b: T) -> RelateResult<'tcx, T> { 766,606 ( 0.01%) Relate::relate(self, a, b) 725,103 ( 0.01%) } . . /// Relate the two substitutions for the given item. The default . /// is to look up the variance for the item and proceed . /// accordingly. 311,560 ( 0.01%) fn relate_item_substs( . &mut self, . item_def_id: DefId, . a_subst: SubstsRef<'tcx>, . b_subst: SubstsRef<'tcx>, . ) -> RelateResult<'tcx, SubstsRef<'tcx>> { . debug!( . "relate_item_substs(item_def_id={:?}, a_subst={:?}, b_subst={:?})", . item_def_id, a_subst, b_subst . ); . . let tcx = self.tcx(); . let opt_variances = tcx.variances_of(item_def_id); 124,624 ( 0.00%) relate_substs(self, Some((item_def_id, opt_variances)), a_subst, b_subst) 280,404 ( 0.00%) } . . /// Switch variance for the purpose of relating `a` and `b`. . fn relate_with_variance>( . &mut self, . variance: ty::Variance, . info: ty::VarianceDiagInfo<'tcx>, . a: T, . b: T, -- line 73 ---------------------------------------- -- line 108 ---------------------------------------- . a: Self, . b: Self, . ) -> RelateResult<'tcx, Self>; . } . . /////////////////////////////////////////////////////////////////////////// . // Relate impls . 313,400 ( 0.01%) pub fn relate_type_and_mut<'tcx, R: TypeRelation<'tcx>>( . relation: &mut R, . a: ty::TypeAndMut<'tcx>, . b: ty::TypeAndMut<'tcx>, . base_ty: Ty<'tcx>, . ) -> RelateResult<'tcx, ty::TypeAndMut<'tcx>> { . debug!("{}.mts({:?}, {:?})", relation.tag(), a, b); 55,867 ( 0.00%) if a.mutbl != b.mutbl { 1,980 ( 0.00%) Err(TypeError::Mutability) . } else { . let mutbl = a.mutbl; . let (variance, info) = match mutbl { . ast::Mutability::Not => (ty::Covariant, ty::VarianceDiagInfo::None), . ast::Mutability::Mut => { . (ty::Invariant, ty::VarianceDiagInfo::Invariant { ty: base_ty, param_index: 0 }) . } . }; 20,945 ( 0.00%) let ty = relation.relate_with_variance(variance, info, a.ty, b.ty)?; 144,926 ( 0.00%) Ok(ty::TypeAndMut { ty, mutbl }) . } 313,636 ( 0.01%) } . 4,020 ( 0.00%) pub fn relate_substs<'tcx, R: TypeRelation<'tcx>>( . relation: &mut R, . variances: Option<(DefId, &[ty::Variance])>, . a_subst: SubstsRef<'tcx>, . b_subst: SubstsRef<'tcx>, . ) -> RelateResult<'tcx, SubstsRef<'tcx>> { 249,955 ( 0.00%) let tcx = relation.tcx(); 149,547 ( 0.00%) let mut cached_ty = None; . . let params = iter::zip(a_subst, b_subst).enumerate().map(|(i, (a, b))| { 902,566 ( 0.01%) let (variance, variance_info) = match variances { 145,262 ( 0.00%) Some((ty_def_id, variances)) => { 217,893 ( 0.00%) let variance = variances[i]; 72,631 ( 0.00%) let variance_info = if variance == ty::Invariant { . let ty = 19,567 ( 0.00%) cached_ty.get_or_insert_with(|| tcx.type_of(ty_def_id).subst(tcx, a_subst)); . ty::VarianceDiagInfo::Invariant { ty, param_index: i.try_into().unwrap() } . } else { . ty::VarianceDiagInfo::default() . }; . (variance, variance_info) . } . None => (ty::Invariant, ty::VarianceDiagInfo::default()), . }; 211,776 ( 0.00%) relation.relate_with_variance(variance, variance_info, a, b) . }); . . tcx.mk_substs(params) 4,824 ( 0.00%) } . . impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> { 12,181 ( 0.00%) fn relate>( . relation: &mut R, . a: ty::FnSig<'tcx>, . b: ty::FnSig<'tcx>, . ) -> RelateResult<'tcx, ty::FnSig<'tcx>> { . 
let tcx = relation.tcx(); . 5,622 ( 0.00%) if a.c_variadic != b.c_variadic { . return Err(TypeError::VariadicMismatch(expected_found( . relation, . a.c_variadic, . b.c_variadic, . ))); . } 4,796 ( 0.00%) let unsafety = relation.relate(a.unsafety, b.unsafety)?; . let abi = relation.relate(a.abi, b.abi)?; . 7,909 ( 0.00%) if a.inputs().len() != b.inputs().len() { . return Err(TypeError::ArgCount); . } . 8,433 ( 0.00%) let inputs_and_output = iter::zip(a.inputs(), b.inputs()) 3,746 ( 0.00%) .map(|(&a, &b)| ((a, b), false)) 7,496 ( 0.00%) .chain(iter::once(((a.output(), b.output()), true))) 1,439 ( 0.00%) .map(|((a, b), is_output)| { 1,439 ( 0.00%) if is_output { 980 ( 0.00%) relation.relate(a, b) . } else { . relation.relate_with_variance( . ty::Contravariant, . ty::VarianceDiagInfo::default(), . a, . b, . ) . } . }) . .enumerate() 5,048 ( 0.00%) .map(|(i, r)| match r { . Err(TypeError::Sorts(exp_found) | TypeError::ArgumentSorts(exp_found, _)) => { . Err(TypeError::ArgumentSorts(exp_found, i)) . } . Err(TypeError::Mutability | TypeError::ArgumentMutability(_)) => { . Err(TypeError::ArgumentMutability(i)) . } 8,385 ( 0.00%) r => r, . }); 7,083 ( 0.00%) Ok(ty::FnSig { . inputs_and_output: tcx.mk_type_list(inputs_and_output)?, 937 ( 0.00%) c_variadic: a.c_variadic, . unsafety, . abi, . }) 8,433 ( 0.00%) } . } . . impl<'tcx> Relate<'tcx> for ty::BoundConstness { . fn relate>( . relation: &mut R, . a: ty::BoundConstness, . b: ty::BoundConstness, . ) -> RelateResult<'tcx, ty::BoundConstness> { -- line 229 ---------------------------------------- -- line 236 ---------------------------------------- . } . . impl<'tcx> Relate<'tcx> for ast::Unsafety { . fn relate>( . relation: &mut R, . a: ast::Unsafety, . b: ast::Unsafety, . ) -> RelateResult<'tcx, ast::Unsafety> { 937 ( 0.00%) if a != b { . Err(TypeError::UnsafetyMismatch(expected_found(relation, a, b))) . } else { . Ok(a) . } . } . } . . impl<'tcx> Relate<'tcx> for abi::Abi { -- line 252 ---------------------------------------- -- line 255 ---------------------------------------- . a: abi::Abi, . b: abi::Abi, . ) -> RelateResult<'tcx, abi::Abi> { . if a == b { Ok(a) } else { Err(TypeError::AbiMismatch(expected_found(relation, a, b))) } . } . } . . impl<'tcx> Relate<'tcx> for ty::ProjectionTy<'tcx> { 10,859 ( 0.00%) fn relate>( . relation: &mut R, . a: ty::ProjectionTy<'tcx>, . b: ty::ProjectionTy<'tcx>, . ) -> RelateResult<'tcx, ty::ProjectionTy<'tcx>> { 2,889 ( 0.00%) if a.item_def_id != b.item_def_id { . Err(TypeError::ProjectionMismatched(expected_found( . relation, . a.item_def_id, . b.item_def_id, . ))) . } else { . let substs = relation.relate(a.substs, b.substs)?; 3,688 ( 0.00%) Ok(ty::ProjectionTy { item_def_id: a.item_def_id, substs: &substs }) . } 7,617 ( 0.00%) } . } . . impl<'tcx> Relate<'tcx> for ty::ExistentialProjection<'tcx> { . fn relate>( . relation: &mut R, . a: ty::ExistentialProjection<'tcx>, . b: ty::ExistentialProjection<'tcx>, . ) -> RelateResult<'tcx, ty::ExistentialProjection<'tcx>> { -- line 286 ---------------------------------------- -- line 304 ---------------------------------------- . b.substs, . )?; . Ok(ty::ExistentialProjection { item_def_id: a.item_def_id, substs, term }) . } . } . } . . impl<'tcx> Relate<'tcx> for ty::TraitRef<'tcx> { 346,011 ( 0.01%) fn relate>( . relation: &mut R, . a: ty::TraitRef<'tcx>, . b: ty::TraitRef<'tcx>, . ) -> RelateResult<'tcx, ty::TraitRef<'tcx>> { . // Different traits cannot be related. 
74,094 ( 0.00%) if a.def_id != b.def_id { 99 ( 0.00%) Err(TypeError::Traits(expected_found(relation, a.def_id, b.def_id))) . } else { 24,687 ( 0.00%) let substs = relate_substs(relation, None, a.substs, b.substs)?; 67,436 ( 0.00%) Ok(ty::TraitRef { def_id: a.def_id, substs }) . } 222,434 ( 0.00%) } . } . . impl<'tcx> Relate<'tcx> for ty::ExistentialTraitRef<'tcx> { 3,971 ( 0.00%) fn relate>( . relation: &mut R, . a: ty::ExistentialTraitRef<'tcx>, . b: ty::ExistentialTraitRef<'tcx>, . ) -> RelateResult<'tcx, ty::ExistentialTraitRef<'tcx>> { . // Different traits cannot be related. 5,733 ( 0.00%) if a.def_id != b.def_id { . Err(TypeError::Traits(expected_found(relation, a.def_id, b.def_id))) . } else { 2,633 ( 0.00%) let substs = relate_substs(relation, None, a.substs, b.substs)?; 3,100 ( 0.00%) Ok(ty::ExistentialTraitRef { def_id: a.def_id, substs }) . } 2,527 ( 0.00%) } . } . . #[derive(Copy, Debug, Clone, TypeFoldable)] . struct GeneratorWitness<'tcx>(&'tcx ty::List>); . . impl<'tcx> Relate<'tcx> for GeneratorWitness<'tcx> { . fn relate>( . relation: &mut R, -- line 348 ---------------------------------------- -- line 358 ---------------------------------------- . . impl<'tcx> Relate<'tcx> for Ty<'tcx> { . #[inline] . fn relate>( . relation: &mut R, . a: Ty<'tcx>, . b: Ty<'tcx>, . ) -> RelateResult<'tcx, Ty<'tcx>> { 814,500 ( 0.01%) relation.tys(a, b) . } . } . . /// The main "type relation" routine. Note that this does not handle . /// inference artifacts, so you should filter those out before calling . /// it. 1,747,272 ( 0.03%) pub fn super_relate_tys<'tcx, R: TypeRelation<'tcx>>( . relation: &mut R, . a: Ty<'tcx>, . b: Ty<'tcx>, . ) -> RelateResult<'tcx, Ty<'tcx>> { . let tcx = relation.tcx(); . debug!("super_relate_tys: a={:?} b={:?}", a, b); 3,894,496 ( 0.06%) match (a.kind(), b.kind()) { . (&ty::Infer(_), _) | (_, &ty::Infer(_)) => { . // The caller should handle these cases! . bug!("var types encountered in super_relate_tys") . } . . (ty::Bound(..), _) | (_, ty::Bound(..)) => { . bug!("bound types encountered in super_relate_tys") . } -- line 388 ---------------------------------------- -- line 391 ---------------------------------------- . . (&ty::Never, _) . | (&ty::Char, _) . | (&ty::Bool, _) . | (&ty::Int(_), _) . | (&ty::Uint(_), _) . | (&ty::Float(_), _) . | (&ty::Str, _) 18,433 ( 0.00%) if a == b => . { . Ok(a) . } . 9,053 ( 0.00%) (&ty::Param(ref a_p), &ty::Param(ref b_p)) if a_p.index == b_p.index => Ok(a), . . (ty::Placeholder(p1), ty::Placeholder(p2)) if p1 == p2 => Ok(a), . 1,192,288 ( 0.02%) (&ty::Adt(a_def, a_substs), &ty::Adt(b_def, b_substs)) if a_def == b_def => { 370,957 ( 0.01%) let substs = relation.relate_item_substs(a_def.did, a_substs, b_substs)?; . Ok(tcx.mk_adt(a_def, substs)) . } . . (&ty::Foreign(a_id), &ty::Foreign(b_id)) if a_id == b_id => Ok(tcx.mk_foreign(a_id)), . 13,377 ( 0.00%) (&ty::Dynamic(a_obj, a_region), &ty::Dynamic(b_obj, b_region)) => { 3,822 ( 0.00%) let region_bound = relation.with_cause(Cause::ExistentialRegionBound, |relation| { . relation.relate_with_variance( . ty::Contravariant, . ty::VarianceDiagInfo::default(), 1,911 ( 0.00%) a_region, . b_region, . ) . })?; . Ok(tcx.mk_dynamic(relation.relate(a_obj, b_obj)?, region_bound)) . } . . (&ty::Generator(a_id, a_substs, movability), &ty::Generator(b_id, b_substs, _)) . if a_id == b_id => -- line 428 ---------------------------------------- -- line 439 ---------------------------------------- . // inside the binder so we can related them . 
let a_types = a_types.map_bound(GeneratorWitness); . let b_types = b_types.map_bound(GeneratorWitness); . // Then remove the GeneratorWitness for the result . let types = relation.relate(a_types, b_types)?.map_bound(|witness| witness.0); . Ok(tcx.mk_generator_witness(types)) . } . 1,984 ( 0.00%) (&ty::Closure(a_id, a_substs), &ty::Closure(b_id, b_substs)) if a_id == b_id => { . // All Closure types with the same id represent . // the (anonymous) type of the same closure expression. So . // all of their regions should be equated. 496 ( 0.00%) let substs = relation.relate(a_substs, b_substs)?; . Ok(tcx.mk_closure(a_id, &substs)) . } . 28 ( 0.00%) (&ty::RawPtr(a_mt), &ty::RawPtr(b_mt)) => { 10 ( 0.00%) let mt = relate_type_and_mut(relation, a_mt, b_mt, a)?; . Ok(tcx.mk_ptr(mt)) . } . 529,778 ( 0.01%) (&ty::Ref(a_r, a_ty, a_mutbl), &ty::Ref(b_r, b_ty, b_mutbl)) => { 228,060 ( 0.00%) let r = relation.relate_with_variance( . ty::Contravariant, . ty::VarianceDiagInfo::default(), . a_r, . b_r, . )?; . let a_mt = ty::TypeAndMut { ty: a_ty, mutbl: a_mutbl }; . let b_mt = ty::TypeAndMut { ty: b_ty, mutbl: b_mutbl }; 366,392 ( 0.01%) let mt = relate_type_and_mut(relation, a_mt, b_mt, a)?; . Ok(tcx.mk_ref(r, mt)) . } . 3,795 ( 0.00%) (&ty::Array(a_t, sz_a), &ty::Array(b_t, sz_b)) => { 1,368 ( 0.00%) let t = relation.relate(a_t, b_t)?; 1,518 ( 0.00%) match relation.relate(sz_a, sz_b) { 2,277 ( 0.00%) Ok(sz) => Ok(tcx.mk_ty(ty::Array(t, sz))), . Err(err) => { . // Check whether the lengths are both concrete/known values, . // but are unequal, for better diagnostics. . // . // It might seem dubious to eagerly evaluate these constants here, . // we however cannot end up with errors in `Relate` during both . // `type_of` and `predicates_of`. This means that evaluating the . // constants should not cause cycle errors here. -- line 484 ---------------------------------------- -- line 489 ---------------------------------------- . TypeError::FixedArraySize(expected_found(relation, sz_a_val, sz_b_val)), . ), . _ => Err(err), . } . } . } . } . 5,888 ( 0.00%) (&ty::Slice(a_t), &ty::Slice(b_t)) => { 512 ( 0.00%) let t = relation.relate(a_t, b_t)?; . Ok(tcx.mk_slice(t)) . } . 21,406 ( 0.00%) (&ty::Tuple(as_), &ty::Tuple(bs)) => { 21,406 ( 0.00%) if as_.len() == bs.len() { . Ok(tcx.mk_tup( 26,578 ( 0.00%) iter::zip(as_, bs).map(|(a, b)| relation.relate(a.expect_ty(), b.expect_ty())), . )?) . } else if !(as_.is_empty() || bs.is_empty()) { . Err(TypeError::TupleSize(expected_found(relation, as_.len(), bs.len()))) . } else { . Err(TypeError::Sorts(expected_found(relation, a, b))) . } . } . 5,172 ( 0.00%) (&ty::FnDef(a_def_id, a_substs), &ty::FnDef(b_def_id, b_substs)) 8,620 ( 0.00%) if a_def_id == b_def_id => . { 6,896 ( 0.00%) let substs = relation.relate_item_substs(a_def_id, a_substs, b_substs)?; . Ok(tcx.mk_fn_def(a_def_id, substs)) . } . . (&ty::FnPtr(a_fty), &ty::FnPtr(b_fty)) => { 9,102 ( 0.00%) let fty = relation.relate(a_fty, b_fty)?; . Ok(tcx.mk_fn_ptr(fty)) . } . . // these two are already handled downstream in case of lazy normalization 2,625 ( 0.00%) (&ty::Projection(a_data), &ty::Projection(b_data)) => { . let projection_ty = relation.relate(a_data, b_data)?; . Ok(tcx.mk_projection(projection_ty.item_def_id, projection_ty.substs)) . } . 1,608 ( 0.00%) (&ty::Opaque(a_def_id, a_substs), &ty::Opaque(b_def_id, b_substs)) 4,020 ( 0.00%) if a_def_id == b_def_id => . { 4,824 ( 0.00%) let substs = relate_substs(relation, None, a_substs, b_substs)?; . Ok(tcx.mk_opaque(a_def_id, substs)) . } . . 
_ => Err(TypeError::Sorts(expected_found(relation, a, b))), . } 1,965,681 ( 0.03%) } . . /// The main "const relation" routine. Note that this does not handle . /// inference artifacts, so you should filter those out before calling . /// it. 2,990 ( 0.00%) pub fn super_relate_consts<'tcx, R: TypeRelation<'tcx>>( . relation: &mut R, . a: &'tcx ty::Const<'tcx>, . b: &'tcx ty::Const<'tcx>, . ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> { . debug!("{}.super_relate_consts(a = {:?}, b = {:?})", relation.tag(), a, b); . let tcx = relation.tcx(); . . // FIXME(oli-obk): once const generics can have generic types, this assertion . // will likely get triggered. Move to `normalize_erasing_regions` at that point. 598 ( 0.00%) let a_ty = tcx.erase_regions(a.ty); 598 ( 0.00%) let b_ty = tcx.erase_regions(b.ty); 299 ( 0.00%) if a_ty != b_ty { . relation.tcx().sess.delay_span_bug( . DUMMY_SP, . &format!("cannot relate constants of different types: {} != {}", a_ty, b_ty), . ); . } . . let eagerly_eval = |x: &'tcx ty::Const<'tcx>| x.eval(tcx, relation.param_env()); 299 ( 0.00%) let a = eagerly_eval(a); 598 ( 0.00%) let b = eagerly_eval(b); . . // Currently, the values that can be unified are primitive types, . // and those that derive both `PartialEq` and `Eq`, corresponding . // to structural-match types. 3,289 ( 0.00%) let is_match = match (a.val, b.val) { . (ty::ConstKind::Infer(_), _) | (_, ty::ConstKind::Infer(_)) => { . // The caller should handle these cases! . bug!("var types encountered in super_relate_consts: {:?} {:?}", a, b) . } . . (ty::ConstKind::Error(_), _) => return Ok(a), . (_, ty::ConstKind::Error(_)) => return Ok(b), . -- line 580 ---------------------------------------- -- line 608 ---------------------------------------- . substs, . promoted: au.promoted, . }), . ty: a.ty, . })); . } . _ => false, . }; 1,794 ( 0.00%) if is_match { Ok(a) } else { Err(TypeError::ConstMismatch(expected_found(relation, a, b))) } 2,691 ( 0.00%) } . . fn check_const_value_eq<'tcx, R: TypeRelation<'tcx>>( . relation: &mut R, . a_val: ConstValue<'tcx>, . b_val: ConstValue<'tcx>, . // FIXME(oli-obk): these arguments should go away with valtrees . a: &'tcx ty::Const<'tcx>, . b: &'tcx ty::Const<'tcx>, . // FIXME(oli-obk): this should just be `bool` with valtrees . ) -> RelateResult<'tcx, bool> { 299 ( 0.00%) let tcx = relation.tcx(); 3,289 ( 0.00%) Ok(match (a_val, b_val) { 6,578 ( 0.00%) (ConstValue::Scalar(Scalar::Int(a_val)), ConstValue::Scalar(Scalar::Int(b_val))) => { 299 ( 0.00%) a_val == b_val . } . ( . ConstValue::Scalar(Scalar::Ptr(a_val, _a_size)), . ConstValue::Scalar(Scalar::Ptr(b_val, _b_size)), . ) => { . a_val == b_val . || match (tcx.global_alloc(a_val.provenance), tcx.global_alloc(b_val.provenance)) { . (GlobalAlloc::Function(a_instance), GlobalAlloc::Function(b_instance)) => { -- line 639 ---------------------------------------- -- line 672 ---------------------------------------- . } . } . . _ => false, . }) . } . . impl<'tcx> Relate<'tcx> for &'tcx ty::List>> { 19,110 ( 0.00%) fn relate>( . relation: &mut R, . a: Self, . b: Self, . ) -> RelateResult<'tcx, Self> { 1,911 ( 0.00%) let tcx = relation.tcx(); . . // FIXME: this is wasteful, but want to do a perf run to see how slow it is. . // We need to perform this deduplication as we sometimes generate duplicate projections . // in `a`. . let mut a_v: Vec<_> = a.into_iter().collect(); . let mut b_v: Vec<_> = b.into_iter().collect(); . // `skip_binder` here is okay because `stable_cmp` doesn't look at binders . 
a_v.sort_by(|a, b| a.skip_binder().stable_cmp(tcx, &b.skip_binder())); . a_v.dedup(); . b_v.sort_by(|a, b| a.skip_binder().stable_cmp(tcx, &b.skip_binder())); . b_v.dedup(); 5,733 ( 0.00%) if a_v.len() != b_v.len() { . return Err(TypeError::ExistentialMismatch(expected_found(relation, a, b))); . } . 7,644 ( 0.00%) let v = iter::zip(a_v, b_v).map(|(ep_a, ep_b)| { . use crate::ty::ExistentialPredicate::*; 7,644 ( 0.00%) match (ep_a.skip_binder(), ep_b.skip_binder()) { 7,644 ( 0.00%) (Trait(a), Trait(b)) => Ok(ep_a . .rebind(Trait(relation.relate(ep_a.rebind(a), ep_b.rebind(b))?.skip_binder()))), . (Projection(a), Projection(b)) => Ok(ep_a.rebind(Projection( . relation.relate(ep_a.rebind(a), ep_b.rebind(b))?.skip_binder(), . ))), . (AutoTrait(a), AutoTrait(b)) if a == b => Ok(ep_a.rebind(AutoTrait(a))), . _ => Err(TypeError::ExistentialMismatch(expected_found(relation, a, b))), . } . }); 3,822 ( 0.00%) tcx.mk_poly_existential_predicates(v) 15,288 ( 0.00%) } . } . . impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> { . fn relate>( . relation: &mut R, . a: ty::ClosureSubsts<'tcx>, . b: ty::ClosureSubsts<'tcx>, . ) -> RelateResult<'tcx, ty::ClosureSubsts<'tcx>> { -- line 722 ---------------------------------------- -- line 737 ---------------------------------------- . } . . impl<'tcx> Relate<'tcx> for SubstsRef<'tcx> { . fn relate>( . relation: &mut R, . a: SubstsRef<'tcx>, . b: SubstsRef<'tcx>, . ) -> RelateResult<'tcx, SubstsRef<'tcx>> { 1,211 ( 0.00%) relate_substs(relation, None, a, b) . } . } . . impl<'tcx> Relate<'tcx> for ty::Region<'tcx> { . fn relate>( . relation: &mut R, . a: ty::Region<'tcx>, . b: ty::Region<'tcx>, . ) -> RelateResult<'tcx, ty::Region<'tcx>> { 350,494 ( 0.01%) relation.regions(a, b) . } . } . . impl<'tcx> Relate<'tcx> for &'tcx ty::Const<'tcx> { . fn relate>( . relation: &mut R, . a: &'tcx ty::Const<'tcx>, . b: &'tcx ty::Const<'tcx>, . ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> { 2,970 ( 0.00%) relation.consts(a, b) . } . } . . impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for ty::Binder<'tcx, T> { . fn relate>( . relation: &mut R, . a: ty::Binder<'tcx, T>, . b: ty::Binder<'tcx, T>, . ) -> RelateResult<'tcx, ty::Binder<'tcx, T>> { 21,098 ( 0.00%) relation.binders(a, b) . } . } . . impl<'tcx> Relate<'tcx> for GenericArg<'tcx> { 710,824 ( 0.01%) fn relate>( . relation: &mut R, . a: GenericArg<'tcx>, . b: GenericArg<'tcx>, . ) -> RelateResult<'tcx, GenericArg<'tcx>> { 845,016 ( 0.01%) match (a.unpack(), b.unpack()) { . (GenericArgKind::Lifetime(a_lt), GenericArgKind::Lifetime(b_lt)) => { . Ok(relation.relate(a_lt, b_lt)?.into()) . } . (GenericArgKind::Type(a_ty), GenericArgKind::Type(b_ty)) => { 3,595 ( 0.00%) Ok(relation.relate(a_ty, b_ty)?.into()) . } . (GenericArgKind::Const(a_ct), GenericArgKind::Const(b_ct)) => { . Ok(relation.relate(a_ct, b_ct)?.into()) . } . (GenericArgKind::Lifetime(unpacked), x) => { . bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) . } . (GenericArgKind::Type(unpacked), x) => { . bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) . } . (GenericArgKind::Const(unpacked), x) => { . bug!("impossible case reached: can't relate: {:?} with {:?}", unpacked, x) . } . } 735,986 ( 0.01%) } . } . . impl<'tcx> Relate<'tcx> for ty::ImplPolarity { . fn relate>( . relation: &mut R, . a: ty::ImplPolarity, . b: ty::ImplPolarity, . ) -> RelateResult<'tcx, ty::ImplPolarity> { -- line 813 ---------------------------------------- -- line 815 ---------------------------------------- . 
Err(TypeError::PolarityMismatch(expected_found(relation, a, b))) . } else { . Ok(a) . } . } . } . . impl<'tcx> Relate<'tcx> for ty::TraitPredicate<'tcx> { 484 ( 0.00%) fn relate>( . relation: &mut R, . a: ty::TraitPredicate<'tcx>, . b: ty::TraitPredicate<'tcx>, . ) -> RelateResult<'tcx, ty::TraitPredicate<'tcx>> { . Ok(ty::TraitPredicate { 220 ( 0.00%) trait_ref: relation.relate(a.trait_ref, b.trait_ref)?, . constness: relation.relate(a.constness, b.constness)?, . polarity: relation.relate(a.polarity, b.polarity)?, . }) 396 ( 0.00%) } . } . . impl<'tcx> Relate<'tcx> for ty::Term<'tcx> { . fn relate>( . relation: &mut R, . a: Self, . b: Self, . ) -> RelateResult<'tcx, Self> { -- line 841 ---------------------------------------- -- line 862 ---------------------------------------- . . /////////////////////////////////////////////////////////////////////////// . // Error handling . . pub fn expected_found<'tcx, R, T>(relation: &mut R, a: T, b: T) -> ExpectedFound . where . R: TypeRelation<'tcx>, . { 66 ( 0.00%) ExpectedFound::new(relation.a_is_expected(), a, b) . } 4,275,382 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs -------------------------------------------------------------------------------- Ir -- line 417 ---------------------------------------- . /// # #![allow(unused_mut)] . /// let mut vec: Vec = Vec::new(); . /// ``` . #[inline] . #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")] . #[stable(feature = "rust1", since = "1.0.0")] . #[must_use] . pub const fn new() -> Self { 9,126,084 ( 0.15%) Vec { buf: RawVec::NEW, len: 0 } 586 ( 0.00%) } . . /// Constructs a new, empty `Vec` with the specified capacity. . /// . /// The vector will be able to hold exactly `capacity` elements without . /// reallocating. If `capacity` is 0, the vector will not allocate. . /// . /// It is important to note that although the returned vector has the . /// *capacity* specified, the vector will have a zero *length*. For an -- line 434 ---------------------------------------- -- line 601 ---------------------------------------- . /// vec.push(11); . /// assert_eq!(vec.len(), 11); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { 2,419,384 ( 0.04%) Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 } . } . . /// Creates a `Vec` directly from the raw components of another vector. . /// . /// # Safety . /// . /// This is highly unsafe, due to the number of invariants that aren't . /// checked: -- line 617 ---------------------------------------- -- line 677 ---------------------------------------- . /// // Put everything back together into a Vec . /// let rebuilt = Vec::from_raw_parts_in(p, len, cap, alloc.clone()); . /// assert_eq!(rebuilt, [4, 5, 6]); . /// } . /// ``` . #[inline] . #[unstable(feature = "allocator_api", issue = "32838")] . pub unsafe fn from_raw_parts_in(ptr: *mut T, length: usize, capacity: usize, alloc: A) -> Self { 164,020 ( 0.00%) unsafe { Vec { buf: RawVec::from_raw_parts_in(ptr, capacity, alloc), len: length } } . } . . /// Decomposes a `Vec` into its raw components. . /// . /// Returns the raw pointer to the underlying data, the length of . /// the vector (in elements), and the allocated capacity of the . /// data (in elements). 
These are the same arguments in the same . /// order as the arguments to [`from_raw_parts`]. -- line 693 ---------------------------------------- -- line 778 ---------------------------------------- . /// . /// ``` . /// let vec: Vec = Vec::with_capacity(10); . /// assert_eq!(vec.capacity(), 10); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn capacity(&self) -> usize { 350,829 ( 0.01%) self.buf.capacity() . } . . /// Reserves capacity for at least `additional` more elements to be inserted . /// in the given `Vec`. The collection may reserve more space to avoid . /// frequent reallocations. After calling `reserve`, capacity will be . /// greater than or equal to `self.len() + additional`. Does nothing if . /// capacity is already sufficient. . /// -- line 794 ---------------------------------------- -- line 801 ---------------------------------------- . /// ``` . /// let mut vec = vec![1]; . /// vec.reserve(10); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve(&mut self, additional: usize) { 819,495 ( 0.01%) self.buf.reserve(self.len, additional); . } . . /// Reserves the minimum capacity for exactly `additional` more elements to . /// be inserted in the given `Vec`. After calling `reserve_exact`, . /// capacity will be greater than or equal to `self.len() + additional`. . /// Does nothing if the capacity is already sufficient. . /// . /// Note that the allocator may give the collection more space than it -- line 817 ---------------------------------------- -- line 829 ---------------------------------------- . /// ``` . /// let mut vec = vec![1]; . /// vec.reserve_exact(10); . /// assert!(vec.capacity() >= 11); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn reserve_exact(&mut self, additional: usize) { 24,864 ( 0.00%) self.buf.reserve_exact(self.len, additional); . } . . /// Tries to reserve capacity for at least `additional` more elements to be inserted . /// in the given `Vec`. The collection may reserve more space to avoid . /// frequent reallocations. After calling `try_reserve`, capacity will be . /// greater than or equal to `self.len() + additional`. Does nothing if . /// capacity is already sufficient. . /// -- line 845 ---------------------------------------- -- line 930 ---------------------------------------- . /// assert!(vec.capacity() >= 3); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn shrink_to_fit(&mut self) { . // The capacity is never less than the length, and there's nothing to do when . // they are equal, so we can avoid the panic case in `RawVec::shrink_to_fit` . // by only calling it with a greater capacity. 58,305 ( 0.00%) if self.capacity() > self.len { 9,870 ( 0.00%) self.buf.shrink_to_fit(self.len); . } . } . . /// Shrinks the capacity of the vector with a lower bound. . /// . /// The capacity will remain at least as large as both the length . /// and the supplied value. . /// -- line 947 ---------------------------------------- -- line 990 ---------------------------------------- . /// let slice = vec.into_boxed_slice(); . /// assert_eq!(slice.into_vec().capacity(), 3); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn into_boxed_slice(mut self) -> Box<[T], A> { . unsafe { . self.shrink_to_fit(); 20,198 ( 0.00%) let me = ManuallyDrop::new(self); . 
let buf = ptr::read(&me.buf); . let len = me.len(); . buf.into_box(len).assume_init() . } . } . . /// Shortens the vector, keeping the first `len` elements and dropping . /// the rest. -- line 1006 ---------------------------------------- -- line 1040 ---------------------------------------- . /// let mut vec = vec![1, 2, 3]; . /// vec.truncate(0); . /// assert_eq!(vec, []); . /// ``` . /// . /// [`clear`]: Vec::clear . /// [`drain`]: Vec::drain . #[stable(feature = "rust1", since = "1.0.0")] 341,167 ( 0.01%) pub fn truncate(&mut self, len: usize) { . // This is safe because: . // . // * the slice passed to `drop_in_place` is valid; the `len > self.len` . // case avoids creating an invalid slice, and . // * the `len` of the vector is shrunk before calling `drop_in_place`, . // such that no value will be dropped twice in case `drop_in_place` . // were to panic once (if it panics twice, the program aborts). . unsafe { . // Note: It's intentional that this is `>` and not `>=`. . // Changing it to `>=` has negative performance . // implications in some cases. See #78884 for more. 358,525 ( 0.01%) if len > self.len { . return; . } . let remaining_len = self.len - len; . let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len); 569,253 ( 0.01%) self.len = len; 17,398 ( 0.00%) ptr::drop_in_place(s); . } 409,400 ( 0.01%) } . . /// Extracts a slice containing the entire vector. . /// . /// Equivalent to `&s[..]`. . /// . /// # Examples . /// . /// ``` -- line 1076 ---------------------------------------- -- line 1126 ---------------------------------------- . /// ``` . /// . /// [`as_mut_ptr`]: Vec::as_mut_ptr . #[stable(feature = "vec_as_ptr", since = "1.37.0")] . #[inline] . pub fn as_ptr(&self) -> *const T { . // We shadow the slice method of the same name to avoid going through . // `deref`, which creates an intermediate reference. 33,110,977 ( 0.55%) let ptr = self.buf.ptr(); . unsafe { . assume(!ptr.is_null()); . } . ptr . } . . /// Returns an unsafe mutable pointer to the vector's buffer. . /// -- line 1142 ---------------------------------------- -- line 1162 ---------------------------------------- . /// } . /// assert_eq!(&*x, &[0, 1, 2, 3]); . /// ``` . #[stable(feature = "vec_as_ptr", since = "1.37.0")] . #[inline] . pub fn as_mut_ptr(&mut self) -> *mut T { . // We shadow the slice method of the same name to avoid going through . // `deref_mut`, which creates an intermediate reference. 18,311,675 ( 0.30%) let ptr = self.buf.ptr(); . unsafe { . assume(!ptr.is_null()); . } . ptr . } . . /// Returns a reference to the underlying allocator. . #[unstable(feature = "allocator_api", issue = "32838")] -- line 1178 ---------------------------------------- -- line 1259 ---------------------------------------- . /// . /// Normally, here, one would use [`clear`] instead to correctly drop . /// the contents and thus not leak memory. . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub unsafe fn set_len(&mut self, new_len: usize) { . debug_assert!(new_len <= self.capacity()); . 1,431,402 ( 0.02%) self.len = new_len; 872 ( 0.00%) } . . /// Removes an element from the vector and returns it. . /// . /// The removed element is replaced by the last element of the vector. . /// . /// This does not preserve ordering, but is *O*(1). . /// If you need to preserve the element order, use [`remove`] instead. . /// -- line 1276 ---------------------------------------- -- line 1305 ---------------------------------------- . assert_failed(index, len); . } . unsafe { . 
// We replace self[index] with the last element. Note that if the . // bounds check above succeeds there must be a last element (which . // can be self[index] itself). . let value = ptr::read(self.as_ptr().add(index)); . let base_ptr = self.as_mut_ptr(); 13 ( 0.00%) ptr::copy(base_ptr.add(len - 1), base_ptr.add(index), 1); . self.set_len(len - 1); . value . } . } . . /// Inserts an element at position `index` within the vector, shifting all . /// elements after it to the right. . /// -- line 1321 ---------------------------------------- -- line 1329 ---------------------------------------- . /// let mut vec = vec![1, 2, 3]; . /// vec.insert(1, 4); . /// assert_eq!(vec, [1, 4, 2, 3]); . /// vec.insert(4, 5); . /// assert_eq!(vec, [1, 4, 2, 3, 5]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 14,028 ( 0.00%) pub fn insert(&mut self, index: usize, element: T) { . #[cold] . #[inline(never)] . fn assert_failed(index: usize, len: usize) -> ! { . panic!("insertion index (is {}) should be <= len (is {})", index, len); . } . 2,377 ( 0.00%) let len = self.len(); 8,888 ( 0.00%) if index > len { . assert_failed(index, len); . } . . // space for the new element 10,844 ( 0.00%) if len == self.buf.capacity() { . self.reserve(1); . } . . unsafe { . // infallible . // The spot to put the new value . { . let p = self.as_mut_ptr().add(index); . // Shift everything over to make space. (Duplicating the . // `index`th element into two consecutive places.) 15,147 ( 0.00%) ptr::copy(p, p.offset(1), len - index); . // Write it in, overwriting the first copy of the `index`th . // element. . ptr::write(p, element); . } 10,844 ( 0.00%) self.set_len(len + 1); . } 12,024 ( 0.00%) } . . /// Removes and returns the element at position `index` within the vector, . /// shifting all elements after it to the left. . /// . /// Note: Because this shifts over the remaining elements, it has a . /// worst-case performance of *O*(*n*). If you don't need the order of elements . /// to be preserved, use [`swap_remove`] instead. If you'd like to remove . /// elements from the beginning of the `Vec`, consider using -- line 1376 ---------------------------------------- -- line 1395 ---------------------------------------- . pub fn remove(&mut self, index: usize) -> T { . #[cold] . #[inline(never)] . #[track_caller] . fn assert_failed(index: usize, len: usize) -> ! { . panic!("removal index (is {}) should be < len (is {})", index, len); . } . 1,074 ( 0.00%) let len = self.len(); 4,213 ( 0.00%) if index >= len { . assert_failed(index, len); . } . unsafe { . // infallible . let ret; . { . // the place we are taking from. . let ptr = self.as_mut_ptr().add(index); . // copy it out, unsafely having a copy of the value on . // the stack and in the vector at the same time. 1,328 ( 0.00%) ret = ptr::read(ptr); . . // Shift everything down to fill in that spot. 2,319 ( 0.00%) ptr::copy(ptr.offset(1), ptr, len - index - 1); . } 2,955 ( 0.00%) self.set_len(len - 1); . ret . } . } . . /// Retains only the elements specified by the predicate. . /// . /// In other words, remove all elements `e` such that `f(&e)` returns `false`. . /// This method operates in place, visiting each element exactly once in the -- line 1428 ---------------------------------------- -- line 1442 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2, 3, 4, 5]; . /// let keep = [false, true, true, false, true]; . /// let mut iter = keep.iter(); . /// vec.retain(|_| *iter.next().unwrap()); . 
/// assert_eq!(vec, [2, 3, 5]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] 926,568 ( 0.02%) pub fn retain(&mut self, mut f: F) . where . F: FnMut(&T) -> bool, . { 58,184 ( 0.00%) self.retain_mut(|elem| f(elem)); 898,344 ( 0.01%) } . . /// Retains only the elements specified by the predicate, passing a mutable reference to it. . /// . /// In other words, remove all elements `e` such that `f(&mut e)` returns `false`. . /// This method operates in place, visiting each element exactly once in the . /// original order, and preserves the order of the retained elements. . /// . /// # Examples -- line 1463 ---------------------------------------- -- line 1474 ---------------------------------------- . /// }); . /// assert_eq!(vec, [2, 3, 4]); . /// ``` . #[unstable(feature = "vec_retain_mut", issue = "90829")] . pub fn retain_mut(&mut self, mut f: F) . where . F: FnMut(&mut T) -> bool, . { 122,312 ( 0.00%) let original_len = self.len(); . // Avoid double drop if the drop guard is not executed, . // since we may make some holes during the process. . unsafe { self.set_len(0) }; . . // Vec: [Kept, Kept, Hole, Hole, Hole, Hole, Unchecked, Unchecked] . // |<- processed len ->| ^- next to check . // |<- deleted cnt ->| . // |<- original_len ->| -- line 1490 ---------------------------------------- -- line 1499 ---------------------------------------- . v: &'a mut Vec, . processed_len: usize, . deleted_cnt: usize, . original_len: usize, . } . . impl Drop for BackshiftOnDrop<'_, T, A> { . fn drop(&mut self) { 107,544 ( 0.00%) if self.deleted_cnt > 0 { . // SAFETY: Trailing unchecked items must be valid since we never touch them. . unsafe { . ptr::copy( . self.v.as_ptr().add(self.processed_len), 92,284 ( 0.00%) self.v.as_mut_ptr().add(self.processed_len - self.deleted_cnt), . self.original_len - self.processed_len, . ); . } . } . // SAFETY: After filling holes, all items are in contiguous memory. . unsafe { 2 ( 0.00%) self.v.set_len(self.original_len - self.deleted_cnt); . } . } . } . . let mut g = BackshiftOnDrop { v: self, processed_len: 0, deleted_cnt: 0, original_len }; . . fn process_loop( . original_len: usize, . f: &mut F, . g: &mut BackshiftOnDrop<'_, T, A>, . ) where . F: FnMut(&mut T) -> bool, . { 1,219,346 ( 0.02%) while g.processed_len != original_len { . // SAFETY: Unchecked element must be valid. . let cur = unsafe { &mut *g.v.as_mut_ptr().add(g.processed_len) }; 284,945 ( 0.00%) if !f(cur) { . // Advance early to avoid double drop if `drop_in_place` panicked. 2,231 ( 0.00%) g.processed_len += 1; 570 ( 0.00%) g.deleted_cnt += 1; . // SAFETY: We never touch this element again after dropped. 547 ( 0.00%) unsafe { ptr::drop_in_place(cur) }; . // We already advanced the counter. . if DELETED { . continue; . } else { . break; . } . } . if DELETED { . // SAFETY: `deleted_cnt` > 0, so the hole slot must not overlap with current element. . // We use copy for move, and never touch this element again. . unsafe { 5,076 ( 0.00%) let hole_slot = g.v.as_mut_ptr().add(g.processed_len - g.deleted_cnt); . ptr::copy_nonoverlapping(cur, hole_slot, 1); . } . } 3,384 ( 0.00%) g.processed_len += 1; . } . } . . // Stage 1: Nothing was deleted. . process_loop::(original_len, &mut f, &mut g); . . // Stage 2: Some elements were deleted. . process_loop::(original_len, &mut f, &mut g); -- line 1565 ---------------------------------------- -- line 1606 ---------------------------------------- . /// ``` . /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; . /// . 
/// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); . /// . /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); . /// ``` . #[stable(feature = "dedup_by", since = "1.16.0")] 26,810 ( 0.00%) pub fn dedup_by(&mut self, mut same_bucket: F) . where . F: FnMut(&mut T, &mut T) -> bool, . { 3,835 ( 0.00%) let len = self.len(); 11,360 ( 0.00%) if len <= 1 { . return; . } . . /* INVARIANT: vec.len() > read >= write > write-1 >= 0 */ . struct FillGapOnDrop<'a, T, A: core::alloc::Allocator> { . /* Offset of the element we want to check if it is duplicate */ . read: usize, . -- line 1627 ---------------------------------------- -- line 1670 ---------------------------------------- . let ptr = gap.vec.as_mut_ptr(); . . /* Drop items while going through Vec, it should be more efficient than . * doing slice partition_dedup + truncate */ . . /* SAFETY: Because of the invariant, read_ptr, prev_ptr and write_ptr . * are always in-bounds and read_ptr never aliases prev_ptr */ . unsafe { 10,320 ( 0.00%) while gap.read < len { . let read_ptr = ptr.add(gap.read); . let prev_ptr = ptr.add(gap.write.wrapping_sub(1)); . 4,273 ( 0.00%) if same_bucket(&mut *read_ptr, &mut *prev_ptr) { . // Increase `gap.read` now since the drop may panic. . gap.read += 1; . /* We have found duplicate, drop it in-place */ . ptr::drop_in_place(read_ptr); . } else { . let write_ptr = ptr.add(gap.write); . . /* Because `read_ptr` can be equal to `write_ptr`, we either . * have to use `copy` or conditional `copy_nonoverlapping`. . * Looks like the first option is faster. */ . ptr::copy(read_ptr, write_ptr, 1); . . /* We have filled that place, so go further */ 3,447 ( 0.00%) gap.write += 1; . gap.read += 1; . } . } . . /* Technically we could let `gap` clean up with its Drop, but . * when `same_bucket` is guaranteed to not panic, this bloats a little . * the codegen, so we just do it manually */ . gap.vec.set_len(gap.write); . mem::forget(gap); . } 30,640 ( 0.00%) } . . /// Appends an element to the back of a collection. . /// . /// # Panics . /// . /// Panics if the new capacity exceeds `isize::MAX` bytes. . /// . /// # Examples -- line 1715 ---------------------------------------- -- line 1717 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2]; . /// vec.push(3); . /// assert_eq!(vec, [1, 2, 3]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] 685 ( 0.00%) pub fn push(&mut self, value: T) { . // This will panic or abort if we would allocate > isize::MAX bytes . // or if the length increment would overflow for zero-sized types. 12,381,052 ( 0.21%) if self.len == self.buf.capacity() { 923,227 ( 0.02%) self.buf.reserve_for_push(self.len); . } . unsafe { 436,605 ( 0.01%) let end = self.as_mut_ptr().add(self.len); . ptr::write(end, value); 18,017,973 ( 0.30%) self.len += 1; . } 548 ( 0.00%) } . . /// Removes the last element from a vector and returns it, or [`None`] if it . /// is empty. . /// . /// If you'd like to pop the first element, consider using . /// [`VecDeque::pop_front`] instead. . /// . /// [`VecDeque::pop_front`]: crate::collections::VecDeque::pop_front -- line 1744 ---------------------------------------- -- line 1748 ---------------------------------------- . /// ``` . /// let mut vec = vec![1, 2, 3]; . /// assert_eq!(vec.pop(), Some(3)); . /// assert_eq!(vec, [1, 2]); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn pop(&mut self) -> Option { 3,717,351 ( 0.06%) if self.len == 0 { 94 ( 0.00%) None . 
} else { . unsafe { 2,813,749 ( 0.05%) self.len -= 1; 641,188 ( 0.01%) Some(ptr::read(self.as_ptr().add(self.len()))) . } . } . } . . /// Moves all the elements of `other` into `Self`, leaving `other` empty. . /// . /// # Panics . /// -- line 1769 ---------------------------------------- -- line 1776 ---------------------------------------- . /// let mut vec2 = vec![4, 5, 6]; . /// vec.append(&mut vec2); . /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]); . /// assert_eq!(vec2, []); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[inline] . #[stable(feature = "append", since = "1.4.0")] 1,113 ( 0.00%) pub fn append(&mut self, other: &mut Self) { . unsafe { . self.append_elements(other.as_slice() as _); . other.set_len(0); . } 954 ( 0.00%) } . . /// Appends elements to `Self` from other buffer. . #[cfg(not(no_global_oom_handling))] . #[inline] . unsafe fn append_elements(&mut self, other: *const [T]) { . let count = unsafe { (*other).len() }; . self.reserve(count); 33,584 ( 0.00%) let len = self.len(); . unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) }; 545,958 ( 0.01%) self.len += count; . } . . /// Creates a draining iterator that removes the specified range in the vector . /// and yields the removed items. . /// . /// When the iterator **is** dropped, all elements in the range are removed . /// from the vector, even if the iterator was not fully consumed. If the . /// iterator **is not** dropped (with [`mem::forget`] for example), it is -- line 1807 ---------------------------------------- -- line 1834 ---------------------------------------- . // When the Drain is first created, it shortens the length of . // the source vector to make sure no uninitialized or moved-from elements . // are accessible at all if the Drain's destructor never gets to run. . // . // Drain will ptr::read out the values to remove. . // When finished, remaining tail of the vec is copied back to cover . // the hole, and the vector length is restored to the new length. . // 80,096 ( 0.00%) let len = self.len(); . let Range { start, end } = slice::range(range, ..len); . . unsafe { . // set self.vec length's to start, to be safe in case Drain is leaked . self.set_len(start); . // Use the borrow in the IterMut to indicate borrowing behavior of the . // whole Drain iterator (like &mut T). 6 ( 0.00%) let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start); 513 ( 0.00%) Drain { . tail_start: end, 6 ( 0.00%) tail_len: len - end, . iter: range_slice.iter(), . vec: NonNull::from(self), . } . } . } . . /// Clears the vector, removing all values. . /// -- line 1861 ---------------------------------------- -- line 1869 ---------------------------------------- . /// . /// v.clear(); . /// . /// assert!(v.is_empty()); . /// ``` . #[inline] . #[stable(feature = "rust1", since = "1.0.0")] . pub fn clear(&mut self) { 611,861 ( 0.01%) self.truncate(0) . } . . /// Returns the number of elements in the vector, also referred to . /// as its 'length'. . /// . /// # Examples . /// . /// ``` -- line 1885 ---------------------------------------- -- line 1900 ---------------------------------------- . /// let mut v = Vec::new(); . /// assert!(v.is_empty()); . /// . /// v.push(1); . /// assert!(!v.is_empty()); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . pub fn is_empty(&self) -> bool { 1,366,659 ( 0.02%) self.len() == 0 . } . . /// Splits the collection into two at the given index. . /// . 
/// Returns a newly allocated vector containing the elements in the range . /// `[at, len)`. After the call, the original vector will be left containing . /// the elements `[0, at)` with its previous capacity unchanged. . /// -- line 1916 ---------------------------------------- -- line 1935 ---------------------------------------- . A: Clone, . { . #[cold] . #[inline(never)] . fn assert_failed(at: usize, len: usize) -> ! { . panic!("`at` split index (is {}) should be <= len (is {})", at, len); . } . 3,887 ( 0.00%) if at > self.len() { . assert_failed(at, self.len()); . } . 3,091 ( 0.00%) if at == 0 { . // the new vector can take over the original buffer and avoid the copy . return mem::replace( . self, . Vec::with_capacity_in(self.capacity(), self.allocator().clone()), . ); . } . . let other_len = self.len - at; -- line 1955 ---------------------------------------- -- line 1988 ---------------------------------------- . /// . /// let mut vec = vec![]; . /// let mut p = 1; . /// vec.resize_with(4, || { p *= 2; p }); . /// assert_eq!(vec, [2, 4, 8, 16]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "vec_resize_with", since = "1.33.0")] 108,198 ( 0.00%) pub fn resize_with(&mut self, new_len: usize, f: F) . where . F: FnMut() -> T, . { 14,748 ( 0.00%) let len = self.len(); 29,496 ( 0.00%) if new_len > len { 133,287 ( 0.00%) self.extend_with(new_len - len, ExtendFunc(f)); . } else { . self.truncate(new_len); . } 93,450 ( 0.00%) } . . /// Consumes and leaks the `Vec`, returning a mutable reference to the contents, . /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime . /// `'a`. If the type has only static references, or none at all, then this . /// may be chosen to be `'static`. . /// . /// As of Rust 1.57, this method does not reallocate or shrink the `Vec`, . /// so the leaked allocation may include unused capacity that is not part -- line 2014 ---------------------------------------- -- line 2070 ---------------------------------------- . #[stable(feature = "vec_spare_capacity", since = "1.60.0")] . #[inline] . pub fn spare_capacity_mut(&mut self) -> &mut [MaybeUninit] { . // Note: . // This method is not implemented in terms of `split_at_spare_mut`, . // to prevent invalidation of pointers to the buffer. . unsafe { . slice::from_raw_parts_mut( 2 ( 0.00%) self.as_mut_ptr().add(self.len) as *mut MaybeUninit, 108 ( 0.00%) self.buf.capacity() - self.len, . ) . } . } . . /// Returns vector content as a slice of `T`, along with the remaining spare . /// capacity of the vector as a slice of `MaybeUninit`. . /// . /// The returned spare capacity slice can be used to fill the vector with data -- line 2087 ---------------------------------------- -- line 2189 ---------------------------------------- . /// assert_eq!(vec, ["hello", "world", "world"]); . /// . /// let mut vec = vec![1, 2, 3, 4]; . /// vec.resize(2, 0); . /// assert_eq!(vec, [1, 2]); . /// ``` . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "vec_resize", since = "1.5.0")] 380 ( 0.00%) pub fn resize(&mut self, new_len: usize, value: T) { 38 ( 0.00%) let len = self.len(); . 114 ( 0.00%) if new_len > len { 12,330 ( 0.00%) self.extend_with(new_len - len, ExtendElement(value)) . } else { . self.truncate(new_len); . } 38 ( 0.00%) } . . /// Clones and appends all elements in a slice to the `Vec`. . /// . /// Iterates over the slice `other`, clones each element, and then appends . /// it to this `Vec`. The `other` slice is traversed in-order. . /// . 
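Editor's note: a minimal sketch of the `spare_capacity_mut` pattern documented above: write into the uninitialized tail through `MaybeUninit`, then commit the new length with `set_len`.

fn main() {
    let mut v: Vec<u32> = Vec::with_capacity(8);
    v.push(0);

    let spare = v.spare_capacity_mut();
    spare[0].write(1);
    spare[1].write(2);

    // SAFETY: exactly the first two spare slots were initialized above.
    unsafe { v.set_len(v.len() + 2) };
    assert_eq!(v, [0, 1, 2]);
}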
/// Note that this function is same as [`extend`] except that it is . /// specialized to work with slices instead. If and when Rust gets -- line 2213 ---------------------------------------- -- line 2291 ---------------------------------------- . fn last(mut self) -> T { . (self.0)() . } . } . . impl Vec { . #[cfg(not(no_global_oom_handling))] . /// Extend the vector by `n` values, using the given generator. 625,850 ( 0.01%) fn extend_with>(&mut self, n: usize, mut value: E) { . self.reserve(n); . . unsafe { 14,099 ( 0.00%) let mut ptr = self.as_mut_ptr().add(self.len()); . // Use SetLenOnDrop to work around bug where compiler . // might not realize the store through `ptr` through self.set_len() . // don't alias. . let mut local_len = SetLenOnDrop::new(&mut self.len); . . // Write all elements except the last one . for _ in 1..n { . ptr::write(ptr, value.next()); . ptr = ptr.offset(1); . // Increment the length in every step in case next() panics . local_len.increment_len(1); . } . 744,774 ( 0.01%) if n > 0 { . // We can write the last element directly without cloning needlessly . ptr::write(ptr, value.last()); . local_len.increment_len(1); . } . . // len set by scope guard . } 499,093 ( 0.01%) } . } . . impl Vec { . /// Removes consecutive repeated elements in the vector according to the . /// [`PartialEq`] trait implementation. . /// . /// If the vector is sorted, this removes all duplicates. . /// -- line 2333 ---------------------------------------- -- line 2338 ---------------------------------------- . /// . /// vec.dedup(); . /// . /// assert_eq!(vec, [1, 2, 3, 2]); . /// ``` . #[stable(feature = "rust1", since = "1.0.0")] . #[inline] . pub fn dedup(&mut self) { 3,837 ( 0.00%) self.dedup_by(|a, b| a == b) . } . } . . //////////////////////////////////////////////////////////////////////////////// . // Internal methods and functions . //////////////////////////////////////////////////////////////////////////////// . . #[doc(hidden)] . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] 109,606 ( 0.00%) pub fn from_elem(elem: T, n: usize) -> Vec { 797,885 ( 0.01%) ::from_elem(elem, n, Global) 144,276 ( 0.00%) } . . #[doc(hidden)] . #[cfg(not(no_global_oom_handling))] . #[unstable(feature = "allocator_api", issue = "32838")] . pub fn from_elem_in(elem: T, n: usize, alloc: A) -> Vec { . ::from_elem(elem, n, alloc) . } . -- line 2367 ---------------------------------------- -- line 2424 ---------------------------------------- . // Common trait implementations for Vec . //////////////////////////////////////////////////////////////////////////////// . . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::Deref for Vec { . type Target = [T]; . . fn deref(&self) -> &[T] { 39,677,958 ( 0.66%) unsafe { slice::from_raw_parts(self.as_ptr(), self.len) } 268 ( 0.00%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl ops::DerefMut for Vec { . fn deref_mut(&mut self) -> &mut [T] { 25,645,101 ( 0.43%) unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) } . } . } . . #[cfg(not(no_global_oom_handling))] . trait SpecCloneFrom { . fn clone_from(this: &mut Self, other: &Self); . } . -- line 2447 ---------------------------------------- -- line 2468 ---------------------------------------- . this.extend_from_slice(other); . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Clone for Vec { . #[cfg(not(test))] 514,057 ( 0.01%) fn clone(&self) -> Self { . 
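Editor's note: `extend_with` above clones the value for the first `n - 1` slots and writes the final slot by move (`value.last()`), saving one clone. An illustrative safe-Rust sketch of the same idea, using plain `push` instead of the raw pointer writes and the `SetLenOnDrop` guard:

fn extend_with_sketch<T: Clone>(v: &mut Vec<T>, n: usize, value: T) {
    if n == 0 {
        return;
    }
    v.reserve(n);
    for _ in 1..n {
        v.push(value.clone()); // n - 1 clones
    }
    v.push(value); // the last element reuses `value` without cloning
}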
let alloc = self.allocator().clone(); 2 ( 0.00%) <[T]>::to_vec_in(&**self, alloc) 627,546 ( 0.01%) } . . // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is . // required for this method definition, is not available. Instead use the . // `slice::to_vec` function which is only available with cfg(test) . // NB see the slice::hack module in slice.rs for more information . #[cfg(test)] . fn clone(&self) -> Self { . let alloc = self.allocator().clone(); -- line 2487 ---------------------------------------- -- line 2518 ---------------------------------------- . message = "vector indices are of type `usize` or ranges of `usize`", . label = "vector indices are of type `usize` or ranges of `usize`" . )] . impl, A: Allocator> Index for Vec { . type Output = I::Output; . . #[inline] . fn index(&self, index: I) -> &Self::Output { 918,900 ( 0.02%) Index::index(&**self, index) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_on_unimplemented( . message = "vector indices are of type `usize` or ranges of `usize`", . label = "vector indices are of type `usize` or ranges of `usize`" . )] . impl, A: Allocator> IndexMut for Vec { . #[inline] . fn index_mut(&mut self, index: I) -> &mut Self::Output { 732 ( 0.00%) IndexMut::index_mut(&mut **self, index) . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl FromIterator for Vec { . #[inline] . fn from_iter>(iter: I) -> Vec { 4,247,857 ( 0.07%) >::from_iter(iter.into_iter()) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl IntoIterator for Vec { . type Item = T; . type IntoIter = IntoIter; . -- line 2555 ---------------------------------------- -- line 2564 ---------------------------------------- . /// for s in v.into_iter() { . /// // s has type String, not &String . /// println!("{}", s); . /// } . /// ``` . #[inline] . fn into_iter(self) -> IntoIter { . unsafe { 1,862,483 ( 0.03%) let mut me = ManuallyDrop::new(self); . let alloc = ptr::read(me.allocator()); . let begin = me.as_mut_ptr(); . let end = if mem::size_of::() == 0 { . arith_offset(begin as *const i8, me.len() as isize) as *const T . } else { . begin.add(me.len()) as *const T . }; . let cap = me.buf.capacity(); 583,952 ( 0.01%) IntoIter { . buf: NonNull::new_unchecked(begin), . phantom: PhantomData, . cap, . alloc, . ptr: begin, . end, . } . } -- line 2589 ---------------------------------------- -- line 2591 ---------------------------------------- . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T, A: Allocator> IntoIterator for &'a Vec { . type Item = &'a T; . type IntoIter = slice::Iter<'a, T>; . . fn into_iter(self) -> slice::Iter<'a, T> { 218 ( 0.00%) self.iter() . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec { . type Item = &'a mut T; . type IntoIter = slice::IterMut<'a, T>; . . fn into_iter(self) -> slice::IterMut<'a, T> { 1,074 ( 0.00%) self.iter_mut() . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl Extend for Vec { . #[inline] 5,200 ( 0.00%) fn extend>(&mut self, iter: I) { 2,531,812 ( 0.04%) >::spec_extend(self, iter.into_iter()) 5,200 ( 0.00%) } . . #[inline] . fn extend_one(&mut self, item: T) { . self.push(item); . } . . #[inline] . fn extend_reserve(&mut self, additional: usize) { -- line 2627 ---------------------------------------- -- line 2636 ---------------------------------------- . fn extend_desugared>(&mut self, mut iterator: I) { . 
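Editor's note: usage sketch of the three `IntoIterator` impls shown above: by value (`vec::IntoIter`), by shared reference (`slice::Iter`), and by mutable reference (`slice::IterMut`).

fn main() {
    let mut v = vec![String::from("a"), String::from("b")];

    for s in &v {
        let _: &String = s; // &Vec<T> yields shared references
    }
    for s in &mut v {
        s.push('!'); // &mut Vec<T> yields mutable references
    }
    for s in v {
        let _: String = s; // Vec<T> yields owned values and consumes the vector
    }
}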
// This is the case for a general iterator. . // . // This function should be the moral equivalent of: . // . // for item in iterator { . // self.push(item); . // } 259,924 ( 0.00%) while let Some(element) = iterator.next() { 114,761 ( 0.00%) let len = self.len(); 1,174,081 ( 0.02%) if len == self.capacity() { 893 ( 0.00%) let (lower, _) = iterator.size_hint(); . self.reserve(lower.saturating_add(1)); . } . unsafe { . ptr::write(self.as_mut_ptr().add(len), element); . // Since next() executes user code which can panic we have to bump the length . // after each step. . // NB can't overflow since we would have had to alloc the address space 799,543 ( 0.01%) self.set_len(len + 1); . } . } 16,943 ( 0.00%) } . . /// Creates a splicing iterator that replaces the specified range in the vector . /// with the given `replace_with` iterator and yields the removed items. . /// `replace_with` does not need to be the same length as `range`. . /// . /// `range` is removed even if the iterator is not consumed until the end. . /// . /// It is unspecified how many elements are removed from the vector -- line 2666 ---------------------------------------- -- line 2745 ---------------------------------------- . /// assert_eq!(evens, vec![2, 4, 6, 8, 14]); . /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]); . /// ``` . #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")] . pub fn drain_filter(&mut self, filter: F) -> DrainFilter<'_, T, F, A> . where . F: FnMut(&mut T) -> bool, . { 5,066 ( 0.00%) let old_len = self.len(); . . // Guard against us getting leaked (leak amplification) . unsafe { . self.set_len(0); . } . 16,232 ( 0.00%) DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false } . } . } . . /// Extend implementation that copies elements out of references before pushing them onto the Vec. . /// . /// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to . /// append the entire slice at once. . /// -- line 2768 ---------------------------------------- -- line 2803 ---------------------------------------- . #[inline] . fn cmp(&self, other: &Self) -> Ordering { . Ord::cmp(&**self, &**other) . } . } . . #[stable(feature = "rust1", since = "1.0.0")] . unsafe impl<#[may_dangle] T, A: Allocator> Drop for Vec { 2,455,795 ( 0.04%) fn drop(&mut self) { . unsafe { . // use drop for [T] . // use a raw slice to refer to the elements of the vector as weakest necessary type; . // could avoid questions of validity in certain cases 1,064,257 ( 0.02%) ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len)) . } . // RawVec handles deallocation 2,905,538 ( 0.05%) } . } . . #[stable(feature = "rust1", since = "1.0.0")] . #[rustc_const_unstable(feature = "const_default_impls", issue = "87864")] . impl const Default for Vec { . /// Creates an empty `Vec`. . fn default() -> Vec { . Vec::new() -- line 2827 ---------------------------------------- -- line 2976 ---------------------------------------- . /// newly-allocated buffer with exactly the right capacity. . /// . /// # Examples . /// . /// ``` . /// assert_eq!(Box::from(vec![1, 2, 3]), vec![1, 2, 3].into_boxed_slice()); . /// ``` . fn from(v: Vec) -> Self { 4 ( 0.00%) v.into_boxed_slice() . } . } . . #[cfg(not(no_global_oom_handling))] . #[stable(feature = "rust1", since = "1.0.0")] . impl From<&str> for Vec { . /// Allocate a `Vec` and fill it with a UTF-8 string. . 
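Editor's note: `extend_desugared` above only grows the buffer when it is exactly full, and then reserves based on the iterator's `size_hint` lower bound plus one. A stand-alone sketch of that loop (the real code writes through a raw pointer and bumps the length after every element in case `next` panics):

fn extend_sketch<T>(v: &mut Vec<T>, mut it: impl Iterator<Item = T>) {
    while let Some(item) = it.next() {
        if v.len() == v.capacity() {
            let (lower, _) = it.size_hint();
            v.reserve(lower.saturating_add(1)); // grow only when completely full
        }
        v.push(item);
    }
}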
/// -- line 2992 ---------------------------------------- 12,050,123 ( 0.20%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/ena-0.14.0/src/unify/mod.rs -------------------------------------------------------------------------------- Ir -- line 154 ---------------------------------------- . /// Value of a unification key. We implement Tarjan's union-find . /// algorithm: when two keys are unified, one of them is converted . /// into a "redirect" pointing at the other. These redirects form a . /// DAG: the roots of the DAG (nodes that are not redirected) are each . /// associated with a value of type `V` and a rank. The rank is used . /// to keep the DAG relatively balanced, which helps keep the running . /// time of the algorithm under control. For more information, see . /// . 44,977 ( 0.00%) #[derive(PartialEq, Clone, Debug)] . pub struct VarValue { 72,780 ( 0.00%) parent: K, // if equal to self, this is a root 12,112 ( 0.00%) value: K::Value, // value assigned (only relevant to root) 66,768 ( 0.00%) rank: u32, // max depth (only relevant to root) . } . . /// Table of unification keys and their values. You must define a key type K . /// that implements the `UnifyKey` trait. Unification tables can be used in two-modes: . /// . /// - in-place (`UnificationTable>` or `InPlaceUnificationTable`): . /// - This is the standard mutable mode, where the array is modified . /// in place. . /// - To do backtracking, you can employ the `snapshot` and `rollback_to` . /// methods. . /// - persistent (`UnificationTable>` or `PersistentUnificationTable`): . /// - In this mode, we use a persistent vector to store the data, so that . /// cloning the table is an O(1) operation. . /// - This implies that ordinary operations are quite a bit slower though. . /// - Requires the `persistent` feature be selected in your Cargo.toml file. 112,175 ( 0.00%) #[derive(Clone, Debug, Default)] . pub struct UnificationTable { . /// Indicates the current value of each key. . values: S, . } . . pub type UnificationStorage = Vec>; . pub type UnificationTableStorage = UnificationTable, ()>>; . -- line 190 ---------------------------------------- -- line 215 ---------------------------------------- . } . . fn new(parent: K, value: K::Value, rank: u32) -> VarValue { . VarValue { . parent: parent, // this is a root . value: value, . rank: rank, . } 236,046 ( 0.00%) } . . fn redirect(&mut self, to: K) { 33,589 ( 0.00%) self.parent = to; . } . . fn root(&mut self, rank: u32, value: K::Value) { 32,009 ( 0.00%) self.rank = rank; 15,301 ( 0.00%) self.value = value; . } . . fn parent(&self, self_key: K) -> Option { 2,538,614 ( 0.04%) self.if_not_self(self.parent, self_key) . } . . fn if_not_self(&self, key: K, self_key: K) -> Option { . if key == self_key { . None . } else { . Some(key) . } -- line 243 ---------------------------------------- -- line 253 ---------------------------------------- . &'a mut self, . undo_log: L, . ) -> UnificationTable, L>> . where . L: UndoLogs>>, . { . UnificationTable { . values: InPlace { 6 ( 0.00%) values: self.values.values.with_log(undo_log), . }, . } . } . } . . // We can't use V:LatticeValue, much as I would like to, . // because frequently the pattern is that V=Option for some . // other type parameter U, and we have no way to say -- line 269 ---------------------------------------- -- line 311 ---------------------------------------- . pub fn len(&self) -> usize { . 
self.values.len() . } . } . . impl UnificationTable { . /// Starts a new snapshot. Each snapshot must be either . /// Creates a fresh key with the given value. 408,740 ( 0.01%) pub fn new_key(&mut self, value: S::Value) -> S::Key { . let len = self.values.len(); 618,392 ( 0.01%) let key: S::Key = UnifyKey::from_index(len as u32); . self.values.push(VarValue::new_var(key, value)); 250,668 ( 0.00%) debug!("{}: created new key: {:?}", S::tag(), key); . key 608,365 ( 0.01%) } . . /// Reserve memory for `num_new_keys` to be created. Does not . /// actually create the new keys; you must then invoke `new_key`. . pub fn reserve(&mut self, num_new_keys: usize) { . self.values.reserve(num_new_keys); . } . . /// Clears all unifications that have been performed, resetting to . /// the initial state. The values of each variable are given by . /// the closure. . pub fn reset_unifications(&mut self, mut value: impl FnMut(S::Key) -> S::Value) { . self.values.reset_unifications(|i| { 75 ( 0.00%) let key = UnifyKey::from_index(i as u32); . let value = value(key); . VarValue::new_var(key, value) . }); . } . . /// Obtains the current value for a particular key. . /// Not for end-users; they can use `probe_value`. . fn value(&self, key: S::Key) -> &VarValue { 1,590,991 ( 0.03%) &self.values[key.index() as usize] . } . . /// Find the root node for `vid`. This uses the standard . /// union-find algorithm with path compression: . /// . . /// . /// NB. This is a building-block operation and you would probably . /// prefer to call `probe` below. . /// . /// This is an always-inlined version of this function for the hot . /// callsites. `uninlined_get_root_key` is the never-inlined version. . #[inline(always)] . fn inlined_get_root_key(&mut self, vid: S::Key) -> S::Key { . let redirect = { 20,028,517 ( 0.33%) match self.value(vid).parent(vid) { . None => return vid, . Some(redirect) => redirect, . } . }; . 84,334 ( 0.00%) let root_key: S::Key = self.uninlined_get_root_key(redirect); 11,571 ( 0.00%) if root_key != redirect { . // Path compression 1,461 ( 0.00%) self.update_value(vid, |value| value.parent = root_key); . } . . root_key . } . . // This is a never-inlined version of this function for cold callsites. . // 'inlined_get_root_key` is the always-inlined version. . #[inline(never)] 3,056,972 ( 0.05%) fn uninlined_get_root_key(&mut self, vid: S::Key) -> S::Key { . self.inlined_get_root_key(vid) 2,867,431 ( 0.05%) } . 3,896 ( 0.00%) fn update_value(&mut self, key: S::Key, op: OP) . where . OP: FnOnce(&mut VarValue), . { 156,430 ( 0.00%) self.values.update(key.index() as usize, op); 109,853 ( 0.00%) debug!("Updated variable {:?} to {:?}", key, self.value(key)); 2,435 ( 0.00%) } . . /// Either redirects `node_a` to `node_b` or vice versa, depending . /// on the relative rank. The value associated with the new root . /// will be `new_value`. . /// . /// NB: This is the "union" operation of "union-find". It is . /// really more of a building block. If the values associated with . /// your key are non-trivial, you would probably prefer to call . /// `unify_var_var` below. . fn unify_roots(&mut self, key_a: S::Key, key_b: S::Key, new_value: S::Value) { 17,964 ( 0.00%) debug!("unify(key_a={:?}, key_b={:?})", key_a, key_b); . 31,315 ( 0.00%) let rank_a = self.value(key_a).rank; 85,163 ( 0.00%) let rank_b = self.value(key_b).rank; . if let Some((new_root, redirected)) = S::Key::order_roots( 51,942 ( 0.00%) key_a, . &self.value(key_a).value, 51,942 ( 0.00%) key_b, . &self.value(key_b).value, . ) { . 
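Editor's note: the `inlined_get_root_key` / `uninlined_get_root_key` pair above implements union-find's "find" with path compression: a key that is not its own parent gets repointed directly at the root it resolves to. A stand-alone sketch over a plain parent array, leaving out the ranks and values stored in `VarValue`:

fn find(parent: &mut [usize], i: usize) -> usize {
    let p = parent[i];
    if p == i {
        return i; // a root is its own parent
    }
    let root = find(parent, p);
    parent[i] = root; // path compression: point directly at the root
    root
}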
// compute the new rank for the new root that they chose; . // this may not be the optimal choice. . let new_rank = if new_root == key_a { . debug_assert!(redirected == key_b); . if rank_a > rank_b { . rank_a -- line 416 ---------------------------------------- -- line 422 ---------------------------------------- . debug_assert!(redirected == key_a); . if rank_b > rank_a { . rank_b . } else { . rank_a + 1 . } . }; . self.redirect_root(new_rank, redirected, new_root, new_value); 35,928 ( 0.00%) } else if rank_a > rank_b { . // a has greater rank, so a should become b's parent, . // i.e., b should redirect to a. 3,867 ( 0.00%) self.redirect_root(rank_a, key_b, key_a, new_value); 16,704 ( 0.00%) } else if rank_a < rank_b { . // b has greater rank, so a should redirect to b. 9,310 ( 0.00%) self.redirect_root(rank_b, key_a, key_b, new_value); . } else { . // If equal, redirect one to the other and increment the . // other's rank. 58,811 ( 0.00%) self.redirect_root(rank_a + 1, key_a, key_b, new_value); . } . } . . /// Internal method to redirect `old_root_key` (which is currently . /// a root) to a child of `new_root_key` (which will remain a . /// root). The rank and value of `new_root_key` will be updated to . /// `new_rank` and `new_value` respectively. 227,556 ( 0.00%) fn redirect_root( . &mut self, . new_rank: u32, . old_root_key: S::Key, . new_root_key: S::Key, . new_value: S::Value, . ) { . self.update_value(old_root_key, |old_root_value| { . old_root_value.redirect(new_root_key); . }); 110 ( 0.00%) self.update_value(new_root_key, |new_root_value| { 650 ( 0.00%) new_root_value.root(new_rank, new_value); . }); 123,085 ( 0.00%) } . } . . /// //////////////////////////////////////////////////////////////////////// . /// Public API . . impl UnificationTable . where . S: UnificationStoreMut, . K: UnifyKey, . V: UnifyValue, . { . /// Unions two keys without the possibility of failure; only . /// applicable when unify values use `NoError` as their error . /// type. 177,327 ( 0.00%) pub fn union(&mut self, a_id: K1, b_id: K2) . where . K1: Into, . K2: Into, . V: UnifyValue, . { . self.unify_var_var(a_id, b_id).unwrap(); 156,993 ( 0.00%) } . . /// Unions a key and a value without the possibility of failure; . /// only applicable when unify values use `NoError` as their error . /// type. 624,456 ( 0.01%) pub fn union_value(&mut self, id: K1, value: V) . where . K1: Into, . V: UnifyValue, . { . self.unify_var_value(id, value).unwrap(); 488,223 ( 0.01%) } . . /// Given two keys, indicates whether they have been unioned together. . pub fn unioned(&mut self, a_id: K1, b_id: K2) -> bool . where . K1: Into, . K2: Into, . { . self.find(a_id) == self.find(b_id) -- line 502 ---------------------------------------- -- line 503 ---------------------------------------- . } . . /// Given a key, returns the (current) root key. . pub fn find(&mut self, id: K1) -> K . where . K1: Into, . { . let id = id.into(); 350,569 ( 0.01%) self.uninlined_get_root_key(id) . } . . /// Unions together two variables, merging their values. If . /// merging the values fails, the error is propagated and this . /// method has no effect. 207 ( 0.00%) pub fn unify_var_var(&mut self, a_id: K1, b_id: K2) -> Result<(), V::Error> . where . K1: Into, . K2: Into, . { . let a_id = a_id.into(); . let b_id = b_id.into(); . 68,768 ( 0.00%) let root_a = self.uninlined_get_root_key(a_id); 61,117 ( 0.00%) let root_b = self.uninlined_get_root_key(b_id); . 5,702 ( 0.00%) if root_a == root_b { . return Ok(()); . } . 
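Editor's note: companion sketch to `unify_roots` / `redirect_root` above: union by rank, where the shallower root is redirected to the deeper one, and two roots of equal rank pick one survivor and bump its rank. It reuses the `find` sketch from the previous note.

fn union(parent: &mut [usize], rank: &mut [u32], a: usize, b: usize) {
    let ra = find(parent, a);
    let rb = find(parent, b);
    if ra == rb {
        return; // already in the same set
    }
    if rank[ra] > rank[rb] {
        parent[rb] = ra; // deeper tree stays the root
    } else if rank[ra] < rank[rb] {
        parent[ra] = rb;
    } else {
        parent[ra] = rb; // equal ranks: pick one root and grow its rank
        rank[rb] += 1;
    }
}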
110,458 ( 0.00%) let combined = V::unify_values(&self.value(root_a).value, &self.value(root_b).value)?; . . Ok(self.unify_roots(root_a, root_b, combined)) 230 ( 0.00%) } . . /// Sets the value of the key `a_id` to `b`, attempting to merge . /// with the previous value. 30,344 ( 0.00%) pub fn unify_var_value(&mut self, a_id: K1, b: V) -> Result<(), V::Error> . where . K1: Into, . { . let a_id = a_id.into(); 96,421 ( 0.00%) let root_a = self.uninlined_get_root_key(a_id); 220,716 ( 0.00%) let value = V::unify_values(&self.value(root_a).value, &b)?; 153,068 ( 0.00%) self.update_value(root_a, |node| node.value = value); . Ok(()) 27,416 ( 0.00%) } . . /// Returns the current value for the given key. If the key has . /// been union'd, this will give the value from the current root. 64,905 ( 0.00%) pub fn probe_value(&mut self, id: K1) -> V . where . K1: Into, . { . self.inlined_probe_value(id) 63,384 ( 0.00%) } . . // An always-inlined version of `probe_value`, for hot callsites. . #[inline(always)] . pub fn inlined_probe_value(&mut self, id: K1) -> V . where . K1: Into, . { . let id = id.into(); . let id = self.inlined_get_root_key(id); 38,002 ( 0.00%) self.value(id).value.clone() . } . } . . /////////////////////////////////////////////////////////////////////////// . . impl UnifyValue for () { . type Error = NoError; . . fn unify_values(_: &(), _: &()) -> Result<(), NoError> { . Ok(()) 6,626 ( 0.00%) } . } . . impl UnifyValue for Option { . type Error = V::Error; . . fn unify_values(a: &Option, b: &Option) -> Result { 18,080 ( 0.00%) match (a, b) { . (&None, &None) => Ok(None), . (&Some(ref v), &None) | (&None, &Some(ref v)) => Ok(Some(v.clone())), . (&Some(ref a), &Some(ref b)) => match V::unify_values(a, b) { . Ok(v) => Ok(Some(v)), . Err(err) => Err(err), . }, . } . } -- line 593 ---------------------------------------- 338,314 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs -------------------------------------------------------------------------------- Ir -- line 27 ---------------------------------------- . impl CacheEntry { . #[inline] . fn update( . &mut self, . new_file_and_idx: Option<(Lrc, usize)>, . pos: BytePos, . time_stamp: usize, . ) { 29,775 ( 0.00%) if let Some((file, file_idx)) = new_file_and_idx { 13,526 ( 0.00%) self.file = file; 19,687 ( 0.00%) self.file_index = file_idx; . } . 14,641 ( 0.00%) let line_index = self.file.lookup_line(pos).unwrap(); . let line_bounds = self.file.line_bounds(line_index); 51,424 ( 0.00%) self.line_number = line_index + 1; 76,332 ( 0.00%) self.line = line_bounds; . self.touch(time_stamp); . } . . #[inline] . fn touch(&mut self, time_stamp: usize) { 383,545 ( 0.01%) self.time_stamp = time_stamp; . } . } . . #[derive(Clone)] . pub struct CachingSourceMapView<'sm> { . source_map: &'sm SourceMap, . line_cache: [CacheEntry; 3], . time_stamp: usize, . } . . impl<'sm> CachingSourceMapView<'sm> { 10,592 ( 0.00%) pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> { . let files = source_map.files(); 5,296 ( 0.00%) let first_file = files[0].clone(); . let entry = CacheEntry { . time_stamp: 0, . line_number: 0, . line: BytePos(0)..BytePos(0), . file: first_file, . file_index: 0, . }; . 26,480 ( 0.00%) CachingSourceMapView { . source_map, 37,072 ( 0.00%) line_cache: [entry.clone(), entry.clone(), entry], . time_stamp: 0, . } 21,184 ( 0.00%) } . . pub fn byte_pos_to_line_and_col( . 
&mut self, . pos: BytePos, . ) -> Option<(Lrc, usize, BytePos)> { . self.time_stamp += 1; . . // Check if the position is in one of the cached lines -- line 85 ---------------------------------------- -- line 106 ---------------------------------------- . }; . . let cache_entry = &mut self.line_cache[oldest]; . cache_entry.update(new_file_and_idx, pos, self.time_stamp); . . Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line.start)) . } . 1,423,807 ( 0.02%) pub fn span_data_to_lines_and_cols( . &mut self, . span_data: &SpanData, . ) -> Option<(Lrc, usize, BytePos, usize, BytePos)> { 813,604 ( 0.01%) self.time_stamp += 1; . . // Check if lo and hi are in the cached lines. 203,401 ( 0.00%) let lo_cache_idx = self.cache_entry_index(span_data.lo); 203,401 ( 0.00%) let hi_cache_idx = self.cache_entry_index(span_data.hi); . 360,912 ( 0.01%) if lo_cache_idx != -1 && hi_cache_idx != -1 { . // Cache hit for span lo and hi. Check if they belong to the same file. . let result = { 360,288 ( 0.01%) let lo = &self.line_cache[lo_cache_idx as usize]; . let hi = &self.line_cache[hi_cache_idx as usize]; . 1,080,864 ( 0.02%) if lo.file_index != hi.file_index { . return None; . } . . ( 360,288 ( 0.01%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 180,144 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . ) . }; . 180,144 ( 0.00%) self.line_cache[lo_cache_idx as usize].touch(self.time_stamp); 180,144 ( 0.00%) self.line_cache[hi_cache_idx as usize].touch(self.time_stamp); . 720,576 ( 0.01%) return Some(result); . } . . // No cache hit or cache hit for only one of span lo and hi. 45,890 ( 0.00%) let oldest = if lo_cache_idx != -1 || hi_cache_idx != -1 { . let avoid_idx = if lo_cache_idx != -1 { lo_cache_idx } else { hi_cache_idx }; . self.oldest_cache_entry_index_avoid(avoid_idx as usize) . } else { . self.oldest_cache_entry_index() . }; . . // If the entry doesn't point to the correct file, get the new file and index. . // Return early if the file containing beginning of span doesn't contain end of span. 185,396 ( 0.00%) let new_file_and_idx = if !file_contains(&self.line_cache[oldest].file, span_data.lo) { 43,080 ( 0.00%) let new_file_and_idx = self.file_for_position(span_data.lo)?; 51,696 ( 0.00%) if !file_contains(&new_file_and_idx.0, span_data.hi) { . return None; . } . 34,464 ( 0.00%) Some(new_file_and_idx) . } else { . let file = &self.line_cache[oldest].file; 43,923 ( 0.00%) if !file_contains(&file, span_data.hi) { . return None; . } . 43,923 ( 0.00%) None . }; . . // Update the cache entries. 116,285 ( 0.00%) let (lo_idx, hi_idx) = match (lo_cache_idx, hi_cache_idx) { . // Oldest cache entry is for span_data.lo line. . (-1, -1) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); . 59,058 ( 0.00%) if !lo.line.contains(&span_data.hi) { . let new_file_and_idx = Some((lo.file.clone(), lo.file_index)); . let next_oldest = self.oldest_cache_entry_index_avoid(oldest); . let hi = &mut self.line_cache[next_oldest]; . hi.update(new_file_and_idx, span_data.hi, self.time_stamp); . (oldest, next_oldest) . } else { . (oldest, oldest) . } . } . // Oldest cache entry is for span_data.lo line. . (-1, _) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); 624 ( 0.00%) let hi = &mut self.line_cache[hi_cache_idx as usize]; 936 ( 0.00%) hi.touch(self.time_stamp); . (oldest, hi_cache_idx as usize) . } . // Oldest cache entry is for span_data.hi line. . 
(_, -1) => { . let hi = &mut self.line_cache[oldest]; 3,259 ( 0.00%) hi.update(new_file_and_idx, span_data.hi, self.time_stamp); 6,518 ( 0.00%) let lo = &mut self.line_cache[lo_cache_idx as usize]; 13,036 ( 0.00%) lo.touch(self.time_stamp); . (lo_cache_idx as usize, oldest) . } . _ => { . panic!(); . } . }; . . let lo = &self.line_cache[lo_idx]; . let hi = &self.line_cache[hi_idx]; . . // Span lo and hi may equal line end when last line doesn't . // end in newline, hence the inclusive upper bounds below. 46,514 ( 0.00%) assert!(span_data.lo >= lo.line.start); 23,257 ( 0.00%) assert!(span_data.lo <= lo.line.end); 46,514 ( 0.00%) assert!(span_data.hi >= hi.line.start); 23,257 ( 0.00%) assert!(span_data.hi <= hi.line.end); 116,285 ( 0.00%) assert!(lo.file.contains(span_data.lo)); 69,771 ( 0.00%) assert!(lo.file.contains(span_data.hi)); 69,771 ( 0.00%) assert_eq!(lo.file_index, hi.file_index); . 69,771 ( 0.00%) Some(( 23,257 ( 0.00%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 23,257 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . )) 1,830,609 ( 0.03%) } . . fn cache_entry_index(&self, pos: BytePos) -> isize { . for (idx, cache_entry) in self.line_cache.iter().enumerate() { 2,204,223 ( 0.04%) if cache_entry.line.contains(&pos) { . return idx as isize; . } . } . . -1 . } . . fn oldest_cache_entry_index(&self) -> usize { . let mut oldest = 0; . . for idx in 1..self.line_cache.len() { 118,116 ( 0.00%) if self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp { . oldest = idx; . } . } . . oldest . } . . fn oldest_cache_entry_index_avoid(&self, avoid_idx: usize) -> usize { . let mut oldest = if avoid_idx != 0 { 0 } else { 1 }; . . for idx in 0..self.line_cache.len() { 35,631 ( 0.00%) if idx != avoid_idx 18,459 ( 0.00%) && self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp . { . oldest = idx; . } . } . . oldest . } . 43,080 ( 0.00%) fn file_for_position(&self, pos: BytePos) -> Option<(Lrc, usize)> { 8,616 ( 0.00%) if !self.source_map.files().is_empty() { 17,232 ( 0.00%) let file_idx = self.source_map.lookup_source_file_idx(pos); . let file = &self.source_map.files()[file_idx]; . 68,928 ( 0.00%) if file_contains(file, pos) { . return Some((file.clone(), file_idx)); . } . } . . None 43,080 ( 0.00%) } . } . . #[inline] . fn file_contains(file: &SourceFile, pos: BytePos) -> bool { . // `SourceMap::lookup_source_file_idx` and `SourceFile::contains` both consider the position . // one past the end of a file to belong to it. Normally, that's what we want. But for the . // purposes of converting a byte position to a line and column number, we can't come up with a . // line and column number if the file is empty, because an empty file doesn't contain any -- line 290 ---------------------------------------- 2,534,603 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/mod.rs -------------------------------------------------------------------------------- Ir -- line 157 ---------------------------------------- . pub fn opt_fn_def_id(self) -> Option { . if let Res::Def(DefKind::Fn, def_id) = self.res { Some(def_id) } else { None } . } . } . . /// The "header" of an impl is everything outside the body: a Self type, a trait . /// ref (in the case of a trait impl), and a set of predicates (from the . /// bounds / where-clauses). 736 ( 0.00%) #[derive(Clone, Debug, TypeFoldable)] . pub struct ImplHeader<'tcx> { . 
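Editor's note: `span_data_to_lines_and_cols` above is one of the hotter compiler functions in this profile (about 0.24% of instructions per the totals). It relies on a three-entry line cache where every hit refreshes an entry's `time_stamp` and a miss evicts the entry with the smallest stamp. A minimal stand-alone sketch of that eviction scheme, with hypothetical names (`TinyCache`, `get_or_insert_with`):

struct Entry<K, V> {
    key: K,
    value: V,
    stamp: usize,
}

struct TinyCache<K: PartialEq, V> {
    entries: Vec<Entry<K, V>>, // assumed pre-populated, like the three line-cache slots
    clock: usize,
}

impl<K: PartialEq, V> TinyCache<K, V> {
    fn get_or_insert_with(&mut self, key: K, make: impl FnOnce(&K) -> V) -> &V {
        self.clock += 1;
        if let Some(i) = self.entries.iter().position(|e| e.key == key) {
            self.entries[i].stamp = self.clock; // hit: just refresh the stamp
            return &self.entries[i].value;
        }
        let value = make(&key); // miss: compute the value
        let oldest = (0..self.entries.len())
            .min_by_key(|&i| self.entries[i].stamp)
            .expect("cache slots must be pre-populated");
        self.entries[oldest] = Entry { key, value, stamp: self.clock }; // evict the oldest
        &self.entries[oldest].value
    }
}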
pub impl_def_id: DefId, . pub self_ty: Ty<'tcx>, . pub trait_ref: Option>, . pub predicates: Vec>, . } . . #[derive( . Copy, . Clone, 196,551 ( 0.00%) PartialEq, . Eq, . Hash, . TyEncodable, 16,282 ( 0.00%) TyDecodable, . HashStable, . Debug, . TypeFoldable . )] . pub enum ImplPolarity { . /// `impl Trait for Type` . Positive, . /// `impl !Trait for Type` -- line 188 ---------------------------------------- -- line 210 ---------------------------------------- . match self { . Self::Positive => f.write_str("positive"), . Self::Negative => f.write_str("negative"), . Self::Reservation => f.write_str("reservation"), . } . } . } . 213,259 ( 0.00%) #[derive(Clone, Debug, PartialEq, Eq, Copy, Hash, TyEncodable, TyDecodable, HashStable)] . pub enum Visibility { . /// Visible everywhere (including in other crates). . Public, . /// Visible only in the given crate-local module. . Restricted(DefId), . /// Not visible anywhere in the local crate. This is the visibility of private external items. . Invisible, . } . 286,571 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, HashStable, TyEncodable, TyDecodable)] . pub enum BoundConstness { . /// `T: Trait` . NotConst, . /// `T: ~const Trait` . /// . /// Requires resolving to const only when we are in a const context. . ConstIfConst, . } . . impl BoundConstness { . /// Reduce `self` and `constness` to two possible combined states instead of four. 76,486 ( 0.00%) pub fn and(&mut self, constness: hir::Constness) -> hir::Constness { 114,729 ( 0.00%) match (constness, self) { . (hir::Constness::Const, BoundConstness::ConstIfConst) => hir::Constness::Const, . (_, this) => { 99,434 ( 0.00%) *this = BoundConstness::NotConst; . hir::Constness::NotConst . } . } 38,243 ( 0.00%) } . } . . impl fmt::Display for BoundConstness { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . match self { . Self::NotConst => f.write_str("normal"), . Self::ConstIfConst => f.write_str("`~const`"), . } -- line 256 ---------------------------------------- -- line 274 ---------------------------------------- . pub before_feature_tys: Ty<'tcx>, . /// Tuple containing the types of closure captures after the feature `capture_disjoint_fields` . pub after_feature_tys: Ty<'tcx>, . } . . pub trait DefIdTree: Copy { . fn parent(self, id: DefId) -> Option; . 35 ( 0.00%) fn is_descendant_of(self, mut descendant: DefId, ancestor: DefId) -> bool { 29 ( 0.00%) if descendant.krate != ancestor.krate { . return false; . } . 92,669 ( 0.00%) while descendant != ancestor { 160,880 ( 0.00%) match self.parent(descendant) { . Some(parent) => descendant = parent, . None => return false, . } . } . true 40 ( 0.00%) } . } . . impl<'tcx> DefIdTree for TyCtxt<'tcx> { 91,540 ( 0.00%) fn parent(self, id: DefId) -> Option { . self.def_key(id).parent.map(|index| DefId { index, ..id }) 137,310 ( 0.00%) } . } . . impl Visibility { 520 ( 0.00%) pub fn from_hir(visibility: &hir::Visibility<'_>, id: hir::HirId, tcx: TyCtxt<'_>) -> Self { 1,560 ( 0.00%) match visibility.node { . hir::VisibilityKind::Public => Visibility::Public, . hir::VisibilityKind::Crate(_) => Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)), . hir::VisibilityKind::Restricted { ref path, .. } => match path.res { . // If there is no resolution, `resolve` will have already reported an error, so . // assume that the visibility is public to avoid reporting more privacy errors. . Res::Err => Visibility::Public, . def => Visibility::Restricted(def.def_id()), . }, . 
hir::VisibilityKind::Inherited => { 1,040 ( 0.00%) Visibility::Restricted(tcx.parent_module(id).to_def_id()) . } . } 520 ( 0.00%) } . . /// Returns `true` if an item with this visibility is accessible from the given block. 164,979 ( 0.00%) pub fn is_accessible_from(self, module: DefId, tree: T) -> bool { 1,054,437 ( 0.02%) let restriction = match self { . // Public items are visible everywhere. . Visibility::Public => return true, . // Private items from other crates are visible nowhere. . Visibility::Invisible => return false, . // Restricted items are visible in an arbitrary local module. 26,808 ( 0.00%) Visibility::Restricted(other) if other.krate != module.krate => return false, . Visibility::Restricted(module) => module, . }; . 10 ( 0.00%) tree.is_descendant_of(module, restriction) 164,979 ( 0.00%) } . . /// Returns `true` if this visibility is at least as accessible as the given visibility 8,586 ( 0.00%) pub fn is_at_least(self, vis: Visibility, tree: T) -> bool { 38,152 ( 0.00%) let vis_restriction = match vis { . Visibility::Public => return self == Visibility::Public, . Visibility::Invisible => return true, . Visibility::Restricted(module) => module, . }; . . self.is_accessible_from(vis_restriction, tree) 8,586 ( 0.00%) } . . // Returns `true` if this item is visible anywhere in the local crate. . pub fn is_visible_locally(self) -> bool { . match self { . Visibility::Public => true, . Visibility::Restricted(def_id) => def_id.is_local(), . Visibility::Invisible => false, . } . } . . pub fn is_public(self) -> bool { 8,214 ( 0.00%) matches!(self, Visibility::Public) 4,107 ( 0.00%) } . } . . /// The crate variances map is computed during typeck and contains the . /// variance of every item in the local crate. You should not use it . /// directly, because to do so will make your pass dependent on the . /// HIR of every item in the local crate. Instead, use . /// `tcx.variances_of()` to get the variance for a *particular* . /// item. -- line 365 ---------------------------------------- -- line 509 ---------------------------------------- . #[derive(Clone, Copy, Lift)] . pub struct Predicate<'tcx> { . inner: &'tcx PredicateInner<'tcx>, . } . . impl<'tcx> PartialEq for Predicate<'tcx> { . fn eq(&self, other: &Self) -> bool { . // `self.kind` is always interned. 248,458 ( 0.00%) ptr::eq(self.inner, other.inner) 241,114 ( 0.00%) } . } . . impl Hash for Predicate<'_> { . fn hash(&self, s: &mut H) { . (self.inner as *const PredicateInner<'_>).hash(s) . } . } . . impl<'tcx> Eq for Predicate<'tcx> {} . . impl<'tcx> Predicate<'tcx> { . /// Gets the inner `Binder<'tcx, PredicateKind<'tcx>>`. . #[inline] . pub fn kind(self) -> Binder<'tcx, PredicateKind<'tcx>> { 2,027,162 ( 0.03%) self.inner.kind . } . . /// Flips the polarity of a Predicate. . /// . /// Given `T: Trait` predicate it returns `T: !Trait` and given `T: !Trait` returns `T: Trait`. . pub fn flip_polarity(&self, tcx: TyCtxt<'tcx>) -> Option> { . let kind = self . .inner -- line 541 ---------------------------------------- -- line 567 ---------------------------------------- . flags: _, . outer_exclusive_binder: _, . } = self.inner; . . kind.hash_stable(hcx, hasher); . } . } . 8,511,611 ( 0.14%) #[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] 6,614,182 ( 0.11%) #[derive(HashStable, TypeFoldable)] . pub enum PredicateKind<'tcx> { . /// Corresponds to `where Foo: Bar`. `Foo` here would be . /// the `Self` type of the trait reference and `A`, `B`, and `C` . /// would be the type parameters. . 
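Editor's note: `is_descendant_of` above (and `is_accessible_from`, which builds on it) is a plain walk up the parent chain: after an early cross-crate check, it follows parent links until it reaches the ancestor or runs out of parents. A stand-alone sketch with a hypothetical `parent_of` lookup:

fn is_descendant_of(parent_of: impl Fn(u32) -> Option<u32>, mut node: u32, ancestor: u32) -> bool {
    while node != ancestor {
        match parent_of(node) {
            Some(parent) => node = parent, // keep climbing toward the root
            None => return false,          // hit a root without meeting `ancestor`
        }
    }
    true
}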
Trait(TraitPredicate<'tcx>), . . /// `where 'a: 'b` . RegionOutlives(RegionOutlivesPredicate<'tcx>), -- line 584 ---------------------------------------- -- line 594 ---------------------------------------- . WellFormed(GenericArg<'tcx>), . . /// Trait must be object-safe. . ObjectSafe(DefId), . . /// No direct syntax. May be thought of as `where T: FnFoo<...>` . /// for some substitutions `...` and `T` being a closure type. . /// Satisfied (or refuted) once we know the closure's kind. 837 ( 0.00%) ClosureKind(DefId, SubstsRef<'tcx>, ClosureKind), . . /// `T1 <: T2` . /// . /// This obligation is created most often when we have two . /// unresolved type variables and hence don't have enough . /// information to process the subtyping obligation yet. 6,447 ( 0.00%) Subtype(SubtypePredicate<'tcx>), . . /// `T1` coerced to `T2` . /// . /// Like a subtyping obligation, this is created most often . /// when we have two unresolved type variables and hence . /// don't have enough information to process the coercion . /// obligation yet. At the moment, we actually process coercions . /// very much like subtyping and don't handle the full coercion -- line 617 ---------------------------------------- -- line 645 ---------------------------------------- . } . . impl<'tcx> Predicate<'tcx> { . /// Performs a substitution suitable for going from a . /// poly-trait-ref to supertraits that must hold if that . /// poly-trait-ref holds. This is slightly different from a normal . /// substitution in terms of what happens with bound regions. See . /// lengthy comment below for details. 5,382 ( 0.00%) pub fn subst_supertrait( . self, . tcx: TyCtxt<'tcx>, . trait_ref: &ty::PolyTraitRef<'tcx>, . ) -> Predicate<'tcx> { . // The interaction between HRTB and supertraits is not entirely . // obvious. Let me walk you (and myself) through an example. . // . // Let's start with an easy case. Consider two traits: -- line 661 ---------------------------------------- -- line 731 ---------------------------------------- . // ['x, 'b] . let bound_pred = self.kind(); . let pred_bound_vars = bound_pred.bound_vars(); . let trait_bound_vars = trait_ref.bound_vars(); . // 1) Self: Bar1<'a, '^0.0> -> Self: Bar1<'a, '^0.1> . let shifted_pred = . tcx.shift_bound_var_indices(trait_bound_vars.len(), bound_pred.skip_binder()); . // 2) Self: Bar1<'a, '^0.1> -> T: Bar1<'^0.0, '^0.1> 598 ( 0.00%) let new = shifted_pred.subst(tcx, trait_ref.skip_binder().substs); . // 3) ['x] + ['b] -> ['x, 'b] . let bound_vars = . tcx.mk_bound_variable_kinds(trait_bound_vars.iter().chain(pred_bound_vars)); 5,382 ( 0.00%) tcx.reuse_or_mk_predicate(self, ty::Binder::bind_with_vars(new, bound_vars)) 4,186 ( 0.00%) } . } . 5,080 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] 460,397 ( 0.01%) #[derive(HashStable, TypeFoldable)] . pub struct TraitPredicate<'tcx> { . pub trait_ref: TraitRef<'tcx>, . 712,244 ( 0.01%) pub constness: BoundConstness, . 829,766 ( 0.01%) pub polarity: ImplPolarity, . } . . pub type PolyTraitPredicate<'tcx> = ty::Binder<'tcx, TraitPredicate<'tcx>>; . . impl<'tcx> TraitPredicate<'tcx> { 490,240 ( 0.01%) pub fn remap_constness(&mut self, tcx: TyCtxt<'tcx>, param_env: &mut ParamEnv<'tcx>) { 429,138 ( 0.01%) if unlikely!(Some(self.trait_ref.def_id) == tcx.lang_items().drop_trait()) { . // remap without changing constness of this predicate. . // this is because `T: ~const Drop` has a different meaning to `T: Drop` 178 ( 0.00%) param_env.remap_constness_with(self.constness) . 
} else { 61,191 ( 0.00%) *param_env = param_env.with_constness(self.constness.and(param_env.constness())) . } 367,680 ( 0.01%) } . . /// Remap the constness of this predicate before emitting it for diagnostics. . pub fn remap_constness_diag(&mut self, param_env: ParamEnv<'tcx>) { . // this is different to `remap_constness` that callees want to print this predicate . // in case of selection errors. `T: ~const Drop` bounds cannot end up here when the . // param_env is not const because we it is always satisfied in non-const contexts. . if let hir::Constness::NotConst = param_env.constness() { . self.constness = ty::BoundConstness::NotConst; . } . } . . pub fn def_id(self) -> DefId { 148,320 ( 0.00%) self.trait_ref.def_id 74,160 ( 0.00%) } . 91,414 ( 0.00%) pub fn self_ty(self) -> Ty<'tcx> { 274,242 ( 0.00%) self.trait_ref.self_ty() 182,828 ( 0.00%) } . } . . impl<'tcx> PolyTraitPredicate<'tcx> { . pub fn def_id(self) -> DefId { . // Ok to skip binder since trait `DefId` does not care about regions. . self.skip_binder().def_id() 35,962 ( 0.00%) } . 4 ( 0.00%) pub fn self_ty(self) -> ty::Binder<'tcx, Ty<'tcx>> { 16 ( 0.00%) self.map_bound(|trait_ref| trait_ref.self_ty()) 8 ( 0.00%) } . . /// Remap the constness of this predicate before emitting it for diagnostics. . pub fn remap_constness_diag(&mut self, param_env: ParamEnv<'tcx>) { . *self = self.map_bound(|mut p| { . p.remap_constness_diag(param_env); . p . }); . } . } . 399 ( 0.00%) #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] 119,307 ( 0.00%) #[derive(HashStable, TypeFoldable)] 187,697 ( 0.00%) pub struct OutlivesPredicate(pub A, pub B); // `A: B` . pub type RegionOutlivesPredicate<'tcx> = OutlivesPredicate, ty::Region<'tcx>>; . pub type TypeOutlivesPredicate<'tcx> = OutlivesPredicate, ty::Region<'tcx>>; . pub type PolyRegionOutlivesPredicate<'tcx> = ty::Binder<'tcx, RegionOutlivesPredicate<'tcx>>; . pub type PolyTypeOutlivesPredicate<'tcx> = ty::Binder<'tcx, TypeOutlivesPredicate<'tcx>>; . . /// Encodes that `a` must be a subtype of `b`. The `a_is_expected` flag indicates . /// whether the `a` type is the type that we should label as "expected" when . /// presenting user diagnostics. . #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, TyEncodable, TyDecodable)] 17,718 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct SubtypePredicate<'tcx> { 27,853 ( 0.00%) pub a_is_expected: bool, . pub a: Ty<'tcx>, . pub b: Ty<'tcx>, . } . pub type PolySubtypePredicate<'tcx> = ty::Binder<'tcx, SubtypePredicate<'tcx>>; . . /// Encodes that we have to coerce *from* the `a` type to the `b` type. . #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, TyEncodable, TyDecodable)] 15 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct CoercePredicate<'tcx> { . pub a: Ty<'tcx>, . pub b: Ty<'tcx>, . } . pub type PolyCoercePredicate<'tcx> = ty::Binder<'tcx, CoercePredicate<'tcx>>; . 79,893 ( 0.00%) #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, TyEncodable, TyDecodable)] 104,137 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub enum Term<'tcx> { 2 ( 0.00%) Ty(Ty<'tcx>), . Const(&'tcx Const<'tcx>), . } . . impl<'tcx> From> for Term<'tcx> { . fn from(ty: Ty<'tcx>) -> Self { . Term::Ty(ty) 11,829 ( 0.00%) } . } . . impl<'tcx> From<&'tcx Const<'tcx>> for Term<'tcx> { . fn from(c: &'tcx Const<'tcx>) -> Self { . Term::Const(c) . } . } . . impl<'tcx> Term<'tcx> { . pub fn ty(&self) -> Option> { 15,880 ( 0.00%) if let Term::Ty(ty) = self { Some(ty) } else { None } 7,940 ( 0.00%) } . } . . 
/// This kind of predicate has no *direct* correspondent in the . /// syntax, but it roughly corresponds to the syntactic forms: . /// . /// 1. `T: TraitRef<..., Item = Type>` . /// 2. `>::Item == Type` (NYI) . /// . /// In particular, form #1 is "desugared" to the combination of a . /// normal trait predicate (`T: TraitRef<...>`) and one of these . /// predicates. Form #2 is a broader form in that it also permits . /// equality between arbitrary types. Processing an instance of . /// Form #2 eventually yields one of these `ProjectionPredicate` . /// instances to normalize the LHS. 56 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Hash, TyEncodable, TyDecodable)] 27,164 ( 0.00%) #[derive(HashStable, TypeFoldable)] . pub struct ProjectionPredicate<'tcx> { . pub projection_ty: ProjectionTy<'tcx>, 30,177 ( 0.00%) pub term: Term<'tcx>, . } . . pub type PolyProjectionPredicate<'tcx> = Binder<'tcx, ProjectionPredicate<'tcx>>; . . impl<'tcx> PolyProjectionPredicate<'tcx> { . /// Returns the `DefId` of the trait of the associated item being projected. . #[inline] . pub fn trait_def_id(&self, tcx: TyCtxt<'tcx>) -> DefId { 176 ( 0.00%) self.skip_binder().projection_ty.trait_def_id(tcx) . } . . /// Get the [PolyTraitRef] required for this projection to be well formed. . /// Note that for generic associated types the predicates of the associated . /// type also need to be checked. . #[inline] . pub fn required_poly_trait_ref(&self, tcx: TyCtxt<'tcx>) -> PolyTraitRef<'tcx> { . // Note: unlike with `TraitRef::to_poly_trait_ref()`, . // `self.0.trait_ref` is permitted to have escaping regions. . // This is because here `self` has a `Binder` and so does our . // return value, so we are preserving the number of binding . // levels. 196 ( 0.00%) self.map_bound(|predicate| predicate.projection_ty.trait_ref(tcx)) . } . . pub fn term(&self) -> Binder<'tcx, Term<'tcx>> { . self.map_bound(|predicate| predicate.term) . } . . /// The `DefId` of the `TraitItem` for the associated type. . /// . /// Note that this is not the `DefId` of the `TraitRef` containing this . /// associated type, which is in `tcx.associated_item(projection_def_id()).container`. . pub fn projection_def_id(&self) -> DefId { . // Ok to skip binder since trait `DefId` does not care about regions. 514 ( 0.00%) self.skip_binder().projection_ty.item_def_id 257 ( 0.00%) } . } . . pub trait ToPolyTraitRef<'tcx> { . fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx>; . } . . impl<'tcx> ToPolyTraitRef<'tcx> for PolyTraitPredicate<'tcx> { 4,872 ( 0.00%) fn to_poly_trait_ref(&self) -> PolyTraitRef<'tcx> { 4,872 ( 0.00%) self.map_bound_ref(|trait_pred| trait_pred.trait_ref) 4,872 ( 0.00%) } . } . . pub trait ToPredicate<'tcx> { . fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx>; . } . . impl<'tcx> ToPredicate<'tcx> for Binder<'tcx, PredicateKind<'tcx>> { . #[inline(always)] . fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { . tcx.mk_predicate(self) . } . } . . impl<'tcx> ToPredicate<'tcx> for PolyTraitPredicate<'tcx> { 31,049 ( 0.00%) fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { 155,245 ( 0.00%) self.map_bound(PredicateKind::Trait).to_predicate(tcx) 62,098 ( 0.00%) } . } . . impl<'tcx> ToPredicate<'tcx> for PolyRegionOutlivesPredicate<'tcx> { . fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { . self.map_bound(PredicateKind::RegionOutlives).to_predicate(tcx) . } . } . . 
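Editor's note: the `ProjectionPredicate` comment above describes form #1, where a bound like `T: TraitRef<..., Item = Type>` desugars into a trait predicate plus a projection predicate. A surface-syntax illustration of that form:

// The single bound below contributes two predicates during trait checking:
// a trait predicate `I: Iterator` and a projection predicate
// `<I as Iterator>::Item == u32`.
fn sum_u32<I>(it: I) -> u32
where
    I: Iterator<Item = u32>,
{
    it.sum()
}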
impl<'tcx> ToPredicate<'tcx> for PolyTypeOutlivesPredicate<'tcx> { 419 ( 0.00%) fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { 838 ( 0.00%) self.map_bound(PredicateKind::TypeOutlives).to_predicate(tcx) 838 ( 0.00%) } . } . . impl<'tcx> ToPredicate<'tcx> for PolyProjectionPredicate<'tcx> { 3,789 ( 0.00%) fn to_predicate(self, tcx: TyCtxt<'tcx>) -> Predicate<'tcx> { 18,945 ( 0.00%) self.map_bound(PredicateKind::Projection).to_predicate(tcx) 7,578 ( 0.00%) } . } . . impl<'tcx> Predicate<'tcx> { 12,854 ( 0.00%) pub fn to_opt_poly_trait_pred(self) -> Option> { . let predicate = self.kind(); 38,562 ( 0.00%) match predicate.skip_binder() { 25,865 ( 0.00%) PredicateKind::Trait(t) => Some(predicate.rebind(t)), . PredicateKind::Projection(..) . | PredicateKind::Subtype(..) . | PredicateKind::Coerce(..) . | PredicateKind::RegionOutlives(..) . | PredicateKind::WellFormed(..) . | PredicateKind::ObjectSafe(..) . | PredicateKind::ClosureKind(..) . | PredicateKind::TypeOutlives(..) . | PredicateKind::ConstEvaluatable(..) . | PredicateKind::ConstEquate(..) 1,254 ( 0.00%) | PredicateKind::TypeWellFormedFromEnv(..) => None, . } 12,854 ( 0.00%) } . 4,436 ( 0.00%) pub fn to_opt_type_outlives(self) -> Option> { . let predicate = self.kind(); 8,872 ( 0.00%) match predicate.skip_binder() { 6 ( 0.00%) PredicateKind::TypeOutlives(data) => Some(predicate.rebind(data)), . PredicateKind::Trait(..) . | PredicateKind::Projection(..) . | PredicateKind::Subtype(..) . | PredicateKind::Coerce(..) . | PredicateKind::RegionOutlives(..) . | PredicateKind::WellFormed(..) . | PredicateKind::ObjectSafe(..) . | PredicateKind::ClosureKind(..) . | PredicateKind::ConstEvaluatable(..) . | PredicateKind::ConstEquate(..) 4,434 ( 0.00%) | PredicateKind::TypeWellFormedFromEnv(..) => None, . } 4,436 ( 0.00%) } . } . . /// Represents the bounds declared on a particular set of type . /// parameters. Should eventually be generalized into a flag list of . /// where-clauses. You can obtain an `InstantiatedPredicates` list from a . /// `GenericPredicates` by using the `instantiate` method. Note that this method . /// reflects an important semantic invariant of `InstantiatedPredicates`: while . /// the `GenericPredicates` are expressed in terms of the bound type -- line 1005 ---------------------------------------- -- line 1011 ---------------------------------------- . /// Example: . /// . /// struct Foo> { ... } . /// . /// Here, the `GenericPredicates` for `Foo` would contain a list of bounds like . /// `[[], [U:Bar]]`. Now if there were some particular reference . /// like `Foo`, then the `InstantiatedPredicates` would be `[[], . /// [usize:Bar]]`. 292,388 ( 0.00%) #[derive(Clone, Debug, TypeFoldable)] . pub struct InstantiatedPredicates<'tcx> { . pub predicates: Vec>, . pub spans: Vec, . } . . impl<'tcx> InstantiatedPredicates<'tcx> { 253 ( 0.00%) pub fn empty() -> InstantiatedPredicates<'tcx> { 127,736 ( 0.00%) InstantiatedPredicates { predicates: vec![], spans: vec![] } 253 ( 0.00%) } . . pub fn is_empty(&self) -> bool { . self.predicates.is_empty() 82 ( 0.00%) } . } . 35 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, Eq, HashStable, TyEncodable, TyDecodable, TypeFoldable)] . pub struct OpaqueTypeKey<'tcx> { . pub def_id: DefId, . pub substs: SubstsRef<'tcx>, . } . . rustc_index::newtype_index! { . /// "Universes" are used during type- and trait-checking in the . 
/// presence of `for<..>` binders to control what sets of names are -- line 1043 ---------------------------------------- -- line 1091 ---------------------------------------- . /// ``` . /// for<'a> fn(&'a u32) . /// ``` . /// . /// Once we "enter" into this `for<'a>` quantifier, we are in a . /// new universe that extends `U` -- in this new universe, we can . /// name the region `'a`, but that region was not nameable from . /// `U` because it was not in scope there. 37,500 ( 0.00%) pub fn next_universe(self) -> UniverseIndex { . UniverseIndex::from_u32(self.private.checked_add(1).unwrap()) 112,500 ( 0.00%) } . . /// Returns `true` if `self` can name a name from `other` -- in other words, . /// if the set of names in `self` is a superset of those in . /// `other` (`self >= other`). . pub fn can_name(self, other: UniverseIndex) -> bool { 174,438 ( 0.00%) self.private >= other.private 87,219 ( 0.00%) } . . /// Returns `true` if `self` cannot name some names from `other` -- in other . /// words, if the set of names in `self` is a strict subset of . /// those in `other` (`self < other`). . pub fn cannot_name(self, other: UniverseIndex) -> bool { 296 ( 0.00%) self.private < other.private 148 ( 0.00%) } . } . . /// The "placeholder index" fully defines a placeholder region, type, or const. Placeholders are . /// identified by both a universe, as well as a name residing within that universe. Distinct bound . /// regions/types/consts within the same universe simply have an unknown relationship to one . /// another. . #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, TyEncodable, TyDecodable, PartialOrd, Ord)] . pub struct Placeholder { 3,633 ( 0.00%) pub universe: UniverseIndex, 2,726 ( 0.00%) pub name: T, . } . . impl<'a, T> HashStable> for Placeholder . where . T: HashStable>, . { . fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { . self.universe.hash_stable(hcx, hasher); -- line 1133 ---------------------------------------- -- line 1194 ---------------------------------------- . /// aren't allowed to call that query: it is equal to `type_of(const_param)` which is . /// trivial to compute. . /// . /// If we now want to use that constant in a place which potentionally needs its type . /// we also pass the type of its `const_param`. This is the point of `WithOptConstParam`, . /// except that instead of a `Ty` we bundle the `DefId` of the const parameter. . /// Meaning that we need to use `type_of(const_param_did)` if `const_param_did` is `Some` . /// to get the type of `did`. 1,569 ( 0.00%) #[derive(Copy, Clone, Debug, TypeFoldable, Lift, TyEncodable, TyDecodable)] . #[derive(PartialEq, Eq, PartialOrd, Ord)] . #[derive(Hash, HashStable)] . pub struct WithOptConstParam { 6,152 ( 0.00%) pub did: T, . /// The `DefId` of the corresponding generic parameter in case `did` is . /// a const argument. . /// . /// Note that even if `did` is a const argument, this may still be `None`. . /// All queries taking `WithOptConstParam` start by calling `tcx.opt_const_param_of(def.did)` . /// to potentially update `param_did` in the case it is `None`. 6,367 ( 0.00%) pub const_param_did: Option, . } . . impl WithOptConstParam { . /// Creates a new `WithOptConstParam` setting `const_param_did` to `None`. . #[inline(always)] . pub fn unknown(did: T) -> WithOptConstParam { . WithOptConstParam { did, const_param_did: None } 14,637 ( 0.00%) } . } . . impl WithOptConstParam { . /// Returns `Some((did, param_did))` if `def_id` is a const argument, . /// `None` otherwise. . 
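Editor's note: a minimal sketch of the ordering behind `next_universe` / `can_name` / `cannot_name` above: universes form a chain, and a universe can name anything introduced in itself or in an earlier (smaller) universe.

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
struct Universe(u32);

impl Universe {
    fn next(self) -> Universe {
        Universe(self.0.checked_add(1).unwrap())
    }
    fn can_name(self, other: Universe) -> bool {
        self.0 >= other.0
    }
}

fn main() {
    let root = Universe(0);
    let inner = root.next(); // e.g. entered under a `for<'a>` binder
    assert!(inner.can_name(root));  // the inner scope sees outer names
    assert!(!root.can_name(inner)); // but not the other way around
}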
#[inline(always)] . pub fn try_lookup(did: LocalDefId, tcx: TyCtxt<'_>) -> Option<(LocalDefId, DefId)> { . tcx.opt_const_param_of(did).map(|param_did| (did, param_did)) . } . . /// In case `self` is unknown but `self.did` is a const argument, this returns . /// a `WithOptConstParam` with the correct `const_param_did`. . #[inline(always)] . pub fn try_upgrade(self, tcx: TyCtxt<'_>) -> Option> { 2,509 ( 0.00%) if self.const_param_did.is_none() { 4,159 ( 0.00%) if let const_param_did @ Some(_) = tcx.opt_const_param_of(self.did) { . return Some(WithOptConstParam { did: self.did, const_param_did }); . } . } . . None . } . . pub fn to_global(self) -> WithOptConstParam { . WithOptConstParam { did: self.did.to_def_id(), const_param_did: self.const_param_did } . } . 136 ( 0.00%) pub fn def_id_for_type_of(self) -> DefId { 144 ( 0.00%) if let Some(did) = self.const_param_did { did } else { self.did.to_def_id() } 34 ( 0.00%) } . } . . impl WithOptConstParam { 1,717 ( 0.00%) pub fn as_local(self) -> Option> { . self.did . .as_local() . .map(|did| WithOptConstParam { did, const_param_did: self.const_param_did }) 1,717 ( 0.00%) } . . pub fn as_const_arg(self) -> Option<(LocalDefId, DefId)> { 2,491 ( 0.00%) if let Some(param_did) = self.const_param_did { 1,199 ( 0.00%) if let Some(did) = self.did.as_local() { . return Some((did, param_did)); . } . } . . None 252 ( 0.00%) } . . pub fn is_local(self) -> bool { . self.did.is_local() . } . 339 ( 0.00%) pub fn def_id_for_type_of(self) -> DefId { . self.const_param_did.unwrap_or(self.did) 339 ( 0.00%) } . } . . /// When type checking, we use the `ParamEnv` to track . /// details about the set of where-clauses that are in scope at this . /// particular point. . #[derive(Copy, Clone, Hash, PartialEq, Eq)] . pub struct ParamEnv<'tcx> { . /// This packs both caller bounds and the reveal enum into one pointer. -- line 1285 ---------------------------------------- -- line 1302 ---------------------------------------- . reveal: traits::Reveal, . constness: hir::Constness, . } . . unsafe impl rustc_data_structures::tagged_ptr::Tag for ParamTag { . const BITS: usize = 2; . #[inline] . fn into_usize(self) -> usize { 163,530 ( 0.00%) match self { . Self { reveal: traits::Reveal::UserFacing, constness: hir::Constness::NotConst } => 0, . Self { reveal: traits::Reveal::All, constness: hir::Constness::NotConst } => 1, . Self { reveal: traits::Reveal::UserFacing, constness: hir::Constness::Const } => 2, . Self { reveal: traits::Reveal::All, constness: hir::Constness::Const } => 3, . } . } . #[inline] . unsafe fn from_usize(ptr: usize) -> Self { 1,176,788 ( 0.02%) match ptr { . 0 => Self { reveal: traits::Reveal::UserFacing, constness: hir::Constness::NotConst }, . 1 => Self { reveal: traits::Reveal::All, constness: hir::Constness::NotConst }, . 2 => Self { reveal: traits::Reveal::UserFacing, constness: hir::Constness::Const }, . 3 => Self { reveal: traits::Reveal::All, constness: hir::Constness::Const }, . _ => std::hint::unreachable_unchecked(), . } . } . } -- line 1327 ---------------------------------------- -- line 1405 ---------------------------------------- . pub fn new( . caller_bounds: &'tcx List>, . reveal: Reveal, . constness: hir::Constness, . ) -> Self { . ty::ParamEnv { packed: CopyTaggedPtr::new(caller_bounds, ParamTag { reveal, constness }) } . } . 32 ( 0.00%) pub fn with_user_facing(mut self) -> Self { . self.packed.set_tag(ParamTag { reveal: Reveal::UserFacing, ..self.packed.tag() }); . self 32 ( 0.00%) } . . #[inline] . 
pub fn with_constness(mut self, constness: hir::Constness) -> Self { . self.packed.set_tag(ParamTag { constness, ..self.packed.tag() }); . self . } . . #[inline] -- line 1424 ---------------------------------------- -- line 1430 ---------------------------------------- . #[inline] . pub fn without_const(mut self) -> Self { . self.packed.set_tag(ParamTag { constness: hir::Constness::NotConst, ..self.packed.tag() }); . self . } . . #[inline] . pub fn remap_constness_with(&mut self, mut constness: ty::BoundConstness) { 38,401 ( 0.00%) *self = self.with_constness(constness.and(self.constness())) . } . . /// Returns a new parameter environment with the same clauses, but . /// which "reveals" the true results of projections in all cases . /// (even for associated types that are specializable). This is . /// the desired behavior during codegen and certain other special . /// contexts; normally though we want to use `Reveal::UserFacing`, . /// which is the default. . /// All opaque types in the caller_bounds of the `ParamEnv` . /// will be normalized to their underlying types. . /// See PR #65989 and issue #65918 for more details 8,640 ( 0.00%) pub fn with_reveal_all_normalized(self, tcx: TyCtxt<'tcx>) -> Self { . if self.packed.tag().reveal == traits::Reveal::All { . return self; . } . . ParamEnv::new( . tcx.normalize_opaque_types(self.caller_bounds()), . Reveal::All, . self.constness(), . ) 9,720 ( 0.00%) } . . /// Returns this same environment but with no caller bounds. . #[inline] . pub fn without_caller_bounds(self) -> Self { . Self::new(List::empty(), self.reveal(), self.constness()) . } . . /// Creates a suitable environment in which to perform trait -- line 1468 ---------------------------------------- -- line 1472 ---------------------------------------- . /// pair it with the empty environment. This improves caching and is generally . /// invisible. . /// . /// N.B., we preserve the environment when type-checking because it . /// is possible for the user to have wacky where-clauses like . /// `where Box: Copy`, which are clearly never . /// satisfiable. We generally want to behave as if they were true, . /// although the surrounding function is never reachable. 69,835 ( 0.00%) pub fn and>(self, value: T) -> ParamEnvAnd<'tcx, T> { . match self.reveal() { . Reveal::UserFacing => ParamEnvAnd { param_env: self, value }, . . Reveal::All => { 1,630 ( 0.00%) if value.is_global() { . ParamEnvAnd { param_env: self.without_caller_bounds(), value } . } else { . ParamEnvAnd { param_env: self, value } . } . } . } 66,809 ( 0.00%) } . } . . // FIXME(ecstaticmorse): Audit all occurrences of `without_const().to_predicate(tcx)` to ensure that . // the constness of trait bounds is being propagated correctly. . impl<'tcx> PolyTraitRef<'tcx> { . #[inline] . pub fn with_constness(self, constness: BoundConstness) -> PolyTraitPredicate<'tcx> { 2,166 ( 0.00%) self.map_bound(|trait_ref| ty::TraitPredicate { . trait_ref, . constness, . polarity: ty::ImplPolarity::Positive, . }) . } . #[inline] . pub fn without_const(self) -> PolyTraitPredicate<'tcx> { . self.with_constness(BoundConstness::NotConst) . } . } . 3,021 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, TypeFoldable)] . pub struct ParamEnvAnd<'tcx, T> { 46,625 ( 0.00%) pub param_env: ParamEnv<'tcx>, 1,068,935 ( 0.02%) pub value: T, . } . . impl<'tcx, T> ParamEnvAnd<'tcx, T> { . pub fn into_parts(self) -> (ParamEnv<'tcx>, T) { . (self.param_env, self.value) . } . . #[inline] . pub fn without_const(mut self) -> Self { . 
self.param_env = self.param_env.without_const(); 3,652 ( 0.00%) self . } . } . . impl<'a, 'tcx, T> HashStable> for ParamEnvAnd<'tcx, T> . where . T: HashStable>, . { . fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { -- line 1534 ---------------------------------------- -- line 1542 ---------------------------------------- . #[derive(Copy, Clone, Debug, HashStable)] . pub struct Destructor { . /// The `DefId` of the destructor method . pub did: DefId, . /// The constness of the destructor method . pub constness: hir::Constness, . } . 13,786 ( 0.00%) bitflags! { 62,217 ( 0.00%) #[derive(HashStable, TyEncodable, TyDecodable)] . pub struct VariantFlags: u32 { . const NO_VARIANT_FLAGS = 0; . /// Indicates whether the field list of this variant is `#[non_exhaustive]`. . const IS_FIELD_LIST_NON_EXHAUSTIVE = 1 << 0; . /// Indicates whether this variant was obtained as part of recovering from . /// a syntactic error. May be incomplete or bogus. . const IS_RECOVERED = 1 << 1; . } . } . . /// Definition of a variant -- a struct's fields or an enum variant. 199,815 ( 0.00%) #[derive(Debug, HashStable, TyEncodable, TyDecodable)] . pub struct VariantDef { . /// `DefId` that identifies the variant itself. . /// If this variant belongs to a struct or union, then this is a copy of its `DefId`. . pub def_id: DefId, . /// `DefId` that identifies the variant's constructor. . /// If this variant is a struct variant, then this is `None`. . pub ctor_def_id: Option, . /// Variant or struct name. -- line 1571 ---------------------------------------- -- line 1592 ---------------------------------------- . /// `parent_did` is the `DefId` of the `AdtDef` representing the enum or struct that . /// owns this variant. It is used for checking if a struct has `#[non_exhaustive]` w/out having . /// to go through the redirect of checking the ctor's attributes - but compiling a small crate . /// requires loading the `AdtDef`s for all the structs in the universe (e.g., coherence for any . /// built-in trait), and we do not want to load attributes twice. . /// . /// If someone speeds up attribute loading to not be a performance concern, they can . /// remove this hack and use the constructor `DefId` everywhere. 6,792 ( 0.00%) pub fn new( . name: Symbol, . variant_did: Option, . ctor_def_id: Option, . discr: VariantDiscr, . fields: Vec, . ctor_kind: CtorKind, . adt_kind: AdtKind, . parent_did: DefId, -- line 1608 ---------------------------------------- -- line 1611 ---------------------------------------- . ) -> Self { . debug!( . "VariantDef::new(name = {:?}, variant_did = {:?}, ctor_def_id = {:?}, discr = {:?}, . fields = {:?}, ctor_kind = {:?}, adt_kind = {:?}, parent_did = {:?})", . name, variant_did, ctor_def_id, discr, fields, ctor_kind, adt_kind, parent_did, . ); . . let mut flags = VariantFlags::NO_VARIANT_FLAGS; 849 ( 0.00%) if is_field_list_non_exhaustive { . flags |= VariantFlags::IS_FIELD_LIST_NON_EXHAUSTIVE; . } . 2,547 ( 0.00%) if recovered { . flags |= VariantFlags::IS_RECOVERED; . } . 7,641 ( 0.00%) VariantDef { . def_id: variant_did.unwrap_or(parent_did), . ctor_def_id, . name, . discr, 3,396 ( 0.00%) fields, . ctor_kind, . flags, . } 3,396 ( 0.00%) } . . /// Is this field list non-exhaustive? . #[inline] . pub fn is_field_list_non_exhaustive(&self) -> bool { . self.flags.intersects(VariantFlags::IS_FIELD_LIST_NON_EXHAUSTIVE) . } . . /// Was this variant obtained as part of recovering from a syntactic error? . #[inline] . pub fn is_recovered(&self) -> bool { . 
self.flags.intersects(VariantFlags::IS_RECOVERED) . } . . /// Computes the `Ident` of this variant by looking up the `Span` 92,785 ( 0.00%) pub fn ident(&self, tcx: TyCtxt<'_>) -> Ident { 53,020 ( 0.00%) Ident::new(self.name, tcx.def_ident_span(self.def_id).unwrap()) 119,295 ( 0.00%) } . } . 139,890 ( 0.00%) #[derive(Copy, Clone, Debug, PartialEq, Eq, TyEncodable, TyDecodable, HashStable)] . pub enum VariantDiscr { . /// Explicit value for this variant, i.e., `X = 123`. . /// The `DefId` corresponds to the embedded constant. . Explicit(DefId), . . /// The previous variant's discriminant plus one. . /// For efficiency reasons, the distance from the . /// last `Explicit` discriminant is being stored, . /// or `0` for the first variant, if it has none. . Relative(u32), . } . 247,849 ( 0.00%) #[derive(Debug, HashStable, TyEncodable, TyDecodable)] . pub struct FieldDef { . pub did: DefId, . pub name: Symbol, . pub vis: Visibility, . } . 1,582 ( 0.00%) bitflags! { . #[derive(TyEncodable, TyDecodable, Default, HashStable)] . pub struct ReprFlags: u8 { . const IS_C = 1 << 0; . const IS_SIMD = 1 << 1; . const IS_TRANSPARENT = 1 << 2; . // Internal only for now. If true, don't reorder fields. . const IS_LINEAR = 1 << 3; . // If true, don't expose any niche to type's context. -- line 1684 ---------------------------------------- -- line 1689 ---------------------------------------- . // Any of these flags being set prevent field reordering optimisation. . const IS_UNOPTIMISABLE = ReprFlags::IS_C.bits . | ReprFlags::IS_SIMD.bits . | ReprFlags::IS_LINEAR.bits; . } . } . . /// Represents the repr options provided by the user, 118,408 ( 0.00%) #[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, Default, HashStable)] . pub struct ReprOptions { . pub int: Option, . pub align: Option, . pub pack: Option, . pub flags: ReprFlags, . /// The seed to be used for randomizing a type's layout . /// . /// Note: This could technically be a `[u8; 16]` (a `u128`) which would -- line 1705 ---------------------------------------- -- line 1706 ---------------------------------------- . /// be the "most accurate" hash as it'd encompass the item and crate . /// hash without loss, but it does pay the price of being larger. . /// Everything's a tradeoff, a `u64` seed should be sufficient for our . /// purposes (primarily `-Z randomize-layout`) . pub field_shuffle_seed: u64, . } . . impl ReprOptions { 1,430 ( 0.00%) pub fn new(tcx: TyCtxt<'_>, did: DefId) -> ReprOptions { . let mut flags = ReprFlags::empty(); . let mut size = None; . let mut max_align: Option = None; . let mut min_pack: Option = None; . . // Generate a deterministically-derived seed from the item's path hash . // to allow for cross-crate compilation to actually work . let mut field_shuffle_seed = tcx.def_path_hash(did).0.to_smaller_hash(); . . // If the user defined a custom seed for layout randomization, xor the item's . // path hash with the user defined seed, this will allowing determinism while . // still allowing users to further randomize layout generation for e.g. fuzzing 330 ( 0.00%) if let Some(user_seed) = tcx.sess.opts.debugging_opts.layout_seed { . field_shuffle_seed ^= user_seed; . } . 440 ( 0.00%) for attr in tcx.get_attrs(did).iter() { 570 ( 0.00%) for r in attr::find_repr_attrs(&tcx.sess, attr) { . flags.insert(match r { . attr::ReprC => ReprFlags::IS_C, . attr::ReprPacked(pack) => { . let pack = Align::from_bytes(pack as u64).unwrap(); . min_pack = Some(if let Some(min_pack) = min_pack { . min_pack.min(pack) . } else { . 
pack -- line 1740 ---------------------------------------- -- line 1753 ---------------------------------------- . ReprFlags::empty() . } . }); . } . } . . // If `-Z randomize-layout` was enabled for the type definition then we can . // consider performing layout randomization 880 ( 0.00%) if tcx.sess.opts.debugging_opts.randomize_layout { . flags.insert(ReprFlags::RANDOMIZE_LAYOUT); . } . . // This is here instead of layout because the choice must make it into metadata. 660 ( 0.00%) if !tcx.consider_optimizing(|| format!("Reorder fields of {:?}", tcx.def_path_str(did))) { . flags.insert(ReprFlags::IS_LINEAR); . } . 220 ( 0.00%) Self { int: size, align: max_align, pack: min_pack, flags, field_shuffle_seed } 3,080 ( 0.00%) } . . #[inline] . pub fn simd(&self) -> bool { . self.flags.contains(ReprFlags::IS_SIMD) . } . . #[inline] . pub fn c(&self) -> bool { -- line 1779 ---------------------------------------- -- line 1798 ---------------------------------------- . #[inline] . pub fn hide_niche(&self) -> bool { . self.flags.contains(ReprFlags::HIDE_NICHE) . } . . /// Returns the discriminant type, given these `repr` options. . /// This must only be called on enums! . pub fn discr_type(&self) -> attr::IntType { 7,092 ( 0.00%) self.int.unwrap_or(attr::SignedInt(ast::IntTy::Isize)) 4,190 ( 0.00%) } . . /// Returns `true` if this `#[repr()]` should inhabit "smart enum . /// layout" optimizations, such as representing `Foo<&T>` as a . /// single pointer. . pub fn inhibit_enum_layout_opt(&self) -> bool { 155 ( 0.00%) self.c() || self.int.is_some() . } . . /// Returns `true` if this `#[repr()]` should inhibit struct field reordering . /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr()`. . pub fn inhibit_struct_field_reordering_opt(&self) -> bool { 1,680 ( 0.00%) if let Some(pack) = self.pack { . if pack.bytes() == 1 { . return true; . } . } . 3,360 ( 0.00%) self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some() . } . . /// Returns `true` if this type is valid for reordering and `-Z randomize-layout` . /// was enabled for its declaration crate . pub fn can_randomize_type_layout(&self) -> bool { 840 ( 0.00%) !self.inhibit_struct_field_reordering_opt() . && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT) . } . . /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations. . pub fn inhibit_union_abi_opt(&self) -> bool { . self.c() . } . } . . impl<'tcx> FieldDef { . /// Returns the type of this field. The resulting type is not normalized. The `subst` is . /// typically obtained via the second field of [`TyKind::Adt`]. 170,401 ( 0.00%) pub fn ty(&self, tcx: TyCtxt<'tcx>, subst: SubstsRef<'tcx>) -> Ty<'tcx> { 48,686 ( 0.00%) tcx.type_of(self.did).subst(tcx, subst) 194,744 ( 0.00%) } . . /// Computes the `Ident` of this variant by looking up the `Span` 74,949 ( 0.00%) pub fn ident(&self, tcx: TyCtxt<'_>) -> Ident { 42,828 ( 0.00%) Ident::new(self.name, tcx.def_ident_span(self.did).unwrap()) 96,363 ( 0.00%) } . } . . pub type Attributes<'tcx> = &'tcx [ast::Attribute]; . . #[derive(Debug, PartialEq, Eq)] . pub enum ImplOverlapKind { . /// These impls are always allowed to overlap. . Permitted { -- line 1859 ---------------------------------------- -- line 1891 ---------------------------------------- . /// marker traits. . /// 4. Neither of the impls can have any where-clauses. . /// . /// Once `traitobject` 0.1.0 is no longer an active concern, this hack can be removed. . Issue33140, . } . . 
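The `ParamTag` impl annotated a little earlier in this file packs the `Reveal` and `Constness` choices of a `ParamEnv` into the two low bits of a tagged pointer; `into_usize` and `from_usize` are just a closed table over the four possible combinations. A minimal standalone sketch of the same 2-bit round-trip follows (the `Tag`, `Reveal`, and `Constness` types here are simplified stand-ins, not rustc's own, and the decode is written safely instead of with `unreachable_unchecked`):

// Illustrative only: mirrors the 2-bit encoding used by ParamTag above.
#[derive(Copy, Clone, Debug, PartialEq)]
enum Reveal { UserFacing, All }

#[derive(Copy, Clone, Debug, PartialEq)]
enum Constness { NotConst, Const }

#[derive(Copy, Clone, Debug, PartialEq)]
struct Tag { reveal: Reveal, constness: Constness }

impl Tag {
    // Encode the four possible combinations into the values 0..=3.
    fn into_usize(self) -> usize {
        match self {
            Tag { reveal: Reveal::UserFacing, constness: Constness::NotConst } => 0,
            Tag { reveal: Reveal::All,        constness: Constness::NotConst } => 1,
            Tag { reveal: Reveal::UserFacing, constness: Constness::Const }    => 2,
            Tag { reveal: Reveal::All,        constness: Constness::Const }    => 3,
        }
    }

    // Decode the two bits back into a tag; a safe stand-in for the
    // unsafe `from_usize` shown in the annotated source.
    fn from_usize(bits: usize) -> Option<Tag> {
        Some(match bits {
            0 => Tag { reveal: Reveal::UserFacing, constness: Constness::NotConst },
            1 => Tag { reveal: Reveal::All,        constness: Constness::NotConst },
            2 => Tag { reveal: Reveal::UserFacing, constness: Constness::Const },
            3 => Tag { reveal: Reveal::All,        constness: Constness::Const },
            _ => return None,
        })
    }
}

fn main() {
    // Round-trip all four encodings.
    for bits in 0..4 {
        let tag = Tag::from_usize(bits).unwrap();
        assert_eq!(tag.into_usize(), bits);
    }
    println!("all four tag encodings round-trip");
}

Two spare bits are available because the pointer being tagged points at a sufficiently aligned `List`, which is the premise of the `CopyTaggedPtr` used in the annotated `ParamEnv` code.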
impl<'tcx> TyCtxt<'tcx> { 37,408 ( 0.00%) pub fn typeck_body(self, body: hir::BodyId) -> &'tcx TypeckResults<'tcx> { 9,352 ( 0.00%) self.typeck(self.hir().body_owner_def_id(body)) 37,408 ( 0.00%) } . . pub fn provided_trait_methods(self, id: DefId) -> impl 'tcx + Iterator { . self.associated_items(id) . .in_definition_order() . .filter(|item| item.kind == AssocKind::Fn && item.defaultness.has_value()) . } . . fn item_name_from_hir(self, def_id: DefId) -> Option { . self.hir().get_if_local(def_id).and_then(|node| node.ident()) . } . 558 ( 0.00%) fn item_name_from_def_id(self, def_id: DefId) -> Option { 62 ( 0.00%) if def_id.index == CRATE_DEF_INDEX { . Some(self.crate_name(def_id.krate)) . } else { 186 ( 0.00%) let def_key = self.def_key(def_id); 310 ( 0.00%) match def_key.disambiguated_data.data { . // The name of a constructor is that of its parent. . rustc_hir::definitions::DefPathData::Ctor => self.item_name_from_def_id(DefId { . krate: def_id.krate, . index: def_key.parent.unwrap(), . }), 124 ( 0.00%) _ => def_key.disambiguated_data.data.get_opt_name(), . } . } 496 ( 0.00%) } . . /// Look up the name of an item across crates. This does not look at HIR. . /// . /// When possible, this function should be used for cross-crate lookups over . /// [`opt_item_name`] to avoid invalidating the incremental cache. If you . /// need to handle items without a name, or HIR items that will not be . /// serialized cross-crate, or if you need the span of the item, use . /// [`opt_item_name`] instead. . /// . /// [`opt_item_name`]: Self::opt_item_name 434 ( 0.00%) pub fn item_name(self, id: DefId) -> Symbol { . // Look at cross-crate items first to avoid invalidating the incremental cache . // unless we have to. 62 ( 0.00%) self.item_name_from_def_id(id).unwrap_or_else(|| { . bug!("item_name: no name for {:?}", self.def_path(id)); . }) 310 ( 0.00%) } . . /// Look up the name and span of an item or [`Node`]. . /// . /// See [`item_name`][Self::item_name] for more information. . pub fn opt_item_name(self, def_id: DefId) -> Option { . // Look at the HIR first so the span will be correct if this is a local item. . self.item_name_from_hir(def_id) . .or_else(|| self.item_name_from_def_id(def_id).map(Ident::with_dummy_span)) . } . 1,320,320 ( 0.02%) pub fn opt_associated_item(self, def_id: DefId) -> Option<&'tcx AssocItem> { 528,128 ( 0.01%) if let DefKind::AssocConst | DefKind::AssocFn | DefKind::AssocTy = self.def_kind(def_id) { . Some(self.associated_item(def_id)) . } else { . None . } 1,188,288 ( 0.02%) } . 16,062 ( 0.00%) pub fn field_index(self, hir_id: hir::HirId, typeck_results: &TypeckResults<'_>) -> usize { . typeck_results.field_indices().get(hir_id).cloned().expect("no index for a field") 32,124 ( 0.00%) } . . pub fn find_field_index(self, ident: Ident, variant: &VariantDef) -> Option { . variant . .fields . .iter() . .position(|field| self.hygienic_eq(ident, field.ident(self), variant.def_id)) . } . . /// Returns `true` if the impls are the same polarity and the trait either . /// has no items or is annotated `#[marker]` and prevents item overrides. 21,160 ( 0.00%) pub fn impls_are_allowed_to_overlap( . self, . def_id1: DefId, . def_id2: DefId, . ) -> Option { . // If either trait impl references an error, they're allowed to overlap, . // as one of them essentially doesn't exist. . if self.impl_trait_ref(def_id1).map_or(false, |tr| tr.references_error()) 4,232 ( 0.00%) || self.impl_trait_ref(def_id2).map_or(false, |tr| tr.references_error()) . { . 
return Some(ImplOverlapKind::Permitted { marker: false }); . } . 21,160 ( 0.00%) match (self.impl_polarity(def_id1), self.impl_polarity(def_id2)) { . (ImplPolarity::Reservation, _) | (_, ImplPolarity::Reservation) => { . // `#[rustc_reservation_impl]` impls don't overlap with anything . debug!( . "impls_are_allowed_to_overlap({:?}, {:?}) = Some(Permitted) (reservations)", . def_id1, def_id2 . ); . return Some(ImplOverlapKind::Permitted { marker: false }); . } -- line 1997 ---------------------------------------- -- line 2004 ---------------------------------------- . ); . return None; . } . (ImplPolarity::Positive, ImplPolarity::Positive) . | (ImplPolarity::Negative, ImplPolarity::Negative) => {} . }; . . let is_marker_overlap = { 21,160 ( 0.00%) let is_marker_impl = |def_id: DefId| -> bool { 6,348 ( 0.00%) let trait_ref = self.impl_trait_ref(def_id); 6,348 ( 0.00%) trait_ref.map_or(false, |tr| self.trait_def(tr.def_id).is_marker) 16,928 ( 0.00%) }; 8,464 ( 0.00%) is_marker_impl(def_id1) && is_marker_impl(def_id2) . }; . . if is_marker_overlap { . debug!( . "impls_are_allowed_to_overlap({:?}, {:?}) = Some(Permitted) (marker overlap)", . def_id1, def_id2 . ); . Some(ImplOverlapKind::Permitted { marker: true }) . } else { 8,464 ( 0.00%) if let Some(self_ty1) = self.issue33140_self_ty(def_id1) { . if let Some(self_ty2) = self.issue33140_self_ty(def_id2) { . if self_ty1 == self_ty2 { . debug!( . "impls_are_allowed_to_overlap({:?}, {:?}) - issue #33140 HACK", . def_id1, def_id2 . ); . return Some(ImplOverlapKind::Issue33140); . } else { -- line 2034 ---------------------------------------- -- line 2038 ---------------------------------------- . ); . } . } . } . . debug!("impls_are_allowed_to_overlap({:?}, {:?}) = None", def_id1, def_id2); . None . } 16,928 ( 0.00%) } . . /// Returns `ty::VariantDef` if `res` refers to a struct, . /// or variant or their constructors, panics otherwise. 1,750 ( 0.00%) pub fn expect_variant_res(self, res: Res) -> &'tcx VariantDef { 3,000 ( 0.00%) match res { . Res::Def(DefKind::Variant, did) => { . let enum_did = self.parent(did).unwrap(); . self.adt_def(enum_did).variant_with_id(did) . } . Res::Def(DefKind::Struct | DefKind::Union, did) => self.adt_def(did).non_enum_variant(), . Res::Def(DefKind::Ctor(CtorOf::Variant, ..), variant_ctor_did) => { . let variant_did = self.parent(variant_ctor_did).unwrap(); . let enum_did = self.parent(variant_did).unwrap(); -- line 2059 ---------------------------------------- -- line 2060 ---------------------------------------- . self.adt_def(enum_did).variant_with_ctor_id(variant_ctor_did) . } . Res::Def(DefKind::Ctor(CtorOf::Struct, ..), ctor_did) => { . let struct_did = self.parent(ctor_did).expect("struct ctor has no parent"); . self.adt_def(struct_did).non_enum_variant() . } . _ => bug!("expect_variant_res used with unexpected res {:?}", res), . } 2,000 ( 0.00%) } . . /// Returns the possibly-auto-generated MIR of a `(DefId, Subst)` pair. . pub fn instance_mir(self, instance: ty::InstanceDef<'tcx>) -> &'tcx Body<'tcx> { . match instance { . ty::InstanceDef::Item(def) => match self.def_kind(def.did) { . DefKind::Const . | DefKind::Static . | DefKind::AssocConst -- line 2076 ---------------------------------------- -- line 2091 ---------------------------------------- . | ty::InstanceDef::Virtual(..) . | ty::InstanceDef::ClosureOnceShim { .. } . | ty::InstanceDef::DropGlue(..) . | ty::InstanceDef::CloneShim(..) => self.mir_shims(instance), . } . } . . /// Gets the attributes of a definition. 
355,850 ( 0.01%) pub fn get_attrs(self, did: DefId) -> Attributes<'tcx> { 106,755 ( 0.00%) if let Some(did) = did.as_local() { 87,840 ( 0.00%) self.hir().attrs(self.hir().local_def_id_to_hir_id(did)) . } else { . self.item_attrs(did) . } 284,680 ( 0.00%) } . . /// Determines whether an item is annotated with an attribute. 48,876 ( 0.00%) pub fn has_attr(self, did: DefId, attr: Symbol) -> bool { 137,124 ( 0.00%) self.sess.contains_name(&self.get_attrs(did), attr) . } . . /// Determines whether an item is annotated with `doc(hidden)`. 6,132 ( 0.00%) pub fn is_doc_hidden(self, did: DefId) -> bool { 876 ( 0.00%) self.get_attrs(did) . .iter() 20 ( 0.00%) .filter_map(|attr| if attr.has_name(sym::doc) { attr.meta_item_list() } else { None }) . .any(|items| items.iter().any(|item| item.has_name(sym::hidden))) 7,008 ( 0.00%) } . . /// Returns `true` if this is an `auto trait`. 15,533 ( 0.00%) pub fn trait_is_auto(self, trait_def_id: DefId) -> bool { 2,219 ( 0.00%) self.trait_def(trait_def_id).has_auto_impl 17,752 ( 0.00%) } . . /// Returns layout of a generator. Layout might be unavailable if the . /// generator is tainted by errors. . pub fn generator_layout(self, def_id: DefId) -> Option<&'tcx GeneratorLayout<'tcx>> { . self.optimized_mir(def_id).generator_layout() . } . . /// Given the `DefId` of an impl, returns the `DefId` of the trait it implements. . /// If it implements no trait, returns `None`. 14,812 ( 0.00%) pub fn trait_id_of_impl(self, def_id: DefId) -> Option { . self.impl_trait_ref(def_id).map(|tr| tr.def_id) 19,044 ( 0.00%) } . . /// If the given defid describes a method belonging to an impl, returns the . /// `DefId` of the impl that the method belongs to; otherwise, returns `None`. 808 ( 0.00%) pub fn impl_of_method(self, def_id: DefId) -> Option { 3,232 ( 0.00%) self.opt_associated_item(def_id).and_then(|trait_item| match trait_item.container { . TraitContainer(_) => None, . ImplContainer(def_id) => Some(def_id), . }) 1,616 ( 0.00%) } . . /// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err` . /// with the name of the crate containing the impl. . pub fn span_of_impl(self, impl_did: DefId) -> Result { . if let Some(impl_did) = impl_did.as_local() { . Ok(self.def_span(impl_did)) . } else { . Err(self.crate_name(impl_did.krate)) . } . } . . /// Hygienically compares a use-site name (`use_name`) for a field or an associated item with . /// its supposed definition name (`def_name`). The method also needs `DefId` of the supposed . /// definition's parent/scope to perform comparison. 39,120 ( 0.00%) pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool { . // We could use `Ident::eq` here, but we deliberately don't. The name . // comparison fails frequently, and we want to avoid the expensive . // `normalize_to_macros_2_0()` calls required for the span comparison whenever possible. 4,890 ( 0.00%) use_name.name == def_name.name 14,670 ( 0.00%) && use_name . .span . .ctxt() . .hygienic_eq(def_name.span.ctxt(), self.expn_that_defined(def_parent_def_id)) 39,120 ( 0.00%) } . 36,243 ( 0.00%) pub fn adjust_ident(self, mut ident: Ident, scope: DefId) -> Ident { 8,054 ( 0.00%) ident.span.normalize_to_macros_2_0_and_adjust(self.expn_that_defined(scope)); 8,054 ( 0.00%) ident 32,216 ( 0.00%) } . 55,062 ( 0.00%) pub fn adjust_ident_and_get_scope( . self, . mut ident: Ident, . scope: DefId, . block: hir::HirId, . ) -> (Ident, DefId) { 6,118 ( 0.00%) let scope = ident . .span . 
.normalize_to_macros_2_0_and_adjust(self.expn_that_defined(scope)) 56 ( 0.00%) .and_then(|actual_expansion| actual_expansion.expn_data().parent_module) 24,360 ( 0.00%) .unwrap_or_else(|| self.parent_module(block).to_def_id()); 36,708 ( 0.00%) (ident, scope) 55,062 ( 0.00%) } . 3,255 ( 0.00%) pub fn is_object_safe(self, key: DefId) -> bool { . self.object_safety_violations(key).is_empty() 3,720 ( 0.00%) } . } . . /// Yields the parent function's `LocalDefId` if `def_id` is an `impl Trait` definition. 8,427 ( 0.00%) pub fn is_impl_trait_defn(tcx: TyCtxt<'_>, def_id: DefId) -> Option { 8,427 ( 0.00%) let def_id = def_id.as_local()?; 8,248 ( 0.00%) if let Node::Item(item) = tcx.hir().get_by_def_id(def_id) { 2,232 ( 0.00%) if let hir::ItemKind::OpaqueTy(ref opaque_ty) = item.kind { 338 ( 0.00%) return match opaque_ty.origin { . hir::OpaqueTyOrigin::FnReturn(parent) | hir::OpaqueTyOrigin::AsyncFn(parent) => { . Some(parent) . } . hir::OpaqueTyOrigin::TyAlias => None, . }; . } . } . None 11,236 ( 0.00%) } . . pub fn int_ty(ity: ast::IntTy) -> IntTy { . match ity { . ast::IntTy::Isize => IntTy::Isize, . ast::IntTy::I8 => IntTy::I8, . ast::IntTy::I16 => IntTy::I16, . ast::IntTy::I32 => IntTy::I32, . ast::IntTy::I64 => IntTy::I64, -- line 2216 ---------------------------------------- -- line 2231 ---------------------------------------- . . pub fn float_ty(fty: ast::FloatTy) -> FloatTy { . match fty { . ast::FloatTy::F32 => FloatTy::F32, . ast::FloatTy::F64 => FloatTy::F64, . } . } . 1,504 ( 0.00%) pub fn ast_int_ty(ity: IntTy) -> ast::IntTy { . match ity { . IntTy::Isize => ast::IntTy::Isize, . IntTy::I8 => ast::IntTy::I8, . IntTy::I16 => ast::IntTy::I16, . IntTy::I32 => ast::IntTy::I32, . IntTy::I64 => ast::IntTy::I64, . IntTy::I128 => ast::IntTy::I128, . } 1,504 ( 0.00%) } . . pub fn ast_uint_ty(uty: UintTy) -> ast::UintTy { . match uty { . UintTy::Usize => ast::UintTy::Usize, . UintTy::U8 => ast::UintTy::U8, . UintTy::U16 => ast::UintTy::U16, . UintTy::U32 => ast::UintTy::U32, . UintTy::U64 => ast::UintTy::U64, -- line 2256 ---------------------------------------- -- line 2262 ---------------------------------------- . closure::provide(providers); . context::provide(providers); . erase_regions::provide(providers); . layout::provide(providers); . util::provide(providers); . print::provide(providers); . super::util::bug::provide(providers); . super::middle::provide(providers); 8 ( 0.00%) *providers = ty::query::Providers { . trait_impls_of: trait_def::trait_impls_of_provider, . type_uninhabited_from: inhabitedness::type_uninhabited_from, . const_param_default: consts::const_param_default, . vtable_allocation: vtable::vtable_allocation_provider, . ..*providers . }; 1 ( 0.00%) } . . /// A map for the local crate mapping each type to a vector of its . /// inherent impls. This is not meant to be used outside of coherence; . /// rather, you should request the vector for a specific type via . /// `tcx.inherent_impls(def_id)` so as to minimize your dependencies . /// (constructing this map requires touching the entire crate). . #[derive(Clone, Debug, Default, HashStable)] . pub struct CrateInherentImpls { -- line 2285 ---------------------------------------- -- line 2288 ---------------------------------------- . . #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, TyEncodable, HashStable)] . pub struct SymbolName<'tcx> { . /// `&str` gives a consistent ordering, which ensures reproducible builds. . pub name: &'tcx str, . } . . 
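`is_doc_hidden` in the `TyCtxt` impl annotated above scans an item's attributes for `#[doc(hidden)]` by filtering down to the nested item lists of `doc(...)` attributes and then looking for a `hidden` entry. A small self-contained sketch of that scan over a simplified attribute model (the `Attr` type and its string-based items are illustrative assumptions, not rustc's `ast::Attribute`):

// Illustrative model of the attribute scan in `is_doc_hidden` above.
struct Attr {
    name: &'static str,
    items: Vec<&'static str>,
}

fn is_doc_hidden(attrs: &[Attr]) -> bool {
    attrs
        .iter()
        // Keep only the nested item lists of `doc(...)` attributes,
        // mirroring `attr.meta_item_list()` in the annotated code.
        .filter_map(|a| if a.name == "doc" { Some(&a.items) } else { None })
        // `hidden` may appear alongside other doc items.
        .any(|items| items.iter().any(|i| *i == "hidden"))
}

fn main() {
    let attrs = vec![
        Attr { name: "inline", items: vec![] },
        Attr { name: "doc", items: vec!["alias = \"foo\"", "hidden"] },
    ];
    assert!(is_doc_hidden(&attrs));
    assert!(!is_doc_hidden(&[Attr { name: "doc", items: vec![] }]));
    println!("doc(hidden) detected as expected");
}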
impl<'tcx> SymbolName<'tcx> { 6 ( 0.00%) pub fn new(tcx: TyCtxt<'tcx>, name: &str) -> SymbolName<'tcx> { . SymbolName { . name: unsafe { str::from_utf8_unchecked(tcx.arena.alloc_slice(name.as_bytes())) }, . } 8 ( 0.00%) } . } . . impl<'tcx> fmt::Display for SymbolName<'tcx> { . fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { . fmt::Display::fmt(&self.name, fmt) . } . } . -- line 2308 ---------------------------------------- 7,107,195 ( 0.12%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs -------------------------------------------------------------------------------- Ir -- line 91 ---------------------------------------- . // maximum of number bytes needed to fill an 8-byte-sized element on which . // SipHash operates. Note that for variable-sized copies which are known to be . // less than 8 bytes, this function will perform more work than necessary unless . // the compiler is able to optimize the extra work away. . #[inline] . unsafe fn copy_nonoverlapping_small(src: *const u8, dst: *mut u8, count: usize) { . debug_assert!(count <= 8); . 113,114 ( 0.00%) if count == 8 { . ptr::copy_nonoverlapping(src, dst, 8); . return; . } . . let mut i = 0; 118,070 ( 0.00%) if i + 3 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 4); . i += 4; . } . 255,842 ( 0.00%) if i + 1 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 2); 29,913 ( 0.00%) i += 2 . } . 118,070 ( 0.00%) if i < count { 81,173 ( 0.00%) *dst.add(i) = *src.add(i); . i += 1; . } . . debug_assert_eq!(i, count); . } . . // # Implementation . // -- line 124 ---------------------------------------- -- line 201 ---------------------------------------- . . hasher . } . . // A specialized write function for values with size <= 8. . #[inline] . fn short_write(&mut self, x: T) { . let size = mem::size_of::(); 1,666,862 ( 0.03%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . 11,198,653 ( 0.19%) if nbuf + size < BUFFER_SIZE { . unsafe { . // The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . } . 3,435,012 ( 0.06%) self.nbuf = nbuf + size; . . return; . } . 548,673 ( 0.01%) unsafe { self.short_write_process_buffer(x) } . } . . // A specialized write function for values with size <= 8 that should only . // be called when the write would cause the buffer to fill. . // . // SAFETY: the write of `x` into `self.buf` starting at byte offset . // `self.nbuf` must cause `self.buf` to become fully initialized (and not . // overflow) if it wasn't already. . #[inline(never)] 165,252 ( 0.00%) unsafe fn short_write_process_buffer(&mut self, x: T) { . let size = mem::size_of::(); 165,252 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size >= BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . . // Copy first part of input into end of buffer, possibly into spill . // element. The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . . // Process buffer. . 
for i in 0..BUFFER_CAPACITY { 1,652,520 ( 0.03%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 1,322,016 ( 0.02%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 1,487,268 ( 0.02%) self.state.v0 ^= elem; . } . . // Copy remaining input into start of buffer by copying size - 1 . // elements from spill (at most size - 1 bytes could have overflowed . // into the spill). The memcpy call is optimized away because the size . // is known. And the whole copy is optimized away for size == 1. . let src = self.buf.get_unchecked(BUFFER_SPILL_INDEX) as *const _ as *const u8; . ptr::copy_nonoverlapping(src, self.buf.as_mut_ptr() as *mut u8, size - 1); . . // This function should only be called when the write fills the buffer. . // Therefore, when size == 1, the new `self.nbuf` must be zero. The size . // is statically known, so the branch is optimized away. 772,100 ( 0.01%) self.nbuf = if size == 1 { 0 } else { nbuf + size - BUFFER_SIZE }; 661,008 ( 0.01%) self.processed += BUFFER_SIZE; 330,504 ( 0.01%) } . . // A write function for byte slices. . #[inline] . fn slice_write(&mut self, msg: &[u8]) { . let length = msg.len(); 17,720 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . 392,609 ( 0.01%) if nbuf + length < BUFFER_SIZE { . unsafe { . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . 139,624 ( 0.00%) if length <= 8 { . copy_nonoverlapping_small(msg.as_ptr(), dst, length); . } else { . // This memcpy is *not* optimized away. . ptr::copy_nonoverlapping(msg.as_ptr(), dst, length); . } . } . 71,167 ( 0.00%) self.nbuf = nbuf + length; . . return; . } . 62,891 ( 0.00%) unsafe { self.slice_write_process_buffer(msg) } . } . . // A write function for byte slices that should only be called when the . // write would cause the buffer to fill. . // . // SAFETY: `self.buf` must be initialized up to the byte offset `self.nbuf`, . // and `msg` must contain enough bytes to initialize the rest of the element . // containing the byte offset `self.nbuf`. . #[inline(never)] 37,910 ( 0.00%) unsafe fn slice_write_process_buffer(&mut self, msg: &[u8]) { . let length = msg.len(); 7,582 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + length >= BUFFER_SIZE); . . // Always copy first part of input into current element of buffer. . // This function should only be called when the write fills the buffer, . // so we know that there is enough input to fill the current element. 22,746 ( 0.00%) let valid_in_elem = nbuf % ELEM_SIZE; 7,582 ( 0.00%) let needed_in_elem = ELEM_SIZE - valid_in_elem; . . let src = msg.as_ptr(); . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . copy_nonoverlapping_small(src, dst, needed_in_elem); . . // Process buffer. . . // Using `nbuf / ELEM_SIZE + 1` rather than `(nbuf + needed_in_elem) / . // ELEM_SIZE` to show the compiler that this loop's upper bound is > 0. . // We know that is true, because last step ensured we have a full . // element in the buffer. 15,164 ( 0.00%) let last = nbuf / ELEM_SIZE + 1; . . for i in 0..last { 58,281 ( 0.00%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 65,863 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 116,562 ( 0.00%) self.state.v0 ^= elem; . } . . // Process the remaining element-sized chunks of input. . let mut processed = needed_in_elem; 15,164 ( 0.00%) let input_left = length - processed; 10,428 ( 0.00%) let elems_left = input_left / ELEM_SIZE; . let extra_bytes_left = input_left % ELEM_SIZE; . . 
for _ in 0..elems_left { 4,492 ( 0.00%) let elem = (msg.as_ptr().add(processed) as *const u64).read_unaligned().to_le(); 4,492 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 4,492 ( 0.00%) self.state.v0 ^= elem; 8,984 ( 0.00%) processed += ELEM_SIZE; . } . . // Copy remaining input into start of buffer. . let src = msg.as_ptr().add(processed); . let dst = self.buf.as_mut_ptr() as *mut u8; . copy_nonoverlapping_small(src, dst, extra_bytes_left); . 7,582 ( 0.00%) self.nbuf = extra_bytes_left; 37,910 ( 0.00%) self.processed += nbuf + processed; 45,492 ( 0.00%) } . . #[inline] . pub fn finish128(mut self) -> (u64, u64) { . debug_assert!(self.nbuf < BUFFER_SIZE); . . // Process full elements in buffer. 58,953 ( 0.00%) let last = self.nbuf / ELEM_SIZE; . . // Since we're consuming self, avoid updating members for a potential . // performance gain. 78,604 ( 0.00%) let mut state = self.state; . . for i in 0..last { 52,292 ( 0.00%) let elem = unsafe { self.buf.get_unchecked(i).assume_init().to_le() }; 52,292 ( 0.00%) state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut state); 52,292 ( 0.00%) state.v0 ^= elem; . } . . // Get remaining partial element. 39,302 ( 0.00%) let elem = if self.nbuf % ELEM_SIZE != 0 { . unsafe { . // Ensure element is initialized by writing zero bytes. At most . // `ELEM_SIZE - 1` are required given the above check. It's safe . // to write this many because we have the spill and we maintain . // `self.nbuf` such that this write will start before the spill. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(self.nbuf); . ptr::write_bytes(dst, 0, ELEM_SIZE - 1); 17,484 ( 0.00%) self.buf.get_unchecked(last).assume_init().to_le() . } . } else { . 0 . }; . . // Finalize the hash. 56,786 ( 0.00%) let length = self.processed + self.nbuf; 39,300 ( 0.00%) let b: u64 = ((length as u64 & 0xff) << 56) | elem; . 19,650 ( 0.00%) state.v3 ^= b; . Sip24Rounds::c_rounds(&mut state); 19,650 ( 0.00%) state.v0 ^= b; . 19,650 ( 0.00%) state.v2 ^= 0xee; . Sip24Rounds::d_rounds(&mut state); 62,440 ( 0.00%) let _0 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . 23,138 ( 0.00%) state.v1 ^= 0xdd; . Sip24Rounds::d_rounds(&mut state); 23,138 ( 0.00%) let _1 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . . (_0, _1) . } . } . . impl Hasher for SipHasher128 { . #[inline] . fn write_u8(&mut self, i: u8) { -- line 414 ---------------------------------------- -- line 471 ---------------------------------------- . } . . #[derive(Debug, Clone, Default)] . struct Sip24Rounds; . . impl Sip24Rounds { . #[inline] . fn c_rounds(state: &mut State) { 6,014,922 ( 0.10%) compress!(state); 6,497,523 ( 0.11%) compress!(state); . } . . #[inline] . fn d_rounds(state: &mut State) { 124,876 ( 0.00%) compress!(state); 124,876 ( 0.00%) compress!(state); 124,876 ( 0.00%) compress!(state); 105,227 ( 0.00%) compress!(state); . } . } 1,306,152 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/btree/search.rs -------------------------------------------------------------------------------- Ir -- line 40 ---------------------------------------- . . impl NodeRef { . /// Looks up a given key in a (sub)tree headed by the node, recursively. . /// Returns a `Found` with the handle of the matching KV, if any. Otherwise, . /// returns a `GoDown` with the handle of the leaf edge where the key belongs. . /// . 
/// The result is meaningful only if the tree is ordered by key, like the tree . /// in a `BTreeMap` is. 2,104,300 ( 0.03%) pub fn search_tree( . mut self, . key: &Q, . ) -> SearchResult . where . Q: Ord, . K: Borrow, . { . loop { . self = match self.search_node(key) { . Found(handle) => return Found(handle), 154,366 ( 0.00%) GoDown(handle) => match handle.force() { . Leaf(leaf) => return GoDown(leaf), . Internal(internal) => internal.descend(), . }, . } . } 1,107,054 ( 0.02%) } . . /// Descends to the nearest node where the edge matching the lower bound . /// of the range is different from the edge matching the upper bound, i.e., . /// the nearest node that has at least one key contained in the range. . /// . /// If found, returns an `Ok` with that node, the strictly ascending pair of . /// edge indices in the node delimiting the range, and the corresponding . /// pair of bounds for continuing the search in the child nodes, in case -- line 73 ---------------------------------------- -- line 202 ---------------------------------------- . unsafe fn find_key_index(&self, key: &Q, start_index: usize) -> IndexResult . where . Q: Ord, . K: Borrow, . { . let node = self.reborrow(); . let keys = node.keys(); . debug_assert!(start_index <= keys.len()); 2,381,478 ( 0.04%) for (offset, k) in unsafe { keys.get_unchecked(start_index..) }.iter().enumerate() { 11,301,510 ( 0.19%) match key.cmp(k.borrow()) { . Ordering::Greater => {} . Ordering::Equal => return IndexResult::KV(start_index + offset), . Ordering::Less => return IndexResult::Edge(start_index + offset), . } . } . IndexResult::Edge(keys.len()) . } . -- line 219 ---------------------------------------- 5,446,816 ( 0.09%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs -------------------------------------------------------------------------------- Ir -- line 5 ---------------------------------------- . #[stable(feature = "alloc_system_type", since = "1.28.0")] . unsafe impl GlobalAlloc for System { . #[inline] . unsafe fn alloc(&self, layout: Layout) -> *mut u8 { . // jemalloc provides alignment less than MIN_ALIGN for small allocations. . // So only rely on MIN_ALIGN if size >= align. . // Also see and . // . 7,671,216 ( 0.13%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 5,753,412 ( 0.10%) libc::malloc(layout.size()) as *mut u8 . } else { . #[cfg(target_os = "macos")] . { . if layout.align() > (1 << 31) { . return ptr::null_mut(); . } . } . aligned_malloc(&layout) . } . } . . #[inline] . unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { . // See the comment above in `alloc` for why this check looks the way it does. 135,720 ( 0.00%) if layout.align() <= MIN_ALIGN && layout.align() <= layout.size() { 203,580 ( 0.00%) libc::calloc(layout.size(), 1) as *mut u8 . } else { . let ptr = self.alloc(layout); . if !ptr.is_null() { . ptr::write_bytes(ptr, 0, layout.size()); . } . ptr . } . } . . #[inline] . unsafe fn dealloc(&self, ptr: *mut u8, _layout: Layout) { 1,951,723 ( 0.03%) libc::free(ptr as *mut libc::c_void) . } . . #[inline] . unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 { 603,856 ( 0.01%) if layout.align() <= MIN_ALIGN && layout.align() <= new_size { 1,207,712 ( 0.02%) libc::realloc(ptr as *mut libc::c_void, new_size) as *mut u8 . } else { . realloc_fallback(self, ptr, layout, new_size) . } . } . } . . cfg_if::cfg_if! { . 
if #[cfg(any( -- line 56 ---------------------------------------- -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/terminator.rs -------------------------------------------------------------------------------- Ir -- line 12 ---------------------------------------- . use rustc_span::Span; . use std::borrow::Cow; . use std::fmt::{self, Debug, Formatter, Write}; . use std::iter; . use std::slice; . . pub use super::query::*; . 14,760 ( 0.00%) #[derive(Debug, Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq, PartialOrd)] . pub struct SwitchTargets { . /// Possible values. The locations to branch to in each case . /// are found in the corresponding indices from the `targets` vector. . values: SmallVec<[u128; 1]>, . . /// Possible branch sites. The last element of this vector is used . /// for the otherwise branch, so targets.len() == values.len() + 1 . /// should hold. -- line 28 ---------------------------------------- -- line 38 ---------------------------------------- . targets: SmallVec<[BasicBlock; 2]>, . } . . impl SwitchTargets { . /// Creates switch targets from an iterator of values and target blocks. . /// . /// The iterator may be empty, in which case the `SwitchInt` instruction is equivalent to . /// `goto otherwise;`. 9,298 ( 0.00%) pub fn new(targets: impl Iterator, otherwise: BasicBlock) -> Self { 10,672 ( 0.00%) let (values, mut targets): (SmallVec<_>, SmallVec<_>) = targets.unzip(); . targets.push(otherwise); 10,672 ( 0.00%) Self { values, targets } 9,298 ( 0.00%) } . . /// Builds a switch targets definition that jumps to `then` if the tested value equals `value`, . /// and to `else_` if not. . pub fn static_if(value: u128, then: BasicBlock, else_: BasicBlock) -> Self { . Self { values: smallvec![value], targets: smallvec![then, else_] } . } . . /// Returns the fallback target that is jumped to when none of the values match the operand. 1,447 ( 0.00%) pub fn otherwise(&self) -> BasicBlock { 1,447 ( 0.00%) *self.targets.last().unwrap() 2,894 ( 0.00%) } . . /// Returns an iterator over the switch targets. . /// . /// The iterator will yield tuples containing the value and corresponding target to jump to, not . /// including the `otherwise` fallback target. . /// . /// Note that this may yield 0 elements. Only the `otherwise` branch is mandatory. 1,447 ( 0.00%) pub fn iter(&self) -> SwitchTargetsIter<'_> { 10,129 ( 0.00%) SwitchTargetsIter { inner: iter::zip(&self.values, &self.targets) } 1,447 ( 0.00%) } . . /// Returns a slice with all possible jump targets (including the fallback target). 7,132 ( 0.00%) pub fn all_targets(&self) -> &[BasicBlock] { . &self.targets 7,132 ( 0.00%) } . . pub fn all_targets_mut(&mut self) -> &mut [BasicBlock] { . &mut self.targets . } . . /// Finds the `BasicBlock` to which this `SwitchInt` will branch given the . /// specific value. This cannot fail, as it'll return the `otherwise` . /// branch if there's not a specific match for the value. -- line 84 ---------------------------------------- -- line 90 ---------------------------------------- . pub struct SwitchTargetsIter<'a> { . inner: iter::Zip, slice::Iter<'a, BasicBlock>>, . } . . impl<'a> Iterator for SwitchTargetsIter<'a> { . type Item = (u128, BasicBlock); . . fn next(&mut self) -> Option { 9,280 ( 0.00%) self.inner.next().map(|(val, bb)| (*val, *bb)) 6,087 ( 0.00%) } . . fn size_hint(&self) -> (usize, Option) { . self.inner.size_hint() . } . } . . 
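The `SwitchTargets` type annotated above stores the tested values and the branch targets in two parallel vectors, with one extra trailing target reserved for the `otherwise` edge, so `targets.len() == values.len() + 1` always holds. A compact standalone sketch of that invariant and of the value-to-target lookup it enables (plain `Vec` and `usize` stand in for `SmallVec` and `BasicBlock`):

// Illustrative stand-in for the SwitchTargets layout above:
// `targets` always holds one more entry than `values`, and the
// trailing entry is the `otherwise` fallback block.
struct SwitchTargets {
    values: Vec<u128>,
    targets: Vec<usize>, // plain usize instead of BasicBlock
}

impl SwitchTargets {
    fn new(cases: impl Iterator<Item = (u128, usize)>, otherwise: usize) -> Self {
        let (values, mut targets): (Vec<_>, Vec<_>) = cases.unzip();
        targets.push(otherwise);
        SwitchTargets { values, targets }
    }

    // The fallback is always the last target.
    fn otherwise(&self) -> usize {
        *self.targets.last().unwrap()
    }

    // Find the target for a specific value, falling back to `otherwise`.
    fn target_for_value(&self, value: u128) -> usize {
        self.values
            .iter()
            .position(|&v| v == value)
            .map(|i| self.targets[i])
            .unwrap_or_else(|| self.otherwise())
    }
}

fn main() {
    // switch x { 0 => bb1, 1 => bb2, _ => bb3 }
    let st = SwitchTargets::new([(0, 1), (1, 2)].into_iter(), 3);
    assert_eq!(st.target_for_value(0), 1);
    assert_eq!(st.target_for_value(1), 2);
    assert_eq!(st.target_for_value(7), st.otherwise());
    println!("switch dispatch behaves as expected");
}

Keeping the fallback as the final element is what lets the annotated `otherwise()` stay a single `last()` call and lets `iter()` simply zip the two vectors without special-casing the default edge.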
impl<'a> ExactSizeIterator for SwitchTargetsIter<'a> {} . 624,552 ( 0.01%) #[derive(Clone, TyEncodable, TyDecodable, Hash, HashStable, PartialEq)] . pub enum TerminatorKind<'tcx> { . /// Block should have one successor in the graph; we jump there. 68,385 ( 0.00%) Goto { target: BasicBlock }, . . /// Operand evaluates to an integer; jump depending on its value . /// to one of the targets, and otherwise fallback to `otherwise`. . SwitchInt { . /// The discriminant value being tested. . discr: Operand<'tcx>, . . /// The type of value being tested. . /// This is always the same as the type of `discr`. . /// FIXME: remove this redundant information. Currently, it is relied on by pretty-printing. 1,640 ( 0.00%) switch_ty: Ty<'tcx>, . . targets: SwitchTargets, . }, . . /// Indicates that the landing pad is finished and unwinding should . /// continue. Emitted by `build::scope::diverge_cleanup`. . Resume, . -- line 130 ---------------------------------------- -- line 136 ---------------------------------------- . /// been filled in before this executes. This can occur multiple times . /// in different basic blocks. . Return, . . /// Indicates a terminator that can never be reached. . Unreachable, . . /// Drop the `Place`. 151,938 ( 0.00%) Drop { place: Place<'tcx>, target: BasicBlock, unwind: Option }, . . /// Drop the `Place` and assign the new value over it. This ensures . /// that the assignment to `P` occurs *even if* the destructor for . /// place unwinds. Its semantics are best explained by the . /// elaboration: . /// . /// ``` . /// BB0 { -- line 152 ---------------------------------------- -- line 167 ---------------------------------------- . /// BB2 { . /// // P is now uninitialized -- its dtor panicked . /// P <- V . /// } . /// ``` . /// . /// Note that DropAndReplace is eliminated as part of the `ElaborateDrops` pass. . DropAndReplace { 6 ( 0.00%) place: Place<'tcx>, . value: Operand<'tcx>, 4 ( 0.00%) target: BasicBlock, 2 ( 0.00%) unwind: Option, . }, . . /// Block ends with a call of a function. . Call { . /// The function that’s being called. . func: Operand<'tcx>, . /// Arguments the function is called with. . /// These are owned by the callee, which is free to modify them. . /// This allows the memory occupied by "by-value" arguments to be . /// reused across function calls without duplicating the contents. 19,680 ( 0.00%) args: Vec>, . /// Destination for the return value. If some, the call is converging. . destination: Option<(Place<'tcx>, BasicBlock)>, . /// Cleanups to be done if the call unwinds. 13,120 ( 0.00%) cleanup: Option, . /// `true` if this is from a call in HIR rather than from an overloaded . /// operator. True for overloaded function call. . from_hir_call: bool, . /// This `Span` is the span of the function, without the dot and receiver . /// (e.g. `foo(a, b)` in `x.foo(a, b)` . fn_span: Span, . }, . . /// Jump to the target if the condition has the expected value, . /// otherwise panic with a message and a cleanup target. . Assert { . cond: Operand<'tcx>, 156 ( 0.00%) expected: bool, . msg: AssertMessage<'tcx>, 312 ( 0.00%) target: BasicBlock, 164 ( 0.00%) cleanup: Option, . }, . . /// A suspend point. . Yield { . /// The value to return. . value: Operand<'tcx>, . /// Where to resume to. . resume: BasicBlock, -- line 217 ---------------------------------------- -- line 223 ---------------------------------------- . . /// Indicates the end of the dropping of a generator. . GeneratorDrop, . . 
/// A block where control flow only ever takes one real path, but borrowck . /// needs to be more conservative. . FalseEdge { . /// The target normal control flow will take. 7,490 ( 0.00%) real_target: BasicBlock, . /// A block control flow could conceptually jump to, but won't in . /// practice. 11,235 ( 0.00%) imaginary_target: BasicBlock, . }, . /// A terminator for blocks that only take one path in reality, but where we . /// reserve the right to unwind in borrowck, even if it won't happen in practice. . /// This can arise in infinite loops with no function calls for example. . FalseUnwind { . /// The target normal control flow will take. 104 ( 0.00%) real_target: BasicBlock, . /// The imaginary cleanup block link. This particular path will never be taken . /// in practice, but in order to avoid fragility we want to always . /// consider it in borrowck. We don't want to accept programs which . /// pass borrowck only when `panic=abort` or some assertions are disabled . /// due to release vs. debug mode builds. This needs to be an `Option` because . /// of the `remove_noop_landing_pads` and `abort_unwinding_calls` passes. 104 ( 0.00%) unwind: Option, . }, . . /// Block ends with an inline assembly block. This is a terminator since . /// inline assembly is allowed to diverge. . InlineAsm { . /// The template for the inline assembly, with placeholders. . template: &'tcx [InlineAsmTemplatePiece], . -- line 256 ---------------------------------------- -- line 268 ---------------------------------------- . /// diverging (InlineAsmOptions::NORETURN). . destination: Option, . . /// Cleanup to be done if the inline assembly unwinds. This is present . /// if and only if InlineAsmOptions::MAY_UNWIND is set. . cleanup: Option, . }, . } 642,504 ( 0.01%) #[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable)] . pub struct Terminator<'tcx> { . pub source_info: SourceInfo, . pub kind: TerminatorKind<'tcx>, . } . . impl<'tcx> Terminator<'tcx> { 266,470 ( 0.00%) pub fn successors(&self) -> Successors<'_> { 699,095 ( 0.01%) self.kind.successors() 399,705 ( 0.01%) } . 201,300 ( 0.00%) pub fn successors_mut(&mut self) -> SuccessorsMut<'_> { 100,650 ( 0.00%) self.kind.successors_mut() 301,950 ( 0.01%) } . . pub fn unwind(&self) -> Option<&Option> { . self.kind.unwind() . } . . pub fn unwind_mut(&mut self) -> Option<&mut Option> { . self.kind.unwind_mut() 4,964 ( 0.00%) } . } . . impl<'tcx> TerminatorKind<'tcx> { 306 ( 0.00%) pub fn if_( . tcx: TyCtxt<'tcx>, . cond: Operand<'tcx>, . t: BasicBlock, . f: BasicBlock, . ) -> TerminatorKind<'tcx> { 3,060 ( 0.00%) TerminatorKind::SwitchInt { 612 ( 0.00%) discr: cond, 306 ( 0.00%) switch_ty: tcx.types.bool, . targets: SwitchTargets::static_if(0, f, t), . } 306 ( 0.00%) } . 490,648 ( 0.01%) pub fn successors(&self) -> Successors<'_> { . use self::TerminatorKind::*; 3,677,410 ( 0.06%) match *self { . Resume . | Abort . | GeneratorDrop . | Return . | Unreachable . | Call { destination: None, cleanup: None, .. } . | InlineAsm { destination: None, cleanup: None, .. } => None.into_iter().chain(&[]), 251,205 ( 0.00%) Goto { target: ref t } . | Call { destination: None, cleanup: Some(ref t), .. } . | Call { destination: Some((_, ref t)), cleanup: None, .. } . | Yield { resume: ref t, drop: None, .. } . | DropAndReplace { target: ref t, unwind: None, .. } . | Drop { target: ref t, unwind: None, .. } . | Assert { target: ref t, cleanup: None, .. } . | FalseUnwind { real_target: ref t, unwind: None } . | InlineAsm { destination: Some(ref t), cleanup: None, .. 
} -- line 332 ---------------------------------------- -- line 338 ---------------------------------------- . | DropAndReplace { target: ref t, unwind: Some(ref u), .. } . | Drop { target: ref t, unwind: Some(ref u), .. } . | Assert { target: ref t, cleanup: Some(ref u), .. } . | FalseUnwind { real_target: ref t, unwind: Some(ref u) } . | InlineAsm { destination: Some(ref t), cleanup: Some(ref u), .. } => { . Some(t).into_iter().chain(slice::from_ref(u)) . } . SwitchInt { ref targets, .. } => None.into_iter().chain(&targets.targets), 125,768 ( 0.00%) FalseEdge { ref real_target, ref imaginary_target } => { . Some(real_target).into_iter().chain(slice::from_ref(imaginary_target)) . } . } 490,648 ( 0.01%) } . 100,650 ( 0.00%) pub fn successors_mut(&mut self) -> SuccessorsMut<'_> { . use self::TerminatorKind::*; 746,066 ( 0.01%) match *self { . Resume . | Abort . | GeneratorDrop . | Return . | Unreachable . | Call { destination: None, cleanup: None, .. } . | InlineAsm { destination: None, cleanup: None, .. } => None.into_iter().chain(&mut []), 54,486 ( 0.00%) Goto { target: ref mut t } . | Call { destination: None, cleanup: Some(ref mut t), .. } . | Call { destination: Some((_, ref mut t)), cleanup: None, .. } . | Yield { resume: ref mut t, drop: None, .. } . | DropAndReplace { target: ref mut t, unwind: None, .. } . | Drop { target: ref mut t, unwind: None, .. } . | Assert { target: ref mut t, cleanup: None, .. } . | FalseUnwind { real_target: ref mut t, unwind: None } . | InlineAsm { destination: Some(ref mut t), cleanup: None, .. } -- line 370 ---------------------------------------- -- line 376 ---------------------------------------- . | DropAndReplace { target: ref mut t, unwind: Some(ref mut u), .. } . | Drop { target: ref mut t, unwind: Some(ref mut u), .. } . | Assert { target: ref mut t, cleanup: Some(ref mut u), .. } . | FalseUnwind { real_target: ref mut t, unwind: Some(ref mut u) } . | InlineAsm { destination: Some(ref mut t), cleanup: Some(ref mut u), .. } => { . Some(t).into_iter().chain(slice::from_mut(u)) . } . SwitchInt { ref mut targets, .. } => None.into_iter().chain(&mut targets.targets), 25,048 ( 0.00%) FalseEdge { ref mut real_target, ref mut imaginary_target } => { . Some(real_target).into_iter().chain(slice::from_mut(imaginary_target)) . } . } 100,650 ( 0.00%) } . . pub fn unwind(&self) -> Option<&Option> { . match *self { . TerminatorKind::Goto { .. } . | TerminatorKind::Resume . | TerminatorKind::Abort . | TerminatorKind::Return . | TerminatorKind::Unreachable -- line 396 ---------------------------------------- -- line 403 ---------------------------------------- . | TerminatorKind::DropAndReplace { ref unwind, .. } . | TerminatorKind::Drop { ref unwind, .. } . | TerminatorKind::FalseUnwind { ref unwind, .. } . | TerminatorKind::InlineAsm { cleanup: ref unwind, .. } => Some(unwind), . } . } . . pub fn unwind_mut(&mut self) -> Option<&mut Option> { 29,784 ( 0.00%) match *self { . TerminatorKind::Goto { .. } . | TerminatorKind::Resume . | TerminatorKind::Abort . | TerminatorKind::Return . | TerminatorKind::Unreachable . | TerminatorKind::GeneratorDrop . | TerminatorKind::Yield { .. } . | TerminatorKind::SwitchInt { .. } . | TerminatorKind::FalseEdge { .. } => None, 4,646 ( 0.00%) TerminatorKind::Call { cleanup: ref mut unwind, .. } . | TerminatorKind::Assert { cleanup: ref mut unwind, .. } . | TerminatorKind::DropAndReplace { ref mut unwind, .. } . | TerminatorKind::Drop { ref mut unwind, .. } . | TerminatorKind::FalseUnwind { ref mut unwind, .. } . 
| TerminatorKind::InlineAsm { cleanup: ref mut unwind, .. } => Some(unwind), . } . } . -- line 429 ---------------------------------------- 1,574,916 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/index.rs -------------------------------------------------------------------------------- Ir -- line 154 ---------------------------------------- . . #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] . unsafe impl SliceIndex<[T]> for usize { . type Output = T; . . #[inline] . fn get(self, slice: &[T]) -> Option<&T> { . // SAFETY: `self` is checked to be in bounds. 2,338,267 ( 0.04%) if self < slice.len() { unsafe { Some(&*self.get_unchecked(slice)) } } else { None } . } . . #[inline] . fn get_mut(self, slice: &mut [T]) -> Option<&mut T> { . // SAFETY: `self` is checked to be in bounds. 12,006,163 ( 0.20%) if self < slice.len() { unsafe { Some(&mut *self.get_unchecked_mut(slice)) } } else { None } . } . . #[inline] . unsafe fn get_unchecked(self, slice: *const [T]) -> *const T { . // SAFETY: the caller guarantees that `slice` is not dangling, so it . // cannot be longer than `isize::MAX`. They also guarantee that . // `self` is in bounds of `slice` so `self` cannot overflow an `isize`, . // so the call to `add` is safe. -- line 176 ---------------------------------------- -- line 181 ---------------------------------------- . unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut T { . // SAFETY: see comments for `get_unchecked` above. . unsafe { slice.as_mut_ptr().add(self) } . } . . #[inline] . fn index(self, slice: &[T]) -> &T { . // N.B., use intrinsic indexing 40,371,913 ( 0.67%) &(*slice)[self] . } . . #[inline] . fn index_mut(self, slice: &mut [T]) -> &mut T { . // N.B., use intrinsic indexing 11,307,672 ( 0.19%) &mut (*slice)[self] . } . } . . #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] . unsafe impl SliceIndex<[T]> for ops::Range { . type Output = [T]; . . #[inline] . fn get(self, slice: &[T]) -> Option<&[T]> { 28,360 ( 0.00%) if self.start > self.end || self.end > slice.len() { . None . } else { . // SAFETY: `self` is checked to be valid and in bounds above. . unsafe { Some(&*self.get_unchecked(slice)) } . } . } . . #[inline] -- line 213 ---------------------------------------- -- line 221 ---------------------------------------- . } . . #[inline] . unsafe fn get_unchecked(self, slice: *const [T]) -> *const [T] { . // SAFETY: the caller guarantees that `slice` is not dangling, so it . // cannot be longer than `isize::MAX`. They also guarantee that . // `self` is in bounds of `slice` so `self` cannot overflow an `isize`, . // so the call to `add` is safe. 1,203,181 ( 0.02%) unsafe { ptr::slice_from_raw_parts(slice.as_ptr().add(self.start), self.end - self.start) } . } . . #[inline] . unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { . // SAFETY: see comments for `get_unchecked` above. . unsafe { 84,520 ( 0.00%) ptr::slice_from_raw_parts_mut(slice.as_mut_ptr().add(self.start), self.end - self.start) . } . } . . #[inline] . fn index(self, slice: &[T]) -> &[T] { 144,789 ( 0.00%) if self.start > self.end { . slice_index_order_fail(self.start, self.end); 787,804 ( 0.01%) } else if self.end > slice.len() { . slice_end_index_len_fail(self.end, slice.len()); . } . // SAFETY: `self` is checked to be valid and in bounds above. . unsafe { &*self.get_unchecked(slice) } . } . . #[inline] . 
fn index_mut(self, slice: &mut [T]) -> &mut [T] { 25,133 ( 0.00%) if self.start > self.end { . slice_index_order_fail(self.start, self.end); 69,075 ( 0.00%) } else if self.end > slice.len() { . slice_end_index_len_fail(self.end, slice.len()); . } . // SAFETY: `self` is checked to be valid and in bounds above. . unsafe { &mut *self.get_unchecked_mut(slice) } . } . } . . #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] -- line 263 ---------------------------------------- -- line 320 ---------------------------------------- . #[inline] . unsafe fn get_unchecked_mut(self, slice: *mut [T]) -> *mut [T] { . // SAFETY: the caller has to uphold the safety contract for `get_unchecked_mut`. . unsafe { (self.start..slice.len()).get_unchecked_mut(slice) } . } . . #[inline] . fn index(self, slice: &[T]) -> &[T] { 1,436,134 ( 0.02%) if self.start > slice.len() { . slice_start_index_len_fail(self.start, slice.len()); . } . // SAFETY: `self` is checked to be valid and in bounds above. . unsafe { &*self.get_unchecked(slice) } . } . . #[inline] . fn index_mut(self, slice: &mut [T]) -> &mut [T] { 25,295 ( 0.00%) if self.start > slice.len() { . slice_start_index_len_fail(self.start, slice.len()); . } . // SAFETY: `self` is checked to be valid and in bounds above. . unsafe { &mut *self.get_unchecked_mut(slice) } . } . } . . #[stable(feature = "slice_get_slice_impls", since = "1.15.0")] -- line 345 ---------------------------------------- -- line 539 ---------------------------------------- . let end = match end { . ops::Bound::Included(end) => { . end.checked_add(1).unwrap_or_else(|| slice_end_index_overflow_fail()) . } . ops::Bound::Excluded(&end) => end, . ops::Bound::Unbounded => len, . }; . 71,990 ( 0.00%) if start > end { . slice_index_order_fail(start, end); . } 15 ( 0.00%) if end > len { . slice_end_index_len_fail(end, len); . } . . ops::Range { start, end } . } . . /// Convert pair of `ops::Bound`s into `ops::Range` without performing any bounds checking and (in debug) overflow checking . fn into_range_unchecked( -- line 558 ---------------------------------------- 9,625,960 ( 0.16%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_borrowck/src/lib.rs -------------------------------------------------------------------------------- Ir -- line 97 ---------------------------------------- . . /// If true, the capture is behind a reference. . by_ref: bool, . } . . const DEREF_PROJECTION: &[PlaceElem<'_>; 1] = &[ProjectionElem::Deref]; . . pub fn provide(providers: &mut Providers) { 4 ( 0.00%) *providers = Providers { . mir_borrowck: |tcx, did| { 835 ( 0.00%) if let Some(def) = ty::WithOptConstParam::try_lookup(did, tcx) { . tcx.mir_borrowck_const_arg(def) . } else { 3,300 ( 0.00%) mir_borrowck(tcx, ty::WithOptConstParam::unknown(did)) . } . }, . mir_borrowck_const_arg: |tcx, (did, param_did)| { . mir_borrowck(tcx, ty::WithOptConstParam { did, const_param_did: Some(param_did) }) . }, . ..*providers . }; 1 ( 0.00%) } . 8,250 ( 0.00%) fn mir_borrowck<'tcx>( . tcx: TyCtxt<'tcx>, . def: ty::WithOptConstParam, . ) -> &'tcx BorrowCheckResult<'tcx> { . let (input_body, promoted) = tcx.mir_promoted(def); . debug!("run query mir_borrowck: {}", tcx.def_path_str(def.did.to_def_id())); . 10,725 ( 0.00%) let opt_closure_req = tcx.infer_ctxt().with_opaque_type_inference(def.did).enter(|infcx| { . let input_body: &Body<'_> = &input_body.borrow(); . 
let promoted: &IndexVec<_, _> = &promoted.borrow(); 11,550 ( 0.00%) do_mir_borrowck(&infcx, input_body, promoted, false).0 3,300 ( 0.00%) }); . debug!("mir_borrowck done"); . 825 ( 0.00%) tcx.arena.alloc(opt_closure_req) 6,600 ( 0.00%) } . . /// Perform the actual borrow checking. . /// . /// If `return_body_with_facts` is true, then return the body with non-erased . /// region ids on which the borrow checking was performed together with Polonius . /// facts. 18,150 ( 0.00%) #[instrument(skip(infcx, input_body, input_promoted), level = "debug")] . fn do_mir_borrowck<'a, 'tcx>( . infcx: &InferCtxt<'a, 'tcx>, . input_body: &Body<'tcx>, . input_promoted: &IndexVec>, . return_body_with_facts: bool, . ) -> (BorrowCheckResult<'tcx>, Option>>) { 8,250 ( 0.00%) let def = input_body.source.with_opt_param().as_local().unwrap(); . . debug!(?def); . 1,650 ( 0.00%) let tcx = infcx.tcx; . let param_env = tcx.param_env(def.did); 825 ( 0.00%) let id = tcx.hir().local_def_id_to_hir_id(def.did); . 1,650 ( 0.00%) let mut local_names = IndexVec::from_elem(None, &input_body.local_decls); . for var_debug_info in &input_body.var_debug_info { 18,219 ( 0.00%) if let VarDebugInfoContents::Place(place) = var_debug_info.value { 10,560 ( 0.00%) if let Some(local) = place.as_local() { 15,798 ( 0.00%) if let Some(prev_name) = local_names[local] { . if var_debug_info.name != prev_name { . span_bug!( . var_debug_info.source_info.span, . "local {:?} has many names (`{}` vs `{}`)", . local, . prev_name, . var_debug_info.name . ); . } . } 15,798 ( 0.00%) local_names[local] = Some(var_debug_info.name); . } . } . } . . // Gather the upvars of a closure, if any. 3,300 ( 0.00%) let tables = tcx.typeck_opt_const_arg(def); 1,650 ( 0.00%) if let Some(ErrorReported) = tables.tainted_by_errors { . infcx.set_tainted_by_errors(); . } 3,300 ( 0.00%) let upvars: Vec<_> = tables . .closure_min_captures_flattened(def.did.to_def_id()) . .map(|captured_place| { . let capture = captured_place.info.capture_kind; 28 ( 0.00%) let by_ref = match capture { . ty::UpvarCapture::ByValue => false, . ty::UpvarCapture::ByRef(..) => true, . }; . Upvar { place: captured_place.clone(), by_ref } . }) . .collect(); . . // Replace all regions with fresh inference variables. This . // requires first making our own copy of the MIR. This copy will . // be modified (in place) to contain non-lexical lifetimes. It . // will have a lifetime tied to the inference context. 1,650 ( 0.00%) let mut body_owned = input_body.clone(); . let mut promoted = input_promoted.clone(); . let free_regions = 2,475 ( 0.00%) nll::replace_regions_in_mir(infcx, param_env, &mut body_owned, &mut promoted); . let body = &body_owned; // no further changes . 825 ( 0.00%) let location_table_owned = LocationTable::new(body); . let location_table = &location_table_owned; . . let mut errors_buffer = Vec::new(); 2,475 ( 0.00%) let (move_data, move_errors): (MoveData<'tcx>, Vec<(Place<'tcx>, MoveError<'tcx>)>) = 5,775 ( 0.00%) match MoveData::gather_moves(&body, tcx, param_env) { 4,950 ( 0.00%) Ok(move_data) => (move_data, Vec::new()), . Err((move_data, move_errors)) => (move_data, move_errors), . }; . let promoted_errors = promoted . .iter_enumerated() 950 ( 0.00%) .map(|(idx, body)| (idx, MoveData::gather_moves(&body, tcx, param_env))); . 4,950 ( 0.00%) let mdpe = MoveDataParamEnv { move_data, param_env }; . 6,600 ( 0.00%) let mut flow_inits = MaybeInitializedPlaces::new(tcx, &body, &mdpe) 825 ( 0.00%) .into_engine(tcx, &body) . .pass_name("borrowck") . .iterate_to_fixpoint() . 
.into_results_cursor(&body); . 9,900 ( 0.00%) let locals_are_invalidated_at_exit = tcx.hir().body_owner_kind(id).is_fn_or_closure(); . let borrow_set = 8,250 ( 0.00%) Rc::new(BorrowSet::build(tcx, body, locals_are_invalidated_at_exit, &mdpe.move_data)); . 4,125 ( 0.00%) let use_polonius = return_body_with_facts || infcx.tcx.sess.opts.debugging_opts.polonius; . . // Compute non-lexical lifetimes. . let nll::NllOutput { 2,475 ( 0.00%) regioncx, 3,300 ( 0.00%) opaque_type_values, 1,650 ( 0.00%) polonius_input, 2,475 ( 0.00%) polonius_output, 3,300 ( 0.00%) opt_closure_req, 8,250 ( 0.00%) nll_errors, 14,850 ( 0.00%) } = nll::compute_regions( . infcx, 13,200 ( 0.00%) free_regions, . body, . &promoted, . location_table, . param_env, . &mut flow_inits, . &mdpe.move_data, . &borrow_set, . &upvars, . use_polonius, . ); . . // Dump MIR results into a file, if that is enabled. This let us . // write unit-tests, as well as helping with debugging. 2,475 ( 0.00%) nll::dump_mir_results(infcx, &body, ®ioncx, &opt_closure_req); . . // We also have a `#[rustc_regions]` annotation that causes us to dump . // information. 1,650 ( 0.00%) nll::dump_annotation( . infcx, . &body, . ®ioncx, . &opt_closure_req, . &opaque_type_values, . &mut errors_buffer, . ); . . // The various `flow_*` structures can be large. We drop `flow_inits` here . // so it doesn't overlap with the others below. This reduces peak memory . // usage significantly on some benchmarks. 13,200 ( 0.00%) drop(flow_inits); . 825 ( 0.00%) let regioncx = Rc::new(regioncx); . 4,950 ( 0.00%) let flow_borrows = Borrows::new(tcx, body, ®ioncx, &borrow_set) 825 ( 0.00%) .into_engine(tcx, body) . .pass_name("borrowck") . .iterate_to_fixpoint(); 4,950 ( 0.00%) let flow_uninits = MaybeUninitializedPlaces::new(tcx, body, &mdpe) 825 ( 0.00%) .into_engine(tcx, body) . .pass_name("borrowck") . .iterate_to_fixpoint(); 4,950 ( 0.00%) let flow_ever_inits = EverInitializedPlaces::new(tcx, body, &mdpe) 825 ( 0.00%) .into_engine(tcx, body) . .pass_name("borrowck") . .iterate_to_fixpoint(); . 1,830 ( 0.00%) let movable_generator = !matches!( 5,130 ( 0.00%) tcx.hir().get(id), . Node::Expr(&hir::Expr { . kind: hir::ExprKind::Closure(.., Some(hir::Movability::Static)), . .. . }) . ); . 665 ( 0.00%) for (idx, move_data_results) in promoted_errors { . let promoted_body = &promoted[idx]; . 190 ( 0.00%) if let Err((move_data, move_errors)) = move_data_results { . let mut promoted_mbcx = MirBorrowckCtxt { . infcx, . param_env, . body: promoted_body, . move_data: &move_data, . location_table, // no need to create a real one for the promoted, it is not used . movable_generator, . fn_self_span_reported: Default::default(), -- line 307 ---------------------------------------- -- line 325 ---------------------------------------- . }; . promoted_mbcx.report_move_errors(move_errors); . errors_buffer = promoted_mbcx.errors_buffer; . }; . } . . let dominators = body.dominators(); . 37,125 ( 0.00%) let mut mbcx = MirBorrowckCtxt { . infcx, . param_env, . body, . move_data: &mdpe.move_data, . location_table, . movable_generator, . locals_are_invalidated_at_exit, . fn_self_span_reported: Default::default(), . access_place_error_reported: Default::default(), . reservation_error_reported: Default::default(), . reservation_warnings: Default::default(), . move_error_reported: BTreeMap::new(), . uninitialized_error_reported: Default::default(), 3,300 ( 0.00%) errors_buffer, 825 ( 0.00%) regioncx: Rc::clone(®ioncx), . used_mut: Default::default(), . 
used_mut_upvars: SmallVec::new(), 825 ( 0.00%) borrow_set: Rc::clone(&borrow_set), 4,950 ( 0.00%) dominators, 3,300 ( 0.00%) upvars, 3,300 ( 0.00%) local_names, . region_names: RefCell::default(), . next_region_name: RefCell::new(1), . polonius_output, . }; . . // Compute and report region errors, if any. 2,475 ( 0.00%) mbcx.report_region_errors(nll_errors); . . let results = BorrowckResults { 4,950 ( 0.00%) ever_inits: flow_ever_inits, 6,600 ( 0.00%) uninits: flow_uninits, 9,075 ( 0.00%) borrows: flow_borrows, . }; . 4,950 ( 0.00%) mbcx.report_move_errors(move_errors); . 825 ( 0.00%) rustc_mir_dataflow::visit_results( . body, 825 ( 0.00%) traversal::reverse_postorder(body).map(|(bb, _)| bb), . &results, . &mut mbcx, . ); . . // Convert any reservation warnings into lints. . let reservation_warnings = mem::take(&mut mbcx.reservation_warnings); . for (_, (place, span, location, bk, borrow)) in reservation_warnings { . let mut initial_diag = mbcx.report_conflicting_borrow(location, (place, span), bk, &borrow); -- line 381 ---------------------------------------- -- line 406 ---------------------------------------- . // For each non-user used mutable variable, check if it's been assigned from . // a user-declared local. If so, then put that local into the used_mut set. . // Note that this set is expected to be small - only upvars from closures . // would have a chance of erroneously adding non-user-defined mutable vars . // to the set. . let temporary_used_locals: FxHashSet = mbcx . .used_mut . .iter() 825 ( 0.00%) .filter(|&local| !mbcx.body.local_decls[*local].is_user_variable()) . .cloned() . .collect(); . // For the remaining unused locals that are marked as mutable, we avoid linting any that . // were never initialized. These locals may have been removed as unreachable code; or will be . // linted as unused variables. . let unused_mut_locals = 825 ( 0.00%) mbcx.body.mut_vars_iter().filter(|local| !mbcx.used_mut.contains(local)).collect(); 3,300 ( 0.00%) mbcx.gather_used_muts(temporary_used_locals, unused_mut_locals); . . debug!("mbcx.used_mut: {:?}", mbcx.used_mut); 3,300 ( 0.00%) let used_mut = mbcx.used_mut; 825 ( 0.00%) for local in mbcx.body.mut_vars_and_args_iter().filter(|local| !used_mut.contains(local)) { 51 ( 0.00%) let local_decl = &mbcx.body.local_decls[local]; 306 ( 0.00%) let lint_root = match &mbcx.body.source_scopes[local_decl.source_info.scope].local_data { . ClearCrossCrate::Set(data) => data.lint_root, . _ => continue, . }; . . // Skip over locals that begin with an underscore or have no name 153 ( 0.00%) match mbcx.local_names[local] { . Some(name) => { . if name.as_str().starts_with('_') { . continue; . } . } . None => continue, . } . -- line 442 ---------------------------------------- -- line 455 ---------------------------------------- . String::new(), . Applicability::MachineApplicable, . ) . .emit(); . }) . } . . // Buffer any move errors that we collected and de-duplicated. 7,425 ( 0.00%) for (_, (_, diag)) in mbcx.move_error_reported { . diag.buffer(&mut mbcx.errors_buffer); . } . 825 ( 0.00%) if !mbcx.errors_buffer.is_empty() { . mbcx.errors_buffer.sort_by_key(|diag| diag.sort_span); . . for diag in mbcx.errors_buffer.drain(..) { . mbcx.infcx.tcx.sess.diagnostic().emit_diagnostic(&diag); . } . } . . let result = BorrowCheckResult { 3,300 ( 0.00%) concrete_opaque_types: opaque_type_values, 4,125 ( 0.00%) closure_requirements: opt_closure_req, 4,950 ( 0.00%) used_mut_upvars: mbcx.used_mut_upvars, . }; . 
1,650 ( 0.00%) let body_with_facts = if return_body_with_facts { . let output_facts = mbcx.polonius_output.expect("Polonius output was not computed"); . Some(Box::new(BodyWithBorrowckFacts { . body: body_owned, . input_facts: *polonius_input.expect("Polonius input facts were not generated"), . output_facts, . location_table: location_table_owned, . })) . } else { . None . }; . . debug!("do_mir_borrowck: result = {:#?}", result); . 11,550 ( 0.00%) (result, body_with_facts) 9,900 ( 0.00%) } . . /// A `Body` with information computed by the borrow checker. This struct is . /// intended to be consumed by compiler consumers. . /// . /// We need to include the MIR body here because the region identifiers must . /// match the ones in the Polonius facts. . pub struct BodyWithBorrowckFacts<'tcx> { . /// A mir body that contains region identifiers. -- line 504 ---------------------------------------- -- line 608 ---------------------------------------- . // Check that: . // 1. assignments are always made to mutable locations (FIXME: does that still really go here?) . // 2. loans made in overlapping scopes do not conflict . // 3. assignments do not affect things loaned out as immutable . // 4. moves do not affect things loaned out in any way . impl<'cx, 'tcx> rustc_mir_dataflow::ResultsVisitor<'cx, 'tcx> for MirBorrowckCtxt<'cx, 'tcx> { . type FlowState = Flows<'cx, 'tcx>; . 644,814 ( 0.01%) fn visit_statement_before_primary_effect( . &mut self, . flow_state: &Flows<'cx, 'tcx>, . stmt: &'cx Statement<'tcx>, . location: Location, . ) { . debug!("MirBorrowckCtxt::process_statement({:?}, {:?}): {:?}", location, stmt, flow_state); . let span = stmt.source_info.span; . . self.check_activations(location, span, flow_state); . 358,230 ( 0.01%) match &stmt.kind { 43,330 ( 0.00%) StatementKind::Assign(box (lhs, ref rhs)) => { . self.consume_rvalue(location, (rhs, span), flow_state); . 86,660 ( 0.00%) self.mutate_place(location, (*lhs, span), Shallow(None), flow_state); . } 2,299 ( 0.00%) StatementKind::FakeRead(box (_, ref place)) => { . // Read for match doesn't access any memory and is used to . // assert that a place is safe and live. So we don't have to . // do any checks here. . // . // FIXME: Remove check that the place is initialized. This is . // needed for now because matches don't have never patterns yet. . // So this is the only place we prevent . // let x: !; . // match x {}; . // from compiling. 9,196 ( 0.00%) self.check_if_path_or_subpath_is_moved( . location, . InitializationRequiringAction::Use, 20,691 ( 0.00%) (place.as_ref(), span), . flow_state, . ); . } . StatementKind::SetDiscriminant { place, variant_index: _ } => { . self.mutate_place(location, (**place, span), Shallow(None), flow_state); . } . StatementKind::CopyNonOverlapping(box rustc_middle::mir::CopyNonOverlapping { . .. -- line 655 ---------------------------------------- -- line 663 ---------------------------------------- . | StatementKind::Coverage(..) . | StatementKind::AscribeUserType(..) . | StatementKind::Retag { .. } . | StatementKind::StorageLive(..) => { . // `Nop`, `AscribeUserType`, `Retag`, and `StorageLive` are irrelevant . // to borrow check. . } . StatementKind::StorageDead(local) => { 153,312 ( 0.00%) self.access_place( . location, 281,072 ( 0.00%) (Place::from(*local), span), . (Shallow(None), Write(WriteKind::StorageDeadOrDrop)), . LocalMutationIsAllowed::Yes, . flow_state, . ); . } . } 573,168 ( 0.01%) } . 222,944 ( 0.00%) fn visit_terminator_before_primary_effect( . &mut self, . flow_state: &Flows<'cx, 'tcx>, . 
term: &'cx Terminator<'tcx>, . loc: Location, . ) { . debug!("MirBorrowckCtxt::process_terminator({:?}, {:?}): {:?}", loc, term, flow_state); . let span = term.source_info.span; . . self.check_activations(loc, span, flow_state); . 139,340 ( 0.00%) match term.kind { 6,560 ( 0.00%) TerminatorKind::SwitchInt { ref discr, switch_ty: _, targets: _ } => { 6,560 ( 0.00%) self.consume_operand(loc, (discr, span), flow_state); . } 16,882 ( 0.00%) TerminatorKind::Drop { place, target: _, unwind: _ } => { . debug!( . "visit_terminator_drop \ . loc: {:?} term: {:?} place: {:?} span: {:?}", . loc, term, place, span . ); . 50,646 ( 0.00%) self.access_place( . loc, 75,969 ( 0.00%) (place, span), . (AccessDepth::Drop, Write(WriteKind::StorageDeadOrDrop)), . LocalMutationIsAllowed::Yes, . flow_state, . ); . } . TerminatorKind::DropAndReplace { 4 ( 0.00%) place: drop_place, 2 ( 0.00%) value: ref new_value, . target: _, . unwind: _, . } => { 32 ( 0.00%) self.mutate_place(loc, (drop_place, span), Deep, flow_state); . self.consume_operand(loc, (new_value, span), flow_state); . } . TerminatorKind::Call { 19,680 ( 0.00%) ref func, . ref args, . ref destination, . cleanup: _, . from_hir_call: _, . fn_span: _, . } => { 52,480 ( 0.00%) self.consume_operand(loc, (func, span), flow_state); . for arg in args { 57,474 ( 0.00%) self.consume_operand(loc, (arg, span), flow_state); . } 26,222 ( 0.00%) if let Some((dest, _ /*bb*/)) = *destination { 78,504 ( 0.00%) self.mutate_place(loc, (dest, span), Deep, flow_state); . } . } 912 ( 0.00%) TerminatorKind::Assert { ref cond, expected: _, ref msg, target: _, cleanup: _ } => { 152 ( 0.00%) self.consume_operand(loc, (cond, span), flow_state); . use rustc_middle::mir::AssertKind; 310 ( 0.00%) if let AssertKind::BoundsCheck { ref len, ref index } = *msg { 20 ( 0.00%) self.consume_operand(loc, (len, span), flow_state); . self.consume_operand(loc, (index, span), flow_state); . } . } . . TerminatorKind::Yield { ref value, resume: _, resume_arg, drop: _ } => { . self.consume_operand(loc, (value, span), flow_state); . self.mutate_place(loc, (resume_arg, span), Deep, flow_state); . } -- line 749 ---------------------------------------- -- line 790 ---------------------------------------- . | TerminatorKind::Resume . | TerminatorKind::Return . | TerminatorKind::GeneratorDrop . | TerminatorKind::FalseEdge { real_target: _, imaginary_target: _ } . | TerminatorKind::FalseUnwind { real_target: _, unwind: _ } => { . // no data used, thus irrelevant to borrowck . } . } 209,792 ( 0.00%) } . 195,076 ( 0.00%) fn visit_terminator_after_primary_effect( . &mut self, . flow_state: &Flows<'cx, 'tcx>, . term: &'cx Terminator<'tcx>, . loc: Location, . ) { 27,868 ( 0.00%) let span = term.source_info.span; . 111,472 ( 0.00%) match term.kind { . TerminatorKind::Yield { value: _, resume: _, resume_arg: _, drop: _ } => { . if self.movable_generator { . // Look for any active borrows to locals . let borrow_set = self.borrow_set.clone(); . for i in flow_state.borrows.iter() { . let borrow = &borrow_set[i]; . self.check_for_local_borrow(borrow, span); . } -- line 816 ---------------------------------------- -- line 817 ---------------------------------------- . } . } . . TerminatorKind::Resume | TerminatorKind::Return | TerminatorKind::GeneratorDrop => { . // Returning from the function implicitly kills storage for all locals and statics. . // Often, the storage will already have been killed by an explicit . // StorageDead, but we don't always emit those (notably on unwind paths), . 
// so this "extra check" serves as a kind of backup. 2,984 ( 0.00%) let borrow_set = self.borrow_set.clone(); . for i in flow_state.borrows.iter() { . let borrow = &borrow_set[i]; . self.check_for_invalidation_at_exit(loc, borrow, span); . } . } . . TerminatorKind::Abort . | TerminatorKind::Assert { .. } -- line 833 ---------------------------------------- -- line 836 ---------------------------------------- . | TerminatorKind::DropAndReplace { .. } . | TerminatorKind::FalseEdge { real_target: _, imaginary_target: _ } . | TerminatorKind::FalseUnwind { real_target: _, unwind: _ } . | TerminatorKind::Goto { .. } . | TerminatorKind::SwitchInt { .. } . | TerminatorKind::Unreachable . | TerminatorKind::InlineAsm { .. } => {} . } 222,944 ( 0.00%) } . } . . use self::AccessDepth::{Deep, Shallow}; . use self::ReadOrWrite::{Activation, Read, Reservation, Write}; . . #[derive(Copy, Clone, PartialEq, Eq, Debug)] . enum ArtificialField { . ArrayLength, -- line 852 ---------------------------------------- -- line 913 ---------------------------------------- . /// When checking permissions for a place access, this flag is used to indicate that an immutable . /// local place can be mutated. . // . // FIXME: @nikomatsakis suggested that this flag could be removed with the following modifications: . // - Merge `check_access_permissions()` and `check_if_reassignment_to_immutable_state()`. . // - Split `is_mutable()` into `is_assignable()` (can be directly assigned) and . // `is_declared_mutable()`. . // - Take flow state into consideration in `is_assignable()` for local variables. 23,198 ( 0.00%) #[derive(Copy, Clone, PartialEq, Eq, Debug)] . enum LocalMutationIsAllowed { . Yes, . /// We want use of immutable upvars to cause a "write to immutable upvar" . /// error, not an "reassignment" error. . ExceptUpvars, . No, . } . -- line 929 ---------------------------------------- -- line 970 ---------------------------------------- . } . . /// Checks an access to the given place to see if it is allowed. Examines the set of borrows . /// that are in scope, as well as which paths have been initialized, to ensure that (a) the . /// place is initialized and (b) it is not borrowed in some way that would prevent this . /// access. . /// . /// Returns `true` if an error is reported. 1,390,320 ( 0.02%) fn access_place( . &mut self, . location: Location, . place_span: (Place<'tcx>, Span), . kind: (AccessDepth, ReadOrWrite), . is_local_mutation_allowed: LocalMutationIsAllowed, . flow_state: &Flows<'cx, 'tcx>, . ) { . let (sd, rw) = kind; . 347,580 ( 0.01%) if let Activation(_, borrow_index) = rw { 4,818 ( 0.00%) if self.reservation_error_reported.contains(&place_span.0) { . debug!( . "skipping access_place for activation of invalid reservation \ . place: {:?} borrow_index: {:?}", . place_span.0, borrow_index . ); . return; . } . } . . // Check is_empty() first because it's the common case, and doing that . // way we avoid the clone() call. 86,895 ( 0.00%) if !self.access_place_error_reported.is_empty() . && self.access_place_error_reported.contains(&(place_span.0, place_span.1)) . { . debug!( . "access_place: suppressing error place_span=`{:?}` kind=`{:?}`", . place_span, kind . ); . return; . } . 521,370 ( 0.01%) let mutability_error = self.check_access_permissions( 347,580 ( 0.01%) place_span, . rw, . is_local_mutation_allowed, . flow_state, . location, . ); . let conflict_error = 955,845 ( 0.02%) self.check_access_for_conflict(location, place_span, sd, rw, flow_state); . 
260,685 ( 0.00%) if let (Activation(_, borrow_idx), true) = (kind.1, conflict_error) { . // Suppress this warning when there's an error being emitted for the . // same borrow: fixing the error is likely to fix the warning. . self.reservation_warnings.remove(&borrow_idx); . } . 260,685 ( 0.00%) if conflict_error || mutability_error { . debug!("access_place: logging error place_span=`{:?}` kind=`{:?}`", place_span, kind); . . self.access_place_error_reported.insert((place_span.0, place_span.1)); . } 695,160 ( 0.01%) } . . fn check_access_for_conflict( . &mut self, . location: Location, . place_span: (Place<'tcx>, Span), . sd: AccessDepth, . rw: ReadOrWrite, . flow_state: &Flows<'cx, 'tcx>, . ) -> bool { . debug!( . "check_access_for_conflict(location={:?}, place_span={:?}, sd={:?}, rw={:?})", . location, place_span, sd, rw, . ); . 86,895 ( 0.00%) let mut error_reported = false; 260,685 ( 0.00%) let tcx = self.infcx.tcx; . let body = self.body; 173,790 ( 0.00%) let borrow_set = self.borrow_set.clone(); . . // Use polonius output if it has been enabled. 173,790 ( 0.00%) let polonius_output = self.polonius_output.clone(); . let borrows_in_scope = if let Some(polonius) = &polonius_output { . let location = self.location_table.start_index(location); . Either::Left(polonius.errors_at(location).iter().copied()) . } else { . Either::Right(flow_state.borrows.iter()) . }; . 1,129,635 ( 0.02%) each_borrow_involving_path( . self, . tcx, . body, . location, 521,370 ( 0.01%) (sd, place_span.0), . &borrow_set, 521,370 ( 0.01%) borrows_in_scope, 987,523 ( 0.02%) |this, borrow_index, borrow| match (rw, borrow.kind) { . // Obviously an activation is compatible with its own . // reservation (or even prior activating uses of same . // borrow); so don't check if they interfere. . // . // NOTE: *reservations* do conflict with themselves; . // thus aren't injecting unsoundenss w/ this check.) 2,409 ( 0.00%) (Activation(_, activating), _) if activating == borrow_index => { . debug!( . "check_access_for_conflict place_span: {:?} sd: {:?} rw: {:?} \ . skipping {:?} b/c activation of same borrow_index", . place_span, . sd, . rw, . (borrow_index, borrow), . ); -- line 1084 ---------------------------------------- -- line 1179 ---------------------------------------- . } . } . Control::Break . } . }, . ); . . error_reported 260,685 ( 0.00%) } . 282,090 ( 0.00%) fn mutate_place( . &mut self, . location: Location, . place_span: (Place<'tcx>, Span), . kind: AccessDepth, . flow_state: &Flows<'cx, 'tcx>, . ) { . // Write of P[i] or *P requires P init'd. 253,881 ( 0.00%) self.check_if_assigned_path_is_moved(location, place_span, flow_state); . . // Special case: you can assign an immutable local variable . // (e.g., `x = ...`) so long as it has never been initialized . // before (at this point in the flow). 28,209 ( 0.00%) if let Some(local) = place_span.0.as_local() { 112,532 ( 0.00%) if let Mutability::Not = self.body.local_decls[local].mutability { . // check for reassignments to immutable local variables . self.check_if_reassignment_to_immutable_state( . location, local, place_span, flow_state, . ); . return; . } . } . . // Otherwise, use the normal access permission rules. 245,712 ( 0.00%) self.access_place( . location, 81,904 ( 0.00%) place_span, . (kind, Write(WriteKind::Mutate)), . LocalMutationIsAllowed::No, . flow_state, . ); 225,672 ( 0.00%) } . . fn consume_rvalue( . &mut self, . location: Location, . (rvalue, span): (&'cx Rvalue<'tcx>, Span), . flow_state: &Flows<'cx, 'tcx>, . 
) { 129,990 ( 0.00%) match *rvalue { 24,375 ( 0.00%) Rvalue::Ref(_ /*rgn*/, bk, place) => { 92,034 ( 0.00%) let access_kind = match bk { . BorrowKind::Shallow => { . (Shallow(Some(ArtificialField::ShallowBorrow)), Read(ReadKind::Borrow(bk))) . } . BorrowKind::Shared => (Deep, Read(ReadKind::Borrow(bk))), . BorrowKind::Unique | BorrowKind::Mut { .. } => { . let wk = WriteKind::MutableBorrow(bk); 5,466 ( 0.00%) if allow_two_phase_borrow(bk) { . (Deep, Reservation(wk)) . } else { . (Deep, Write(wk)) . } . } . }; . 170,625 ( 0.00%) self.access_place( . location, 32,500 ( 0.00%) (place, span), . access_kind, . LocalMutationIsAllowed::No, . flow_state, . ); . . let action = if bk == BorrowKind::Shallow { . InitializationRequiringAction::MatchOn . } else { . InitializationRequiringAction::Borrow . }; . 56,875 ( 0.00%) self.check_if_path_or_subpath_is_moved( . location, . action, 56,875 ( 0.00%) (place.as_ref(), span), . flow_state, . ); . } . 6 ( 0.00%) Rvalue::AddressOf(mutability, place) => { 2 ( 0.00%) let access_kind = match mutability { . Mutability::Mut => ( . Deep, . Write(WriteKind::MutableBorrow(BorrowKind::Mut { . allow_two_phase_borrow: false, . })), . ), . Mutability::Not => (Deep, Read(ReadKind::Borrow(BorrowKind::Shared))), . }; . 17 ( 0.00%) self.access_place( . location, 4 ( 0.00%) (place, span), . access_kind, . LocalMutationIsAllowed::No, . flow_state, . ); . 7 ( 0.00%) self.check_if_path_or_subpath_is_moved( . location, . InitializationRequiringAction::Borrow, 7 ( 0.00%) (place.as_ref(), span), . flow_state, . ); . } . . Rvalue::ThreadLocalRef(_) => {} . . Rvalue::Use(ref operand) . | Rvalue::Repeat(ref operand, _) -- line 1297 ---------------------------------------- -- line 1302 ---------------------------------------- . } . . Rvalue::Len(place) | Rvalue::Discriminant(place) => { . let af = match *rvalue { . Rvalue::Len(..) => Some(ArtificialField::ArrayLength), . Rvalue::Discriminant(..) => None, . _ => unreachable!(), . }; 21,056 ( 0.00%) self.access_place( . location, 5,264 ( 0.00%) (place, span), . (Shallow(af), Read(ReadKind::Copy)), . LocalMutationIsAllowed::No, . flow_state, . ); 9,212 ( 0.00%) self.check_if_path_or_subpath_is_moved( . location, . InitializationRequiringAction::Use, 9,212 ( 0.00%) (place.as_ref(), span), . flow_state, . ); . } . . Rvalue::BinaryOp(_bin_op, box (ref operand1, ref operand2)) . | Rvalue::CheckedBinaryOp(_bin_op, box (ref operand1, ref operand2)) => { 3,660 ( 0.00%) self.consume_operand(location, (operand1, span), flow_state); 2,562 ( 0.00%) self.consume_operand(location, (operand2, span), flow_state); . } . . Rvalue::NullaryOp(_op, _ty) => { . // nullary ops take no dynamic input; no borrowck effect. . } . . Rvalue::Aggregate(ref aggregate_kind, ref operands) => { . // We need to report back the list of mutable upvars that were . // moved into the closure and subsequently used by the closure, . // in order to populate our used_mut set. 7,914 ( 0.00%) match **aggregate_kind { . AggregateKind::Closure(def_id, _) | AggregateKind::Generator(def_id, _, _) => { . let BorrowCheckResult { used_mut_upvars, .. } = 180 ( 0.00%) self.infcx.tcx.mir_borrowck(def_id.expect_local()); . debug!("{:?} used_mut_upvars={:?}", def_id, used_mut_upvars); . for field in used_mut_upvars { 8 ( 0.00%) self.propagate_closure_used_mut_upvar(&operands[field.index()]); . } . } . AggregateKind::Adt(..) . | AggregateKind::Array(..) . | AggregateKind::Tuple { .. } => (), . } . . for operand in operands { . self.consume_operand(location, (operand, span), flow_state); . } . 
} . } . } . . fn propagate_closure_used_mut_upvar(&mut self, operand: &Operand<'tcx>) { 80 ( 0.00%) let propagate_closure_used_mut_place = |this: &mut Self, place: Place<'tcx>| { . // We have three possibilities here: . // a. We are modifying something through a mut-ref . // b. We are modifying something that is local to our parent . // c. Current body is a nested closure, and we are modifying path starting from . // a Place captured by our parent closure. . . // Handle (c), the path being modified is exactly the path captured by our parent 16 ( 0.00%) if let Some(field) = this.is_upvar_field_projection(place.as_ref()) { . this.used_mut_upvars.push(field); . return; . } . 8 ( 0.00%) for (place_ref, proj) in place.iter_projections().rev() { . // Handle (a) . if proj == ProjectionElem::Deref { . match place_ref.ty(this.body(), this.infcx.tcx).ty.kind() { . // We aren't modifying a variable directly . ty::Ref(_, _, hir::Mutability::Mut) => return, . . _ => {} . } -- line 1382 ---------------------------------------- -- line 1391 ---------------------------------------- . . // Handle(b) . this.used_mut.insert(place.local); . }; . . // This relies on the current way that by-value . // captures of a closure are copied/moved directly . // when generating MIR. 24 ( 0.00%) match *operand { . Operand::Move(place) | Operand::Copy(place) => { 8 ( 0.00%) match place.as_local() { 16 ( 0.00%) Some(local) if !self.body.local_decls[local].is_user_variable() => { . if self.body.local_decls[local].ty.is_mutable_ptr() { . // The variable will be marked as mutable by the borrow. . return; . } . // This is an edge case where we have a `move` closure . // inside a non-move closure, and the inner closure . // contains a mutation: . // -- line 1410 ---------------------------------------- -- line 1448 ---------------------------------------- . } . _ => propagate_closure_used_mut_place(self, place), . } . } . Operand::Constant(..) => {} . } . } . 334,608 ( 0.01%) fn consume_operand( . &mut self, . location: Location, . (operand, span): (&'cx Operand<'tcx>, Span), . flow_state: &Flows<'cx, 'tcx>, . ) { 148,722 ( 0.00%) match *operand { 12,420 ( 0.00%) Operand::Copy(place) => { . // copy of place: check if this is "copy of frozen path" . // (FIXME: see check_loans.rs) 24,861 ( 0.00%) self.access_place( . location, 20,700 ( 0.00%) (place, span), . (Deep, Read(ReadKind::Copy)), . LocalMutationIsAllowed::No, . flow_state, . ); . . // Finally, check if path was already moved. 21 ( 0.00%) self.check_if_path_or_subpath_is_moved( . location, . InitializationRequiringAction::Use, 18 ( 0.00%) (place.as_ref(), span), . flow_state, . ); . } 49,305 ( 0.00%) Operand::Move(place) => { . // move of place: check if this is move of already borrowed path 125,479 ( 0.00%) self.access_place( . location, 82,175 ( 0.00%) (place, span), . (Deep, Write(WriteKind::Move)), . LocalMutationIsAllowed::Yes, . flow_state, . ); . . // Finally, check if path was already moved. 13,912 ( 0.00%) self.check_if_path_or_subpath_is_moved( . location, . InitializationRequiringAction::Use, 10,434 ( 0.00%) (place.as_ref(), span), . flow_state, . ); . } . Operand::Constant(_) => {} . } 223,072 ( 0.00%) } . . /// Checks whether a borrow of this place is invalidated when the function . /// exits . fn check_for_invalidation_at_exit( . &mut self, . location: Location, . borrow: &BorrowData<'tcx>, . span: Span, . ) { . debug!("check_for_invalidation_at_exit({:?})", borrow); 9 ( 0.00%) let place = borrow.borrowed_place; . 
let mut root_place = PlaceRef { local: place.local, projection: &[] }; . . // FIXME(nll-rfc#40): do more precise destructor tracking here. For now . // we just know that all locals are dropped at function exit (otherwise . // we'll have a memory leak) and assume that all statics have a destructor. . // . // FIXME: allow thread-locals to borrow other thread locals? . . let (might_be_alive, will_be_dropped) = 63 ( 0.00%) if self.body.local_decls[root_place.local].is_ref_to_thread_local() { . // Thread-locals might be dropped after the function exits . // We have to dereference the outer reference because . // borrows don't conflict behind shared references. . root_place.projection = DEREF_PROJECTION; . (true, true) . } else { . (false, self.locals_are_invalidated_at_exit) . }; . 45 ( 0.00%) if !will_be_dropped { . debug!("place_is_invalidated_at_exit({:?}) - won't be dropped", place); . return; . } . . let sd = if might_be_alive { Deep } else { Shallow(None) }; . 108 ( 0.00%) if places_conflict::borrow_conflicts_with_place( 18 ( 0.00%) self.infcx.tcx, . &self.body, . place, . borrow.kind, 36 ( 0.00%) root_place, . sd, . places_conflict::PlaceConflictBias::Overlap, . ) { . debug!("check_for_invalidation_at_exit({:?}): INVALID", place); . // FIXME: should be talking about the region lifetime instead . // of just a span here. . let span = self.infcx.tcx.sess.source_map().end_point(span); . self.report_borrowed_value_does_not_live_long_enough( -- line 1553 ---------------------------------------- -- line 1573 ---------------------------------------- . err.buffer(&mut self.errors_buffer); . } . } . . fn check_activations(&mut self, location: Location, span: Span, flow_state: &Flows<'cx, 'tcx>) { . // Two-phase borrow support: For each activation that is newly . // generated at this statement, check if it interferes with . // another borrow. 199,028 ( 0.00%) let borrow_set = self.borrow_set.clone(); 372,597 ( 0.01%) for &borrow_index in borrow_set.activations_at_location(location) { . let borrow = &borrow_set[borrow_index]; . . // only mutable borrows should be 2-phase 12,045 ( 0.00%) assert!(match borrow.kind { . BorrowKind::Shared | BorrowKind::Shallow => false, . BorrowKind::Unique | BorrowKind::Mut { .. } => true, . }); . 28,908 ( 0.00%) self.access_place( . location, 14,454 ( 0.00%) (borrow.borrowed_place, span), 21,681 ( 0.00%) (Deep, Activation(WriteKind::MutableBorrow(borrow.kind), borrow_index)), . LocalMutationIsAllowed::No, . flow_state, . ); . // We do not need to call `check_if_path_or_subpath_is_moved` . // again, as we already called it when we made the . // initial reservation. . } . } -- line 1602 ---------------------------------------- -- line 1606 ---------------------------------------- . location: Location, . local: Local, . place_span: (Place<'tcx>, Span), . flow_state: &Flows<'cx, 'tcx>, . ) { . debug!("check_if_reassignment_to_immutable_state({:?})", local); . . // Check if any of the initializiations of `local` have happened yet: 46,398 ( 0.00%) if let Some(init_index) = self.is_local_ever_initialized(local, flow_state) { . // And, if so, report an error. . let init = &self.move_data.inits[init_index]; . let span = init.span(&self.body); . self.report_illegal_reassignment(location, place_span, span, place_span.0); . } . } . 421,083 ( 0.01%) fn check_if_full_path_is_moved( . &mut self, . location: Location, . desired_action: InitializationRequiringAction, . place_span: (PlaceRef<'tcx>, Span), . flow_state: &Flows<'cx, 'tcx>, . ) { . 
let maybe_uninits = &flow_state.uninits; . -- line 1630 ---------------------------------------- -- line 1660 ---------------------------------------- . // Therefore, if we seek out the *closest* prefix for which we . // have a MovePath, that should capture the initialization . // state for the place scenario. . // . // This code covers scenarios 1, 2, and 3. . . debug!("check_if_full_path_is_moved place: {:?}", place_span.0); . let (prefix, mpi) = self.move_path_closest_to(place_span.0); 32,391 ( 0.00%) if maybe_uninits.contains(mpi) { . self.report_use_of_moved_or_uninitialized( . location, . desired_action, . (prefix, place_span.0, place_span.1), . mpi, . ); . } // Only query longest prefix with a MovePath, not further . // ancestors; dataflow recurs on children when parents . // move (to support partial (re)inits). . // . // (I.e., querying parents breaks scenario 7; but may want . // to do such a query based on partial-init feature-gate.) 259,128 ( 0.00%) } . . /// Subslices correspond to multiple move paths, so we iterate through the . /// elements of the base array. For each element we check . /// . /// * Does this element overlap with our slice. . /// * Is any part of it uninitialized. . fn check_if_subslice_element_is_moved( . &mut self, -- line 1689 ---------------------------------------- -- line 1719 ---------------------------------------- . return; // don't bother finding other problems. . } . } . } . } . } . } . 355,476 ( 0.01%) fn check_if_path_or_subpath_is_moved( . &mut self, . location: Location, . desired_action: InitializationRequiringAction, . place_span: (PlaceRef<'tcx>, Span), . flow_state: &Flows<'cx, 'tcx>, . ) { 64,632 ( 0.00%) let maybe_uninits = &flow_state.uninits; . . // Bad scenarios: . // . // 1. Move of `a.b.c`, use of `a` or `a.b` . // partial initialization support, one might have `a.x` . // initialized but not `a.b`. . // 2. All bad scenarios from `check_if_full_path_is_moved` . // -- line 1742 ---------------------------------------- -- line 1743 ---------------------------------------- . // OK scenarios: . // . // 3. Move of `a.b.c`, use of `a.b.d` . // 4. Uninitialized `a.x`, initialized `a.b`, use of `a.b` . // 5. Copied `(a.b: &_)`, use of `*(a.b).c`; note that `a.b` . // must have been initialized for the use to be sound. . // 6. Move of `a.b.c` then reinit of `a.b.c.d`, use of `a.b.c.d` . 226,212 ( 0.00%) self.check_if_full_path_is_moved(location, desired_action, place_span, flow_state); . 43,076 ( 0.00%) if let Some((place_base, ProjectionElem::Subslice { from, to, from_end: false })) = 32,316 ( 0.00%) place_span.0.last_projection() . { . let place_ty = place_base.ty(self.body(), self.infcx.tcx); . if let ty::Array(..) = place_ty.ty.kind() { . self.check_if_subslice_element_is_moved( . location, . desired_action, . (place_base, place_span.1), . maybe_uninits, -- line 1762 ---------------------------------------- -- line 1772 ---------------------------------------- . // . // (Distinct from handling of scenarios 1+2+4 above because . // `place` does not interfere with suffixes of its prefixes, . // e.g., `a.b.c` does not interfere with `a.b.d`) . // . // This code covers scenario 1. . . debug!("check_if_path_or_subpath_is_moved place: {:?}", place_span.0); 129,264 ( 0.00%) if let Some(mpi) = self.move_path_for_place(place_span.0) { 92,156 ( 0.00%) let uninit_mpi = self . .move_data . .find_in_move_path_or_its_descendants(mpi, |mpi| maybe_uninits.contains(mpi)); . 46,078 ( 0.00%) if let Some(uninit_mpi) = uninit_mpi { . 
self.report_use_of_moved_or_uninitialized( . location, . desired_action, . (place_span.0, place_span.0, place_span.1), . uninit_mpi, . ); . return; // don't bother finding other problems. . } . } 258,528 ( 0.00%) } . . /// Currently MoveData does not store entries for all places in . /// the input MIR. For example it will currently filter out . /// places that are Copy; thus we do not track places of shared . /// reference type. This routine will walk up a place along its . /// prefixes, searching for a foundational place that *is* . /// tracked in the MoveData. . /// . /// An Err result includes a tag indicated why the search failed. . /// Currently this can only occur if the place is built off of a . /// static variable, as we do not track those in the MoveData. . fn move_path_closest_to(&mut self, place: PlaceRef<'tcx>) -> (PlaceRef<'tcx>, MovePathIndex) { 421,083 ( 0.01%) match self.move_data.rev_lookup.find(place) { . LookupResult::Parent(Some(mpi)) | LookupResult::Exact(mpi) => { . (self.move_data.move_paths[mpi].place.as_ref(), mpi) . } . LookupResult::Parent(None) => panic!("should have move path for every Local"), . } . } . . fn move_path_for_place(&mut self, place: PlaceRef<'tcx>) -> Option { . // If returns None, then there is no move path corresponding . // to a direct owner of `place` (which means there is nothing . // that borrowck tracks for its analysis). . 291,452 ( 0.00%) match self.move_data.rev_lookup.find(place) { . LookupResult::Parent(_) => None, . LookupResult::Exact(mpi) => Some(mpi), . } . } . . fn check_if_assigned_path_is_moved( . &mut self, . location: Location, . (place, span): (Place<'tcx>, Span), . flow_state: &Flows<'cx, 'tcx>, . ) { . debug!("check_if_assigned_path_is_moved place: {:?}", place); . . // None case => assigning to `x` does not require `x` be initialized. 76 ( 0.00%) for (place_base, elem) in place.iter_projections().rev() { 604 ( 0.00%) match elem { . ProjectionElem::Index(_/*operand*/) | . ProjectionElem::ConstantIndex { .. } | . // assigning to P[i] requires P to be valid. . ProjectionElem::Downcast(_/*adt_def*/, _/*variant_idx*/) => . // assigning to (P->variant) is okay if assigning to `P` is okay . // . // FIXME: is this true even if P is an adt with a dtor? . { } . . // assigning to (*P) requires P to be initialized . ProjectionElem::Deref => { 525 ( 0.00%) self.check_if_full_path_is_moved( . location, InitializationRequiringAction::Use, 450 ( 0.00%) (place_base, span), flow_state); . // (base initialized; no need to . // recur further) . break; . } . . ProjectionElem::Subslice { .. } => { . panic!("we don't allow assignments to subslices, location: {:?}", . location); . } . . ProjectionElem::Field(..) => { . // if type of `P` has a dtor, then . // assigning to `P.f` requires `P` itself . // be already initialized . let tcx = self.infcx.tcx; 76 ( 0.00%) let base_ty = place_base.ty(self.body(), tcx).ty; 244 ( 0.00%) match base_ty.kind() { 432 ( 0.00%) ty::Adt(def, _) if def.has_dtor(tcx) => { . self.check_if_path_or_subpath_is_moved( . location, InitializationRequiringAction::Assignment, . (place_base, span), flow_state); . . // (base initialized; no need to . // recur further) . break; . } -- line 1877 ---------------------------------------- -- line 1931 ---------------------------------------- . . // Shallow so that we'll stop at any dereference; we'll . // report errors about issues with such bases elsewhere. . let maybe_uninits = &flow_state.uninits; . . // Find the shortest uninitialized prefix you can reach . 
// without going over a Deref. . let mut shortest_uninit_seen = None; 1,616 ( 0.00%) for prefix in this.prefixes(base, PrefixSet::Shallow) { 84 ( 0.00%) let mpi = match this.move_path_for_place(prefix) { . Some(mpi) => mpi, . None => continue, . }; . 4 ( 0.00%) if maybe_uninits.contains(mpi) { . debug!( . "check_parent_of_field updating shortest_uninit_seen from {:?} to {:?}", . shortest_uninit_seen, . Some((prefix, mpi)) . ); . shortest_uninit_seen = Some((prefix, mpi)); . } else { . debug!("check_parent_of_field {:?} is definitely initialized", (prefix, mpi)); . } . } . 216 ( 0.00%) if let Some((prefix, mpi)) = shortest_uninit_seen { . // Check for a reassignment into an uninitialized field of a union (for example, . // after a move out). In this case, do not report an error here. There is an . // exception, if this is the first assignment into the union (that is, there is . // no move out from an earlier location) then this is an attempt at initialization . // of the union - we should error in that case. . let tcx = this.infcx.tcx; . if base.ty(this.body(), tcx).ty.is_union() { . if this.move_data.path_map[mpi].iter().any(|moi| { -- line 1965 ---------------------------------------- -- line 1979 ---------------------------------------- . } . } . . /// Checks the permissions for the given place and read or write kind . /// . /// Returns `true` if an error is reported. . fn check_access_permissions( . &mut self, 173,790 ( 0.00%) (place, span): (Place<'tcx>, Span), . kind: ReadOrWrite, . is_local_mutation_allowed: LocalMutationIsAllowed, . flow_state: &Flows<'cx, 'tcx>, . location: Location, . ) -> bool { . debug!( . "check_access_permissions({:?}, {:?}, is_local_mutation_allowed: {:?})", . place, kind, is_local_mutation_allowed . ); . . let error_access; . let the_place_err; . 1,355,567 ( 0.02%) match kind { . Reservation(WriteKind::MutableBorrow( . borrow_kind @ (BorrowKind::Unique | BorrowKind::Mut { .. }), . )) . | Write(WriteKind::MutableBorrow( . borrow_kind @ (BorrowKind::Unique | BorrowKind::Mut { .. }), . )) => { 27,326 ( 0.00%) let is_local_mutation_allowed = match borrow_kind { . BorrowKind::Unique => LocalMutationIsAllowed::Yes, . BorrowKind::Mut { .. } => is_local_mutation_allowed, . BorrowKind::Shared | BorrowKind::Shallow => unreachable!(), . }; 19,131 ( 0.00%) match self.is_mutable(place.as_ref(), is_local_mutation_allowed) { . Ok(root_place) => { . self.add_used_mut(root_place, flow_state); . return false; . } . Err(place_err) => { . error_access = AccessKind::MutableBorrow; . the_place_err = place_err; . } . } . } . Reservation(WriteKind::Mutate) | Write(WriteKind::Mutate) => { 143,332 ( 0.00%) match self.is_mutable(place.as_ref(), is_local_mutation_allowed) { . Ok(root_place) => { . self.add_used_mut(root_place, flow_state); . return false; . } . Err(place_err) => { . error_access = AccessKind::Mutate; . the_place_err = place_err; . } -- line 2033 ---------------------------------------- -- line 2041 ---------------------------------------- . | WriteKind::MutableBorrow(BorrowKind::Shallow), . ) . | Write( . WriteKind::Move . | WriteKind::StorageDeadOrDrop . | WriteKind::MutableBorrow(BorrowKind::Shared) . | WriteKind::MutableBorrow(BorrowKind::Shallow), . ) => { 100,856 ( 0.00%) if let (Err(_), true) = ( 352,996 ( 0.01%) self.is_mutable(place.as_ref(), is_local_mutation_allowed), . self.errors_buffer.is_empty(), . ) { . // rust-lang/rust#46908: In pure NLL mode this code path should be . // unreachable, but we use `delay_span_bug` because we can hit this when . 
// dereferencing a non-Copy raw pointer *and* have `-Ztreat-err-as-bug` . // enabled. We don't want to ICE for that case, as other errors will have . // been emitted (#52262). . self.infcx.tcx.sess.delay_span_bug( -- line 2058 ---------------------------------------- -- line 2094 ---------------------------------------- . if previously_initialized { . self.report_mutability_error(place, span, the_place_err, error_access, location); . true . } else { . false . } . } . 142,550 ( 0.00%) fn is_local_ever_initialized( . &self, . local: Local, . flow_state: &Flows<'cx, 'tcx>, . ) -> Option { 85,530 ( 0.00%) let mpi = self.move_data.rev_lookup.find_local(local); 28,510 ( 0.00%) let ii = &self.move_data.init_path_map[mpi]; 1,177,930 ( 0.02%) for &index in ii { 2,184,800 ( 0.04%) if flow_state.ever_inits.contains(index) { . return Some(index); . } . } . None 114,040 ( 0.00%) } . . /// Adds the place into the used mutable variables set 92,836 ( 0.00%) fn add_used_mut(&mut self, root_place: RootPlace<'tcx>, flow_state: &Flows<'cx, 'tcx>) { 22 ( 0.00%) match root_place { 69,627 ( 0.00%) RootPlace { place_local: local, place_projection: [], is_local_mutation_allowed } => { . // If the local may have been initialized, and it is now currently being . // mutated, then it is justified to be annotated with the `mut` . // keyword, since the mutation may be a possible reassignment. 43,975 ( 0.00%) if is_local_mutation_allowed != LocalMutationIsAllowed::Yes 62,331 ( 0.00%) && self.is_local_ever_initialized(local, flow_state).is_some() . { . self.used_mut.insert(local); . } . } . RootPlace { . place_local: _, . place_projection: _, . is_local_mutation_allowed: LocalMutationIsAllowed::Yes, . } => {} . RootPlace { 9 ( 0.00%) place_local, 9 ( 0.00%) place_projection: place_projection @ [.., _], . is_local_mutation_allowed: _, . } => { 18 ( 0.00%) if let Some(field) = self.is_upvar_field_projection(PlaceRef { . local: place_local, . projection: place_projection, . }) { 8 ( 0.00%) self.used_mut_upvars.push(field); . } . } . } 90,468 ( 0.00%) } . . /// Whether this value can be written or borrowed mutably. . /// Returns the root place if the place passed in is a projection. 869,176 ( 0.01%) fn is_mutable( . &self, . place: PlaceRef<'tcx>, . is_local_mutation_allowed: LocalMutationIsAllowed, . ) -> Result, PlaceRef<'tcx>> { . debug!("is_mutable: place={:?}, is_local...={:?}", place, is_local_mutation_allowed); 247,808 ( 0.00%) match place.last_projection() { . None => { 73,636 ( 0.00%) let local = &self.body.local_decls[place.local]; 220,908 ( 0.00%) match local.mutability { 29,846 ( 0.00%) Mutability::Not => match is_local_mutation_allowed { 89,538 ( 0.00%) LocalMutationIsAllowed::Yes => Ok(RootPlace { . place_local: place.local, . place_projection: place.projection, . is_local_mutation_allowed: LocalMutationIsAllowed::Yes, . }), . LocalMutationIsAllowed::ExceptUpvars => Ok(RootPlace { . place_local: place.local, . place_projection: place.projection, . is_local_mutation_allowed: LocalMutationIsAllowed::ExceptUpvars, . }), . LocalMutationIsAllowed::No => Err(place), . }, 117,426 ( 0.00%) Mutability::Mut => Ok(RootPlace { . place_local: place.local, . place_projection: place.projection, . is_local_mutation_allowed, . }), . } . } . Some((place_base, elem)) => { 16,140 ( 0.00%) match elem { . ProjectionElem::Deref => { 4,878 ( 0.00%) let base_ty = place_base.ty(self.body(), self.infcx.tcx).ty; . . // Check the kind of deref to decide 14,631 ( 0.00%) match base_ty.kind() { . 
ty::Ref(_, _, mutbl) => { 4,866 ( 0.00%) match mutbl { . // Shared borrowed data is never mutable . hir::Mutability::Not => Err(place), . // Mutably borrowed data is mutable, but only if we have a . // unique path to the `&mut` . hir::Mutability::Mut => { 4,866 ( 0.00%) let mode = match self.is_upvar_field_projection(place) { 10 ( 0.00%) Some(field) if self.upvars[field.index()].by_ref => { . is_local_mutation_allowed . } . _ => LocalMutationIsAllowed::Yes, . }; . 14,598 ( 0.00%) self.is_mutable(place_base, mode) . } . } . } . ty::RawPtr(tnm) => { 2 ( 0.00%) match tnm.mutbl { . // `*const` raw pointers are not mutable . hir::Mutability::Not => Err(place), . // `*mut` raw pointers are always mutable, regardless of . // context. The users have to check by themselves. . hir::Mutability::Mut => Ok(RootPlace { . place_local: place.local, . place_projection: place.projection, . is_local_mutation_allowed, . }), . } . } . // `Box` owns its content, so mutable if its location is mutable 5 ( 0.00%) _ if base_ty.is_box() => { . self.is_mutable(place_base, is_local_mutation_allowed) . } . // Deref should only be for reference, pointers or boxes . _ => bug!("Deref of unexpected type: {:?}", base_ty), . } . } . // All other projections are owned by their base path, so mutable if . // base path is mutable . ProjectionElem::Field(..) . | ProjectionElem::Index(..) . | ProjectionElem::ConstantIndex { .. } . | ProjectionElem::Subslice { .. } . | ProjectionElem::Downcast(..) => { . let upvar_field_projection = self.is_upvar_field_projection(place); 5,882 ( 0.00%) if let Some(field) = upvar_field_projection { . let upvar = &self.upvars[field.index()]; . debug!( . "is_mutable: upvar.mutability={:?} local_mutation_is_allowed={:?} \ . place={:?}, place_base={:?}", . upvar, is_local_mutation_allowed, place, place_base . ); 50 ( 0.00%) match (upvar.place.mutability, is_local_mutation_allowed) { . ( . Mutability::Not, . LocalMutationIsAllowed::No . | LocalMutationIsAllowed::ExceptUpvars, . ) => Err(place), . (Mutability::Not, LocalMutationIsAllowed::Yes) . | (Mutability::Mut, _) => { . // Subtle: this is an upvar -- line 2251 ---------------------------------------- -- line 2270 ---------------------------------------- . // fn main() { . // let var = Vec::new(); . // foo(move || { . // var.push(1); . // }); . // } . // ``` . let _ = 90 ( 0.00%) self.is_mutable(place_base, is_local_mutation_allowed)?; . Ok(RootPlace { . place_local: place.local, . place_projection: place.projection, . is_local_mutation_allowed, . }) . } . } . } else { . self.is_mutable(place_base, is_local_mutation_allowed) . } . } . } . } . } 632,128 ( 0.01%) } . . /// If `place` is a field projection, and the field is being projected from a closure type, . /// then returns the index of the field being projected. Note that this closure will always . /// be `self` in the current MIR, because that is the only time we directly access the fields . /// of a closure type. . fn is_upvar_field_projection(&self, place_ref: PlaceRef<'tcx>) -> Option { 48,526 ( 0.00%) path_utils::is_upvar_field_projection(self.infcx.tcx, &self.upvars, place_ref, self.body()) . } . } . . /// The degree of overlap between 2 places for borrow-checking. . enum Overlap { . /// The places might partially overlap - in this case, we give . /// up and say that they might conflict. This occurs when . /// different fields of a union are borrowed. 
For example, -- line 2308 ---------------------------------------- 2,653,598 ( 0.04%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_privacy/src/lib.rs -------------------------------------------------------------------------------- Ir -- line 59 ---------------------------------------- . &mut self, . def_id: DefId, . kind: &str, . descr: &dyn fmt::Display, . ) -> ControlFlow; . . /// Not overridden, but used to actually visit types and traits. . fn skeleton(&mut self) -> DefIdVisitorSkeleton<'_, 'tcx, Self> { 702,870 ( 0.01%) DefIdVisitorSkeleton { . def_id_visitor: self, . visited_opaque_tys: Default::default(), . dummy: Default::default(), . } . } 4,776 ( 0.00%) fn visit(&mut self, ty_fragment: impl TypeFoldable<'tcx>) -> ControlFlow { . ty_fragment.visit_with(&mut self.skeleton()) 7,960 ( 0.00%) } 2,052 ( 0.00%) fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> ControlFlow { . self.skeleton().visit_trait(trait_ref) 2,052 ( 0.00%) } . fn visit_projection_ty( . &mut self, . projection: ty::ProjectionTy<'tcx>, . ) -> ControlFlow { 40 ( 0.00%) self.skeleton().visit_projection_ty(projection) . } . fn visit_predicates( . &mut self, . predicates: ty::GenericPredicates<'tcx>, . ) -> ControlFlow { 4,469 ( 0.00%) self.skeleton().visit_predicates(predicates) . } . } . . struct DefIdVisitorSkeleton<'v, 'tcx, V: ?Sized> { . def_id_visitor: &'v mut V, . visited_opaque_tys: FxHashSet, . dummy: PhantomData>, . } . . impl<'tcx, V> DefIdVisitorSkeleton<'_, 'tcx, V> . where . V: DefIdVisitor<'tcx> + ?Sized, . { . fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> ControlFlow { 342 ( 0.00%) let TraitRef { def_id, substs } = trait_ref; 95,662 ( 0.00%) self.def_id_visitor.visit_def_id(def_id, "trait", &trait_ref.print_only_trait_path())?; 342 ( 0.00%) if self.def_id_visitor.shallow() { ControlFlow::CONTINUE } else { substs.visit_with(self) } . } . 36,240 ( 0.00%) fn visit_projection_ty( . &mut self, . projection: ty::ProjectionTy<'tcx>, . ) -> ControlFlow { 28,992 ( 0.00%) let (trait_ref, assoc_substs) = 7,248 ( 0.00%) projection.trait_ref_and_own_substs(self.def_id_visitor.tcx()); . self.visit_trait(trait_ref)?; . if self.def_id_visitor.shallow() { . ControlFlow::CONTINUE . } else { . assoc_substs.iter().try_for_each(|subst| subst.visit_with(self)) . } 32,616 ( 0.00%) } . . fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ControlFlow { 2,171 ( 0.00%) match predicate.kind().skip_binder() { . ty::PredicateKind::Trait(ty::TraitPredicate { . trait_ref, . constness: _, . polarity: _, . }) => self.visit_trait(trait_ref), . ty::PredicateKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => { 70 ( 0.00%) term.visit_with(self)?; 1,767 ( 0.00%) self.visit_projection_ty(projection_ty) . } . ty::PredicateKind::TypeOutlives(ty::OutlivesPredicate(ty, _region)) => { . ty.visit_with(self) . } . ty::PredicateKind::RegionOutlives(..) => ControlFlow::CONTINUE, . ty::PredicateKind::ConstEvaluatable(uv) . if self.def_id_visitor.tcx().features().generic_const_exprs => . { -- line 140 ---------------------------------------- -- line 157 ---------------------------------------- . ACNode::Leaf(leaf) => self.visit_const(leaf), . ACNode::Cast(_, _, ty) => self.visit_ty(ty), . ACNode::Binop(..) | ACNode::UnaryOp(..) | ACNode::FunctionCall(_, _) => { . ControlFlow::CONTINUE . } . }) . } . 31,437 ( 0.00%) fn visit_predicates( . &mut self, . 
predicates: ty::GenericPredicates<'tcx>, . ) -> ControlFlow { . let ty::GenericPredicates { parent: _, predicates } = predicates; . predicates.iter().try_for_each(|&(predicate, _span)| self.visit_predicate(predicate)) 35,928 ( 0.00%) } . } . . impl<'tcx, V> TypeVisitor<'tcx> for DefIdVisitorSkeleton<'_, 'tcx, V> . where . V: DefIdVisitor<'tcx> + ?Sized, . { . type BreakTy = V::BreakTy; . 2,060,959 ( 0.03%) fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow { 229,100 ( 0.00%) let tcx = self.def_id_visitor.tcx(); . // InternalSubsts are not visited here because they are visited below in `super_visit_with`. 1,137,468 ( 0.02%) match *ty.kind() { 360,939 ( 0.01%) ty::Adt(&ty::AdtDef { did: def_id, .. }, ..) . | ty::Foreign(def_id) . | ty::FnDef(def_id, ..) . | ty::Closure(def_id, ..) . | ty::Generator(def_id, ..) => { 1,682,661 ( 0.03%) self.def_id_visitor.visit_def_id(def_id, "type", &ty)?; . if self.def_id_visitor.shallow() { . return ControlFlow::CONTINUE; . } . // Default type visitor doesn't visit signatures of fn types. . // Something like `fn() -> Priv {my_func}` is considered a private type even if . // `my_func` is public, so we need to visit signatures. 373,586 ( 0.01%) if let ty::FnDef(..) = ty.kind() { . tcx.fn_sig(def_id).visit_with(self)?; . } . // Inherent static methods don't have self type in substs. . // Something like `fn() {my_method}` type of the method . // `impl Pub { pub fn my_method() {} }` is considered a private type, . // so we need to visit the self type additionally. 543,728 ( 0.01%) if let Some(assoc_item) = tcx.opt_associated_item(def_id) { 29,112 ( 0.00%) if let ty::ImplContainer(impl_def_id) = assoc_item.container { 6,746 ( 0.00%) tcx.type_of(impl_def_id).visit_with(self)?; . } . } . } 6,362 ( 0.00%) ty::Projection(proj) => { . if self.def_id_visitor.skip_assoc_tys() { . // Visitors searching for minimal visibility/reachability want to . // conservatively approximate associated types like `::Alias` . // as visible/reachable even if both `Type` and `Trait` are private. . // Ideally, associated types should be substituted in the same way as . // free type aliases, but this isn't done yet. . return ControlFlow::CONTINUE; . } . // This will also visit substs if necessary, so we don't need to recurse. 31,573 ( 0.00%) return self.visit_projection_ty(proj); . } 722 ( 0.00%) ty::Dynamic(predicates, ..) => { . // All traits in the list are considered the "primary" part of the type . // and are visited by shallow visitors. 5,776 ( 0.00%) for predicate in predicates { 4,332 ( 0.00%) let trait_ref = match predicate.skip_binder() { 4,332 ( 0.00%) ty::ExistentialPredicate::Trait(trait_ref) => trait_ref, . ty::ExistentialPredicate::Projection(proj) => proj.trait_ref(tcx), . ty::ExistentialPredicate::AutoTrait(def_id) => { . ty::ExistentialTraitRef { def_id, substs: InternalSubsts::empty() } . } . }; 1,444 ( 0.00%) let ty::ExistentialTraitRef { def_id, substs: _ } = trait_ref; 8,664 ( 0.00%) self.def_id_visitor.visit_def_id(def_id, "trait", &trait_ref)?; . } . } 4,230 ( 0.00%) ty::Opaque(def_id, ..) => { . // Skip repeated `Opaque`s to avoid infinite recursion. 4,230 ( 0.00%) if self.visited_opaque_tys.insert(def_id) { . // The intent is to treat `impl Trait1 + Trait2` identically to . // `dyn Trait1 + Trait2`. Therefore we ignore def-id of the opaque type itself . // (it either has no visibility, or its visibility is insignificant, like . // visibilities of type aliases) and recurse into bounds instead to go . 
// through the trait list (default type visitor doesn't visit those traits). . // All traits in the list are considered the "primary" part of the type . // and are visited by shallow visitors. 44 ( 0.00%) self.visit_predicates(ty::GenericPredicates { . parent: None, . predicates: tcx.explicit_item_bounds(def_id), . })?; . } . } . // These types don't have their own def-ids (but may have subcomponents . // with def-ids that should be visited recursively). . ty::Bool -- line 254 ---------------------------------------- -- line 270 ---------------------------------------- . ty::Bound(..) | ty::Placeholder(..) | ty::Infer(..) => { . bug!("unexpected type: {:?}", ty) . } . } . . if self.def_id_visitor.shallow() { . ControlFlow::CONTINUE . } else { 644,835 ( 0.01%) ty.super_visit_with(self) . } 1,990,689 ( 0.03%) } . . fn visit_const(&mut self, c: &'tcx Const<'tcx>) -> ControlFlow { 2,922 ( 0.00%) self.visit_ty(c.ty)?; 487 ( 0.00%) let tcx = self.def_id_visitor.tcx(); 3,409 ( 0.00%) if let Ok(Some(ct)) = AbstractConst::from_const(tcx, c) { . self.visit_abstract_const_expr(tcx, ct)?; . } . ControlFlow::CONTINUE . } . } . . fn min(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'_>) -> ty::Visibility { 1,468 ( 0.00%) if vis1.is_at_least(vis2, tcx) { vis2 } else { vis1 } . } . . //////////////////////////////////////////////////////////////////////////////// . /// Visitor used to determine if pub(restricted) is used anywhere in the crate. . /// . /// This is done so that `private_in_public` warnings can be turned into hard errors . /// in crates that have been updated to use pub(restricted). . //////////////////////////////////////////////////////////////////////////////// -- line 301 ---------------------------------------- -- line 303 ---------------------------------------- . tcx: TyCtxt<'tcx>, . has_pub_restricted: bool, . } . . impl<'tcx> Visitor<'tcx> for PubRestrictedVisitor<'tcx> { . type NestedFilter = nested_filter::All; . . fn nested_visit_map(&mut self) -> Self::Map { 2,606 ( 0.00%) self.tcx.hir() . } . fn visit_vis(&mut self, vis: &'tcx hir::Visibility<'tcx>) { 6,969 ( 0.00%) self.has_pub_restricted = self.has_pub_restricted || vis.node.is_pub_restricted(); . } . } . . //////////////////////////////////////////////////////////////////////////////// . /// Visitor used to determine impl visibility and reachability. . //////////////////////////////////////////////////////////////////////////////// . . struct FindMin<'a, 'tcx, VL: VisibilityLike> { -- line 322 ---------------------------------------- -- line 336 ---------------------------------------- . true . } . fn visit_def_id( . &mut self, . def_id: DefId, . _kind: &str, . _descr: &dyn fmt::Display, . ) -> ControlFlow { 13,704 ( 0.00%) self.min = VL::new_min(self, def_id); . ControlFlow::CONTINUE . } . } . . trait VisibilityLike: Sized { . const MAX: Self; . const SHALLOW: bool = false; . fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self; . . // Returns an over-approximation (`skip_assoc_tys` = true) of visibility due to . // associated types for which we can't determine visibility precisely. 11,144 ( 0.00%) fn of_impl(def_id: LocalDefId, tcx: TyCtxt<'_>, access_levels: &AccessLevels) -> Self { 5,174 ( 0.00%) let mut find = FindMin { tcx, access_levels, min: Self::MAX }; 1,592 ( 0.00%) find.visit(tcx.type_of(def_id)); 4,776 ( 0.00%) if let Some(trait_ref) = tcx.impl_trait_ref(def_id) { 342 ( 0.00%) find.visit_trait(trait_ref); . } 1,132 ( 0.00%) find.min 12,736 ( 0.00%) } . } . 
impl VisibilityLike for ty::Visibility { . const MAX: Self = ty::Visibility::Public; 7,665 ( 0.00%) fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self { 3,285 ( 0.00%) min(find.tcx.visibility(def_id), find.min, find.tcx) 10,950 ( 0.00%) } . } . impl VisibilityLike for Option { . const MAX: Self = Some(AccessLevel::Public); . // Type inference is very smart sometimes. . // It can make an impl reachable even some components of its type or trait are unreachable. . // E.g. methods of `impl ReachableTrait for ReachableTy { ... }` . // can be usable from other crates (#57264). So we skip substs when calculating reachability . // and consider an impl reachable if its "shallow" type and trait are reachable. . // . // The assumption we make here is that type-inference won't let you use an impl without knowing . // both "shallow" version of its self type and "shallow" version of its trait if it exists . // (which require reaching the `DefId`s in them). . const SHALLOW: bool = true; 6,633 ( 0.00%) fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self { . cmp::min( 6,633 ( 0.00%) if let Some(def_id) = def_id.as_local() { 2,634 ( 0.00%) find.access_levels.map.get(&def_id).copied() . } else { . Self::MAX . }, 5,092 ( 0.00%) find.min, . ) 8,844 ( 0.00%) } . } . . //////////////////////////////////////////////////////////////////////////////// . /// The embargo visitor, used to determine the exports of the AST. . //////////////////////////////////////////////////////////////////////////////// . . struct EmbargoVisitor<'tcx> { . tcx: TyCtxt<'tcx>, -- line 400 ---------------------------------------- -- line 422 ---------------------------------------- . . struct ReachEverythingInTheInterfaceVisitor<'a, 'tcx> { . access_level: Option, . item_def_id: LocalDefId, . ev: &'a mut EmbargoVisitor<'tcx>, . } . . impl<'tcx> EmbargoVisitor<'tcx> { 50,330 ( 0.00%) fn get(&self, def_id: LocalDefId) -> Option { . self.access_levels.map.get(&def_id).copied() 50,330 ( 0.00%) } . . fn update_with_hir_id( . &mut self, . hir_id: hir::HirId, . level: Option, . ) -> Option { 23,835 ( 0.00%) let def_id = self.tcx.hir().local_def_id(hir_id); . self.update(def_id, level) . } . . /// Updates node level and returns the updated level. . fn update(&mut self, def_id: LocalDefId, level: Option) -> Option { 55,301 ( 0.00%) let old_level = self.get(def_id); . // Accessibility levels can only grow. . if level > old_level { . self.access_levels.map.insert(def_id, level.unwrap()); 1,022 ( 0.00%) self.changed = true; . level . } else { . old_level . } . } . . fn reach( . &mut self, -- line 457 ---------------------------------------- -- line 458 ---------------------------------------- . def_id: LocalDefId, . access_level: Option, . ) -> ReachEverythingInTheInterfaceVisitor<'_, 'tcx> { . ReachEverythingInTheInterfaceVisitor { . access_level: cmp::min(access_level, Some(AccessLevel::Reachable)), . item_def_id: def_id, . ev: self, . } 15,857 ( 0.00%) } . . // We have to make sure that the items that macros might reference . // are reachable, since they might be exported transitively. . fn update_reachability_from_macro(&mut self, local_def_id: LocalDefId, md: &MacroDef) { . // Non-opaque macros cannot make other items more accessible than they already are. . 9 ( 0.00%) let hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id); 27 ( 0.00%) let attrs = self.tcx.hir().attrs(hir_id); 63 ( 0.00%) if attr::find_transparency(attrs, md.macro_rules).0 != Transparency::Opaque { . return; . } . . 
let item_def_id = local_def_id.to_def_id(); . let macro_module_def_id = . ty::DefIdTree::parent(self.tcx, item_def_id).unwrap().expect_local(); . if self.tcx.hir().opt_def_kind(macro_module_def_id) != Some(DefKind::Mod) { . // The macro's parent doesn't correspond to a `mod`, return early (#63164, #65252). -- line 483 ---------------------------------------- -- line 630 ---------------------------------------- . } . . impl<'tcx> Visitor<'tcx> for EmbargoVisitor<'tcx> { . type NestedFilter = nested_filter::All; . . /// We want to visit items in the context of their containing . /// module and so forth, so supply a crate for doing a deep walk. . fn nested_visit_map(&mut self) -> Self::Map { 7,818 ( 0.00%) self.tcx.hir() . } . 25,623 ( 0.00%) fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 5,694 ( 0.00%) let item_level = match item.kind { . hir::ItemKind::Impl { .. } => { . let impl_level = 7,164 ( 0.00%) Option::::of_impl(item.def_id, self.tcx, &self.access_levels); 1,194 ( 0.00%) self.update(item.def_id, impl_level) . } 6,612 ( 0.00%) _ => self.get(item.def_id), . }; . . // Update levels of nested things. 14,597 ( 0.00%) match item.kind { . hir::ItemKind::Enum(ref def, _) => { 75 ( 0.00%) for variant in def.variants { 6,210 ( 0.00%) let variant_level = self.update_with_hir_id(variant.id, item_level); 8,280 ( 0.00%) if let Some(ctor_hir_id) = variant.data.ctor_hir_id() { . self.update_with_hir_id(ctor_hir_id, item_level); . } 4,140 ( 0.00%) for field in variant.data.fields() { 2,503 ( 0.00%) self.update_with_hir_id(field.hir_id, variant_level); . } . } . } . hir::ItemKind::Impl(ref impl_) => { 3,582 ( 0.00%) for impl_item_ref in impl_.items { 8,967 ( 0.00%) if impl_.of_trait.is_some() 1,974 ( 0.00%) || self.tcx.visibility(impl_item_ref.id.def_id) == ty::Visibility::Public . { 4,146 ( 0.00%) self.update(impl_item_ref.id.def_id, item_level); . } . } . } 45 ( 0.00%) hir::ItemKind::Trait(.., trait_item_refs) => { . for trait_item_ref in trait_item_refs { 144 ( 0.00%) self.update(trait_item_ref.id.def_id, item_level); . } . } . hir::ItemKind::Struct(ref def, _) | hir::ItemKind::Union(ref def, _) => { 1,020 ( 0.00%) if let Some(ctor_hir_id) = def.ctor_hir_id() { . self.update_with_hir_id(ctor_hir_id, item_level); . } 510 ( 0.00%) for field in def.fields() { 2,760 ( 0.00%) if field.vis.node.is_pub() { 756 ( 0.00%) self.update_with_hir_id(field.hir_id, item_level); . } . } . } . hir::ItemKind::Macro(ref macro_def) => { 18 ( 0.00%) self.update_reachability_from_macro(item.def_id, macro_def); . } . hir::ItemKind::ForeignMod { items, .. } => { . for foreign_item in items { . if self.tcx.visibility(foreign_item.id.def_id) == ty::Visibility::Public { . self.update(foreign_item.id.def_id, item_level); . } . } . } -- line 697 ---------------------------------------- -- line 704 ---------------------------------------- . | hir::ItemKind::TyAlias(..) . | hir::ItemKind::Mod(..) . | hir::ItemKind::TraitAlias(..) . | hir::ItemKind::Fn(..) . | hir::ItemKind::ExternCrate(..) => {} . } . . // Mark all items in interfaces of reachable items as reachable. 14,235 ( 0.00%) match item.kind { . // The interface is empty. . hir::ItemKind::Macro(..) | hir::ItemKind::ExternCrate(..) => {} . // All nested items are checked by `visit_item`. . hir::ItemKind::Mod(..) => {} . // Handled in the access level of in rustc_resolve . hir::ItemKind::Use(..) => {} . // The interface is empty. . hir::ItemKind::GlobalAsm(..) => {} . hir::ItemKind::OpaqueTy(..) => { . 
// HACK(jynelson): trying to infer the type of `impl trait` breaks `async-std` (and `pub async fn` in general) . // Since rustdoc never needs to do codegen and doesn't care about link-time reachability, . // mark this as unreachable. . // See https://github.com/rust-lang/rust/issues/75100 60 ( 0.00%) if !self.tcx.sess.opts.actually_rustdoc { . // FIXME: This is some serious pessimization intended to workaround deficiencies . // in the reachability pass (`middle/reachable.rs`). Types are marked as link-time . // reachable if they are returned via `impl Trait`, even from private functions. . let exist_level = . cmp::max(item_level, Some(AccessLevel::ReachableFromImplTrait)); 45 ( 0.00%) self.reach(item.def_id, exist_level).generics().predicates().ty(); . } . } . // Visit everything. . hir::ItemKind::Const(..) . | hir::ItemKind::Static(..) . | hir::ItemKind::Fn(..) . | hir::ItemKind::TyAlias(..) => { 138 ( 0.00%) if item_level.is_some() { 18 ( 0.00%) self.reach(item.def_id, item_level).generics().predicates().ty(); . } . } 28 ( 0.00%) hir::ItemKind::Trait(.., trait_item_refs) => { 15 ( 0.00%) if item_level.is_some() { 112 ( 0.00%) self.reach(item.def_id, item_level).generics().predicates(); . . for trait_item_ref in trait_item_refs { 144 ( 0.00%) let mut reach = self.reach(trait_item_ref.id.def_id, item_level); 288 ( 0.00%) reach.generics().predicates(); . 90 ( 0.00%) if trait_item_ref.kind == AssocItemKind::Type 12 ( 0.00%) && !trait_item_ref.defaultness.has_value() . { . // No type to visit. . } else { 132 ( 0.00%) reach.ty(); . } . } . } . } . hir::ItemKind::TraitAlias(..) => { . if item_level.is_some() { . self.reach(item.def_id, item_level).generics().predicates(); . } . } . // Visit everything except for private impl items. . hir::ItemKind::Impl(ref impl_) => { 1,194 ( 0.00%) if item_level.is_some() { 10,820 ( 0.00%) self.reach(item.def_id, item_level).generics().predicates().ty().trait_ref(); . 1,082 ( 0.00%) for impl_item_ref in impl_.items { 6,558 ( 0.00%) let impl_item_level = self.get(impl_item_ref.id.def_id); 7,514 ( 0.00%) if impl_item_level.is_some() { 19,400 ( 0.00%) self.reach(impl_item_ref.id.def_id, impl_item_level) . .generics() . .predicates() . .ty(); . } . } . } . } . . // Visit everything, but enum variants have their own levels. . hir::ItemKind::Enum(ref def, _) => { 75 ( 0.00%) if item_level.is_some() { 472 ( 0.00%) self.reach(item.def_id, item_level).generics().predicates(); . } 75 ( 0.00%) for variant in def.variants { 18,630 ( 0.00%) let variant_level = self.get(self.tcx.hir().local_def_id(variant.id)); 2,070 ( 0.00%) if variant_level.is_some() { 3,914 ( 0.00%) for field in variant.data.fields() { 7,245 ( 0.00%) self.reach(self.tcx.hir().local_def_id(field.hir_id), variant_level) . .ty(); . } . // Corner case: if the variant is reachable, but its . // enum is not, make the enum reachable as well. 3,914 ( 0.00%) self.update(item.def_id, variant_level); . } . } . } . // Visit everything, but foreign items have their own levels. . hir::ItemKind::ForeignMod { items, .. } => { . for foreign_item in items { . let foreign_item_level = self.get(foreign_item.id.def_id); . if foreign_item_level.is_some() { -- line 806 ---------------------------------------- -- line 808 ---------------------------------------- . .generics() . .predicates() . .ty(); . } . } . } . // Visit everything except for private fields. . 
hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => { 255 ( 0.00%) if item_level.is_some() { 1,776 ( 0.00%) self.reach(item.def_id, item_level).generics().predicates(); 444 ( 0.00%) for field in struct_def.fields() { 3,801 ( 0.00%) let def_id = self.tcx.hir().local_def_id(field.hir_id); 1,629 ( 0.00%) let field_level = self.get(def_id); 543 ( 0.00%) if field_level.is_some() { 945 ( 0.00%) self.reach(def_id, field_level).ty(); . } . } . } . } . } . . let orig_level = mem::replace(&mut self.prev_level, item_level); 8,541 ( 0.00%) intravisit::walk_item(self, item); 2,847 ( 0.00%) self.prev_level = orig_level; 22,776 ( 0.00%) } . . fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) { . // Blocks can have public items, for example impls, but they always . // start as completely private regardless of publicity of a function, . // constant, type, field, etc., in which this block resides. . let orig_level = mem::replace(&mut self.prev_level, None); . intravisit::walk_block(self, b); . self.prev_level = orig_level; . } . } . . impl ReachEverythingInTheInterfaceVisitor<'_, '_> { 23,891 ( 0.00%) fn generics(&mut self) -> &mut Self { 13,652 ( 0.00%) for param in &self.ev.tcx.generics_of(self.item_def_id).params { 8,497 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => {} 134 ( 0.00%) GenericParamDefKind::Type { has_default, .. } => { 134 ( 0.00%) if has_default { 12 ( 0.00%) self.visit(self.ev.tcx.type_of(param.def_id)); . } . } . GenericParamDefKind::Const { has_default, .. } => { . self.visit(self.ev.tcx.type_of(param.def_id)); . if has_default { . self.visit(self.ev.tcx.const_param_default(param.def_id)); . } . } . } . } . self 30,717 ( 0.00%) } . 17,065 ( 0.00%) fn predicates(&mut self) -> &mut Self { 10,239 ( 0.00%) self.visit_predicates(self.ev.tcx.predicates_of(self.item_def_id)); . self 20,478 ( 0.00%) } . 16,505 ( 0.00%) fn ty(&mut self) -> &mut Self { 10,708 ( 0.00%) self.visit(self.ev.tcx.type_of(self.item_def_id)); . self 19,806 ( 0.00%) } . . fn trait_ref(&mut self) -> &mut Self { 6,492 ( 0.00%) if let Some(trait_ref) = self.ev.tcx.impl_trait_ref(self.item_def_id) { . self.visit_trait(trait_ref); . } . self . } . } . . impl<'tcx> DefIdVisitor<'tcx> for ReachEverythingInTheInterfaceVisitor<'_, 'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { 52,940 ( 0.00%) self.ev.tcx . } 60,335 ( 0.00%) fn visit_def_id( . &mut self, . def_id: DefId, . _kind: &str, . _descr: &dyn fmt::Display, . ) -> ControlFlow { 36,201 ( 0.00%) if let Some(def_id) = def_id.as_local() { 38,442 ( 0.00%) if let (ty::Visibility::Public, _) | (_, Some(AccessLevel::ReachableFromImplTrait)) = 9,634 ( 0.00%) (self.tcx().visibility(def_id.to_def_id()), self.access_level) . { . self.ev.update(def_id, self.access_level); . } . } . ControlFlow::CONTINUE 84,469 ( 0.00%) } . } . . ////////////////////////////////////////////////////////////////////////////////////// . /// Name privacy visitor, checks privacy and reports violations. . /// Most of name privacy checks are performed during the main resolution phase, . /// or later in type checking when field accesses and associated items are resolved. . /// This pass performs remaining checks for fields in struct expressions and patterns. . ////////////////////////////////////////////////////////////////////////////////////// -- line 909 ---------------------------------------- -- line 920 ---------------------------------------- . /// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies). . #[track_caller] . 
fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> { . self.maybe_typeck_results . .expect("`NamePrivacyVisitor::typeck_results` called outside of body") . } . . // Checks that a field in a struct constructor (expression or pattern) is accessible. 40,270 ( 0.00%) fn check_field( . &mut self, . use_ctxt: Span, // syntax context of the field name at the use site . span: Span, // span of the field pattern, e.g., `x: 0` . def: &'tcx ty::AdtDef, // definition of the struct or enum . field: &'tcx ty::FieldDef, . in_update_syntax: bool, . ) { 8,054 ( 0.00%) if def.is_enum() { . return; . } . . // definition of the field . let ident = Ident::new(kw::Empty, use_ctxt); 1,400 ( 0.00%) let hir_id = self.tcx.hir().local_def_id_to_hir_id(self.current_item); 4,900 ( 0.00%) let def_id = self.tcx.adjust_ident_and_get_scope(ident, def.did, hir_id).1; 700 ( 0.00%) if !field.vis.is_accessible_from(def_id, self.tcx) { . let label = if in_update_syntax { . format!("field `{}` is private", field.name) . } else { . "private field".to_string() . }; . . struct_span_err!( . self.tcx.sess, -- line 952 ---------------------------------------- -- line 955 ---------------------------------------- . "field `{}` of {} `{}` is private", . field.name, . def.variant_descr(), . self.tcx.def_path_str(def.did) . ) . .span_label(span, label) . .emit(); . } 32,216 ( 0.00%) } . } . . impl<'tcx> Visitor<'tcx> for NamePrivacyVisitor<'tcx> { . type NestedFilter = nested_filter::All; . . /// We want to visit items in the context of their containing . /// module and so forth, so supply a crate for doing a deep walk. . fn nested_visit_map(&mut self) -> Self::Map { 1,749 ( 0.00%) self.tcx.hir() . } . . fn visit_mod(&mut self, _m: &'tcx hir::Mod<'tcx>, _s: Span, _n: hir::HirId) { . // Don't visit nested modules, since we run a separate visitor walk . // for each module in `privacy_access_levels` . } . . fn visit_nested_body(&mut self, body: hir::BodyId) { . let old_maybe_typeck_results = 4,232 ( 0.00%) self.maybe_typeck_results.replace(self.tcx.typeck_body(body)); 4,950 ( 0.00%) let body = self.tcx.hir().body(body); . self.visit_body(body); 131 ( 0.00%) self.maybe_typeck_results = old_maybe_typeck_results; . } . . fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 949 ( 0.00%) let orig_current_item = mem::replace(&mut self.current_item, item.def_id); 2,815 ( 0.00%) intravisit::walk_item(self, item); 949 ( 0.00%) self.current_item = orig_current_item; . } . 318,096 ( 0.01%) fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) { 70,688 ( 0.00%) if let hir::ExprKind::Struct(qpath, fields, ref base) = expr.kind { 3,282 ( 0.00%) let res = self.typeck_results().qpath_res(qpath, expr.hir_id); 1,641 ( 0.00%) let adt = self.typeck_results().expr_ty(expr).ty_adt_def().unwrap(); 4,923 ( 0.00%) let variant = adt.variant_of_res(res); 2,188 ( 0.00%) if let Some(base) = *base { . // If the expression uses FRU we need to make sure all the unmentioned fields . // are checked for privacy (RFC 736). Rather than computing the set of . // unmentioned fields, just check them all. . for (vf_index, variant_field) in variant.fields.iter().enumerate() { . let field = fields.iter().find(|f| { . self.tcx.field_index(f.hir_id, self.typeck_results()) == vf_index . }); . let (use_ctxt, span) = match field { . Some(field) => (field.ident.span, field.span), . None => (base.span, base.span), . }; . self.check_field(use_ctxt, span, adt, variant_field, true); . } . } else { . 
for field in fields { 890 ( 0.00%) let use_ctxt = field.ident.span; 7,180 ( 0.00%) let index = self.tcx.field_index(field.hir_id, self.typeck_results()); 9,790 ( 0.00%) self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false); . } . } . } . 353,440 ( 0.01%) intravisit::walk_expr(self, expr); . } . 108,200 ( 0.00%) fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) { 29,985 ( 0.00%) if let PatKind::Struct(ref qpath, fields, _) = pat.kind { 20,545 ( 0.00%) let res = self.typeck_results().qpath_res(qpath, pat.hir_id); 11,740 ( 0.00%) let adt = self.typeck_results().pat_ty(pat).ty_adt_def().unwrap(); 23,480 ( 0.00%) let variant = adt.variant_of_res(res); . for field in fields { 3,137 ( 0.00%) let use_ctxt = field.ident.span; 24,322 ( 0.00%) let index = self.tcx.field_index(field.hir_id, self.typeck_results()); 31,370 ( 0.00%) self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false); . } . } . 121,725 ( 0.00%) intravisit::walk_pat(self, pat); . } . } . . //////////////////////////////////////////////////////////////////////////////////////////// . /// Type privacy visitor, checks types for privacy and reports violations. . /// Both explicitly written types and inferred types of expressions and patterns are checked. . /// Checks are performed on "semantic" types regardless of names and their hygiene. . //////////////////////////////////////////////////////////////////////////////////////////// -- line 1045 ---------------------------------------- -- line 1061 ---------------------------------------- . .expect("`TypePrivacyVisitor::typeck_results` called outside of body") . } . . fn item_is_accessible(&self, did: DefId) -> bool { . self.tcx.visibility(did).is_accessible_from(self.current_item.to_def_id(), self.tcx) . } . . // Take node-id of an expression or pattern and check its type for privacy. 360,682 ( 0.01%) fn check_expr_pat_type(&mut self, id: hir::HirId, span: Span) -> bool { 51,526 ( 0.00%) self.span = span; 51,526 ( 0.00%) let typeck_results = self.typeck_results(); . let result: ControlFlow<()> = try { 206,104 ( 0.00%) self.visit(typeck_results.node_type(id))?; 412,208 ( 0.01%) self.visit(typeck_results.node_substs(id))?; 463,734 ( 0.01%) if let Some(adjustments) = typeck_results.adjustments().get(id) { . adjustments.iter().try_for_each(|adjustment| self.visit(adjustment.target))?; . } . }; . result.is_break() 412,208 ( 0.01%) } . . fn check_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool { 236,572 ( 0.00%) let is_error = !self.item_is_accessible(def_id); . if is_error { . self.tcx . .sess . .struct_span_err(self.span, &format!("{} `{}` is private", kind, descr)) . .span_label(self.span, &format!("private {}", kind)) . .emit(); . } . is_error -- line 1091 ---------------------------------------- -- line 1093 ---------------------------------------- . } . . impl<'tcx> Visitor<'tcx> for TypePrivacyVisitor<'tcx> { . type NestedFilter = nested_filter::All; . . /// We want to visit items in the context of their containing . /// module and so forth, so supply a crate for doing a deep walk. . fn nested_visit_map(&mut self) -> Self::Map { 1,749 ( 0.00%) self.tcx.hir() . } . . fn visit_mod(&mut self, _m: &'tcx hir::Mod<'tcx>, _s: Span, _n: hir::HirId) { . // Don't visit nested modules, since we run a separate visitor walk . // for each module in `privacy_access_levels` . } . 8,250 ( 0.00%) fn visit_nested_body(&mut self, body: hir::BodyId) { . 
let old_maybe_typeck_results = 2,475 ( 0.00%) self.maybe_typeck_results.replace(self.tcx.typeck_body(body)); 4,950 ( 0.00%) let body = self.tcx.hir().body(body); . self.visit_body(body); 825 ( 0.00%) self.maybe_typeck_results = old_maybe_typeck_results; 6,600 ( 0.00%) } . . fn visit_generic_arg(&mut self, generic_arg: &'tcx hir::GenericArg<'tcx>) { 20,644 ( 0.00%) match generic_arg { 2,492 ( 0.00%) hir::GenericArg::Type(t) => self.visit_ty(t), . hir::GenericArg::Infer(inf) => self.visit_infer(inf), . hir::GenericArg::Lifetime(_) | hir::GenericArg::Const(_) => {} . } . } . 46,774 ( 0.00%) fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) { 13,364 ( 0.00%) self.span = hir_ty.span; 20,046 ( 0.00%) if let Some(typeck_results) = self.maybe_typeck_results { . // Types in bodies. 6,393 ( 0.00%) if self.visit(typeck_results.node_type(hir_ty.hir_id)).is_break() { . return; . } . } else { . // Types in signatures. . // FIXME: This is very ineffective. Ideally each HIR type should be converted . // into a semantic type only once and the result should be cached somehow. 13,653 ( 0.00%) if self.visit(rustc_typeck::hir_ty_to_ty(self.tcx, hir_ty)).is_break() { . return; . } . } . 20,046 ( 0.00%) intravisit::walk_ty(self, hir_ty); 40,092 ( 0.00%) } . . fn visit_infer(&mut self, inf: &'tcx hir::InferArg) { . self.span = inf.span; . if let Some(typeck_results) = self.maybe_typeck_results { . if let Some(ty) = typeck_results.node_type_opt(inf.hir_id) { . if self.visit(ty).is_break() { . return; . } -- line 1150 ---------------------------------------- -- line 1152 ---------------------------------------- . // We don't do anything for const infers here. . } . } else { . bug!("visit_infer without typeck_results"); . } . intravisit::walk_inf(self, inf); . } . 3,344 ( 0.00%) fn visit_trait_ref(&mut self, trait_ref: &'tcx hir::TraitRef<'tcx>) { 1,254 ( 0.00%) self.span = trait_ref.path.span; 418 ( 0.00%) if self.maybe_typeck_results.is_none() { . // Avoid calling `hir_trait_to_predicates` in bodies, it will ICE. . // The traits' privacy in bodies is already checked as a part of trait object types. 418 ( 0.00%) let bounds = rustc_typeck::hir_trait_to_predicates( 418 ( 0.00%) self.tcx, . trait_ref, . // NOTE: This isn't really right, but the actual type doesn't matter here. It's . // just required by `ty::TraitRef`. 1,254 ( 0.00%) self.tcx.types.never, . ); . 3,762 ( 0.00%) for (trait_predicate, _, _) in bounds.trait_bounds { 2,926 ( 0.00%) if self.visit_trait(trait_predicate.skip_binder()).is_break() { . return; . } . } . 2,578 ( 0.00%) for (poly_predicate, _) in bounds.projection_bounds { 60 ( 0.00%) let pred = poly_predicate.skip_binder(); . let poly_pred_term = self.visit(pred.term); 40 ( 0.00%) if poly_pred_term.is_break() . || self.visit_projection_ty(pred.projection_ty).is_break() . { . return; . } . } . } . . intravisit::walk_trait_ref(self, trait_ref); 3,344 ( 0.00%) } . . // Check types of expressions 318,096 ( 0.01%) fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) { 318,096 ( 0.01%) if self.check_expr_pat_type(expr.hir_id, expr.span) { . // Do not check nested expressions if the error already happened. . return; . } 236,886 ( 0.00%) match expr.kind { 1,567 ( 0.00%) hir::ExprKind::Assign(_, rhs, _) | hir::ExprKind::Match(rhs, ..) => { . // Do not report duplicate errors for `x = y` and `match x { ... }`. 10,577 ( 0.00%) if self.check_expr_pat_type(rhs.hir_id, rhs.span) { . return; . } . } 1,903 ( 0.00%) hir::ExprKind::MethodCall(segment, ..) => { . // Method calls have to be checked specially. 
3,806 ( 0.00%) self.span = segment.ident.span; 11,418 ( 0.00%) if let Some(def_id) = self.typeck_results().type_dependent_def_id(expr.hir_id) { 5,709 ( 0.00%) if self.visit(self.tcx.type_of(def_id)).is_break() { . return; . } . } else { . self.tcx . .sess . .delay_span_bug(expr.span, "no type-dependent def for method call"); . } . } . _ => {} . } . 106,032 ( 0.00%) intravisit::walk_expr(self, expr); 282,752 ( 0.00%) } . . // Prohibit access to associated items with insufficient nominal visibility. . // . // Additionally, until better reachability analysis for macros 2.0 is available, . // we prohibit access to private statics from other crates, this allows to give . // more code internal visibility at link time. (Access to private functions . // is already prohibited by type privacy for function types.) 253,090 ( 0.00%) fn visit_qpath(&mut self, qpath: &'tcx hir::QPath<'tcx>, id: hir::HirId, span: Span) { 50,618 ( 0.00%) let def = match qpath { 56,889 ( 0.00%) hir::QPath::Resolved(_, path) => match path.res { 53,370 ( 0.00%) Res::Def(kind, def_id) => Some((kind, def_id)), . _ => None, . }, 6,346 ( 0.00%) hir::QPath::TypeRelative(..) | hir::QPath::LangItem(..) => self . .maybe_typeck_results 56,349 ( 0.00%) .and_then(|typeck_results| typeck_results.type_dependent_def(id)), . }; . let def = def.filter(|(kind, _)| { 30,312 ( 0.00%) matches!( . kind, . DefKind::AssocFn | DefKind::AssocConst | DefKind::AssocTy | DefKind::Static . ) . }); . if let Some((kind, def_id)) = def { . let is_local_static = 17,328 ( 0.00%) if let DefKind::Static = kind { def_id.is_local() } else { false }; 9,224 ( 0.00%) if !self.item_is_accessible(def_id) && !is_local_static { . let sess = self.tcx.sess; . let sm = sess.source_map(); . let name = match qpath { . hir::QPath::Resolved(..) | hir::QPath::LangItem(..) => { . sm.span_to_snippet(qpath.span()).ok() . } . hir::QPath::TypeRelative(_, segment) => Some(segment.ident.to_string()), . }; -- line 1258 ---------------------------------------- -- line 1263 ---------------------------------------- . }; . sess.struct_span_err(span, &msg) . .span_label(span, &format!("private {}", kind)) . .emit(); . return; . } . } . 278,399 ( 0.00%) intravisit::walk_qpath(self, qpath, id, span); . } . . // Check types of patterns. . fn visit_pat(&mut self, pattern: &'tcx hir::Pat<'tcx>) { 81,452 ( 0.00%) if self.check_expr_pat_type(pattern.hir_id, pattern.span) { . // Do not check nested patterns if the error already happened. . return; . } . 43,219 ( 0.00%) intravisit::walk_pat(self, pattern); . } . 6,345 ( 0.00%) fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) { 3,807 ( 0.00%) if let Some(init) = local.init { 8,022 ( 0.00%) if self.check_expr_pat_type(init.hir_id, init.span) { . // Do not report duplicate errors for `let x = y`. . return; . } . } . . intravisit::walk_local(self, local); 4,572 ( 0.00%) } . . // Check types in item interfaces. . fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 949 ( 0.00%) let orig_current_item = mem::replace(&mut self.current_item, item.def_id); . let old_maybe_typeck_results = self.maybe_typeck_results.take(); 2,815 ( 0.00%) intravisit::walk_item(self, item); 949 ( 0.00%) self.maybe_typeck_results = old_maybe_typeck_results; 959 ( 0.00%) self.current_item = orig_current_item; . } . } . . impl<'tcx> DefIdVisitor<'tcx> for TypePrivacyVisitor<'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { 415,270 ( 0.01%) self.tcx . } 1,774,290 ( 0.03%) fn visit_def_id( . &mut self, . def_id: DefId, . kind: &str, . descr: &dyn fmt::Display, . 
) -> ControlFlow { . if self.check_def_id(def_id, kind, descr) { . ControlFlow::BREAK . } else { . ControlFlow::CONTINUE . } 1,064,574 ( 0.02%) } . } . . /////////////////////////////////////////////////////////////////////////////// . /// Obsolete visitors for checking for private items in public interfaces. . /// These visitors are supposed to be kept in frozen state and produce an . /// "old error node set". For backward compatibility the new visitor reports . /// warnings instead of hard errors when the erroneous node is not in this old set. . /////////////////////////////////////////////////////////////////////////////// -- line 1328 ---------------------------------------- -- line 1342 ---------------------------------------- . /// Whether we've recurred at all (i.e., if we're pointing at the . /// first type on which `visit_ty` was called). . at_outer_type: bool, . /// Whether that first type is a public path. . outer_type_is_public_path: bool, . } . . impl<'a, 'tcx> ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { 1,453 ( 0.00%) fn path_is_private_type(&self, path: &hir::Path<'_>) -> bool { 4,368 ( 0.00%) let did = match path.res { . Res::PrimTy(..) | Res::SelfTy(..) | Res::Err => return false, 5,838 ( 0.00%) res => res.def_id(), . }; . . // A path can only be private if: . // it's in this crate... 973 ( 0.00%) if let Some(did) = did.as_local() { . // .. and it corresponds to a private type in the AST (this returns . // `None` for type parameters). 4,818 ( 0.00%) match self.tcx.hir().find(self.tcx.hir().local_def_id_to_hir_id(did)) { 3,689 ( 0.00%) Some(Node::Item(item)) => !item.vis.node.is_pub(), . Some(_) | None => false, . } . } else { . false . } 2,906 ( 0.00%) } . . fn trait_is_public(&self, trait_id: LocalDefId) -> bool { . // FIXME: this would preferably be using `exported_items`, but all . // traits are exported currently (see `EmbargoVisitor.exported_trait`). . self.access_levels.is_public(trait_id) . } . . fn check_generic_bound(&mut self, bound: &hir::GenericBound<'_>) { 14 ( 0.00%) if let hir::GenericBound::Trait(ref trait_ref, _) = *bound { 9 ( 0.00%) if self.path_is_private_type(trait_ref.trait_ref.path) { . self.old_error_set.insert(trait_ref.trait_ref.hir_ref_id); . } . } . } . . fn item_is_public(&self, def_id: LocalDefId, vis: &hir::Visibility<'_>) -> bool { 886 ( 0.00%) self.access_levels.is_reachable(def_id) || vis.node.is_pub() . } . } . . impl<'a, 'b, 'tcx, 'v> Visitor<'v> for ObsoleteCheckTypeForPrivatenessVisitor<'a, 'b, 'tcx> { . fn visit_generic_arg(&mut self, generic_arg: &'v hir::GenericArg<'v>) { 1,334 ( 0.00%) match generic_arg { . hir::GenericArg::Type(t) => self.visit_ty(t), . hir::GenericArg::Infer(inf) => self.visit_ty(&inf.to_ty()), . hir::GenericArg::Lifetime(_) | hir::GenericArg::Const(_) => {} . } . } . . fn visit_ty(&mut self, ty: &hir::Ty<'_>) { 2,073 ( 0.00%) if let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = ty.kind { 1,652 ( 0.00%) if self.inner.path_is_private_type(path) { . self.contains_private = true; . // Found what we're looking for, so let's stop working. . return; . } . } . if let hir::TyKind::Path(_) = ty.kind { 34 ( 0.00%) if self.at_outer_type { 360 ( 0.00%) self.outer_type_is_public_path = true; . } . } 741 ( 0.00%) self.at_outer_type = false; 758 ( 0.00%) intravisit::walk_ty(self, ty) 1,448 ( 0.00%) } . . // Don't want to recurse into `[, .. expr]`. . fn visit_expr(&mut self, _: &hir::Expr<'_>) {} . } . . impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { . 
type NestedFilter = nested_filter::All; . . /// We want to visit items in the context of their containing . /// module and so forth, so supply a crate for doing a deep walk. . fn nested_visit_map(&mut self) -> Self::Map { 1,172 ( 0.00%) self.tcx.hir() . } . 8,469 ( 0.00%) fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 3,764 ( 0.00%) match item.kind { . // Contents of a private mod can be re-exported, so we need . // to check internals. . hir::ItemKind::Mod(_) => {} . . // An `extern {}` doesn't introduce a new privacy . // namespace (the contents have their own privacies). . hir::ItemKind::ForeignMod { .. } => {} . 10 ( 0.00%) hir::ItemKind::Trait(.., bounds, _) => { 25 ( 0.00%) if !self.trait_is_public(item.def_id) { . return; . } . . for bound in bounds.iter() { . self.check_generic_bound(bound) . } . } . -- line 1447 ---------------------------------------- -- line 1454 ---------------------------------------- . // `impl [... for] Private` is never visible. . let self_contains_private; . // `impl [... for] Public<...>`, but not `impl [... for] . // Vec` or `(Public,)`, etc. . let self_is_public_path; . . // Check the properties of the `Self` type: . { 1,592 ( 0.00%) let mut visitor = ObsoleteCheckTypeForPrivatenessVisitor { . inner: self, . contains_private: false, . at_outer_type: true, . outer_type_is_public_path: false, . }; 398 ( 0.00%) visitor.visit_ty(impl_.self_ty); . self_contains_private = visitor.contains_private; . self_is_public_path = visitor.outer_type_is_public_path; . } . . // Miscellaneous info about the impl: . . // `true` iff this is `impl Private for ...`. . let not_private_trait = impl_.of_trait.as_ref().map_or( . true, // no trait counts as public trait . |tr| { 1,026 ( 0.00%) if let Some(def_id) = tr.path.res.def_id().as_local() { 135 ( 0.00%) self.trait_is_public(def_id) . } else { . true // external traits must be public . } . }, . ); . . // `true` iff this is a trait impl or at least one method is public. . // . // `impl Public { $( fn ...() {} )* }` is not visible. . // . // This is required over just using the methods' privacy . // directly because we might have `impl> ...`, . // and we shouldn't warn about the generics if all the methods . // are private (because `T` won't be visible externally). . let trait_or_some_public_method = impl_.of_trait.is_some() 56 ( 0.00%) || impl_.items.iter().any(|impl_item_ref| { 388 ( 0.00%) let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); 474 ( 0.00%) match impl_item.kind { . hir::ImplItemKind::Const(..) | hir::ImplItemKind::Fn(..) => { . self.access_levels.is_reachable(impl_item_ref.id.def_id) . } . hir::ImplItemKind::TyAlias(_) => false, . } . }); . 780 ( 0.00%) if !self_contains_private && not_private_trait && trait_or_some_public_method { . intravisit::walk_generics(self, &impl_.generics); . 632 ( 0.00%) match impl_.of_trait { . None => { 48 ( 0.00%) for impl_item_ref in impl_.items { . // This is where we choose whether to walk down . // further into the impl to check its items. We . // should only walk into public items so that we . // don't erroneously report errors for private . // types in private items. 1,716 ( 0.00%) let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); 1,670 ( 0.00%) match impl_item.kind { . hir::ImplItemKind::Const(..) | hir::ImplItemKind::Fn(..) 815 ( 0.00%) if self 286 ( 0.00%) .item_is_public(impl_item.def_id, &impl_item.vis) => . { . intravisit::walk_impl_item(self, impl_item) . } . hir::ImplItemKind::TyAlias(..) => { . intravisit::walk_impl_item(self, impl_item) . 
} . _ => {} . } -- line 1529 ---------------------------------------- -- line 1538 ---------------------------------------- . // . // Those in 1. can only occur if the trait is in . // this crate and will've been warned about on the . // trait definition (there's no need to warn twice . // so we don't check the methods). . // . // Those in 2. are warned via walk_generics and this . // call here. 804 ( 0.00%) intravisit::walk_path(self, tr.path); . . // Those in 3. are warned with this call. 268 ( 0.00%) for impl_item_ref in impl_.items { 1,385 ( 0.00%) let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); 2,038 ( 0.00%) if let hir::ImplItemKind::TyAlias(ty) = impl_item.kind { . self.visit_ty(ty); . } . } . } . } 246 ( 0.00%) } else if impl_.of_trait.is_none() && self_is_public_path { . // `impl Public { ... }`. Any public static . // methods will be visible as `Public::foo`. . let mut found_pub_static = false; 2 ( 0.00%) for impl_item_ref in impl_.items { 16 ( 0.00%) if self.access_levels.is_reachable(impl_item_ref.id.def_id) 8 ( 0.00%) || self.tcx.visibility(impl_item_ref.id.def_id) . == ty::Visibility::Public . { 10 ( 0.00%) let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); 26 ( 0.00%) match impl_item_ref.kind { . AssocItemKind::Const => { . found_pub_static = true; . intravisit::walk_impl_item(self, impl_item); . } . AssocItemKind::Fn { has_self: false } => { . found_pub_static = true; . intravisit::walk_impl_item(self, impl_item); . } . _ => {} . } . } . } 4 ( 0.00%) if found_pub_static { . intravisit::walk_generics(self, &impl_.generics) . } . } . return; . } . . // `type ... = ...;` can contain private types, because . // we're introducing a new name. . hir::ItemKind::TyAlias(..) => return, . . // Not at all public, so we don't care. 3,135 ( 0.00%) _ if !self.item_is_public(item.def_id, &item.vis) => { . return; . } . . _ => {} . } . . // We've carefully constructed it so that if we're here, then . // any `visit_ty`'s will be called on things that are in . // public signatures, i.e., things that we're interested in for . // this visitor. 1,780 ( 0.00%) intravisit::walk_item(self, item); 6,104 ( 0.00%) } . 2,400 ( 0.00%) fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) { . for param in generics.params { 104 ( 0.00%) for bound in param.bounds { . self.check_generic_bound(bound); . } . } 300 ( 0.00%) for predicate in generics.where_clause.predicates { 141 ( 0.00%) match predicate { . hir::WherePredicate::BoundPredicate(bound_pred) => { 2 ( 0.00%) for bound in bound_pred.bounds.iter() { . self.check_generic_bound(bound) . } . } . hir::WherePredicate::RegionPredicate(_) => {} . hir::WherePredicate::EqPredicate(eq_pred) => { . self.visit_ty(eq_pred.rhs_ty); . } . } . } 2,400 ( 0.00%) } . . fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem<'tcx>) { . if self.access_levels.is_reachable(item.def_id) { . intravisit::walk_foreign_item(self, item) . } . } . . fn visit_ty(&mut self, t: &'tcx hir::Ty<'tcx>) { 4,524 ( 0.00%) if let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = t.kind { 3,144 ( 0.00%) if self.path_is_private_type(path) { . self.old_error_set.insert(t.hir_id); . } . } 6,875 ( 0.00%) intravisit::walk_ty(self, t) . } . 5,208 ( 0.00%) fn visit_variant( . &mut self, . v: &'tcx hir::Variant<'tcx>, . g: &'tcx hir::Generics<'tcx>, . item_id: hir::HirId, . ) { 6,510 ( 0.00%) if self.access_levels.is_reachable(self.tcx.hir().local_def_id(v.id)) { 651 ( 0.00%) self.in_variant = true; . 
intravisit::walk_variant(self, v, g, item_id); 651 ( 0.00%) self.in_variant = false; . } 4,557 ( 0.00%) } . 2,250 ( 0.00%) fn visit_field_def(&mut self, s: &'tcx hir::FieldDef<'tcx>) { 2,574 ( 0.00%) if s.vis.node.is_pub() || self.in_variant { . intravisit::walk_field_def(self, s); . } 480 ( 0.00%) } . . // We don't need to introspect into these at all: an . // expression/block context can't possibly contain exported things. . // (Making them no-ops stops us from traversing the whole AST without . // having to be super careful about our `walk_...` calls above.) . fn visit_block(&mut self, _: &'tcx hir::Block<'tcx>) {} . fn visit_expr(&mut self, _: &'tcx hir::Expr<'tcx>) {} . } -- line 1667 ---------------------------------------- -- line 1679 ---------------------------------------- . /// The visitor checks that each component type is at least this visible. . required_visibility: ty::Visibility, . has_pub_restricted: bool, . has_old_errors: bool, . in_assoc_ty: bool, . } . . impl SearchInterfaceForPrivateItemsVisitor<'_> { 7,378 ( 0.00%) fn generics(&mut self) -> &mut Self { 3,162 ( 0.00%) for param in &self.tcx.generics_of(self.item_def_id).params { 1,270 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => {} 59 ( 0.00%) GenericParamDefKind::Type { has_default, .. } => { 59 ( 0.00%) if has_default { 3 ( 0.00%) self.visit(self.tcx.type_of(param.def_id)); . } . } . // FIXME(generic_const_exprs): May want to look inside const here . GenericParamDefKind::Const { .. } => { . self.visit(self.tcx.type_of(param.def_id)); . } . } . } . self 8,432 ( 0.00%) } . 5,245 ( 0.00%) fn predicates(&mut self) -> &mut Self { . // N.B., we use `explicit_predicates_of` and not `predicates_of` . // because we don't want to report privacy errors due to where . // clauses that the compiler inferred. We only want to . // consider the ones that the user wrote. This is important . // for the inferred outlives rules; see . // `src/test/ui/rfc-2093-infer-outlives/privacy.rs`. 2,098 ( 0.00%) self.visit_predicates(self.tcx.explicit_predicates_of(self.item_def_id)); . self 6,294 ( 0.00%) } . 35 ( 0.00%) fn bounds(&mut self) -> &mut Self { . self.visit_predicates(ty::GenericPredicates { . parent: None, 14 ( 0.00%) predicates: self.tcx.explicit_item_bounds(self.item_def_id), . }); . self 35 ( 0.00%) } . 6,945 ( 0.00%) fn ty(&mut self) -> &mut Self { 2,778 ( 0.00%) self.visit(self.tcx.type_of(self.item_def_id)); . self 6,945 ( 0.00%) } . . fn check_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool { 19,387 ( 0.00%) if self.leaks_private_dep(def_id) { . self.tcx.struct_span_lint_hir( . lint::builtin::EXPORTED_PRIVATE_DEPENDENCIES, . self.tcx.hir().local_def_id_to_hir_id(self.item_def_id), . self.tcx.def_span(self.item_def_id.to_def_id()), . |lint| { . lint.build(&format!( . "{} `{}` from private dependency '{}' in public \ . interface", -- line 1738 ---------------------------------------- -- line 1740 ---------------------------------------- . descr, . self.tcx.crate_name(def_id.krate) . )) . .emit() . }, . ); . } . 10,715 ( 0.00%) let hir_id = match def_id.as_local() { 3,304 ( 0.00%) Some(def_id) => self.tcx.hir().local_def_id_to_hir_id(def_id), . None => return false, . }; . . let vis = self.tcx.visibility(def_id); 5,762 ( 0.00%) if !vis.is_at_least(self.required_visibility, self.tcx) { . let vis_descr = match vis { . ty::Visibility::Public => "public", . ty::Visibility::Invisible => "private", . ty::Visibility::Restricted(vis_def_id) => { . 
if vis_def_id == self.tcx.parent_module(hir_id).to_def_id() { . "private" . } else if vis_def_id.is_top_level_module() { . "crate-private" -- line 1762 ---------------------------------------- -- line 1792 ---------------------------------------- . false . } . . /// An item is 'leaked' from a private dependency if all . /// of the following are true: . /// 1. It's contained within a public type . /// 2. It comes from a private crate . fn leaks_private_dep(&self, item_id: DefId) -> bool { 12,321 ( 0.00%) let ret = self.required_visibility.is_public() && self.tcx.is_private_dep(item_id.krate); . . tracing::debug!("leaks_private_dep(item_id={:?})={}", item_id, ret); . ret . } . } . . impl<'tcx> DefIdVisitor<'tcx> for SearchInterfaceForPrivateItemsVisitor<'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { 6,825 ( 0.00%) self.tcx . } 65,712 ( 0.00%) fn visit_def_id( . &mut self, . def_id: DefId, . kind: &str, . descr: &dyn fmt::Display, . ) -> ControlFlow { . if self.check_def_id(def_id, kind, descr) { . ControlFlow::BREAK . } else { . ControlFlow::CONTINUE . } 36,963 ( 0.00%) } . } . . struct PrivateItemsInPublicInterfacesVisitor<'tcx> { . tcx: TyCtxt<'tcx>, . has_pub_restricted: bool, . old_error_set_ancestry: LocalDefIdSet, . } . . impl<'tcx> PrivateItemsInPublicInterfacesVisitor<'tcx> { . fn check( . &self, . def_id: LocalDefId, . required_visibility: ty::Visibility, . ) -> SearchInterfaceForPrivateItemsVisitor<'tcx> { 12,803 ( 0.00%) SearchInterfaceForPrivateItemsVisitor { 1,347 ( 0.00%) tcx: self.tcx, . item_def_id: def_id, . required_visibility, 2,084 ( 0.00%) has_pub_restricted: self.has_pub_restricted, . has_old_errors: self.old_error_set_ancestry.contains(&def_id), . in_assoc_ty: false, . } . } . 10,816 ( 0.00%) fn check_assoc_item( . &self, . def_id: LocalDefId, . assoc_item_kind: AssocItemKind, . defaultness: hir::Defaultness, . vis: ty::Visibility, . ) { . let mut check = self.check(def_id, vis); . 1,664 ( 0.00%) let (check_ty, is_assoc_ty) = match assoc_item_kind { . AssocItemKind::Const | AssocItemKind::Fn { .. } => (true, false), 168 ( 0.00%) AssocItemKind::Type => (defaultness.has_value(), true), . }; 1,664 ( 0.00%) check.in_assoc_ty = is_assoc_ty; 3,328 ( 0.00%) check.generics().predicates(); 170 ( 0.00%) if check_ty { 830 ( 0.00%) check.ty(); . } 6,656 ( 0.00%) } . } . . impl<'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'tcx> { . type NestedFilter = nested_filter::OnlyBodies; . . fn nested_visit_map(&mut self) -> Self::Map { 776 ( 0.00%) self.tcx.hir() . } . 8,541 ( 0.00%) fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 949 ( 0.00%) let tcx = self.tcx; 949 ( 0.00%) let item_visibility = tcx.visibility(item.def_id); . 4,745 ( 0.00%) match item.kind { . // Crates are always public. . hir::ItemKind::ExternCrate(..) => {} . // All nested items are checked by `visit_item`. . hir::ItemKind::Mod(..) => {} . // Checked in resolve. . hir::ItemKind::Use(..) => {} . // No subitems. . hir::ItemKind::Macro(..) | hir::ItemKind::GlobalAsm(..) => {} . // Subitems of these items have inherited publicity. . hir::ItemKind::Const(..) . | hir::ItemKind::Static(..) . | hir::ItemKind::Fn(..) . | hir::ItemKind::TyAlias(..) => { 276 ( 0.00%) self.check(item.def_id, item_visibility).generics().predicates().ty(); . } . hir::ItemKind::OpaqueTy(..) => { . // `ty()` for opaque types is the underlying type, . // it's not a part of interface, so we skip it. 25 ( 0.00%) self.check(item.def_id, item_visibility).generics().bounds(); . 
} 20 ( 0.00%) hir::ItemKind::Trait(.., trait_item_refs) => { 20 ( 0.00%) self.check(item.def_id, item_visibility).generics().predicates(); . . for trait_item_ref in trait_item_refs { 144 ( 0.00%) self.check_assoc_item( 24 ( 0.00%) trait_item_ref.id.def_id, . trait_item_ref.kind, . trait_item_ref.defaultness, . item_visibility, . ); . 68 ( 0.00%) if let AssocItemKind::Type = trait_item_ref.kind { 12 ( 0.00%) self.check(trait_item_ref.id.def_id, item_visibility).bounds(); . } . } . } . hir::ItemKind::TraitAlias(..) => { . self.check(item.def_id, item_visibility).generics().predicates(); . } . hir::ItemKind::Enum(ref def, _) => { 100 ( 0.00%) self.check(item.def_id, item_visibility).generics().predicates(); . 25 ( 0.00%) for variant in def.variants { 690 ( 0.00%) for field in variant.data.fields() { 3,396 ( 0.00%) self.check(self.tcx.hir().local_def_id(field.hir_id), item_visibility).ty(); . } . } . } . // Subitems of foreign modules have their own publicity. . hir::ItemKind::ForeignMod { items, .. } => { . for foreign_item in items { . let vis = tcx.visibility(foreign_item.id.def_id); . self.check(foreign_item.id.def_id, vis).generics().predicates().ty(); . } . } . // Subitems of structs and unions have their own publicity. . hir::ItemKind::Struct(ref struct_def, _) | hir::ItemKind::Union(ref struct_def, _) => { 340 ( 0.00%) self.check(item.def_id, item_visibility).generics().predicates(); . 255 ( 0.00%) for field in struct_def.fields() { 1,380 ( 0.00%) let def_id = tcx.hir().local_def_id(field.hir_id); . let field_visibility = tcx.visibility(def_id); 690 ( 0.00%) self.check(def_id, min(item_visibility, field_visibility, tcx)).ty(); . } . } . // An inherent impl is public when its type is public . // Subitems of inherent impls have their own publicity. . // A trait impl is public when both its type and its trait are public . // Subitems of trait impls have inherited publicity. . hir::ItemKind::Impl(ref impl_) => { 1,592 ( 0.00%) let impl_vis = ty::Visibility::of_impl(item.def_id, tcx, &Default::default()); . // check that private components do not appear in the generics or predicates of inherent impls . // this check is intentionally NOT performed for impls of traits, per #90586 796 ( 0.00%) if impl_.of_trait.is_none() { 280 ( 0.00%) self.check(item.def_id, impl_vis).generics().predicates(); . } 398 ( 0.00%) for impl_item_ref in impl_.items { 600 ( 0.00%) let impl_item_vis = if impl_.of_trait.is_none() { 329 ( 0.00%) min(tcx.visibility(impl_item_ref.id.def_id), impl_vis, tcx) . } else { . impl_vis . }; 4,519 ( 0.00%) self.check_assoc_item( 808 ( 0.00%) impl_item_ref.id.def_id, . impl_item_ref.kind, . impl_item_ref.defaultness, . impl_item_vis, . ); . } . } . } 7,592 ( 0.00%) } . } . . pub fn provide(providers: &mut Providers) { 8 ( 0.00%) *providers = Providers { . visibility, . privacy_access_levels, . check_private_in_public, . check_mod_privacy, . ..*providers . }; 1 ( 0.00%) } . 31,842 ( 0.00%) fn visibility(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Visibility { 3,538 ( 0.00%) let def_id = def_id.expect_local(); . match tcx.resolutions(()).visibilities.get(&def_id) { 5,508 ( 0.00%) Some(vis) => *vis, . None => { . let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); 8,800 ( 0.00%) match tcx.hir().get(hir_id) { . // Unique types created for closures participate in type privacy checking. . // They have visibilities inherited from the module they are defined in. . Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(..), .. 
}) => { 270 ( 0.00%) ty::Visibility::Restricted(tcx.parent_module(hir_id).to_def_id()) . } . // - AST lowering may clone `use` items and the clones don't . // get their entries in the resolver's visibility table. . // - AST lowering also creates opaque type items with inherited visibilities. . // Visibility on them should have no effect, but to avoid the visibility . // query failing on some items, we provide it for opaque types as well. . Node::Item(hir::Item { 260 ( 0.00%) vis, . kind: hir::ItemKind::Use(..) | hir::ItemKind::OpaqueTy(..), . .. 1,560 ( 0.00%) }) => ty::Visibility::from_hir(vis, hir_id, tcx), . // Visibilities of trait impl items are inherited from their traits . // and are not filled in resolve. . Node::ImplItem(impl_item) => { 6,227 ( 0.00%) match tcx.hir().get_by_def_id(tcx.hir().get_parent_item(hir_id)) { . Node::Item(hir::Item { . kind: hir::ItemKind::Impl(hir::Impl { of_trait: Some(tr), .. }), . .. 479 ( 0.00%) }) => tr.path.res.opt_def_id().map_or_else( . || { . tcx.sess.delay_span_bug(tr.path.span, "trait without a def-id"); . ty::Visibility::Public . }, . |def_id| tcx.visibility(def_id), . ), . _ => span_bug!(impl_item.span, "the parent is not a trait impl"), . } -- line 2021 ---------------------------------------- -- line 2023 ----------------------------------------
if !old_error_set_ancestry.insert(id) { . break; . } . let parent = tcx.hir().get_parent_node(id); . if parent == id { . break; . } . id = parent; . } . } . . // Check for private types and traits in public interfaces. 8 ( 0.00%) let mut visitor = PrivateItemsInPublicInterfacesVisitor { . tcx, . has_pub_restricted, . // Only definition IDs are ever searched in `old_error_set_ancestry`, . // so we can filter away all non-definition IDs at this point. . old_error_set_ancestry: old_error_set_ancestry . .into_iter() . .filter_map(|hir_id| tcx.hir().opt_local_def_id(hir_id)) . .collect(), . }; 6 ( 0.00%) tcx.hir().visit_all_item_likes(&mut DeepVisitor::new(&mut visitor)); 8 ( 0.00%) } 1,199,622 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/region_constraints/mod.rs -------------------------------------------------------------------------------- Ir -- line 23 ---------------------------------------- . use std::collections::BTreeMap; . use std::ops::Range; . use std::{cmp, fmt, mem}; . . mod leak_check; . . pub use rustc_middle::infer::MemberConstraint; . 246,785 ( 0.00%) #[derive(Default)] . pub struct RegionConstraintStorage<'tcx> { . /// For each `RegionVid`, the corresponding `RegionVariableOrigin`. . var_infos: IndexVec, . . data: RegionConstraintData<'tcx>, . . /// For a given pair of regions (R1, R2), maps to a region R3 that . /// is designated as their LUB (edges R1 <= R3 and R2 <= R3 -- line 39 ---------------------------------------- -- line 64 ---------------------------------------- . storage: &'a mut RegionConstraintStorage<'tcx>, . undo_log: &'a mut InferCtxtUndoLogs<'tcx>, . } . . impl<'tcx> std::ops::Deref for RegionConstraintCollector<'_, 'tcx> { . type Target = RegionConstraintStorage<'tcx>; . #[inline] . fn deref(&self) -> &RegionConstraintStorage<'tcx> { 13,352 ( 0.00%) self.storage . } . } . . impl<'tcx> std::ops::DerefMut for RegionConstraintCollector<'_, 'tcx> { . #[inline] . fn deref_mut(&mut self) -> &mut RegionConstraintStorage<'tcx> { 116,007 ( 0.00%) self.storage . } . } . . pub type VarInfos = IndexVec; . . /// The full set of region constraints gathered up by the collector. . /// Describes constraints between the region variables and other . /// regions, as well as other conditions that must be verified, or . /// assumptions that can be made. 127,964 ( 0.00%) #[derive(Debug, Default, Clone)] . pub struct RegionConstraintData<'tcx> { . /// Constraints of the form `A <= B`, where either `A` or `B` can . /// be a region variable (or neither, as it happens). . pub constraints: BTreeMap, SubregionOrigin<'tcx>>, . . /// Constraints of the form `R0 member of [R1, ..., Rn]`, meaning that . /// `R0` must be equal to one of the regions `R1..Rn`. These occur . /// with `impl Trait` quite frequently. -- line 97 ---------------------------------------- -- line 122 ---------------------------------------- . /// This hashmap is used to avoid that naive scenario. Basically . /// we record the fact that `'a <= 'b` is implied by the fn . /// signature, and then ignore the constraint when solving . /// equations. This is a bit of a hack but seems to work. . pub givens: FxHashSet<(Region<'tcx>, ty::RegionVid)>, . } . . /// Represents a constraint that influences the inference process. 11,969,135 ( 0.20%) #[derive(Clone, Copy, PartialEq, Eq, Debug, PartialOrd, Ord)] . pub enum Constraint<'tcx> { . 
/// A region variable is a subregion of another. 2,735,958 ( 0.05%) VarSubVar(RegionVid, RegionVid), . . /// A concrete region is a subregion of region variable. 667,375 ( 0.01%) RegSubVar(Region<'tcx>, RegionVid), . . /// A region variable is a subregion of a concrete region. This does not . /// directly affect inference, but instead is checked after . /// inference is complete. 258,380 ( 0.00%) VarSubReg(RegionVid, Region<'tcx>), . . /// A constraint where neither side is a variable. This does not . /// directly affect inference, but instead is checked after . /// inference is complete. 28,495 ( 0.00%) RegSubReg(Region<'tcx>, Region<'tcx>), . } . . impl Constraint<'_> { . pub fn involves_placeholders(&self) -> bool { 4,552 ( 0.00%) match self { . Constraint::VarSubVar(_, _) => false, . Constraint::VarSubReg(_, r) | Constraint::RegSubVar(r, _) => r.is_placeholder(), . Constraint::RegSubReg(r, s) => r.is_placeholder() || s.is_placeholder(), . } . } . } . . #[derive(Debug, Clone)] -- line 159 ---------------------------------------- -- line 263 ---------------------------------------- . /// In other words, if we meet *all* bounds in `B`, that suffices. . /// This is used when *some* bound in `B` is known to suffice, but . /// we don't know which. . AllBounds(Vec>), . } . . #[derive(Copy, Clone, PartialEq, Eq, Hash)] . pub(crate) struct TwoRegions<'tcx> { 2,037 ( 0.00%) a: Region<'tcx>, 2,130 ( 0.00%) b: Region<'tcx>, . } . . #[derive(Copy, Clone, PartialEq)] . pub(crate) enum UndoLog<'tcx> { . /// We added `RegionVid`. . AddVar(RegionVid), . . /// We added the given `constraint`. -- line 280 ---------------------------------------- -- line 304 ---------------------------------------- . pub universe: ty::UniverseIndex, . } . . pub struct RegionSnapshot { . any_unifications: bool, . } . . impl<'tcx> RegionConstraintStorage<'tcx> { 44,870 ( 0.00%) pub fn new() -> Self { . Self::default() 44,870 ( 0.00%) } . . #[inline] . pub(crate) fn with_log<'a>( . &'a mut self, . undo_log: &'a mut InferCtxtUndoLogs<'tcx>, . ) -> RegionConstraintCollector<'a, 'tcx> { . RegionConstraintCollector { storage: self, undo_log } . } . . fn rollback_undo_entry(&mut self, undo_entry: UndoLog<'tcx>) { 283,650 ( 0.00%) match undo_entry { . AddVar(vid) => { . self.var_infos.pop().unwrap(); 96,108 ( 0.00%) assert_eq!(self.var_infos.len(), vid.index() as usize); . } 32,703 ( 0.00%) AddConstraint(ref constraint) => { 130,812 ( 0.00%) self.data.constraints.remove(constraint); . } . AddVerify(index) => { . self.data.verifys.pop(); . assert_eq!(self.data.verifys.len(), index); . } . AddGiven(sub, sup) => { . self.data.givens.remove(&(sub, sup)); . } -- line 339 ---------------------------------------- -- line 348 ---------------------------------------- . } . . impl<'tcx> RegionConstraintCollector<'_, 'tcx> { . pub fn num_region_vars(&self) -> usize { . self.var_infos.len() . } . . pub fn region_constraint_data(&self) -> &RegionConstraintData<'tcx> { 4,451 ( 0.00%) &self.data 4,451 ( 0.00%) } . . /// Once all the constraints have been gathered, extract out the final data. . /// . /// Not legal during a snapshot. . pub fn into_infos_and_data(self) -> (VarInfos, RegionConstraintData<'tcx>) { 9,546 ( 0.00%) assert!(!UndoLogs::>::in_snapshot(&self.undo_log)); . (mem::take(&mut self.storage.var_infos), mem::take(&mut self.storage.data)) . } . . /// Takes (and clears) the current set of constraints. Note that . /// the set of variables remains intact, but all relationships . /// between them are reset. 
This is used during NLL checking to . /// grab the set of constraints that arose from a particular . /// operation. . /// . /// We don't want to leak relationships between variables between . /// points because just because (say) `r1 == r2` was true at some . /// point P in the graph doesn't imply that it will be true at . /// some other point Q, in NLL. . /// . /// Not legal during a snapshot. 60 ( 0.00%) pub fn take_and_reset_data(&mut self) -> RegionConstraintData<'tcx> { 10 ( 0.00%) assert!(!UndoLogs::>::in_snapshot(&self.undo_log)); . . // If you add a new field to `RegionConstraintCollector`, you . // should think carefully about whether it needs to be cleared . // or updated in some way. . let RegionConstraintStorage { . var_infos: _, 10 ( 0.00%) data, . lubs, . glbs, . unification_table: _, . any_unifications, . } = self.storage; . . // Clear the tables of (lubs, glbs), so that we will create . // fresh regions if we do a LUB operation. As it happens, -- line 395 ---------------------------------------- -- line 399 ---------------------------------------- . glbs.clear(); . . let data = mem::take(data); . . // Clear all unifications and recreate the variables a "now . // un-unified" state. Note that when we unify `a` and `b`, we . // also insert `a <= b` and a `b <= a` edges, so the . // `RegionConstraintData` contains the relationship here. 20 ( 0.00%) if *any_unifications { 3 ( 0.00%) *any_unifications = false; 9 ( 0.00%) self.unification_table().reset_unifications(|_| UnifiedRegion(None)); . } . . data 80 ( 0.00%) } . . pub fn data(&self) -> &RegionConstraintData<'tcx> { . &self.data . } . . pub fn start_snapshot(&mut self) -> RegionSnapshot { . debug!("RegionConstraintCollector: start_snapshot"); . RegionSnapshot { any_unifications: self.any_unifications } . } . . pub fn rollback_to(&mut self, snapshot: RegionSnapshot) { . debug!("RegionConstraintCollector: rollback_to({:?})", snapshot); 76,750 ( 0.00%) self.any_unifications = snapshot.any_unifications; . } . 696,018 ( 0.01%) pub fn new_region_var( . &mut self, . universe: ty::UniverseIndex, . origin: RegionVariableOrigin, . ) -> RegionVid { 116,003 ( 0.00%) let vid = self.var_infos.push(RegionVariableInfo { origin, universe }); . 348,009 ( 0.01%) let u_vid = self.unification_table().new_key(UnifiedRegion(None)); 116,003 ( 0.00%) assert_eq!(vid, u_vid.vid); . self.undo_log.push(AddVar(vid)); . debug!("created new region variable {:?} in {:?} with origin {:?}", vid, universe, origin); . vid 928,024 ( 0.02%) } . . /// Returns the universe for the given variable. . pub fn var_universe(&self, vid: RegionVid) -> ty::UniverseIndex { 81,731 ( 0.00%) self.var_infos[vid].universe . } . . /// Returns the origin for the given variable. . pub fn var_origin(&self, vid: RegionVid) -> RegionVariableOrigin { . self.var_infos[vid].origin . } . . fn add_constraint(&mut self, constraint: Constraint<'tcx>, origin: SubregionOrigin<'tcx>) { . // cannot add constraints once regions are resolved . debug!("RegionConstraintCollector: add_constraint({:?})", constraint); . . // never overwrite an existing (constraint, origin) - only insert one if it isn't . // present in the map yet. This prevents origins from outside the snapshot being . // replaced with "less informative" origins e.g., during calls to `can_eq` . let undo_log = &mut self.undo_log; 847,544 ( 0.01%) self.storage.data.constraints.entry(constraint).or_insert_with(|| { 485,922 ( 0.01%) undo_log.push(AddConstraint(constraint)); 323,948 ( 0.01%) origin . }); . } . . 
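// [Editor's illustrative sketch -- not part of the profiled rustc source.]
// The `add_constraint` body above uses an "insert only if absent, then record an
// undo entry" pattern, so that rolling back a snapshot removes exactly the
// constraints added inside it and never overwrites an earlier, more informative
// origin. A minimal standalone version of that idea, with hypothetical
// `Constraint`, `Origin` and `Undo` types standing in for the real ones:

use std::collections::BTreeMap;

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
struct Constraint(u32, u32); // e.g. "region 0 must outlive region 1"

struct Origin(&'static str);

enum Undo {
    AddConstraint(Constraint),
}

#[derive(Default)]
struct Collector {
    constraints: BTreeMap<Constraint, Origin>,
    undo_log: Vec<Undo>,
}

impl Collector {
    fn add_constraint(&mut self, constraint: Constraint, origin: Origin) {
        // Keep the first origin seen; push an undo entry only when the
        // constraint is actually new.
        let undo_log = &mut self.undo_log;
        self.constraints.entry(constraint).or_insert_with(|| {
            undo_log.push(Undo::AddConstraint(constraint));
            origin
        });
    }

    fn rollback(&mut self, undo: Undo) {
        match undo {
            Undo::AddConstraint(constraint) => {
                self.constraints.remove(&constraint);
            }
        }
    }
}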
fn add_verify(&mut self, verify: Verify<'tcx>) { . // cannot add verifys once regions are resolved . debug!("RegionConstraintCollector: add_verify({:?})", verify); . . // skip no-op cases known to be satisfied 1,676 ( 0.00%) if let VerifyBound::AllBounds(ref bs) = verify.bound { . if bs.is_empty() { . return; . } . } . 838 ( 0.00%) let index = self.data.verifys.len(); 12,570 ( 0.00%) self.data.verifys.push(verify); 838 ( 0.00%) self.undo_log.push(AddVerify(index)); . } . 36 ( 0.00%) pub fn add_given(&mut self, sub: Region<'tcx>, sup: ty::RegionVid) { . // cannot add givens once regions are resolved . if self.data.givens.insert((sub, sup)) { . debug!("add_given({:?} <= {:?})", sub, sup); . 4 ( 0.00%) self.undo_log.push(AddGiven(sub, sup)); . } 28 ( 0.00%) } . 346,110 ( 0.01%) pub fn make_eqregion( . &mut self, . origin: SubregionOrigin<'tcx>, . sub: Region<'tcx>, . sup: Region<'tcx>, . ) { 173,055 ( 0.00%) if sub != sup { . // Eventually, it would be nice to add direct support for . // equating regions. 151,896 ( 0.00%) self.make_subregion(origin.clone(), sub, sup); 170,883 ( 0.00%) self.make_subregion(origin, sup, sub); . 95,332 ( 0.00%) match (sub, sup) { 42,081 ( 0.00%) (&ty::ReVar(sub), &ty::ReVar(sup)) => { . debug!("make_eqregion: unifying {:?} with {:?}", sub, sup); 70,135 ( 0.00%) self.unification_table().union(sub, sup); . self.any_unifications = true; . } . (&ty::ReVar(vid), value) | (value, &ty::ReVar(vid)) => { . debug!("make_eqregion: unifying {:?} with {:?}", vid, value); 18,252 ( 0.00%) self.unification_table().union_value(vid, UnifiedRegion(Some(value))); . self.any_unifications = true; . } . (_, _) => {} . } . } 132,909 ( 0.00%) } . 126 ( 0.00%) pub fn member_constraint( . &mut self, . opaque_type_def_id: DefId, . definition_span: Span, . hidden_ty: Ty<'tcx>, . member_region: ty::Region<'tcx>, . choice_regions: &Lrc>>, . ) { . debug!("member_constraint({:?} in {:#?})", member_region, choice_regions); . 9 ( 0.00%) if choice_regions.iter().any(|&r| r == member_region) { . return; . } . 81 ( 0.00%) self.data.member_constraints.push(MemberConstraint { . opaque_type_def_id, . definition_span, . hidden_ty, . member_region, . choice_regions: choice_regions.clone(), . }); 72 ( 0.00%) } . 1,160,694 ( 0.02%) #[instrument(skip(self, origin), level = "debug")] . pub fn make_subregion( . &mut self, . origin: SubregionOrigin<'tcx>, . sub: Region<'tcx>, . sup: Region<'tcx>, . ) { . // cannot add constraints once regions are resolved . debug!("origin = {:#?}", origin); . 852,948 ( 0.01%) match (sub, sup) { . (&ReLateBound(..), _) | (_, &ReLateBound(..)) => { . span_bug!(origin.span(), "cannot relate bound region: {:?} <= {:?}", sub, sup); . } . (_, &ReStatic) => { . // all regions are subregions of static, so we can ignore this . } 59,058 ( 0.00%) (&ReVar(sub_id), &ReVar(sup_id)) => { 413,406 ( 0.01%) self.add_constraint(Constraint::VarSubVar(sub_id, sup_id), origin); . } . (_, &ReVar(sup_id)) => { 56,511 ( 0.00%) self.add_constraint(Constraint::RegSubVar(sub, sup_id), origin); . } 10,073 ( 0.00%) (&ReVar(sub_id), _) => { 70,511 ( 0.00%) self.add_constraint(Constraint::VarSubReg(sub_id, sup), origin); . } . _ => { 30,268 ( 0.00%) self.add_constraint(Constraint::RegSubReg(sub, sup), origin); . } . } . } . 4,190 ( 0.00%) pub fn verify_generic_bound( . &mut self, . origin: SubregionOrigin<'tcx>, . kind: GenericKind<'tcx>, . sub: Region<'tcx>, . bound: VerifyBound<'tcx>, . ) { 10,894 ( 0.00%) self.add_verify(Verify { kind, origin, region: sub, bound }); 4,190 ( 0.00%) } . . 
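// [Editor's illustrative sketch -- not part of the profiled rustc source.]
// The `make_subregion` match above mostly classifies which side of the
// `sub <= sup` edge is an inference variable and records the matching
// `Constraint` variant (edges into 'static are dropped because they always
// hold). A toy version with hypothetical `Region`/`Constraint` types:

#[derive(Clone, Copy, Debug)]
enum Region {
    Static,
    Var(u32),
    Free(u32),
}

#[derive(Debug)]
enum Constraint {
    VarSubVar(u32, u32),
    RegSubVar(Region, u32),
    VarSubReg(u32, Region),
    RegSubReg(Region, Region),
}

fn classify(sub: Region, sup: Region) -> Option<Constraint> {
    match (sub, sup) {
        // Everything is a subregion of 'static, so there is nothing to record.
        (_, Region::Static) => None,
        (Region::Var(a), Region::Var(b)) => Some(Constraint::VarSubVar(a, b)),
        (r, Region::Var(b)) => Some(Constraint::RegSubVar(r, b)),
        (Region::Var(a), r) => Some(Constraint::VarSubReg(a, r)),
        (r1, r2) => Some(Constraint::RegSubReg(r1, r2)),
    }
}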
pub fn lub_regions( . &mut self, . tcx: TyCtxt<'tcx>, . origin: SubregionOrigin<'tcx>, . a: Region<'tcx>, . b: Region<'tcx>, . ) -> Region<'tcx> { -- line 591 ---------------------------------------- -- line 599 ---------------------------------------- . _ if a == b => { . a // LUB(a,a) = a . } . . _ => self.combine_vars(tcx, Lub, a, b, origin), . } . } . 9,480 ( 0.00%) pub fn glb_regions( . &mut self, . tcx: TyCtxt<'tcx>, . origin: SubregionOrigin<'tcx>, . a: Region<'tcx>, . b: Region<'tcx>, . ) -> Region<'tcx> { . // cannot add constraints once regions are resolved . debug!("RegionConstraintCollector: glb_regions({:?}, {:?})", a, b); 4,738 ( 0.00%) match (a, b) { . (&ReStatic, r) | (r, &ReStatic) => { . r // static lives longer than everything else . } . 2,366 ( 0.00%) _ if a == b => { . a // GLB(a,a) = a . } . 14,196 ( 0.00%) _ => self.combine_vars(tcx, Glb, a, b, origin), . } 9,480 ( 0.00%) } . . /// Resolves the passed RegionVid to the root RegionVid in the unification table . pub fn opportunistic_resolve_var(&mut self, rid: ty::RegionVid) -> ty::RegionVid { 112,174 ( 0.00%) self.unification_table().find(rid).vid . } . . /// If the Region is a `ReVar`, then resolves it either to the root value in . /// the unification table, if it exists, or to the root `ReVar` in the table. . /// If the Region is not a `ReVar`, just returns the Region itself. . pub fn opportunistic_resolve_region( . &mut self, . tcx: TyCtxt<'tcx>, -- line 639 ---------------------------------------- -- line 647 ---------------------------------------- . tcx.reuse_or_mk_region(region, ty::ReVar(root)) . }) . } . _ => region, . } . } . . fn combine_map(&mut self, t: CombineMapType) -> &mut CombineMap<'tcx> { 9,464 ( 0.00%) match t { . Glb => &mut self.glbs, . Lub => &mut self.lubs, . } . } . 14,196 ( 0.00%) fn combine_vars( . &mut self, . tcx: TyCtxt<'tcx>, . t: CombineMapType, . a: Region<'tcx>, . b: Region<'tcx>, . origin: SubregionOrigin<'tcx>, . ) -> Region<'tcx> { . let vars = TwoRegions { a, b }; . if let Some(&c) = self.combine_map(t).get(&vars) { . return tcx.mk_region(ReVar(c)); . } . let a_universe = self.universe(a); . let b_universe = self.universe(b); . let c_universe = cmp::max(a_universe, b_universe); 5,915 ( 0.00%) let c = self.new_region_var(c_universe, MiscVariable(origin.span())); . self.combine_map(t).insert(vars, c); 1,183 ( 0.00%) self.undo_log.push(AddCombination(t, vars)); 5,915 ( 0.00%) let new_r = tcx.mk_region(ReVar(c)); 9,464 ( 0.00%) for old_r in [a, b] { 2,366 ( 0.00%) match t { 20,111 ( 0.00%) Glb => self.make_subregion(origin.clone(), new_r, old_r), . Lub => self.make_subregion(origin.clone(), old_r, new_r), . } . } . debug!("combine_vars() c={:?}", c); . new_r 10,647 ( 0.00%) } . . pub fn universe(&self, region: Region<'tcx>) -> ty::UniverseIndex { 127,712 ( 0.00%) match *region { . ty::ReStatic | ty::ReErased | ty::ReFree(..) | ty::ReEarlyBound(..) => { . ty::UniverseIndex::ROOT . } . ty::ReEmpty(ui) => ui, . ty::RePlaceholder(placeholder) => placeholder.universe, 17,122 ( 0.00%) ty::ReVar(vid) => self.var_universe(vid), . ty::ReLateBound(..) => bug!("universe(): encountered bound region {:?}", region), . } . } . 35,140 ( 0.00%) pub fn vars_since_snapshot( . &self, . value_count: usize, . ) -> (Range, Vec) { . let range = RegionVid::from(value_count)..RegionVid::from(self.unification_table.len()); 42,168 ( 0.00%) ( . range.clone(), . (range.start.index()..range.end.index()) . .map(|index| self.var_infos[ty::RegionVid::from(index)].origin) . .collect(), . ) 42,168 ( 0.00%) } . . 
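// [Editor's illustrative sketch -- not part of the profiled rustc source.]
// `combine_vars` above memoizes each (a, b) pair and expresses the LUB/GLB as
// edges on a fresh variable: for Lub it adds `a <= c` and `b <= c`, for Glb it
// adds `c <= a` and `c <= b`. A standalone toy version (all names hypothetical):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Var(u32);

#[derive(Clone, Copy)]
enum CombineKind {
    Lub,
    Glb,
}

#[derive(Default)]
struct RegionGraph {
    next_var: u32,
    edges: Vec<(Var, Var)>,               // (sub, sup) pairs
    memo: HashMap<(bool, Var, Var), Var>, // keyed on "is glb" + operands
}

impl RegionGraph {
    fn fresh_var(&mut self) -> Var {
        self.next_var += 1;
        Var(self.next_var - 1)
    }

    fn combine(&mut self, kind: CombineKind, a: Var, b: Var) -> Var {
        let key = (matches!(kind, CombineKind::Glb), a, b);
        if let Some(&c) = self.memo.get(&key) {
            return c; // reuse the variable created for this pair last time
        }
        let c = self.fresh_var();
        match kind {
            CombineKind::Lub => {
                self.edges.push((a, c));
                self.edges.push((b, c));
            }
            CombineKind::Glb => {
                self.edges.push((c, a));
                self.edges.push((c, b));
            }
        }
        self.memo.insert(key, c);
        c
    }
}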
/// See `InferCtxt::region_constraints_added_in_snapshot`. 20,202 ( 0.00%) pub fn region_constraints_added_in_snapshot(&self, mark: &Snapshot<'tcx>) -> Option { 6,734 ( 0.00%) self.undo_log . .region_constraints_in_snapshot(mark) 6,634 ( 0.00%) .map(|&elt| match elt { 1,706 ( 0.00%) AddConstraint(constraint) => Some(constraint.involves_placeholders()), . _ => None, . }) . .max() . .unwrap_or(None) 26,936 ( 0.00%) } . . #[inline] . fn unification_table(&mut self) -> super::UnificationTable<'_, 'tcx, RegionVidKey<'tcx>> { 325,276 ( 0.01%) ut::UnificationTable::with_log(&mut self.storage.unification_table, self.undo_log) . } . } . . impl fmt::Debug for RegionSnapshot { . fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { . write!(f, "RegionSnapshot") . } . } -- line 737 ---------------------------------------- -- line 750 ---------------------------------------- . match *self { . GenericKind::Param(ref p) => write!(f, "{}", p), . GenericKind::Projection(ref p) => write!(f, "{}", p), . } . } . } . . impl<'tcx> GenericKind<'tcx> { 834 ( 0.00%) pub fn to_ty(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> { 17,898 ( 0.00%) match *self { 1,029 ( 0.00%) GenericKind::Param(ref p) => p.to_ty(tcx), 226 ( 0.00%) GenericKind::Projection(ref p) => tcx.mk_projection(p.item_def_id, p.substs), . } 834 ( 0.00%) } . } . . impl<'tcx> VerifyBound<'tcx> { 7,632 ( 0.00%) pub fn must_hold(&self) -> bool { 10,704 ( 0.00%) match self { . VerifyBound::IfEq(..) => false, . VerifyBound::OutlivedBy(ty::ReStatic) => true, . VerifyBound::OutlivedBy(_) => false, . VerifyBound::IsEmpty => false, 2,040 ( 0.00%) VerifyBound::AnyBound(bs) => bs.iter().any(|b| b.must_hold()), . VerifyBound::AllBounds(bs) => bs.iter().all(|b| b.must_hold()), . } 8,904 ( 0.00%) } . 3,650 ( 0.00%) pub fn cannot_hold(&self) -> bool { 5,284 ( 0.00%) match self { . VerifyBound::IfEq(_, b) => b.cannot_hold(), . VerifyBound::IsEmpty => false, . VerifyBound::OutlivedBy(_) => false, 556 ( 0.00%) VerifyBound::AnyBound(bs) => bs.iter().all(|b| b.cannot_hold()), . VerifyBound::AllBounds(bs) => bs.iter().any(|b| b.cannot_hold()), . } 5,110 ( 0.00%) } . 1,356 ( 0.00%) pub fn or(self, vb: VerifyBound<'tcx>) -> VerifyBound<'tcx> { 1,808 ( 0.00%) if self.must_hold() || vb.cannot_hold() { . self 1,112 ( 0.00%) } else if self.cannot_hold() || vb.must_hold() { 696 ( 0.00%) vb . } else { 676 ( 0.00%) VerifyBound::AnyBound(vec![self, vb]) . } 1,652 ( 0.00%) } . } . . impl<'tcx> RegionConstraintData<'tcx> { . /// Returns `true` if this region constraint data contains no constraints, and `false` . /// otherwise. . pub fn is_empty(&self) -> bool { . let RegionConstraintData { constraints, member_constraints, verifys, givens } = self; 2,475 ( 0.00%) constraints.is_empty() . && member_constraints.is_empty() . && verifys.is_empty() . && givens.is_empty() . } . } . . impl<'tcx> Rollback> for RegionConstraintStorage<'tcx> { 283,650 ( 0.00%) fn reverse(&mut self, undo: UndoLog<'tcx>) { 226,920 ( 0.00%) self.rollback_undo_entry(undo) 283,650 ( 0.00%) } . } 3,301,331 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/set_len_on_drop.rs -------------------------------------------------------------------------------- Ir -- line 11 ---------------------------------------- . impl<'a> SetLenOnDrop<'a> { . #[inline] . pub(super) fn new(len: &'a mut usize) -> Self { . SetLenOnDrop { local_len: *len, len } . } . . #[inline] . 
pub(super) fn increment_len(&mut self, increment: usize) { 15,284,294 ( 0.25%) self.local_len += increment; . } . } . . impl Drop for SetLenOnDrop<'_> { . #[inline] . fn drop(&mut self) { 1,294,528 ( 0.02%) *self.len = self.local_len; . } . } 92,654 ( 0.00%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/hir/map/mod.rs -------------------------------------------------------------------------------- Ir -- line 17 ---------------------------------------- . use rustc_span::hygiene::MacroKind; . use rustc_span::source_map::Spanned; . use rustc_span::symbol::{kw, sym, Ident, Symbol}; . use rustc_span::Span; . use rustc_target::spec::abi::Abi; . use std::collections::VecDeque; . . fn fn_decl<'hir>(node: Node<'hir>) -> Option<&'hir FnDecl<'hir>> { 9,615 ( 0.00%) match node { . Node::Item(Item { kind: ItemKind::Fn(sig, _, _), .. }) . | Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(sig, _), .. }) . | Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(sig, _), .. }) => Some(&sig.decl), . Node::Expr(Expr { kind: ExprKind::Closure(_, fn_decl, ..), .. }) . | Node::ForeignItem(ForeignItem { kind: ForeignItemKind::Fn(fn_decl, ..), .. }) => { . Some(fn_decl) . } . _ => None, . } . } . . pub fn fn_sig<'hir>(node: Node<'hir>) -> Option<&'hir FnSig<'hir>> { 256 ( 0.00%) match &node { . Node::Item(Item { kind: ItemKind::Fn(sig, _, _), .. }) . | Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(sig, _), .. }) . | Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(sig, _), .. }) => Some(sig), . _ => None, . } . } . . pub fn associated_body<'hir>(node: Node<'hir>) -> Option { 127,617 ( 0.00%) match node { . Node::Item(Item { . kind: ItemKind::Const(_, body) | ItemKind::Static(.., body) | ItemKind::Fn(.., body), . .. . }) . | Node::TraitItem(TraitItem { . kind: . TraitItemKind::Const(_, Some(body)) | TraitItemKind::Fn(_, TraitFn::Provided(body)), . .. . }) . | Node::ImplItem(ImplItem { . kind: ImplItemKind::Const(_, body) | ImplItemKind::Fn(_, body), . .. . }) 22,408 ( 0.00%) | Node::Expr(Expr { kind: ExprKind::Closure(.., body, _, _), .. }) => Some(*body), . 570 ( 0.00%) Node::AnonConst(constant) => Some(constant.body), . . _ => None, . } . } . . fn is_body_owner<'hir>(node: Node<'hir>, hir_id: HirId) -> bool { . match associated_body(node) { . Some(b) => b.hir_id == hir_id, -- line 71 ---------------------------------------- -- line 83 ---------------------------------------- . pub struct ParentHirIterator<'hir> { . current_id: HirId, . map: Map<'hir>, . } . . impl<'hir> Iterator for ParentHirIterator<'hir> { . type Item = (HirId, Node<'hir>); . 16,177 ( 0.00%) fn next(&mut self) -> Option { 2,311 ( 0.00%) if self.current_id == CRATE_HIR_ID { . return None; . } . loop { . // There are nodes that do not have entries, so we need to skip them. . let parent_id = self.map.get_parent_node(self.current_id); . 6,930 ( 0.00%) if parent_id == self.current_id { . self.current_id = CRATE_HIR_ID; . return None; . } . 4,620 ( 0.00%) self.current_id = parent_id; 18,480 ( 0.00%) if let Some(node) = self.map.find(parent_id) { 9,240 ( 0.00%) return Some((parent_id, node)); . } . // If this `HirId` doesn't have an entry, skip it and look for its `parent_id`. . } 20,799 ( 0.00%) } . } . . /// An iterator that walks up the ancestor tree of a given `HirId`. . /// Constructed using `tcx.hir().parent_owner_iter(hir_id)`. . pub struct ParentOwnerIterator<'hir> { . current_id: HirId, . map: Map<'hir>, . 
} . . impl<'hir> Iterator for ParentOwnerIterator<'hir> { . type Item = (LocalDefId, OwnerNode<'hir>); . 142,830 ( 0.00%) fn next(&mut self) -> Option { 47,610 ( 0.00%) if self.current_id.local_id.index() != 0 { 8,452 ( 0.00%) self.current_id.local_id = ItemLocalId::new(0); 33,808 ( 0.00%) if let Some(node) = self.map.tcx.hir_owner(self.current_id.owner) { . return Some((self.current_id.owner, node.node)); . } . } 7,418 ( 0.00%) if self.current_id == CRATE_HIR_ID { . return None; . } . loop { . // There are nodes that do not have entries, so we need to skip them. . let parent_id = self.map.def_key(self.current_id.owner).parent; . . let parent_id = parent_id.map_or(CRATE_HIR_ID.owner, |local_def_index| { . let def_id = LocalDefId { local_def_index }; . self.map.local_def_id_to_hir_id(def_id).owner . }); 14,836 ( 0.00%) self.current_id = HirId::make_owner(parent_id); . . // If this `HirId` doesn't have an entry, skip it and look for its `parent_id`. 14,836 ( 0.00%) if let Some(node) = self.map.tcx.hir_owner(self.current_id.owner) { . return Some((self.current_id.owner, node.node)); . } . } 126,960 ( 0.00%) } . } . . impl<'hir> Map<'hir> { 252 ( 0.00%) pub fn krate(&self) -> &'hir Crate<'hir> { 36 ( 0.00%) self.tcx.hir_crate(()) 288 ( 0.00%) } . 7 ( 0.00%) pub fn root_module(&self) -> &'hir Mod<'hir> { 1 ( 0.00%) match self.tcx.hir_owner(CRATE_DEF_ID).map(|o| o.node) { . Some(OwnerNode::Crate(item)) => item, . _ => bug!(), . } 8 ( 0.00%) } . . pub fn items(&self) -> impl Iterator> + 'hir { . let krate = self.krate(); . krate.owners.iter().filter_map(|owner| match owner.as_ref()?.node() { . OwnerNode::Item(item) => Some(item), . _ => None, . }) . } -- line 169 ---------------------------------------- -- line 184 ---------------------------------------- . . #[inline] . pub fn def_path_hash(self, def_id: LocalDefId) -> DefPathHash { . // Accessing the DefPathHash is ok, it is incr. comp. stable. . self.tcx.untracked_resolutions.definitions.def_path_hash(def_id) . } . . #[inline] 376,488 ( 0.01%) pub fn local_def_id(&self, hir_id: HirId) -> LocalDefId { 38,853 ( 0.00%) self.opt_local_def_id(hir_id).unwrap_or_else(|| { . bug!( . "local_def_id: no entry for `{:?}`, which has a map of `{:?}`", . hir_id, . self.find(hir_id) . ) . }) 282,366 ( 0.00%) } . . #[inline] 103,608 ( 0.00%) pub fn opt_local_def_id(&self, hir_id: HirId) -> Option { 47,975 ( 0.00%) if hir_id.local_id == ItemLocalId::new(0) { . Some(hir_id.owner) . } else { 44,316 ( 0.00%) self.tcx . .hir_owner_nodes(hir_id.owner)? . .local_id_to_def_id . .get(&hir_id.local_id) . .copied() . } 116,559 ( 0.00%) } . . #[inline] 738 ( 0.00%) pub fn local_def_id_to_hir_id(&self, def_id: LocalDefId) -> HirId { . // FIXME(#85914) is this access safe for incr. comp.? 10,904 ( 0.00%) self.tcx.untracked_resolutions.definitions.local_def_id_to_hir_id(def_id) 1,476 ( 0.00%) } . 7 ( 0.00%) pub fn iter_local_def_id(&self) -> impl Iterator + '_ { . // Create a dependency to the crate to be sure we reexcute this when the amount of . // definitions change. 1 ( 0.00%) self.tcx.ensure().hir_crate(()); . self.tcx.untracked_resolutions.definitions.iter_local_def_id() 9 ( 0.00%) } . 31,458 ( 0.00%) pub fn opt_def_kind(&self, local_def_id: LocalDefId) -> Option { . let hir_id = self.local_def_id_to_hir_id(local_def_id); 58,422 ( 0.00%) let def_kind = match self.find(hir_id)? { 3,796 ( 0.00%) Node::Item(item) => match item.kind { . ItemKind::Static(..) => DefKind::Static, . ItemKind::Const(..) => DefKind::Const, . ItemKind::Fn(..) => DefKind::Fn, . ItemKind::Macro(..) 
=> DefKind::Macro(MacroKind::Bang), . ItemKind::Mod(..) => DefKind::Mod, . ItemKind::OpaqueTy(..) => DefKind::OpaqueTy, . ItemKind::TyAlias(..) => DefKind::TyAlias, . ItemKind::Enum(..) => DefKind::Enum, -- line 239 ---------------------------------------- -- line 247 ---------------------------------------- . ItemKind::GlobalAsm(..) => DefKind::GlobalAsm, . ItemKind::Impl { .. } => DefKind::Impl, . }, . Node::ForeignItem(item) => match item.kind { . ForeignItemKind::Fn(..) => DefKind::Fn, . ForeignItemKind::Static(..) => DefKind::Static, . ForeignItemKind::Type => DefKind::ForeignTy, . }, 48 ( 0.00%) Node::TraitItem(item) => match item.kind { . TraitItemKind::Const(..) => DefKind::AssocConst, . TraitItemKind::Fn(..) => DefKind::AssocFn, . TraitItemKind::Type(..) => DefKind::AssocTy, . }, 808 ( 0.00%) Node::ImplItem(item) => match item.kind { . ImplItemKind::Const(..) => DefKind::AssocConst, . ImplItemKind::Fn(..) => DefKind::AssocFn, . ImplItemKind::TyAlias(..) => DefKind::AssocTy, . }, . Node::Variant(_) => DefKind::Variant, . Node::Ctor(variant_data) => { . // FIXME(eddyb) is this even possible, if we have a `Node::Ctor`? 2,540 ( 0.00%) assert_ne!(variant_data.ctor_hir_id(), None); . 3,546 ( 0.00%) let ctor_of = match self.find(self.get_parent_node(hir_id)) { . Some(Node::Item(..)) => def::CtorOf::Struct, . Some(Node::Variant(..)) => def::CtorOf::Variant, . _ => unreachable!(), . }; 1,016 ( 0.00%) DefKind::Ctor(ctor_of, def::CtorKind::from_hir(variant_data)) 2,032 ( 0.00%) } . Node::AnonConst(_) => { 52 ( 0.00%) let inline = match self.find(self.get_parent_node(hir_id)) { . Some(Node::Expr(&Expr { . kind: ExprKind::ConstBlock(ref anon_const), .. . })) if anon_const.hir_id == hir_id => true, . _ => false, . }; . if inline { DefKind::InlineConst } else { DefKind::AnonConst } . } . Node::Field(_) => DefKind::Field, 225 ( 0.00%) Node::Expr(expr) => match expr.kind { . ExprKind::Closure(.., None) => DefKind::Closure, . ExprKind::Closure(.., Some(_)) => DefKind::Generator, . _ => bug!("def_kind: unsupported node: {}", self.node_to_string(hir_id)), . }, 1,203 ( 0.00%) Node::GenericParam(param) => match param.kind { . GenericParamKind::Lifetime { .. } => DefKind::LifetimeParam, . GenericParamKind::Type { .. } => DefKind::TyParam, . GenericParamKind::Const { .. } => DefKind::ConstParam, . }, . Node::Crate(_) => DefKind::Mod, . Node::Stmt(_) . | Node::PathSegment(_) . | Node::Ty(_) -- line 300 ---------------------------------------- -- line 304 ---------------------------------------- . | Node::Binding(_) . | Node::Local(_) . | Node::Param(_) . | Node::Arm(_) . | Node::Lifetime(_) . | Node::Visibility(_) . | Node::Block(_) => return None, . }; 15,788 ( 0.00%) Some(def_kind) 44,940 ( 0.00%) } . . pub fn def_kind(&self, local_def_id: LocalDefId) -> DefKind { . self.opt_def_kind(local_def_id) . .unwrap_or_else(|| bug!("def_kind: unsupported node: {:?}", local_def_id)) . } . 2,744,632 ( 0.05%) pub fn find_parent_node(&self, id: HirId) -> Option { 343,079 ( 0.01%) if id.local_id == ItemLocalId::from_u32(0) { . Some(self.tcx.hir_owner_parent(id.owner)) . } else { 664,660 ( 0.01%) let owner = self.tcx.hir_owner_nodes(id.owner)?; 332,330 ( 0.01%) let node = owner.nodes[id.local_id].as_ref()?; 996,990 ( 0.02%) let hir_id = HirId { owner: id.owner, local_id: node.parent }; . Some(hir_id) . } 2,744,632 ( 0.05%) } . 8,539 ( 0.00%) pub fn get_parent_node(&self, hir_id: HirId) -> HirId { 1,902,722 ( 0.03%) self.find_parent_node(hir_id).unwrap() 17,078 ( 0.00%) } . . 
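// [Editor's illustrative sketch -- not part of the profiled rustc source.]
// `get_parent_node` above exposes one parent pointer per HIR node, and the
// crate root is its own parent. Ancestor walks (like `ParentHirIterator`
// earlier in this file) therefore loop until the parent equals the current id.
// A toy version over a plain parent array (hypothetical types):

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct NodeId(usize);

struct ParentMap {
    parents: Vec<NodeId>, // parents[i] == NodeId(i) marks the root
}

impl ParentMap {
    fn parent(&self, id: NodeId) -> NodeId {
        self.parents[id.0]
    }

    fn ancestors(&self, mut id: NodeId) -> Vec<NodeId> {
        let mut out = Vec::new();
        loop {
            let parent = self.parent(id);
            if parent == id {
                break; // reached the root
            }
            out.push(parent);
            id = parent;
        }
        out
    }
}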
/// Retrieves the `Node` corresponding to `id`, returning `None` if cannot be found. 1,172,752 ( 0.02%) pub fn find(&self, id: HirId) -> Option> { 146,594 ( 0.00%) if id.local_id == ItemLocalId::from_u32(0) { . let owner = self.tcx.hir_owner(id.owner)?; 76,975 ( 0.00%) Some(owner.node.into()) . } else { 138,132 ( 0.00%) let owner = self.tcx.hir_owner_nodes(id.owner)?; 69,066 ( 0.00%) let node = owner.nodes[id.local_id].as_ref()?; 207,198 ( 0.00%) Some(node.node) . } 1,172,752 ( 0.02%) } . . /// Retrieves the `Node` corresponding to `id`, returning `None` if cannot be found. . #[inline] . pub fn find_by_def_id(&self, id: LocalDefId) -> Option> { 26,467 ( 0.00%) self.find(self.local_def_id_to_hir_id(id)) . } . . /// Retrieves the `Node` corresponding to `id`, panicking if it cannot be found. 114,786 ( 0.00%) pub fn get(&self, id: HirId) -> Node<'hir> { 55,739 ( 0.00%) self.find(id).unwrap_or_else(|| bug!("couldn't find hir id {} in the HIR map", id)) 76,524 ( 0.00%) } . . /// Retrieves the `Node` corresponding to `id`, panicking if it cannot be found. . #[inline] . pub fn get_by_def_id(&self, id: LocalDefId) -> Node<'hir> { . self.find_by_def_id(id).unwrap_or_else(|| bug!("couldn't find {:?} in the HIR map", id)) . } . 4,138 ( 0.00%) pub fn get_if_local(&self, id: DefId) -> Option> { 2,069 ( 0.00%) id.as_local().and_then(|id| self.find(self.local_def_id_to_hir_id(id))) 4,138 ( 0.00%) } . . pub fn get_generics(&self, id: LocalDefId) -> Option<&'hir Generics<'hir>> { . let node = self.tcx.hir_owner(id)?; . match node.node { . OwnerNode::ImplItem(impl_item) => Some(&impl_item.generics), . OwnerNode::TraitItem(trait_item) => Some(&trait_item.generics), . OwnerNode::Item(Item { . kind: -- line 374 ---------------------------------------- -- line 381 ---------------------------------------- . | ItemKind::TraitAlias(generics, _) . | ItemKind::Impl(Impl { generics, .. }), . .. . }) => Some(generics), . _ => None, . } . } . 188,699 ( 0.00%) pub fn item(&self, id: ItemId) -> &'hir Item<'hir> { 53,914 ( 0.00%) self.tcx.hir_owner(id.def_id).unwrap().node.expect_item() 215,656 ( 0.00%) } . 4,536 ( 0.00%) pub fn trait_item(&self, id: TraitItemId) -> &'hir TraitItem<'hir> { 1,296 ( 0.00%) self.tcx.hir_owner(id.def_id).unwrap().node.expect_trait_item() 5,184 ( 0.00%) } . 166,355 ( 0.00%) pub fn impl_item(&self, id: ImplItemId) -> &'hir ImplItem<'hir> { 47,530 ( 0.00%) self.tcx.hir_owner(id.def_id).unwrap().node.expect_impl_item() 190,120 ( 0.00%) } . . pub fn foreign_item(&self, id: ForeignItemId) -> &'hir ForeignItem<'hir> { . self.tcx.hir_owner(id.def_id).unwrap().node.expect_foreign_item() . } . 199,430 ( 0.00%) pub fn body(&self, id: BodyId) -> &'hir Body<'hir> { 56,980 ( 0.00%) self.tcx.hir_owner_nodes(id.hir_id.owner).unwrap().bodies[&id.hir_id.local_id] 227,920 ( 0.00%) } . 5,769 ( 0.00%) pub fn fn_decl_by_hir_id(&self, hir_id: HirId) -> Option<&'hir FnDecl<'hir>> { 5,769 ( 0.00%) if let Some(node) = self.find(hir_id) { . fn_decl(node) . } else { . bug!("no node for hir_id `{}`", hir_id) . } 3,846 ( 0.00%) } . 102 ( 0.00%) pub fn fn_sig_by_hir_id(&self, hir_id: HirId) -> Option<&'hir FnSig<'hir>> { 102 ( 0.00%) if let Some(node) = self.find(hir_id) { . fn_sig(node) . } else { . bug!("no node for hir_id `{}`", hir_id) . } 102 ( 0.00%) } . . pub fn enclosing_body_owner(&self, hir_id: HirId) -> HirId { . for (parent, _) in self.parent_iter(hir_id) { . if let Some(body) = self.maybe_body_owned_by(parent) { . return self.body_owner(body); . } . } . . 
bug!("no `enclosing_body_owner` for hir_id `{}`", hir_id); . } . . /// Returns the `HirId` that corresponds to the definition of . /// which this is the body of, i.e., a `fn`, `const` or `static` . /// item (possibly associated), a closure, or a `hir::AnonConst`. 132,088 ( 0.00%) pub fn body_owner(&self, BodyId { hir_id }: BodyId) -> HirId { . let parent = self.get_parent_node(hir_id); 99,066 ( 0.00%) assert!(self.find(parent).map_or(false, |n| is_body_owner(n, hir_id))); . parent 132,088 ( 0.00%) } . 41,375 ( 0.00%) pub fn body_owner_def_id(&self, id: BodyId) -> LocalDefId { 64,755 ( 0.00%) self.local_def_id(self.body_owner(id)) 41,375 ( 0.00%) } . . /// Given a `HirId`, returns the `BodyId` associated with it, . /// if the node is a body owner, otherwise returns `None`. 6,811 ( 0.00%) pub fn maybe_body_owned_by(&self, hir_id: HirId) -> Option { 22,091 ( 0.00%) self.find(hir_id).map(associated_body).flatten() 20,433 ( 0.00%) } . . /// Given a body owner's id, returns the `BodyId` associated with it. 14,922 ( 0.00%) pub fn body_owned_by(&self, id: HirId) -> BodyId { . self.maybe_body_owned_by(id).unwrap_or_else(|| { . span_bug!( . self.span(id), . "body_owned_by: {} has no associated body", . self.node_to_string(id) . ); . }) 11,606 ( 0.00%) } . 746 ( 0.00%) pub fn body_param_names(&self, id: BodyId) -> impl Iterator + 'hir { 6,554 ( 0.00%) self.body(id).params.iter().map(|arg| match arg.pat.kind { 2,158 ( 0.00%) PatKind::Binding(_, _, ident, _) => ident, . _ => Ident::empty(), . }) 2,238 ( 0.00%) } . . /// Returns the `BodyOwnerKind` of this `LocalDefId`. . /// . /// Panics if `LocalDefId` does not have an associated body. 24,819 ( 0.00%) pub fn body_owner_kind(&self, id: HirId) -> BodyOwnerKind { 64,829 ( 0.00%) match self.get(id) { . Node::Item(&Item { kind: ItemKind::Const(..), .. }) . | Node::TraitItem(&TraitItem { kind: TraitItemKind::Const(..), .. }) . | Node::ImplItem(&ImplItem { kind: ImplItemKind::Const(..), .. }) . | Node::AnonConst(_) => BodyOwnerKind::Const, . Node::Ctor(..) . | Node::Item(&Item { kind: ItemKind::Fn(..), .. }) . | Node::TraitItem(&TraitItem { kind: TraitItemKind::Fn(..), .. }) . | Node::ImplItem(&ImplItem { kind: ImplItemKind::Fn(..), .. }) => BodyOwnerKind::Fn, . Node::Item(&Item { kind: ItemKind::Static(_, m, _), .. }) => BodyOwnerKind::Static(m), . Node::Expr(&Expr { kind: ExprKind::Closure(..), .. }) => BodyOwnerKind::Closure, . node => bug!("{:#?} is not a body node", node), . } 24,819 ( 0.00%) } . . /// Returns the `ConstContext` of the body associated with this `LocalDefId`. . /// . /// Panics if `LocalDefId` does not have an associated body. . /// . /// This should only be used for determining the context of a body, a return . /// value of `Some` does not always suggest that the owner of the body is `const`, . /// just that it has to be checked as if it were. 26,944 ( 0.00%) pub fn body_const_context(&self, did: LocalDefId) -> Option { . let hir_id = self.local_def_id_to_hir_id(did); 40,416 ( 0.00%) let ccx = match self.body_owner_kind(hir_id) { . BodyOwnerKind::Const => ConstContext::Const, . BodyOwnerKind::Static(mt) => ConstContext::Static(mt), . 2,984 ( 0.00%) BodyOwnerKind::Fn if self.tcx.is_constructor(did.to_def_id()) => return None, 2,984 ( 0.00%) BodyOwnerKind::Fn if self.tcx.is_const_fn_raw(did.to_def_id()) => ConstContext::ConstFn, . BodyOwnerKind::Fn 5,968 ( 0.00%) if self.tcx.has_attr(did.to_def_id(), sym::default_method_body_is_const) => . { . ConstContext::ConstFn . } . BodyOwnerKind::Fn | BodyOwnerKind::Closure => return None, . }; . . 
Some(ccx) 30,312 ( 0.00%) } . . /// Returns an iterator of the `DefId`s for all body-owners in this . /// crate. If you would prefer to iterate over the bodies . /// themselves, you can do `self.hir().krate().body_ids.iter()`. 21 ( 0.00%) pub fn body_owners(self) -> impl Iterator + 'hir { 3 ( 0.00%) self.krate() . .owners . .iter_enumerated() . .flat_map(move |(owner, owner_info)| { 13,437 ( 0.00%) let bodies = &owner_info.as_ref()?.nodes.bodies; . Some(bodies.iter().map(move |&(local_id, _)| { 51 ( 0.00%) let hir_id = HirId { owner, local_id }; . let body_id = BodyId { hir_id }; 8,097 ( 0.00%) self.body_owner_def_id(body_id) . })) . }) . .flatten() 15 ( 0.00%) } . 10 ( 0.00%) pub fn par_body_owners(self, f: F) { . use rustc_data_structures::sync::{par_iter, ParallelIterator}; . #[cfg(parallel_compiler)] . use rustc_rayon::iter::IndexedParallelIterator; . 3 ( 0.00%) par_iter(&self.krate().owners.raw).enumerate().for_each(|(owner, owner_info)| { . let owner = LocalDefId::new(owner); 26,874 ( 0.00%) if let Some(owner_info) = owner_info { . par_iter(owner_info.nodes.bodies.range(..)).for_each(|(local_id, _)| { . let hir_id = HirId { owner, local_id: *local_id }; . let body_id = BodyId { hir_id }; 7,425 ( 0.00%) f(self.body_owner_def_id(body_id)) . }) . } . }); 8 ( 0.00%) } . 230 ( 0.00%) pub fn ty_param_owner(&self, id: HirId) -> LocalDefId { 82 ( 0.00%) match self.get(id) { . Node::Item(&Item { kind: ItemKind::Trait(..) | ItemKind::TraitAlias(..), .. }) => { . id.expect_owner() . } . Node::GenericParam(_) => self.get_parent_item(id), . _ => bug!("ty_param_owner: {} not a type parameter", self.node_to_string(id)), . } 161 ( 0.00%) } . 770 ( 0.00%) pub fn ty_param_name(&self, id: HirId) -> Symbol { 301 ( 0.00%) match self.get(id) { . Node::Item(&Item { kind: ItemKind::Trait(..) | ItemKind::TraitAlias(..), .. }) => { . kw::SelfUpper . } 112 ( 0.00%) Node::GenericParam(param) => param.name.ident().name, . _ => bug!("ty_param_name: {} not a type parameter", self.node_to_string(id)), . } 462 ( 0.00%) } . 855 ( 0.00%) pub fn trait_impls(&self, trait_did: DefId) -> &'hir [LocalDefId] { 95 ( 0.00%) self.tcx.all_local_trait_impls(()).get(&trait_did).map_or(&[], |xs| &xs[..]) 855 ( 0.00%) } . . /// Gets the attributes on the crate. This is preferable to . /// invoking `krate.attrs` because it registers a tighter . /// dep-graph access. . pub fn krate_attrs(&self) -> &'hir [ast::Attribute] { 20 ( 0.00%) self.attrs(CRATE_HIR_ID) . } . 735 ( 0.00%) pub fn get_module(&self, module: LocalDefId) -> (&'hir Mod<'hir>, Span, HirId) { . let hir_id = HirId::make_owner(module); 525 ( 0.00%) match self.tcx.hir_owner(module).map(|o| o.node) { 186 ( 0.00%) Some(OwnerNode::Item(&Item { span, kind: ItemKind::Mod(ref m), .. })) => { . (m, span, hir_id) . } 36 ( 0.00%) Some(OwnerNode::Crate(item)) => (item, item.inner, hir_id), . node => panic!("not a module: {:?}", node), . } 945 ( 0.00%) } . . /// Walks the contents of a crate. See also `Crate::visit_all_items`. 11 ( 0.00%) pub fn walk_toplevel_module(self, visitor: &mut impl Visitor<'hir>) { 35 ( 0.00%) let (top_mod, span, hir_id) = self.get_module(CRATE_DEF_ID); . visitor.visit_mod(top_mod, span, hir_id); 8 ( 0.00%) } . . /// Walks the attributes in a crate. 20 ( 0.00%) pub fn walk_attributes(self, visitor: &mut impl Visitor<'hir>) { 2 ( 0.00%) let krate = self.krate(); . for (owner, info) in krate.owners.iter_enumerated() { 17,916 ( 0.00%) if let Some(info) = info { . for (local_id, attrs) in info.attrs.map.iter() { . let id = HirId { owner, local_id: *local_id }; . 
for a in *attrs { 20,060 ( 0.00%) visitor.visit_attribute(id, a) . } . } . } . } 16 ( 0.00%) } . . /// Visits all items in the crate in some deterministic (but . /// unspecified) order. If you just need to process every item, . /// but don't care about nesting, this method is the best choice. . /// . /// If you do care about nesting -- usually because your algorithm . /// follows lexical scoping rules -- then you want a different . /// approach. You should override `visit_nested_item` in your . /// visitor and then call `intravisit::walk_crate` instead. 152 ( 0.00%) pub fn visit_all_item_likes(&self, visitor: &mut V) . where . V: itemlikevisit::ItemLikeVisitor<'hir>, . { 21 ( 0.00%) let krate = self.krate(); 71 ( 0.00%) for owner in krate.owners.iter().filter_map(Option::as_ref) { 64,152 ( 0.00%) match owner.node() { 44,603 ( 0.00%) OwnerNode::Item(item) => visitor.visit_item(item), . OwnerNode::ForeignItem(item) => visitor.visit_foreign_item(item), 2,424 ( 0.00%) OwnerNode::ImplItem(item) => visitor.visit_impl_item(item), . OwnerNode::TraitItem(item) => visitor.visit_trait_item(item), . OwnerNode::Crate(_) => {} . } . } 152 ( 0.00%) } . . /// A parallel version of `visit_all_item_likes`. . pub fn par_visit_all_item_likes(&self, visitor: &V) . where . V: itemlikevisit::ParItemLikeVisitor<'hir> + Sync + Send, . { 1 ( 0.00%) let krate = self.krate(); 2 ( 0.00%) par_for_each_in(&krate.owners.raw, |owner| match owner.as_ref().map(OwnerInfo::node) { 949 ( 0.00%) Some(OwnerNode::Item(item)) => visitor.visit_item(item), . Some(OwnerNode::ForeignItem(item)) => visitor.visit_foreign_item(item), 808 ( 0.00%) Some(OwnerNode::ImplItem(item)) => visitor.visit_impl_item(item), 24 ( 0.00%) Some(OwnerNode::TraitItem(item)) => visitor.visit_trait_item(item), . Some(OwnerNode::Crate(_)) | None => {} . }) . } . 2,752 ( 0.00%) pub fn visit_item_likes_in_module(&self, module: LocalDefId, visitor: &mut V) . where . V: ItemLikeVisitor<'hir>, . { 352 ( 0.00%) let module = self.tcx.hir_module_items(module); . 384 ( 0.00%) for id in module.items.iter() { 42,705 ( 0.00%) visitor.visit_item(self.item(*id)); . } . 384 ( 0.00%) for id in module.trait_items.iter() { 960 ( 0.00%) visitor.visit_trait_item(self.trait_item(*id)); . } . 384 ( 0.00%) for id in module.impl_items.iter() { 31,512 ( 0.00%) visitor.visit_impl_item(self.impl_item(*id)); . } . 384 ( 0.00%) for id in module.foreign_items.iter() { . visitor.visit_foreign_item(self.foreign_item(*id)); . } 2,816 ( 0.00%) } . 46 ( 0.00%) pub fn for_each_module(&self, f: impl Fn(LocalDefId)) { . let mut queue = VecDeque::new(); . queue.push_back(CRATE_DEF_ID); . 256 ( 0.00%) while let Some(id) = queue.pop_front() { . f(id); 96 ( 0.00%) let items = self.tcx.hir_module_items(id); 1,024 ( 0.00%) queue.extend(items.submodules.iter().copied()) . } 40 ( 0.00%) } . . #[cfg(not(parallel_compiler))] . #[inline] . pub fn par_for_each_module(&self, f: impl Fn(LocalDefId)) { 4 ( 0.00%) self.for_each_module(f) . } . . #[cfg(parallel_compiler)] . pub fn par_for_each_module(&self, f: impl Fn(LocalDefId) + Sync) { . use rustc_data_structures::sync::{par_iter, ParallelIterator}; . par_iter_submodules(self.tcx, CRATE_DEF_ID, &f); . . fn par_iter_submodules(tcx: TyCtxt<'_>, module: LocalDefId, f: &F) -- line 699 ---------------------------------------- -- line 703 ---------------------------------------- . (*f)(module); . let items = tcx.hir_module_items(module); . par_iter(&items.submodules[..]).for_each(|&sm| par_iter_submodules(tcx, sm, f)); . } . } . . 
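// [Editor's illustrative sketch -- not part of the profiled rustc source.]
// `for_each_module` above is a plain breadth-first walk over the module tree:
// a work queue seeded with the crate root, where visiting a module pushes its
// submodules back onto the queue. A standalone sketch (hypothetical types):

use std::collections::VecDeque;

struct Module {
    submodules: Vec<usize>, // indices into the `modules` slice
}

fn for_each_module(modules: &[Module], root: usize, mut f: impl FnMut(usize)) {
    let mut queue = VecDeque::new();
    queue.push_back(root);
    while let Some(id) = queue.pop_front() {
        f(id);
        queue.extend(modules[id].submodules.iter().copied());
    }
}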
/// Returns an iterator for the nodes in the ancestor tree of the `current_id` . /// until the crate root is reached. Prefer this over your own loop using `get_parent_node`. 2,257 ( 0.00%) pub fn parent_iter(self, current_id: HirId) -> ParentHirIterator<'hir> { . ParentHirIterator { current_id, map: self } 9,028 ( 0.00%) } . . /// Returns an iterator for the nodes in the ancestor tree of the `current_id` . /// until the crate root is reached. Prefer this over your own loop using `get_parent_node`. . pub fn parent_owner_iter(self, current_id: HirId) -> ParentOwnerIterator<'hir> { . ParentOwnerIterator { current_id, map: self } 46,527 ( 0.00%) } . . /// Checks if the node is left-hand side of an assignment. . pub fn is_lhs(&self, id: HirId) -> bool { . match self.find(self.get_parent_node(id)) { . Some(Node::Expr(expr)) => match expr.kind { . ExprKind::Assign(lhs, _rhs, _span) => lhs.hir_id == id, . _ => false, . }, -- line 727 ---------------------------------------- -- line 792 ---------------------------------------- . } . None . } . . /// Retrieves the `HirId` for `id`'s parent item, or `id` itself if no . /// parent item is in this map. The "parent item" is the closest parent node . /// in the HIR which is recorded by the map and is an item, either an item . /// in a module, trait, or impl. 13,971 ( 0.00%) pub fn get_parent_item(&self, hir_id: HirId) -> LocalDefId { 125,932 ( 0.00%) if let Some((def_id, _node)) = self.parent_owner_iter(hir_id).next() { . def_id . } else { . CRATE_DEF_ID . } 27,942 ( 0.00%) } . . /// Returns the `HirId` of `id`'s nearest module parent, or `id` itself if no . /// module parent is in this map. . pub(super) fn get_module_parent_node(&self, hir_id: HirId) -> LocalDefId { 13,536 ( 0.00%) for (def_id, node) in self.parent_owner_iter(hir_id) { 7,502 ( 0.00%) if let OwnerNode::Item(&Item { kind: ItemKind::Mod(_), .. }) = node { . return def_id; . } . } . CRATE_DEF_ID . } . . /// When on an if expression, a match arm tail expression or a match arm, give back . /// the enclosing `if` or `match` expression. -- line 820 ---------------------------------------- -- line 885 ---------------------------------------- . } . } . bug!( . "expected foreign mod or inlined parent, found {}", . self.node_to_string(HirId::make_owner(parent)) . ) . } . 47,327 ( 0.00%) pub fn expect_item(&self, id: LocalDefId) -> &'hir Item<'hir> { 20,283 ( 0.00%) match self.tcx.hir_owner(id) { . Some(Owner { node: OwnerNode::Item(item), .. }) => item, . _ => bug!("expected item, found {}", self.node_to_string(HirId::make_owner(id))), . } 54,088 ( 0.00%) } . 11,312 ( 0.00%) pub fn expect_impl_item(&self, id: LocalDefId) -> &'hir ImplItem<'hir> { 4,848 ( 0.00%) match self.tcx.hir_owner(id) { . Some(Owner { node: OwnerNode::ImplItem(item), .. }) => item, . _ => bug!("expected impl item, found {}", self.node_to_string(HirId::make_owner(id))), . } 12,928 ( 0.00%) } . 336 ( 0.00%) pub fn expect_trait_item(&self, id: LocalDefId) -> &'hir TraitItem<'hir> { 144 ( 0.00%) match self.tcx.hir_owner(id) { . Some(Owner { node: OwnerNode::TraitItem(item), .. }) => item, . _ => bug!("expected trait item, found {}", self.node_to_string(HirId::make_owner(id))), . } 384 ( 0.00%) } . . pub fn expect_variant(&self, id: HirId) -> &'hir Variant<'hir> { . match self.find(id) { . Some(Node::Variant(variant)) => variant, . _ => bug!("expected variant, found {}", self.node_to_string(id)), . } . } . -- line 920 ---------------------------------------- -- line 929 ---------------------------------------- . . 
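// [Editor's illustrative sketch -- not part of the profiled rustc source.]
// The `expect_item` / `expect_impl_item` / `expect_trait_item` accessors above
// all share one shape: look the owner up, pattern-match on the node kind the
// caller expects, and raise an internal compiler error otherwise. A toy
// equivalent using `panic!` in place of `bug!` (hypothetical types):

#[derive(Debug)]
enum Node {
    Item(String),
    ImplItem(String),
    TraitItem(String),
}

fn expect_item(nodes: &[Node], idx: usize) -> &str {
    match &nodes[idx] {
        Node::Item(name) => name,
        other => panic!("expected item, found {:?}", other),
    }
}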
pub fn expect_expr(&self, id: HirId) -> &'hir Expr<'hir> { . match self.find(id) { . Some(Node::Expr(expr)) => expr, . _ => bug!("expected expr, found {}", self.node_to_string(id)), . } . } . 16,434 ( 0.00%) pub fn opt_name(&self, id: HirId) -> Option { 5,590 ( 0.00%) Some(match self.get(id) { . Node::Item(i) => i.ident.name, . Node::ForeignItem(fi) => fi.ident.name, . Node::ImplItem(ii) => ii.ident.name, . Node::TraitItem(ti) => ti.ident.name, . Node::Variant(v) => v.ident.name, . Node::Field(f) => f.ident.name, . Node::Lifetime(lt) => lt.name.ident().name, 5,310 ( 0.00%) Node::GenericParam(param) => param.name.ident().name, 112 ( 0.00%) Node::Binding(&Pat { kind: PatKind::Binding(_, _, l, _), .. }) => l.name, . Node::Ctor(..) => self.name(HirId::make_owner(self.get_parent_item(id))), . _ => return None, . }) 9,130 ( 0.00%) } . 14,384 ( 0.00%) pub fn name(&self, id: HirId) -> Symbol { 5,534 ( 0.00%) match self.opt_name(id) { . Some(name) => name, . None => bug!("no name for {}", self.node_to_string(id)), . } 10,788 ( 0.00%) } . . /// Given a node ID, gets a list of attributes associated with the AST . /// corresponding to the node-ID. 3,739,232 ( 0.06%) pub fn attrs(&self, id: HirId) -> &'hir [ast::Attribute] { 534,176 ( 0.01%) self.tcx.hir_attrs(id.owner).get(id.local_id) 4,273,408 ( 0.07%) } . . /// Gets the span of the definition of the specified HIR node. . /// This is used by `tcx.get_span` 10,092 ( 0.00%) pub fn span(&self, hir_id: HirId) -> Span { 117,088 ( 0.00%) self.opt_span(hir_id) . .unwrap_or_else(|| bug!("hir::map::Map::span: id not in map: {:?}", hir_id)) 10,092 ( 0.00%) } . 252,035 ( 0.00%) pub fn opt_span(&self, hir_id: HirId) -> Option { 252,035 ( 0.00%) let span = match self.find(hir_id)? { . Node::Param(param) => param.span, 2,234 ( 0.00%) Node::Item(item) => match &item.kind { . ItemKind::Fn(sig, _, _) => sig.span, 2,098 ( 0.00%) _ => item.span, . }, . Node::ForeignItem(foreign_item) => foreign_item.span, 418 ( 0.00%) Node::TraitItem(trait_item) => match &trait_item.kind { 366 ( 0.00%) TraitItemKind::Fn(sig, _) => sig.span, 52 ( 0.00%) _ => trait_item.span, . }, 5,972 ( 0.00%) Node::ImplItem(impl_item) => match &impl_item.kind { 5,808 ( 0.00%) ImplItemKind::Fn(sig, _) => sig.span, 164 ( 0.00%) _ => impl_item.span, . }, 1,380 ( 0.00%) Node::Variant(variant) => variant.span, 1,026 ( 0.00%) Node::Field(field) => field.span, 294 ( 0.00%) Node::AnonConst(constant) => self.body(constant.body).value.span, . Node::Expr(expr) => expr.span, . Node::Stmt(stmt) => stmt.span, . Node::PathSegment(seg) => seg.ident.span, . Node::Ty(ty) => ty.span, . Node::TraitRef(tr) => tr.path.span, . Node::Binding(pat) => pat.span, . Node::Pat(pat) => pat.span, . Node::Arm(arm) => arm.span, . Node::Block(block) => block.span, 3,949 ( 0.00%) Node::Ctor(..) => match self.find(self.get_parent_node(hir_id))? { . Node::Item(item) => item.span, . Node::Variant(variant) => variant.span, . _ => unreachable!(), . }, . Node::Lifetime(lifetime) => lifetime.span, . Node::GenericParam(param) => param.span, . Node::Visibility(&Spanned { . node: VisibilityKind::Restricted { ref path, .. }, . .. . }) => path.span, . Node::Infer(i) => i.span, . Node::Visibility(v) => bug!("unexpected Visibility {:?}", v), . Node::Local(local) => local.span, 4 ( 0.00%) Node::Crate(item) => item.inner, . }; . Some(span) 360,050 ( 0.01%) } . . /// Like `hir.span()`, but includes the body of function items . /// (instead of just the function header) . pub fn span_with_body(&self, hir_id: HirId) -> Span { . match self.find(hir_id) { . 
Some(Node::TraitItem(item)) => item.span, . Some(Node::ImplItem(impl_item)) => impl_item.span, . Some(Node::Item(item)) => item.span, . Some(_) => self.span(hir_id), . _ => bug!("hir::map::Map::span_with_body: id not in map: {:?}", hir_id), . } . } . 1,076 ( 0.00%) pub fn span_if_local(&self, id: DefId) -> Option { 8,129 ( 0.00%) id.as_local().and_then(|id| self.opt_span(self.local_def_id_to_hir_id(id))) 1,614 ( 0.00%) } . . pub fn res_span(&self, res: Res) -> Option { . match res { . Res::Err => None, . Res::Local(id) => Some(self.span(id)), . res => self.span_if_local(res.opt_def_id()?), . } . } -- line 1042 ---------------------------------------- -- line 1062 ---------------------------------------- . } . . impl<'hir> intravisit::Map<'hir> for Map<'hir> { . fn find(&self, hir_id: HirId) -> Option> { . self.find(hir_id) . } . . fn body(&self, id: BodyId) -> &'hir Body<'hir> { 17,424 ( 0.00%) self.body(id) . } . . fn item(&self, id: ItemId) -> &'hir Item<'hir> { 13,266 ( 0.00%) self.item(id) . } . . fn trait_item(&self, id: TraitItemId) -> &'hir TraitItem<'hir> { 360 ( 0.00%) self.trait_item(id) . } . . fn impl_item(&self, id: ImplItemId) -> &'hir ImplItem<'hir> { 12,120 ( 0.00%) self.impl_item(id) . } . . fn foreign_item(&self, id: ForeignItemId) -> &'hir ForeignItem<'hir> { . self.foreign_item(id) . } . } . 7 ( 0.00%) pub(super) fn crate_hash(tcx: TyCtxt<'_>, crate_num: CrateNum) -> Svh { . debug_assert_eq!(crate_num, LOCAL_CRATE); . let krate = tcx.hir_crate(()); 3 ( 0.00%) let hir_body_hash = krate.hir_hash; . . let upstream_crates = upstream_crates(tcx); . . // We hash the final, remapped names of all local source files so we . // don't have to include the path prefix remapping commandline args. . // If we included the full mapping in the SVH, we could only have . // reproducible builds by compiling from the same directory. So we just . // hash the result of the mapping instead of the mapping itself. 2 ( 0.00%) let mut source_file_names: Vec<_> = tcx . .sess . .source_map() . .files() . .iter() . .filter(|source_file| source_file.cnum == LOCAL_CRATE) 92 ( 0.00%) .map(|source_file| source_file.name_hash) . .collect(); . . source_file_names.sort_unstable(); . . let mut hcx = tcx.create_stable_hashing_context(); . let mut stable_hasher = StableHasher::new(); . hir_body_hash.hash_stable(&mut hcx, &mut stable_hasher); . upstream_crates.hash_stable(&mut hcx, &mut stable_hasher); . source_file_names.hash_stable(&mut hcx, &mut stable_hasher); 3 ( 0.00%) if tcx.sess.opts.debugging_opts.incremental_relative_spans { . let definitions = &tcx.untracked_resolutions.definitions; . let mut owner_spans: Vec<_> = krate . .owners . .iter_enumerated() . .filter_map(|(def_id, info)| { . let _ = info.as_ref()?; . let def_path_hash = definitions.def_path_hash(def_id); . let span = definitions.def_span(def_id); . debug_assert_eq!(span.parent(), None); . Some((def_path_hash, span)) . }) . .collect(); . owner_spans.sort_unstable_by_key(|bn| bn.0); . owner_spans.hash_stable(&mut hcx, &mut stable_hasher); . } 3 ( 0.00%) tcx.sess.opts.dep_tracking_hash(true).hash_stable(&mut hcx, &mut stable_hasher); 2 ( 0.00%) tcx.sess.local_stable_crate_id().hash_stable(&mut hcx, &mut stable_hasher); . . let crate_hash: Fingerprint = stable_hasher.finish(); 1 ( 0.00%) Svh::new(crate_hash.to_smaller_hash()) 9 ( 0.00%) } . . fn upstream_crates(tcx: TyCtxt<'_>) -> Vec<(StableCrateId, Svh)> { . let mut upstream_crates: Vec<_> = tcx . .crates(()) . .iter() . 
.map(|&cnum| { 95 ( 0.00%) let stable_crate_id = tcx.resolutions(()).cstore.stable_crate_id(cnum); 19 ( 0.00%) let hash = tcx.crate_hash(cnum); . (stable_crate_id, hash) . }) . .collect(); . upstream_crates.sort_unstable_by_key(|&(stable_crate_id, _)| stable_crate_id); . upstream_crates . } . . fn hir_id_to_string(map: &Map<'_>, id: HirId) -> String { -- line 1155 ---------------------------------------- -- line 1238 ---------------------------------------- . Some(Node::Lifetime(_)) => node_str("lifetime"), . Some(Node::GenericParam(ref param)) => format!("generic_param {:?}{}", param, id_str), . Some(Node::Visibility(ref vis)) => format!("visibility {:?}{}", vis, id_str), . Some(Node::Crate(..)) => String::from("root_crate"), . None => format!("unknown node{}", id_str), . } . } . 288 ( 0.00%) pub(super) fn hir_module_items(tcx: TyCtxt<'_>, module_id: LocalDefId) -> ModuleItems { 192 ( 0.00%) let mut collector = ModuleCollector { . tcx, . submodules: Vec::default(), . items: Vec::default(), . trait_items: Vec::default(), . impl_items: Vec::default(), . foreign_items: Vec::default(), . }; . 160 ( 0.00%) let (hir_mod, span, hir_id) = tcx.hir().get_module(module_id); . collector.visit_mod(hir_mod, span, hir_id); . 512 ( 0.00%) let ModuleCollector { submodules, items, trait_items, impl_items, foreign_items, .. } = . collector; 352 ( 0.00%) return ModuleItems { 128 ( 0.00%) submodules: submodules.into_boxed_slice(), 128 ( 0.00%) items: items.into_boxed_slice(), 128 ( 0.00%) trait_items: trait_items.into_boxed_slice(), 128 ( 0.00%) impl_items: impl_items.into_boxed_slice(), 128 ( 0.00%) foreign_items: foreign_items.into_boxed_slice(), . }; . . struct ModuleCollector<'tcx> { . tcx: TyCtxt<'tcx>, . submodules: Vec, . items: Vec, . trait_items: Vec, . impl_items: Vec, . foreign_items: Vec, . } . . impl<'hir> Visitor<'hir> for ModuleCollector<'hir> { . type NestedFilter = nested_filter::All; . . fn nested_visit_map(&mut self) -> Self::Map { 2,574 ( 0.00%) self.tcx.hir() . } . 4,745 ( 0.00%) fn visit_item(&mut self, item: &'hir Item<'hir>) { 2,847 ( 0.00%) self.items.push(item.item_id()); 1,898 ( 0.00%) if let ItemKind::Mod(..) = item.kind { . // If this declares another module, do not recurse inside it. 31 ( 0.00%) self.submodules.push(item.def_id); . } else { 5,508 ( 0.00%) intravisit::walk_item(self, item) . } 124 ( 0.00%) } . . fn visit_trait_item(&mut self, item: &'hir TraitItem<'hir>) { 72 ( 0.00%) self.trait_items.push(item.trait_item_id()); 72 ( 0.00%) intravisit::walk_trait_item(self, item) . } . . fn visit_impl_item(&mut self, item: &'hir ImplItem<'hir>) { 2,424 ( 0.00%) self.impl_items.push(item.impl_item_id()); 2,424 ( 0.00%) intravisit::walk_impl_item(self, item) . } . . fn visit_foreign_item(&mut self, item: &'hir ForeignItem<'hir>) { . self.foreign_items.push(item.foreign_item_id()); . intravisit::walk_foreign_item(self, item) . } . } 256 ( 0.00%) } 1,390,843 ( 0.02%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/combine.rs -------------------------------------------------------------------------------- Ir -- line 56 ---------------------------------------- . #[derive(Copy, Clone, Debug)] . pub enum RelationDir { . SubtypeOf, . SupertypeOf, . EqTo, . } . . impl<'infcx, 'tcx> InferCtxt<'infcx, 'tcx> { 647,190 ( 0.01%) pub fn super_combine_tys( . &self, . relation: &mut R, . a: Ty<'tcx>, . b: Ty<'tcx>, . ) -> RelateResult<'tcx, Ty<'tcx>> . where . 
R: TypeRelation<'tcx>, . { . let a_is_expected = relation.a_is_expected(); . 667,440 ( 0.01%) match (a.kind(), b.kind()) { . // Relate integral variables to other types 46 ( 0.00%) (&ty::Infer(ty::IntVar(a_id)), &ty::Infer(ty::IntVar(b_id))) => { 253 ( 0.00%) self.inner . .borrow_mut() . .int_unification_table() . .unify_var_var(a_id, b_id) . .map_err(|e| int_unification_error(a_is_expected, e))?; . Ok(a) . } 1,147 ( 0.00%) (&ty::Infer(ty::IntVar(v_id)), &ty::Int(v)) => { 2,294 ( 0.00%) self.unify_integral_variable(a_is_expected, v_id, IntType(v)) . } . (&ty::Int(v), &ty::Infer(ty::IntVar(v_id))) => { . self.unify_integral_variable(!a_is_expected, v_id, IntType(v)) . } 1,817 ( 0.00%) (&ty::Infer(ty::IntVar(v_id)), &ty::Uint(v)) => { 3,634 ( 0.00%) self.unify_integral_variable(a_is_expected, v_id, UintType(v)) . } 20 ( 0.00%) (&ty::Uint(v), &ty::Infer(ty::IntVar(v_id))) => { 40 ( 0.00%) self.unify_integral_variable(!a_is_expected, v_id, UintType(v)) . } . . // Relate floating-point variables to other types . (&ty::Infer(ty::FloatVar(a_id)), &ty::Infer(ty::FloatVar(b_id))) => { . self.inner . .borrow_mut() . .float_unification_table() . .unify_var_var(a_id, b_id) -- line 103 ---------------------------------------- -- line 108 ---------------------------------------- . self.unify_float_variable(a_is_expected, v_id, v) . } . (&ty::Float(v), &ty::Infer(ty::FloatVar(v_id))) => { . self.unify_float_variable(!a_is_expected, v_id, v) . } . . // All other cases of inference are errors . (&ty::Infer(_), _) | (_, &ty::Infer(_)) => { 27,350 ( 0.00%) Err(TypeError::Sorts(ty::relate::expected_found(relation, a, b))) . } . 598,194 ( 0.01%) _ => ty::relate::super_relate_tys(relation, a, b), . } 647,190 ( 0.01%) } . 732 ( 0.00%) pub fn super_combine_consts( . &self, . relation: &mut R, . a: &'tcx ty::Const<'tcx>, . b: &'tcx ty::Const<'tcx>, . ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> . where . R: ConstEquateRelation<'tcx>, . { . debug!("{}.consts({:?}, {:?})", relation.tag(), a, b); 2,585 ( 0.00%) if a == b { . return Ok(a); . } . 183 ( 0.00%) let a = replace_if_possible(&mut self.inner.borrow_mut().const_unification_table(), a); 183 ( 0.00%) let b = replace_if_possible(&mut self.inner.borrow_mut().const_unification_table(), b); . . let a_is_expected = relation.a_is_expected(); . 630 ( 0.00%) match (a.val, b.val) { . ( . ty::ConstKind::Infer(InferConst::Var(a_vid)), . ty::ConstKind::Infer(InferConst::Var(b_vid)), . ) => { . self.inner . .borrow_mut() . .const_unification_table() . .unify_var_var(a_vid, b_vid) -- line 150 ---------------------------------------- -- line 158 ---------------------------------------- . bug!("tried to combine ConstKind::Infer/ConstKind::Infer(InferConst::Var)") . } . . (ty::ConstKind::Infer(InferConst::Var(vid)), _) => { . return self.unify_const_variable(relation.param_env(), vid, b, a_is_expected); . } . . (_, ty::ConstKind::Infer(InferConst::Var(vid))) => { 280 ( 0.00%) return self.unify_const_variable(relation.param_env(), vid, a, !a_is_expected); . } . (ty::ConstKind::Unevaluated(..), _) if self.tcx.lazy_normalization() => { . // FIXME(#59490): Need to remove the leak check to accommodate . // escaping bound variables here. . if !a.has_escaping_bound_vars() && !b.has_escaping_bound_vars() { . relation.const_equate_obligation(a, b); . } . return Ok(b); -- line 174 ---------------------------------------- -- line 179 ---------------------------------------- . if !a.has_escaping_bound_vars() && !b.has_escaping_bound_vars() { . relation.const_equate_obligation(a, b); . 
} . return Ok(a); . } . _ => {} . } . 25 ( 0.00%) ty::relate::super_relate_consts(relation, a, b) 549 ( 0.00%) } . . /// Unifies the const variable `target_vid` with the given constant. . /// . /// This also tests if the given const `ct` contains an inference variable which was previously . /// unioned with `target_vid`. If this is the case, inferring `target_vid` to `ct` . /// would result in an infinite type as we continuously replace an inference variable . /// in `ct` with `ct` itself. . /// -- line 196 ---------------------------------------- -- line 216 ---------------------------------------- . /// of `fn bind` (meaning that its substs contain `N`). . /// . /// `bind(arr)` now infers that the type of `arr` must be `[u8; N]`. . /// The assignment `arr = bind(arr)` now tries to equate `N` with `3 + 4`. . /// . /// As `3 + 4` contains `N` in its substs, this must not succeed. . /// . /// See `src/test/ui/const-generics/occurs-check/` for more examples where this is relevant. 1,064 ( 0.00%) #[instrument(level = "debug", skip(self))] . fn unify_const_variable( . &self, . param_env: ty::ParamEnv<'tcx>, . target_vid: ty::ConstVid<'tcx>, . ct: &'tcx ty::Const<'tcx>, . vid_is_expected: bool, . ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> { . let (for_universe, span) = { . let mut inner = self.inner.borrow_mut(); 224 ( 0.00%) let variable_table = &mut inner.const_unification_table(); 112 ( 0.00%) let var_value = variable_table.probe_value(target_vid); 112 ( 0.00%) match var_value.val { . ConstVariableValue::Known { value } => { . bug!("instantiating {:?} which has a known value {:?}", target_vid, value) . } . ConstVariableValue::Unknown { universe } => (universe, var_value.origin.span), . } . }; 448 ( 0.00%) let value = ConstInferUnifier { infcx: self, span, param_env, for_universe, target_vid } . .relate(ct, ct)?; . 224 ( 0.00%) self.inner . .borrow_mut() . .const_unification_table() . .unify_var_value( . target_vid, 336 ( 0.00%) ConstVarValue { . origin: ConstVariableOrigin { . kind: ConstVariableOriginKind::ConstInference, . span: DUMMY_SP, . }, . val: ConstVariableValue::Known { value }, . }, . ) . .map(|()| value) . .map_err(|e| const_unification_error(vid_is_expected, e)) . } . 17,904 ( 0.00%) fn unify_integral_variable( . &self, . vid_is_expected: bool, . vid: ty::IntVid, . val: ty::IntVarValue, . ) -> RelateResult<'tcx, Ty<'tcx>> { 23,872 ( 0.00%) self.inner . .borrow_mut() . .int_unification_table() . .unify_var_value(vid, Some(val)) . .map_err(|e| int_unification_error(vid_is_expected, e))?; 5,968 ( 0.00%) match val { 1,147 ( 0.00%) IntType(v) => Ok(self.tcx.mk_mach_int(v)), 6,658 ( 0.00%) UintType(v) => Ok(self.tcx.mk_mach_uint(v)), . } 23,872 ( 0.00%) } . . fn unify_float_variable( . &self, . vid_is_expected: bool, . vid: ty::FloatVid, . val: ty::FloatTy, . ) -> RelateResult<'tcx, Ty<'tcx>> { . self.inner -- line 286 ---------------------------------------- -- line 289 ---------------------------------------- . .unify_var_value(vid, Some(ty::FloatVarValue(val))) . .map_err(|e| float_unification_error(vid_is_expected, e))?; . Ok(self.tcx.mk_mach_float(val)) . } . } . . impl<'infcx, 'tcx> CombineFields<'infcx, 'tcx> { . pub fn tcx(&self) -> TyCtxt<'tcx> { 80,347 ( 0.00%) self.infcx.tcx . } . . pub fn equate<'a>(&'a mut self, a_is_expected: bool) -> Equate<'a, 'infcx, 'tcx> { . Equate::new(self, a_is_expected) . } . 115,068 ( 0.00%) pub fn sub<'a>(&'a mut self, a_is_expected: bool) -> Sub<'a, 'infcx, 'tcx> { . Sub::new(self, a_is_expected) 57,534 ( 0.00%) } . . 
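
The comment on `unify_const_variable` above describes an occurs check: a const inference variable must never be unified with a constant that (transitively) mentions that same variable, since substituting the binding would produce an infinite term, as in the `arr = bind(arr)` example. Below is a minimal standalone sketch of that guard; the `Term`, `occurs`, and `unify_var` names are invented for illustration and are not rustc APIs.

    // Illustrative only: a toy term language standing in for `ty::Const`.
    #[derive(Clone, Debug)]
    enum Term {
        Var(u32),                  // an inference variable, e.g. `N`
        Lit(i64),                  // a concrete constant, e.g. `3`
        Add(Box<Term>, Box<Term>), // a compound term, e.g. `3 + 4` or `N + 4`
    }

    /// Does `var` occur anywhere inside `term`?
    fn occurs(var: u32, term: &Term) -> bool {
        match term {
            Term::Var(v) => *v == var,
            Term::Lit(_) => false,
            Term::Add(a, b) => occurs(var, a) || occurs(var, b),
        }
    }

    /// Try to bind `var := term`, refusing the cyclic case.
    fn unify_var(var: u32, term: &Term) -> Result<Term, String> {
        if occurs(var, term) {
            // Binding would create an infinite term such as `N = N + 4`.
            Err(format!("cannot unify ?{} with a term that contains it", var))
        } else {
            Ok(term.clone())
        }
    }

    fn main() {
        // Accepted: `?0 := 3 + 4`.
        let ok = Term::Add(Box::new(Term::Lit(3)), Box::new(Term::Lit(4)));
        assert!(unify_var(0, &ok).is_ok());
        // Rejected: `?0 := ?0 + 4` would never terminate when substituted.
        let cyclic = Term::Add(Box::new(Term::Var(0)), Box::new(Term::Lit(4)));
        assert!(unify_var(0, &cyclic).is_err());
    }

With this guard in place, the "substs contain `N`" case from the comment above is reported as an error instead of recursing forever.
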
pub fn lub<'a>(&'a mut self, a_is_expected: bool) -> Lub<'a, 'infcx, 'tcx> { . Lub::new(self, a_is_expected) . } . 129,410 ( 0.00%) pub fn glb<'a>(&'a mut self, a_is_expected: bool) -> Glb<'a, 'infcx, 'tcx> { . Glb::new(self, a_is_expected) 64,705 ( 0.00%) } . . /// Here, `dir` is either `EqTo`, `SubtypeOf`, or `SupertypeOf`. . /// The idea is that we should ensure that the type `a_ty` is equal . /// to, a subtype of, or a supertype of (respectively) the type . /// to which `b_vid` is bound. . /// . /// Since `b_vid` has not yet been instantiated with a type, we . /// will first instantiate `b_vid` with a *generalized* version . /// of `a_ty`. Generalization introduces other inference . /// variables wherever subtyping could occur. 720,016 ( 0.01%) pub fn instantiate( . &mut self, . a_ty: Ty<'tcx>, . dir: RelationDir, . b_vid: ty::TyVid, . a_is_expected: bool, . ) -> RelateResult<'tcx, ()> { . use self::RelationDir::*; . -- line 333 ---------------------------------------- -- line 342 ---------------------------------------- . // - `a_ty == &'x ?1`, where `'x` is some free region and `?1` is an . // inference variable, . // - and `dir` == `SubtypeOf`. . // . // Then the generalized form `b_ty` would be `&'?2 ?3`, where . // `'?2` and `?3` are fresh region/type inference . // variables. (Down below, we will relate `a_ty <: b_ty`, . // adding constraints like `'x: '?2` and `?1 <: ?3`.) 65,456 ( 0.00%) let Generalization { ty: b_ty, needs_wf } = self.generalize(a_ty, b_vid, dir)?; . debug!( . "instantiate(a_ty={:?}, dir={:?}, b_vid={:?}, generalized b_ty={:?})", . a_ty, dir, b_vid, b_ty . ); 458,192 ( 0.01%) self.infcx.inner.borrow_mut().type_variables().instantiate(b_vid, b_ty); . 65,456 ( 0.00%) if needs_wf { . self.obligations.push(Obligation::new( . self.trace.cause.clone(), . self.param_env, . ty::Binder::dummy(ty::PredicateKind::WellFormed(b_ty.into())) . .to_predicate(self.infcx.tcx), . )); . } . . // Finally, relate `b_ty` to `a_ty`, as described in previous comment. . // . // FIXME(#16847): This code is non-ideal because all these subtype . // relations wind up attributed to the same spans. We need . // to associate causes/spans with each of the relations in . // the stack to get this right. 233,140 ( 0.00%) match dir { 173,740 ( 0.00%) EqTo => self.equate(a_is_expected).relate(a_ty, b_ty), 71,710 ( 0.00%) SubtypeOf => self.sub(a_is_expected).relate(a_ty, b_ty), 32,732 ( 0.00%) SupertypeOf => self.sub(a_is_expected).relate_with_variance( . ty::Contravariant, . ty::VarianceDiagInfo::default(), . a_ty, . b_ty, . ), . }?; . 65,456 ( 0.00%) Ok(()) 523,648 ( 0.01%) } . . /// Attempts to generalize `ty` for the type variable `for_vid`. . /// This checks for cycle -- that is, whether the type `ty` . /// references `for_vid`. The `dir` is the "direction" for which we . /// a performing the generalization (i.e., are we producing a type . /// that can be used as a supertype etc). . /// . /// Preconditions: -- line 392 ---------------------------------------- -- line 401 ---------------------------------------- . debug!("generalize(ty={:?}, for_vid={:?}, dir={:?}", ty, for_vid, dir); . // Determine the ambient variance within which `ty` appears. . // The surrounding equation is: . // . // ty [op] ty2 . // . // where `op` is either `==`, `<:`, or `:>`. This maps quite . // naturally. 261,824 ( 0.00%) let ambient_variance = match dir { . RelationDir::EqTo => ty::Invariant, . RelationDir::SubtypeOf => ty::Covariant, . RelationDir::SupertypeOf => ty::Contravariant, . }; . . 
debug!("generalize: ambient_variance = {:?}", ambient_variance); . 654,560 ( 0.01%) let for_universe = match self.infcx.inner.borrow_mut().type_variables().probe(for_vid) { . v @ TypeVariableValue::Known { .. } => { . bug!("instantiating {:?} which has a known value {:?}", for_vid, v,) . } . TypeVariableValue::Unknown { universe } => universe, . }; . . debug!("generalize: for_universe = {:?}", for_universe); . debug!("generalize: trace = {:?}", self.trace); . 654,560 ( 0.01%) let mut generalize = Generalizer { . infcx: self.infcx, . cause: &self.trace.cause, . for_vid_sub_root: self.infcx.inner.borrow_mut().type_variables().sub_root_var(for_vid), . for_universe, . ambient_variance, . needs_wf: false, . root_ty: ty, 65,456 ( 0.00%) param_env: self.param_env, . cache: SsoHashMap::new(), . }; . 130,912 ( 0.00%) let ty = match generalize.relate(ty, ty) { 65,456 ( 0.00%) Ok(ty) => ty, . Err(e) => { . debug!("generalize: failure {:?}", e); . return Err(e); . } . }; 65,456 ( 0.00%) let needs_wf = generalize.needs_wf; . debug!("generalize: success {{ {:?}, {:?} }}", ty, needs_wf); . Ok(Generalization { ty, needs_wf }) . } . . pub fn add_const_equate_obligation( . &mut self, . a_is_expected: bool, . a: &'tcx ty::Const<'tcx>, -- line 454 ---------------------------------------- -- line 528 ---------------------------------------- . /// will force the calling code to check that `WF(Foo)` . /// holds, which in turn implies that `?C::Item == ?D`. So once . /// `?C` is constrained, that should suffice to restrict `?D`. . needs_wf: bool, . } . . impl<'tcx> TypeRelation<'tcx> for Generalizer<'_, 'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { 240,780 ( 0.00%) self.infcx.tcx . } . fn param_env(&self) -> ty::ParamEnv<'tcx> { 238 ( 0.00%) self.param_env . } . . fn tag(&self) -> &'static str { . "Generalizer" . } . . fn a_is_expected(&self) -> bool { . true . } . 7,454 ( 0.00%) fn binders( . &mut self, . a: ty::Binder<'tcx, T>, . b: ty::Binder<'tcx, T>, . ) -> RelateResult<'tcx, ty::Binder<'tcx, T>> . where . T: Relate<'tcx>, . { 10,602 ( 0.00%) Ok(a.rebind(self.relate(a.skip_binder(), b.skip_binder())?)) 7,454 ( 0.00%) } . 839,234 ( 0.01%) fn relate_item_substs( . &mut self, . item_def_id: DefId, . a_subst: SubstsRef<'tcx>, . b_subst: SubstsRef<'tcx>, . ) -> RelateResult<'tcx, SubstsRef<'tcx>> { 76,294 ( 0.00%) if self.ambient_variance == ty::Variance::Invariant { . // Avoid fetching the variance if we are in an invariant . // context; no need, and it can induce dependency cycles . // (e.g., #41849). 38,101 ( 0.00%) relate::relate_substs(self, None, a_subst, b_subst) . } else { . let tcx = self.tcx(); . let opt_variances = tcx.variances_of(item_def_id); 190,965 ( 0.00%) relate::relate_substs(self, Some((item_def_id, &opt_variances)), a_subst, b_subst) . } 686,646 ( 0.01%) } . 103,527 ( 0.00%) fn relate_with_variance>( . &mut self, . variance: ty::Variance, . _info: ty::VarianceDiagInfo<'tcx>, . a: T, . b: T, . ) -> RelateResult<'tcx, T> { . let old_ambient_variance = self.ambient_variance; 427,544 ( 0.01%) self.ambient_variance = self.ambient_variance.xform(variance); . . let result = self.relate(a, b); 83,049 ( 0.00%) self.ambient_variance = old_ambient_variance; . result 80,521 ( 0.00%) } . 1,112,274 ( 0.02%) fn tys(&mut self, t: Ty<'tcx>, t2: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> { 123,586 ( 0.00%) assert_eq!(t, t2); // we are abusing TypeRelation here; both LHS and RHS ought to be == . 741,516 ( 0.01%) if let Some(result) = self.cache.get(&t) { 524 ( 0.00%) return result.clone(); . } . 
debug!("generalize: t={:?}", t); . . // Check to see whether the type we are generalizing references . // any other type variable related to `vid` via . // subtyping. This is basically our "occurs check", preventing . // us from creating infinitely sized types. 402,181 ( 0.01%) let result = match *t.kind() { 5,124 ( 0.00%) ty::Infer(ty::TyVar(vid)) => { 5,124 ( 0.00%) let vid = self.infcx.inner.borrow_mut().type_variables().root_var(vid); 5,124 ( 0.00%) let sub_vid = self.infcx.inner.borrow_mut().type_variables().sub_root_var(vid); 5,124 ( 0.00%) if sub_vid == self.for_vid_sub_root { . // If sub-roots are equal, then `for_vid` and . // `vid` are related via subtyping. . Err(TypeError::CyclicTy(self.root_ty)) . } else { 30,744 ( 0.00%) let probe = self.infcx.inner.borrow_mut().type_variables().probe(vid); 10,248 ( 0.00%) match probe { . TypeVariableValue::Known { value: u } => { . debug!("generalize: known value {:?}", u); . self.relate(u, u) . } . TypeVariableValue::Unknown { universe } => { 14,180 ( 0.00%) match self.ambient_variance { . // Invariant: no need to make a fresh type variable. . ty::Invariant => { 1,275 ( 0.00%) if self.for_universe.can_name(universe) { 944 ( 0.00%) return Ok(t); . } . } . . // Bivariant: make a fresh var, but we . // may need a WF predicate. See . // comment on `needs_wf` field for . // more info. . ty::Bivariant => self.needs_wf = true, . . // Co/contravariant: this will be . // sufficiently constrained later on. . ty::Covariant | ty::Contravariant => (), . } . . let origin = 23,400 ( 0.00%) *self.infcx.inner.borrow_mut().type_variables().var_origin(vid); 18,200 ( 0.00%) let new_var_id = self . .infcx . .inner . .borrow_mut() . .type_variables() 18,200 ( 0.00%) .new_var(self.for_universe, origin); . let u = self.tcx().mk_ty_var(new_var_id); . . // Record that we replaced `vid` with `new_var_id` as part of a generalization . // operation. This is needed to detect cyclic types. To see why, see the . // docs in the `type_variables` module. 13,000 ( 0.00%) self.infcx.inner.borrow_mut().type_variables().sub(vid, new_var_id); . debug!("generalize: replacing original vid={:?} with new={:?}", vid, u); 5,200 ( 0.00%) Ok(u) . } . } . } . } . ty::Infer(ty::IntVar(_) | ty::FloatVar(_)) => { . // No matter what mode we are in, . // integer/floating-point types must be equal to be . // relatable. 268 ( 0.00%) Ok(t) . } 354,189 ( 0.01%) _ => relate::super_relate_tys(self, t, t), . }; . 1,232,190 ( 0.02%) self.cache.insert(t, result.clone()); 739,314 ( 0.01%) return result; 1,112,274 ( 0.02%) } . 238,672 ( 0.00%) fn regions( . &mut self, . r: ty::Region<'tcx>, . r2: ty::Region<'tcx>, . ) -> RelateResult<'tcx, ty::Region<'tcx>> { 29,185 ( 0.00%) assert_eq!(r, r2); // we are abusing TypeRelation here; both LHS and RHS ought to be == . . debug!("generalize: regions r={:?}", r); . 58,370 ( 0.00%) match *r { . // Never make variables for regions bound within the type itself, . // nor for erased regions. . ty::ReLateBound(..) | ty::ReErased => { . return Ok(r); . } . . ty::RePlaceholder(..) . | ty::ReVar(..) -- line 691 ---------------------------------------- -- line 697 ---------------------------------------- . } . } . . // If we are in an invariant context, we can re-use the region . // as is, unless it happens to be in some universe that we . // can't name. (In the case of a region *variable*, we could . // use it if we promoted it into our universe, but we don't . // bother.) 
58,900 ( 0.00%) if let ty::Invariant = self.ambient_variance { 55,503 ( 0.00%) let r_universe = self.infcx.universe_of_region(r); 129,473 ( 0.00%) if self.for_universe.can_name(r_universe) { . return Ok(r); . } . } . . // FIXME: This is non-ideal because we don't give a . // very descriptive origin for this region variable. 109,796 ( 0.00%) Ok(self.infcx.next_region_var_in_universe(MiscVariable(self.cause.span), self.for_universe)) 238,672 ( 0.00%) } . 2,196 ( 0.00%) fn consts( . &mut self, . c: &'tcx ty::Const<'tcx>, . c2: &'tcx ty::Const<'tcx>, . ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> { 714 ( 0.00%) assert_eq!(c, c2); // we are abusing TypeRelation here; both LHS and RHS ought to be == . . match c.val { . ty::ConstKind::Infer(InferConst::Var(vid)) => { 6 ( 0.00%) let mut inner = self.infcx.inner.borrow_mut(); 24 ( 0.00%) let variable_table = &mut inner.const_unification_table(); 6 ( 0.00%) let var_value = variable_table.probe_value(vid); 12 ( 0.00%) match var_value.val { . ConstVariableValue::Known { value: u } => { . drop(inner); . self.relate(u, u) . } 4 ( 0.00%) ConstVariableValue::Unknown { universe } => { 20 ( 0.00%) if self.for_universe.can_name(universe) { . Ok(c) . } else { . let new_var_id = variable_table.new_key(ConstVarValue { . origin: var_value.origin, . val: ConstVariableValue::Unknown { universe: self.for_universe }, . }); . Ok(self.tcx().mk_const_var(new_var_id, c.ty)) . } -- line 743 ---------------------------------------- -- line 754 ---------------------------------------- . substs, . substs, . )?; . Ok(self.tcx().mk_const(ty::Const { . ty: c.ty, . val: ty::ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted }), . })) . } 1,190 ( 0.00%) _ => relate::super_relate_consts(self, c, c), . } 2,196 ( 0.00%) } . } . . pub trait ConstEquateRelation<'tcx>: TypeRelation<'tcx> { . /// Register an obligation that both constants must be equal to each other. . /// . /// If they aren't equal then the relation doesn't hold. . fn const_equate_obligation(&mut self, a: &'tcx ty::Const<'tcx>, b: &'tcx ty::Const<'tcx>); . } -- line 772 ---------------------------------------- -- line 824 ---------------------------------------- . target_vid: ty::ConstVid<'tcx>, . } . . // We use `TypeRelation` here to propagate `RelateResult` upwards. . // . // Both inputs are expected to be the same. . impl<'tcx> TypeRelation<'tcx> for ConstInferUnifier<'_, 'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { 224 ( 0.00%) self.infcx.tcx . } . . fn param_env(&self) -> ty::ParamEnv<'tcx> { 56 ( 0.00%) self.param_env . } . . fn tag(&self) -> &'static str { . "ConstInferUnifier" . } . . fn a_is_expected(&self) -> bool { . true -- line 844 ---------------------------------------- -- line 937 ---------------------------------------- . return Ok(r); . } else { . // FIXME: This is non-ideal because we don't give a . // very descriptive origin for this region variable. . Ok(self.infcx.next_region_var_in_universe(MiscVariable(self.span), self.for_universe)) . } . } . 1,064 ( 0.00%) #[tracing::instrument(level = "debug", skip(self))] . fn consts( . &mut self, . c: &'tcx ty::Const<'tcx>, . _c: &'tcx ty::Const<'tcx>, . ) -> RelateResult<'tcx, &'tcx ty::Const<'tcx>> { . debug_assert_eq!(c, _c); . debug!("ConstInferUnifier: c={:?}", c); . 280 ( 0.00%) match c.val { . ty::ConstKind::Infer(InferConst::Var(vid)) => { . // Check if the current unification would end up . // unifying `target_vid` with a const which contains . // an inference variable which is unioned with `target_vid`. . // . 
// Not doing so can easily result in stack overflows. . if self . .infcx -- line 962 ---------------------------------------- -- line 1000 ---------------------------------------- . substs, . substs, . )?; . Ok(self.tcx().mk_const(ty::Const { . ty: c.ty, . val: ty::ConstKind::Unevaluated(ty::Unevaluated { def, substs, promoted }), . })) . } 280 ( 0.00%) _ => relate::super_relate_consts(self, c, c), . } . } . } 1,030,614 ( 0.02%) -------------------------------------------------------------------------------- The following files chosen for auto-annotation could not be found: -------------------------------------------------------------------------------- ./elf/dl-lookup.c ./malloc/malloc.c ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S ./string/../sysdeps/x86_64/multiarch/memset-vec-unaligned-erms.S -------------------------------------------------------------------------------- Ir -------------------------------------------------------------------------------- 3,139,351,512 (52.09%) events annotated