--------------------------------------------------------------------------------
I1 cache:         65536 B, 64 B, 4-way associative
D1 cache:         32768 B, 64 B, 8-way associative
LL cache:         67108864 B, 64 B, 64-way associative
Command:          /usr/home/liquid/.rustup/toolchains/w-profiling/bin/rustc --crate-name unicode_xid src/lib.rs --error-format=json --json=diagnostic-rendered-ansi,artifacts,future-incompat --crate-type lib --emit=dep-info,metadata -C embed-bitcode=no -C debuginfo=2 --cfg feature="default" -C metadata=948cc6d010a98947 -C extra-filename=-948cc6d010a98947 --out-dir /usr/home/liquid/tmp/.tmpMTTQYC/target/debug/deps -L dependency=/usr/home/liquid/tmp/.tmpMTTQYC/target/debug/deps -Adeprecated -Aunknown-lints -Zincremental-verify-ich
Data file:        results/cgout-w-profiling-unicode-xid-0.2.2-Check-Full
Events recorded:  Ir
Events shown:     Ir
Event sort order: Ir
Thresholds:       0.1
Include dirs:
User annotated:
Auto-annotation:  on
--------------------------------------------------------------------------------
Ir
--------------------------------------------------------------------------------
218,751,313 (100.0%)  PROGRAM TOTALS
--------------------------------------------------------------------------------
Ir                    file:function
--------------------------------------------------------------------------------
10,305,124 ( 4.71%)  ./elf/dl-lookup.c:_dl_lookup_symbol_x
 6,187,581 ( 2.83%)  ./malloc/malloc.c:_int_malloc
 5,457,480 ( 2.49%)  ./malloc/malloc.c:_int_free
 4,835,679 ( 2.21%)  ./elf/dl-lookup.c:do_lookup_x
 4,141,271 ( 1.89%)  ./elf/../sysdeps/x86_64/dl-machine.h:_dl_relocate_object
 3,967,421 ( 1.81%)  ./string/../sysdeps/x86_64/multiarch/memmove-vec-unaligned-erms.S:__memcpy_avx_unaligned_erms
 3,788,563 ( 1.73%)  ./malloc/malloc.c:malloc
 2,139,033 ( 0.98%)  ./elf/do-rel.h:_dl_relocate_object
 1,990,381 ( 0.91%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/opaque.rs:>::decode
 1,671,154 ( 0.76%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/vec/mod.rs:>::decode
 1,628,199 ( 0.74%)  ./malloc/malloc.c:free
 1,619,973 ( 0.74%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:>::decode
 1,362,217 ( 0.62%)  ???:SetImpliedBits(llvm::FeatureBitset&, llvm::FeatureBitset const&, llvm::ArrayRef)
 1,296,111 ( 0.59%)  ./malloc/malloc.c:malloc_consolidate
   964,312 ( 0.44%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::from_key_hashed_nocheck::
   873,946 ( 0.40%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/mod.rs:::next_token
   864,856 ( 0.40%)  ./elf/dl-lookup.c:check_match
   797,582 ( 0.36%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::get::
   723,023 ( 0.33%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}> as core::iter::traits::iterator::Iterator>::fold::<(), core::iter::traits::iterator::Iterator::for_each::call as alloc::vec::spec_extend::SpecExtend, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}>>>::spec_extend::{closure#0}>::{closure#0}>
   720,640 ( 0.33%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:::new_imported_source_file
   719,048 ( 0.33%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:::new_imported_source_file
   718,348 ( 0.33%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_lexer/src/unescape.rs:rustc_lexer::unescape::scan_escape
   658,846 ( 0.30%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/operand.rs:>::try_read_immediate
   624,476 ( 0.29%)  ./malloc/malloc.c:unlink_chunk.constprop.0
   605,074 ( 0.28%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_lexer/src/lib.rs:::advance_token
   602,660 ( 0.28%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs:::check
   597,106 ( 0.27%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/tokentrees.rs:::parse_token_tree
   591,111 ( 0.27%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp/ring.rs:>::pop_first
   582,602 ( 0.27%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs:::span_data_to_lines_and_cols
   553,669 ( 0.25%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp.rs:::advance_left
   547,385 ( 0.25%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/vec_deque/mod.rs:>::push_back
   542,788 ( 0.25%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_typeck/src/check/coercion.rs:::coerce
   535,408 ( 0.24%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/char/mod.rs:::next
   529,040 ( 0.24%)  ./string/../sysdeps/x86_64/strcmp.S:strcmp
   527,616 ( 0.24%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/hir/map/mod.rs:::attrs
   520,505 ( 0.24%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_metadata/src/rmeta/decoder.rs:, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}> as core::iter::traits::iterator::Iterator>::fold::<(), core::iter::traits::iterator::Iterator::for_each::call as alloc::vec::spec_extend::SpecExtend, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}>>>::spec_extend::{closure#0}>::{closure#0}>
   510,812 ( 0.23%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/middle/region.rs:::temporary_scope
   508,921 ( 0.23%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs:::short_write_process_buffer::
   487,843 ( 0.22%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs:::next
   479,637 ( 0.22%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}> as core::iter::traits::iterator::Iterator>::fold::<(), core::iter::traits::iterator::Iterator::for_each::call as alloc::vec::spec_extend::SpecExtend, >::decode::{closure#0}>, ::imported_source_files::{closure#3}::{closure#0}>>>::spec_extend::{closure#0}>::{closure#0}>
   475,386 ( 0.22%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs:::bump
   468,069 ( 0.21%)  /usr/home/liquid/rust/worktree-benchmarking/library/std/src/sys/unix/alloc.rs:__rdl_alloc
   459,321 ( 0.21%)  ???:llvm::StringMapImpl::LookupBucketFor(llvm::StringRef)
   455,200 ( 0.21%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/thir/cx/expr.rs:::mirror_expr_inner
   426,974 ( 0.20%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/interpret/allocation.rs:::read_scalar::
   425,182 ( 0.19%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/option.rs:::advance_left
   413,838 ( 0.19%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::maybe_lint_level_root_bounded
   410,784 ( 0.19%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/freshen.rs:::fold_ty
   407,811 ( 0.19%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/lib.rs:>::hash_stable
   406,338 ( 0.19%)  ./malloc/malloc.c:realloc
   405,464 ( 0.19%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>::reserve_rehash::>::{closure#0}>
   402,990 ( 0.18%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_metadata/src/rmeta/encoder.rs:>::encode
   399,894 ( 0.18%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/token.rs:::clone
   397,065 ( 0.18%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::>::{closure#0}>
   395,559 ( 0.18%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/hir/map/mod.rs:::find_parent_node
   393,440 ( 0.18%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:::node_type_opt
   381,210 ( 0.17%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/transform/check_consts/qualifs.rs:rustc_const_eval::transform::check_consts::qualifs::in_operand::::qualif_local::{closure#0}>
   368,508 ( 0.17%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/middle/region.rs:>>::get::
   361,086 ( 0.17%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/layout.rs:<&rustc_middle::ty::TyS as rustc_target::abi::TyAbiInterface<_>>::ty_and_layout_field::field_ty_or_layout::>
   360,950 ( 0.17%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/expr.rs:::parse_assoc_expr_with
   357,019 ( 0.16%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/layout.rs:<&rustc_middle::ty::TyS as rustc_target::abi::TyAbiInterface>>::ty_and_layout_field
   355,261 ( 0.16%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:>>::from_key_hashed_nocheck::
   348,347 ( 0.16%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>>::insert
   343,715 ( 0.16%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (core::result::Result, rustc_middle::ty::layout::LayoutError>, rustc_query_system::dep_graph::graph::DepNodeIndex), core::hash::BuildHasherDefault>>::from_key_hashed_nocheck::>
   341,388 ( 0.16%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs:::short_write_process_buffer::
   338,125 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/build/expr/as_operand.rs:::as_operand
   335,369 ( 0.15%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/map.rs:>, (), core::hash::BuildHasherDefault>>::from_hash::>>::{closure#0}>
   334,534 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_typeck/src/check/regionck.rs:::visit_expr
   333,562 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs:::lookup_source_file_idx
   333,373 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/tokenstream.rs:::next_with_spacing
   332,562 ( 0.15%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:, (), core::hash::BuildHasherDefault>>::from_hash::, rustc_middle::ty::context::Interned>::{closure#0}>
   331,827 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_build/src/thir/cx/expr.rs:::make_mirror_unadjusted
   331,368 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/expr.rs:::collect_tokens_for_expr::<::parse_dot_or_call_expr::{closure#0}>::{closure#0}
   328,560 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/interpret/allocation.rs:::get_bytes_internal::
   328,332 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/validity.rs:>::try_visit_primitive
   326,115 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_mir_dataflow/src/drop_flag_effects.rs:rustc_mir_dataflow::drop_flag_effects::on_all_children_bits::is_terminal_path
   322,777 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast/src/token.rs:::eq
   320,185 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_lint/src/levels.rs:::push
   319,651 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/sty.rs:::is_trivially_sized
   318,120 ( 0.15%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:::shallow_resolve_ty
   312,094 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/undo_log.rs:>::reverse
   308,996 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/undo_log.rs:::rollback_to
   308,827 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/validity.rs:>::aggregate_field_path_elem
   305,212 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_typeck/src/check/writeback.rs:::visit_node_id
   304,171 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/context.rs:::_intern_substs
   302,625 ( 0.14%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/md-5-0.9.1/src/utils.rs:md5::utils::compress
   300,980 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/interpret/allocation.rs:::get_relocations::
   298,204 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/iter/macros.rs:::_intern_substs
   297,491 ( 0.14%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/rustc-hash-1.1.0/src/lib.rs:<&str as core::hash::Hash>::hash::
   296,784 ( 0.14%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cell.rs:::attrs
   291,637 ( 0.13%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/place.rs:>::force_allocation_maybe_sized
   283,896 ( 0.13%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs:>::hash_stable
   281,206 ( 0.13%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_typeck/src/check/expr.rs:::check_expr_with_expectation_and_args
   272,023 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:::commit_from
   271,887 ( 0.12%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>, (), core::hash::BuildHasherDefault>>::from_hash::>>::{closure#0}>
   270,719 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_target/src/abi/mod.rs:::read_scalar::
   264,696 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/num/uint_macros.rs:md5::utils::compress
   262,673 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp.rs:::scan_string
   260,072 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/memory.rs:>::get
   258,540 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:::start_snapshot
   255,632 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/sub.rs:::tys
   253,997 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/../../stdarch/crates/core_arch/src/x86/sse2.rs:>>::from_key_hashed_nocheck::
   253,220 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_hir_pretty/src/lib.rs:::print_expr
   252,524 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/source_map.rs:::new_imported_source_file
   252,037 ( 0.12%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/vec_deque/mod.rs:::advance_left
   251,340 ( 0.11%)  ./malloc/malloc.c:_int_realloc
   250,704 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_typeck/src/check/coercion.rs:::try_coerce
   247,673 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/operand.rs:>::eval_operand
   243,364 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/string.rs:alloc::string::String::push
   241,290 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/mod.rs:::check_static_ptr
   238,468 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cmp.rs:>::decode
   238,468 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs:>::decode
   238,468 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/iter/range.rs:>::decode
   237,753 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/validity.rs: as rustc_const_eval::interpret::visitor::ValueVisitor>::visit_value
   237,000 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_typeck/src/mem_categorization.rs:::cat_expr_unadjusted
   236,879 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/ptr/mod.rs:>::truncate
   236,703 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/place.rs:>::write_immediate_to_mplace_no_validate
   236,570 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_passes/src/region.rs:::visit_expr
   235,994 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/lexer/tokentrees.rs:::push
   235,511 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/memory.rs:>::get_global_alloc
   231,691 ( 0.11%)  ./string/../sysdeps/x86_64/multiarch/memcmp-avx2-movbe.S:__memcmp_avx2_movbe
   231,344 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_hir/src/intravisit.rs:rustc_hir::intravisit::walk_expr::
   231,025 ( 0.11%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/place.rs:>::place_field
   227,203 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_infer/src/infer/mod.rs:::commit_if_ok::, rustc_middle::ty::error::TypeError, ::sub<&rustc_middle::ty::TyS>::{closure#0}>
   226,969 ( 0.10%)  /usr/home/liquid/.cargo/registry/src/github.com-1ecc6299db9ec823/hashbrown-0.12.0/src/raw/mod.rs:>::insert::>::{closure#0}>
   225,833 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/library/alloc/src/collections/vec_deque/mod.rs:::check_stack
   225,348 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_serialize/src/leb128.rs:>::encode
   225,067 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp.rs:::check_stack
   224,824 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/slice/mod.rs:::span_data_to_lines_and_cols
   223,478 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/library/core/src/cell.rs:::start_snapshot
   223,199 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/place.rs:>::try_read_immediate
   221,867 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/place.rs:>::copy_op_no_validate
   221,549 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_const_eval/src/interpret/visitor.rs: as rustc_const_eval::interpret::visitor::ValueVisitor>::walk_value
   221,269 ( 0.10%)  ./stdlib/cxa_finalize.c:__cxa_finalize
   218,881 ( 0.10%)  /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_privacy/src/lib.rs:::check_expr_pat_type
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_lexer/src/lib.rs
--------------------------------------------------------------------------------
Ir
-- line 38 ----------------------------------------
.  #[derive(Debug)]
.  pub struct Token {
.      pub kind: TokenKind,
.      pub len: usize,
.  }
.
.  impl Token {
.      fn new(kind: TokenKind, len: usize) -> Token {
105,552 ( 0.05%)          Token { kind, len }
.      }
.  }
.
.  /// Enum representing common lexeme types.
.  // perf note: Changing all `usize` to `u32` doesn't change performance. See #77629
1,560 ( 0.00%)  #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
.  pub enum TokenKind {
.      // Multi-char tokens:
.      /// "// comment"
.      LineComment { doc_style: Option },
.      /// `/* block comment */`
.      ///
.      /// Block comments can be recursive, so the sequence like `/* /* */`
.      /// will not be considered terminated and will result in a parsing error.
-- line 60 ----------------------------------------
-- line 192 ----------------------------------------
.      /// Literal starts with "0x".
.      Hexadecimal,
.      /// Literal doesn't contain a prefix.
.      Decimal,
.  }
.
.  /// `rustc` allows files to have a shebang, e.g. "#!/usr/bin/rustrun",
.  /// but shebang isn't a part of rust syntax.
21 ( 0.00%)  pub fn strip_shebang(input: &str) -> Option {
.      // Shebang must start with `#!` literally, without any preceding whitespace.
.      // For simplicity we consider any line starting with `#!` a shebang,
.      // regardless of restrictions put on shebangs by specific platforms.
.      if let Some(input_tail) = input.strip_prefix("#!") {
.          // Ok, this is a shebang but if the next non-whitespace token is `[`,
.          // then it may be valid Rust code, so consider it Rust code.
.          let next_non_whitespace_token = tokenize(input_tail).map(|tok| tok.kind).find(|tok| {
.              !matches!(
-- line 208 ----------------------------------------
-- line 213 ----------------------------------------
.              )
.          });
.          if next_non_whitespace_token != Some(TokenKind::OpenBracket) {
.              // No other choice than to consider this a shebang.
.              return Some(2 + input_tail.lines().next().unwrap_or_default().len());
.          }
.      }
.      None
24 ( 0.00%)  }
.
.  /// Parses the first token from the provided input string.
35,184 ( 0.02%)  pub fn first_token(input: &str) -> Token {
.      debug_assert!(!input.is_empty());
11,728 ( 0.01%)      Cursor::new(input).advance_token()
46,912 ( 0.02%)  }
.
.  /// Creates an iterator that produces tokens from the input string.
.  pub fn tokenize(input: &str) -> impl Iterator + '_ {
.      let mut cursor = Cursor::new(input);
.      std::iter::from_fn(move || {
.          if cursor.is_eof() {
.              None
.          } else {
-- line 235 ----------------------------------------
-- line 243 ----------------------------------------
.  /// See [Rust language reference](https://doc.rust-lang.org/reference/whitespace.html)
.  /// for definitions of these classes.
.  pub fn is_whitespace(c: char) -> bool {
.      // This is Pattern_White_Space.
.      //
.      // Note that this set is stable (ie, it doesn't change with different
.      // Unicode versions), so it's ok to just hard-code the values.
.
34,648 ( 0.02%)      matches!(
.          c,
.          // Usual ASCII suspects
.          '\u{0009}' // \t
.              | '\u{000A}' // \n
.              | '\u{000B}' // vertical tab
.              | '\u{000C}' // form feed
.              | '\u{000D}' // \r
.              | '\u{0020}' // space
-- line 259 ----------------------------------------
-- line 271 ----------------------------------------
.      )
.  }
.
.  /// True if `c` is valid as a first character of an identifier.
.  /// See [Rust language reference](https://doc.rust-lang.org/reference/identifiers.html) for
.  /// a formal definition of valid identifier name.
.  pub fn is_id_start(c: char) -> bool {
.      // This is XID_Start OR '_' (which formally is not a XID_Start).
10,972 ( 0.01%)      c == '_' || unicode_xid::UnicodeXID::is_xid_start(c)
.  }
.
.  /// True if `c` is valid as a non-first character of an identifier.
.  /// See [Rust language reference](https://doc.rust-lang.org/reference/identifiers.html) for
.  /// a formal definition of valid identifier name.
.  pub fn is_id_continue(c: char) -> bool {
.      unicode_xid::UnicodeXID::is_xid_continue(c)
.  }
-- line 287 ----------------------------------------
-- line 293 ----------------------------------------
.          is_id_start(start) && chars.all(is_id_continue)
.      } else {
.          false
.      }
.  }
.
.  impl Cursor<'_> {
.      /// Parses a token from the input string.
93,824 ( 0.04%)      fn advance_token(&mut self) -> Token {
.          let first_char = self.bump().unwrap();
49,279 ( 0.02%)          let token_kind = match first_char {
.              // Slash, comment or block comment.
240 ( 0.00%)              '/' => match self.first() {
.                  '/' => self.line_comment(),
.                  '*' => self.block_comment(),
.                  _ => Slash,
.              },
.
.              // Whitespace sequence.
.              c if is_whitespace(c) => self.whitespace(),
.
.              // Raw identifier, raw string literal or identifier.
8 ( 0.00%)              'r' => match (self.first(), self.second()) {
.                  ('#', c1) if is_id_start(c1) => self.raw_ident(),
.                  ('#', _) | ('"', _) => {
.                      let (n_hashes, err) = self.raw_double_quoted_string(1);
.                      let suffix_start = self.len_consumed();
.                      if err.is_none() {
.                          self.eat_literal_suffix();
.                      }
.                      let kind = RawStr { n_hashes, err };
.                      Literal { kind, suffix_start }
.                  }
.                  _ => self.ident_or_unknown_prefix(),
.              },
.
.              // Byte literal, byte string literal, raw byte string literal or identifier.
99 ( 0.00%)              'b' => match (self.first(), self.second()) {
.                  ('\'', _) => {
.                      self.bump();
.                      let terminated = self.single_quoted_string();
.                      let suffix_start = self.len_consumed();
.                      if terminated {
.                          self.eat_literal_suffix();
.                      }
.                      let kind = Byte { terminated };
-- line 338 ----------------------------------------
-- line 361 ----------------------------------------
.                  _ => self.ident_or_unknown_prefix(),
.              },
.
.              // Identifier (this should be checked after other variant that can
.              // start as identifier).
.              c if is_id_start(c) => self.ident_or_unknown_prefix(),
.
.              // Numeric literal.
25,512 ( 0.01%)              c @ '0'..='9' => {
.                  let literal_kind = self.number(c);
.                  let suffix_start = self.len_consumed();
.                  self.eat_literal_suffix();
15 ( 0.00%)                  TokenKind::Literal { kind: literal_kind, suffix_start }
.              }
.
.              // One-symbol tokens.
24 ( 0.00%)              ';' => Semi,
5,500 ( 0.00%)              ',' => Comma,
4 ( 0.00%)              '.' => Dot,
2,802 ( 0.00%)              '(' => OpenParen,
2,802 ( 0.00%)              ')' => CloseParen,
26 ( 0.00%)              '{' => OpenBrace,
26 ( 0.00%)              '}' => CloseBrace,
34 ( 0.00%)              '[' => OpenBracket,
34 ( 0.00%)              ']' => CloseBracket,
.              '@' => At,
24 ( 0.00%)              '#' => Pound,
.              '~' => Tilde,
.              '?' => Question,
50 ( 0.00%)              ':' => Colon,
.              '$' => Dollar,
40 ( 0.00%)              '=' => Eq,
12 ( 0.00%)              '!' => Bang,
22 ( 0.00%)              '<' => Lt,
20 ( 0.00%)              '>' => Gt,
14 ( 0.00%)              '-' => Minus,
40 ( 0.00%)              '&' => And,
28 ( 0.00%)              '|' => Or,
.              '+' => Plus,
.              '*' => Star,
.              '^' => Caret,
.              '%' => Percent,
.
.              // Lifetime or character literal.
8,235 ( 0.00%)              '\'' => self.lifetime_or_char(),
.
.              // String literal.
.              '"' => {
15 ( 0.00%)                  let terminated = self.double_quoted_string();
.                  let suffix_start = self.len_consumed();
10 ( 0.00%)                  if terminated {
.                      self.eat_literal_suffix();
.                  }
.                  let kind = Str { terminated };
10 ( 0.00%)                  Literal { kind, suffix_start }
.              }
.              // Identifier starting with an emoji. Only lexed for graceful error recovery.
.              c if !c.is_ascii() && unic_emoji_char::is_emoji(c) => {
.                  self.fake_ident_or_unknown_prefix()
.              }
.              _ => Unknown,
.          };
.          Token::new(token_kind, self.len_consumed())
105,552 ( 0.05%)      }
.
.      fn line_comment(&mut self) -> TokenKind {
.          debug_assert!(self.prev() == '/' && self.first() == '/');
.          self.bump();
.
210 ( 0.00%)          let doc_style = match self.first() {
.              // `//!` is an inner line doc comment.
.              '!' => Some(DocStyle::Inner),
.              // `////` (more than 3 slashes) is not considered a doc comment.
45 ( 0.00%)              '/' if self.second() != '/' => Some(DocStyle::Outer),
.              _ => None,
.          };
.
2,645 ( 0.00%)          self.eat_while(|c| c != '\n');
180 ( 0.00%)          LineComment { doc_style }
.      }
.
.      fn block_comment(&mut self) -> TokenKind {
.          debug_assert!(self.prev() == '/' && self.first() == '*');
.          self.bump();
.
.          let doc_style = match self.first() {
.              // `/*!` is an inner block doc comment.
-- line 447 ----------------------------------------
-- line 474 ----------------------------------------
.          }
.
.          BlockComment { doc_style, terminated: depth == 0 }
.      }
.
.      fn whitespace(&mut self) -> TokenKind {
.          debug_assert!(is_whitespace(self.prev()));
.          self.eat_while(is_whitespace);
3,008 ( 0.00%)          Whitespace
.      }
.
.      fn raw_ident(&mut self) -> TokenKind {
.          debug_assert!(self.prev() == 'r' && self.first() == '#' && is_id_start(self.second()));
.          // Eat "#" symbol.
.          self.bump();
.          // Eat the identifier part of RawIdent.
.          self.eat_identifier();
.          RawIdent
.      }
.
780 ( 0.00%)      fn ident_or_unknown_prefix(&mut self) -> TokenKind {
.          debug_assert!(is_id_start(self.prev()));
.          // Start is already eaten, eat the rest of identifier.
312 ( 0.00%)          self.eat_while(is_id_continue);
.          // Known prefixes must have been handled earlier. So if
.          // we see a prefix here, it is definitely an unknown prefix.
312 ( 0.00%)          match self.first() {
.              '#' | '"' | '\'' => UnknownPrefix,
156 ( 0.00%)              c if !c.is_ascii() && unic_emoji_char::is_emoji(c) => {
.                  self.fake_ident_or_unknown_prefix()
.              }
156 ( 0.00%)              _ => Ident,
.          }
624 ( 0.00%)      }
.
.      fn fake_ident_or_unknown_prefix(&mut self) -> TokenKind {
.          // Start is already eaten, eat the rest of identifier.
.          self.eat_while(|c| {
.              unicode_xid::UnicodeXID::is_xid_continue(c)
.                  || (!c.is_ascii() && unic_emoji_char::is_emoji(c))
.                  || c == '\u{200d}'
.          });
-- line 515 ----------------------------------------
-- line 519 ----------------------------------------
.              '#' | '"' | '\'' => UnknownPrefix,
.              _ => InvalidIdent,
.          }
.      }
.
.      fn number(&mut self, first_digit: char) -> LiteralKind {
.          debug_assert!('0' <= self.prev() && self.prev() <= '9');
.          let mut base = Base::Decimal;
6 ( 0.00%)          if first_digit == '0' {
.              // Attempt to parse encoding base.
10 ( 0.00%)              let has_digits = match self.first() {
.                  'b' => {
.                      base = Base::Binary;
.                      self.bump();
.                      self.eat_decimal_digits()
.                  }
.                  'o' => {
.                      base = Base::Octal;
.                      self.bump();
-- line 537 ----------------------------------------
-- line 538 ----------------------------------------
.                      self.eat_decimal_digits()
.                  }
.                  'x' => {
.                      base = Base::Hexadecimal;
.                      self.bump();
.                      self.eat_hexadecimal_digits()
.                  }
.                  // Not a base prefix.
6 ( 0.00%)                  '0'..='9' | '_' | '.' | 'e' | 'E' => {
.                      self.eat_decimal_digits();
.                      true
.                  }
.                  // Just a 0.
.                  _ => return Int { base, empty_int: false },
.              };
.              // Base prefix was provided, but there were no digits
.              // after it, e.g. "0x".
-- line 554 ----------------------------------------
-- line 555 ----------------------------------------
.              if !has_digits {
.                  return Int { base, empty_int: true };
.              }
.          } else {
.              // No base prefix, parse number in the usual way.
.              self.eat_decimal_digits();
.          };
.
6 ( 0.00%)          match self.first() {
.              // Don't be greedy if this is actually an
.              // integer literal followed by field/method access or a range pattern
.              // (`0..2` and `12.foo()`)
.              '.' if self.second() != '.' && !is_id_start(self.second()) => {
.                  // might have stuff after the ., and if it does, it needs to start
.                  // with a number
.                  self.bump();
.                  let mut empty_exponent = false;
-- line 571 ----------------------------------------
-- line 585 ----------------------------------------
.                  self.bump();
.                  let empty_exponent = !self.eat_float_exponent();
.                  Float { base, empty_exponent }
.              }
.              _ => Int { base, empty_int: false },
.          }
.      }
.
19,215 ( 0.01%)      fn lifetime_or_char(&mut self) -> TokenKind {
.          debug_assert!(self.prev() == '\'');
.
5,490 ( 0.00%)          let can_be_a_lifetime = if self.second() == '\'' {
.              // It's surely not a lifetime.
.              false
.          } else {
.              // If the first symbol is valid for identifier, it can be a lifetime.
.              // Also check if it's a number for a better error reporting (so '0 will
.              // be reported as invalid lifetime and not as unterminated char literal).
.              is_id_start(self.first()) || self.first().is_digit(10)
.          };
.
2,734 ( 0.00%)          if !can_be_a_lifetime {
8,235 ( 0.00%)              let terminated = self.single_quoted_string();
.              let suffix_start = self.len_consumed();
5,490 ( 0.00%)              if terminated {
.                  self.eat_literal_suffix();
.              }
.              let kind = Char { terminated };
10,980 ( 0.01%)              return Literal { kind, suffix_start };
.          }
.
.          // Either a lifetime or a character literal with
.          // length greater than 1.
.
.          let starts_with_number = self.first().is_digit(10);
.
.          // Skip the literal contents.
-- line 621 ----------------------------------------
-- line 629 ----------------------------------------
.          // string with single quotes).
.          if self.first() == '\'' {
.              self.bump();
.              let kind = Char { terminated: true };
.              Literal { kind, suffix_start: self.len_consumed() }
.          } else {
.              Lifetime { starts_with_number }
.          }
16,470 ( 0.01%)      }
.
.      fn single_quoted_string(&mut self) -> bool {
.          debug_assert!(self.prev() == '\'');
.          // Check if it's a one-symbol literal.
5,512 ( 0.00%)          if self.second() == '\'' && self.first() != '\\' {
.              self.bump();
.              self.bump();
.              return true;
.          }
.
.          // Literal has more than one symbol.
.
.          // Parse until either quotes are terminated or error is detected.
.          loop {
130,418 ( 0.06%)              match self.first() {
.                  // Quotes are terminated, finish parsing.
.                  '\'' => {
.                      self.bump();
.                      return true;
.                  }
.                  // Probably beginning of the comment, which we don't want to include
.                  // to the error report.
.                  '/' => break,
-- line 660 ----------------------------------------
-- line 670 ----------------------------------------
.                  // Skip the character.
.                  _ => {
.                      self.bump();
.                  }
.              }
.          }
.          // String was not terminated.
.          false
2,745 ( 0.00%)      }
.
.      /// Eats double-quoted string and returns true
.      /// if string is terminated.
.      fn double_quoted_string(&mut self) -> bool {
.          debug_assert!(self.prev() == '"');
.          while let Some(c) = self.bump() {
456 ( 0.00%)              match c {
.                  '"' => {
.                      return true;
.                  }
.                  '\\' if self.first() == '\\' || self.first() == '"' => {
.                      // Bump again to skip escaped character.
.                      self.bump();
.                  }
.                  _ => (),
.              }
.          }
.          // End of file reached.
.          false
5 ( 0.00%)      }
.
.      /// Eats the double-quoted string and returns `n_hashes` and an error if encountered.
.      fn raw_double_quoted_string(&mut self, prefix_len: usize) -> (u16, Option) {
.          // Wrap the actual function to handle the error with too many hashes.
.          // This way, it eats the whole raw string.
.          let (n_hashes, err) = self.raw_string_unvalidated(prefix_len);
.          // Only up to 65535 `#`s are allowed in raw strings
.          match u16::try_from(n_hashes) {
-- line 706 ----------------------------------------
-- line 773 ----------------------------------------
.                      max_hashes = n_end_hashes;
.                  }
.              }
.          }
.
.      fn eat_decimal_digits(&mut self) -> bool {
.          let mut has_digits = false;
.          loop {
4 ( 0.00%)              match self.first() {
.                  '_' => {
.                      self.bump();
.                  }
6 ( 0.00%)                  '0'..='9' => {
.                      has_digits = true;
.                      self.bump();
.                  }
.                  _ => break,
.              }
.          }
.          has_digits
2 ( 0.00%)      }
.
.      fn eat_hexadecimal_digits(&mut self) -> bool {
.          let mut has_digits = false;
.          loop {
.              match self.first() {
.                  '_' => {
.                      self.bump();
.                  }
-- line 801 ----------------------------------------
-- line 816 ----------------------------------------
.          if self.first() == '-' || self.first() == '+' {
.              self.bump();
.          }
.          self.eat_decimal_digits()
.      }
.
.      // Eats the suffix of the literal, e.g. "_u8".
.      fn eat_literal_suffix(&mut self) {
5,506 ( 0.00%)          self.eat_identifier();
.      }
.
.      // Eats the identifier.
8,259 ( 0.00%)      fn eat_identifier(&mut self) {
.          if !is_id_start(self.first()) {
.              return;
.          }
.          self.bump();
.
.          self.eat_while(is_id_continue);
11,012 ( 0.01%)      }
.  }
178,764 ( 0.08%)
--------------------------------------------------------------------------------
-- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_privacy/src/lib.rs
--------------------------------------------------------------------------------
Ir
-- line 59 ----------------------------------------
.          &mut self,
.          def_id: DefId,
.          kind: &str,
.          descr: &dyn fmt::Display,
.      ) -> ControlFlow;
.
.      /// Not overridden, but used to actually visit types and traits.
.      fn skeleton(&mut self) -> DefIdVisitorSkeleton<'_, 'tcx, Self> {
42,685 ( 0.02%)          DefIdVisitorSkeleton {
.              def_id_visitor: self,
.              visited_opaque_tys: Default::default(),
.              dummy: Default::default(),
.          }
.      }
9 ( 0.00%)      fn visit(&mut self, ty_fragment: impl TypeFoldable<'tcx>) -> ControlFlow {
.          ty_fragment.visit_with(&mut self.skeleton())
15 ( 0.00%)      }
6 ( 0.00%)      fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> ControlFlow {
.          self.skeleton().visit_trait(trait_ref)
6 ( 0.00%)      }
.      fn visit_projection_ty(
.          &mut self,
.          projection: ty::ProjectionTy<'tcx>,
.      ) -> ControlFlow {
.          self.skeleton().visit_projection_ty(projection)
.      }
.      fn visit_predicates(
.          &mut self,
.          predicates: ty::GenericPredicates<'tcx>,
.      ) -> ControlFlow {
25 ( 0.00%)          self.skeleton().visit_predicates(predicates)
.      }
.  }
.
.  struct DefIdVisitorSkeleton<'v, 'tcx, V: ?Sized> {
.      def_id_visitor: &'v mut V,
.      visited_opaque_tys: FxHashSet,
.      dummy: PhantomData>,
.  }
.
.  impl<'tcx, V> DefIdVisitorSkeleton<'_, 'tcx, V>
.  where
.      V: DefIdVisitor<'tcx> + ?Sized,
.  {
.      fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> ControlFlow {
1 ( 0.00%)          let TraitRef { def_id, substs } = trait_ref;
55 ( 0.00%)          self.def_id_visitor.visit_def_id(def_id, "trait", &trait_ref.print_only_trait_path())?;
1 ( 0.00%)          if self.def_id_visitor.shallow() { ControlFlow::CONTINUE } else { substs.visit_with(self) }
.      }
.
.      fn visit_projection_ty(
.          &mut self,
.          projection: ty::ProjectionTy<'tcx>,
.      ) -> ControlFlow {
.          let (trait_ref, assoc_substs) =
.              projection.trait_ref_and_own_substs(self.def_id_visitor.tcx());
-- line 114 ----------------------------------------
-- line 116 ----------------------------------------
.          if self.def_id_visitor.shallow() {
.              ControlFlow::CONTINUE
.          } else {
.              assoc_substs.iter().try_for_each(|subst| subst.visit_with(self))
.          }
.      }
.
.      fn visit_predicate(&mut self, predicate: ty::Predicate<'tcx>) -> ControlFlow {
2 ( 0.00%)          match predicate.kind().skip_binder() {
.              ty::PredicateKind::Trait(ty::TraitPredicate {
.                  trait_ref,
.                  constness: _,
.                  polarity: _,
.              }) => self.visit_trait(trait_ref),
.              ty::PredicateKind::Projection(ty::ProjectionPredicate { projection_ty, term }) => {
.                  term.visit_with(self)?;
.                  self.visit_projection_ty(projection_ty)
-- line 132 ----------------------------------------
-- line 157 ----------------------------------------
.                  ACNode::Leaf(leaf) => self.visit_const(leaf),
.                  ACNode::Cast(_, _, ty) => self.visit_ty(ty),
.                  ACNode::Binop(..) | ACNode::UnaryOp(..) | ACNode::FunctionCall(_, _) => {
.                      ControlFlow::CONTINUE
.                  }
.              })
.          }
.
175 ( 0.00%)      fn visit_predicates(
.          &mut self,
.          predicates: ty::GenericPredicates<'tcx>,
.      ) -> ControlFlow {
.          let ty::GenericPredicates { parent: _, predicates } = predicates;
.          predicates.iter().try_for_each(|&(predicate, _span)| self.visit_predicate(predicate))
200 ( 0.00%)      }
.  }
.
.  impl<'tcx, V> TypeVisitor<'tcx> for DefIdVisitorSkeleton<'_, 'tcx, V>
.  where
.      V: DefIdVisitor<'tcx> + ?Sized,
.  {
.      type BreakTy = V::BreakTy;
.
65,559 ( 0.03%)      fn visit_ty(&mut self, ty: Ty<'tcx>) -> ControlFlow {
7,283 ( 0.00%)          let tcx = self.def_id_visitor.tcx();
.          // InternalSubsts are not visited here because they are visited below in `super_visit_with`.
36,399 ( 0.02%)          match *ty.kind() {
48 ( 0.00%)              ty::Adt(&ty::AdtDef { did: def_id, .. }, ..)
.              | ty::Foreign(def_id)
.              | ty::FnDef(def_id, ..)
.              | ty::Closure(def_id, ..)
.              | ty::Generator(def_id, ..) => {
460 ( 0.00%)                  self.def_id_visitor.visit_def_id(def_id, "type", &ty)?;
.                  if self.def_id_visitor.shallow() {
.                      return ControlFlow::CONTINUE;
.                  }
.                  // Default type visitor doesn't visit signatures of fn types.
.                  // Something like `fn() -> Priv {my_func}` is considered a private type even if
.                  // `my_func` is public, so we need to visit signatures.
109 ( 0.00%)                  if let ty::FnDef(..) = ty.kind() {
.                      tcx.fn_sig(def_id).visit_with(self)?;
.                  }
.                  // Inherent static methods don't have self type in substs.
.                  // Something like `fn() {my_method}` type of the method
.                  // `impl Pub { pub fn my_method() {} }` is considered a private type,
.                  // so we need to visit the self type additionally.
186 ( 0.00%)                  if let Some(assoc_item) = tcx.opt_associated_item(def_id) {
44 ( 0.00%)                      if let ty::ImplContainer(impl_def_id) = assoc_item.container {
4 ( 0.00%)                          tcx.type_of(impl_def_id).visit_with(self)?;
.                      }
.                  }
.              }
.              ty::Projection(proj) => {
.                  if self.def_id_visitor.skip_assoc_tys() {
.                      // Visitors searching for minimal visibility/reachability want to
.                      // conservatively approximate associated types like `::Alias`
.                      // as visible/reachable even if both `Type` and `Trait` are private.
-- line 213 ----------------------------------------
-- line 270 ----------------------------------------
.              ty::Bound(..) | ty::Placeholder(..) | ty::Infer(..) => {
.                  bug!("unexpected type: {:?}", ty)
.              }
.          }
.
.          if self.def_id_visitor.shallow() {
.              ControlFlow::CONTINUE
.          } else {
21,739 ( 0.01%)              ty.super_visit_with(self)
.          }
65,417 ( 0.03%)      }
.
.      fn visit_const(&mut self, c: &'tcx Const<'tcx>) -> ControlFlow {
48 ( 0.00%)          self.visit_ty(c.ty)?;
8 ( 0.00%)          let tcx = self.def_id_visitor.tcx();
56 ( 0.00%)          if let Ok(Some(ct)) = AbstractConst::from_const(tcx, c) {
.              self.visit_abstract_const_expr(tcx, ct)?;
.          }
.          ControlFlow::CONTINUE
.      }
.  }
.
.  fn min(vis1: ty::Visibility, vis2: ty::Visibility, tcx: TyCtxt<'_>) -> ty::Visibility {
1 ( 0.00%)      if vis1.is_at_least(vis2, tcx) { vis2 } else { vis1 }
.  }
.
.  ////////////////////////////////////////////////////////////////////////////////
.  /// Visitor used to determine if pub(restricted) is used anywhere in the crate.
.  ///
.  /// This is done so that `private_in_public` warnings can be turned into hard errors
.  /// in crates that have been updated to use pub(restricted).
.  ////////////////////////////////////////////////////////////////////////////////
-- line 301 ----------------------------------------
-- line 303 ----------------------------------------
.      tcx: TyCtxt<'tcx>,
.      has_pub_restricted: bool,
.  }
.
.  impl<'tcx> Visitor<'tcx> for PubRestrictedVisitor<'tcx> {
.      type NestedFilter = nested_filter::All;
.
.      fn nested_visit_map(&mut self) -> Self::Map {
35 ( 0.00%)          self.tcx.hir()
.      }
.      fn visit_vis(&mut self, vis: &'tcx hir::Visibility<'tcx>) {
120 ( 0.00%)          self.has_pub_restricted = self.has_pub_restricted || vis.node.is_pub_restricted();
.      }
.  }
.
.  ////////////////////////////////////////////////////////////////////////////////
.  /// Visitor used to determine impl visibility and reachability.
.  ////////////////////////////////////////////////////////////////////////////////
.
.  struct FindMin<'a, 'tcx, VL: VisibilityLike> {
-- line 322 ----------------------------------------
-- line 336 ----------------------------------------
.          true
.      }
.      fn visit_def_id(
.          &mut self,
.          def_id: DefId,
.          _kind: &str,
.          _descr: &dyn fmt::Display,
.      ) -> ControlFlow {
11 ( 0.00%)          self.min = VL::new_min(self, def_id);
.          ControlFlow::CONTINUE
.      }
.  }
.
.  trait VisibilityLike: Sized {
.      const MAX: Self;
.      const SHALLOW: bool = false;
.      fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self;
.
.      // Returns an over-approximation (`skip_assoc_tys` = true) of visibility due to
.      // associated types for which we can't determine visibility precisely.
21 ( 0.00%)      fn of_impl(def_id: LocalDefId, tcx: TyCtxt<'_>, access_levels: &AccessLevels) -> Self {
10 ( 0.00%)          let mut find = FindMin { tcx, access_levels, min: Self::MAX };
3 ( 0.00%)          find.visit(tcx.type_of(def_id));
9 ( 0.00%)          if let Some(trait_ref) = tcx.impl_trait_ref(def_id) {
1 ( 0.00%)              find.visit_trait(trait_ref);
.          }
2 ( 0.00%)          find.min
24 ( 0.00%)      }
.  }
.  impl VisibilityLike for ty::Visibility {
.      const MAX: Self = ty::Visibility::Public;
7 ( 0.00%)      fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self {
3 ( 0.00%)          min(find.tcx.visibility(def_id), find.min, find.tcx)
10 ( 0.00%)      }
.  }
.  impl VisibilityLike for Option {
.      const MAX: Self = Some(AccessLevel::Public);
.      // Type inference is very smart sometimes.
.      // It can make an impl reachable even some components of its type or trait are unreachable.
.      // E.g. methods of `impl ReachableTrait for ReachableTy { ... }`
.      // can be usable from other crates (#57264). So we skip substs when calculating reachability
.      // and consider an impl reachable if its "shallow" type and trait are reachable.
.      //
.      // The assumption we make here is that type-inference won't let you use an impl without knowing
.      // both "shallow" version of its self type and "shallow" version of its trait if it exists
.      // (which require reaching the `DefId`s in them).
.      const SHALLOW: bool = true;
6 ( 0.00%)      fn new_min(find: &FindMin<'_, '_, Self>, def_id: DefId) -> Self {
.          cmp::min(
6 ( 0.00%)              if let Some(def_id) = def_id.as_local() {
4 ( 0.00%)                  find.access_levels.map.get(&def_id).copied()
.              } else {
.                  Self::MAX
.              },
4 ( 0.00%)              find.min,
.          )
8 ( 0.00%)      }
.  }
.
.  ////////////////////////////////////////////////////////////////////////////////
.  /// The embargo visitor, used to determine the exports of the AST.
.  ////////////////////////////////////////////////////////////////////////////////
.
.  struct EmbargoVisitor<'tcx> {
.      tcx: TyCtxt<'tcx>,
-- line 400 ----------------------------------------
-- line 422 ----------------------------------------
.
.  struct ReachEverythingInTheInterfaceVisitor<'a, 'tcx> {
.      access_level: Option,
.      item_def_id: LocalDefId,
.      ev: &'a mut EmbargoVisitor<'tcx>,
.  }
.
.  impl<'tcx> EmbargoVisitor<'tcx> {
136 ( 0.00%)      fn get(&self, def_id: LocalDefId) -> Option {
.          self.access_levels.map.get(&def_id).copied()
136 ( 0.00%)      }
.
.      fn update_with_hir_id(
.          &mut self,
.          hir_id: hir::HirId,
.          level: Option,
.      ) -> Option {
.          let def_id = self.tcx.hir().local_def_id(hir_id);
.          self.update(def_id, level)
.      }
.
.      /// Updates node level and returns the updated level.
.      fn update(&mut self, def_id: LocalDefId, level: Option) -> Option {
60 ( 0.00%)          let old_level = self.get(def_id);
.          // Accessibility levels can only grow.
.          if level > old_level {
.              self.access_levels.map.insert(def_id, level.unwrap());
3 ( 0.00%)              self.changed = true;
.              level
.          } else {
.              old_level
.          }
.      }
.
.      fn reach(
.          &mut self,
-- line 457 ----------------------------------------
-- line 458 ----------------------------------------
.          def_id: LocalDefId,
.          access_level: Option,
.      ) -> ReachEverythingInTheInterfaceVisitor<'_, 'tcx> {
.          ReachEverythingInTheInterfaceVisitor {
.              access_level: cmp::min(access_level, Some(AccessLevel::Reachable)),
.              item_def_id: def_id,
.              ev: self,
.          }
54 ( 0.00%)      }
.
.      // We have to make sure that the items that macros might reference
.      // are reachable, since they might be exported transitively.
.      fn update_reachability_from_macro(&mut self, local_def_id: LocalDefId, md: &MacroDef) {
.          // Non-opaque macros cannot make other items more accessible than they already are.
.
.          let hir_id = self.tcx.hir().local_def_id_to_hir_id(local_def_id);
.          let attrs = self.tcx.hir().attrs(hir_id);
-- line 474 ----------------------------------------
-- line 630 ----------------------------------------
.  }
.
.  impl<'tcx> Visitor<'tcx> for EmbargoVisitor<'tcx> {
.      type NestedFilter = nested_filter::All;
.
.      /// We want to visit items in the context of their containing
.      /// module and so forth, so supply a crate for doing a deep walk.
.      fn nested_visit_map(&mut self) -> Self::Map {
70 ( 0.00%)          self.tcx.hir()
.      }
.
396 ( 0.00%)      fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
88 ( 0.00%)          let item_level = match item.kind {
.              hir::ItemKind::Impl { .. } => {
.                  let impl_level =
12 ( 0.00%)                      Option::::of_impl(item.def_id, self.tcx, &self.access_levels);
2 ( 0.00%)                  self.update(item.def_id, impl_level)
.              }
168 ( 0.00%)              _ => self.get(item.def_id),
.          };
.
.          // Update levels of nested things.
221 ( 0.00%)          match item.kind {
.              hir::ItemKind::Enum(ref def, _) => {
.                  for variant in def.variants {
.                      let variant_level = self.update_with_hir_id(variant.id, item_level);
.                      if let Some(ctor_hir_id) = variant.data.ctor_hir_id() {
.                          self.update_with_hir_id(ctor_hir_id, item_level);
.                      }
.                      for field in variant.data.fields() {
.                          self.update_with_hir_id(field.hir_id, variant_level);
.                      }
.                  }
.              }
.              hir::ItemKind::Impl(ref impl_) => {
6 ( 0.00%)                  for impl_item_ref in impl_.items {
14 ( 0.00%)                      if impl_.of_trait.is_some()
.                          || self.tcx.visibility(impl_item_ref.id.def_id) == ty::Visibility::Public
.                      {
8 ( 0.00%)                          self.update(impl_item_ref.id.def_id, item_level);
.                      }
.                  }
.              }
6 ( 0.00%)              hir::ItemKind::Trait(.., trait_item_refs) => {
.                  for trait_item_ref in trait_item_refs {
8 ( 0.00%)                      self.update(trait_item_ref.id.def_id, item_level);
.                  }
.              }
.              hir::ItemKind::Struct(ref def, _) | hir::ItemKind::Union(ref def, _) => {
.                  if let Some(ctor_hir_id) = def.ctor_hir_id() {
.                      self.update_with_hir_id(ctor_hir_id, item_level);
.                  }
.                  for field in def.fields() {
.                      if field.vis.node.is_pub() {
-- line 683 ----------------------------------------
-- line 704 ----------------------------------------
.              | hir::ItemKind::TyAlias(..)
.              | hir::ItemKind::Mod(..)
.              | hir::ItemKind::TraitAlias(..)
.              | hir::ItemKind::Fn(..)
.              | hir::ItemKind::ExternCrate(..) => {}
.          }
.
.          // Mark all items in interfaces of reachable items as reachable.
220 ( 0.00%)          match item.kind {
.              // The interface is empty.
.              hir::ItemKind::Macro(..) | hir::ItemKind::ExternCrate(..) => {}
.              // All nested items are checked by `visit_item`.
.              hir::ItemKind::Mod(..) => {}
.              // Handled in the access level of in rustc_resolve
.              hir::ItemKind::Use(..) => {}
.              // The interface is empty.
.              hir::ItemKind::GlobalAsm(..) => {}
-- line 720 ----------------------------------------
-- line 732 ----------------------------------------
.                      self.reach(item.def_id, exist_level).generics().predicates().ty();
.                  }
.              }
.              // Visit everything.
.              hir::ItemKind::Const(..)
.              | hir::ItemKind::Static(..)
.              | hir::ItemKind::Fn(..)
.              | hir::ItemKind::TyAlias(..) => {
12 ( 0.00%)                  if item_level.is_some() {
4 ( 0.00%)                      self.reach(item.def_id, item_level).generics().predicates().ty();
.                  }
.              }
4 ( 0.00%)              hir::ItemKind::Trait(.., trait_item_refs) => {
2 ( 0.00%)                  if item_level.is_some() {
16 ( 0.00%)                      self.reach(item.def_id, item_level).generics().predicates();
.
.                      for trait_item_ref in trait_item_refs {
8 ( 0.00%)                          let mut reach = self.reach(trait_item_ref.id.def_id, item_level);
16 ( 0.00%)                          reach.generics().predicates();
.
4 ( 0.00%)                          if trait_item_ref.kind == AssocItemKind::Type
.                              && !trait_item_ref.defaultness.has_value()
.                          {
.                              // No type to visit.
.                          } else {
8 ( 0.00%)                              reach.ty();
.                          }
.                      }
.                  }
.              }
.              hir::ItemKind::TraitAlias(..) => {
.                  if item_level.is_some() {
.                      self.reach(item.def_id, item_level).generics().predicates();
.                  }
.              }
.              // Visit everything except for private impl items.
.              hir::ItemKind::Impl(ref impl_) => {
2 ( 0.00%)                  if item_level.is_some() {
20 ( 0.00%)                      self.reach(item.def_id, item_level).generics().predicates().ty().trait_ref();
.
2 ( 0.00%)                      for impl_item_ref in impl_.items {
12 ( 0.00%)                          let impl_item_level = self.get(impl_item_ref.id.def_id);
16 ( 0.00%)                          if impl_item_level.is_some() {
40 ( 0.00%)                              self.reach(impl_item_ref.id.def_id, impl_item_level)
.                                  .generics()
.                                  .predicates()
.                                  .ty();
.                          }
.                      }
.                  }
.              }
.
-- line 783 ----------------------------------------
-- line 822 ----------------------------------------
.                              self.reach(def_id, field_level).ty();
.                          }
.                      }
.                  }
.              }
.          }
.
.          let orig_level = mem::replace(&mut self.prev_level, item_level);
132 ( 0.00%)          intravisit::walk_item(self, item);
44 ( 0.00%)          self.prev_level = orig_level;
352 ( 0.00%)      }
.
.      fn visit_block(&mut self, b: &'tcx hir::Block<'tcx>) {
.          // Blocks can have public items, for example impls, but they always
.          // start as completely private regardless of publicity of a function,
.          // constant, type, field, etc., in which this block resides.
.          let orig_level = mem::replace(&mut self.prev_level, None);
.          intravisit::walk_block(self, b);
.          self.prev_level = orig_level;
.      }
.  }
.
.  impl ReachEverythingInTheInterfaceVisitor<'_, '_> {
98 ( 0.00%)      fn generics(&mut self) -> &mut Self {
56 ( 0.00%)          for param in &self.ev.tcx.generics_of(self.item_def_id).params {
20 ( 0.00%)              match param.kind {
.                  GenericParamDefKind::Lifetime => {}
2 ( 0.00%)                  GenericParamDefKind::Type { has_default, .. } => {
2 ( 0.00%)                      if has_default {
.                          self.visit(self.ev.tcx.type_of(param.def_id));
.                      }
.                  }
.                  GenericParamDefKind::Const { has_default, .. } => {
.                      self.visit(self.ev.tcx.type_of(param.def_id));
.                      if has_default {
.                          self.visit(self.ev.tcx.const_param_default(param.def_id));
.                      }
.                  }
.              }
.          }
.          self
126 ( 0.00%)      }
.
70 ( 0.00%)      fn predicates(&mut self) -> &mut Self {
42 ( 0.00%)          self.visit_predicates(self.ev.tcx.predicates_of(self.item_def_id));
.          self
84 ( 0.00%)      }
.
60 ( 0.00%)      fn ty(&mut self) -> &mut Self {
36 ( 0.00%)          self.visit(self.ev.tcx.type_of(self.item_def_id));
.          self
72 ( 0.00%)      }
.
.      fn trait_ref(&mut self) -> &mut Self {
12 ( 0.00%)          if let Some(trait_ref) = self.ev.tcx.impl_trait_ref(self.item_def_id) {
.              self.visit_trait(trait_ref);
.          }
.          self
.      }
.  }
.
.  impl<'tcx> DefIdVisitor<'tcx> for ReachEverythingInTheInterfaceVisitor<'_, 'tcx> {
.      fn tcx(&self) -> TyCtxt<'tcx> {
116 ( 0.00%)          self.ev.tcx
.      }
60 ( 0.00%)      fn visit_def_id(
.          &mut self,
.          def_id: DefId,
.          _kind: &str,
.          _descr: &dyn fmt::Display,
.      ) -> ControlFlow {
36 ( 0.00%)          if let Some(def_id) = def_id.as_local() {
48 ( 0.00%)              if let (ty::Visibility::Public, _) | (_, Some(AccessLevel::ReachableFromImplTrait)) =
12 ( 0.00%)                  (self.tcx().visibility(def_id.to_def_id()), self.access_level)
.              {
.                  self.ev.update(def_id, self.access_level);
.              }
.          }
.          ControlFlow::CONTINUE
84 ( 0.00%)      }
.  }
.
.  //////////////////////////////////////////////////////////////////////////////////////
.  /// Name privacy visitor, checks privacy and reports violations.
.  /// Most of name privacy checks are performed during the main resolution phase,
.  /// or later in type checking when field accesses and associated items are resolved.
.  /// This pass performs remaining checks for fields in struct expressions and patterns.
.  //////////////////////////////////////////////////////////////////////////////////////
-- line 909 ----------------------------------------
-- line 964 ----------------------------------------
.  }
.
.  impl<'tcx> Visitor<'tcx> for NamePrivacyVisitor<'tcx> {
.      type NestedFilter = nested_filter::All;
.
.      /// We want to visit items in the context of their containing
.      /// module and so forth, so supply a crate for doing a deep walk.
.      fn nested_visit_map(&mut self) -> Self::Map {
23 ( 0.00%)          self.tcx.hir()
.      }
.
.      fn visit_mod(&mut self, _m: &'tcx hir::Mod<'tcx>, _s: Span, _n: hir::HirId) {
.          // Don't visit nested modules, since we run a separate visitor walk
.          // for each module in `privacy_access_levels`
.      }
.
.      fn visit_nested_body(&mut self, body: hir::BodyId) {
.          let old_maybe_typeck_results =
50 ( 0.00%)              self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
54 ( 0.00%)          let body = self.tcx.hir().body(body);
.          self.visit_body(body);
13 ( 0.00%)          self.maybe_typeck_results = old_maybe_typeck_results;
.      }
.
.      fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
22 ( 0.00%)          let orig_current_item = mem::replace(&mut self.current_item, item.def_id);
63 ( 0.00%)          intravisit::walk_item(self, item);
22 ( 0.00%)          self.current_item = orig_current_item;
.      }
.
37,782 ( 0.02%)      fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
8,396 ( 0.00%)          if let hir::ExprKind::Struct(qpath, fields, ref base) = expr.kind {
.              let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
.              let adt = self.typeck_results().expr_ty(expr).ty_adt_def().unwrap();
.              let variant = adt.variant_of_res(res);
.              if let Some(base) = *base {
.                  // If the expression uses FRU we need to make sure all the unmentioned fields
.                  // are checked for privacy (RFC 736). Rather than computing the set of
.                  // unmentioned fields, just check them all.
.                  for (vf_index, variant_field) in variant.fields.iter().enumerate() {
-- line 1003 ----------------------------------------
-- line 1014 ----------------------------------------
.                  for field in fields {
.                      let use_ctxt = field.ident.span;
.                      let index = self.tcx.field_index(field.hir_id, self.typeck_results());
.                      self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false);
.                  }
.              }
.          }
.
41,980 ( 0.02%)          intravisit::walk_expr(self, expr);
.      }
.
80 ( 0.00%)      fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
20 ( 0.00%)          if let PatKind::Struct(ref qpath, fields, _) = pat.kind {
.              let res = self.typeck_results().qpath_res(qpath, pat.hir_id);
.              let adt = self.typeck_results().pat_ty(pat).ty_adt_def().unwrap();
.              let variant = adt.variant_of_res(res);
.              for field in fields {
.                  let use_ctxt = field.ident.span;
.                  let index = self.tcx.field_index(field.hir_id, self.typeck_results());
.                  self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false);
.              }
.          }
.
90 ( 0.00%)          intravisit::walk_pat(self, pat);
.      }
.  }
.
.  ////////////////////////////////////////////////////////////////////////////////////////////
.  /// Type privacy visitor, checks types for privacy and reports violations.
.  /// Both explicitly written types and inferred types of expressions and patterns are checked.
.  /// Checks are performed on "semantic" types regardless of names and their hygiene.
.  ////////////////////////////////////////////////////////////////////////////////////////////
-- line 1045 ----------------------------------------
-- line 1061 ----------------------------------------
.              .expect("`TypePrivacyVisitor::typeck_results` called outside of body")
.      }
.
.      fn item_is_accessible(&self, did: DefId) -> bool {
.          self.tcx.visibility(did).is_accessible_from(self.current_item.to_def_id(), self.tcx)
.      }
.
.      // Take node-id of an expression or pattern and check its type for privacy.
29,456 ( 0.01%)      fn check_expr_pat_type(&mut self, id: hir::HirId, span: Span) -> bool {
4,208 ( 0.00%)          self.span = span;
4,208 ( 0.00%)          let typeck_results = self.typeck_results();
.          let result: ControlFlow<()> = try {
16,832 ( 0.01%)              self.visit(typeck_results.node_type(id))?;
33,664 ( 0.02%)              self.visit(typeck_results.node_substs(id))?;
37,872 ( 0.02%)              if let Some(adjustments) = typeck_results.adjustments().get(id) {
.                  adjustments.iter().try_for_each(|adjustment| self.visit(adjustment.target))?;
.              }
.          };
.          result.is_break()
33,664 ( 0.02%)      }
.
.      fn check_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool {
50 ( 0.00%)          let is_error = !self.item_is_accessible(def_id);
.          if is_error {
.              self.tcx
.                  .sess
.                  .struct_span_err(self.span, &format!("{} `{}` is private", kind, descr))
.                  .span_label(self.span, &format!("private {}", kind))
.                  .emit();
.          }
.          is_error
-- line 1091 ----------------------------------------
-- line 1093 ----------------------------------------
.      }
.
.  impl<'tcx> Visitor<'tcx> for TypePrivacyVisitor<'tcx> {
.      type NestedFilter = nested_filter::All;
.
.      /// We want to visit items in the context of their containing
.      /// module and so forth, so supply a crate for doing a deep walk.
.      fn nested_visit_map(&mut self) -> Self::Map {
23 ( 0.00%)          self.tcx.hir()
.      }
.
.      fn visit_mod(&mut self, _m: &'tcx hir::Mod<'tcx>, _s: Span, _n: hir::HirId) {
.          // Don't visit nested modules, since we run a separate visitor walk
.          // for each module in `privacy_access_levels`
.      }
.
90 ( 0.00%)      fn visit_nested_body(&mut self, body: hir::BodyId) {
.          let old_maybe_typeck_results =
27 ( 0.00%)              self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
54 ( 0.00%)          let body = self.tcx.hir().body(body);
.          self.visit_body(body);
9 ( 0.00%)          self.maybe_typeck_results = old_maybe_typeck_results;
72 ( 0.00%)      }
.
.      fn visit_generic_arg(&mut self, generic_arg: &'tcx hir::GenericArg<'tcx>) {
.          match generic_arg {
.              hir::GenericArg::Type(t) => self.visit_ty(t),
.              hir::GenericArg::Infer(inf) => self.visit_infer(inf),
.              hir::GenericArg::Lifetime(_) | hir::GenericArg::Const(_) => {}
.          }
.      }
.
245 ( 0.00%)      fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
70 ( 0.00%)          self.span = hir_ty.span;
105 ( 0.00%)          if let Some(typeck_results) = self.maybe_typeck_results {
.              // Types in bodies.
3 ( 0.00%)              if self.visit(typeck_results.node_type(hir_ty.hir_id)).is_break() {
.                  return;
.              }
.          } else {
.              // Types in signatures.
.              // FIXME: This is very ineffective. Ideally each HIR type should be converted
.              // into a semantic type only once and the result should be cached somehow.
102 ( 0.00%)              if self.visit(rustc_typeck::hir_ty_to_ty(self.tcx, hir_ty)).is_break() {
.                  return;
.              }
.          }
.
105 ( 0.00%)          intravisit::walk_ty(self, hir_ty);
210 ( 0.00%)      }
.
.      fn visit_infer(&mut self, inf: &'tcx hir::InferArg) {
.          self.span = inf.span;
.          if let Some(typeck_results) = self.maybe_typeck_results {
if let Some(ty) = typeck_results.node_type_opt(inf.hir_id) { . if self.visit(ty).is_break() { . return; . } -- line 1150 ---------------------------------------- -- line 1152 ---------------------------------------- . // We don't do anything for const infers here. . } . } else { . bug!("visit_infer without typeck_results"); . } . intravisit::walk_inf(self, inf); . } . 8 ( 0.00%) fn visit_trait_ref(&mut self, trait_ref: &'tcx hir::TraitRef<'tcx>) { 3 ( 0.00%) self.span = trait_ref.path.span; 1 ( 0.00%) if self.maybe_typeck_results.is_none() { . // Avoid calling `hir_trait_to_predicates` in bodies, it will ICE. . // The traits' privacy in bodies is already checked as a part of trait object types. 1 ( 0.00%) let bounds = rustc_typeck::hir_trait_to_predicates( 1 ( 0.00%) self.tcx, . trait_ref, . // NOTE: This isn't really right, but the actual type doesn't matter here. It's . // just required by `ty::TraitRef`. 3 ( 0.00%) self.tcx.types.never, . ); . 9 ( 0.00%) for (trait_predicate, _, _) in bounds.trait_bounds { 7 ( 0.00%) if self.visit_trait(trait_predicate.skip_binder()).is_break() { . return; . } . } . 6 ( 0.00%) for (poly_predicate, _) in bounds.projection_bounds { . let pred = poly_predicate.skip_binder(); . let poly_pred_term = self.visit(pred.term); . if poly_pred_term.is_break() . || self.visit_projection_ty(pred.projection_ty).is_break() . { . return; . } . } . } . . intravisit::walk_trait_ref(self, trait_ref); 8 ( 0.00%) } . . // Check types of expressions 37,782 ( 0.02%) fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) { 37,782 ( 0.02%) if self.check_expr_pat_type(expr.hir_id, expr.span) { . // Do not check nested expressions if the error already happened. . return; . } 29,378 ( 0.01%) match expr.kind { . hir::ExprKind::Assign(_, rhs, _) | hir::ExprKind::Match(rhs, ..) => { . // Do not report duplicate errors for `x = y` and `match x { ... }`. . if self.check_expr_pat_type(rhs.hir_id, rhs.span) { . return; . } . } 2 ( 0.00%) hir::ExprKind::MethodCall(segment, ..) => { . // Method calls have to be checked specially. 4 ( 0.00%) self.span = segment.ident.span; 12 ( 0.00%) if let Some(def_id) = self.typeck_results().type_dependent_def_id(expr.hir_id) { 6 ( 0.00%) if self.visit(self.tcx.type_of(def_id)).is_break() { . return; . } . } else { . self.tcx . .sess . .delay_span_bug(expr.span, "no type-dependent def for method call"); . } . } . _ => {} . } . 12,594 ( 0.01%) intravisit::walk_expr(self, expr); 33,584 ( 0.02%) } . . // Prohibit access to associated items with insufficient nominal visibility. . // . // Additionally, until better reachability analysis for macros 2.0 is available, . // we prohibit access to private statics from other crates, this allows to give . // more code internal visibility at link time. (Access to private functions . // is already prohibited by type privacy for function types.) 550 ( 0.00%) fn visit_qpath(&mut self, qpath: &'tcx hir::QPath<'tcx>, id: hir::HirId, span: Span) { 110 ( 0.00%) let def = match qpath { 165 ( 0.00%) hir::QPath::Resolved(_, path) => match path.res { 54 ( 0.00%) Res::Def(kind, def_id) => Some((kind, def_id)), . _ => None, . }, . hir::QPath::TypeRelative(..) | hir::QPath::LangItem(..) => self . .maybe_typeck_results . .and_then(|typeck_results| typeck_results.type_dependent_def(id)), . }; . let def = def.filter(|(kind, _)| { 18 ( 0.00%) matches!( . kind, . DefKind::AssocFn | DefKind::AssocConst | DefKind::AssocTy | DefKind::Static . ) . }); . if let Some((kind, def_id)) = def { . let is_local_static = . 
if let DefKind::Static = kind { def_id.is_local() } else { false }; . if !self.item_is_accessible(def_id) && !is_local_static { -- line 1250 ---------------------------------------- -- line 1263 ---------------------------------------- . }; . sess.struct_span_err(span, &msg) . .span_label(span, &format!("private {}", kind)) . .emit(); . return; . } . } . 605 ( 0.00%) intravisit::walk_qpath(self, qpath, id, span); . } . . // Check types of patterns. . fn visit_pat(&mut self, pattern: &'tcx hir::Pat<'tcx>) { 63 ( 0.00%) if self.check_expr_pat_type(pattern.hir_id, pattern.span) { . // Do not check nested patterns if the error already happened. . return; . } . 36 ( 0.00%) intravisit::walk_pat(self, pattern); . } . . fn visit_local(&mut self, local: &'tcx hir::Local<'tcx>) { . if let Some(init) = local.init { . if self.check_expr_pat_type(init.hir_id, init.span) { . // Do not report duplicate errors for `let x = y`. . return; . } . } . . intravisit::walk_local(self, local); . } . . // Check types in item interfaces. . fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 22 ( 0.00%) let orig_current_item = mem::replace(&mut self.current_item, item.def_id); . let old_maybe_typeck_results = self.maybe_typeck_results.take(); 63 ( 0.00%) intravisit::walk_item(self, item); 22 ( 0.00%) self.maybe_typeck_results = old_maybe_typeck_results; 36 ( 0.00%) self.current_item = orig_current_item; . } . } . . impl<'tcx> DefIdVisitor<'tcx> for TypePrivacyVisitor<'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { 14,386 ( 0.01%) self.tcx . } 375 ( 0.00%) fn visit_def_id( . &mut self, . def_id: DefId, . kind: &str, . descr: &dyn fmt::Display, . ) -> ControlFlow { . if self.check_def_id(def_id, kind, descr) { . ControlFlow::BREAK . } else { . ControlFlow::CONTINUE . } 225 ( 0.00%) } . } . . /////////////////////////////////////////////////////////////////////////////// . /// Obsolete visitors for checking for private items in public interfaces. . /// These visitors are supposed to be kept in frozen state and produce an . /// "old error node set". For backward compatibility the new visitor reports . /// warnings instead of hard errors when the erroneous node is not in this old set. . /////////////////////////////////////////////////////////////////////////////// -- line 1328 ---------------------------------------- -- line 1342 ---------------------------------------- . /// Whether we've recurred at all (i.e., if we're pointing at the . /// first type on which `visit_ty` was called). . at_outer_type: bool, . /// Whether that first type is a public path. . outer_type_is_public_path: bool, . } . . impl<'a, 'tcx> ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { 16 ( 0.00%) fn path_is_private_type(&self, path: &hir::Path<'_>) -> bool { 48 ( 0.00%) let did = match path.res { . Res::PrimTy(..) | Res::SelfTy(..) | Res::Err => return false, . res => res.def_id(), . }; . . // A path can only be private if: . // it's in this crate... . if let Some(did) = did.as_local() { . // .. and it corresponds to a private type in the AST (this returns -- line 1359 ---------------------------------------- -- line 1360 ---------------------------------------- . // `None` for type parameters). . match self.tcx.hir().find(self.tcx.hir().local_def_id_to_hir_id(did)) { . Some(Node::Item(item)) => !item.vis.node.is_pub(), . Some(_) | None => false, . } . } else { . false . } 32 ( 0.00%) } . . fn trait_is_public(&self, trait_id: LocalDefId) -> bool { . // FIXME: this would preferably be using `exported_items`, but all . 
// traits are exported currently (see `EmbargoVisitor.exported_trait`). . self.access_levels.is_public(trait_id) . } . . fn check_generic_bound(&mut self, bound: &hir::GenericBound<'_>) { -- line 1376 ---------------------------------------- -- line 1377 ---------------------------------------- . if let hir::GenericBound::Trait(ref trait_ref, _) = *bound { . if self.path_is_private_type(trait_ref.trait_ref.path) { . self.old_error_set.insert(trait_ref.trait_ref.hir_ref_id); . } . } . } . . fn item_is_public(&self, def_id: LocalDefId, vis: &hir::Visibility<'_>) -> bool { 20 ( 0.00%) self.access_levels.is_reachable(def_id) || vis.node.is_pub() . } . } . . impl<'a, 'b, 'tcx, 'v> Visitor<'v> for ObsoleteCheckTypeForPrivatenessVisitor<'a, 'b, 'tcx> { . fn visit_generic_arg(&mut self, generic_arg: &'v hir::GenericArg<'v>) { . match generic_arg { . hir::GenericArg::Type(t) => self.visit_ty(t), . hir::GenericArg::Infer(inf) => self.visit_ty(&inf.to_ty()), . hir::GenericArg::Lifetime(_) | hir::GenericArg::Const(_) => {} . } . } . . fn visit_ty(&mut self, ty: &hir::Ty<'_>) { 5 ( 0.00%) if let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = ty.kind { 4 ( 0.00%) if self.inner.path_is_private_type(path) { . self.contains_private = true; . // Found what we're looking for, so let's stop working. . return; . } . } . if let hir::TyKind::Path(_) = ty.kind { . if self.at_outer_type { 1 ( 0.00%) self.outer_type_is_public_path = true; . } . } 2 ( 0.00%) self.at_outer_type = false; 2 ( 0.00%) intravisit::walk_ty(self, ty) 4 ( 0.00%) } . . // Don't want to recurse into `[, .. expr]`. . fn visit_expr(&mut self, _: &hir::Expr<'_>) {} . } . . impl<'a, 'tcx> Visitor<'tcx> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> { . type NestedFilter = nested_filter::All; . . /// We want to visit items in the context of their containing . /// module and so forth, so supply a crate for doing a deep walk. . fn nested_visit_map(&mut self) -> Self::Map { 22 ( 0.00%) self.tcx.hir() . } . 135 ( 0.00%) fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 60 ( 0.00%) match item.kind { . // Contents of a private mod can be re-exported, so we need . // to check internals. . hir::ItemKind::Mod(_) => {} . . // An `extern {}` doesn't introduce a new privacy . // namespace (the contents have their own privacies). . hir::ItemKind::ForeignMod { .. } => {} . 2 ( 0.00%) hir::ItemKind::Trait(.., bounds, _) => { 5 ( 0.00%) if !self.trait_is_public(item.def_id) { . return; . } . . for bound in bounds.iter() { . self.check_generic_bound(bound) . } . } . -- line 1447 ---------------------------------------- -- line 1454 ---------------------------------------- . // `impl [... for] Private` is never visible. . let self_contains_private; . // `impl [... for] Public<...>`, but not `impl [... for] . // Vec` or `(Public,)`, etc. . let self_is_public_path; . . // Check the properties of the `Self` type: . { 4 ( 0.00%) let mut visitor = ObsoleteCheckTypeForPrivatenessVisitor { . inner: self, . contains_private: false, . at_outer_type: true, . outer_type_is_public_path: false, . }; 1 ( 0.00%) visitor.visit_ty(impl_.self_ty); . self_contains_private = visitor.contains_private; . self_is_public_path = visitor.outer_type_is_public_path; . } . . // Miscellaneous info about the impl: . . // `true` iff this is `impl Private for ...`. . let not_private_trait = impl_.of_trait.as_ref().map_or( . true, // no trait counts as public trait . |tr| { 3 ( 0.00%) if let Some(def_id) = tr.path.res.def_id().as_local() { 3 ( 0.00%) self.trait_is_public(def_id) . 
} else { . true // external traits must be public . } . }, . ); . . // `true` iff this is a trait impl or at least one method is public. . // -- line 1488 ---------------------------------------- -- line 1498 ---------------------------------------- . match impl_item.kind { . hir::ImplItemKind::Const(..) | hir::ImplItemKind::Fn(..) => { . self.access_levels.is_reachable(impl_item_ref.id.def_id) . } . hir::ImplItemKind::TyAlias(_) => false, . } . }); . 2 ( 0.00%) if !self_contains_private && not_private_trait && trait_or_some_public_method { . intravisit::walk_generics(self, &impl_.generics); . 2 ( 0.00%) match impl_.of_trait { . None => { . for impl_item_ref in impl_.items { . // This is where we choose whether to walk down . // further into the impl to check its items. We . // should only walk into public items so that we . // don't erroneously report errors for private . // types in private items. . let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); -- line 1517 ---------------------------------------- -- line 1538 ---------------------------------------- . // . // Those in 1. can only occur if the trait is in . // this crate and will've been warned about on the . // trait definition (there's no need to warn twice . // so we don't check the methods). . // . // Those in 2. are warned via walk_generics and this . // call here. 3 ( 0.00%) intravisit::walk_path(self, tr.path); . . // Those in 3. are warned with this call. 1 ( 0.00%) for impl_item_ref in impl_.items { 10 ( 0.00%) let impl_item = self.tcx.hir().impl_item(impl_item_ref.id); 11 ( 0.00%) if let hir::ImplItemKind::TyAlias(ty) = impl_item.kind { . self.visit_ty(ty); . } . } . } . } . } else if impl_.of_trait.is_none() && self_is_public_path { . // `impl Public { ... }`. Any public static . // methods will be visible as `Public::foo`. -- line 1559 ---------------------------------------- -- line 1584 ---------------------------------------- . return; . } . . // `type ... = ...;` can contain private types, because . // we're introducing a new name. . hir::ItemKind::TyAlias(..) => return, . . // Not at all public, so we don't care. 67 ( 0.00%) _ if !self.item_is_public(item.def_id, &item.vis) => { . return; . } . . _ => {} . } . . // We've carefully constructed it so that if we're here, then . // any `visit_ty`'s will be called on things that are in . // public signatures, i.e., things that we're interested in for . // this visitor. 90 ( 0.00%) intravisit::walk_item(self, item); 48 ( 0.00%) } . 40 ( 0.00%) fn visit_generics(&mut self, generics: &'tcx hir::Generics<'tcx>) { . for param in generics.params { . for bound in param.bounds { . self.check_generic_bound(bound); . } . } 5 ( 0.00%) for predicate in generics.where_clause.predicates { . match predicate { . hir::WherePredicate::BoundPredicate(bound_pred) => { . for bound in bound_pred.bounds.iter() { . self.check_generic_bound(bound) . } . } . hir::WherePredicate::RegionPredicate(_) => {} . hir::WherePredicate::EqPredicate(eq_pred) => { . self.visit_ty(eq_pred.rhs_ty); . } . } . } 40 ( 0.00%) } . . fn visit_foreign_item(&mut self, item: &'tcx hir::ForeignItem<'tcx>) { . if self.access_levels.is_reachable(item.def_id) { . intravisit::walk_foreign_item(self, item) . } . } . . fn visit_ty(&mut self, t: &'tcx hir::Ty<'tcx>) { 81 ( 0.00%) if let hir::TyKind::Path(hir::QPath::Resolved(_, path)) = t.kind { 60 ( 0.00%) if self.path_is_private_type(path) { . self.old_error_set.insert(t.hir_id); . } . } 93 ( 0.00%) intravisit::walk_ty(self, t) . } . . fn visit_variant( . 
&mut self, . v: &'tcx hir::Variant<'tcx>, . g: &'tcx hir::Generics<'tcx>, . item_id: hir::HirId, . ) { -- line 1647 ---------------------------------------- -- line 1679 ---------------------------------------- . /// The visitor checks that each component type is at least this visible. . required_visibility: ty::Visibility, . has_pub_restricted: bool, . has_old_errors: bool, . in_assoc_ty: bool, . } . . impl SearchInterfaceForPrivateItemsVisitor<'_> { 77 ( 0.00%) fn generics(&mut self) -> &mut Self { 33 ( 0.00%) for param in &self.tcx.generics_of(self.item_def_id).params { 8 ( 0.00%) match param.kind { . GenericParamDefKind::Lifetime => {} 1 ( 0.00%) GenericParamDefKind::Type { has_default, .. } => { 1 ( 0.00%) if has_default { . self.visit(self.tcx.type_of(param.def_id)); . } . } . // FIXME(generic_const_exprs): May want to look inside const here . GenericParamDefKind::Const { .. } => { . self.visit(self.tcx.type_of(param.def_id)); . } . } . } . self 88 ( 0.00%) } . 55 ( 0.00%) fn predicates(&mut self) -> &mut Self { . // N.B., we use `explicit_predicates_of` and not `predicates_of` . // because we don't want to report privacy errors due to where . // clauses that the compiler inferred. We only want to . // consider the ones that the user wrote. This is important . // for the inferred outlives rules; see . // `src/test/ui/rfc-2093-infer-outlives/privacy.rs`. 22 ( 0.00%) self.visit_predicates(self.tcx.explicit_predicates_of(self.item_def_id)); . self 66 ( 0.00%) } . . fn bounds(&mut self) -> &mut Self { . self.visit_predicates(ty::GenericPredicates { . parent: None, . predicates: self.tcx.explicit_item_bounds(self.item_def_id), . }); . self . } . 50 ( 0.00%) fn ty(&mut self) -> &mut Self { 20 ( 0.00%) self.visit(self.tcx.type_of(self.item_def_id)); . self 50 ( 0.00%) } . . fn check_def_id(&mut self, def_id: DefId, kind: &str, descr: &dyn fmt::Display) -> bool { 34 ( 0.00%) if self.leaks_private_dep(def_id) { . self.tcx.struct_span_lint_hir( . lint::builtin::EXPORTED_PRIVATE_DEPENDENCIES, . self.tcx.hir().local_def_id_to_hir_id(self.item_def_id), . self.tcx.def_span(self.item_def_id.to_def_id()), . |lint| { . lint.build(&format!( . "{} `{}` from private dependency '{}' in public \ . interface", -- line 1738 ---------------------------------------- -- line 1740 ---------------------------------------- . descr, . self.tcx.crate_name(def_id.krate) . )) . .emit() . }, . ); . } . 21 ( 0.00%) let hir_id = match def_id.as_local() { 7 ( 0.00%) Some(def_id) => self.tcx.hir().local_def_id_to_hir_id(def_id), . None => return false, . }; . . let vis = self.tcx.visibility(def_id); 13 ( 0.00%) if !vis.is_at_least(self.required_visibility, self.tcx) { . let vis_descr = match vis { . ty::Visibility::Public => "public", . ty::Visibility::Invisible => "private", . ty::Visibility::Restricted(vis_def_id) => { . if vis_def_id == self.tcx.parent_module(hir_id).to_def_id() { . "private" . } else if vis_def_id.is_top_level_module() { . "crate-private" -- line 1762 ---------------------------------------- -- line 1792 ---------------------------------------- . false . } . . /// An item is 'leaked' from a private dependency if all . /// of the following are true: . /// 1. It's contained within a public type . /// 2. It comes from a private crate . fn leaks_private_dep(&self, item_id: DefId) -> bool { 21 ( 0.00%) let ret = self.required_visibility.is_public() && self.tcx.is_private_dep(item_id.krate); . . tracing::debug!("leaks_private_dep(item_id={:?})={}", item_id, ret); . ret . } . } . . 
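// The two-part rule in `leaks_private_dep` above, restated as a standalone
// toy (illustrative names): an item leaks a private dependency only when it
// sits in a public interface *and* its defining crate was declared as a
// private dependency.
#[derive(Clone, Copy, PartialEq)]
enum Vis { Public, Restricted }

fn leaks_private_dep(required_visibility: Vis, crate_is_private_dep: bool) -> bool {
    required_visibility == Vis::Public && crate_is_private_dep
}

fn main() {
    assert!(leaks_private_dep(Vis::Public, true));
    assert!(!leaks_private_dep(Vis::Restricted, true)); // not in a public interface
    assert!(!leaks_private_dep(Vis::Public, false));    // the dependency is public
}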
impl<'tcx> DefIdVisitor<'tcx> for SearchInterfaceForPrivateItemsVisitor<'tcx> { . fn tcx(&self) -> TyCtxt<'tcx> { 44 ( 0.00%) self.tcx . } 112 ( 0.00%) fn visit_def_id( . &mut self, . def_id: DefId, . kind: &str, . descr: &dyn fmt::Display, . ) -> ControlFlow { . if self.check_def_id(def_id, kind, descr) { . ControlFlow::BREAK . } else { . ControlFlow::CONTINUE . } 63 ( 0.00%) } . } . . struct PrivateItemsInPublicInterfacesVisitor<'tcx> { . tcx: TyCtxt<'tcx>, . has_pub_restricted: bool, . old_error_set_ancestry: LocalDefIdSet, . } . . impl<'tcx> PrivateItemsInPublicInterfacesVisitor<'tcx> { . fn check( . &self, . def_id: LocalDefId, . required_visibility: ty::Visibility, . ) -> SearchInterfaceForPrivateItemsVisitor<'tcx> { 93 ( 0.00%) SearchInterfaceForPrivateItemsVisitor { 4 ( 0.00%) tcx: self.tcx, . item_def_id: def_id, . required_visibility, 11 ( 0.00%) has_pub_restricted: self.has_pub_restricted, . has_old_errors: self.old_error_set_ancestry.contains(&def_id), . in_assoc_ty: false, . } . } . 52 ( 0.00%) fn check_assoc_item( . &self, . def_id: LocalDefId, . assoc_item_kind: AssocItemKind, . defaultness: hir::Defaultness, . vis: ty::Visibility, . ) { . let mut check = self.check(def_id, vis); . 8 ( 0.00%) let (check_ty, is_assoc_ty) = match assoc_item_kind { . AssocItemKind::Const | AssocItemKind::Fn { .. } => (true, false), . AssocItemKind::Type => (defaultness.has_value(), true), . }; 8 ( 0.00%) check.in_assoc_ty = is_assoc_ty; 16 ( 0.00%) check.generics().predicates(); . if check_ty { 4 ( 0.00%) check.ty(); . } 32 ( 0.00%) } . } . . impl<'tcx> Visitor<'tcx> for PrivateItemsInPublicInterfacesVisitor<'tcx> { . type NestedFilter = nested_filter::OnlyBodies; . . fn nested_visit_map(&mut self) -> Self::Map { 2 ( 0.00%) self.tcx.hir() . } . 198 ( 0.00%) fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) { 22 ( 0.00%) let tcx = self.tcx; 22 ( 0.00%) let item_visibility = tcx.visibility(item.def_id); . 110 ( 0.00%) match item.kind { . // Crates are always public. . hir::ItemKind::ExternCrate(..) => {} . // All nested items are checked by `visit_item`. . hir::ItemKind::Mod(..) => {} . // Checked in resolve. . hir::ItemKind::Use(..) => {} . // No subitems. . hir::ItemKind::Macro(..) | hir::ItemKind::GlobalAsm(..) => {} . // Subitems of these items have inherited publicity. . hir::ItemKind::Const(..) . | hir::ItemKind::Static(..) . | hir::ItemKind::Fn(..) . | hir::ItemKind::TyAlias(..) => { 36 ( 0.00%) self.check(item.def_id, item_visibility).generics().predicates().ty(); . } . hir::ItemKind::OpaqueTy(..) => { . // `ty()` for opaque types is the underlying type, . // it's not a part of interface, so we skip it. . self.check(item.def_id, item_visibility).generics().bounds(); . } 4 ( 0.00%) hir::ItemKind::Trait(.., trait_item_refs) => { 4 ( 0.00%) self.check(item.def_id, item_visibility).generics().predicates(); . . for trait_item_ref in trait_item_refs { 12 ( 0.00%) self.check_assoc_item( 2 ( 0.00%) trait_item_ref.id.def_id, . trait_item_ref.kind, . trait_item_ref.defaultness, . item_visibility, . ); . 9 ( 0.00%) if let AssocItemKind::Type = trait_item_ref.kind { . self.check(trait_item_ref.id.def_id, item_visibility).bounds(); . } . } . } . hir::ItemKind::TraitAlias(..) => { . self.check(item.def_id, item_visibility).generics().predicates(); . } . hir::ItemKind::Enum(ref def, _) => { -- line 1919 ---------------------------------------- -- line 1942 ---------------------------------------- . self.check(def_id, min(item_visibility, field_visibility, tcx)).ty(); . } . } . 
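// The interface checks above are driven in a fluent style:
// `self.check(def_id, vis).generics().predicates().ty()`. Each method visits
// one facet of an item's interface and returns `&mut Self` so the facets can
// be composed per item kind. A minimal sketch of that shape (hypothetical
// names, not the real visitor):
struct InterfaceChecker { visited: Vec<&'static str> }

impl InterfaceChecker {
    fn generics(&mut self) -> &mut Self {
        self.visited.push("generics"); // defaults of type/const params
        self
    }
    fn predicates(&mut self) -> &mut Self {
        self.visited.push("predicates"); // explicitly written where-clauses only
        self
    }
    fn ty(&mut self) -> &mut Self {
        self.visited.push("ty"); // the item's own type
        self
    }
}

fn main() {
    let mut check = InterfaceChecker { visited: Vec::new() };
    check.generics().predicates().ty();
    assert_eq!(check.visited, ["generics", "predicates", "ty"]);
}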
// An inherent impl is public when its type is public . // Subitems of inherent impls have their own publicity. . // A trait impl is public when both its type and its trait are public . // Subitems of trait impls have inherited publicity. . hir::ItemKind::Impl(ref impl_) => { 4 ( 0.00%) let impl_vis = ty::Visibility::of_impl(item.def_id, tcx, &Default::default()); . // check that private components do not appear in the generics or predicates of inherent impls . // this check is intentionally NOT performed for impls of traits, per #90586 2 ( 0.00%) if impl_.of_trait.is_none() { . self.check(item.def_id, impl_vis).generics().predicates(); . } 1 ( 0.00%) for impl_item_ref in impl_.items { 2 ( 0.00%) let impl_item_vis = if impl_.of_trait.is_none() { . min(tcx.visibility(impl_item_ref.id.def_id), impl_vis, tcx) . } else { . impl_vis . }; 12 ( 0.00%) self.check_assoc_item( 2 ( 0.00%) impl_item_ref.id.def_id, . impl_item_ref.kind, . impl_item_ref.defaultness, . impl_item_vis, . ); . } . } . } 176 ( 0.00%) } . } . . pub fn provide(providers: &mut Providers) { 8 ( 0.00%) *providers = Providers { . visibility, . privacy_access_levels, . check_private_in_public, . check_mod_privacy, . ..*providers . }; 1 ( 0.00%) } . 252 ( 0.00%) fn visibility(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Visibility { 28 ( 0.00%) let def_id = def_id.expect_local(); . match tcx.resolutions(()).visibilities.get(&def_id) { 38 ( 0.00%) Some(vis) => *vis, . None => { . let hir_id = tcx.hir().local_def_id_to_hir_id(def_id); 112 ( 0.00%) match tcx.hir().get(hir_id) { . // Unique types created for closures participate in type privacy checking. . // They have visibilities inherited from the module they are defined in. . Node::Expr(hir::Expr { kind: hir::ExprKind::Closure(..), .. }) => { 6 ( 0.00%) ty::Visibility::Restricted(tcx.parent_module(hir_id).to_def_id()) . } . // - AST lowering may clone `use` items and the clones don't . // get their entries in the resolver's visibility table. . // - AST lowering also creates opaque type items with inherited visibilies. . // Visibility on them should have no effect, but to avoid the visibility . // query failing on some items, we provide it for opaque types as well. . Node::Item(hir::Item { 6 ( 0.00%) vis, . kind: hir::ItemKind::Use(..) | hir::ItemKind::OpaqueTy(..), . .. 36 ( 0.00%) }) => ty::Visibility::from_hir(vis, hir_id, tcx), . // Visibilities of trait impl items are inherited from their traits . // and are not filled in resolve. . Node::ImplItem(impl_item) => { 26 ( 0.00%) match tcx.hir().get_by_def_id(tcx.hir().get_parent_item(hir_id)) { . Node::Item(hir::Item { . kind: hir::ItemKind::Impl(hir::Impl { of_trait: Some(tr), .. }), . .. 2 ( 0.00%) }) => tr.path.res.opt_def_id().map_or_else( . || { . tcx.sess.delay_span_bug(tr.path.span, "trait without a def-id"); . ty::Visibility::Public . }, . |def_id| tcx.visibility(def_id), . ), . _ => span_bug!(impl_item.span, "the parent is not a trait impl"), . } -- line 2021 ---------------------------------------- -- line 2023 ---------------------------------------- . _ => span_bug!( . tcx.def_span(def_id), . "visibility table unexpectedly missing a def-id: {:?}", . def_id, . ), . } . } . } 224 ( 0.00%) } . 24 ( 0.00%) fn check_mod_privacy(tcx: TyCtxt<'_>, module_def_id: LocalDefId) { . // Check privacy of names not checked in previous compilation stages. . 
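// `privacy_access_levels` (a little further below) drives the EmbargoVisitor
// to a fixed point: re-walk the whole crate until no access level changes.
// A toy model of that driver shape (the "visitor" here simply stops changing
// after three rounds; names are illustrative):
struct Embargo { level: u32, changed: bool }

impl Embargo {
    fn walk_toplevel_module(&mut self) {
        if self.level < 3 {
            self.level += 1;
            self.changed = true; // reachability grew, so another pass is needed
        }
    }
}

fn main() {
    let mut visitor = Embargo { level: 0, changed: false };
    loop {
        visitor.walk_toplevel_module();
        if visitor.changed { visitor.changed = false } else { break }
    }
    assert_eq!(visitor.level, 3);
}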
let mut visitor = 12 ( 0.00%) NamePrivacyVisitor { tcx, maybe_typeck_results: None, current_item: module_def_id }; 27 ( 0.00%) let (module, span, hir_id) = tcx.hir().get_module(module_def_id); . . intravisit::walk_mod(&mut visitor, module, hir_id); . . // Check privacy of explicitly written types and traits as well as . // inferred types of expressions and patterns. . let mut visitor = 18 ( 0.00%) TypePrivacyVisitor { tcx, maybe_typeck_results: None, current_item: module_def_id, span }; . intravisit::walk_mod(&mut visitor, module, hir_id); 24 ( 0.00%) } . 7 ( 0.00%) fn privacy_access_levels(tcx: TyCtxt<'_>, (): ()) -> &AccessLevels { . // Build up a set of all exported items in the AST. This is a set of all . // items which are reachable from external crates based on visibility. 9 ( 0.00%) let mut visitor = EmbargoVisitor { . tcx, . access_levels: tcx.resolutions(()).access_levels.clone(), . macro_reachable: Default::default(), . prev_level: Some(AccessLevel::Public), . changed: false, . }; . . loop { . tcx.hir().walk_toplevel_module(&mut visitor); 4 ( 0.00%) if visitor.changed { 3 ( 0.00%) visitor.changed = false; . } else { . break; . } . } . 1 ( 0.00%) tcx.arena.alloc(visitor.access_levels) 9 ( 0.00%) } . 8 ( 0.00%) fn check_private_in_public(tcx: TyCtxt<'_>, (): ()) { . let access_levels = tcx.privacy_access_levels(()); . 9 ( 0.00%) let mut visitor = ObsoleteVisiblePrivateTypesVisitor { . tcx, . access_levels, . in_variant: false, . old_error_set: Default::default(), . }; . tcx.hir().walk_toplevel_module(&mut visitor); . . let has_pub_restricted = { 5 ( 0.00%) let mut pub_restricted_visitor = PubRestrictedVisitor { tcx, has_pub_restricted: false }; . tcx.hir().walk_toplevel_module(&mut pub_restricted_visitor); 1 ( 0.00%) pub_restricted_visitor.has_pub_restricted . }; . . let mut old_error_set_ancestry = HirIdSet::default(); 9 ( 0.00%) for mut id in visitor.old_error_set.iter().copied() { . loop { . if !old_error_set_ancestry.insert(id) { . break; . } . let parent = tcx.hir().get_parent_node(id); . if parent == id { . break; . } . id = parent; . } . } . . // Check for private types and traits in public interfaces. 8 ( 0.00%) let mut visitor = PrivateItemsInPublicInterfacesVisitor { . tcx, . has_pub_restricted, . // Only definition IDs are ever searched in `old_error_set_ancestry`, . // so we can filter away all non-definition IDs at this point. . old_error_set_ancestry: old_error_set_ancestry . .into_iter() . .filter_map(|hir_id| tcx.hir().opt_local_def_id(hir_id)) . .collect(), . }; 6 ( 0.00%) tcx.hir().visit_all_item_likes(&mut DeepVisitor::new(&mut visitor)); 8 ( 0.00%) } 25,132 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/mod.rs -------------------------------------------------------------------------------- Ir -- line 36 ---------------------------------------- . use rustc_session::parse::ParseSess; . use rustc_span::source_map::{MultiSpan, Span, DUMMY_SP}; . use rustc_span::symbol::{kw, sym, Ident, Symbol}; . use tracing::debug; . . use std::ops::Range; . use std::{cmp, mem, slice}; . 12,476 ( 0.01%) bitflags::bitflags! { . struct Restrictions: u8 { . const STMT_EXPR = 1 << 0; . const NO_STRUCT_LITERAL = 1 << 1; . const CONST_EXPR = 1 << 2; . } . } . . #[derive(Clone, Copy, PartialEq, Debug)] -- line 52 ---------------------------------------- -- line 104 ---------------------------------------- . $self.bump(); . 
return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty); . } . } . } . }; . } . 56 ( 0.00%) #[derive(Clone)] . pub struct Parser<'a> { 2 ( 0.00%) pub sess: &'a ParseSess, . /// The current token. 2 ( 0.00%) pub token: Token, . /// The spacing for the current token 1 ( 0.00%) pub token_spacing: Spacing, . /// The previous token. 3 ( 0.00%) pub prev_token: Token, 1 ( 0.00%) pub capture_cfg: bool, 2 ( 0.00%) restrictions: Restrictions, 2 ( 0.00%) expected_tokens: Vec, . // Important: This must only be advanced from `next_tok` . // to ensure that `token_cursor.num_next_calls` is updated properly . token_cursor: TokenCursor, 2 ( 0.00%) desugar_doc_comments: bool, . /// This field is used to keep track of how many left angle brackets we have seen. This is . /// required in order to detect extra leading left angle brackets (`<` characters) and error . /// appropriately. . /// . /// See the comments in the `parse_path_segment` function for more details. 2 ( 0.00%) unmatched_angle_bracket_count: u32, 3 ( 0.00%) max_angle_bracket_count: u32, . /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery . /// it gets removed from here. Every entry left at the end gets emitted as an independent . /// error. 1 ( 0.00%) pub(super) unclosed_delims: Vec, . last_unexpected_token_span: Option, . /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it . /// looked like it could have been a mistyped path or literal `Option:Some(42)`). . pub last_type_ascription: Option<(Span, bool /* likely path typo */)>, . /// If present, this `Parser` is not parsing Rust code but rather a macro call. . subparser_name: Option<&'static str>, . capture_state: CaptureState, . /// This allows us to recover when the user forget to add braces around -- line 146 ---------------------------------------- -- line 173 ---------------------------------------- . /// the first macro inner attribute to invoke a proc-macro). . /// When create a `TokenStream`, the inner attributes get inserted . /// into the proper place in the token stream. . pub type ReplaceRange = (Range, Vec<(FlatToken, Spacing)>); . . /// Controls how we capture tokens. Capturing can be expensive, . /// so we try to avoid performing capturing in cases where . /// we will never need an `AttrAnnotatedTokenStream` 1 ( 0.00%) #[derive(Copy, Clone)] . pub enum Capturing { . /// We aren't performing any capturing - this is the default mode. . No, . /// We are capturing tokens . Yes, . } . 4 ( 0.00%) #[derive(Clone)] . struct CaptureState { 4 ( 0.00%) capturing: Capturing, 1 ( 0.00%) replace_ranges: Vec, . inner_attr_ranges: FxHashMap, . } . . impl<'a> Drop for Parser<'a> { . fn drop(&mut self) { 52 ( 0.00%) emit_unclosed_delims(&mut self.unclosed_delims, &self.sess); . } . } . 1,015 ( 0.00%) #[derive(Clone)] . struct TokenCursor { . frame: TokenCursorFrame, 228 ( 0.00%) stack: Vec, . desugar_doc_comments: bool, . // Counts the number of calls to `next` or `next_desugared`, . // depending on whether `desugar_doc_comments` is set. 74 ( 0.00%) num_next_calls: usize, . // During parsing, we may sometimes need to 'unglue' a . // glued token into two component tokens . // (e.g. '>>' into '>' and '>), so that the parser . // can consume them one at a time. This process . // bypasses the normal capturing mechanism . // (e.g. `num_next_calls` will not be incremented), . // since the 'unglued' tokens due not exist in . // the original `TokenStream`. 
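// The comment above describes "ungluing": when the parser needs `>` but the
// lexer produced the glued token `>>`, the token is split, the first half is
// consumed now, and the second half becomes the current token. A toy model
// of that split (not the real `break_and_eat`):
#[derive(Clone, Copy, PartialEq, Debug)]
enum Tok { Gt, Shr /* `>>` */ }

fn break_two_token_op(t: Tok) -> Option<(Tok, Tok)> {
    match t {
        Tok::Shr => Some((Tok::Gt, Tok::Gt)),
        _ => None,
    }
}

fn eat_gt(current: &mut Tok) -> bool {
    if *current == Tok::Gt {
        return true; // already the token we want
    }
    if let Some((first, second)) = break_two_token_op(*current) {
        debug_assert_eq!(first, Tok::Gt);
        *current = second; // the second `>` is now waiting to be consumed
        return true;
    }
    false
}

fn main() {
    let mut cur = Tok::Shr;
    assert!(eat_gt(&mut cur)); // `>>` unglued, first `>` consumed
    assert_eq!(cur, Tok::Gt);  // one `>` still pending
}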
-- line 217 ---------------------------------------- -- line 226 ---------------------------------------- . // in `Option>` requires us to unglue . // the trailing `>>` token. The `break_last_token` . // field is used to track this token - it gets . // appended to the captured stream when . // we evaluate a `LazyTokenStream` . break_last_token: bool, . } . 477 ( 0.00%) #[derive(Clone)] . struct TokenCursorFrame { 113 ( 0.00%) delim: token::DelimToken, . span: DelimSpan, . open_delim: bool, 226 ( 0.00%) tree_cursor: tokenstream::Cursor, . close_delim: bool, . } . . impl TokenCursorFrame { . fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self { 65 ( 0.00%) TokenCursorFrame { . delim, . span, . open_delim: false, 1,465 ( 0.00%) tree_cursor: tts.into_trees(), . close_delim: false, . } . } . } . . impl TokenCursor { 104,988 ( 0.05%) fn next(&mut self) -> (Token, Spacing) { . loop { 53,812 ( 0.02%) let (tree, spacing) = if !self.frame.open_delim { 1,439 ( 0.00%) self.frame.open_delim = true; 7,195 ( 0.00%) TokenTree::open_tt(self.frame.span, self.frame.delim).into() 101,997 ( 0.05%) } else if let Some(tree) = self.frame.tree_cursor.next_with_spacing() { . tree 5,778 ( 0.00%) } else if !self.frame.close_delim { 1,436 ( 0.00%) self.frame.close_delim = true; 7,180 ( 0.00%) TokenTree::close_tt(self.frame.span, self.frame.delim).into() 2,872 ( 0.00%) } else if let Some(frame) = self.stack.pop() { 18,668 ( 0.01%) self.frame = frame; . continue; . } else { 306 ( 0.00%) (TokenTree::Token(Token::new(token::Eof, DUMMY_SP)), Spacing::Alone) . }; . 20,376 ( 0.01%) match tree { . TokenTree::Token(token) => { 69,992 ( 0.03%) return (token, spacing); . } . TokenTree::Delimited(sp, delim, tts) => { . let frame = TokenCursorFrame::new(sp, delim, tts); . self.stack.push(mem::replace(&mut self.frame, frame)); . } . } . } 69,992 ( 0.03%) } . 150 ( 0.00%) fn next_desugared(&mut self) -> (Token, Spacing) { 45 ( 0.00%) let (data, attr_style, sp) = match self.next() { . (Token { kind: token::DocComment(_, attr_style, data), span }, _) => { . (data, attr_style, span) . } 75 ( 0.00%) tok => return tok, . }; . . // Searches for the occurrences of `"#*` and returns the minimum number of `#`s . // required to wrap the text. . let mut num_of_hashes = 0; . let mut count = 0; . for ch in data.as_str().chars() { . count = match ch { -- line 298 ---------------------------------------- -- line 332 ---------------------------------------- . .iter() . .cloned() . .collect::() . }, . ), . )); . . self.next() 135 ( 0.00%) } . } . 210 ( 0.00%) #[derive(Debug, Clone, PartialEq)] . enum TokenType { 168 ( 0.00%) Token(TokenKind), . Keyword(Symbol), . Operator, . Lifetime, . Ident, . Path, . Type, . Const, . } -- line 353 ---------------------------------------- -- line 378 ---------------------------------------- . /// The separator token. . sep: Option, . /// `true` if a trailing separator is allowed. . trailing_sep_allowed: bool, . } . . impl SeqSep { . fn trailing_allowed(t: TokenKind) -> SeqSep { 3 ( 0.00%) SeqSep { sep: Some(t), trailing_sep_allowed: true } . } . . fn none() -> SeqSep { . SeqSep { sep: None, trailing_sep_allowed: false } . } . } . . pub enum FollowedByType { . Yes, . No, . } . . fn token_descr_opt(token: &Token) -> Option<&'static str> { 20 ( 0.00%) Some(match token.kind { 40 ( 0.00%) _ if token.is_special_ident() => "reserved identifier", 40 ( 0.00%) _ if token.is_used_keyword() => "keyword", 40 ( 0.00%) _ if token.is_unused_keyword() => "reserved keyword", . token::DocComment(..) 
=> "doc comment", . _ => return None, . }) . } . 70 ( 0.00%) pub(super) fn token_descr(token: &Token) -> String { 20 ( 0.00%) let token_str = pprust::token_to_string(token); . match token_descr_opt(token) { . Some(prefix) => format!("{} `{}`", prefix, token_str), 70 ( 0.00%) _ => format!("`{}`", token_str), . } 50 ( 0.00%) } . . impl<'a> Parser<'a> { 182 ( 0.00%) pub fn new( . sess: &'a ParseSess, . tokens: TokenStream, . desugar_doc_comments: bool, . subparser_name: Option<&'static str>, . ) -> Self { 13 ( 0.00%) let mut start_frame = TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens); 26 ( 0.00%) start_frame.open_delim = true; . start_frame.close_delim = true; . 403 ( 0.00%) let mut parser = Parser { . sess, 13 ( 0.00%) token: Token::dummy(), . token_spacing: Spacing::Alone, 13 ( 0.00%) prev_token: Token::dummy(), . capture_cfg: false, . restrictions: Restrictions::empty(), . expected_tokens: Vec::new(), . token_cursor: TokenCursor { 78 ( 0.00%) frame: start_frame, . stack: Vec::new(), . num_next_calls: 0, . desugar_doc_comments, . break_last_token: false, . }, . desugar_doc_comments, . unmatched_angle_bracket_count: 0, . max_angle_bracket_count: 0, -- line 445 ---------------------------------------- -- line 451 ---------------------------------------- . capturing: Capturing::No, . replace_ranges: Vec::new(), . inner_attr_ranges: Default::default(), . }, . current_closure: None, . }; . . // Make parser point to the first token. 26 ( 0.00%) parser.bump(); . . parser 117 ( 0.00%) } . . fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) { . loop { 52,332 ( 0.02%) let (mut next, spacing) = if self.desugar_doc_comments { 60 ( 0.00%) self.token_cursor.next_desugared() . } else { 26,121 ( 0.01%) self.token_cursor.next() . }; 34,888 ( 0.02%) self.token_cursor.num_next_calls += 1; . // We've retrieved an token from the underlying . // cursor, so we no longer need to worry about . // an unglued token. See `break_and_eat` for more details 8,722 ( 0.00%) self.token_cursor.break_last_token = false; 34,888 ( 0.02%) if next.span.is_dummy() { . // Tweak the location for better diagnostics, but keep syntactic context intact. 60 ( 0.00%) next.span = fallback_span.with_ctxt(next.span.ctxt()); . } 30,474 ( 0.01%) if matches!( 17,444 ( 0.01%) next.kind, . token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) . ) { . continue; . } 34,888 ( 0.02%) return (next, spacing); . } . } . . pub fn unexpected(&mut self) -> PResult<'a, T> { . match self.expect_one_of(&[], &[]) { . Err(e) => Err(e), . // We can get `Ok(true)` from `recover_closing_delimiter` . // which is called in `expected_one_of_not_found`. . Ok(_) => FatalError.raise(), . } . } . . /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. 37,521 ( 0.02%) pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { 4,169 ( 0.00%) if self.expected_tokens.is_empty() { 120 ( 0.00%) if self.token == *t { 100 ( 0.00%) self.bump(); . Ok(false) . } else { . self.unexpected_try_recover(t) . } . } else { 49,788 ( 0.02%) self.expect_one_of(slice::from_ref(t), &[]) . } 62,535 ( 0.03%) } . . /// Expect next token to be edible or inedible token. If edible, . /// then consume it; if inedible, then return without consuming . /// anything. Signal a fatal error if next token is unexpected. 49,884 ( 0.02%) pub fn expect_one_of( . &mut self, . edible: &[TokenKind], . inedible: &[TokenKind], . 
) -> PResult<'a, bool /* recovered */> { 8,314 ( 0.00%) if edible.contains(&self.token.kind) { 12,447 ( 0.01%) self.bump(); . Ok(false) . } else if inedible.contains(&self.token.kind) { . // leave it in the input . Ok(false) . } else if self.last_unexpected_token_span == Some(self.token.span) { . FatalError.raise(); . } else { . self.expected_one_of_not_found(edible, inedible) . } 62,355 ( 0.03%) } . . // Public for rustfmt usage. . pub fn parse_ident(&mut self) -> PResult<'a, Ident> { 394 ( 0.00%) self.parse_ident_common(true) . } . . fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> { 300 ( 0.00%) self.token.ident().ok_or_else(|| match self.prev_token.kind { . TokenKind::DocComment(..) => { . self.span_err(self.prev_token.span, Error::UselessDocComment) . } . _ => self.expected_ident_found(), . }) . } . 700 ( 0.00%) fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> { 100 ( 0.00%) let (ident, is_raw) = self.ident_or_err()?; 600 ( 0.00%) if !is_raw && ident.is_reserved() { . let mut err = self.expected_ident_found(); . if recover { . err.emit(); . } else { . return Err(err); . } . } 400 ( 0.00%) self.bump(); . Ok(ident) 1,000 ( 0.00%) } . . /// Checks if the next token is `tok`, and returns `true` if so. . /// . /// This method will automatically add `tok` to `expected_tokens` if `tok` is not . /// encountered. 182,056 ( 0.08%) fn check(&mut self, tok: &TokenKind) -> bool { 78,046 ( 0.04%) let is_present = self.token == *tok; 52,048 ( 0.02%) if !is_present { 108,498 ( 0.05%) self.expected_tokens.push(TokenType::Token(tok.clone())); . } . is_present 182,056 ( 0.08%) } . . /// Consumes a token 'tok' if it exists. Returns whether the given token was present. . pub fn eat(&mut self, tok: &TokenKind) -> bool { 30,695 ( 0.01%) let is_present = self.check(tok); 20,458 ( 0.01%) if is_present { 3,012 ( 0.00%) self.bump() . } . is_present . } . . /// If the next token is the given keyword, returns `true` without eating it. . /// An expectation is also added for diagnostics purposes. 340 ( 0.00%) fn check_keyword(&mut self, kw: Symbol) -> bool { 108 ( 0.00%) self.expected_tokens.push(TokenType::Keyword(kw)); 2,568 ( 0.00%) self.token.is_keyword(kw) . } . . /// If the next token is the given keyword, eats it and returns `true`. . /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes. . // Public for rustfmt usage. 455 ( 0.00%) pub fn eat_keyword(&mut self, kw: Symbol) -> bool { 508 ( 0.00%) if self.check_keyword(kw) { 75 ( 0.00%) self.bump(); . true . } else { . false . } 455 ( 0.00%) } . . fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool { 84 ( 0.00%) if self.token.is_keyword(kw) { 21 ( 0.00%) self.bump(); . true . } else { . false . } . } . . /// If the given word is not a keyword, signals an error. . /// If the next token is not the given word, signals an error. . /// Otherwise, eats it. 25 ( 0.00%) fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> { . if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) } 20 ( 0.00%) } . . /// Is the given keyword `kw` followed by a non-reserved identifier? 336 ( 0.00%) fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool { 168 ( 0.00%) self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident()) 378 ( 0.00%) } . 462 ( 0.00%) fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool { 174 ( 0.00%) if ok { . true . } else { 100 ( 0.00%) self.expected_tokens.push(typ); . false . } 462 ( 0.00%) } . . 
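// `check` above never errors on a mismatch; it records the probed token in
// `expected_tokens` so that a later hard failure can report "expected one of
// ..., found ...". A compact sketch of that bookkeeping (toy token = char):
struct Probe { token: char, expected: Vec<char> }

impl Probe {
    fn check(&mut self, tok: char) -> bool {
        let is_present = self.token == tok;
        if !is_present {
            self.expected.push(tok); // remember what would have been accepted
        }
        is_present
    }
    fn unexpected(&self) -> String {
        format!("expected one of {:?}, found `{}`", self.expected, self.token)
    }
}

fn main() {
    let mut p = Probe { token: ';', expected: Vec::new() };
    if !p.check(',') && !p.check(')') {
        // -> expected one of [',', ')'], found `;`
        println!("{}", p.unexpected());
    }
}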
fn check_ident(&mut self) -> bool { 54 ( 0.00%) self.check_or_expected(self.token.is_ident(), TokenType::Ident) . } . 200 ( 0.00%) fn check_path(&mut self) -> bool { 410 ( 0.00%) self.check_or_expected(self.token.is_path_start(), TokenType::Path) 250 ( 0.00%) } . . fn check_type(&mut self) -> bool { . self.check_or_expected(self.token.can_begin_type(), TokenType::Type) . } . . fn check_const_arg(&mut self) -> bool { . self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const) . } . 54 ( 0.00%) fn check_inline_const(&self, dist: usize) -> bool { 30 ( 0.00%) self.is_keyword_ahead(dist, &[kw::Const]) . && self.look_ahead(dist + 1, |t| match t.kind { . token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)), . token::OpenDelim(DelimToken::Brace) => true, . _ => false, . }) 54 ( 0.00%) } . . /// Checks to see if the next token is either `+` or `+=`. . /// Otherwise returns `false`. . fn check_plus(&mut self) -> bool { 84 ( 0.00%) self.check_or_expected( 42 ( 0.00%) self.token.is_like_plus(), 42 ( 0.00%) TokenType::Token(token::BinOp(token::Plus)), . ) . } . . /// Eats the expected token if it's present possibly breaking . /// compound tokens like multi-character operators in process. . /// Returns `true` if the token was eaten. 732 ( 0.00%) fn break_and_eat(&mut self, expected: TokenKind) -> bool { 427 ( 0.00%) if self.token.kind == expected { 14 ( 0.00%) self.bump(); . return true; . } 270 ( 0.00%) match self.token.kind.break_two_token_op() { . Some((first, second)) if first == expected => { . let first_span = self.sess.source_map().start_point(self.token.span); . let second_span = self.token.span.with_lo(first_span.hi()); . self.token = Token::new(first, first_span); . // Keep track of this token - if we end token capturing now, . // we'll want to append this token to the captured stream. . // . // If we consume any additional tokens, then this token -- line 682 ---------------------------------------- -- line 684 ---------------------------------------- . // and `next_tok` will set this field to `None` . self.token_cursor.break_last_token = true; . // Use the spacing of the glued token as the spacing . // of the unglued second token. . self.bump_with((Token::new(second, second_span), self.token_spacing)); . true . } . _ => { 162 ( 0.00%) self.expected_tokens.push(TokenType::Token(expected)); . false . } . } 765 ( 0.00%) } . . /// Eats `+` possibly breaking tokens like `+=` in process. . fn eat_plus(&mut self) -> bool { . self.break_and_eat(token::BinOp(token::Plus)) . } . . /// Eats `&` possibly breaking tokens like `&&` in process. . /// Signals an error if `&` is not eaten. . fn expect_and(&mut self) -> PResult<'a, ()> { 36 ( 0.00%) if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() } . } . . /// Eats `|` possibly breaking tokens like `||` in process. . /// Signals an error if `|` was not eaten. . fn expect_or(&mut self) -> PResult<'a, ()> { 6 ( 0.00%) if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() } . } . . /// Eats `<` possibly breaking tokens like `<<` in process. 42 ( 0.00%) fn eat_lt(&mut self) -> bool { 170 ( 0.00%) let ate = self.break_and_eat(token::Lt); 108 ( 0.00%) if ate { . // See doc comment for `unmatched_angle_bracket_count`. . self.unmatched_angle_bracket_count += 1; . self.max_angle_bracket_count += 1; . debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count); . } . ate 42 ( 0.00%) } . . /// Eats `<` possibly breaking tokens like `<<` in process. . 
/// Signals an error if `<` was not eaten. . fn expect_lt(&mut self) -> PResult<'a, ()> { . if self.eat_lt() { Ok(()) } else { self.unexpected() } . } . . /// Eats `>` possibly breaking tokens like `>>` in process. -- line 733 ---------------------------------------- -- line 742 ---------------------------------------- . Ok(()) . } else { . self.unexpected() . } . } . . fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool { . kets.iter().any(|k| match expect { 28,962 ( 0.01%) TokenExpectType::Expect => self.check(k), 15 ( 0.00%) TokenExpectType::NoExpect => self.token == **k, . }) . } . . fn parse_seq_to_before_tokens( . &mut self, . kets: &[&TokenKind], . sep: SeqSep, . expect: TokenExpectType, -- line 759 ---------------------------------------- -- line 761 ---------------------------------------- . ) -> PResult<'a, (Vec, bool /* trailing */, bool /* recovered */)> { . let mut first = true; . let mut recovered = false; . let mut trailing = false; . let mut v = vec![]; . let unclosed_delims = !self.unclosed_delims.is_empty(); . . while !self.expect_any_with_type(kets, expect) { 22,085 ( 0.01%) if let token::CloseDelim(..) | token::Eof = self.token.kind { . break; . } 13,824 ( 0.01%) if let Some(ref t) = sep.sep { 12,461 ( 0.01%) if first { . first = false; . } else { 20,565 ( 0.01%) match self.expect(t) { . Ok(false) => { . self.current_closure.take(); . } . Ok(true) => { . self.current_closure.take(); . recovered = true; . break; . } -- line 784 ---------------------------------------- -- line 857 ---------------------------------------- . e.cancel(); . break; . } . } . } . } . } . } 8,276 ( 0.00%) if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) { . trailing = true; . break; . } . 4,149 ( 0.00%) let t = f(self)?; 48 ( 0.00%) v.push(t); . } . 5,584 ( 0.00%) Ok((v, trailing, recovered)) . } . . fn recover_missing_braces_around_closure_body( . &mut self, . closure_spans: ClosureSpans, . mut expect_err: DiagnosticBuilder<'_>, . ) -> PResult<'a, ()> { . let initial_semicolon = self.token.span; -- line 882 ---------------------------------------- -- line 937 ---------------------------------------- . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. . fn parse_seq_to_before_end( . &mut self, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool, bool)> { 4,219 ( 0.00%) self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) . } . . /// Parses a sequence, including the closing delimiter. The function . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. 60 ( 0.00%) fn parse_seq_to_end( . &mut self, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool /* trailing */)> { 2,797 ( 0.00%) let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; 2,783 ( 0.00%) if !recovered { . self.eat(ket); . } 2,853 ( 0.00%) Ok((val, trailing)) 48 ( 0.00%) } . . /// Parses a sequence, including the closing delimiter. The function . /// `f` must consume tokens until reaching the next separator or . /// closing bracket. . fn parse_unspanned_seq( . &mut self, . bra: &TokenKind, . ket: &TokenKind, . sep: SeqSep, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { 37 ( 0.00%) self.expect(bra)?; 48 ( 0.00%) self.parse_seq_to_end(ket, sep, f) . } . . fn parse_delim_comma_seq( . &mut self, . 
delim: DelimToken, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { . self.parse_unspanned_seq( 19 ( 0.00%) &token::OpenDelim(delim), 32 ( 0.00%) &token::CloseDelim(delim), . SeqSep::trailing_allowed(token::Comma), . f, . ) . } . . fn parse_paren_comma_seq( . &mut self, . f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, . ) -> PResult<'a, (Vec, bool)> { . self.parse_delim_comma_seq(token::Paren, f) . } . . /// Advance the parser by one token using provided token as the next one. 113,386 ( 0.05%) fn bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) { . // Bumping after EOF is a bad sign, usually an infinite loop. 52,332 ( 0.02%) if self.prev_token.kind == TokenKind::Eof { . let msg = "attempted to bump the parser past EOF (may be stuck in a loop)"; . self.span_bug(self.token.span, msg); . } . . // Update the current and previous tokens. 34,888 ( 0.02%) self.prev_token = mem::replace(&mut self.token, next_token); 8,722 ( 0.00%) self.token_spacing = next_spacing; . . // Diagnostics. 8,722 ( 0.00%) self.expected_tokens.clear(); . } . . /// Advance the parser by one token. 69,776 ( 0.03%) pub fn bump(&mut self) { 34,888 ( 0.02%) let next_token = self.next_tok(self.token.span); 52,332 ( 0.02%) self.bump_with(next_token); 69,776 ( 0.03%) } . . /// Look-ahead `dist` tokens of `self.token` and get access to that token there. . /// When `dist == 0` then the current token is looked at. . pub fn look_ahead(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R { 30 ( 0.00%) if dist == 0 { 15 ( 0.00%) return looker(&self.token); . } . 4,263 ( 0.00%) let frame = &self.token_cursor.frame; 4,282 ( 0.00%) if frame.delim != DelimToken::NoDelim { . let all_normal = (0..dist).all(|i| { 12,777 ( 0.01%) let token = frame.tree_cursor.look_ahead(i); 14,275 ( 0.01%) !matches!(token, Some(TokenTree::Delimited(_, DelimToken::NoDelim, _))) . }); . if all_normal { 21,276 ( 0.01%) return match frame.tree_cursor.look_ahead(dist - 1) { 5,714 ( 0.00%) Some(tree) => match tree { 2,839 ( 0.00%) TokenTree::Token(token) => looker(token), . TokenTree::Delimited(dspan, delim, _) => { 131 ( 0.00%) looker(&Token::new(token::OpenDelim(*delim), dspan.open)) . } . }, 9,810 ( 0.00%) None => looker(&Token::new(token::CloseDelim(frame.delim), frame.span.close)), . }; . } . } . . let mut cursor = self.token_cursor.clone(); . let mut i = 0; 48 ( 0.00%) let mut token = Token::dummy(); . while i < dist { 189 ( 0.00%) token = cursor.next().0; 87 ( 0.00%) if matches!( 54 ( 0.00%) token.kind, . token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) . ) { . continue; . } . i += 1; . } . return looker(&token); . } . . /// Returns whether any of the given keywords are `dist` tokens ahead of the current one. 126 ( 0.00%) fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool { 40 ( 0.00%) self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw))) 126 ( 0.00%) } . . /// Parses asyncness: `async` or nothing. . fn parse_asyncness(&mut self) -> Async { . if self.eat_keyword(kw::Async) { . let span = self.prev_token.uninterpolated_span(); . Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID } . } else { . Async::No . } . } . . /// Parses unsafety: `unsafe` or nothing. 24 ( 0.00%) fn parse_unsafety(&mut self) -> Unsafe { . if self.eat_keyword(kw::Unsafe) { . Unsafe::Yes(self.prev_token.uninterpolated_span()) . } else { . Unsafe::No . } 96 ( 0.00%) } . . /// Parses constness: `const` or nothing. 
176 ( 0.00%) fn parse_constness(&mut self) -> Const { . // Avoid const blocks to be parsed as const items 54 ( 0.00%) if self.look_ahead(1, |t| t != &token::OpenDelim(DelimToken::Brace)) . && self.eat_keyword(kw::Const) . { 15 ( 0.00%) Const::Yes(self.prev_token.uninterpolated_span()) . } else { . Const::No . } 308 ( 0.00%) } . . /// Parses inline const expressions. . fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P> { . if pat { . self.sess.gated_spans.gate(sym::inline_const_pat, span); . } else { . self.sess.gated_spans.gate(sym::inline_const, span); . } -- line 1104 ---------------------------------------- -- line 1110 ---------------------------------------- . }; . let blk_span = anon_const.value.span; . Ok(self.mk_expr(span.to(blk_span), ExprKind::ConstBlock(anon_const), AttrVec::new())) . } . . /// Parses mutability (`mut` or nothing). . fn parse_mutability(&mut self) -> Mutability { . if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not } 10 ( 0.00%) } . . /// Possibly parses mutability (`const` or `mut`). . fn parse_const_or_mut(&mut self) -> Option { . if self.eat_keyword(kw::Mut) { . Some(Mutability::Mut) . } else if self.eat_keyword(kw::Const) { . Some(Mutability::Not) . } else { -- line 1126 ---------------------------------------- -- line 1139 ---------------------------------------- . } . } . . fn parse_mac_args(&mut self) -> PResult<'a, P> { . self.parse_mac_args_common(true).map(P) . } . . fn parse_attr_args(&mut self) -> PResult<'a, MacArgs> { 42 ( 0.00%) self.parse_mac_args_common(false) . } . 126 ( 0.00%) fn parse_mac_args_common(&mut self, delimited_only: bool) -> PResult<'a, MacArgs> { 112 ( 0.00%) Ok( 86 ( 0.00%) if self.check(&token::OpenDelim(DelimToken::Paren)) 12 ( 0.00%) || self.check(&token::OpenDelim(DelimToken::Bracket)) 12 ( 0.00%) || self.check(&token::OpenDelim(DelimToken::Brace)) . { 40 ( 0.00%) match self.parse_token_tree() { 50 ( 0.00%) TokenTree::Delimited(dspan, delim, tokens) => . // We've confirmed above that there is a delimiter so unwrapping is OK. . { 20 ( 0.00%) MacArgs::Delimited(dspan, MacDelimiter::from_token(delim).unwrap(), tokens) . } . _ => unreachable!(), . } 8 ( 0.00%) } else if !delimited_only { . if self.eat(&token::Eq) { . let eq_span = self.prev_token.span; . . // Collect tokens because they are used during lowering to HIR. . let expr = self.parse_expr_force_collect()?; . let span = expr.span; . . let token_kind = token::Interpolated(Lrc::new(token::NtExpr(expr))); -- line 1172 ---------------------------------------- -- line 1173 ---------------------------------------- . MacArgs::Eq(eq_span, Token::new(token_kind, span)) . } else { . MacArgs::Empty . } . } else { . return self.unexpected(); . }, . ) 98 ( 0.00%) } . . fn parse_or_use_outer_attributes( . &mut self, . already_parsed_attrs: Option, . ) -> PResult<'a, AttrWrapper> { 12,471 ( 0.01%) if let Some(attrs) = already_parsed_attrs { . Ok(attrs) . } else { 8,302 ( 0.00%) self.parse_outer_attributes() . } . } . . /// Parses a single token tree from the input. 96 ( 0.00%) pub(crate) fn parse_token_tree(&mut self) -> TokenTree { 36 ( 0.00%) match self.token.kind { . token::OpenDelim(..) => { . let depth = self.token_cursor.stack.len(); . . // We keep advancing the token cursor until we hit . // the matching `CloseDelim` token. 222 ( 0.00%) while !(depth == self.token_cursor.stack.len() . && matches!(self.token.kind, token::CloseDelim(_))) . { . // Advance one token at a time, so `TokenCursor::next()` . 
// can capture these tokens if necessary. 110 ( 0.00%) self.bump(); . } . // We are still inside the frame corresponding . // to the delimited stream we captured, so grab . // the tokens from this frame. . let frame = &self.token_cursor.frame; 24 ( 0.00%) let stream = frame.tree_cursor.stream.clone(); 24 ( 0.00%) let span = frame.span; 12 ( 0.00%) let delim = frame.delim; . // Consume close delimiter 24 ( 0.00%) self.bump(); 60 ( 0.00%) TokenTree::Delimited(span, delim, stream) . } . token::CloseDelim(_) | token::Eof => unreachable!(), . _ => { . self.bump(); . TokenTree::Token(self.prev_token.clone()) . } . } 84 ( 0.00%) } . . /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF. . pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec> { . let mut tts = Vec::new(); . while self.token != token::Eof { . tts.push(self.parse_token_tree()); . } . Ok(tts) -- line 1234 ---------------------------------------- -- line 1244 ---------------------------------------- . } . TokenStream::new(result) . } . . /// Evaluates the closure with restrictions in place. . /// . /// Afters the closure is evaluated, restrictions are reset. . fn with_res(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T { 4,163 ( 0.00%) let old = self.restrictions; 4,201 ( 0.00%) self.restrictions = res; . let res = f(self); 4,173 ( 0.00%) self.restrictions = old; . res . } . 256 ( 0.00%) fn is_crate_vis(&self) -> bool { 160 ( 0.00%) self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep) 288 ( 0.00%) } . . /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`, . /// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`. . /// If the following element can't be a tuple (i.e., it's a function definition), then . /// it's not a tuple struct field), and the contents within the parentheses aren't valid, . /// so emit a proper diagnostic. . // Public for rustfmt usage. 260 ( 0.00%) pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> { 52 ( 0.00%) maybe_whole!(self, NtVis, |x| x); . 26 ( 0.00%) self.expected_tokens.push(TokenType::Keyword(kw::Crate)); 104 ( 0.00%) if self.is_crate_vis() { . self.bump(); // `crate` . self.sess.gated_spans.gate(sym::crate_visibility_modifier, self.prev_token.span); . return Ok(Visibility { . span: self.prev_token.span, . kind: VisibilityKind::Crate(CrateSugar::JustCrate), . tokens: None, . }); . } . . if !self.eat_keyword(kw::Pub) { . // We need a span for our `Spanned`, but there's inherently no . // keyword to grab a span from for inherited visibility; an empty span at the . // beginning of the current token would seem to be the "Schelling span". 18 ( 0.00%) return Ok(Visibility { 54 ( 0.00%) span: self.token.span.shrink_to_lo(), . kind: VisibilityKind::Inherited, . tokens: None, . }); . } 8 ( 0.00%) let lo = self.prev_token.span; . 40 ( 0.00%) if self.check(&token::OpenDelim(token::Paren)) { . // We don't `self.bump()` the `(` yet because this might be a struct definition where . // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`. . // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so . // by the following tokens. . if self.is_keyword_ahead(1, &[kw::Crate]) && self.look_ahead(2, |t| t != &token::ModSep) . // account for `pub(crate::foo)` . { . // Parse `pub(crate)`. -- line 1303 ---------------------------------------- -- line 1338 ---------------------------------------- . 
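// `parse_visibility` (above and continuing below) accepts `pub`,
// `pub(crate)`, `pub(in path)`, and shorthand forms, using lookahead so that
// `pub (A, B)` in a tuple struct is not mistaken for a restriction. A toy
// string-level sketch of the accepted surface forms (not the real
// token-based parser):
#[derive(Debug, PartialEq)]
enum Vis { Inherited, Public, Crate, Restricted(String) }

fn parse_visibility(src: &str) -> Vis {
    if let Some(rest) = src.strip_prefix("pub(") {
        let inner = rest.trim_end_matches(')');
        return match inner {
            "crate" => Vis::Crate,
            path => Vis::Restricted(path.trim_start_matches("in ").to_string()),
        };
    }
    match src {
        "pub" => Vis::Public,
        "crate" => Vis::Crate, // gated `crate` shorthand for `pub(crate)`
        _ => Vis::Inherited,
    }
}

fn main() {
    assert_eq!(parse_visibility("pub"), Vis::Public);
    assert_eq!(parse_visibility("pub(crate)"), Vis::Crate);
    assert_eq!(parse_visibility("pub(in super::a)"), Vis::Restricted("super::a".into()));
    assert_eq!(parse_visibility(""), Vis::Inherited);
}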
} else if let FollowedByType::No = fbt { . // Provide this diagnostic if a type cannot follow; . // in particular, if this is not a tuple struct. . self.recover_incorrect_vis_restriction()?; . // Emit diagnostic, but continue with public visibility. . } . } . 24 ( 0.00%) Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None }) 234 ( 0.00%) } . . /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }` . fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> { . self.bump(); // `(` . let path = self.parse_path(PathStyle::Mod)?; . self.expect(&token::CloseDelim(token::Paren))?; // `)` . . let msg = "incorrect visibility restriction"; -- line 1355 ---------------------------------------- -- line 1370 ---------------------------------------- . ) . .emit(); . . Ok(()) . } . . /// Parses `extern string_literal?`. . fn parse_extern(&mut self) -> Extern { 7 ( 0.00%) if self.eat_keyword(kw::Extern) { Extern::from_abi(self.parse_abi()) } else { Extern::None } . } . . /// Parses a string literal as an ABI spec. . fn parse_abi(&mut self) -> Option { . match self.parse_str_lit() { . Ok(str_lit) => Some(str_lit), . Err(Some(lit)) => match lit.kind { . ast::LitKind::Err(_) => None, -- line 1386 ---------------------------------------- -- line 1409 ---------------------------------------- . self.collect_tokens_trailing_token( . AttrWrapper::empty(), . ForceCollect::Yes, . |this, _attrs| Ok((f(this)?, TrailingToken::None)), . ) . } . . /// `::{` or `::*` 792 ( 0.00%) fn is_import_coupler(&mut self) -> bool { 396 ( 0.00%) self.check(&token::ModSep) . && self.look_ahead(1, |t| { 117 ( 0.00%) *t == token::OpenDelim(token::Brace) || *t == token::BinOp(token::Star) . }) 837 ( 0.00%) } . . pub fn clear_expected_tokens(&mut self) { . self.expected_tokens.clear(); . } . } . . crate fn make_unclosed_delims_error( . unmatched: UnmatchedBrace, -- line 1430 ---------------------------------------- -- line 1450 ---------------------------------------- . err.span_label(sp, "closing delimiter possibly meant for this"); . } . if let Some(sp) = unmatched.unclosed_span { . err.span_label(sp, "unclosed delimiter"); . } . Some(err) . } . 112 ( 0.00%) pub fn emit_unclosed_delims(unclosed_delims: &mut Vec, sess: &ParseSess) { 56 ( 0.00%) *sess.reached_eof.borrow_mut() |= . unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none()); 56 ( 0.00%) for unmatched in unclosed_delims.drain(..) { . if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) { . e.emit(); . } . } 112 ( 0.00%) } . . /// A helper struct used when building an `AttrAnnotatedTokenStream` from . /// a `LazyTokenStream`. Both delimiter and non-delimited tokens . /// are stored as `FlatToken::Token`. A vector of `FlatToken`s . /// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested . /// `AttrAnnotatedTokenTree::Delimited` tokens . #[derive(Debug, Clone)] . pub enum FlatToken { -- line 1474 ---------------------------------------- 57,947 ( 0.03%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp/ring.rs -------------------------------------------------------------------------------- Ir -- line 22 ---------------------------------------- . RingBuffer { data: VecDeque::new(), offset: 0 } . } . . pub fn is_empty(&self) -> bool { . self.data.is_empty() . } . . 
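The `RingBuffer` being annotated here pairs a `VecDeque` with a monotonically increasing `offset`, so each pushed element keeps a stable absolute index even after older entries are popped from the front. A minimal sketch of that indexing scheme (standalone and simplified, not the exact rustc type):

    use std::collections::VecDeque;

    // Absolute indices: an element's index never changes while it is buffered.
    struct Ring<T> { data: VecDeque<T>, offset: usize }

    impl<T> Ring<T> {
        fn new() -> Self { Ring { data: VecDeque::new(), offset: 0 } }
        fn push(&mut self, value: T) -> usize {
            let index = self.offset + self.data.len();
            self.data.push_back(value);
            index
        }
        fn pop_first(&mut self) -> Option<T> {
            let first = self.data.pop_front()?;
            self.offset += 1; // keeps the remaining indices valid
            Some(first)
        }
    }

    fn main() {
        let mut rb = Ring::new();
        assert_eq!(rb.push('a'), 0);
        assert_eq!(rb.push('b'), 1);
        rb.pop_first();
        assert_eq!(rb.push('c'), 2); // 'b' is still reachable as index 1
    }

These stable indices are what let the pretty-printer's scan stack remember buffer positions across pops, the access pattern behind the `push`/`pop_first` instruction counts in this file.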
pub fn push(&mut self, value: T) -> usize { 43,786 ( 0.02%) let index = self.offset + self.data.len(); 136,818 ( 0.06%) self.data.push_back(value); . index . } . . pub fn clear(&mut self) { 3 ( 0.00%) self.data.clear(); . } . . pub fn index_of_first(&self) -> usize { . self.offset . } . . pub fn first(&self) -> Option<&T> { . self.data.front() . } . . pub fn first_mut(&mut self) -> Option<&mut T> { . self.data.front_mut() . } . 109,465 ( 0.05%) pub fn pop_first(&mut self) -> Option { . let first = self.data.pop_front()?; 87,572 ( 0.04%) self.offset += 1; 131,358 ( 0.06%) Some(first) 153,251 ( 0.07%) } . . pub fn last(&self) -> Option<&T> { . self.data.back() . } . . pub fn last_mut(&mut self) -> Option<&mut T> { . self.data.back_mut() . } -- line 63 ---------------------------------------- 109,465 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_ast_pretty/src/pp.rs -------------------------------------------------------------------------------- Ir -- line 166 ---------------------------------------- . String(Cow<'static, str>), . Break(BreakToken), . Begin(BeginToken), . End, . } . . impl Token { . pub fn is_hardbreak_tok(&self) -> bool { 5,464 ( 0.00%) matches!(self, Token::Break(BreakToken { offset: 0, blank_space: SIZE_INFINITY })) . } . } . . #[derive(Copy, Clone)] . enum PrintFrame { . Fits, . Broken { offset: isize, breaks: Breaks }, . } -- line 182 ---------------------------------------- -- line 213 ---------------------------------------- . . #[derive(Clone)] . struct BufEntry { . token: Token, . size: isize, . } . . impl Printer { 12 ( 0.00%) pub fn new() -> Self { . let linewidth = 78; 427 ( 0.00%) Printer { . out: String::new(), . margin: linewidth as isize, . space: linewidth as isize, . buf: RingBuffer::new(), . left_total: 0, . right_total: 0, . scan_stack: VecDeque::new(), . print_stack: Vec::new(), . pending_indentation: 0, . last_printed: None, . } 15 ( 0.00%) } . . pub fn last_token(&self) -> Option<&Token> { . self.last_token_still_buffered().or_else(|| self.last_printed.as_ref()) . } . . pub fn last_token_still_buffered(&self) -> Option<&Token> { . self.buf.last().map(|last| &last.token) . } . . /// Be very careful with this! . pub fn replace_last_token_still_buffered(&mut self, token: Token) { . self.buf.last_mut().unwrap().token = token; . } . . fn scan_eof(&mut self) { 18 ( 0.00%) if !self.scan_stack.is_empty() { 6 ( 0.00%) self.check_stack(0); 6 ( 0.00%) self.advance_left(); . } . } . . fn scan_begin(&mut self, token: BeginToken) { 5,477 ( 0.00%) if self.scan_stack.is_empty() { 6 ( 0.00%) self.left_total = 1; . self.right_total = 1; . self.buf.clear(); . } 21,908 ( 0.01%) let right = self.buf.push(BufEntry { token: Token::Begin(token), size: -self.right_total }); . self.scan_stack.push_back(right); . } . . fn scan_end(&mut self) { 5,477 ( 0.00%) if self.scan_stack.is_empty() { . self.print_end(); . } else { . let right = self.buf.push(BufEntry { token: Token::End, size: -1 }); . self.scan_stack.push_back(right); . } . } . . fn scan_break(&mut self, token: BreakToken) { 2,732 ( 0.00%) if self.scan_stack.is_empty() { . self.left_total = 1; . self.right_total = 1; . self.buf.clear(); . } else { 5,464 ( 0.00%) self.check_stack(0); . } 10,928 ( 0.00%) let right = self.buf.push(BufEntry { token: Token::Break(token), size: -self.right_total }); . self.scan_stack.push_back(right); 8,196 ( 0.00%) self.right_total += token.blank_space; . } . 
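What precedes is the scan half of an Oppen-style pretty-printer: `Begin` and `Break` entries are buffered with `-right_total` as a placeholder size, and `check_stack` later adds the then-current `right_total`, so each entry ends up holding how many columns of input arrived while it was open. A toy illustration of that bookkeeping trick (semantics distilled from the code here, not the printer itself):

    fn main() {
        // right_total counts columns scanned so far; starts at 1 as in scan_begin.
        let mut right_total = 1isize;
        let placeholder = -right_total; // Begin buffered with size = -right_total
        right_total += 5;               // a 5-char string is scanned
        right_total += 1;               // a break with blank_space = 1
        right_total += 4;               // a 4-char string
        let size = placeholder + right_total; // matching End: check_stack's fix-up
        assert_eq!(size, 10); // 5 + 1 + 4 columns sit between Begin and End
    }

Once an entry's size is known (or forced to `SIZE_INFINITY`), `advance_left` can decide whether a group fits on the current line without re-scanning the buffered tokens.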
73,998 ( 0.03%) fn scan_string(&mut self, string: Cow<'static, str>) { 8,222 ( 0.00%) if self.scan_stack.is_empty() { . self.print_string(&string); . } else { . let len = string.len() as isize; 8,207 ( 0.00%) self.buf.push(BufEntry { token: Token::String(string), size: len }); 24,621 ( 0.01%) self.right_total += len; . self.check_stream(); . } 65,656 ( 0.03%) } . . fn check_stream(&mut self) { 39,058 ( 0.02%) while self.right_total - self.left_total > self.space { 3,738 ( 0.00%) if *self.scan_stack.front().unwrap() == self.buf.index_of_first() { . self.scan_stack.pop_front().unwrap(); 628 ( 0.00%) self.buf.first_mut().unwrap().size = SIZE_INFINITY; . } 2,492 ( 0.00%) self.advance_left(); 1,246 ( 0.00%) if self.buf.is_empty() { . break; . } . } . } . 43,126 ( 0.02%) fn advance_left(&mut self) { 46,278 ( 0.02%) while self.buf.first().unwrap().size >= 0 { 65,679 ( 0.03%) let left = self.buf.pop_first().unwrap(); . . match &left.token { . Token::String(string) => { 24,621 ( 0.01%) self.left_total += string.len() as isize; . self.print_string(string); . } . Token::Break(token) => { 8,196 ( 0.00%) self.left_total += token.blank_space; . self.print_break(*token, left.size); . } 24 ( 0.00%) Token::Begin(token) => self.print_begin(*token, left.size), . Token::End => self.print_end(), . } . 175,144 ( 0.08%) self.last_printed = Some(left.token); . 21,893 ( 0.01%) if self.buf.is_empty() { . break; . } . } 9,992 ( 0.00%) } . 24,615 ( 0.01%) fn check_stack(&mut self, mut depth: usize) { 29,476 ( 0.01%) while let Some(&index) = self.scan_stack.back() { . let mut entry = &mut self.buf[index]; 58,459 ( 0.03%) match entry.token { . Token::Begin(_) => { 13,676 ( 0.01%) if depth == 0 { . break; . } . self.scan_stack.pop_back().unwrap(); 16,407 ( 0.01%) entry.size += self.right_total; 5,469 ( 0.00%) depth -= 1; . } . Token::End => { . // paper says + not =, but that makes no sense. . self.scan_stack.pop_back().unwrap(); 5,477 ( 0.00%) entry.size = 1; 10,954 ( 0.01%) depth += 1; . } . _ => { . self.scan_stack.pop_back().unwrap(); 6,336 ( 0.00%) entry.size += self.right_total; 4,968 ( 0.00%) if depth == 0 { . break; . } . } . } . } 21,880 ( 0.01%) } . . fn get_top(&self) -> PrintFrame { 8,196 ( 0.00%) *self . .print_stack . .last() . .unwrap_or(&PrintFrame::Broken { offset: 0, breaks: Breaks::Inconsistent }) . } . . fn print_begin(&mut self, token: BeginToken, size: isize) { 16,431 ( 0.01%) if size > self.space { 40 ( 0.00%) let col = self.margin - self.space + token.offset; . self.print_stack.push(PrintFrame::Broken { offset: col, breaks: token.breaks }); . } else { . self.print_stack.push(PrintFrame::Fits); . } . } . . fn print_end(&mut self) { . self.print_stack.pop().unwrap(); . } . . fn print_break(&mut self, token: BreakToken, size: isize) { . let break_offset = 10,920 ( 0.00%) match self.get_top() { . PrintFrame::Fits => None, . PrintFrame::Broken { offset, breaks: Breaks::Consistent } => Some(offset), . PrintFrame::Broken { offset, breaks: Breaks::Inconsistent } => { . if size > self.space { Some(offset) } else { None } . } . }; . if let Some(offset) = break_offset { . self.out.push('\n'); 1,860 ( 0.00%) self.pending_indentation = offset + token.offset; 2,480 ( 0.00%) self.space = self.margin - (offset + token.offset); . } else { 6,336 ( 0.00%) self.pending_indentation += token.blank_space; 8,448 ( 0.00%) self.space -= token.blank_space; . } . } . . fn print_string(&mut self, string: &str) { . // Write the pending indent. A more concise way of doing this would be: . // . 
// write!(self.out, "{: >n$}", "", n = self.pending_indentation as usize)?; . // . // But that is significantly slower. This code is sufficiently hot, and indents can get . // sufficiently large, that the difference is significant on some workloads. 8,222 ( 0.00%) self.out.reserve(self.pending_indentation as usize); 3 ( 0.00%) self.out.extend(iter::repeat(' ').take(self.pending_indentation as usize)); 8,222 ( 0.00%) self.pending_indentation = 0; . . self.out.push_str(string); 41,095 ( 0.02%) self.space -= string.len() as isize; . } . . // Convenience functions to talk to the printer. . . /// "raw box" 10,952 ( 0.01%) pub fn rbox(&mut self, indent: usize, breaks: Breaks) { . self.scan_begin(BeginToken { offset: indent as isize, breaks }) 8,214 ( 0.00%) } . . /// Inconsistent breaking box 24,648 ( 0.01%) pub fn ibox(&mut self, indent: usize) { . self.rbox(indent, Breaks::Inconsistent) 20,540 ( 0.01%) } . . /// Consistent breaking box . pub fn cbox(&mut self, indent: usize) { . self.rbox(indent, Breaks::Consistent) . } . 21,856 ( 0.01%) pub fn break_offset(&mut self, n: usize, off: isize) { . self.scan_break(BreakToken { offset: off, blank_space: n as isize }) 16,392 ( 0.01%) } . 21,908 ( 0.01%) pub fn end(&mut self) { . self.scan_end() 21,908 ( 0.01%) } . 90 ( 0.00%) pub fn eof(mut self) -> String { . self.scan_eof(); 72 ( 0.00%) self.out 90 ( 0.00%) } . . pub fn word>>(&mut self, wrd: S) { . let string = wrd.into(); 54,836 ( 0.03%) self.scan_string(string) . } . . fn spaces(&mut self, n: usize) { 10,928 ( 0.00%) self.break_offset(n, 0) . } . . crate fn zerobreak(&mut self) { . self.spaces(0) . } . . pub fn space(&mut self) { . self.spaces(1) . } . . pub fn hardbreak(&mut self) { . self.spaces(SIZE_INFINITY as usize) . } . . pub fn is_beginning_of_line(&self) -> bool { 5,464 ( 0.00%) match self.last_token() { . Some(last_token) => last_token.is_hardbreak_tok(), . None => true, . } . } . . pub fn hardbreak_tok_offset(off: isize) -> Token { . Token::Break(BreakToken { offset: off, blank_space: SIZE_INFINITY }) . } -- line 481 ---------------------------------------- 142,887 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_span/src/caching_source_map_view.rs -------------------------------------------------------------------------------- Ir -- line 27 ---------------------------------------- . impl CacheEntry { . #[inline] . fn update( . &mut self, . new_file_and_idx: Option<(Lrc, usize)>, . pos: BytePos, . time_stamp: usize, . ) { 1,512 ( 0.00%) if let Some((file, file_idx)) = new_file_and_idx { 51 ( 0.00%) self.file = file; 69 ( 0.00%) self.file_index = file_idx; . } . 1,431 ( 0.00%) let line_index = self.file.lookup_line(pos).unwrap(); . let line_bounds = self.file.line_bounds(line_index); 2,942 ( 0.00%) self.line_number = line_index + 1; 4,398 ( 0.00%) self.line = line_bounds; . self.touch(time_stamp); . } . . #[inline] . fn touch(&mut self, time_stamp: usize) { 13,612 ( 0.01%) self.time_stamp = time_stamp; . } . } . . #[derive(Clone)] . pub struct CachingSourceMapView<'sm> { . source_map: &'sm SourceMap, . line_cache: [CacheEntry; 3], . time_stamp: usize, . } . . impl<'sm> CachingSourceMapView<'sm> { 60 ( 0.00%) pub fn new(source_map: &'sm SourceMap) -> CachingSourceMapView<'sm> { . let files = source_map.files(); 30 ( 0.00%) let first_file = files[0].clone(); . let entry = CacheEntry { . time_stamp: 0, . line_number: 0, . line: BytePos(0)..BytePos(0), . 
file: first_file, . file_index: 0, . }; . 150 ( 0.00%) CachingSourceMapView { . source_map, 210 ( 0.00%) line_cache: [entry.clone(), entry.clone(), entry], . time_stamp: 0, . } 120 ( 0.00%) } . . pub fn byte_pos_to_line_and_col( . &mut self, . pos: BytePos, . ) -> Option<(Lrc, usize, BytePos)> { . self.time_stamp += 1; . . // Check if the position is in one of the cached lines -- line 85 ---------------------------------------- -- line 106 ---------------------------------------- . }; . . let cache_entry = &mut self.line_cache[oldest]; . cache_entry.update(new_file_and_idx, pos, self.time_stamp); . . Some((cache_entry.file.clone(), cache_entry.line_number, pos - cache_entry.line.start)) . } . 52,752 ( 0.02%) pub fn span_data_to_lines_and_cols( . &mut self, . span_data: &SpanData, . ) -> Option<(Lrc, usize, BytePos, usize, BytePos)> { 30,144 ( 0.01%) self.time_stamp += 1; . . // Check if lo and hi are in the cached lines. 7,536 ( 0.00%) let lo_cache_idx = self.cache_entry_index(span_data.lo); 7,536 ( 0.00%) let hi_cache_idx = self.cache_entry_index(span_data.hi); . 12,154 ( 0.01%) if lo_cache_idx != -1 && hi_cache_idx != -1 { . // Cache hit for span lo and hi. Check if they belong to the same file. . let result = { 12,152 ( 0.01%) let lo = &self.line_cache[lo_cache_idx as usize]; . let hi = &self.line_cache[hi_cache_idx as usize]; . 36,456 ( 0.02%) if lo.file_index != hi.file_index { . return None; . } . . ( 12,152 ( 0.01%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 6,076 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . ) . }; . 6,076 ( 0.00%) self.line_cache[lo_cache_idx as usize].touch(self.time_stamp); 6,076 ( 0.00%) self.line_cache[hi_cache_idx as usize].touch(self.time_stamp); . 24,304 ( 0.01%) return Some(result); . } . . // No cache hit or cache hit for only one of span lo and hi. 2,918 ( 0.00%) let oldest = if lo_cache_idx != -1 || hi_cache_idx != -1 { . let avoid_idx = if lo_cache_idx != -1 { lo_cache_idx } else { hi_cache_idx }; . self.oldest_cache_entry_index_avoid(avoid_idx as usize) . } else { . self.oldest_cache_entry_index() . }; . . // If the entry doesn't point to the correct file, get the new file and index. . // Return early if the file containing beginning of span doesn't contain end of span. 11,680 ( 0.01%) let new_file_and_idx = if !file_contains(&self.line_cache[oldest].file, span_data.lo) { 145 ( 0.00%) let new_file_and_idx = self.file_for_position(span_data.lo)?; 174 ( 0.00%) if !file_contains(&new_file_and_idx.0, span_data.hi) { . return None; . } . 116 ( 0.00%) Some(new_file_and_idx) . } else { . let file = &self.line_cache[oldest].file; 4,293 ( 0.00%) if !file_contains(&file, span_data.hi) { . return None; . } . 4,293 ( 0.00%) None . }; . . // Update the cache entries. 7,300 ( 0.00%) let (lo_idx, hi_idx) = match (lo_cache_idx, hi_cache_idx) { . // Oldest cache entry is for span_data.lo line. . (-1, -1) => { . let lo = &mut self.line_cache[oldest]; . lo.update(new_file_and_idx, span_data.lo, self.time_stamp); . 4,299 ( 0.00%) if !lo.line.contains(&span_data.hi) { . let new_file_and_idx = Some((lo.file.clone(), lo.file_index)); . let next_oldest = self.oldest_cache_entry_index_avoid(oldest); . let hi = &mut self.line_cache[next_oldest]; . hi.update(new_file_and_idx, span_data.hi, self.time_stamp); . (oldest, next_oldest) . } else { . (oldest, oldest) . } . } . // Oldest cache entry is for span_data.lo line. . (-1, _) => { . let lo = &mut self.line_cache[oldest]; . 
lo.update(new_file_and_idx, span_data.lo, self.time_stamp); 2 ( 0.00%) let hi = &mut self.line_cache[hi_cache_idx as usize]; 3 ( 0.00%) hi.touch(self.time_stamp); . (oldest, hi_cache_idx as usize) . } . // Oldest cache entry is for span_data.hi line. . (_, -1) => { . let hi = &mut self.line_cache[oldest]; 26 ( 0.00%) hi.update(new_file_and_idx, span_data.hi, self.time_stamp); 52 ( 0.00%) let lo = &mut self.line_cache[lo_cache_idx as usize]; 104 ( 0.00%) lo.touch(self.time_stamp); . (lo_cache_idx as usize, oldest) . } . _ => { . panic!(); . } . }; . . let lo = &self.line_cache[lo_idx]; . let hi = &self.line_cache[hi_idx]; . . // Span lo and hi may equal line end when last line doesn't . // end in newline, hence the inclusive upper bounds below. 2,920 ( 0.00%) assert!(span_data.lo >= lo.line.start); 1,460 ( 0.00%) assert!(span_data.lo <= lo.line.end); 2,920 ( 0.00%) assert!(span_data.hi >= hi.line.start); 1,460 ( 0.00%) assert!(span_data.hi <= hi.line.end); 7,300 ( 0.00%) assert!(lo.file.contains(span_data.lo)); 4,380 ( 0.00%) assert!(lo.file.contains(span_data.hi)); 4,380 ( 0.00%) assert_eq!(lo.file_index, hi.file_index); . 4,380 ( 0.00%) Some(( 1,460 ( 0.00%) lo.file.clone(), . lo.line_number, . span_data.lo - lo.line.start, 1,460 ( 0.00%) hi.line_number, . span_data.hi - hi.line.start, . )) 67,824 ( 0.03%) } . . fn cache_entry_index(&self, pos: BytePos) -> isize { . for (idx, cache_entry) in self.line_cache.iter().enumerate() { 96,651 ( 0.04%) if cache_entry.line.contains(&pos) { . return idx as isize; . } . } . . -1 . } . . fn oldest_cache_entry_index(&self) -> usize { . let mut oldest = 0; . . for idx in 1..self.line_cache.len() { 8,598 ( 0.00%) if self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp { . oldest = idx; . } . } . . oldest . } . . fn oldest_cache_entry_index_avoid(&self, avoid_idx: usize) -> usize { . let mut oldest = if avoid_idx != 0 { 0 } else { 1 }; . . for idx in 0..self.line_cache.len() { 232 ( 0.00%) if idx != avoid_idx 118 ( 0.00%) && self.line_cache[idx].time_stamp < self.line_cache[oldest].time_stamp . { . oldest = idx; . } . } . . oldest . } . 145 ( 0.00%) fn file_for_position(&self, pos: BytePos) -> Option<(Lrc, usize)> { 29 ( 0.00%) if !self.source_map.files().is_empty() { 58 ( 0.00%) let file_idx = self.source_map.lookup_source_file_idx(pos); . let file = &self.source_map.files()[file_idx]; . 232 ( 0.00%) if file_contains(file, pos) { . return Some((file.clone(), file_idx)); . } . } . . None 145 ( 0.00%) } . } . . #[inline] . fn file_contains(file: &SourceFile, pos: BytePos) -> bool { . // `SourceMap::lookup_source_file_idx` and `SourceFile::contains` both consider the position . // one past the end of a file to belong to it. Normally, that's what we want. But for the . // purposes of converting a byte position to a line and column number, we can't come up with a . // line and column number if the file is empty, because an empty file doesn't contain any -- line 290 ---------------------------------------- 104,283 ( 0.05%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/ty/layout.rs -------------------------------------------------------------------------------- Ir -- line 23 ---------------------------------------- . use std::iter; . use std::num::NonZeroUsize; . use std::ops::Bound; . . use rand::{seq::SliceRandom, SeedableRng}; . use rand_xoshiro::Xoshiro128StarStar; . . 
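The `provide` hook just below registers this module's query implementations through functional struct update: the named fields override entries in the provider table and `..*providers` keeps everything else. The same registration shape in miniature (simplified stand-in types, not the real `ty::query::Providers`):

    // A query table as a struct of function pointers.
    #[derive(Clone, Copy)]
    struct Providers {
        layout_of: fn(u32) -> u32,
        fn_abi_of_fn_ptr: fn(u32) -> u32,
    }

    fn layout_of(x: u32) -> u32 { x * 2 } // this module's implementation

    fn provide(p: &mut Providers) {
        // Field-init shorthand picks up the local `layout_of`;
        // `..*p` copies every provider we don't override.
        *p = Providers { layout_of, ..*p };
    }

    fn main() {
        let mut p = Providers { layout_of: |x| x, fn_abi_of_fn_ptr: |x| x };
        provide(&mut p);
        assert_eq!((p.layout_of)(21), 42);
    }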
pub fn provide(providers: &mut ty::query::Providers) { 6 ( 0.00%) *providers = . ty::query::Providers { layout_of, fn_abi_of_fn_ptr, fn_abi_of_instance, ..*providers }; . } . . pub trait IntegerExt { . fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>, signed: bool) -> Ty<'tcx>; . fn from_attr(cx: &C, ity: attr::IntType) -> Integer; . fn from_int_ty(cx: &C, ity: ty::IntTy) -> Integer; . fn from_uint_ty(cx: &C, uty: ty::UintTy) -> Integer; -- line 39 ---------------------------------------- -- line 91 ---------------------------------------- . } . fn from_uint_ty(cx: &C, ity: ty::UintTy) -> Integer { . match ity { . ty::UintTy::U8 => I8, . ty::UintTy::U16 => I16, . ty::UintTy::U32 => I32, . ty::UintTy::U64 => I64, . ty::UintTy::U128 => I128, 1 ( 0.00%) ty::UintTy::Usize => cx.data_layout().ptr_sized_integer(), . } . } . . /// Finds the appropriate Integer type and signedness for the given . /// signed discriminant range and `#[repr]` attribute. . /// N.B.: `u128` values above `i128::MAX` will be treated as signed, but . /// that shouldn't affect anything, other than maybe debuginfo. . fn repr_discr<'tcx>( -- line 107 ---------------------------------------- -- line 215 ---------------------------------------- . "unable to determine layout for `{}` because `{}` cannot be normalized", . t, . e.get_type_for_failure() . ), . } . } . } . 240 ( 0.00%) #[instrument(skip(tcx, query), level = "debug")] . fn layout_of<'tcx>( . tcx: TyCtxt<'tcx>, . query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>, . ) -> Result, LayoutError<'tcx>> { . ty::tls::with_related_context(tcx, move |icx| { 15 ( 0.00%) let (param_env, ty) = query.into_parts(); . debug!(?ty); . 60 ( 0.00%) if !tcx.recursion_limit().value_within_limit(icx.layout_depth) { . tcx.sess.fatal(&format!("overflow representing the type `{}`", ty)); . } . . // Update the ImplicitCtxt to increase the layout_depth 135 ( 0.00%) let icx = ty::tls::ImplicitCtxt { layout_depth: icx.layout_depth + 1, ..icx.clone() }; . . ty::tls::enter_context(&icx, |_| { 45 ( 0.00%) let param_env = param_env.with_reveal_all_normalized(tcx); . let unnormalized_ty = ty; . . // FIXME: We might want to have two different versions of `layout_of`: . // One that can be called after typecheck has completed and can use . // `normalize_erasing_regions` here and another one that can be called . // before typecheck has completed and uses `try_normalize_erasing_regions`. . let ty = match tcx.try_normalize_erasing_regions(param_env, ty) { . Ok(t) => t, . Err(normalization_error) => { . return Err(LayoutError::NormalizationFailure(ty, normalization_error)); . } . }; . 15 ( 0.00%) if ty != unnormalized_ty { . // Ensure this layout is also cached for the normalized type. . return tcx.layout_of(param_env.and(ty)); . } . 60 ( 0.00%) let cx = LayoutCx { tcx, param_env }; . 45 ( 0.00%) let layout = cx.layout_of_uncached(ty)?; . let layout = TyAndLayout { ty, layout }; . . cx.record_layout_for_printing(layout); . . // Type-level uninhabitedness should always imply ABI uninhabitedness. 15 ( 0.00%) if tcx.conservative_is_privately_uninhabited(param_env.and(ty)) { . assert!(layout.abi.is_uninhabited()); . } . 45 ( 0.00%) Ok(layout) . }) . }) . } . . pub struct LayoutCx<'tcx, C> { . pub tcx: C, . pub param_env: ty::ParamEnv<'tcx>, . } -- line 279 ---------------------------------------- -- line 291 ---------------------------------------- . // Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`. . // This is used to go between `memory_index` (source field order to memory order) . 
// and `inverse_memory_index` (memory order to source field order). . // See also `FieldsShape::Arbitrary::memory_index` for more details. . // FIXME(eddyb) build a better abstraction for permutations, if possible. . fn invert_mapping(map: &[u32]) -> Vec { . let mut inverse = vec![0; map.len()]; . for i in 0..map.len() { 28 ( 0.00%) inverse[map[i] as usize] = i as u32; . } . inverse . } . . impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> { 27 ( 0.00%) fn scalar_pair(&self, a: Scalar, b: Scalar) -> Layout { . let dl = self.data_layout(); 21 ( 0.00%) let b_align = b.value.align(dl); 24 ( 0.00%) let align = a.value.align(dl).max(b_align).max(dl.aggregate_align); . let b_offset = a.value.size(dl).align_to(b_align.abi); . let size = (b_offset + b.value.size(dl)).align_to(align.abi); . . // HACK(nox): We iter on `b` and then `a` because `max_by_key` . // returns the last maximum. 36 ( 0.00%) let largest_niche = Niche::from_scalar(dl, b_offset, b) . .into_iter() 30 ( 0.00%) .chain(Niche::from_scalar(dl, Size::ZERO, a)) . .max_by_key(|niche| niche.available(dl)); . 57 ( 0.00%) Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, . fields: FieldsShape::Arbitrary { 6 ( 0.00%) offsets: vec![Size::ZERO, b_offset], 6 ( 0.00%) memory_index: vec![0, 1], . }, 39 ( 0.00%) abi: Abi::ScalarPair(a, b), 30 ( 0.00%) largest_niche, . align, . size, . } 24 ( 0.00%) } . 36 ( 0.00%) fn univariant_uninterned( . &self, . ty: Ty<'tcx>, . fields: &[TyAndLayout<'_>], . repr: &ReprOptions, . kind: StructKind, . ) -> Result> { . let dl = self.data_layout(); 12 ( 0.00%) let pack = repr.pack; 3 ( 0.00%) if pack.is_some() && repr.align.is_some() { . self.tcx.sess.delay_span_bug(DUMMY_SP, "struct cannot be packed and aligned"); . return Err(LayoutError::Unknown(ty)); . } . 12 ( 0.00%) let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align }; . . let mut inverse_memory_index: Vec = (0..fields.len() as u32).collect(); . . let optimize = !repr.inhibit_struct_field_reordering_opt(); 3 ( 0.00%) if optimize { . let end = 21 ( 0.00%) if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() }; . let optimizing = &mut inverse_memory_index[..end]; 3 ( 0.00%) let field_align = |f: &TyAndLayout<'_>| { 8 ( 0.00%) if let Some(pack) = pack { f.align.abi.min(pack) } else { f.align.abi } . }; . . // If `-Z randomize-layout` was enabled for the type definition we can shuffle . // the field ordering to try and catch some code making assumptions about layouts . // we don't guarantee . if repr.can_randomize_type_layout() { . // `ReprOptions.layout_seed` is a deterministic seed that we can use to . // randomize field ordering with . let mut rng = Xoshiro128StarStar::seed_from_u64(repr.field_shuffle_seed); . . // Shuffle the ordering of the fields . optimizing.shuffle(&mut rng); . . // Otherwise we just leave things alone and actually optimize the type's fields . } else { 6 ( 0.00%) match kind { . StructKind::AlwaysSized | StructKind::MaybeUnsized => { 15 ( 0.00%) optimizing.sort_by_key(|&x| { . // Place ZSTs first to avoid "interesting offsets", . // especially with only one or two non-ZST fields. 7 ( 0.00%) let f = &fields[x as usize]; 1 ( 0.00%) (!f.is_zst(), cmp::Reverse(field_align(f))) . }); . } . . StructKind::Prefixed(..) => { . // Sort in ascending alignment so that the layout stays optimal . // regardless of the prefix . optimizing.sort_by_key(|&x| field_align(&fields[x as usize])); . 
} -- line 386 ---------------------------------------- -- line 399 ---------------------------------------- . // produce `memory_index` (see `invert_mapping`). . . let mut sized = true; . let mut offsets = vec![Size::ZERO; fields.len()]; . let mut offset = Size::ZERO; . let mut largest_niche = None; . let mut largest_niche_available = 0; . 9 ( 0.00%) if let StructKind::Prefixed(prefix_size, prefix_align) = kind { . let prefix_align = . if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align }; . align = align.max(AbiAndPrefAlign::new(prefix_align)); . offset = prefix_size.align_to(prefix_align); . } . 7 ( 0.00%) for &i in &inverse_memory_index { 75 ( 0.00%) let field = fields[i as usize]; 14 ( 0.00%) if !sized { . self.tcx.sess.delay_span_bug( . DUMMY_SP, . &format!( . "univariant: field #{} of `{}` comes after unsized field", . offsets.len(), . ty . ), . ); . } . 35 ( 0.00%) if field.is_unsized() { . sized = false; . } . . // Invariant: offset < dl.obj_size_bound() <= 1<<61 14 ( 0.00%) let field_align = if let Some(pack) = pack { . field.align.min(AbiAndPrefAlign::new(pack)) . } else { 21 ( 0.00%) field.align . }; . offset = offset.align_to(field_align.abi); . align = align.max(field_align); . . debug!("univariant offset: {:?} field: {:#?}", offset, field); 14 ( 0.00%) offsets[i as usize] = offset; . 7 ( 0.00%) if !repr.hide_niche() { 73 ( 0.00%) if let Some(mut niche) = field.largest_niche { . let available = niche.available(dl); 16 ( 0.00%) if available > largest_niche_available { . largest_niche_available = available; . niche.offset += offset; 32 ( 0.00%) largest_niche = Some(niche); . } . } . } . 7 ( 0.00%) offset = offset.checked_add(field.size, dl).ok_or(LayoutError::SizeOverflow(ty))?; . } . 9 ( 0.00%) if let Some(repr_align) = repr.align { . align = align.max(AbiAndPrefAlign::new(repr_align)); . } . . debug!("univariant min_size: {:?}", offset); . let min_size = offset; . . // As stated above, inverse_memory_index holds field indices by increasing offset. . // This makes it an already-sorted view of the offsets vec. . // To invert it, consider: . // If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0. . // Field 5 would be the first element, so memory_index is i: . // Note: if we didn't optimize, it's already right. . . let memory_index = 6 ( 0.00%) if optimize { invert_mapping(&inverse_memory_index) } else { inverse_memory_index }; . . let size = min_size.align_to(align.abi); 6 ( 0.00%) let mut abi = Abi::Aggregate { sized }; . . // Unpack newtype ABIs and find scalar pairs. 12 ( 0.00%) if sized && size.bytes() > 0 { . // All other fields must be ZSTs. . let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst()); . 12 ( 0.00%) match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) { . // We have exactly one non-ZST field. . (Some((i, field)), None, None) => { . // Field fills the struct and it has a scalar or scalar pair ABI. . if offsets[i].bytes() == 0 && align.abi == field.align.abi && size == field.size . { . match field.abi { . // For plain scalars, or vectors of them, we can't unpack . // newtypes for `#[repr(C)]`, as that affects C ABIs. -- line 490 ---------------------------------------- -- line 503 ---------------------------------------- . . // Two non-ZST fields, and they're both scalars. . ( . Some((i, &TyAndLayout { layout: &Layout { abi: Abi::Scalar(a), .. }, .. })), . Some((j, &TyAndLayout { layout: &Layout { abi: Abi::Scalar(b), .. }, .. })), . None, . ) => { . 
// Order by the memory placement, not source order. 18 ( 0.00%) let ((i, a), (j, b)) = 14 ( 0.00%) if offsets[i] < offsets[j] { ((i, a), (j, b)) } else { ((j, b), (i, a)) }; 22 ( 0.00%) let pair = self.scalar_pair(a, b); 4 ( 0.00%) let pair_offsets = match pair.fields { 4 ( 0.00%) FieldsShape::Arbitrary { ref offsets, ref memory_index } => { 2 ( 0.00%) assert_eq!(memory_index, &[0, 1]); . offsets . } . _ => bug!(), . }; 18 ( 0.00%) if offsets[i] == pair_offsets[0] 2 ( 0.00%) && offsets[j] == pair_offsets[1] . && align == pair.align . && size == pair.size . { . // We can use `ScalarPair` only when it matches our . // already computed layout (including `#[repr(C)]`). 28 ( 0.00%) abi = pair.abi; . } 2 ( 0.00%) } . . _ => {} . } . } . 7 ( 0.00%) if fields.iter().any(|f| f.abi.is_uninhabited()) { . abi = Abi::Uninhabited; . } . 96 ( 0.00%) Ok(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, 30 ( 0.00%) fields: FieldsShape::Arbitrary { offsets, memory_index }, . abi, 33 ( 0.00%) largest_niche, . align, . size, . }) 30 ( 0.00%) } . 150 ( 0.00%) fn layout_of_uncached(&self, ty: Ty<'tcx>) -> Result<&'tcx Layout, LayoutError<'tcx>> { 30 ( 0.00%) let tcx = self.tcx; 15 ( 0.00%) let param_env = self.param_env; . let dl = self.data_layout(); 17 ( 0.00%) let scalar_unit = |value: Primitive| { . let size = value.size(dl); 14 ( 0.00%) assert!(size.bits() <= 128); 12 ( 0.00%) Scalar { value, valid_range: WrappingRange { start: 0, end: size.unsigned_int_max() } } 2 ( 0.00%) }; . let scalar = |value: Primitive| tcx.intern_layout(Layout::scalar(self, scalar_unit(value))); . . let univariant = |fields: &[TyAndLayout<'_>], repr: &ReprOptions, kind| { 63 ( 0.00%) Ok(tcx.intern_layout(self.univariant_uninterned(ty, fields, repr, kind)?)) . }; . debug_assert!(!ty.has_infer_types_or_consts()); . 116 ( 0.00%) Ok(match *ty.kind() { . // Basic scalars. . ty::Bool => tcx.intern_layout(Layout::scalar( . self, 6 ( 0.00%) Scalar { value: Int(I8, false), valid_range: WrappingRange { start: 0, end: 1 } }, . )), . ty::Char => tcx.intern_layout(Layout::scalar( . self, 12 ( 0.00%) Scalar { . value: Int(I32, false), . valid_range: WrappingRange { start: 0, end: 0x10FFFF }, . }, . )), . ty::Int(ity) => scalar(Int(Integer::from_int_ty(dl, ity), true)), 15 ( 0.00%) ty::Uint(ity) => scalar(Int(Integer::from_uint_ty(dl, ity), false)), . ty::Float(fty) => scalar(match fty { . ty::FloatTy::F32 => F32, . ty::FloatTy::F64 => F64, . }), . ty::FnPtr(_) => { . let mut ptr = scalar_unit(Pointer); . ptr.valid_range = ptr.valid_range.with_start(1); . tcx.intern_layout(Layout::scalar(self, ptr)) -- line 588 ---------------------------------------- -- line 594 ---------------------------------------- . fields: FieldsShape::Primitive, . abi: Abi::Uninhabited, . largest_niche: None, . align: dl.i8_align, . size: Size::ZERO, . }), . . // Potentially-wide pointers. 3 ( 0.00%) ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => { . let mut data_ptr = scalar_unit(Pointer); . if !ty.is_unsafe_ptr() { . data_ptr.valid_range = data_ptr.valid_range.with_start(1); . } . . let pointee = tcx.normalize_erasing_regions(param_env, pointee); 24 ( 0.00%) if pointee.is_sized(tcx.at(DUMMY_SP), param_env) { 34 ( 0.00%) return Ok(tcx.intern_layout(Layout::scalar(self, data_ptr))); . } . 4 ( 0.00%) let unsized_part = tcx.struct_tail_erasing_lifetimes(pointee, param_env); 4 ( 0.00%) let metadata = match unsized_part.kind() { . ty::Foreign(..) => { . return Ok(tcx.intern_layout(Layout::scalar(self, data_ptr))); . 
} 8 ( 0.00%) ty::Slice(_) | ty::Str => scalar_unit(Int(dl.ptr_sized_integer(), false)), . ty::Dynamic(..) => { . let mut vtable = scalar_unit(Pointer); . vtable.valid_range = vtable.valid_range.with_start(1); . vtable . } . _ => return Err(LayoutError::Unknown(unsized_part)), . }; . . // Effectively a (ptr, meta) tuple. 21 ( 0.00%) tcx.intern_layout(self.scalar_pair(data_ptr, metadata)) . } . . // Arrays and slices. 6 ( 0.00%) ty::Array(element, mut count) => { 2 ( 0.00%) if count.has_projections() { . count = tcx.normalize_erasing_regions(param_env, count); . if count.has_projections() { . return Err(LayoutError::Unknown(ty)); . } . } . 8 ( 0.00%) let count = count.try_eval_usize(tcx, param_env).ok_or(LayoutError::Unknown(ty))?; 10 ( 0.00%) let element = self.layout_of(element)?; . let size = 10 ( 0.00%) element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow(ty))?; . . let abi = 16 ( 0.00%) if count != 0 && tcx.conservative_is_privately_uninhabited(param_env.and(ty)) { . Abi::Uninhabited . } else { . Abi::Aggregate { sized: true } . }; . 22 ( 0.00%) let largest_niche = if count != 0 { element.largest_niche } else { None }; . 44 ( 0.00%) tcx.intern_layout(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, 2 ( 0.00%) fields: FieldsShape::Array { stride: element.size, count }, . abi, . largest_niche, 2 ( 0.00%) align: element.align, . size, . }) . } 2 ( 0.00%) ty::Slice(element) => { 3 ( 0.00%) let element = self.layout_of(element)?; 11 ( 0.00%) tcx.intern_layout(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, 1 ( 0.00%) fields: FieldsShape::Array { stride: element.size, count: 0 }, . abi: Abi::Aggregate { sized: false }, . largest_niche: None, 1 ( 0.00%) align: element.align, . size: Size::ZERO, . }) . } . ty::Str => tcx.intern_layout(Layout { . variants: Variants::Single { index: VariantIdx::new(0) }, . fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 }, . abi: Abi::Aggregate { sized: false }, . largest_niche: None, -- line 678 ---------------------------------------- -- line 702 ---------------------------------------- . let tys = substs.as_closure().upvar_tys(); . univariant( . &tys.map(|ty| self.layout_of(ty)).collect::, _>>()?, . &ReprOptions::default(), . StructKind::AlwaysSized, . )? . } . 3 ( 0.00%) ty::Tuple(tys) => { . let kind = 9 ( 0.00%) if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized }; . 3 ( 0.00%) univariant( 6 ( 0.00%) &tys.iter() . .map(|k| self.layout_of(k.expect_ty())) . .collect::, _>>()?, 6 ( 0.00%) &ReprOptions::default(), . kind, . )? . } . . // SIMD vector types. . ty::Adt(def, substs) if def.repr.simd() => { . if !def.is_struct() { . // Should have yielded E0517 by now. -- line 726 ---------------------------------------- -- line 1420 ---------------------------------------- . ty::Placeholder(..) | ty::GeneratorWitness(..) | ty::Infer(_) => { . bug!("Layout::compute: unexpected type `{}`", ty) . } . . ty::Bound(..) | ty::Param(_) | ty::Error(_) => { . return Err(LayoutError::Unknown(ty)); . } . }) 120 ( 0.00%) } . } . . /// Overlap eligibility and variant assignment for each GeneratorSavedLocal. . #[derive(Clone, Debug, PartialEq)] . enum SavedLocalEligibility { . Unassigned, . Assigned(VariantIdx), . // FIXME: Use newtype_index so we aren't wasting bytes -- line 1436 ---------------------------------------- -- line 1755 ---------------------------------------- . } . . /// This is invoked by the `layout_of` query to record the final . /// layout of each type. . 
#[inline(always)] . fn record_layout_for_printing(&self, layout: TyAndLayout<'tcx>) { . // If we are running with `-Zprint-type-sizes`, maybe record layouts . // for dumping later. 75 ( 0.00%) if self.tcx.sess.opts.debugging_opts.print_type_sizes { . self.record_layout_for_printing_outlined(layout) . } . } . . fn record_layout_for_printing_outlined(&self, layout: TyAndLayout<'tcx>) { . // Ignore layouts that are done with non-empty environments or . // non-monomorphic layouts, as the user only wants to see the stuff . // resulting from the final codegen session. -- line 1771 ---------------------------------------- -- line 2041 ---------------------------------------- . . pub trait HasParamEnv<'tcx> { . fn param_env(&self) -> ty::ParamEnv<'tcx>; . } . . impl<'tcx> HasDataLayout for TyCtxt<'tcx> { . #[inline] . fn data_layout(&self) -> &TargetDataLayout { 54,773 ( 0.03%) &self.data_layout . } . } . . impl<'tcx> HasTargetSpec for TyCtxt<'tcx> { . fn target_spec(&self) -> &Target { . &self.sess.target . } . } -- line 2057 ---------------------------------------- -- line 2080 ---------------------------------------- . #[inline] . fn tcx(&self) -> TyCtxt<'tcx> { . **self . } . } . . impl<'tcx, C> HasParamEnv<'tcx> for LayoutCx<'tcx, C> { . fn param_env(&self) -> ty::ParamEnv<'tcx> { 7 ( 0.00%) self.param_env . } . } . . impl<'tcx, T: HasDataLayout> HasDataLayout for LayoutCx<'tcx, T> { . fn data_layout(&self) -> &TargetDataLayout { . self.tcx.data_layout() . } . } -- line 2096 ---------------------------------------- -- line 2098 ---------------------------------------- . impl<'tcx, T: HasTargetSpec> HasTargetSpec for LayoutCx<'tcx, T> { . fn target_spec(&self) -> &Target { . self.tcx.target_spec() . } . } . . impl<'tcx, T: HasTyCtxt<'tcx>> HasTyCtxt<'tcx> for LayoutCx<'tcx, T> { . fn tcx(&self) -> TyCtxt<'tcx> { 7 ( 0.00%) self.tcx.tcx() . } . } . . pub trait MaybeResult { . type Error; . . fn from(x: Result) -> Self; . fn to_result(self) -> Result; -- line 2114 ---------------------------------------- -- line 2124 ---------------------------------------- . Ok(self) . } . } . . impl MaybeResult for Result { . type Error = E; . . fn from(x: Result) -> Self { 4,295 ( 0.00%) x . } . fn to_result(self) -> Result { . self . } . } . . pub type TyAndLayout<'tcx> = rustc_target::abi::TyAndLayout<'tcx, Ty<'tcx>>; . -- line 2140 ---------------------------------------- -- line 2167 ---------------------------------------- . ) -> >>::Error; . } . . /// Blanket extension trait for contexts that can compute layouts of types. . pub trait LayoutOf<'tcx>: LayoutOfHelpers<'tcx> { . /// Computes the layout of a type. Note that this implicitly . /// executes in "reveal all" mode, and will normalize the input type. . #[inline] 11,243 ( 0.01%) fn layout_of(&self, ty: Ty<'tcx>) -> Self::LayoutOfResult { . self.spanned_layout_of(ty, DUMMY_SP) 11,240 ( 0.01%) } . . /// Computes the layout of a type, at `span`. Note that this implicitly . /// executes in "reveal all" mode, and will normalize the input type. . // FIXME(eddyb) avoid passing information like this, and instead add more . // `TyCtxt::at`-like APIs to be able to do e.g. `cx.at(span).layout_of(ty)`. . #[inline] . fn spanned_layout_of(&self, ty: Ty<'tcx>, span: Span) -> Self::LayoutOfResult { . let span = if !span.is_dummy() { span } else { self.layout_tcx_at_span() }; -- line 2185 ---------------------------------------- -- line 2268 ---------------------------------------- . Variants::Multiple { ref variants, .. } => &variants[variant_index], . }; . . 
assert_eq!(layout.variants, Variants::Single { index: variant_index }); . . TyAndLayout { ty: this.ty, layout } . } . 184,665 ( 0.08%) fn ty_and_layout_field(this: TyAndLayout<'tcx>, cx: &C, i: usize) -> TyAndLayout<'tcx> { . enum TyMaybeWithLayout<'tcx> { . Ty(Ty<'tcx>), . TyAndLayout(TyAndLayout<'tcx>), . } . 147,732 ( 0.07%) fn field_ty_or_layout<'tcx>( . this: TyAndLayout<'tcx>, . cx: &(impl HasTyCtxt<'tcx> + HasParamEnv<'tcx>), . i: usize, . ) -> TyMaybeWithLayout<'tcx> { . let tcx = cx.tcx(); . let tag_layout = |tag: Scalar| -> TyAndLayout<'tcx> { . let layout = Layout::scalar(cx, tag); . TyAndLayout { layout: tcx.intern_layout(layout), ty: tag.value.to_ty(tcx) } . }; . 36,933 ( 0.02%) match *this.ty.kind() { . ty::Bool . | ty::Char . | ty::Int(_) . | ty::Uint(_) . | ty::Float(_) . | ty::FnPtr(_) . | ty::Never . | ty::FnDef(..) -- line 2301 ---------------------------------------- -- line 2377 ---------------------------------------- . Variants::Multiple { tag, tag_field, .. } => { . if i == tag_field { . return TyMaybeWithLayout::TyAndLayout(tag_layout(tag)); . } . TyMaybeWithLayout::Ty(substs.as_generator().prefix_tys().nth(i).unwrap()) . } . }, . 43,748 ( 0.02%) ty::Tuple(tys) => TyMaybeWithLayout::Ty(tys[i].expect_ty()), . . // ADTs. . ty::Adt(def, substs) => { . match this.variants { . Variants::Single { index } => { . TyMaybeWithLayout::Ty(def.variants[index].fields[i].ty(tcx, substs)) . } . -- line 2393 ---------------------------------------- -- line 2402 ---------------------------------------- . ty::Projection(_) . | ty::Bound(..) . | ty::Placeholder(..) . | ty::Opaque(..) . | ty::Param(_) . | ty::Infer(_) . | ty::Error(_) => bug!("TyAndLayout::field: unexpected type `{}`", this.ty), . } 110,799 ( 0.05%) } . 49,244 ( 0.02%) match field_ty_or_layout(this, cx, i) { 24,622 ( 0.01%) TyMaybeWithLayout::Ty(field_ty) => { . cx.tcx().layout_of(cx.param_env().and(field_ty)).unwrap_or_else(|e| { . bug!( . "failed to get layout for `{}`: {},\n\ . despite it being a field (#{}) of an existing layout: {:#?}", . field_ty, . e, . i, . this . ) . }) . } . TyMaybeWithLayout::TyAndLayout(field_layout) => field_layout, . } 98,488 ( 0.05%) } . . fn ty_and_layout_pointee_info_at( . this: TyAndLayout<'tcx>, . cx: &C, . offset: Size, . ) -> Option { . let tcx = cx.tcx(); . let param_env = cx.param_env(); -- line 2435 ---------------------------------------- 22,190 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_middle/src/mir/interpret/allocation.rs -------------------------------------------------------------------------------- Ir -- line 18 ---------------------------------------- . }; . use crate::ty; . . /// This type represents an Allocation in the Miri/CTFE core engine. . /// . /// Its public API is rather low-level, working directly with allocation offsets and a custom error . /// type to account for the lack of an AllocId on this level. The Miri/CTFE core engine `memory` . /// module provides higher-level access. 100 ( 0.00%) #[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, TyEncodable, TyDecodable)] . #[derive(HashStable)] . pub struct Allocation { . /// The actual bytes of the allocation. . /// Note that the bytes of a pointer represent the offset of the pointer. 10 ( 0.00%) bytes: Box<[u8]>, . /// Maps from byte addresses to extra data for each pointer. . /// Only the first byte of a pointer is inserted into the map; i.e., . 
/// every entry in this map applies to `pointer_size` consecutive bytes starting . /// at the given offset. . relocations: Relocations, . /// Denotes which part of this allocation is initialized. . init_mask: InitMask, . /// The alignment of the allocation to detect unaligned reads. -- line 39 ---------------------------------------- -- line 91 ---------------------------------------- . impl AllocRange { . #[inline(always)] . pub fn end(self) -> Size { . self.start + self.size // This does overflow checking. . } . . /// Returns the `subrange` within this range; panics if it is not a subrange. . #[inline] 13,686 ( 0.01%) pub fn subrange(self, subrange: AllocRange) -> AllocRange { . let sub_start = self.start + subrange.start; . let range = alloc_range(sub_start, subrange.size); 13,686 ( 0.01%) assert!(range.end() <= self.end(), "access outside the bounds for given AllocRange"); . range 41,058 ( 0.02%) } . } . . // The constructors are all without extra; the extra gets added by a machine hook later. . impl Allocation { . /// Creates an allocation initialized by the given bytes . pub fn from_bytes<'a>( . slice: impl Into>, . align: Align, -- line 112 ---------------------------------------- -- line 125 ---------------------------------------- . } . . pub fn from_bytes_byte_aligned_immutable<'a>(slice: impl Into>) -> Self { . Allocation::from_bytes(slice, Align::ONE, Mutability::Not) . } . . /// Try to create an Allocation of `size` bytes, failing if there is not enough memory . /// available to the compiler to do so. 15,114 ( 0.01%) pub fn uninit(size: Size, align: Align, panic_on_fail: bool) -> InterpResult<'static, Self> { . let bytes = Box::<[u8]>::try_new_zeroed_slice(size.bytes_usize()).map_err(|_| { . // This results in an error that can happen non-deterministically, since the memory . // available to the compiler can change between runs. Normally queries are always . // deterministic. However, we can be non-determinstic here because all uses of const . // evaluation (including ConstProp!) will make compilation fail (via hard error . // or ICE) upon encountering a `MemoryExhausted` error. . if panic_on_fail { . panic!("Allocation::uninit called with panic_on_fail had allocation failure") -- line 141 ---------------------------------------- -- line 142 ---------------------------------------- . } . ty::tls::with(|tcx| { . tcx.sess.delay_span_bug(DUMMY_SP, "exhausted memory during interpreation") . }); . InterpError::ResourceExhaustion(ResourceExhaustionInfo::MemoryExhausted) . })?; . // SAFETY: the box was zero-allocated, which is a valid initial value for Box<[u8]> . let bytes = unsafe { bytes.assume_init() }; 19,236 ( 0.01%) Ok(Allocation { . bytes, . relocations: Relocations::new(), 4,122 ( 0.00%) init_mask: InitMask::new(size, false), . align, . mutability: Mutability::Mut, . extra: (), . }) 12,366 ( 0.01%) } . } . . impl Allocation { . /// Convert Tag and add Extra fields . pub fn convert_tag_add_extra( . self, . cx: &impl HasDataLayout, . extra: Extra, -- line 166 ---------------------------------------- -- line 194 ---------------------------------------- . . /// Raw accessors. Provide access to otherwise private bytes. . impl Allocation { . pub fn len(&self) -> usize { . self.bytes.len() . } . . pub fn size(&self) -> Size { 10,959 ( 0.01%) Size::from_bytes(self.len()) . } . . /// Looks at a slice which may describe uninitialized bytes or describe a relocation. This differs . /// from `get_bytes_with_uninit_and_ptr` in that it does no relocation checks (even on the . 
/// edges) at all. . /// This must not be used for reads affecting the interpreter execution. . pub fn inspect_with_uninit_and_ptr_outside_interpreter(&self, range: Range) -> &[u8] { . &self.bytes[range] -- line 210 ---------------------------------------- -- line 227 ---------------------------------------- . /// or pointer bytes. You should never call this, call `get_bytes` or . /// `get_bytes_with_uninit_and_ptr` instead, . /// . /// This function also guarantees that the resulting pointer will remain stable . /// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies . /// on that. . /// . /// It is the caller's responsibility to check bounds and alignment beforehand. 98,568 ( 0.05%) fn get_bytes_internal( . &self, . cx: &impl HasDataLayout, . range: AllocRange, . check_init_and_ptr: bool, . ) -> AllocResult<&[u8]> { 16,428 ( 0.01%) if check_init_and_ptr { . self.check_init(range)?; . self.check_relocations(cx, range)?; . } else { . // We still don't want relocations on the *edges*. . self.check_relocation_edges(cx, range)?; . } . 32,856 ( 0.02%) Ok(&self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]) 73,926 ( 0.03%) } . . /// Checks that these bytes are initialized and not pointer bytes, and then return them . /// as a slice. . /// . /// It is the caller's responsibility to check bounds and alignment beforehand. . /// Most likely, you want to use the `PlaceTy` and `OperandTy`-based methods . /// on `InterpCx` instead. . #[inline] -- line 258 ---------------------------------------- -- line 265 ---------------------------------------- . /// . /// It is the caller's responsibility to check bounds and alignment beforehand. . #[inline] . pub fn get_bytes_with_uninit_and_ptr( . &self, . cx: &impl HasDataLayout, . range: AllocRange, . ) -> AllocResult<&[u8]> { 16,433 ( 0.01%) self.get_bytes_internal(cx, range, false) . } . . /// Just calling this already marks everything as defined and removes relocations, . /// so be sure to actually put data there! . /// . /// It is the caller's responsibility to check bounds and alignment beforehand. . /// Most likely, you want to use the `PlaceTy` and `OperandTy`-based methods . /// on `InterpCx` instead. 60,203 ( 0.03%) pub fn get_bytes_mut( . &mut self, . cx: &impl HasDataLayout, . range: AllocRange, . ) -> AllocResult<&mut [u8]> { . self.mark_init(range, true); 27,365 ( 0.01%) self.clear_relocations(cx, range)?; . 21,892 ( 0.01%) Ok(&mut self.bytes[range.start.bytes_usize()..range.end().bytes_usize()]) 43,784 ( 0.02%) } . . /// A raw pointer variant of `get_bytes_mut` that avoids invalidating existing aliases into this memory. 11 ( 0.00%) pub fn get_bytes_mut_ptr( . &mut self, . cx: &impl HasDataLayout, . range: AllocRange, . ) -> AllocResult<*mut [u8]> { . self.mark_init(range, true); 5 ( 0.00%) self.clear_relocations(cx, range)?; . 2 ( 0.00%) assert!(range.end().bytes_usize() <= self.bytes.len()); // need to do our own bounds-check . let begin_ptr = self.bytes.as_mut_ptr().wrapping_add(range.start.bytes_usize()); 2 ( 0.00%) let len = range.end().bytes_usize() - range.start.bytes_usize(); 4 ( 0.00%) Ok(ptr::slice_from_raw_parts_mut(begin_ptr, len)) 8 ( 0.00%) } . } . . /// Reading and writing. . impl Allocation { . /// Validates that `ptr.offset` and `ptr.offset + size` do not point to the middle of a . /// relocation. If `allow_uninit_and_ptr` is `false`, also enforces that the memory in the . /// given range contains neither relocations nor uninitialized bytes. . 
pub fn check_bytes( -- line 314 ---------------------------------------- -- line 329 ---------------------------------------- . . /// Reads a *non-ZST* scalar. . /// . /// ZSTs can't be read because in order to obtain a `Pointer`, we need to check . /// for ZSTness anyway due to integer pointers being valid for ZSTs. . /// . /// It is the caller's responsibility to check bounds and alignment beforehand. . /// Most likely, you want to call `InterpCx::read_scalar` instead of this method. 106,769 ( 0.05%) pub fn read_scalar( . &self, . cx: &impl HasDataLayout, . range: AllocRange, . ) -> AllocResult> { . // `get_bytes_with_uninit_and_ptr` tests relocation edges. . // We deliberately error when loading data that partially has provenance, or partially . // initialized data (that's the check below), into a scalar. The LLVM semantics of this are . // unclear so we are conservative. See for -- line 345 ---------------------------------------- -- line 348 ---------------------------------------- . // Uninit check happens *after* we established that the alignment is correct. . // We must not return `Ok()` for unaligned pointers! . if self.is_init(range).is_err() { . // This inflates uninitialized bytes to the entire scalar, even if only a few . // bytes are uninitialized. . return Ok(ScalarMaybeUninit::Uninit); . } . // Now we do the actual reading. 8,213 ( 0.00%) let bits = read_target_uint(cx.data_layout().endian, bytes).unwrap(); . // See if we got a pointer. 8,213 ( 0.00%) if range.size != cx.data_layout().pointer_size { . // Not a pointer. . // *Now*, we better make sure that the inside is free of relocations too. . self.check_relocations(cx, range)?; . } else { . // Maybe a pointer. . if let Some(&prov) = self.relocations.get(&range.start) { . let ptr = Pointer::new(prov, Size::from_bytes(bits)); 50 ( 0.00%) return Ok(ScalarMaybeUninit::from_pointer(ptr, cx)); . } . } . // We don't. Just return the bits. 49,218 ( 0.02%) Ok(ScalarMaybeUninit::Scalar(Scalar::from_uint(bits, range.size))) 73,917 ( 0.03%) } . . /// Writes a *non-ZST* scalar. . /// . /// ZSTs can't be read because in order to obtain a `Pointer`, we need to check . /// for ZSTness anyway due to integer pointers being valid for ZSTs. . /// . /// It is the caller's responsibility to check bounds and alignment beforehand. . /// Most likely, you want to call `InterpCx::write_scalar` instead of this method. 38,311 ( 0.02%) pub fn write_scalar( . &mut self, . cx: &impl HasDataLayout, . range: AllocRange, . val: ScalarMaybeUninit, . ) -> AllocResult { 5,473 ( 0.00%) assert!(self.mutability == Mutability::Mut); . 16,419 ( 0.01%) let val = match val { . ScalarMaybeUninit::Scalar(scalar) => scalar, . ScalarMaybeUninit::Uninit => { . self.mark_init(range, false); . return Ok(()); . } . }; . . // `to_bits_or_ptr_internal` is the right method because we just want to store this data . // as-is into memory. 21,892 ( 0.01%) let (bytes, provenance) = match val.to_bits_or_ptr_internal(range.size) { . Err(val) => { . let (provenance, offset) = val.into_parts(); . (u128::from(offset.bytes()), Some(provenance)) . } . Ok(data) => (data, None), . }; . 16,419 ( 0.01%) let endian = cx.data_layout().endian; 16,419 ( 0.01%) let dst = self.get_bytes_mut(cx, range)?; . write_target_uint(endian, dst, bytes).unwrap(); . . // See if we have to also write a relocation. 16,415 ( 0.01%) if let Some(provenance) = provenance { 16 ( 0.00%) self.relocations.0.insert(range.start, provenance); . } . . Ok(()) 49,257 ( 0.02%) } . } . . /// Relocations. . impl Allocation { . 
/// Returns all relocations overlapping with the given pointer-offset pair. 90,294 ( 0.04%) pub fn get_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> &[(Size, Tag)] { . // We have to go back `pointer_size - 1` bytes, as that one would still overlap with . // the beginning of this range. 90,297 ( 0.04%) let start = range.start.bytes().saturating_sub(cx.data_layout().pointer_size.bytes() - 1); . self.relocations.range(Size::from_bytes(start)..range.end()) 120,392 ( 0.06%) } . . /// Checks that there are no relocations overlapping with the given range. . #[inline(always)] . fn check_relocations(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult { 147,744 ( 0.07%) if self.get_relocations(cx, range).is_empty() { . Ok(()) . } else { . Err(AllocError::ReadPointerAsBytes) . } . } . . /// Removes all relocations inside the given range. . /// If there are relocations overlapping with the edges, they . /// are removed as well *and* the bytes they cover are marked as . /// uninitialized. This is a somewhat odd "spooky action at a distance", . /// but it allows strictly more code to run than if we would just error . /// immediately in that case. 60,214 ( 0.03%) fn clear_relocations(&mut self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult . where . Tag: Provenance, . { . // Find the start and end of the given range and its outermost relocations. . let (first, last) = { . // Find all relocations overlapping the given range. 27,370 ( 0.01%) let relocations = self.get_relocations(cx, range); 5,474 ( 0.00%) if relocations.is_empty() { . return Ok(()); . } . . ( . relocations.first().unwrap().0, . relocations.last().unwrap().0 + cx.data_layout().pointer_size, . ) . }; -- line 461 ---------------------------------------- -- line 478 ---------------------------------------- . } . self.init_mask.set_range(end, last, false); . } . . // Forget all the relocations. . self.relocations.0.remove_range(first..last); . . Ok(()) 38,318 ( 0.02%) } . . /// Errors if there are relocations overlapping with the edges of the . /// given memory range. . #[inline] . fn check_relocation_edges(&self, cx: &impl HasDataLayout, range: AllocRange) -> AllocResult { . self.check_relocations(cx, alloc_range(range.start, Size::ZERO))?; . self.check_relocations(cx, alloc_range(range.end(), Size::ZERO))?; . Ok(()) -- line 494 ---------------------------------------- -- line 496 ---------------------------------------- . } . . /// "Relocations" stores the provenance information of pointers stored in memory. . #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, TyEncodable, TyDecodable)] . pub struct Relocations(SortedMap); . . impl Relocations { . pub fn new() -> Self { 2,748 ( 0.00%) Relocations(SortedMap::new()) . } . . // The caller must guarantee that the given relocations are already sorted . // by address and contain no duplicates. . pub fn from_presorted(r: Vec<(Size, Tag)>) -> Self { . Relocations(SortedMap::from_presorted_elements(r)) . } . } -- line 512 ---------------------------------------- -- line 520 ---------------------------------------- . } . . /// A partial, owned list of relocations to transfer into another allocation. . pub struct AllocationRelocations { . relative_relocations: Vec<(Size, Tag)>, . } . . impl Allocation { 10 ( 0.00%) pub fn prepare_relocation_copy( . &self, . cx: &impl HasDataLayout, . src: AllocRange, . dest: Size, . count: u64, . ) -> AllocationRelocations { . 
let relocations = self.get_relocations(cx, src); 1 ( 0.00%) if relocations.is_empty() { 2 ( 0.00%) return AllocationRelocations { relative_relocations: Vec::new() }; . } . . let size = src.size; . let mut new_relocations = Vec::with_capacity(relocations.len() * (count as usize)); . . for i in 0..count { . new_relocations.extend(relocations.iter().map(|&(offset, reloc)| { . // compute offset for current repetition -- line 545 ---------------------------------------- -- line 548 ---------------------------------------- . // shift offsets from source allocation to destination allocation . (offset + dest_offset) - src.start, // `Size` operations . reloc, . ) . })); . } . . AllocationRelocations { relative_relocations: new_relocations } 9 ( 0.00%) } . . /// Applies a relocation copy. . /// The affected range, as defined in the parameters to `prepare_relocation_copy` is expected . /// to be clear of relocations. 7 ( 0.00%) pub fn mark_relocation_range(&mut self, relocations: AllocationRelocations) { 4 ( 0.00%) self.relocations.0.insert_presorted(relocations.relative_relocations); 8 ( 0.00%) } . } . . //////////////////////////////////////////////////////////////////////////////// . // Uninitialized byte tracking . //////////////////////////////////////////////////////////////////////////////// . . type Block = u64; . -- line 571 ---------------------------------------- -- line 582 ---------------------------------------- . pub const BLOCK_SIZE: u64 = 64; . . #[inline] . fn bit_index(bits: Size) -> (usize, usize) { . // BLOCK_SIZE is the number of bits that can fit in a `Block`. . // Each bit in a `Block` represents the initialization state of one byte of an allocation, . // so we use `.bytes()` here. . let bits = bits.bytes(); 43,826 ( 0.02%) let a = bits / InitMask::BLOCK_SIZE; 13,698 ( 0.01%) let b = bits % InitMask::BLOCK_SIZE; . (usize::try_from(a).unwrap(), usize::try_from(b).unwrap()) . } . . #[inline] . fn size_from_bit_index(block: impl TryInto, bit: impl TryInto) -> Size { . let block = block.try_into().ok().unwrap(); . let bit = bit.try_into().ok().unwrap(); 2,774 ( 0.00%) Size::from_bytes(block * InitMask::BLOCK_SIZE + bit) . } . 5,496 ( 0.00%) pub fn new(size: Size, state: bool) -> Self { 2,748 ( 0.00%) let mut m = InitMask { blocks: vec![], len: Size::ZERO }; 1,374 ( 0.00%) m.grow(size, state); . m 6,870 ( 0.00%) } . 43,792 ( 0.02%) pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) { 10,948 ( 0.01%) let len = self.len; 5,474 ( 0.00%) if end > len { . self.grow(end - len, new_state); . } 54,740 ( 0.03%) self.set_range_inbounds(start, end, new_state); . } . 27,396 ( 0.01%) pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) { . let (blocka, bita) = Self::bit_index(start); . let (blockb, bitb) = Self::bit_index(end); 13,698 ( 0.01%) if blocka == blockb { . // First set all bits except the first `bita`, . // then unset the last `64 - bitb` bits. 13,354 ( 0.01%) let range = if bitb == 0 { . u64::MAX << bita . } else { 46,739 ( 0.02%) (u64::MAX << bita) & (u64::MAX >> (64 - bitb)) . }; 13,354 ( 0.01%) if new_state { 5,305 ( 0.00%) self.blocks[blocka] |= range; . } else { 2,744 ( 0.00%) self.blocks[blocka] &= !range; . } . return; . } . // across block boundaries 344 ( 0.00%) if new_state { . // Set `bita..64` to `1`. 510 ( 0.00%) self.blocks[blocka] |= u64::MAX << bita; . // Set `0..bitb` to `1`. 340 ( 0.00%) if bitb != 0 { . self.blocks[blockb] |= u64::MAX >> (64 - bitb); . } . 
// Fill in all the other blocks (much faster than one bit at a time). . for block in (blocka + 1)..blockb { . self.blocks[block] = u64::MAX; . } . } else { . // Set `bita..64` to `0`. 8 ( 0.00%) self.blocks[blocka] &= !(u64::MAX << bita); . // Set `0..bitb` to `0`. 4 ( 0.00%) if bitb != 0 { 12 ( 0.00%) self.blocks[blockb] &= !(u64::MAX >> (64 - bitb)); . } . // Fill in all the other blocks (much faster than one bit at a time). . for block in (blocka + 1)..blockb { 168 ( 0.00%) self.blocks[block] = 0; . } . } 13,698 ( 0.01%) } . . #[inline] . pub fn get(&self, i: Size) -> bool { . let (block, bit) = Self::bit_index(i); 7 ( 0.00%) (self.blocks[block] & (1 << bit)) != 0 . } . . #[inline] . pub fn set(&mut self, i: Size, new_state: bool) { . let (block, bit) = Self::bit_index(i); . self.set_bit(block, bit, new_state); . } . -- line 671 ---------------------------------------- -- line 673 ---------------------------------------- . fn set_bit(&mut self, block: usize, bit: usize, new_state: bool) { . if new_state { . self.blocks[block] |= 1 << bit; . } else { . self.blocks[block] &= !(1 << bit); . } . } . 8,244 ( 0.00%) pub fn grow(&mut self, amount: Size, new_state: bool) { 2,748 ( 0.00%) if amount.bytes() == 0 { . return; . } . let unused_trailing_bits = 10,992 ( 0.01%) u64::try_from(self.blocks.len()).unwrap() * Self::BLOCK_SIZE - self.len.bytes(); 2,748 ( 0.00%) if amount.bytes() > unused_trailing_bits { 4,122 ( 0.00%) let additional_blocks = amount.bytes() / Self::BLOCK_SIZE + 1; . self.blocks.extend( . // FIXME(oli-obk): optimize this by repeating `new_state as Block`. . iter::repeat(0).take(usize::try_from(additional_blocks).unwrap()), . ); . } 1,374 ( 0.00%) let start = self.len; . self.len += amount; 12,366 ( 0.01%) self.set_range_inbounds(start, start + amount, new_state); // `Size` operation . } . . /// Returns the index of the first bit in `start..end` (end-exclusive) that is equal to is_init. 16,428 ( 0.01%) fn find_bit(&self, start: Size, end: Size, is_init: bool) -> Option { . /// A fast implementation of `find_bit`, . /// which skips over an entire block at a time if it's all 0s (resp. 1s), . /// and finds the first 1 (resp. 0) bit inside a block using `trailing_zeros` instead of a loop. . /// . /// Note that all examples below are written with 8 (instead of 64) bit blocks for simplicity, . /// and with the least significant bit (and lowest block) first: . /// . /// 00000000|00000000 -- line 708 ---------------------------------------- -- line 728 ---------------------------------------- . // start_bit = 3 . // is_init = false . // Note that, for the examples in this function, the most significant bit is written first, . // which is backwards compared to the comments in `find_bit`/`find_bit_fast`. . . // Invert bits so we're always looking for the first set bit. . // ! 0b00111011 . // bits = 0b11000100 32,856 ( 0.02%) let bits = if is_init { bits } else { !bits }; . // Mask off unused start bits. . // 0b11000100 . // & 0b11111000 . // bits = 0b11000000 24,642 ( 0.01%) let bits = bits & (!0 << start_bit); . // Find set bit, if any. . // bit = trailing_zeros(0b11000000) . // bit = 6 16,428 ( 0.01%) if bits == 0 { . None . } else { . let bit = bits.trailing_zeros(); . Some(InitMask::size_from_bit_index(block, bit)) . } . } . 8,214 ( 0.00%) if start >= end { . return None; . } . . // Convert `start` and `end` to block indexes and bit indexes within each block. . // We must convert `end` to an inclusive bound to handle block boundaries correctly. . // . // For example: . 
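// ---------------------------------------------------------------------
// Editor's sketch (not part of the profiled source): the word-at-a-time
// mask update performed by set_range_inbounds above, written standalone
// over a plain slice of u64 words. Bit i of word i/64 tracks one byte's
// init state; the partial first/last words get masks, and whole words in
// between are written in one store each.
fn set_bits(blocks: &mut [u64], start: u64, end: u64, value: bool) {
    if start >= end {
        return;
    }
    let first = (start / 64) as usize;
    let last = ((end - 1) / 64) as usize; // last word actually touched
    let lo_mask = u64::MAX << (start % 64); // bits start%64.. of a word
    let hi_bits = end - (last as u64) * 64; // 1..=64 bits used in last word
    let hi_mask = if hi_bits == 64 { u64::MAX } else { !(u64::MAX << hi_bits) };
    if first == last {
        let m = lo_mask & hi_mask;
        if value { blocks[first] |= m } else { blocks[first] &= !m }
        return;
    }
    if value { blocks[first] |= lo_mask } else { blocks[first] &= !lo_mask }
    for w in &mut blocks[first + 1..last] {
        *w = if value { u64::MAX } else { 0 }; // 64 bits per store
    }
    if value { blocks[last] |= hi_mask } else { blocks[last] &= !hi_mask }
}
// ---------------------------------------------------------------------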
// -- line 761 ---------------------------------------- -- line 819 ---------------------------------------- . // The block marked (3) in this example is the first block that will be handled by this loop, . // and it will be skipped for that reason: . // . // (3) . // -------- . // (e) 01000000|00000000|00000001 . // ^~~~~~~~~~~~~~~~~~^ . // start end 10,880 ( 0.00%) if start_block < end_block_inclusive { . // This loop is written in a specific way for performance. . // Notably: `..end_block_inclusive + 1` is used for an inclusive range instead of `..=end_block_inclusive`, . // and `.zip(start_block + 1..)` is used to track the index instead of `.enumerate().skip().take()`, . // because both alternatives result in significantly worse codegen. . // `end_block_inclusive + 1` is guaranteed not to wrap, because `end_block_inclusive <= end / BLOCK_SIZE`, . // and `BLOCK_SIZE` (the number of bits per block) will always be at least 8 (1 byte). . for (&bits, block) in init_mask.blocks[start_block + 1..end_block_inclusive + 1] . .iter() -- line 835 ---------------------------------------- -- line 879 ---------------------------------------- . "optimized implementation of find_bit is wrong for start={:?} end={:?} is_init={} init_mask={:#?}", . start, . end, . is_init, . self . ); . . result 16,428 ( 0.01%) } . } . . /// A contiguous chunk of initialized or uninitialized memory. . pub enum InitChunk { . Init(Range), . Uninit(Range), . } . -- line 895 ---------------------------------------- -- line 913 ---------------------------------------- . . impl InitMask { . /// Checks whether the range `start..end` (end-exclusive) is entirely initialized. . /// . /// Returns `Ok(())` if it's initialized. Otherwise returns a range of byte . /// indexes for the first contiguous span of the uninitialized access. . #[inline] . pub fn is_range_initialized(&self, start: Size, end: Size) -> Result<(), Range> { 8,213 ( 0.00%) if end > self.len { . return Err(self.len..end); . } . 41,065 ( 0.02%) let uninit_start = self.find_bit(start, end, false); . 16,426 ( 0.01%) match uninit_start { . Some(uninit_start) => { . let uninit_end = self.find_bit(uninit_start, end, true).unwrap_or(end); . Err(uninit_start..uninit_end) . } . None => Ok(()), . } . } . -- line 935 ---------------------------------------- -- line 938 ---------------------------------------- . /// . /// The iterator guarantees the following: . /// - Chunks are nonempty. . /// - Chunks are adjacent (each range's start is equal to the previous range's end). . /// - Chunks span exactly `start..end` (the first starts at `start`, the last ends at `end`). . /// - Chunks alternate between [`InitChunk::Init`] and [`InitChunk::Uninit`]. . #[inline] . pub fn range_as_init_chunks(&self, start: Size, end: Size) -> InitChunkIter<'_> { 1 ( 0.00%) assert!(end <= self.len); . 1 ( 0.00%) let is_init = if start < end { . self.get(start) . } else { . // `start..end` is empty: there are no chunks, so use some arbitrary value . false . }; . . InitChunkIter { init_mask: self, is_init, start, end } . } -- line 956 ---------------------------------------- -- line 968 ---------------------------------------- . end: Size, . } . . impl<'a> Iterator for InitChunkIter<'a> { . type Item = InitChunk; . . #[inline] . fn next(&mut self) -> Option { 1 ( 0.00%) if self.start >= self.end { . return None; . } . . let end_of_chunk = 5 ( 0.00%) self.init_mask.find_bit(self.start, self.end, !self.is_init).unwrap_or(self.end); . let range = self.start..end_of_chunk; . . 
let ret = 4 ( 0.00%) Some(if self.is_init { InitChunk::Init(range) } else { InitChunk::Uninit(range) }); . . self.is_init = !self.is_init; . self.start = end_of_chunk; . . ret . } . } . -- line 993 ---------------------------------------- -- line 1010 ---------------------------------------- . access_size: range.size, . uninit_offset: idx_range.start, . uninit_size: idx_range.end - idx_range.start, // `Size` subtraction . })) . }) . } . . pub fn mark_init(&mut self, range: AllocRange, is_init: bool) { 10,948 ( 0.01%) if range.size.bytes() == 0 { . return; . } 5,474 ( 0.00%) assert!(self.mutability == Mutability::Mut); 21,896 ( 0.01%) self.init_mask.set_range(range.start, range.end(), is_init); . } . } . . /// Run-length encoding of the uninit mask. . /// Used to copy parts of a mask multiple times to another allocation. . pub struct InitMaskCompressed { . /// Whether the first range is initialized. . initial: bool, -- line 1030 ---------------------------------------- -- line 1032 ---------------------------------------- . /// The initialization state of the ranges alternate starting with `initial`. . ranges: smallvec::SmallVec<[u64; 1]>, . } . . impl InitMaskCompressed { . pub fn no_bytes_init(&self) -> bool { . // The `ranges` are run-length encoded and of alternating initialization state. . // So if `ranges.len() > 1` then the second block is an initialized range. 2 ( 0.00%) !self.initial && self.ranges.len() == 1 1 ( 0.00%) } . } . . /// Transferring the initialization mask to other allocations. . impl Allocation { . /// Creates a run-length encoding of the initialization mask; panics if range is empty. . /// . /// This is essentially a more space-efficient version of . /// `InitMask::range_as_init_chunks(...).collect::>()`. 7 ( 0.00%) pub fn compress_uninit_range(&self, range: AllocRange) -> InitMaskCompressed { . // Since we are copying `size` bytes from `src` to `dest + i * size` (`for i in 0..repeat`), . // a naive initialization mask copying algorithm would repeatedly have to read the initialization mask from . // the source and write it to the destination. Even if we optimized the memory accesses, . // we'd be doing all of this `repeat` times. . // Therefore we precompute a compressed version of the initialization mask of the source value and . // then write it back `repeat` times without computing any more information from the source. . . // A precomputed cache for ranges of initialized / uninitialized bits -- line 1058 ---------------------------------------- -- line 1063 ---------------------------------------- . let mut ranges = smallvec::SmallVec::<[u64; 1]>::new(); . . let mut chunks = self.init_mask.range_as_init_chunks(range.start, range.end()).peekable(); . . let initial = chunks.peek().expect("range should be nonempty").is_init(); . . // Here we rely on `range_as_init_chunks` to yield alternating init/uninit chunks. . for chunk in chunks { 2 ( 0.00%) let len = chunk.range().end.bytes() - chunk.range().start.bytes(); . ranges.push(len); . } . 7 ( 0.00%) InitMaskCompressed { ranges, initial } 8 ( 0.00%) } . . /// Applies multiple instances of the run-length encoding to the initialization mask. 11 ( 0.00%) pub fn mark_compressed_init_range( . &mut self, . defined: &InitMaskCompressed, . range: AllocRange, . repeat: u64, . ) { . // An optimization where we can just overwrite an entire range of initialization . // bits if they are going to be uniformly `1` or `0`. 2 ( 0.00%) if defined.ranges.len() <= 1 { 10 ( 0.00%) self.init_mask.set_range_inbounds( . range.start, . 
range.start + range.size * repeat, // `Size` operations 1 ( 0.00%) defined.initial, . ); . return; . } . . for mut j in 0..repeat { . j *= range.size.bytes(); . j += range.start.bytes(); . let mut cur = defined.initial; -- line 1099 ---------------------------------------- 161,805 ( 0.07%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_data_structures/src/sip128.rs -------------------------------------------------------------------------------- Ir -- line 91 ---------------------------------------- . // maximum of number bytes needed to fill an 8-byte-sized element on which . // SipHash operates. Note that for variable-sized copies which are known to be . // less than 8 bytes, this function will perform more work than necessary unless . // the compiler is able to optimize the extra work away. . #[inline] . unsafe fn copy_nonoverlapping_small(src: *const u8, dst: *mut u8, count: usize) { . debug_assert!(count <= 8); . 4,031 ( 0.00%) if count == 8 { . ptr::copy_nonoverlapping(src, dst, 8); . return; . } . . let mut i = 0; 7,200 ( 0.00%) if i + 3 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 4); . i += 4; . } . 15,544 ( 0.01%) if i + 1 < count { . ptr::copy_nonoverlapping(src.add(i), dst.add(i), 2); 2,142 ( 0.00%) i += 2 . } . 7,200 ( 0.00%) if i < count { 3,197 ( 0.00%) *dst.add(i) = *src.add(i); . i += 1; . } . . debug_assert_eq!(i, count); . } . . // # Implementation . // -- line 124 ---------------------------------------- -- line 201 ---------------------------------------- . . hasher . } . . // A specialized write function for values with size <= 8. . #[inline] . fn short_write(&mut self, x: T) { . let size = mem::size_of::(); 45,518 ( 0.02%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . 300,080 ( 0.14%) if nbuf + size < BUFFER_SIZE { . unsafe { . // The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . } . 100,209 ( 0.05%) self.nbuf = nbuf + size; . . return; . } . 11,665 ( 0.01%) unsafe { self.short_write_process_buffer(x) } . } . . // A specialized write function for values with size <= 8 that should only . // be called when the write would cause the buffer to fill. . // . // SAFETY: the write of `x` into `self.buf` starting at byte offset . // `self.nbuf` must cause `self.buf` to become fully initialized (and not . // overflow) if it wasn't already. . #[inline(never)] 4,168 ( 0.00%) unsafe fn short_write_process_buffer(&mut self, x: T) { . let size = mem::size_of::(); 4,168 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(size <= 8); . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + size >= BUFFER_SIZE); . debug_assert!(nbuf + size < BUFFER_WITH_SPILL_SIZE); . . // Copy first part of input into end of buffer, possibly into spill . // element. The memcpy call is optimized away because the size is known. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . ptr::copy_nonoverlapping(&x as *const _ as *const u8, dst, size); . . // Process buffer. . for i in 0..BUFFER_CAPACITY { 41,680 ( 0.02%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 33,344 ( 0.02%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 37,512 ( 0.02%) self.state.v0 ^= elem; . } . . 
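// ---------------------------------------------------------------------
// Editor's sketch (not part of the profiled source): one SipHash round,
// which is what the compress! macro invoked by c_rounds/d_rounds below
// expands to. SipHash-2-4 runs two such rounds per 8-byte input element
// and four during finalization; the rotation constants are fixed by the
// SipHash specification.
struct SipState { v0: u64, v1: u64, v2: u64, v3: u64 }

fn sip_round(s: &mut SipState) {
    s.v0 = s.v0.wrapping_add(s.v1);
    s.v1 = s.v1.rotate_left(13);
    s.v1 ^= s.v0;
    s.v0 = s.v0.rotate_left(32);
    s.v2 = s.v2.wrapping_add(s.v3);
    s.v3 = s.v3.rotate_left(16);
    s.v3 ^= s.v2;
    s.v0 = s.v0.wrapping_add(s.v3);
    s.v3 = s.v3.rotate_left(21);
    s.v3 ^= s.v0;
    s.v2 = s.v2.wrapping_add(s.v1);
    s.v1 = s.v1.rotate_left(17);
    s.v1 ^= s.v2;
    s.v2 = s.v2.rotate_left(32);
}
// ---------------------------------------------------------------------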
// Copy remaining input into start of buffer by copying size - 1 . // elements from spill (at most size - 1 bytes could have overflowed . // into the spill). The memcpy call is optimized away because the size . // is known. And the whole copy is optimized away for size == 1. . let src = self.buf.get_unchecked(BUFFER_SPILL_INDEX) as *const _ as *const u8; . ptr::copy_nonoverlapping(src, self.buf.as_mut_ptr() as *mut u8, size - 1); . . // This function should only be called when the write fills the buffer. . // Therefore, when size == 1, the new `self.nbuf` must be zero. The size . // is statically known, so the branch is optimized away. 19,384 ( 0.01%) self.nbuf = if size == 1 { 0 } else { nbuf + size - BUFFER_SIZE }; 16,672 ( 0.01%) self.processed += BUFFER_SIZE; 8,336 ( 0.00%) } . . // A write function for byte slices. . #[inline] . fn slice_write(&mut self, msg: &[u8]) { . let length = msg.len(); 3,777 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . 19,986 ( 0.01%) if nbuf + length < BUFFER_SIZE { . unsafe { . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . 7,324 ( 0.00%) if length <= 8 { . copy_nonoverlapping_small(msg.as_ptr(), dst, length); . } else { . // This memcpy is *not* optimized away. . ptr::copy_nonoverlapping(msg.as_ptr(), dst, length); . } . } . 3,672 ( 0.00%) self.nbuf = nbuf + length; . . return; . } . 1,738 ( 0.00%) unsafe { self.slice_write_process_buffer(msg) } . } . . // A write function for byte slices that should only be called when the . // write would cause the buffer to fill. . // . // SAFETY: `self.buf` must be initialized up to the byte offset `self.nbuf`, . // and `msg` must contain enough bytes to initialize the rest of the element . // containing the byte offset `self.nbuf`. . #[inline(never)] 1,630 ( 0.00%) unsafe fn slice_write_process_buffer(&mut self, msg: &[u8]) { . let length = msg.len(); 326 ( 0.00%) let nbuf = self.nbuf; . debug_assert!(nbuf < BUFFER_SIZE); . debug_assert!(nbuf + length >= BUFFER_SIZE); . . // Always copy first part of input into current element of buffer. . // This function should only be called when the write fills the buffer, . // so we know that there is enough input to fill the current element. 978 ( 0.00%) let valid_in_elem = nbuf % ELEM_SIZE; 326 ( 0.00%) let needed_in_elem = ELEM_SIZE - valid_in_elem; . . let src = msg.as_ptr(); . let dst = (self.buf.as_mut_ptr() as *mut u8).add(nbuf); . copy_nonoverlapping_small(src, dst, needed_in_elem); . . // Process buffer. . . // Using `nbuf / ELEM_SIZE + 1` rather than `(nbuf + needed_in_elem) / . // ELEM_SIZE` to show the compiler that this loop's upper bound is > 0. . // We know that is true, because last step ensured we have a full . // element in the buffer. 652 ( 0.00%) let last = nbuf / ELEM_SIZE + 1; . . for i in 0..last { 2,514 ( 0.00%) let elem = self.buf.get_unchecked(i).assume_init().to_le(); 2,840 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 5,028 ( 0.00%) self.state.v0 ^= elem; . } . . // Process the remaining element-sized chunks of input. . let mut processed = needed_in_elem; 652 ( 0.00%) let input_left = length - processed; 225 ( 0.00%) let elems_left = input_left / ELEM_SIZE; . let extra_bytes_left = input_left % ELEM_SIZE; . . for _ in 0..elems_left { 138 ( 0.00%) let elem = (msg.as_ptr().add(processed) as *const u64).read_unaligned().to_le(); 138 ( 0.00%) self.state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut self.state); 138 ( 0.00%) self.state.v0 ^= elem; 276 ( 0.00%) processed += ELEM_SIZE; . } . . 
// Copy remaining input into start of buffer. . let src = msg.as_ptr().add(processed); . let dst = self.buf.as_mut_ptr() as *mut u8; . copy_nonoverlapping_small(src, dst, extra_bytes_left); . 326 ( 0.00%) self.nbuf = extra_bytes_left; 1,630 ( 0.00%) self.processed += nbuf + processed; 1,956 ( 0.00%) } . . #[inline] . pub fn finish128(mut self) -> (u64, u64) { . debug_assert!(self.nbuf < BUFFER_SIZE); . . // Process full elements in buffer. 1,425 ( 0.00%) let last = self.nbuf / ELEM_SIZE; . . // Since we're consuming self, avoid updating members for a potential . // performance gain. 1,900 ( 0.00%) let mut state = self.state; . . for i in 0..last { 1,444 ( 0.00%) let elem = unsafe { self.buf.get_unchecked(i).assume_init().to_le() }; 1,444 ( 0.00%) state.v3 ^= elem; . Sip24Rounds::c_rounds(&mut state); 1,444 ( 0.00%) state.v0 ^= elem; . } . . // Get remaining partial element. 950 ( 0.00%) let elem = if self.nbuf % ELEM_SIZE != 0 { . unsafe { . // Ensure element is initialized by writing zero bytes. At most . // `ELEM_SIZE - 1` are required given the above check. It's safe . // to write this many because we have the spill and we maintain . // `self.nbuf` such that this write will start before the spill. . let dst = (self.buf.as_mut_ptr() as *mut u8).add(self.nbuf); . ptr::write_bytes(dst, 0, ELEM_SIZE - 1); 410 ( 0.00%) self.buf.get_unchecked(last).assume_init().to_le() . } . } else { . 0 . }; . . // Finalize the hash. 1,360 ( 0.00%) let length = self.processed + self.nbuf; 948 ( 0.00%) let b: u64 = ((length as u64 & 0xff) << 56) | elem; . 474 ( 0.00%) state.v3 ^= b; . Sip24Rounds::c_rounds(&mut state); 474 ( 0.00%) state.v0 ^= b; . 474 ( 0.00%) state.v2 ^= 0xee; . Sip24Rounds::d_rounds(&mut state); 1,190 ( 0.00%) let _0 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . 240 ( 0.00%) state.v1 ^= 0xdd; . Sip24Rounds::d_rounds(&mut state); 240 ( 0.00%) let _1 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; . . (_0, _1) . } . } . . impl Hasher for SipHasher128 { . #[inline] . fn write_u8(&mut self, i: u8) { -- line 414 ---------------------------------------- -- line 471 ---------------------------------------- . } . . #[derive(Debug, Clone, Default)] . struct Sip24Rounds; . . impl Sip24Rounds { . #[inline] . fn c_rounds(state: &mut State) { 156,802 ( 0.07%) compress!(state); 171,702 ( 0.08%) compress!(state); . } . . #[inline] . fn d_rounds(state: &mut State) { 2,376 ( 0.00%) compress!(state); 2,376 ( 0.00%) compress!(state); 2,376 ( 0.00%) compress!(state); 1,903 ( 0.00%) compress!(state); . } . } 20,930 ( 0.01%) -------------------------------------------------------------------------------- -- Auto-annotated source: /usr/home/liquid/rust/worktree-benchmarking/compiler/rustc_parse/src/parser/expr.rs -------------------------------------------------------------------------------- Ir -- line 72 ---------------------------------------- . } . . impl From> for LhsExpr { . /// Converts `Some(attrs)` into `LhsExpr::AttributesParsed(attrs)` . /// and `None` into `LhsExpr::NotYetParsed`. . /// . /// This conversion does not allocate. . fn from(o: Option) -> Self { 9,645 ( 0.00%) if let Some(attrs) = o { LhsExpr::AttributesParsed(attrs) } else { LhsExpr::NotYetParsed } . } . } . . impl From> for LhsExpr { . /// Converts the `expr: P` into `LhsExpr::AlreadyParsed(expr)`. . /// . /// This conversion does not allocate. . fn from(expr: P) -> Self { -- line 88 ---------------------------------------- -- line 103 ---------------------------------------- . 
pub fn parse_expr_force_collect(&mut self) -> PResult<'a, P> { . self.collect_tokens_no_attrs(|this| this.parse_expr()) . } . . pub fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> { . self.parse_expr().map(|value| AnonConst { id: DUMMY_NODE_ID, value }) . } . 21,992 ( 0.01%) fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P> { 5,498 ( 0.00%) match self.parse_expr() { . Ok(expr) => Ok(expr), . Err(mut err) => match self.token.ident() { . Some((Ident { name: kw::Underscore, .. }, false)) . if self.look_ahead(1, |t| t == &token::Comma) => . { . // Special-case handling of `foo(_, _, _)` . err.emit(); . self.bump(); . Ok(self.mk_expr(self.prev_token.span, ExprKind::Err, AttrVec::new())) . } . _ => Err(err), . }, . } 27,490 ( 0.01%) } . . /// Parses a sequence of expressions delimited by parentheses. . fn parse_paren_expr_seq(&mut self) -> PResult<'a, Vec>> { 38 ( 0.00%) self.parse_paren_comma_seq(|p| p.parse_expr_catch_underscore()).map(|(r, _)| r) . } . . /// Parses an expression, subject to the given restrictions. . #[inline] . pub(super) fn parse_expr_res( . &mut self, . r: Restrictions, . already_parsed_attrs: Option, -- line 138 ---------------------------------------- -- line 144 ---------------------------------------- . /// . /// This parses an expression accounting for associativity and precedence of the operators in . /// the expression. . #[inline] . fn parse_assoc_expr( . &mut self, . already_parsed_attrs: Option, . ) -> PResult<'a, P> { 13,736 ( 0.01%) self.parse_assoc_expr_with(0, already_parsed_attrs.into()) . } . . /// Parses an associative expression with operators of at least `min_prec` precedence. 37,422 ( 0.02%) pub(super) fn parse_assoc_expr_with( . &mut self, . min_prec: usize, . lhs: LhsExpr, . ) -> PResult<'a, P> { 20,793 ( 0.01%) let mut lhs = if let LhsExpr::AlreadyParsed(expr) = lhs { 24 ( 0.00%) expr . } else { 8,304 ( 0.00%) let attrs = match lhs { 8,319 ( 0.00%) LhsExpr::AttributesParsed(attrs) => Some(attrs), . _ => None, . }; 4,152 ( 0.00%) if [token::DotDot, token::DotDotDot, token::DotDotEq].contains(&self.token.kind) { . return self.parse_prefix_range_expr(attrs); . } else { 49,824 ( 0.02%) self.parse_prefix_expr(attrs)? . } . }; 8,316 ( 0.00%) let last_type_ascription_set = self.last_type_ascription.is_some(); . . if !self.should_continue_as_assoc_expr(&lhs) { 3 ( 0.00%) self.last_type_ascription = None; . return Ok(lhs); . } . . self.expected_tokens.push(TokenType::Operator); 160 ( 0.00%) while let Some(op) = self.check_assoc_op() { . // Adjust the span for interpolated LHS to point to the `$lhs` token . // and not to what it refers to. 240 ( 0.00%) let lhs_span = match self.prev_token.kind { . TokenKind::Interpolated(..) => self.prev_token.span, . _ => lhs.span, . }; . 80 ( 0.00%) let cur_op_span = self.token.span; 320 ( 0.00%) let restrictions = if op.node.is_assign_like() { . self.restrictions & Restrictions::NO_STRUCT_LITERAL . } else { . self.restrictions . }; 120 ( 0.00%) let prec = op.node.precedence(); 80 ( 0.00%) if prec < min_prec { . break; . } . // Check for deprecated `...` syntax 112 ( 0.00%) if self.token == token::DotDotDot && op.node == AssocOp::DotDotEq { . self.err_dotdotdot_syntax(self.token.span); . } . 112 ( 0.00%) if self.token == token::LArrow { . self.err_larrow_operator(self.token.span); . } . 28 ( 0.00%) self.bump(); 112 ( 0.00%) if op.node.is_comparison() { . if let Some(expr) = self.check_no_chained_comparison(&lhs, &op)? { . return Ok(expr); . } . } . . 
// Look for JS' `===` and `!==` and recover 30 ( 0.00%) if (op.node == AssocOp::Equal || op.node == AssocOp::NotEqual) 2 ( 0.00%) && self.token.kind == token::Eq . && self.prev_token.span.hi() == self.token.span.lo() . { . let sp = op.span.to(self.token.span); . let sugg = match op.node { . AssocOp::Equal => "==", . AssocOp::NotEqual => "!=", . _ => unreachable!(), . }; -- line 226 ---------------------------------------- -- line 231 ---------------------------------------- . sugg.to_string(), . Applicability::MachineApplicable, . ) . .emit(); . self.bump(); . } . . // Look for PHP's `<>` and recover 3 ( 0.00%) if op.node == AssocOp::Less 2 ( 0.00%) && self.token.kind == token::Gt . && self.prev_token.span.hi() == self.token.span.lo() . { . let sp = op.span.to(self.token.span); . self.struct_span_err(sp, "invalid comparison operator `<>`") . .span_suggestion_short( . sp, . "`<>` is not a valid comparison operator, use `!=`", . "!=".to_string(), . Applicability::MachineApplicable, . ) . .emit(); . self.bump(); . } . . // Look for C++'s `<=>` and recover 20 ( 0.00%) if op.node == AssocOp::LessEqual 20 ( 0.00%) && self.token.kind == token::Gt . && self.prev_token.span.hi() == self.token.span.lo() . { . let sp = op.span.to(self.token.span); . self.struct_span_err(sp, "invalid comparison operator `<=>`") . .span_label( . sp, . "`<=>` is not a valid comparison operator, use `std::cmp::Ordering`", . ) . .emit(); . self.bump(); . } . 56 ( 0.00%) let op = op.node; . // Special cases: . if op == AssocOp::As { . lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Cast)?; . continue; . } else if op == AssocOp::Colon { . lhs = self.parse_assoc_op_ascribe(lhs, lhs_span)?; . continue; . } else if op == AssocOp::DotDot || op == AssocOp::DotDotEq { . // If we didn’t have to handle `x..`/`x..=`, it would be pretty easy to . // generalise it to the Fixity::None code. . lhs = self.parse_range_expr(prec, lhs, op, cur_op_span)?; . break; . } . 28 ( 0.00%) let fixity = op.fixity(); 84 ( 0.00%) let prec_adjustment = match fixity { . Fixity::Right => 0, . Fixity::Left => 1, . // We currently have no non-associative operators that are not handled above by . // the special cases. The code is here only for future convenience. . Fixity::None => 1, . }; 56 ( 0.00%) let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { 140 ( 0.00%) this.parse_assoc_expr_with(prec + prec_adjustment, LhsExpr::NotYetParsed) . })?; . . let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span); 252 ( 0.00%) lhs = match op { . AssocOp::Add . | AssocOp::Subtract . | AssocOp::Multiply . | AssocOp::Divide . | AssocOp::Modulus . | AssocOp::LAnd . | AssocOp::LOr . | AssocOp::BitXor -- line 306 ---------------------------------------- -- line 309 ---------------------------------------- . | AssocOp::ShiftLeft . | AssocOp::ShiftRight . | AssocOp::Equal . | AssocOp::Less . | AssocOp::LessEqual . | AssocOp::NotEqual . | AssocOp::Greater . | AssocOp::GreaterEqual => { 56 ( 0.00%) let ast_op = op.to_ast_binop().unwrap(); . let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs); . self.mk_expr(span, binary, AttrVec::new()) . } . AssocOp::Assign => { . self.mk_expr(span, ExprKind::Assign(lhs, rhs, cur_op_span), AttrVec::new()) . } . AssocOp::AssignOp(k) => { . let aop = match k { -- line 325 ---------------------------------------- -- line 337 ---------------------------------------- . let aopexpr = self.mk_assign_op(source_map::respan(cur_op_span, aop), lhs, rhs); . self.mk_expr(span, aopexpr, AttrVec::new()) . 
} . AssocOp::As | AssocOp::Colon | AssocOp::DotDot | AssocOp::DotDotEq => { . self.span_bug(span, "AssocOp should have been handled by special case") . } . }; . 56 ( 0.00%) if let Fixity::None = fixity { . break; . } . } 4,157 ( 0.00%) if last_type_ascription_set { . self.last_type_ascription = None; . } 8,314 ( 0.00%) Ok(lhs) 70,682 ( 0.03%) } . . fn should_continue_as_assoc_expr(&mut self, lhs: &Expr) -> bool { 29,108 ( 0.01%) match (self.expr_is_complete(lhs), AssocOp::from_token(&self.token)) { . // Semi-statement forms are odd: . // See https://github.com/rust-lang/rust/issues/29071 . (true, None) => false, . (false, _) => true, // Continue parsing the expression. . // An exhaustive check is done in the following block, but these are checked first . // because they *are* ambiguous but also reasonable looking incorrect syntax, so we . // want to keep their span info to improve diagnostics in these cases in a later stage. . (true, Some(AssocOp::Multiply)) | // `{ 42 } *foo = bar;` or `{ 42 } * 3` -- line 364 ---------------------------------------- -- line 401 ---------------------------------------- . err.emit(); . } . . /// Possibly translate the current token to an associative operator. . /// The method does not advance the current token. . /// . /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively. . fn check_assoc_op(&self) -> Option> { 46,075 ( 0.02%) let (op, span) = match (AssocOp::from_token(&self.token), self.token.ident()) { . // When parsing const expressions, stop parsing when encountering `>`. . ( . Some( . AssocOp::ShiftRight . | AssocOp::Greater . | AssocOp::GreaterEqual . | AssocOp::AssignOp(token::BinOpToken::Shr), . ), . _, 3 ( 0.00%) ) if self.restrictions.contains(Restrictions::CONST_EXPR) => { . return None; . } 80 ( 0.00%) (Some(op), _) => (op, self.token.span), . (None, Some((Ident { name: sym::and, span }, false))) => { . self.error_bad_logical_op("and", "&&", "conjunction"); . (AssocOp::LAnd, span) . } . (None, Some((Ident { name: sym::or, span }, false))) => { . self.error_bad_logical_op("or", "||", "disjunction"); . (AssocOp::LOr, span) . } -- line 430 ---------------------------------------- -- line 443 ---------------------------------------- . Applicability::MachineApplicable, . ) . .note("unlike in e.g., python and PHP, `&&` and `||` are used for logical operators") . .emit(); . } . . /// Checks if this expression is a successfully parsed statement. . fn expr_is_complete(&self, e: &Expr) -> bool { 8,320 ( 0.00%) self.restrictions.contains(Restrictions::STMT_EXPR) 49 ( 0.00%) && !classify::expr_requires_semi_to_be_stmt(e) . } . . /// Parses `x..y`, `x..=y`, and `x..`/`x..=`. . /// The other two variants are handled in `parse_prefix_range_expr` below. . fn parse_range_expr( . &mut self, . prec: usize, . lhs: P, -- line 460 ---------------------------------------- -- line 520 ---------------------------------------- . (lo, None) . }; . let range = this.mk_range(None, opt_end, limits); . Ok(this.mk_expr(span, range, attrs.into())) . }) . } . . /// Parses a prefix-unary-operator expr. 29,078 ( 0.01%) fn parse_prefix_expr(&mut self, attrs: Option) -> PResult<'a, P> { 33,232 ( 0.02%) let attrs = self.parse_or_use_outer_attributes(attrs)?; 20,770 ( 0.01%) let lo = self.token.span; . . macro_rules! make_it { . 
($this:ident, $attrs:expr, |this, _| $body:expr) => { 18 ( 0.00%) $this.collect_tokens_for_expr($attrs, |$this, attrs| { 34 ( 0.00%) let (hi, ex) = $body?; 32 ( 0.00%) Ok($this.mk_expr(lo.to(hi), ex, attrs.into())) 20 ( 0.00%) }) . }; . } . . let this = self; . . // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr() 24,936 ( 0.01%) match this.token.uninterpolate().kind { . token::Not => make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Not)), // `!expr` . token::Tilde => make_it!(this, attrs, |this, _| this.recover_tilde_expr(lo)), // `~expr` . token::BinOp(token::Minus) => { . make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Neg)) . } // `-expr` . token::BinOp(token::Star) => { . make_it!(this, attrs, |this, _| this.parse_unary_expr(lo, UnOp::Deref)) . } // `*expr` . token::BinOp(token::And) | token::AndAnd => { 4 ( 0.00%) make_it!(this, attrs, |this, _| this.parse_borrow_expr(lo)) . } . token::BinOp(token::Plus) if this.look_ahead(1, |tok| tok.is_numeric_lit()) => { . let mut err = this.struct_span_err(lo, "leading `+` is not supported"); . err.span_label(lo, "unexpected `+`"); . . // a block on the LHS might have been intended to be an expression instead . if let Some(sp) = this.sess.ambiguous_block_expr_parse.borrow().get(&lo) { . this.sess.expr_parentheses_needed(&mut err, *sp); -- line 562 ---------------------------------------- -- line 568 ---------------------------------------- . Applicability::MachineApplicable, . ); . } . err.emit(); . . this.bump(); . this.parse_prefix_expr(None) . } // `+expr` 104 ( 0.00%) token::Ident(..) if this.token.is_keyword(kw::Box) => { . make_it!(this, attrs, |this, _| this.parse_box_expr(lo)) . } . token::Ident(..) if this.is_mistaken_not_ident_negation() => { . make_it!(this, attrs, |this, _| this.recover_not_expr(lo)) . } . _ => return this.parse_dot_or_call_expr(Some(attrs)), . } 41,540 ( 0.02%) } . . fn parse_prefix_expr_common(&mut self, lo: Span) -> PResult<'a, (Span, P)> { . self.bump(); . let expr = self.parse_prefix_expr(None); . let (span, expr) = self.interpolated_or_expr_span(expr)?; . Ok((lo.to(span), expr)) . } . -- line 592 ---------------------------------------- -- line 619 ---------------------------------------- . fn is_mistaken_not_ident_negation(&self) -> bool { . let token_cannot_continue_expr = |t: &Token| match t.uninterpolate().kind { . // These tokens can start an expression after `!`, but . // can't continue an expression after an ident . token::Ident(name, is_raw) => token::ident_can_begin_expr(name, t.span, is_raw), . token::Literal(..) | token::Pound => true, . _ => t.is_whole_expr(), . }; 104 ( 0.00%) self.token.is_ident_named(sym::not) && self.look_ahead(1, token_cannot_continue_expr) . } . . /// Recover on `not expr` in favor of `!expr`. . fn recover_not_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { . // Emit the error... . let not_token = self.look_ahead(1, |t| t.clone()); . self.struct_span_err( . not_token.span, -- line 635 ---------------------------------------- -- line 651 ---------------------------------------- . . /// Returns the span of expr, if it was not interpolated or the span of the interpolated token. . fn interpolated_or_expr_span( . &self, . expr: PResult<'a, P>, . ) -> PResult<'a, (Span, P)> { . expr.map(|e| { . ( 16,618 ( 0.01%) match self.prev_token.kind { . TokenKind::Interpolated(..) => self.prev_token.span, . _ => e.span, . }, . e, . ) . }) . } . 
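// ---------------------------------------------------------------------
// Editor's sketch (not part of the profiled source): the precedence-
// climbing shape of parse_assoc_expr_with above, reduced to integer
// operands and two operators. All names here are invented for
// illustration; rustc's version additionally threads restrictions,
// attributes, and error recovery through the same loop.
#[derive(Clone, Copy)]
enum Tok { Num(i64), Plus, Star, Eof }

fn prec(t: Tok) -> Option<u8> {
    match t {
        Tok::Plus => Some(1),
        Tok::Star => Some(2),
        _ => None,
    }
}

fn parse_expr(toks: &mut std::slice::Iter<'_, Tok>, min_prec: u8) -> i64 {
    // parse_prefix_expr equivalent: a bare operand.
    let mut lhs = match toks.next() {
        Some(Tok::Num(n)) => *n,
        _ => panic!("expected operand"),
    };
    // check_assoc_op equivalent: peek, and only commit to operators
    // binding at least as tightly as min_prec.
    while let Some(&op) = toks.clone().next() {
        let Some(p) = prec(op) else { break };
        if p < min_prec {
            break;
        }
        toks.next(); // self.bump(): consume the operator
        // Left associativity = recursing with p + 1 (Fixity::Left above).
        let rhs = parse_expr(toks, p + 1);
        lhs = match op {
            Tok::Plus => lhs + rhs,
            Tok::Star => lhs * rhs,
            _ => unreachable!(),
        };
    }
    lhs
}
// E.g. 2 + 3 * 4 parses as 2 + (3 * 4) = 14 and 2 * 3 + 4 as
// (2 * 3) + 4 = 10, because each recursive call only accepts operators
// of strictly higher precedence than the one just consumed.
// ---------------------------------------------------------------------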
-- line 667 ---------------------------------------- -- line 844 ---------------------------------------- . let lhs = self.parse_assoc_op_cast(lhs, lhs_span, ExprKind::Type)?; . self.sess.gated_spans.gate(sym::type_ascription, lhs.span); . Ok(lhs) . } . . /// Parse `& mut? ` or `& raw [ const | mut ] `. . fn parse_borrow_expr(&mut self, lo: Span) -> PResult<'a, (Span, ExprKind)> { . self.expect_and()?; 10 ( 0.00%) let has_lifetime = self.token.is_lifetime() && self.look_ahead(1, |t| t != &token::Colon); . let lifetime = has_lifetime.then(|| self.expect_lifetime()); // For recovery, see below. . let (borrow_kind, mutbl) = self.parse_borrow_modifiers(lo); 8 ( 0.00%) let expr = self.parse_prefix_expr(None); 4 ( 0.00%) let (hi, expr) = self.interpolated_or_expr_span(expr)?; 2 ( 0.00%) let span = lo.to(hi); 4 ( 0.00%) if let Some(lt) = lifetime { . self.error_remove_borrow_lifetime(span, lt.ident.span); . } . Ok((span, ExprKind::AddrOf(borrow_kind, mutbl, expr))) . } . . fn error_remove_borrow_lifetime(&self, span: Span, lt_span: Span) { . self.struct_span_err(span, "borrow expressions cannot be annotated with lifetimes") . .span_label(lt_span, "annotated with lifetime here") -- line 866 ---------------------------------------- -- line 870 ---------------------------------------- . String::new(), . Applicability::MachineApplicable, . ) . .emit(); . } . . /// Parse `mut?` or `raw [ const | mut ]`. . fn parse_borrow_modifiers(&mut self, lo: Span) -> (ast::BorrowKind, ast::Mutability) { 4 ( 0.00%) if self.check_keyword(kw::Raw) && self.look_ahead(1, Token::is_mutability) { . // `raw [ const | mut ]`. . let found_raw = self.eat_keyword(kw::Raw); . assert!(found_raw); . let mutability = self.parse_const_or_mut().unwrap(); . self.sess.gated_spans.gate(sym::raw_ref_op, lo.to(self.prev_token.span)); . (ast::BorrowKind::Raw, mutability) . } else { . // `mut?` -- line 886 ---------------------------------------- -- line 889 ---------------------------------------- . } . . /// Parses `a.b` or `a(13)` or `a[4]` or just `a`. . fn parse_dot_or_call_expr(&mut self, attrs: Option) -> PResult<'a, P> { . let attrs = self.parse_or_use_outer_attributes(attrs)?; . self.collect_tokens_for_expr(attrs, |this, attrs| { . let base = this.parse_bottom_expr(); . let (span, base) = this.interpolated_or_expr_span(base)?; 16,608 ( 0.01%) this.parse_dot_or_call_expr_with(base, span, attrs) . }) . } . . pub(super) fn parse_dot_or_call_expr_with( . &mut self, . e0: P, . lo: Span, . mut attrs: Vec, . ) -> PResult<'a, P> { . // Stitch the list of outer attributes onto the return value. . // A little bit ugly, but the best way given the current code . // structure 29,106 ( 0.01%) self.parse_dot_or_call_expr_with_(e0, lo).map(|expr| { 29,106 ( 0.01%) expr.map(|mut expr| { 4,158 ( 0.00%) attrs.extend::>(expr.attrs.into()); 8,316 ( 0.00%) expr.attrs = attrs.into(); 58,212 ( 0.03%) expr . }) . }) . } . 128,898 ( 0.06%) fn parse_dot_or_call_expr_with_(&mut self, mut e: P, lo: Span) -> PResult<'a, P> { . loop { . if self.eat(&token::Question) { . // `expr?` . e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e), AttrVec::new()); . continue; . } . if self.eat(&token::Dot) { . // expr.f 8 ( 0.00%) e = self.parse_dot_suffix_expr(lo, e)?; . continue; . } 22 ( 0.00%) if self.expr_is_complete(&e) { . return Ok(e); . } 8,360 ( 0.00%) e = match self.token.kind { 12 ( 0.00%) token::OpenDelim(token::Paren) => self.parse_fn_call_expr(lo, e), . 
token::OpenDelim(token::Bracket) => self.parse_index_expr(lo, e)?, 4,157 ( 0.00%) _ => return Ok(e), . } . } 37,422 ( 0.02%) } . . fn look_ahead_type_ascription_as_field(&mut self) -> bool { 26 ( 0.00%) self.look_ahead(1, |t| t.is_ident()) . && self.look_ahead(2, |t| t == &token::Colon) . && self.look_ahead(3, |t| t.can_begin_expr()) . } . . fn parse_dot_suffix_expr(&mut self, lo: Span, base: P) -> PResult<'a, P> { 22 ( 0.00%) match self.token.uninterpolate().kind { . token::Ident(..) => self.parse_dot_suffix(base, lo), . token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => { . Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix, None)) . } . token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => { . Ok(self.parse_tuple_field_access_expr_float(lo, base, symbol, suffix)) . } . _ => { -- line 957 ---------------------------------------- -- line 1091 ---------------------------------------- . let span = self.prev_token.span; . let field = ExprKind::Field(base, Ident::new(field, span)); . self.expect_no_suffix(span, "a tuple index", suffix); . self.mk_expr(lo.to(span), field, AttrVec::new()) . } . . /// Parse a function call expression, `expr(...)`. . fn parse_fn_call_expr(&mut self, lo: Span, fun: P) -> P { 20 ( 0.00%) let snapshot = if self.token.kind == token::OpenDelim(token::Paren) . && self.look_ahead_type_ascription_as_field() . { . Some((self.clone(), fun.kind.clone())) . } else { 12 ( 0.00%) None . }; 4 ( 0.00%) let open_paren = self.token.span; . 8 ( 0.00%) let mut seq = self.parse_paren_expr_seq().map(|args| { 12 ( 0.00%) self.mk_expr(lo.to(self.prev_token.span), self.mk_call(fun, args), AttrVec::new()) . }); . if let Some(expr) = . self.maybe_recover_struct_lit_bad_delims(lo, open_paren, &mut seq, snapshot) . { . return expr; . } 20 ( 0.00%) self.recover_seq_parse_error(token::Paren, lo, seq) . } . . /// If we encounter a parser state that looks like the user has written a `struct` literal with . /// parentheses instead of braces, recover the parser state and provide suggestions. . #[instrument(skip(self, seq, snapshot), level = "trace")] . fn maybe_recover_struct_lit_bad_delims( . &mut self, . lo: Span, . open_paren: Span, . seq: &mut PResult<'a, P>, . snapshot: Option<(Self, ExprKind)>, . ) -> Option> { 40 ( 0.00%) match (seq.as_mut(), snapshot) { . (Err(ref mut err), Some((mut snapshot, ExprKind::Path(None, path)))) => { . let name = pprust::path_to_string(&path); . snapshot.bump(); // `(` . match snapshot.parse_struct_fields(path, false, token::Paren) { . Ok((fields, ..)) if snapshot.eat(&token::CloseDelim(token::Paren)) => { . // We are certain we have `Enum::Foo(a: 3, b: 4)`, suggest . // `Enum::Foo { a: 3, b: 4 }` or `Enum::Foo(3, 4)`. . *self = snapshot; -- line 1137 ---------------------------------------- -- line 1165 ---------------------------------------- . } . return Some(self.mk_expr_err(span)); . } . Ok(_) => {} . Err(mut err) => err.emit(), . } . } . _ => {} 12 ( 0.00%) } . None . } . . /// Parse an indexing expression `expr[...]`. . fn parse_index_expr(&mut self, lo: Span, base: P) -> PResult<'a, P> { . self.bump(); // `[` . let index = self.parse_expr()?; . self.expect(&token::CloseDelim(token::Bracket))?; . Ok(self.mk_expr(lo.to(self.prev_token.span), self.mk_index(base, index), AttrVec::new())) . } . . /// Assuming we have just parsed `.`, continue parsing into an expression. . 
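// ---------------------------------------------------------------------
// Editor's sketch (not part of the profiled source): the left-to-right
// folding performed by the postfix loop in parse_dot_or_call_expr_with_
// above, so that a.b(c)?[d] becomes one nested expression. The Expr and
// Postfix types here are invented stand-ins for rustc's AST.
enum Expr {
    Var(String),
    Try(Box<Expr>),
    Field(Box<Expr>, String),
    Call(Box<Expr>, Vec<Expr>),
    Index(Box<Expr>, Box<Expr>),
}

enum Postfix { Try, Field(String), Call(Vec<Expr>), Index(Expr) }

fn apply_postfix(mut e: Expr, suffixes: Vec<Postfix>) -> Expr {
    for s in suffixes {
        // Each suffix wraps everything parsed so far, exactly like the
        // `e = match self.token.kind { ... }` loop above.
        e = match s {
            Postfix::Try => Expr::Try(Box::new(e)),
            Postfix::Field(name) => Expr::Field(Box::new(e), name),
            Postfix::Call(args) => Expr::Call(Box::new(e), args),
            Postfix::Index(i) => Expr::Index(Box::new(e), Box::new(i)),
        };
    }
    e
}
// ---------------------------------------------------------------------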
fn parse_dot_suffix(&mut self, self_arg: P, lo: Span) -> PResult<'a, P> { 4 ( 0.00%) if self.token.uninterpolated_span().rust_2018() && self.eat_keyword(kw::Await) { . return Ok(self.mk_await_expr(self_arg, lo)); . } . 2 ( 0.00%) let fn_span_lo = self.token.span; 12 ( 0.00%) let mut segment = self.parse_path_segment(PathStyle::Expr, None)?; 10 ( 0.00%) self.check_trailing_angle_brackets(&segment, &[&token::OpenDelim(token::Paren)]); . self.check_turbofish_missing_angle_brackets(&mut segment); . 10 ( 0.00%) if self.check(&token::OpenDelim(token::Paren)) { . // Method call `expr.f()` 2 ( 0.00%) let mut args = self.parse_paren_expr_seq()?; 8 ( 0.00%) args.insert(0, self_arg); . 6 ( 0.00%) let fn_span = fn_span_lo.to(self.prev_token.span); 6 ( 0.00%) let span = lo.to(self.prev_token.span); 10 ( 0.00%) Ok(self.mk_expr(span, ExprKind::MethodCall(segment, args, fn_span), AttrVec::new())) . } else { . // Field access `expr.f` . if let Some(args) = segment.args { . self.struct_span_err( . args.span(), . "field expressions cannot have generic arguments", . ) . .emit(); -- line 1211 ---------------------------------------- -- line 1217 ---------------------------------------- . } . . /// At the bottom (top?) of the precedence hierarchy, . /// Parses things like parenthesized exprs, macros, `return`, etc. . /// . /// N.B., this does not parse outer attributes, and is private because it only works . /// correctly if called from `parse_dot_or_call_expr()`. . fn parse_bottom_expr(&mut self) -> PResult<'a, P> { 9,688 ( 0.00%) maybe_recover_from_interpolated_ty_qpath!(self, true); 12,456 ( 0.01%) maybe_whole_expr!(self); . . // Outer attributes are already parsed and will be . // added to the return value after the fact. . // . // Therefore, prevent sub-parser from parsing . // attributes by giving them an empty "already-parsed" list. 4,152 ( 0.00%) let attrs = AttrVec::new(); . . // Note: when adding new syntax here, don't forget to adjust `TokenKind::can_begin_expr()`. 1,404 ( 0.00%) let lo = self.token.span; 12,456 ( 0.01%) if let token::Literal(_) = self.token.kind { . // This match arm is a special-case of the `_` match arm below and . // could be removed without changing functionality, but it's faster . // to have it here, especially for programs with large constants. 10,992 ( 0.01%) self.parse_lit_expr(attrs) 7,020 ( 0.00%) } else if self.check(&token::OpenDelim(token::Paren)) { 4,122 ( 0.00%) self.parse_tuple_parens_expr(attrs) 150 ( 0.00%) } else if self.check(&token::OpenDelim(token::Brace)) { 8 ( 0.00%) self.parse_block_expr(None, lo, BlockCheckMode::Default, attrs) 285 ( 0.00%) } else if self.check(&token::BinOp(token::Or)) || self.check(&token::OrOr) { 4 ( 0.00%) self.parse_closure_expr(attrs) 140 ( 0.00%) } else if self.check(&token::OpenDelim(token::Bracket)) { 10 ( 0.00%) self.parse_array_or_repeat_expr(attrs, token::Bracket) 104 ( 0.00%) } else if self.check_path() { 100 ( 0.00%) self.parse_path_start_expr(attrs) 10 ( 0.00%) } else if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) { . self.parse_closure_expr(attrs) 5 ( 0.00%) } else if self.eat_keyword(kw::If) { 4 ( 0.00%) self.parse_if_expr(attrs) . } else if self.check_keyword(kw::For) { . if self.choose_generics_over_qpath(1) { . // NOTE(Centril, eddyb): DO NOT REMOVE! Beyond providing parser recovery, . // this is an insurance policy in case we allow qpaths in (tuple-)struct patterns. . // When `for ::Proj in $expr $block` is wanted, . // you can disambiguate in favor of a pattern with `(...)`. . 
self.recover_quantified_closure_expr(attrs) . } else { -- line 1263 ---------------------------------------- -- line 1325 ---------------------------------------- . } else { . self.parse_lit_expr(attrs) . } . } else { . self.parse_lit_expr(attrs) . } . } . 27,480 ( 0.01%) fn parse_lit_expr(&mut self, attrs: AttrVec) -> PResult<'a, P> { 5,496 ( 0.00%) let lo = self.token.span; 10,992 ( 0.01%) match self.parse_opt_lit() { 16,488 ( 0.01%) Some(literal) => { 24,732 ( 0.01%) let expr = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Lit(literal), attrs); 27,480 ( 0.01%) self.maybe_recover_from_bad_qpath(expr, true) . } . None => self.try_macro_suggestion(), . } . } . . fn parse_tuple_parens_expr(&mut self, attrs: AttrVec) -> PResult<'a, P> { 2,748 ( 0.00%) let lo = self.token.span; 4,122 ( 0.00%) self.expect(&token::OpenDelim(token::Paren))?; . let (es, trailing_comma) = match self.parse_seq_to_end( . &token::CloseDelim(token::Paren), . SeqSep::trailing_allowed(token::Comma), 5,484 ( 0.00%) |p| p.parse_expr_catch_underscore(), . ) { . Ok(x) => x, . Err(err) => return Ok(self.recover_seq_parse_error(token::Paren, lo, Err(err))), . }; 6,870 ( 0.00%) let kind = if es.len() == 1 && !trailing_comma { . // `(e)` is parenthesized `e`. 21 ( 0.00%) ExprKind::Paren(es.into_iter().next().unwrap()) . } else { . // `(e,)` is a tuple with only one field, `e`. 6,835 ( 0.00%) ExprKind::Tup(es) . }; 4,122 ( 0.00%) let expr = self.mk_expr(lo.to(self.prev_token.span), kind, attrs); 4,122 ( 0.00%) self.maybe_recover_from_bad_qpath(expr, true) . } . 22 ( 0.00%) fn parse_array_or_repeat_expr( . &mut self, . attrs: AttrVec, . close_delim: token::DelimToken, . ) -> PResult<'a, P> { 6 ( 0.00%) let lo = self.token.span; 2 ( 0.00%) self.bump(); // `[` or other open delim . 6 ( 0.00%) let close = &token::CloseDelim(close_delim); . let kind = if self.eat(close) { . // Empty vector . ExprKind::Array(Vec::new()) . } else { . // Non-empty vector 4 ( 0.00%) let first_expr = self.parse_expr()?; . if self.eat(&token::Semi) { . // Repeating array syntax: `[ 0; 512 ]` . let count = self.parse_anon_const_expr()?; . self.expect(close)?; . ExprKind::Repeat(first_expr, count) . } else if self.eat(&token::Comma) { . // Vector with two or more elements. . let sep = SeqSep::trailing_allowed(token::Comma); . let (remaining_exprs, _) = self.parse_seq_to_end(close, sep, |p| p.parse_expr())?; 2 ( 0.00%) let mut exprs = vec![first_expr]; . exprs.extend(remaining_exprs); 10 ( 0.00%) ExprKind::Array(exprs) . } else { . // Vector with one element . self.expect(close)?; . ExprKind::Array(vec![first_expr]) . } . }; 6 ( 0.00%) let expr = self.mk_expr(lo.to(self.prev_token.span), kind, attrs); 6 ( 0.00%) self.maybe_recover_from_bad_qpath(expr, true) 20 ( 0.00%) } . 250 ( 0.00%) fn parse_path_start_expr(&mut self, attrs: AttrVec) -> PResult<'a, P> { 200 ( 0.00%) let (qself, path) = if self.eat_lt() { . let (qself, path) = self.parse_qpath(PathStyle::Expr)?; . (Some(qself), path) . } else { 50 ( 0.00%) (None, self.parse_path(PathStyle::Expr)?) . }; 25 ( 0.00%) let lo = path.span; . . // `!`, as an operator, is prefix, so we know this isn't that. 375 ( 0.00%) let (hi, kind) = if self.eat(&token::Not) { . // MACRO INVOCATION expression . if qself.is_some() { . self.struct_span_err(path.span, "macros cannot use qualified paths").emit(); . } . let mac = MacCall { . path, . args: self.parse_mac_args()?, . prior_type_ascription: self.last_type_ascription, . }; . 
(self.prev_token.span, ExprKind::MacCall(mac)) 125 ( 0.00%) } else if self.check(&token::OpenDelim(token::Brace)) { . if let Some(expr) = self.maybe_parse_struct_expr(qself.as_ref(), &path, &attrs) { . if qself.is_some() { . self.sess.gated_spans.gate(sym::more_qualified_paths, path.span); . } . return expr; . } else { . (path.span, ExprKind::Path(qself, path)) . } . } else { . (path.span, ExprKind::Path(qself, path)) . }; . 50 ( 0.00%) let expr = self.mk_expr(lo.to(hi), kind, attrs); 75 ( 0.00%) self.maybe_recover_from_bad_qpath(expr, true) 300 ( 0.00%) } . . /// Parse `'label: $expr`. The label is already parsed. . fn parse_labeled_expr( . &mut self, . label: Label, . attrs: AttrVec, . mut consume_colon: bool, . ) -> PResult<'a, P> { -- line 1447 ---------------------------------------- -- line 1607 ---------------------------------------- . symbol_unescaped, . }), . _ => Err(Some(lit)), . }, . None => Err(None), . } . } . 144 ( 0.00%) pub(super) fn parse_lit(&mut self) -> PResult<'a, Lit> { 16 ( 0.00%) self.parse_opt_lit().ok_or_else(|| { 20 ( 0.00%) if let token::Interpolated(inner) = &self.token.kind { . let expr = match inner.as_ref() { . token::NtExpr(expr) => Some(expr), . token::NtLiteral(expr) => Some(expr), . _ => None, . }; . if let Some(expr) = expr { . if matches!(expr.kind, ExprKind::Err) { . self.diagnostic() . .delay_span_bug(self.token.span, &"invalid interpolated expression"); . return self.diagnostic().struct_dummy(); . } . } . } 110 ( 0.00%) let msg = format!("unexpected token: {}", super::token_descr(&self.token)); 10 ( 0.00%) self.struct_span_err(self.token.span, &msg) . }) 112 ( 0.00%) } . . /// Matches `lit = true | false | token_lit`. . /// Returns `None` if the next token is not a literal. 24,876 ( 0.01%) pub(super) fn parse_opt_lit(&mut self) -> Option { 2,764 ( 0.00%) let mut recovered = None; 16,584 ( 0.01%) if self.token == token::Dot { . // Attempt to recover `.4` as `0.4`. We don't currently have any syntax where . // dot would follow an optional literal, so we do this unconditionally. . recovered = self.look_ahead(1, |next_token| { . if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = . next_token.kind . { . if self.token.span.hi() == next_token.span.lo() { . let s = String::from("0.") + symbol.as_str(); -- line 1648 ---------------------------------------- -- line 1654 ---------------------------------------- . }); . if let Some(token) = &recovered { . self.bump(); . self.error_float_lits_must_have_int_part(&token); . } . } . . let token = recovered.as_ref().unwrap_or(&self.token); 8,322 ( 0.00%) match Lit::from_token(token) { 16,524 ( 0.01%) Ok(lit) => { 5,508 ( 0.00%) self.bump(); 16,524 ( 0.01%) Some(lit) . } 10 ( 0.00%) Err(LitError::NotLiteral) => None, . Err(err) => { . let span = token.span; . let lit = match token.kind { . token::Literal(lit) => lit, . _ => unreachable!(), . }; . self.bump(); . self.report_lit_error(err, lit, span); -- line 1675 ---------------------------------------- -- line 1676 ---------------------------------------- . // Pack possible quotes and prefixes from the original literal into . // the error literal's symbol so they can be pretty-printed faithfully. . let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None); . let symbol = Symbol::intern(&suffixless_lit.to_string()); . let lit = token::Lit::new(token::Err, symbol, lit.suffix); . Some(Lit::from_lit_token(lit, span).unwrap_or_else(|_| unreachable!())) . } . } 22,112 ( 0.01%) } . . 
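// ---------------------------------------------------------------------
// Editor's sketch (not part of the profiled source): the commit-on-
// success pattern parse_opt_lit above relies on. The current token is
// inspected first, and the parser only advances when a literal is
// actually there, so a None return lets callers try other grammar
// productions from the same position. Token and parser types here are
// invented for illustration.
#[derive(Clone, Copy)]
enum Token { Int(i64), True, False, Other }

struct Parser { toks: Vec<Token>, pos: usize }

impl Parser {
    fn token(&self) -> Token { self.toks[self.pos] }
    fn bump(&mut self) { self.pos += 1; }

    fn parse_opt_lit(&mut self) -> Option<i64> {
        let lit = match self.token() {
            Token::Int(n) => n,
            Token::True => 1,
            Token::False => 0,
            Token::Other => return None, // leave the token for other rules
        };
        self.bump(); // consume only once we know it is a literal
        Some(lit)
    }
}
// ---------------------------------------------------------------------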
fn error_float_lits_must_have_int_part(&self, token: &Token) { . self.struct_span_err(token.span, "float literals must have an integer part") . .span_suggestion( . token.span, . "must have an integer part", . pprust::token_to_string(token).into(), . Applicability::MachineApplicable, -- line 1692 ---------------------------------------- -- line 1815 ---------------------------------------- . AttrVec::new(), . )) . } else { . Ok(expr) . } . } . . fn is_array_like_block(&mut self) -> bool { 3 ( 0.00%) self.look_ahead(1, |t| matches!(t.kind, TokenKind::Ident(..) | TokenKind::Literal(_))) . && self.look_ahead(2, |t| t == &token::Comma) . && self.look_ahead(3, |t| t.can_begin_expr()) . } . . /// Emits a suggestion if it looks like the user meant an array but . /// accidentally used braces, causing the code to be interpreted as a block . /// expression. . fn maybe_suggest_brackets_instead_of_braces( -- line 1831 ---------------------------------------- -- line 1855 ---------------------------------------- . Err(mut e) => { . e.cancel(); . None . } . } . } . . /// Parses a block or unsafe block. 14 ( 0.00%) pub(super) fn parse_block_expr( . &mut self, . opt_label: Option