Rollup of 8 pull requests #118732

Merged: 19 commits, Dec 8, 2023

Commits

47e6e5e
coverage: Avoid unnecessary macros in unit tests
Zalathar Dec 7, 2023
aa58ccb
Tell MirUsedCollector that the pointer alignment checks calls its pan…
saethlin Dec 7, 2023
ec0110b
coverage: Merge refined spans in a separate final pass
Zalathar Dec 7, 2023
9089d28
coverage: Inline `push_refined_span`
Zalathar Dec 7, 2023
9a43215
coverage: Simplify code that pushes to `refined_spans`
Zalathar Dec 7, 2023
bf681dc
Fix display of features in rustdoc
GuillaumeGomez Dec 6, 2023
9e1797b
Extend GUI tests for `doc_cfg`
GuillaumeGomez Dec 6, 2023
c3bb1b5
Elaborate on ip_addr bit conversion endianness
CLEckhardt Dec 1, 2023
6a0a89a
rustdoc: remove unused parameter `reversed` from onEach(Lazy)
notriddle Dec 7, 2023
88fccc4
OnceLock: Rework example, statics aren't dropped
ianrrees Dec 4, 2023
dabedb7
1. fix jobserver GLOBAL_CLIENT_CHECKED uninitialized before use
oksbsb Dec 7, 2023
992f7ee
Rollup merge of #118505 - CLEckhardt:update_ip_addr_bits_docs, r=cuviper
matthiaskrgr Dec 8, 2023
982a238
Rollup merge of #118581 - ianrrees:add-drop-note-to-once_lock, r=work…
matthiaskrgr Dec 8, 2023
fa724cc
Rollup merge of #118677 - GuillaumeGomez:doc_cfg-display, r=notriddle
matthiaskrgr Dec 8, 2023
0c121b5
Rollup merge of #118690 - Zalathar:test-macros, r=cjgillot
matthiaskrgr Dec 8, 2023
646d627
Rollup merge of #118693 - saethlin:alignment-check-symbol-reachable, …
matthiaskrgr Dec 8, 2023
f7c892e
Rollup merge of #118695 - Zalathar:push-refined, r=davidtwco
matthiaskrgr Dec 8, 2023
beabb5e
Rollup merge of #118709 - oksbsb:fix-job-server, r=SparrowLii
matthiaskrgr Dec 8, 2023
9dd34d5
Rollup merge of #118722 - notriddle:notriddle/dom-opt-3, r=GuillaumeG…
matthiaskrgr Dec 8, 2023

Files changed

5 changes: 0 additions & 5 deletions Cargo.lock
@@ -795,10 +795,6 @@ dependencies = [
"rustc-demangle",
]

[[package]]
name = "coverage_test_macros"
version = "0.0.0"

[[package]]
name = "cpufeatures"
version = "0.2.8"
@@ -4266,7 +4262,6 @@ dependencies = [
name = "rustc_mir_transform"
version = "0.0.0"
dependencies = [
"coverage_test_macros",
"either",
"itertools",
"rustc_arena",
2 changes: 1 addition & 1 deletion compiler/rustc_data_structures/src/jobserver.rs
@@ -52,7 +52,7 @@ fn default_client() -> Client {

static GLOBAL_CLIENT_CHECKED: OnceLock<Client> = OnceLock::new();

pub fn check(report_warning: impl FnOnce(&'static str)) {
pub fn initialize_checked(report_warning: impl FnOnce(&'static str)) {
let client_checked = match &*GLOBAL_CLIENT {
Ok(client) => client.clone(),
Err(e) => {
4 changes: 4 additions & 0 deletions compiler/rustc_interface/src/interface.rs
@@ -316,6 +316,10 @@ pub fn run_compiler<R: Send>(config: Config, f: impl FnOnce(&Compiler) -> R + Se
// Set parallel mode before thread pool creation, which will create `Lock`s.
rustc_data_structures::sync::set_dyn_thread_safe_mode(config.opts.unstable_opts.threads > 1);

// Check the jobserver before run_in_thread_pool_with_globals, which calls jobserver::acquire_thread.
let early_handler = EarlyErrorHandler::new(config.opts.error_format);
early_handler.initialize_checked_jobserver();

util::run_in_thread_pool_with_globals(
config.opts.edition,
config.opts.unstable_opts.threads,
2 changes: 2 additions & 0 deletions compiler/rustc_interface/src/tests.rs
@@ -27,6 +27,8 @@ use std::sync::Arc;

fn mk_session(matches: getopts::Matches) -> (Session, Cfg) {
let mut early_handler = EarlyErrorHandler::new(ErrorOutputType::default());
early_handler.initialize_checked_jobserver();

let registry = registry::Registry::new(&[]);
let sessopts = build_session_options(&mut early_handler, &matches);
let temps_dir = sessopts.unstable_opts.temps_dir.as_deref().map(PathBuf::from);
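
Taken together, the jobserver hunks above rename `check` to `initialize_checked` and call it explicitly (via the new `early_handler.initialize_checked_jobserver()` calls) before the thread pool is created in `run_compiler` and before `build_session_options` runs in the test harness, so the `GLOBAL_CLIENT_CHECKED` `OnceLock` is populated before anything reads it. A minimal, self-contained sketch of that init-before-use pattern; the `Client` type and token logic here are invented for illustration and are not the actual rustc jobserver API:

```rust
use std::sync::OnceLock;

// Illustrative stand-in for the checked jobserver client (not the real type).
#[derive(Clone, Debug)]
struct Client {
    tokens: usize,
}

static GLOBAL_CLIENT_CHECKED: OnceLock<Client> = OnceLock::new();

/// Must run once, early, before any worker threads might read the client.
fn initialize_checked(report_warning: impl FnOnce(&'static str)) {
    // Pretend this value came from the inherited jobserver environment.
    let client = Client { tokens: 4 };
    if client.tokens == 0 {
        report_warning("jobserver reported zero tokens; builds may stall");
    }
    // `set` fails only if something already initialized the lock; ignore that.
    let _ = GLOBAL_CLIENT_CHECKED.set(client);
}

/// Later callers assume initialization has already happened.
fn client_checked() -> &'static Client {
    GLOBAL_CLIENT_CHECKED
        .get()
        .expect("initialize_checked() must run before client_checked()")
}

fn main() {
    initialize_checked(|msg| eprintln!("warning: {msg}"));
    // Thread-pool creation and per-thread acquire/release calls would happen
    // after this point, once the global client is known to exist.
    println!("{:?}", client_checked());
}
```

Making the call-before-use requirement part of the function's name, and invoking it from one early, well-known place, is what the new comment in interface.rs points at.
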
5 changes: 0 additions & 5 deletions compiler/rustc_mir_transform/Cargo.toml
@@ -27,8 +27,3 @@ rustc_trait_selection = { path = "../rustc_trait_selection" }
smallvec = { version = "1.8.1", features = ["union", "may_dangle"] }
tracing = "0.1"
# tidy-alphabetical-end

[dev-dependencies]
# tidy-alphabetical-start
coverage_test_macros = { path = "src/coverage/test_macros" }
# tidy-alphabetical-end
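
This dev-dependency removal (and the matching Cargo.lock change above) goes with the "coverage: Avoid unnecessary macros in unit tests" commit: the small `coverage_test_macros` helper crate is dropped and the unit tests use plain Rust instead. The deleted macro itself is not shown in this excerpt, so the following is only a generic, hypothetical sketch of the pattern of replacing a trivial helper macro with an ordinary function (all names invented):

```rust
// Hypothetical example, not the actual rustc test code: a helper macro that
// only wraps a constructor call can usually be an ordinary function instead.
#[derive(Debug, PartialEq)]
struct BlockId(u32);

// Before: a macro expands to the wrapper call at each use site.
macro_rules! block_id {
    ($n:expr) => {
        BlockId($n)
    };
}

// After: a plain function does the same job, with normal type checking and
// without a separate macro (or proc-macro crate) to maintain.
fn block_id(n: u32) -> BlockId {
    BlockId(n)
}

fn main() {
    assert_eq!(block_id!(7), block_id(7));
}
```
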
87 changes: 37 additions & 50 deletions compiler/rustc_mir_transform/src/coverage/spans.rs
@@ -89,10 +89,10 @@ impl CoverageSpan {
}
}

pub fn merge_from(&mut self, mut other: CoverageSpan) {
debug_assert!(self.is_mergeable(&other));
pub fn merge_from(&mut self, other: &Self) {
debug_assert!(self.is_mergeable(other));
self.span = self.span.to(other.span);
self.merged_spans.append(&mut other.merged_spans);
self.merged_spans.extend_from_slice(&other.merged_spans);
}

pub fn cutoff_statements_at(&mut self, cutoff_pos: BytePos) {
@@ -267,15 +267,15 @@ impl<'a> CoverageSpansGenerator<'a> {
if curr.is_mergeable(prev) {
debug!(" same bcb (and neither is a closure), merge with prev={prev:?}");
let prev = self.take_prev();
self.curr_mut().merge_from(prev);
self.curr_mut().merge_from(&prev);
self.maybe_push_macro_name_span();
// Note that curr.span may now differ from curr_original_span
} else if prev.span.hi() <= curr.span.lo() {
debug!(
" different bcbs and disjoint spans, so keep curr for next iter, and add prev={prev:?}",
);
let prev = self.take_prev();
self.push_refined_span(prev);
self.refined_spans.push(prev);
self.maybe_push_macro_name_span();
} else if prev.is_closure {
// drop any equal or overlapping span (`curr`) and keep `prev` to test again in the
@@ -322,11 +322,10 @@ impl<'a> CoverageSpansGenerator<'a> {
let prev = self.take_prev();
debug!(" AT END, adding last prev={prev:?}");

// Take `pending_dups` so that we can drain it while calling self methods.
// It is never used as a field after this point.
for dup in std::mem::take(&mut self.pending_dups) {
// Drain any remaining dups into the output.
for dup in self.pending_dups.drain(..) {
debug!(" ...adding at least one pending dup={:?}", dup);
self.push_refined_span(dup);
self.refined_spans.push(dup);
}

// Async functions wrap a closure that implements the body to be executed. The enclosing
@@ -343,28 +342,27 @@
};

if !body_ends_with_closure {
self.push_refined_span(prev);
self.refined_spans.push(prev);
}

// Do one last merge pass, to simplify the output.
self.refined_spans.dedup_by(|b, a| {
if a.is_mergeable(b) {
debug!(?a, ?b, "merging list-adjacent refined spans");
a.merge_from(b);
true
} else {
false
}
});

// Remove `CoverageSpan`s derived from closures, originally added to ensure the coverage
// regions for the current function leave room for the closure's own coverage regions
// (injected separately, from the closure's own MIR).
self.refined_spans.retain(|covspan| !covspan.is_closure);
self.refined_spans
}

fn push_refined_span(&mut self, covspan: CoverageSpan) {
if let Some(last) = self.refined_spans.last_mut()
&& last.is_mergeable(&covspan)
{
// Instead of pushing the new span, merge it with the last refined span.
debug!(?last, ?covspan, "merging new refined span with last refined span");
last.merge_from(covspan);
} else {
self.refined_spans.push(covspan);
}
}

/// If `curr` is part of a new macro expansion, carve out and push a separate
/// span that ends just after the macro name and its subsequent `!`.
fn maybe_push_macro_name_span(&mut self) {
@@ -397,7 +395,7 @@ impl<'a> CoverageSpansGenerator<'a> {
" and curr starts a new macro expansion, so add a new span just for \
the macro `{visible_macro}!`, new span={macro_name_cov:?}",
);
self.push_refined_span(macro_name_cov);
self.refined_spans.push(macro_name_cov);
}

fn curr(&self) -> &CoverageSpan {
@@ -454,19 +452,14 @@ impl<'a> CoverageSpansGenerator<'a> {
previous iteration, or prev started a new disjoint span"
);
if last_dup.span.hi() <= self.curr().span.lo() {
// Temporarily steal `pending_dups` into a local, so that we can
// drain it while calling other self methods.
let mut pending_dups = std::mem::take(&mut self.pending_dups);
for dup in pending_dups.drain(..) {
for dup in self.pending_dups.drain(..) {
debug!(" ...adding at least one pending={:?}", dup);
self.push_refined_span(dup);
self.refined_spans.push(dup);
}
// The list of dups is now empty, but we can recycle its capacity.
assert!(pending_dups.is_empty() && self.pending_dups.is_empty());
self.pending_dups = pending_dups;
} else {
self.pending_dups.clear();
}
assert!(self.pending_dups.is_empty());
}

/// Advance `prev` to `curr` (if any), and `curr` to the next `CoverageSpan` in sorted order.
@@ -513,22 +506,18 @@ impl<'a> CoverageSpansGenerator<'a> {
let has_pre_closure_span = prev.span.lo() < right_cutoff;
let has_post_closure_span = prev.span.hi() > right_cutoff;

// Temporarily steal `pending_dups` into a local, so that we can
// mutate and/or drain it while calling other self methods.
let mut pending_dups = std::mem::take(&mut self.pending_dups);

if has_pre_closure_span {
let mut pre_closure = self.prev().clone();
pre_closure.span = pre_closure.span.with_hi(left_cutoff);
debug!(" prev overlaps a closure. Adding span for pre_closure={:?}", pre_closure);
if !pending_dups.is_empty() {
for mut dup in pending_dups.iter().cloned() {
dup.span = dup.span.with_hi(left_cutoff);
debug!(" ...and at least one pre_closure dup={:?}", dup);
self.push_refined_span(dup);
}

for mut dup in self.pending_dups.iter().cloned() {
dup.span = dup.span.with_hi(left_cutoff);
debug!(" ...and at least one pre_closure dup={:?}", dup);
self.refined_spans.push(dup);
}
self.push_refined_span(pre_closure);

self.refined_spans.push(pre_closure);
}

if has_post_closure_span {
@@ -537,19 +526,17 @@ impl<'a> CoverageSpansGenerator<'a> {
// about how the `CoverageSpan`s are ordered.)
self.prev_mut().span = self.prev().span.with_lo(right_cutoff);
debug!(" Mutated prev.span to start after the closure. prev={:?}", self.prev());
for dup in pending_dups.iter_mut() {

for dup in &mut self.pending_dups {
debug!(" ...and at least one overlapping dup={:?}", dup);
dup.span = dup.span.with_lo(right_cutoff);
}

let closure_covspan = self.take_curr(); // Prevent this curr from becoming prev.
self.push_refined_span(closure_covspan); // since self.prev() was already updated
self.refined_spans.push(closure_covspan); // since self.prev() was already updated
} else {
pending_dups.clear();
self.pending_dups.clear();
}

// Restore the modified post-closure spans, or the empty vector's capacity.
assert!(self.pending_dups.is_empty());
self.pending_dups = pending_dups;
}

/// Called if `curr.span` equals `prev_original_span` (and potentially equal to all
@@ -645,7 +632,7 @@ impl<'a> CoverageSpansGenerator<'a> {
} else {
debug!(" ... adding modified prev={:?}", self.prev());
let prev = self.take_prev();
self.push_refined_span(prev);
self.refined_spans.push(prev);
}
} else {
// with `pending_dups`, `prev` cannot have any statements that don't overlap
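
The net effect of the spans.rs changes: instead of merging each new span into the previous one inside a `push_refined_span` helper (now deleted), spans are pushed directly onto `refined_spans`, and one final pass merges list-adjacent mergeable spans with `Vec::dedup_by`. A small standalone sketch of that final merge pass, using a simplified span type rather than the real `CoverageSpan`:

```rust
// Simplified stand-in for `CoverageSpan`: a byte range plus a group key.
// Two spans are considered mergeable when they share the same group.
#[derive(Clone, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
    group: u32,
}

impl Span {
    fn is_mergeable(&self, other: &Self) -> bool {
        self.group == other.group
    }

    fn merge_from(&mut self, other: &Self) {
        debug_assert!(self.is_mergeable(other));
        self.lo = self.lo.min(other.lo);
        self.hi = self.hi.max(other.hi);
    }
}

fn merge_adjacent(refined_spans: &mut Vec<Span>) {
    // `dedup_by` hands each adjacent pair to the closure as (later, earlier)
    // and removes the *later* element when the closure returns true, so we
    // fold it into the earlier element before letting it be dropped.
    refined_spans.dedup_by(|later, earlier| {
        if earlier.is_mergeable(later) {
            earlier.merge_from(later);
            true
        } else {
            false
        }
    });
}

fn main() {
    let mut spans = vec![
        Span { lo: 0, hi: 4, group: 1 },
        Span { lo: 4, hi: 9, group: 1 },  // merges into the span before it
        Span { lo: 9, hi: 12, group: 2 }, // different group, kept separate
    ];
    merge_adjacent(&mut spans);
    assert_eq!(
        spans,
        vec![Span { lo: 0, hi: 9, group: 1 }, Span { lo: 9, hi: 12, group: 2 }]
    );
}
```

A side benefit visible elsewhere in the diff: once pushes no longer go through a `&mut self` helper method, `pending_dups` can simply be drained in place with `drain(..)` instead of being temporarily stolen with `std::mem::take`.
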

This file was deleted.

This file was deleted.
