diff --git a/library/alloc/src/vec/mod.rs b/library/alloc/src/vec/mod.rs
index b59d2977add95..f3a47cba759e6 100644
--- a/library/alloc/src/vec/mod.rs
+++ b/library/alloc/src/vec/mod.rs
@@ -2568,6 +2568,8 @@ impl Vec {
             }
             unsafe {
                 ptr::write(self.as_mut_ptr().add(len), element);
+                // Since next() executes user code which can panic we have to bump the length
+                // after each step.
                 // NB can't overflow since we would have had to alloc the address space
                 self.set_len(len + 1);
             }
diff --git a/library/alloc/src/vec/source_iter_marker.rs b/library/alloc/src/vec/source_iter_marker.rs
index e857d284d3ab6..d814d4ae355b0 100644
--- a/library/alloc/src/vec/source_iter_marker.rs
+++ b/library/alloc/src/vec/source_iter_marker.rs
@@ -89,6 +89,8 @@ fn write_in_place_with_drop(
             // all we can do is check if it's still in range
             debug_assert!(sink.dst as *const _ <= src_end, "InPlaceIterable contract violation");
             ptr::write(sink.dst, item);
+            // Since this executes user code which can panic we have to bump the pointer
+            // after each step.
             sink.dst = sink.dst.add(1);
         }
         Ok(sink)
@@ -136,6 +138,8 @@ where
                 let dst = dst_buf.offset(i as isize);
                 debug_assert!(dst as *const _ <= end, "InPlaceIterable contract violation");
                 ptr::write(dst, self.__iterator_get_unchecked(i));
+                // Since this executes user code which can panic we have to bump the pointer
+                // after each step.
                 drop_guard.dst = dst.add(1);
             }
         }
diff --git a/library/alloc/src/vec/spec_extend.rs b/library/alloc/src/vec/spec_extend.rs
index c6f4f22a01f6e..c3b4534096de5 100644
--- a/library/alloc/src/vec/spec_extend.rs
+++ b/library/alloc/src/vec/spec_extend.rs
@@ -40,6 +40,8 @@ where
                 iterator.for_each(move |element| {
                     ptr::write(ptr, element);
                     ptr = ptr.offset(1);
+                    // Since the loop executes user code which can panic we have to bump the length
+                    // after each step.
                    // NB can't overflow since we would have had to alloc the address space
                    local_len.increment_len(1);
                });
diff --git a/library/core/src/iter/adapters/zip.rs b/library/core/src/iter/adapters/zip.rs
index 46bf93dcc01e2..3d0401cbebcf7 100644
--- a/library/core/src/iter/adapters/zip.rs
+++ b/library/core/src/iter/adapters/zip.rs
@@ -227,6 +227,8 @@ where
     fn next(&mut self) -> Option<(A::Item, B::Item)> {
         if self.index < self.len {
             let i = self.index;
+            // since get_unchecked executes code which can panic we increment the counters beforehand
+            // so that the same index won't be accessed twice, as required by TrustedRandomAccess
             self.index += 1;
             // SAFETY: `i` is smaller than `self.len`, thus smaller than `self.a.len()` and `self.b.len()`
             unsafe {
@@ -234,6 +236,7 @@ where
             }
         } else if A::MAY_HAVE_SIDE_EFFECT && self.index < self.a_len {
             let i = self.index;
+            // as above, increment before executing code that may panic
             self.index += 1;
             self.len += 1;
             // match the base implementation's potential side effects
@@ -259,6 +262,8 @@ where
         let end = self.index + delta;
         while self.index < end {
             let i = self.index;
+            // since get_unchecked executes code which can panic we increment the counters beforehand
+            // so that the same index won't be accessed twice, as required by TrustedRandomAccess
             self.index += 1;
             if A::MAY_HAVE_SIDE_EFFECT {
                 // SAFETY: the usage of `cmp::min` to calculate `delta`
@@ -295,6 +300,8 @@ where
                 let sz_a = self.a.size();
                 if A::MAY_HAVE_SIDE_EFFECT && sz_a > self.len {
                     for _ in 0..sz_a - self.len {
+                        // since next_back() may panic we decrement the counter beforehand
+                        // to keep Zip's state in sync with the underlying iterator source
                         self.a_len -= 1;
                         self.a.next_back();
                     }
@@ -309,6 +316,8 @@ where
             }
         }
         if self.index < self.len {
+            // since get_unchecked executes code which can panic we decrement the counters beforehand
+            // so that the same index won't be accessed twice, as required by TrustedRandomAccess
             self.len -= 1;
             self.a_len -= 1;
             let i = self.len;
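
The comments above all document one panic-safety pattern: the state that records how many elements are initialized (a length, a destination pointer, or Zip's index/len counters) is updated adjacent to each call into user code rather than batched after the loop. The effect is observable from safe code. The sketch below is illustrative only and not part of the patch; it uses nothing beyond the standard library, and `catch_unwind` appears solely to inspect the vector after the iterator panics.

use std::panic::{catch_unwind, AssertUnwindSafe};

fn main() {
    let mut v: Vec<String> = Vec::with_capacity(4);

    // Hypothetical user iterator: it panics while producing the third element,
    // i.e. inside the user code that runs on each step of the extend loop.
    let panicking_iter = (0..4).map(|i| {
        if i == 2 {
            panic!("user code panicked mid-extend");
        }
        format!("element {}", i)
    });

    // catch_unwind is used only so the vector's state can be inspected afterwards.
    let result = catch_unwind(AssertUnwindSafe(|| v.extend(panicking_iter)));
    assert!(result.is_err());

    // Because the length is bumped after every successful write, the elements
    // produced before the panic are still owned by the vector and will be
    // dropped exactly once; no uninitialized slot is ever treated as live.
    assert_eq!(v, ["element 0", "element 1"]);
}

The Zip hunks apply the same idea one level down: the counters are adjusted before calling get_unchecked or next_back, so if that call unwinds, the TrustedRandomAccess requirement that no index is handed out twice still holds.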