summaryrefslogtreecommitdiff
path: root/library/alloc
diff options
context:
space:
mode:
authorbors <bors@rust-lang.org>2020-07-27 17:39:01 +0000
committerbors <bors@rust-lang.org>2020-07-27 17:39:01 +0000
commit54e000891ffccd4cbfb92146b92736c83085df63 (patch)
tree1200bb13eb9ae22def4c43bc657bc56da8faedc6 /library/alloc
parent4a90e36c85336d1d4b209556c1a9733210bbff19 (diff)
parent6d9705220fec4553d693a7c19d99496e14c89edf (diff)
downloadrust-tmp-nightly.tar.gz
Auto merge of #73265 - mark-i-m:mv-std, r=<try>tmp-nightly
mv std libs to library/ This is the first step in refactoring the directory layout of this repository, with further followup steps planned (but not done yet). Background: currently, all crates are under src/, without nested src directories and with the unconventional `lib*` prefixes (e.g., `src/libcore/lib.rs`). This directory structure is not idiomatic and makes the `src/` directory rather overwhelming. To improve contributor experience and make things a bit more approachable, we are reorganizing the repo a bit. In this PR, we move the standard libs (basically anything that is "runtime", as opposed to part of the compiler, build system, or one of the tools, etc). The new layout moves these libraries to a new `library/` directory in the root of the repo. Additionally, we remove the `lib*` prefixes and add nested `src/` directories. The other crates/tools in this repo are not touched. So in summary: ``` library/<crate>/src/*.rs src/<all the rest> // unchanged ``` where `<crate>` is: - core - alloc - std - test - proc_macro - panic_abort - panic_unwind - profiler_builtins - term - unwind - rtstartup - backtrace - rustc-std-workspace-* There was a lot of discussion about this and a few rounds of compiler team approvals, FCPs, MCPs, and nominations. The original MCP is https://github.com/rust-lang/compiler-team/issues/298. The final approval of the compiler team was given here: https://github.com/rust-lang/rust/pull/73265#issuecomment-659498446. The name `library` was chosen to complement a later move of the compiler crates to a `compiler/` directory. There was a lot of discussion around adding the nested `src/` directories. Note that this does increase the nesting depth (plausibly important for manual traversal of the tree, e.g., through GitHub's UI or `cd`), but this is deemed to be better as it fits the standard layout of Rust crates throughout most of the ecosystem, though there is some debate about how much this should apply to multi-crate projects. 
Overall, there seem to be more people in favor of nested `src/` than against. After this PR, there are no dependencies out of the `library/` directory except on the `build_helper` (or crates.io crates).
Diffstat (limited to 'library/alloc')
-rw-r--r--library/alloc/Cargo.toml33
-rw-r--r--library/alloc/benches/btree/map.rs284
-rw-r--r--library/alloc/benches/btree/mod.rs2
-rw-r--r--library/alloc/benches/btree/set.rs216
-rw-r--r--library/alloc/benches/lib.rs17
-rw-r--r--library/alloc/benches/linked_list.rs77
-rw-r--r--library/alloc/benches/slice.rs382
-rw-r--r--library/alloc/benches/str.rs299
-rw-r--r--library/alloc/benches/string.rs164
-rw-r--r--library/alloc/benches/vec.rs482
-rw-r--r--library/alloc/benches/vec_deque.rs54
-rw-r--r--library/alloc/benches/vec_deque_append.rs34
-rw-r--r--library/alloc/src/alloc.rs319
-rw-r--r--library/alloc/src/alloc/tests.rs31
-rw-r--r--library/alloc/src/borrow.rs476
-rw-r--r--library/alloc/src/boxed.rs1200
-rw-r--r--library/alloc/src/collections/binary_heap.rs1431
-rw-r--r--library/alloc/src/collections/btree/map.rs2860
-rw-r--r--library/alloc/src/collections/btree/mod.rs27
-rw-r--r--library/alloc/src/collections/btree/navigate.rs261
-rw-r--r--library/alloc/src/collections/btree/node.rs1488
-rw-r--r--library/alloc/src/collections/btree/search.rs83
-rw-r--r--library/alloc/src/collections/btree/set.rs1574
-rw-r--r--library/alloc/src/collections/linked_list.rs1904
-rw-r--r--library/alloc/src/collections/linked_list/tests.rs457
-rw-r--r--library/alloc/src/collections/mod.rs103
-rw-r--r--library/alloc/src/collections/vec_deque.rs3117
-rw-r--r--library/alloc/src/collections/vec_deque/drain.rs126
-rw-r--r--library/alloc/src/collections/vec_deque/tests.rs567
-rw-r--r--library/alloc/src/fmt.rs588
-rw-r--r--library/alloc/src/lib.rs186
-rw-r--r--library/alloc/src/macros.rs110
-rw-r--r--library/alloc/src/prelude/mod.rs15
-rw-r--r--library/alloc/src/prelude/v1.rs14
-rw-r--r--library/alloc/src/raw_vec.rs536
-rw-r--r--library/alloc/src/raw_vec/tests.rs78
-rw-r--r--library/alloc/src/rc.rs2138
-rw-r--r--library/alloc/src/rc/tests.rs436
-rw-r--r--library/alloc/src/slice.rs1069
-rw-r--r--library/alloc/src/str.rs576
-rw-r--r--library/alloc/src/string.rs2504
-rw-r--r--library/alloc/src/sync.rs2294
-rw-r--r--library/alloc/src/sync/tests.rs494
-rw-r--r--library/alloc/src/task.rs91
-rw-r--r--library/alloc/src/tests.rs151
-rw-r--r--library/alloc/src/vec.rs3122
-rw-r--r--library/alloc/tests/arc.rs197
-rw-r--r--library/alloc/tests/binary_heap.rs464
-rw-r--r--library/alloc/tests/borrow.rs47
-rw-r--r--library/alloc/tests/boxed.rs51
-rw-r--r--library/alloc/tests/btree/map.rs1463
-rw-r--r--library/alloc/tests/btree/mod.rs27
-rw-r--r--library/alloc/tests/btree/set.rs666
-rw-r--r--library/alloc/tests/cow_str.rs144
-rw-r--r--library/alloc/tests/fmt.rs7
-rw-r--r--library/alloc/tests/heap.rs47
-rw-r--r--library/alloc/tests/lib.rs57
-rw-r--r--library/alloc/tests/linked_list.rs705
-rw-r--r--library/alloc/tests/rc.rs193
-rw-r--r--library/alloc/tests/slice.rs1771
-rw-r--r--library/alloc/tests/str.rs1899
-rw-r--r--library/alloc/tests/string.rs723
-rw-r--r--library/alloc/tests/vec.rs1629
-rw-r--r--library/alloc/tests/vec_deque.rs1646
64 files changed, 44206 insertions, 0 deletions
diff --git a/library/alloc/Cargo.toml b/library/alloc/Cargo.toml
new file mode 100644
index 00000000000..381750a5198
--- /dev/null
+++ b/library/alloc/Cargo.toml
@@ -0,0 +1,33 @@
+[package]
+authors = ["The Rust Project Developers"]
+name = "alloc"
+version = "0.0.0"
+autotests = false
+autobenches = false
+edition = "2018"
+
+[dependencies]
+core = { path = "../core" }
+compiler_builtins = { version = "0.1.10", features = ['rustc-dep-of-std'] }
+
+[dev-dependencies]
+rand = "0.7"
+rand_xorshift = "0.2"
+
+[[test]]
+name = "collectionstests"
+path = "tests/lib.rs"
+
+[[bench]]
+name = "collectionsbenches"
+path = "benches/lib.rs"
+test = true
+
+[[bench]]
+name = "vec_deque_append_bench"
+path = "benches/vec_deque_append.rs"
+harness = false
+
+[features]
+compiler-builtins-mem = ['compiler_builtins/mem']
+compiler-builtins-c = ["compiler_builtins/c"]
diff --git a/library/alloc/benches/btree/map.rs b/library/alloc/benches/btree/map.rs
new file mode 100644
index 00000000000..38d19c59ad1
--- /dev/null
+++ b/library/alloc/benches/btree/map.rs
@@ -0,0 +1,284 @@
+use std::collections::BTreeMap;
+use std::iter::Iterator;
+use std::ops::RangeBounds;
+use std::vec::Vec;
+
+use rand::{seq::SliceRandom, thread_rng, Rng};
+use test::{black_box, Bencher};
+
+macro_rules! map_insert_rand_bench {
+ ($name: ident, $n: expr, $map: ident) => {
+ #[bench]
+ pub fn $name(b: &mut Bencher) {
+ let n: usize = $n;
+ let mut map = $map::new();
+ // setup
+ let mut rng = thread_rng();
+
+ for _ in 0..n {
+ let i = rng.gen::<usize>() % n;
+ map.insert(i, i);
+ }
+
+ // measure
+ b.iter(|| {
+ let k = rng.gen::<usize>() % n;
+ map.insert(k, k);
+ map.remove(&k);
+ });
+ black_box(map);
+ }
+ };
+}
+
+macro_rules! map_insert_seq_bench {
+ ($name: ident, $n: expr, $map: ident) => {
+ #[bench]
+ pub fn $name(b: &mut Bencher) {
+ let mut map = $map::new();
+ let n: usize = $n;
+ // setup
+ for i in 0..n {
+ map.insert(i * 2, i * 2);
+ }
+
+ // measure
+ let mut i = 1;
+ b.iter(|| {
+ map.insert(i, i);
+ map.remove(&i);
+ i = (i + 2) % n;
+ });
+ black_box(map);
+ }
+ };
+}
+
+macro_rules! map_find_rand_bench {
+ ($name: ident, $n: expr, $map: ident) => {
+ #[bench]
+ pub fn $name(b: &mut Bencher) {
+ let mut map = $map::new();
+ let n: usize = $n;
+
+ // setup
+ let mut rng = thread_rng();
+ let mut keys: Vec<_> = (0..n).map(|_| rng.gen::<usize>() % n).collect();
+
+ for &k in &keys {
+ map.insert(k, k);
+ }
+
+ keys.shuffle(&mut rng);
+
+ // measure
+ let mut i = 0;
+ b.iter(|| {
+ let t = map.get(&keys[i]);
+ i = (i + 1) % n;
+ black_box(t);
+ })
+ }
+ };
+}
+
+macro_rules! map_find_seq_bench {
+ ($name: ident, $n: expr, $map: ident) => {
+ #[bench]
+ pub fn $name(b: &mut Bencher) {
+ let mut map = $map::new();
+ let n: usize = $n;
+
+ // setup
+ for i in 0..n {
+ map.insert(i, i);
+ }
+
+ // measure
+ let mut i = 0;
+ b.iter(|| {
+ let x = map.get(&i);
+ i = (i + 1) % n;
+ black_box(x);
+ })
+ }
+ };
+}
+
+map_insert_rand_bench! {insert_rand_100, 100, BTreeMap}
+map_insert_rand_bench! {insert_rand_10_000, 10_000, BTreeMap}
+
+map_insert_seq_bench! {insert_seq_100, 100, BTreeMap}
+map_insert_seq_bench! {insert_seq_10_000, 10_000, BTreeMap}
+
+map_find_rand_bench! {find_rand_100, 100, BTreeMap}
+map_find_rand_bench! {find_rand_10_000, 10_000, BTreeMap}
+
+map_find_seq_bench! {find_seq_100, 100, BTreeMap}
+map_find_seq_bench! {find_seq_10_000, 10_000, BTreeMap}
+
+fn bench_iteration(b: &mut Bencher, size: i32) {
+ let mut map = BTreeMap::<i32, i32>::new();
+ let mut rng = thread_rng();
+
+ for _ in 0..size {
+ map.insert(rng.gen(), rng.gen());
+ }
+
+ b.iter(|| {
+ for entry in &map {
+ black_box(entry);
+ }
+ });
+}
+
+#[bench]
+pub fn iteration_20(b: &mut Bencher) {
+ bench_iteration(b, 20);
+}
+
+#[bench]
+pub fn iteration_1000(b: &mut Bencher) {
+ bench_iteration(b, 1000);
+}
+
+#[bench]
+pub fn iteration_100000(b: &mut Bencher) {
+ bench_iteration(b, 100000);
+}
+
+fn bench_iteration_mut(b: &mut Bencher, size: i32) {
+ let mut map = BTreeMap::<i32, i32>::new();
+ let mut rng = thread_rng();
+
+ for _ in 0..size {
+ map.insert(rng.gen(), rng.gen());
+ }
+
+ b.iter(|| {
+ for kv in map.iter_mut() {
+ black_box(kv);
+ }
+ });
+}
+
+#[bench]
+pub fn iteration_mut_20(b: &mut Bencher) {
+ bench_iteration_mut(b, 20);
+}
+
+#[bench]
+pub fn iteration_mut_1000(b: &mut Bencher) {
+ bench_iteration_mut(b, 1000);
+}
+
+#[bench]
+pub fn iteration_mut_100000(b: &mut Bencher) {
+ bench_iteration_mut(b, 100000);
+}
+
+fn bench_first_and_last(b: &mut Bencher, size: i32) {
+ let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+ b.iter(|| {
+ for _ in 0..10 {
+ black_box(map.first_key_value());
+ black_box(map.last_key_value());
+ }
+ });
+}
+
+#[bench]
+pub fn first_and_last_0(b: &mut Bencher) {
+ bench_first_and_last(b, 0);
+}
+
+#[bench]
+pub fn first_and_last_100(b: &mut Bencher) {
+ bench_first_and_last(b, 100);
+}
+
+#[bench]
+pub fn first_and_last_10k(b: &mut Bencher) {
+ bench_first_and_last(b, 10_000);
+}
+
+const BENCH_RANGE_SIZE: i32 = 145;
+const BENCH_RANGE_COUNT: i32 = BENCH_RANGE_SIZE * (BENCH_RANGE_SIZE - 1) / 2;
+
+fn bench_range<F, R>(b: &mut Bencher, f: F)
+where
+ F: Fn(i32, i32) -> R,
+ R: RangeBounds<i32>,
+{
+ let map: BTreeMap<_, _> = (0..BENCH_RANGE_SIZE).map(|i| (i, i)).collect();
+ b.iter(|| {
+ let mut c = 0;
+ for i in 0..BENCH_RANGE_SIZE {
+ for j in i + 1..BENCH_RANGE_SIZE {
+ black_box(map.range(f(i, j)));
+ c += 1;
+ }
+ }
+ debug_assert_eq!(c, BENCH_RANGE_COUNT);
+ });
+}
+
+#[bench]
+pub fn range_included_excluded(b: &mut Bencher) {
+ bench_range(b, |i, j| i..j);
+}
+
+#[bench]
+pub fn range_included_included(b: &mut Bencher) {
+ bench_range(b, |i, j| i..=j);
+}
+
+#[bench]
+pub fn range_included_unbounded(b: &mut Bencher) {
+ bench_range(b, |i, _| i..);
+}
+
+#[bench]
+pub fn range_unbounded_unbounded(b: &mut Bencher) {
+ bench_range(b, |_, _| ..);
+}
+
+fn bench_iter(b: &mut Bencher, repeats: i32, size: i32) {
+ let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+ b.iter(|| {
+ for _ in 0..repeats {
+ black_box(map.iter());
+ }
+ });
+}
+
+/// Contrast range_unbounded_unbounded with `iter()`.
+#[bench]
+pub fn range_unbounded_vs_iter(b: &mut Bencher) {
+ bench_iter(b, BENCH_RANGE_COUNT, BENCH_RANGE_SIZE);
+}
+
+#[bench]
+pub fn iter_0(b: &mut Bencher) {
+ bench_iter(b, 1_000, 0);
+}
+
+#[bench]
+pub fn iter_1(b: &mut Bencher) {
+ bench_iter(b, 1_000, 1);
+}
+
+#[bench]
+pub fn iter_100(b: &mut Bencher) {
+ bench_iter(b, 1_000, 100);
+}
+
+#[bench]
+pub fn iter_10k(b: &mut Bencher) {
+ bench_iter(b, 1_000, 10_000);
+}
+
+#[bench]
+pub fn iter_1m(b: &mut Bencher) {
+ bench_iter(b, 1_000, 1_000_000);
+}
diff --git a/library/alloc/benches/btree/mod.rs b/library/alloc/benches/btree/mod.rs
new file mode 100644
index 00000000000..095ca5dd2e2
--- /dev/null
+++ b/library/alloc/benches/btree/mod.rs
@@ -0,0 +1,2 @@
+mod map;
+mod set;
diff --git a/library/alloc/benches/btree/set.rs b/library/alloc/benches/btree/set.rs
new file mode 100644
index 00000000000..2518506b9b5
--- /dev/null
+++ b/library/alloc/benches/btree/set.rs
@@ -0,0 +1,216 @@
+use std::collections::BTreeSet;
+
+use rand::{thread_rng, Rng};
+use test::Bencher;
+
+fn random(n: usize) -> BTreeSet<usize> {
+ let mut rng = thread_rng();
+ let mut set = BTreeSet::new();
+ while set.len() < n {
+ set.insert(rng.gen());
+ }
+ assert_eq!(set.len(), n);
+ set
+}
+
+fn neg(n: usize) -> BTreeSet<i32> {
+ let set: BTreeSet<i32> = (-(n as i32)..=-1).collect();
+ assert_eq!(set.len(), n);
+ set
+}
+
+fn pos(n: usize) -> BTreeSet<i32> {
+ let set: BTreeSet<i32> = (1..=(n as i32)).collect();
+ assert_eq!(set.len(), n);
+ set
+}
+
+fn stagger(n1: usize, factor: usize) -> [BTreeSet<u32>; 2] {
+ let n2 = n1 * factor;
+ let mut sets = [BTreeSet::new(), BTreeSet::new()];
+ for i in 0..(n1 + n2) {
+ let b = i % (factor + 1) != 0;
+ sets[b as usize].insert(i as u32);
+ }
+ assert_eq!(sets[0].len(), n1);
+ assert_eq!(sets[1].len(), n2);
+ sets
+}
+
+macro_rules! set_bench {
+ ($name: ident, $set_func: ident, $result_func: ident, $sets: expr) => {
+ #[bench]
+ pub fn $name(b: &mut Bencher) {
+ // setup
+ let sets = $sets;
+
+ // measure
+ b.iter(|| sets[0].$set_func(&sets[1]).$result_func())
+ }
+ };
+}
+
+#[bench]
+pub fn clone_100(b: &mut Bencher) {
+ let src = pos(100);
+ b.iter(|| src.clone())
+}
+
+#[bench]
+pub fn clone_100_and_clear(b: &mut Bencher) {
+ let src = pos(100);
+ b.iter(|| src.clone().clear())
+}
+
+#[bench]
+pub fn clone_100_and_drain_all(b: &mut Bencher) {
+ let src = pos(100);
+ b.iter(|| src.clone().drain_filter(|_| true).count())
+}
+
+#[bench]
+pub fn clone_100_and_drain_half(b: &mut Bencher) {
+ let src = pos(100);
+ b.iter(|| {
+ let mut set = src.clone();
+ assert_eq!(set.drain_filter(|i| i % 2 == 0).count(), 100 / 2);
+ assert_eq!(set.len(), 100 / 2);
+ })
+}
+
+#[bench]
+pub fn clone_100_and_into_iter(b: &mut Bencher) {
+ let src = pos(100);
+ b.iter(|| src.clone().into_iter().count())
+}
+
+#[bench]
+pub fn clone_100_and_pop_all(b: &mut Bencher) {
+ let src = pos(100);
+ b.iter(|| {
+ let mut set = src.clone();
+ while set.pop_first().is_some() {}
+ set
+ });
+}
+
+#[bench]
+pub fn clone_100_and_remove_all(b: &mut Bencher) {
+ let src = pos(100);
+ b.iter(|| {
+ let mut set = src.clone();
+ while let Some(elt) = set.iter().copied().next() {
+ set.remove(&elt);
+ }
+ set
+ });
+}
+
+#[bench]
+pub fn clone_100_and_remove_half(b: &mut Bencher) {
+ let src = pos(100);
+ b.iter(|| {
+ let mut set = src.clone();
+ for i in (2..=100 as i32).step_by(2) {
+ set.remove(&i);
+ }
+ assert_eq!(set.len(), 100 / 2);
+ set
+ })
+}
+
+#[bench]
+pub fn clone_10k(b: &mut Bencher) {
+ let src = pos(10_000);
+ b.iter(|| src.clone())
+}
+
+#[bench]
+pub fn clone_10k_and_clear(b: &mut Bencher) {
+ let src = pos(10_000);
+ b.iter(|| src.clone().clear())
+}
+
+#[bench]
+pub fn clone_10k_and_drain_all(b: &mut Bencher) {
+ let src = pos(10_000);
+ b.iter(|| src.clone().drain_filter(|_| true).count())
+}
+
+#[bench]
+pub fn clone_10k_and_drain_half(b: &mut Bencher) {
+ let src = pos(10_000);
+ b.iter(|| {
+ let mut set = src.clone();
+ assert_eq!(set.drain_filter(|i| i % 2 == 0).count(), 10_000 / 2);
+ assert_eq!(set.len(), 10_000 / 2);
+ })
+}
+
+#[bench]
+pub fn clone_10k_and_into_iter(b: &mut Bencher) {
+ let src = pos(10_000);
+ b.iter(|| src.clone().into_iter().count())
+}
+
+#[bench]
+pub fn clone_10k_and_pop_all(b: &mut Bencher) {
+ let src = pos(10_000);
+ b.iter(|| {
+ let mut set = src.clone();
+ while set.pop_first().is_some() {}
+ set
+ });
+}
+
+#[bench]
+pub fn clone_10k_and_remove_all(b: &mut Bencher) {
+ let src = pos(10_000);
+ b.iter(|| {
+ let mut set = src.clone();
+ while let Some(elt) = set.iter().copied().next() {
+ set.remove(&elt);
+ }
+ set
+ });
+}
+
+#[bench]
+pub fn clone_10k_and_remove_half(b: &mut Bencher) {
+ let src = pos(10_000);
+ b.iter(|| {
+ let mut set = src.clone();
+ for i in (2..=10_000 as i32).step_by(2) {
+ set.remove(&i);
+ }
+ assert_eq!(set.len(), 10_000 / 2);
+ set
+ })
+}
+
+set_bench! {intersection_100_neg_vs_100_pos, intersection, count, [neg(100), pos(100)]}
+set_bench! {intersection_100_neg_vs_10k_pos, intersection, count, [neg(100), pos(10_000)]}
+set_bench! {intersection_100_pos_vs_100_neg, intersection, count, [pos(100), neg(100)]}
+set_bench! {intersection_100_pos_vs_10k_neg, intersection, count, [pos(100), neg(10_000)]}
+set_bench! {intersection_10k_neg_vs_100_pos, intersection, count, [neg(10_000), pos(100)]}
+set_bench! {intersection_10k_neg_vs_10k_pos, intersection, count, [neg(10_000), pos(10_000)]}
+set_bench! {intersection_10k_pos_vs_100_neg, intersection, count, [pos(10_000), neg(100)]}
+set_bench! {intersection_10k_pos_vs_10k_neg, intersection, count, [pos(10_000), neg(10_000)]}
+set_bench! {intersection_random_100_vs_100, intersection, count, [random(100), random(100)]}
+set_bench! {intersection_random_100_vs_10k, intersection, count, [random(100), random(10_000)]}
+set_bench! {intersection_random_10k_vs_100, intersection, count, [random(10_000), random(100)]}
+set_bench! {intersection_random_10k_vs_10k, intersection, count, [random(10_000), random(10_000)]}
+set_bench! {intersection_staggered_100_vs_100, intersection, count, stagger(100, 1)}
+set_bench! {intersection_staggered_10k_vs_10k, intersection, count, stagger(10_000, 1)}
+set_bench! {intersection_staggered_100_vs_10k, intersection, count, stagger(100, 100)}
+set_bench! {difference_random_100_vs_100, difference, count, [random(100), random(100)]}
+set_bench! {difference_random_100_vs_10k, difference, count, [random(100), random(10_000)]}
+set_bench! {difference_random_10k_vs_100, difference, count, [random(10_000), random(100)]}
+set_bench! {difference_random_10k_vs_10k, difference, count, [random(10_000), random(10_000)]}
+set_bench! {difference_staggered_100_vs_100, difference, count, stagger(100, 1)}
+set_bench! {difference_staggered_10k_vs_10k, difference, count, stagger(10_000, 1)}
+set_bench! {difference_staggered_100_vs_10k, difference, count, stagger(100, 100)}
+set_bench! {is_subset_100_vs_100, is_subset, clone, [pos(100), pos(100)]}
+set_bench! {is_subset_100_vs_10k, is_subset, clone, [pos(100), pos(10_000)]}
+set_bench! {is_subset_10k_vs_100, is_subset, clone, [pos(10_000), pos(100)]}
+set_bench! {is_subset_10k_vs_10k, is_subset, clone, [pos(10_000), pos(10_000)]}
diff --git a/library/alloc/benches/lib.rs b/library/alloc/benches/lib.rs
new file mode 100644
index 00000000000..608eafc88d2
--- /dev/null
+++ b/library/alloc/benches/lib.rs
@@ -0,0 +1,17 @@
+// Disabling on android for the time being
+// See https://github.com/rust-lang/rust/issues/73535#event-3477699747
+#![cfg(not(target_os = "android"))]
+#![feature(btree_drain_filter)]
+#![feature(map_first_last)]
+#![feature(repr_simd)]
+#![feature(test)]
+
+extern crate test;
+
+mod btree;
+mod linked_list;
+mod slice;
+mod str;
+mod string;
+mod vec;
+mod vec_deque;
diff --git a/library/alloc/benches/linked_list.rs b/library/alloc/benches/linked_list.rs
new file mode 100644
index 00000000000..29c5ad2bc6e
--- /dev/null
+++ b/library/alloc/benches/linked_list.rs
@@ -0,0 +1,77 @@
+use std::collections::LinkedList;
+use test::Bencher;
+
+#[bench]
+fn bench_collect_into(b: &mut Bencher) {
+ let v = &[0; 64];
+ b.iter(|| {
+ let _: LinkedList<_> = v.iter().cloned().collect();
+ })
+}
+
+#[bench]
+fn bench_push_front(b: &mut Bencher) {
+ let mut m: LinkedList<_> = LinkedList::new();
+ b.iter(|| {
+ m.push_front(0);
+ })
+}
+
+#[bench]
+fn bench_push_back(b: &mut Bencher) {
+ let mut m: LinkedList<_> = LinkedList::new();
+ b.iter(|| {
+ m.push_back(0);
+ })
+}
+
+#[bench]
+fn bench_push_back_pop_back(b: &mut Bencher) {
+ let mut m: LinkedList<_> = LinkedList::new();
+ b.iter(|| {
+ m.push_back(0);
+ m.pop_back();
+ })
+}
+
+#[bench]
+fn bench_push_front_pop_front(b: &mut Bencher) {
+ let mut m: LinkedList<_> = LinkedList::new();
+ b.iter(|| {
+ m.push_front(0);
+ m.pop_front();
+ })
+}
+
+#[bench]
+fn bench_iter(b: &mut Bencher) {
+ let v = &[0; 128];
+ let m: LinkedList<_> = v.iter().cloned().collect();
+ b.iter(|| {
+ assert!(m.iter().count() == 128);
+ })
+}
+#[bench]
+fn bench_iter_mut(b: &mut Bencher) {
+ let v = &[0; 128];
+ let mut m: LinkedList<_> = v.iter().cloned().collect();
+ b.iter(|| {
+ assert!(m.iter_mut().count() == 128);
+ })
+}
+#[bench]
+fn bench_iter_rev(b: &mut Bencher) {
+ let v = &[0; 128];
+ let m: LinkedList<_> = v.iter().cloned().collect();
+ b.iter(|| {
+ assert!(m.iter().rev().count() == 128);
+ })
+}
+#[bench]
+fn bench_iter_mut_rev(b: &mut Bencher) {
+ let v = &[0; 128];
+ let mut m: LinkedList<_> = v.iter().cloned().collect();
+ b.iter(|| {
+ assert!(m.iter_mut().rev().count() == 128);
+ })
+}
diff --git a/library/alloc/benches/slice.rs b/library/alloc/benches/slice.rs
new file mode 100644
index 00000000000..e20c043286e
--- /dev/null
+++ b/library/alloc/benches/slice.rs
@@ -0,0 +1,382 @@
+use std::{mem, ptr};
+
+use rand::distributions::{Alphanumeric, Standard};
+use rand::{thread_rng, Rng, SeedableRng};
+use rand_xorshift::XorShiftRng;
+use test::{black_box, Bencher};
+
+#[bench]
+fn iterator(b: &mut Bencher) {
+ // peculiar numbers to stop LLVM from optimising the summation
+ // out.
+ let v: Vec<_> = (0..100).map(|i| i ^ (i << 1) ^ (i >> 1)).collect();
+
+ b.iter(|| {
+ let mut sum = 0;
+ for x in &v {
+ sum += *x;
+ }
+ // sum == 11806, to stop dead code elimination.
+ if sum == 0 {
+ panic!()
+ }
+ })
+}
+
+#[bench]
+fn mut_iterator(b: &mut Bencher) {
+ let mut v = vec![0; 100];
+
+ b.iter(|| {
+ let mut i = 0;
+ for x in &mut v {
+ *x = i;
+ i += 1;
+ }
+ })
+}
+
+#[bench]
+fn concat(b: &mut Bencher) {
+ let xss: Vec<Vec<i32>> = (0..100).map(|i| (0..i).collect()).collect();
+ b.iter(|| {
+ xss.concat();
+ });
+}
+
+#[bench]
+fn join(b: &mut Bencher) {
+ let xss: Vec<Vec<i32>> = (0..100).map(|i| (0..i).collect()).collect();
+ b.iter(|| xss.join(&0));
+}
+
+#[bench]
+fn push(b: &mut Bencher) {
+ let mut vec = Vec::<i32>::new();
+ b.iter(|| {
+ vec.push(0);
+ black_box(&vec);
+ });
+}
+
+#[bench]
+fn starts_with_same_vector(b: &mut Bencher) {
+ let vec: Vec<_> = (0..100).collect();
+ b.iter(|| vec.starts_with(&vec))
+}
+
+#[bench]
+fn starts_with_single_element(b: &mut Bencher) {
+ let vec: Vec<_> = vec![0];
+ b.iter(|| vec.starts_with(&vec))
+}
+
+#[bench]
+fn starts_with_diff_one_element_at_end(b: &mut Bencher) {
+ let vec: Vec<_> = (0..100).collect();
+ let mut match_vec: Vec<_> = (0..99).collect();
+ match_vec.push(0);
+ b.iter(|| vec.starts_with(&match_vec))
+}
+
+#[bench]
+fn ends_with_same_vector(b: &mut Bencher) {
+ let vec: Vec<_> = (0..100).collect();
+ b.iter(|| vec.ends_with(&vec))
+}
+
+#[bench]
+fn ends_with_single_element(b: &mut Bencher) {
+ let vec: Vec<_> = vec![0];
+ b.iter(|| vec.ends_with(&vec))
+}
+
+#[bench]
+fn ends_with_diff_one_element_at_beginning(b: &mut Bencher) {
+ let vec: Vec<_> = (0..100).collect();
+ let mut match_vec: Vec<_> = (0..100).collect();
+ match_vec[0] = 200;
+ b.iter(|| vec.starts_with(&match_vec))
+}
+
+#[bench]
+fn contains_last_element(b: &mut Bencher) {
+ let vec: Vec<_> = (0..100).collect();
+ b.iter(|| vec.contains(&99))
+}
+
+#[bench]
+fn zero_1kb_from_elem(b: &mut Bencher) {
+ b.iter(|| vec![0u8; 1024]);
+}
+
+#[bench]
+fn zero_1kb_set_memory(b: &mut Bencher) {
+ b.iter(|| {
+ let mut v = Vec::<u8>::with_capacity(1024);
+ unsafe {
+ let vp = v.as_mut_ptr();
+ ptr::write_bytes(vp, 0, 1024);
+ v.set_len(1024);
+ }
+ v
+ });
+}
+
+#[bench]
+fn zero_1kb_loop_set(b: &mut Bencher) {
+ b.iter(|| {
+ let mut v = Vec::<u8>::with_capacity(1024);
+ unsafe {
+ v.set_len(1024);
+ }
+ for i in 0..1024 {
+ v[i] = 0;
+ }
+ });
+}
+
+#[bench]
+fn zero_1kb_mut_iter(b: &mut Bencher) {
+ b.iter(|| {
+ let mut v = Vec::<u8>::with_capacity(1024);
+ unsafe {
+ v.set_len(1024);
+ }
+ for x in &mut v {
+ *x = 0;
+ }
+ v
+ });
+}
+
+#[bench]
+fn random_inserts(b: &mut Bencher) {
+ let mut rng = thread_rng();
+ b.iter(|| {
+ let mut v = vec![(0, 0); 30];
+ for _ in 0..100 {
+ let l = v.len();
+ v.insert(rng.gen::<usize>() % (l + 1), (1, 1));
+ }
+ })
+}
+
+#[bench]
+fn random_removes(b: &mut Bencher) {
+ let mut rng = thread_rng();
+ b.iter(|| {
+ let mut v = vec![(0, 0); 130];
+ for _ in 0..100 {
+ let l = v.len();
+ v.remove(rng.gen::<usize>() % l);
+ }
+ })
+}
+
+fn gen_ascending(len: usize) -> Vec<u64> {
+ (0..len as u64).collect()
+}
+
+fn gen_descending(len: usize) -> Vec<u64> {
+ (0..len as u64).rev().collect()
+}
+
+const SEED: [u8; 16] = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15];
+
+fn gen_random(len: usize) -> Vec<u64> {
+ let mut rng = XorShiftRng::from_seed(SEED);
+ (&mut rng).sample_iter(&Standard).take(len).collect()
+}
+
+fn gen_random_bytes(len: usize) -> Vec<u8> {
+ let mut rng = XorShiftRng::from_seed(SEED);
+ (&mut rng).sample_iter(&Standard).take(len).collect()
+}
+
+fn gen_mostly_ascending(len: usize) -> Vec<u64> {
+ let mut rng = XorShiftRng::from_seed(SEED);
+ let mut v = gen_ascending(len);
+ for _ in (0usize..).take_while(|x| x * x <= len) {
+ let x = rng.gen::<usize>() % len;
+ let y = rng.gen::<usize>() % len;
+ v.swap(x, y);
+ }
+ v
+}
+
+fn gen_mostly_descending(len: usize) -> Vec<u64> {
+ let mut rng = XorShiftRng::from_seed(SEED);
+ let mut v = gen_descending(len);
+ for _ in (0usize..).take_while(|x| x * x <= len) {
+ let x = rng.gen::<usize>() % len;
+ let y = rng.gen::<usize>() % len;
+ v.swap(x, y);
+ }
+ v
+}
+
+fn gen_strings(len: usize) -> Vec<String> {
+ let mut rng = XorShiftRng::from_seed(SEED);
+ let mut v = vec![];
+ for _ in 0..len {
+ let n = rng.gen::<usize>() % 20 + 1;
+ v.push((&mut rng).sample_iter(&Alphanumeric).take(n).collect());
+ }
+ v
+}
+
+fn gen_big_random(len: usize) -> Vec<[u64; 16]> {
+ let mut rng = XorShiftRng::from_seed(SEED);
+ (&mut rng).sample_iter(&Standard).map(|x| [x; 16]).take(len).collect()
+}
+
+macro_rules! sort {
+ ($f:ident, $name:ident, $gen:expr, $len:expr) => {
+ #[bench]
+ fn $name(b: &mut Bencher) {
+ let v = $gen($len);
+ b.iter(|| v.clone().$f());
+ b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
+ }
+ };
+}
+
+macro_rules! sort_strings {
+ ($f:ident, $name:ident, $gen:expr, $len:expr) => {
+ #[bench]
+ fn $name(b: &mut Bencher) {
+ let v = $gen($len);
+ let v = v.iter().map(|s| &**s).collect::<Vec<&str>>();
+ b.iter(|| v.clone().$f());
+ b.bytes = $len * mem::size_of::<&str>() as u64;
+ }
+ };
+}
+
+macro_rules! sort_expensive {
+ ($f:ident, $name:ident, $gen:expr, $len:expr) => {
+ #[bench]
+ fn $name(b: &mut Bencher) {
+ let v = $gen($len);
+ b.iter(|| {
+ let mut v = v.clone();
+ let mut count = 0;
+ v.$f(|a: &u64, b: &u64| {
+ count += 1;
+ if count % 1_000_000_000 == 0 {
+ panic!("should not happen");
+ }
+ (*a as f64).cos().partial_cmp(&(*b as f64).cos()).unwrap()
+ });
+ black_box(count);
+ });
+ b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
+ }
+ };
+}
+
+macro_rules! sort_lexicographic {
+ ($f:ident, $name:ident, $gen:expr, $len:expr) => {
+ #[bench]
+ fn $name(b: &mut Bencher) {
+ let v = $gen($len);
+ b.iter(|| v.clone().$f(|x| x.to_string()));
+ b.bytes = $len * mem::size_of_val(&$gen(1)[0]) as u64;
+ }
+ };
+}
+
+sort!(sort, sort_small_ascending, gen_ascending, 10);
+sort!(sort, sort_small_descending, gen_descending, 10);
+sort!(sort, sort_small_random, gen_random, 10);
+sort!(sort, sort_small_big, gen_big_random, 10);
+sort!(sort, sort_medium_random, gen_random, 100);
+sort!(sort, sort_large_ascending, gen_ascending, 10000);
+sort!(sort, sort_large_descending, gen_descending, 10000);
+sort!(sort, sort_large_mostly_ascending, gen_mostly_ascending, 10000);
+sort!(sort, sort_large_mostly_descending, gen_mostly_descending, 10000);
+sort!(sort, sort_large_random, gen_random, 10000);
+sort!(sort, sort_large_big, gen_big_random, 10000);
+sort_strings!(sort, sort_large_strings, gen_strings, 10000);
+sort_expensive!(sort_by, sort_large_expensive, gen_random, 10000);
+
+sort!(sort_unstable, sort_unstable_small_ascending, gen_ascending, 10);
+sort!(sort_unstable, sort_unstable_small_descending, gen_descending, 10);
+sort!(sort_unstable, sort_unstable_small_random, gen_random, 10);
+sort!(sort_unstable, sort_unstable_small_big, gen_big_random, 10);
+sort!(sort_unstable, sort_unstable_medium_random, gen_random, 100);
+sort!(sort_unstable, sort_unstable_large_ascending, gen_ascending, 10000);
+sort!(sort_unstable, sort_unstable_large_descending, gen_descending, 10000);
+sort!(sort_unstable, sort_unstable_large_mostly_ascending, gen_mostly_ascending, 10000);
+sort!(sort_unstable, sort_unstable_large_mostly_descending, gen_mostly_descending, 10000);
+sort!(sort_unstable, sort_unstable_large_random, gen_random, 10000);
+sort!(sort_unstable, sort_unstable_large_big, gen_big_random, 10000);
+sort_strings!(sort_unstable, sort_unstable_large_strings, gen_strings, 10000);
+sort_expensive!(sort_unstable_by, sort_unstable_large_expensive, gen_random, 10000);
+
+sort_lexicographic!(sort_by_key, sort_by_key_lexicographic, gen_random, 10000);
+sort_lexicographic!(sort_unstable_by_key, sort_unstable_by_key_lexicographic, gen_random, 10000);
+sort_lexicographic!(sort_by_cached_key, sort_by_cached_key_lexicographic, gen_random, 10000);
+
+macro_rules! reverse {
+ ($name:ident, $ty:ty, $f:expr) => {
+ #[bench]
+ fn $name(b: &mut Bencher) {
+ // odd length and offset by 1 to be as unaligned as possible
+ let n = 0xFFFFF;
+ let mut v: Vec<_> = (0..1 + (n / mem::size_of::<$ty>() as u64)).map($f).collect();
+ b.iter(|| black_box(&mut v[1..]).reverse());
+ b.bytes = n;
+ }
+ };
+}
+
+reverse!(reverse_u8, u8, |x| x as u8);
+reverse!(reverse_u16, u16, |x| x as u16);
+reverse!(reverse_u8x3, [u8; 3], |x| [x as u8, (x >> 8) as u8, (x >> 16) as u8]);
+reverse!(reverse_u32, u32, |x| x as u32);
+reverse!(reverse_u64, u64, |x| x as u64);
+reverse!(reverse_u128, u128, |x| x as u128);
+#[repr(simd)]
+struct F64x4(f64, f64, f64, f64);
+reverse!(reverse_simd_f64x4, F64x4, |x| {
+ let x = x as f64;
+ F64x4(x, x, x, x)
+});
+
+macro_rules! rotate {
+ ($name:ident, $gen:expr, $len:expr, $mid:expr) => {
+ #[bench]
+ fn $name(b: &mut Bencher) {
+ let size = mem::size_of_val(&$gen(1)[0]);
+ let mut v = $gen($len * 8 / size);
+ b.iter(|| black_box(&mut v).rotate_left(($mid * 8 + size - 1) / size));
+ b.bytes = (v.len() * size) as u64;
+ }
+ };
+}
+
+rotate!(rotate_tiny_by1, gen_random, 16, 1);
+rotate!(rotate_tiny_half, gen_random, 16, 16 / 2);
+rotate!(rotate_tiny_half_plus_one, gen_random, 16, 16 / 2 + 1);
+
+rotate!(rotate_medium_by1, gen_random, 9158, 1);
+rotate!(rotate_medium_by727_u64, gen_random, 9158, 727);
+rotate!(rotate_medium_by727_bytes, gen_random_bytes, 9158, 727);
+rotate!(rotate_medium_by727_strings, gen_strings, 9158, 727);
+rotate!(rotate_medium_half, gen_random, 9158, 9158 / 2);
+rotate!(rotate_medium_half_plus_one, gen_random, 9158, 9158 / 2 + 1);
+
+// Intended to use more RAM than the machine has cache
+rotate!(rotate_huge_by1, gen_random, 5 * 1024 * 1024, 1);
+rotate!(rotate_huge_by9199_u64, gen_random, 5 * 1024 * 1024, 9199);
+rotate!(rotate_huge_by9199_bytes, gen_random_bytes, 5 * 1024 * 1024, 9199);
+rotate!(rotate_huge_by9199_strings, gen_strings, 5 * 1024 * 1024, 9199);
+rotate!(rotate_huge_by9199_big, gen_big_random, 5 * 1024 * 1024, 9199);
+rotate!(rotate_huge_by1234577_u64, gen_random, 5 * 1024 * 1024, 1234577);
+rotate!(rotate_huge_by1234577_bytes, gen_random_bytes, 5 * 1024 * 1024, 1234577);
+rotate!(rotate_huge_by1234577_strings, gen_strings, 5 * 1024 * 1024, 1234577);
+rotate!(rotate_huge_by1234577_big, gen_big_random, 5 * 1024 * 1024, 1234577);
+rotate!(rotate_huge_half, gen_random, 5 * 1024 * 1024, 5 * 1024 * 1024 / 2);
+rotate!(rotate_huge_half_plus_one, gen_random, 5 * 1024 * 1024, 5 * 1024 * 1024 / 2 + 1);
diff --git a/library/alloc/benches/str.rs b/library/alloc/benches/str.rs
new file mode 100644
index 00000000000..391475bc0c7
--- /dev/null
+++ b/library/alloc/benches/str.rs
@@ -0,0 +1,299 @@
+use test::{black_box, Bencher};
+
+#[bench]
+fn char_iterator(b: &mut Bencher) {
+ let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
+
+ b.iter(|| s.chars().count());
+}
+
+#[bench]
+fn char_iterator_for(b: &mut Bencher) {
+ let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
+
+ b.iter(|| {
+ for ch in s.chars() {
+ black_box(ch);
+ }
+ });
+}
+
+#[bench]
+fn char_iterator_ascii(b: &mut Bencher) {
+ let s = "Mary had a little lamb, Little lamb
+ Mary had a little lamb, Little lamb
+ Mary had a little lamb, Little lamb
+ Mary had a little lamb, Little lamb
+ Mary had a little lamb, Little lamb
+ Mary had a little lamb, Little lamb";
+
+ b.iter(|| s.chars().count());
+}
+
+#[bench]
+fn char_iterator_rev(b: &mut Bencher) {
+ let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
+
+ b.iter(|| s.chars().rev().count());
+}
+
+#[bench]
+fn char_iterator_rev_for(b: &mut Bencher) {
+ let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
+
+ b.iter(|| {
+ for ch in s.chars().rev() {
+ black_box(ch);
+ }
+ });
+}
+
+#[bench]
+fn char_indicesator(b: &mut Bencher) {
+ let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
+ let len = s.chars().count();
+
+ b.iter(|| assert_eq!(s.char_indices().count(), len));
+}
+
+#[bench]
+fn char_indicesator_rev(b: &mut Bencher) {
+ let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
+ let len = s.chars().count();
+
+ b.iter(|| assert_eq!(s.char_indices().rev().count(), len));
+}
+
+#[bench]
+fn split_unicode_ascii(b: &mut Bencher) {
+ let s = "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam";
+
+ b.iter(|| assert_eq!(s.split('V').count(), 3));
+}
+
+#[bench]
+fn split_ascii(b: &mut Bencher) {
+ let s = "Mary had a little lamb, Little lamb, little-lamb.";
+ let len = s.split(' ').count();
+
+ b.iter(|| assert_eq!(s.split(' ').count(), len));
+}
+
+#[bench]
+fn split_extern_fn(b: &mut Bencher) {
+ let s = "Mary had a little lamb, Little lamb, little-lamb.";
+ let len = s.split(' ').count();
+ fn pred(c: char) -> bool {
+ c == ' '
+ }
+
+ b.iter(|| assert_eq!(s.split(pred).count(), len));
+}
+
+#[bench]
+fn split_closure(b: &mut Bencher) {
+ let s = "Mary had a little lamb, Little lamb, little-lamb.";
+ let len = s.split(' ').count();
+
+ b.iter(|| assert_eq!(s.split(|c: char| c == ' ').count(), len));
+}
+
+#[bench]
+fn split_slice(b: &mut Bencher) {
+ let s = "Mary had a little lamb, Little lamb, little-lamb.";
+ let len = s.split(' ').count();
+
+ let c: &[char] = &[' '];
+ b.iter(|| assert_eq!(s.split(c).count(), len));
+}
+
+#[bench]
+fn bench_join(b: &mut Bencher) {
+ let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
+ let sep = "→";
+ let v = vec![s, s, s, s, s, s, s, s, s, s];
+ b.iter(|| {
+ assert_eq!(v.join(sep).len(), s.len() * 10 + sep.len() * 9);
+ })
+}
+
+#[bench]
+fn bench_contains_short_short(b: &mut Bencher) {
+ let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
+ let needle = "sit";
+
+ b.iter(|| {
+ assert!(haystack.contains(needle));
+ })
+}
+
+#[bench]
+fn bench_contains_short_long(b: &mut Bencher) {
+ let haystack = "\
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
+ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
+eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
+sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
+tempus vel, gravida nec quam.
+
+In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
+sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
+diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
+lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
+eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
+interdum. Curabitur ut nisi justo.
+
+Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
+mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
+lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
+est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
+felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
+ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
+feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
+Aliquam sit amet placerat lorem.
+
+Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
+mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
+Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
+lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
+suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
+cursus accumsan.
+
+Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
+feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
+vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
+leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
+malesuada sollicitudin quam eu fermentum.";
+ let needle = "english";
+
+ b.iter(|| {
+ assert!(!haystack.contains(needle));
+ })
+}
+
+#[bench]
+fn bench_contains_bad_naive(b: &mut Bencher) {
+ let haystack = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
+ let needle = "aaaaaaaab";
+
+ b.iter(|| {
+ assert!(!haystack.contains(needle));
+ })
+}
+
+#[bench]
+fn bench_contains_equal(b: &mut Bencher) {
+ let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
+ let needle = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
+
+ b.iter(|| {
+ assert!(haystack.contains(needle));
+ })
+}
+
+macro_rules! make_test_inner {
+ ($s:ident, $code:expr, $name:ident, $str:expr, $iters:expr) => {
+ #[bench]
+ fn $name(bencher: &mut Bencher) {
+ let mut $s = $str;
+ black_box(&mut $s);
+ bencher.iter(|| {
+ for _ in 0..$iters {
+ black_box($code);
+ }
+ });
+ }
+ };
+}
+
+macro_rules! make_test {
+ ($name:ident, $s:ident, $code:expr) => {
+ make_test!($name, $s, $code, 1);
+ };
+ ($name:ident, $s:ident, $code:expr, $iters:expr) => {
+ mod $name {
+ use test::Bencher;
+ use test::black_box;
+
+ // Short strings: 65 bytes each
+ make_test_inner!($s, $code, short_ascii,
+ "Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!", $iters);
+ make_test_inner!($s, $code, short_mixed,
+ "ศไทย中华Việt Nam; Mary had a little lamb, Little lam!", $iters);
+ make_test_inner!($s, $code, short_pile_of_poo,
+ "💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩!", $iters);
+ make_test_inner!($s, $code, long_lorem_ipsum,"\
+Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
+ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
+eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
+sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
+tempus vel, gravida nec quam.
+
+In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
+sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
+diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
+lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
+eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
+interdum. Curabitur ut nisi justo.
+
+Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
+mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
+lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
+est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
+felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
+ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
+feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
+Aliquam sit amet placerat lorem.
+
+Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
+mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
+Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
+lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
+suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
+cursus accumsan.
+
+Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
+feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
+vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
+leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
+malesuada sollicitudin quam eu fermentum!", $iters);
+ }
+ }
+}
+
+make_test!(chars_count, s, s.chars().count());
+
+make_test!(contains_bang_str, s, s.contains("!"));
+make_test!(contains_bang_char, s, s.contains('!'));
+
+make_test!(match_indices_a_str, s, s.match_indices("a").count());
+
+make_test!(split_a_str, s, s.split("a").count());
+
+make_test!(trim_ascii_char, s, { s.trim_matches(|c: char| c.is_ascii()) });
+make_test!(trim_start_ascii_char, s, { s.trim_start_matches(|c: char| c.is_ascii()) });
+make_test!(trim_end_ascii_char, s, { s.trim_end_matches(|c: char| c.is_ascii()) });
+
+make_test!(find_underscore_char, s, s.find('_'));
+make_test!(rfind_underscore_char, s, s.rfind('_'));
+make_test!(find_underscore_str, s, s.find("_"));
+
+make_test!(find_zzz_char, s, s.find('\u{1F4A4}'));
+make_test!(rfind_zzz_char, s, s.rfind('\u{1F4A4}'));
+make_test!(find_zzz_str, s, s.find("\u{1F4A4}"));
+
+make_test!(starts_with_ascii_char, s, s.starts_with('/'), 1024);
+make_test!(ends_with_ascii_char, s, s.ends_with('/'), 1024);
+make_test!(starts_with_unichar, s, s.starts_with('\u{1F4A4}'), 1024);
+make_test!(ends_with_unichar, s, s.ends_with('\u{1F4A4}'), 1024);
+make_test!(starts_with_str, s, s.starts_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024);
+make_test!(ends_with_str, s, s.ends_with("💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩"), 1024);
+
+make_test!(split_space_char, s, s.split(' ').count());
+make_test!(split_terminator_space_char, s, s.split_terminator(' ').count());
+
+make_test!(splitn_space_char, s, s.splitn(10, ' ').count());
+make_test!(rsplitn_space_char, s, s.rsplitn(10, ' ').count());
+
+make_test!(split_space_str, s, s.split(" ").count());
+make_test!(split_ad_str, s, s.split("ad").count());
diff --git a/library/alloc/benches/string.rs b/library/alloc/benches/string.rs
new file mode 100644
index 00000000000..5c95160ba2d
--- /dev/null
+++ b/library/alloc/benches/string.rs
@@ -0,0 +1,164 @@
+use std::iter::repeat;
+use test::{black_box, Bencher};
+
+#[bench]
+fn bench_with_capacity(b: &mut Bencher) {
+ b.iter(|| String::with_capacity(100));
+}
+
+#[bench]
+fn bench_push_str(b: &mut Bencher) {
+ let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
+ b.iter(|| {
+ let mut r = String::new();
+ r.push_str(s);
+ });
+}
+
+const REPETITIONS: u64 = 10_000;
+
+#[bench]
+fn bench_push_str_one_byte(b: &mut Bencher) {
+ b.bytes = REPETITIONS;
+ b.iter(|| {
+ let mut r = String::new();
+ for _ in 0..REPETITIONS {
+ r.push_str("a")
+ }
+ });
+}
+
+#[bench]
+fn bench_push_char_one_byte(b: &mut Bencher) {
+ b.bytes = REPETITIONS;
+ b.iter(|| {
+ let mut r = String::new();
+ for _ in 0..REPETITIONS {
+ r.push('a')
+ }
+ });
+}
+
+#[bench]
+fn bench_push_char_two_bytes(b: &mut Bencher) {
+ b.bytes = REPETITIONS * 2;
+ b.iter(|| {
+ let mut r = String::new();
+ for _ in 0..REPETITIONS {
+ r.push('â')
+ }
+ });
+}
+
+#[bench]
+fn from_utf8_lossy_100_ascii(b: &mut Bencher) {
+ let s = b"Hello there, the quick brown fox jumped over the lazy dog! \
+ Lorem ipsum dolor sit amet, consectetur. ";
+
+ assert_eq!(100, s.len());
+ b.iter(|| {
+ let _ = String::from_utf8_lossy(s);
+ });
+}
+
+#[bench]
+fn from_utf8_lossy_100_multibyte(b: &mut Bencher) {
+ let s = "𐌀𐌖𐌋𐌄𐌑𐌉ปรدولة الكويتทศไทย中华𐍅𐌿𐌻𐍆𐌹𐌻𐌰".as_bytes();
+ assert_eq!(100, s.len());
+ b.iter(|| {
+ let _ = String::from_utf8_lossy(s);
+ });
+}
+
+#[bench]
+fn from_utf8_lossy_invalid(b: &mut Bencher) {
+ let s = b"Hello\xC0\x80 There\xE6\x83 Goodbye";
+ b.iter(|| {
+ let _ = String::from_utf8_lossy(s);
+ });
+}
+
+#[bench]
+fn from_utf8_lossy_100_invalid(b: &mut Bencher) {
+ let s = repeat(0xf5).take(100).collect::<Vec<_>>();
+ b.iter(|| {
+ let _ = String::from_utf8_lossy(&s);
+ });
+}
+
+#[bench]
+fn bench_exact_size_shrink_to_fit(b: &mut Bencher) {
+ let s = "Hello there, the quick brown fox jumped over the lazy dog! \
+ Lorem ipsum dolor sit amet, consectetur. ";
+ // ensure our operation produces an exact-size string before we benchmark it
+ let mut r = String::with_capacity(s.len());
+ r.push_str(s);
+ assert_eq!(r.len(), r.capacity());
+ b.iter(|| {
+ let mut r = String::with_capacity(s.len());
+ r.push_str(s);
+ r.shrink_to_fit();
+ r
+ });
+}
+
+#[bench]
+fn bench_from_str(b: &mut Bencher) {
+ let s = "Hello there, the quick brown fox jumped over the lazy dog! \
+ Lorem ipsum dolor sit amet, consectetur. ";
+ b.iter(|| String::from(s))
+}
+
+#[bench]
+fn bench_from(b: &mut Bencher) {
+ let s = "Hello there, the quick brown fox jumped over the lazy dog! \
+ Lorem ipsum dolor sit amet, consectetur. ";
+ b.iter(|| String::from(s))
+}
+
+#[bench]
+fn bench_to_string(b: &mut Bencher) {
+ let s = "Hello there, the quick brown fox jumped over the lazy dog! \
+ Lorem ipsum dolor sit amet, consectetur. ";
+ b.iter(|| s.to_string())
+}
+
+#[bench]
+fn bench_insert_char_short(b: &mut Bencher) {
+ let s = "Hello, World!";
+ b.iter(|| {
+ let mut x = String::from(s);
+ black_box(&mut x).insert(6, black_box(' '));
+ x
+ })
+}
+
+#[bench]
+fn bench_insert_char_long(b: &mut Bencher) {
+ let s = "Hello, World!";
+ b.iter(|| {
+ let mut x = String::from(s);
+ black_box(&mut x).insert(6, black_box('❤'));
+ x
+ })
+}
+
+#[bench]
+fn bench_insert_str_short(b: &mut Bencher) {
+ let s = "Hello, World!";
+ b.iter(|| {
+ let mut x = String::from(s);
+ black_box(&mut x).insert_str(6, black_box(" "));
+ x
+ })
+}
+
+#[bench]
+fn bench_insert_str_long(b: &mut Bencher) {
+ let s = "Hello, World!";
+ b.iter(|| {
+ let mut x = String::from(s);
+ black_box(&mut x).insert_str(6, black_box(" rustic "));
+ x
+ })
+}
diff --git a/library/alloc/benches/vec.rs b/library/alloc/benches/vec.rs
new file mode 100644
index 00000000000..a3da9e80cd0
--- /dev/null
+++ b/library/alloc/benches/vec.rs
@@ -0,0 +1,482 @@
+use std::iter::{repeat, FromIterator};
+use test::Bencher;
+
+#[bench]
+fn bench_new(b: &mut Bencher) {
+ b.iter(|| {
+ let v: Vec<u32> = Vec::new();
+ assert_eq!(v.len(), 0);
+ assert_eq!(v.capacity(), 0);
+ })
+}
+
+fn do_bench_with_capacity(b: &mut Bencher, src_len: usize) {
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let v: Vec<u32> = Vec::with_capacity(src_len);
+ assert_eq!(v.len(), 0);
+ assert_eq!(v.capacity(), src_len);
+ })
+}
+
+#[bench]
+fn bench_with_capacity_0000(b: &mut Bencher) {
+ do_bench_with_capacity(b, 0)
+}
+
+#[bench]
+fn bench_with_capacity_0010(b: &mut Bencher) {
+ do_bench_with_capacity(b, 10)
+}
+
+#[bench]
+fn bench_with_capacity_0100(b: &mut Bencher) {
+ do_bench_with_capacity(b, 100)
+}
+
+#[bench]
+fn bench_with_capacity_1000(b: &mut Bencher) {
+ do_bench_with_capacity(b, 1000)
+}
+
+fn do_bench_from_fn(b: &mut Bencher, src_len: usize) {
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let dst = (0..src_len).collect::<Vec<_>>();
+ assert_eq!(dst.len(), src_len);
+ assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
+ })
+}
+
+#[bench]
+fn bench_from_fn_0000(b: &mut Bencher) {
+ do_bench_from_fn(b, 0)
+}
+
+#[bench]
+fn bench_from_fn_0010(b: &mut Bencher) {
+ do_bench_from_fn(b, 10)
+}
+
+#[bench]
+fn bench_from_fn_0100(b: &mut Bencher) {
+ do_bench_from_fn(b, 100)
+}
+
+#[bench]
+fn bench_from_fn_1000(b: &mut Bencher) {
+ do_bench_from_fn(b, 1000)
+}
+
+fn do_bench_from_elem(b: &mut Bencher, src_len: usize) {
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let dst: Vec<usize> = repeat(5).take(src_len).collect();
+ assert_eq!(dst.len(), src_len);
+ assert!(dst.iter().all(|x| *x == 5));
+ })
+}
+
+#[bench]
+fn bench_from_elem_0000(b: &mut Bencher) {
+ do_bench_from_elem(b, 0)
+}
+
+#[bench]
+fn bench_from_elem_0010(b: &mut Bencher) {
+ do_bench_from_elem(b, 10)
+}
+
+#[bench]
+fn bench_from_elem_0100(b: &mut Bencher) {
+ do_bench_from_elem(b, 100)
+}
+
+#[bench]
+fn bench_from_elem_1000(b: &mut Bencher) {
+ do_bench_from_elem(b, 1000)
+}
+
+fn do_bench_from_slice(b: &mut Bencher, src_len: usize) {
+ let src: Vec<_> = FromIterator::from_iter(0..src_len);
+
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let dst = src.clone()[..].to_vec();
+ assert_eq!(dst.len(), src_len);
+ assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
+ });
+}
+
+#[bench]
+fn bench_from_slice_0000(b: &mut Bencher) {
+ do_bench_from_slice(b, 0)
+}
+
+#[bench]
+fn bench_from_slice_0010(b: &mut Bencher) {
+ do_bench_from_slice(b, 10)
+}
+
+#[bench]
+fn bench_from_slice_0100(b: &mut Bencher) {
+ do_bench_from_slice(b, 100)
+}
+
+#[bench]
+fn bench_from_slice_1000(b: &mut Bencher) {
+ do_bench_from_slice(b, 1000)
+}
+
+fn do_bench_from_iter(b: &mut Bencher, src_len: usize) {
+ let src: Vec<_> = FromIterator::from_iter(0..src_len);
+
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let dst: Vec<_> = FromIterator::from_iter(src.clone());
+ assert_eq!(dst.len(), src_len);
+ assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
+ });
+}
+
+#[bench]
+fn bench_from_iter_0000(b: &mut Bencher) {
+ do_bench_from_iter(b, 0)
+}
+
+#[bench]
+fn bench_from_iter_0010(b: &mut Bencher) {
+ do_bench_from_iter(b, 10)
+}
+
+#[bench]
+fn bench_from_iter_0100(b: &mut Bencher) {
+ do_bench_from_iter(b, 100)
+}
+
+#[bench]
+fn bench_from_iter_1000(b: &mut Bencher) {
+ do_bench_from_iter(b, 1000)
+}
+
+fn do_bench_extend(b: &mut Bencher, dst_len: usize, src_len: usize) {
+ let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
+ let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
+
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let mut dst = dst.clone();
+ dst.extend(src.clone());
+ assert_eq!(dst.len(), dst_len + src_len);
+ assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
+ });
+}
+
+#[bench]
+fn bench_extend_0000_0000(b: &mut Bencher) {
+ do_bench_extend(b, 0, 0)
+}
+
+#[bench]
+fn bench_extend_0000_0010(b: &mut Bencher) {
+ do_bench_extend(b, 0, 10)
+}
+
+#[bench]
+fn bench_extend_0000_0100(b: &mut Bencher) {
+ do_bench_extend(b, 0, 100)
+}
+
+#[bench]
+fn bench_extend_0000_1000(b: &mut Bencher) {
+ do_bench_extend(b, 0, 1000)
+}
+
+#[bench]
+fn bench_extend_0010_0010(b: &mut Bencher) {
+ do_bench_extend(b, 10, 10)
+}
+
+#[bench]
+fn bench_extend_0100_0100(b: &mut Bencher) {
+ do_bench_extend(b, 100, 100)
+}
+
+#[bench]
+fn bench_extend_1000_1000(b: &mut Bencher) {
+ do_bench_extend(b, 1000, 1000)
+}
+
+fn do_bench_push_all(b: &mut Bencher, dst_len: usize, src_len: usize) {
+ let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
+ let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
+
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let mut dst = dst.clone();
+ dst.extend_from_slice(&src);
+ assert_eq!(dst.len(), dst_len + src_len);
+ assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
+ });
+}
+
+#[bench]
+fn bench_push_all_0000_0000(b: &mut Bencher) {
+ do_bench_push_all(b, 0, 0)
+}
+
+#[bench]
+fn bench_push_all_0000_0010(b: &mut Bencher) {
+ do_bench_push_all(b, 0, 10)
+}
+
+#[bench]
+fn bench_push_all_0000_0100(b: &mut Bencher) {
+ do_bench_push_all(b, 0, 100)
+}
+
+#[bench]
+fn bench_push_all_0000_1000(b: &mut Bencher) {
+ do_bench_push_all(b, 0, 1000)
+}
+
+#[bench]
+fn bench_push_all_0010_0010(b: &mut Bencher) {
+ do_bench_push_all(b, 10, 10)
+}
+
+#[bench]
+fn bench_push_all_0100_0100(b: &mut Bencher) {
+ do_bench_push_all(b, 100, 100)
+}
+
+#[bench]
+fn bench_push_all_1000_1000(b: &mut Bencher) {
+ do_bench_push_all(b, 1000, 1000)
+}
+
+fn do_bench_push_all_move(b: &mut Bencher, dst_len: usize, src_len: usize) {
+ let dst: Vec<_> = FromIterator::from_iter(0..dst_len);
+ let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
+
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let mut dst = dst.clone();
+ dst.extend(src.clone());
+ assert_eq!(dst.len(), dst_len + src_len);
+ assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
+ });
+}
+
+#[bench]
+fn bench_push_all_move_0000_0000(b: &mut Bencher) {
+ do_bench_push_all_move(b, 0, 0)
+}
+
+#[bench]
+fn bench_push_all_move_0000_0010(b: &mut Bencher) {
+ do_bench_push_all_move(b, 0, 10)
+}
+
+#[bench]
+fn bench_push_all_move_0000_0100(b: &mut Bencher) {
+ do_bench_push_all_move(b, 0, 100)
+}
+
+#[bench]
+fn bench_push_all_move_0000_1000(b: &mut Bencher) {
+ do_bench_push_all_move(b, 0, 1000)
+}
+
+#[bench]
+fn bench_push_all_move_0010_0010(b: &mut Bencher) {
+ do_bench_push_all_move(b, 10, 10)
+}
+
+#[bench]
+fn bench_push_all_move_0100_0100(b: &mut Bencher) {
+ do_bench_push_all_move(b, 100, 100)
+}
+
+#[bench]
+fn bench_push_all_move_1000_1000(b: &mut Bencher) {
+ do_bench_push_all_move(b, 1000, 1000)
+}
+
+fn do_bench_clone(b: &mut Bencher, src_len: usize) {
+ let src: Vec<usize> = FromIterator::from_iter(0..src_len);
+
+ b.bytes = src_len as u64;
+
+ b.iter(|| {
+ let dst = src.clone();
+ assert_eq!(dst.len(), src_len);
+ assert!(dst.iter().enumerate().all(|(i, x)| i == *x));
+ });
+}
+
+#[bench]
+fn bench_clone_0000(b: &mut Bencher) {
+ do_bench_clone(b, 0)
+}
+
+#[bench]
+fn bench_clone_0010(b: &mut Bencher) {
+ do_bench_clone(b, 10)
+}
+
+#[bench]
+fn bench_clone_0100(b: &mut Bencher) {
+ do_bench_clone(b, 100)
+}
+
+#[bench]
+fn bench_clone_1000(b: &mut Bencher) {
+ do_bench_clone(b, 1000)
+}
+
+fn do_bench_clone_from(b: &mut Bencher, times: usize, dst_len: usize, src_len: usize) {
+ let dst: Vec<_> = FromIterator::from_iter(0..src_len);
+ let src: Vec<_> = FromIterator::from_iter(dst_len..dst_len + src_len);
+
+ b.bytes = (times * src_len) as u64;
+
+ b.iter(|| {
+ let mut dst = dst.clone();
+
+ for _ in 0..times {
+ dst.clone_from(&src);
+
+ assert_eq!(dst.len(), src_len);
+ assert!(dst.iter().enumerate().all(|(i, x)| dst_len + i == *x));
+ }
+ });
+}
+
+#[bench]
+fn bench_clone_from_01_0000_0000(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 0, 0)
+}
+
+#[bench]
+fn bench_clone_from_01_0000_0010(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 0, 10)
+}
+
+#[bench]
+fn bench_clone_from_01_0000_0100(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 0, 100)
+}
+
+#[bench]
+fn bench_clone_from_01_0000_1000(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 0, 1000)
+}
+
+#[bench]
+fn bench_clone_from_01_0010_0010(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 10, 10)
+}
+
+#[bench]
+fn bench_clone_from_01_0100_0100(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 100, 100)
+}
+
+#[bench]
+fn bench_clone_from_01_1000_1000(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 1000, 1000)
+}
+
+#[bench]
+fn bench_clone_from_01_0010_0100(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 10, 100)
+}
+
+#[bench]
+fn bench_clone_from_01_0100_1000(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 100, 1000)
+}
+
+#[bench]
+fn bench_clone_from_01_0010_0000(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 10, 0)
+}
+
+#[bench]
+fn bench_clone_from_01_0100_0010(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 100, 10)
+}
+
+#[bench]
+fn bench_clone_from_01_1000_0100(b: &mut Bencher) {
+ do_bench_clone_from(b, 1, 1000, 100)
+}
+
+#[bench]
+fn bench_clone_from_10_0000_0000(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 0, 0)
+}
+
+#[bench]
+fn bench_clone_from_10_0000_0010(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 0, 10)
+}
+
+#[bench]
+fn bench_clone_from_10_0000_0100(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 0, 100)
+}
+
+#[bench]
+fn bench_clone_from_10_0000_1000(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 0, 1000)
+}
+
+#[bench]
+fn bench_clone_from_10_0010_0010(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 10, 10)
+}
+
+#[bench]
+fn bench_clone_from_10_0100_0100(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 100, 100)
+}
+
+#[bench]
+fn bench_clone_from_10_1000_1000(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 1000, 1000)
+}
+
+#[bench]
+fn bench_clone_from_10_0010_0100(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 10, 100)
+}
+
+#[bench]
+fn bench_clone_from_10_0100_1000(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 100, 1000)
+}
+
+#[bench]
+fn bench_clone_from_10_0010_0000(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 10, 0)
+}
+
+#[bench]
+fn bench_clone_from_10_0100_0010(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 100, 10)
+}
+
+#[bench]
+fn bench_clone_from_10_1000_0100(b: &mut Bencher) {
+ do_bench_clone_from(b, 10, 1000, 100)
+}
diff --git a/library/alloc/benches/vec_deque.rs b/library/alloc/benches/vec_deque.rs
new file mode 100644
index 00000000000..bf2dffd1e93
--- /dev/null
+++ b/library/alloc/benches/vec_deque.rs
@@ -0,0 +1,54 @@
+use std::collections::VecDeque;
+use test::{black_box, Bencher};
+
+#[bench]
+fn bench_new(b: &mut Bencher) {
+ b.iter(|| {
+ let ring: VecDeque<i32> = VecDeque::new();
+ black_box(ring);
+ })
+}
+
+#[bench]
+fn bench_grow_1025(b: &mut Bencher) {
+ b.iter(|| {
+ let mut deq = VecDeque::new();
+ for i in 0..1025 {
+ deq.push_front(i);
+ }
+ black_box(deq);
+ })
+}
+
+#[bench]
+fn bench_iter_1000(b: &mut Bencher) {
+ let ring: VecDeque<_> = (0..1000).collect();
+
+ b.iter(|| {
+ let mut sum = 0;
+ for &i in &ring {
+ sum += i;
+ }
+ black_box(sum);
+ })
+}
+
+#[bench]
+fn bench_mut_iter_1000(b: &mut Bencher) {
+ let mut ring: VecDeque<_> = (0..1000).collect();
+
+ b.iter(|| {
+ let mut sum = 0;
+ for i in &mut ring {
+ sum += *i;
+ }
+ black_box(sum);
+ })
+}
+
+#[bench]
+fn bench_try_fold(b: &mut Bencher) {
+ let ring: VecDeque<_> = (0..1000).collect();
+
+ b.iter(|| black_box(ring.iter().try_fold(0, |a, b| Some(a + b))))
+}
diff --git a/library/alloc/benches/vec_deque_append.rs b/library/alloc/benches/vec_deque_append.rs
new file mode 100644
index 00000000000..5825bdc355f
--- /dev/null
+++ b/library/alloc/benches/vec_deque_append.rs
@@ -0,0 +1,34 @@
+use std::{collections::VecDeque, time::Instant};
+
+const VECDEQUE_LEN: i32 = 100000;
+const WARMUP_N: usize = 100;
+const BENCH_N: usize = 1000;
+
+fn main() {
+ let a: VecDeque<i32> = (0..VECDEQUE_LEN).collect();
+ let b: VecDeque<i32> = (0..VECDEQUE_LEN).collect();
+
+ for _ in 0..WARMUP_N {
+ let mut c = a.clone();
+ let mut d = b.clone();
+ c.append(&mut d);
+ }
+
+ let mut durations = Vec::with_capacity(BENCH_N);
+
+ for _ in 0..BENCH_N {
+ let mut c = a.clone();
+ let mut d = b.clone();
+ let before = Instant::now();
+ c.append(&mut d);
+ let after = Instant::now();
+ durations.push(after.duration_since(before));
+ }
+
+ let l = durations.len();
+ durations.sort();
+
+ assert!(BENCH_N % 2 == 0);
+ let median = (durations[(l / 2) - 1] + durations[l / 2]) / 2;
+ println!("\ncustom-bench vec_deque_append {:?} ns/iter\n", median.as_nanos());
+}
diff --git a/library/alloc/src/alloc.rs b/library/alloc/src/alloc.rs
new file mode 100644
index 00000000000..98c7ac3f2ef
--- /dev/null
+++ b/library/alloc/src/alloc.rs
@@ -0,0 +1,319 @@
+//! Memory allocation APIs
+
+#![stable(feature = "alloc_module", since = "1.28.0")]
+
+use core::intrinsics::{self, min_align_of_val, size_of_val};
+use core::ptr::{NonNull, Unique};
+
+#[stable(feature = "alloc_module", since = "1.28.0")]
+#[doc(inline)]
+pub use core::alloc::*;
+
+#[cfg(test)]
+mod tests;
+
+extern "Rust" {
+ // These are the magic symbols to call the global allocator. rustc generates
+ // them from the `#[global_allocator]` attribute if there is one, or uses the
+ // default implementations in libstd (`__rdl_alloc` etc in `src/libstd/alloc.rs`)
+ // otherwise.
+ #[rustc_allocator]
+ #[rustc_allocator_nounwind]
+ fn __rust_alloc(size: usize, align: usize) -> *mut u8;
+ #[rustc_allocator_nounwind]
+ fn __rust_dealloc(ptr: *mut u8, size: usize, align: usize);
+ #[rustc_allocator_nounwind]
+ fn __rust_realloc(ptr: *mut u8, old_size: usize, align: usize, new_size: usize) -> *mut u8;
+ #[rustc_allocator_nounwind]
+ fn __rust_alloc_zeroed(size: usize, align: usize) -> *mut u8;
+}
+
+/// The global memory allocator.
+///
+/// This type implements the [`AllocRef`] trait by forwarding calls
+/// to the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// Note: while this type is unstable, the functionality it provides can be
+/// accessed through the [free functions in `alloc`](index.html#functions).
+///
+/// [`AllocRef`]: trait.AllocRef.html
+#[unstable(feature = "allocator_api", issue = "32838")]
+#[derive(Copy, Clone, Default, Debug)]
+pub struct Global;
+
+/// Allocate memory with the global allocator.
+///
+/// This function forwards calls to the [`GlobalAlloc::alloc`] method
+/// of the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// This function is expected to be deprecated in favor of the `alloc` method
+/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
+///
+/// # Safety
+///
+/// See [`GlobalAlloc::alloc`].
+///
+/// [`Global`]: struct.Global.html
+/// [`AllocRef`]: trait.AllocRef.html
+/// [`GlobalAlloc::alloc`]: trait.GlobalAlloc.html#tymethod.alloc
+///
+/// # Examples
+///
+/// ```
+/// use std::alloc::{alloc, dealloc, Layout};
+///
+/// unsafe {
+/// let layout = Layout::new::<u16>();
+/// let ptr = alloc(layout);
+///
+/// *(ptr as *mut u16) = 42;
+/// assert_eq!(*(ptr as *mut u16), 42);
+///
+/// dealloc(ptr, layout);
+/// }
+/// ```
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[inline]
+pub unsafe fn alloc(layout: Layout) -> *mut u8 {
+ unsafe { __rust_alloc(layout.size(), layout.align()) }
+}
+
+/// Deallocate memory with the global allocator.
+///
+/// This function forwards calls to the [`GlobalAlloc::dealloc`] method
+/// of the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// This function is expected to be deprecated in favor of the `dealloc` method
+/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
+///
+/// # Safety
+///
+/// See [`GlobalAlloc::dealloc`].
+///
+/// [`Global`]: struct.Global.html
+/// [`AllocRef`]: trait.AllocRef.html
+/// [`GlobalAlloc::dealloc`]: trait.GlobalAlloc.html#tymethod.dealloc
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[inline]
+pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
+ unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
+}
+
+/// Reallocate memory with the global allocator.
+///
+/// This function forwards calls to the [`GlobalAlloc::realloc`] method
+/// of the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// This function is expected to be deprecated in favor of the `realloc` method
+/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
+///
+/// # Safety
+///
+/// See [`GlobalAlloc::realloc`].
+///
+/// [`Global`]: struct.Global.html
+/// [`AllocRef`]: trait.AllocRef.html
+/// [`GlobalAlloc::realloc`]: trait.GlobalAlloc.html#method.realloc
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[inline]
+pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
+ unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
+}
+
+/// Allocate zero-initialized memory with the global allocator.
+///
+/// This function forwards calls to the [`GlobalAlloc::alloc_zeroed`] method
+/// of the allocator registered with the `#[global_allocator]` attribute
+/// if there is one, or the `std` crate’s default.
+///
+/// This function is expected to be deprecated in favor of the `alloc_zeroed` method
+/// of the [`Global`] type when it and the [`AllocRef`] trait become stable.
+///
+/// # Safety
+///
+/// See [`GlobalAlloc::alloc_zeroed`].
+///
+/// [`Global`]: struct.Global.html
+/// [`AllocRef`]: trait.AllocRef.html
+/// [`GlobalAlloc::alloc_zeroed`]: trait.GlobalAlloc.html#method.alloc_zeroed
+///
+/// # Examples
+///
+/// ```
+/// use std::alloc::{alloc_zeroed, dealloc, Layout};
+///
+/// unsafe {
+/// let layout = Layout::new::<u16>();
+/// let ptr = alloc_zeroed(layout);
+///
+/// assert_eq!(*(ptr as *mut u16), 0);
+///
+/// dealloc(ptr, layout);
+/// }
+/// ```
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[inline]
+pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
+ unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
+}
+
+#[unstable(feature = "allocator_api", issue = "32838")]
+unsafe impl AllocRef for Global {
+ #[inline]
+ fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+ unsafe {
+ let size = layout.size();
+ if size == 0 {
+ Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
+ } else {
+ let raw_ptr = match init {
+ AllocInit::Uninitialized => alloc(layout),
+ AllocInit::Zeroed => alloc_zeroed(layout),
+ };
+ let ptr = NonNull::new(raw_ptr).ok_or(AllocErr)?;
+ Ok(MemoryBlock { ptr, size })
+ }
+ }
+ }
+
+ #[inline]
+ unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+ if layout.size() != 0 {
+ unsafe { dealloc(ptr.as_ptr(), layout) }
+ }
+ }
+
+ #[inline]
+ unsafe fn grow(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ placement: ReallocPlacement,
+ init: AllocInit,
+ ) -> Result<MemoryBlock, AllocErr> {
+ let size = layout.size();
+ debug_assert!(
+ new_size >= size,
+ "`new_size` must be greater than or equal to `memory.size()`"
+ );
+
+ if size == new_size {
+ return Ok(MemoryBlock { ptr, size });
+ }
+
+ match placement {
+ ReallocPlacement::InPlace => Err(AllocErr),
+ ReallocPlacement::MayMove if layout.size() == 0 => {
+ let new_layout =
+ unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
+ self.alloc(new_layout, init)
+ }
+ ReallocPlacement::MayMove => {
+ // `realloc` probably checks for `new_size > size` or something similar.
+ let ptr = unsafe {
+ intrinsics::assume(new_size > size);
+ realloc(ptr.as_ptr(), layout, new_size)
+ };
+ let memory =
+ MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
+ unsafe {
+ init.init_offset(memory, size);
+ }
+ Ok(memory)
+ }
+ }
+ }
+
+ #[inline]
+ unsafe fn shrink(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ placement: ReallocPlacement,
+ ) -> Result<MemoryBlock, AllocErr> {
+ let size = layout.size();
+ debug_assert!(
+ new_size <= size,
+ "`new_size` must be smaller than or equal to `memory.size()`"
+ );
+
+ if size == new_size {
+ return Ok(MemoryBlock { ptr, size });
+ }
+
+ match placement {
+ ReallocPlacement::InPlace => Err(AllocErr),
+ ReallocPlacement::MayMove if new_size == 0 => {
+ unsafe {
+ self.dealloc(ptr, layout);
+ }
+ Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
+ }
+ ReallocPlacement::MayMove => {
+ // `realloc` probably checks for `new_size < size` or something similar.
+ let ptr = unsafe {
+ intrinsics::assume(new_size < size);
+ realloc(ptr.as_ptr(), layout, new_size)
+ };
+ Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
+ }
+ }
+ }
+}
+
+/// The allocator for unique pointers.
+// This function must not unwind. If it does, MIR codegen will fail.
+#[cfg(not(test))]
+#[lang = "exchange_malloc"]
+#[inline]
+unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
+ let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
+ match Global.alloc(layout, AllocInit::Uninitialized) {
+ Ok(memory) => memory.ptr.as_ptr(),
+ Err(_) => handle_alloc_error(layout),
+ }
+}
+
+#[cfg_attr(not(test), lang = "box_free")]
+#[inline]
+// This signature has to be the same as `Box`, otherwise an ICE will happen.
+// When an additional parameter to `Box` is added (like `A: AllocRef`), this has to be added here as
+// well.
+// For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
+// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
+pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
+ unsafe {
+ let size = size_of_val(ptr.as_ref());
+ let align = min_align_of_val(ptr.as_ref());
+ let layout = Layout::from_size_align_unchecked(size, align);
+ Global.dealloc(ptr.cast().into(), layout)
+ }
+}
+
+/// Abort on memory allocation error or failure.
+///
+/// Callers of memory allocation APIs wishing to abort computation
+/// in response to an allocation error are encouraged to call this function,
+/// rather than directly invoking `panic!` or similar.
+///
+/// The default behavior of this function is to print a message to standard error
+/// and abort the process.
+/// It can be replaced with [`set_alloc_error_hook`] and [`take_alloc_error_hook`].
+///
+/// [`set_alloc_error_hook`]: ../../std/alloc/fn.set_alloc_error_hook.html
+/// [`take_alloc_error_hook`]: ../../std/alloc/fn.take_alloc_error_hook.html
+#[stable(feature = "global_alloc", since = "1.28.0")]
+#[rustc_allocator_nounwind]
+pub fn handle_alloc_error(layout: Layout) -> ! {
+ extern "Rust" {
+ #[lang = "oom"]
+ fn oom_impl(layout: Layout) -> !;
+ }
+ unsafe { oom_impl(layout) }
+}
diff --git a/library/alloc/src/alloc/tests.rs b/library/alloc/src/alloc/tests.rs
new file mode 100644
index 00000000000..1c003983df9
--- /dev/null
+++ b/library/alloc/src/alloc/tests.rs
@@ -0,0 +1,31 @@
+use super::*;
+
+extern crate test;
+use crate::boxed::Box;
+use test::Bencher;
+
+#[test]
+fn allocate_zeroed() {
+ unsafe {
+ let layout = Layout::from_size_align(1024, 1).unwrap();
+ let memory = Global
+ .alloc(layout.clone(), AllocInit::Zeroed)
+ .unwrap_or_else(|_| handle_alloc_error(layout));
+
+ let mut i = memory.ptr.cast::<u8>().as_ptr();
+ let end = i.add(layout.size());
+ while i < end {
+ assert_eq!(*i, 0);
+ i = i.offset(1);
+ }
+ Global.dealloc(memory.ptr, layout);
+ }
+}
+
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn alloc_owned_small(b: &mut Bencher) {
+ b.iter(|| {
+ let _: Box<_> = box 10;
+ })
+}
diff --git a/library/alloc/src/borrow.rs b/library/alloc/src/borrow.rs
new file mode 100644
index 00000000000..51c233a21f1
--- /dev/null
+++ b/library/alloc/src/borrow.rs
@@ -0,0 +1,476 @@
+//! A module for working with borrowed data.
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::cmp::Ordering;
+use core::hash::{Hash, Hasher};
+use core::ops::{Add, AddAssign, Deref};
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::borrow::{Borrow, BorrowMut};
+
+use crate::fmt;
+use crate::string::String;
+
+use Cow::*;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, B: ?Sized> Borrow<B> for Cow<'a, B>
+where
+ B: ToOwned,
+ <B as ToOwned>::Owned: 'a,
+{
+ fn borrow(&self) -> &B {
+ &**self
+ }
+}
+
+/// A generalization of `Clone` to borrowed data.
+///
+/// Some types make it possible to go from borrowed to owned, usually by
+/// implementing the `Clone` trait. But `Clone` works only for going from `&T`
+/// to `T`. The `ToOwned` trait generalizes `Clone` to construct owned data
+/// from any borrow of a given type.
+#[stable(feature = "rust1", since = "1.0.0")]
+pub trait ToOwned {
+ /// The resulting type after obtaining ownership.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ type Owned: Borrow<Self>;
+
+ /// Creates owned data from borrowed data, usually by cloning.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s: &str = "a";
+ /// let ss: String = s.to_owned();
+ ///
+ /// let v: &[i32] = &[1, 2];
+ /// let vv: Vec<i32> = v.to_owned();
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[must_use = "cloning is often expensive and is not expected to have side effects"]
+ fn to_owned(&self) -> Self::Owned;
+
+ /// Uses borrowed data to replace owned data, usually by cloning.
+ ///
+ /// This is borrow-generalized version of `Clone::clone_from`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// # #![feature(toowned_clone_into)]
+ /// let mut s: String = String::new();
+ /// "hello".clone_into(&mut s);
+ ///
+ /// let mut v: Vec<i32> = Vec::new();
+ /// [1, 2][..].clone_into(&mut v);
+ /// ```
+ #[unstable(feature = "toowned_clone_into", reason = "recently added", issue = "41263")]
+ fn clone_into(&self, target: &mut Self::Owned) {
+ *target = self.to_owned();
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ToOwned for T
+where
+ T: Clone,
+{
+ type Owned = T;
+ fn to_owned(&self) -> T {
+ self.clone()
+ }
+
+ fn clone_into(&self, target: &mut T) {
+ target.clone_from(self);
+ }
+}
+
+/// A clone-on-write smart pointer.
+///
+/// The type `Cow` is a smart pointer providing clone-on-write functionality: it
+/// can enclose and provide immutable access to borrowed data, and clone the
+/// data lazily when mutation or ownership is required. The type is designed to
+/// work with general borrowed data via the `Borrow` trait.
+///
+/// `Cow` implements `Deref`, which means that you can call
+/// non-mutating methods directly on the data it encloses. If mutation
+/// is desired, `to_mut` will obtain a mutable reference to an owned
+/// value, cloning if necessary.
+///
+/// # Examples
+///
+/// ```
+/// use std::borrow::Cow;
+///
+/// fn abs_all(input: &mut Cow<[i32]>) {
+/// for i in 0..input.len() {
+/// let v = input[i];
+/// if v < 0 {
+/// // Clones into a vector if not already owned.
+/// input.to_mut()[i] = -v;
+/// }
+/// }
+/// }
+///
+/// // No clone occurs because `input` doesn't need to be mutated.
+/// let slice = [0, 1, 2];
+/// let mut input = Cow::from(&slice[..]);
+/// abs_all(&mut input);
+///
+/// // Clone occurs because `input` needs to be mutated.
+/// let slice = [-1, 0, 1];
+/// let mut input = Cow::from(&slice[..]);
+/// abs_all(&mut input);
+///
+/// // No clone occurs because `input` is already owned.
+/// let mut input = Cow::from(vec![-1, 0, 1]);
+/// abs_all(&mut input);
+/// ```
+///
+/// Another example showing how to keep `Cow` in a struct:
+///
+/// ```
+/// use std::borrow::Cow;
+///
+/// struct Items<'a, X: 'a> where [X]: ToOwned<Owned = Vec<X>> {
+/// values: Cow<'a, [X]>,
+/// }
+///
+/// impl<'a, X: Clone + 'a> Items<'a, X> where [X]: ToOwned<Owned = Vec<X>> {
+/// fn new(v: Cow<'a, [X]>) -> Self {
+/// Items { values: v }
+/// }
+/// }
+///
+/// // Creates a container from borrowed values of a slice
+/// let readonly = [1, 2];
+/// let borrowed = Items::new((&readonly[..]).into());
+/// match borrowed {
+/// Items { values: Cow::Borrowed(b) } => println!("borrowed {:?}", b),
+/// _ => panic!("expect borrowed value"),
+/// }
+///
+/// let mut clone_on_write = borrowed;
+/// // Mutates the data from slice into owned vec and pushes a new value on top
+/// clone_on_write.values.to_mut().push(3);
+/// println!("clone_on_write = {:?}", clone_on_write.values);
+///
+/// // The data was mutated. Let check it out.
+/// match clone_on_write {
+/// Items { values: Cow::Owned(_) } => println!("clone_on_write contains owned data"),
+/// _ => panic!("expect owned data"),
+/// }
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+pub enum Cow<'a, B: ?Sized + 'a>
+where
+ B: ToOwned,
+{
+ /// Borrowed data.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B),
+
+ /// Owned data.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Owned(#[stable(feature = "rust1", since = "1.0.0")] <B as ToOwned>::Owned),
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized + ToOwned> Clone for Cow<'_, B> {
+ fn clone(&self) -> Self {
+ match *self {
+ Borrowed(b) => Borrowed(b),
+ Owned(ref o) => {
+ let b: &B = o.borrow();
+ Owned(b.to_owned())
+ }
+ }
+ }
+
+ fn clone_from(&mut self, source: &Self) {
+ match (self, source) {
+ (&mut Owned(ref mut dest), &Owned(ref o)) => o.borrow().clone_into(dest),
+ (t, s) => *t = s.clone(),
+ }
+ }
+}
+
+impl<B: ?Sized + ToOwned> Cow<'_, B> {
+ /// Returns true if the data is borrowed, i.e. if `to_mut` would require additional work.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(cow_is_borrowed)]
+ /// use std::borrow::Cow;
+ ///
+ /// let cow = Cow::Borrowed("moo");
+ /// assert!(cow.is_borrowed());
+ ///
+ /// let bull: Cow<'_, str> = Cow::Owned("...moo?".to_string());
+ /// assert!(!bull.is_borrowed());
+ /// ```
+ #[unstable(feature = "cow_is_borrowed", issue = "65143")]
+ pub fn is_borrowed(&self) -> bool {
+ match *self {
+ Borrowed(_) => true,
+ Owned(_) => false,
+ }
+ }
+
+ /// Returns true if the data is owned, i.e. if `to_mut` would be a no-op.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(cow_is_borrowed)]
+ /// use std::borrow::Cow;
+ ///
+ /// let cow: Cow<'_, str> = Cow::Owned("moo".to_string());
+ /// assert!(cow.is_owned());
+ ///
+ /// let bull = Cow::Borrowed("...moo?");
+ /// assert!(!bull.is_owned());
+ /// ```
+ #[unstable(feature = "cow_is_borrowed", issue = "65143")]
+ pub fn is_owned(&self) -> bool {
+ !self.is_borrowed()
+ }
+
+ /// Acquires a mutable reference to the owned form of the data.
+ ///
+ /// Clones the data if it is not already owned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::borrow::Cow;
+ ///
+ /// let mut cow = Cow::Borrowed("foo");
+ /// cow.to_mut().make_ascii_uppercase();
+ ///
+ /// assert_eq!(
+ /// cow,
+ /// Cow::Owned(String::from("FOO")) as Cow<str>
+ /// );
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn to_mut(&mut self) -> &mut <B as ToOwned>::Owned {
+ match *self {
+ Borrowed(borrowed) => {
+ *self = Owned(borrowed.to_owned());
+ match *self {
+ Borrowed(..) => unreachable!(),
+ Owned(ref mut owned) => owned,
+ }
+ }
+ Owned(ref mut owned) => owned,
+ }
+ }
+
+ /// Extracts the owned data.
+ ///
+ /// Clones the data if it is not already owned.
+ ///
+ /// # Examples
+ ///
+ /// Calling `into_owned` on a `Cow::Borrowed` clones the underlying data
+ /// and becomes a `Cow::Owned`:
+ ///
+ /// ```
+ /// use std::borrow::Cow;
+ ///
+ /// let s = "Hello world!";
+ /// let cow = Cow::Borrowed(s);
+ ///
+ /// assert_eq!(
+ /// cow.into_owned(),
+ /// String::from(s)
+ /// );
+ /// ```
+ ///
+ /// Calling `into_owned` on a `Cow::Owned` is a no-op:
+ ///
+ /// ```
+ /// use std::borrow::Cow;
+ ///
+ /// let s = "Hello world!";
+ /// let cow: Cow<str> = Cow::Owned(String::from(s));
+ ///
+ /// assert_eq!(
+ /// cow.into_owned(),
+ /// String::from(s)
+ /// );
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn into_owned(self) -> <B as ToOwned>::Owned {
+ match self {
+ Borrowed(borrowed) => borrowed.to_owned(),
+ Owned(owned) => owned,
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized + ToOwned> Deref for Cow<'_, B> {
+ type Target = B;
+
+ fn deref(&self) -> &B {
+ match *self {
+ Borrowed(borrowed) => borrowed,
+ Owned(ref owned) => owned.borrow(),
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> Eq for Cow<'_, B> where B: Eq + ToOwned {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> Ord for Cow<'_, B>
+where
+ B: Ord + ToOwned,
+{
+ #[inline]
+ fn cmp(&self, other: &Self) -> Ordering {
+ Ord::cmp(&**self, &**other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
+where
+ B: PartialEq<C> + ToOwned,
+ C: ToOwned,
+{
+ #[inline]
+ fn eq(&self, other: &Cow<'b, C>) -> bool {
+ PartialEq::eq(&**self, &**other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>
+where
+ B: PartialOrd + ToOwned,
+{
+ #[inline]
+ fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {
+ PartialOrd::partial_cmp(&**self, &**other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> fmt::Debug for Cow<'_, B>
+where
+ B: fmt::Debug + ToOwned<Owned: fmt::Debug>,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Borrowed(ref b) => fmt::Debug::fmt(b, f),
+ Owned(ref o) => fmt::Debug::fmt(o, f),
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> fmt::Display for Cow<'_, B>
+where
+ B: fmt::Display + ToOwned<Owned: fmt::Display>,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Borrowed(ref b) => fmt::Display::fmt(b, f),
+ Owned(ref o) => fmt::Display::fmt(o, f),
+ }
+ }
+}
+
+#[stable(feature = "default", since = "1.11.0")]
+impl<B: ?Sized> Default for Cow<'_, B>
+where
+ B: ToOwned<Owned: Default>,
+{
+ /// Creates an owned Cow<'a, B> with the default value for the contained owned value.
+ fn default() -> Self {
+ Owned(<B as ToOwned>::Owned::default())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<B: ?Sized> Hash for Cow<'_, B>
+where
+ B: Hash + ToOwned,
+{
+ #[inline]
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ Hash::hash(&**self, state)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + ToOwned> AsRef<T> for Cow<'_, T> {
+ fn as_ref(&self) -> &T {
+ self
+ }
+}
+
+#[stable(feature = "cow_add", since = "1.14.0")]
+impl<'a> Add<&'a str> for Cow<'a, str> {
+ type Output = Cow<'a, str>;
+
+ #[inline]
+ fn add(mut self, rhs: &'a str) -> Self::Output {
+ self += rhs;
+ self
+ }
+}
+
+#[stable(feature = "cow_add", since = "1.14.0")]
+impl<'a> Add<Cow<'a, str>> for Cow<'a, str> {
+ type Output = Cow<'a, str>;
+
+ #[inline]
+ fn add(mut self, rhs: Cow<'a, str>) -> Self::Output {
+ self += rhs;
+ self
+ }
+}
+
+#[stable(feature = "cow_add", since = "1.14.0")]
+impl<'a> AddAssign<&'a str> for Cow<'a, str> {
+ fn add_assign(&mut self, rhs: &'a str) {
+ if self.is_empty() {
+ *self = Cow::Borrowed(rhs)
+ } else if !rhs.is_empty() {
+ if let Cow::Borrowed(lhs) = *self {
+ let mut s = String::with_capacity(lhs.len() + rhs.len());
+ s.push_str(lhs);
+ *self = Cow::Owned(s);
+ }
+ self.to_mut().push_str(rhs);
+ }
+ }
+}
+
+#[stable(feature = "cow_add", since = "1.14.0")]
+impl<'a> AddAssign<Cow<'a, str>> for Cow<'a, str> {
+ fn add_assign(&mut self, rhs: Cow<'a, str>) {
+ if self.is_empty() {
+ *self = rhs
+ } else if !rhs.is_empty() {
+ if let Cow::Borrowed(lhs) = *self {
+ let mut s = String::with_capacity(lhs.len() + rhs.len());
+ s.push_str(lhs);
+ *self = Cow::Owned(s);
+ }
+ self.to_mut().push_str(&rhs);
+ }
+ }
+}
diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs
new file mode 100644
index 00000000000..f225aa18853
--- /dev/null
+++ b/library/alloc/src/boxed.rs
@@ -0,0 +1,1200 @@
+//! A pointer type for heap allocation.
+//!
+//! [`Box<T>`], casually referred to as a 'box', provides the simplest form of
+//! heap allocation in Rust. Boxes provide ownership for this allocation, and
+//! drop their contents when they go out of scope. Boxes also ensure that they
+//! never allocate more than `isize::MAX` bytes.
+//!
+//! # Examples
+//!
+//! Move a value from the stack to the heap by creating a [`Box`]:
+//!
+//! ```
+//! let val: u8 = 5;
+//! let boxed: Box<u8> = Box::new(val);
+//! ```
+//!
+//! Move a value from a [`Box`] back to the stack by [dereferencing]:
+//!
+//! ```
+//! let boxed: Box<u8> = Box::new(5);
+//! let val: u8 = *boxed;
+//! ```
+//!
+//! Creating a recursive data structure:
+//!
+//! ```
+//! #[derive(Debug)]
+//! enum List<T> {
+//! Cons(T, Box<List<T>>),
+//! Nil,
+//! }
+//!
+//! let list: List<i32> = List::Cons(1, Box::new(List::Cons(2, Box::new(List::Nil))));
+//! println!("{:?}", list);
+//! ```
+//!
+//! This will print `Cons(1, Cons(2, Nil))`.
+//!
+//! Recursive structures must be boxed, because if the definition of `Cons`
+//! looked like this:
+//!
+//! ```compile_fail,E0072
+//! # enum List<T> {
+//! Cons(T, List<T>),
+//! # }
+//! ```
+//!
+//! It wouldn't work. This is because the size of a `List` depends on how many
+//! elements are in the list, and so we don't know how much memory to allocate
+//! for a `Cons`. By introducing a [`Box<T>`], which has a defined size, we know how
+//! big `Cons` needs to be.
+//!
+//! # Memory layout
+//!
+//! For non-zero-sized values, a [`Box`] will use the [`Global`] allocator for
+//! its allocation. It is valid to convert both ways between a [`Box`] and a
+//! raw pointer allocated with the [`Global`] allocator, given that the
+//! [`Layout`] used with the allocator is correct for the type. More precisely,
+//! a `value: *mut T` that has been allocated with the [`Global`] allocator
+//! with `Layout::for_value(&*value)` may be converted into a box using
+//! [`Box::<T>::from_raw(value)`]. Conversely, the memory backing a `value: *mut
+//! T` obtained from [`Box::<T>::into_raw`] may be deallocated using the
+//! [`Global`] allocator with [`Layout::for_value(&*value)`].
+//!
+//! So long as `T: Sized`, a `Box<T>` is guaranteed to be represented
+//! as a single pointer and is also ABI-compatible with C pointers
+//! (i.e. the C type `T*`). This means that if you have extern "C"
+//! Rust functions that will be called from C, you can define those
+//! Rust functions using `Box<T>` types, and use `T*` as corresponding
+//! type on the C side. As an example, consider this C header which
+//! declares functions that create and destroy some kind of `Foo`
+//! value:
+//!
+//! ```c
+//! /* C header */
+//!
+//! /* Returns ownership to the caller */
+//! struct Foo* foo_new(void);
+//!
+//! /* Takes ownership from the caller; no-op when invoked with NULL */
+//! void foo_delete(struct Foo*);
+//! ```
+//!
+//! These two functions might be implemented in Rust as follows. Here, the
+//! `struct Foo*` type from C is translated to `Box<Foo>`, which captures
+//! the ownership constraints. Note also that the nullable argument to
+//! `foo_delete` is represented in Rust as `Option<Box<Foo>>`, since `Box<Foo>`
+//! cannot be null.
+//!
+//! ```
+//! #[repr(C)]
+//! pub struct Foo;
+//!
+//! #[no_mangle]
+//! #[allow(improper_ctypes_definitions)]
+//! pub extern "C" fn foo_new() -> Box<Foo> {
+//! Box::new(Foo)
+//! }
+//!
+//! #[no_mangle]
+//! #[allow(improper_ctypes_definitions)]
+//! pub extern "C" fn foo_delete(_: Option<Box<Foo>>) {}
+//! ```
+//!
+//! Even though `Box<T>` has the same representation and C ABI as a C pointer,
+//! this does not mean that you can convert an arbitrary `T*` into a `Box<T>`
+//! and expect things to work. `Box<T>` values will always be fully aligned,
+//! non-null pointers. Moreover, the destructor for `Box<T>` will attempt to
+//! free the value with the global allocator. In general, the best practice
+//! is to only use `Box<T>` for pointers that originated from the global
+//! allocator.
+//!
+//! **Important.** At least at present, you should avoid using
+//! `Box<T>` types for functions that are defined in C but invoked
+//! from Rust. In those cases, you should directly mirror the C types
+//! as closely as possible. Using types like `Box<T>` where the C
+//! definition is just using `T*` can lead to undefined behavior, as
+//! described in [rust-lang/unsafe-code-guidelines#198][ucg#198].
+//!
+//! [ucg#198]: https://github.com/rust-lang/unsafe-code-guidelines/issues/198
+//! [dereferencing]: ../../std/ops/trait.Deref.html
+//! [`Box`]: struct.Box.html
+//! [`Box<T>`]: struct.Box.html
+//! [`Box::<T>::from_raw(value)`]: struct.Box.html#method.from_raw
+//! [`Box::<T>::into_raw`]: struct.Box.html#method.into_raw
+//! [`Global`]: ../alloc/struct.Global.html
+//! [`Layout`]: ../alloc/struct.Layout.html
+//! [`Layout::for_value(&*value)`]: ../alloc/struct.Layout.html#method.for_value
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::any::Any;
+use core::borrow;
+use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
+use core::fmt;
+use core::future::Future;
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator, Iterator};
+use core::marker::{Unpin, Unsize};
+use core::mem;
+use core::ops::{
+ CoerceUnsized, Deref, DerefMut, DispatchFromDyn, Generator, GeneratorState, Receiver,
+};
+use core::pin::Pin;
+use core::ptr::{self, NonNull, Unique};
+use core::task::{Context, Poll};
+
+use crate::alloc::{self, AllocInit, AllocRef, Global};
+use crate::borrow::Cow;
+use crate::raw_vec::RawVec;
+use crate::str::from_boxed_utf8_unchecked;
+use crate::vec::Vec;
+
+/// A pointer type for heap allocation.
+///
+/// See the [module-level documentation](../../std/boxed/index.html) for more.
+#[lang = "owned_box"]
+#[fundamental]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Box<T: ?Sized>(Unique<T>);
+
+impl<T> Box<T> {
+ /// Allocates memory on the heap and then places `x` into it.
+ ///
+ /// This doesn't actually allocate if `T` is zero-sized.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let five = Box::new(5);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[inline(always)]
+ pub fn new(x: T) -> Box<T> {
+ box x
+ }
+
+ /// Constructs a new box with uninitialized contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ ///
+ /// let mut five = Box::<u32>::new_uninit();
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// five.as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_uninit() -> Box<mem::MaybeUninit<T>> {
+ let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
+ let ptr = Global
+ .alloc(layout, AllocInit::Uninitialized)
+ .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+ .ptr
+ .cast();
+ unsafe { Box::from_raw(ptr.as_ptr()) }
+ }
+
+ /// Constructs a new `Box` with uninitialized contents, with the memory
+ /// being filled with `0` bytes.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
+ /// of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ ///
+ /// let zero = Box::<u32>::new_zeroed();
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0)
+ /// ```
+ ///
+ /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_zeroed() -> Box<mem::MaybeUninit<T>> {
+ let layout = alloc::Layout::new::<mem::MaybeUninit<T>>();
+ let ptr = Global
+ .alloc(layout, AllocInit::Zeroed)
+ .unwrap_or_else(|_| alloc::handle_alloc_error(layout))
+ .ptr
+ .cast();
+ unsafe { Box::from_raw(ptr.as_ptr()) }
+ }
+
+ /// Constructs a new `Pin<Box<T>>`. If `T` does not implement `Unpin`, then
+ /// `x` will be pinned in memory and unable to be moved.
+ #[stable(feature = "pin", since = "1.33.0")]
+ #[inline(always)]
+ pub fn pin(x: T) -> Pin<Box<T>> {
+ (box x).into()
+ }
+
+ /// Converts a `Box<T>` into a `Box<[T]>`
+ ///
+ /// This conversion does not allocate on the heap and happens in place.
+ ///
+ #[unstable(feature = "box_into_boxed_slice", issue = "71582")]
+ pub fn into_boxed_slice(boxed: Box<T>) -> Box<[T]> {
+ // *mut T and *mut [T; 1] have the same size and alignment
+ unsafe { Box::from_raw(Box::into_raw(boxed) as *mut [T; 1]) }
+ }
+}
+
+impl<T> Box<[T]> {
+ /// Constructs a new boxed slice with uninitialized contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ ///
+ /// let mut values = Box::<[u32]>::new_uninit_slice(3);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// values[0].as_mut_ptr().write(1);
+ /// values[1].as_mut_ptr().write(2);
+ /// values[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_uninit_slice(len: usize) -> Box<[mem::MaybeUninit<T>]> {
+ // Allocate room for `len` elements via `RawVec`, then hand the whole
+ // (still uninitialized) allocation over as a boxed slice of length `len`.
+ unsafe { RawVec::with_capacity(len).into_box(len) }
+ }
+}
+
+impl<T> Box<mem::MaybeUninit<T>> {
+ /// Converts to `Box<T>`.
+ ///
+ /// # Safety
+ ///
+ /// As with [`MaybeUninit::assume_init`],
+ /// it is up to the caller to guarantee that the value
+ /// really is in an initialized state.
+ /// Calling this when the content is not yet fully initialized
+ /// causes immediate undefined behavior.
+ ///
+ /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ ///
+ /// let mut five = Box::<u32>::new_uninit();
+ ///
+ /// let five: Box<u32> = unsafe {
+ /// // Deferred initialization:
+ /// five.as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub unsafe fn assume_init(self) -> Box<T> {
+ // SAFETY: `MaybeUninit<T>` has the same layout as `T`, so the pointer
+ // cast is valid; the caller guarantees the value is initialized.
+ unsafe { Box::from_raw(Box::into_raw(self) as *mut T) }
+ }
+}
+
+impl<T> Box<[mem::MaybeUninit<T>]> {
+ /// Converts to `Box<[T]>`.
+ ///
+ /// # Safety
+ ///
+ /// As with [`MaybeUninit::assume_init`],
+ /// it is up to the caller to guarantee that the values
+ /// really are in an initialized state.
+ /// Calling this when the content is not yet fully initialized
+ /// causes immediate undefined behavior.
+ ///
+ /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ ///
+ /// let mut values = Box::<[u32]>::new_uninit_slice(3);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// values[0].as_mut_ptr().write(1);
+ /// values[1].as_mut_ptr().write(2);
+ /// values[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub unsafe fn assume_init(self) -> Box<[T]> {
+ // SAFETY: `[MaybeUninit<T>]` has the same layout as `[T]` (same length);
+ // the caller guarantees every element is initialized.
+ unsafe { Box::from_raw(Box::into_raw(self) as *mut [T]) }
+ }
+}
+
+impl<T: ?Sized> Box<T> {
+ /// Constructs a box from a raw pointer.
+ ///
+ /// After calling this function, the raw pointer is owned by the
+ /// resulting `Box`. Specifically, the `Box` destructor will call
+ /// the destructor of `T` and free the allocated memory. For this
+ /// to be safe, the memory must have been allocated in accordance
+ /// with the [memory layout] used by `Box`.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because improper use may lead to
+ /// memory problems. For example, a double-free may occur if the
+ /// function is called twice on the same raw pointer.
+ ///
+ /// # Examples
+ /// Recreate a `Box` which was previously converted to a raw pointer
+ /// using [`Box::into_raw`]:
+ /// ```
+ /// let x = Box::new(5);
+ /// let ptr = Box::into_raw(x);
+ /// let x = unsafe { Box::from_raw(ptr) };
+ /// ```
+ /// Manually create a `Box` from scratch by using the global allocator:
+ /// ```
+ /// use std::alloc::{alloc, Layout};
+ ///
+ /// unsafe {
+ /// let ptr = alloc(Layout::new::<i32>()) as *mut i32;
+ /// // In general .write is required to avoid attempting to destruct
+ /// // the (uninitialized) previous contents of `ptr`, though for this
+ /// // simple example `*ptr = 5` would have worked as well.
+ /// ptr.write(5);
+ /// let x = Box::from_raw(ptr);
+ /// }
+ /// ```
+ ///
+ /// [memory layout]: index.html#memory-layout
+ /// [`Layout`]: ../alloc/struct.Layout.html
+ /// [`Box::into_raw`]: struct.Box.html#method.into_raw
+ #[stable(feature = "box_raw", since = "1.4.0")]
+ #[inline]
+ pub unsafe fn from_raw(raw: *mut T) -> Self {
+ // SAFETY: the caller guarantees `raw` is non-null and points to a valid,
+ // Box-compatible allocation, satisfying `Unique::new_unchecked`.
+ Box(unsafe { Unique::new_unchecked(raw) })
+ }
+
+ /// Consumes the `Box`, returning a wrapped raw pointer.
+ ///
+ /// The pointer will be properly aligned and non-null.
+ ///
+ /// After calling this function, the caller is responsible for the
+ /// memory previously managed by the `Box`. In particular, the
+ /// caller should properly destroy `T` and release the memory, taking
+ /// into account the [memory layout] used by `Box`. The easiest way to
+ /// do this is to convert the raw pointer back into a `Box` with the
+ /// [`Box::from_raw`] function, allowing the `Box` destructor to perform
+ /// the cleanup.
+ ///
+ /// Note: this is an associated function, which means that you have
+ /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This
+ /// is so that there is no conflict with a method on the inner type.
+ ///
+ /// # Examples
+ /// Converting the raw pointer back into a `Box` with [`Box::from_raw`]
+ /// for automatic cleanup:
+ /// ```
+ /// let x = Box::new(String::from("Hello"));
+ /// let ptr = Box::into_raw(x);
+ /// let x = unsafe { Box::from_raw(ptr) };
+ /// ```
+ /// Manual cleanup by explicitly running the destructor and deallocating
+ /// the memory:
+ /// ```
+ /// use std::alloc::{dealloc, Layout};
+ /// use std::ptr;
+ ///
+ /// let x = Box::new(String::from("Hello"));
+ /// let p = Box::into_raw(x);
+ /// unsafe {
+ /// ptr::drop_in_place(p);
+ /// dealloc(p as *mut u8, Layout::new::<String>());
+ /// }
+ /// ```
+ ///
+ /// [memory layout]: index.html#memory-layout
+ /// [`Box::from_raw`]: struct.Box.html#method.from_raw
+ #[stable(feature = "box_raw", since = "1.4.0")]
+ #[inline]
+ pub fn into_raw(b: Box<T>) -> *mut T {
+ // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
+ // raw pointer for the type system. Turning it directly into a raw pointer would not be
+ // recognized as "releasing" the unique pointer to permit aliased raw accesses,
+ // so all raw pointer methods go through `leak` which creates a (unique)
+ // mutable reference. Turning *that* to a raw pointer behaves correctly.
+ Box::leak(b) as *mut T
+ }
+
+ /// Consumes the `Box`, returning the wrapped pointer as `NonNull<T>`.
+ ///
+ /// After calling this function, the caller is responsible for the
+ /// memory previously managed by the `Box`. In particular, the
+ /// caller should properly destroy `T` and release the memory. The
+ /// easiest way to do so is to convert the `NonNull<T>` pointer
+ /// into a raw pointer and back into a `Box` with the [`Box::from_raw`]
+ /// function.
+ ///
+ /// Note: this is an associated function, which means that you have
+ /// to call it as `Box::into_raw_non_null(b)`
+ /// instead of `b.into_raw_non_null()`. This
+ /// is so that there is no conflict with a method on the inner type.
+ ///
+ /// [`Box::from_raw`]: struct.Box.html#method.from_raw
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(box_into_raw_non_null)]
+ /// #![allow(deprecated)]
+ ///
+ /// let x = Box::new(5);
+ /// let ptr = Box::into_raw_non_null(x);
+ ///
+ /// // Clean up the memory by converting the NonNull pointer back
+ /// // into a Box and letting the Box be dropped.
+ /// let x = unsafe { Box::from_raw(ptr.as_ptr()) };
+ /// ```
+ #[unstable(feature = "box_into_raw_non_null", issue = "47336")]
+ #[rustc_deprecated(
+ since = "1.44.0",
+ reason = "use `Box::leak(b).into()` or `NonNull::from(Box::leak(b))` instead"
+ )]
+ #[inline]
+ pub fn into_raw_non_null(b: Box<T>) -> NonNull<T> {
+ // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
+ // raw pointer for the type system. Turning it directly into a raw pointer would not be
+ // recognized as "releasing" the unique pointer to permit aliased raw accesses,
+ // so all raw pointer methods go through `leak` which creates a (unique)
+ // mutable reference. Turning *that* to a raw pointer behaves correctly.
+ Box::leak(b).into()
+ }
+
+ #[unstable(
+ feature = "ptr_internals",
+ issue = "none",
+ reason = "use `Box::leak(b).into()` or `Unique::from(Box::leak(b))` instead"
+ )]
+ #[inline]
+ #[doc(hidden)]
+ pub fn into_unique(b: Box<T>) -> Unique<T> {
+ // Box is recognized as a "unique pointer" by Stacked Borrows, but internally it is a
+ // raw pointer for the type system. Turning it directly into a raw pointer would not be
+ // recognized as "releasing" the unique pointer to permit aliased raw accesses,
+ // so all raw pointer methods go through `leak` which creates a (unique)
+ // mutable reference. Turning *that* to a raw pointer behaves correctly.
+ Box::leak(b).into()
+ }
+
+ /// Consumes and leaks the `Box`, returning a mutable reference,
+ /// `&'a mut T`. Note that the type `T` must outlive the chosen lifetime
+ /// `'a`. If the type has only static references, or none at all, then this
+ /// may be chosen to be `'static`.
+ ///
+ /// This function is mainly useful for data that lives for the remainder of
+ /// the program's life. Dropping the returned reference will cause a memory
+ /// leak. If this is not acceptable, the reference should first be wrapped
+ /// with the [`Box::from_raw`] function producing a `Box`. This `Box` can
+ /// then be dropped which will properly destroy `T` and release the
+ /// allocated memory.
+ ///
+ /// Note: this is an associated function, which means that you have
+ /// to call it as `Box::leak(b)` instead of `b.leak()`. This
+ /// is so that there is no conflict with a method on the inner type.
+ ///
+ /// [`Box::from_raw`]: struct.Box.html#method.from_raw
+ ///
+ /// # Examples
+ ///
+ /// Simple usage:
+ ///
+ /// ```
+ /// let x = Box::new(41);
+ /// let static_ref: &'static mut usize = Box::leak(x);
+ /// *static_ref += 1;
+ /// assert_eq!(*static_ref, 42);
+ /// ```
+ ///
+ /// Unsized data:
+ ///
+ /// ```
+ /// let x = vec![1, 2, 3].into_boxed_slice();
+ /// let static_ref = Box::leak(x);
+ /// static_ref[0] = 4;
+ /// assert_eq!(*static_ref, [4, 2, 3]);
+ /// ```
+ #[stable(feature = "box_leak", since = "1.26.0")]
+ #[inline]
+ pub fn leak<'a>(b: Box<T>) -> &'a mut T
+ where
+ T: 'a, // Technically not needed, but kept to be explicit.
+ {
+ // `ManuallyDrop` suppresses the `Box` destructor, so the allocation is
+ // never freed; handing out a `&'a mut T` to it is therefore sound.
+ unsafe { &mut *mem::ManuallyDrop::new(b).0.as_ptr() }
+ }
+
+ /// Converts a `Box<T>` into a `Pin<Box<T>>`
+ ///
+ /// This conversion does not allocate on the heap and happens in place.
+ ///
+ /// This is also available via [`From`].
+ #[unstable(feature = "box_into_pin", issue = "62370")]
+ pub fn into_pin(boxed: Box<T>) -> Pin<Box<T>> {
+ // It's not possible to move or replace the insides of a `Pin<Box<T>>`
+ // when `T: !Unpin`, so it's safe to pin it directly without any
+ // additional requirements.
+ unsafe { Pin::new_unchecked(boxed) }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// The destructor body is intentionally empty: as the FIXME below notes, the
+// compiler itself emits the code that drops `T` and frees the allocation.
+// `#[may_dangle]` relaxes drop-check so `T` may contain expired references.
+unsafe impl<#[may_dangle] T: ?Sized> Drop for Box<T> {
+ fn drop(&mut self) {
+ // FIXME: Do nothing, drop is currently performed by compiler.
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Default> Default for Box<T> {
+ /// Creates a `Box<T>`, with the `Default` value for T.
+ fn default() -> Box<T> {
+ // `box` places `T::default()` directly on the heap.
+ box Default::default()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for Box<[T]> {
+ fn default() -> Box<[T]> {
+ // A boxed zero-length array unsizes to an empty boxed slice.
+ Box::<[T; 0]>::new([])
+ }
+}
+
+#[stable(feature = "default_box_extra", since = "1.17.0")]
+impl Default for Box<str> {
+ fn default() -> Box<str> {
+ // SAFETY: the default `Box<[u8]>` is empty, and an empty byte
+ // sequence is valid UTF-8.
+ unsafe { from_boxed_utf8_unchecked(Default::default()) }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for Box<T> {
+ /// Returns a new box with a `clone()` of this box's contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let x = Box::new(5);
+ /// let y = x.clone();
+ ///
+ /// // The value is the same
+ /// assert_eq!(x, y);
+ ///
+ /// // But they are unique objects
+ /// assert_ne!(&*x as *const i32, &*y as *const i32);
+ /// ```
+ #[rustfmt::skip]
+ #[inline]
+ fn clone(&self) -> Box<T> {
+ // Clone the contents straight into a fresh heap allocation
+ // (`box` placement expression; rustfmt is skipped above to keep it).
+ box { (**self).clone() }
+ }
+
+ /// Copies `source`'s contents into `self` without creating a new allocation.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let x = Box::new(5);
+ /// let mut y = Box::new(10);
+ /// let yp: *const i32 = &*y;
+ ///
+ /// y.clone_from(&x);
+ ///
+ /// // The value is the same
+ /// assert_eq!(x, y);
+ ///
+ /// // And no allocation occurred
+ /// assert_eq!(yp, &*y);
+ /// ```
+ #[inline]
+ fn clone_from(&mut self, source: &Box<T>) {
+ // Delegate to the inner value's `clone_from`, reusing this allocation.
+ (**self).clone_from(&(**source));
+ }
+}
+
+#[stable(feature = "box_slice_clone", since = "1.3.0")]
+impl Clone for Box<str> {
+ fn clone(&self) -> Self {
+ // this makes a copy of the data
+ let buf: Box<[u8]> = self.as_bytes().into();
+ // SAFETY: `buf` is a byte-for-byte copy of `self`, which is valid UTF-8.
+ unsafe { from_boxed_utf8_unchecked(buf) }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Boxes compare by their contents, not by pointer identity.
+impl<T: ?Sized + PartialEq> PartialEq for Box<T> {
+ #[inline]
+ fn eq(&self, other: &Box<T>) -> bool {
+ PartialEq::eq(&**self, &**other)
+ }
+ #[inline]
+ fn ne(&self, other: &Box<T>) -> bool {
+ PartialEq::ne(&**self, &**other)
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+// Ordering delegates to the pointed-to values; every method is forwarded
+// explicitly so the inner type's (possibly specialized) impls are used.
+impl<T: ?Sized + PartialOrd> PartialOrd for Box<T> {
+ #[inline]
+ fn partial_cmp(&self, other: &Box<T>) -> Option<Ordering> {
+ PartialOrd::partial_cmp(&**self, &**other)
+ }
+ #[inline]
+ fn lt(&self, other: &Box<T>) -> bool {
+ PartialOrd::lt(&**self, &**other)
+ }
+ #[inline]
+ fn le(&self, other: &Box<T>) -> bool {
+ PartialOrd::le(&**self, &**other)
+ }
+ #[inline]
+ fn ge(&self, other: &Box<T>) -> bool {
+ PartialOrd::ge(&**self, &**other)
+ }
+ #[inline]
+ fn gt(&self, other: &Box<T>) -> bool {
+ PartialOrd::gt(&**self, &**other)
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+// Total ordering by contents.
+impl<T: ?Sized + Ord> Ord for Box<T> {
+ #[inline]
+ fn cmp(&self, other: &Box<T>) -> Ordering {
+ Ord::cmp(&**self, &**other)
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Eq> Eq for Box<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Hashes the contents, consistent with the contents-based `Eq` above.
+impl<T: ?Sized + Hash> Hash for Box<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ (**self).hash(state);
+ }
+}
+
+#[stable(feature = "indirect_hasher_impl", since = "1.22.0")]
+// Forwards every `Hasher` method to the boxed hasher, so a boxed (possibly
+// trait-object) hasher can be used anywhere a `Hasher` is expected.
+impl<T: ?Sized + Hasher> Hasher for Box<T> {
+ fn finish(&self) -> u64 {
+ (**self).finish()
+ }
+ fn write(&mut self, bytes: &[u8]) {
+ (**self).write(bytes)
+ }
+ fn write_u8(&mut self, i: u8) {
+ (**self).write_u8(i)
+ }
+ fn write_u16(&mut self, i: u16) {
+ (**self).write_u16(i)
+ }
+ fn write_u32(&mut self, i: u32) {
+ (**self).write_u32(i)
+ }
+ fn write_u64(&mut self, i: u64) {
+ (**self).write_u64(i)
+ }
+ fn write_u128(&mut self, i: u128) {
+ (**self).write_u128(i)
+ }
+ fn write_usize(&mut self, i: usize) {
+ (**self).write_usize(i)
+ }
+ fn write_i8(&mut self, i: i8) {
+ (**self).write_i8(i)
+ }
+ fn write_i16(&mut self, i: i16) {
+ (**self).write_i16(i)
+ }
+ fn write_i32(&mut self, i: i32) {
+ (**self).write_i32(i)
+ }
+ fn write_i64(&mut self, i: i64) {
+ (**self).write_i64(i)
+ }
+ fn write_i128(&mut self, i: i128) {
+ (**self).write_i128(i)
+ }
+ fn write_isize(&mut self, i: isize) {
+ (**self).write_isize(i)
+ }
+}
+
+#[stable(feature = "from_for_ptrs", since = "1.6.0")]
+impl<T> From<T> for Box<T> {
+ /// Converts a generic type `T` into a `Box<T>`
+ ///
+ /// The conversion allocates on the heap and moves `t`
+ /// from the stack into it. Equivalent to calling [`Box::new`].
+ ///
+ /// # Examples
+ /// ```rust
+ /// let x = 5;
+ /// let boxed = Box::new(5);
+ ///
+ /// assert_eq!(Box::from(x), boxed);
+ /// ```
+ fn from(t: T) -> Self {
+ Box::new(t)
+ }
+}
+
+#[stable(feature = "pin", since = "1.33.0")]
+impl<T: ?Sized> From<Box<T>> for Pin<Box<T>> {
+ /// Converts a `Box<T>` into a `Pin<Box<T>>`
+ ///
+ /// This conversion does not allocate on the heap and happens in place.
+ fn from(boxed: Box<T>) -> Self {
+ // Delegates to `Box::into_pin`.
+ Box::into_pin(boxed)
+ }
+}
+
+#[stable(feature = "box_from_slice", since = "1.17.0")]
+impl<T: Copy> From<&[T]> for Box<[T]> {
+ /// Converts a `&[T]` into a `Box<[T]>`
+ ///
+ /// This conversion allocates on the heap
+ /// and performs a copy of `slice`.
+ ///
+ /// # Examples
+ /// ```rust
+ /// // create a &[u8] which will be used to create a Box<[u8]>
+ /// let slice: &[u8] = &[104, 101, 108, 108, 111];
+ /// let boxed_slice: Box<[u8]> = Box::from(slice);
+ ///
+ /// println!("{:?}", boxed_slice);
+ /// ```
+ fn from(slice: &[T]) -> Box<[T]> {
+ let len = slice.len();
+ let buf = RawVec::with_capacity(len);
+ // SAFETY: `buf` was allocated with capacity `len`, and `T: Copy`
+ // guarantees a bitwise copy fully initializes the elements, so
+ // `assume_init` on the resulting boxed slice is sound.
+ unsafe {
+ ptr::copy_nonoverlapping(slice.as_ptr(), buf.ptr(), len);
+ buf.into_box(slice.len()).assume_init()
+ }
+ }
+}
+
+#[stable(feature = "box_from_cow", since = "1.45.0")]
+impl<T: Copy> From<Cow<'_, [T]>> for Box<[T]> {
+ #[inline]
+ fn from(cow: Cow<'_, [T]>) -> Box<[T]> {
+ // Both arms delegate to `Box::from`: one takes the borrowed slice,
+ // the other the owned `Vec`.
+ match cow {
+ Cow::Borrowed(slice) => Box::from(slice),
+ Cow::Owned(slice) => Box::from(slice),
+ }
+ }
+}
+
+#[stable(feature = "box_from_slice", since = "1.17.0")]
+impl From<&str> for Box<str> {
+ /// Converts a `&str` into a `Box<str>`
+ ///
+ /// This conversion allocates on the heap
+ /// and performs a copy of `s`.
+ ///
+ /// # Examples
+ /// ```rust
+ /// let boxed: Box<str> = Box::from("hello");
+ /// println!("{}", boxed);
+ /// ```
+ #[inline]
+ fn from(s: &str) -> Box<str> {
+ // SAFETY: the bytes come from a valid `&str`, so they are valid UTF-8.
+ unsafe { from_boxed_utf8_unchecked(Box::from(s.as_bytes())) }
+ }
+}
+
+#[stable(feature = "box_from_cow", since = "1.45.0")]
+impl From<Cow<'_, str>> for Box<str> {
+ #[inline]
+ fn from(cow: Cow<'_, str>) -> Box<str> {
+ // Both arms delegate to `Box::from`: one for `&str`, one for `String`.
+ match cow {
+ Cow::Borrowed(s) => Box::from(s),
+ Cow::Owned(s) => Box::from(s),
+ }
+ }
+}
+
+#[stable(feature = "boxed_str_conv", since = "1.19.0")]
+impl From<Box<str>> for Box<[u8]> {
+ /// Converts a `Box<str>` into a `Box<[u8]>`
+ ///
+ /// This conversion does not allocate on the heap and happens in place.
+ ///
+ /// # Examples
+ /// ```rust
+ /// // create a Box<str> which will be used to create a Box<[u8]>
+ /// let boxed: Box<str> = Box::from("hello");
+ /// let boxed_str: Box<[u8]> = Box::from(boxed);
+ ///
+ /// // create a &[u8] which will be used to create a Box<[u8]>
+ /// let slice: &[u8] = &[104, 101, 108, 108, 111];
+ /// let boxed_slice = Box::from(slice);
+ ///
+ /// assert_eq!(boxed_slice, boxed_str);
+ /// ```
+ #[inline]
+ fn from(s: Box<str>) -> Self {
+ // SAFETY: `str` has the same layout as `[u8]`, so the pointer cast is valid.
+ unsafe { Box::from_raw(Box::into_raw(s) as *mut [u8]) }
+ }
+}
+
+#[stable(feature = "box_from_array", since = "1.45.0")]
+impl<T, const N: usize> From<[T; N]> for Box<[T]> {
+ /// Converts a `[T; N]` into a `Box<[T]>`
+ ///
+ /// This conversion moves the array to newly heap-allocated memory.
+ ///
+ /// # Examples
+ /// ```rust
+ /// let boxed: Box<[u8]> = Box::from([4, 2]);
+ /// println!("{:?}", boxed);
+ /// ```
+ fn from(array: [T; N]) -> Box<[T]> {
+ // `box` moves the array to the heap; `Box<[T; N]>` then unsizes to `Box<[T]>`.
+ box array
+ }
+}
+
+#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
+// Fallible conversion from a boxed slice to a boxed array: succeeds only if
+// the lengths match; otherwise the original boxed slice is returned unchanged.
+impl<T, const N: usize> TryFrom<Box<[T]>> for Box<[T; N]> {
+ type Error = Box<[T]>;
+
+ fn try_from(boxed_slice: Box<[T]>) -> Result<Self, Self::Error> {
+ if boxed_slice.len() == N {
+ // SAFETY: length was checked to be exactly N, so reinterpreting
+ // the allocation as `[T; N]` is sound.
+ Ok(unsafe { Box::from_raw(Box::into_raw(boxed_slice) as *mut [T; N]) })
+ } else {
+ Err(boxed_slice)
+ }
+ }
+}
+
+impl Box<dyn Any> {
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ /// Attempt to downcast the box to a concrete type.
+ ///
+ /// On failure, the original box is handed back unchanged in the `Err`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::any::Any;
+ ///
+ /// fn print_if_string(value: Box<dyn Any>) {
+ /// if let Ok(string) = value.downcast::<String>() {
+ /// println!("String ({}): {}", string.len(), string);
+ /// }
+ /// }
+ ///
+ /// let my_string = "Hello World".to_string();
+ /// print_if_string(Box::new(my_string));
+ /// print_if_string(Box::new(0i8));
+ /// ```
+ pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<dyn Any>> {
+ if self.is::<T>() {
+ // SAFETY: `is::<T>()` just confirmed the erased type really is `T`,
+ // so the pointer cast recovers the original concrete type.
+ unsafe {
+ let raw: *mut dyn Any = Box::into_raw(self);
+ Ok(Box::from_raw(raw as *mut T))
+ }
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl Box<dyn Any + Send> {
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ /// Attempt to downcast the box to a concrete type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::any::Any;
+ ///
+ /// fn print_if_string(value: Box<dyn Any + Send>) {
+ /// if let Ok(string) = value.downcast::<String>() {
+ /// println!("String ({}): {}", string.len(), string);
+ /// }
+ /// }
+ ///
+ /// let my_string = "Hello World".to_string();
+ /// print_if_string(Box::new(my_string));
+ /// print_if_string(Box::new(0i8));
+ /// ```
+ pub fn downcast<T: Any>(self) -> Result<Box<T>, Box<dyn Any + Send>> {
+ // Delegate to the `Box<dyn Any>` downcast above.
+ <Box<dyn Any>>::downcast(self).map_err(|s| unsafe {
+ // reapply the Send marker
+ // SAFETY: `s` was `dyn Any + Send` on entry, so restoring the
+ // `Send` bound on the failed-downcast box is sound.
+ Box::from_raw(Box::into_raw(s) as *mut (dyn Any + Send))
+ })
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Formats the boxed value, not the pointer.
+impl<T: fmt::Display + ?Sized> fmt::Display for Box<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Debug-formats the boxed value, not the pointer.
+impl<T: fmt::Debug + ?Sized> fmt::Debug for Box<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> fmt::Pointer for Box<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // It's not possible to extract the inner Unique directly from the Box,
+ // instead we cast it to a *const which aliases the Unique
+ let ptr: *const T = &**self;
+ fmt::Pointer::fmt(&ptr, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// `*box` yields the heap value, making Box a transparent smart pointer.
+impl<T: ?Sized> Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ &**self
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Mutable counterpart of `Deref`: Box owns its contents, so `&mut Box<T>`
+// can hand out `&mut T`.
+impl<T: ?Sized> DerefMut for Box<T> {
+ fn deref_mut(&mut self) -> &mut T {
+ &mut **self
+ }
+}
+
+#[unstable(feature = "receiver_trait", issue = "none")]
+// Marks `Box<T>` as a valid method-receiver (`self`) type.
+impl<T: ?Sized> Receiver for Box<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// A boxed iterator is itself an iterator; calls are forwarded to the inner one.
+impl<I: Iterator + ?Sized> Iterator for Box<I> {
+ type Item = I::Item;
+ fn next(&mut self) -> Option<I::Item> {
+ (**self).next()
+ }
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (**self).size_hint()
+ }
+ fn nth(&mut self, n: usize) -> Option<I::Item> {
+ (**self).nth(n)
+ }
+ fn last(self) -> Option<I::Item> {
+ // `last` takes `self` by value, which cannot be forwarded through
+ // `?Sized` directly; the private `BoxIter` helper trait (defined in
+ // this file) provides a specializable implementation.
+ BoxIter::last(self)
+ }
+}
+
+// Private helper trait used to specialize `Iterator::last` for `Box<I>`:
+// the default impl folds, while sized `I` uses its own `last`.
+trait BoxIter {
+ type Item;
+ fn last(self) -> Option<Self::Item>;
+}
+
+impl<I: Iterator + ?Sized> BoxIter for Box<I> {
+ type Item = I::Item;
+ // `default fn`: specialization hook, overridden for sized `I` below.
+ default fn last(self) -> Option<I::Item> {
+ #[inline]
+ fn some<T>(_: Option<T>, x: T) -> Option<T> {
+ Some(x)
+ }
+
+ // Fold to the final element; avoids needing `I: Sized`.
+ self.fold(None, some)
+ }
+}
+
+/// Specialization for sized `I`s that uses `I`s implementation of `last()`
+/// instead of the default.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: Iterator> BoxIter for Box<I> {
+ fn last(self) -> Option<I::Item> {
+ // `I: Sized` here, so the iterator can be moved out of the box.
+ (*self).last()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Forwarded double-ended iteration.
+impl<I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for Box<I> {
+ fn next_back(&mut self) -> Option<I::Item> {
+ (**self).next_back()
+ }
+ fn nth_back(&mut self, n: usize) -> Option<I::Item> {
+ (**self).nth_back(n)
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+// Forwarded exact-size information.
+impl<I: ExactSizeIterator + ?Sized> ExactSizeIterator for Box<I> {
+ fn len(&self) -> usize {
+ (**self).len()
+ }
+ fn is_empty(&self) -> bool {
+ (**self).is_empty()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// A boxed fused iterator is still fused (marker trait, no methods).
+impl<I: FusedIterator + ?Sized> FusedIterator for Box<I> {}
+
+#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
+// Makes `Box<F>` callable: `call_once` moves the closure out of the box
+// (`*self`) before invoking it.
+impl<A, F: FnOnce<A> + ?Sized> FnOnce<A> for Box<F> {
+ type Output = <F as FnOnce<A>>::Output;
+
+ extern "rust-call" fn call_once(self, args: A) -> Self::Output {
+ <F as FnOnce<A>>::call_once(*self, args)
+ }
+}
+
+#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
+// `call_mut` forwards through the box without moving the closure.
+impl<A, F: FnMut<A> + ?Sized> FnMut<A> for Box<F> {
+ extern "rust-call" fn call_mut(&mut self, args: A) -> Self::Output {
+ <F as FnMut<A>>::call_mut(self, args)
+ }
+}
+
+#[stable(feature = "boxed_closure_impls", since = "1.35.0")]
+// `call` forwards through the box by shared reference.
+impl<A, F: Fn<A> + ?Sized> Fn<A> for Box<F> {
+ extern "rust-call" fn call(&self, args: A) -> Self::Output {
+ <F as Fn<A>>::call(self, args)
+ }
+}
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+// Enables unsizing coercions such as `Box<[T; N]> -> Box<[T]>` and
+// `Box<Concrete> -> Box<dyn Trait>`.
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Box<U>> for Box<T> {}
+
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+// Allows `Box<Self>` receivers to participate in dynamic dispatch.
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Box<U>> for Box<T> {}
+
+#[stable(feature = "boxed_slice_from_iter", since = "1.32.0")]
+// Collects into a `Vec` first, then shrinks it into an exact-size boxed slice.
+impl<A> FromIterator<A> for Box<[A]> {
+ fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> Self {
+ iter.into_iter().collect::<Vec<_>>().into_boxed_slice()
+ }
+}
+
+#[stable(feature = "box_slice_clone", since = "1.3.0")]
+impl<T: Clone> Clone for Box<[T]> {
+ fn clone(&self) -> Self {
+ // Clone into a Vec, then convert to an exact-size boxed slice.
+ self.to_vec().into_boxed_slice()
+ }
+
+ fn clone_from(&mut self, other: &Self) {
+ // Reuse the existing allocation when lengths match;
+ // otherwise fall back to a full clone.
+ if self.len() == other.len() {
+ self.clone_from_slice(&other);
+ } else {
+ *self = other.clone();
+ }
+ }
+}
+
+#[stable(feature = "box_borrow", since = "1.1.0")]
+// A `Box<T>` can be borrowed as its contents (e.g. for map lookups by `&T`).
+impl<T: ?Sized> borrow::Borrow<T> for Box<T> {
+ fn borrow(&self) -> &T {
+ &**self
+ }
+}
+
+#[stable(feature = "box_borrow", since = "1.1.0")]
+// Mutable counterpart of `Borrow`.
+impl<T: ?Sized> borrow::BorrowMut<T> for Box<T> {
+ fn borrow_mut(&mut self) -> &mut T {
+ &mut **self
+ }
+}
+
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+// Cheap reference conversion to the contents.
+impl<T: ?Sized> AsRef<T> for Box<T> {
+ fn as_ref(&self) -> &T {
+ &**self
+ }
+}
+
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+// Mutable counterpart of `AsRef`.
+impl<T: ?Sized> AsMut<T> for Box<T> {
+ fn as_mut(&mut self) -> &mut T {
+ &mut **self
+ }
+}
+
+/* Nota bene
+ *
+ * We could have chosen not to add this impl, and instead have written a
+ * function of Pin<Box<T>> to Pin<T>. Such a function would not be sound,
+ * because Box<T> implements Unpin even when T does not, as a result of
+ * this impl.
+ *
+ * We chose this API instead of the alternative for a few reasons:
+ * - Logically, it is helpful to understand pinning in regard to the
+ * memory region being pointed to. For this reason none of the
+ * standard library pointer types support projecting through a pin
+ * (Box<T> is the only pointer type in std for which this would be
+ * safe.)
+ * - It is in practice very useful to have Box<T> be unconditionally
+ * Unpin because of trait objects, for which the structural auto
+ * trait functionality does not apply (e.g., Box<dyn Foo> would
+ * otherwise not be Unpin).
+ *
+ * Another type with the same semantics as Box but only a conditional
+ * implementation of `Unpin` (where `T: Unpin`) would be valid/safe, and
+ * could have a method to project a Pin<T> from it.
+ */
+#[stable(feature = "pin", since = "1.33.0")]
+// Unconditional: moving the Box moves only the pointer, never the pointee.
+impl<T: ?Sized> Unpin for Box<T> {}
+
+#[unstable(feature = "generator_trait", issue = "43122")]
+// `G: Unpin` lets us create the `Pin<&mut G>` on the fly with `Pin::new`.
+impl<G: ?Sized + Generator<R> + Unpin, R> Generator<R> for Box<G> {
+ type Yield = G::Yield;
+ type Return = G::Return;
+
+ fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
+ G::resume(Pin::new(&mut *self), arg)
+ }
+}
+
+#[unstable(feature = "generator_trait", issue = "43122")]
+// No `Unpin` bound needed here: the pinned box projects to `Pin<&mut G>`
+// via `as_mut`.
+impl<G: ?Sized + Generator<R>, R> Generator<R> for Pin<Box<G>> {
+ type Yield = G::Yield;
+ type Return = G::Return;
+
+ fn resume(mut self: Pin<&mut Self>, arg: R) -> GeneratorState<Self::Yield, Self::Return> {
+ G::resume((*self).as_mut(), arg)
+ }
+}
+
+#[stable(feature = "futures_api", since = "1.36.0")]
+// A boxed `Unpin` future is itself a future; polling forwards to the inner one.
+impl<F: ?Sized + Future + Unpin> Future for Box<F> {
+ type Output = F::Output;
+
+ fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
+ F::poll(Pin::new(&mut *self), cx)
+ }
+}
diff --git a/library/alloc/src/collections/binary_heap.rs b/library/alloc/src/collections/binary_heap.rs
new file mode 100644
index 00000000000..8398cfa3bd3
--- /dev/null
+++ b/library/alloc/src/collections/binary_heap.rs
@@ -0,0 +1,1431 @@
+//! A priority queue implemented with a binary heap.
+//!
+//! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
+//! Checking the largest element is *O*(1). Converting a vector to a binary heap
+//! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
+//! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* \* log(*n*))
+//! in-place heapsort.
+//!
+//! # Examples
+//!
+//! This is a larger example that implements [Dijkstra's algorithm][dijkstra]
+//! to solve the [shortest path problem][sssp] on a [directed graph][dir_graph].
+//! It shows how to use [`BinaryHeap`] with custom types.
+//!
+//! [dijkstra]: http://en.wikipedia.org/wiki/Dijkstra%27s_algorithm
+//! [sssp]: http://en.wikipedia.org/wiki/Shortest_path_problem
+//! [dir_graph]: http://en.wikipedia.org/wiki/Directed_graph
+//! [`BinaryHeap`]: struct.BinaryHeap.html
+//!
+//! ```
+//! use std::cmp::Ordering;
+//! use std::collections::BinaryHeap;
+//!
+//! #[derive(Copy, Clone, Eq, PartialEq)]
+//! struct State {
+//! cost: usize,
+//! position: usize,
+//! }
+//!
+//! // The priority queue depends on `Ord`.
+//! // Explicitly implement the trait so the queue becomes a min-heap
+//! // instead of a max-heap.
+//! impl Ord for State {
+//! fn cmp(&self, other: &State) -> Ordering {
+//! // Notice that we flip the ordering on costs.
+//! // In case of a tie we compare positions - this step is necessary
+//! // to make implementations of `PartialEq` and `Ord` consistent.
+//! other.cost.cmp(&self.cost)
+//! .then_with(|| self.position.cmp(&other.position))
+//! }
+//! }
+//!
+//! // `PartialOrd` needs to be implemented as well.
+//! impl PartialOrd for State {
+//! fn partial_cmp(&self, other: &State) -> Option<Ordering> {
+//! Some(self.cmp(other))
+//! }
+//! }
+//!
+//! // Each node is represented as a `usize`, for a shorter implementation.
+//! struct Edge {
+//! node: usize,
+//! cost: usize,
+//! }
+//!
+//! // Dijkstra's shortest path algorithm.
+//!
+//! // Start at `start` and use `dist` to track the current shortest distance
+//! // to each node. This implementation isn't memory-efficient as it may leave duplicate
+//! // nodes in the queue. It also uses `usize::MAX` as a sentinel value,
+//! // for a simpler implementation.
+//! fn shortest_path(adj_list: &Vec<Vec<Edge>>, start: usize, goal: usize) -> Option<usize> {
+//! // dist[node] = current shortest distance from `start` to `node`
+//! let mut dist: Vec<_> = (0..adj_list.len()).map(|_| usize::MAX).collect();
+//!
+//! let mut heap = BinaryHeap::new();
+//!
+//! // We're at `start`, with a zero cost
+//! dist[start] = 0;
+//! heap.push(State { cost: 0, position: start });
+//!
+//! // Examine the frontier with lower cost nodes first (min-heap)
+//! while let Some(State { cost, position }) = heap.pop() {
+//! // Alternatively we could have continued to find all shortest paths
+//! if position == goal { return Some(cost); }
+//!
+//! // Important as we may have already found a better way
+//! if cost > dist[position] { continue; }
+//!
+//! // For each node we can reach, see if we can find a way with
+//! // a lower cost going through this node
+//! for edge in &adj_list[position] {
+//! let next = State { cost: cost + edge.cost, position: edge.node };
+//!
+//! // If so, add it to the frontier and continue
+//! if next.cost < dist[next.position] {
+//! heap.push(next);
+//! // Relaxation, we have now found a better way
+//! dist[next.position] = next.cost;
+//! }
+//! }
+//! }
+//!
+//! // Goal not reachable
+//! None
+//! }
+//!
+//! fn main() {
+//! // This is the directed graph we're going to use.
+//! // The node numbers correspond to the different states,
+//! // and the edge weights symbolize the cost of moving
+//! // from one node to another.
+//! // Note that the edges are one-way.
+//! //
+//! // 7
+//! // +-----------------+
+//! // | |
+//! // v 1 2 | 2
+//! // 0 -----> 1 -----> 3 ---> 4
+//! // | ^ ^ ^
+//! // | | 1 | |
+//! // | | | 3 | 1
+//! // +------> 2 -------+ |
+//! // 10 | |
+//! // +---------------+
+//! //
+//! // The graph is represented as an adjacency list where each index,
+//! // corresponding to a node value, has a list of outgoing edges.
+//! // Chosen for its efficiency.
+//! let graph = vec![
+//! // Node 0
+//! vec![Edge { node: 2, cost: 10 },
+//! Edge { node: 1, cost: 1 }],
+//! // Node 1
+//! vec![Edge { node: 3, cost: 2 }],
+//! // Node 2
+//! vec![Edge { node: 1, cost: 1 },
+//! Edge { node: 3, cost: 3 },
+//! Edge { node: 4, cost: 1 }],
+//! // Node 3
+//! vec![Edge { node: 0, cost: 7 },
+//! Edge { node: 4, cost: 2 }],
+//! // Node 4
+//! vec![]];
+//!
+//! assert_eq!(shortest_path(&graph, 0, 1), Some(1));
+//! assert_eq!(shortest_path(&graph, 0, 3), Some(3));
+//! assert_eq!(shortest_path(&graph, 3, 0), Some(7));
+//! assert_eq!(shortest_path(&graph, 0, 4), Some(5));
+//! assert_eq!(shortest_path(&graph, 4, 0), None);
+//! }
+//! ```
+
+#![allow(missing_docs)]
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::fmt;
+use core::iter::{FromIterator, FusedIterator, TrustedLen};
+use core::mem::{self, size_of, swap, ManuallyDrop};
+use core::ops::{Deref, DerefMut};
+use core::ptr;
+
+use crate::slice;
+use crate::vec::{self, Vec};
+
+use super::SpecExtend;
+
+/// A priority queue implemented with a binary heap.
+///
+/// This will be a max-heap.
+///
+/// It is a logic error for an item to be modified in such a way that the
+/// item's ordering relative to any other item, as determined by the `Ord`
+/// trait, changes while it is in the heap. This is normally only possible
+/// through `Cell`, `RefCell`, global state, I/O, or unsafe code.
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BinaryHeap;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `BinaryHeap<i32>` in this example).
+/// let mut heap = BinaryHeap::new();
+///
+/// // We can use peek to look at the next item in the heap. In this case,
+/// // there are no items in there yet so we get None.
+/// assert_eq!(heap.peek(), None);
+///
+/// // Let's add some scores...
+/// heap.push(1);
+/// heap.push(5);
+/// heap.push(2);
+///
+/// // Now peek shows the most important item in the heap.
+/// assert_eq!(heap.peek(), Some(&5));
+///
+/// // We can check the length of a heap.
+/// assert_eq!(heap.len(), 3);
+///
+/// // We can iterate over the items in the heap, although they are returned in
+/// // a random order.
+/// for x in &heap {
+/// println!("{}", x);
+/// }
+///
+/// // If we instead pop these scores, they should come back in order.
+/// assert_eq!(heap.pop(), Some(5));
+/// assert_eq!(heap.pop(), Some(2));
+/// assert_eq!(heap.pop(), Some(1));
+/// assert_eq!(heap.pop(), None);
+///
+/// // We can clear the heap of any remaining items.
+/// heap.clear();
+///
+/// // The heap should now be empty.
+/// assert!(heap.is_empty())
+/// ```
+///
+/// ## Min-heap
+///
+/// Either `std::cmp::Reverse` or a custom `Ord` implementation can be used to
+/// make `BinaryHeap` a min-heap. This makes `heap.pop()` return the smallest
+/// value instead of the greatest one.
+///
+/// ```
+/// use std::collections::BinaryHeap;
+/// use std::cmp::Reverse;
+///
+/// let mut heap = BinaryHeap::new();
+///
+/// // Wrap values in `Reverse`
+/// heap.push(Reverse(1));
+/// heap.push(Reverse(5));
+/// heap.push(Reverse(2));
+///
+/// // If we pop these scores now, they should come back in the reverse order.
+/// assert_eq!(heap.pop(), Some(Reverse(1)));
+/// assert_eq!(heap.pop(), Some(Reverse(2)));
+/// assert_eq!(heap.pop(), Some(Reverse(5)));
+/// assert_eq!(heap.pop(), None);
+/// ```
+///
+/// # Time complexity
+///
+/// | [push]  | [pop]         | [peek]/[peek\_mut] |
+/// |---------|---------------|--------------------|
+/// | *O*(1)~ | *O*(log(*n*)) | *O*(1)             |
+///
+/// The value for `push` is an expected cost; the method documentation gives a
+/// more detailed analysis.
+///
+/// [push]: #method.push
+/// [pop]: #method.pop
+/// [peek]: #method.peek
+/// [peek\_mut]: #method.peek_mut
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BinaryHeap<T> {
+ data: Vec<T>,
+}
+
+/// Structure wrapping a mutable reference to the greatest item on a
+/// `BinaryHeap`.
+///
+/// This `struct` is created by the [`peek_mut`] method on [`BinaryHeap`]. See
+/// its documentation for more.
+///
+/// [`peek_mut`]: struct.BinaryHeap.html#method.peek_mut
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+pub struct PeekMut<'a, T: 'a + Ord> {
+ heap: &'a mut BinaryHeap<T>,
+ sift: bool,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: Ord + fmt::Debug> fmt::Debug for PeekMut<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("PeekMut").field(&self.heap.data[0]).finish()
+ }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<T: Ord> Drop for PeekMut<'_, T> {
+ fn drop(&mut self) {
+ if self.sift {
+ self.heap.sift_down(0);
+ }
+ }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<T: Ord> Deref for PeekMut<'_, T> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ debug_assert!(!self.heap.is_empty());
+ // SAFE: PeekMut is only instantiated for non-empty heaps
+ unsafe { self.heap.data.get_unchecked(0) }
+ }
+}
+
+#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+impl<T: Ord> DerefMut for PeekMut<'_, T> {
+ fn deref_mut(&mut self) -> &mut T {
+ debug_assert!(!self.heap.is_empty());
+ // SAFE: PeekMut is only instantiated for non-empty heaps
+ unsafe { self.heap.data.get_unchecked_mut(0) }
+ }
+}
+
+impl<'a, T: Ord> PeekMut<'a, T> {
+ /// Removes the peeked value from the heap and returns it.
+ #[stable(feature = "binary_heap_peek_mut_pop", since = "1.18.0")]
+ pub fn pop(mut this: PeekMut<'a, T>) -> T {
+ let value = this.heap.pop().unwrap();
+ this.sift = false;
+ value
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for BinaryHeap<T> {
+ fn clone(&self) -> Self {
+ BinaryHeap { data: self.data.clone() }
+ }
+
+ fn clone_from(&mut self, source: &Self) {
+ self.data.clone_from(&source.data);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Default for BinaryHeap<T> {
+ /// Creates an empty `BinaryHeap<T>`.
+ #[inline]
+ fn default() -> BinaryHeap<T> {
+ BinaryHeap::new()
+ }
+}
+
+#[stable(feature = "binaryheap_debug", since = "1.4.0")]
+impl<T: fmt::Debug> fmt::Debug for BinaryHeap<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.iter()).finish()
+ }
+}
+
+impl<T: Ord> BinaryHeap<T> {
+ /// Creates an empty `BinaryHeap` as a max-heap.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// heap.push(4);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn new() -> BinaryHeap<T> {
+ BinaryHeap { data: vec![] }
+ }
+
+ /// Creates an empty `BinaryHeap` with a specific capacity.
+ /// This preallocates enough memory for `capacity` elements,
+ /// so that the `BinaryHeap` does not have to be reallocated
+ /// until it contains at least that many values.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::with_capacity(10);
+ /// heap.push(4);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn with_capacity(capacity: usize) -> BinaryHeap<T> {
+ BinaryHeap { data: Vec::with_capacity(capacity) }
+ }
+
+ /// Returns a mutable reference to the greatest item in the binary heap, or
+ /// `None` if it is empty.
+ ///
+ /// Note: If the `PeekMut` value is leaked, the heap may be in an
+ /// inconsistent state.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// assert!(heap.peek_mut().is_none());
+ ///
+ /// heap.push(1);
+ /// heap.push(5);
+ /// heap.push(2);
+ /// {
+ /// let mut val = heap.peek_mut().unwrap();
+ /// *val = 0;
+ /// }
+ /// assert_eq!(heap.peek(), Some(&2));
+ /// ```
+ ///
+ /// # Time complexity
+ ///
+ /// Cost is *O*(1) in the worst case.
+ #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
+ pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
+ if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: true }) }
+ }
+
+ /// Removes the greatest item from the binary heap and returns it, or `None` if it
+ /// is empty.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::from(vec![1, 3]);
+ ///
+ /// assert_eq!(heap.pop(), Some(3));
+ /// assert_eq!(heap.pop(), Some(1));
+ /// assert_eq!(heap.pop(), None);
+ /// ```
+ ///
+ /// # Time complexity
+ ///
+ /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop(&mut self) -> Option<T> {
+ self.data.pop().map(|mut item| {
+ if !self.is_empty() {
+ swap(&mut item, &mut self.data[0]);
+ self.sift_down_to_bottom(0);
+ }
+ item
+ })
+ }
+
+ /// Pushes an item onto the binary heap.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// heap.push(3);
+ /// heap.push(5);
+ /// heap.push(1);
+ ///
+ /// assert_eq!(heap.len(), 3);
+ /// assert_eq!(heap.peek(), Some(&5));
+ /// ```
+ ///
+ /// # Time complexity
+ ///
+ /// The expected cost of `push`, averaged over every possible ordering of
+ /// the elements being pushed, and over a sufficiently large number of
+ /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
+ /// elements that are *not* already in any sorted pattern.
+ ///
+ /// The time complexity degrades if elements are pushed in predominantly
+ /// ascending order. In the worst case, elements are pushed in ascending
+ /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
+ /// containing *n* elements.
+ ///
+ /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
+ /// occurs when capacity is exhausted and needs a resize. The resize cost
+ /// has been amortized in the previous figures.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push(&mut self, item: T) {
+ let old_len = self.len();
+ self.data.push(item);
+ self.sift_up(0, old_len);
+ }
+
+ /// Consumes the `BinaryHeap` and returns a vector in sorted
+ /// (ascending) order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ ///
+ /// let mut heap = BinaryHeap::from(vec![1, 2, 4, 5, 7]);
+ /// heap.push(6);
+ /// heap.push(3);
+ ///
+ /// let vec = heap.into_sorted_vec();
+ /// assert_eq!(vec, [1, 2, 3, 4, 5, 6, 7]);
+ /// ```
+ #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+ pub fn into_sorted_vec(mut self) -> Vec<T> {
+ let mut end = self.len();
+ while end > 1 {
+ end -= 1;
+ self.data.swap(0, end);
+ self.sift_down_range(0, end);
+ }
+ self.into_vec()
+ }
+
+ // The implementations of sift_up and sift_down use unsafe blocks in
+ // order to move an element out of the vector (leaving behind a
+ // hole), shift along the others and move the removed element back into the
+ // vector at the final location of the hole.
+ // The `Hole` type is used to represent this, and make sure
+ // the hole is filled back at the end of its scope, even on panic.
+ // Using a hole reduces the constant factor compared to using swaps,
+ // which involves twice as many moves.
+ fn sift_up(&mut self, start: usize, pos: usize) -> usize {
+ unsafe {
+ // Take out the value at `pos` and create a hole.
+ let mut hole = Hole::new(&mut self.data, pos);
+
+ while hole.pos() > start {
+ let parent = (hole.pos() - 1) / 2;
+ if hole.element() <= hole.get(parent) {
+ break;
+ }
+ hole.move_to(parent);
+ }
+ hole.pos()
+ }
+ }
+
+ /// Take an element at `pos` and move it down the heap,
+ /// while its children are larger.
+ fn sift_down_range(&mut self, pos: usize, end: usize) {
+ unsafe {
+ let mut hole = Hole::new(&mut self.data, pos);
+ let mut child = 2 * pos + 1;
+ while child < end {
+ let right = child + 1;
+ // compare with the greater of the two children
+ if right < end && hole.get(child) <= hole.get(right) {
+ child = right;
+ }
+ // if we are already in order, stop.
+ if hole.element() >= hole.get(child) {
+ break;
+ }
+ hole.move_to(child);
+ child = 2 * hole.pos() + 1;
+ }
+ }
+ }
+
+ fn sift_down(&mut self, pos: usize) {
+ let len = self.len();
+ self.sift_down_range(pos, len);
+ }
+
+ /// Take an element at `pos` and move it all the way down the heap,
+ /// then sift it up to its position.
+ ///
+ /// Note: This is faster when the element is known to be large / should
+ /// be closer to the bottom.
+ fn sift_down_to_bottom(&mut self, mut pos: usize) {
+ let end = self.len();
+ let start = pos;
+ unsafe {
+ let mut hole = Hole::new(&mut self.data, pos);
+ let mut child = 2 * pos + 1;
+ while child < end {
+ let right = child + 1;
+ // compare with the greater of the two children
+ if right < end && hole.get(child) <= hole.get(right) {
+ child = right;
+ }
+ hole.move_to(child);
+ child = 2 * hole.pos() + 1;
+ }
+ pos = hole.pos;
+ }
+ self.sift_up(start, pos);
+ }
+
+ fn rebuild(&mut self) {
+ let mut n = self.len() / 2;
+ while n > 0 {
+ n -= 1;
+ self.sift_down(n);
+ }
+ }
+
+ /// Moves all the elements of `other` into `self`, leaving `other` empty.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ ///
+ /// let v = vec![-10, 1, 2, 3, 3];
+ /// let mut a = BinaryHeap::from(v);
+ ///
+ /// let v = vec![-20, 5, 43];
+ /// let mut b = BinaryHeap::from(v);
+ ///
+ /// a.append(&mut b);
+ ///
+ /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
+ /// assert!(b.is_empty());
+ /// ```
+ #[stable(feature = "binary_heap_append", since = "1.11.0")]
+ pub fn append(&mut self, other: &mut Self) {
+ if self.len() < other.len() {
+ swap(self, other);
+ }
+
+ if other.is_empty() {
+ return;
+ }
+
+ #[inline(always)]
+ fn log2_fast(x: usize) -> usize {
+ 8 * size_of::<usize>() - (x.leading_zeros() as usize) - 1
+ }
+
+ // `rebuild` takes O(len1 + len2) operations
+ // and about 2 * (len1 + len2) comparisons in the worst case
+ // while `extend` takes O(len2 * log(len1)) operations
+ // and about 1 * len2 * log_2(len1) comparisons in the worst case,
+ // assuming len1 >= len2.
+ #[inline]
+ fn better_to_rebuild(len1: usize, len2: usize) -> bool {
+ 2 * (len1 + len2) < len2 * log2_fast(len1)
+ }
+
+ if better_to_rebuild(self.len(), other.len()) {
+ self.data.append(&mut other.data);
+ self.rebuild();
+ } else {
+ self.extend(other.drain());
+ }
+ }
+
+ /// Returns an iterator which retrieves elements in heap order.
+ /// The retrieved elements are removed from the original heap.
+ /// The remaining elements will be removed on drop in heap order.
+ ///
+ /// Note:
+ /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
+ /// You should use the latter for most cases.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(binary_heap_drain_sorted)]
+ /// use std::collections::BinaryHeap;
+ ///
+ /// let mut heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
+ /// assert_eq!(heap.len(), 5);
+ ///
+ /// drop(heap.drain_sorted()); // removes all elements in heap order
+ /// assert_eq!(heap.len(), 0);
+ /// ```
+ #[inline]
+ #[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+ pub fn drain_sorted(&mut self) -> DrainSorted<'_, T> {
+ DrainSorted { inner: self }
+ }
+
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// In other words, remove all elements `e` such that `f(&e)` returns
+ /// `false`. The elements are visited in unsorted (and unspecified) order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(binary_heap_retain)]
+ /// use std::collections::BinaryHeap;
+ ///
+ /// let mut heap = BinaryHeap::from(vec![-10, -5, 1, 2, 4, 13]);
+ ///
+ /// heap.retain(|x| x % 2 == 0); // only keep even numbers
+ ///
+ /// assert_eq!(heap.into_sorted_vec(), [-10, 2, 4])
+ /// ```
+ #[unstable(feature = "binary_heap_retain", issue = "71503")]
+ pub fn retain<F>(&mut self, f: F)
+ where
+ F: FnMut(&T) -> bool,
+ {
+ self.data.retain(f);
+ self.rebuild();
+ }
+}
+
+impl<T> BinaryHeap<T> {
+ /// Returns an iterator visiting all values in the underlying vector, in
+ /// arbitrary order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
+ ///
+ /// // Print 1, 2, 3, 4 in arbitrary order
+ /// for x in heap.iter() {
+ /// println!("{}", x);
+ /// }
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter(&self) -> Iter<'_, T> {
+ Iter { iter: self.data.iter() }
+ }
+
+ /// Returns an iterator which retrieves elements in heap order.
+ /// This method consumes the original heap.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(binary_heap_into_iter_sorted)]
+ /// use std::collections::BinaryHeap;
+ /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5]);
+ ///
+ /// assert_eq!(heap.into_iter_sorted().take(2).collect::<Vec<_>>(), vec![5, 4]);
+ /// ```
+ #[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+ pub fn into_iter_sorted(self) -> IntoIterSorted<T> {
+ IntoIterSorted { inner: self }
+ }
+
+ /// Returns the greatest item in the binary heap, or `None` if it is empty.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// assert_eq!(heap.peek(), None);
+ ///
+ /// heap.push(1);
+ /// heap.push(5);
+ /// heap.push(2);
+ /// assert_eq!(heap.peek(), Some(&5));
+ ///
+ /// ```
+ ///
+ /// # Time complexity
+ ///
+ /// Cost is *O*(1) in the worst case.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn peek(&self) -> Option<&T> {
+ self.data.get(0)
+ }
+
+ /// Returns the number of elements the binary heap can hold without reallocating.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::with_capacity(100);
+ /// assert!(heap.capacity() >= 100);
+ /// heap.push(4);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn capacity(&self) -> usize {
+ self.data.capacity()
+ }
+
+ /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
+ /// given `BinaryHeap`. Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it requests. Therefore
+/// capacity cannot be relied upon to be precisely minimal. Prefer [`reserve`] if future
+ /// insertions are expected.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// heap.reserve_exact(100);
+ /// assert!(heap.capacity() >= 100);
+ /// heap.push(4);
+ /// ```
+ ///
+ /// [`reserve`]: #method.reserve
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve_exact(&mut self, additional: usize) {
+ self.data.reserve_exact(additional);
+ }
+
+ /// Reserves capacity for at least `additional` more elements to be inserted in the
+ /// `BinaryHeap`. The collection may reserve more space to avoid frequent reallocations.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ /// heap.reserve(100);
+ /// assert!(heap.capacity() >= 100);
+ /// heap.push(4);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve(&mut self, additional: usize) {
+ self.data.reserve(additional);
+ }
+
+ /// Discards as much additional capacity as possible.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
+ ///
+ /// assert!(heap.capacity() >= 100);
+ /// heap.shrink_to_fit();
+ /// assert!(heap.capacity() == 0);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn shrink_to_fit(&mut self) {
+ self.data.shrink_to_fit();
+ }
+
+ /// Discards capacity with a lower bound.
+ ///
+ /// The capacity will remain at least as large as both the length
+ /// and the supplied value.
+ ///
+ /// Panics if the current capacity is smaller than the supplied
+ /// minimum capacity.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(shrink_to)]
+ /// use std::collections::BinaryHeap;
+ /// let mut heap: BinaryHeap<i32> = BinaryHeap::with_capacity(100);
+ ///
+ /// assert!(heap.capacity() >= 100);
+ /// heap.shrink_to(10);
+ /// assert!(heap.capacity() >= 10);
+ /// ```
+ #[inline]
+ #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
+ pub fn shrink_to(&mut self, min_capacity: usize) {
+ self.data.shrink_to(min_capacity)
+ }
+
+ /// Consumes the `BinaryHeap` and returns the underlying vector
+ /// in arbitrary order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let heap = BinaryHeap::from(vec![1, 2, 3, 4, 5, 6, 7]);
+ /// let vec = heap.into_vec();
+ ///
+ /// // Will print in some order
+ /// for x in vec {
+ /// println!("{}", x);
+ /// }
+ /// ```
+ #[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+ pub fn into_vec(self) -> Vec<T> {
+ self.into()
+ }
+
+ /// Returns the length of the binary heap.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let heap = BinaryHeap::from(vec![1, 3]);
+ ///
+ /// assert_eq!(heap.len(), 2);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ /// Checks if the binary heap is empty.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::new();
+ ///
+ /// assert!(heap.is_empty());
+ ///
+ /// heap.push(3);
+ /// heap.push(5);
+ /// heap.push(1);
+ ///
+ /// assert!(!heap.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Clears the binary heap, returning an iterator over the removed elements.
+ ///
+ /// The elements are removed in arbitrary order.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::from(vec![1, 3]);
+ ///
+ /// assert!(!heap.is_empty());
+ ///
+ /// for x in heap.drain() {
+ /// println!("{}", x);
+ /// }
+ ///
+ /// assert!(heap.is_empty());
+ /// ```
+ #[inline]
+ #[stable(feature = "drain", since = "1.6.0")]
+ pub fn drain(&mut self) -> Drain<'_, T> {
+ Drain { iter: self.data.drain(..) }
+ }
+
+ /// Drops all items from the binary heap.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let mut heap = BinaryHeap::from(vec![1, 3]);
+ ///
+ /// assert!(!heap.is_empty());
+ ///
+ /// heap.clear();
+ ///
+ /// assert!(heap.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn clear(&mut self) {
+ self.drain();
+ }
+}
+
+/// Hole represents a hole in a slice, i.e., an index without a valid value
+/// (because it was moved from or duplicated).
+/// In drop, `Hole` will restore the slice by filling the hole
+/// position with the value that was originally removed.
+struct Hole<'a, T: 'a> {
+ data: &'a mut [T],
+ elt: ManuallyDrop<T>,
+ pos: usize,
+}
+
+impl<'a, T> Hole<'a, T> {
+ /// Create a new `Hole` at index `pos`.
+ ///
+ /// Unsafe because pos must be within the data slice.
+ #[inline]
+ unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
+ debug_assert!(pos < data.len());
+ // SAFETY: the caller of `Hole::new` guarantees that `pos` is within the data slice
+ let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
+ Hole { data, elt: ManuallyDrop::new(elt), pos }
+ }
+
+ #[inline]
+ fn pos(&self) -> usize {
+ self.pos
+ }
+
+ /// Returns a reference to the element removed.
+ #[inline]
+ fn element(&self) -> &T {
+ &self.elt
+ }
+
+ /// Returns a reference to the element at `index`.
+ ///
+ /// Unsafe because index must be within the data slice and not equal to pos.
+ #[inline]
+ unsafe fn get(&self, index: usize) -> &T {
+ debug_assert!(index != self.pos);
+ debug_assert!(index < self.data.len());
+ unsafe { self.data.get_unchecked(index) }
+ }
+
+ /// Move hole to new location
+ ///
+ /// Unsafe because index must be within the data slice and not equal to pos.
+ #[inline]
+ unsafe fn move_to(&mut self, index: usize) {
+ debug_assert!(index != self.pos);
+ debug_assert!(index < self.data.len());
+ unsafe {
+ let index_ptr: *const _ = self.data.get_unchecked(index);
+ let hole_ptr = self.data.get_unchecked_mut(self.pos);
+ ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
+ }
+ self.pos = index;
+ }
+}
+
+impl<T> Drop for Hole<'_, T> {
+ #[inline]
+ fn drop(&mut self) {
+ // fill the hole again
+ unsafe {
+ let pos = self.pos;
+ ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
+ }
+ }
+}
+
+/// An iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`iter`] method on [`BinaryHeap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.BinaryHeap.html#method.iter
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+ iter: slice::Iter<'a, T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Iter").field(&self.iter.as_slice()).finish()
+ }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+ fn clone(&self) -> Self {
+ Iter { iter: self.iter.clone() }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ self.iter.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+
+ #[inline]
+ fn last(self) -> Option<&'a T> {
+ self.iter.last()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a T> {
+ self.iter.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for Iter<'_, T> {
+ fn is_empty(&self) -> bool {
+ self.iter.is_empty()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Iter<'_, T> {}
+
+/// An owning iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BinaryHeap`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.BinaryHeap.html#method.into_iter
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Clone)]
+pub struct IntoIter<T> {
+ // Consumes the heap's backing `Vec`; yields in the Vec's (arbitrary) order.
+ iter: vec::IntoIter<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("IntoIter").field(&self.iter.as_slice()).finish()
+ }
+}
+
+// All iterator traits below delegate directly to `vec::IntoIter`.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ self.iter.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.iter.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+ fn is_empty(&self) -> bool {
+ self.iter.is_empty()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+// Owning iterator that yields the heap's elements in heap order by
+// repeatedly calling `pop` on a moved-in `BinaryHeap`.
+#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+#[derive(Clone, Debug)]
+pub struct IntoIterSorted<T> {
+ inner: BinaryHeap<T>,
+}
+
+#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+impl<T: Ord> Iterator for IntoIterSorted<T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ self.inner.pop()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // The remaining heap length is known exactly, which is what makes
+ // the `ExactSizeIterator` and `TrustedLen` impls below sound.
+ let exact = self.inner.len();
+ (exact, Some(exact))
+ }
+}
+
+#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+impl<T: Ord> ExactSizeIterator for IntoIterSorted<T> {}
+
+#[unstable(feature = "binary_heap_into_iter_sorted", issue = "59278")]
+impl<T: Ord> FusedIterator for IntoIterSorted<T> {}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T: Ord> TrustedLen for IntoIterSorted<T> {}
+
+/// A draining iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`drain`] method on [`BinaryHeap`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.BinaryHeap.html#method.drain
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[stable(feature = "drain", since = "1.6.0")]
+#[derive(Debug)]
+pub struct Drain<'a, T: 'a> {
+ // Drains the backing `Vec`, so elements come out in arbitrary order.
+ iter: vec::Drain<'a, T>,
+}
+
+// All iterator traits below delegate directly to `vec::Drain`.
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Iterator for Drain<'_, T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ self.iter.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> DoubleEndedIterator for Drain<'_, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.iter.next_back()
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> ExactSizeIterator for Drain<'_, T> {
+ fn is_empty(&self) -> bool {
+ self.iter.is_empty()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Drain<'_, T> {}
+
+/// A draining iterator over the elements of a `BinaryHeap`.
+///
+/// This `struct` is created by the [`drain_sorted`] method on [`BinaryHeap`]. See its
+/// documentation for more.
+///
+/// [`drain_sorted`]: struct.BinaryHeap.html#method.drain_sorted
+/// [`BinaryHeap`]: struct.BinaryHeap.html
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+#[derive(Debug)]
+pub struct DrainSorted<'a, T: Ord> {
+ // Borrows the heap mutably; elements are removed via `pop` as iteration
+ // proceeds, and `Drop` below empties whatever remains.
+ inner: &'a mut BinaryHeap<T>,
+}
+
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+impl<'a, T: Ord> Drop for DrainSorted<'a, T> {
+ /// Removes heap elements in heap order.
+ fn drop(&mut self) {
+ // Panic guard: if dropping an element panics, the guard's own `drop`
+ // still pops (and drops) every remaining element so the heap is left
+ // empty rather than partially drained.
+ struct DropGuard<'r, 'a, T: Ord>(&'r mut DrainSorted<'a, T>);
+
+ impl<'r, 'a, T: Ord> Drop for DropGuard<'r, 'a, T> {
+ fn drop(&mut self) {
+ while self.0.inner.pop().is_some() {}
+ }
+ }
+
+ while let Some(item) = self.inner.pop() {
+ let guard = DropGuard(self);
+ drop(item);
+ // Normal path: the element dropped without panicking, so disarm
+ // the guard and continue the loop.
+ mem::forget(guard);
+ }
+ }
+}
+
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+impl<T: Ord> Iterator for DrainSorted<'_, T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ self.inner.pop()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // Exact remaining count; justifies `ExactSizeIterator`/`TrustedLen`.
+ let exact = self.inner.len();
+ (exact, Some(exact))
+ }
+}
+
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+impl<T: Ord> ExactSizeIterator for DrainSorted<'_, T> {}
+
+#[unstable(feature = "binary_heap_drain_sorted", issue = "59278")]
+impl<T: Ord> FusedIterator for DrainSorted<'_, T> {}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T: Ord> TrustedLen for DrainSorted<'_, T> {}
+
+#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
+ /// Converts a `Vec<T>` into a `BinaryHeap<T>`.
+ ///
+ /// This conversion happens in-place, and has *O*(*n*) time complexity.
+ fn from(vec: Vec<T>) -> BinaryHeap<T> {
+ // Adopt the Vec as backing storage, then restore the heap invariant.
+ let mut heap = BinaryHeap { data: vec };
+ heap.rebuild();
+ heap
+ }
+}
+
+#[stable(feature = "binary_heap_extras_15", since = "1.5.0")]
+impl<T> From<BinaryHeap<T>> for Vec<T> {
+ fn from(heap: BinaryHeap<T>) -> Vec<T> {
+ // Free: just hands back the backing Vec (still in heap order).
+ heap.data
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> FromIterator<T> for BinaryHeap<T> {
+ fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BinaryHeap<T> {
+ // Collect first, then heapify once via `From<Vec<T>>` (O(n)).
+ BinaryHeap::from(iter.into_iter().collect::<Vec<_>>())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for BinaryHeap<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ /// Creates a consuming iterator, that is, one that moves each value out of
+ /// the binary heap in arbitrary order. The binary heap cannot be used
+ /// after calling this.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BinaryHeap;
+ /// let heap = BinaryHeap::from(vec![1, 2, 3, 4]);
+ ///
+ /// // Print 1, 2, 3, 4 in arbitrary order
+ /// for x in heap.into_iter() {
+ /// // x has type i32, not &i32
+ /// println!("{}", x);
+ /// }
+ /// ```
+ fn into_iter(self) -> IntoIter<T> {
+ IntoIter { iter: self.data.into_iter() }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a BinaryHeap<T> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Extend<T> for BinaryHeap<T> {
+ #[inline]
+ fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ // Dispatch through `SpecExtend` so extending from another
+ // `BinaryHeap` can take the specialized `append` path below.
+ <Self as SpecExtend<I>>::spec_extend(self, iter);
+ }
+
+ #[inline]
+ fn extend_one(&mut self, item: T) {
+ self.push(item);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+}
+
+// Generic fallback: push elements one by one (uses `default fn`
+// specialization, an unstable feature available inside std).
+impl<T: Ord, I: IntoIterator<Item = T>> SpecExtend<I> for BinaryHeap<T> {
+ default fn spec_extend(&mut self, iter: I) {
+ self.extend_desugared(iter.into_iter());
+ }
+}
+
+// Specialization for heap-from-heap: `append` the whole source heap.
+impl<T: Ord> SpecExtend<BinaryHeap<T>> for BinaryHeap<T> {
+ // `ref mut other` rebinds the owned argument as `&mut`, which is the
+ + // shape `append` wants.
+ fn spec_extend(&mut self, ref mut other: BinaryHeap<T>) {
+ self.append(other);
+ }
+}
+
+impl<T: Ord> BinaryHeap<T> {
+ // Fallback extend: reserve from the lower size-hint bound, then push.
+ fn extend_desugared<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ let iterator = iter.into_iter();
+ let (lower, _) = iterator.size_hint();
+
+ self.reserve(lower);
+
+ iterator.for_each(move |elem| self.push(elem));
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BinaryHeap<T> {
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ // Copies the referenced values (T: Copy) and reuses the owned impl.
+ self.extend(iter.into_iter().cloned());
+ }
+
+ #[inline]
+ fn extend_one(&mut self, &item: &'a T) {
+ self.push(item);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+}
diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs
new file mode 100644
index 00000000000..24d1f61fa68
--- /dev/null
+++ b/library/alloc/src/collections/btree/map.rs
@@ -0,0 +1,2860 @@
+use core::borrow::Borrow;
+use core::cmp::Ordering;
+use core::fmt::Debug;
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator, Peekable};
+use core::marker::PhantomData;
+use core::mem::{self, ManuallyDrop};
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{Index, RangeBounds};
+use core::{fmt, ptr};
+
+use super::node::{self, marker, ForceResult::*, Handle, InsertResult::*, NodeRef};
+use super::search::{self, SearchResult::*};
+use super::unwrap_unchecked;
+
+use Entry::*;
+use UnderflowResult::*;
+
+/// A map based on a B-Tree.
+///
+/// B-Trees represent a fundamental compromise between cache-efficiency and actually minimizing
+/// the amount of work performed in a search. In theory, a binary search tree (BST) is the optimal
+/// choice for a sorted map, as a perfectly balanced BST performs the theoretical minimum amount of
+/// comparisons necessary to find an element (log<sub>2</sub>n). However, in practice the way this
+/// is done is *very* inefficient for modern computer architectures. In particular, every element
+/// is stored in its own individually heap-allocated node. This means that every single insertion
+/// triggers a heap-allocation, and every single comparison is likely to be a cache miss. Since these
+/// are both notably expensive things to do in practice, we are forced to at very least reconsider
+/// the BST strategy.
+///
+/// A B-Tree instead makes each node contain B-1 to 2B-1 elements in a contiguous array. By doing
+/// this, we reduce the number of allocations by a factor of B, and improve cache efficiency in
+/// searches. However, this does mean that searches will have to do *more* comparisons on average.
+/// The precise number of comparisons depends on the node search strategy used. For optimal cache
+/// efficiency, one could search the nodes linearly. For optimal comparisons, one could search
+/// the node using binary search. As a compromise, one could also perform a linear search
+/// that initially only checks every i<sup>th</sup> element for some choice of i.
+///
+/// Currently, our implementation simply performs naive linear search. This provides excellent
+/// performance on *small* nodes of elements which are cheap to compare. However in the future we
+/// would like to further explore choosing the optimal search strategy based on the choice of B,
+/// and possibly other factors. Using linear search, searching for a random element is expected
+/// to take O(B * log(n)) comparisons, which is generally worse than a BST. In practice,
+/// however, performance is excellent.
+///
+/// It is a logic error for a key to be modified in such a way that the key's ordering relative to
+/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// [`Ord`]: core::cmp::Ord
+/// [`Cell`]: core::cell::Cell
+/// [`RefCell`]: core::cell::RefCell
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, &str>` in this example).
+/// let mut movie_reviews = BTreeMap::new();
+///
+/// // review some movies.
+/// movie_reviews.insert("Office Space", "Deals with real issues in the workplace.");
+/// movie_reviews.insert("Pulp Fiction", "Masterpiece.");
+/// movie_reviews.insert("The Godfather", "Very enjoyable.");
+/// movie_reviews.insert("The Blues Brothers", "Eye lyked it a lot.");
+///
+/// // check for a specific one.
+/// if !movie_reviews.contains_key("Les Misérables") {
+/// println!("We've got {} reviews, but Les Misérables ain't one.",
+/// movie_reviews.len());
+/// }
+///
+/// // oops, this review has a lot of spelling mistakes, let's delete it.
+/// movie_reviews.remove("The Blues Brothers");
+///
+/// // look up the values associated with some keys.
+/// let to_find = ["Up!", "Office Space"];
+/// for movie in &to_find {
+/// match movie_reviews.get(movie) {
+/// Some(review) => println!("{}: {}", movie, review),
+/// None => println!("{} is unreviewed.", movie)
+/// }
+/// }
+///
+/// // Look up the value for a key (will panic if the key is not found).
+/// println!("Movie review: {}", movie_reviews["Office Space"]);
+///
+/// // iterate over everything.
+/// for (movie, review) in &movie_reviews {
+/// println!("{}: \"{}\"", movie, review);
+/// }
+/// ```
+///
+/// `BTreeMap` also implements an [`Entry API`](#method.entry), which allows
+/// for more complex methods of getting, setting, updating and removing keys and
+/// their values:
+///
+/// ```
+/// use std::collections::BTreeMap;
+///
+/// // type inference lets us omit an explicit type signature (which
+/// // would be `BTreeMap<&str, u8>` in this example).
+/// let mut player_stats = BTreeMap::new();
+///
+/// fn random_stat_buff() -> u8 {
+/// // could actually return some random value here - let's just return
+/// // some fixed value for now
+/// 42
+/// }
+///
+/// // insert a key only if it doesn't already exist
+/// player_stats.entry("health").or_insert(100);
+///
+/// // insert a key using a function that provides a new value only if it
+/// // doesn't already exist
+/// player_stats.entry("defence").or_insert_with(random_stat_buff);
+///
+/// // update a key, guarding against the key possibly not being set
+/// let stat = player_stats.entry("attack").or_insert(100);
+/// *stat += random_stat_buff();
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BTreeMap<K, V> {
+ // `None` iff the map is empty (see `new` / `clear`).
+ root: Option<node::Root<K, V>>,
+ // Number of key-value pairs; kept in sync by insert/remove paths.
+ length: usize,
+}
+
+// `#[may_dangle]` is the dropck eyepatch: it asserts this Drop impl does not
+// access K/V data beyond dropping it, loosening borrow-check requirements.
+#[stable(feature = "btree_drop", since = "1.7.0")]
+unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
+ fn drop(&mut self) {
+ unsafe {
+ // Bitwise-copy `self` out and drop all entries through the
+ + // owning iterator; the original `self` must not be used again.
+ drop(ptr::read(self).into_iter());
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
+ fn clone(&self) -> BTreeMap<K, V> {
+ // Recursively clones one subtree, rebuilding an identical node
+ // structure level by level.
+ fn clone_subtree<'a, K: Clone, V: Clone>(
+ node: node::NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>,
+ ) -> BTreeMap<K, V>
+ where
+ K: 'a,
+ V: 'a,
+ {
+ match node.force() {
+ Leaf(leaf) => {
+ let mut out_tree = BTreeMap { root: Some(node::Root::new_leaf()), length: 0 };
+
+ {
+ let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
+ let mut out_node = match root.as_mut().force() {
+ Leaf(leaf) => leaf,
+ Internal(_) => unreachable!(),
+ };
+
+ // Walk the source leaf's key-value pairs left to right,
+ // cloning each into the fresh leaf.
+ let mut in_edge = leaf.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ out_node.push(k.clone(), v.clone());
+ out_tree.length += 1;
+ }
+ }
+
+ out_tree
+ }
+ Internal(internal) => {
+ // Clone the leftmost child first, then push one level on
+ // top of it and attach the remaining (kv, subtree) pairs.
+ let mut out_tree = clone_subtree(internal.first_edge().descend());
+
+ {
+ let out_root = BTreeMap::ensure_is_owned(&mut out_tree.root);
+ let mut out_node = out_root.push_level();
+ let mut in_edge = internal.first_edge();
+ while let Ok(kv) = in_edge.right_kv() {
+ let (k, v) = kv.into_kv();
+ in_edge = kv.right_edge();
+
+ let k = (*k).clone();
+ let v = (*v).clone();
+ let subtree = clone_subtree(in_edge.descend());
+
+ // We can't destructure subtree directly
+ // because BTreeMap implements Drop
+ let (subroot, sublength) = unsafe {
+ let subtree = ManuallyDrop::new(subtree);
+ let root = ptr::read(&subtree.root);
+ let length = subtree.length;
+ (root, length)
+ };
+
+ out_node.push(k, v, subroot.unwrap_or_else(node::Root::new_leaf));
+ out_tree.length += 1 + sublength;
+ }
+ }
+
+ out_tree
+ }
+ }
+ }
+
+ if self.is_empty() {
+ // Ideally we'd call `BTreeMap::new` here, but that has the `K:
+ // Ord` constraint, which this method lacks.
+ BTreeMap { root: None, length: 0 }
+ } else {
+ clone_subtree(self.root.as_ref().unwrap().as_ref()) // unwrap succeeds because not empty
+ }
+ }
+}
+
+// Key-recovery API for maps used as sets (`BTreeMap<K, ()>`): lets a caller
+// get/take/replace the *stored key* itself rather than the `()` value.
+// NOTE(review): presumably backs `BTreeSet` — confirm at the trait's definition.
+impl<K, Q: ?Sized> super::Recover<Q> for BTreeMap<K, ()>
+where
+ K: Borrow<Q> + Ord,
+ Q: Ord,
+{
+ type Key = K;
+
+ fn get(&self, key: &Q) -> Option<&K> {
+ match search::search_tree(self.root.as_ref()?.as_ref(), key) {
+ Found(handle) => Some(handle.into_kv().0),
+ GoDown(_) => None,
+ }
+ }
+
+ fn take(&mut self, key: &Q) -> Option<K> {
+ match search::search_tree(self.root.as_mut()?.as_mut(), key) {
+ Found(handle) => Some(
+ // Remove through an OccupiedEntry so `length` stays in sync.
+ OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }
+ .remove_kv()
+ .0,
+ ),
+ GoDown(_) => None,
+ }
+ }
+
+ fn replace(&mut self, key: K) -> Option<K> {
+ let root = Self::ensure_is_owned(&mut self.root);
+ match search::search_tree::<marker::Mut<'_>, K, (), K>(root.as_mut(), &key) {
+ // Present: swap in the new key, hand back the old one.
+ Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
+ GoDown(handle) => {
+ VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData }
+ .insert(());
+ None
+ }
+ }
+ }
+}
+
+/// An iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter`]: BTreeMap::iter
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, K: 'a, V: 'a> {
+ // A full-map `Range` plus the remaining length for exact size hints.
+ range: Range<'a, K, V>,
+ length: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Iter<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Debug-prints the remaining entries by cloning the iterator.
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: BTreeMap::iter_mut
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct IterMut<'a, K: 'a, V: 'a> {
+ range: RangeMut<'a, K, V>,
+ length: usize,
+}
+
+/// An owning iterator over the entries of a `BTreeMap`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: IntoIterator::into_iter
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<K, V> {
+ // Front/back leaf-edge cursors into the owned tree; `None` once exhausted.
+ front: Option<Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>>,
+ back: Option<Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>>,
+ length: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Borrow the cursors as an immutable `Range` so we can print
+ // without consuming the owned iterator.
+ let range = Range {
+ front: self.front.as_ref().map(|f| f.reborrow()),
+ back: self.back.as_ref().map(|b| b.reborrow()),
+ };
+ f.debug_list().entries(range).finish()
+ }
+}
+
+/// An iterator over the keys of a `BTreeMap`.
+///
+/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`keys`]: BTreeMap::keys
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Keys<'a, K: 'a, V: 'a> {
+ // Wraps the entry iterator and projects out the key.
+ inner: Iter<'a, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V> fmt::Debug for Keys<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// An iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values`]: BTreeMap::values
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Values<'a, K: 'a, V: 'a> {
+ // Wraps the entry iterator and projects out the value.
+ inner: Iter<'a, K, V>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K, V: fmt::Debug> fmt::Debug for Values<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over the values of a `BTreeMap`.
+///
+/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`values_mut`]: BTreeMap::values_mut
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+#[derive(Debug)]
+pub struct ValuesMut<'a, K: 'a, V: 'a> {
+ inner: IterMut<'a, K, V>,
+}
+
+/// An iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range`]: BTreeMap::range
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct Range<'a, K: 'a, V: 'a> {
+ // Front/back leaf-edge cursors bounding the remaining sub-range.
+ front: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+ back: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for Range<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self.clone()).finish()
+ }
+}
+
+/// A mutable iterator over a sub-range of entries in a `BTreeMap`.
+///
+/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
+/// documentation for more.
+///
+/// [`range_mut`]: BTreeMap::range_mut
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct RangeMut<'a, K: 'a, V: 'a> {
+ front: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+ back: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+
+ // Be invariant in `K` and `V`
+ _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for RangeMut<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Reborrow the mutable cursors immutably for printing.
+ let range = Range {
+ front: self.front.as_ref().map(|f| f.reborrow()),
+ back: self.back.as_ref().map(|b| b.reborrow()),
+ };
+ f.debug_list().entries(range).finish()
+ }
+}
+
+/// A view into a single entry in a map, which may either be vacant or occupied.
+///
+/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
+///
+/// [`entry`]: BTreeMap::entry
+#[stable(feature = "rust1", since = "1.0.0")]
+pub enum Entry<'a, K: 'a, V: 'a> {
+ /// A vacant entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Vacant(#[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V>),
+
+ /// An occupied entry.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ Occupied(#[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V>),
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V: Debug> Debug for Entry<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match *self {
+ Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
+ Occupied(ref o) => f.debug_tuple("Entry").field(o).finish(),
+ }
+ }
+}
+
+/// A view into a vacant entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct VacantEntry<'a, K: 'a, V: 'a> {
+ // The key to insert, plus the edge where the insertion would happen.
+ key: K,
+ handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>,
+ // Borrow of the map's length so insertion can bump it.
+ length: &'a mut usize,
+
+ // Be invariant in `K` and `V`
+ _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V> Debug for VacantEntry<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("VacantEntry").field(self.key()).finish()
+ }
+}
+
+/// A view into an occupied entry in a `BTreeMap`.
+/// It is part of the [`Entry`] enum.
+///
+/// [`Entry`]: enum.Entry.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct OccupiedEntry<'a, K: 'a, V: 'a> {
+ // Handle to the found key-value pair inside the tree.
+ handle: Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>,
+
+ // Borrow of the map's length so removal can decrement it.
+ length: &'a mut usize,
+
+ // Be invariant in `K` and `V`
+ _marker: PhantomData<&'a mut (K, V)>,
+}
+
+#[stable(feature = "debug_btree_map", since = "1.12.0")]
+impl<K: Debug + Ord, V: Debug> Debug for OccupiedEntry<'_, K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish()
+ }
+}
+
+// An iterator for merging two sorted sequences into one
+struct MergeIter<K, V, I: Iterator<Item = (K, V)>> {
+ left: Peekable<I>,
+ right: Peekable<I>,
+}
+
+impl<K: Ord, V> BTreeMap<K, V> {
+ /// Makes a new empty BTreeMap.
+ ///
+ /// Does not allocate anything on its own.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ ///
+ /// // entries can now be inserted into the empty map
+ /// map.insert(1, "a");
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ pub const fn new() -> BTreeMap<K, V> {
+ // `root: None` is the empty-map representation; no allocation here.
+ BTreeMap { root: None, length: 0 }
+ }
+
+ /// Clears the map, removing all elements.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.clear();
+ /// assert!(a.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn clear(&mut self) {
+ // Overwriting `*self` drops the old tree via `BTreeMap`'s Drop impl.
+ *self = BTreeMap::new();
+ }
+
+ /// Returns a reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.get(&1), Some(&"a"));
+ /// assert_eq!(map.get(&2), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get<Q: ?Sized>(&self, key: &Q) -> Option<&V>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ // `?` short-circuits to `None` on an empty map (root is `None`).
+ match search::search_tree(self.root.as_ref()?.as_ref(), key) {
+ Found(handle) => Some(handle.into_kv().1),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Returns the key-value pair corresponding to the supplied key.
+ ///
+ /// The supplied key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.get_key_value(&1), Some((&1, &"a")));
+ /// assert_eq!(map.get_key_value(&2), None);
+ /// ```
+ #[stable(feature = "map_get_key_value", since = "1.40.0")]
+ pub fn get_key_value<Q: ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ match search::search_tree(self.root.as_ref()?.as_ref(), k) {
+ Found(handle) => Some(handle.into_kv()),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Returns the first key-value pair in the map.
+ /// The key in this pair is the minimum key in the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// assert_eq!(map.first_key_value(), None);
+ /// map.insert(1, "b");
+ /// map.insert(2, "a");
+ /// assert_eq!(map.first_key_value(), Some((&1, &"b")));
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn first_key_value(&self) -> Option<(&K, &V)> {
+ // The leftmost leaf edge; its right neighbor is the minimum entry.
+ let front = self.root.as_ref()?.as_ref().first_leaf_edge();
+ front.right_kv().ok().map(Handle::into_kv)
+ }
+
+ /// Returns the first entry in the map for in-place manipulation.
+ /// The key of this entry is the minimum key in the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(2, "b");
+ /// if let Some(mut entry) = map.first_entry() {
+ /// if *entry.key() > 0 {
+ /// entry.insert("first");
+ /// }
+ /// }
+ /// assert_eq!(*map.get(&1).unwrap(), "first");
+ /// assert_eq!(*map.get(&2).unwrap(), "b");
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn first_entry(&mut self) -> Option<OccupiedEntry<'_, K, V>> {
+ let front = self.root.as_mut()?.as_mut().first_leaf_edge();
+ let kv = front.right_kv().ok()?;
+ Some(OccupiedEntry {
+ handle: kv.forget_node_type(),
+ length: &mut self.length,
+ _marker: PhantomData,
+ })
+ }
+
+ /// Removes and returns the first element in the map.
+ /// The key of this element is the minimum key that was in the map.
+ ///
+ /// # Examples
+ ///
+ /// Draining elements in ascending order, while keeping a usable map each iteration.
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(2, "b");
+ /// while let Some((key, _val)) = map.pop_first() {
+ /// assert!(map.iter().all(|(k, _v)| *k > key));
+ /// }
+ /// assert!(map.is_empty());
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn pop_first(&mut self) -> Option<(K, V)> {
+ // Delegates to the entry API; removal keeps `length` in sync.
+ self.first_entry().map(|entry| entry.remove_entry())
+ }
+
+ /// Returns the last key-value pair in the map.
+ /// The key in this pair is the maximum key in the map.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "b");
+ /// map.insert(2, "a");
+ /// assert_eq!(map.last_key_value(), Some((&2, &"a")));
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn last_key_value(&self) -> Option<(&K, &V)> {
+ // Mirror of `first_key_value`: rightmost leaf edge, left neighbor.
+ let back = self.root.as_ref()?.as_ref().last_leaf_edge();
+ back.left_kv().ok().map(Handle::into_kv)
+ }
+
+ /// Returns the last entry in the map for in-place manipulation.
+ /// The key of this entry is the maximum key in the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(2, "b");
+ /// if let Some(mut entry) = map.last_entry() {
+ /// if *entry.key() > 0 {
+ /// entry.insert("last");
+ /// }
+ /// }
+ /// assert_eq!(*map.get(&1).unwrap(), "a");
+ /// assert_eq!(*map.get(&2).unwrap(), "last");
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn last_entry(&mut self) -> Option<OccupiedEntry<'_, K, V>> {
+ let back = self.root.as_mut()?.as_mut().last_leaf_edge();
+ let kv = back.left_kv().ok()?;
+ Some(OccupiedEntry {
+ handle: kv.forget_node_type(),
+ length: &mut self.length,
+ _marker: PhantomData,
+ })
+ }
+
+ /// Removes and returns the last element in the map.
+ /// The key of this element is the maximum key that was in the map.
+ ///
+ /// # Examples
+ ///
+ /// Draining elements in descending order, while keeping a usable map each iteration.
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// map.insert(2, "b");
+ /// while let Some((key, _val)) = map.pop_last() {
+ /// assert!(map.iter().all(|(k, _v)| *k < key));
+ /// }
+ /// assert!(map.is_empty());
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn pop_last(&mut self) -> Option<(K, V)> {
+ // Delegates to the entry API; removal keeps `length` in sync.
+ self.last_entry().map(|entry| entry.remove_entry())
+ }
+
+ /// Returns `true` if the map contains a value for the specified key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.contains_key(&1), true);
+ /// assert_eq!(map.contains_key(&2), false);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn contains_key<Q: ?Sized>(&self, key: &Q) -> bool
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ // Simple wrapper over `get`; one tree search, no extra work.
+ self.get(key).is_some()
+ }
+
+ /// Returns a mutable reference to the value corresponding to the key.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// if let Some(x) = map.get_mut(&1) {
+ /// *x = "b";
+ /// }
+ /// assert_eq!(map[&1], "b");
+ /// ```
+ // See `get` for implementation notes, this is basically a copy-paste with mut's added
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get_mut<Q: ?Sized>(&mut self, key: &Q) -> Option<&mut V>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ match search::search_tree(self.root.as_mut()?.as_mut(), key) {
+ Found(handle) => Some(handle.into_kv_mut().1),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Inserts a key-value pair into the map.
+ ///
+ /// If the map did not have this key present, `None` is returned.
+ ///
+ /// If the map did have this key present, the value is updated, and the old
+ /// value is returned. The key is not updated, though; this matters for
+ /// types that can be `==` without being identical. See the [module-level
+ /// documentation] for more.
+ ///
+ /// [module-level documentation]: index.html#insert-and-complex-keys
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// assert_eq!(map.insert(37, "a"), None);
+ /// assert_eq!(map.is_empty(), false);
+ ///
+ /// map.insert(37, "b");
+ /// assert_eq!(map.insert(37, "c"), Some("b"));
+ /// assert_eq!(map[&37], "c");
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, key: K, value: V) -> Option<V> {
+ // Implemented on top of the entry API: occupied -> swap value,
+ // vacant -> insert fresh and report `None`.
+ match self.entry(key) {
+ Occupied(mut entry) => Some(entry.insert(value)),
+ Vacant(entry) => {
+ entry.insert(value);
+ None
+ }
+ }
+ }
+
+ /// Removes a key from the map, returning the value at the key if the key
+ /// was previously in the map.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.remove(&1), Some("a"));
+ /// assert_eq!(map.remove(&1), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove<Q: ?Sized>(&mut self, key: &Q) -> Option<V>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ self.remove_entry(key).map(|(_, v)| v)
+ }
+
+ /// Removes a key from the map, returning the stored key and value if the key
+ /// was previously in the map.
+ ///
+ /// The key may be any borrowed form of the map's key type, but the ordering
+ /// on the borrowed form *must* match the ordering on the key type.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(1, "a");
+ /// assert_eq!(map.remove_entry(&1), Some((1, "a")));
+ /// assert_eq!(map.remove_entry(&1), None);
+ /// ```
+ #[stable(feature = "btreemap_remove_entry", since = "1.45.0")]
+ pub fn remove_entry<Q: ?Sized>(&mut self, key: &Q) -> Option<(K, V)>
+ where
+ K: Borrow<Q>,
+ Q: Ord,
+ {
+ match search::search_tree(self.root.as_mut()?.as_mut(), key) {
+ Found(handle) => Some(
+ OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }
+ .remove_entry(),
+ ),
+ GoDown(_) => None,
+ }
+ }
+
+ /// Moves all elements from `other` into `Self`, leaving `other` empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ ///
+ /// let mut b = BTreeMap::new();
+ /// b.insert(3, "d");
+ /// b.insert(4, "e");
+ /// b.insert(5, "f");
+ ///
+ /// a.append(&mut b);
+ ///
+ /// assert_eq!(a.len(), 5);
+ /// assert_eq!(b.len(), 0);
+ ///
+ /// assert_eq!(a[&1], "a");
+ /// assert_eq!(a[&2], "b");
+ /// assert_eq!(a[&3], "d");
+ /// assert_eq!(a[&4], "e");
+ /// assert_eq!(a[&5], "f");
+ /// ```
+ #[stable(feature = "btree_append", since = "1.11.0")]
+ pub fn append(&mut self, other: &mut Self) {
+ // Do we have to append anything at all?
+ if other.is_empty() {
+ return;
+ }
+
+ // We can just swap `self` and `other` if `self` is empty.
+ if self.is_empty() {
+ mem::swap(self, other);
+ return;
+ }
+
+ // First, we merge `self` and `other` into a sorted sequence in linear time.
+ let self_iter = mem::take(self).into_iter();
+ let other_iter = mem::take(other).into_iter();
+ let iter = MergeIter { left: self_iter.peekable(), right: other_iter.peekable() };
+
+ // Second, we build a tree from the sorted sequence in linear time.
+ self.from_sorted_iter(iter);
+ }
+
+ /// Constructs a double-ended iterator over a sub-range of elements in the map.
+ /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+ /// yield elements from min (inclusive) to max (exclusive).
+ /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+ /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+ /// range from 4 to 10.
+ ///
+ /// # Panics
+ ///
+ /// Panics if range `start > end`.
+ /// Panics if range `start == end` and both bounds are `Excluded`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::ops::Bound::Included;
+ ///
+ /// let mut map = BTreeMap::new();
+ /// map.insert(3, "a");
+ /// map.insert(5, "b");
+ /// map.insert(8, "c");
+ /// for (&key, &value) in map.range((Included(&4), Included(&8))) {
+ /// println!("{}: {}", key, value);
+ /// }
+ /// assert_eq!(Some((&5, &"b")), map.range(4..).next());
+ /// ```
+ #[stable(feature = "btree_range", since = "1.17.0")]
+ pub fn range<T: ?Sized, R>(&self, range: R) -> Range<'_, K, V>
+ where
+ T: Ord,
+ K: Borrow<T>,
+ R: RangeBounds<T>,
+ {
+ if let Some(root) = &self.root {
+ let (f, b) = range_search(root.as_ref(), range);
+
+ Range { front: Some(f), back: Some(b) }
+ } else {
+ Range { front: None, back: None }
+ }
+ }
+
+ /// Constructs a mutable double-ended iterator over a sub-range of elements in the map.
+ /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+ /// yield elements from min (inclusive) to max (exclusive).
+ /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+ /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+ /// range from 4 to 10.
+ ///
+ /// # Panics
+ ///
+ /// Panics if range `start > end`.
+ /// Panics if range `start == end` and both bounds are `Excluded`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, i32> = ["Alice", "Bob", "Carol", "Cheryl"]
+ /// .iter()
+ /// .map(|&s| (s, 0))
+ /// .collect();
+ /// for (_, balance) in map.range_mut("B".."Cheryl") {
+ /// *balance += 100;
+ /// }
+ /// for (name, balance) in &map {
+ /// println!("{} => {}", name, balance);
+ /// }
+ /// ```
+ #[stable(feature = "btree_range", since = "1.17.0")]
+ pub fn range_mut<T: ?Sized, R>(&mut self, range: R) -> RangeMut<'_, K, V>
+ where
+ T: Ord,
+ K: Borrow<T>,
+ R: RangeBounds<T>,
+ {
+ if let Some(root) = &mut self.root {
+ let (f, b) = range_search(root.as_mut(), range);
+
+ RangeMut { front: Some(f), back: Some(b), _marker: PhantomData }
+ } else {
+ RangeMut { front: None, back: None, _marker: PhantomData }
+ }
+ }
+
+ /// Gets the given key's corresponding entry in the map for in-place manipulation.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut count: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// // count the number of occurrences of letters in the vec
+ /// for x in vec!["a","b","a","c","a","b"] {
+ /// *count.entry(x).or_insert(0) += 1;
+ /// }
+ ///
+ /// assert_eq!(count["a"], 3);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
+ // FIXME(@porglezomp) Avoid allocating if we don't insert
+ let root = Self::ensure_is_owned(&mut self.root);
+ match search::search_tree(root.as_mut(), &key) {
+ Found(handle) => {
+ Occupied(OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData })
+ }
+ GoDown(handle) => {
+ Vacant(VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData })
+ }
+ }
+ }
+
+ fn from_sorted_iter<I: Iterator<Item = (K, V)>>(&mut self, iter: I) {
+ let root = Self::ensure_is_owned(&mut self.root);
+ let mut cur_node = root.as_mut().last_leaf_edge().into_node();
+ // Iterate through all key-value pairs, pushing them into nodes at the right level.
+ for (key, value) in iter {
+ // Try to push key-value pair into the current leaf node.
+ if cur_node.len() < node::CAPACITY {
+ cur_node.push(key, value);
+ } else {
+ // No space left, go up and push there.
+ let mut open_node;
+ let mut test_node = cur_node.forget_type();
+ loop {
+ match test_node.ascend() {
+ Ok(parent) => {
+ let parent = parent.into_node();
+ if parent.len() < node::CAPACITY {
+ // Found a node with space left, push here.
+ open_node = parent;
+ break;
+ } else {
+ // Go up again.
+ test_node = parent.forget_type();
+ }
+ }
+ Err(node) => {
+ // We are at the top, create a new root node and push there.
+ open_node = node.into_root_mut().push_level();
+ break;
+ }
+ }
+ }
+
+ // Push key-value pair and new right subtree.
+ let tree_height = open_node.height() - 1;
+ let mut right_tree = node::Root::new_leaf();
+ for _ in 0..tree_height {
+ right_tree.push_level();
+ }
+ open_node.push(key, value, right_tree);
+
+ // Go down to the right-most leaf again.
+ cur_node = open_node.forget_type().last_leaf_edge().into_node();
+ }
+
+ self.length += 1;
+ }
+ Self::fix_right_edge(root)
+ }
+
+ fn fix_right_edge(root: &mut node::Root<K, V>) {
+ // Handle underfull nodes, start from the top.
+ let mut cur_node = root.as_mut();
+ while let Internal(internal) = cur_node.force() {
+ // Check if right-most child is underfull.
+ let mut last_edge = internal.last_edge();
+ let right_child_len = last_edge.reborrow().descend().len();
+ if right_child_len < node::MIN_LEN {
+ // We need to steal.
+ let mut last_kv = match last_edge.left_kv() {
+ Ok(left) => left,
+ Err(_) => unreachable!(),
+ };
+ last_kv.bulk_steal_left(node::MIN_LEN - right_child_len);
+ last_edge = last_kv.right_edge();
+ }
+
+ // Go further down.
+ cur_node = last_edge.descend();
+ }
+ }
+
+ /// Splits the collection into two at the given key. Returns everything after the given key,
+ /// including the key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut a = BTreeMap::new();
+ /// a.insert(1, "a");
+ /// a.insert(2, "b");
+ /// a.insert(3, "c");
+ /// a.insert(17, "d");
+ /// a.insert(41, "e");
+ ///
+ /// let b = a.split_off(&3);
+ ///
+ /// assert_eq!(a.len(), 2);
+ /// assert_eq!(b.len(), 3);
+ ///
+ /// assert_eq!(a[&1], "a");
+ /// assert_eq!(a[&2], "b");
+ ///
+ /// assert_eq!(b[&3], "c");
+ /// assert_eq!(b[&17], "d");
+ /// assert_eq!(b[&41], "e");
+ /// ```
+ #[stable(feature = "btree_split_off", since = "1.11.0")]
+ pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
+ where
+ K: Borrow<Q>,
+ {
+ if self.is_empty() {
+ return Self::new();
+ }
+
+ let total_num = self.len();
+ let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty
+
+ let mut right = Self::new();
+ let right_root = Self::ensure_is_owned(&mut right.root);
+ for _ in 0..left_root.height() {
+ right_root.push_level();
+ }
+
+ {
+ let mut left_node = left_root.as_mut();
+ let mut right_node = right_root.as_mut();
+
+ loop {
+ let mut split_edge = match search::search_node(left_node, key) {
+ // key is going to the right tree
+ Found(handle) => handle.left_edge(),
+ GoDown(handle) => handle,
+ };
+
+ split_edge.move_suffix(&mut right_node);
+
+ match (split_edge.force(), right_node.force()) {
+ (Internal(edge), Internal(node)) => {
+ left_node = edge.descend();
+ right_node = node.first_edge().descend();
+ }
+ (Leaf(_), Leaf(_)) => {
+ break;
+ }
+ _ => {
+ unreachable!();
+ }
+ }
+ }
+ }
+
+ left_root.fix_right_border();
+ right_root.fix_left_border();
+
+ if left_root.height() < right_root.height() {
+ self.recalc_length();
+ right.length = total_num - self.len();
+ } else {
+ right.recalc_length();
+ self.length = total_num - right.len();
+ }
+
+ right
+ }
+
+ /// Creates an iterator which uses a closure to determine if an element should be removed.
+ ///
+ /// If the closure returns true, the element is removed from the map and yielded.
+ /// If the closure returns false, or panics, the element remains in the map and will not be
+ /// yielded.
+ ///
+ /// Note that `drain_filter` lets you mutate every value in the filter closure, regardless of
+ /// whether you choose to keep or remove it.
+ ///
+ /// If the iterator is only partially consumed or not consumed at all, each of the remaining
+ /// elements will still be subjected to the closure and removed and dropped if it returns true.
+ ///
+ /// It is unspecified how many more elements will be subjected to the closure
+ /// if a panic occurs in the closure, or a panic occurs while dropping an element,
+ /// or if the `DrainFilter` value is leaked.
+ ///
+ /// # Examples
+ ///
+ /// Splitting a map into even and odd keys, reusing the original map:
+ ///
+ /// ```
+ /// #![feature(btree_drain_filter)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<i32, i32> = (0..8).map(|x| (x, x)).collect();
+ /// let evens: BTreeMap<_, _> = map.drain_filter(|k, _v| k % 2 == 0).collect();
+ /// let odds = map;
+ /// assert_eq!(evens.keys().copied().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
+ /// assert_eq!(odds.keys().copied().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
+ /// ```
+ #[unstable(feature = "btree_drain_filter", issue = "70530")]
+ pub fn drain_filter<F>(&mut self, pred: F) -> DrainFilter<'_, K, V, F>
+ where
+ F: FnMut(&K, &mut V) -> bool,
+ {
+ DrainFilter { pred, inner: self.drain_filter_inner() }
+ }
+ pub(super) fn drain_filter_inner(&mut self) -> DrainFilterInner<'_, K, V> {
+ let front = self.root.as_mut().map(|r| r.as_mut().first_leaf_edge());
+ DrainFilterInner { length: &mut self.length, cur_leaf_edge: front }
+ }
+
+ /// Calculates the number of elements if it is incorrect.
+ fn recalc_length(&mut self) {
+ fn dfs<'a, K, V>(node: NodeRef<marker::Immut<'a>, K, V, marker::LeafOrInternal>) -> usize
+ where
+ K: 'a,
+ V: 'a,
+ {
+ let mut res = node.len();
+
+ if let Internal(node) = node.force() {
+ let mut edge = node.first_edge();
+ loop {
+ res += dfs(edge.reborrow().descend());
+ match edge.right_kv() {
+ Ok(right_kv) => {
+ edge = right_kv.right_edge();
+ }
+ Err(_) => {
+ break;
+ }
+ }
+ }
+ }
+
+ res
+ }
+
+ self.length = dfs(self.root.as_ref().unwrap().as_ref());
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap<K, V> {
+ type Item = (&'a K, &'a V);
+ type IntoIter = Iter<'a, K, V>;
+
+ fn into_iter(self) -> Iter<'a, K, V> {
+ self.iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_unchecked()) }
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+
+ fn last(mut self) -> Option<(&'a K, &'a V)> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<(&'a K, &'a V)> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<(&'a K, &'a V)> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Iter<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ unsafe { Some(self.range.next_back_unchecked()) }
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Iter<'_, K, V> {
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Iter<'_, K, V> {
+ fn clone(&self) -> Self {
+ Iter { range: self.range.clone(), length: self.length }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> IntoIterator for &'a mut BTreeMap<K, V> {
+ type Item = (&'a K, &'a mut V);
+ type IntoIter = IterMut<'a, K, V>;
+
+ fn into_iter(self) -> IterMut<'a, K, V> {
+ self.iter_mut()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ let (k, v) = unsafe { self.range.next_unchecked() };
+ Some((k, v)) // coerce k from `&mut K` to `&K`
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+
+ fn last(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ let (k, v) = unsafe { self.range.next_back_unchecked() };
+ Some((k, v)) // coerce k from `&mut K` to `&K`
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for IterMut<'_, K, V> {
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for IterMut<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> IntoIterator for BTreeMap<K, V> {
+ type Item = (K, V);
+ type IntoIter = IntoIter<K, V>;
+
+ fn into_iter(self) -> IntoIter<K, V> {
+ let mut me = ManuallyDrop::new(self);
+ if let Some(root) = me.root.take() {
+ let (f, b) = full_range_search(root.into_ref());
+
+ IntoIter { front: Some(f), back: Some(b), length: me.length }
+ } else {
+ IntoIter { front: None, back: None, length: 0 }
+ }
+ }
+}
+
+#[stable(feature = "btree_drop", since = "1.7.0")]
+impl<K, V> Drop for IntoIter<K, V> {
+ fn drop(&mut self) {
+ struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
+
+ impl<'a, K, V> Drop for DropGuard<'a, K, V> {
+ fn drop(&mut self) {
+ // Continue the same loop we perform below. This only runs when unwinding, so we
+ // don't have to care about panics this time (they'll abort).
+ while let Some(_) = self.0.next() {}
+
+ unsafe {
+ let mut node =
+ unwrap_unchecked(ptr::read(&self.0.front)).into_node().forget_type();
+ while let Some(parent) = node.deallocate_and_ascend() {
+ node = parent.into_node().forget_type();
+ }
+ }
+ }
+ }
+
+ while let Some(pair) = self.next() {
+ let guard = DropGuard(self);
+ drop(pair);
+ mem::forget(guard);
+ }
+
+ unsafe {
+ if let Some(front) = ptr::read(&self.front) {
+ let mut node = front.into_node().forget_type();
+ // Most of the nodes have been deallocated while traversing
+ // but one pile from a leaf up to the root is left standing.
+ while let Some(parent) = node.deallocate_and_ascend() {
+ node = parent.into_node().forget_type();
+ }
+ }
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Iterator for IntoIter<K, V> {
+ type Item = (K, V);
+
+ fn next(&mut self) -> Option<(K, V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ Some(unsafe { self.front.as_mut().unwrap().next_unchecked() })
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.length, Some(self.length))
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
+ fn next_back(&mut self) -> Option<(K, V)> {
+ if self.length == 0 {
+ None
+ } else {
+ self.length -= 1;
+ Some(unsafe { self.back.as_mut().unwrap().next_back_unchecked() })
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for IntoIter<K, V> {
+ fn len(&self) -> usize {
+ self.length
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for IntoIter<K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for Keys<'a, K, V> {
+ type Item = &'a K;
+
+ fn next(&mut self) -> Option<&'a K> {
+ self.inner.next().map(|(k, _)| k)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+
+ fn last(mut self) -> Option<&'a K> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<&'a K> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<&'a K> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> {
+ fn next_back(&mut self) -> Option<&'a K> {
+ self.inner.next_back().map(|(k, _)| k)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Keys<'_, K, V> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Keys<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Keys<'_, K, V> {
+ fn clone(&self) -> Self {
+ Keys { inner: self.inner.clone() }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> Iterator for Values<'a, K, V> {
+ type Item = &'a V;
+
+ fn next(&mut self) -> Option<&'a V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+
+ fn last(mut self) -> Option<&'a V> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> {
+ fn next_back(&mut self) -> Option<&'a V> {
+ self.inner.next_back().map(|(_, v)| v)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> ExactSizeIterator for Values<'_, K, V> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Values<'_, K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K, V> Clone for Values<'_, K, V> {
+ fn clone(&self) -> Self {
+ Values { inner: self.inner.clone() }
+ }
+}
+
+/// An iterator produced by calling `drain_filter` on BTreeMap.
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+pub struct DrainFilter<'a, K, V, F>
+where
+ K: 'a,
+ V: 'a,
+ F: 'a + FnMut(&K, &mut V) -> bool,
+{
+ pred: F,
+ inner: DrainFilterInner<'a, K, V>,
+}
+/// Most of the implementation of DrainFilter, independent of the type
+/// of the predicate, thus also serving for BTreeSet::DrainFilter.
+pub(super) struct DrainFilterInner<'a, K: 'a, V: 'a> {
+ length: &'a mut usize,
+ cur_leaf_edge: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> Drop for DrainFilter<'_, K, V, F>
+where
+ F: FnMut(&K, &mut V) -> bool,
+{
+ fn drop(&mut self) {
+ self.for_each(drop);
+ }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> fmt::Debug for DrainFilter<'_, K, V, F>
+where
+ K: fmt::Debug,
+ V: fmt::Debug,
+ F: FnMut(&K, &mut V) -> bool,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("DrainFilter").field(&self.inner.peek()).finish()
+ }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> Iterator for DrainFilter<'_, K, V, F>
+where
+ F: FnMut(&K, &mut V) -> bool,
+{
+ type Item = (K, V);
+
+ fn next(&mut self) -> Option<(K, V)> {
+ self.inner.next(&mut self.pred)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
+ /// Allow Debug implementations to predict the next element.
+ pub(super) fn peek(&self) -> Option<(&K, &V)> {
+ let edge = self.cur_leaf_edge.as_ref()?;
+ edge.reborrow().next_kv().ok().map(|kv| kv.into_kv())
+ }
+
+ /// Implementation of a typical `DrainFilter::next` method, given the predicate.
+ pub(super) fn next<F>(&mut self, pred: &mut F) -> Option<(K, V)>
+ where
+ F: FnMut(&K, &mut V) -> bool,
+ {
+ while let Ok(mut kv) = self.cur_leaf_edge.take()?.next_kv() {
+ let (k, v) = kv.kv_mut();
+ if pred(k, v) {
+ *self.length -= 1;
+ let (k, v, leaf_edge_location) = kv.remove_kv_tracking();
+ self.cur_leaf_edge = Some(leaf_edge_location);
+ return Some((k, v));
+ }
+ self.cur_leaf_edge = Some(kv.next_leaf_edge());
+ }
+ None
+ }
+
+ /// Implementation of a typical `DrainFilter::size_hint` method.
+ pub(super) fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, Some(*self.length))
+ }
+}
+
+#[unstable(feature = "btree_drain_filter", issue = "70530")]
+impl<K, V, F> FusedIterator for DrainFilter<'_, K, V, F> where F: FnMut(&K, &mut V) -> bool {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Iterator for Range<'a, K, V> {
+ type Item = (&'a K, &'a V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.is_empty() { None } else { unsafe { Some(self.next_unchecked()) } }
+ }
+
+ fn last(mut self) -> Option<(&'a K, &'a V)> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<(&'a K, &'a V)> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<(&'a K, &'a V)> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> Iterator for ValuesMut<'a, K, V> {
+ type Item = &'a mut V;
+
+ fn next(&mut self) -> Option<&'a mut V> {
+ self.inner.next().map(|(_, v)| v)
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+
+ fn last(mut self) -> Option<&'a mut V> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<&'a mut V> {
+ self.inner.next_back().map(|(_, v)| v)
+ }
+}
+
+#[stable(feature = "map_values_mut", since = "1.10.0")]
+impl<K, V> ExactSizeIterator for ValuesMut<'_, K, V> {
+ fn len(&self) -> usize {
+ self.inner.len()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for ValuesMut<'_, K, V> {}
+
+impl<'a, K, V> Range<'a, K, V> {
+ fn is_empty(&self) -> bool {
+ self.front == self.back
+ }
+
+ unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
+ unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
+ }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a V)> {
+ if self.is_empty() { None } else { Some(unsafe { self.next_back_unchecked() }) }
+ }
+}
+
+impl<'a, K, V> Range<'a, K, V> {
+ unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
+ unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for Range<'_, K, V> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<K, V> Clone for Range<'_, K, V> {
+ fn clone(&self) -> Self {
+ Range { front: self.front, back: self.back }
+ }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> Iterator for RangeMut<'a, K, V> {
+ type Item = (&'a K, &'a mut V);
+
+ fn next(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.is_empty() {
+ None
+ } else {
+ let (k, v) = unsafe { self.next_unchecked() };
+ Some((k, v)) // coerce k from `&mut K` to `&K`
+ }
+ }
+
+ fn last(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next_back()
+ }
+
+ fn min(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next()
+ }
+
+ fn max(mut self) -> Option<(&'a K, &'a mut V)> {
+ self.next_back()
+ }
+}
+
+impl<'a, K, V> RangeMut<'a, K, V> {
+ fn is_empty(&self) -> bool {
+ self.front == self.back
+ }
+
+ unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
+ unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
+ }
+}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<'a, K, V> DoubleEndedIterator for RangeMut<'a, K, V> {
+ fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> {
+ if self.is_empty() {
+ None
+ } else {
+ let (k, v) = unsafe { self.next_back_unchecked() };
+ Some((k, v)) // coerce k from `&mut K` to `&K`
+ }
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
+
+impl<'a, K, V> RangeMut<'a, K, V> {
+ unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
+ unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> FromIterator<(K, V)> for BTreeMap<K, V> {
+ fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> BTreeMap<K, V> {
+ let mut map = BTreeMap::new();
+ map.extend(iter);
+ map
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> Extend<(K, V)> for BTreeMap<K, V> {
+ #[inline]
+ fn extend<T: IntoIterator<Item = (K, V)>>(&mut self, iter: T) {
+ iter.into_iter().for_each(move |(k, v)| {
+ self.insert(k, v);
+ });
+ }
+
+ #[inline]
+ fn extend_one(&mut self, (k, v): (K, V)) {
+ self.insert(k, v);
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap<K, V> {
+ fn extend<I: IntoIterator<Item = (&'a K, &'a V)>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().map(|(&key, &value)| (key, value)));
+ }
+
+ #[inline]
+ fn extend_one(&mut self, (&k, &v): (&'a K, &'a V)) {
+ self.insert(k, v);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Hash, V: Hash> Hash for BTreeMap<K, V> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ for elt in self {
+ elt.hash(state);
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V> Default for BTreeMap<K, V> {
+ /// Creates an empty `BTreeMap<K, V>`.
+ fn default() -> BTreeMap<K, V> {
+ BTreeMap::new()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: PartialEq, V: PartialEq> PartialEq for BTreeMap<K, V> {
+ fn eq(&self, other: &BTreeMap<K, V>) -> bool {
+ self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Eq, V: Eq> Eq for BTreeMap<K, V> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: PartialOrd, V: PartialOrd> PartialOrd for BTreeMap<K, V> {
+ #[inline]
+ fn partial_cmp(&self, other: &BTreeMap<K, V>) -> Option<Ordering> {
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, V: Ord> Ord for BTreeMap<K, V> {
+ #[inline]
+ fn cmp(&self, other: &BTreeMap<K, V>) -> Ordering {
+ self.iter().cmp(other.iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Debug, V: Debug> Debug for BTreeMap<K, V> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_map().entries(self.iter()).finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<K: Ord, Q: ?Sized, V> Index<&Q> for BTreeMap<K, V>
+where
+ K: Borrow<Q>,
+ Q: Ord,
+{
+ type Output = V;
+
+ /// Returns a reference to the value corresponding to the supplied key.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the key is not present in the `BTreeMap`.
+ #[inline]
+ fn index(&self, key: &Q) -> &V {
+ self.get(key).expect("no entry found for key")
+ }
+}
+
/// Finds the leaf edges delimiting a specified range in or underneath a node.
///
/// Descends both range endpoints level by level in lockstep, returning the
/// (front, back) leaf-edge pair. Panics if the range is syntactically empty
/// (equal and both-excluded bounds) or inverted (start > end).
fn range_search<BorrowType, K, V, Q: ?Sized, R: RangeBounds<Q>>(
    root: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    range: R,
) -> (
    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
)
where
    Q: Ord,
    K: Borrow<Q>,
{
    // Reject degenerate ranges up front, before touching the tree.
    match (range.start_bound(), range.end_bound()) {
        (Excluded(s), Excluded(e)) if s == e => {
            panic!("range start and end are equal and excluded in BTreeMap")
        }
        (Included(s) | Excluded(s), Included(e) | Excluded(e)) if s > e => {
            panic!("range start is greater than range end in BTreeMap")
        }
        _ => {}
    };

    // We duplicate the root NodeRef here -- we will never access it in a way
    // that overlaps references obtained from the root.
    let mut min_node = unsafe { ptr::read(&root) };
    let mut max_node = root;
    // Once a bound's key has been found in an internal node, lower levels no
    // longer need to search: the edge to follow is fully determined.
    let mut min_found = false;
    let mut max_found = false;

    loop {
        // Edge to descend through for the start bound at the current level.
        let front = match (min_found, range.start_bound()) {
            (false, Included(key)) => match search::search_node(min_node, key) {
                Found(kv) => {
                    min_found = true;
                    kv.left_edge()
                }
                GoDown(edge) => edge,
            },
            (false, Excluded(key)) => match search::search_node(min_node, key) {
                Found(kv) => {
                    min_found = true;
                    kv.right_edge()
                }
                GoDown(edge) => edge,
            },
            (true, Included(_)) => min_node.last_edge(),
            (true, Excluded(_)) => min_node.first_edge(),
            (_, Unbounded) => min_node.first_edge(),
        };

        // Edge to descend through for the end bound at the current level.
        let back = match (max_found, range.end_bound()) {
            (false, Included(key)) => match search::search_node(max_node, key) {
                Found(kv) => {
                    max_found = true;
                    kv.right_edge()
                }
                GoDown(edge) => edge,
            },
            (false, Excluded(key)) => match search::search_node(max_node, key) {
                Found(kv) => {
                    max_found = true;
                    kv.left_edge()
                }
                GoDown(edge) => edge,
            },
            (true, Included(_)) => max_node.first_edge(),
            (true, Excluded(_)) => max_node.last_edge(),
            (_, Unbounded) => max_node.last_edge(),
        };

        // A crossed pair can only happen if `Q`'s `Ord` is inconsistent.
        if front.partial_cmp(&back) == Some(Ordering::Greater) {
            panic!("Ord is ill-defined in BTreeMap range");
        }
        match (front.force(), back.force()) {
            (Leaf(f), Leaf(b)) => {
                return (f, b);
            }
            (Internal(min_int), Internal(max_int)) => {
                min_node = min_int.descend();
                max_node = max_int.descend();
            }
            // Both descents started at the same root, so they must reach
            // leaves at the same level (trees have uniform depth).
            _ => unreachable!("BTreeMap has different depths"),
        };
    }
}
+
/// Equivalent to `range_search(k, v, ..)` without the `Ord` bound.
///
/// Returns the (first, last) leaf-edge pair of the whole tree; used by the
/// unbounded iterators where no key comparison is needed.
fn full_range_search<BorrowType, K, V>(
    root: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
) -> (
    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
    Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge>,
) {
    // We duplicate the root NodeRef here -- we will never access it in a way
    // that overlaps references obtained from the root.
    let mut min_node = unsafe { ptr::read(&root) };
    let mut max_node = root;
    loop {
        // Always take the extreme edges: no bounds to compare against.
        let front = min_node.first_edge();
        let back = max_node.last_edge();
        match (front.force(), back.force()) {
            (Leaf(f), Leaf(b)) => {
                return (f, b);
            }
            (Internal(min_int), Internal(max_int)) => {
                min_node = min_int.descend();
                max_node = max_int.descend();
            }
            // Uniform tree depth: both descents reach leaf level together.
            _ => unreachable!("BTreeMap has different depths"),
        };
    }
}
+
impl<K, V> BTreeMap<K, V> {
    /// Gets an iterator over the entries of the map, sorted by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert(3, "c");
    /// map.insert(2, "b");
    /// map.insert(1, "a");
    ///
    /// for (key, value) in map.iter() {
    ///     println!("{}: {}", key, value);
    /// }
    ///
    /// let (first_key, first_value) = map.iter().next().unwrap();
    /// assert_eq!((*first_key, *first_value), (1, "a"));
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, K, V> {
        if let Some(root) = &self.root {
            // Delimit the whole tree by its first and last leaf edges.
            let (f, b) = full_range_search(root.as_ref());

            Iter { range: Range { front: Some(f), back: Some(b) }, length: self.length }
        } else {
            // Empty map: an iterator with no edges and zero length.
            Iter { range: Range { front: None, back: None }, length: 0 }
        }
    }

    /// Gets a mutable iterator over the entries of the map, sorted by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map = BTreeMap::new();
    /// map.insert("a", 1);
    /// map.insert("b", 2);
    /// map.insert("c", 3);
    ///
    /// // add 10 to the value if the key isn't "a"
    /// for (key, value) in map.iter_mut() {
    ///     if key != &"a" {
    ///         *value += 10;
    ///     }
    /// }
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        if let Some(root) = &mut self.root {
            // Same traversal as `iter`, but through a mutable root handle.
            let (f, b) = full_range_search(root.as_mut());

            IterMut {
                range: RangeMut { front: Some(f), back: Some(b), _marker: PhantomData },
                length: self.length,
            }
        } else {
            IterMut { range: RangeMut { front: None, back: None, _marker: PhantomData }, length: 0 }
        }
    }

    /// Gets an iterator over the keys of the map, in sorted order.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(2, "b");
    /// a.insert(1, "a");
    ///
    /// let keys: Vec<_> = a.keys().cloned().collect();
    /// assert_eq!(keys, [1, 2]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn keys(&self) -> Keys<'_, K, V> {
        // Thin projection over `iter`.
        Keys { inner: self.iter() }
    }

    /// Gets an iterator over the values of the map, in order by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, "hello");
    /// a.insert(2, "goodbye");
    ///
    /// let values: Vec<&str> = a.values().cloned().collect();
    /// assert_eq!(values, ["hello", "goodbye"]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn values(&self) -> Values<'_, K, V> {
        // Thin projection over `iter`.
        Values { inner: self.iter() }
    }

    /// Gets a mutable iterator over the values of the map, in order by key.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// a.insert(1, String::from("hello"));
    /// a.insert(2, String::from("goodbye"));
    ///
    /// for value in a.values_mut() {
    ///     value.push_str("!");
    /// }
    ///
    /// let values: Vec<String> = a.values().cloned().collect();
    /// assert_eq!(values, [String::from("hello!"),
    ///                     String::from("goodbye!")]);
    /// ```
    #[stable(feature = "map_values_mut", since = "1.10.0")]
    pub fn values_mut(&mut self) -> ValuesMut<'_, K, V> {
        // Thin projection over `iter_mut`.
        ValuesMut { inner: self.iter_mut() }
    }

    /// Returns the number of elements in the map.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// assert_eq!(a.len(), 0);
    /// a.insert(1, "a");
    /// assert_eq!(a.len(), 1);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn len(&self) -> usize {
        // The length is tracked eagerly on insert/remove; O(1).
        self.length
    }

    /// Returns `true` if the map contains no elements.
    ///
    /// # Examples
    ///
    /// Basic usage:
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut a = BTreeMap::new();
    /// assert!(a.is_empty());
    /// a.insert(1, "a");
    /// assert!(!a.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// If the root node is the empty (non-allocated) root node, allocate our
    /// own node. Is an associated function to avoid borrowing the entire BTreeMap.
    fn ensure_is_owned(root: &mut Option<node::Root<K, V>>) -> &mut node::Root<K, V> {
        // Lazily allocates the first leaf node on first insertion.
        root.get_or_insert_with(node::Root::new_leaf)
    }
}
+
+impl<'a, K: Ord, V> Entry<'a, K, V> {
+ /// Ensures a value is in the entry by inserting the default if empty, and returns
+ /// a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn or_insert(self, default: V) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => entry.insert(default),
+ }
+ }
+
+ /// Ensures a value is in the entry by inserting the result of the default function if empty,
+ /// and returns a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, String> = BTreeMap::new();
+ /// let s = "hoho".to_string();
+ ///
+ /// map.entry("poneyland").or_insert_with(|| s);
+ ///
+ /// assert_eq!(map["poneyland"], "hoho".to_string());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => entry.insert(default()),
+ }
+ }
+
+ #[unstable(feature = "or_insert_with_key", issue = "71024")]
+ /// Ensures a value is in the entry by inserting, if empty, the result of the default function,
+ /// which takes the key as its argument, and returns a mutable reference to the value in the
+ /// entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(or_insert_with_key)]
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// map.entry("poneyland").or_insert_with_key(|key| key.chars().count());
+ ///
+ /// assert_eq!(map["poneyland"], 9);
+ /// ```
+ #[inline]
+ pub fn or_insert_with_key<F: FnOnce(&K) -> V>(self, default: F) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => {
+ let value = default(entry.key());
+ entry.insert(value)
+ }
+ }
+ }
+
+ /// Returns a reference to this entry's key.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// ```
+ #[stable(feature = "map_entry_keys", since = "1.10.0")]
+ pub fn key(&self) -> &K {
+ match *self {
+ Occupied(ref entry) => entry.key(),
+ Vacant(ref entry) => entry.key(),
+ }
+ }
+
+ /// Provides in-place mutable access to an occupied entry before any
+ /// potential inserts into the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ ///
+ /// map.entry("poneyland")
+ /// .and_modify(|e| { *e += 1 })
+ /// .or_insert(42);
+ /// assert_eq!(map["poneyland"], 42);
+ ///
+ /// map.entry("poneyland")
+ /// .and_modify(|e| { *e += 1 })
+ /// .or_insert(42);
+ /// assert_eq!(map["poneyland"], 43);
+ /// ```
+ #[stable(feature = "entry_and_modify", since = "1.26.0")]
+ pub fn and_modify<F>(self, f: F) -> Self
+ where
+ F: FnOnce(&mut V),
+ {
+ match self {
+ Occupied(mut entry) => {
+ f(entry.get_mut());
+ Occupied(entry)
+ }
+ Vacant(entry) => Vacant(entry),
+ }
+ }
+}
+
+impl<'a, K: Ord, V: Default> Entry<'a, K, V> {
+ #[stable(feature = "entry_or_default", since = "1.28.0")]
+ /// Ensures a value is in the entry by inserting the default value if empty,
+ /// and returns a mutable reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, Option<usize>> = BTreeMap::new();
+ /// map.entry("poneyland").or_default();
+ ///
+ /// assert_eq!(map["poneyland"], None);
+ /// ```
+ pub fn or_default(self) -> &'a mut V {
+ match self {
+ Occupied(entry) => entry.into_mut(),
+ Vacant(entry) => entry.insert(Default::default()),
+ }
+ }
+}
+
impl<'a, K: Ord, V> VacantEntry<'a, K, V> {
    /// Gets a reference to the key that would be used when inserting a value
    /// through the VacantEntry.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
    /// ```
    #[stable(feature = "map_entry_keys", since = "1.10.0")]
    pub fn key(&self) -> &K {
        &self.key
    }

    /// Take ownership of the key.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
    ///
    /// if let Entry::Vacant(v) = map.entry("poneyland") {
    ///     v.into_key();
    /// }
    /// ```
    #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
    pub fn into_key(self) -> K {
        self.key
    }

    /// Sets the value of the entry with the `VacantEntry`'s key,
    /// and returns a mutable reference to it.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeMap;
    /// use std::collections::btree_map::Entry;
    ///
    /// let mut map: BTreeMap<&str, u32> = BTreeMap::new();
    ///
    /// if let Entry::Vacant(o) = map.entry("poneyland") {
    ///     o.insert(37);
    /// }
    /// assert_eq!(map["poneyland"], 37);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn insert(self, value: V) -> &'a mut V {
        // Account for the new element before any node surgery.
        *self.length += 1;

        // Raw pointer to the inserted value; only dereferenced after all
        // splits are resolved, once the value's final location is fixed.
        let out_ptr;

        // Key/value/right-subtree triple that still needs to be inserted
        // one level up after a node split.
        let mut ins_k;
        let mut ins_v;
        let mut ins_edge;

        let mut cur_parent = match self.handle.insert(self.key, value) {
            // The leaf had room: done, hand back the reference directly.
            (Fit(handle), _) => return handle.into_kv_mut().1,
            // The leaf split: the middle KV and new right node must be
            // pushed into the parent (or a new root).
            (Split(left, k, v, right), ptr) => {
                ins_k = k;
                ins_v = v;
                ins_edge = right;
                out_ptr = ptr;
                left.ascend().map_err(|n| n.into_root_mut())
            }
        };

        // Propagate splits upward until some ancestor has room or we grow
        // the tree by one level.
        loop {
            match cur_parent {
                Ok(parent) => match parent.insert(ins_k, ins_v, ins_edge) {
                    Fit(_) => return unsafe { &mut *out_ptr },
                    Split(left, k, v, right) => {
                        ins_k = k;
                        ins_v = v;
                        ins_edge = right;
                        cur_parent = left.ascend().map_err(|n| n.into_root_mut());
                    }
                },
                Err(root) => {
                    // Split reached the root: add a new level on top.
                    root.push_level().push(ins_k, ins_v, ins_edge);
                    return unsafe { &mut *out_ptr };
                }
            }
        }
    }
}
+
+impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> {
+ /// Gets a reference to the key in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ /// assert_eq!(map.entry("poneyland").key(), &"poneyland");
+ /// ```
+ #[stable(feature = "map_entry_keys", since = "1.10.0")]
+ pub fn key(&self) -> &K {
+ self.handle.reborrow().into_kv().0
+ }
+
+ /// Take ownership of the key and value from the map.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// // We delete the entry from the map.
+ /// o.remove_entry();
+ /// }
+ ///
+ /// // If now try to get the value, it will panic:
+ /// // println!("{}", map["poneyland"]);
+ /// ```
+ #[stable(feature = "map_entry_recover_keys2", since = "1.12.0")]
+ pub fn remove_entry(self) -> (K, V) {
+ self.remove_kv()
+ }
+
+ /// Gets a reference to the value in the entry.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// assert_eq!(o.get(), &12);
+ /// }
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get(&self) -> &V {
+ self.handle.reborrow().into_kv().1
+ }
+
+ /// Gets a mutable reference to the value in the entry.
+ ///
+ /// If you need a reference to the `OccupiedEntry` that may outlive the
+ /// destruction of the `Entry` value, see [`into_mut`].
+ ///
+ /// [`into_mut`]: #method.into_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+ /// *o.get_mut() += 10;
+ /// assert_eq!(*o.get(), 22);
+ ///
+ /// // We can use the same Entry multiple times.
+ /// *o.get_mut() += 2;
+ /// }
+ /// assert_eq!(map["poneyland"], 24);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get_mut(&mut self) -> &mut V {
+ self.handle.kv_mut().1
+ }
+
+ /// Converts the entry into a mutable reference to its value.
+ ///
+ /// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
+ ///
+ /// [`get_mut`]: #method.get_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// assert_eq!(map["poneyland"], 12);
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// *o.into_mut() += 10;
+ /// }
+ /// assert_eq!(map["poneyland"], 22);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn into_mut(self) -> &'a mut V {
+ self.handle.into_kv_mut().1
+ }
+
+ /// Sets the value of the entry with the `OccupiedEntry`'s key,
+ /// and returns the entry's old value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(mut o) = map.entry("poneyland") {
+ /// assert_eq!(o.insert(15), 12);
+ /// }
+ /// assert_eq!(map["poneyland"], 15);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, value: V) -> V {
+ mem::replace(self.get_mut(), value)
+ }
+
+ /// Takes the value of the entry out of the map, and returns it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeMap;
+ /// use std::collections::btree_map::Entry;
+ ///
+ /// let mut map: BTreeMap<&str, usize> = BTreeMap::new();
+ /// map.entry("poneyland").or_insert(12);
+ ///
+ /// if let Entry::Occupied(o) = map.entry("poneyland") {
+ /// assert_eq!(o.remove(), 12);
+ /// }
+ /// // If we try to get "poneyland"'s value, it'll panic:
+ /// // println!("{}", map["poneyland"]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove(self) -> V {
+ self.remove_kv().1
+ }
+
+ fn remove_kv(self) -> (K, V) {
+ *self.length -= 1;
+
+ let (old_key, old_val, _) = self.handle.remove_kv_tracking();
+ (old_key, old_val)
+ }
+}
+
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
    /// Removes a key/value-pair from the map, and returns that pair, as well as
    /// the leaf edge corresponding to that former pair.
    ///
    /// Also rebalances the tree (merging or stealing between siblings) if the
    /// removal left a node underfull, keeping the returned edge valid
    /// throughout.
    fn remove_kv_tracking(
        self,
    ) -> (K, V, Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
        // `pos` tracks the leaf edge where the removed pair used to live; it
        // must be kept consistent through every rebalancing step below.
        let (mut pos, old_key, old_val, was_internal) = match self.force() {
            Leaf(leaf) => {
                let (hole, old_key, old_val) = leaf.remove();
                (hole, old_key, old_val, false)
            }
            Internal(mut internal) => {
                // Replace the location freed in the internal node with the next KV,
                // and remove that next KV from its leaf.

                let key_loc = internal.kv_mut().0 as *mut K;
                let val_loc = internal.kv_mut().1 as *mut V;

                // Deleting from the left side is typically faster since we can
                // just pop an element from the end of the KV array without
                // needing to shift the other values.
                let to_remove = internal.left_edge().descend().last_leaf_edge().left_kv().ok();
                let to_remove = unsafe { unwrap_unchecked(to_remove) };

                let (hole, key, val) = to_remove.remove();

                let old_key = unsafe { mem::replace(&mut *key_loc, key) };
                let old_val = unsafe { mem::replace(&mut *val_loc, val) };

                (hole, old_key, old_val, true)
            }
        };

        // Handle underflow
        let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() };
        let mut at_leaf = true;
        while cur_node.len() < node::MIN_LEN {
            match handle_underfull_node(cur_node) {
                AtRoot => break,
                Merged(edge, merged_with_left, offset) => {
                    // If we merged with our right sibling then our tracked
                    // position has not changed. However if we merged with our
                    // left sibling then our tracked position is now dangling.
                    if at_leaf && merged_with_left {
                        let idx = pos.idx() + offset;
                        let node = match unsafe { ptr::read(&edge).descend().force() } {
                            Leaf(leaf) => leaf,
                            Internal(_) => unreachable!(),
                        };
                        pos = unsafe { Handle::new_edge(node, idx) };
                    }

                    let parent = edge.into_node();
                    if parent.len() == 0 {
                        // We must be at the root
                        parent.into_root_mut().pop_level();
                        break;
                    } else {
                        // The merge may have made the parent underfull in
                        // turn; continue rebalancing one level up.
                        cur_node = parent.forget_type();
                        at_leaf = false;
                    }
                }
                Stole(stole_from_left) => {
                    // Adjust the tracked position if we stole from a left sibling
                    if stole_from_left && at_leaf {
                        // SAFETY: This is safe since we just added an element to our node.
                        unsafe {
                            pos.next_unchecked();
                        }
                    }
                    // Stealing fixes the underflow without shrinking the
                    // parent, so rebalancing stops here.
                    break;
                }
            }
        }

        // If we deleted from an internal node then we need to compensate for
        // the earlier swap and adjust the tracked position to point to the
        // next element.
        if was_internal {
            pos = unsafe { unwrap_unchecked(pos.next_kv().ok()).next_leaf_edge() };
        }

        (old_key, old_val, pos)
    }
}
+
impl<K, V> node::Root<K, V> {
    /// Removes empty levels on the top, but keeps an empty leaf if the entire
    /// tree is empty.
    fn fix_top(&mut self) {
        while self.height() > 0 && self.as_ref().len() == 0 {
            self.pop_level();
        }
    }

    /// Rebalances the rightmost path of the tree so that no node on it is
    /// underfull after a preceding bulk operation (e.g. `split_off`).
    fn fix_right_border(&mut self) {
        self.fix_top();

        {
            let mut cur_node = self.as_mut();

            // Walk down the right spine, fixing each level before descending.
            while let Internal(node) = cur_node.force() {
                let mut last_kv = node.last_kv();

                if last_kv.can_merge() {
                    cur_node = last_kv.merge().descend();
                } else {
                    let right_len = last_kv.reborrow().right_edge().descend().len();
                    // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
                    if right_len < node::MIN_LEN + 1 {
                        last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
                    }
                    cur_node = last_kv.right_edge().descend();
                }
            }
        }

        // Merging may have emptied the root; trim any dead levels again.
        self.fix_top();
    }

    /// The symmetric clone of `fix_right_border`.
    fn fix_left_border(&mut self) {
        self.fix_top();

        {
            let mut cur_node = self.as_mut();

            // Walk down the left spine, mirroring `fix_right_border`.
            while let Internal(node) = cur_node.force() {
                let mut first_kv = node.first_kv();

                if first_kv.can_merge() {
                    cur_node = first_kv.merge().descend();
                } else {
                    let left_len = first_kv.reborrow().left_edge().descend().len();
                    // `MIN_LEN + 1` for the same reason as in `fix_right_border`.
                    if left_len < node::MIN_LEN + 1 {
                        first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
                    }
                    cur_node = first_kv.left_edge().descend();
                }
            }
        }

        self.fix_top();
    }
}
+
/// Outcome of `handle_underfull_node`: how an underfull node was rebalanced
/// against one of its siblings.
enum UnderflowResult<'a, K, V> {
    /// The node was the root, so there is no sibling to rebalance against.
    AtRoot,
    /// The node was merged with a sibling. Carries the parent edge handle
    /// left by the merge, whether the merge was with the *left* sibling, and
    /// the index offset to apply to positions tracked inside the merged node.
    Merged(Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge>, bool, usize),
    /// Elements were stolen from a sibling; `true` iff stolen from the left.
    Stole(bool),
}
+
/// Rebalances one underfull node with an adjacent sibling, preferring the
/// left sibling when one exists, by merging or stealing elements.
fn handle_underfull_node<K, V>(
    node: NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal>,
) -> UnderflowResult<'_, K, V> {
    let parent = match node.ascend() {
        Ok(parent) => parent,
        // No parent: the root is allowed to be underfull.
        Err(_) => return AtRoot,
    };

    // Pick the KV separating `node` from a sibling: the left one if it
    // exists, otherwise the right one (every non-root node has at least one).
    let (is_left, mut handle) = match parent.left_kv() {
        Ok(left) => (true, left),
        Err(parent) => {
            let right = unsafe { unwrap_unchecked(parent.right_kv().ok()) };
            (false, right)
        }
    };

    if handle.can_merge() {
        // `offset` shifts indices of elements tracked in the right node of a
        // left-merge: left sibling's elements plus the separating KV.
        let offset = if is_left { handle.reborrow().left_edge().descend().len() + 1 } else { 0 };
        Merged(handle.merge(), is_left, offset)
    } else {
        // Cannot merge (combined length too large): steal from the sibling.
        if is_left {
            handle.steal_left();
        } else {
            handle.steal_right();
        }
        Stole(is_left)
    }
}
+
+impl<K: Ord, V, I: Iterator<Item = (K, V)>> Iterator for MergeIter<K, V, I> {
+ type Item = (K, V);
+
+ fn next(&mut self) -> Option<(K, V)> {
+ let res = match (self.left.peek(), self.right.peek()) {
+ (Some(&(ref left_key, _)), Some(&(ref right_key, _))) => left_key.cmp(right_key),
+ (Some(_), None) => Ordering::Less,
+ (None, Some(_)) => Ordering::Greater,
+ (None, None) => return None,
+ };
+
+ // Check which elements comes first and only advance the corresponding iterator.
+ // If two keys are equal, take the value from `right`.
+ match res {
+ Ordering::Less => self.left.next(),
+ Ordering::Greater => self.right.next(),
+ Ordering::Equal => {
+ self.left.next();
+ self.right.next()
+ }
+ }
+ }
+}
diff --git a/library/alloc/src/collections/btree/mod.rs b/library/alloc/src/collections/btree/mod.rs
new file mode 100644
index 00000000000..543ff41a4d4
--- /dev/null
+++ b/library/alloc/src/collections/btree/mod.rs
@@ -0,0 +1,27 @@
+pub mod map;
+mod navigate;
+mod node;
+mod search;
+pub mod set;
+
#[doc(hidden)]
// Internal trait used by `BTreeSet` to recover the stored key that compares
// equal to a borrowed query key (the set's value-less counterpart of
// map lookups).
trait Recover<Q: ?Sized> {
    // The owned key type stored in the collection.
    type Key;

    // Returns a reference to the stored key equal to `key`, if any.
    fn get(&self, key: &Q) -> Option<&Self::Key>;
    // Removes and returns the stored key equal to `key`, if any.
    fn take(&mut self, key: &Q) -> Option<Self::Key>;
    // Inserts `key`, returning the previously stored equal key, if any.
    fn replace(&mut self, key: Self::Key) -> Option<Self::Key>;
}
+
/// Unwraps an `Option` the caller guarantees to be `Some`.
///
/// In debug builds a `None` triggers a descriptive panic; in release builds
/// it is undefined behavior, allowing the check to be optimized out.
///
/// Uses the stable `core::hint::unreachable_unchecked` rather than the
/// unstable `core::intrinsics::unreachable` intrinsic — they are documented
/// to be semantically identical.
///
/// # Safety
///
/// `val` must be `Some`.
#[inline(always)]
pub unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
    val.unwrap_or_else(|| {
        if cfg!(debug_assertions) {
            panic!("'unchecked' unwrap on None in BTreeMap");
        } else {
            // SAFETY: unreachable by this function's contract (`val` is `Some`).
            unsafe { core::hint::unreachable_unchecked() }
        }
    })
}
diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs
new file mode 100644
index 00000000000..44f0e25bbd7
--- /dev/null
+++ b/library/alloc/src/collections/btree/navigate.rs
@@ -0,0 +1,261 @@
+use core::ptr;
+
+use super::node::{marker, ForceResult::*, Handle, NodeRef};
+use super::unwrap_unchecked;
+
impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the right side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the last one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        // Climb toward the root until some node has a KV to the right of the
        // edge we arrived through.
        loop {
            edge = match edge.right_kv() {
                Ok(internal_kv) => return Ok(internal_kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    // Ran off the top: this was the tree's last edge.
                    Err(root) => return Err(root.forget_type()),
                },
            }
        }
    }

    /// Given a leaf edge handle, returns [`Result::Ok`] with a handle to the neighboring KV
    /// on the left side, which is either in the same leaf node or in an ancestor node.
    /// If the leaf edge is the first one in the tree, returns [`Result::Err`] with the root node.
    pub fn next_back_kv(
        self,
    ) -> Result<
        Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV>,
        NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
    > {
        let mut edge = self.forget_node_type();
        // Mirror image of `next_kv`: climb until a KV exists to the left.
        loop {
            edge = match edge.left_kv() {
                Ok(internal_kv) => return Ok(internal_kv),
                Err(last_edge) => match last_edge.into_node().ascend() {
                    Ok(parent_edge) => parent_edge.forget_node_type(),
                    // Ran off the top: this was the tree's first edge.
                    Err(root) => return Err(root.forget_type()),
                },
            }
        }
    }
}
+
// Generates the two deallocating KV-advance helpers below (forward via
// `right_kv`, backward via `left_kv`). NOTE(review): the macro name carries a
// historical typo ("uncheched"); renaming would touch its uses, so it is
// only noted here.
macro_rules! def_next_kv_uncheched_dealloc {
    { unsafe fn $name:ident : $adjacent_kv:ident } => {
        /// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
        /// while deallocating any node left behind.
        /// Unsafe for two reasons:
        /// - The caller must ensure that the leaf edge is not the last one in the tree.
        /// - The node pointed at by the given handle, and its ancestors, may be deallocated,
        ///   while the reference to those nodes in the surviving ancestors is left dangling;
        ///   thus using the returned handle to navigate further is dangerous.
        unsafe fn $name <K, V>(
            leaf_edge: Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>,
        ) -> Handle<NodeRef<marker::Owned, K, V, marker::LeafOrInternal>, marker::KV> {
            let mut edge = leaf_edge.forget_node_type();
            loop {
                edge = match edge.$adjacent_kv() {
                    Ok(internal_kv) => return internal_kv,
                    Err(last_edge) => {
                        // The node is exhausted in this direction: free it
                        // and continue from the parent edge.
                        unsafe {
                            let parent_edge = last_edge.into_node().deallocate_and_ascend();
                            unwrap_unchecked(parent_edge).forget_node_type()
                        }
                    }
                }
            }
        }
    };
}

def_next_kv_uncheched_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
+
/// Replaces the value behind the unique reference `v` with the value produced
/// by `change`, returning the closure's auxiliary result.
///
/// Safety: The change closure must not panic — while it runs, the old value
/// exists both behind `v` and in the closure's argument.
#[inline]
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
    // Move the current value out by bitwise copy; `v` keeps pointing at it.
    let current = unsafe { ptr::read(v) };
    let (replacement, ret) = change(current);
    // Overwrite the (now logically moved-out) slot with the replacement.
    unsafe {
        ptr::write(v, replacement);
    }
    ret
}
+
impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    /// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
    pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
        unsafe {
            // `replace` swaps in the advanced handle; the closure cannot
            // panic because `next_kv` only navigates.
            replace(self, |leaf_edge| {
                let kv = leaf_edge.next_kv();
                let kv = unwrap_unchecked(kv.ok());
                (kv.next_leaf_edge(), kv.into_kv())
            })
        }
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns references to the
    /// key and value in between.
    /// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
        unsafe {
            // Mirror of `next_unchecked`, walking backward.
            replace(self, |leaf_edge| {
                let kv = leaf_edge.next_back_kv();
                let kv = unwrap_unchecked(kv.ok());
                (kv.next_back_leaf_edge(), kv.into_kv())
            })
        }
    }
}
+
impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns references to the
    /// key and value in between.
    /// Unsafe for two reasons:
    /// - The caller must ensure that the leaf edge is not the last one in the tree.
    /// - Using the updated handle may well invalidate the returned references.
    pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
        unsafe {
            let kv = replace(self, |leaf_edge| {
                let kv = leaf_edge.next_kv();
                let kv = unwrap_unchecked(kv.ok());
                // Duplicate the handle: one copy advances `self`, the other
                // is kept to borrow the KV afterward.
                (ptr::read(&kv).next_leaf_edge(), kv)
            });
            // Doing the descend (and perhaps another move) invalidates the references
            // returned by `into_kv_mut`, so we have to do this last.
            kv.into_kv_mut()
        }
    }

    /// Moves the leaf edge handle to the previous leaf and returns references to the
    /// key and value in between.
    /// Unsafe for two reasons:
    /// - The caller must ensure that the leaf edge is not the first one in the tree.
    /// - Using the updated handle may well invalidate the returned references.
    pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
        unsafe {
            let kv = replace(self, |leaf_edge| {
                let kv = leaf_edge.next_back_kv();
                let kv = unwrap_unchecked(kv.ok());
                // Same duplicate-then-advance trick as `next_unchecked`.
                (ptr::read(&kv).next_back_leaf_edge(), kv)
            });
            // Doing the descend (and perhaps another move) invalidates the references
            // returned by `into_kv_mut`, so we have to do this last.
            kv.into_kv_mut()
        }
    }
}
+
impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
    /// Moves the leaf edge handle to the next leaf edge and returns the key and value
    /// in between, while deallocating any node left behind.
    /// Unsafe for two reasons:
    /// - The caller must ensure that the leaf edge is not the last one in the tree
    ///   and is not a handle previously resulting from counterpart `next_back_unchecked`.
    /// - Further use of the updated leaf edge handle is very dangerous. In particular,
    ///   if the leaf edge is the last edge of a node, that node and possibly ancestors
    ///   will be deallocated, while the reference to those nodes in the surviving ancestor
    ///   is left dangling.
    ///   The only safe way to proceed with the updated handle is to compare it, drop it,
    ///   call this method again subject to both preconditions listed in the first point,
    ///   or call counterpart `next_back_unchecked` subject to its preconditions.
    pub unsafe fn next_unchecked(&mut self) -> (K, V) {
        unsafe {
            replace(self, |leaf_edge| {
                let kv = next_kv_unchecked_dealloc(leaf_edge);
                // Move the pair out by bitwise copy; the node's slots are
                // never read again (consuming iteration).
                let k = ptr::read(kv.reborrow().into_kv().0);
                let v = ptr::read(kv.reborrow().into_kv().1);
                (kv.next_leaf_edge(), (k, v))
            })
        }
    }

    /// Moves the leaf edge handle to the previous leaf edge and returns the key
    /// and value in between, while deallocating any node left behind.
    /// Unsafe for two reasons:
    /// - The caller must ensure that the leaf edge is not the first one in the tree
    ///   and is not a handle previously resulting from counterpart `next_unchecked`.
    /// - Further use of the updated leaf edge handle is very dangerous. In particular,
    ///   if the leaf edge is the first edge of a node, that node and possibly ancestors
    ///   will be deallocated, while the reference to those nodes in the surviving ancestor
    ///   is left dangling.
    ///   The only safe way to proceed with the updated handle is to compare it, drop it,
    ///   call this method again subject to both preconditions listed in the first point,
    ///   or call counterpart `next_unchecked` subject to its preconditions.
    pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
        unsafe {
            replace(self, |leaf_edge| {
                let kv = next_back_kv_unchecked_dealloc(leaf_edge);
                // Same move-out-by-copy as `next_unchecked`, walking backward.
                let k = ptr::read(kv.reborrow().into_kv().0);
                let v = ptr::read(kv.reborrow().into_kv().1);
                (kv.next_back_leaf_edge(), (k, v))
            })
        }
    }
}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Returns the leftmost leaf edge in or underneath a node - in other words, the edge
+ /// you need first when navigating forward (or last when navigating backward).
+ #[inline]
+ pub fn first_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ let mut node = self;
+ loop {
+ match node.force() {
+ Leaf(leaf) => return leaf.first_edge(),
+ Internal(internal) => node = internal.first_edge().descend(),
+ }
+ }
+ }
+
+ /// Returns the rightmost leaf edge in or underneath a node - in other words, the edge
+ /// you need last when navigating forward (or first when navigating backward).
+ #[inline]
+ pub fn last_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ let mut node = self;
+ loop {
+ match node.force() {
+ Leaf(leaf) => return leaf.last_edge(),
+ Internal(internal) => node = internal.last_edge().descend(),
+ }
+ }
+ }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
+ /// Returns the leaf edge closest to a KV for forward navigation.
+ pub fn next_leaf_edge(self) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ match self.force() {
+ Leaf(leaf_kv) => leaf_kv.right_edge(),
+ Internal(internal_kv) => {
+ let next_internal_edge = internal_kv.right_edge();
+ next_internal_edge.descend().first_leaf_edge()
+ }
+ }
+ }
+
+ /// Returns the leaf edge closest to a KV for backward navigation.
+ pub fn next_back_leaf_edge(
+ self,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ match self.force() {
+ Leaf(leaf_kv) => leaf_kv.left_edge(),
+ Internal(internal_kv) => {
+ let next_internal_edge = internal_kv.left_edge();
+ next_internal_edge.descend().last_leaf_edge()
+ }
+ }
+ }
+}
diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs
new file mode 100644
index 00000000000..f7bd64608d6
--- /dev/null
+++ b/library/alloc/src/collections/btree/node.rs
@@ -0,0 +1,1488 @@
+// This is an attempt at an implementation following the ideal
+//
+// ```
+// struct BTreeMap<K, V> {
+// height: usize,
+// root: Option<Box<Node<K, V, height>>>
+// }
+//
+// struct Node<K, V, height: usize> {
+// keys: [K; 2 * B - 1],
+// vals: [V; 2 * B - 1],
+// edges: if height > 0 {
+// [Box<Node<K, V, height - 1>>; 2 * B]
+// } else { () },
+// parent: *const Node<K, V, height + 1>,
+// parent_idx: u16,
+// len: u16,
+// }
+// ```
+//
+// Since Rust doesn't actually have dependent types and polymorphic recursion,
+// we make do with lots of unsafety.
+
+// A major goal of this module is to avoid complexity by treating the tree as a generic (if
+// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such,
+// this module doesn't care whether the entries are sorted, which nodes can be underfull, or
+// even what underfull means. However, we do rely on a few invariants:
+//
+// - Trees must have uniform depth/height. This means that every path down to a leaf from a
+// given node has exactly the same length.
+// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges.
+// This implies that even an empty internal node has at least one edge.
+
+use core::cmp::Ordering;
+use core::marker::PhantomData;
+use core::mem::{self, MaybeUninit};
+use core::ptr::{self, NonNull, Unique};
+use core::slice;
+
+use crate::alloc::{AllocRef, Global, Layout};
+use crate::boxed::Box;
+
+// The branching factor of the tree: an internal node holds at most `2 * B` edges.
+const B: usize = 6;
+// Minimum number of key/value pairs a node may hold — presumably enforced for
+// non-root nodes only; confirm at the use sites in the rest of the module.
+pub const MIN_LEN: usize = B - 1;
+// Maximum number of key/value pairs any single node can hold.
+pub const CAPACITY: usize = 2 * B - 1;
+
+/// The underlying representation of leaf nodes.
+// `repr(C)` fixes the field order, which lets `InternalNode` embed this struct as a
+// prefix and lets pointers be cast between the two (see `InternalNode` below).
+#[repr(C)]
+struct LeafNode<K, V> {
+ /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`.
+ /// This either points to an actual node or is null.
+ parent: *const InternalNode<K, V>,
+
+ /// This node's index into the parent node's `edges` array.
+ /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`.
+ /// This is only guaranteed to be initialized when `parent` is non-null.
+ parent_idx: MaybeUninit<u16>,
+
+ /// The number of keys and values this node stores.
+ ///
+ /// This is placed next to `parent_idx` to encourage the compiler to join `len` and
+ /// `parent_idx` into the same 32-bit word, reducing space overhead.
+ len: u16,
+
+ /// The arrays storing the actual data of the node. Only the first `len` elements of each
+ /// array are initialized and valid.
+ keys: [MaybeUninit<K>; CAPACITY],
+ vals: [MaybeUninit<V>; CAPACITY],
+}
+
+impl<K, V> LeafNode<K, V> {
+ /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind
+ /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values.
+ unsafe fn new() -> Self {
+ LeafNode {
+ // As a general policy, we leave fields uninitialized if they can be, as this should
+ // be both slightly faster and easier to track in Valgrind.
+ keys: [MaybeUninit::UNINIT; CAPACITY],
+ vals: [MaybeUninit::UNINIT; CAPACITY],
+ // A fresh node has no parent and no data; only the first `len` (= 0) entries of
+ // `keys`/`vals` are ever considered initialized.
+ parent: ptr::null(),
+ parent_idx: MaybeUninit::uninit(),
+ len: 0,
+ }
+ }
+}
+
+/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden
+/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an
+/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the
+/// node, allowing code to act on leaf and internal nodes generically without having to even check
+/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`.
+#[repr(C)]
+struct InternalNode<K, V> {
+ data: LeafNode<K, V>,
+
+ /// The pointers to the children of this node. `len + 1` of these are considered
+ /// initialized and valid. Although during the process of `into_iter` or `drop`,
+ /// some pointers are dangling while others still need to be traversed.
+ // Note that `2 * B` equals `CAPACITY + 1`: one child on each side of every KV.
+ edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
+}
+
+impl<K, V> InternalNode<K, V> {
+ /// Creates a new `InternalNode`.
+ ///
+ /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking
+ /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1`
+ /// edges are initialized and valid, meaning that even when the node is empty (having a
+ /// `len` of 0), there must be one initialized and valid edge. This function does not set up
+ /// such an edge.
+ unsafe fn new() -> Self {
+ // SAFETY: `LeafNode::new` only requires that the node is not dropped with
+ // uninitialized fields; that obligation is passed on to our own caller (see above).
+ InternalNode { data: unsafe { LeafNode::new() }, edges: [MaybeUninit::UNINIT; 2 * B] }
+ }
+}
+
+/// A managed, non-null pointer to a node. This is either an owned pointer to
+/// `LeafNode<K, V>` or an owned pointer to `InternalNode<K, V>`.
+///
+/// However, `BoxedNode` contains no information as to which of the two types
+/// of nodes it actually contains, and, partially due to this lack of information,
+/// has no destructor.
+struct BoxedNode<K, V> {
+ // Always typed as a pointer to the leaf portion; for internal nodes this is a
+ // type-erased view (valid thanks to `repr(C)`, see `InternalNode`).
+ ptr: Unique<LeafNode<K, V>>,
+}
+
+impl<K, V> BoxedNode<K, V> {
+ /// Takes ownership of a heap-allocated leaf node.
+ fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
+ BoxedNode { ptr: Box::into_unique(node) }
+ }
+
+ /// Takes ownership of a heap-allocated internal node, erasing its type by
+ /// casting the pointer to the embedded `LeafNode` prefix.
+ fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
+ BoxedNode { ptr: Box::into_unique(node).cast() }
+ }
+
+ /// Builds a `BoxedNode` from a raw pointer. Unsafe because the result claims
+ /// unique ownership of the pointee; the caller must guarantee that.
+ unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
+ BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
+ }
+
+ /// Returns the underlying pointer without giving up ownership.
+ fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
+ NonNull::from(self.ptr)
+ }
+}
+
+/// An owned tree.
+///
+/// Note that this does not have a destructor, and must be cleaned up manually.
+pub struct Root<K, V> {
+ /// The root node; `force` treats it as a leaf exactly when `height` is 0.
+ node: BoxedNode<K, V>,
+ /// The number of levels below the root node.
+ height: usize,
+}
+
+// A `Root` owns its keys and values, so it can be shared or sent across threads
+// exactly when `K` and `V` can.
+unsafe impl<K: Sync, V: Sync> Sync for Root<K, V> {}
+unsafe impl<K: Send, V: Send> Send for Root<K, V> {}
+
+impl<K, V> Root<K, V> {
+ /// Returns the number of levels below the root.
+ pub fn height(&self) -> usize {
+ self.height
+ }
+
+ /// Returns a new owned tree, with its own root node that is initially empty.
+ pub fn new_leaf() -> Self {
+ Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), height: 0 }
+ }
+
+ /// Borrows the root node immutably. The `root` back-pointer is left null
+ /// because it is only needed for `Mut` borrows.
+ pub fn as_ref(&self) -> NodeRef<marker::Immut<'_>, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: ptr::null(),
+ _marker: PhantomData,
+ }
+ }
+
+ /// Borrows the root node mutably, recording a pointer back to `self` so the
+ /// borrower can reach the `Root` again (e.g. via `into_root_mut`).
+ pub fn as_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: self as *mut _,
+ _marker: PhantomData,
+ }
+ }
+
+ /// Consumes the tree, returning an owned reference to the root node.
+ pub fn into_ref(self) -> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: ptr::null(),
+ _marker: PhantomData,
+ }
+ }
+
+ /// Adds a new internal node with a single edge, pointing to the previous root, and make that
+ /// new node the root. This increases the height by 1 and is the opposite of `pop_level`.
+ pub fn push_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
+ let mut new_node = Box::new(unsafe { InternalNode::new() });
+ // Establish the one-edge invariant of internal nodes before installing the node.
+ new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
+
+ self.node = BoxedNode::from_internal(new_node);
+ self.height += 1;
+
+ let mut ret = NodeRef {
+ height: self.height,
+ node: self.node.as_ptr(),
+ root: self as *mut _,
+ _marker: PhantomData,
+ };
+
+ unsafe {
+ // The old root is now a child; point its parent link at the new root.
+ ret.reborrow_mut().first_edge().correct_parent_link();
+ }
+
+ ret
+ }
+
+ /// Removes the root node, using its first child as the new root. This cannot be called when
+ /// the tree consists only of a leaf node. As it is intended only to be called when the root
+ /// has only one edge, no cleanup is done on any of the other children of the root.
+ /// This decreases the height by 1 and is the opposite of `push_level`.
+ pub fn pop_level(&mut self) {
+ assert!(self.height > 0);
+
+ // Keep the old root's pointer around so its allocation can be freed below.
+ let top = self.node.ptr;
+
+ self.node = unsafe {
+ BoxedNode::from_ptr(
+ self.as_mut().cast_unchecked::<marker::Internal>().first_edge().descend().node,
+ )
+ };
+ self.height -= 1;
+ unsafe {
+ // The promoted child is now the root, so it no longer has a parent.
+ (*self.as_mut().as_leaf_mut()).parent = ptr::null();
+ }
+
+ unsafe {
+ // Free the old root. It was an internal node, as guaranteed by the height check.
+ Global.dealloc(NonNull::from(top).cast(), Layout::new::<InternalNode<K, V>>());
+ }
+ }
+}
+
+// N.B. `NodeRef` is always covariant in `K` and `V`, even when the `BorrowType`
+// is `Mut`. This is technically wrong, but cannot result in any unsafety due to
+// internal use of `NodeRef` because we stay completely generic over `K` and `V`.
+// However, whenever a public type wraps `NodeRef`, make sure that it has the
+// correct variance.
+/// A reference to a node.
+///
+/// This type has a number of parameters that controls how it acts:
+/// - `BorrowType`: This can be `Immut<'a>` or `Mut<'a>` for some `'a` or `Owned`.
+/// When this is `Immut<'a>`, the `NodeRef` acts roughly like `&'a Node`,
+/// when this is `Mut<'a>`, the `NodeRef` acts roughly like `&'a mut Node`,
+/// and when this is `Owned`, the `NodeRef` acts roughly like `Box<Node>`.
+/// - `K` and `V`: These control what types of things are stored in the nodes.
+/// - `Type`: This can be `Leaf`, `Internal`, or `LeafOrInternal`. When this is
+/// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the
+/// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the
+/// `NodeRef` could be pointing to either type of node.
+pub struct NodeRef<BorrowType, K, V, Type> {
+ /// The number of levels below the node.
+ height: usize,
+ // Always typed as a pointer to the leaf portion; valid for internal nodes too,
+ // thanks to `repr(C)` (see `InternalNode`).
+ node: NonNull<LeafNode<K, V>>,
+ // `root` is null unless the borrow type is `Mut`
+ root: *const Root<K, V>,
+ _marker: PhantomData<(BorrowType, Type)>,
+}
+
+// An immutable `NodeRef` acts like `&Node` and can therefore be freely copied.
+impl<'a, K: 'a, V: 'a, Type> Copy for NodeRef<marker::Immut<'a>, K, V, Type> {}
+impl<'a, K: 'a, V: 'a, Type> Clone for NodeRef<marker::Immut<'a>, K, V, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+unsafe impl<BorrowType, K: Sync, V: Sync, Type> Sync for NodeRef<BorrowType, K, V, Type> {}
+
+// The three `Send` impls mirror the `&Node` / `&mut Node` / owned-`Node` analogies
+// described on `NodeRef` itself.
+unsafe impl<'a, K: Sync + 'a, V: Sync + 'a, Type> Send for NodeRef<marker::Immut<'a>, K, V, Type> {}
+unsafe impl<'a, K: Send + 'a, V: Send + 'a, Type> Send for NodeRef<marker::Mut<'a>, K, V, Type> {}
+unsafe impl<K: Send, V: Send, Type> Send for NodeRef<marker::Owned, K, V, Type> {}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::Internal> {
+ /// Exposes the data of an internal node. The `marker::Internal` type parameter
+ /// asserts that the pointee really is an `InternalNode`, which makes the cast sound.
+ fn as_internal(&self) -> &InternalNode<K, V> {
+ unsafe { &*(self.node.as_ptr() as *mut InternalNode<K, V>) }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Mutable counterpart of `as_internal`; sound for the same reason (the
+ /// `marker::Internal` parameter guarantees the pointee is an `InternalNode`).
+ fn as_internal_mut(&mut self) -> &mut InternalNode<K, V> {
+ unsafe { &mut *(self.node.as_ptr() as *mut InternalNode<K, V>) }
+ }
+}
+
+impl<BorrowType, K, V, Type> NodeRef<BorrowType, K, V, Type> {
+ /// Finds the length of the node. This is the number of keys or values. In an
+ /// internal node, the number of edges is `len() + 1`.
+ /// For any node, the number of possible edge handles is also `len() + 1`.
+ /// Note that, despite being safe, calling this function can have the side effect
+ /// of invalidating mutable references that unsafe code has created.
+ pub fn len(&self) -> usize {
+ self.as_leaf().len as usize
+ }
+
+ /// Returns the height of this node in the whole tree. Zero height denotes the
+ /// leaf level.
+ pub fn height(&self) -> usize {
+ self.height
+ }
+
+ /// Removes any static information about whether this node is a `Leaf` or an
+ /// `Internal` node.
+ pub fn forget_type(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
+ }
+
+ /// Temporarily takes out another, immutable reference to the same node.
+ fn reborrow(&self) -> NodeRef<marker::Immut<'_>, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
+ }
+
+ /// Exposes the leaf "portion" of any leaf or internal node.
+ /// If the node is a leaf, this function simply opens up its data.
+ /// If the node is an internal node, so not a leaf, it does have all the data a leaf has
+ /// (header, keys and values), and this function exposes that.
+ fn as_leaf(&self) -> &LeafNode<K, V> {
+ // The node must be valid for at least the LeafNode portion.
+ // This is not a reference in the NodeRef type because we don't know if
+ // it should be unique or shared.
+ unsafe { self.node.as_ref() }
+ }
+
+ /// Borrows a view into the keys stored in the node.
+ pub fn keys(&self) -> &[K] {
+ self.reborrow().into_key_slice()
+ }
+
+ /// Borrows a view into the values stored in the node.
+ fn vals(&self) -> &[V] {
+ self.reborrow().into_val_slice()
+ }
+
+ /// Finds the parent of the current node. Returns `Ok(handle)` if the current
+ /// node actually has a parent, where `handle` points to the edge of the parent
+ /// that points to the current node. Returns `Err(self)` if the current node has
+ /// no parent, giving back the original `NodeRef`.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
+ pub fn ascend(
+ self,
+ ) -> Result<Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge>, Self> {
+ let parent_as_leaf = self.as_leaf().parent as *const LeafNode<K, V>;
+ if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) {
+ Ok(Handle {
+ node: NodeRef {
+ height: self.height + 1,
+ node: non_zero,
+ root: self.root,
+ _marker: PhantomData,
+ },
+ // SAFETY: `parent_idx` is initialized whenever `parent` is non-null
+ // (see the invariant on `LeafNode::parent_idx`).
+ idx: unsafe { usize::from(*self.as_leaf().parent_idx.as_ptr()) },
+ _marker: PhantomData,
+ })
+ } else {
+ Err(self)
+ }
+ }
+
+ /// Returns the handle for the edge at position 0, left of any key/value pair.
+ pub fn first_edge(self) -> Handle<Self, marker::Edge> {
+ unsafe { Handle::new_edge(self, 0) }
+ }
+
+ /// Returns the handle for the edge at position `len()`, right of any key/value pair.
+ pub fn last_edge(self) -> Handle<Self, marker::Edge> {
+ let len = self.len();
+ unsafe { Handle::new_edge(self, len) }
+ }
+
+ /// Returns the handle for the first key/value pair.
+ /// Note that `self` must be nonempty.
+ pub fn first_kv(self) -> Handle<Self, marker::KV> {
+ let len = self.len();
+ assert!(len > 0);
+ unsafe { Handle::new_kv(self, 0) }
+ }
+
+ /// Returns the handle for the last key/value pair.
+ /// Note that `self` must be nonempty.
+ pub fn last_kv(self) -> Handle<Self, marker::KV> {
+ let len = self.len();
+ assert!(len > 0);
+ unsafe { Handle::new_kv(self, len - 1) }
+ }
+}
+
+impl<K, V> NodeRef<marker::Owned, K, V, marker::LeafOrInternal> {
+ /// Similar to `ascend`, gets a reference to a node's parent node, but also
+ /// deallocate the current node in the process. This is unsafe because the
+ /// current node will still be accessible despite being deallocated.
+ pub unsafe fn deallocate_and_ascend(
+ self,
+ ) -> Option<Handle<NodeRef<marker::Owned, K, V, marker::Internal>, marker::Edge>> {
+ // Copy out what we need before `ascend` consumes `self`.
+ let height = self.height;
+ let node = self.node;
+ let ret = self.ascend().ok();
+ unsafe {
+ // Free with the layout matching the node's kind: internal iff height > 0.
+ Global.dealloc(
+ node.cast(),
+ if height > 0 {
+ Layout::new::<InternalNode<K, V>>()
+ } else {
+ Layout::new::<LeafNode<K, V>>()
+ },
+ );
+ }
+ ret
+ }
+}
+
+impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Unsafely asserts to the compiler some static information about whether this
+ /// node is a `Leaf` or an `Internal`. The caller must ensure the node actually
+ /// matches `NewType`.
+ unsafe fn cast_unchecked<NewType>(&mut self) -> NodeRef<marker::Mut<'_>, K, V, NewType> {
+ NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
+ }
+
+ /// Temporarily takes out another, mutable reference to the same node. Beware, as
+ /// this method is very dangerous, doubly so since it may not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree and can even (through
+ /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+ /// can easily be used to make the original mutable pointer dangling, or, in the case
+ /// of a reborrowed handle, out of bounds.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+ // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
+ unsafe fn reborrow_mut(&mut self) -> NodeRef<marker::Mut<'_>, K, V, Type> {
+ NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
+ }
+
+ /// Exposes the leaf "portion" of any leaf or internal node for writing.
+ /// If the node is a leaf, this function simply opens up its data.
+ /// If the node is an internal node, so not a leaf, it does have all the data a leaf has
+ /// (header, keys and values), and this function exposes that.
+ ///
+ /// Returns a raw ptr to avoid asserting exclusive access to the entire node.
+ fn as_leaf_mut(&mut self) -> *mut LeafNode<K, V> {
+ self.node.as_ptr()
+ }
+
+ /// Borrows the initialized portion of the key array mutably.
+ fn keys_mut(&mut self) -> &mut [K] {
+ // SAFETY: the caller will not be able to call further methods on self
+ // until the key slice reference is dropped, as we have unique access
+ // for the lifetime of the borrow.
+ unsafe { self.reborrow_mut().into_key_slice_mut() }
+ }
+
+ /// Borrows the initialized portion of the value array mutably.
+ fn vals_mut(&mut self) -> &mut [V] {
+ // SAFETY: the caller will not be able to call further methods on self
+ // until the value slice reference is dropped, as we have unique access
+ // for the lifetime of the borrow.
+ unsafe { self.reborrow_mut().into_val_slice_mut() }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Immut<'a>, K, V, Type> {
+ fn into_key_slice(self) -> &'a [K] {
+ // SAFETY: the first `len()` keys are initialized (invariant of `LeafNode::keys`).
+ unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().keys), self.len()) }
+ }
+
+ fn into_val_slice(self) -> &'a [V] {
+ // SAFETY: the first `len()` values are initialized (invariant of `LeafNode::vals`).
+ unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().vals), self.len()) }
+ }
+
+ /// Borrows both the key and the value slice of the node at once.
+ fn into_slices(self) -> (&'a [K], &'a [V]) {
+ // SAFETY: equivalent to reborrow() except not requiring Type: 'a
+ let k = unsafe { ptr::read(&self) };
+ (k.into_key_slice(), self.into_val_slice())
+ }
+}
+
+impl<'a, K: 'a, V: 'a, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
+ /// Gets a mutable reference to the root itself. This is useful primarily when the
+ /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer.
+ pub fn into_root_mut(self) -> &'a mut Root<K, V> {
+ // The `root` pointer was set from `&mut Root` in `Root::as_mut`/`push_level`,
+ // so it is non-null and valid for `Mut` borrows (see the field comment on `NodeRef`).
+ unsafe { &mut *(self.root as *mut Root<K, V>) }
+ }
+
+ fn into_key_slice_mut(mut self) -> &'a mut [K] {
+ // SAFETY: The keys of a node must always be initialized up to length.
+ unsafe {
+ slice::from_raw_parts_mut(
+ MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys),
+ self.len(),
+ )
+ }
+ }
+
+ fn into_val_slice_mut(mut self) -> &'a mut [V] {
+ // SAFETY: The values of a node must always be initialized up to length.
+ unsafe {
+ slice::from_raw_parts_mut(
+ MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals),
+ self.len(),
+ )
+ }
+ }
+
+ /// Borrows both the key and the value slice of the node mutably, at once.
+ fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) {
+ // We cannot use the getters here, because calling the second one
+ // invalidates the reference returned by the first.
+ // More precisely, it is the call to `len` that is the culprit,
+ // because that creates a shared reference to the header, which *can*
+ // overlap with the keys (and even the values, for ZST keys).
+ let len = self.len();
+ let leaf = self.as_leaf_mut();
+ // SAFETY: The keys and values of a node must always be initialized up to length.
+ let keys = unsafe {
+ slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).keys), len)
+ };
+ let vals = unsafe {
+ slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).vals), len)
+ };
+ (keys, vals)
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
+ /// Adds a key/value pair to the end of the node.
+ pub fn push(&mut self, key: K, val: V) {
+ assert!(self.len() < CAPACITY);
+
+ let idx = self.len();
+
+ unsafe {
+ // Write into the first unused slot, then publish it by bumping `len`.
+ ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+ ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+
+ (*self.as_leaf_mut()).len += 1;
+ }
+ }
+
+ /// Adds a key/value pair to the beginning of the node.
+ pub fn push_front(&mut self, key: K, val: V) {
+ assert!(self.len() < CAPACITY);
+
+ unsafe {
+ // Shift the existing pairs right by one slot and write at index 0.
+ slice_insert(self.keys_mut(), 0, key);
+ slice_insert(self.vals_mut(), 0, val);
+
+ (*self.as_leaf_mut()).len += 1;
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
+ /// Adds a key/value pair and an edge to go to the right of that pair to
+ /// the end of the node.
+ pub fn push(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // The attached subtree must be exactly one level shorter than this node.
+ assert!(edge.height == self.height - 1);
+ assert!(self.len() < CAPACITY);
+
+ let idx = self.len();
+
+ unsafe {
+ ptr::write(self.keys_mut().get_unchecked_mut(idx), key);
+ ptr::write(self.vals_mut().get_unchecked_mut(idx), val);
+ // The edge to the right of KV `idx` lives at edge index `idx + 1`.
+ self.as_internal_mut().edges.get_unchecked_mut(idx + 1).write(edge.node);
+
+ (*self.as_leaf_mut()).len += 1;
+
+ // The new child must be told who its parent is and at which index it sits.
+ Handle::new_edge(self.reborrow_mut(), idx + 1).correct_parent_link();
+ }
+ }
+
+ // Unsafe because 'first' and 'after_last' must be in range
+ unsafe fn correct_childrens_parent_links(&mut self, first: usize, after_last: usize) {
+ debug_assert!(first <= self.len());
+ debug_assert!(after_last <= self.len() + 1);
+ for i in first..after_last {
+ unsafe { Handle::new_edge(self.reborrow_mut(), i) }.correct_parent_link();
+ }
+ }
+
+ /// Fixes the parent link and index of every child of this node.
+ fn correct_all_childrens_parent_links(&mut self) {
+ let len = self.len();
+ unsafe { self.correct_childrens_parent_links(0, len + 1) };
+ }
+
+ /// Adds a key/value pair and an edge to go to the left of that pair to
+ /// the beginning of the node.
+ pub fn push_front(&mut self, key: K, val: V, edge: Root<K, V>) {
+ assert!(edge.height == self.height - 1);
+ assert!(self.len() < CAPACITY);
+
+ unsafe {
+ slice_insert(self.keys_mut(), 0, key);
+ slice_insert(self.vals_mut(), 0, val);
+ // Shift all `len() + 1` existing edges right and install the new one at 0.
+ slice_insert(
+ slice::from_raw_parts_mut(
+ MaybeUninit::first_ptr_mut(&mut self.as_internal_mut().edges),
+ self.len() + 1,
+ ),
+ 0,
+ edge.node,
+ );
+
+ (*self.as_leaf_mut()).len += 1;
+
+ // Every child moved by one slot, so all their parent indices must be fixed.
+ self.correct_all_childrens_parent_links();
+ }
+ }
+}
+
+impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
+ /// Removes a key/value pair from the end of this node and returns the pair.
+ /// If this is an internal node, also removes the edge that was to the right
+ /// of that pair and returns the orphaned node that this edge owned with its
+ /// parent erased.
+ pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
+ assert!(self.len() > 0);
+
+ let idx = self.len() - 1;
+
+ unsafe {
+ let key = ptr::read(self.keys().get_unchecked(idx));
+ let val = ptr::read(self.vals().get_unchecked(idx));
+ let edge = match self.reborrow_mut().force() {
+ ForceResult::Leaf(_) => None,
+ ForceResult::Internal(internal) => {
+ let edge =
+ ptr::read(internal.as_internal().edges.get_unchecked(idx + 1).as_ptr());
+ let mut new_root = Root { node: edge, height: internal.height - 1 };
+ // Detach the orphaned subtree from its former parent.
+ (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
+ Some(new_root)
+ }
+ };
+
+ (*self.as_leaf_mut()).len -= 1;
+ (key, val, edge)
+ }
+ }
+
+ /// Removes a key/value pair from the beginning of this node. If this is an internal node,
+ /// also removes the edge that was to the left of that pair.
+ pub fn pop_front(&mut self) -> (K, V, Option<Root<K, V>>) {
+ assert!(self.len() > 0);
+
+ let old_len = self.len();
+
+ unsafe {
+ let key = slice_remove(self.keys_mut(), 0);
+ let val = slice_remove(self.vals_mut(), 0);
+ let edge = match self.reborrow_mut().force() {
+ ForceResult::Leaf(_) => None,
+ ForceResult::Internal(mut internal) => {
+ let edge = slice_remove(
+ slice::from_raw_parts_mut(
+ MaybeUninit::first_ptr_mut(&mut internal.as_internal_mut().edges),
+ old_len + 1,
+ ),
+ 0,
+ );
+
+ let mut new_root = Root { node: edge, height: internal.height - 1 };
+ // Detach the orphaned subtree from its former parent.
+ (*new_root.as_mut().as_leaf_mut()).parent = ptr::null();
+
+ // The remaining children all shifted one slot left; fix their
+ // parent indices to match.
+ for i in 0..old_len {
+ Handle::new_edge(internal.reborrow_mut(), i).correct_parent_link();
+ }
+
+ Some(new_root)
+ }
+ };
+
+ (*self.as_leaf_mut()).len -= 1;
+
+ (key, val, edge)
+ }
+ }
+
+ /// Exposes mutable raw pointers to the start of the node's keys and values.
+ fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) {
+ (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr())
+ }
+}
+
+impl<BorrowType, K, V> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ /// Checks whether a node is an `Internal` node or a `Leaf` node.
+ pub fn force(
+ self,
+ ) -> ForceResult<
+ NodeRef<BorrowType, K, V, marker::Leaf>,
+ NodeRef<BorrowType, K, V, marker::Internal>,
+ > {
+ // Height zero is the leaf level; any greater height means an internal node.
+ if self.height == 0 {
+ ForceResult::Leaf(NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData,
+ })
+ } else {
+ ForceResult::Internal(NodeRef {
+ height: self.height,
+ node: self.node,
+ root: self.root,
+ _marker: PhantomData,
+ })
+ }
+ }
+}
+
+/// A reference to a specific key/value pair or edge within a node. The `Node` parameter
+/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value
+/// pair) or `Edge` (signifying a handle on an edge).
+///
+/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to
+/// a child node, these represent the spaces where child pointers would go between the key/value
+/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one
+/// to the left of the node, one between the two pairs, and one at the right of the node.
+pub struct Handle<Node, Type> {
+ /// The node this handle points into.
+ node: Node,
+ /// The position within `node` of the edge or key/value pair.
+ idx: usize,
+ _marker: PhantomData<Type>,
+}
+
+// A `Handle` is copyable exactly when its node reference is.
+impl<Node: Copy, Type> Copy for Handle<Node, Type> {}
+// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be
+// `Clone`able is when it is an immutable reference and therefore `Copy`.
+impl<Node: Copy, Type> Clone for Handle<Node, Type> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<Node, Type> Handle<Node, Type> {
+ /// Retrieves the node that contains the edge or key/value pair this handle points to.
+ pub fn into_node(self) -> Node {
+ self.node
+ }
+
+ /// Returns the position of this handle in the node.
+ pub fn idx(&self) -> usize {
+ self.idx
+ }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV> {
+ /// Creates a new handle to a key/value pair in `node`.
+ /// Unsafe because the caller must ensure that `idx < node.len()`.
+ pub unsafe fn new_kv(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ debug_assert!(idx < node.len());
+
+ Handle { node, idx, _marker: PhantomData }
+ }
+
+ /// Returns the edge immediately to the left of this key/value pair.
+ pub fn left_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node, self.idx) }
+ }
+
+ /// Returns the edge immediately to the right of this key/value pair.
+ pub fn right_edge(self) -> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node, self.idx + 1) }
+ }
+}
+
+// Two handles are equal when they point at the same position in the same node.
+impl<BorrowType, K, V, NodeType, HandleType> PartialEq
+ for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+ fn eq(&self, other: &Self) -> bool {
+ self.node.node == other.node.node && self.idx == other.idx
+ }
+}
+
+// Handles in different nodes are unordered; within one node, positions compare.
+impl<BorrowType, K, V, NodeType, HandleType> PartialOrd
+ for Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ if self.node.node == other.node.node { Some(self.idx.cmp(&other.idx)) } else { None }
+ }
+}
+
+impl<BorrowType, K, V, NodeType, HandleType>
+ Handle<NodeRef<BorrowType, K, V, NodeType>, HandleType>
+{
+ /// Temporarily takes out another, immutable handle on the same location.
+ pub fn reborrow(&self) -> Handle<NodeRef<marker::Immut<'_>, K, V, NodeType>, HandleType> {
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type.
+ // Only the borrow type of the node changes; `idx` stays the same.
+ Handle { node: self.node.reborrow(), idx: self.idx, _marker: PhantomData }
+ }
+}
+
+impl<'a, K, V, NodeType, HandleType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, HandleType> {
+ /// Temporarily takes out another, mutable handle on the same location. Beware, as
+ /// this method is very dangerous, doubly so since it may not immediately appear
+ /// dangerous.
+ ///
+ /// Because mutable pointers can roam anywhere around the tree and can even (through
+ /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut`
+ /// can easily be used to make the original mutable pointer dangling, or, in the case
+ /// of a reborrowed handle, out of bounds.
+ // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts
+ // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety.
+ pub unsafe fn reborrow_mut(
+ &mut self,
+ ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NodeType>, HandleType> {
+ // We can't use Handle::new_kv or Handle::new_edge because we don't know our type.
+ // Only the borrow type of the node changes; `idx` stays the same.
+ Handle { node: unsafe { self.node.reborrow_mut() }, idx: self.idx, _marker: PhantomData }
+ }
+}
+
+impl<BorrowType, K, V, NodeType> Handle<NodeRef<BorrowType, K, V, NodeType>, marker::Edge> {
+ /// Creates a new handle to an edge in `node`.
+ /// Unsafe because the caller must ensure that `idx <= node.len()`.
+ pub unsafe fn new_edge(node: NodeRef<BorrowType, K, V, NodeType>, idx: usize) -> Self {
+ debug_assert!(idx <= node.len());
+
+ Handle { node, idx, _marker: PhantomData }
+ }
+
+ /// Returns the key/value pair immediately to the left of this edge, or
+ /// `Err(self)` if this is the leftmost edge of the node.
+ pub fn left_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+ if self.idx > 0 {
+ Ok(unsafe { Handle::new_kv(self.node, self.idx - 1) })
+ } else {
+ Err(self)
+ }
+ }
+
+ /// Returns the key/value pair immediately to the right of this edge, or
+ /// `Err(self)` if this is the rightmost edge of the node.
+ pub fn right_kv(self) -> Result<Handle<NodeRef<BorrowType, K, V, NodeType>, marker::KV>, Self> {
+ if self.idx < self.node.len() {
+ Ok(unsafe { Handle::new_kv(self.node, self.idx) })
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
+ /// Inserts a new key/value pair between the key/value pairs to the right and left of
+ /// this edge. This method assumes that there is enough space in the node for the new
+ /// pair to fit.
+ ///
+ /// The returned pointer points to the inserted value.
+ fn insert_fit(&mut self, key: K, val: V) -> *mut V {
+ // Necessary for correctness, but in a private module
+ debug_assert!(self.node.len() < CAPACITY);
+
+ unsafe {
+ slice_insert(self.node.keys_mut(), self.idx, key);
+ slice_insert(self.node.vals_mut(), self.idx, val);
+
+ (*self.node.as_leaf_mut()).len += 1;
+
+ self.node.vals_mut().get_unchecked_mut(self.idx)
+ }
+ }
+
+ /// Inserts a new key/value pair between the key/value pairs to the right and left of
+ /// this edge. This method splits the node if there isn't enough room.
+ ///
+ /// The returned pointer points to the inserted value.
+ pub fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) {
+ if self.node.len() < CAPACITY {
+ let ptr = self.insert_fit(key, val);
+ let kv = unsafe { Handle::new_kv(self.node, self.idx) };
+ (InsertResult::Fit(kv), ptr)
+ } else {
+ // Node is full: split around the middle KV (index B), then insert the new
+ // pair into whichever half its index falls into.
+ let middle = unsafe { Handle::new_kv(self.node, B) };
+ let (mut left, k, v, mut right) = middle.split();
+ let ptr = if self.idx <= B {
+ unsafe { Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val) }
+ } else {
+ // Indices past the middle land in the right half, shifted by the
+ // `B + 1` entries that stayed on the left or moved up.
+ unsafe {
+ Handle::new_edge(
+ right.as_mut().cast_unchecked::<marker::Leaf>(),
+ self.idx - (B + 1),
+ )
+ .insert_fit(key, val)
+ }
+ };
+ (InsertResult::Split(left, k, v, right), ptr)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ /// Fixes the parent pointer and index in the child node below this edge. This is useful
+ /// when the ordering of edges has been changed, such as in the various `insert` methods.
+ fn correct_parent_link(mut self) {
+ let idx = self.idx as u16;
+ let ptr = self.node.as_internal_mut() as *mut _;
+ let mut child = self.descend();
+ unsafe {
+ (*child.as_leaf_mut()).parent = ptr;
+ (*child.as_leaf_mut()).parent_idx.write(idx);
+ }
+ }
+
+ /// Unsafely asserts to the compiler some static information about whether the underlying
+ /// node of this handle is a `Leaf` or an `Internal`.
+ unsafe fn cast_unchecked<NewType>(
+ &mut self,
+ ) -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node.cast_unchecked(), self.idx) }
+ }
+
+ /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key/value pair to the right of this edge. This method assumes
+ /// that there is enough space in the node for the new pair to fit.
+ fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
+ // Necessary for correctness, but in an internal module
+ debug_assert!(self.node.len() < CAPACITY);
+ debug_assert!(edge.height == self.node.height - 1);
+
+ unsafe {
+ // This cast is a lie, but it allows us to reuse the key/value insertion logic.
+ self.cast_unchecked::<marker::Leaf>().insert_fit(key, val);
+
+ slice_insert(
+ slice::from_raw_parts_mut(
+ MaybeUninit::first_ptr_mut(&mut self.node.as_internal_mut().edges),
+ self.node.len(),
+ ),
+ self.idx + 1,
+ edge.node,
+ );
+
+ // Every edge from the inserted one onwards shifted right; fix the
+ // parent links of the corresponding children.
+ for i in (self.idx + 1)..(self.node.len() + 1) {
+ Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+ }
+ }
+ }
+
+ /// Inserts a new key/value pair and an edge that will go to the right of that new pair
+ /// between this edge and the key/value pair to the right of this edge. This method splits
+ /// the node if there isn't enough room.
+ pub fn insert(
+ mut self,
+ key: K,
+ val: V,
+ edge: Root<K, V>,
+ ) -> InsertResult<'a, K, V, marker::Internal> {
+ assert!(edge.height == self.node.height - 1);
+
+ if self.node.len() < CAPACITY {
+ self.insert_fit(key, val, edge);
+ let kv = unsafe { Handle::new_kv(self.node, self.idx) };
+ InsertResult::Fit(kv)
+ } else {
+ // Node is full: split around the middle KV (index B), then insert into
+ // whichever half the index falls into — same scheme as the leaf version.
+ let middle = unsafe { Handle::new_kv(self.node, B) };
+ let (mut left, k, v, mut right) = middle.split();
+ if self.idx <= B {
+ unsafe {
+ Handle::new_edge(left.reborrow_mut(), self.idx).insert_fit(key, val, edge);
+ }
+ } else {
+ unsafe {
+ Handle::new_edge(
+ right.as_mut().cast_unchecked::<marker::Internal>(),
+ self.idx - (B + 1),
+ )
+ .insert_fit(key, val, edge);
+ }
+ }
+ InsertResult::Split(left, k, v, right)
+ }
+ }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+ /// Finds the node pointed to by this edge.
+ ///
+ /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should
+ /// both, upon success, do nothing.
+ pub fn descend(self) -> NodeRef<BorrowType, K, V, marker::LeafOrInternal> {
+ NodeRef {
+ // A child always sits exactly one level below its parent.
+ height: self.node.height - 1,
+ node: unsafe {
+ // Read the edge pointer at `self.idx`; the slot is assumed initialized
+ // and in-bounds per the `Handle` invariant.
+ (&*self.node.as_internal().edges.get_unchecked(self.idx).as_ptr()).as_ptr()
+ },
+ root: self.node.root,
+ _marker: PhantomData,
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Immut<'a>, K, V, NodeType>, marker::KV> {
+ /// Returns shared references to the key and value at this handle's index,
+ /// borrowed for the lifetime of the underlying immutable node reference.
+ pub fn into_kv(self) -> (&'a K, &'a V) {
+ unsafe {
+ let (keys, vals) = self.node.into_slices();
+ (keys.get_unchecked(self.idx), vals.get_unchecked(self.idx))
+ }
+ }
+}
+
+impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ /// Consumes the handle and returns mutable references to the key and value,
+ /// valid for the full lifetime `'a` of the mutable borrow of the node.
+ pub fn into_kv_mut(self) -> (&'a mut K, &'a mut V) {
+ unsafe {
+ let (keys, vals) = self.node.into_slices_mut();
+ (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
+ }
+ }
+}
+
+impl<'a, K, V, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ /// Like `into_kv_mut`, but reborrows so the handle remains usable afterwards;
+ /// the returned references live only as long as `&mut self`.
+ pub fn kv_mut(&mut self) -> (&mut K, &mut V) {
+ unsafe {
+ let (keys, vals) = self.node.reborrow_mut().into_slices_mut();
+ (keys.get_unchecked_mut(self.idx), vals.get_unchecked_mut(self.idx))
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the key/value pairs to the left of
+ /// this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the key/value pairs to the right of this handle are put into a newly
+ /// allocated node.
+ pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, K, V, Root<K, V>) {
+ unsafe {
+ let mut new_node = Box::new(LeafNode::new());
+
+ // Move out the separating pair; the slots are logically vacated by the
+ // length truncation below, so no double-drop can occur.
+ let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+ let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+ // Number of pairs that move into the new (right) node.
+ let new_len = self.node.len() - self.idx - 1;
+
+ ptr::copy_nonoverlapping(
+ self.node.keys().as_ptr().add(self.idx + 1),
+ new_node.keys.as_mut_ptr() as *mut K,
+ new_len,
+ );
+ ptr::copy_nonoverlapping(
+ self.node.vals().as_ptr().add(self.idx + 1),
+ new_node.vals.as_mut_ptr() as *mut V,
+ new_len,
+ );
+
+ // Truncate the original node so it keeps only the pairs left of the handle.
+ (*self.node.as_leaf_mut()).len = self.idx as u16;
+ new_node.len = new_len as u16;
+
+ // A split-off leaf always has height 0.
+ (self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 })
+ }
+ }
+
+ /// Removes the key/value pair pointed to by this handle and returns it, along with the edge
+ /// between the now adjacent key/value pairs (if any) to the left and right of this handle.
+ pub fn remove(
+ mut self,
+ ) -> (Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>, K, V) {
+ unsafe {
+ // Shift the tails left over the removed slot, then shrink the node.
+ let k = slice_remove(self.node.keys_mut(), self.idx);
+ let v = slice_remove(self.node.vals_mut(), self.idx);
+ (*self.node.as_leaf_mut()).len -= 1;
+ (self.left_edge(), k, v)
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::KV> {
+ /// Splits the underlying node into three parts:
+ ///
+ /// - The node is truncated to only contain the edges and key/value pairs to the
+ /// left of this handle.
+ /// - The key and value pointed to by this handle are extracted.
+ /// - All the edges and key/value pairs to the right of this handle are put into
+ /// a newly allocated node.
+ pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
+ unsafe {
+ let mut new_node = Box::new(InternalNode::new());
+
+ // Move out the separating pair (made safe by the length truncation below).
+ let k = ptr::read(self.node.keys().get_unchecked(self.idx));
+ let v = ptr::read(self.node.vals().get_unchecked(self.idx));
+
+ // The new sibling sits at the same height as the node being split.
+ let height = self.node.height;
+ let new_len = self.node.len() - self.idx - 1;
+
+ ptr::copy_nonoverlapping(
+ self.node.keys().as_ptr().add(self.idx + 1),
+ new_node.data.keys.as_mut_ptr() as *mut K,
+ new_len,
+ );
+ ptr::copy_nonoverlapping(
+ self.node.vals().as_ptr().add(self.idx + 1),
+ new_node.data.vals.as_mut_ptr() as *mut V,
+ new_len,
+ );
+ // An internal node carries one more edge than key/value pairs.
+ ptr::copy_nonoverlapping(
+ self.node.as_internal().edges.as_ptr().add(self.idx + 1),
+ new_node.edges.as_mut_ptr(),
+ new_len + 1,
+ );
+
+ (*self.node.as_leaf_mut()).len = self.idx as u16;
+ new_node.data.len = new_len as u16;
+
+ let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };
+
+ // The moved edges' children still point at the old parent; fix them up.
+ for i in 0..(new_len + 1) {
+ Handle::new_edge(new_root.as_mut().cast_unchecked(), i).correct_parent_link();
+ }
+
+ (self.node, k, v, new_root)
+ }
+ }
+
+ /// Returns `true` if it is valid to call `.merge()`, i.e., whether there is enough room in
+ /// a node to hold the combination of the nodes to the left and right of this handle along
+ /// with the key/value pair at this handle.
+ pub fn can_merge(&self) -> bool {
+ // The `+ 1` accounts for the separating pair at this handle, which moves
+ // down into the merged node.
+ (self.reborrow().left_edge().descend().len()
+ + self.reborrow().right_edge().descend().len()
+ + 1)
+ <= CAPACITY
+ }
+
+ /// Combines the node immediately to the left of this handle, the key/value pair pointed
+ /// to by this handle, and the node immediately to the right of this handle into one new
+ /// child of the underlying node, returning an edge referencing that new child.
+ ///
+ /// Assumes that this edge `.can_merge()`.
+ pub fn merge(
+ mut self,
+ ) -> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge> {
+ // Duplicate the handle so both child edges can be materialized at once;
+ // the copies are only used to reach the two distinct children.
+ let self1 = unsafe { ptr::read(&self) };
+ let self2 = unsafe { ptr::read(&self) };
+ let mut left_node = self1.left_edge().descend();
+ let left_len = left_node.len();
+ let mut right_node = self2.right_edge().descend();
+ let right_len = right_node.len();
+
+ // necessary for correctness, but in a private module
+ assert!(left_len + right_len < CAPACITY);
+
+ unsafe {
+ // Move the separating pair down into the left node, then append the
+ // right node's pairs after it.
+ ptr::write(
+ left_node.keys_mut().get_unchecked_mut(left_len),
+ slice_remove(self.node.keys_mut(), self.idx),
+ );
+ ptr::copy_nonoverlapping(
+ right_node.keys().as_ptr(),
+ left_node.keys_mut().as_mut_ptr().add(left_len + 1),
+ right_len,
+ );
+ ptr::write(
+ left_node.vals_mut().get_unchecked_mut(left_len),
+ slice_remove(self.node.vals_mut(), self.idx),
+ );
+ ptr::copy_nonoverlapping(
+ right_node.vals().as_ptr(),
+ left_node.vals_mut().as_mut_ptr().add(left_len + 1),
+ right_len,
+ );
+
+ // Remove the parent's edge to the right child and close the gap, then
+ // fix parent links of the edges that shifted.
+ slice_remove(&mut self.node.as_internal_mut().edges, self.idx + 1);
+ for i in self.idx + 1..self.node.len() {
+ Handle::new_edge(self.node.reborrow_mut(), i).correct_parent_link();
+ }
+ (*self.node.as_leaf_mut()).len -= 1;
+
+ (*left_node.as_leaf_mut()).len += right_len as u16 + 1;
+
+ // If the children are internal (parent height > 1), move the right
+ // child's edges too; the chosen layout must match the child's node type
+ // for the deallocation below.
+ let layout = if self.node.height > 1 {
+ ptr::copy_nonoverlapping(
+ right_node.cast_unchecked().as_internal().edges.as_ptr(),
+ left_node
+ .cast_unchecked()
+ .as_internal_mut()
+ .edges
+ .as_mut_ptr()
+ .add(left_len + 1),
+ right_len + 1,
+ );
+
+ for i in left_len + 1..left_len + right_len + 2 {
+ Handle::new_edge(left_node.cast_unchecked().reborrow_mut(), i)
+ .correct_parent_link();
+ }
+
+ Layout::new::<InternalNode<K, V>>()
+ } else {
+ Layout::new::<LeafNode<K, V>>()
+ };
+ // The right child's contents have all been moved out; free its allocation.
+ Global.dealloc(right_node.node.cast(), layout);
+
+ Handle::new_edge(self.node, self.idx)
+ }
+ }
+
+ /// This removes a key/value pair from the left child and places it in the key/value storage
+ /// pointed to by this handle while pushing the old key/value pair of this handle into the right
+ /// child.
+ pub fn steal_left(&mut self) {
+ unsafe {
+ // Take the last pair (and, for an internal child, its trailing edge)
+ // from the left child.
+ let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop();
+
+ // Swap it with the separating pair held by this (parent) handle.
+ let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
+ let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
+
+ // Push the old separator (plus the stolen edge, if any) onto the right child.
+ match self.reborrow_mut().right_edge().descend().force() {
+ ForceResult::Leaf(mut leaf) => leaf.push_front(k, v),
+ ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()),
+ }
+ }
+ }
+
+ /// This removes a key/value pair from the right child and places it in the key/value storage
+ /// pointed to by this handle while pushing the old key/value pair of this handle into the left
+ /// child.
+ pub fn steal_right(&mut self) {
+ unsafe {
+ // Take the first pair (and, for an internal child, its leading edge)
+ // from the right child.
+ let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front();
+
+ // Swap it with the separating pair held by this (parent) handle.
+ let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k);
+ let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v);
+
+ // Push the old separator (plus the stolen edge, if any) onto the left child.
+ match self.reborrow_mut().left_edge().descend().force() {
+ ForceResult::Leaf(mut leaf) => leaf.push(k, v),
+ ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()),
+ }
+ }
+ }
+
+ /// This does stealing similar to `steal_left` but steals multiple elements at once.
+ pub fn bulk_steal_left(&mut self, count: usize) {
+ // NOTE(review): assumes `count >= 1` — the `count - 1` below would underflow
+ // otherwise; callers appear to guarantee this. Confirm at call sites.
+ unsafe {
+ let mut left_node = ptr::read(self).left_edge().descend();
+ let left_len = left_node.len();
+ let mut right_node = ptr::read(self).right_edge().descend();
+ let right_len = right_node.len();
+
+ // Make sure that we may steal safely.
+ assert!(right_len + count <= CAPACITY);
+ assert!(left_len >= count);
+
+ let new_left_len = left_len - count;
+
+ // Move data.
+ {
+ let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+ let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+ let parent_kv = {
+ let kv = self.reborrow_mut().into_kv_mut();
+ (kv.0 as *mut K, kv.1 as *mut V)
+ };
+
+ // Make room for stolen elements in the right child.
+ ptr::copy(right_kv.0, right_kv.0.add(count), right_len);
+ ptr::copy(right_kv.1, right_kv.1.add(count), right_len);
+
+ // Move elements from the left child to the right one.
+ move_kv(left_kv, new_left_len + 1, right_kv, 0, count - 1);
+
+ // Move parent's key/value pair to the right child.
+ move_kv(parent_kv, 0, right_kv, count - 1, 1);
+
+ // Move the left-most stolen pair to the parent.
+ move_kv(left_kv, new_left_len, parent_kv, 0, 1);
+ }
+
+ // Only the children's lengths change; the parent's separator was
+ // replaced, not removed.
+ (*left_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
+ (*right_node.reborrow_mut().as_leaf_mut()).len += count as u16;
+
+ match (left_node.force(), right_node.force()) {
+ (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+ // Make room for stolen edges.
+ let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+ ptr::copy(right_edges, right_edges.add(count), right_len + 1);
+ right.correct_childrens_parent_links(count, count + right_len + 1);
+
+ move_edges(left, new_left_len + 1, right, 0, count);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => {
+ // Siblings always have equal height, so mixed kinds are impossible.
+ unreachable!();
+ }
+ }
+ }
+ }
+
+ /// The symmetric clone of `bulk_steal_left`.
+ pub fn bulk_steal_right(&mut self, count: usize) {
+ // NOTE(review): like `bulk_steal_left`, assumes `count >= 1` (`count - 1`
+ // below would underflow otherwise).
+ unsafe {
+ let mut left_node = ptr::read(self).left_edge().descend();
+ let left_len = left_node.len();
+ let mut right_node = ptr::read(self).right_edge().descend();
+ let right_len = right_node.len();
+
+ // Make sure that we may steal safely.
+ assert!(left_len + count <= CAPACITY);
+ assert!(right_len >= count);
+
+ let new_right_len = right_len - count;
+
+ // Move data.
+ {
+ let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+ let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+ let parent_kv = {
+ let kv = self.reborrow_mut().into_kv_mut();
+ (kv.0 as *mut K, kv.1 as *mut V)
+ };
+
+ // Move parent's key/value pair to the left child.
+ move_kv(parent_kv, 0, left_kv, left_len, 1);
+
+ // Move elements from the right child to the left one.
+ move_kv(right_kv, 0, left_kv, left_len + 1, count - 1);
+
+ // Move the right-most stolen pair to the parent.
+ move_kv(right_kv, count - 1, parent_kv, 0, 1);
+
+ // Fix right indexing
+ ptr::copy(right_kv.0.add(count), right_kv.0, new_right_len);
+ ptr::copy(right_kv.1.add(count), right_kv.1, new_right_len);
+ }
+
+ (*left_node.reborrow_mut().as_leaf_mut()).len += count as u16;
+ (*right_node.reborrow_mut().as_leaf_mut()).len -= count as u16;
+
+ match (left_node.force(), right_node.force()) {
+ (ForceResult::Internal(left), ForceResult::Internal(mut right)) => {
+ move_edges(right.reborrow_mut(), 0, left, left_len + 1, count);
+
+ // Fix right indexing.
+ let right_edges = right.reborrow_mut().as_internal_mut().edges.as_mut_ptr();
+ ptr::copy(right_edges.add(count), right_edges, new_right_len + 1);
+ right.correct_childrens_parent_links(0, new_right_len + 1);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => {
+ // Siblings always have equal height, so mixed kinds are impossible.
+ unreachable!();
+ }
+ }
+ }
+ }
+}
+
+/// Moves `count` keys and values from `source` (starting at `source_offset`) to
+/// `dest` (starting at `dest_offset`). The regions must not overlap; the caller
+/// is responsible for adjusting the nodes' lengths afterwards.
+unsafe fn move_kv<K, V>(
+ source: (*mut K, *mut V),
+ source_offset: usize,
+ dest: (*mut K, *mut V),
+ dest_offset: usize,
+ count: usize,
+) {
+ unsafe {
+ ptr::copy_nonoverlapping(source.0.add(source_offset), dest.0.add(dest_offset), count);
+ ptr::copy_nonoverlapping(source.1.add(source_offset), dest.1.add(dest_offset), count);
+ }
+}
+
+// Source and destination must have the same height.
+// Moves `count` edges between two internal nodes and repairs the parent links of
+// every child that arrived in `dest`.
+unsafe fn move_edges<K, V>(
+ mut source: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
+ source_offset: usize,
+ mut dest: NodeRef<marker::Mut<'_>, K, V, marker::Internal>,
+ dest_offset: usize,
+ count: usize,
+) {
+ let source_ptr = source.as_internal_mut().edges.as_mut_ptr();
+ let dest_ptr = dest.as_internal_mut().edges.as_mut_ptr();
+ unsafe {
+ ptr::copy_nonoverlapping(source_ptr.add(source_offset), dest_ptr.add(dest_offset), count);
+ dest.correct_childrens_parent_links(dest_offset, dest_offset + count);
+ }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::Edge> {
+ /// Erases the static `Leaf` type information, yielding a `LeafOrInternal` handle.
+ pub fn forget_node_type(
+ self,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
+ }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Internal>, marker::Edge> {
+ /// Erases the static `Internal` type information, yielding a `LeafOrInternal` handle.
+ pub fn forget_node_type(
+ self,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::Edge> {
+ unsafe { Handle::new_edge(self.node.forget_type(), self.idx) }
+ }
+}
+
+impl<BorrowType, K, V> Handle<NodeRef<BorrowType, K, V, marker::Leaf>, marker::KV> {
+ /// Erases the static `Leaf` type information from a KV handle.
+ pub fn forget_node_type(
+ self,
+ ) -> Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, marker::KV> {
+ unsafe { Handle::new_kv(self.node.forget_type(), self.idx) }
+ }
+}
+
+impl<BorrowType, K, V, HandleType>
+ Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType>
+{
+ /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
+ pub fn force(
+ self,
+ ) -> ForceResult<
+ Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
+ Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>,
+ > {
+ // Forcing the node determines the variant; the handle's index and kind
+ // carry over unchanged.
+ match self.node.force() {
+ ForceResult::Leaf(node) => {
+ ForceResult::Leaf(Handle { node, idx: self.idx, _marker: PhantomData })
+ }
+ ForceResult::Internal(node) => {
+ ForceResult::Internal(Handle { node, idx: self.idx, _marker: PhantomData })
+ }
+ }
+ }
+}
+
+impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::Edge> {
+ /// Move the suffix after `self` from one node to another one. `right` must be empty.
+ /// The first edge of `right` remains unchanged.
+ pub fn move_suffix(
+ &mut self,
+ right: &mut NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
+ ) {
+ unsafe {
+ // Everything at index >= self.idx moves; the left node keeps `self.idx` pairs.
+ let left_new_len = self.idx;
+ let mut left_node = self.reborrow_mut().into_node();
+
+ let right_new_len = left_node.len() - left_new_len;
+ let mut right_node = right.reborrow_mut();
+
+ // Preconditions: destination is empty and at the same level as the source.
+ assert!(right_node.len() == 0);
+ assert!(left_node.height == right_node.height);
+
+ if right_new_len > 0 {
+ let left_kv = left_node.reborrow_mut().into_kv_pointers_mut();
+ let right_kv = right_node.reborrow_mut().into_kv_pointers_mut();
+
+ move_kv(left_kv, left_new_len, right_kv, 0, right_new_len);
+
+ (*left_node.reborrow_mut().as_leaf_mut()).len = left_new_len as u16;
+ (*right_node.reborrow_mut().as_leaf_mut()).len = right_new_len as u16;
+
+ match (left_node.force(), right_node.force()) {
+ (ForceResult::Internal(left), ForceResult::Internal(right)) => {
+ // Destination offset 1 keeps `right`'s first edge untouched,
+ // as documented above.
+ move_edges(left, left_new_len + 1, right, 1, right_new_len);
+ }
+ (ForceResult::Leaf(_), ForceResult::Leaf(_)) => {}
+ _ => {
+ // Equal heights guarantee matching node kinds.
+ unreachable!();
+ }
+ }
+ }
+ }
+ }
+}
+
+/// The concrete kind of a node, as revealed by the `force` methods.
+pub enum ForceResult<Leaf, Internal> {
+ Leaf(Leaf),
+ Internal(Internal),
+}
+
+/// The result of an insertion: either the pair fit into the node, or the node was
+/// split and the caller must handle the extracted middle pair and new right half.
+pub enum InsertResult<'a, K, V, Type> {
+ Fit(Handle<NodeRef<marker::Mut<'a>, K, V, Type>, marker::KV>),
+ Split(NodeRef<marker::Mut<'a>, K, V, Type>, K, V, Root<K, V>),
+}
+
+pub mod marker {
+ use core::marker::PhantomData;
+
+ // Type-level tags describing what kind of node a `NodeRef` points to.
+ pub enum Leaf {}
+ pub enum Internal {}
+ pub enum LeafOrInternal {}
+
+ // Type-level tags describing how a `NodeRef` borrows the tree.
+ pub enum Owned {}
+ pub struct Immut<'a>(PhantomData<&'a ()>);
+ pub struct Mut<'a>(PhantomData<&'a mut ()>);
+
+ // Type-level tags describing what a `Handle` points at within a node.
+ pub enum KV {}
+ pub enum Edge {}
+}
+
+/// Inserts `val` at position `idx`, shifting everything after it one slot to the
+/// right. The copy writes one element past `slice.len()`, so the underlying buffer
+/// must have room for it; the caller updates the node's length afterwards.
+unsafe fn slice_insert<T>(slice: &mut [T], idx: usize, val: T) {
+ unsafe {
+ ptr::copy(slice.as_ptr().add(idx), slice.as_mut_ptr().add(idx + 1), slice.len() - idx);
+ ptr::write(slice.get_unchecked_mut(idx), val);
+ }
+}
+
+/// Removes and returns the element at `idx`, shifting everything after it one slot
+/// to the left. The slice's length is not changed here; the caller adjusts the
+/// node's length and must treat the final slot as logically vacated.
+unsafe fn slice_remove<T>(slice: &mut [T], idx: usize) -> T {
+ unsafe {
+ let ret = ptr::read(slice.get_unchecked(idx));
+ ptr::copy(slice.as_ptr().add(idx + 1), slice.as_mut_ptr().add(idx), slice.len() - idx - 1);
+ ret
+ }
+}
diff --git a/library/alloc/src/collections/btree/search.rs b/library/alloc/src/collections/btree/search.rs
new file mode 100644
index 00000000000..4e80f7f21eb
--- /dev/null
+++ b/library/alloc/src/collections/btree/search.rs
@@ -0,0 +1,83 @@
+use core::borrow::Borrow;
+use core::cmp::Ordering;
+
+use super::node::{marker, ForceResult::*, Handle, NodeRef};
+
+use SearchResult::*;
+
+/// The result of a search: either the key was found at a KV handle, or the search
+/// stops (or must continue) at an edge handle.
+pub enum SearchResult<BorrowType, K, V, FoundType, GoDownType> {
+ Found(Handle<NodeRef<BorrowType, K, V, FoundType>, marker::KV>),
+ GoDown(Handle<NodeRef<BorrowType, K, V, GoDownType>, marker::Edge>),
+}
+
+/// Looks up a given key in a (sub)tree headed by the given node, descending one
+/// level at a time. Returns a `Found` with the handle of the matching KV, if any.
+/// Otherwise, returns a `GoDown` with the handle of the possible leaf edge where
+/// the key belongs.
+pub fn search_tree<BorrowType, K, V, Q: ?Sized>(
+ mut node: NodeRef<BorrowType, K, V, marker::LeafOrInternal>,
+ key: &Q,
+) -> SearchResult<BorrowType, K, V, marker::LeafOrInternal, marker::Leaf>
+where
+ Q: Ord,
+ K: Borrow<Q>,
+{
+ // Iterative descent: each pass searches one node and either finishes or
+ // follows the indicated edge into the child.
+ loop {
+ match search_node(node, key) {
+ Found(handle) => return Found(handle),
+ GoDown(handle) => match handle.force() {
+ // A leaf edge is where the key would be inserted: stop here.
+ Leaf(leaf) => return GoDown(leaf),
+ Internal(internal) => {
+ node = internal.descend();
+ continue;
+ }
+ },
+ }
+ }
+}
+
+/// Looks up a given key in a given node, without recursion.
+/// Returns a `Found` with the handle of the matching KV, if any. Otherwise,
+/// returns a `GoDown` with the handle of the edge where the key might be found.
+/// If the node is a leaf, a `GoDown` edge is not an actual edge but a possible edge.
+pub fn search_node<BorrowType, K, V, Type, Q: ?Sized>(
+ node: NodeRef<BorrowType, K, V, Type>,
+ key: &Q,
+) -> SearchResult<BorrowType, K, V, Type, Type>
+where
+ Q: Ord,
+ K: Borrow<Q>,
+{
+ // `search_linear` returns the slot index plus whether it is an exact match;
+ // map that onto the two handle kinds.
+ match search_linear(&node, key) {
+ (idx, true) => Found(unsafe { Handle::new_kv(node, idx) }),
+ (idx, false) => SearchResult::GoDown(unsafe { Handle::new_edge(node, idx) }),
+ }
+}
+
+/// Returns the index in the node at which the key (or an equivalent) exists
+/// or could exist, and whether it exists in the node itself. If it doesn't
+/// exist in the node itself, it may exist in the subtree with that index
+/// (if the node has subtrees). If the key doesn't exist in node or subtree,
+/// the returned index is the position or subtree where the key belongs.
+fn search_linear<BorrowType, K, V, Type, Q: ?Sized>(
+ node: &NodeRef<BorrowType, K, V, Type>,
+ key: &Q,
+) -> (usize, bool)
+where
+ Q: Ord,
+ K: Borrow<Q>,
+{
+ // This function is defined over all borrow types (immutable, mutable, owned).
+ // Using `keys()` is fine here even if BorrowType is mutable, as all we return
+ // is an index -- not a reference.
+ let len = node.len();
+ let keys = node.keys();
+ for (i, k) in keys.iter().enumerate() {
+ match key.cmp(k.borrow()) {
+ Ordering::Greater => {}
+ Ordering::Equal => return (i, true),
+ // Keys are kept in ascending order, so the first key greater than
+ // the search key ends the scan.
+ Ordering::Less => return (i, false),
+ }
+ }
+ // Key is greater than everything in the node: it belongs after the last slot.
+ (len, false)
+}
diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs
new file mode 100644
index 00000000000..35f4ef1d9b4
--- /dev/null
+++ b/library/alloc/src/collections/btree/set.rs
@@ -0,0 +1,1574 @@
+// This is pretty much entirely stolen from TreeSet, since BTreeMap has an identical interface
+// to TreeMap
+
+use core::borrow::Borrow;
+use core::cmp::Ordering::{Equal, Greater, Less};
+use core::cmp::{max, min};
+use core::fmt::{self, Debug};
+use core::iter::{FromIterator, FusedIterator, Peekable};
+use core::ops::{BitAnd, BitOr, BitXor, RangeBounds, Sub};
+
+use super::map::{BTreeMap, Keys};
+use super::Recover;
+
+// FIXME(conventions): implement bounded iterators
+
+/// A set based on a B-Tree.
+///
+/// See [`BTreeMap`]'s documentation for a detailed discussion of this collection's performance
+/// benefits and drawbacks.
+///
+/// It is a logic error for an item to be modified in such a way that the item's ordering relative
+/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
+/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
+///
+/// [`Ord`]: core::cmp::Ord
+/// [`Cell`]: core::cell::Cell
+/// [`RefCell`]: core::cell::RefCell
+///
+/// # Examples
+///
+/// ```
+/// use std::collections::BTreeSet;
+///
+/// // Type inference lets us omit an explicit type signature (which
+/// // would be `BTreeSet<&str>` in this example).
+/// let mut books = BTreeSet::new();
+///
+/// // Add some books.
+/// books.insert("A Dance With Dragons");
+/// books.insert("To Kill a Mockingbird");
+/// books.insert("The Odyssey");
+/// books.insert("The Great Gatsby");
+///
+/// // Check for a specific one.
+/// if !books.contains("The Winds of Winter") {
+/// println!("We have {} books, but The Winds of Winter ain't one.",
+/// books.len());
+/// }
+///
+/// // Remove a book.
+/// books.remove("The Odyssey");
+///
+/// // Iterate over everything.
+/// for book in &books {
+/// println!("{}", book);
+/// }
+/// ```
+#[derive(Hash, PartialEq, Eq, Ord, PartialOrd)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct BTreeSet<T> {
+ // A set is represented as a map whose values carry no information.
+ map: BTreeMap<T, ()>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for BTreeSet<T> {
+ fn clone(&self) -> Self {
+ BTreeSet { map: self.map.clone() }
+ }
+
+ // Overridden so the map's own `clone_from` is used, which may reuse the
+ // existing structure instead of building a fresh clone.
+ fn clone_from(&mut self, other: &Self) {
+ self.map.clone_from(&other.map);
+ }
+}
+
+/// An iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`iter`]: BTreeSet::iter
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+ // Borrowed iterator over the underlying map's keys.
+ iter: Keys<'a, T, ()>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // NOTE(review): the `.clone()` looks redundant — `&self.iter` should
+ // satisfy `field`'s `&dyn Debug` argument; verify before changing.
+ f.debug_tuple("Iter").field(&self.iter.clone()).finish()
+ }
+}
+
+/// An owning iterator over the items of a `BTreeSet`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: BTreeSet#method.into_iter
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct IntoIter<T> {
+ // Owning iterator over the underlying map's entries (values are `()`).
+ iter: super::map::IntoIter<T, ()>,
+}
+
+/// An iterator over a sub-range of items in a `BTreeSet`.
+///
+/// This `struct` is created by the [`range`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`range`]: BTreeSet::range
+#[derive(Debug)]
+#[stable(feature = "btree_range", since = "1.17.0")]
+pub struct Range<'a, T: 'a> {
+ // Range iterator over the underlying map (values are `()`).
+ iter: super::map::Range<'a, T, ()>,
+}
+
+/// Core of SymmetricDifference and Union.
+/// More efficient than btree.map.MergeIter,
+/// and crucially for SymmetricDifference, nexts() reports on both sides.
+#[derive(Clone)]
+struct MergeIterInner<I>
+where
+ I: Iterator,
+ I::Item: Copy,
+{
+ a: I,
+ b: I,
+ // An element pulled from one side but not yet matched against the other.
+ peeked: Option<MergeIterPeeked<I>>,
+}
+
+// Records which side a stashed (peeked) element came from.
+#[derive(Copy, Clone, Debug)]
+enum MergeIterPeeked<I: Iterator> {
+ A(I::Item),
+ B(I::Item),
+}
+
+impl<I> MergeIterInner<I>
+where
+ I: ExactSizeIterator + FusedIterator,
+ I::Item: Copy + Ord,
+{
+ /// Creates a merge iterator over two sides, with nothing peeked yet.
+ fn new(a: I, b: I) -> Self {
+ MergeIterInner { a, b, peeked: None }
+ }
+
+ /// Advances the merge by one step. The smaller element is returned on its
+ /// side (`None` on the other, whose element is stashed in `peeked` for the
+ /// next call); equal elements are consumed and returned on both sides.
+ fn nexts(&mut self) -> (Option<I::Item>, Option<I::Item>) {
+ // Prefer a previously stashed element over pulling a fresh one.
+ let mut a_next = match self.peeked {
+ Some(MergeIterPeeked::A(next)) => Some(next),
+ _ => self.a.next(),
+ };
+ let mut b_next = match self.peeked {
+ Some(MergeIterPeeked::B(next)) => Some(next),
+ _ => self.b.next(),
+ };
+ // An exhausted side compares as "greater" so the other side drains first.
+ let ord = match (a_next, b_next) {
+ (None, None) => Equal,
+ (_, None) => Less,
+ (None, _) => Greater,
+ (Some(a1), Some(b1)) => a1.cmp(&b1),
+ };
+ // Stash the element that was not consumed this step.
+ self.peeked = match ord {
+ Less => b_next.take().map(MergeIterPeeked::B),
+ Equal => None,
+ Greater => a_next.take().map(MergeIterPeeked::A),
+ };
+ (a_next, b_next)
+ }
+
+ /// Remaining lengths of both sides, counting a stashed element toward its side.
+ fn lens(&self) -> (usize, usize) {
+ match self.peeked {
+ Some(MergeIterPeeked::A(_)) => (1 + self.a.len(), self.b.len()),
+ Some(MergeIterPeeked::B(_)) => (self.a.len(), 1 + self.b.len()),
+ _ => (self.a.len(), self.b.len()),
+ }
+ }
+}
+
+impl<I> Debug for MergeIterInner<I>
+where
+ I: Iterator + Debug,
+ I::Item: Copy + Debug,
+{
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Only the two sides are shown; the transient `peeked` element is omitted.
+ f.debug_tuple("MergeIterInner").field(&self.a).field(&self.b).finish()
+ }
+}
+
+/// A lazy iterator producing elements in the difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`difference`]: BTreeSet::difference
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Difference<'a, T: 'a> {
+ // Strategy chosen up front by `BTreeSet::difference` from the sets' sizes and ranges.
+ inner: DifferenceInner<'a, T>,
+}
+#[derive(Debug)]
+// Iteration strategies, selected by comparing the two sets' sizes and min/max bounds.
+enum DifferenceInner<'a, T: 'a> {
+ Stitch {
+ // iterate all of `self` and some of `other`, spotting matches along the way
+ self_iter: Iter<'a, T>,
+ other_iter: Peekable<Iter<'a, T>>,
+ },
+ Search {
+ // iterate `self`, look up in `other`
+ self_iter: Iter<'a, T>,
+ other_set: &'a BTreeSet<T>,
+ },
+ Iterate(Iter<'a, T>), // simply produce all values in `self`
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Difference<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // The chosen strategy variant is visible through the inner Debug output.
+ f.debug_tuple("Difference").field(&self.inner).finish()
+ }
+}
+
+/// A lazy iterator producing elements in the symmetric difference of `BTreeSet`s.
+///
+/// This `struct` is created by the [`symmetric_difference`] method on
+/// [`BTreeSet`]. See its documentation for more.
+///
+/// [`symmetric_difference`]: BTreeSet::symmetric_difference
+#[stable(feature = "rust1", since = "1.0.0")]
+// Wraps the shared merge-iterator core (see `MergeIterInner`).
+pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for SymmetricDifference<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Delegates to the merge-iterator core's Debug output.
+ f.debug_tuple("SymmetricDifference").field(&self.0).finish()
+ }
+}
+
+/// A lazy iterator producing elements in the intersection of `BTreeSet`s.
+///
+/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`intersection`]: BTreeSet::intersection
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Intersection<'a, T: 'a> {
+ // Strategy chosen up front by `BTreeSet::intersection` from the sets' sizes.
+ inner: IntersectionInner<'a, T>,
+}
+#[derive(Debug)]
+// Iteration strategies, selected by comparing the two sets' sizes.
+enum IntersectionInner<'a, T: 'a> {
+ Stitch {
+ // iterate similarly sized sets jointly, spotting matches along the way
+ a: Iter<'a, T>,
+ b: Iter<'a, T>,
+ },
+ Search {
+ // iterate a small set, look up in the large set
+ small_iter: Iter<'a, T>,
+ large_set: &'a BTreeSet<T>,
+ },
+ Answer(Option<&'a T>), // return a specific value or emptiness
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Intersection<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // The chosen strategy variant is visible through the inner Debug output.
+ f.debug_tuple("Intersection").field(&self.inner).finish()
+ }
+}
+
+/// A lazy iterator producing elements in the union of `BTreeSet`s.
+///
+/// This `struct` is created by the [`union`] method on [`BTreeSet`].
+/// See its documentation for more.
+///
+/// [`union`]: BTreeSet::union
+#[stable(feature = "rust1", since = "1.0.0")]
+// Wraps the shared merge-iterator core (see `MergeIterInner`).
+pub struct Union<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Union<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Delegates to the merge-iterator core's Debug output.
+ f.debug_tuple("Union").field(&self.0).finish()
+ }
+}
+
+// This constant is used by functions that compare two sets.
+// It estimates the relative size at which searching performs better
+// than iterating, based on the benchmarks in
+// https://github.com/ssomers/rust_bench_btreeset_intersection;
+// It's used to divide rather than multiply sizes, to rule out overflow,
+// and it's a power of two to make that division cheap.
+const ITER_PERFORMANCE_TIPPING_SIZE_DIFF: usize = 16;
+
+impl<T: Ord> BTreeSet<T> {
+ /// Makes a new `BTreeSet` with a reasonable choice of B.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set: BTreeSet<i32> = BTreeSet::new();
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[rustc_const_unstable(feature = "const_btree_new", issue = "71835")]
+ pub const fn new() -> BTreeSet<T> {
+ // An empty map performs no allocation, which is what allows this to be `const`.
+ BTreeSet { map: BTreeMap::new() }
+ }
+
+ /// Constructs a double-ended iterator over a sub-range of elements in the set.
+ /// The simplest way is to use the range syntax `min..max`, thus `range(min..max)` will
+ /// yield elements from min (inclusive) to max (exclusive).
+ /// The range may also be entered as `(Bound<T>, Bound<T>)`, so for example
+ /// `range((Excluded(4), Included(10)))` will yield a left-exclusive, right-inclusive
+ /// range from 4 to 10.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ /// use std::ops::Bound::Included;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// set.insert(3);
+ /// set.insert(5);
+ /// set.insert(8);
+ /// for &elem in set.range((Included(&4), Included(&8))) {
+ /// println!("{}", elem);
+ /// }
+ /// assert_eq!(Some(&5), set.range(4..).next());
+ /// ```
+ #[stable(feature = "btree_range", since = "1.17.0")]
+ pub fn range<K: ?Sized, R>(&self, range: R) -> Range<'_, T>
+ where
+ K: Ord,
+ T: Borrow<K>,
+ R: RangeBounds<K>,
+ {
+ Range { iter: self.map.range(range) }
+ }
+
+ /// Visits the values representing the difference,
+ /// i.e., the values that are in `self` but not in `other`,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let diff: Vec<_> = a.difference(&b).cloned().collect();
+ /// assert_eq!(diff, [1]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn difference<'a>(&'a self, other: &'a BTreeSet<T>) -> Difference<'a, T> {
+ let (self_min, self_max) =
+ if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+ (self_min, self_max)
+ } else {
+ return Difference { inner: DifferenceInner::Iterate(self.iter()) };
+ };
+ let (other_min, other_max) =
+ if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+ (other_min, other_max)
+ } else {
+ return Difference { inner: DifferenceInner::Iterate(self.iter()) };
+ };
+ Difference {
+ inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
+ (Greater, _) | (_, Less) => DifferenceInner::Iterate(self.iter()),
+ (Equal, _) => {
+ let mut self_iter = self.iter();
+ self_iter.next();
+ DifferenceInner::Iterate(self_iter)
+ }
+ (_, Equal) => {
+ let mut self_iter = self.iter();
+ self_iter.next_back();
+ DifferenceInner::Iterate(self_iter)
+ }
+ _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+ DifferenceInner::Search { self_iter: self.iter(), other_set: other }
+ }
+ _ => DifferenceInner::Stitch {
+ self_iter: self.iter(),
+ other_iter: other.iter().peekable(),
+ },
+ },
+ }
+ }
+
+ /// Visits the values representing the symmetric difference,
+ /// i.e., the values that are in `self` or in `other` but not in both,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let sym_diff: Vec<_> = a.symmetric_difference(&b).cloned().collect();
+ /// assert_eq!(sym_diff, [1, 3]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn symmetric_difference<'a>(
+ &'a self,
+ other: &'a BTreeSet<T>,
+ ) -> SymmetricDifference<'a, T> {
+ SymmetricDifference(MergeIterInner::new(self.iter(), other.iter()))
+ }
+
+ /// Visits the values representing the intersection,
+ /// i.e., the values that are both in `self` and `other`,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ /// b.insert(3);
+ ///
+ /// let intersection: Vec<_> = a.intersection(&b).cloned().collect();
+ /// assert_eq!(intersection, [2]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn intersection<'a>(&'a self, other: &'a BTreeSet<T>) -> Intersection<'a, T> {
+ let (self_min, self_max) =
+ if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+ (self_min, self_max)
+ } else {
+ return Intersection { inner: IntersectionInner::Answer(None) };
+ };
+ let (other_min, other_max) =
+ if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+ (other_min, other_max)
+ } else {
+ return Intersection { inner: IntersectionInner::Answer(None) };
+ };
+ Intersection {
+ inner: match (self_min.cmp(other_max), self_max.cmp(other_min)) {
+ (Greater, _) | (_, Less) => IntersectionInner::Answer(None),
+ (Equal, _) => IntersectionInner::Answer(Some(self_min)),
+ (_, Equal) => IntersectionInner::Answer(Some(self_max)),
+ _ if self.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+ IntersectionInner::Search { small_iter: self.iter(), large_set: other }
+ }
+ _ if other.len() <= self.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF => {
+ IntersectionInner::Search { small_iter: other.iter(), large_set: self }
+ }
+ _ => IntersectionInner::Stitch { a: self.iter(), b: other.iter() },
+ },
+ }
+ }
+
+ /// Visits the values representing the union,
+ /// i.e., all the values in `self` or `other`, without duplicates,
+ /// in ascending order.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(2);
+ ///
+ /// let union: Vec<_> = a.union(&b).cloned().collect();
+ /// assert_eq!(union, [1, 2]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn union<'a>(&'a self, other: &'a BTreeSet<T>) -> Union<'a, T> {
+ Union(MergeIterInner::new(self.iter(), other.iter()))
+ }
+
+ /// Clears the set, removing all values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut v = BTreeSet::new();
+ /// v.insert(1);
+ /// v.clear();
+ /// assert!(v.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn clear(&mut self) {
+ self.map.clear()
+ }
+
+ /// Returns `true` if the set contains a value.
+ ///
+ /// The value may be any borrowed form of the set's value type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the value type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// assert_eq!(set.contains(&1), true);
+ /// assert_eq!(set.contains(&4), false);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
+ where
+ T: Borrow<Q>,
+ Q: Ord,
+ {
+ self.map.contains_key(value)
+ }
+
+ /// Returns a reference to the value in the set, if any, that is equal to the given value.
+ ///
+ /// The value may be any borrowed form of the set's value type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the value type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// assert_eq!(set.get(&2), Some(&2));
+ /// assert_eq!(set.get(&4), None);
+ /// ```
+ #[stable(feature = "set_recovery", since = "1.9.0")]
+ pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
+ where
+ T: Borrow<Q>,
+ Q: Ord,
+ {
+ Recover::get(&self.map, value)
+ }
+
+ /// Returns `true` if `self` has no elements in common with `other`.
+ /// This is equivalent to checking for an empty intersection.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// let mut b = BTreeSet::new();
+ ///
+ /// assert_eq!(a.is_disjoint(&b), true);
+ /// b.insert(4);
+ /// assert_eq!(a.is_disjoint(&b), true);
+ /// b.insert(1);
+ /// assert_eq!(a.is_disjoint(&b), false);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_disjoint(&self, other: &BTreeSet<T>) -> bool {
+ self.intersection(other).next().is_none()
+ }
+
+ /// Returns `true` if the set is a subset of another,
+ /// i.e., `other` contains at least all the values in `self`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let sup: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.is_subset(&sup), true);
+ /// set.insert(2);
+ /// assert_eq!(set.is_subset(&sup), true);
+ /// set.insert(4);
+ /// assert_eq!(set.is_subset(&sup), false);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_subset(&self, other: &BTreeSet<T>) -> bool {
+ // Same result as self.difference(other).next().is_none()
+ // but the code below is faster (hugely in some cases).
+ if self.len() > other.len() {
+ return false;
+ }
+ let (self_min, self_max) =
+ if let (Some(self_min), Some(self_max)) = (self.first(), self.last()) {
+ (self_min, self_max)
+ } else {
+ return true; // self is empty
+ };
+ let (other_min, other_max) =
+ if let (Some(other_min), Some(other_max)) = (other.first(), other.last()) {
+ (other_min, other_max)
+ } else {
+ return false; // other is empty
+ };
+ let mut self_iter = self.iter();
+ match self_min.cmp(other_min) {
+ Less => return false,
+ Equal => {
+ self_iter.next();
+ }
+ Greater => (),
+ }
+ match self_max.cmp(other_max) {
+ Greater => return false,
+ Equal => {
+ self_iter.next_back();
+ }
+ Less => (),
+ }
+ if self_iter.len() <= other.len() / ITER_PERFORMANCE_TIPPING_SIZE_DIFF {
+ for next in self_iter {
+ if !other.contains(next) {
+ return false;
+ }
+ }
+ } else {
+ let mut other_iter = other.iter();
+ other_iter.next();
+ other_iter.next_back();
+ let mut self_next = self_iter.next();
+ while let Some(self1) = self_next {
+ match other_iter.next().map_or(Less, |other1| self1.cmp(other1)) {
+ Less => return false,
+ Equal => self_next = self_iter.next(),
+ Greater => (),
+ }
+ }
+ }
+ true
+ }
+
+ /// Returns `true` if the set is a superset of another,
+ /// i.e., `self` contains at least all the values in `other`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let sub: BTreeSet<_> = [1, 2].iter().cloned().collect();
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.is_superset(&sub), false);
+ ///
+ /// set.insert(0);
+ /// set.insert(1);
+ /// assert_eq!(set.is_superset(&sub), false);
+ ///
+ /// set.insert(2);
+ /// assert_eq!(set.is_superset(&sub), true);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_superset(&self, other: &BTreeSet<T>) -> bool {
+ other.is_subset(self)
+ }
+
+ /// Returns a reference to the first value in the set, if any.
+ /// This value is always the minimum of all values in the set.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut map = BTreeSet::new();
+ /// assert_eq!(map.first(), None);
+ /// map.insert(1);
+ /// assert_eq!(map.first(), Some(&1));
+ /// map.insert(2);
+ /// assert_eq!(map.first(), Some(&1));
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn first(&self) -> Option<&T> {
+ self.map.first_key_value().map(|(k, _)| k)
+ }
+
+ /// Returns a reference to the last value in the set, if any.
+ /// This value is always the maximum of all values in the set.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut map = BTreeSet::new();
+ /// assert_eq!(map.first(), None);
+ /// map.insert(1);
+ /// assert_eq!(map.last(), Some(&1));
+ /// map.insert(2);
+ /// assert_eq!(map.last(), Some(&2));
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn last(&self) -> Option<&T> {
+ self.map.last_key_value().map(|(k, _)| k)
+ }
+
+ /// Removes the first value from the set and returns it, if any.
+ /// The first value is always the minimum value in the set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// set.insert(1);
+ /// while let Some(n) = set.pop_first() {
+ /// assert_eq!(n, 1);
+ /// }
+ /// assert!(set.is_empty());
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn pop_first(&mut self) -> Option<T> {
+ self.map.first_entry().map(|entry| entry.remove_entry().0)
+ }
+
+ /// Removes the last value from the set and returns it, if any.
+ /// The last value is always the maximum value in the set.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(map_first_last)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// set.insert(1);
+ /// while let Some(n) = set.pop_last() {
+ /// assert_eq!(n, 1);
+ /// }
+ /// assert!(set.is_empty());
+ /// ```
+ #[unstable(feature = "map_first_last", issue = "62924")]
+ pub fn pop_last(&mut self) -> Option<T> {
+ self.map.last_entry().map(|entry| entry.remove_entry().0)
+ }
+
+ /// Adds a value to the set.
+ ///
+ /// If the set did not have this value present, `true` is returned.
+ ///
+ /// If the set did have this value present, `false` is returned, and the
+ /// entry is not updated. See the [module-level documentation] for more.
+ ///
+ /// [module-level documentation]: index.html#insert-and-complex-keys
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// assert_eq!(set.insert(2), true);
+ /// assert_eq!(set.insert(2), false);
+ /// assert_eq!(set.len(), 1);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, value: T) -> bool {
+ self.map.insert(value, ()).is_none()
+ }
+
+ /// Adds a value to the set, replacing the existing value, if any, that is equal to the given
+ /// one. Returns the replaced value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ /// set.insert(Vec::<i32>::new());
+ ///
+ /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 0);
+ /// set.replace(Vec::with_capacity(10));
+ /// assert_eq!(set.get(&[][..]).unwrap().capacity(), 10);
+ /// ```
+ #[stable(feature = "set_recovery", since = "1.9.0")]
+ pub fn replace(&mut self, value: T) -> Option<T> {
+ Recover::replace(&mut self.map, value)
+ }
+
+ /// Removes a value from the set. Returns whether the value was
+ /// present in the set.
+ ///
+ /// The value may be any borrowed form of the set's value type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the value type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set = BTreeSet::new();
+ ///
+ /// set.insert(2);
+ /// assert_eq!(set.remove(&2), true);
+ /// assert_eq!(set.remove(&2), false);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
+ where
+ T: Borrow<Q>,
+ Q: Ord,
+ {
+ self.map.remove(value).is_some()
+ }
+
+ /// Removes and returns the value in the set, if any, that is equal to the given one.
+ ///
+ /// The value may be any borrowed form of the set's value type,
+ /// but the ordering on the borrowed form *must* match the
+ /// ordering on the value type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
+ /// assert_eq!(set.take(&2), Some(2));
+ /// assert_eq!(set.take(&2), None);
+ /// ```
+ #[stable(feature = "set_recovery", since = "1.9.0")]
+ pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
+ where
+ T: Borrow<Q>,
+ Q: Ord,
+ {
+ Recover::take(&mut self.map, value)
+ }
+
+ /// Moves all elements from `other` into `Self`, leaving `other` empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ /// a.insert(3);
+ ///
+ /// let mut b = BTreeSet::new();
+ /// b.insert(3);
+ /// b.insert(4);
+ /// b.insert(5);
+ ///
+ /// a.append(&mut b);
+ ///
+ /// assert_eq!(a.len(), 5);
+ /// assert_eq!(b.len(), 0);
+ ///
+ /// assert!(a.contains(&1));
+ /// assert!(a.contains(&2));
+ /// assert!(a.contains(&3));
+ /// assert!(a.contains(&4));
+ /// assert!(a.contains(&5));
+ /// ```
+ #[stable(feature = "btree_append", since = "1.11.0")]
+ pub fn append(&mut self, other: &mut Self) {
+ self.map.append(&mut other.map);
+ }
+
+ /// Splits the collection into two at the given key. Returns everything after the given key,
+ /// including the key.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut a = BTreeSet::new();
+ /// a.insert(1);
+ /// a.insert(2);
+ /// a.insert(3);
+ /// a.insert(17);
+ /// a.insert(41);
+ ///
+ /// let b = a.split_off(&3);
+ ///
+ /// assert_eq!(a.len(), 2);
+ /// assert_eq!(b.len(), 3);
+ ///
+ /// assert!(a.contains(&1));
+ /// assert!(a.contains(&2));
+ ///
+ /// assert!(b.contains(&3));
+ /// assert!(b.contains(&17));
+ /// assert!(b.contains(&41));
+ /// ```
+ #[stable(feature = "btree_split_off", since = "1.11.0")]
+ pub fn split_off<Q: ?Sized + Ord>(&mut self, key: &Q) -> Self
+ where
+ T: Borrow<Q>,
+ {
+ BTreeSet { map: self.map.split_off(key) }
+ }
+
+ /// Creates an iterator which uses a closure to determine if a value should be removed.
+ ///
+ /// If the closure returns true, then the value is removed and yielded.
+ /// If the closure returns false, the value will remain in the list and will not be yielded
+ /// by the iterator.
+ ///
+ /// If the iterator is only partially consumed or not consumed at all, each of the remaining
+ /// values will still be subjected to the closure and removed and dropped if it returns true.
+ ///
+ /// It is unspecified how many more values will be subjected to the closure
+ /// if a panic occurs in the closure, or if a panic occurs while dropping a value, or if the
+ /// `DrainFilter` itself is leaked.
+ ///
+ /// # Examples
+ ///
+ /// Splitting a set into even and odd values, reusing the original set:
+ ///
+ /// ```
+ /// #![feature(btree_drain_filter)]
+ /// use std::collections::BTreeSet;
+ ///
+ /// let mut set: BTreeSet<i32> = (0..8).collect();
+ /// let evens: BTreeSet<_> = set.drain_filter(|v| v % 2 == 0).collect();
+ /// let odds = set;
+ /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![0, 2, 4, 6]);
+ /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7]);
+ /// ```
+ #[unstable(feature = "btree_drain_filter", issue = "70530")]
+ pub fn drain_filter<'a, F>(&'a mut self, pred: F) -> DrainFilter<'a, T, F>
+ where
+ F: 'a + FnMut(&T) -> bool,
+ {
+ DrainFilter { pred, inner: self.map.drain_filter_inner() }
+ }
+}
+
impl<T> BTreeSet<T> {
    /// Gets an iterator that visits the values in the `BTreeSet` in ascending order.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let set: BTreeSet<usize> = [1, 2, 3].iter().cloned().collect();
    /// let mut set_iter = set.iter();
    /// assert_eq!(set_iter.next(), Some(&1));
    /// assert_eq!(set_iter.next(), Some(&2));
    /// assert_eq!(set_iter.next(), Some(&3));
    /// assert_eq!(set_iter.next(), None);
    /// ```
    ///
    /// Values returned by the iterator are returned in ascending order:
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let set: BTreeSet<usize> = [3, 1, 2].iter().cloned().collect();
    /// let mut set_iter = set.iter();
    /// assert_eq!(set_iter.next(), Some(&1));
    /// assert_eq!(set_iter.next(), Some(&2));
    /// assert_eq!(set_iter.next(), Some(&3));
    /// assert_eq!(set_iter.next(), None);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn iter(&self) -> Iter<'_, T> {
        // A set iterator is just the underlying map's key iterator.
        Iter { iter: self.map.keys() }
    }

    /// Returns the number of elements in the set.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut v = BTreeSet::new();
    /// assert_eq!(v.len(), 0);
    /// v.insert(1);
    /// assert_eq!(v.len(), 1);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn len(&self) -> usize {
        self.map.len()
    }

    /// Returns `true` if the set contains no elements.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let mut v = BTreeSet::new();
    /// assert!(v.is_empty());
    /// v.insert(1);
    /// assert!(!v.is_empty());
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> FromIterator<T> for BTreeSet<T> {
+ fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> BTreeSet<T> {
+ let mut set = BTreeSet::new();
+ set.extend(iter);
+ set
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> IntoIterator for BTreeSet<T> {
    type Item = T;
    type IntoIter = IntoIter<T>;

    /// Gets an iterator for moving out the `BTreeSet`'s contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::collections::BTreeSet;
    ///
    /// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().cloned().collect();
    ///
    /// let v: Vec<_> = set.into_iter().collect();
    /// assert_eq!(v, [1, 2, 3, 4]);
    /// ```
    fn into_iter(self) -> IntoIter<T> {
        // Consume the underlying map; keys are extracted in `Iterator::next`.
        IntoIter { iter: self.map.into_iter() }
    }
}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a BTreeSet<T> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
/// An iterator produced by calling `drain_filter` on BTreeSet.
#[unstable(feature = "btree_drain_filter", issue = "70530")]
pub struct DrainFilter<'a, T, F>
where
    T: 'a,
    F: 'a + FnMut(&T) -> bool,
{
    // The caller-supplied predicate deciding which elements are drained.
    pred: F,
    // Shared drain machinery; the set reuses the map's implementation
    // with `()` values.
    inner: super::map::DrainFilterInner<'a, T, ()>,
}
+
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F> Drop for DrainFilter<'_, T, F>
where
    F: FnMut(&T) -> bool,
{
    fn drop(&mut self) {
        // Exhaust the iterator so the remaining elements are still run
        // through the predicate and removed/dropped, as documented on
        // `BTreeSet::drain_filter`.
        self.for_each(drop);
    }
}
+
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F> fmt::Debug for DrainFilter<'_, T, F>
where
    T: fmt::Debug,
    F: FnMut(&T) -> bool,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Show the element `peek` currently exposes (the next candidate),
        // without consuming it.
        f.debug_tuple("DrainFilter").field(&self.inner.peek().map(|(k, _)| k)).finish()
    }
}
+
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<'a, T, F> Iterator for DrainFilter<'_, T, F>
where
    F: 'a + FnMut(&T) -> bool,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        // Adapt the key-only predicate to the key/value shape the shared
        // map machinery expects; the `()` value is ignored.
        let pred = &mut self.pred;
        let mut mapped_pred = |k: &T, _v: &mut ()| pred(k);
        self.inner.next(&mut mapped_pred).map(|(k, _)| k)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}
+
// Once `next` returns `None`, it keeps returning `None`.
#[unstable(feature = "btree_drain_filter", issue = "70530")]
impl<T, F> FusedIterator for DrainFilter<'_, T, F> where F: FnMut(&T) -> bool {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Extend<T> for BTreeSet<T> {
+ #[inline]
+ fn extend<Iter: IntoIterator<Item = T>>(&mut self, iter: Iter) {
+ iter.into_iter().for_each(move |elem| {
+ self.insert(elem);
+ });
+ }
+
+ #[inline]
+ fn extend_one(&mut self, elem: T) {
+ self.insert(elem);
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Ord + Copy> Extend<&'a T> for BTreeSet<T> {
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().cloned());
+ }
+
+ #[inline]
+ fn extend_one(&mut self, &elem: &'a T) {
+ self.insert(elem);
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Ord> Default for BTreeSet<T> {
    /// Makes an empty `BTreeSet<T>` with a reasonable choice of B.
    fn default() -> BTreeSet<T> {
        BTreeSet::new()
    }
}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone> Sub<&BTreeSet<T>> for &BTreeSet<T> {
+ type Output = BTreeSet<T>;
+
+ /// Returns the difference of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+ /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
+ ///
+ /// let result = &a - &b;
+ /// let result_vec: Vec<_> = result.into_iter().collect();
+ /// assert_eq!(result_vec, [1, 2]);
+ /// ```
+ fn sub(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+ self.difference(rhs).cloned().collect()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone> BitXor<&BTreeSet<T>> for &BTreeSet<T> {
+ type Output = BTreeSet<T>;
+
+ /// Returns the symmetric difference of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+ /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
+ ///
+ /// let result = &a ^ &b;
+ /// let result_vec: Vec<_> = result.into_iter().collect();
+ /// assert_eq!(result_vec, [1, 4]);
+ /// ```
+ fn bitxor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+ self.symmetric_difference(rhs).cloned().collect()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone> BitAnd<&BTreeSet<T>> for &BTreeSet<T> {
+ type Output = BTreeSet<T>;
+
+ /// Returns the intersection of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+ /// let b: BTreeSet<_> = vec![2, 3, 4].into_iter().collect();
+ ///
+ /// let result = &a & &b;
+ /// let result_vec: Vec<_> = result.into_iter().collect();
+ /// assert_eq!(result_vec, [2, 3]);
+ /// ```
+ fn bitand(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+ self.intersection(rhs).cloned().collect()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord + Clone> BitOr<&BTreeSet<T>> for &BTreeSet<T> {
+ type Output = BTreeSet<T>;
+
+ /// Returns the union of `self` and `rhs` as a new `BTreeSet<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::BTreeSet;
+ ///
+ /// let a: BTreeSet<_> = vec![1, 2, 3].into_iter().collect();
+ /// let b: BTreeSet<_> = vec![3, 4, 5].into_iter().collect();
+ ///
+ /// let result = &a | &b;
+ /// let result_vec: Vec<_> = result.into_iter().collect();
+ /// assert_eq!(result_vec, [1, 2, 3, 4, 5]);
+ /// ```
+ fn bitor(self, rhs: &BTreeSet<T>) -> BTreeSet<T> {
+ self.union(rhs).cloned().collect()
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Debug> Debug for BTreeSet<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Renders as `{a, b, c}`, in ascending order.
        f.debug_set().entries(self.iter()).finish()
    }
}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+ fn clone(&self) -> Self {
+ Iter { iter: self.iter.clone() }
+ }
+}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> Iterator for Iter<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        self.iter.next()
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }

    // Because elements come out in ascending order, `last`/`max` are the
    // back element and `min` is the front one — no need to drain the
    // iterator as the default implementations would.
    fn last(mut self) -> Option<&'a T> {
        self.next_back()
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }

    fn max(mut self) -> Option<&'a T> {
        self.next_back()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
    fn next_back(&mut self) -> Option<&'a T> {
        // The underlying map key iterator is itself double-ended.
        self.iter.next_back()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for Iter<'_, T> {
    fn len(&self) -> usize {
        // Exact remaining count, delegated to the map's key iterator.
        self.iter.len()
    }
}
+
// After exhaustion, `next` keeps returning `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Iter<'_, T> {}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Iterator for IntoIter<T> {
    type Item = T;

    fn next(&mut self) -> Option<T> {
        // Discard the `()` value; a set only stores keys.
        self.iter.next().map(|(k, _)| k)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
    fn next_back(&mut self) -> Option<T> {
        // Same key extraction as `next`, from the other end.
        self.iter.next_back().map(|(k, _)| k)
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> ExactSizeIterator for IntoIter<T> {
    fn len(&self) -> usize {
        self.iter.len()
    }
}
+
// After exhaustion, `next` keeps returning `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for IntoIter<T> {}
+
+#[stable(feature = "btree_range", since = "1.17.0")]
+impl<T> Clone for Range<'_, T> {
+ fn clone(&self) -> Self {
+ Range { iter: self.iter.clone() }
+ }
+}
+
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, T> Iterator for Range<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        // Extract just the key from the underlying map range.
        self.iter.next().map(|(k, _)| k)
    }

    // Ascending order lets `last`/`max` grab the back element and `min`
    // the front one, instead of draining the iterator.
    fn last(mut self) -> Option<&'a T> {
        self.next_back()
    }

    fn min(mut self) -> Option<&'a T> {
        self.next()
    }

    fn max(mut self) -> Option<&'a T> {
        self.next_back()
    }
}
+
#[stable(feature = "btree_range", since = "1.17.0")]
impl<'a, T> DoubleEndedIterator for Range<'a, T> {
    fn next_back(&mut self) -> Option<&'a T> {
        self.iter.next_back().map(|(k, _)| k)
    }
}
+
// After exhaustion, `next` keeps returning `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Range<'_, T> {}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Difference<'_, T> {
    fn clone(&self) -> Self {
        // Clone whichever strategy variant is active; iterator state
        // (current position) is preserved by cloning the inner iterators,
        // and set references are simply copied.
        Difference {
            inner: match &self.inner {
                DifferenceInner::Stitch { self_iter, other_iter } => DifferenceInner::Stitch {
                    self_iter: self_iter.clone(),
                    other_iter: other_iter.clone(),
                },
                DifferenceInner::Search { self_iter, other_set } => {
                    DifferenceInner::Search { self_iter: self_iter.clone(), other_set }
                }
                DifferenceInner::Iterate(iter) => DifferenceInner::Iterate(iter.clone()),
            },
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Difference<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        match &mut self.inner {
            // Lockstep merge of two sorted streams: advance whichever side
            // is behind; yield `self`'s element once `other` has passed it.
            DifferenceInner::Stitch { self_iter, other_iter } => {
                let mut self_next = self_iter.next()?;
                loop {
                    // An exhausted `other_iter` peeks as `Less`, so the
                    // remaining `self` elements are all yielded.
                    match other_iter.peek().map_or(Less, |other_next| self_next.cmp(other_next)) {
                        Less => return Some(self_next),
                        Equal => {
                            // Present in both sets: skip on both sides.
                            self_next = self_iter.next()?;
                            other_iter.next();
                        }
                        Greater => {
                            other_iter.next();
                        }
                    }
                }
            }
            // `self` is small: test each of its elements against `other`.
            DifferenceInner::Search { self_iter, other_set } => loop {
                let self_next = self_iter.next()?;
                if !other_set.contains(&self_next) {
                    return Some(self_next);
                }
            },
            // Pre-resolved case: nothing in `other` can match.
            DifferenceInner::Iterate(iter) => iter.next(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let (self_len, other_len) = match &self.inner {
            DifferenceInner::Stitch { self_iter, other_iter } => {
                (self_iter.len(), other_iter.len())
            }
            DifferenceInner::Search { self_iter, other_set } => (self_iter.len(), other_set.len()),
            DifferenceInner::Iterate(iter) => (iter.len(), 0),
        };
        // At best every remaining `other` element removes one of `self`'s.
        (self_len.saturating_sub(other_len), Some(self_len))
    }

    fn min(mut self) -> Option<&'a T> {
        // Output is ascending, so the first element is the minimum.
        self.next()
    }
}
+
// After exhaustion, `next` keeps returning `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for Difference<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for SymmetricDifference<'_, T> {
+ fn clone(&self) -> Self {
+ SymmetricDifference(self.0.clone())
+ }
+}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        loop {
            let (a_next, b_next) = self.0.nexts();
            // When `nexts` produces an element from only one side, that
            // element is unique to its set — yield it (or `None` at the
            // end). When it produces one from both sides — presumably
            // equal elements, per `MergeIterInner`'s contract (defined in
            // the map module) — the element is in both sets, so skip it
            // and keep looping.
            if a_next.and(b_next).is_none() {
                return a_next.or(b_next);
            }
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let (a_len, b_len) = self.0.lens();
        // No checked_add, because even if a and b refer to the same set,
        // and T is an empty type, the storage overhead of sets limits
        // the number of elements to less than half the range of usize.
        (0, Some(a_len + b_len))
    }

    fn min(mut self) -> Option<&'a T> {
        // Output is ascending, so the first element is the minimum.
        self.next()
    }
}
+
// After exhaustion, `next` keeps returning `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for SymmetricDifference<'_, T> {}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> Clone for Intersection<'_, T> {
    fn clone(&self) -> Self {
        // Clone whichever strategy variant is active; `Answer` holds an
        // `Option<&T>`, which is `Copy`.
        Intersection {
            inner: match &self.inner {
                IntersectionInner::Stitch { a, b } => {
                    IntersectionInner::Stitch { a: a.clone(), b: b.clone() }
                }
                IntersectionInner::Search { small_iter, large_set } => {
                    IntersectionInner::Search { small_iter: small_iter.clone(), large_set }
                }
                IntersectionInner::Answer(answer) => IntersectionInner::Answer(*answer),
            },
        }
    }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Intersection<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        match &mut self.inner {
            // Lockstep merge: advance the lagging side until both agree.
            IntersectionInner::Stitch { a, b } => {
                let mut a_next = a.next()?;
                let mut b_next = b.next()?;
                loop {
                    match a_next.cmp(b_next) {
                        Less => a_next = a.next()?,
                        Greater => b_next = b.next()?,
                        Equal => return Some(a_next),
                    }
                }
            }
            // The small set drives; membership is checked in the large one.
            IntersectionInner::Search { small_iter, large_set } => loop {
                let small_next = small_iter.next()?;
                if large_set.contains(&small_next) {
                    return Some(small_next);
                }
            },
            // Pre-resolved answer (empty, or a single boundary element);
            // `take` yields it once and leaves `None` behind.
            IntersectionInner::Answer(answer) => answer.take(),
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        match &self.inner {
            IntersectionInner::Stitch { a, b } => (0, Some(min(a.len(), b.len()))),
            IntersectionInner::Search { small_iter, .. } => (0, Some(small_iter.len())),
            IntersectionInner::Answer(None) => (0, Some(0)),
            IntersectionInner::Answer(Some(_)) => (1, Some(1)),
        }
    }

    fn min(mut self) -> Option<&'a T> {
        // Output is ascending, so the first element is the minimum.
        self.next()
    }
}
+
// After exhaustion, `next` keeps returning `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for Intersection<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Union<'_, T> {
+ fn clone(&self) -> Self {
+ Union(self.0.clone())
+ }
+}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T: Ord> Iterator for Union<'a, T> {
    type Item = &'a T;

    fn next(&mut self) -> Option<&'a T> {
        // `nexts` advances the merge; when it returns an element from both
        // sides at once — presumably equal elements, per `MergeIterInner`'s
        // contract — yielding only `a_next` collapses the duplicate.
        let (a_next, b_next) = self.0.nexts();
        a_next.or(b_next)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let (a_len, b_len) = self.0.lens();
        // Lower bound: the larger set is a subset of the union.
        // No checked_add - see SymmetricDifference::size_hint.
        (max(a_len, b_len), Some(a_len + b_len))
    }

    fn min(mut self) -> Option<&'a T> {
        // Output is ascending, so the first element is the minimum.
        self.next()
    }
}
+
// After exhaustion, `next` keeps returning `None`.
#[stable(feature = "fused", since = "1.26.0")]
impl<T: Ord> FusedIterator for Union<'_, T> {}
diff --git a/library/alloc/src/collections/linked_list.rs b/library/alloc/src/collections/linked_list.rs
new file mode 100644
index 00000000000..1f875f6c521
--- /dev/null
+++ b/library/alloc/src/collections/linked_list.rs
@@ -0,0 +1,1904 @@
+//! A doubly-linked list with owned nodes.
+//!
+//! The `LinkedList` allows pushing and popping elements at either end
+//! in constant time.
+//!
+//! NOTE: It is almost always better to use [`Vec`] or [`VecDeque`] because
+//! array-based containers are generally faster,
+//! more memory efficient, and make better use of CPU cache.
+//!
+//! [`Vec`]: ../../vec/struct.Vec.html
+//! [`VecDeque`]: ../vec_deque/struct.VecDeque.html
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::cmp::Ordering;
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::iter::{FromIterator, FusedIterator};
+use core::marker::PhantomData;
+use core::mem;
+use core::ptr::NonNull;
+
+use super::SpecExtend;
+use crate::boxed::Box;
+
+#[cfg(test)]
+mod tests;
+
+/// A doubly-linked list with owned nodes.
+///
+/// The `LinkedList` allows pushing and popping elements at either end
+/// in constant time.
+///
+/// NOTE: It is almost always better to use `Vec` or `VecDeque` because
+/// array-based containers are generally faster,
+/// more memory efficient, and make better use of CPU cache.
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct LinkedList<T> {
+ // First node, or `None` when the list is empty.
+ head: Option<NonNull<Node<T>>>,
+ // Last node, or `None` when the list is empty.
+ tail: Option<NonNull<Node<T>>>,
+ // Number of elements; updated by every insertion/removal below.
+ len: usize,
+ // NOTE(review): `PhantomData<Box<Node<T>>>` marks that the list logically
+ // owns its boxed nodes (relevant for drop check / variance).
+ marker: PhantomData<Box<Node<T>>>,
+}
+
+// A single heap-allocated list node, linked in both directions.
+struct Node<T> {
+ next: Option<NonNull<Node<T>>>,
+ prev: Option<NonNull<Node<T>>>,
+ element: T,
+}
+
+/// An iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`iter`] method on [`LinkedList`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.LinkedList.html#method.iter
+/// [`LinkedList`]: struct.LinkedList.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+ // `next` walks `head` forward, `next_back` walks `tail` backward;
+ // `len` bounds iteration from both ends.
+ head: Option<NonNull<Node<T>>>,
+ tail: Option<NonNull<Node<T>>>,
+ len: usize,
+ marker: PhantomData<&'a Node<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+ /// Shows only the number of remaining elements, not the elements themselves.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut builder = f.debug_tuple("Iter");
+ builder.field(&self.len);
+ builder.finish()
+ }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+ /// Copies the iterator's cursor state; the underlying list is untouched.
+ fn clone(&self) -> Self {
+ Iter { head: self.head, tail: self.tail, len: self.len, marker: self.marker }
+ }
+}
+
+/// A mutable iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`LinkedList`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.LinkedList.html#method.iter_mut
+/// [`LinkedList`]: struct.LinkedList.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IterMut<'a, T: 'a> {
+ // We do *not* exclusively own the entire list here, references to node's `element`
+ // have been handed out by the iterator! So be careful when using this; the methods
+ // called must be aware that there can be aliasing pointers to `element`.
+ list: &'a mut LinkedList<T>,
+ // Same cursor scheme as `Iter`: `head`/`tail` mark the unvisited range,
+ // `len` counts the elements remaining in it.
+ head: Option<NonNull<Node<T>>>,
+ tail: Option<NonNull<Node<T>>>,
+ len: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
+ // Prints the whole underlying list plus the remaining length.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("IterMut").field(&self.list).field(&self.len).finish()
+ }
+}
+
+/// An owning iterator over the elements of a `LinkedList`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`LinkedList`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.LinkedList.html#method.into_iter
+/// [`LinkedList`]: struct.LinkedList.html
+#[derive(Clone)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<T> {
+ // The not-yet-yielded elements, kept as a list.
+ list: LinkedList<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+ // Prints the remaining elements via the inner list's Debug impl.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("IntoIter").field(&self.list).finish()
+ }
+}
+
+impl<T> Node<T> {
+ // Creates a fresh, unlinked node holding `element`.
+ fn new(element: T) -> Self {
+ Node { next: None, prev: None, element }
+ }
+
+ // Consumes the boxed node, freeing it and returning the element.
+ fn into_element(self: Box<Self>) -> T {
+ self.element
+ }
+}
+
+// private methods
+impl<T> LinkedList<T> {
+ /// Adds the given node to the front of the list.
+ #[inline]
+ fn push_front_node(&mut self, mut node: Box<Node<T>>) {
+ // This method takes care not to create mutable references to whole nodes,
+ // to maintain validity of aliasing pointers into `element`.
+ unsafe {
+ node.next = self.head;
+ node.prev = None;
+ let node = Some(Box::leak(node).into());
+
+ match self.head {
+ None => self.tail = node,
+ // Not creating new mutable (unique!) references overlapping `element`.
+ Some(head) => (*head.as_ptr()).prev = node,
+ }
+
+ self.head = node;
+ self.len += 1;
+ }
+ }
+
+ /// Removes and returns the node at the front of the list.
+ #[inline]
+ fn pop_front_node(&mut self) -> Option<Box<Node<T>>> {
+ // This method takes care not to create mutable references to whole nodes,
+ // to maintain validity of aliasing pointers into `element`.
+ self.head.map(|node| unsafe {
+ let node = Box::from_raw(node.as_ptr());
+ self.head = node.next;
+
+ match self.head {
+ None => self.tail = None,
+ // Not creating new mutable (unique!) references overlapping `element`.
+ Some(head) => (*head.as_ptr()).prev = None,
+ }
+
+ self.len -= 1;
+ node
+ })
+ }
+
+ /// Adds the given node to the back of the list.
+ #[inline]
+ fn push_back_node(&mut self, mut node: Box<Node<T>>) {
+ // This method takes care not to create mutable references to whole nodes,
+ // to maintain validity of aliasing pointers into `element`.
+ unsafe {
+ node.next = None;
+ node.prev = self.tail;
+ let node = Some(Box::leak(node).into());
+
+ match self.tail {
+ None => self.head = node,
+ // Not creating new mutable (unique!) references overlapping `element`.
+ Some(tail) => (*tail.as_ptr()).next = node,
+ }
+
+ self.tail = node;
+ self.len += 1;
+ }
+ }
+
+ /// Removes and returns the node at the back of the list.
+ #[inline]
+ fn pop_back_node(&mut self) -> Option<Box<Node<T>>> {
+ // This method takes care not to create mutable references to whole nodes,
+ // to maintain validity of aliasing pointers into `element`.
+ self.tail.map(|node| unsafe {
+ let node = Box::from_raw(node.as_ptr());
+ self.tail = node.prev;
+
+ match self.tail {
+ None => self.head = None,
+ // Not creating new mutable (unique!) references overlapping `element`.
+ Some(tail) => (*tail.as_ptr()).next = None,
+ }
+
+ self.len -= 1;
+ node
+ })
+ }
+
+ /// Unlinks the specified node from the current list.
+ ///
+ /// Warning: this will not check that the provided node belongs to the current list.
+ ///
+ /// This method takes care not to create mutable references to `element`, to
+ /// maintain validity of aliasing pointers.
+ #[inline]
+ unsafe fn unlink_node(&mut self, mut node: NonNull<Node<T>>) {
+ let node = unsafe { node.as_mut() }; // this one is ours now, we can create an &mut.
+
+ // Not creating new mutable (unique!) references overlapping `element`.
+ match node.prev {
+ Some(prev) => unsafe { (*prev.as_ptr()).next = node.next },
+ // this node is the head node
+ None => self.head = node.next,
+ };
+
+ match node.next {
+ Some(next) => unsafe { (*next.as_ptr()).prev = node.prev },
+ // this node is the tail node
+ None => self.tail = node.prev,
+ };
+
+ self.len -= 1;
+ }
+
+ /// Splices a series of nodes between two existing nodes.
+ ///
+ /// Warning: this will not check that the provided node belongs to the two existing lists.
+ #[inline]
+ unsafe fn splice_nodes(
+ &mut self,
+ existing_prev: Option<NonNull<Node<T>>>,
+ existing_next: Option<NonNull<Node<T>>>,
+ mut splice_start: NonNull<Node<T>>,
+ mut splice_end: NonNull<Node<T>>,
+ splice_length: usize,
+ ) {
+ // This method takes care not to create multiple mutable references to whole nodes at the same time,
+ // to maintain validity of aliasing pointers into `element`.
+ if let Some(mut existing_prev) = existing_prev {
+ unsafe {
+ existing_prev.as_mut().next = Some(splice_start);
+ }
+ } else {
+ self.head = Some(splice_start);
+ }
+ if let Some(mut existing_next) = existing_next {
+ unsafe {
+ existing_next.as_mut().prev = Some(splice_end);
+ }
+ } else {
+ self.tail = Some(splice_end);
+ }
+ unsafe {
+ splice_start.as_mut().prev = existing_prev;
+ splice_end.as_mut().next = existing_next;
+ }
+
+ self.len += splice_length;
+ }
+
+ /// Detaches all nodes from a linked list as a series of nodes.
+ #[inline]
+ fn detach_all_nodes(mut self) -> Option<(NonNull<Node<T>>, NonNull<Node<T>>, usize)> {
+ let head = self.head.take();
+ let tail = self.tail.take();
+ let len = mem::replace(&mut self.len, 0);
+ if let Some(head) = head {
+ // Relies on the list invariant that `head` and `tail` are either both
+ // `Some` or both `None`; `unreachable_unchecked` encodes that a present
+ // head guarantees a present tail.
+ let tail = tail.unwrap_or_else(|| unsafe { core::hint::unreachable_unchecked() });
+ Some((head, tail, len))
+ } else {
+ None
+ }
+ }
+
+ /// Splits off and returns the part of the list *before* `split_node`, which
+ /// becomes the new head of `self`. `at` must be the length of the returned
+ /// part; `None` for `split_node` moves the entire list out.
+ #[inline]
+ unsafe fn split_off_before_node(
+ &mut self,
+ split_node: Option<NonNull<Node<T>>>,
+ at: usize,
+ ) -> Self {
+ // The split node is the new head node of the second part
+ if let Some(mut split_node) = split_node {
+ let first_part_head;
+ let first_part_tail;
+ unsafe {
+ first_part_tail = split_node.as_mut().prev.take();
+ }
+ if let Some(mut tail) = first_part_tail {
+ unsafe {
+ tail.as_mut().next = None;
+ }
+ first_part_head = self.head;
+ } else {
+ first_part_head = None;
+ }
+
+ let first_part = LinkedList {
+ head: first_part_head,
+ tail: first_part_tail,
+ len: at,
+ marker: PhantomData,
+ };
+
+ // Fix the head ptr of the second part
+ self.head = Some(split_node);
+ self.len = self.len - at;
+
+ first_part
+ } else {
+ mem::replace(self, LinkedList::new())
+ }
+ }
+
+ /// Splits off and returns the part of the list *after* `split_node`, which
+ /// becomes the new tail of `self`. `at` must be the number of elements that
+ /// remain in `self`; `None` for `split_node` moves the entire list out.
+ #[inline]
+ unsafe fn split_off_after_node(
+ &mut self,
+ split_node: Option<NonNull<Node<T>>>,
+ at: usize,
+ ) -> Self {
+ // The split node is the new tail node of the first part and owns
+ // the head of the second part.
+ if let Some(mut split_node) = split_node {
+ let second_part_head;
+ let second_part_tail;
+ unsafe {
+ second_part_head = split_node.as_mut().next.take();
+ }
+ if let Some(mut head) = second_part_head {
+ unsafe {
+ head.as_mut().prev = None;
+ }
+ second_part_tail = self.tail;
+ } else {
+ second_part_tail = None;
+ }
+
+ let second_part = LinkedList {
+ head: second_part_head,
+ tail: second_part_tail,
+ len: self.len - at,
+ marker: PhantomData,
+ };
+
+ // Fix the tail ptr of the first part
+ self.tail = Some(split_node);
+ self.len = at;
+
+ second_part
+ } else {
+ mem::replace(self, LinkedList::new())
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for LinkedList<T> {
+ /// Creates an empty `LinkedList<T>`, equivalent to [`LinkedList::new`].
+ #[inline]
+ fn default() -> Self {
+ LinkedList::new()
+ }
+}
+
+impl<T> LinkedList<T> {
+ /// Creates an empty `LinkedList`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let list: LinkedList<u32> = LinkedList::new();
+ /// ```
+ #[inline]
+ #[rustc_const_stable(feature = "const_linked_list_new", since = "1.32.0")]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub const fn new() -> Self {
+ LinkedList { head: None, tail: None, len: 0, marker: PhantomData }
+ }
+
+ /// Moves all elements from `other` to the end of the list.
+ ///
+ /// This reuses all the nodes from `other` and moves them into `self`. After
+ /// this operation, `other` becomes empty.
+ ///
+ /// This operation should compute in *O*(1) time and *O*(1) memory.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut list1 = LinkedList::new();
+ /// list1.push_back('a');
+ ///
+ /// let mut list2 = LinkedList::new();
+ /// list2.push_back('b');
+ /// list2.push_back('c');
+ ///
+ /// list1.append(&mut list2);
+ ///
+ /// let mut iter = list1.iter();
+ /// assert_eq!(iter.next(), Some(&'a'));
+ /// assert_eq!(iter.next(), Some(&'b'));
+ /// assert_eq!(iter.next(), Some(&'c'));
+ /// assert!(iter.next().is_none());
+ ///
+ /// assert!(list2.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn append(&mut self, other: &mut Self) {
+ // An empty `self` simply takes over `other`'s nodes wholesale via swap;
+ // otherwise `other`'s head is linked onto our tail.
+ match self.tail {
+ None => mem::swap(self, other),
+ Some(mut tail) => {
+ // `as_mut` is okay here because we have exclusive access to the entirety
+ // of both lists.
+ if let Some(mut other_head) = other.head.take() {
+ unsafe {
+ tail.as_mut().next = Some(other_head);
+ other_head.as_mut().prev = Some(tail);
+ }
+
+ self.tail = other.tail.take();
+ self.len += mem::replace(&mut other.len, 0);
+ }
+ }
+ }
+ }
+
+ /// Moves all elements from `other` to the begin of the list.
+ #[unstable(feature = "linked_list_prepend", issue = "none")]
+ pub fn prepend(&mut self, other: &mut Self) {
+ // Mirror image of `append`: `other`'s tail is linked in front of our head.
+ match self.head {
+ None => mem::swap(self, other),
+ Some(mut head) => {
+ // `as_mut` is okay here because we have exclusive access to the entirety
+ // of both lists.
+ if let Some(mut other_tail) = other.tail.take() {
+ unsafe {
+ head.as_mut().prev = Some(other_tail);
+ other_tail.as_mut().next = Some(head);
+ }
+
+ self.head = other.head.take();
+ self.len += mem::replace(&mut other.len, 0);
+ }
+ }
+ }
+ }
+
+ /// Provides a forward iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut list: LinkedList<u32> = LinkedList::new();
+ ///
+ /// list.push_back(0);
+ /// list.push_back(1);
+ /// list.push_back(2);
+ ///
+ /// let mut iter = list.iter();
+ /// assert_eq!(iter.next(), Some(&0));
+ /// assert_eq!(iter.next(), Some(&1));
+ /// assert_eq!(iter.next(), Some(&2));
+ /// assert_eq!(iter.next(), None);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter(&self) -> Iter<'_, T> {
+ Iter { head: self.head, tail: self.tail, len: self.len, marker: PhantomData }
+ }
+
+ /// Provides a forward iterator with mutable references.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut list: LinkedList<u32> = LinkedList::new();
+ ///
+ /// list.push_back(0);
+ /// list.push_back(1);
+ /// list.push_back(2);
+ ///
+ /// for element in list.iter_mut() {
+ ///     *element += 10;
+ /// }
+ ///
+ /// let mut iter = list.iter();
+ /// assert_eq!(iter.next(), Some(&10));
+ /// assert_eq!(iter.next(), Some(&11));
+ /// assert_eq!(iter.next(), Some(&12));
+ /// assert_eq!(iter.next(), None);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter_mut(&mut self) -> IterMut<'_, T> {
+ IterMut { head: self.head, tail: self.tail, len: self.len, list: self }
+ }
+
+ /// Provides a cursor at the front element.
+ ///
+ /// The cursor is pointing to the "ghost" non-element if the list is empty.
+ #[inline]
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn cursor_front(&self) -> Cursor<'_, T> {
+ Cursor { index: 0, current: self.head, list: self }
+ }
+
+ /// Provides a cursor with editing operations at the front element.
+ ///
+ /// The cursor is pointing to the "ghost" non-element if the list is empty.
+ #[inline]
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn cursor_front_mut(&mut self) -> CursorMut<'_, T> {
+ CursorMut { index: 0, current: self.head, list: self }
+ }
+
+ /// Provides a cursor at the back element.
+ ///
+ /// The cursor is pointing to the "ghost" non-element if the list is empty.
+ #[inline]
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn cursor_back(&self) -> Cursor<'_, T> {
+ // For an empty list `len - 1` would underflow; the index saturates to 0
+ // while `current` is `None` (the cursor sits on the "ghost" non-element).
+ Cursor { index: self.len.checked_sub(1).unwrap_or(0), current: self.tail, list: self }
+ }
+
+ /// Provides a cursor with editing operations at the back element.
+ ///
+ /// The cursor is pointing to the "ghost" non-element if the list is empty.
+ #[inline]
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn cursor_back_mut(&mut self) -> CursorMut<'_, T> {
+ CursorMut { index: self.len.checked_sub(1).unwrap_or(0), current: self.tail, list: self }
+ }
+
+ /// Returns `true` if the `LinkedList` is empty.
+ ///
+ /// This operation should compute in *O*(1) time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut dl = LinkedList::new();
+ /// assert!(dl.is_empty());
+ ///
+ /// dl.push_front("foo");
+ /// assert!(!dl.is_empty());
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_empty(&self) -> bool {
+ self.head.is_none()
+ }
+
+ /// Returns the length of the `LinkedList`.
+ ///
+ /// This operation should compute in *O*(1) time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut dl = LinkedList::new();
+ ///
+ /// dl.push_front(2);
+ /// assert_eq!(dl.len(), 1);
+ ///
+ /// dl.push_front(1);
+ /// assert_eq!(dl.len(), 2);
+ ///
+ /// dl.push_back(3);
+ /// assert_eq!(dl.len(), 3);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn len(&self) -> usize {
+ self.len
+ }
+
+ /// Removes all elements from the `LinkedList`.
+ ///
+ /// This operation should compute in *O*(*n*) time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut dl = LinkedList::new();
+ ///
+ /// dl.push_front(2);
+ /// dl.push_front(1);
+ /// assert_eq!(dl.len(), 2);
+ /// assert_eq!(dl.front(), Some(&1));
+ ///
+ /// dl.clear();
+ /// assert_eq!(dl.len(), 0);
+ /// assert_eq!(dl.front(), None);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn clear(&mut self) {
+ // Dropping the old list (via the replacement) frees every node.
+ *self = Self::new();
+ }
+
+ /// Returns `true` if the `LinkedList` contains an element equal to the
+ /// given value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut list: LinkedList<u32> = LinkedList::new();
+ ///
+ /// list.push_back(0);
+ /// list.push_back(1);
+ /// list.push_back(2);
+ ///
+ /// assert_eq!(list.contains(&0), true);
+ /// assert_eq!(list.contains(&10), false);
+ /// ```
+ #[stable(feature = "linked_list_contains", since = "1.12.0")]
+ pub fn contains(&self, x: &T) -> bool
+ where
+ T: PartialEq<T>,
+ {
+ self.iter().any(|e| e == x)
+ }
+
+ /// Provides a reference to the front element, or `None` if the list is
+ /// empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut dl = LinkedList::new();
+ /// assert_eq!(dl.front(), None);
+ ///
+ /// dl.push_front(1);
+ /// assert_eq!(dl.front(), Some(&1));
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn front(&self) -> Option<&T> {
+ unsafe { self.head.as_ref().map(|node| &node.as_ref().element) }
+ }
+
+ /// Provides a mutable reference to the front element, or `None` if the list
+ /// is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut dl = LinkedList::new();
+ /// assert_eq!(dl.front(), None);
+ ///
+ /// dl.push_front(1);
+ /// assert_eq!(dl.front(), Some(&1));
+ ///
+ /// match dl.front_mut() {
+ ///     None => {},
+ ///     Some(x) => *x = 5,
+ /// }
+ /// assert_eq!(dl.front(), Some(&5));
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn front_mut(&mut self) -> Option<&mut T> {
+ unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
+ }
+
+ /// Provides a reference to the back element, or `None` if the list is
+ /// empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut dl = LinkedList::new();
+ /// assert_eq!(dl.back(), None);
+ ///
+ /// dl.push_back(1);
+ /// assert_eq!(dl.back(), Some(&1));
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn back(&self) -> Option<&T> {
+ unsafe { self.tail.as_ref().map(|node| &node.as_ref().element) }
+ }
+
+ /// Provides a mutable reference to the back element, or `None` if the list
+ /// is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut dl = LinkedList::new();
+ /// assert_eq!(dl.back(), None);
+ ///
+ /// dl.push_back(1);
+ /// assert_eq!(dl.back(), Some(&1));
+ ///
+ /// match dl.back_mut() {
+ ///     None => {},
+ ///     Some(x) => *x = 5,
+ /// }
+ /// assert_eq!(dl.back(), Some(&5));
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn back_mut(&mut self) -> Option<&mut T> {
+ unsafe { self.tail.as_mut().map(|node| &mut node.as_mut().element) }
+ }
+
+ /// Adds an element first in the list.
+ ///
+ /// This operation should compute in *O*(1) time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut dl = LinkedList::new();
+ ///
+ /// dl.push_front(2);
+ /// assert_eq!(dl.front().unwrap(), &2);
+ ///
+ /// dl.push_front(1);
+ /// assert_eq!(dl.front().unwrap(), &1);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push_front(&mut self, elt: T) {
+ self.push_front_node(box Node::new(elt));
+ }
+
+ /// Removes the first element and returns it, or `None` if the list is
+ /// empty.
+ ///
+ /// This operation should compute in *O*(1) time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut d = LinkedList::new();
+ /// assert_eq!(d.pop_front(), None);
+ ///
+ /// d.push_front(1);
+ /// d.push_front(3);
+ /// assert_eq!(d.pop_front(), Some(3));
+ /// assert_eq!(d.pop_front(), Some(1));
+ /// assert_eq!(d.pop_front(), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop_front(&mut self) -> Option<T> {
+ self.pop_front_node().map(Node::into_element)
+ }
+
+ /// Appends an element to the back of a list.
+ ///
+ /// This operation should compute in *O*(1) time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut d = LinkedList::new();
+ /// d.push_back(1);
+ /// d.push_back(3);
+ /// assert_eq!(3, *d.back().unwrap());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push_back(&mut self, elt: T) {
+ self.push_back_node(box Node::new(elt));
+ }
+
+ /// Removes the last element from a list and returns it, or `None` if
+ /// it is empty.
+ ///
+ /// This operation should compute in *O*(1) time.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut d = LinkedList::new();
+ /// assert_eq!(d.pop_back(), None);
+ /// d.push_back(1);
+ /// d.push_back(3);
+ /// assert_eq!(d.pop_back(), Some(3));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop_back(&mut self) -> Option<T> {
+ self.pop_back_node().map(Node::into_element)
+ }
+
+ /// Splits the list into two at the given index. Returns everything after the given index,
+ /// including the index.
+ ///
+ /// This operation should compute in *O*(*n*) time.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `at > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut d = LinkedList::new();
+ ///
+ /// d.push_front(1);
+ /// d.push_front(2);
+ /// d.push_front(3);
+ ///
+ /// let mut split = d.split_off(2);
+ ///
+ /// assert_eq!(split.pop_front(), Some(1));
+ /// assert_eq!(split.pop_front(), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn split_off(&mut self, at: usize) -> LinkedList<T> {
+ let len = self.len();
+ assert!(at <= len, "Cannot split off at a nonexistent index");
+ if at == 0 {
+ return mem::take(self);
+ } else if at == len {
+ return Self::new();
+ }
+
+ // Below, we iterate towards the `i-1`th node, either from the start or the end,
+ // depending on which would be faster.
+ let split_node = if at - 1 <= len - 1 - (at - 1) {
+ let mut iter = self.iter_mut();
+ // instead of skipping using .skip() (which creates a new struct),
+ // we skip manually so we can access the head field without
+ // depending on implementation details of Skip
+ for _ in 0..at - 1 {
+ iter.next();
+ }
+ iter.head
+ } else {
+ // better off starting from the end
+ let mut iter = self.iter_mut();
+ for _ in 0..len - 1 - (at - 1) {
+ iter.next_back();
+ }
+ iter.tail
+ };
+ unsafe { self.split_off_after_node(split_node, at) }
+ }
+
+ /// Removes the element at the given index and returns it.
+ ///
+ /// This operation should compute in *O*(*n*) time.
+ ///
+ /// # Panics
+ /// Panics if at >= len
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(linked_list_remove)]
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut d = LinkedList::new();
+ ///
+ /// d.push_front(1);
+ /// d.push_front(2);
+ /// d.push_front(3);
+ ///
+ /// assert_eq!(d.remove(1), 2);
+ /// assert_eq!(d.remove(0), 3);
+ /// assert_eq!(d.remove(0), 1);
+ /// ```
+ #[unstable(feature = "linked_list_remove", issue = "69210")]
+ pub fn remove(&mut self, at: usize) -> T {
+ let len = self.len();
+ assert!(at < len, "Cannot remove at an index outside of the list bounds");
+
+ // Below, we iterate towards the node at the given index, either from
+ // the start or the end, depending on which would be faster.
+ let offset_from_end = len - at - 1;
+ if at <= offset_from_end {
+ let mut cursor = self.cursor_front_mut();
+ for _ in 0..at {
+ cursor.move_next();
+ }
+ cursor.remove_current().unwrap()
+ } else {
+ let mut cursor = self.cursor_back_mut();
+ for _ in 0..offset_from_end {
+ cursor.move_prev();
+ }
+ cursor.remove_current().unwrap()
+ }
+ }
+
+ /// Creates an iterator which uses a closure to determine if an element should be removed.
+ ///
+ /// If the closure returns true, then the element is removed and yielded.
+ /// If the closure returns false, the element will remain in the list and will not be yielded
+ /// by the iterator.
+ ///
+ /// Note that `drain_filter` lets you mutate every element in the filter closure, regardless of
+ /// whether you choose to keep or remove it.
+ ///
+ /// # Examples
+ ///
+ /// Splitting a list into evens and odds, reusing the original list:
+ ///
+ /// ```
+ /// #![feature(drain_filter)]
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut numbers: LinkedList<u32> = LinkedList::new();
+ /// numbers.extend(&[1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15]);
+ ///
+ /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<LinkedList<_>>();
+ /// let odds = numbers;
+ ///
+ /// assert_eq!(evens.into_iter().collect::<Vec<_>>(), vec![2, 4, 6, 8, 14]);
+ /// assert_eq!(odds.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 9, 11, 13, 15]);
+ /// ```
+ #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+ pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
+ where
+ F: FnMut(&mut T) -> bool,
+ {
+ // avoid borrow issues.
+ let it = self.head;
+ let old_len = self.len;
+
+ DrainFilter { list: self, it, pred: filter, idx: 0, old_len }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for LinkedList<T> {
+ fn drop(&mut self) {
+ // `DropGuard` resumes popping nodes if an element's destructor panics,
+ // so no nodes leak during unwinding; a second panic aborts instead.
+ struct DropGuard<'a, T>(&'a mut LinkedList<T>);
+
+ impl<'a, T> Drop for DropGuard<'a, T> {
+ fn drop(&mut self) {
+ // Continue the same loop we do below. This only runs when a destructor has
+ // panicked. If another one panics this will abort.
+ while self.0.pop_front_node().is_some() {}
+ }
+ }
+
+ while let Some(node) = self.pop_front_node() {
+ let guard = DropGuard(self);
+ drop(node);
+ mem::forget(guard);
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ // The remaining length, not pointer nullness, decides when iteration
+ // stops (the two-ended cursor can exhaust before reaching a null link).
+ if self.len == 0 {
+ None
+ } else {
+ self.head.map(|node| unsafe {
+ // Need an unbound lifetime to get 'a
+ let node = &*node.as_ptr();
+ self.len -= 1;
+ self.head = node.next;
+ &node.element
+ })
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.len, Some(self.len))
+ }
+
+ #[inline]
+ fn last(mut self) -> Option<&'a T> {
+ // The last element forward is the first element backward.
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a T> {
+ // Symmetric to `next`: walk `prev` links from the tail, bounded by `len`.
+ if self.len == 0 {
+ None
+ } else {
+ self.tail.map(|node| unsafe {
+ // Need an unbound lifetime to get 'a
+ let node = &*node.as_ptr();
+ self.len -= 1;
+ self.tail = node.prev;
+ &node.element
+ })
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// `size_hint` returns `(len, Some(len))`, so the default `len` is exact.
+impl<T> ExactSizeIterator for Iter<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once `len` reaches 0 it never grows again, so `next` keeps returning `None`.
+impl<T> FusedIterator for Iter<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for IterMut<'a, T> {
+ type Item = &'a mut T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a mut T> {
+ // Same cursor scheme as `Iter::next`, but hands out `&mut` to `element`.
+ if self.len == 0 {
+ None
+ } else {
+ self.head.map(|node| unsafe {
+ // Need an unbound lifetime to get 'a
+ let node = &mut *node.as_ptr();
+ self.len -= 1;
+ self.head = node.next;
+ &mut node.element
+ })
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.len, Some(self.len))
+ }
+
+ #[inline]
+ fn last(mut self) -> Option<&'a mut T> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a mut T> {
+ // Symmetric to `next`: walk `prev` links from the tail, bounded by `len`.
+ if self.len == 0 {
+ None
+ } else {
+ self.tail.map(|node| unsafe {
+ // Need an unbound lifetime to get 'a
+ let node = &mut *node.as_ptr();
+ self.len -= 1;
+ self.tail = node.prev;
+ &mut node.element
+ })
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// `size_hint` returns `(len, Some(len))`, so the default `len` is exact.
+impl<T> ExactSizeIterator for IterMut<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+// Once `len` reaches 0 it never grows again, so `next` keeps returning `None`.
+impl<T> FusedIterator for IterMut<'_, T> {}
+
+impl<T> IterMut<'_, T> {
+ /// Inserts the given element just after the element most recently returned by `.next()`.
+ /// The inserted element does not appear in the iteration.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(linked_list_extras)]
+ ///
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut list: LinkedList<_> = vec![1, 3, 4].into_iter().collect();
+ ///
+ /// {
+ ///     let mut it = list.iter_mut();
+ ///     assert_eq!(it.next().unwrap(), &1);
+ ///     // insert `2` after `1`
+ ///     it.insert_next(2);
+ /// }
+ /// {
+ ///     let vec: Vec<_> = list.into_iter().collect();
+ ///     assert_eq!(vec, [1, 2, 3, 4]);
+ /// }
+ /// ```
+ #[inline]
+ #[unstable(
+ feature = "linked_list_extras",
+ reason = "this is probably better handled by a cursor type -- we'll see",
+ issue = "27794"
+ )]
+ pub fn insert_next(&mut self, element: T) {
+ // `self.head` is the *next* unvisited node, so the new node goes between
+ // it and its predecessor (the most recently yielded element).
+ match self.head {
+ // `push_back` is okay with aliasing `element` references
+ None => self.list.push_back(element),
+ Some(head) => unsafe {
+ let prev = match head.as_ref().prev {
+ // `push_front` is okay with aliasing nodes
+ None => return self.list.push_front(element),
+ Some(prev) => prev,
+ };
+
+ let node = Some(
+ Box::leak(box Node { next: Some(head), prev: Some(prev), element }).into(),
+ );
+
+ // Not creating references to entire nodes to not invalidate the
+ // reference to `element` we handed to the user.
+ (*prev.as_ptr()).next = node;
+ (*head.as_ptr()).prev = node;
+
+ self.list.len += 1;
+ },
+ }
+ }
+
+ /// Provides a reference to the next element, without changing the iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(linked_list_extras)]
+ ///
+ /// use std::collections::LinkedList;
+ ///
+ /// let mut list: LinkedList<_> = vec![1, 2, 3].into_iter().collect();
+ ///
+ /// let mut it = list.iter_mut();
+ /// assert_eq!(it.next().unwrap(), &1);
+ /// assert_eq!(it.peek_next().unwrap(), &2);
+ /// // We just peeked at 2, so it was not consumed from the iterator.
+ /// assert_eq!(it.next().unwrap(), &2);
+ /// ```
+ #[inline]
+ #[unstable(
+ feature = "linked_list_extras",
+ reason = "this is probably better handled by a cursor type -- we'll see",
+ issue = "27794"
+ )]
+ pub fn peek_next(&mut self) -> Option<&mut T> {
+ // `len == 0` means the iterator yields nothing more; don't peek past the range.
+ if self.len == 0 {
+ None
+ } else {
+ unsafe { self.head.as_mut().map(|node| &mut node.as_mut().element) }
+ }
+ }
+}
+
+/// A cursor over a `LinkedList`.
+///
+/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth.
+///
+/// Cursors always rest between two elements in the list, and index in a logically circular way.
+/// To accommodate this, there is a "ghost" non-element that yields `None` between the head and
+/// tail of the list.
+///
+/// When created, cursors start at the front of the list, or the "ghost" non-element if the list is empty.
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+pub struct Cursor<'a, T: 'a> {
+ // Logical position of the cursor within the list.
+ index: usize,
+ // Node the cursor points at; `None` when on the "ghost" non-element.
+ current: Option<NonNull<Node<T>>>,
+ list: &'a LinkedList<T>,
+}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+impl<T> Clone for Cursor<'_, T> {
+ // Written by hand so the impl carries no `T: Clone` bound: only the
+ // position and the shared list reference are copied, never any `T`.
+ fn clone(&self) -> Self {
+ let Cursor { index, current, list } = *self;
+ Cursor { index, current, list }
+ }
+}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+impl<T: fmt::Debug> fmt::Debug for Cursor<'_, T> {
+ // Renders as `Cursor(<list>, <index>)`; `index()` is `None` at the ghost.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Cursor").field(&self.list).field(&self.index()).finish()
+ }
+}
+
+/// A cursor over a `LinkedList` with editing operations.
+///
+/// A `Cursor` is like an iterator, except that it can freely seek back-and-forth, and can
+/// safely mutate the list during iteration. This is because the lifetime of its yielded
+/// references is tied to its own lifetime, instead of just the underlying list. This means
+/// cursors cannot yield multiple elements at once.
+///
+/// Cursors always rest between two elements in the list, and index in a logically circular way.
+/// To accommodate this, there is a "ghost" non-element that yields `None` between the head and
+/// tail of the list.
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+pub struct CursorMut<'a, T: 'a> {
+ // Logical position; only meaningful while `current` is `Some`.
+ index: usize,
+ // `None` encodes the "ghost" non-element between the tail and the head.
+ current: Option<NonNull<Node<T>>>,
+ // Exclusive borrow: this is what allows the editing operations below.
+ list: &'a mut LinkedList<T>,
+}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+impl<T: fmt::Debug> fmt::Debug for CursorMut<'_, T> {
+ // Same shape as `Cursor`'s Debug output, tagged `CursorMut`.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("CursorMut").field(&self.list).field(&self.index()).finish()
+ }
+}
+
+impl<'a, T> Cursor<'a, T> {
+ /// Returns the cursor position index within the `LinkedList`.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn index(&self) -> Option<usize> {
+ // `?` short-circuits to `None` when the cursor sits at the ghost.
+ let _ = self.current?;
+ Some(self.index)
+ }
+
+ /// Moves the cursor to the next element of the `LinkedList`.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this will move it to
+ /// the first element of the `LinkedList`. If it is pointing to the last
+ /// element of the `LinkedList` then this will move it to the "ghost" non-element.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn move_next(&mut self) {
+ match self.current.take() {
+ // We had no current element; the cursor was sitting at the start position
+ // Next element should be the head of the list
+ None => {
+ self.current = self.list.head;
+ self.index = 0;
+ }
+ // We had a previous element, so let's go to its next
+ Some(current) => unsafe {
+ // Stepping past the tail leaves `current` as `None` (the ghost)
+ // with `index == len`.
+ self.current = current.as_ref().next;
+ self.index += 1;
+ },
+ }
+ }
+
+ /// Moves the cursor to the previous element of the `LinkedList`.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this will move it to
+ /// the last element of the `LinkedList`. If it is pointing to the first
+ /// element of the `LinkedList` then this will move it to the "ghost" non-element.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn move_prev(&mut self) {
+ match self.current.take() {
+ // No current. We're at the start of the list. Yield None and jump to the end.
+ None => {
+ self.current = self.list.tail;
+ // `checked_sub` guards the empty-list case (len == 0 keeps index 0).
+ self.index = self.list.len().checked_sub(1).unwrap_or(0);
+ }
+ // Have a prev. Yield it and go to the previous element.
+ Some(current) => unsafe {
+ // Stepping before the head wraps the index to `len` (ghost position).
+ self.current = current.as_ref().prev;
+ self.index = self.index.checked_sub(1).unwrap_or_else(|| self.list.len());
+ },
+ }
+ }
+
+ /// Returns a reference to the element that the cursor is currently
+ /// pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn current(&self) -> Option<&'a T> {
+ // The `'a` lifetime means the reference borrows the *list*, so it may
+ // outlive this cursor.
+ unsafe { self.current.map(|current| &(*current.as_ptr()).element) }
+ }
+
+ /// Returns a reference to the next element.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this returns
+ /// the first element of the `LinkedList`. If it is pointing to the last
+ /// element of the `LinkedList` then this returns `None`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn peek_next(&self) -> Option<&'a T> {
+ unsafe {
+ // From the ghost, "next" wraps around to the head of the list.
+ let next = match self.current {
+ None => self.list.head,
+ Some(current) => current.as_ref().next,
+ };
+ next.map(|next| &(*next.as_ptr()).element)
+ }
+ }
+
+ /// Returns a reference to the previous element.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this returns
+ /// the last element of the `LinkedList`. If it is pointing to the first
+ /// element of the `LinkedList` then this returns `None`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn peek_prev(&self) -> Option<&'a T> {
+ unsafe {
+ // From the ghost, "prev" wraps around to the tail of the list.
+ let prev = match self.current {
+ None => self.list.tail,
+ Some(current) => current.as_ref().prev,
+ };
+ prev.map(|prev| &(*prev.as_ptr()).element)
+ }
+ }
+}
+
+impl<'a, T> CursorMut<'a, T> {
+ /// Returns the cursor position index within the `LinkedList`.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn index(&self) -> Option<usize> {
+ // `?` short-circuits to `None` when the cursor sits at the ghost.
+ let _ = self.current?;
+ Some(self.index)
+ }
+
+ /// Moves the cursor to the next element of the `LinkedList`.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this will move it to
+ /// the first element of the `LinkedList`. If it is pointing to the last
+ /// element of the `LinkedList` then this will move it to the "ghost" non-element.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn move_next(&mut self) {
+ // Identical navigation logic to `Cursor::move_next`.
+ match self.current.take() {
+ // We had no current element; the cursor was sitting at the start position
+ // Next element should be the head of the list
+ None => {
+ self.current = self.list.head;
+ self.index = 0;
+ }
+ // We had a previous element, so let's go to its next
+ Some(current) => unsafe {
+ self.current = current.as_ref().next;
+ self.index += 1;
+ },
+ }
+ }
+
+ /// Moves the cursor to the previous element of the `LinkedList`.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this will move it to
+ /// the last element of the `LinkedList`. If it is pointing to the first
+ /// element of the `LinkedList` then this will move it to the "ghost" non-element.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn move_prev(&mut self) {
+ // Identical navigation logic to `Cursor::move_prev`.
+ match self.current.take() {
+ // No current. We're at the start of the list. Yield None and jump to the end.
+ None => {
+ self.current = self.list.tail;
+ self.index = self.list.len().checked_sub(1).unwrap_or(0);
+ }
+ // Have a prev. Yield it and go to the previous element.
+ Some(current) => unsafe {
+ self.current = current.as_ref().prev;
+ self.index = self.index.checked_sub(1).unwrap_or_else(|| self.list.len());
+ },
+ }
+ }
+
+ /// Returns a reference to the element that the cursor is currently
+ /// pointing to.
+ ///
+ /// This returns `None` if the cursor is currently pointing to the
+ /// "ghost" non-element.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn current(&mut self) -> Option<&mut T> {
+ // Unlike `Cursor::current`, the reference is tied to the borrow of the
+ // cursor itself (`&mut self`), so only one element can be borrowed at a
+ // time — this is what makes mutation during iteration sound.
+ unsafe { self.current.map(|current| &mut (*current.as_ptr()).element) }
+ }
+
+ /// Returns a reference to the next element.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this returns
+ /// the first element of the `LinkedList`. If it is pointing to the last
+ /// element of the `LinkedList` then this returns `None`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn peek_next(&mut self) -> Option<&mut T> {
+ unsafe {
+ // From the ghost, "next" wraps around to the head of the list.
+ let next = match self.current {
+ None => self.list.head,
+ Some(current) => current.as_ref().next,
+ };
+ next.map(|next| &mut (*next.as_ptr()).element)
+ }
+ }
+
+ /// Returns a reference to the previous element.
+ ///
+ /// If the cursor is pointing to the "ghost" non-element then this returns
+ /// the last element of the `LinkedList`. If it is pointing to the first
+ /// element of the `LinkedList` then this returns `None`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn peek_prev(&mut self) -> Option<&mut T> {
+ unsafe {
+ // From the ghost, "prev" wraps around to the tail of the list.
+ let prev = match self.current {
+ None => self.list.tail,
+ Some(current) => current.as_ref().prev,
+ };
+ prev.map(|prev| &mut (*prev.as_ptr()).element)
+ }
+ }
+
+ /// Returns a read-only cursor pointing to the current element.
+ ///
+ /// The lifetime of the returned `Cursor` is bound to that of the
+ /// `CursorMut`, which means it cannot outlive the `CursorMut` and that the
+ /// `CursorMut` is frozen for the lifetime of the `Cursor`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn as_cursor(&self) -> Cursor<'_, T> {
+ Cursor { list: self.list, current: self.current, index: self.index }
+ }
+}
+
+// Now the list editing operations
+
+impl<'a, T> CursorMut<'a, T> {
+ /// Inserts a new element into the `LinkedList` after the current one.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the new element is
+ /// inserted at the front of the `LinkedList`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn insert_after(&mut self, item: T) {
+ unsafe {
+ // A single-node splice: the new node is both head and tail of the
+ // spliced-in range.
+ let spliced_node = Box::leak(Box::new(Node::new(item))).into();
+ let node_next = match self.current {
+ None => self.list.head,
+ Some(node) => node.as_ref().next,
+ };
+ self.list.splice_nodes(self.current, node_next, spliced_node, spliced_node, 1);
+ if self.current.is_none() {
+ // The "ghost" non-element's index has changed.
+ self.index = self.list.len;
+ }
+ }
+ }
+
+ /// Inserts a new element into the `LinkedList` before the current one.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the new element is
+ /// inserted at the end of the `LinkedList`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn insert_before(&mut self, item: T) {
+ unsafe {
+ let spliced_node = Box::leak(Box::new(Node::new(item))).into();
+ let node_prev = match self.current {
+ None => self.list.tail,
+ Some(node) => node.as_ref().prev,
+ };
+ self.list.splice_nodes(node_prev, self.current, spliced_node, spliced_node, 1);
+ // The new element sits before the current one, shifting our own
+ // position up by one.
+ self.index += 1;
+ }
+ }
+
+ /// Removes the current element from the `LinkedList`.
+ ///
+ /// The element that was removed is returned, and the cursor is
+ /// moved to point to the next element in the `LinkedList`.
+ ///
+ /// If the cursor is currently pointing to the "ghost" non-element then no element
+ /// is removed and `None` is returned.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn remove_current(&mut self) -> Option<T> {
+ let unlinked_node = self.current?;
+ unsafe {
+ // Advance first; `index` stays valid because everything after the
+ // removed node shifts down by one.
+ self.current = unlinked_node.as_ref().next;
+ self.list.unlink_node(unlinked_node);
+ // Reconstruct the Box to reclaim the allocation and move the element out.
+ let unlinked_node = Box::from_raw(unlinked_node.as_ptr());
+ Some(unlinked_node.element)
+ }
+ }
+
+ /// Removes the current element from the `LinkedList` without deallocating the list node.
+ ///
+ /// The node that was removed is returned as a new `LinkedList` containing only this node.
+ /// The cursor is moved to point to the next element in the current `LinkedList`.
+ ///
+ /// If the cursor is currently pointing to the "ghost" non-element then no element
+ /// is removed and `None` is returned.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn remove_current_as_list(&mut self) -> Option<LinkedList<T>> {
+ let mut unlinked_node = self.current?;
+ unsafe {
+ self.current = unlinked_node.as_ref().next;
+ self.list.unlink_node(unlinked_node);
+
+ // Clear the stale links so the node is a well-formed one-element list.
+ unlinked_node.as_mut().prev = None;
+ unlinked_node.as_mut().next = None;
+ Some(LinkedList {
+ head: Some(unlinked_node),
+ tail: Some(unlinked_node),
+ len: 1,
+ marker: PhantomData,
+ })
+ }
+ }
+
+ /// Inserts the elements from the given `LinkedList` after the current one.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the new elements are
+ /// inserted at the start of the `LinkedList`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn splice_after(&mut self, list: LinkedList<T>) {
+ unsafe {
+ // `detach_all_nodes` returns `None` for an empty donor list: nothing to do.
+ let (splice_head, splice_tail, splice_len) = match list.detach_all_nodes() {
+ Some(parts) => parts,
+ _ => return,
+ };
+ let node_next = match self.current {
+ None => self.list.head,
+ Some(node) => node.as_ref().next,
+ };
+ self.list.splice_nodes(self.current, node_next, splice_head, splice_tail, splice_len);
+ if self.current.is_none() {
+ // The "ghost" non-element's index has changed.
+ self.index = self.list.len;
+ }
+ }
+ }
+
+ /// Inserts the elements from the given `LinkedList` before the current one.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the new elements are
+ /// inserted at the end of the `LinkedList`.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn splice_before(&mut self, list: LinkedList<T>) {
+ unsafe {
+ let (splice_head, splice_tail, splice_len) = match list.detach_all_nodes() {
+ Some(parts) => parts,
+ _ => return,
+ };
+ let node_prev = match self.current {
+ None => self.list.tail,
+ Some(node) => node.as_ref().prev,
+ };
+ self.list.splice_nodes(node_prev, self.current, splice_head, splice_tail, splice_len);
+ // All spliced elements land before the cursor, so shift our index by
+ // the donor list's length.
+ self.index += splice_len;
+ }
+ }
+
+ /// Splits the list into two after the current element. This will return a
+ /// new list consisting of everything after the cursor, with the original
+ /// list retaining everything before.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the entire contents
+ /// of the `LinkedList` are moved.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn split_after(&mut self) -> LinkedList<T> {
+ // At the ghost (`index == len`) the split happens before the head, i.e.
+ // at offset 0; otherwise everything after `index` moves out.
+ let split_off_idx = if self.index == self.list.len { 0 } else { self.index + 1 };
+ if self.index == self.list.len {
+ // The "ghost" non-element's index has changed to 0.
+ self.index = 0;
+ }
+ unsafe { self.list.split_off_after_node(self.current, split_off_idx) }
+ }
+
+ /// Splits the list into two before the current element. This will return a
+ /// new list consisting of everything before the cursor, with the original
+ /// list retaining everything after.
+ ///
+ /// If the cursor is pointing at the "ghost" non-element then the entire contents
+ /// of the `LinkedList` are moved.
+ #[unstable(feature = "linked_list_cursors", issue = "58533")]
+ pub fn split_before(&mut self) -> LinkedList<T> {
+ // The current element becomes the head of the retained list, so the
+ // cursor's new index is 0.
+ let split_off_idx = self.index;
+ self.index = 0;
+ unsafe { self.list.split_off_before_node(self.current, split_off_idx) }
+ }
+}
+
+/// An iterator produced by calling `drain_filter` on LinkedList.
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+pub struct DrainFilter<'a, T: 'a, F: 'a>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ list: &'a mut LinkedList<T>,
+ // Next node to test against `pred`; `None` once the list end is reached.
+ it: Option<NonNull<Node<T>>>,
+ // Elements for which `pred` returns true are unlinked and yielded.
+ pred: F,
+ // Number of nodes visited so far; used by `size_hint`.
+ idx: usize,
+ // Presumably the list length when draining began (set by `drain_filter`
+ // — TODO confirm at the constructor); bounds `size_hint`'s upper estimate.
+ old_len: usize,
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T, F> Iterator for DrainFilter<'_, T, F>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<T> {
+ while let Some(mut node) = self.it {
+ unsafe {
+ // Advance the walk pointer *before* running the predicate, so
+ // unlinking `node` below cannot invalidate our position.
+ self.it = node.as_ref().next;
+ self.idx += 1;
+
+ if (self.pred)(&mut node.as_mut().element) {
+ // `unlink_node` is okay with aliasing `element` references.
+ self.list.unlink_node(node);
+ // Reconstruct the Box to free the node and move the element out.
+ return Some(Box::from_raw(node.as_ptr()).element);
+ }
+ }
+ }
+
+ None
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // Lower bound is 0 (the predicate may reject everything); upper bound
+ // is the number of not-yet-visited nodes.
+ (0, Some(self.old_len - self.idx))
+ }
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T, F> Drop for DrainFilter<'_, T, F>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ // Dropping the iterator drains and drops all remaining matching elements.
+ fn drop(&mut self) {
+ // Panic-safety guard: if dropping a yielded `item` below panics, this
+ // guard's own `drop` still consumes the rest of the iterator, so no
+ // matching elements are left half-drained in the list.
+ struct DropGuard<'r, 'a, T, F>(&'r mut DrainFilter<'a, T, F>)
+ where
+ F: FnMut(&mut T) -> bool;
+
+ impl<'r, 'a, T, F> Drop for DropGuard<'r, 'a, T, F>
+ where
+ F: FnMut(&mut T) -> bool,
+ {
+ fn drop(&mut self) {
+ self.0.for_each(drop);
+ }
+ }
+
+ while let Some(item) = self.next() {
+ let guard = DropGuard(self);
+ drop(item);
+ // `drop(item)` did not panic, so disarm the guard for this iteration.
+ mem::forget(guard);
+ }
+ }
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T: fmt::Debug, F> fmt::Debug for DrainFilter<'_, T, F>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ // Only the underlying list is shown; the predicate closure has no `Debug`.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("DrainFilter").field(&self.list).finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ // Consuming iteration is just repeated `pop_front` on the owned list.
+ self.list.pop_front()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ // The list tracks its exact length, so the hint is exact — this is what
+ // justifies the `ExactSizeIterator` impl below.
+ (self.list.len, Some(self.list.len))
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.list.pop_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {}
+
+// Popping from either end never yields `Some` again after the first `None`.
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> FromIterator<T> for LinkedList<T> {
+ // Delegates to `Extend`, which in turn benefits from the `SpecExtend`
+ // specialization below.
+ fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
+ let mut list = Self::new();
+ list.extend(iter);
+ list
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for LinkedList<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ /// Consumes the list into an iterator yielding elements by value.
+ #[inline]
+ fn into_iter(self) -> IntoIter<T> {
+ IntoIter { list: self }
+ }
+}
+
+// Borrowing `IntoIterator` impls simply delegate to `iter`/`iter_mut`,
+// enabling `for x in &list` and `for x in &mut list`.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a LinkedList<T> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a mut LinkedList<T> {
+ type Item = &'a mut T;
+ type IntoIter = IterMut<'a, T>;
+
+ fn into_iter(self) -> IterMut<'a, T> {
+ self.iter_mut()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Extend<T> for LinkedList<T> {
+ // Routed through `SpecExtend` so extending from another `LinkedList` can
+ // be specialized (see below).
+ fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ <Self as SpecExtend<I>>::spec_extend(self, iter);
+ }
+
+ #[inline]
+ fn extend_one(&mut self, elem: T) {
+ self.push_back(elem);
+ }
+}
+
+// Generic fallback: push each element individually.
+impl<I: IntoIterator> SpecExtend<I> for LinkedList<I::Item> {
+ default fn spec_extend(&mut self, iter: I) {
+ iter.into_iter().for_each(move |elt| self.push_back(elt));
+ }
+}
+
+// Specialization: extending from another `LinkedList` relinks the donor's
+// nodes via `append` instead of moving elements one by one.
+impl<T> SpecExtend<LinkedList<T>> for LinkedList<T> {
+ // `ref mut other` rebinds the by-value argument as a place so `append`
+ // can take it by `&mut`.
+ fn spec_extend(&mut self, ref mut other: LinkedList<T>) {
+ self.append(other);
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList<T> {
+ // Copies the referenced elements (`T: Copy`) into the list.
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ self.extend(iter.into_iter().cloned());
+ }
+
+ #[inline]
+ fn extend_one(&mut self, &elem: &'a T) {
+ self.push_back(elem);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialEq> PartialEq for LinkedList<T> {
+ // The O(1) length comparison short-circuits before the O(n) element walk.
+ fn eq(&self, other: &Self) -> bool {
+ self.len() == other.len() && self.iter().eq(other)
+ }
+
+ // Explicit `ne` mirrors `eq` with the same length short-circuit.
+ fn ne(&self, other: &Self) -> bool {
+ self.len() != other.len() || self.iter().ne(other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Eq> Eq for LinkedList<T> {}
+
+// Ordering comparisons are lexicographic, delegated to the iterator adaptors.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialOrd> PartialOrd for LinkedList<T> {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.iter().partial_cmp(other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Ord for LinkedList<T> {
+ #[inline]
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.iter().cmp(other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for LinkedList<T> {
+ fn clone(&self) -> Self {
+ self.iter().cloned().collect()
+ }
+
+ // Reuses `self`'s existing nodes instead of reallocating the whole list:
+ // truncate any excess, clone element-by-element into the nodes that
+ // remain, then append fresh nodes for any surplus in `other`.
+ fn clone_from(&mut self, other: &Self) {
+ let mut iter_other = other.iter();
+ if self.len() > other.len() {
+ self.split_off(other.len());
+ }
+ for (elem, elem_other) in self.iter_mut().zip(&mut iter_other) {
+ elem.clone_from(elem_other);
+ }
+ if !iter_other.is_empty() {
+ self.extend(iter_other.cloned());
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for LinkedList<T> {
+ // Formats like a slice, e.g. `[1, 2, 3]`.
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(self).finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Hash> Hash for LinkedList<T> {
+ // The length is hashed before the elements, so lists of different lengths
+ // feed different streams into the hasher.
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.len().hash(state);
+ for elt in self {
+ elt.hash(state);
+ }
+ }
+}
+
+// Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
+// This is a compile-time-only check: if covariance were lost, these functions
+// would fail to type-check. They are never called.
+// NOTE(review): `IterMut` is absent — presumably because `&mut T` is invariant
+// in `T`, so no such coercion should compile for it.
+#[allow(dead_code)]
+fn assert_covariance() {
+ fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
+ x
+ }
+ fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
+ x
+ }
+ fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
+ x
+ }
+}
+
+// Thread-safety: `LinkedList` owns its elements through raw pointers, so it is
+// `Send`/`Sync` exactly when `T` is. Views that hand out only `&T` (`Iter`,
+// `Cursor`) require `T: Sync` even to be `Send`; views that hand out `&mut T`
+// (`IterMut`, `CursorMut`) follow `&mut T`'s rules (`Send` iff `T: Send`,
+// `Sync` iff `T: Sync`).
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Send> Send for LinkedList<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for LinkedList<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Send for Iter<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for Iter<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Send> Send for IterMut<'_, T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for IterMut<'_, T> {}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+unsafe impl<T: Sync> Send for Cursor<'_, T> {}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+unsafe impl<T: Sync> Sync for Cursor<'_, T> {}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+unsafe impl<T: Send> Send for CursorMut<'_, T> {}
+
+#[unstable(feature = "linked_list_cursors", issue = "58533")]
+unsafe impl<T: Sync> Sync for CursorMut<'_, T> {}
diff --git a/library/alloc/src/collections/linked_list/tests.rs b/library/alloc/src/collections/linked_list/tests.rs
new file mode 100644
index 00000000000..b8c93a28bba
--- /dev/null
+++ b/library/alloc/src/collections/linked_list/tests.rs
@@ -0,0 +1,457 @@
+use super::*;
+
+use std::thread;
+use std::vec::Vec;
+
+use rand::{thread_rng, RngCore};
+
+// Test helper: builds a list by cloning the elements of a slice.
+fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
+ v.iter().cloned().collect()
+}
+
+// Structural invariant checker used throughout these tests: walks the list
+// front-to-back verifying that every node's `prev` points at its predecessor,
+// that `tail` points at the last node, and that the stored `len` matches the
+// actual node count.
+pub fn check_links<T>(list: &LinkedList<T>) {
+ unsafe {
+ let mut len = 0;
+ let mut last_ptr: Option<&Node<T>> = None;
+ let mut node_ptr: &Node<T>;
+ match list.head {
+ None => {
+ // tail node should also be None.
+ assert!(list.tail.is_none());
+ assert_eq!(0, list.len);
+ return;
+ }
+ Some(node) => node_ptr = &*node.as_ptr(),
+ }
+ loop {
+ match (last_ptr, node_ptr.prev) {
+ (None, None) => {}
+ (None, _) => panic!("prev link for head"),
+ (Some(p), Some(pptr)) => {
+ assert_eq!(p as *const Node<T>, pptr.as_ptr() as *const Node<T>);
+ }
+ _ => panic!("prev link is none, not good"),
+ }
+ match node_ptr.next {
+ Some(next) => {
+ last_ptr = Some(node_ptr);
+ node_ptr = &*next.as_ptr();
+ len += 1;
+ }
+ None => {
+ // Count the final node before leaving the loop.
+ len += 1;
+ break;
+ }
+ }
+ }
+
+ // verify that the tail node points to the last node.
+ let tail = list.tail.as_ref().expect("some tail node").as_ref();
+ assert_eq!(tail as *const Node<T>, node_ptr as *const Node<T>);
+ // check that len matches interior links.
+ assert_eq!(len, list.len);
+ }
+}
+
+#[test]
+// Exercises `append` across all four emptiness combinations and confirms the
+// donor list is left usable afterwards.
+fn test_append() {
+ // Empty to empty
+ {
+ let mut m = LinkedList::<i32>::new();
+ let mut n = LinkedList::new();
+ m.append(&mut n);
+ check_links(&m);
+ assert_eq!(m.len(), 0);
+ assert_eq!(n.len(), 0);
+ }
+ // Non-empty to empty
+ {
+ let mut m = LinkedList::new();
+ let mut n = LinkedList::new();
+ n.push_back(2);
+ m.append(&mut n);
+ check_links(&m);
+ assert_eq!(m.len(), 1);
+ assert_eq!(m.pop_back(), Some(2));
+ assert_eq!(n.len(), 0);
+ check_links(&m);
+ }
+ // Empty to non-empty
+ {
+ let mut m = LinkedList::new();
+ let mut n = LinkedList::new();
+ m.push_back(2);
+ m.append(&mut n);
+ check_links(&m);
+ assert_eq!(m.len(), 1);
+ assert_eq!(m.pop_back(), Some(2));
+ check_links(&m);
+ }
+
+ // Non-empty to non-empty
+ let v = vec![1, 2, 3, 4, 5];
+ let u = vec![9, 8, 1, 2, 3, 4, 5];
+ let mut m = list_from(&v);
+ let mut n = list_from(&u);
+ m.append(&mut n);
+ check_links(&m);
+ let mut sum = v;
+ sum.extend_from_slice(&u);
+ assert_eq!(sum.len(), m.len());
+ for elt in sum {
+ assert_eq!(m.pop_front(), Some(elt))
+ }
+ assert_eq!(n.len(), 0);
+ // Let's make sure it's working properly, since we
+ // did some direct changes to private members.
+ n.push_back(3);
+ assert_eq!(n.len(), 1);
+ assert_eq!(n.pop_front(), Some(3));
+ check_links(&n);
+}
+
+#[test]
+// Covers the three branches of `clone_from`: target longer than source
+// (truncation), shorter (extension), and equal length (in-place clone only).
+fn test_clone_from() {
+ // Short cloned from long
+ {
+ let v = vec![1, 2, 3, 4, 5];
+ let u = vec![8, 7, 6, 2, 3, 4, 5];
+ let mut m = list_from(&v);
+ let n = list_from(&u);
+ m.clone_from(&n);
+ check_links(&m);
+ assert_eq!(m, n);
+ for elt in u {
+ assert_eq!(m.pop_front(), Some(elt))
+ }
+ }
+ // Long cloned from short
+ {
+ let v = vec![1, 2, 3, 4, 5];
+ let u = vec![6, 7, 8];
+ let mut m = list_from(&v);
+ let n = list_from(&u);
+ m.clone_from(&n);
+ check_links(&m);
+ assert_eq!(m, n);
+ for elt in u {
+ assert_eq!(m.pop_front(), Some(elt))
+ }
+ }
+ // Two equal length lists
+ {
+ let v = vec![1, 2, 3, 4, 5];
+ let u = vec![9, 8, 1, 2, 3];
+ let mut m = list_from(&v);
+ let n = list_from(&u);
+ m.clone_from(&n);
+ check_links(&m);
+ assert_eq!(m, n);
+ for elt in u {
+ assert_eq!(m.pop_front(), Some(elt))
+ }
+ }
+}
+
+#[test]
+// Interleaves `IterMut::insert_next` and `peek_next` while walking the list,
+// then checks both the link structure and the final element order.
+fn test_insert_prev() {
+ let mut m = list_from(&[0, 2, 4, 6, 8]);
+ let len = m.len();
+ {
+ let mut it = m.iter_mut();
+ it.insert_next(-2);
+ loop {
+ match it.next() {
+ None => break,
+ Some(elt) => {
+ it.insert_next(*elt + 1);
+ match it.peek_next() {
+ Some(x) => assert_eq!(*x, *elt + 2),
+ None => assert_eq!(8, *elt),
+ }
+ }
+ }
+ }
+ // Inserting at the exhausted iterator appends at the back.
+ it.insert_next(0);
+ it.insert_next(1);
+ }
+ check_links(&m);
+ assert_eq!(m.len(), 3 + len * 2);
+ assert_eq!(m.into_iter().collect::<Vec<_>>(), [-2, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1]);
+}
+
+#[test]
+#[cfg_attr(target_os = "emscripten", ignore)]
+// Moving a list into another thread exercises the `Send` impl at runtime.
+fn test_send() {
+ let n = list_from(&[1, 2, 3]);
+ thread::spawn(move || {
+ check_links(&n);
+ let a: &[_] = &[&1, &2, &3];
+ assert_eq!(a, &*n.iter().collect::<Vec<_>>());
+ })
+ .join()
+ .ok()
+ .unwrap();
+}
+
+#[test]
+// Randomized push/pop sequences at several sizes (see `fuzz_test` below).
+fn test_fuzz() {
+ for _ in 0..25 {
+ fuzz_test(3);
+ fuzz_test(16);
+ #[cfg(not(miri))] // Miri is too slow
+ fuzz_test(189);
+ }
+}
+
+#[test]
+fn test_26021() {
+ // There was a bug in split_off that failed to null out the RHS's head's prev ptr.
+ // This caused the RHS's dtor to walk up into the LHS at drop and delete all of
+ // its nodes.
+ //
+ // https://github.com/rust-lang/rust/issues/26021
+ let mut v1 = LinkedList::new();
+ v1.push_front(1);
+ v1.push_front(1);
+ v1.push_front(1);
+ v1.push_front(1);
+ let _ = v1.split_off(3); // Dropping this now should not cause laundry consumption
+ assert_eq!(v1.len(), 3);
+
+ assert_eq!(v1.iter().len(), 3);
+ assert_eq!(v1.iter().collect::<Vec<_>>().len(), 3);
+}
+
+#[test]
+// Splits a 4-element list at every possible index (including both ends) and
+// verifies that re-joining the halves reproduces the original.
+fn test_split_off() {
+ let mut v1 = LinkedList::new();
+ v1.push_front(1);
+ v1.push_front(1);
+ v1.push_front(1);
+ v1.push_front(1);
+
+ // test all splits
+ for ix in 0..1 + v1.len() {
+ let mut a = v1.clone();
+ let b = a.split_off(ix);
+ check_links(&a);
+ check_links(&b);
+ a.extend(b);
+ assert_eq!(v1, a);
+ }
+}
+
+// Randomized differential test: applies `sz` random push/pop operations to a
+// `LinkedList` and a `Vec` in lockstep, checking the link invariants after
+// every step and the final contents at the end.
+fn fuzz_test(sz: i32) {
+ let mut m: LinkedList<_> = LinkedList::new();
+ let mut v = vec![];
+ for i in 0..sz {
+ check_links(&m);
+ let r: u8 = thread_rng().next_u32() as u8;
+ // Operation mix: 1/6 pop_back, 1/6 pop_front, 2/6 push_front, 2/6 push_back.
+ match r % 6 {
+ 0 => {
+ m.pop_back();
+ v.pop();
+ }
+ 1 => {
+ if !v.is_empty() {
+ m.pop_front();
+ v.remove(0);
+ }
+ }
+ 2 | 4 => {
+ m.push_front(-i);
+ v.insert(0, -i);
+ }
+ 3 | 5 | _ => {
+ m.push_back(i);
+ v.push(i);
+ }
+ }
+ }
+
+ check_links(&m);
+
+ let mut i = 0;
+ for (a, &b) in m.into_iter().zip(&v) {
+ i += 1;
+ assert_eq!(a, b);
+ }
+ assert_eq!(i, v.len());
+}
+
+#[test]
+// Partial drain: only the matching prefix is removed; the rest stays linked.
+fn drain_filter_test() {
+ let mut m: LinkedList<u32> = LinkedList::new();
+ m.extend(&[1, 2, 3, 4, 5, 6]);
+ let deleted = m.drain_filter(|v| *v < 4).collect::<Vec<_>>();
+
+ check_links(&m);
+
+ assert_eq!(deleted, &[1, 2, 3]);
+ assert_eq!(m.into_iter().collect::<Vec<_>>(), &[4, 5, 6]);
+}
+
+#[test]
+// Full drain: an always-true predicate must leave a valid empty list.
+fn drain_to_empty_test() {
+ let mut m: LinkedList<u32> = LinkedList::new();
+ m.extend(&[1, 2, 3, 4, 5, 6]);
+ let deleted = m.drain_filter(|_| true).collect::<Vec<_>>();
+
+ check_links(&m);
+
+ assert_eq!(deleted, &[1, 2, 3, 4, 5, 6]);
+ assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
+}
+
+#[test]
+// Walks `Cursor` and `CursorMut` around a six-element list from both ends,
+// checking `current`/`peek_next`/`peek_prev`/`index` at every stop, including
+// the wrap-around through the "ghost" position and `as_cursor` independence.
+fn test_cursor_move_peek() {
+ let mut m: LinkedList<u32> = LinkedList::new();
+ m.extend(&[1, 2, 3, 4, 5, 6]);
+ let mut cursor = m.cursor_front();
+ assert_eq!(cursor.current(), Some(&1));
+ assert_eq!(cursor.peek_next(), Some(&2));
+ assert_eq!(cursor.peek_prev(), None);
+ assert_eq!(cursor.index(), Some(0));
+ cursor.move_prev();
+ assert_eq!(cursor.current(), None);
+ assert_eq!(cursor.peek_next(), Some(&1));
+ assert_eq!(cursor.peek_prev(), Some(&6));
+ assert_eq!(cursor.index(), None);
+ cursor.move_next();
+ cursor.move_next();
+ assert_eq!(cursor.current(), Some(&2));
+ assert_eq!(cursor.peek_next(), Some(&3));
+ assert_eq!(cursor.peek_prev(), Some(&1));
+ assert_eq!(cursor.index(), Some(1));
+
+ let mut cursor = m.cursor_back();
+ assert_eq!(cursor.current(), Some(&6));
+ assert_eq!(cursor.peek_next(), None);
+ assert_eq!(cursor.peek_prev(), Some(&5));
+ assert_eq!(cursor.index(), Some(5));
+ cursor.move_next();
+ assert_eq!(cursor.current(), None);
+ assert_eq!(cursor.peek_next(), Some(&1));
+ assert_eq!(cursor.peek_prev(), Some(&6));
+ assert_eq!(cursor.index(), None);
+ cursor.move_prev();
+ cursor.move_prev();
+ assert_eq!(cursor.current(), Some(&5));
+ assert_eq!(cursor.peek_next(), Some(&6));
+ assert_eq!(cursor.peek_prev(), Some(&4));
+ assert_eq!(cursor.index(), Some(4));
+
+ let mut m: LinkedList<u32> = LinkedList::new();
+ m.extend(&[1, 2, 3, 4, 5, 6]);
+ let mut cursor = m.cursor_front_mut();
+ assert_eq!(cursor.current(), Some(&mut 1));
+ assert_eq!(cursor.peek_next(), Some(&mut 2));
+ assert_eq!(cursor.peek_prev(), None);
+ assert_eq!(cursor.index(), Some(0));
+ cursor.move_prev();
+ assert_eq!(cursor.current(), None);
+ assert_eq!(cursor.peek_next(), Some(&mut 1));
+ assert_eq!(cursor.peek_prev(), Some(&mut 6));
+ assert_eq!(cursor.index(), None);
+ cursor.move_next();
+ cursor.move_next();
+ assert_eq!(cursor.current(), Some(&mut 2));
+ assert_eq!(cursor.peek_next(), Some(&mut 3));
+ assert_eq!(cursor.peek_prev(), Some(&mut 1));
+ assert_eq!(cursor.index(), Some(1));
+ // Moving the derived read-only cursor must not move the `CursorMut`.
+ let mut cursor2 = cursor.as_cursor();
+ assert_eq!(cursor2.current(), Some(&2));
+ assert_eq!(cursor2.index(), Some(1));
+ cursor2.move_next();
+ assert_eq!(cursor2.current(), Some(&3));
+ assert_eq!(cursor2.index(), Some(2));
+ assert_eq!(cursor.current(), Some(&mut 2));
+ assert_eq!(cursor.index(), Some(1));
+
+ let mut m: LinkedList<u32> = LinkedList::new();
+ m.extend(&[1, 2, 3, 4, 5, 6]);
+ let mut cursor = m.cursor_back_mut();
+ assert_eq!(cursor.current(), Some(&mut 6));
+ assert_eq!(cursor.peek_next(), None);
+ assert_eq!(cursor.peek_prev(), Some(&mut 5));
+ assert_eq!(cursor.index(), Some(5));
+ cursor.move_next();
+ assert_eq!(cursor.current(), None);
+ assert_eq!(cursor.peek_next(), Some(&mut 1));
+ assert_eq!(cursor.peek_prev(), Some(&mut 6));
+ assert_eq!(cursor.index(), None);
+ cursor.move_prev();
+ cursor.move_prev();
+ assert_eq!(cursor.current(), Some(&mut 5));
+ assert_eq!(cursor.peek_next(), Some(&mut 6));
+ assert_eq!(cursor.peek_prev(), Some(&mut 4));
+ assert_eq!(cursor.index(), Some(4));
+ let mut cursor2 = cursor.as_cursor();
+ assert_eq!(cursor2.current(), Some(&5));
+ assert_eq!(cursor2.index(), Some(4));
+ cursor2.move_prev();
+ assert_eq!(cursor2.current(), Some(&4));
+ assert_eq!(cursor2.index(), Some(3));
+ assert_eq!(cursor.current(), Some(&mut 5));
+ assert_eq!(cursor.index(), Some(4));
+}
+
+#[test]
+// End-to-end exercise of the `CursorMut` editing API: insert before/after
+// (including at the ghost), `remove_current`, `splice_before`/`splice_after`,
+// and `split_before`/`split_after`, validating links after every phase.
+fn test_cursor_mut_insert() {
+ let mut m: LinkedList<u32> = LinkedList::new();
+ m.extend(&[1, 2, 3, 4, 5, 6]);
+ let mut cursor = m.cursor_front_mut();
+ cursor.insert_before(7);
+ cursor.insert_after(8);
+ check_links(&m);
+ assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[7, 1, 8, 2, 3, 4, 5, 6]);
+ let mut cursor = m.cursor_front_mut();
+ cursor.move_prev();
+ // At the ghost: insert_before appends at the back, insert_after prepends.
+ cursor.insert_before(9);
+ cursor.insert_after(10);
+ check_links(&m);
+ assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[10, 7, 1, 8, 2, 3, 4, 5, 6, 9]);
+ let mut cursor = m.cursor_front_mut();
+ cursor.move_prev();
+ assert_eq!(cursor.remove_current(), None);
+ cursor.move_next();
+ cursor.move_next();
+ assert_eq!(cursor.remove_current(), Some(7));
+ cursor.move_prev();
+ cursor.move_prev();
+ cursor.move_prev();
+ assert_eq!(cursor.remove_current(), Some(9));
+ cursor.move_next();
+ assert_eq!(cursor.remove_current(), Some(10));
+ check_links(&m);
+ assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[1, 8, 2, 3, 4, 5, 6]);
+ let mut cursor = m.cursor_front_mut();
+ let mut p: LinkedList<u32> = LinkedList::new();
+ p.extend(&[100, 101, 102, 103]);
+ let mut q: LinkedList<u32> = LinkedList::new();
+ q.extend(&[200, 201, 202, 203]);
+ cursor.splice_after(p);
+ cursor.splice_before(q);
+ check_links(&m);
+ assert_eq!(
+ m.iter().cloned().collect::<Vec<_>>(),
+ &[200, 201, 202, 203, 1, 100, 101, 102, 103, 8, 2, 3, 4, 5, 6]
+ );
+ let mut cursor = m.cursor_front_mut();
+ cursor.move_prev();
+ // Splitting before the ghost moves the whole list out.
+ let tmp = cursor.split_before();
+ assert_eq!(m.into_iter().collect::<Vec<_>>(), &[]);
+ m = tmp;
+ let mut cursor = m.cursor_front_mut();
+ cursor.move_next();
+ cursor.move_next();
+ cursor.move_next();
+ cursor.move_next();
+ cursor.move_next();
+ cursor.move_next();
+ let tmp = cursor.split_after();
+ assert_eq!(tmp.into_iter().collect::<Vec<_>>(), &[102, 103, 8, 2, 3, 4, 5, 6]);
+ check_links(&m);
+ assert_eq!(m.iter().cloned().collect::<Vec<_>>(), &[200, 201, 202, 203, 1, 100, 101]);
+}
diff --git a/library/alloc/src/collections/mod.rs b/library/alloc/src/collections/mod.rs
new file mode 100644
index 00000000000..6b21e54f66a
--- /dev/null
+++ b/library/alloc/src/collections/mod.rs
@@ -0,0 +1,103 @@
+//! Collection types.
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+pub mod binary_heap;
+mod btree;
+pub mod linked_list;
+pub mod vec_deque;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub mod btree_map {
+ //! A map based on a B-Tree.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub use super::btree::map::*;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub mod btree_set {
+ //! A set based on a B-Tree.
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub use super::btree::set::*;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use binary_heap::BinaryHeap;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use btree_map::BTreeMap;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use btree_set::BTreeSet;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use linked_list::LinkedList;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(no_inline)]
+pub use vec_deque::VecDeque;
+
+use crate::alloc::{Layout, LayoutErr};
+use core::fmt::Display;
+
+/// The error type for `try_reserve` methods.
+#[derive(Clone, PartialEq, Eq, Debug)]
+#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+pub enum TryReserveError {
+ /// Error due to the computed capacity exceeding the collection's maximum
+ /// (usually `isize::MAX` bytes).
+ CapacityOverflow,
+
+ /// The memory allocator returned an error
+ AllocError {
+ /// The layout of allocation request that failed
+ layout: Layout,
+
+ #[doc(hidden)]
+ #[unstable(
+ feature = "container_error_extra",
+ issue = "none",
+ reason = "\
+ Enable exposing the allocator’s custom error value \
+ if an associated type is added in the future: \
+ https://github.com/rust-lang/wg-allocators/issues/23"
+ )]
+ non_exhaustive: (),
+ },
+}
+
+#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+impl From<LayoutErr> for TryReserveError {
+ #[inline]
+ fn from(_: LayoutErr) -> Self {
+ TryReserveError::CapacityOverflow
+ }
+}
+
+#[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+impl Display for TryReserveError {
+ fn fmt(
+ &self,
+ fmt: &mut core::fmt::Formatter<'_>,
+ ) -> core::result::Result<(), core::fmt::Error> {
+ fmt.write_str("memory allocation failed")?;
+ let reason = match &self {
+ TryReserveError::CapacityOverflow => {
+ " because the computed capacity exceeded the collection's maximum"
+ }
+ TryReserveError::AllocError { .. } => " because the memory allocator returned an error",
+ };
+ fmt.write_str(reason)
+ }
+}
+
+/// An intermediate trait for specialization of `Extend`.
+#[doc(hidden)]
+trait SpecExtend<I: IntoIterator> {
+ /// Extends `self` with the contents of the given iterator.
+ fn spec_extend(&mut self, iter: I);
+}
diff --git a/library/alloc/src/collections/vec_deque.rs b/library/alloc/src/collections/vec_deque.rs
new file mode 100644
index 00000000000..d3c6d493d6d
--- /dev/null
+++ b/library/alloc/src/collections/vec_deque.rs
@@ -0,0 +1,3117 @@
+//! A double-ended queue implemented with a growable ring buffer.
+//!
+//! This queue has *O*(1) amortized inserts and removals from both ends of the
+//! container. It also has *O*(1) indexing like a vector. The contained elements
+//! are not required to be copyable, and the queue will be sendable if the
+//! contained type is sendable.
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+// ignore-tidy-filelength
+
+use core::cmp::{self, Ordering};
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::iter::{once, repeat_with, FromIterator, FusedIterator};
+use core::mem::{self, replace, ManuallyDrop};
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{Index, IndexMut, RangeBounds, Try};
+use core::ptr::{self, NonNull};
+use core::slice;
+
+use crate::collections::TryReserveError;
+use crate::raw_vec::RawVec;
+use crate::vec::Vec;
+
+#[stable(feature = "drain", since = "1.6.0")]
+pub use self::drain::Drain;
+
+mod drain;
+
+#[cfg(test)]
+mod tests;
+
+const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
+const MINIMUM_CAPACITY: usize = 1; // 2 - 1
+#[cfg(target_pointer_width = "16")]
+const MAXIMUM_ZST_CAPACITY: usize = 1 << (16 - 1); // Largest possible power of two
+#[cfg(target_pointer_width = "32")]
+const MAXIMUM_ZST_CAPACITY: usize = 1 << (32 - 1); // Largest possible power of two
+#[cfg(target_pointer_width = "64")]
+const MAXIMUM_ZST_CAPACITY: usize = 1 << (64 - 1); // Largest possible power of two
+
+/// A double-ended queue implemented with a growable ring buffer.
+///
+/// The "default" usage of this type as a queue is to use [`push_back`] to add to
+/// the queue, and [`pop_front`] to remove from the queue. [`extend`] and [`append`]
+/// push onto the back in this manner, and iterating over `VecDeque` goes front
+/// to back.
+///
+/// [`push_back`]: #method.push_back
+/// [`pop_front`]: #method.pop_front
+/// [`extend`]: #method.extend
+/// [`append`]: #method.append
+#[cfg_attr(not(test), rustc_diagnostic_item = "vecdeque_type")]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct VecDeque<T> {
+ // tail and head are pointers into the buffer. Tail always points
+ // to the first element that could be read, Head always points
+ // to where data should be written.
+ // If tail == head the buffer is empty. The length of the ringbuffer
+ // is defined as the distance between the two.
+ tail: usize,
+ head: usize,
+ buf: RawVec<T>,
+}
+
+/// PairSlices pairs up equal length slice parts of two deques
+///
+/// For example, given deques "A" and "B" with the following division into slices:
+///
+/// A: [0 1 2] [3 4 5]
+/// B: [a b] [c d e]
+///
+/// It produces the following sequence of matching slices:
+///
+/// ([0 1], [a b])
+/// (\[2\], \[c\])
+/// ([3 4], [d e])
+///
+/// and the uneven remainder of either A or B is skipped.
+struct PairSlices<'a, 'b, T> {
+ a0: &'a mut [T],
+ a1: &'a mut [T],
+ b0: &'b [T],
+ b1: &'b [T],
+}
+
+impl<'a, 'b, T> PairSlices<'a, 'b, T> {
+ fn from(to: &'a mut VecDeque<T>, from: &'b VecDeque<T>) -> Self {
+ let (a0, a1) = to.as_mut_slices();
+ let (b0, b1) = from.as_slices();
+ PairSlices { a0, a1, b0, b1 }
+ }
+
+ fn has_remainder(&self) -> bool {
+ !self.b0.is_empty()
+ }
+
+ fn remainder(self) -> impl Iterator<Item = &'b [T]> {
+ once(self.b0).chain(once(self.b1))
+ }
+}
+
+impl<'a, 'b, T> Iterator for PairSlices<'a, 'b, T> {
+ type Item = (&'a mut [T], &'b [T]);
+ fn next(&mut self) -> Option<Self::Item> {
+ // Get next part length
+ let part = cmp::min(self.a0.len(), self.b0.len());
+ if part == 0 {
+ return None;
+ }
+ let (p0, p1) = replace(&mut self.a0, &mut []).split_at_mut(part);
+ let (q0, q1) = self.b0.split_at(part);
+
+ // Move a1 into a0, if it's empty (and b1, b0 the same way).
+ self.a0 = p1;
+ self.b0 = q1;
+ if self.a0.is_empty() {
+ self.a0 = replace(&mut self.a1, &mut []);
+ }
+ if self.b0.is_empty() {
+ self.b0 = replace(&mut self.b1, &[]);
+ }
+ Some((p0, q0))
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for VecDeque<T> {
+ fn clone(&self) -> VecDeque<T> {
+ self.iter().cloned().collect()
+ }
+
+ fn clone_from(&mut self, other: &Self) {
+ self.truncate(other.len());
+
+ let mut iter = PairSlices::from(self, other);
+ while let Some((dst, src)) = iter.next() {
+ dst.clone_from_slice(&src);
+ }
+
+ if iter.has_remainder() {
+ for remainder in iter.remainder() {
+ self.extend(remainder.iter().cloned());
+ }
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for VecDeque<T> {
+ fn drop(&mut self) {
+ /// Runs the destructor for all items in the slice when it gets dropped (normally or
+ /// during unwinding).
+ struct Dropper<'a, T>(&'a mut [T]);
+
+ impl<'a, T> Drop for Dropper<'a, T> {
+ fn drop(&mut self) {
+ unsafe {
+ ptr::drop_in_place(self.0);
+ }
+ }
+ }
+
+ let (front, back) = self.as_mut_slices();
+ unsafe {
+ let _back_dropper = Dropper(back);
+ // use drop for [T]
+ ptr::drop_in_place(front);
+ }
+ // RawVec handles deallocation
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for VecDeque<T> {
+ /// Creates an empty `VecDeque<T>`.
+ #[inline]
+ fn default() -> VecDeque<T> {
+ VecDeque::new()
+ }
+}
+
+impl<T> VecDeque<T> {
+ /// Marginally more convenient
+ #[inline]
+ fn ptr(&self) -> *mut T {
+ self.buf.ptr()
+ }
+
+ /// Marginally more convenient
+ #[inline]
+ fn cap(&self) -> usize {
+ if mem::size_of::<T>() == 0 {
+ // For zero sized types, we are always at maximum capacity
+ MAXIMUM_ZST_CAPACITY
+ } else {
+ self.buf.capacity()
+ }
+ }
+
+ /// Turn ptr into a slice
+ #[inline]
+ unsafe fn buffer_as_slice(&self) -> &[T] {
+ unsafe { slice::from_raw_parts(self.ptr(), self.cap()) }
+ }
+
+ /// Turn ptr into a mut slice
+ #[inline]
+ unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
+ unsafe { slice::from_raw_parts_mut(self.ptr(), self.cap()) }
+ }
+
+ /// Moves an element out of the buffer
+ #[inline]
+ unsafe fn buffer_read(&mut self, off: usize) -> T {
+ unsafe { ptr::read(self.ptr().add(off)) }
+ }
+
+ /// Writes an element into the buffer, moving it.
+ #[inline]
+ unsafe fn buffer_write(&mut self, off: usize, value: T) {
+ unsafe {
+ ptr::write(self.ptr().add(off), value);
+ }
+ }
+
+ /// Returns `true` if the buffer is at full capacity.
+ #[inline]
+ fn is_full(&self) -> bool {
+ self.cap() - self.len() == 1
+ }
+
+ /// Returns the index in the underlying buffer for a given logical element
+ /// index.
+ #[inline]
+ fn wrap_index(&self, idx: usize) -> usize {
+ wrap_index(idx, self.cap())
+ }
+
+ /// Returns the index in the underlying buffer for a given logical element
+ /// index + addend.
+ #[inline]
+ fn wrap_add(&self, idx: usize, addend: usize) -> usize {
+ wrap_index(idx.wrapping_add(addend), self.cap())
+ }
+
+ /// Returns the index in the underlying buffer for a given logical element
+ /// index - subtrahend.
+ #[inline]
+ fn wrap_sub(&self, idx: usize, subtrahend: usize) -> usize {
+ wrap_index(idx.wrapping_sub(subtrahend), self.cap())
+ }
+
+ /// Copies a contiguous block of memory len long from src to dst
+ #[inline]
+ unsafe fn copy(&self, dst: usize, src: usize, len: usize) {
+ debug_assert!(
+ dst + len <= self.cap(),
+ "cpy dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap()
+ );
+ debug_assert!(
+ src + len <= self.cap(),
+ "cpy dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap()
+ );
+ unsafe {
+ ptr::copy(self.ptr().add(src), self.ptr().add(dst), len);
+ }
+ }
+
+ /// Copies a contiguous block of memory len long from src to dst
+ #[inline]
+ unsafe fn copy_nonoverlapping(&self, dst: usize, src: usize, len: usize) {
+ debug_assert!(
+ dst + len <= self.cap(),
+ "cno dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap()
+ );
+ debug_assert!(
+ src + len <= self.cap(),
+ "cno dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap()
+ );
+ unsafe {
+ ptr::copy_nonoverlapping(self.ptr().add(src), self.ptr().add(dst), len);
+ }
+ }
+
+ /// Copies a potentially wrapping block of memory len long from src to dest.
+ /// (abs(dst - src) + len) must be no larger than cap() (There must be at
+ /// most one continuous overlapping region between src and dest).
+ unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) {
+ #[allow(dead_code)]
+ fn diff(a: usize, b: usize) -> usize {
+ if a <= b { b - a } else { a - b }
+ }
+ debug_assert!(
+ cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
+ "wrc dst={} src={} len={} cap={}",
+ dst,
+ src,
+ len,
+ self.cap()
+ );
+
+ if src == dst || len == 0 {
+ return;
+ }
+
+ let dst_after_src = self.wrap_sub(dst, src) < len;
+
+ let src_pre_wrap_len = self.cap() - src;
+ let dst_pre_wrap_len = self.cap() - dst;
+ let src_wraps = src_pre_wrap_len < len;
+ let dst_wraps = dst_pre_wrap_len < len;
+
+ match (dst_after_src, src_wraps, dst_wraps) {
+ (_, false, false) => {
+ // src doesn't wrap, dst doesn't wrap
+ //
+ // S . . .
+ // 1 [_ _ A A B B C C _]
+ // 2 [_ _ A A A A B B _]
+ // D . . .
+ //
+ unsafe {
+ self.copy(dst, src, len);
+ }
+ }
+ (false, false, true) => {
+ // dst before src, src doesn't wrap, dst wraps
+ //
+ // S . . .
+ // 1 [A A B B _ _ _ C C]
+ // 2 [A A B B _ _ _ A A]
+ // 3 [B B B B _ _ _ A A]
+ // . . D .
+ //
+ unsafe {
+ self.copy(dst, src, dst_pre_wrap_len);
+ self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+ }
+ }
+ (true, false, true) => {
+ // src before dst, src doesn't wrap, dst wraps
+ //
+ // S . . .
+ // 1 [C C _ _ _ A A B B]
+ // 2 [B B _ _ _ A A B B]
+ // 3 [B B _ _ _ A A A A]
+ // . . D .
+ //
+ unsafe {
+ self.copy(0, src + dst_pre_wrap_len, len - dst_pre_wrap_len);
+ self.copy(dst, src, dst_pre_wrap_len);
+ }
+ }
+ (false, true, false) => {
+ // dst before src, src wraps, dst doesn't wrap
+ //
+ // . . S .
+ // 1 [C C _ _ _ A A B B]
+ // 2 [C C _ _ _ B B B B]
+ // 3 [C C _ _ _ B B C C]
+ // D . . .
+ //
+ unsafe {
+ self.copy(dst, src, src_pre_wrap_len);
+ self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+ }
+ }
+ (true, true, false) => {
+ // src before dst, src wraps, dst doesn't wrap
+ //
+ // . . S .
+ // 1 [A A B B _ _ _ C C]
+ // 2 [A A A A _ _ _ C C]
+ // 3 [C C A A _ _ _ C C]
+ // D . . .
+ //
+ unsafe {
+ self.copy(dst + src_pre_wrap_len, 0, len - src_pre_wrap_len);
+ self.copy(dst, src, src_pre_wrap_len);
+ }
+ }
+ (false, true, true) => {
+ // dst before src, src wraps, dst wraps
+ //
+ // . . . S .
+ // 1 [A B C D _ E F G H]
+ // 2 [A B C D _ E G H H]
+ // 3 [A B C D _ E G H A]
+ // 4 [B C C D _ E G H A]
+ // . . D . .
+ //
+ debug_assert!(dst_pre_wrap_len > src_pre_wrap_len);
+ let delta = dst_pre_wrap_len - src_pre_wrap_len;
+ unsafe {
+ self.copy(dst, src, src_pre_wrap_len);
+ self.copy(dst + src_pre_wrap_len, 0, delta);
+ self.copy(0, delta, len - dst_pre_wrap_len);
+ }
+ }
+ (true, true, true) => {
+ // src before dst, src wraps, dst wraps
+ //
+ // . . S . .
+ // 1 [A B C D _ E F G H]
+ // 2 [A A B D _ E F G H]
+ // 3 [H A B D _ E F G H]
+ // 4 [H A B D _ E F F G]
+ // . . . D .
+ //
+ debug_assert!(src_pre_wrap_len > dst_pre_wrap_len);
+ let delta = src_pre_wrap_len - dst_pre_wrap_len;
+ unsafe {
+ self.copy(delta, 0, len - src_pre_wrap_len);
+ self.copy(0, self.cap() - delta, delta);
+ self.copy(dst, src, dst_pre_wrap_len);
+ }
+ }
+ }
+ }
+
+ /// Frobs the head and tail sections around to handle the fact that we
+ /// just reallocated. Unsafe because it trusts old_capacity.
+ #[inline]
+ unsafe fn handle_capacity_increase(&mut self, old_capacity: usize) {
+ let new_capacity = self.cap();
+
+ // Move the shortest contiguous section of the ring buffer
+ // T H
+ // [o o o o o o o . ]
+ // T H
+ // A [o o o o o o o . . . . . . . . . ]
+ // H T
+ // [o o . o o o o o ]
+ // T H
+ // B [. . . o o o o o o o . . . . . . ]
+ // H T
+ // [o o o o o . o o ]
+ // H T
+ // C [o o o o o . . . . . . . . . o o ]
+
+ if self.tail <= self.head {
+ // A
+ // Nop
+ } else if self.head < old_capacity - self.tail {
+ // B
+ unsafe {
+ self.copy_nonoverlapping(old_capacity, 0, self.head);
+ }
+ self.head += old_capacity;
+ debug_assert!(self.head > self.tail);
+ } else {
+ // C
+ let new_tail = new_capacity - (old_capacity - self.tail);
+ unsafe {
+ self.copy_nonoverlapping(new_tail, self.tail, old_capacity - self.tail);
+ }
+ self.tail = new_tail;
+ debug_assert!(self.head < self.tail);
+ }
+ debug_assert!(self.head < self.cap());
+ debug_assert!(self.tail < self.cap());
+ debug_assert!(self.cap().count_ones() == 1);
+ }
+}
+
+impl<T> VecDeque<T> {
+ /// Creates an empty `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let vector: VecDeque<u32> = VecDeque::new();
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn new() -> VecDeque<T> {
+ VecDeque::with_capacity(INITIAL_CAPACITY)
+ }
+
+ /// Creates an empty `VecDeque` with space for at least `capacity` elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let vector: VecDeque<u32> = VecDeque::with_capacity(10);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn with_capacity(capacity: usize) -> VecDeque<T> {
+ // +1 since the ringbuffer always leaves one space empty
+ let cap = cmp::max(capacity + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
+ assert!(cap > capacity, "capacity overflow");
+
+ VecDeque { tail: 0, head: 0, buf: RawVec::with_capacity(cap) }
+ }
+
+ /// Provides a reference to the element at the given index.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// buf.push_back(5);
+ /// assert_eq!(buf.get(1), Some(&4));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get(&self, index: usize) -> Option<&T> {
+ if index < self.len() {
+ let idx = self.wrap_add(self.tail, index);
+ unsafe { Some(&*self.ptr().add(idx)) }
+ } else {
+ None
+ }
+ }
+
+ /// Provides a mutable reference to the element at the given index.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// buf.push_back(5);
+ /// if let Some(elem) = buf.get_mut(1) {
+ /// *elem = 7;
+ /// }
+ ///
+ /// assert_eq!(buf[1], 7);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn get_mut(&mut self, index: usize) -> Option<&mut T> {
+ if index < self.len() {
+ let idx = self.wrap_add(self.tail, index);
+ unsafe { Some(&mut *self.ptr().add(idx)) }
+ } else {
+ None
+ }
+ }
+
+ /// Swaps elements at indices `i` and `j`.
+ ///
+ /// `i` and `j` may be equal.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Panics
+ ///
+ /// Panics if either index is out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// buf.push_back(5);
+ /// assert_eq!(buf, [3, 4, 5]);
+ /// buf.swap(0, 2);
+ /// assert_eq!(buf, [5, 4, 3]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn swap(&mut self, i: usize, j: usize) {
+ assert!(i < self.len());
+ assert!(j < self.len());
+ let ri = self.wrap_add(self.tail, i);
+ let rj = self.wrap_add(self.tail, j);
+ unsafe { ptr::swap(self.ptr().add(ri), self.ptr().add(rj)) }
+ }
+
+ /// Returns the number of elements the `VecDeque` can hold without
+ /// reallocating.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let buf: VecDeque<i32> = VecDeque::with_capacity(10);
+ /// assert!(buf.capacity() >= 10);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn capacity(&self) -> usize {
+ self.cap() - 1
+ }
+
+ /// Reserves the minimum capacity for exactly `additional` more elements to be inserted in the
+ /// given `VecDeque`. Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it requests. Therefore
+ /// capacity cannot be relied upon to be precisely minimal. Prefer [`reserve`] if future
+ /// insertions are expected.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
+ /// buf.reserve_exact(10);
+ /// assert!(buf.capacity() >= 11);
+ /// ```
+ ///
+ /// [`reserve`]: #method.reserve
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve_exact(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+
+ /// Reserves capacity for at least `additional` more elements to be inserted in the given
+ /// `VecDeque`. The collection may reserve more space to avoid frequent reallocations.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
+ /// buf.reserve(10);
+ /// assert!(buf.capacity() >= 11);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve(&mut self, additional: usize) {
+ let old_cap = self.cap();
+ let used_cap = self.len() + 1;
+ let new_cap = used_cap
+ .checked_add(additional)
+ .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
+ .expect("capacity overflow");
+
+ if new_cap > old_cap {
+ self.buf.reserve_exact(used_cap, new_cap - used_cap);
+ unsafe {
+ self.handle_capacity_increase(old_cap);
+ }
+ }
+ }
+
+ /// Tries to reserve the minimum capacity for exactly `additional` more elements to
+ /// be inserted in the given `VecDeque<T>`. After calling `reserve_exact`,
+ /// capacity will be greater than or equal to `self.len() + additional`.
+ /// Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it
+ /// requests. Therefore, capacity can not be relied upon to be precisely
+ /// minimal. Prefer `reserve` if future insertions are expected.
+ ///
+ /// # Errors
+ ///
+ /// If the capacity overflows `usize`, or the allocator reports a failure, then an error
+ /// is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_reserve)]
+ /// use std::collections::TryReserveError;
+ /// use std::collections::VecDeque;
+ ///
+ /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> {
+ /// let mut output = VecDeque::new();
+ ///
+ /// // Pre-reserve the memory, exiting if we can't
+ /// output.try_reserve_exact(data.len())?;
+ ///
+ /// // Now we know this can't OOM(Out-Of-Memory) in the middle of our complex work
+ /// output.extend(data.iter().map(|&val| {
+ /// val * 2 + 5 // very complicated
+ /// }));
+ ///
+ /// Ok(output)
+ /// }
+ /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+ /// ```
+ #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+ pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ self.try_reserve(additional)
+ }
+
+ /// Tries to reserve capacity for at least `additional` more elements to be inserted
+ /// in the given `VecDeque<T>`. The collection may reserve more space to avoid
+ /// frequent reallocations. After calling `reserve`, capacity will be
+ /// greater than or equal to `self.len() + additional`. Does nothing if
+ /// capacity is already sufficient.
+ ///
+ /// # Errors
+ ///
+ /// If the capacity overflows `usize`, or the allocator reports a failure, then an error
+ /// is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_reserve)]
+ /// use std::collections::TryReserveError;
+ /// use std::collections::VecDeque;
+ ///
+ /// fn process_data(data: &[u32]) -> Result<VecDeque<u32>, TryReserveError> {
+ /// let mut output = VecDeque::new();
+ ///
+ /// // Pre-reserve the memory, exiting if we can't
+ /// output.try_reserve(data.len())?;
+ ///
+ /// // Now we know this can't OOM in the middle of our complex work
+ /// output.extend(data.iter().map(|&val| {
+ /// val * 2 + 5 // very complicated
+ /// }));
+ ///
+ /// Ok(output)
+ /// }
+ /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+ /// ```
+ #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+ pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ let old_cap = self.cap();
+ let used_cap = self.len() + 1;
+ let new_cap = used_cap
+ .checked_add(additional)
+ .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
+ .ok_or(TryReserveError::CapacityOverflow)?;
+
+ if new_cap > old_cap {
+ self.buf.try_reserve_exact(used_cap, new_cap - used_cap)?;
+ unsafe {
+ self.handle_capacity_increase(old_cap);
+ }
+ }
+ Ok(())
+ }
+
+ /// Shrinks the capacity of the `VecDeque` as much as possible.
+ ///
+ /// It will drop down as close as possible to the length but the allocator may still inform the
+ /// `VecDeque` that there is space for a few more elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::with_capacity(15);
+ /// buf.extend(0..4);
+ /// assert_eq!(buf.capacity(), 15);
+ /// buf.shrink_to_fit();
+ /// assert!(buf.capacity() >= 4);
+ /// ```
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn shrink_to_fit(&mut self) {
+ self.shrink_to(0);
+ }
+
+ /// Shrinks the capacity of the `VecDeque` with a lower bound.
+ ///
+ /// The capacity will remain at least as large as both the length
+ /// and the supplied value.
+ ///
+ /// Panics if the current capacity is smaller than the supplied
+ /// minimum capacity.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(shrink_to)]
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::with_capacity(15);
+ /// buf.extend(0..4);
+ /// assert_eq!(buf.capacity(), 15);
+ /// buf.shrink_to(6);
+ /// assert!(buf.capacity() >= 6);
+ /// buf.shrink_to(0);
+ /// assert!(buf.capacity() >= 4);
+ /// ```
+ #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
+ pub fn shrink_to(&mut self, min_capacity: usize) {
+ assert!(self.capacity() >= min_capacity, "Tried to shrink to a larger capacity");
+
+ // +1 since the ringbuffer always leaves one space empty
+ // len + 1 can't overflow for an existing, well-formed ringbuffer.
+ let target_cap = cmp::max(cmp::max(min_capacity, self.len()) + 1, MINIMUM_CAPACITY + 1)
+ .next_power_of_two();
+
+ if target_cap < self.cap() {
+ // There are three cases of interest:
+ // All elements are out of desired bounds
+ // Elements are contiguous, and head is out of desired bounds
+ // Elements are discontiguous, and tail is out of desired bounds
+ //
+ // At all other times, element positions are unaffected.
+ //
+ // Indicates that elements at the head should be moved.
+ let head_outside = self.head == 0 || self.head >= target_cap;
+ // Move elements from out of desired bounds (positions after target_cap)
+ if self.tail >= target_cap && head_outside {
+ // T H
+ // [. . . . . . . . o o o o o o o . ]
+ // T H
+ // [o o o o o o o . ]
+ unsafe {
+ self.copy_nonoverlapping(0, self.tail, self.len());
+ }
+ self.head = self.len();
+ self.tail = 0;
+ } else if self.tail != 0 && self.tail < target_cap && head_outside {
+ // T H
+ // [. . . o o o o o o o . . . . . . ]
+ // H T
+ // [o o . o o o o o ]
+ let len = self.wrap_sub(self.head, target_cap);
+ unsafe {
+ self.copy_nonoverlapping(0, target_cap, len);
+ }
+ self.head = len;
+ debug_assert!(self.head < self.tail);
+ } else if self.tail >= target_cap {
+ // H T
+ // [o o o o o . . . . . . . . . o o ]
+ // H T
+ // [o o o o o . o o ]
+ debug_assert!(self.wrap_sub(self.head, 1) < target_cap);
+ let len = self.cap() - self.tail;
+ let new_tail = target_cap - len;
+ unsafe {
+ self.copy_nonoverlapping(new_tail, self.tail, len);
+ }
+ self.tail = new_tail;
+ debug_assert!(self.head < self.tail);
+ }
+
+ self.buf.shrink_to_fit(target_cap);
+
+ debug_assert!(self.head < self.cap());
+ debug_assert!(self.tail < self.cap());
+ debug_assert!(self.cap().count_ones() == 1);
+ }
+ }
+
+ /// Shortens the `VecDeque`, keeping the first `len` elements and dropping
+ /// the rest.
+ ///
+ /// If `len` is greater than the `VecDeque`'s current length, this has no
+ /// effect.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(10);
+ /// buf.push_back(15);
+ /// assert_eq!(buf, [5, 10, 15]);
+ /// buf.truncate(1);
+ /// assert_eq!(buf, [5]);
+ /// ```
+ #[stable(feature = "deque_extras", since = "1.16.0")]
+ pub fn truncate(&mut self, len: usize) {
+ /// Runs the destructor for all items in the slice when it gets dropped (normally or
+ /// during unwinding).
+ struct Dropper<'a, T>(&'a mut [T]);
+
+ impl<'a, T> Drop for Dropper<'a, T> {
+ fn drop(&mut self) {
+ unsafe {
+ ptr::drop_in_place(self.0);
+ }
+ }
+ }
+
+ // Safe because:
+ //
+ // * Any slice passed to `drop_in_place` is valid; the second case has
+ // `len <= front.len()` and returning on `len > self.len()` ensures
+ // `begin <= back.len()` in the first case
+ // * The head of the VecDeque is moved before calling `drop_in_place`,
+ // so no value is dropped twice if `drop_in_place` panics
+ unsafe {
+ if len > self.len() {
+ return;
+ }
+ let num_dropped = self.len() - len;
+ let (front, back) = self.as_mut_slices();
+ if len > front.len() {
+ let begin = len - front.len();
+ let drop_back = back.get_unchecked_mut(begin..) as *mut _;
+ self.head = self.wrap_sub(self.head, num_dropped);
+ ptr::drop_in_place(drop_back);
+ } else {
+ let drop_back = back as *mut _;
+ let drop_front = front.get_unchecked_mut(len..) as *mut _;
+ self.head = self.wrap_sub(self.head, num_dropped);
+
+ // Make sure the second half is dropped even when a destructor
+ // in the first one panics.
+ let _back_dropper = Dropper(&mut *drop_back);
+ ptr::drop_in_place(drop_front);
+ }
+ }
+ }
+
+ /// Returns a front-to-back iterator.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// let b: &[_] = &[&5, &3, &4];
+ /// let c: Vec<&i32> = buf.iter().collect();
+ /// assert_eq!(&c[..], b);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter(&self) -> Iter<'_, T> {
+ Iter { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_slice() } }
+ }
+
+ /// Returns a front-to-back iterator that returns mutable references.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(3);
+ /// buf.push_back(4);
+ /// for num in buf.iter_mut() {
+ /// *num = *num - 2;
+ /// }
+ /// let b: &[_] = &[&mut 3, &mut 1, &mut 2];
+ /// assert_eq!(&buf.iter_mut().collect::<Vec<&mut i32>>()[..], b);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn iter_mut(&mut self) -> IterMut<'_, T> {
+ IterMut { tail: self.tail, head: self.head, ring: unsafe { self.buffer_as_mut_slice() } }
+ }
+
+ /// Returns a pair of slices which contain, in order, the contents of the
+ /// `VecDeque`.
+ ///
+ /// If [`make_contiguous`](#method.make_contiguous) was previously called, all elements
+ /// of the `VecDeque` will be in the first slice and the second slice will be empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut vector = VecDeque::new();
+ ///
+ /// vector.push_back(0);
+ /// vector.push_back(1);
+ /// vector.push_back(2);
+ ///
+ /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..]));
+ ///
+ /// vector.push_front(10);
+ /// vector.push_front(9);
+ ///
+ /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..]));
+ /// ```
+ #[inline]
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn as_slices(&self) -> (&[T], &[T]) {
+ unsafe {
+ let buf = self.buffer_as_slice();
+ RingSlices::ring_slices(buf, self.head, self.tail)
+ }
+ }
+
+ /// Returns a pair of slices which contain, in order, the contents of the
+ /// `VecDeque`.
+ ///
+ /// If [`make_contiguous`](#method.make_contiguous) was previously called, all elements
+ /// of the `VecDeque` will be in the first slice and the second slice will be empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut vector = VecDeque::new();
+ ///
+ /// vector.push_back(0);
+ /// vector.push_back(1);
+ ///
+ /// vector.push_front(10);
+ /// vector.push_front(9);
+ ///
+ /// vector.as_mut_slices().0[0] = 42;
+ /// vector.as_mut_slices().1[0] = 24;
+ /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..]));
+ /// ```
+ #[inline]
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) {
+ unsafe {
+ // Copy `head`/`tail` into locals first: `buffer_as_mut_slice` takes a
+ // mutable borrow of `self`, after which the fields could not be read.
+ let head = self.head;
+ let tail = self.tail;
+ let buf = self.buffer_as_mut_slice();
+ RingSlices::ring_slices(buf, head, tail)
+ }
+ }
+
+ /// Returns the number of elements in the `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v = VecDeque::new();
+ /// assert_eq!(v.len(), 0);
+ /// v.push_back(1);
+ /// assert_eq!(v.len(), 1);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn len(&self) -> usize {
+ // The logical length is the tail→head distance around the ring,
+ // computed modulo the buffer capacity by `count`.
+ count(self.tail, self.head, self.cap())
+ }
+
+ /// Returns `true` if the `VecDeque` is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v = VecDeque::new();
+ /// assert!(v.is_empty());
+ /// v.push_front(1);
+ /// assert!(!v.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_empty(&self) -> bool {
+ // The deque is empty exactly when the tail and head indices coincide.
+ self.tail == self.head
+ }
+
+ // Resolves a `RangeBounds` over logical indices into a concrete
+ // `(start, end)` half-open pair, panicking if the range is inverted
+ // or extends past `len`. Shared by `range`, `range_mut` and `drain`.
+ fn range_start_end<R>(&self, range: R) -> (usize, usize)
+ where
+ R: RangeBounds<usize>,
+ {
+ let len = self.len();
+ let start = match range.start_bound() {
+ Included(&n) => n,
+ Excluded(&n) => n + 1,
+ Unbounded => 0,
+ };
+ let end = match range.end_bound() {
+ Included(&n) => n + 1,
+ Excluded(&n) => n,
+ Unbounded => len,
+ };
+ assert!(start <= end, "lower bound was too large");
+ assert!(end <= len, "upper bound was too large");
+ (start, end)
+ }
+
+ /// Creates an iterator that covers the specified range in the `VecDeque`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(deque_range)]
+ ///
+ /// use std::collections::VecDeque;
+ ///
+ /// let v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+ /// let range = v.range(2..).copied().collect::<VecDeque<_>>();
+ /// assert_eq!(range, [3]);
+ ///
+ /// // A full range covers all contents
+ /// let all = v.range(..);
+ /// assert_eq!(all.len(), 3);
+ /// ```
+ #[inline]
+ #[unstable(feature = "deque_range", issue = "74217")]
+ pub fn range<R>(&self, range: R) -> Iter<'_, T>
+ where
+ R: RangeBounds<usize>,
+ {
+ let (start, end) = self.range_start_end(range);
+ // Translate the logical `start`/`end` indices into physical ring
+ // positions by offsetting from `self.tail` with wraparound.
+ let tail = self.wrap_add(self.tail, start);
+ let head = self.wrap_add(self.tail, end);
+ Iter {
+ tail,
+ head,
+ // The shared reference we have in &self is maintained in the '_ of Iter.
+ ring: unsafe { self.buffer_as_slice() },
+ }
+ }
+
+ /// Creates an iterator that covers the specified mutable range in the `VecDeque`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(deque_range)]
+ ///
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+ /// for v in v.range_mut(2..) {
+ /// *v *= 2;
+ /// }
+ /// assert_eq!(v, vec![1, 2, 6]);
+ ///
+ /// // A full range covers all contents
+ /// for v in v.range_mut(..) {
+ /// *v *= 2;
+ /// }
+ /// assert_eq!(v, vec![2, 4, 12]);
+ /// ```
+ #[inline]
+ #[unstable(feature = "deque_range", issue = "74217")]
+ pub fn range_mut<R>(&mut self, range: R) -> IterMut<'_, T>
+ where
+ R: RangeBounds<usize>,
+ {
+ let (start, end) = self.range_start_end(range);
+ // Same logical→physical translation as `range`, but yielding a
+ // mutable iterator over the selected span.
+ let tail = self.wrap_add(self.tail, start);
+ let head = self.wrap_add(self.tail, end);
+ IterMut {
+ tail,
+ head,
+ // The shared reference we have in &mut self is maintained in the '_ of IterMut.
+ ring: unsafe { self.buffer_as_mut_slice() },
+ }
+ }
+
+ /// Creates a draining iterator that removes the specified range in the
+ /// `VecDeque` and yields the removed items.
+ ///
+ /// Note 1: The element range is removed even if the iterator is not
+ /// consumed until the end.
+ ///
+ /// Note 2: It is unspecified how many elements are removed from the deque,
+ /// if the `Drain` value is not dropped, but the borrow it holds expires
+ /// (e.g., due to `mem::forget`).
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
+ /// let drained = v.drain(2..).collect::<VecDeque<_>>();
+ /// assert_eq!(drained, [3]);
+ /// assert_eq!(v, [1, 2]);
+ ///
+ /// // A full range clears all contents
+ /// v.drain(..);
+ /// assert!(v.is_empty());
+ /// ```
+ #[inline]
+ #[stable(feature = "drain", since = "1.6.0")]
+ pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
+ where
+ R: RangeBounds<usize>,
+ {
+ // Memory safety
+ //
+ // When the Drain is first created, the source deque is shortened to
+ // make sure no uninitialized or moved-from elements are accessible at
+ // all if the Drain's destructor never gets to run.
+ //
+ // Drain will ptr::read out the values to remove.
+ // When finished, the remaining data will be copied back to cover the hole,
+ // and the head/tail values will be restored correctly.
+ //
+ let (start, end) = self.range_start_end(range);
+
+ // The deque's elements are parted into three segments:
+ // * self.tail -> drain_tail
+ // * drain_tail -> drain_head
+ // * drain_head -> self.head
+ //
+ // T = self.tail; H = self.head; t = drain_tail; h = drain_head
+ //
+ // We store drain_tail as self.head, and drain_head and self.head as
+ // after_tail and after_head respectively on the Drain. This also
+ // truncates the effective array such that if the Drain is leaked, we
+ // have forgotten about the potentially moved values after the start of
+ // the drain.
+ //
+ // T t h H
+ // [. . . o o x x o o . . .]
+ //
+ let drain_tail = self.wrap_add(self.tail, start);
+ let drain_head = self.wrap_add(self.tail, end);
+ let head = self.head;
+
+ // "forget" about the values after the start of the drain until after
+ // the drain is complete and the Drain destructor is run.
+ self.head = drain_tail;
+
+ Drain {
+ // Raw pointer back to the deque so Drain's destructor can restore
+ // the segments after `drain_head` (see Iter comment below).
+ deque: NonNull::from(&mut *self),
+ after_tail: drain_head,
+ after_head: head,
+ iter: Iter {
+ tail: drain_tail,
+ head: drain_head,
+ // Crucially, we only create shared references from `self` here and read from
+ // it. We do not write to `self` nor reborrow to a mutable reference.
+ // Hence the raw pointer we created above, for `deque`, remains valid.
+ ring: unsafe { self.buffer_as_slice() },
+ },
+ }
+ }
+
+ /// Clears the `VecDeque`, removing all values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut v = VecDeque::new();
+ /// v.push_back(1);
+ /// v.clear();
+ /// assert!(v.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[inline]
+ pub fn clear(&mut self) {
+ // Truncating to length zero drops every element; capacity is retained.
+ self.truncate(0);
+ }
+
+ /// Returns `true` if the `VecDeque` contains an element equal to the
+ /// given value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut vector: VecDeque<u32> = VecDeque::new();
+ ///
+ /// vector.push_back(0);
+ /// vector.push_back(1);
+ ///
+ /// assert_eq!(vector.contains(&1), true);
+ /// assert_eq!(vector.contains(&10), false);
+ /// ```
+ #[stable(feature = "vec_deque_contains", since = "1.12.0")]
+ pub fn contains(&self, x: &T) -> bool
+ where
+ T: PartialEq<T>,
+ {
+ // Search both halves of the (possibly wrapped) ring buffer.
+ let (a, b) = self.as_slices();
+ a.contains(x) || b.contains(x)
+ }
+
+ /// Provides a reference to the front element, or `None` if the `VecDeque` is
+ /// empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// assert_eq!(d.front(), None);
+ ///
+ /// d.push_back(1);
+ /// d.push_back(2);
+ /// assert_eq!(d.front(), Some(&1));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn front(&self) -> Option<&T> {
+ // `self[0]` goes through the deque's indexing, which maps the logical
+ // index to the physical ring position.
+ if !self.is_empty() { Some(&self[0]) } else { None }
+ }
+
+ /// Provides a mutable reference to the front element, or `None` if the
+ /// `VecDeque` is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// assert_eq!(d.front_mut(), None);
+ ///
+ /// d.push_back(1);
+ /// d.push_back(2);
+ /// match d.front_mut() {
+ /// Some(x) => *x = 9,
+ /// None => (),
+ /// }
+ /// assert_eq!(d.front(), Some(&9));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn front_mut(&mut self) -> Option<&mut T> {
+ // Mutable counterpart of `front`: logical index 0 is the queue front.
+ if !self.is_empty() { Some(&mut self[0]) } else { None }
+ }
+
+ /// Provides a reference to the back element, or `None` if the `VecDeque` is
+ /// empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// assert_eq!(d.back(), None);
+ ///
+ /// d.push_back(1);
+ /// d.push_back(2);
+ /// assert_eq!(d.back(), Some(&2));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn back(&self) -> Option<&T> {
+ // The back element sits at logical index `len() - 1`; the emptiness
+ // check above guarantees the subtraction cannot underflow.
+ if !self.is_empty() { Some(&self[self.len() - 1]) } else { None }
+ }
+
+ /// Provides a mutable reference to the back element, or `None` if the
+ /// `VecDeque` is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// assert_eq!(d.back(), None);
+ ///
+ /// d.push_back(1);
+ /// d.push_back(2);
+ /// match d.back_mut() {
+ /// Some(x) => *x = 9,
+ /// None => (),
+ /// }
+ /// assert_eq!(d.back(), Some(&9));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn back_mut(&mut self) -> Option<&mut T> {
+ // `len` is computed up front so the subsequent `&mut self[...]`
+ // mutable borrow does not conflict with a call on `self`.
+ let len = self.len();
+ if !self.is_empty() { Some(&mut self[len - 1]) } else { None }
+ }
+
+ /// Removes the first element and returns it, or `None` if the `VecDeque` is
+ /// empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// d.push_back(1);
+ /// d.push_back(2);
+ ///
+ /// assert_eq!(d.pop_front(), Some(1));
+ /// assert_eq!(d.pop_front(), Some(2));
+ /// assert_eq!(d.pop_front(), None);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop_front(&mut self) -> Option<T> {
+ if self.is_empty() {
+ None
+ } else {
+ // Advance `tail` past the front slot, then move the value out of
+ // the old slot; that slot is now logically uninitialized.
+ let tail = self.tail;
+ self.tail = self.wrap_add(self.tail, 1);
+ unsafe { Some(self.buffer_read(tail)) }
+ }
+ }
+
+ /// Removes the last element from the `VecDeque` and returns it, or `None` if
+ /// it is empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// assert_eq!(buf.pop_back(), None);
+ /// buf.push_back(1);
+ /// buf.push_back(3);
+ /// assert_eq!(buf.pop_back(), Some(3));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop_back(&mut self) -> Option<T> {
+ if self.is_empty() {
+ None
+ } else {
+ // `head` points one past the last element (see `push_back`), so it
+ // must retreat first; the value is then moved out of the new `head`.
+ self.head = self.wrap_sub(self.head, 1);
+ let head = self.head;
+ unsafe { Some(self.buffer_read(head)) }
+ }
+ }
+
+ /// Prepends an element to the `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut d = VecDeque::new();
+ /// d.push_front(1);
+ /// d.push_front(2);
+ /// assert_eq!(d.front(), Some(&2));
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push_front(&mut self, value: T) {
+ // Grow first so the slot just before `tail` is guaranteed free.
+ if self.is_full() {
+ self.grow();
+ }
+
+ // Retreat `tail` to claim the new front slot, then write into it.
+ self.tail = self.wrap_sub(self.tail, 1);
+ let tail = self.tail;
+ unsafe {
+ self.buffer_write(tail, value);
+ }
+ }
+
+ /// Appends an element to the back of the `VecDeque`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(1);
+ /// buf.push_back(3);
+ /// assert_eq!(3, *buf.back().unwrap());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push_back(&mut self, value: T) {
+ // Grow first so the slot at `head` is guaranteed free.
+ if self.is_full() {
+ self.grow();
+ }
+
+ // Write into the current `head` slot, then advance `head` past it
+ // (mirror image of `push_front`).
+ let head = self.head;
+ self.head = self.wrap_add(self.head, 1);
+ unsafe { self.buffer_write(head, value) }
+ }
+
+ #[inline]
+ // True when the elements occupy a single unbroken run in the buffer,
+ // i.e. `tail..head` does not wrap around the end. An empty deque
+ // (`tail == head`) counts as contiguous.
+ fn is_contiguous(&self) -> bool {
+ self.tail <= self.head
+ }
+
+ /// Removes an element from anywhere in the `VecDeque` and returns it,
+ /// replacing it with the first element.
+ ///
+ /// This does not preserve ordering, but is *O*(1).
+ ///
+ /// Returns `None` if `index` is out of bounds.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// assert_eq!(buf.swap_remove_front(0), None);
+ /// buf.push_back(1);
+ /// buf.push_back(2);
+ /// buf.push_back(3);
+ /// assert_eq!(buf, [1, 2, 3]);
+ ///
+ /// assert_eq!(buf.swap_remove_front(2), Some(3));
+ /// assert_eq!(buf, [2, 1]);
+ /// ```
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn swap_remove_front(&mut self, index: usize) -> Option<T> {
+ // Move the target element to the front (no swap needed when it is
+ // already there), then pop it. Out-of-range indices return early.
+ let length = self.len();
+ if length > 0 && index < length && index != 0 {
+ self.swap(index, 0);
+ } else if index >= length {
+ return None;
+ }
+ self.pop_front()
+ }
+
+ /// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the
+ /// last element.
+ ///
+ /// This does not preserve ordering, but is *O*(1).
+ ///
+ /// Returns `None` if `index` is out of bounds.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// assert_eq!(buf.swap_remove_back(0), None);
+ /// buf.push_back(1);
+ /// buf.push_back(2);
+ /// buf.push_back(3);
+ /// assert_eq!(buf, [1, 2, 3]);
+ ///
+ /// assert_eq!(buf.swap_remove_back(0), Some(1));
+ /// assert_eq!(buf, [3, 2]);
+ /// ```
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn swap_remove_back(&mut self, index: usize) -> Option<T> {
+ // Mirror image of `swap_remove_front`: move the target to the back
+ // (skipping the swap when it is already last), then pop it.
+ let length = self.len();
+ if length > 0 && index < length - 1 {
+ self.swap(index, length - 1);
+ } else if index >= length {
+ return None;
+ }
+ self.pop_back()
+ }
+
+ /// Inserts an element at `index` within the `VecDeque`, shifting all elements with indices
+ /// greater than or equal to `index` towards the back.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is greater than `VecDeque`'s length
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut vec_deque = VecDeque::new();
+ /// vec_deque.push_back('a');
+ /// vec_deque.push_back('b');
+ /// vec_deque.push_back('c');
+ /// assert_eq!(vec_deque, &['a', 'b', 'c']);
+ ///
+ /// vec_deque.insert(1, 'd');
+ /// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']);
+ /// ```
+ #[stable(feature = "deque_extras_15", since = "1.5.0")]
+ pub fn insert(&mut self, index: usize, value: T) {
+ assert!(index <= self.len(), "index out of bounds");
+ // Grow before computing positions: growth changes the capacity and
+ // may relocate the ring layout.
+ if self.is_full() {
+ self.grow();
+ }
+
+ // Move the least number of elements in the ring buffer and insert
+ // the given object
+ //
+ // At most len/2 - 1 elements will be moved. O(min(n, n-i))
+ //
+ // There are three main cases:
+ // Elements are contiguous
+ // - special case when tail is 0
+ // Elements are discontiguous and the insert is in the tail section
+ // Elements are discontiguous and the insert is in the head section
+ //
+ // For each of those there are two more cases:
+ // Insert is closer to tail
+ // Insert is closer to head
+ //
+ // Key: H - self.head
+ // T - self.tail
+ // o - Valid element
+ // I - Insertion element
+ // A - The element that should be after the insertion point
+ // M - Indicates element was moved
+
+ // `idx` is the physical slot in the ring buffer for logical `index`.
+ let idx = self.wrap_add(self.tail, index);
+
+ let distance_to_tail = index;
+ let distance_to_head = self.len() - index;
+
+ let contiguous = self.is_contiguous();
+
+ match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
+ (true, true, _) if index == 0 => {
+ // push_front
+ //
+ // T
+ // I H
+ // [A o o o o o o . . . . . . . . .]
+ //
+ // H T
+ // [A o o o o o o o . . . . . I]
+ //
+
+ self.tail = self.wrap_sub(self.tail, 1);
+ }
+ (true, true, _) => {
+ unsafe {
+ // contiguous, insert closer to tail:
+ //
+ // T I H
+ // [. . . o o A o o o o . . . . . .]
+ //
+ // T H
+ // [. . o o I A o o o o . . . . . .]
+ // M M
+ //
+ // contiguous, insert closer to tail and tail is 0:
+ //
+ //
+ // T I H
+ // [o o A o o o o . . . . . . . . .]
+ //
+ // H T
+ // [o I A o o o o o . . . . . . . o]
+ // M M
+
+ let new_tail = self.wrap_sub(self.tail, 1);
+
+ self.copy(new_tail, self.tail, 1);
+ // Already moved the tail, so we only copy `index - 1` elements.
+ self.copy(self.tail, self.tail + 1, index - 1);
+
+ self.tail = new_tail;
+ }
+ }
+ (true, false, _) => {
+ unsafe {
+ // contiguous, insert closer to head:
+ //
+ // T I H
+ // [. . . o o o o A o o . . . . . .]
+ //
+ // T H
+ // [. . . o o o o I A o o . . . . .]
+ // M M M
+
+ self.copy(idx + 1, idx, self.head - idx);
+ self.head = self.wrap_add(self.head, 1);
+ }
+ }
+ (false, true, true) => {
+ unsafe {
+ // discontiguous, insert closer to tail, tail section:
+ //
+ // H T I
+ // [o o o o o o . . . . . o o A o o]
+ //
+ // H T
+ // [o o o o o o . . . . o o I A o o]
+ // M M
+
+ self.copy(self.tail - 1, self.tail, index);
+ self.tail -= 1;
+ }
+ }
+ (false, false, true) => {
+ unsafe {
+ // discontiguous, insert closer to head, tail section:
+ //
+ // H T I
+ // [o o . . . . . . . o o o o o A o]
+ //
+ // H T
+ // [o o o . . . . . . o o o o o I A]
+ // M M M M
+
+ // copy elements up to new head
+ self.copy(1, 0, self.head);
+
+ // copy last element into empty spot at bottom of buffer
+ self.copy(0, self.cap() - 1, 1);
+
+ // move elements from idx to end forward not including ^ element
+ self.copy(idx + 1, idx, self.cap() - 1 - idx);
+
+ self.head += 1;
+ }
+ }
+ (false, true, false) if idx == 0 => {
+ unsafe {
+ // discontiguous, insert is closer to tail, head section,
+ // and is at index zero in the internal buffer:
+ //
+ // I H T
+ // [A o o o o o o o o o . . . o o o]
+ //
+ // H T
+ // [A o o o o o o o o o . . o o o I]
+ // M M M
+
+ // copy elements up to new tail
+ self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
+
+ // copy last element into empty spot at bottom of buffer
+ self.copy(self.cap() - 1, 0, 1);
+
+ self.tail -= 1;
+ }
+ }
+ (false, true, false) => {
+ unsafe {
+ // discontiguous, insert closer to tail, head section:
+ //
+ // I H T
+ // [o o o A o o o o o o . . . o o o]
+ //
+ // H T
+ // [o o I A o o o o o o . . o o o o]
+ // M M M M M M
+
+ // copy elements up to new tail
+ self.copy(self.tail - 1, self.tail, self.cap() - self.tail);
+
+ // copy last element into empty spot at bottom of buffer
+ self.copy(self.cap() - 1, 0, 1);
+
+ // move elements from idx-1 to end forward not including ^ element
+ self.copy(0, 1, idx - 1);
+
+ self.tail -= 1;
+ }
+ }
+ (false, false, false) => {
+ unsafe {
+ // discontiguous, insert closer to head, head section:
+ //
+ // I H T
+ // [o o o o A o o . . . . . . o o o]
+ //
+ // H T
+ // [o o o o I A o o . . . . . o o o]
+ // M M M
+
+ self.copy(idx + 1, idx, self.head - idx);
+ self.head += 1;
+ }
+ }
+ }
+
+ // tail might've been changed so we need to recalculate
+ let new_idx = self.wrap_add(self.tail, index);
+ unsafe {
+ self.buffer_write(new_idx, value);
+ }
+ }
+
+ /// Removes and returns the element at `index` from the `VecDeque`.
+ /// Whichever end is closer to the removal point will be moved to make
+ /// room, and all the affected elements will be moved to new positions.
+ /// Returns `None` if `index` is out of bounds.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(1);
+ /// buf.push_back(2);
+ /// buf.push_back(3);
+ /// assert_eq!(buf, [1, 2, 3]);
+ ///
+ /// assert_eq!(buf.remove(1), Some(2));
+ /// assert_eq!(buf, [1, 3]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove(&mut self, index: usize) -> Option<T> {
+ if self.is_empty() || self.len() <= index {
+ return None;
+ }
+
+ // There are three main cases:
+ // Elements are contiguous
+ // Elements are discontiguous and the removal is in the tail section
+ // Elements are discontiguous and the removal is in the head section
+ // - special case when elements are technically contiguous,
+ // but self.head = 0
+ //
+ // For each of those there are two more cases:
+ // Insert is closer to tail
+ // Insert is closer to head
+ //
+ // Key: H - self.head
+ // T - self.tail
+ // o - Valid element
+ // x - Element marked for removal
+ // R - Indicates element that is being removed
+ // M - Indicates element was moved
+
+ let idx = self.wrap_add(self.tail, index);
+
+ // Move the value out first; the match below closes the hole this
+ // leaves, then adjusts head/tail so the slot is never read again.
+ let elem = unsafe { Some(self.buffer_read(idx)) };
+
+ let distance_to_tail = index;
+ let distance_to_head = self.len() - index;
+
+ let contiguous = self.is_contiguous();
+
+ match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
+ (true, true, _) => {
+ unsafe {
+ // contiguous, remove closer to tail:
+ //
+ // T R H
+ // [. . . o o x o o o o . . . . . .]
+ //
+ // T H
+ // [. . . . o o o o o o . . . . . .]
+ // M M
+
+ self.copy(self.tail + 1, self.tail, index);
+ self.tail += 1;
+ }
+ }
+ (true, false, _) => {
+ unsafe {
+ // contiguous, remove closer to head:
+ //
+ // T R H
+ // [. . . o o o o x o o . . . . . .]
+ //
+ // T H
+ // [. . . o o o o o o . . . . . . .]
+ // M M
+
+ self.copy(idx, idx + 1, self.head - idx - 1);
+ self.head -= 1;
+ }
+ }
+ (false, true, true) => {
+ unsafe {
+ // discontiguous, remove closer to tail, tail section:
+ //
+ // H T R
+ // [o o o o o o . . . . . o o x o o]
+ //
+ // H T
+ // [o o o o o o . . . . . . o o o o]
+ // M M
+
+ self.copy(self.tail + 1, self.tail, index);
+ self.tail = self.wrap_add(self.tail, 1);
+ }
+ }
+ (false, false, false) => {
+ unsafe {
+ // discontiguous, remove closer to head, head section:
+ //
+ // R H T
+ // [o o o o x o o . . . . . . o o o]
+ //
+ // H T
+ // [o o o o o o . . . . . . . o o o]
+ // M M
+
+ self.copy(idx, idx + 1, self.head - idx - 1);
+ self.head -= 1;
+ }
+ }
+ (false, false, true) => {
+ unsafe {
+ // discontiguous, remove closer to head, tail section:
+ //
+ // H T R
+ // [o o o . . . . . . o o o o o x o]
+ //
+ // H T
+ // [o o . . . . . . . o o o o o o o]
+ // M M M M
+ //
+ // or quasi-discontiguous, remove next to head, tail section:
+ //
+ // H T R
+ // [. . . . . . . . . o o o o o x o]
+ //
+ // T H
+ // [. . . . . . . . . o o o o o o .]
+ // M
+
+ // draw in elements in the tail section
+ self.copy(idx, idx + 1, self.cap() - idx - 1);
+
+ // Prevents underflow.
+ if self.head != 0 {
+ // copy first element into empty spot
+ self.copy(self.cap() - 1, 0, 1);
+
+ // move elements in the head section backwards
+ self.copy(0, 1, self.head - 1);
+ }
+
+ self.head = self.wrap_sub(self.head, 1);
+ }
+ }
+ (false, true, false) => {
+ unsafe {
+ // discontiguous, remove closer to tail, head section:
+ //
+ // R H T
+ // [o o x o o o o o o o . . . o o o]
+ //
+ // H T
+ // [o o o o o o o o o o . . . . o o]
+ // M M M M M
+
+ // draw in elements up to idx
+ self.copy(1, 0, idx);
+
+ // copy last element into empty spot
+ self.copy(0, self.cap() - 1, 1);
+
+ // move elements from tail to end forward, excluding the last one
+ self.copy(self.tail + 1, self.tail, self.cap() - self.tail - 1);
+
+ self.tail = self.wrap_add(self.tail, 1);
+ }
+ }
+ }
+
+ elem
+ }
+
+ /// Splits the `VecDeque` into two at the given index.
+ ///
+ /// Returns a newly allocated `VecDeque`. `self` contains elements `[0, at)`,
+ /// and the returned `VecDeque` contains elements `[at, len)`.
+ ///
+ /// Note that the capacity of `self` does not change.
+ ///
+ /// Element at index 0 is the front of the queue.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `at > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<_> = vec![1,2,3].into_iter().collect();
+ /// let buf2 = buf.split_off(1);
+ /// assert_eq!(buf, [1]);
+ /// assert_eq!(buf2, [2, 3]);
+ /// ```
+ #[inline]
+ #[must_use = "use `.truncate()` if you don't need the other half"]
+ #[stable(feature = "split_off", since = "1.4.0")]
+ pub fn split_off(&mut self, at: usize) -> Self {
+ let len = self.len();
+ assert!(at <= len, "`at` out of bounds");
+
+ let other_len = len - at;
+ let mut other = VecDeque::with_capacity(other_len);
+
+ unsafe {
+ // The elements past `at` are bitwise-moved into `other`'s fresh
+ // buffer; the head adjustment below then "forgets" them in `self`,
+ // so each element is owned by exactly one deque (no double drop).
+ let (first_half, second_half) = self.as_slices();
+
+ let first_len = first_half.len();
+ let second_len = second_half.len();
+ if at < first_len {
+ // `at` lies in the first half.
+ let amount_in_first = first_len - at;
+
+ ptr::copy_nonoverlapping(first_half.as_ptr().add(at), other.ptr(), amount_in_first);
+
+ // just take all of the second half.
+ ptr::copy_nonoverlapping(
+ second_half.as_ptr(),
+ other.ptr().add(amount_in_first),
+ second_len,
+ );
+ } else {
+ // `at` lies in the second half, need to factor in the elements we skipped
+ // in the first half.
+ let offset = at - first_len;
+ let amount_in_second = second_len - offset;
+ ptr::copy_nonoverlapping(
+ second_half.as_ptr().add(offset),
+ other.ptr(),
+ amount_in_second,
+ );
+ }
+ }
+
+ // Cleanup where the ends of the buffers are
+ self.head = self.wrap_sub(self.head, other_len);
+ other.head = other.wrap_index(other_len);
+
+ other
+ }
+
+ /// Moves all the elements of `other` into `self`, leaving `other` empty.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new number of elements in self overflows a `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<_> = vec![1, 2].into_iter().collect();
+ /// let mut buf2: VecDeque<_> = vec![3, 4].into_iter().collect();
+ /// buf.append(&mut buf2);
+ /// assert_eq!(buf, [1, 2, 3, 4]);
+ /// assert_eq!(buf2, []);
+ /// ```
+ #[inline]
+ #[stable(feature = "append", since = "1.4.0")]
+ pub fn append(&mut self, other: &mut Self) {
+ // naive impl
+ // `drain(..)` logically detaches all of `other`'s elements up front;
+ // `extend` then moves them one by one into `self`.
+ self.extend(other.drain(..));
+ }
+
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// In other words, remove all elements `e` such that `f(&e)` returns false.
+ /// This method operates in place, visiting each element exactly once in the
+ /// original order, and preserves the order of the retained elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.extend(1..5);
+ /// buf.retain(|&x| x % 2 == 0);
+ /// assert_eq!(buf, [2, 4]);
+ /// ```
+ ///
+ /// The exact order may be useful for tracking external state, like an index.
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.extend(1..6);
+ ///
+ /// let keep = [false, true, true, false, true];
+ /// let mut i = 0;
+ /// buf.retain(|_| (keep[i], i += 1).0);
+ /// assert_eq!(buf, [2, 3, 5]);
+ /// ```
+ #[stable(feature = "vec_deque_retain", since = "1.4.0")]
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&T) -> bool,
+ {
+ // Compaction scheme: `del` counts rejected elements seen so far; each
+ // kept element is swapped `del` positions towards the front, so after
+ // the loop the first `len - del` slots hold the survivors in order.
+ let len = self.len();
+ let mut del = 0;
+ for i in 0..len {
+ if !f(&self[i]) {
+ del += 1;
+ } else if del > 0 {
+ self.swap(i - del, i);
+ }
+ }
+ // Drop the rejected elements that were swapped to the back.
+ if del > 0 {
+ self.truncate(len - del);
+ }
+ }
+
+ // Doubles the buffer capacity when the deque is full.
+ // This may panic or abort
+ #[inline(never)]
+ fn grow(&mut self) {
+ if self.is_full() {
+ let old_cap = self.cap();
+ // Double the buffer size.
+ self.buf.reserve_exact(old_cap, old_cap);
+ assert!(self.cap() == old_cap * 2);
+ unsafe {
+ // Fixes up the ring layout for the enlarged buffer — presumably
+ // re-homing a wrapped head/tail segment; see `handle_capacity_increase`.
+ self.handle_capacity_increase(old_cap);
+ }
+ debug_assert!(!self.is_full());
+ }
+ }
+
+ /// Modifies the `VecDeque` in-place so that `len()` is equal to `new_len`,
+ /// either by removing excess elements from the back or by appending
+ /// elements generated by calling `generator` to the back.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(10);
+ /// buf.push_back(15);
+ /// assert_eq!(buf, [5, 10, 15]);
+ ///
+ /// buf.resize_with(5, Default::default);
+ /// assert_eq!(buf, [5, 10, 15, 0, 0]);
+ ///
+ /// buf.resize_with(2, || unreachable!());
+ /// assert_eq!(buf, [5, 10]);
+ ///
+ /// let mut state = 100;
+ /// buf.resize_with(5, || { state += 1; state });
+ /// assert_eq!(buf, [5, 10, 101, 102, 103]);
+ /// ```
+ #[stable(feature = "vec_resize_with", since = "1.33.0")]
+ pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) {
+ let len = self.len();
+
+ // Growing appends `new_len - len` generated values; shrinking simply
+ // truncates, so `generator` is never invoked in that case.
+ if new_len > len {
+ self.extend(repeat_with(generator).take(new_len - len))
+ } else {
+ self.truncate(new_len);
+ }
+ }
+
+ /// Rearranges the internal storage of this deque so it is one contiguous slice, which is then returned.
+ ///
+ /// This method does not allocate and does not change the order of the inserted elements.
+ /// As it returns a mutable slice, this can be used to sort or binary search a deque.
+ ///
+ /// Once the internal storage is contiguous, the [`as_slices`](#method.as_slices) and
+ /// [`as_mut_slices`](#method.as_mut_slices) methods will return the entire contents of the
+ /// `VecDeque` in a single slice.
+ ///
+ /// # Examples
+ ///
+ /// Sorting the content of a deque.
+ ///
+ /// ```
+ /// #![feature(deque_make_contiguous)]
+ ///
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::with_capacity(15);
+ ///
+ /// buf.push_back(2);
+ /// buf.push_back(1);
+ /// buf.push_front(3);
+ ///
+ /// // sorting the deque
+ /// buf.make_contiguous().sort();
+ /// assert_eq!(buf.as_slices(), (&[1, 2, 3] as &[_], &[] as &[_]));
+ ///
+ /// // sorting it in reverse order
+ /// buf.make_contiguous().sort_by(|a, b| b.cmp(a));
+ /// assert_eq!(buf.as_slices(), (&[3, 2, 1] as &[_], &[] as &[_]));
+ /// ```
+ ///
+ /// Getting immutable access to the contiguous slice.
+ ///
+ /// ```rust
+ /// #![feature(deque_make_contiguous)]
+ ///
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ ///
+ /// buf.push_back(2);
+ /// buf.push_back(1);
+ /// buf.push_front(3);
+ ///
+ /// buf.make_contiguous();
+ /// if let (slice, &[]) = buf.as_slices() {
+ /// // we can now be sure that `slice` contains all elements of the deque,
+ /// // while still having immutable access to `buf`.
+ /// assert_eq!(buf.len(), slice.len());
+ /// assert_eq!(slice, &[3, 2, 1] as &[_]);
+ /// }
+ /// ```
+ // NOTE(review): later upstream versions reworked this routine for a
+ // soundness fix (rust-lang/rust#83357); verify this snapshot against it.
+ #[unstable(feature = "deque_make_contiguous", issue = "70929")]
+ pub fn make_contiguous(&mut self) -> &mut [T] {
+ // Fast path: already one unbroken run, just return it.
+ if self.is_contiguous() {
+ let tail = self.tail;
+ let head = self.head;
+ return unsafe { &mut self.buffer_as_mut_slice()[tail..head] };
+ }
+
+ let buf = self.buf.ptr();
+ let cap = self.cap();
+ let len = self.len();
+
+ // Past the early return above, the buffer wraps (tail > head), so
+ // this subtraction cannot underflow: `free` is the gap between the
+ // head and tail sections.
+ let free = self.tail - self.head;
+ let tail_len = cap - self.tail;
+
+ if free >= tail_len {
+ // there is enough free space to copy the tail in one go,
+ // this means that we first shift the head backwards, and then
+ // copy the tail to the correct position.
+ //
+ // from: DEFGH....ABC
+ // to: ABCDEFGH....
+ unsafe {
+ ptr::copy(buf, buf.add(tail_len), self.head);
+ // ...DEFGH.ABC
+ ptr::copy_nonoverlapping(buf.add(self.tail), buf, tail_len);
+ // ABCDEFGH....
+
+ self.tail = 0;
+ self.head = len;
+ }
+ } else if free >= self.head {
+ // there is enough free space to copy the head in one go,
+ // this means that we first shift the tail forwards, and then
+ // copy the head to the correct position.
+ //
+ // from: FGH....ABCDE
+ // to: ...ABCDEFGH.
+ unsafe {
+ ptr::copy(buf.add(self.tail), buf.add(self.head), tail_len);
+ // FGHABCDE....
+ ptr::copy_nonoverlapping(buf, buf.add(self.head + tail_len), self.head);
+ // ...ABCDEFGH.
+
+ self.tail = self.head;
+ self.head = self.tail + len;
+ }
+ } else {
+ // free is smaller than both head and tail,
+ // this means we have to slowly "swap" the tail and the head.
+ //
+ // from: EFGHI...ABCD or HIJK.ABCDEFG
+ // to: ABCDEFGHI... or ABCDEFGHIJK.
+ let mut left_edge: usize = 0;
+ let mut right_edge: usize = self.tail;
+ unsafe {
+ // The general problem looks like this
+ // GHIJKLM...ABCDEF - before any swaps
+ // ABCDEFM...GHIJKL - after 1 pass of swaps
+ // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store
+ // - then restart the algorithm with a new (smaller) store
+ // Sometimes the temp store is reached when the right edge is at the end
+ // of the buffer - this means we've hit the right order with fewer swaps!
+ // E.g
+ // EF..ABCD
+ // ABCDEF.. - after four only swaps we've finished
+ while left_edge < len && right_edge != cap {
+ let mut right_offset = 0;
+ for i in left_edge..right_edge {
+ right_offset = (i - left_edge) % (cap - right_edge);
+ let src: isize = (right_edge + right_offset) as isize;
+ ptr::swap(buf.add(i), buf.offset(src));
+ }
+ let n_ops = right_edge - left_edge;
+ left_edge += n_ops;
+ right_edge += right_offset + 1;
+ }
+
+ self.tail = 0;
+ self.head = len;
+ }
+ }
+
+ // Re-read the (now normalized) bounds and hand back the single run.
+ let tail = self.tail;
+ let head = self.head;
+ unsafe { &mut self.buffer_as_mut_slice()[tail..head] }
+ }
+
+ /// Rotates the double-ended queue `mid` places to the left.
+ ///
+ /// Equivalently,
+ /// - Rotates item `mid` into the first position.
+ /// - Pops the first `mid` items and pushes them to the end.
+ /// - Rotates `len() - mid` places to the right.
+ ///
+ /// # Panics
+ ///
+ /// If `mid` is greater than `len()`. Note that `mid == len()`
+ /// does _not_ panic and is a no-op rotation.
+ ///
+ /// # Complexity
+ ///
+ /// Takes `*O*(min(mid, len() - mid))` time and no extra space.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<_> = (0..10).collect();
+ ///
+ /// buf.rotate_left(3);
+ /// assert_eq!(buf, [3, 4, 5, 6, 7, 8, 9, 0, 1, 2]);
+ ///
+ /// for i in 1..10 {
+ /// assert_eq!(i * 3 % 10, buf[0]);
+ /// buf.rotate_left(3);
+ /// }
+ /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+ /// ```
+ #[stable(feature = "vecdeque_rotate", since = "1.36.0")]
+ pub fn rotate_left(&mut self, mid: usize) {
+ assert!(mid <= self.len());
+ // `k` is the cost of the equivalent right rotation; dispatch to whichever
+ // direction moves fewer elements (both inner fns require amount <= len/2).
+ let k = self.len() - mid;
+ if mid <= k {
+ unsafe { self.rotate_left_inner(mid) }
+ } else {
+ unsafe { self.rotate_right_inner(k) }
+ }
+ }
+
+ /// Rotates the double-ended queue `k` places to the right.
+ ///
+ /// Equivalently,
+ /// - Rotates the first item into position `k`.
+ /// - Pops the last `k` items and pushes them to the front.
+ /// - Rotates `len() - k` places to the left.
+ ///
+ /// # Panics
+ ///
+ /// If `k` is greater than `len()`. Note that `k == len()`
+ /// does _not_ panic and is a no-op rotation.
+ ///
+ /// # Complexity
+ ///
+ /// Takes `*O*(min(k, len() - k))` time and no extra space.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf: VecDeque<_> = (0..10).collect();
+ ///
+ /// buf.rotate_right(3);
+ /// assert_eq!(buf, [7, 8, 9, 0, 1, 2, 3, 4, 5, 6]);
+ ///
+ /// for i in 1..10 {
+ /// assert_eq!(0, buf[i * 3 % 10]);
+ /// buf.rotate_right(3);
+ /// }
+ /// assert_eq!(buf, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
+ /// ```
+ #[stable(feature = "vecdeque_rotate", since = "1.36.0")]
+ pub fn rotate_right(&mut self, k: usize) {
+ assert!(k <= self.len());
+ // Mirror of `rotate_left`: pick the cheaper direction.
+ let mid = self.len() - k;
+ if k <= mid {
+ unsafe { self.rotate_right_inner(k) }
+ } else {
+ unsafe { self.rotate_left_inner(mid) }
+ }
+ }
+
+ // Safety: the following two methods require that the rotation amount
+ // be less than half the length of the deque.
+ //
+ // `wrap_copy` requires that `min(x, cap() - x) + copy_len <= cap()`,
+ // but that `min` is never more than half the capacity, regardless of x,
+ // so it's sound to call here because we're calling with something
+ // less than half the length, which is never above half the capacity.
+
+ unsafe fn rotate_left_inner(&mut self, mid: usize) {
+ debug_assert!(mid * 2 <= self.len());
+ // Duplicate the first `mid` elements past the head, then slide both
+ // indices forward so those elements become the new back of the deque.
+ unsafe {
+ self.wrap_copy(self.head, self.tail, mid);
+ }
+ self.head = self.wrap_add(self.head, mid);
+ self.tail = self.wrap_add(self.tail, mid);
+ }
+
+ unsafe fn rotate_right_inner(&mut self, k: usize) {
+ debug_assert!(k * 2 <= self.len());
+ // Slide both indices backward first, then copy the old back `k`
+ // elements into the newly exposed front slots.
+ self.head = self.wrap_sub(self.head, k);
+ self.tail = self.wrap_sub(self.tail, k);
+ unsafe {
+ self.wrap_copy(self.tail, self.head, k);
+ }
+ }
+}
+
+impl<T: Clone> VecDeque<T> {
+ /// Modifies the `VecDeque` in-place so that `len()` is equal to new_len,
+ /// either by removing excess elements from the back or by appending clones of `value`
+ /// to the back.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// let mut buf = VecDeque::new();
+ /// buf.push_back(5);
+ /// buf.push_back(10);
+ /// buf.push_back(15);
+ /// assert_eq!(buf, [5, 10, 15]);
+ ///
+ /// buf.resize(2, 0);
+ /// assert_eq!(buf, [5, 10]);
+ ///
+ /// buf.resize(5, 20);
+ /// assert_eq!(buf, [5, 10, 20, 20, 20]);
+ /// ```
+ #[stable(feature = "deque_extras", since = "1.16.0")]
+ pub fn resize(&mut self, new_len: usize, value: T) {
+ // Delegates to `resize_with`; `value` itself is never consumed — every
+ // appended slot (including the last) gets a fresh clone from the closure.
+ self.resize_with(new_len, || value.clone());
+ }
+}
+
+/// Returns the index in the underlying buffer for a given logical element index.
+#[inline]
+fn wrap_index(index: usize, size: usize) -> usize {
+ // size is always a power of 2, so masking is equivalent to (and cheaper
+ // than) `index % size`.
+ debug_assert!(size.is_power_of_two());
+ index & (size - 1)
+}
+
+/// Returns the two slices that cover the `VecDeque`'s valid range
+trait RingSlices: Sized {
+ fn slice(self, from: usize, to: usize) -> Self;
+ fn split_at(self, i: usize) -> (Self, Self);
+
+ fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) {
+ // `tail <= head` means the elements form one run at buf[tail..head];
+ // otherwise they wrap: buf[tail..] is the front part, buf[..head] the back.
+ let contiguous = tail <= head;
+ if contiguous {
+ let (empty, buf) = buf.split_at(0);
+ (buf.slice(tail, head), empty)
+ } else {
+ let (mid, right) = buf.split_at(tail);
+ let (left, _) = mid.split_at(head);
+ (right, left)
+ }
+ }
+}
+
+// Shared and mutable slices get the same treatment; the trait exists so the
+// wrap/split logic above is written only once for both.
+impl<T> RingSlices for &[T] {
+ fn slice(self, from: usize, to: usize) -> Self {
+ &self[from..to]
+ }
+ fn split_at(self, i: usize) -> (Self, Self) {
+ (*self).split_at(i)
+ }
+}
+
+impl<T> RingSlices for &mut [T] {
+ fn slice(self, from: usize, to: usize) -> Self {
+ &mut self[from..to]
+ }
+ fn split_at(self, i: usize) -> (Self, Self) {
+ (*self).split_at_mut(i)
+ }
+}
+
+/// Calculate the number of elements left to be read in the buffer
+#[inline]
+fn count(tail: usize, head: usize, size: usize) -> usize {
+ // size is always a power of 2; the mask makes the subtraction correct
+ // even when `head` has wrapped around below `tail`.
+ (head.wrapping_sub(tail)) & (size - 1)
+}
+
+/// An iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`iter`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`iter`]: struct.VecDeque.html#method.iter
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Iter<'a, T: 'a> {
+ // The whole backing buffer; `tail`/`head` delimit the live elements.
+ ring: &'a [T],
+ // Index of the next element to yield from the front.
+ tail: usize,
+ // One past the next element to yield from the back; tail == head => done.
+ head: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Iter<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Debug-print only the not-yet-yielded elements (front run, back run).
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ f.debug_tuple("Iter").field(&front).field(&back).finish()
+ }
+}
+
+// FIXME(#26925) Remove in favor of `#[derive(Clone)]`
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Clone for Iter<'_, T> {
+ fn clone(&self) -> Self {
+ Iter { ring: self.ring, tail: self.tail, head: self.head }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a T> {
+ if self.tail == self.head {
+ return None;
+ }
+ let tail = self.tail;
+ self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
+ // SAFETY: `tail != head`, so `tail` indexes a live element of the ring.
+ unsafe { Some(self.ring.get_unchecked(tail)) }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = count(self.tail, self.head, self.ring.len());
+ (len, Some(len))
+ }
+
+ fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+ where
+ F: FnMut(Acc, Self::Item) -> Acc,
+ {
+ // Fold each contiguous run with the optimized slice fold.
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ accum = front.iter().fold(accum, &mut f);
+ back.iter().fold(accum, &mut f)
+ }
+
+ fn try_fold<B, F, R>(&mut self, init: B, mut f: F) -> R
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> R,
+ R: Try<Ok = B>,
+ {
+ let (mut iter, final_res);
+ if self.tail <= self.head {
+ // single slice self.ring[self.tail..self.head]
+ iter = self.ring[self.tail..self.head].iter();
+ final_res = iter.try_fold(init, &mut f);
+ } else {
+ // two slices: self.ring[self.tail..], self.ring[..self.head]
+ let (front, back) = self.ring.split_at(self.tail);
+ let mut back_iter = back.iter();
+ let res = back_iter.try_fold(init, &mut f);
+ let len = self.ring.len();
+ // Record how far we got before possibly short-circuiting below, so
+ // an early return via `res?` leaves `self.tail` consistent.
+ self.tail = (self.ring.len() - back_iter.len()) & (len - 1);
+ iter = front[..self.head].iter();
+ final_res = iter.try_fold(res?, &mut f);
+ }
+ // Whatever remains unconsumed sits directly before `head`.
+ self.tail = self.head - iter.len();
+ final_res
+ }
+
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {
+ if n >= count(self.tail, self.head, self.ring.len()) {
+ // Exhaust the iterator if `n` is out of range.
+ self.tail = self.head;
+ None
+ } else {
+ // Skip `n` elements in O(1) by jumping the tail index.
+ self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len());
+ self.next()
+ }
+ }
+
+ #[inline]
+ fn last(mut self) -> Option<&'a T> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a T> {
+ if self.tail == self.head {
+ return None;
+ }
+ // `head` is one-past-the-end, so decrement first, then read.
+ self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
+ unsafe { Some(self.ring.get_unchecked(self.head)) }
+ }
+
+ fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+ where
+ F: FnMut(Acc, Self::Item) -> Acc,
+ {
+ // Reverse order: back run first, then the front run.
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ accum = back.iter().rfold(accum, &mut f);
+ front.iter().rfold(accum, &mut f)
+ }
+
+ fn try_rfold<B, F, R>(&mut self, init: B, mut f: F) -> R
+ where
+ Self: Sized,
+ F: FnMut(B, Self::Item) -> R,
+ R: Try<Ok = B>,
+ {
+ let (mut iter, final_res);
+ if self.tail <= self.head {
+ // single slice self.ring[self.tail..self.head]
+ iter = self.ring[self.tail..self.head].iter();
+ final_res = iter.try_rfold(init, &mut f);
+ } else {
+ // two slices: self.ring[self.tail..], self.ring[..self.head]
+ let (front, back) = self.ring.split_at(self.tail);
+ let mut front_iter = front[..self.head].iter();
+ let res = front_iter.try_rfold(init, &mut f);
+ // Keep `head` consistent in case `res?` short-circuits below.
+ self.head = front_iter.len();
+ iter = back.iter();
+ final_res = iter.try_rfold(res?, &mut f);
+ }
+ // Whatever remains unconsumed starts at `tail`.
+ self.head = self.tail + iter.len();
+ final_res
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for Iter<'_, T> {
+ fn is_empty(&self) -> bool {
+ self.head == self.tail
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Iter<'_, T> {}
+
+/// A mutable iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`iter_mut`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`iter_mut`]: struct.VecDeque.html#method.iter_mut
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IterMut<'a, T: 'a> {
+ // Same layout as `Iter`, but with exclusive access to the buffer.
+ ring: &'a mut [T],
+ tail: usize,
+ head: usize,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IterMut<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Reborrow shared (`&*`) so we can use the `&[T]` RingSlices impl.
+ let (front, back) = RingSlices::ring_slices(&*self.ring, self.head, self.tail);
+ f.debug_tuple("IterMut").field(&front).field(&back).finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> Iterator for IterMut<'a, T> {
+ type Item = &'a mut T;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a mut T> {
+ if self.tail == self.head {
+ return None;
+ }
+ let tail = self.tail;
+ self.tail = wrap_index(self.tail.wrapping_add(1), self.ring.len());
+
+ unsafe {
+ // Round-trip through a raw pointer to detach the element's lifetime
+ // from the short `&mut self` borrow; each index is yielded at most
+ // once, so the returned `&'a mut T`s never alias.
+ let elem = self.ring.get_unchecked_mut(tail);
+ Some(&mut *(elem as *mut _))
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = count(self.tail, self.head, self.ring.len());
+ (len, Some(len))
+ }
+
+ fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+ where
+ F: FnMut(Acc, Self::Item) -> Acc,
+ {
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ accum = front.iter_mut().fold(accum, &mut f);
+ back.iter_mut().fold(accum, &mut f)
+ }
+
+ fn nth(&mut self, n: usize) -> Option<Self::Item> {
+ if n >= count(self.tail, self.head, self.ring.len()) {
+ self.tail = self.head;
+ None
+ } else {
+ // Skip `n` elements in O(1) by jumping the tail index.
+ self.tail = wrap_index(self.tail.wrapping_add(n), self.ring.len());
+ self.next()
+ }
+ }
+
+ #[inline]
+ fn last(mut self) -> Option<&'a mut T> {
+ self.next_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> DoubleEndedIterator for IterMut<'a, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a mut T> {
+ if self.tail == self.head {
+ return None;
+ }
+ // `head` is one-past-the-end, so decrement first, then read.
+ self.head = wrap_index(self.head.wrapping_sub(1), self.ring.len());
+
+ unsafe {
+ // Same raw-pointer lifetime decoupling as in `next`.
+ let elem = self.ring.get_unchecked_mut(self.head);
+ Some(&mut *(elem as *mut _))
+ }
+ }
+
+ fn rfold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
+ where
+ F: FnMut(Acc, Self::Item) -> Acc,
+ {
+ // Reverse order: back run first, then the front run.
+ let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
+ accum = back.iter_mut().rfold(accum, &mut f);
+ front.iter_mut().rfold(accum, &mut f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IterMut<'_, T> {
+ fn is_empty(&self) -> bool {
+ self.head == self.tail
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IterMut<'_, T> {}
+
+/// An owning iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`into_iter`] method on [`VecDeque`]
+/// (provided by the `IntoIterator` trait). See its documentation for more.
+///
+/// [`into_iter`]: struct.VecDeque.html#method.into_iter
+/// [`VecDeque`]: struct.VecDeque.html
+#[derive(Clone)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<T> {
+ // Iteration simply pops the owned deque from either end, so dropping
+ // the iterator drops any remaining elements via the deque's own Drop.
+ inner: VecDeque<T>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("IntoIter").field(&self.inner).finish()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ self.inner.pop_front()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let len = self.inner.len();
+ (len, Some(len))
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.inner.pop_back()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+ fn is_empty(&self) -> bool {
+ self.inner.is_empty()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: PartialEq> PartialEq for VecDeque<A> {
+ fn eq(&self, other: &VecDeque<A>) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+ // Compare slice-wise instead of element-wise; the two deques may be
+ // split at different points, so re-split the longer front to align.
+ let (sa, sb) = self.as_slices();
+ let (oa, ob) = other.as_slices();
+ if sa.len() == oa.len() {
+ sa == oa && sb == ob
+ } else if sa.len() < oa.len() {
+ // Always divisible in three sections, for example:
+ // self: [a b c|d e f]
+ // other: [0 1 2 3|4 5]
+ // front = 3, mid = 1,
+ // [a b c] == [0 1 2] && [d] == [3] && [e f] == [4 5]
+ let front = sa.len();
+ let mid = oa.len() - front;
+
+ let (oa_front, oa_mid) = oa.split_at(front);
+ let (sb_mid, sb_back) = sb.split_at(mid);
+ debug_assert_eq!(sa.len(), oa_front.len());
+ debug_assert_eq!(sb_mid.len(), oa_mid.len());
+ debug_assert_eq!(sb_back.len(), ob.len());
+ sa == oa_front && sb_mid == oa_mid && sb_back == ob
+ } else {
+ // Mirror image of the case above: `self`'s front slice is longer.
+ let front = oa.len();
+ let mid = sa.len() - front;
+
+ let (sa_front, sa_mid) = sa.split_at(front);
+ let (ob_mid, ob_back) = ob.split_at(mid);
+ debug_assert_eq!(sa_front.len(), oa.len());
+ debug_assert_eq!(sa_mid.len(), ob_mid.len());
+ debug_assert_eq!(sb.len(), ob_back.len());
+ sa_front == oa && sa_mid == ob_mid && sb == ob_back
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Eq> Eq for VecDeque<A> {}
+
+// Generates `PartialEq<$rhs>` for `VecDeque` against slice-like types:
+// split the deque into its two slices, split `$rhs` to match, compare.
+macro_rules! __impl_slice_eq1 {
+ ([$($vars:tt)*] $lhs:ty, $rhs:ty, $($constraints:tt)*) => {
+ #[stable(feature = "vec_deque_partial_eq_slice", since = "1.17.0")]
+ impl<A, B, $($vars)*> PartialEq<$rhs> for $lhs
+ where
+ A: PartialEq<B>,
+ $($constraints)*
+ {
+ fn eq(&self, other: &$rhs) -> bool {
+ if self.len() != other.len() {
+ return false;
+ }
+ let (sa, sb) = self.as_slices();
+ let (oa, ob) = other[..].split_at(sa.len());
+ sa == oa && sb == ob
+ }
+ }
+ }
+}
+
+__impl_slice_eq1! { [] VecDeque<A>, Vec<B>, }
+__impl_slice_eq1! { [] VecDeque<A>, &[B], }
+__impl_slice_eq1! { [] VecDeque<A>, &mut [B], }
+__impl_slice_eq1! { [const N: usize] VecDeque<A>, [B; N], }
+__impl_slice_eq1! { [const N: usize] VecDeque<A>, &[B; N], }
+__impl_slice_eq1! { [const N: usize] VecDeque<A>, &mut [B; N], }
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: PartialOrd> PartialOrd for VecDeque<A> {
+ fn partial_cmp(&self, other: &VecDeque<A>) -> Option<Ordering> {
+ // Lexicographic comparison via the element iterators.
+ self.iter().partial_cmp(other.iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Ord> Ord for VecDeque<A> {
+ #[inline]
+ fn cmp(&self, other: &VecDeque<A>) -> Ordering {
+ self.iter().cmp(other.iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A: Hash> Hash for VecDeque<A> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ // Hash the length first so that prefixes hash differently, then feed
+ // both contiguous slices — equal deques hash equally regardless of
+ // where the ring is split.
+ self.len().hash(state);
+ let (a, b) = self.as_slices();
+ Hash::hash_slice(a, state);
+ Hash::hash_slice(b, state);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> Index<usize> for VecDeque<A> {
+ type Output = A;
+
+ #[inline]
+ fn index(&self, index: usize) -> &A {
+ // Panics (with this message) on out-of-range indices, like slices do.
+ self.get(index).expect("Out of bounds access")
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> IndexMut<usize> for VecDeque<A> {
+ #[inline]
+ fn index_mut(&mut self, index: usize) -> &mut A {
+ self.get_mut(index).expect("Out of bounds access")
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> FromIterator<A> for VecDeque<A> {
+ fn from_iter<T: IntoIterator<Item = A>>(iter: T) -> VecDeque<A> {
+ let iterator = iter.into_iter();
+ // Pre-reserve the size hint's lower bound to reduce regrows in `extend`.
+ let (lower, _) = iterator.size_hint();
+ let mut deq = VecDeque::with_capacity(lower);
+ deq.extend(iterator);
+ deq
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for VecDeque<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ /// Consumes the `VecDeque` into a front-to-back iterator yielding elements by
+ /// value.
+ fn into_iter(self) -> IntoIter<T> {
+ IntoIter { inner: self }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a VecDeque<T> {
+ type Item = &'a T;
+ type IntoIter = Iter<'a, T>;
+
+ fn into_iter(self) -> Iter<'a, T> {
+ self.iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a mut VecDeque<T> {
+ type Item = &'a mut T;
+ type IntoIter = IterMut<'a, T>;
+
+ fn into_iter(self) -> IterMut<'a, T> {
+ self.iter_mut()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<A> Extend<A> for VecDeque<A> {
+ fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
+ // This function should be the moral equivalent of:
+ //
+ // for item in iter.into_iter() {
+ // self.push_back(item);
+ // }
+ //
+ // but it writes directly into the buffer, only re-checking capacity
+ // when the deque is actually full.
+ let mut iter = iter.into_iter();
+ while let Some(element) = iter.next() {
+ if self.len() == self.capacity() {
+ // +1 for `element` itself on top of the iterator's lower bound.
+ let (lower, _) = iter.size_hint();
+ self.reserve(lower.saturating_add(1));
+ }
+
+ let head = self.head;
+ self.head = self.wrap_add(self.head, 1);
+ unsafe {
+ self.buffer_write(head, element);
+ }
+ }
+ }
+
+ #[inline]
+ fn extend_one(&mut self, elem: A) {
+ self.push_back(elem);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Copy> Extend<&'a T> for VecDeque<T> {
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ // Copy out of the references and reuse the owned-value impl above.
+ self.extend(iter.into_iter().cloned());
+ }
+
+ #[inline]
+ fn extend_one(&mut self, &elem: &T) {
+ self.push_back(elem);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for VecDeque<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // `&VecDeque` iterates front-to-back, so this prints in logical order.
+ f.debug_list().entries(self).finish()
+ }
+}
+
+#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
+impl<T> From<Vec<T>> for VecDeque<T> {
+ /// Turn a [`Vec<T>`] into a [`VecDeque<T>`].
+ ///
+ /// [`Vec<T>`]: crate::vec::Vec
+ /// [`VecDeque<T>`]: crate::collections::VecDeque
+ ///
+ /// This avoids reallocating where possible, but the conditions for that are
+ /// strict, and subject to change, and so shouldn't be relied upon unless the
+ /// `Vec<T>` came from `From<VecDeque<T>>` and hasn't been reallocated.
+ fn from(other: Vec<T>) -> Self {
+ unsafe {
+ // Take ownership of the Vec's allocation without dropping it.
+ let mut other = ManuallyDrop::new(other);
+ let other_buf = other.as_mut_ptr();
+ let mut buf = RawVec::from_raw_parts(other_buf, other.capacity());
+ let len = other.len();
+
+ // We need to extend the buf if it's not a power of two, too small
+ // or doesn't have at least one free space (a completely full ring
+ // would make `head == tail`, which encodes the empty deque).
+ if !buf.capacity().is_power_of_two()
+ || (buf.capacity() < (MINIMUM_CAPACITY + 1))
+ || (buf.capacity() == len)
+ {
+ let cap = cmp::max(buf.capacity() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
+ buf.reserve_exact(len, cap - len);
+ }
+
+ // The Vec's elements already sit at the front, so tail = 0, head = len.
+ VecDeque { tail: 0, head: len, buf }
+ }
+ }
+}
+
+#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")]
+impl<T> From<VecDeque<T>> for Vec<T> {
+ /// Turn a [`VecDeque<T>`] into a [`Vec<T>`].
+ ///
+ /// [`Vec<T>`]: crate::vec::Vec
+ /// [`VecDeque<T>`]: crate::collections::VecDeque
+ ///
+ /// This never needs to re-allocate, but does need to do *O*(*n*) data movement if
+ /// the circular buffer doesn't happen to be at the beginning of the allocation.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::collections::VecDeque;
+ ///
+ /// // This one is *O*(1).
+ /// let deque: VecDeque<_> = (1..5).collect();
+ /// let ptr = deque.as_slices().0.as_ptr();
+ /// let vec = Vec::from(deque);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// assert_eq!(vec.as_ptr(), ptr);
+ ///
+ /// // This one needs data rearranging.
+ /// let mut deque: VecDeque<_> = (1..5).collect();
+ /// deque.push_front(9);
+ /// deque.push_front(8);
+ /// let ptr = deque.as_slices().1.as_ptr();
+ /// let vec = Vec::from(deque);
+ /// assert_eq!(vec, [8, 9, 1, 2, 3, 4]);
+ /// assert_eq!(vec.as_ptr(), ptr);
+ /// ```
+ fn from(mut other: VecDeque<T>) -> Self {
+ // After this the elements form one run at buf[tail..head].
+ other.make_contiguous();
+
+ unsafe {
+ let other = ManuallyDrop::new(other);
+ let buf = other.buf.ptr();
+ let len = other.len();
+ let cap = other.cap();
+
+ // `head == 0` only happens for the empty deque (head >= tail here),
+ // in which case there is nothing to move to the front.
+ if other.head != 0 {
+ ptr::copy(buf.add(other.tail), buf, len);
+ }
+ Vec::from_raw_parts(buf, len, cap)
+ }
+ }
+}
diff --git a/library/alloc/src/collections/vec_deque/drain.rs b/library/alloc/src/collections/vec_deque/drain.rs
new file mode 100644
index 00000000000..1ae94de75ad
--- /dev/null
+++ b/library/alloc/src/collections/vec_deque/drain.rs
@@ -0,0 +1,126 @@
+use core::iter::FusedIterator;
+use core::ptr::{self, NonNull};
+use core::{fmt, mem};
+
+use super::{count, Iter, VecDeque};
+
+/// A draining iterator over the elements of a `VecDeque`.
+///
+/// This `struct` is created by the [`drain`] method on [`VecDeque`]. See its
+/// documentation for more.
+///
+/// [`drain`]: struct.VecDeque.html#method.drain
+/// [`VecDeque`]: struct.VecDeque.html
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<'a, T: 'a> {
+ // Index just past the drained range; `Drop` treats it as the drain's head.
+ pub(crate) after_tail: usize,
+ // The deque's original head, restored by `Drop` before repairing.
+ pub(crate) after_head: usize,
+ // Iterates the drained slots; `Drain::next` moves values out via ptr::read.
+ pub(crate) iter: Iter<'a, T>,
+ // Back-pointer to the source deque so `Drop` can stitch it back together.
+ pub(crate) deque: NonNull<VecDeque<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Drain")
+ .field(&self.after_tail)
+ .field(&self.after_head)
+ .field(&self.iter)
+ .finish()
+ }
+}
+
+// NOTE: the `NonNull<VecDeque<T>>` field suppresses the auto impls; `Drain`
+// holds what is effectively an exclusive borrow of the deque, so it is safe
+// to send/share it exactly when `T` itself is Send/Sync.
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Sync> Sync for Drain<'_, T> {}
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Send> Send for Drain<'_, T> {}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Drop for Drain<'_, T> {
+ fn drop(&mut self) {
+ // Guard whose destructor (a) drops any elements still in the drain and
+ // (b) repairs the source deque's head/tail — so the deque is left in a
+ // valid state even if dropping one of the drained elements panics.
+ struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
+
+ impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
+ fn drop(&mut self) {
+ self.0.for_each(drop);
+
+ let source_deque = unsafe { self.0.deque.as_mut() };
+
+ // T = source_deque_tail; H = source_deque_head; t = drain_tail; h = drain_head
+ //
+ // T t h H
+ // [. . . o o x x o o . . .]
+ //
+ let orig_tail = source_deque.tail;
+ let drain_tail = source_deque.head;
+ let drain_head = self.0.after_tail;
+ let orig_head = self.0.after_head;
+
+ let tail_len = count(orig_tail, drain_tail, source_deque.cap());
+ let head_len = count(drain_head, orig_head, source_deque.cap());
+
+ // Restore the original head value
+ source_deque.head = orig_head;
+
+ match (tail_len, head_len) {
+ (0, 0) => {
+ source_deque.head = 0;
+ source_deque.tail = 0;
+ }
+ (0, _) => {
+ source_deque.tail = drain_head;
+ }
+ (_, 0) => {
+ source_deque.head = drain_tail;
+ }
+ _ => unsafe {
+ // Both sides survive: close the gap by moving whichever
+ // side is shorter across the drained range.
+ if tail_len <= head_len {
+ source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
+ source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
+ } else {
+ source_deque.head = source_deque.wrap_add(drain_tail, head_len);
+ source_deque.wrap_copy(drain_tail, drain_head, head_len);
+ }
+ },
+ }
+ }
+ }
+
+ // Drop the remaining drained elements one at a time; the guard is armed
+ // around each `drop(item)` so a panicking destructor still triggers the
+ // cleanup above, and disarmed (`mem::forget`) when the drop succeeds.
+ while let Some(item) = self.next() {
+ let guard = DropGuard(self);
+ drop(item);
+ mem::forget(guard);
+ }
+
+ // Normal path: run the repair logic via the guard's destructor.
+ DropGuard(self);
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Iterator for Drain<'_, T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ // Move the value out of the buffer by value; the underlying `Iter`
+ // yields each slot at most once, so no double-read can occur.
+ self.iter.next().map(|elt| unsafe { ptr::read(elt) })
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> DoubleEndedIterator for Drain<'_, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.iter.next_back().map(|elt| unsafe { ptr::read(elt) })
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> ExactSizeIterator for Drain<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Drain<'_, T> {}
diff --git a/library/alloc/src/collections/vec_deque/tests.rs b/library/alloc/src/collections/vec_deque/tests.rs
new file mode 100644
index 00000000000..e5edfe02a52
--- /dev/null
+++ b/library/alloc/src/collections/vec_deque/tests.rs
@@ -0,0 +1,567 @@
+use super::*;
+
+// Benchmark: 100 `push_back`s into a deque preallocated large enough to never
+// grow. The private `head`/`tail` fields are reset directly to "empty" the
+// deque between iterations without drop or reallocation cost (the elements are
+// plain integers), so only the push bookkeeping is measured.
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn bench_push_back_100(b: &mut test::Bencher) {
+ let mut deq = VecDeque::with_capacity(101);
+ b.iter(|| {
+ for i in 0..100 {
+ deq.push_back(i);
+ }
+ deq.head = 0;
+ deq.tail = 0;
+ })
+}
+
+// Benchmark: 100 `push_front`s, mirroring `bench_push_back_100` above; the
+// deque is emptied by resetting the private `head`/`tail` fields so the loop
+// measures push cost alone.
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn bench_push_front_100(b: &mut test::Bencher) {
+ let mut deq = VecDeque::with_capacity(101);
+ b.iter(|| {
+ for i in 0..100 {
+ deq.push_front(i);
+ }
+ deq.head = 0;
+ deq.tail = 0;
+ })
+}
+
+// Benchmark: 100 `pop_back`s. Each iteration fabricates a "full" deque by
+// writing the private `head`/`tail` fields directly; the i32 buffer contents
+// are irrelevant to the pop bookkeeping being measured. `black_box` prevents
+// the optimizer from eliding the pops.
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn bench_pop_back_100(b: &mut test::Bencher) {
+ let mut deq = VecDeque::<i32>::with_capacity(101);
+
+ b.iter(|| {
+ deq.head = 100;
+ deq.tail = 0;
+ while !deq.is_empty() {
+ test::black_box(deq.pop_back());
+ }
+ })
+}
+
+// Benchmark: 100 `pop_front`s, the front-side mirror of `bench_pop_back_100`
+// above; the deque is refilled each iteration by resetting `head`/`tail`.
+#[bench]
+#[cfg_attr(miri, ignore)] // isolated Miri does not support benchmarks
+fn bench_pop_front_100(b: &mut test::Bencher) {
+ let mut deq = VecDeque::<i32>::with_capacity(101);
+
+ b.iter(|| {
+ deq.head = 100;
+ deq.tail = 0;
+ while !deq.is_empty() {
+ test::black_box(deq.pop_front());
+ }
+ })
+}
+
+// Exercises `swap_remove_back` (when `back` is true) and `swap_remove_front`
+// for every ring-buffer start position and every length up to half the
+// capacity, checking both the removed values and the surviving contents, and
+// that the private `head`/`tail` indices remain in bounds afterwards.
+#[test]
+fn test_swap_front_back_remove() {
+ fn test(back: bool) {
+ // This test checks that every single combination of tail position and length is tested.
+ // Capacity 15 should be large enough to cover every case.
+ let mut tester = VecDeque::with_capacity(15);
+ let usable_cap = tester.capacity();
+ let final_len = usable_cap / 2;
+
+ for len in 0..final_len {
+ let expected: VecDeque<_> =
+ if back { (0..len).collect() } else { (0..len).rev().collect() };
+ for tail_pos in 0..usable_cap {
+ // Directly position the (private) ring start so every offset is covered.
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ if back {
+ for i in 0..len * 2 {
+ tester.push_front(i);
+ }
+ for i in 0..len {
+ assert_eq!(tester.swap_remove_back(i), Some(len * 2 - 1 - i));
+ }
+ } else {
+ for i in 0..len * 2 {
+ tester.push_back(i);
+ }
+ for i in 0..len {
+ let idx = tester.len() - 1 - i;
+ assert_eq!(tester.swap_remove_front(idx), Some(len * 2 - 1 - i));
+ }
+ }
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert_eq!(tester, expected);
+ }
+ }
+ }
+ test(true);
+ test(false);
+}
+
+// Exhaustively exercises `insert` over every ring start position, every final
+// length, and every insertion index, verifying the resulting contents and that
+// the private `head`/`tail` indices stay within the ring capacity.
+#[test]
+fn test_insert() {
+ // This test checks that every single combination of tail position, length, and
+ // insertion position is tested. Capacity 15 should be large enough to cover every case.
+
+ let mut tester = VecDeque::with_capacity(15);
+ // can't guarantee we got 15, so have to get what we got.
+ // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
+ // this test isn't covering what it wants to
+ let cap = tester.capacity();
+
+ // len is the length *after* insertion
+ for len in 1..cap {
+ // 0, 1, 2, .., len - 1
+ let expected = (0..).take(len).collect::<VecDeque<_>>();
+ for tail_pos in 0..cap {
+ for to_insert in 0..len {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ // Build the deque with the value `to_insert` left out, then insert
+ // it at its own index so the result is exactly 0..len in order.
+ for i in 0..len {
+ if i != to_insert {
+ tester.push_back(i);
+ }
+ }
+ tester.insert(to_insert, to_insert);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert_eq!(tester, expected);
+ }
+ }
+ }
+}
+
+// `make_contiguous` on a wrapped deque where the front run (7 elements pushed
+// to the front) is longer than the back run (3 pushed to the back). Afterwards
+// the contents must be in a single slice starting where the front run started.
+#[test]
+fn make_contiguous_big_tail() {
+ let mut tester = VecDeque::with_capacity(15);
+
+ for i in 0..3 {
+ tester.push_back(i);
+ }
+
+ for i in 3..10 {
+ tester.push_front(i);
+ }
+
+ // 012......9876543
+ assert_eq!(tester.capacity(), 15);
+ assert_eq!((&[9, 8, 7, 6, 5, 4, 3] as &[_], &[0, 1, 2] as &[_]), tester.as_slices());
+
+ let expected_start = tester.head;
+ tester.make_contiguous();
+ assert_eq!(tester.tail, expected_start);
+ assert_eq!((&[9, 8, 7, 6, 5, 4, 3, 0, 1, 2] as &[_], &[] as &[_]), tester.as_slices());
+}
+
+// `make_contiguous` on a wrapped deque where the back run (8 elements pushed to
+// the back) is longer than the front run (2 pushed to the front); the result
+// must be one contiguous slice starting at index 0 of the buffer.
+#[test]
+fn make_contiguous_big_head() {
+ let mut tester = VecDeque::with_capacity(15);
+
+ for i in 0..8 {
+ tester.push_back(i);
+ }
+
+ for i in 8..10 {
+ tester.push_front(i);
+ }
+
+ // 01234567......98
+ let expected_start = 0;
+ tester.make_contiguous();
+ assert_eq!(tester.tail, expected_start);
+ assert_eq!((&[9, 8, 0, 1, 2, 3, 4, 5, 6, 7] as &[_], &[] as &[_]), tester.as_slices());
+}
+
+// `make_contiguous` when only a little free space remains (13 of 15 usable
+// slots occupied), in both wrap directions — presumably exercising the
+// low-free-space code path of `make_contiguous`; confirm against its
+// implementation. NOTE(review): `'A' as u8` would idiomatically be the byte
+// literal `b'A'` (left as-is to match the committed diff).
+#[test]
+fn make_contiguous_small_free() {
+ let mut tester = VecDeque::with_capacity(15);
+
+ for i in 'A' as u8..'I' as u8 {
+ tester.push_back(i as char);
+ }
+
+ for i in 'I' as u8..'N' as u8 {
+ tester.push_front(i as char);
+ }
+
+ // ABCDEFGH...MLKJI
+ let expected_start = 0;
+ tester.make_contiguous();
+ assert_eq!(tester.tail, expected_start);
+ assert_eq!(
+ (&['M', 'L', 'K', 'J', 'I', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H'] as &[_], &[] as &[_]),
+ tester.as_slices()
+ );
+
+ tester.clear();
+ // Repeat with the push directions swapped to wrap the other way around.
+ for i in 'I' as u8..'N' as u8 {
+ tester.push_back(i as char);
+ }
+
+ for i in 'A' as u8..'I' as u8 {
+ tester.push_front(i as char);
+ }
+
+ // IJKLM...HGFEDCBA
+ let expected_start = 0;
+ tester.make_contiguous();
+ assert_eq!(tester.tail, expected_start);
+ assert_eq!(
+ (&['H', 'G', 'F', 'E', 'D', 'C', 'B', 'A', 'I', 'J', 'K', 'L', 'M'] as &[_], &[] as &[_]),
+ tester.as_slices()
+ );
+}
+
+// Exhaustively exercises `remove` over every ring start position, every final
+// length, and every removal index, verifying the surviving contents and the
+// in-bounds invariants of the private `head`/`tail` indices.
+#[test]
+fn test_remove() {
+ // This test checks that every single combination of tail position, length, and
+ // removal position is tested. Capacity 15 should be large enough to cover every case.
+
+ let mut tester = VecDeque::with_capacity(15);
+ // can't guarantee we got 15, so have to get what we got.
+ // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
+ // this test isn't covering what it wants to
+ let cap = tester.capacity();
+
+ // len is the length *after* removal
+ for len in 0..cap - 1 {
+ // 0, 1, 2, .., len - 1
+ let expected = (0..).take(len).collect::<VecDeque<_>>();
+ for tail_pos in 0..cap {
+ for to_remove in 0..=len {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ // `1234` is a sentinel occupying the slot that `remove` is
+ // expected to take out, leaving exactly 0..len behind.
+ for i in 0..len {
+ if i == to_remove {
+ tester.push_back(1234);
+ }
+ tester.push_back(i);
+ }
+ if to_remove == len {
+ tester.push_back(1234);
+ }
+ tester.remove(to_remove);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert_eq!(tester, expected);
+ }
+ }
+ }
+}
+
+// Exhaustively checks `range` for every deque length, ring start position, and
+// sub-range: the iterator must yield exactly the elements at `start..end`.
+#[test]
+fn test_range() {
+ let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
+
+ let cap = tester.capacity();
+ for len in 0..=cap {
+ for tail in 0..=cap {
+ for start in 0..=len {
+ for end in start..=len {
+ tester.tail = tail;
+ tester.head = tail;
+ for i in 0..len {
+ tester.push_back(i);
+ }
+
+ // Check that we iterate over the correct values
+ let range: VecDeque<_> = tester.range(start..end).copied().collect();
+ let expected: VecDeque<_> = (start..end).collect();
+ assert_eq!(range, expected);
+ }
+ }
+ }
+ }
+}
+
+// Same exhaustive sweep as `test_range`, but through `range_mut`; additionally
+// verifies that taking a mutable range leaves the capacity and the private
+// `head`/`tail` indices untouched.
+#[test]
+fn test_range_mut() {
+ let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
+
+ let cap = tester.capacity();
+ for len in 0..=cap {
+ for tail in 0..=cap {
+ for start in 0..=len {
+ for end in start..=len {
+ tester.tail = tail;
+ tester.head = tail;
+ for i in 0..len {
+ tester.push_back(i);
+ }
+
+ let head_was = tester.head;
+ let tail_was = tester.tail;
+
+ // Check that we iterate over the correct values
+ let range: VecDeque<_> = tester.range_mut(start..end).map(|v| *v).collect();
+ let expected: VecDeque<_> = (start..end).collect();
+ assert_eq!(range, expected);
+
+ // We shouldn't have changed the capacity or made the
+ // head or tail out of bounds
+ assert_eq!(tester.capacity(), cap);
+ assert_eq!(tester.tail, tail_was);
+ assert_eq!(tester.head, head_was);
+ }
+ }
+ }
+ }
+}
+
+// Exhaustively checks `drain` for every length, ring start position, and drain
+// range: the drained iterator must yield exactly `drain_start..drain_end`, the
+// remaining deque must contain the rest in order, and the private `head`/`tail`
+// indices must stay in bounds (this exercises the `Drop for Drain` repair
+// logic as well).
+#[test]
+fn test_drain() {
+ let mut tester: VecDeque<usize> = VecDeque::with_capacity(7);
+
+ let cap = tester.capacity();
+ for len in 0..=cap {
+ for tail in 0..=cap {
+ for drain_start in 0..=len {
+ for drain_end in drain_start..=len {
+ tester.tail = tail;
+ tester.head = tail;
+ for i in 0..len {
+ tester.push_back(i);
+ }
+
+ // Check that we drain the correct values
+ let drained: VecDeque<_> = tester.drain(drain_start..drain_end).collect();
+ let drained_expected: VecDeque<_> = (drain_start..drain_end).collect();
+ assert_eq!(drained, drained_expected);
+
+ // We shouldn't have changed the capacity or made the
+ // head or tail out of bounds
+ assert_eq!(tester.capacity(), cap);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+
+ // We should see the correct values in the VecDeque
+ let expected: VecDeque<_> = (0..drain_start).chain(drain_end..len).collect();
+ assert_eq!(expected, tester);
+ }
+ }
+ }
+ }
+}
+
+// Exercises `shrink_to_fit` from an inflated capacity (reserve(63)) back down,
+// for every ring start position and length, verifying the contents survive the
+// reallocation and the private `head`/`tail` indices remain in bounds.
+#[test]
+fn test_shrink_to_fit() {
+ // This test checks that every single combination of head and tail position,
+ // is tested. Capacity 15 should be large enough to cover every case.
+
+ let mut tester = VecDeque::with_capacity(15);
+ // can't guarantee we got 15, so have to get what we got.
+ // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
+ // this test isn't covering what it wants to
+ let cap = tester.capacity();
+ tester.reserve(63);
+ let max_cap = tester.capacity();
+
+ for len in 0..=cap {
+ // 0, 1, 2, .., len - 1
+ let expected = (0..).take(len).collect::<VecDeque<_>>();
+ for tail_pos in 0..=max_cap {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ // Re-inflate before each round so shrinking always has work to do.
+ tester.reserve(63);
+ for i in 0..len {
+ tester.push_back(i);
+ }
+ tester.shrink_to_fit();
+ assert!(tester.capacity() <= cap);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert_eq!(tester, expected);
+ }
+ }
+}
+
+// Exhaustively exercises `split_off` for every ring start position, length,
+// and split index, checking both halves' contents and that the private
+// `head`/`tail` indices of both deques stay in bounds.
+#[test]
+fn test_split_off() {
+ // This test checks that every single combination of tail position, length, and
+ // split position is tested. Capacity 15 should be large enough to cover every case.
+
+ let mut tester = VecDeque::with_capacity(15);
+ // can't guarantee we got 15, so have to get what we got.
+ // 15 would be great, but we will definitely get 2^k - 1, for k >= 4, or else
+ // this test isn't covering what it wants to
+ let cap = tester.capacity();
+
+ // len is the length *before* splitting
+ for len in 0..cap {
+ // index to split at
+ for at in 0..=len {
+ // 0, 1, 2, .., at - 1 (may be empty)
+ let expected_self = (0..).take(at).collect::<VecDeque<_>>();
+ // at, at + 1, .., len - 1 (may be empty)
+ let expected_other = (at..).take(len - at).collect::<VecDeque<_>>();
+
+ for tail_pos in 0..cap {
+ tester.tail = tail_pos;
+ tester.head = tail_pos;
+ for i in 0..len {
+ tester.push_back(i);
+ }
+ let result = tester.split_off(at);
+ assert!(tester.tail < tester.cap());
+ assert!(tester.head < tester.cap());
+ assert!(result.tail < result.cap());
+ assert!(result.head < result.cap());
+ assert_eq!(tester, expected_self);
+ assert_eq!(result, expected_other);
+ }
+ }
+ }
+}
+
+// `From<Vec<T>>` conversion across many capacity/length combinations: the
+// resulting deque must keep the elements in order, report the same length, and
+// uphold the ring-buffer invariant that the internal capacity is a power of two.
+#[test]
+fn test_from_vec() {
+ use crate::vec::Vec;
+ for cap in 0..35 {
+ for len in 0..=cap {
+ let mut vec = Vec::with_capacity(cap);
+ vec.extend(0..len);
+
+ let vd = VecDeque::from(vec.clone());
+ assert!(vd.cap().is_power_of_two());
+ assert_eq!(vd.len(), vec.len());
+ assert!(vd.into_iter().eq(vec));
+ }
+ }
+}
+
+// `Vec::from(VecDeque)` conversion: sweeps power-of-two ring sizes, lengths on
+// both sides of half-capacity, and start offsets chosen to hit the contiguous,
+// end-block-larger, and start-block-larger layouts of the ring buffer.
+#[test]
+fn test_vec_from_vecdeque() {
+ use crate::vec::Vec;
+
+ // Rotates the deque's start index by `offset` (push + pop leaves the length
+ // unchanged but advances the ring position), fills it with 0..len, then
+ // checks the Vec conversion preserves length and element order.
+ fn create_vec_and_test_convert(capacity: usize, offset: usize, len: usize) {
+ let mut vd = VecDeque::with_capacity(capacity);
+ for _ in 0..offset {
+ vd.push_back(0);
+ vd.pop_front();
+ }
+ vd.extend(0..len);
+
+ let vec: Vec<_> = Vec::from(vd.clone());
+ assert_eq!(vec.len(), vd.len());
+ assert!(vec.into_iter().eq(vd));
+ }
+
+ // Miri is too slow
+ let max_pwr = if cfg!(miri) { 5 } else { 7 };
+
+ for cap_pwr in 0..max_pwr {
+ // Make capacity as a (2^x)-1, so that the ring size is 2^x
+ let cap = (2i32.pow(cap_pwr) - 1) as usize;
+
+ // In these cases there is enough free space to solve it with copies
+ for len in 0..((cap + 1) / 2) {
+ // Test contiguous cases
+ for offset in 0..(cap - len) {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+
+ // Test cases where block at end of buffer is bigger than block at start
+ for offset in (cap - len)..(cap - (len / 2)) {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+
+ // Test cases where block at start of buffer is bigger than block at end
+ for offset in (cap - (len / 2))..cap {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+ }
+
+ // Now there's not (necessarily) space to straighten the ring with simple copies,
+ // the ring will use swapping when:
+ // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len))
+ // right block size > free space && left block size > free space
+ for len in ((cap + 1) / 2)..cap {
+ // Test contiguous cases
+ for offset in 0..(cap - len) {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+
+ // Test cases where block at end of buffer is bigger than block at start
+ for offset in (cap - len)..(cap - (len / 2)) {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+
+ // Test cases where block at start of buffer is bigger than block at end
+ for offset in (cap - (len / 2))..cap {
+ create_vec_and_test_convert(cap, offset, len)
+ }
+ }
+ }
+}
+
+// `clone_from` between deques of different lengths (8 vs 12 elements, both
+// orderings via `longer`) and with varying numbers of extra `push_front`s on
+// each side, so the source and destination start at assorted ring offsets; the
+// destination must end up equal to the source.
+#[test]
+fn test_clone_from() {
+ let m = vec![1; 8];
+ let n = vec![2; 12];
+ for pfv in 0..8 {
+ for pfu in 0..8 {
+ for longer in 0..2 {
+ let (vr, ur) = if longer == 0 { (&m, &n) } else { (&n, &m) };
+ let mut v = VecDeque::from(vr.clone());
+ for _ in 0..pfv {
+ v.push_front(1);
+ }
+ let mut u = VecDeque::from(ur.clone());
+ for _ in 0..pfu {
+ u.push_front(2);
+ }
+ v.clone_from(&u);
+ assert_eq!(&v, &u);
+ }
+ }
+ }
+}
+
+// `truncate` must drop exactly the removed elements. A `static mut` counter in
+// `Elem::drop` tallies drops (fine here: the test body is single-threaded and
+// resets the counter each round); the split between `push_front` and
+// `push_back` varies so truncation is exercised at several ring layouts.
+#[test]
+fn test_vec_deque_truncate_drop() {
+ static mut DROPS: u32 = 0;
+ #[derive(Clone)]
+ struct Elem(i32);
+ impl Drop for Elem {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
+ for push_front in 0..=v.len() {
+ let v = v.clone();
+ let mut tester = VecDeque::with_capacity(5);
+ for (index, elem) in v.into_iter().enumerate() {
+ if index < push_front {
+ tester.push_front(elem);
+ } else {
+ tester.push_back(elem);
+ }
+ }
+ assert_eq!(unsafe { DROPS }, 0);
+ tester.truncate(3);
+ assert_eq!(unsafe { DROPS }, 2);
+ tester.truncate(0);
+ assert_eq!(unsafe { DROPS }, 5);
+ unsafe {
+ DROPS = 0;
+ }
+ }
+}
+
+// Regression test for rust-lang/rust#53529: after popping from `dst` and then
+// `append`ing from `src`, `dst` must contain only boxed `2`s — i.e. `append`
+// must transfer elements correctly for a deque whose ring has wrapped.
+#[test]
+fn issue_53529() {
+ use crate::boxed::Box;
+
+ let mut dst = VecDeque::new();
+ dst.push_front(Box::new(1));
+ dst.push_front(Box::new(2));
+ assert_eq!(*dst.pop_back().unwrap(), 1);
+
+ let mut src = VecDeque::new();
+ src.push_front(Box::new(2));
+ dst.append(&mut src);
+ for a in dst {
+ assert_eq!(*a, 2);
+ }
+}
diff --git a/library/alloc/src/fmt.rs b/library/alloc/src/fmt.rs
new file mode 100644
index 00000000000..26077f3c8d1
--- /dev/null
+++ b/library/alloc/src/fmt.rs
@@ -0,0 +1,588 @@
+//! Utilities for formatting and printing `String`s.
+//!
+//! This module contains the runtime support for the [`format!`] syntax extension.
+//! This macro is implemented in the compiler to emit calls to this module in
+//! order to format arguments at runtime into strings.
+//!
+//! # Usage
+//!
+//! The [`format!`] macro is intended to be familiar to those coming from C's
+//! `printf`/`fprintf` functions or Python's `str.format` function.
+//!
+//! Some examples of the [`format!`] extension are:
+//!
+//! ```
+//! format!("Hello"); // => "Hello"
+//! format!("Hello, {}!", "world"); // => "Hello, world!"
+//! format!("The number is {}", 1); // => "The number is 1"
+//! format!("{:?}", (3, 4)); // => "(3, 4)"
+//! format!("{value}", value=4); // => "4"
+//! format!("{} {}", 1, 2); // => "1 2"
+//! format!("{:04}", 42); // => "0042" with leading zeros
+//! ```
+//!
+//! From these, you can see that the first argument is a format string. It is
+//! required by the compiler for this to be a string literal; it cannot be a
+//! variable passed in (in order to perform validity checking). The compiler
+//! will then parse the format string and determine if the list of arguments
+//! provided is suitable to pass to this format string.
+//!
+//! To convert a single value to a string, use the [`to_string`] method. This
+//! will use the [`Display`] formatting trait.
+//!
+//! ## Positional parameters
+//!
+//! Each formatting argument is allowed to specify which value argument it's
+//! referencing, and if omitted it is assumed to be "the next argument". For
+//! example, the format string `{} {} {}` would take three parameters, and they
+//! would be formatted in the same order as they're given. The format string
+//! `{2} {1} {0}`, however, would format arguments in reverse order.
+//!
+//! Things can get a little tricky once you start intermingling the two types of
+//! positional specifiers. The "next argument" specifier can be thought of as an
+//! iterator over the argument. Each time a "next argument" specifier is seen,
+//! the iterator advances. This leads to behavior like this:
+//!
+//! ```
+//! format!("{1} {} {0} {}", 1, 2); // => "2 1 1 2"
+//! ```
+//!
+//! The internal iterator over the argument has not been advanced by the time
+//! the first `{}` is seen, so it prints the first argument. Then upon reaching
+//! the second `{}`, the iterator has advanced forward to the second argument.
+//! Essentially, parameters that explicitly name their argument do not affect
+//! parameters that do not name an argument in terms of positional specifiers.
+//!
+//! A format string is required to use all of its arguments, otherwise it is a
+//! compile-time error. You may refer to the same argument more than once in the
+//! format string.
+//!
+//! ## Named parameters
+//!
+//! Rust itself does not have a Python-like equivalent of named parameters to a
+//! function, but the [`format!`] macro is a syntax extension that allows it to
+//! leverage named parameters. Named parameters are listed at the end of the
+//! argument list and have the syntax:
+//!
+//! ```text
+//! identifier '=' expression
+//! ```
+//!
+//! For example, the following [`format!`] expressions all use named argument:
+//!
+//! ```
+//! format!("{argument}", argument = "test"); // => "test"
+//! format!("{name} {}", 1, name = 2); // => "2 1"
+//! format!("{a} {c} {b}", a="a", b='b', c=3); // => "a 3 b"
+//! ```
+//!
+//! It is not valid to put positional parameters (those without names) after
+//! arguments that have names. Like with positional parameters, it is not
+//! valid to provide named parameters that are unused by the format string.
+//!
+//! # Formatting Parameters
+//!
+//! Each argument being formatted can be transformed by a number of formatting
+//! parameters (corresponding to `format_spec` in the syntax above). These
+//! parameters affect the string representation of what's being formatted.
+//!
+//! ## Width
+//!
+//! ```
+//! // All of these print "Hello x    !"
+//! println!("Hello {:5}!", "x");
+//! println!("Hello {:1$}!", "x", 5);
+//! println!("Hello {1:0$}!", 5, "x");
+//! println!("Hello {:width$}!", "x", width = 5);
+//! ```
+//!
+//! This is a parameter for the "minimum width" that the format should take up.
+//! If the value's string does not fill up this many characters, then the
+//! padding specified by fill/alignment will be used to take up the required
+//! space (see below).
+//!
+//! The value for the width can also be provided as a [`usize`] in the list of
+//! parameters by adding a postfix `$`, indicating that the second argument is
+//! a [`usize`] specifying the width.
+//!
+//! Referring to an argument with the dollar syntax does not affect the "next
+//! argument" counter, so it's usually a good idea to refer to arguments by
+//! position, or use named arguments.
+//!
+//! ## Fill/Alignment
+//!
+//! ```
+//! assert_eq!(format!("Hello {:<5}!", "x"),  "Hello x    !");
+//! assert_eq!(format!("Hello {:-<5}!", "x"), "Hello x----!");
+//! assert_eq!(format!("Hello {:^5}!", "x"),  "Hello   x  !");
+//! assert_eq!(format!("Hello {:>5}!", "x"),  "Hello     x!");
+//! ```
+//!
+//! The optional fill character and alignment is provided normally in conjunction with the
+//! [`width`](#width) parameter. It must be defined before `width`, right after the `:`.
+//! This indicates that if the value being formatted is smaller than
+//! `width` some extra characters will be printed around it.
+//! Filling comes in the following variants for different alignments:
+//!
+//! * `[fill]<` - the argument is left-aligned in `width` columns
+//! * `[fill]^` - the argument is center-aligned in `width` columns
+//! * `[fill]>` - the argument is right-aligned in `width` columns
+//!
+//! The default [fill/alignment](#fillalignment) for non-numerics is a space and
+//! left-aligned. The
+//! default for numeric formatters is also a space character but with right-alignment. If
+//! the `0` flag (see below) is specified for numerics, then the implicit fill character is
+//! `0`.
+//!
+//! Note that alignment may not be implemented by some types. In particular, it
+//! is not generally implemented for the `Debug` trait. A good way to ensure
+//! padding is applied is to format your input, then pad this resulting string
+//! to obtain your output:
+//!
+//! ```
+//! println!("Hello {:^15}!", format!("{:?}", Some("hi"))); // => "Hello   Some("hi")   !"
+//! ```
+//!
+//! ## Sign/`#`/`0`
+//!
+//! ```
+//! assert_eq!(format!("Hello {:+}!", 5), "Hello +5!");
+//! assert_eq!(format!("{:#x}!", 27), "0x1b!");
+//! assert_eq!(format!("Hello {:05}!", 5), "Hello 00005!");
+//! assert_eq!(format!("Hello {:05}!", -5), "Hello -0005!");
+//! assert_eq!(format!("{:#010x}!", 27), "0x0000001b!");
+//! ```
+//!
+//! These are all flags altering the behavior of the formatter.
+//!
+//! * `+` - This is intended for numeric types and indicates that the sign
+//! should always be printed. Positive signs are never printed by
+//! default, and the negative sign is only printed by default for the
+//! `Signed` trait. This flag indicates that the correct sign (`+` or `-`)
+//! should always be printed.
+//! * `-` - Currently not used
+//! * `#` - This flag indicates that the "alternate" form of printing should
+//! be used. The alternate forms are:
+//! * `#?` - pretty-print the [`Debug`] formatting
+//! * `#x` - precedes the argument with a `0x`
+//! * `#X` - precedes the argument with a `0x`
+//! * `#b` - precedes the argument with a `0b`
+//! * `#o` - precedes the argument with a `0o`
+//! * `0` - This is used to indicate for integer formats that the padding to `width` should
+//! both be done with a `0` character as well as be sign-aware. A format
+//! like `{:08}` would yield `00000001` for the integer `1`, while the
+//! same format would yield `-0000001` for the integer `-1`. Notice that
+//! the negative version has one fewer zero than the positive version.
+//! Note that padding zeros are always placed after the sign (if any)
+//! and before the digits. When used together with the `#` flag, a similar
+//! rule applies: padding zeros are inserted after the prefix but before
+//! the digits. The prefix is included in the total width.
+//!
+//! ## Precision
+//!
+//! For non-numeric types, this can be considered a "maximum width". If the resulting string is
+//! longer than this width, then it is truncated down to this many characters and that truncated
+//! value is emitted with proper `fill`, `alignment` and `width` if those parameters are set.
+//!
+//! For integral types, this is ignored.
+//!
+//! For floating-point types, this indicates how many digits after the decimal point should be
+//! printed.
+//!
+//! There are three possible ways to specify the desired `precision`:
+//!
+//! 1. An integer `.N`:
+//!
+//! the integer `N` itself is the precision.
+//!
+//! 2. An integer or name followed by dollar sign `.N$`:
+//!
+//! use format *argument* `N` (which must be a `usize`) as the precision.
+//!
+//! 3. An asterisk `.*`:
+//!
+//! `.*` means that this `{...}` is associated with *two* format inputs rather than one: the
+//! first input holds the `usize` precision, and the second holds the value to print. Note that
+//! in this case, if one uses the format string `{<arg>:<spec>.*}`, then the `<arg>` part refers
+//! to the *value* to print, and the `precision` must come in the input preceding `<arg>`.
+//!
+//! For example, the following calls all print the same thing `Hello x is 0.01000`:
+//!
+//! ```
+//! // Hello {arg 0 ("x")} is {arg 1 (0.01) with precision specified inline (5)}
+//! println!("Hello {0} is {1:.5}", "x", 0.01);
+//!
+//! // Hello {arg 1 ("x")} is {arg 2 (0.01) with precision specified in arg 0 (5)}
+//! println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
+//!
+//! // Hello {arg 0 ("x")} is {arg 2 (0.01) with precision specified in arg 1 (5)}
+//! println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {second of next two args (0.01) with precision
+//! // specified in first of next two args (5)}
+//! println!("Hello {} is {:.*}", "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {arg 2 (0.01) with precision
+//! // specified in its predecessor (5)}
+//! println!("Hello {} is {2:.*}", "x", 5, 0.01);
+//!
+//! // Hello {next arg ("x")} is {arg "number" (0.01) with precision specified
+//! // in arg "prec" (5)}
+//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
+//! ```
+//!
+//! While these:
+//!
+//! ```
+//! println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
+//! println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
+//! println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
+//! ```
+//!
+//! print two significantly different things:
+//!
+//! ```text
+//! Hello, `1234.560` has 3 fractional digits
+//! Hello, `123` has 3 characters
+//! Hello, `     123` has 3 right-aligned characters
+//! ```
+//!
+//! ## Localization
+//!
+//! In some programming languages, the behavior of string formatting functions
+//! depends on the operating system's locale setting. The format functions
+//! provided by Rust's standard library do not have any concept of locale and
+//! will produce the same results on all systems regardless of user
+//! configuration.
+//!
+//! For example, the following code will always print `1.5` even if the system
+//! locale uses a decimal separator other than a dot.
+//!
+//! ```
+//! println!("The value is {}", 1.5);
+//! ```
+//!
+//! # Escaping
+//!
+//! The literal characters `{` and `}` may be included in a string by preceding
+//! them with the same character. For example, the `{` character is escaped with
+//! `{{` and the `}` character is escaped with `}}`.
+//!
+//! ```
+//! assert_eq!(format!("Hello {{}}"), "Hello {}");
+//! assert_eq!(format!("{{ Hello"), "{ Hello");
+//! ```
+//!
+//! # Syntax
+//!
+//! To summarize, here you can find the full grammar of format strings.
+//! The syntax for the formatting language used is drawn from other languages,
+//! so it should not be too alien. Arguments are formatted with Python-like
+//! syntax, meaning that arguments are surrounded by `{}` instead of the C-like
+//! `%`. The actual grammar for the formatting syntax is:
+//!
+//! ```text
+//! format_string := <text> [ maybe-format <text> ] *
+//! maybe-format := '{' '{' | '}' '}' | <format>
+//! format := '{' [ argument ] [ ':' format_spec ] '}'
+//! argument := integer | identifier
+//!
+//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision][type]
+//! fill := character
+//! align := '<' | '^' | '>'
+//! sign := '+' | '-'
+//! width := count
+//! precision := count | '*'
+//! type := identifier | '?' | ''
+//! count := parameter | integer
+//! parameter := argument '$'
+//! ```
+//!
+//! # Formatting traits
+//!
+//! When requesting that an argument be formatted with a particular type, you
+//! are actually requesting that an argument ascribes to a particular trait.
+//! This allows multiple actual types to be formatted via `{:x}` (like [`i8`] as
+//! well as [`isize`]). The current mapping of types to traits is:
+//!
+//! * *nothing* ⇒ [`Display`]
+//! * `?` ⇒ [`Debug`]
+//! * `x?` ⇒ [`Debug`] with lower-case hexadecimal integers
+//! * `X?` ⇒ [`Debug`] with upper-case hexadecimal integers
+//! * `o` ⇒ [`Octal`](trait.Octal.html)
+//! * `x` ⇒ [`LowerHex`](trait.LowerHex.html)
+//! * `X` ⇒ [`UpperHex`](trait.UpperHex.html)
+//! * `p` ⇒ [`Pointer`](trait.Pointer.html)
+//! * `b` ⇒ [`Binary`]
+//! * `e` ⇒ [`LowerExp`](trait.LowerExp.html)
+//! * `E` ⇒ [`UpperExp`](trait.UpperExp.html)
+//!
+//! What this means is that any type of argument which implements the
+//! [`fmt::Binary`][`Binary`] trait can then be formatted with `{:b}`. Implementations
+//! are provided for these traits for a number of primitive types by the
+//! standard library as well. If no format is specified (as in `{}` or `{:6}`),
+//! then the format trait used is the [`Display`] trait.
+//!
+//! When implementing a format trait for your own type, you will have to
+//! implement a method of the signature:
+//!
+//! ```
+//! # #![allow(dead_code)]
+//! # use std::fmt;
+//! # struct Foo; // our custom type
+//! # impl fmt::Display for Foo {
+//! fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+//! # write!(f, "testing, testing")
+//! # } }
+//! ```
+//!
+//! Your type will be passed as `self` by-reference, and then the function
+//! should emit output into the `f.buf` stream. It is up to each format trait
+//! implementation to correctly adhere to the requested formatting parameters.
+//! The values of these parameters will be listed in the fields of the
+//! [`Formatter`] struct. In order to help with this, the [`Formatter`] struct also
+//! provides some helper methods.
+//!
+//! Additionally, the return value of this function is [`fmt::Result`] which is a
+//! type alias of [`Result`]`<(), `[`std::fmt::Error`]`>`. Formatting implementations
+//! should ensure that they propagate errors from the [`Formatter`] (e.g., when
+//! calling [`write!`]). However, they should never return errors spuriously. That
+//! is, a formatting implementation must and may only return an error if the
+//! passed-in [`Formatter`] returns an error. This is because, contrary to what
+//! the function signature might suggest, string formatting is an infallible
+//! operation. This function only returns a result because writing to the
+//! underlying stream might fail and it must provide a way to propagate the fact
+//! that an error has occurred back up the stack.
+//!
+//! An example of implementing the formatting traits would look
+//! like:
+//!
+//! ```
+//! use std::fmt;
+//!
+//! #[derive(Debug)]
+//! struct Vector2D {
+//! x: isize,
+//! y: isize,
+//! }
+//!
+//! impl fmt::Display for Vector2D {
+//! fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+//! // The `f` value implements the `Write` trait, which is what the
+//! // write! macro is expecting. Note that this formatting ignores the
+//! // various flags provided to format strings.
+//! write!(f, "({}, {})", self.x, self.y)
+//! }
+//! }
+//!
+//! // Different traits allow different forms of output of a type. The meaning
+//! // of this format is to print the magnitude of a vector.
+//! impl fmt::Binary for Vector2D {
+//! fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+//! let magnitude = (self.x * self.x + self.y * self.y) as f64;
+//! let magnitude = magnitude.sqrt();
+//!
+//! // Respect the formatting flags by using the helper method
+//! // `pad_integral` on the Formatter object. See the method
+//! // documentation for details, and the function `pad` can be used
+//! // to pad strings.
+//! let decimals = f.precision().unwrap_or(3);
+//! let string = format!("{:.*}", decimals, magnitude);
+//! f.pad_integral(true, "", &string)
+//! }
+//! }
+//!
+//! fn main() {
+//! let myvector = Vector2D { x: 3, y: 4 };
+//!
+//! println!("{}", myvector); // => "(3, 4)"
+//! println!("{:?}", myvector); // => "Vector2D {x: 3, y:4}"
+//! println!("{:10.3b}", myvector); // => " 5.000"
+//! }
+//! ```
+//!
+//! ### `fmt::Display` vs `fmt::Debug`
+//!
+//! These two formatting traits have distinct purposes:
+//!
+//! - [`fmt::Display`][`Display`] implementations assert that the type can be faithfully
+//! represented as a UTF-8 string at all times. It is **not** expected that
+//! all types implement the [`Display`] trait.
+//! - [`fmt::Debug`][`Debug`] implementations should be implemented for **all** public types.
+//! Output will typically represent the internal state as faithfully as possible.
+//! The purpose of the [`Debug`] trait is to facilitate debugging Rust code. In
+//! most cases, using `#[derive(Debug)]` is sufficient and recommended.
+//!
+//! Some examples of the output from both traits:
+//!
+//! ```
+//! assert_eq!(format!("{} {:?}", 3, 4), "3 4");
+//! assert_eq!(format!("{} {:?}", 'a', 'b'), "a 'b'");
+//! assert_eq!(format!("{} {:?}", "foo\n", "bar\n"), "foo\n \"bar\\n\"");
+//! ```
+//!
+//! # Related macros
+//!
+//! There are a number of related macros in the [`format!`] family. The ones that
+//! are currently implemented are:
+//!
+//! ```ignore (only-for-syntax-highlight)
+//! format! // described above
+//! write! // first argument is a &mut io::Write, the destination
+//! writeln! // same as write but appends a newline
+//! print! // the format string is printed to the standard output
+//! println! // same as print but appends a newline
+//! eprint! // the format string is printed to the standard error
+//! eprintln! // same as eprint but appends a newline
+//! format_args! // described below.
+//! ```
+//!
+//! ### `write!`
+//!
+//! This and [`writeln!`] are two macros which are used to emit the format string
+//! to a specified stream. This is used to prevent intermediate allocations of
+//! format strings and instead directly write the output. Under the hood, this
+//! function is actually invoking the [`write_fmt`] function defined on the
+//! [`std::io::Write`] trait. Example usage is:
+//!
+//! ```
+//! # #![allow(unused_must_use)]
+//! use std::io::Write;
+//! let mut w = Vec::new();
+//! write!(&mut w, "Hello {}!", "world");
+//! ```
+//!
+//! ### `print!`
+//!
+//! This and [`println!`] emit their output to stdout. Similarly to the [`write!`]
+//! macro, the goal of these macros is to avoid intermediate allocations when
+//! printing output. Example usage is:
+//!
+//! ```
+//! print!("Hello {}!", "world");
+//! println!("I have a newline {}", "character at the end");
+//! ```
+//! ### `eprint!`
+//!
+//! The [`eprint!`] and [`eprintln!`] macros are identical to
+//! [`print!`] and [`println!`], respectively, except they emit their
+//! output to stderr.
+//!
+//! ### `format_args!`
+//!
+//! This is a curious macro used to safely pass around
+//! an opaque object describing the format string. This object
+//! does not require any heap allocations to create, and it only
+//! references information on the stack. Under the hood, all of
+//! the related macros are implemented in terms of this. First
+//! off, some example usage is:
+//!
+//! ```
+//! # #![allow(unused_must_use)]
+//! use std::fmt;
+//! use std::io::{self, Write};
+//!
+//! let mut some_writer = io::stdout();
+//! write!(&mut some_writer, "{}", format_args!("print with a {}", "macro"));
+//!
+//! fn my_fmt_fn(args: fmt::Arguments) {
+//! write!(&mut io::stdout(), "{}", args);
+//! }
+//! my_fmt_fn(format_args!(", or a {} too", "function"));
+//! ```
+//!
+//! The result of the [`format_args!`] macro is a value of type [`fmt::Arguments`].
+//! This structure can then be passed to the [`write`] and [`format`] functions
+//! inside this module in order to process the format string.
+//! The goal of this macro is to even further prevent intermediate allocations
+//! when dealing with formatting strings.
+//!
+//! For example, a logging library could use the standard formatting syntax, but
+//! it would internally pass around this structure until it has been determined
+//! where output should go to.
+//!
+//! [`usize`]: ../../std/primitive.usize.html
+//! [`isize`]: ../../std/primitive.isize.html
+//! [`i8`]: ../../std/primitive.i8.html
+//! [`Display`]: trait.Display.html
+//! [`Binary`]: trait.Binary.html
+//! [`fmt::Result`]: type.Result.html
+//! [`Result`]: ../../std/result/enum.Result.html
+//! [`std::fmt::Error`]: struct.Error.html
+//! [`Formatter`]: struct.Formatter.html
+//! [`write!`]: ../../std/macro.write.html
+//! [`Debug`]: trait.Debug.html
+//! [`format!`]: ../../std/macro.format.html
+//! [`to_string`]: ../../std/string/trait.ToString.html
+//! [`writeln!`]: ../../std/macro.writeln.html
+//! [`write_fmt`]: ../../std/io/trait.Write.html#method.write_fmt
+//! [`std::io::Write`]: ../../std/io/trait.Write.html
+//! [`print!`]: ../../std/macro.print.html
+//! [`println!`]: ../../std/macro.println.html
+//! [`eprint!`]: ../../std/macro.eprint.html
+//! [`eprintln!`]: ../../std/macro.eprintln.html
+//! [`write!`]: ../../std/macro.write.html
+//! [`format_args!`]: ../../std/macro.format_args.html
+//! [`fmt::Arguments`]: struct.Arguments.html
+//! [`write`]: fn.write.html
+//! [`format`]: fn.format.html
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+#[unstable(feature = "fmt_internals", issue = "none")]
+pub use core::fmt::rt;
+#[stable(feature = "fmt_flags_align", since = "1.28.0")]
+pub use core::fmt::Alignment;
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::Error;
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{write, ArgumentV1, Arguments};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{Binary, Octal};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{Debug, Display};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{DebugList, DebugMap, DebugSet, DebugStruct, DebugTuple};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{Formatter, Result, Write};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{LowerExp, UpperExp};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::fmt::{LowerHex, Pointer, UpperHex};
+
+use crate::string;
+
+/// The `format` function takes an [`Arguments`] struct and returns the resulting
+/// formatted string.
+///
+/// The [`Arguments`] instance can be created with the [`format_args!`] macro.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// use std::fmt;
+///
+/// let s = fmt::format(format_args!("Hello, {}!", "world"));
+/// assert_eq!(s, "Hello, world!");
+/// ```
+///
+/// Please note that using [`format!`] might be preferable.
+/// Example:
+///
+/// ```
+/// let s = format!("Hello, {}!", "world");
+/// assert_eq!(s, "Hello, world!");
+/// ```
+///
+/// [`Arguments`]: struct.Arguments.html
+/// [`format_args!`]: ../../std/macro.format_args.html
+/// [`format!`]: ../../std/macro.format.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub fn format(args: Arguments<'_>) -> string::String {
+ let capacity = args.estimated_capacity();
+ let mut output = string::String::with_capacity(capacity);
+ output.write_fmt(args).expect("a formatting trait implementation returned an error");
+ output
+}
diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
new file mode 100644
index 00000000000..90e2d2531c5
--- /dev/null
+++ b/library/alloc/src/lib.rs
@@ -0,0 +1,186 @@
+//! # The Rust core allocation and collections library
+//!
+//! This library provides smart pointers and collections for managing
+//! heap-allocated values.
+//!
+//! This library, like libcore, normally doesn’t need to be used directly
+//! since its contents are re-exported in the [`std` crate](../std/index.html).
+//! Crates that use the `#![no_std]` attribute however will typically
+//! not depend on `std`, so they’d use this crate instead.
+//!
+//! ## Boxed values
+//!
+//! The [`Box`] type is a smart pointer type. There can only be one owner of a
+//! [`Box`], and the owner can decide to mutate the contents, which live on the
+//! heap.
+//!
+//! This type can be sent among threads efficiently as the size of a `Box` value
+//! is the same as that of a pointer. Tree-like data structures are often built
+//! with boxes because each node often has only one owner, the parent.
+//!
+//! ## Reference counted pointers
+//!
+//! The [`Rc`] type is a non-threadsafe reference-counted pointer type intended
+//! for sharing memory within a thread. An [`Rc`] pointer wraps a type, `T`, and
+//! only allows access to `&T`, a shared reference.
+//!
+//! This type is useful when inherited mutability (such as using [`Box`]) is too
+//! constraining for an application, and is often paired with the [`Cell`] or
+//! [`RefCell`] types in order to allow mutation.
+//!
+//! ## Atomically reference counted pointers
+//!
+//! The [`Arc`] type is the threadsafe equivalent of the [`Rc`] type. It
+//! provides all the same functionality of [`Rc`], except it requires that the
+//! contained type `T` is shareable. Additionally, [`Arc<T>`][`Arc`] is itself
+//! sendable while [`Rc<T>`][`Rc`] is not.
+//!
+//! This type allows for shared access to the contained data, and is often
+//! paired with synchronization primitives such as mutexes to allow mutation of
+//! shared resources.
+//!
+//! ## Collections
+//!
+//! Implementations of the most common general purpose data structures are
+//! defined in this library. They are re-exported through the
+//! [standard collections library](../std/collections/index.html).
+//!
+//! ## Heap interfaces
+//!
+//! The [`alloc`](alloc/index.html) module defines the low-level interface to the
+//! default global allocator. It is not compatible with the libc allocator API.
+//!
+//! [`Arc`]: sync/index.html
+//! [`Box`]: boxed/index.html
+//! [`Cell`]: ../core/cell/index.html
+//! [`Rc`]: rc/index.html
+//! [`RefCell`]: ../core/cell/index.html
+
+#![allow(unused_attributes)]
+#![stable(feature = "alloc", since = "1.36.0")]
+#![doc(
+ html_root_url = "https://doc.rust-lang.org/nightly/",
+ html_playground_url = "https://play.rust-lang.org/",
+ issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
+ test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
+)]
+#![no_std]
+#![needs_allocator]
+#![warn(deprecated_in_future)]
+#![warn(missing_docs)]
+#![warn(missing_debug_implementations)]
+#![deny(intra_doc_link_resolution_failure)] // rustdoc is run without -D warnings
+#![allow(explicit_outlives_requirements)]
+#![allow(incomplete_features)]
+#![deny(unsafe_op_in_unsafe_fn)]
+#![cfg_attr(not(test), feature(generator_trait))]
+#![cfg_attr(test, feature(test))]
+#![feature(allocator_api)]
+#![feature(allow_internal_unstable)]
+#![feature(arbitrary_self_types)]
+#![feature(box_patterns)]
+#![feature(box_syntax)]
+#![feature(cfg_sanitize)]
+#![feature(cfg_target_has_atomic)]
+#![feature(coerce_unsized)]
+#![feature(const_btree_new)]
+#![feature(const_generics)]
+#![feature(const_in_array_repeat_expressions)]
+#![feature(cow_is_borrowed)]
+#![feature(deque_range)]
+#![feature(dispatch_from_dyn)]
+#![feature(core_intrinsics)]
+#![feature(container_error_extra)]
+#![feature(dropck_eyepatch)]
+#![feature(exact_size_is_empty)]
+#![feature(extend_one)]
+#![feature(fmt_internals)]
+#![feature(fn_traits)]
+#![feature(fundamental)]
+#![feature(internal_uninit_const)]
+#![feature(lang_items)]
+#![feature(layout_for_ptr)]
+#![feature(libc)]
+#![feature(negative_impls)]
+#![feature(new_uninit)]
+#![feature(nll)]
+#![feature(optin_builtin_traits)]
+#![feature(or_patterns)]
+#![feature(pattern)]
+#![feature(ptr_internals)]
+#![feature(ptr_offset_from)]
+#![feature(raw_ref_op)]
+#![feature(rustc_attrs)]
+#![feature(receiver_trait)]
+#![feature(min_specialization)]
+#![feature(staged_api)]
+#![feature(std_internals)]
+#![feature(str_internals)]
+#![feature(trusted_len)]
+#![feature(try_reserve)]
+#![feature(unboxed_closures)]
+#![feature(unicode_internals)]
+#![feature(unsafe_block_in_unsafe_fn)]
+#![feature(unsize)]
+#![feature(unsized_locals)]
+#![feature(allocator_internals)]
+#![feature(slice_partition_dedup)]
+#![feature(maybe_uninit_extra, maybe_uninit_slice)]
+#![feature(alloc_layout_extra)]
+#![feature(try_trait)]
+#![feature(associated_type_bounds)]
+
+// Allow testing this library
+
+#[cfg(test)]
+#[macro_use]
+extern crate std;
+#[cfg(test)]
+extern crate test;
+
+// Module with internal macros used by other modules (needs to be included before other modules).
+#[macro_use]
+mod macros;
+
+// Heaps provided for low-level allocation strategies
+
+pub mod alloc;
+
+// Primitive types using the heaps above
+
+// Need to conditionally define the mod from `boxed.rs` to avoid
+// duplicating the lang-items when building in test cfg; but also need
+// to allow code to have `use boxed::Box;` declarations.
+#[cfg(not(test))]
+pub mod boxed;
+#[cfg(test)]
+mod boxed {
+ pub use std::boxed::Box;
+}
+pub mod borrow;
+pub mod collections;
+pub mod fmt;
+pub mod prelude;
+pub mod raw_vec;
+pub mod rc;
+pub mod slice;
+pub mod str;
+pub mod string;
+#[cfg(target_has_atomic = "ptr")]
+pub mod sync;
+#[cfg(target_has_atomic = "ptr")]
+pub mod task;
+#[cfg(test)]
+mod tests;
+pub mod vec;
+
+#[cfg(not(test))]
+mod std {
+ pub use core::ops; // RangeFull
+}
+
+#[doc(hidden)]
+#[unstable(feature = "liballoc_internals", issue = "none", reason = "implementation detail")]
+pub mod __export {
+ pub use core::format_args;
+}
diff --git a/library/alloc/src/macros.rs b/library/alloc/src/macros.rs
new file mode 100644
index 00000000000..e163a166b49
--- /dev/null
+++ b/library/alloc/src/macros.rs
@@ -0,0 +1,110 @@
+/// Creates a [`Vec`] containing the arguments.
+///
+/// `vec!` allows `Vec`s to be defined with the same syntax as array expressions.
+/// There are two forms of this macro:
+///
+/// - Create a [`Vec`] containing a given list of elements:
+///
+/// ```
+/// let v = vec![1, 2, 3];
+/// assert_eq!(v[0], 1);
+/// assert_eq!(v[1], 2);
+/// assert_eq!(v[2], 3);
+/// ```
+///
+/// - Create a [`Vec`] from a given element and size:
+///
+/// ```
+/// let v = vec![1; 3];
+/// assert_eq!(v, [1, 1, 1]);
+/// ```
+///
+/// Note that unlike array expressions this syntax supports all elements
+/// which implement [`Clone`] and the number of elements doesn't have to be
+/// a constant.
+///
+/// This will use `clone` to duplicate an expression, so one should be careful
+/// using this with types having a nonstandard `Clone` implementation. For
+/// example, `vec![Rc::new(1); 5]` will create a vector of five references
+/// to the same boxed integer value, not five references pointing to independently
+/// boxed integers.
+///
+/// [`Vec`]: ../std/vec/struct.Vec.html
+/// [`Clone`]: ../std/clone/trait.Clone.html
+#[cfg(not(test))]
+#[macro_export]
+#[stable(feature = "rust1", since = "1.0.0")]
+#[allow_internal_unstable(box_syntax)]
+macro_rules! vec {
+ () => (
+ $crate::vec::Vec::new()
+ );
+ ($elem:expr; $n:expr) => (
+ $crate::vec::from_elem($elem, $n)
+ );
+ ($($x:expr),+ $(,)?) => (
+ <[_]>::into_vec(box [$($x),+])
+ );
+}
+
+// HACK(japaric): with cfg(test) the inherent `[T]::into_vec` method, which is
+// required for this macro definition, is not available. Instead use the
+// `slice::into_vec` function which is only available with cfg(test)
+// NB see the slice::hack module in slice.rs for more information
+#[cfg(test)]
+macro_rules! vec {
+ () => (
+ $crate::vec::Vec::new()
+ );
+ ($elem:expr; $n:expr) => (
+ $crate::vec::from_elem($elem, $n)
+ );
+ ($($x:expr),*) => (
+ $crate::slice::into_vec(box [$($x),*])
+ );
+ ($($x:expr,)*) => (vec![$($x),*])
+}
+
+/// Creates a `String` using interpolation of runtime expressions.
+///
+/// The first argument `format!` receives is a format string. This must be a string
+/// literal. The power of the formatting string is in the `{}`s contained.
+///
+/// Additional parameters passed to `format!` replace the `{}`s within the
+/// formatting string in the order given unless named or positional parameters
+/// are used; see [`std::fmt`][fmt] for more information.
+///
+/// A common use for `format!` is concatenation and interpolation of strings.
+/// The same convention is used with [`print!`] and [`write!`] macros,
+/// depending on the intended destination of the string.
+///
+/// To convert a single value to a string, use the [`to_string`] method. This
+/// will use the [`Display`] formatting trait.
+///
+/// [fmt]: ../std/fmt/index.html
+/// [`print!`]: ../std/macro.print.html
+/// [`write!`]: ../std/macro.write.html
+/// [`to_string`]: ../std/string/trait.ToString.html
+/// [`Display`]: ../std/fmt/trait.Display.html
+///
+/// # Panics
+///
+/// `format!` panics if a formatting trait implementation returns an error.
+/// This indicates an incorrect implementation
+/// since `fmt::Write for String` never returns an error itself.
+///
+/// # Examples
+///
+/// ```
+/// format!("test");
+/// format!("hello {}", "world!");
+/// format!("x = {}, y = {y}", 10, y = 30);
+/// ```
+#[macro_export]
+#[stable(feature = "rust1", since = "1.0.0")]
+macro_rules! format {
+ ($($arg:tt)*) => {{
+ let res = $crate::fmt::format($crate::__export::format_args!($($arg)*));
+ res
+ }}
+}
diff --git a/library/alloc/src/prelude/mod.rs b/library/alloc/src/prelude/mod.rs
new file mode 100644
index 00000000000..0534ad3edc7
--- /dev/null
+++ b/library/alloc/src/prelude/mod.rs
@@ -0,0 +1,15 @@
+//! The alloc Prelude
+//!
+//! The purpose of this module is to alleviate imports of commonly-used
+//! items of the `alloc` crate by adding a glob import to the top of modules:
+//!
+//! ```
+//! # #![allow(unused_imports)]
+//! #![feature(alloc_prelude)]
+//! extern crate alloc;
+//! use alloc::prelude::v1::*;
+//! ```
+
+#![unstable(feature = "alloc_prelude", issue = "58935")]
+
+pub mod v1;
diff --git a/library/alloc/src/prelude/v1.rs b/library/alloc/src/prelude/v1.rs
new file mode 100644
index 00000000000..6a53b4ca1f6
--- /dev/null
+++ b/library/alloc/src/prelude/v1.rs
@@ -0,0 +1,14 @@
+//! The first version of the prelude of `alloc` crate.
+//!
+//! See the [module-level documentation](../index.html) for more.
+
+#![unstable(feature = "alloc_prelude", issue = "58935")]
+
+#[unstable(feature = "alloc_prelude", issue = "58935")]
+pub use crate::borrow::ToOwned;
+#[unstable(feature = "alloc_prelude", issue = "58935")]
+pub use crate::boxed::Box;
+#[unstable(feature = "alloc_prelude", issue = "58935")]
+pub use crate::string::{String, ToString};
+#[unstable(feature = "alloc_prelude", issue = "58935")]
+pub use crate::vec::Vec;
diff --git a/library/alloc/src/raw_vec.rs b/library/alloc/src/raw_vec.rs
new file mode 100644
index 00000000000..ed81ce71ddf
--- /dev/null
+++ b/library/alloc/src/raw_vec.rs
@@ -0,0 +1,536 @@
+#![unstable(feature = "raw_vec_internals", reason = "implementation detail", issue = "none")]
+#![doc(hidden)]
+
+use core::alloc::{LayoutErr, MemoryBlock};
+use core::cmp;
+use core::mem::{self, ManuallyDrop, MaybeUninit};
+use core::ops::Drop;
+use core::ptr::{NonNull, Unique};
+use core::slice;
+
+use crate::alloc::{
+ handle_alloc_error,
+ AllocInit::{self, *},
+ AllocRef, Global, Layout,
+ ReallocPlacement::{self, *},
+};
+use crate::boxed::Box;
+use crate::collections::TryReserveError::{self, *};
+
+#[cfg(test)]
+mod tests;
+
+/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
+/// a buffer of memory on the heap without having to worry about all the corner cases
+/// involved. This type is excellent for building your own data structures like Vec and VecDeque.
+/// In particular:
+///
+/// * Produces `Unique::dangling()` on zero-sized types.
+/// * Produces `Unique::dangling()` on zero-length allocations.
+/// * Avoids freeing `Unique::dangling()`.
+/// * Catches all overflows in capacity computations (promotes them to "capacity overflow" panics).
+/// * Guards against 32-bit systems allocating more than isize::MAX bytes.
+/// * Guards against overflowing your length.
+/// * Calls `handle_alloc_error` for fallible allocations.
+/// * Contains a `ptr::Unique` and thus endows the user with all related benefits.
+/// * Uses the excess returned from the allocator to use the largest available capacity.
+///
+/// This type does not in any way inspect the memory that it manages. When dropped it *will*
+/// free its memory, but it *won't* try to drop its contents. It is up to the user of `RawVec`
+/// to handle the actual things *stored* inside of a `RawVec`.
+///
+/// Note that the excess of zero-sized types is always infinite, so `capacity()` always returns
+/// `usize::MAX`. This means that you need to be careful when round-tripping this type with a
+/// `Box<[T]>`, since `capacity()` won't yield the length.
+#[allow(missing_debug_implementations)]
+pub struct RawVec<T, A: AllocRef = Global> {
+ ptr: Unique<T>,
+ cap: usize,
+ alloc: A,
+}
+
+impl<T> RawVec<T, Global> {
+ /// HACK(Centril): This exists because `#[unstable]` `const fn`s needn't conform
+ /// to `min_const_fn` and so they cannot be called in `min_const_fn`s either.
+ ///
+ /// If you change `RawVec<T>::new` or dependencies, please take care to not
+ /// introduce anything that would truly violate `min_const_fn`.
+ ///
+ /// NOTE: We could avoid this hack and check conformance with some
+ /// `#[rustc_force_min_const_fn]` attribute which requires conformance
+ /// with `min_const_fn` but does not necessarily allow calling it in
+ /// `stable(...) const fn` / user code not enabling `foo` when
+ /// `#[rustc_const_unstable(feature = "foo", issue = "01234")]` is present.
+ pub const NEW: Self = Self::new();
+
+ /// Creates the biggest possible `RawVec` (on the system heap)
+ /// without allocating. If `T` has positive size, then this makes a
+ /// `RawVec` with capacity `0`. If `T` is zero-sized, then it makes a
+ /// `RawVec` with capacity `usize::MAX`. Useful for implementing
+ /// delayed allocation.
+ pub const fn new() -> Self {
+ Self::new_in(Global)
+ }
+
+ /// Creates a `RawVec` (on the system heap) with exactly the
+ /// capacity and alignment requirements for a `[T; capacity]`. This is
+ /// equivalent to calling `RawVec::new` when `capacity` is `0` or `T` is
+ /// zero-sized. Note that if `T` is zero-sized this means you will
+ /// *not* get a `RawVec` with the requested capacity.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the requested capacity exceeds `isize::MAX` bytes.
+ ///
+ /// # Aborts
+ ///
+ /// Aborts on OOM.
+ #[inline]
+ pub fn with_capacity(capacity: usize) -> Self {
+ Self::with_capacity_in(capacity, Global)
+ }
+
+ /// Like `with_capacity`, but guarantees the buffer is zeroed.
+ #[inline]
+ pub fn with_capacity_zeroed(capacity: usize) -> Self {
+ Self::with_capacity_zeroed_in(capacity, Global)
+ }
+
+ /// Reconstitutes a `RawVec` from a pointer and capacity.
+ ///
+ /// # Safety
+ ///
+ /// The `ptr` must be allocated (on the system heap), and with the given `capacity`.
+/// The `capacity` cannot exceed `isize::MAX` for sized types (this is only a concern on 32-bit
+ /// systems). ZST vectors may have a capacity up to `usize::MAX`.
+ /// If the `ptr` and `capacity` come from a `RawVec`, then this is guaranteed.
+ #[inline]
+ pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
+ unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
+ }
+
+ /// Converts a `Box<[T]>` into a `RawVec<T>`.
+ pub fn from_box(slice: Box<[T]>) -> Self {
+ unsafe {
+ let mut slice = ManuallyDrop::new(slice);
+ RawVec::from_raw_parts(slice.as_mut_ptr(), slice.len())
+ }
+ }
+
+ /// Converts the entire buffer into `Box<[MaybeUninit<T>]>` with the specified `len`.
+ ///
+ /// Note that this will correctly reconstitute any `cap` changes
+ /// that may have been performed. (See description of type for details.)
+ ///
+ /// # Safety
+ ///
+ /// * `len` must be greater than or equal to the most recently requested capacity, and
+ /// * `len` must be less than or equal to `self.capacity()`.
+ ///
+/// Note that the requested capacity and `self.capacity()` could differ, as
+ /// an allocator could overallocate and return a greater memory block than requested.
+ pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>]> {
+ // Sanity-check one half of the safety requirement (we cannot check the other half).
+ debug_assert!(
+ len <= self.capacity(),
+ "`len` must be smaller than or equal to `self.capacity()`"
+ );
+
+ let me = ManuallyDrop::new(self);
+ unsafe {
+ let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
+ Box::from_raw(slice)
+ }
+ }
+}
+
+impl<T, A: AllocRef> RawVec<T, A> {
+ /// Like `new`, but parameterized over the choice of allocator for
+ /// the returned `RawVec`.
+ pub const fn new_in(alloc: A) -> Self {
+ // `cap: 0` means "unallocated". zero-sized types are ignored.
+ Self { ptr: Unique::dangling(), cap: 0, alloc }
+ }
+
+ /// Like `with_capacity`, but parameterized over the choice of
+ /// allocator for the returned `RawVec`.
+ #[inline]
+ pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+ Self::allocate_in(capacity, Uninitialized, alloc)
+ }
+
+ /// Like `with_capacity_zeroed`, but parameterized over the choice
+ /// of allocator for the returned `RawVec`.
+ #[inline]
+ pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
+ Self::allocate_in(capacity, Zeroed, alloc)
+ }
+
+ fn allocate_in(capacity: usize, init: AllocInit, mut alloc: A) -> Self {
+ if mem::size_of::<T>() == 0 {
+ Self::new_in(alloc)
+ } else {
+ // We avoid `unwrap_or_else` here because it bloats the amount of
+ // LLVM IR generated.
+ let layout = match Layout::array::<T>(capacity) {
+ Ok(layout) => layout,
+ Err(_) => capacity_overflow(),
+ };
+ match alloc_guard(layout.size()) {
+ Ok(_) => {}
+ Err(_) => capacity_overflow(),
+ }
+ let memory = match alloc.alloc(layout, init) {
+ Ok(memory) => memory,
+ Err(_) => handle_alloc_error(layout),
+ };
+
+ Self {
+ ptr: unsafe { Unique::new_unchecked(memory.ptr.cast().as_ptr()) },
+ cap: Self::capacity_from_bytes(memory.size),
+ alloc,
+ }
+ }
+ }
+
+ /// Reconstitutes a `RawVec` from a pointer, capacity, and allocator.
+ ///
+ /// # Safety
+ ///
+ /// The `ptr` must be allocated (via the given allocator `a`), and with the given `capacity`.
+/// The `capacity` cannot exceed `isize::MAX` for sized types (this is only a concern on 32-bit
+ /// systems). ZST vectors may have a capacity up to `usize::MAX`.
+ /// If the `ptr` and `capacity` come from a `RawVec` created via `a`, then this is guaranteed.
+ #[inline]
+ pub unsafe fn from_raw_parts_in(ptr: *mut T, capacity: usize, a: A) -> Self {
+ Self { ptr: unsafe { Unique::new_unchecked(ptr) }, cap: capacity, alloc: a }
+ }
+
+ /// Gets a raw pointer to the start of the allocation. Note that this is
+ /// `Unique::dangling()` if `capacity == 0` or `T` is zero-sized. In the former case, you must
+ /// be careful.
+ pub fn ptr(&self) -> *mut T {
+ self.ptr.as_ptr()
+ }
+
+ /// Gets the capacity of the allocation.
+ ///
+ /// This will always be `usize::MAX` if `T` is zero-sized.
+ #[inline(always)]
+ pub fn capacity(&self) -> usize {
+ if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
+ }
+
+ /// Returns a shared reference to the allocator backing this `RawVec`.
+ pub fn alloc(&self) -> &A {
+ &self.alloc
+ }
+
+ /// Returns a mutable reference to the allocator backing this `RawVec`.
+ pub fn alloc_mut(&mut self) -> &mut A {
+ &mut self.alloc
+ }
+
+ fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
+ if mem::size_of::<T>() == 0 || self.cap == 0 {
+ None
+ } else {
+ // We have an allocated chunk of memory, so we can bypass runtime
+ // checks to get our current layout.
+ unsafe {
+ let align = mem::align_of::<T>();
+ let size = mem::size_of::<T>() * self.cap;
+ let layout = Layout::from_size_align_unchecked(size, align);
+ Some((self.ptr.cast().into(), layout))
+ }
+ }
+ }
+
+ /// Ensures that the buffer contains at least enough space to hold `len +
+ /// additional` elements. If it doesn't already have enough capacity, will
+ /// reallocate enough space plus comfortable slack space to get amortized
+ /// `O(1)` behavior. Will limit this behavior if it would needlessly cause
+ /// itself to panic.
+ ///
+ /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
+ /// the requested space. This is not really unsafe, but the unsafe
+ /// code *you* write that relies on the behavior of this function may break.
+ ///
+ /// This is ideal for implementing a bulk-push operation like `extend`.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity exceeds `isize::MAX` bytes.
+ ///
+ /// # Aborts
+ ///
+ /// Aborts on OOM.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![feature(raw_vec_internals)]
+ /// # extern crate alloc;
+ /// # use std::ptr;
+ /// # use alloc::raw_vec::RawVec;
+ /// struct MyVec<T> {
+ /// buf: RawVec<T>,
+ /// len: usize,
+ /// }
+ ///
+ /// impl<T: Clone> MyVec<T> {
+ /// pub fn push_all(&mut self, elems: &[T]) {
+ /// self.buf.reserve(self.len, elems.len());
+ /// // reserve would have aborted or panicked if the len exceeded
+ /// // `isize::MAX` so this is safe to do unchecked now.
+ /// for x in elems {
+ /// unsafe {
+ /// ptr::write(self.buf.ptr().add(self.len), x.clone());
+ /// }
+ /// self.len += 1;
+ /// }
+ /// }
+ /// }
+ /// # fn main() {
+ /// # let mut vector = MyVec { buf: RawVec::new(), len: 0 };
+ /// # vector.push_all(&[1, 3, 5, 7, 9]);
+ /// # }
+ /// ```
+ pub fn reserve(&mut self, len: usize, additional: usize) {
+ match self.try_reserve(len, additional) {
+ Err(CapacityOverflow) => capacity_overflow(),
+ Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+ Ok(()) => { /* yay */ }
+ }
+ }
+
+ /// The same as `reserve`, but returns on errors instead of panicking or aborting.
+ pub fn try_reserve(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
+ if self.needs_to_grow(len, additional) {
+ self.grow_amortized(len, additional)
+ } else {
+ Ok(())
+ }
+ }
+
+ /// Ensures that the buffer contains at least enough space to hold `len +
+ /// additional` elements. If it doesn't already, will reallocate the
+ /// minimum possible amount of memory necessary. Generally this will be
+ /// exactly the amount of memory necessary, but in principle the allocator
+ /// is free to give back more than we asked for.
+ ///
+ /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
+ /// the requested space. This is not really unsafe, but the unsafe code
+ /// *you* write that relies on the behavior of this function may break.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity exceeds `isize::MAX` bytes.
+ ///
+ /// # Aborts
+ ///
+ /// Aborts on OOM.
+ pub fn reserve_exact(&mut self, len: usize, additional: usize) {
+ match self.try_reserve_exact(len, additional) {
+ Err(CapacityOverflow) => capacity_overflow(),
+ Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+ Ok(()) => { /* yay */ }
+ }
+ }
+
+ /// The same as `reserve_exact`, but returns on errors instead of panicking or aborting.
+ pub fn try_reserve_exact(
+ &mut self,
+ len: usize,
+ additional: usize,
+ ) -> Result<(), TryReserveError> {
+ if self.needs_to_grow(len, additional) { self.grow_exact(len, additional) } else { Ok(()) }
+ }
+
+ /// Shrinks the allocation down to the specified amount. If the given amount
+ /// is 0, actually completely deallocates.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the given amount is *larger* than the current capacity.
+ ///
+ /// # Aborts
+ ///
+ /// Aborts on OOM.
+ pub fn shrink_to_fit(&mut self, amount: usize) {
+ match self.shrink(amount, MayMove) {
+ Err(CapacityOverflow) => capacity_overflow(),
+ Err(AllocError { layout, .. }) => handle_alloc_error(layout),
+ Ok(()) => { /* yay */ }
+ }
+ }
+}
+
+impl<T, A: AllocRef> RawVec<T, A> {
+ /// Returns if the buffer needs to grow to fulfill the needed extra capacity.
+ /// Mainly used to make inlining reserve-calls possible without inlining `grow`.
+ fn needs_to_grow(&self, len: usize, additional: usize) -> bool {
+ additional > self.capacity().wrapping_sub(len)
+ }
+
+ fn capacity_from_bytes(excess: usize) -> usize {
+ debug_assert_ne!(mem::size_of::<T>(), 0);
+ excess / mem::size_of::<T>()
+ }
+
+ fn set_memory(&mut self, memory: MemoryBlock) {
+ self.ptr = unsafe { Unique::new_unchecked(memory.ptr.cast().as_ptr()) };
+ self.cap = Self::capacity_from_bytes(memory.size);
+ }
+
+ // This method is usually instantiated many times. So we want it to be as
+ // small as possible, to improve compile times. But we also want as much of
+ // its contents to be statically computable as possible, to make the
+ // generated code run faster. Therefore, this method is carefully written
+ // so that all of the code that depends on `T` is within it, while as much
+ // of the code that doesn't depend on `T` as possible is in functions that
+ // are non-generic over `T`.
+ fn grow_amortized(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
+ // This is ensured by the calling contexts.
+ debug_assert!(additional > 0);
+
+ if mem::size_of::<T>() == 0 {
+ // Since we return a capacity of `usize::MAX` when `elem_size` is
+ // 0, getting to here necessarily means the `RawVec` is overfull.
+ return Err(CapacityOverflow);
+ }
+
+ // Nothing we can really do about these checks, sadly.
+ let required_cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
+
+ // This guarantees exponential growth. The doubling cannot overflow
+ // because `cap <= isize::MAX` and the type of `cap` is `usize`.
+ let cap = cmp::max(self.cap * 2, required_cap);
+
+ // Tiny Vecs are dumb. Skip to:
+ // - 8 if the element size is 1, because any heap allocators is likely
+ // to round up a request of less than 8 bytes to at least 8 bytes.
+ // - 4 if elements are moderate-sized (<= 1 KiB).
+ // - 1 otherwise, to avoid wasting too much space for very short Vecs.
+ // Note that `min_non_zero_cap` is computed statically.
+ let elem_size = mem::size_of::<T>();
+ let min_non_zero_cap = if elem_size == 1 {
+ 8
+ } else if elem_size <= 1024 {
+ 4
+ } else {
+ 1
+ };
+ let cap = cmp::max(min_non_zero_cap, cap);
+
+ let new_layout = Layout::array::<T>(cap);
+
+ // `finish_grow` is non-generic over `T`.
+ let memory = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
+ self.set_memory(memory);
+ Ok(())
+ }
+
+ // The constraints on this method are much the same as those on
+ // `grow_amortized`, but this method is usually instantiated less often so
+ // it's less critical.
+ fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
+ if mem::size_of::<T>() == 0 {
+ // Since we return a capacity of `usize::MAX` when the type size is
+ // 0, getting to here necessarily means the `RawVec` is overfull.
+ return Err(CapacityOverflow);
+ }
+
+ let cap = len.checked_add(additional).ok_or(CapacityOverflow)?;
+ let new_layout = Layout::array::<T>(cap);
+
+ // `finish_grow` is non-generic over `T`.
+ let memory = finish_grow(new_layout, self.current_memory(), &mut self.alloc)?;
+ self.set_memory(memory);
+ Ok(())
+ }
+
+ fn shrink(
+ &mut self,
+ amount: usize,
+ placement: ReallocPlacement,
+ ) -> Result<(), TryReserveError> {
+ assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity");
+
+ let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
+ let new_size = amount * mem::size_of::<T>();
+
+ let memory = unsafe {
+ self.alloc.shrink(ptr, layout, new_size, placement).map_err(|_| {
+ TryReserveError::AllocError {
+ layout: Layout::from_size_align_unchecked(new_size, layout.align()),
+ non_exhaustive: (),
+ }
+ })?
+ };
+ self.set_memory(memory);
+ Ok(())
+ }
+}
+
+// This function is outside `RawVec` to minimize compile times. See the comment
+// above `RawVec::grow_amortized` for details. (The `A` parameter isn't
+// significant, because the number of different `A` types seen in practice is
+// much smaller than the number of `T` types.)
+fn finish_grow<A>(
+ new_layout: Result<Layout, LayoutErr>,
+ current_memory: Option<(NonNull<u8>, Layout)>,
+ alloc: &mut A,
+) -> Result<MemoryBlock, TryReserveError>
+where
+ A: AllocRef,
+{
+ // Check for the error here to minimize the size of `RawVec::grow_*`.
+ let new_layout = new_layout.map_err(|_| CapacityOverflow)?;
+
+ alloc_guard(new_layout.size())?;
+
+ let memory = if let Some((ptr, old_layout)) = current_memory {
+ debug_assert_eq!(old_layout.align(), new_layout.align());
+ unsafe { alloc.grow(ptr, old_layout, new_layout.size(), MayMove, Uninitialized) }
+ } else {
+ alloc.alloc(new_layout, Uninitialized)
+ }
+ .map_err(|_| AllocError { layout: new_layout, non_exhaustive: () })?;
+
+ Ok(memory)
+}
+
+unsafe impl<#[may_dangle] T, A: AllocRef> Drop for RawVec<T, A> {
+ /// Frees the memory owned by the `RawVec` *without* trying to drop its contents.
+ fn drop(&mut self) {
+ if let Some((ptr, layout)) = self.current_memory() {
+ unsafe { self.alloc.dealloc(ptr, layout) }
+ }
+ }
+}
+
+// We need to guarantee the following:
+// * We don't ever allocate `> isize::MAX` byte-size objects.
+// * We don't overflow `usize::MAX` and actually allocate too little.
+//
+// On 64-bit we just need to check for overflow since trying to allocate
+// `> isize::MAX` bytes will surely fail. On 32-bit and 16-bit we need to add
+// an extra guard for this in case we're running on a platform which can use
+// all 4GB in user-space, e.g., PAE or x32.
+
+#[inline]
+fn alloc_guard(alloc_size: usize) -> Result<(), TryReserveError> {
+ if mem::size_of::<usize>() < 8 && alloc_size > isize::MAX as usize {
+ Err(CapacityOverflow)
+ } else {
+ Ok(())
+ }
+}
+
+// One central function responsible for reporting capacity overflows. This'll
+// ensure that the code generation related to these panics is minimal as there's
+// only one location which panics rather than a bunch throughout the module.
+fn capacity_overflow() -> ! {
+ panic!("capacity overflow");
+}
diff --git a/library/alloc/src/raw_vec/tests.rs b/library/alloc/src/raw_vec/tests.rs
new file mode 100644
index 00000000000..5408faa079c
--- /dev/null
+++ b/library/alloc/src/raw_vec/tests.rs
@@ -0,0 +1,78 @@
+use super::*;
+
+#[test]
+fn allocator_param() {
+ use crate::alloc::AllocErr;
+
+ // Writing a test of integration between third-party
+ // allocators and `RawVec` is a little tricky because the `RawVec`
+ // API does not expose fallible allocation methods, so we
+ // cannot check what happens when allocator is exhausted
+ // (beyond detecting a panic).
+ //
+ // Instead, this just checks that the `RawVec` methods do at
+ // least go through the Allocator API when it reserves
+ // storage.
+
+ // A dumb allocator that consumes a fixed amount of fuel
+ // before allocation attempts start failing.
+ struct BoundedAlloc {
+ fuel: usize,
+ }
+ unsafe impl AllocRef for BoundedAlloc {
+ fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, AllocErr> {
+ let size = layout.size();
+ if size > self.fuel {
+ return Err(AllocErr);
+ }
+ match Global.alloc(layout, init) {
+ ok @ Ok(_) => {
+ self.fuel -= size;
+ ok
+ }
+ err @ Err(_) => err,
+ }
+ }
+ unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+ unsafe { Global.dealloc(ptr, layout) }
+ }
+ }
+
+ let a = BoundedAlloc { fuel: 500 };
+ let mut v: RawVec<u8, _> = RawVec::with_capacity_in(50, a);
+ assert_eq!(v.alloc.fuel, 450);
+ v.reserve(50, 150); // (causes a realloc, thus using 50 + 150 = 200 units of fuel)
+ assert_eq!(v.alloc.fuel, 250);
+}
+
+#[test]
+fn reserve_does_not_overallocate() {
+ {
+ let mut v: RawVec<u32> = RawVec::new();
+ // First, `reserve` allocates like `reserve_exact`.
+ v.reserve(0, 9);
+ assert_eq!(9, v.capacity());
+ }
+
+ {
+ let mut v: RawVec<u32> = RawVec::new();
+ v.reserve(0, 7);
+ assert_eq!(7, v.capacity());
+ // 97 is more than double of 7, so `reserve` should work
+ // like `reserve_exact`.
+ v.reserve(7, 90);
+ assert_eq!(97, v.capacity());
+ }
+
+ {
+ let mut v: RawVec<u32> = RawVec::new();
+ v.reserve(0, 12);
+ assert_eq!(12, v.capacity());
+ v.reserve(12, 3);
+ // 3 is less than half of 12, so `reserve` must grow
+ // exponentially. At the time of writing this test grow
+ // factor is 2, so new capacity is 24, however, grow factor
+ // of 1.5 is OK too. Hence `>= 18` in assert.
+ assert!(v.capacity() >= 12 + 12 / 2);
+ }
+}
diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
new file mode 100644
index 00000000000..96dfc2f4251
--- /dev/null
+++ b/library/alloc/src/rc.rs
@@ -0,0 +1,2138 @@
+//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
+//! Counted'.
+//!
+//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
+//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
+//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
+//! given allocation is destroyed, the value stored in that allocation (often
+//! referred to as "inner value") is also dropped.
+//!
+//! Shared references in Rust disallow mutation by default, and [`Rc`]
+//! is no exception: you cannot generally obtain a mutable reference to
+//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
+//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
+//! inside an Rc][mutability].
+//!
+//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
+//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
+//! does not implement [`Send`][send]. As a result, the Rust compiler
+//! will check *at compile time* that you are not sending [`Rc`]s between
+//! threads. If you need multi-threaded, atomic reference counting, use
+//! [`sync::Arc`][arc].
+//!
+//! The [`downgrade`][downgrade] method can be used to create a non-owning
+//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
+//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
+//! already been dropped. In other words, `Weak` pointers do not keep the value
+//! inside the allocation alive; however, they *do* keep the allocation
+//! (the backing store for the inner value) alive.
+//!
+//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
+//! [`Weak`] is used to break cycles. For example, a tree could have strong
+//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
+//! children back to their parents.
+//!
+//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
+//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
+//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
+//! functions, called using function-like syntax:
+//!
+//! ```
+//! use std::rc::Rc;
+//! let my_rc = Rc::new(());
+//!
+//! Rc::downgrade(&my_rc);
+//! ```
+//!
+//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
+//! already been dropped.
+//!
+//! # Cloning references
+//!
+//! Creating a new reference to the same allocation as an existing reference counted pointer
+//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
+//!
+//! ```
+//! use std::rc::Rc;
+//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
+//! // The two syntaxes below are equivalent.
+//! let a = foo.clone();
+//! let b = Rc::clone(&foo);
+//! // a and b both point to the same memory location as foo.
+//! ```
+//!
+//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
+//! the meaning of the code. In the example above, this syntax makes it easier to see that
+//! this code is creating a new reference rather than copying the whole content of foo.
+//!
+//! # Examples
+//!
+//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
+//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
+//! unique ownership, because more than one gadget may belong to the same
+//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
+//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
+//!
+//! ```
+//! use std::rc::Rc;
+//!
+//! struct Owner {
+//! name: String,
+//! // ...other fields
+//! }
+//!
+//! struct Gadget {
+//! id: i32,
+//! owner: Rc<Owner>,
+//! // ...other fields
+//! }
+//!
+//! fn main() {
+//! // Create a reference-counted `Owner`.
+//! let gadget_owner: Rc<Owner> = Rc::new(
+//! Owner {
+//! name: "Gadget Man".to_string(),
+//! }
+//! );
+//!
+//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
+//! // gives us a new pointer to the same `Owner` allocation, incrementing
+//! // the reference count in the process.
+//! let gadget1 = Gadget {
+//! id: 1,
+//! owner: Rc::clone(&gadget_owner),
+//! };
+//! let gadget2 = Gadget {
+//! id: 2,
+//! owner: Rc::clone(&gadget_owner),
+//! };
+//!
+//! // Dispose of our local variable `gadget_owner`.
+//! drop(gadget_owner);
+//!
+//! // Despite dropping `gadget_owner`, we're still able to print out the name
+//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a
+//! // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
+//! // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
+//! // live. The field projection `gadget1.owner.name` works because
+//! // `Rc<Owner>` automatically dereferences to `Owner`.
+//! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
+//! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
+//!
+//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and
+//! // with them the last counted references to our `Owner`. Gadget Man now
+//! // gets destroyed as well.
+//! }
+//! ```
+//!
+//! If our requirements change, and we also need to be able to traverse from
+//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
+//! to `Gadget` introduces a cycle. This means that their
+//! reference counts can never reach 0, and the allocation will never be destroyed:
+//! a memory leak. In order to get around this, we can use [`Weak`]
+//! pointers.
+//!
+//! Rust actually makes it somewhat difficult to produce this loop in the first
+//! place. In order to end up with two values that point at each other, one of
+//! them needs to be mutable. This is difficult because [`Rc`] enforces
+//! memory safety by only giving out shared references to the value it wraps,
+//! and these don't allow direct mutation. We need to wrap the part of the
+//! value we wish to mutate in a [`RefCell`], which provides *interior
+//! mutability*: a method to achieve mutability through a shared reference.
+//! [`RefCell`] enforces Rust's borrowing rules at runtime.
+//!
+//! ```
+//! use std::rc::Rc;
+//! use std::rc::Weak;
+//! use std::cell::RefCell;
+//!
+//! struct Owner {
+//! name: String,
+//! gadgets: RefCell<Vec<Weak<Gadget>>>,
+//! // ...other fields
+//! }
+//!
+//! struct Gadget {
+//! id: i32,
+//! owner: Rc<Owner>,
+//! // ...other fields
+//! }
+//!
+//! fn main() {
+//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
+//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
+//! // a shared reference.
+//! let gadget_owner: Rc<Owner> = Rc::new(
+//! Owner {
+//! name: "Gadget Man".to_string(),
+//! gadgets: RefCell::new(vec![]),
+//! }
+//! );
+//!
+//! // Create `Gadget`s belonging to `gadget_owner`, as before.
+//! let gadget1 = Rc::new(
+//! Gadget {
+//! id: 1,
+//! owner: Rc::clone(&gadget_owner),
+//! }
+//! );
+//! let gadget2 = Rc::new(
+//! Gadget {
+//! id: 2,
+//! owner: Rc::clone(&gadget_owner),
+//! }
+//! );
+//!
+//! // Add the `Gadget`s to their `Owner`.
+//! {
+//! let mut gadgets = gadget_owner.gadgets.borrow_mut();
+//! gadgets.push(Rc::downgrade(&gadget1));
+//! gadgets.push(Rc::downgrade(&gadget2));
+//!
+//! // `RefCell` dynamic borrow ends here.
+//! }
+//!
+//! // Iterate over our `Gadget`s, printing their details out.
+//! for gadget_weak in gadget_owner.gadgets.borrow().iter() {
+//!
+//! // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
+//! // guarantee the allocation still exists, we need to call
+//! // `upgrade`, which returns an `Option<Rc<Gadget>>`.
+//! //
+//! // In this case we know the allocation still exists, so we simply
+//! // `unwrap` the `Option`. In a more complicated program, you might
+//! // need graceful error handling for a `None` result.
+//!
+//! let gadget = gadget_weak.upgrade().unwrap();
+//! println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
+//! }
+//!
+//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
+//! // are destroyed. There are now no strong (`Rc`) pointers to the
+//! // gadgets, so they are destroyed. This zeroes the reference count on
+//! // Gadget Man, so he gets destroyed as well.
+//! }
+//! ```
+//!
+//! [`Rc`]: struct.Rc.html
+//! [`Weak`]: struct.Weak.html
+//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+//! [`Cell`]: ../../std/cell/struct.Cell.html
+//! [`RefCell`]: ../../std/cell/struct.RefCell.html
+//! [send]: ../../std/marker/trait.Send.html
+//! [arc]: ../../std/sync/struct.Arc.html
+//! [`Deref`]: ../../std/ops/trait.Deref.html
+//! [downgrade]: struct.Rc.html#method.downgrade
+//! [upgrade]: struct.Weak.html#method.upgrade
+//! [`None`]: ../../std/option/enum.Option.html#variant.None
+//! [mutability]: ../../std/cell/index.html#introducing-mutability-inside-of-something-immutable
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+#[cfg(not(test))]
+use crate::boxed::Box;
+#[cfg(test)]
+use std::boxed::Box;
+
+use core::any::Any;
+use core::borrow;
+use core::cell::Cell;
+use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::intrinsics::abort;
+use core::iter;
+use core::marker::{self, PhantomData, Unpin, Unsize};
+use core::mem::{self, align_of_val_raw, forget, size_of_val};
+use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
+use core::pin::Pin;
+use core::ptr::{self, NonNull};
+use core::slice::from_raw_parts_mut;
+
+use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use crate::borrow::{Cow, ToOwned};
+use crate::string::String;
+use crate::vec::Vec;
+
+#[cfg(test)]
+mod tests;
+
+// This is repr(C) to future-proof against possible field-reordering, which
+// would interfere with otherwise safe [into|from]_raw() of transmutable
+// inner types.
+#[repr(C)]
+struct RcBox<T: ?Sized> {
+ strong: Cell<usize>,
+ weak: Cell<usize>,
+ value: T,
+}
+
+/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
+/// Counted'.
+///
+/// See the [module-level documentation](./index.html) for more details.
+///
+/// The inherent methods of `Rc` are all associated functions, which means
+/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
+/// `value.get_mut()`. This avoids conflicts with methods of the inner
+/// type `T`.
+///
+/// [get_mut]: #method.get_mut
+#[cfg_attr(not(test), rustc_diagnostic_item = "Rc")]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Rc<T: ?Sized> {
+ ptr: NonNull<RcBox<T>>,
+ phantom: PhantomData<RcBox<T>>,
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> !marker::Send for Rc<T> {}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> !marker::Sync for Rc<T> {}
+
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Rc<U>> for Rc<T> {}
+
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
+
+impl<T: ?Sized> Rc<T> {
+ fn from_inner(ptr: NonNull<RcBox<T>>) -> Self {
+ Self { ptr, phantom: PhantomData }
+ }
+
+ unsafe fn from_ptr(ptr: *mut RcBox<T>) -> Self {
+ Self::from_inner(unsafe { NonNull::new_unchecked(ptr) })
+ }
+}
+
+impl<T> Rc<T> {
+ /// Constructs a new `Rc<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn new(value: T) -> Rc<T> {
+ // There is an implicit weak pointer owned by all the strong
+ // pointers, which ensures that the weak destructor never frees
+ // the allocation while the strong destructor is running, even
+ // if the weak pointer is stored inside the strong one.
+ Self::from_inner(
+ Box::leak(box RcBox { strong: Cell::new(1), weak: Cell::new(1), value }).into(),
+ )
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let mut five = Rc::<u32>::new_uninit();
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
+ unsafe {
+ Rc::from_ptr(Rc::allocate_for_layout(Layout::new::<T>(), |mem| {
+ mem as *mut RcBox<mem::MaybeUninit<T>>
+ }))
+ }
+ }
+
+ /// Constructs a new `Rc` with uninitialized contents, with the memory
+ /// being filled with `0` bytes.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
+ /// incorrect usage of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let zero = Rc::<u32>::new_zeroed();
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0)
+ /// ```
+ ///
+ /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
+ unsafe {
+ let mut uninit = Self::new_uninit();
+ ptr::write_bytes::<T>(Rc::get_mut_unchecked(&mut uninit).as_mut_ptr(), 0, 1);
+ uninit
+ }
+ }
+
+ /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
+ /// `value` will be pinned in memory and unable to be moved.
+ #[stable(feature = "pin", since = "1.33.0")]
+ pub fn pin(value: T) -> Pin<Rc<T>> {
+ unsafe { Pin::new_unchecked(Rc::new(value)) }
+ }
+
+ /// Returns the inner value, if the `Rc` has exactly one strong reference.
+ ///
+ /// Otherwise, an [`Err`][result] is returned with the same `Rc` that was
+ /// passed in.
+ ///
+ /// This will succeed even if there are outstanding weak references.
+ ///
+ /// [result]: ../../std/result/enum.Result.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let x = Rc::new(3);
+ /// assert_eq!(Rc::try_unwrap(x), Ok(3));
+ ///
+ /// let x = Rc::new(4);
+ /// let _y = Rc::clone(&x);
+ /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_unique", since = "1.4.0")]
+ pub fn try_unwrap(this: Self) -> Result<T, Self> {
+ if Rc::strong_count(&this) == 1 {
+ unsafe {
+ let val = ptr::read(&*this); // copy the contained object
+
+ // Indicate to Weaks that they can't be promoted by decrementing
+ // the strong count, and then remove the implicit "strong weak"
+ // pointer while also handling drop logic by just crafting a
+ // fake Weak.
+ this.dec_strong();
+ let _weak = Weak { ptr: this.ptr };
+ forget(this);
+ Ok(val)
+ }
+ } else {
+ Err(this)
+ }
+ }
+}
+
+impl<T> Rc<[T]> {
+ /// Constructs a new reference-counted slice with uninitialized contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+ /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+ /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
+ unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
+ }
+}
+
+impl<T> Rc<mem::MaybeUninit<T>> {
+ /// Converts to `Rc<T>`.
+ ///
+ /// # Safety
+ ///
+ /// As with [`MaybeUninit::assume_init`],
+ /// it is up to the caller to guarantee that the inner value
+ /// really is in an initialized state.
+ /// Calling this when the content is not yet fully initialized
+ /// causes immediate undefined behavior.
+ ///
+ /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let mut five = Rc::<u32>::new_uninit();
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub unsafe fn assume_init(self) -> Rc<T> {
+ Rc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
+ }
+}
+
+impl<T> Rc<[mem::MaybeUninit<T>]> {
+ /// Converts to `Rc<[T]>`.
+ ///
+ /// # Safety
+ ///
+ /// As with [`MaybeUninit::assume_init`],
+ /// it is up to the caller to guarantee that the inner value
+ /// really is in an initialized state.
+ /// Calling this when the content is not yet fully initialized
+ /// causes immediate undefined behavior.
+ ///
+ /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+ /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+ /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub unsafe fn assume_init(self) -> Rc<[T]> {
+ unsafe { Rc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
+ }
+}
+
+impl<T: ?Sized> Rc<T> {
+ /// Consumes the `Rc`, returning the wrapped pointer.
+ ///
+ /// To avoid a memory leak the pointer must be converted back to an `Rc` using
+ /// [`Rc::from_raw`][from_raw].
+ ///
+ /// [from_raw]: struct.Rc.html#method.from_raw
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let x = Rc::new("hello".to_owned());
+ /// let x_ptr = Rc::into_raw(x);
+ /// assert_eq!(unsafe { &*x_ptr }, "hello");
+ /// ```
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub fn into_raw(this: Self) -> *const T {
+ let ptr = Self::as_ptr(&this);
+ mem::forget(this);
+ ptr
+ }
+
+ /// Provides a raw pointer to the data.
+ ///
+ /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
+ /// for as long there are strong counts in the `Rc`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let x = Rc::new("hello".to_owned());
+ /// let y = Rc::clone(&x);
+ /// let x_ptr = Rc::as_ptr(&x);
+ /// assert_eq!(x_ptr, Rc::as_ptr(&y));
+ /// assert_eq!(unsafe { &*x_ptr }, "hello");
+ /// ```
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub fn as_ptr(this: &Self) -> *const T {
+ let ptr: *mut RcBox<T> = NonNull::as_ptr(this.ptr);
+
+ // SAFETY: This cannot go through Deref::deref or Rc::inner because
+ // this is required to retain raw/mut provenance such that e.g. `get_mut` can
+ // write through the pointer after the Rc is recovered through `from_raw`.
+ unsafe { &raw const (*ptr).value }
+ }
+
+ /// Constructs an `Rc<T>` from a raw pointer.
+ ///
+ /// The raw pointer must have been previously returned by a call to
+ /// [`Rc<U>::into_raw`][into_raw] where `U` must have the same size
+ /// and alignment as `T`. This is trivially true if `U` is `T`.
+ /// Note that if `U` is not `T` but has the same size and alignment, this is
+ /// basically like transmuting references of different types. See
+ /// [`mem::transmute`][transmute] for more information on what
+ /// restrictions apply in this case.
+ ///
+ /// The user of `from_raw` has to make sure a specific value of `T` is only
+ /// dropped once.
+ ///
+ /// This function is unsafe because improper use may lead to memory unsafety,
+ /// even if the returned `Rc<T>` is never accessed.
+ ///
+ /// [into_raw]: struct.Rc.html#method.into_raw
+ /// [transmute]: ../../std/mem/fn.transmute.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let x = Rc::new("hello".to_owned());
+ /// let x_ptr = Rc::into_raw(x);
+ ///
+ /// unsafe {
+ /// // Convert back to an `Rc` to prevent leak.
+ /// let x = Rc::from_raw(x_ptr);
+ /// assert_eq!(&*x, "hello");
+ ///
+ /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
+ /// }
+ ///
+ /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+ /// ```
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ let offset = unsafe { data_offset(ptr) };
+
+ // Reverse the offset to find the original RcBox.
+ let fake_ptr = ptr as *mut RcBox<T>;
+ let rc_ptr = unsafe { set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset)) };
+
+ unsafe { Self::from_ptr(rc_ptr) }
+ }
+
+ /// Consumes the `Rc`, returning the wrapped pointer as `NonNull<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(rc_into_raw_non_null)]
+ /// #![allow(deprecated)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let x = Rc::new("hello".to_owned());
+ /// let ptr = Rc::into_raw_non_null(x);
+ /// let deref = unsafe { ptr.as_ref() };
+ /// assert_eq!(deref, "hello");
+ /// ```
+ #[unstable(feature = "rc_into_raw_non_null", issue = "47336")]
+ #[rustc_deprecated(since = "1.44.0", reason = "use `Rc::into_raw` instead")]
+ #[inline]
+ pub fn into_raw_non_null(this: Self) -> NonNull<T> {
+ // safe because Rc guarantees its pointer is non-null
+ unsafe { NonNull::new_unchecked(Rc::into_raw(this) as *mut _) }
+ }
+
+ /// Creates a new [`Weak`][weak] pointer to this allocation.
+ ///
+ /// [weak]: struct.Weak.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// let weak_five = Rc::downgrade(&five);
+ /// ```
+ #[stable(feature = "rc_weak", since = "1.4.0")]
+ pub fn downgrade(this: &Self) -> Weak<T> {
+ this.inc_weak();
+ // Make sure we do not create a dangling Weak
+ debug_assert!(!is_dangling(this.ptr));
+ Weak { ptr: this.ptr }
+ }
+
+ /// Gets the number of [`Weak`][weak] pointers to this allocation.
+ ///
+ /// [weak]: struct.Weak.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ /// let _weak_five = Rc::downgrade(&five);
+ ///
+ /// assert_eq!(1, Rc::weak_count(&five));
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_counts", since = "1.15.0")]
+ pub fn weak_count(this: &Self) -> usize {
+ this.weak() - 1
+ }
+
+ /// Gets the number of strong (`Rc`) pointers to this allocation.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ /// let _also_five = Rc::clone(&five);
+ ///
+ /// assert_eq!(2, Rc::strong_count(&five));
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_counts", since = "1.15.0")]
+ pub fn strong_count(this: &Self) -> usize {
+ this.strong()
+ }
+
+ /// Returns `true` if there are no other `Rc` or [`Weak`][weak] pointers to
+ /// this allocation.
+ ///
+ /// [weak]: struct.Weak.html
+ #[inline]
+ fn is_unique(this: &Self) -> bool {
+ Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
+ }
+
+ /// Returns a mutable reference into the given `Rc`, if there are
+ /// no other `Rc` or [`Weak`][weak] pointers to the same allocation.
+ ///
+ /// Returns [`None`] otherwise, because it is not safe to
+ /// mutate a shared value.
+ ///
+ /// See also [`make_mut`][make_mut], which will [`clone`][clone]
+ /// the inner value when there are other pointers.
+ ///
+ /// [weak]: struct.Weak.html
+ /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [make_mut]: struct.Rc.html#method.make_mut
+ /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let mut x = Rc::new(3);
+ /// *Rc::get_mut(&mut x).unwrap() = 4;
+ /// assert_eq!(*x, 4);
+ ///
+ /// let _y = Rc::clone(&x);
+ /// assert!(Rc::get_mut(&mut x).is_none());
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_unique", since = "1.4.0")]
+ pub fn get_mut(this: &mut Self) -> Option<&mut T> {
+ // SAFETY: `is_unique` proved there is no other Rc and no Weak, so no
+ // other pointer can observe the value during the returned borrow.
+ if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
+ }
+
+ /// Returns a mutable reference into the given `Rc`,
+ /// without any check.
+ ///
+ /// See also [`get_mut`], which is safe and does appropriate checks.
+ ///
+ /// [`get_mut`]: struct.Rc.html#method.get_mut
+ ///
+ /// # Safety
+ ///
+ /// Any other `Rc` or [`Weak`] pointers to the same allocation must not be dereferenced
+ /// for the duration of the returned borrow.
+ /// This is trivially the case if no such pointers exist,
+ /// for example immediately after `Rc::new`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::rc::Rc;
+ ///
+ /// let mut x = Rc::new(String::new());
+ /// unsafe {
+ /// Rc::get_mut_unchecked(&mut x).push_str("foo")
+ /// }
+ /// assert_eq!(*x, "foo");
+ /// ```
+ #[inline]
+ #[unstable(feature = "get_mut_unchecked", issue = "63292")]
+ pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
+ // SAFETY: the caller contract above rules out any other dereference of
+ // this allocation while the returned `&mut T` is live.
+ unsafe { &mut this.ptr.as_mut().value }
+ }
+
+ #[inline]
+ #[stable(feature = "ptr_eq", since = "1.17.0")]
+ /// Returns `true` if the two `Rc`s point to the same allocation
+ /// (in a vein similar to [`ptr::eq`]).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ /// let same_five = Rc::clone(&five);
+ /// let other_five = Rc::new(5);
+ ///
+ /// assert!(Rc::ptr_eq(&five, &same_five));
+ /// assert!(!Rc::ptr_eq(&five, &other_five));
+ /// ```
+ ///
+ /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
+ pub fn ptr_eq(this: &Self, other: &Self) -> bool {
+ // Compares RcBox addresses only; the inner values are never inspected.
+ this.ptr.as_ptr() == other.ptr.as_ptr()
+ }
+}
+
+impl<T: Clone> Rc<T> {
+ /// Makes a mutable reference into the given `Rc`.
+ ///
+ /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
+ /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
+ /// referred to as clone-on-write.
+ ///
+ /// If there are no other `Rc` pointers to this allocation, then [`Weak`]
+ /// pointers to this allocation will be disassociated.
+ ///
+ /// See also [`get_mut`], which will fail rather than cloning.
+ ///
+ /// [`Weak`]: struct.Weak.html
+ /// [`clone`]: ../../std/clone/trait.Clone.html#tymethod.clone
+ /// [`get_mut`]: struct.Rc.html#method.get_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let mut data = Rc::new(5);
+ ///
+ /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
+ /// let mut other_data = Rc::clone(&data); // Won't clone inner data
+ /// *Rc::make_mut(&mut data) += 1; // Clones inner data
+ /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
+ /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
+ ///
+ /// // Now `data` and `other_data` point to different allocations.
+ /// assert_eq!(*data, 8);
+ /// assert_eq!(*other_data, 12);
+ /// ```
+ ///
+ /// [`Weak`] pointers will be disassociated:
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let mut data = Rc::new(75);
+ /// let weak = Rc::downgrade(&data);
+ ///
+ /// assert!(75 == *data);
+ /// assert!(75 == *weak.upgrade().unwrap());
+ ///
+ /// *Rc::make_mut(&mut data) += 1;
+ ///
+ /// assert!(76 == *data);
+ /// assert!(weak.upgrade().is_none());
+ /// ```
+ #[inline]
+ #[stable(feature = "rc_unique", since = "1.4.0")]
+ pub fn make_mut(this: &mut Self) -> &mut T {
+ // Three cases: other strong refs exist -> clone the value;
+ // only Weaks remain -> move the value to a new allocation;
+ // already unique -> hand out the existing allocation.
+ if Rc::strong_count(this) != 1 {
+ // Gotta clone the data, there are other Rcs
+ *this = Rc::new((**this).clone())
+ } else if Rc::weak_count(this) != 0 {
+ // Can just steal the data, all that's left is Weaks
+ unsafe {
+ let mut swap = Rc::new(ptr::read(&this.ptr.as_ref().value));
+ mem::swap(this, &mut swap);
+ swap.dec_strong();
+ // Remove implicit strong-weak ref (no need to craft a fake
+ // Weak here -- we know other Weaks can clean up for us)
+ swap.dec_weak();
+ forget(swap);
+ }
+ }
+ // This unsafety is ok because we're guaranteed that the pointer
+ // returned is the *only* pointer that will ever be returned to T. Our
+ // reference count is guaranteed to be 1 at this point, and we required
+ // the `Rc<T>` itself to be `mut`, so we're returning the only possible
+ // reference to the allocation.
+ unsafe { &mut this.ptr.as_mut().value }
+ }
+}
+
+impl Rc<dyn Any> {
+ #[inline]
+ #[stable(feature = "rc_downcast", since = "1.29.0")]
+ /// Attempt to downcast the `Rc<dyn Any>` to a concrete type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::any::Any;
+ /// use std::rc::Rc;
+ ///
+ /// fn print_if_string(value: Rc<dyn Any>) {
+ /// if let Ok(string) = value.downcast::<String>() {
+ /// println!("String ({}): {}", string.len(), string);
+ /// }
+ /// }
+ ///
+ /// let my_string = "Hello World".to_string();
+ /// print_if_string(Rc::new(my_string));
+ /// print_if_string(Rc::new(0i8));
+ /// ```
+ pub fn downcast<T: Any>(self) -> Result<Rc<T>, Rc<dyn Any>> {
+ if (*self).is::<T>() {
+ // The `is` check above guarantees the cast to `RcBox<T>` is valid.
+ let ptr = self.ptr.cast::<RcBox<T>>();
+ // `forget` transfers ownership of the strong count to the new Rc.
+ forget(self);
+ Ok(Rc::from_inner(ptr))
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl<T: ?Sized> Rc<T> {
+ /// Allocates an `RcBox<T>` with sufficient space for
+ /// a possibly-unsized inner value where the value has the layout provided.
+ ///
+ /// The function `mem_to_rcbox` is called with the data pointer
+ /// and must return back a (potentially fat)-pointer for the `RcBox<T>`.
+ unsafe fn allocate_for_layout(
+ value_layout: Layout,
+ mem_to_rcbox: impl FnOnce(*mut u8) -> *mut RcBox<T>,
+ ) -> *mut RcBox<T> {
+ // Calculate layout using the given value layout.
+ // Previously, layout was calculated on the expression
+ // `&*(ptr as *const RcBox<T>)`, but this created a misaligned
+ // reference (see #54908).
+ let layout = Layout::new::<RcBox<()>>().extend(value_layout).unwrap().0.pad_to_align();
+
+ // Allocate for the layout.
+ let mem = Global
+ .alloc(layout, AllocInit::Uninitialized)
+ .unwrap_or_else(|_| handle_alloc_error(layout));
+
+ // Initialize the RcBox
+ let inner = mem_to_rcbox(mem.ptr.as_ptr());
+ unsafe {
+ debug_assert_eq!(Layout::for_value(&*inner), layout);
+
+ // Only the counts are initialized here; the value field is written
+ // later by the caller.
+ ptr::write(&mut (*inner).strong, Cell::new(1));
+ ptr::write(&mut (*inner).weak, Cell::new(1));
+ }
+
+ inner
+ }
+
+ /// Allocates an `RcBox<T>` with sufficient space for an unsized inner value
+ unsafe fn allocate_for_ptr(ptr: *const T) -> *mut RcBox<T> {
+ // Allocate for the `RcBox<T>` using the given value.
+ // The closure grafts `ptr`'s metadata (vtable/length) onto the fresh
+ // allocation's data pointer.
+ unsafe {
+ Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+ set_data_ptr(ptr as *mut T, mem) as *mut RcBox<T>
+ })
+ }
+ }
+
+ /// Moves the value out of a `Box<T>` into a freshly allocated `RcBox<T>`
+ /// by a bitwise copy, then frees the box's storage without dropping it.
+ fn from_box(v: Box<T>) -> Rc<T> {
+ unsafe {
+ let box_unique = Box::into_unique(v);
+ let bptr = box_unique.as_ptr();
+
+ let value_size = size_of_val(&*bptr);
+ let ptr = Self::allocate_for_ptr(bptr);
+
+ // Copy value as bytes
+ ptr::copy_nonoverlapping(
+ bptr as *const T as *const u8,
+ &mut (*ptr).value as *mut _ as *mut u8,
+ value_size,
+ );
+
+ // Free the allocation without dropping its contents
+ box_free(box_unique);
+
+ Self::from_ptr(ptr)
+ }
+ }
+}
+
+impl<T> Rc<[T]> {
+ /// Allocates an `RcBox<[T]>` with the given length.
+ unsafe fn allocate_for_slice(len: usize) -> *mut RcBox<[T]> {
+ unsafe {
+ // The closure builds a fat pointer carrying `len` so the resulting
+ // `RcBox<[T]>` knows its slice length.
+ Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+ ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut RcBox<[T]>
+ })
+ }
+ }
+}
+
+/// Sets the data pointer of a `?Sized` raw pointer.
+///
+/// For a slice/trait object, this sets the `data` field and leaves the rest
+/// unchanged. For a sized raw pointer, this simply sets the pointer.
+unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
+ unsafe {
+ // SAFETY: only the first (data) word of the possibly-fat pointer is
+ // overwritten; any metadata word is left intact.
+ ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
+ }
+ ptr
+}
+
+impl<T> Rc<[T]> {
+ /// Copy elements from slice into newly allocated Rc<\[T\]>
+ ///
+ /// Unsafe because the caller must either take ownership or bind `T: Copy`
+ unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
+ unsafe {
+ let ptr = Self::allocate_for_slice(v.len());
+ // Bitwise copy of all elements; no clones, no drops.
+ ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).value as *mut [T] as *mut T, v.len());
+ Self::from_ptr(ptr)
+ }
+ }
+
+ /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
+ ///
+ /// Behavior is undefined should the size be wrong.
+ unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Rc<[T]> {
+ // Panic guard while cloning T elements.
+ // In the event of a panic, elements that have been written
+ // into the new RcBox will be dropped, then the memory freed.
+ struct Guard<T> {
+ mem: NonNull<u8>,
+ elems: *mut T,
+ layout: Layout,
+ n_elems: usize,
+ }
+
+ impl<T> Drop for Guard<T> {
+ fn drop(&mut self) {
+ unsafe {
+ let slice = from_raw_parts_mut(self.elems, self.n_elems);
+ ptr::drop_in_place(slice);
+
+ Global.dealloc(self.mem, self.layout);
+ }
+ }
+ }
+
+ unsafe {
+ let ptr = Self::allocate_for_slice(len);
+
+ let mem = ptr as *mut _ as *mut u8;
+ let layout = Layout::for_value(&*ptr);
+
+ // Pointer to first element
+ let elems = &mut (*ptr).value as *mut [T] as *mut T;
+
+ let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
+
+ // `guard.n_elems` tracks how many elements are initialized, so a
+ // panic mid-iteration drops exactly those and frees the allocation.
+ for (i, item) in iter.enumerate() {
+ ptr::write(elems.add(i), item);
+ guard.n_elems += 1;
+ }
+
+ // All clear. Forget the guard so it doesn't free the new RcBox.
+ forget(guard);
+
+ Self::from_ptr(ptr)
+ }
+ }
+}
+
+/// Specialization trait used for `From<&[T]>`.
+///
+/// `T: Copy` gets a bitwise-copy fast path; everything else clones per element.
+trait RcFromSlice<T> {
+ fn from_slice(slice: &[T]) -> Self;
+}
+
+impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
+ // General path: clone each element; the exact-length iterator helper
+ // allocates once and is panic-safe.
+ #[inline]
+ default fn from_slice(v: &[T]) -> Self {
+ unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
+ }
+}
+
+impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
+ // Fast path: `T: Copy` allows a single bitwise copy of the whole slice.
+ #[inline]
+ fn from_slice(v: &[T]) -> Self {
+ unsafe { Rc::copy_from_slice(v) }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Deref for Rc<T> {
+ type Target = T;
+
+ // Borrowing the value is just a field projection; no count changes.
+ #[inline(always)]
+ fn deref(&self) -> &T {
+ &self.inner().value
+ }
+}
+
+// Lets `Rc<T>` be used as a `self` type (method receiver).
+#[unstable(feature = "receiver_trait", issue = "none")]
+impl<T: ?Sized> Receiver for Rc<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
+ /// Drops the `Rc`.
+ ///
+ /// This will decrement the strong reference count. If the strong reference
+ /// count reaches zero then the only other references (if any) are
+ /// [`Weak`], so we `drop` the inner value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// struct Foo;
+ ///
+ /// impl Drop for Foo {
+ /// fn drop(&mut self) {
+ /// println!("dropped!");
+ /// }
+ /// }
+ ///
+ /// let foo = Rc::new(Foo);
+ /// let foo2 = Rc::clone(&foo);
+ ///
+ /// drop(foo); // Doesn't print anything
+ /// drop(foo2); // Prints "dropped!"
+ /// ```
+ ///
+ /// [`Weak`]: ../../std/rc/struct.Weak.html
+ fn drop(&mut self) {
+ unsafe {
+ self.dec_strong();
+ if self.strong() == 0 {
+ // destroy the contained object
+ ptr::drop_in_place(self.ptr.as_mut());
+
+ // remove the implicit "strong weak" pointer now that we've
+ // destroyed the contents.
+ self.dec_weak();
+
+ // The allocation itself is freed only once every Weak is gone too.
+ if self.weak() == 0 {
+ Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
+ }
+ }
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Clone for Rc<T> {
+ /// Makes a clone of the `Rc` pointer.
+ ///
+ /// This creates another pointer to the same allocation, increasing the
+ /// strong reference count.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// let _ = Rc::clone(&five);
+ /// ```
+ #[inline]
+ fn clone(&self) -> Rc<T> {
+ // O(1): only the strong count is bumped; the value is never copied.
+ self.inc_strong();
+ Self::from_inner(self.ptr)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Default> Default for Rc<T> {
+ /// Creates a new `Rc<T>`, with the `Default` value for `T`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let x: Rc<i32> = Default::default();
+ /// assert_eq!(*x, 0);
+ /// ```
+ #[inline]
+ fn default() -> Rc<T> {
+ // Delegates to `T::default()` and wraps it in a fresh allocation.
+ Rc::new(Default::default())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+/// Internal equality-dispatch helper so `PartialEq for Rc<T>` can be
+/// specialized for `T: Eq` (see the `MarkerEq` impl below).
+trait RcEqIdent<T: ?Sized + PartialEq> {
+ fn eq(&self, other: &Rc<T>) -> bool;
+ fn ne(&self, other: &Rc<T>) -> bool;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> RcEqIdent<T> for Rc<T> {
+ // Default path: always compare the inner values; no pointer shortcut,
+ // because `PartialEq` may be irreflexive (e.g. NaN).
+ #[inline]
+ default fn eq(&self, other: &Rc<T>) -> bool {
+ **self == **other
+ }
+
+ #[inline]
+ default fn ne(&self, other: &Rc<T>) -> bool {
+ **self != **other
+ }
+}
+
+// Hack to allow specializing on `Eq` even though `Eq` has a method.
+#[rustc_unsafe_specialization_marker]
+pub(crate) trait MarkerEq: PartialEq<Self> {}
+
+impl<T: Eq> MarkerEq for T {}
+
+/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
+/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
+/// store large values, that are slow to clone, but also heavy to check for equality, causing this
+/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
+/// the same value, than two `&T`s.
+///
+/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + MarkerEq> RcEqIdent<T> for Rc<T> {
+ // Pointer identity short-circuits the potentially expensive value compare.
+ #[inline]
+ fn eq(&self, other: &Rc<T>) -> bool {
+ Rc::ptr_eq(self, other) || **self == **other
+ }
+
+ #[inline]
+ fn ne(&self, other: &Rc<T>) -> bool {
+ !Rc::ptr_eq(self, other) && **self != **other
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> PartialEq for Rc<T> {
+ /// Equality for two `Rc`s.
+ ///
+ /// Two `Rc`s are equal if their inner values are equal, even if they are
+ /// stored in different allocation.
+ ///
+ /// If `T` also implements `Eq` (implying reflexivity of equality),
+ /// two `Rc`s that point to the same allocation are
+ /// always equal.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// assert!(five == Rc::new(5));
+ /// ```
+ #[inline]
+ fn eq(&self, other: &Rc<T>) -> bool {
+ // Dispatched through `RcEqIdent` so `T: Eq` gets the pointer shortcut.
+ RcEqIdent::eq(self, other)
+ }
+
+ /// Inequality for two `Rc`s.
+ ///
+ /// Two `Rc`s are unequal if their inner values are unequal.
+ ///
+ /// If `T` also implements `Eq` (implying reflexivity of equality),
+ /// two `Rc`s that point to the same allocation are
+ /// never unequal.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// assert!(five != Rc::new(6));
+ /// ```
+ #[inline]
+ fn ne(&self, other: &Rc<T>) -> bool {
+ RcEqIdent::ne(self, other)
+ }
+}
+
+// `Eq` is a marker only; reflexivity follows from `T: Eq`.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Eq> Eq for Rc<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Every comparison delegates to the pointee; allocation identity plays no role.
+impl<T: ?Sized + PartialOrd> PartialOrd for Rc<T> {
+ /// Partial comparison for two `Rc`s.
+ ///
+ /// The two are compared by calling `partial_cmp()` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ /// use std::cmp::Ordering;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
+ /// ```
+ #[inline(always)]
+ fn partial_cmp(&self, other: &Rc<T>) -> Option<Ordering> {
+ (**self).partial_cmp(&**other)
+ }
+
+ /// Less-than comparison for two `Rc`s.
+ ///
+ /// The two are compared by calling `<` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// assert!(five < Rc::new(6));
+ /// ```
+ #[inline(always)]
+ fn lt(&self, other: &Rc<T>) -> bool {
+ **self < **other
+ }
+
+ /// 'Less than or equal to' comparison for two `Rc`s.
+ ///
+ /// The two are compared by calling `<=` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// assert!(five <= Rc::new(5));
+ /// ```
+ #[inline(always)]
+ fn le(&self, other: &Rc<T>) -> bool {
+ **self <= **other
+ }
+
+ /// Greater-than comparison for two `Rc`s.
+ ///
+ /// The two are compared by calling `>` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// assert!(five > Rc::new(4));
+ /// ```
+ #[inline(always)]
+ fn gt(&self, other: &Rc<T>) -> bool {
+ **self > **other
+ }
+
+ /// 'Greater than or equal to' comparison for two `Rc`s.
+ ///
+ /// The two are compared by calling `>=` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// assert!(five >= Rc::new(5));
+ /// ```
+ #[inline(always)]
+ fn ge(&self, other: &Rc<T>) -> bool {
+ **self >= **other
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Ord> Ord for Rc<T> {
+ /// Comparison for two `Rc`s.
+ ///
+ /// The two are compared by calling `cmp()` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ /// use std::cmp::Ordering;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
+ /// ```
+ #[inline]
+ fn cmp(&self, other: &Rc<T>) -> Ordering {
+ // Delegates to the inner values, consistent with `PartialOrd` above.
+ (**self).cmp(&**other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Hashes the inner value, consistent with `PartialEq` delegating to it.
+impl<T: ?Sized + Hash> Hash for Rc<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ (**self).hash(state);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Display> fmt::Display for Rc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Rc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+// Prints the address of the pointed-to value (not of the Rc handle).
+impl<T: ?Sized> fmt::Pointer for Rc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Pointer::fmt(&(&**self as *const T), f)
+ }
+}
+
+#[stable(feature = "from_for_ptrs", since = "1.6.0")]
+impl<T> From<T> for Rc<T> {
+ fn from(t: T) -> Self {
+ Rc::new(t)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: Clone> From<&[T]> for Rc<[T]> {
+ // Dispatched through `RcFromSlice` so `T: Copy` gets a memcpy fast path.
+ #[inline]
+ fn from(v: &[T]) -> Rc<[T]> {
+ <Self as RcFromSlice<T>>::from_slice(v)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<&str> for Rc<str> {
+ #[inline]
+ fn from(v: &str) -> Rc<str> {
+ // Build as a byte slice, then reinterpret the pointer as `str`
+ // (valid UTF-8 is guaranteed by the input `&str`).
+ let rc = Rc::<[u8]>::from(v.as_bytes());
+ unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<String> for Rc<str> {
+ // Copies out of the String's buffer; the String is dropped afterwards.
+ #[inline]
+ fn from(v: String) -> Rc<str> {
+ Rc::from(&v[..])
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: ?Sized> From<Box<T>> for Rc<T> {
+ #[inline]
+ fn from(v: Box<T>) -> Rc<T> {
+ Rc::from_box(v)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T> From<Vec<T>> for Rc<[T]> {
+ #[inline]
+ fn from(mut v: Vec<T>) -> Rc<[T]> {
+ unsafe {
+ // Elements are moved by bitwise copy; `set_len(0)` then prevents
+ // the Vec from double-dropping them while still freeing its buffer.
+ let rc = Rc::copy_from_slice(&v);
+
+ // Allow the Vec to free its memory, but not destroy its contents
+ v.set_len(0);
+
+ rc
+ }
+ }
+}
+
+#[stable(feature = "shared_from_cow", since = "1.45.0")]
+impl<'a, B> From<Cow<'a, B>> for Rc<B>
+where
+ B: ToOwned + ?Sized,
+ Rc<B>: From<&'a B> + From<B::Owned>,
+{
+ #[inline]
+ fn from(cow: Cow<'a, B>) -> Rc<B> {
+ match cow {
+ Cow::Borrowed(s) => Rc::from(s),
+ Cow::Owned(s) => Rc::from(s),
+ }
+ }
+}
+
+#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
+impl<T, const N: usize> TryFrom<Rc<[T]>> for Rc<[T; N]> {
+ type Error = Rc<[T]>;
+
+ fn try_from(boxed_slice: Rc<[T]>) -> Result<Self, Self::Error> {
+ // Succeeds only for an exact length match; the cast just drops the
+ // fat pointer's length metadata, no data is copied.
+ if boxed_slice.len() == N {
+ Ok(unsafe { Rc::from_raw(Rc::into_raw(boxed_slice) as *mut [T; N]) })
+ } else {
+ Err(boxed_slice)
+ }
+ }
+}
+
+#[stable(feature = "shared_from_iter", since = "1.37.0")]
+impl<T> iter::FromIterator<T> for Rc<[T]> {
+ /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
+ ///
+ /// # Performance characteristics
+ ///
+ /// ## The general case
+ ///
+ /// In the general case, collecting into `Rc<[T]>` is done by first
+ /// collecting into a `Vec<T>`. That is, when writing the following:
+ ///
+ /// ```rust
+ /// # use std::rc::Rc;
+ /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
+ /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
+ /// ```
+ ///
+ /// this behaves as if we wrote:
+ ///
+ /// ```rust
+ /// # use std::rc::Rc;
+ /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
+ /// .collect::<Vec<_>>() // The first set of allocations happens here.
+ /// .into(); // A second allocation for `Rc<[T]>` happens here.
+ /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
+ /// ```
+ ///
+ /// This will allocate as many times as needed for constructing the `Vec<T>`
+ /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
+ ///
+ /// ## Iterators of known length
+ ///
+ /// When your `Iterator` implements `TrustedLen` and is of an exact size,
+ /// a single allocation will be made for the `Rc<[T]>`. For example:
+ ///
+ /// ```rust
+ /// # use std::rc::Rc;
+ /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
+ /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
+ /// ```
+ fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
+ // Specialized via `ToRcSlice`: `TrustedLen` iterators allocate once.
+ ToRcSlice::to_rc_slice(iter.into_iter())
+ }
+}
+
+/// Specialization trait used for collecting into `Rc<[T]>`.
+trait ToRcSlice<T>: Iterator<Item = T> + Sized {
+ fn to_rc_slice(self) -> Rc<[T]>;
+}
+
+impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
+ // Default path: buffer through a Vec, then convert (two allocation phases).
+ default fn to_rc_slice(self) -> Rc<[T]> {
+ self.collect::<Vec<T>>().into()
+ }
+}
+
+impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
+ fn to_rc_slice(self) -> Rc<[T]> {
+ // This is the case for a `TrustedLen` iterator.
+ let (low, high) = self.size_hint();
+ if let Some(high) = high {
+ debug_assert_eq!(
+ low,
+ high,
+ "TrustedLen iterator's size hint is not exact: {:?}",
+ (low, high)
+ );
+
+ unsafe {
+ // SAFETY: We need to ensure that the iterator has an exact length and we have.
+ Rc::from_iter_exact(self, low)
+ }
+ } else {
+ // TrustedLen contract still allows an unbounded upper bound (e.g.
+ // more than usize::MAX elements); fall back to the Vec path.
+ self.collect::<Vec<T>>().into()
+ }
+ }
+}
+
+/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
+/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
+/// pointer, which returns an [`Option`]`<`[`Rc`]`<T>>`.
+///
+/// Since a `Weak` reference does not count towards ownership, it will not
+/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
+/// guarantees about the value still being present. Thus it may return [`None`]
+/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
+/// itself (the backing store) from being deallocated.
+///
+/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
+/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
+/// prevent circular references between [`Rc`] pointers, since mutual owning references
+/// would never allow either [`Rc`] to be dropped. For example, a tree could
+/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
+/// pointers from children back to their parents.
+///
+/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
+///
+/// [`Rc`]: struct.Rc.html
+/// [`Rc::downgrade`]: struct.Rc.html#method.downgrade
+/// [`upgrade`]: struct.Weak.html#method.upgrade
+/// [`Option`]: ../../std/option/enum.Option.html
+/// [`None`]: ../../std/option/enum.Option.html#variant.None
+#[stable(feature = "rc_weak", since = "1.4.0")]
+pub struct Weak<T: ?Sized> {
+ // This is a `NonNull` to allow optimizing the size of this type in enums,
+ // but it is not necessarily a valid pointer.
+ // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
+ // to allocate space on the heap. That's not a value a real pointer
+ // will ever have because RcBox has alignment at least 2.
+ // This is only possible when `T: Sized`; unsized `T` never dangle.
+ // (`usize::MAX` is the sentinel recognized by `is_dangling`.)
+ ptr: NonNull<RcBox<T>>,
+}
+
+// Like `Rc`, `Weak` uses non-atomic reference counts, so it must never
+// cross threads.
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized> !marker::Send for Weak<T> {}
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized> !marker::Sync for Weak<T> {}
+
+// Allow `Weak<Sized>` -> `Weak<dyn Trait>` / `Weak<[T]>` coercions.
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
+
+impl<T> Weak<T> {
+ /// Constructs a new `Weak<T>`, without allocating any memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
+ ///
+ /// [`upgrade`]: #method.upgrade
+ /// [`None`]: ../../std/option/enum.Option.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Weak;
+ ///
+ /// let empty: Weak<i64> = Weak::new();
+ /// assert!(empty.upgrade().is_none());
+ /// ```
+ #[stable(feature = "downgraded_weak", since = "1.10.0")]
+ pub fn new() -> Weak<T> {
+ // `usize::MAX` is the dangling sentinel; no RcBox is ever allocated.
+ Weak { ptr: NonNull::new(usize::MAX as *mut RcBox<T>).expect("MAX is not 0") }
+ }
+
+ /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
+ ///
+ /// The pointer is valid only if there are some strong references. The pointer may be dangling,
+ /// unaligned or even [`null`] otherwise.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ /// use std::ptr;
+ ///
+ /// let strong = Rc::new("hello".to_owned());
+ /// let weak = Rc::downgrade(&strong);
+ /// // Both point to the same object
+ /// assert!(ptr::eq(&*strong, weak.as_ptr()));
+ /// // The strong here keeps it alive, so we can still access the object.
+ /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
+ ///
+ /// drop(strong);
+ /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
+ /// // undefined behaviour.
+ /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
+ /// ```
+ ///
+ /// [`null`]: ../../std/ptr/fn.null.html
+ #[stable(feature = "rc_as_ptr", since = "1.45.0")]
+ pub fn as_ptr(&self) -> *const T {
+ let ptr: *mut RcBox<T> = NonNull::as_ptr(self.ptr)
+
+ // SAFETY: we must offset the pointer manually, and said pointer may be
+ // a dangling weak (usize::MAX) if T is sized. data_offset is safe to call,
+ // because we know that a pointer to unsized T was derived from a real
+ // unsized T, as dangling weaks are only created for sized T. wrapping_offset
+ // is used so that we can use the same code path for the non-dangling
+ // unsized case and the potentially dangling sized case.
+ unsafe {
+ let offset = data_offset(ptr as *mut T);
+ set_data_ptr(ptr as *mut T, (ptr as *mut u8).wrapping_offset(offset))
+ }
+ }
+
+ /// Consumes the `Weak<T>` and turns it into a raw pointer.
+ ///
+ /// This converts the weak pointer into a raw pointer, preserving the original weak count. It
+ /// can be turned back into the `Weak<T>` with [`from_raw`].
+ ///
+ /// The same restrictions of accessing the target of the pointer as with
+ /// [`as_ptr`] apply.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// let strong = Rc::new("hello".to_owned());
+ /// let weak = Rc::downgrade(&strong);
+ /// let raw = weak.into_raw();
+ ///
+ /// assert_eq!(1, Rc::weak_count(&strong));
+ /// assert_eq!("hello", unsafe { &*raw });
+ ///
+ /// drop(unsafe { Weak::from_raw(raw) });
+ /// assert_eq!(0, Rc::weak_count(&strong));
+ /// ```
+ ///
+ /// [`from_raw`]: struct.Weak.html#method.from_raw
+ /// [`as_ptr`]: struct.Weak.html#method.as_ptr
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub fn into_raw(self) -> *const T {
+ let result = self.as_ptr();
+ // `forget` keeps the weak count alive; ownership of that count moves
+ // to the raw pointer and is reclaimed by `from_raw`.
+ mem::forget(self);
+ result
+ }
+
+ /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
+ ///
+ /// This can be used to safely get a strong reference (by calling [`upgrade`]
+ /// later) or to deallocate the weak count by dropping the `Weak<T>`.
+ ///
+ /// It takes ownership of one weak count (with the exception of pointers created by [`new`],
+ /// as these don't have any corresponding weak count).
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have originated from the [`into_raw`] and must still own its potential
+ /// weak reference count.
+ ///
+ /// It is allowed for the strong count to be 0 at the time of calling this, but the weak count
+ /// must be non-zero or the pointer must have originated from a dangling `Weak<T>` (one created
+ /// by [`new`]).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// let strong = Rc::new("hello".to_owned());
+ ///
+ /// let raw_1 = Rc::downgrade(&strong).into_raw();
+ /// let raw_2 = Rc::downgrade(&strong).into_raw();
+ ///
+ /// assert_eq!(2, Rc::weak_count(&strong));
+ ///
+ /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+ /// assert_eq!(1, Rc::weak_count(&strong));
+ ///
+ /// drop(strong);
+ ///
+ /// // Decrement the last weak count.
+ /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+ /// ```
+ ///
+ /// [`into_raw`]: struct.Weak.html#method.into_raw
+ /// [`upgrade`]: struct.Weak.html#method.upgrade
+ /// [`Rc`]: struct.Rc.html
+ /// [`Weak`]: struct.Weak.html
+ /// [`new`]: struct.Weak.html#method.new
+ /// [`forget`]: ../../std/mem/fn.forget.html
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ if ptr.is_null() {
+ Self::new()
+ } else {
+ // See Rc::from_raw for details
+ unsafe {
+ // Walk backwards from the value pointer to the RcBox header.
+ let offset = data_offset(ptr);
+ let fake_ptr = ptr as *mut RcBox<T>;
+ let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+ Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
+ }
+ }
+ }
+}
+
+/// Returns `true` for the sentinel address (`usize::MAX`) that `Weak::new`
+/// stores instead of a real allocation.
+pub(crate) fn is_dangling<T: ?Sized>(ptr: NonNull<T>) -> bool {
+ let address = ptr.as_ptr() as *mut () as usize;
+ address == usize::MAX
+}
+
+impl<T: ?Sized> Weak<T> {
+ /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
+ /// dropping of the inner value if successful.
+ ///
+ /// Returns [`None`] if the inner value has since been dropped.
+ ///
+ /// [`Rc`]: struct.Rc.html
+ /// [`None`]: ../../std/option/enum.Option.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let five = Rc::new(5);
+ ///
+ /// let weak_five = Rc::downgrade(&five);
+ ///
+ /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
+ /// assert!(strong_five.is_some());
+ ///
+ /// // Destroy all strong pointers.
+ /// drop(strong_five);
+ /// drop(five);
+ ///
+ /// assert!(weak_five.upgrade().is_none());
+ /// ```
+ #[stable(feature = "rc_weak", since = "1.4.0")]
+ pub fn upgrade(&self) -> Option<Rc<T>> {
+ let inner = self.inner()?;
+ // strong == 0 means the value has already been dropped, even though
+ // the allocation is still alive for remaining Weaks.
+ if inner.strong() == 0 {
+ None
+ } else {
+ inner.inc_strong();
+ Some(Rc::from_inner(self.ptr))
+ }
+ }
+
+ /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
+ ///
+ /// If `self` was created using [`Weak::new`], this will return 0.
+ ///
+ /// [`Weak::new`]: #method.new
+ #[stable(feature = "weak_counts", since = "1.41.0")]
+ pub fn strong_count(&self) -> usize {
+ if let Some(inner) = self.inner() { inner.strong() } else { 0 }
+ }
+
+ /// Gets the number of `Weak` pointers pointing to this allocation.
+ ///
+ /// If no strong pointers remain, this will return zero.
+ #[stable(feature = "weak_counts", since = "1.41.0")]
+ pub fn weak_count(&self) -> usize {
+ self.inner()
+ .map(|inner| {
+ if inner.strong() > 0 {
+ inner.weak() - 1 // subtract the implicit weak ptr
+ } else {
+ 0
+ }
+ })
+ .unwrap_or(0)
+ }
+
+ /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
+ /// (i.e., when this `Weak` was created by `Weak::new`).
+ #[inline]
+ fn inner(&self) -> Option<&RcBox<T>> {
+ // SAFETY (of as_ref): a non-dangling Weak keeps the allocation alive
+ // even after the value is dropped (see type-level docs above).
+ if is_dangling(self.ptr) { None } else { Some(unsafe { self.ptr.as_ref() }) }
+ }
+
+ /// Returns `true` if the two `Weak`s point to the same allocation (similar to
+ /// [`ptr::eq`]), or if both don't point to any allocation
+ /// (because they were created with `Weak::new()`).
+ ///
+ /// # Notes
+ ///
+ /// Since this compares pointers it means that `Weak::new()` will equal each
+ /// other, even though they don't point to any allocation.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::Rc;
+ ///
+ /// let first_rc = Rc::new(5);
+ /// let first = Rc::downgrade(&first_rc);
+ /// let second = Rc::downgrade(&first_rc);
+ ///
+ /// assert!(first.ptr_eq(&second));
+ ///
+ /// let third_rc = Rc::new(5);
+ /// let third = Rc::downgrade(&third_rc);
+ ///
+ /// assert!(!first.ptr_eq(&third));
+ /// ```
+ ///
+ /// Comparing `Weak::new`.
+ ///
+ /// ```
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// let first = Weak::new();
+ /// let second = Weak::new();
+ /// assert!(first.ptr_eq(&second));
+ ///
+ /// let third_rc = Rc::new(());
+ /// let third = Rc::downgrade(&third_rc);
+ /// assert!(!first.ptr_eq(&third));
+ /// ```
+ ///
+ /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
+ #[inline]
+ #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ // Two dangling Weaks share the usize::MAX sentinel and so compare equal.
+ self.ptr.as_ptr() == other.ptr.as_ptr()
+ }
+}
+
#[stable(feature = "rc_weak", since = "1.4.0")]
impl<T: ?Sized> Drop for Weak<T> {
    /// Drops the `Weak` pointer.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Foo;
    ///
    /// impl Drop for Foo {
    ///     fn drop(&mut self) {
    ///         println!("dropped!");
    ///     }
    /// }
    ///
    /// let foo = Rc::new(Foo);
    /// let weak_foo = Rc::downgrade(&foo);
    /// let other_weak_foo = Weak::clone(&weak_foo);
    ///
    /// drop(weak_foo);   // Doesn't print anything
    /// drop(foo);        // Prints "dropped!"
    ///
    /// assert!(other_weak_foo.upgrade().is_none());
    /// ```
    fn drop(&mut self) {
        // A dangling `Weak` (from `Weak::new`) owns nothing; only a live one
        // participates in the weak count.
        if let Some(inner) = self.inner() {
            inner.dec_weak();
            // the weak count starts at 1, and will only go to zero if all
            // the strong pointers have disappeared.
            if inner.weak() == 0 {
                // No strong or weak handles remain: free the backing
                // allocation. (The value itself was already dropped when the
                // last strong pointer went away.)
                unsafe {
                    Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref()));
                }
            }
        }
    }
}
+
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized> Clone for Weak<T> {
+ /// Makes a clone of the `Weak` pointer that points to the same allocation.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::rc::{Rc, Weak};
+ ///
+ /// let weak_five = Rc::downgrade(&Rc::new(5));
+ ///
+ /// let _ = Weak::clone(&weak_five);
+ /// ```
+ #[inline]
+ fn clone(&self) -> Weak<T> {
+ if let Some(inner) = self.inner() {
+ inner.inc_weak()
+ }
+ Weak { ptr: self.ptr }
+ }
+}
+
+#[stable(feature = "rc_weak", since = "1.4.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "(Weak)")
+ }
+}
+
#[stable(feature = "downgraded_weak", since = "1.10.0")]
impl<T> Default for Weak<T> {
    /// Constructs a new `Weak<T>`, without allocating any memory.
    /// Calling [`upgrade`] on the return value always gives [`None`].
    ///
    /// [`None`]: ../../std/option/enum.Option.html
    /// [`upgrade`]: ../../std/rc/struct.Weak.html#method.upgrade
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64> = Default::default();
    /// assert!(empty.upgrade().is_none());
    /// ```
    fn default() -> Weak<T> {
        Weak::new()
    }
}
+
// NOTE: If you mem::forget Rcs (or Weaks), the ref-count can overflow, and
// then you can free the allocation while outstanding Rcs (or Weaks) exist.
// The increment paths (`inc_strong`/`inc_weak`) guard against this by
// explicitly checking for `usize::MAX` and aborting: this is such a
// degenerate scenario that we don't care what happens -- no real program
// should ever experience it.
//
// This should have negligible overhead since you don't actually need to
// clone these much in Rust thanks to ownership and move-semantics.
+
#[doc(hidden)]
/// Internal accessor for the shared `RcBox` header: strong/weak count
/// bookkeeping used by both `Rc` and `RcBox` itself.
trait RcBoxPtr<T: ?Sized> {
    fn inner(&self) -> &RcBox<T>;

    #[inline]
    fn strong(&self) -> usize {
        self.inner().strong.get()
    }

    #[inline]
    fn inc_strong(&self) {
        let strong = self.strong();

        // We want to abort on overflow instead of dropping the value.
        // The reference count will never be zero when this is called;
        // nevertheless, we insert an abort here to hint LLVM at
        // an otherwise missed optimization.
        if strong == 0 || strong == usize::MAX {
            abort();
        }
        self.inner().strong.set(strong + 1);
    }

    #[inline]
    fn dec_strong(&self) {
        // NOTE(review): assumes the caller holds a strong reference, so the
        // count is nonzero and this cannot underflow.
        self.inner().strong.set(self.strong() - 1);
    }

    #[inline]
    fn weak(&self) -> usize {
        self.inner().weak.get()
    }

    #[inline]
    fn inc_weak(&self) {
        let weak = self.weak();

        // We want to abort on overflow instead of dropping the value.
        // The reference count will never be zero when this is called;
        // nevertheless, we insert an abort here to hint LLVM at
        // an otherwise missed optimization.
        if weak == 0 || weak == usize::MAX {
            abort();
        }
        self.inner().weak.set(weak + 1);
    }

    #[inline]
    fn dec_weak(&self) {
        // NOTE(review): same nonzero precondition as `dec_strong`.
        self.inner().weak.set(self.weak() - 1);
    }
}
+
impl<T: ?Sized> RcBoxPtr<T> for Rc<T> {
    #[inline(always)]
    fn inner(&self) -> &RcBox<T> {
        // SAFETY: an `Rc` always points at a live `RcBox` for as long as it
        // exists, so dereferencing `self.ptr` is sound here.
        unsafe { self.ptr.as_ref() }
    }
}
+
impl<T: ?Sized> RcBoxPtr<T> for RcBox<T> {
    #[inline(always)]
    fn inner(&self) -> &RcBox<T> {
        // An `RcBox` is its own header.
        self
    }
}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> borrow::Borrow<T> for Rc<T> {
    fn borrow(&self) -> &T {
        // Delegate to `Deref`: an `Rc<T>` borrows as its payload.
        &**self
    }
}
+
#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
impl<T: ?Sized> AsRef<T> for Rc<T> {
    fn as_ref(&self) -> &T {
        // Same as `Borrow::borrow`: expose the payload via `Deref`.
        &**self
    }
}
+
#[stable(feature = "pin", since = "1.33.0")]
// Moving the `Rc` handle never moves the heap-allocated pointee, so `Rc<T>`
// can be `Unpin` regardless of whether `T` is.
impl<T: ?Sized> Unpin for Rc<T> {}
+
/// Get the offset within an `ArcInner` for
/// a payload of type described by a pointer.
///
/// # Safety
///
/// This has the same safety requirements as `align_of_val_raw`. In effect:
///
/// - This function is safe for any argument if `T` is sized, and
/// - if `T` is unsized, the pointer must have appropriate pointer metadata
///   acquired from the real instance that you are getting this offset for.
unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
    // Align the unsized value to the end of the `RcBox`.
    // Because it is ?Sized, it will always be the last field in memory.
    // Note: This is a detail of the current implementation of the compiler,
    // and is not a guaranteed language detail. Do not rely on it outside of std.
    unsafe { data_offset_align(align_of_val_raw(ptr)) }
}
+
#[inline]
fn data_offset_align(align: usize) -> isize {
    // The payload lives after the refcount header, padded up to the payload's
    // alignment: offset = header size + padding needed to reach `align`.
    let layout = Layout::new::<RcBox<()>>();
    (layout.size() + layout.padding_needed_for(align)) as isize
}
diff --git a/library/alloc/src/rc/tests.rs b/library/alloc/src/rc/tests.rs
new file mode 100644
index 00000000000..e88385faf4f
--- /dev/null
+++ b/library/alloc/src/rc/tests.rs
@@ -0,0 +1,436 @@
+use super::*;
+
+use std::boxed::Box;
+use std::cell::RefCell;
+use std::clone::Clone;
+use std::convert::{From, TryInto};
+use std::mem::drop;
+use std::option::Option::{self, None, Some};
+use std::result::Result::{Err, Ok};
+
// Basic construction, cloning, and liveness semantics of `Rc`/`Weak`.
#[test]
fn test_clone() {
    // Clones share one allocation: mutating through `x` is visible via `y`.
    let x = Rc::new(RefCell::new(5));
    let y = x.clone();
    *x.borrow_mut() = 20;
    assert_eq!(*y.borrow(), 20);
}

#[test]
fn test_simple() {
    let x = Rc::new(5);
    assert_eq!(*x, 5);
}

#[test]
fn test_simple_clone() {
    let x = Rc::new(5);
    let y = x.clone();
    assert_eq!(*x, 5);
    assert_eq!(*y, 5);
}

#[test]
fn test_destructor() {
    let x: Rc<Box<_>> = Rc::new(box 5);
    assert_eq!(**x, 5);
}

#[test]
fn test_live() {
    let x = Rc::new(5);
    let y = Rc::downgrade(&x);
    assert!(y.upgrade().is_some());
}

#[test]
fn test_dead() {
    // Dropping the last strong pointer makes the weak unupgradable.
    let x = Rc::new(5);
    let y = Rc::downgrade(&x);
    drop(x);
    assert!(y.upgrade().is_none());
}

#[test]
fn weak_self_cyclic() {
    // A self-referential weak cycle must neither leak nor double-free.
    struct Cycle {
        x: RefCell<Option<Weak<Cycle>>>,
    }

    let a = Rc::new(Cycle { x: RefCell::new(None) });
    let b = Rc::downgrade(&a.clone());
    *a.x.borrow_mut() = Some(b);

    // hopefully we don't double-free (or leak)...
}

#[test]
fn is_unique() {
    // Uniqueness requires no other strong *and* no weak pointers.
    let x = Rc::new(3);
    assert!(Rc::is_unique(&x));
    let y = x.clone();
    assert!(!Rc::is_unique(&x));
    drop(y);
    assert!(Rc::is_unique(&x));
    let w = Rc::downgrade(&x);
    assert!(!Rc::is_unique(&x));
    drop(w);
    assert!(Rc::is_unique(&x));
}

#[test]
fn test_strong_count() {
    // Downgrading does not affect the strong count; upgrading does.
    let a = Rc::new(0);
    assert!(Rc::strong_count(&a) == 1);
    let w = Rc::downgrade(&a);
    assert!(Rc::strong_count(&a) == 1);
    let b = w.upgrade().expect("upgrade of live rc failed");
    assert!(Rc::strong_count(&b) == 2);
    assert!(Rc::strong_count(&a) == 2);
    drop(w);
    drop(a);
    assert!(Rc::strong_count(&b) == 1);
    let c = b.clone();
    assert!(Rc::strong_count(&b) == 2);
    assert!(Rc::strong_count(&c) == 2);
}

#[test]
fn test_weak_count() {
    // `weak_count` excludes the implicit weak held by the strong pointers.
    let a = Rc::new(0);
    assert!(Rc::strong_count(&a) == 1);
    assert!(Rc::weak_count(&a) == 0);
    let w = Rc::downgrade(&a);
    assert!(Rc::strong_count(&a) == 1);
    assert!(Rc::weak_count(&a) == 1);
    drop(w);
    assert!(Rc::strong_count(&a) == 1);
    assert!(Rc::weak_count(&a) == 0);
    let c = a.clone();
    assert!(Rc::strong_count(&a) == 2);
    assert!(Rc::weak_count(&a) == 0);
    drop(c);
}
+
// Weak-side counters, try_unwrap, and raw-pointer round trips.
#[test]
fn weak_counts() {
    // Counts observed from the `Weak` side; both hit 0 once the value is gone.
    assert_eq!(Weak::weak_count(&Weak::<u64>::new()), 0);
    assert_eq!(Weak::strong_count(&Weak::<u64>::new()), 0);

    let a = Rc::new(0);
    let w = Rc::downgrade(&a);
    assert_eq!(Weak::strong_count(&w), 1);
    assert_eq!(Weak::weak_count(&w), 1);
    let w2 = w.clone();
    assert_eq!(Weak::strong_count(&w), 1);
    assert_eq!(Weak::weak_count(&w), 2);
    assert_eq!(Weak::strong_count(&w2), 1);
    assert_eq!(Weak::weak_count(&w2), 2);
    drop(w);
    assert_eq!(Weak::strong_count(&w2), 1);
    assert_eq!(Weak::weak_count(&w2), 1);
    let a2 = a.clone();
    assert_eq!(Weak::strong_count(&w2), 2);
    assert_eq!(Weak::weak_count(&w2), 1);
    drop(a2);
    drop(a);
    assert_eq!(Weak::strong_count(&w2), 0);
    assert_eq!(Weak::weak_count(&w2), 0);
    drop(w2);
}

#[test]
fn try_unwrap() {
    // Succeeds only when the strong count is exactly 1; weaks don't block it.
    let x = Rc::new(3);
    assert_eq!(Rc::try_unwrap(x), Ok(3));
    let x = Rc::new(4);
    let _y = x.clone();
    assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4)));
    let x = Rc::new(5);
    let _w = Rc::downgrade(&x);
    assert_eq!(Rc::try_unwrap(x), Ok(5));
}

#[test]
fn into_from_raw() {
    // into_raw/from_raw round-trips without disturbing the value.
    let x = Rc::new(box "hello");
    let y = x.clone();

    let x_ptr = Rc::into_raw(x);
    drop(y);
    unsafe {
        assert_eq!(**x_ptr, "hello");

        let x = Rc::from_raw(x_ptr);
        assert_eq!(**x, "hello");

        assert_eq!(Rc::try_unwrap(x).map(|x| *x), Ok("hello"));
    }
}

#[test]
fn test_into_from_raw_unsized() {
    // Round-trips must also preserve fat-pointer metadata (str length, vtable).
    use std::fmt::Display;
    use std::string::ToString;

    let rc: Rc<str> = Rc::from("foo");

    let ptr = Rc::into_raw(rc.clone());
    let rc2 = unsafe { Rc::from_raw(ptr) };

    assert_eq!(unsafe { &*ptr }, "foo");
    assert_eq!(rc, rc2);

    let rc: Rc<dyn Display> = Rc::new(123);

    let ptr = Rc::into_raw(rc.clone());
    let rc2 = unsafe { Rc::from_raw(ptr) };

    assert_eq!(unsafe { &*ptr }.to_string(), "123");
    assert_eq!(rc2.to_string(), "123");
}

#[test]
fn get_mut() {
    // Mutable access requires uniqueness (no other strongs, no weaks).
    let mut x = Rc::new(3);
    *Rc::get_mut(&mut x).unwrap() = 4;
    assert_eq!(*x, 4);
    let y = x.clone();
    assert!(Rc::get_mut(&mut x).is_none());
    drop(y);
    assert!(Rc::get_mut(&mut x).is_some());
    let _w = Rc::downgrade(&x);
    assert!(Rc::get_mut(&mut x).is_none());
}
+
// Copy-on-write behavior of `make_mut`, plus formatting and conversions.
#[test]
fn test_cowrc_clone_make_unique() {
    let mut cow0 = Rc::new(75);
    let mut cow1 = cow0.clone();
    let mut cow2 = cow1.clone();

    assert!(75 == *Rc::make_mut(&mut cow0));
    assert!(75 == *Rc::make_mut(&mut cow1));
    assert!(75 == *Rc::make_mut(&mut cow2));

    *Rc::make_mut(&mut cow0) += 1;
    *Rc::make_mut(&mut cow1) += 2;
    *Rc::make_mut(&mut cow2) += 3;

    assert!(76 == *cow0);
    assert!(77 == *cow1);
    assert!(78 == *cow2);

    // none should point to the same backing memory
    assert!(*cow0 != *cow1);
    assert!(*cow0 != *cow2);
    assert!(*cow1 != *cow2);
}

#[test]
fn test_cowrc_clone_unique2() {
    // Only the mutated handle gets a fresh allocation; the others still share.
    let mut cow0 = Rc::new(75);
    let cow1 = cow0.clone();
    let cow2 = cow1.clone();

    assert!(75 == *cow0);
    assert!(75 == *cow1);
    assert!(75 == *cow2);

    *Rc::make_mut(&mut cow0) += 1;

    assert!(76 == *cow0);
    assert!(75 == *cow1);
    assert!(75 == *cow2);

    // cow1 and cow2 should share the same contents
    // cow0 should have a unique reference
    assert!(*cow0 != *cow1);
    assert!(*cow0 != *cow2);
    assert!(*cow1 == *cow2);
}

#[test]
fn test_cowrc_clone_weak() {
    // `make_mut` on a strong-unique Rc disassociates outstanding weaks.
    let mut cow0 = Rc::new(75);
    let cow1_weak = Rc::downgrade(&cow0);

    assert!(75 == *cow0);
    assert!(75 == *cow1_weak.upgrade().unwrap());

    *Rc::make_mut(&mut cow0) += 1;

    assert!(76 == *cow0);
    assert!(cow1_weak.upgrade().is_none());
}

#[test]
fn test_show() {
    // Debug formatting is forwarded to the inner value.
    let foo = Rc::new(75);
    assert_eq!(format!("{:?}", foo), "75");
}

#[test]
fn test_unsized() {
    let foo: Rc<[i32]> = Rc::new([1, 2, 3]);
    assert_eq!(foo, foo.clone());
}

#[test]
fn test_from_owned() {
    let foo = 123;
    let foo_rc = Rc::from(foo);
    assert!(123 == *foo_rc);
}

#[test]
fn test_new_weak() {
    // A fresh `Weak` has no allocation, so upgrading always fails.
    let foo: Weak<usize> = Weak::new();
    assert!(foo.upgrade().is_none());
}
+#[test]
+fn test_ptr_eq() {
+ let five = Rc::new(5);
+ let same_five = five.clone();
+ let other_five = Rc::new(5);
+
+ assert!(Rc::ptr_eq(&five, &same_five));
+ assert!(!Rc::ptr_eq(&five, &other_five));
+}
+
// `From`/`TryInto` conversions into `Rc`, and `Any` downcasting.
#[test]
fn test_from_str() {
    let r: Rc<str> = Rc::from("foo");

    assert_eq!(&r[..], "foo");
}

#[test]
fn test_copy_from_slice() {
    let s: &[u32] = &[1, 2, 3];
    let r: Rc<[u32]> = Rc::from(s);

    assert_eq!(&r[..], [1, 2, 3]);
}

#[test]
fn test_clone_from_slice() {
    #[derive(Clone, Debug, Eq, PartialEq)]
    struct X(u32);

    let s: &[X] = &[X(1), X(2), X(3)];
    let r: Rc<[X]> = Rc::from(s);

    assert_eq!(&r[..], s);
}

#[test]
#[should_panic]
fn test_clone_from_slice_panic() {
    // A panicking `Clone` mid-construction must unwind cleanly (no UB/leak of
    // partially-initialized storage).
    use std::string::{String, ToString};

    struct Fail(u32, String);

    impl Clone for Fail {
        fn clone(&self) -> Fail {
            if self.0 == 2 {
                panic!();
            }
            Fail(self.0, self.1.clone())
        }
    }

    let s: &[Fail] =
        &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())];

    // Should panic, but not cause memory corruption
    let _r: Rc<[Fail]> = Rc::from(s);
}

#[test]
fn test_from_box() {
    let b: Box<u32> = box 123;
    let r: Rc<u32> = Rc::from(b);

    assert_eq!(*r, 123);
}

#[test]
fn test_from_box_str() {
    use std::string::String;

    let s = String::from("foo").into_boxed_str();
    let r: Rc<str> = Rc::from(s);

    assert_eq!(&r[..], "foo");
}

#[test]
fn test_from_box_slice() {
    let s = vec![1, 2, 3].into_boxed_slice();
    let r: Rc<[u32]> = Rc::from(s);

    assert_eq!(&r[..], [1, 2, 3]);
}

#[test]
fn test_from_box_trait() {
    // Converting a boxed trait object must carry the vtable across.
    use std::fmt::Display;
    use std::string::ToString;

    let b: Box<dyn Display> = box 123;
    let r: Rc<dyn Display> = Rc::from(b);

    assert_eq!(r.to_string(), "123");
}

#[test]
fn test_from_box_trait_zero_sized() {
    use std::fmt::Debug;

    let b: Box<dyn Debug> = box ();
    let r: Rc<dyn Debug> = Rc::from(b);

    assert_eq!(format!("{:?}", r), "()");
}

#[test]
fn test_from_vec() {
    let v = vec![1, 2, 3];
    let r: Rc<[u32]> = Rc::from(v);

    assert_eq!(&r[..], [1, 2, 3]);
}

#[test]
fn test_downcast() {
    // Downcasting succeeds only for the exact erased type.
    use std::any::Any;

    let r1: Rc<dyn Any> = Rc::new(i32::MAX);
    let r2: Rc<dyn Any> = Rc::new("abc");

    assert!(r1.clone().downcast::<u32>().is_err());

    let r1i32 = r1.downcast::<i32>();
    assert!(r1i32.is_ok());
    assert_eq!(r1i32.unwrap(), Rc::new(i32::MAX));

    assert!(r2.clone().downcast::<i32>().is_err());

    let r2str = r2.downcast::<&'static str>();
    assert!(r2str.is_ok());
    assert_eq!(r2str.unwrap(), Rc::new("abc"));
}

#[test]
fn test_array_from_slice() {
    // TryInto to a fixed-size array Rc succeeds only on an exact length match.
    let v = vec![1, 2, 3];
    let r: Rc<[u32]> = Rc::from(v);

    let a: Result<Rc<[u32; 3]>, _> = r.clone().try_into();
    assert!(a.is_ok());

    let a: Result<Rc<[u32; 2]>, _> = r.clone().try_into();
    assert!(a.is_err());
}
diff --git a/library/alloc/src/slice.rs b/library/alloc/src/slice.rs
new file mode 100644
index 00000000000..3d51115fe01
--- /dev/null
+++ b/library/alloc/src/slice.rs
@@ -0,0 +1,1069 @@
+//! A dynamically-sized view into a contiguous sequence, `[T]`.
+//!
+//! *[See also the slice primitive type](../../std/primitive.slice.html).*
+//!
+//! Slices are a view into a block of memory represented as a pointer and a
+//! length.
+//!
+//! ```
+//! // slicing a Vec
+//! let vec = vec![1, 2, 3];
+//! let int_slice = &vec[..];
+//! // coercing an array to a slice
+//! let str_slice: &[&str] = &["one", "two", "three"];
+//! ```
+//!
+//! Slices are either mutable or shared. The shared slice type is `&[T]`,
+//! while the mutable slice type is `&mut [T]`, where `T` represents the element
+//! type. For example, you can mutate the block of memory that a mutable slice
+//! points to:
+//!
+//! ```
+//! let x = &mut [1, 2, 3];
+//! x[1] = 7;
+//! assert_eq!(x, &[1, 7, 3]);
+//! ```
+//!
+//! Here are some of the things this module contains:
+//!
+//! ## Structs
+//!
+//! There are several structs that are useful for slices, such as [`Iter`], which
+//! represents iteration over a slice.
+//!
+//! ## Trait Implementations
+//!
+//! There are several implementations of common traits for slices. Some examples
+//! include:
+//!
+//! * [`Clone`]
+//! * [`Eq`], [`Ord`] - for slices whose element type are [`Eq`] or [`Ord`].
+//! * [`Hash`] - for slices whose element type is [`Hash`].
+//!
+//! ## Iteration
+//!
+//! The slices implement `IntoIterator`. The iterator yields references to the
+//! slice elements.
+//!
+//! ```
+//! let numbers = &[0, 1, 2];
+//! for n in numbers {
+//! println!("{} is a number!", n);
+//! }
+//! ```
+//!
+//! The mutable slice yields mutable references to the elements:
+//!
+//! ```
+//! let mut scores = [7, 8, 9];
+//! for score in &mut scores[..] {
+//! *score += 1;
+//! }
+//! ```
+//!
+//! This iterator yields mutable references to the slice's elements, so while
+//! the element type of the slice is `i32`, the element type of the iterator is
+//! `&mut i32`.
+//!
+//! * [`.iter`] and [`.iter_mut`] are the explicit methods to return the default
+//! iterators.
+//! * Further methods that return iterators are [`.split`], [`.splitn`],
+//! [`.chunks`], [`.windows`] and more.
+//!
+//! [`Clone`]: ../../std/clone/trait.Clone.html
+//! [`Eq`]: ../../std/cmp/trait.Eq.html
+//! [`Ord`]: ../../std/cmp/trait.Ord.html
+//! [`Iter`]: struct.Iter.html
+//! [`Hash`]: ../../std/hash/trait.Hash.html
+//! [`.iter`]: ../../std/primitive.slice.html#method.iter
+//! [`.iter_mut`]: ../../std/primitive.slice.html#method.iter_mut
+//! [`.split`]: ../../std/primitive.slice.html#method.split
+//! [`.splitn`]: ../../std/primitive.slice.html#method.splitn
+//! [`.chunks`]: ../../std/primitive.slice.html#method.chunks
+//! [`.windows`]: ../../std/primitive.slice.html#method.windows
+#![stable(feature = "rust1", since = "1.0.0")]
+// Many of the usings in this module are only used in the test configuration.
+// It's cleaner to just turn off the unused_imports warning than to fix them.
+#![cfg_attr(test, allow(unused_imports, dead_code))]
+
+use core::borrow::{Borrow, BorrowMut};
+use core::cmp::Ordering::{self, Less};
+use core::mem::{self, size_of};
+use core::ptr;
+
+use crate::borrow::ToOwned;
+use crate::boxed::Box;
+use crate::vec::Vec;
+
+#[stable(feature = "slice_get_slice", since = "1.28.0")]
+pub use core::slice::SliceIndex;
+#[stable(feature = "from_ref", since = "1.28.0")]
+pub use core::slice::{from_mut, from_ref};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{from_raw_parts, from_raw_parts_mut};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{Chunks, Windows};
+#[stable(feature = "chunks_exact", since = "1.31.0")]
+pub use core::slice::{ChunksExact, ChunksExactMut};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{ChunksMut, Split, SplitMut};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{Iter, IterMut};
+#[stable(feature = "rchunks", since = "1.31.0")]
+pub use core::slice::{RChunks, RChunksExact, RChunksExactMut, RChunksMut};
+#[stable(feature = "slice_rsplit", since = "1.27.0")]
+pub use core::slice::{RSplit, RSplitMut};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::slice::{RSplitN, RSplitNMut, SplitN, SplitNMut};
+
+////////////////////////////////////////////////////////////////////////////////
+// Basic slice extension methods
+////////////////////////////////////////////////////////////////////////////////
+
+// HACK(japaric) needed for the implementation of `vec!` macro during testing
+// N.B., see the `hack` module in this file for more details.
+#[cfg(test)]
+pub use hack::into_vec;
+
+// HACK(japaric) needed for the implementation of `Vec::clone` during testing
+// N.B., see the `hack` module in this file for more details.
+#[cfg(test)]
+pub use hack::to_vec;
+
+// HACK(japaric): With cfg(test) `impl [T]` is not available, these three
+// functions are actually methods that are in `impl [T]` but not in
+// `core::slice::SliceExt` - we need to supply these functions for the
+// `test_permutations` test
mod hack {
    use crate::boxed::Box;
    use crate::vec::Vec;

    // We shouldn't add inline attribute to this since this is used in
    // `vec!` macro mostly and causes perf regression. See #71204 for
    // discussion and perf results.
    /// Converts a boxed slice into a `Vec` without copying the elements.
    pub fn into_vec<T>(b: Box<[T]>) -> Vec<T> {
        unsafe {
            // SAFETY: `b` uniquely owns a heap allocation of exactly `len`
            // initialized `T`s; we take it over as a Vec with len == capacity.
            let len = b.len();
            let b = Box::into_raw(b);
            Vec::from_raw_parts(b as *mut T, len, len)
        }
    }

    /// Clones a slice into a freshly allocated `Vec`.
    #[inline]
    pub fn to_vec<T>(s: &[T]) -> Vec<T>
    where
        T: Clone,
    {
        // Reserve the exact final size up front to avoid regrowth.
        let mut vec = Vec::with_capacity(s.len());
        vec.extend_from_slice(s);
        vec
    }
}
+
+#[lang = "slice_alloc"]
+#[cfg(not(test))]
+impl<T> [T] {
    /// Sorts the slice.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and `O(n * log(n))` worst-case.
    ///
    /// When applicable, unstable sorting is preferred because it is generally faster than stable
    /// sorting and it doesn't allocate auxiliary memory.
    /// See [`sort_unstable`](#method.sort_unstable).
    ///
    /// # Current implementation
    ///
    /// The current algorithm is an adaptive, iterative merge sort inspired by
    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
    /// two or more sorted sequences concatenated one after another.
    ///
    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
    /// non-allocating insertion sort is used instead.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = [-5, 4, 1, -3, 2];
    ///
    /// v.sort();
    /// assert!(v == [-5, -3, 1, 2, 4]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn sort(&mut self)
    where
        T: Ord,
    {
        // "is less than" predicate gives the stable ascending order.
        merge_sort(self, |a, b| a.lt(b));
    }

    /// Sorts the slice with a comparator function.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and `O(n * log(n))` worst-case.
    ///
    /// The comparator function must define a total ordering for the elements in the slice. If
    /// the ordering is not total, the order of the elements is unspecified. An order is a
    /// total order if it is (for all `a`, `b` and `c`):
    ///
    /// * total and antisymmetric: exactly one of `a < b`, `a == b` or `a > b` is true, and
    /// * transitive, `a < b` and `b < c` implies `a < c`. The same must hold for both `==` and `>`.
    ///
    /// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
    /// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
    ///
    /// ```
    /// let mut floats = [5f64, 4.0, 1.0, 3.0, 2.0];
    /// floats.sort_by(|a, b| a.partial_cmp(b).unwrap());
    /// assert_eq!(floats, [1.0, 2.0, 3.0, 4.0, 5.0]);
    /// ```
    ///
    /// When applicable, unstable sorting is preferred because it is generally faster than stable
    /// sorting and it doesn't allocate auxiliary memory.
    /// See [`sort_unstable_by`](#method.sort_unstable_by).
    ///
    /// # Current implementation
    ///
    /// The current algorithm is an adaptive, iterative merge sort inspired by
    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
    /// two or more sorted sequences concatenated one after another.
    ///
    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
    /// non-allocating insertion sort is used instead.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = [5, 4, 1, 3, 2];
    /// v.sort_by(|a, b| a.cmp(b));
    /// assert!(v == [1, 2, 3, 4, 5]);
    ///
    /// // reverse sorting
    /// v.sort_by(|a, b| b.cmp(a));
    /// assert!(v == [5, 4, 3, 2, 1]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn sort_by<F>(&mut self, mut compare: F)
    where
        F: FnMut(&T, &T) -> Ordering,
    {
        // Reduce the three-way comparator to the "is less" predicate that
        // the merge sort expects.
        merge_sort(self, |a, b| compare(a, b) == Less);
    }

    /// Sorts the slice with a key extraction function.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and `O(m * n * log(n))`
    /// worst-case, where the key function is `O(m)`.
    ///
    /// For expensive key functions (e.g. functions that are not simple property accesses or
    /// basic operations), [`sort_by_cached_key`](#method.sort_by_cached_key) is likely to be
    /// significantly faster, as it does not recompute element keys.
    ///
    /// When applicable, unstable sorting is preferred because it is generally faster than stable
    /// sorting and it doesn't allocate auxiliary memory.
    /// See [`sort_unstable_by_key`](#method.sort_unstable_by_key).
    ///
    /// # Current implementation
    ///
    /// The current algorithm is an adaptive, iterative merge sort inspired by
    /// [timsort](https://en.wikipedia.org/wiki/Timsort).
    /// It is designed to be very fast in cases where the slice is nearly sorted, or consists of
    /// two or more sorted sequences concatenated one after another.
    ///
    /// Also, it allocates temporary storage half the size of `self`, but for short slices a
    /// non-allocating insertion sort is used instead.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = [-5i32, 4, 1, -3, 2];
    ///
    /// v.sort_by_key(|k| k.abs());
    /// assert!(v == [1, 2, -3, 4, -5]);
    /// ```
    #[stable(feature = "slice_sort_by_key", since = "1.7.0")]
    #[inline]
    pub fn sort_by_key<K, F>(&mut self, mut f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        // Note: `f` is re-evaluated for both operands of every comparison;
        // `sort_by_cached_key` computes each key only once.
        merge_sort(self, |a, b| f(a).lt(&f(b)));
    }
+
    /// Sorts the slice with a key extraction function.
    ///
    /// During sorting, the key function is called only once per element.
    ///
    /// This sort is stable (i.e., does not reorder equal elements) and `O(m * n + n * log(n))`
    /// worst-case, where the key function is `O(m)`.
    ///
    /// For simple key functions (e.g., functions that are property accesses or
    /// basic operations), [`sort_by_key`](#method.sort_by_key) is likely to be
    /// faster.
    ///
    /// # Current implementation
    ///
    /// The current algorithm is based on [pattern-defeating quicksort][pdqsort] by Orson Peters,
    /// which combines the fast average case of randomized quicksort with the fast worst case of
    /// heapsort, while achieving linear time on slices with certain patterns. It uses some
    /// randomization to avoid degenerate cases, but with a fixed seed to always provide
    /// deterministic behavior.
    ///
    /// In the worst case, the algorithm allocates temporary storage in a `Vec<(K, usize)>` the
    /// length of the slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut v = [-5i32, 4, 32, -3, 2];
    ///
    /// v.sort_by_cached_key(|k| k.to_string());
    /// assert!(v == [-3, -5, 2, 32, 4]);
    /// ```
    ///
    /// [pdqsort]: https://github.com/orlp/pdqsort
    #[stable(feature = "slice_sort_by_cached_key", since = "1.34.0")]
    #[inline]
    pub fn sort_by_cached_key<K, F>(&mut self, f: F)
    where
        F: FnMut(&T) -> K,
        K: Ord,
    {
        // Helper macro for indexing our vector by the smallest possible type, to reduce allocation.
        macro_rules! sort_by_key {
            ($t:ty, $slice:ident, $f:ident) => {{
                let mut indices: Vec<_> =
                    $slice.iter().map($f).enumerate().map(|(i, k)| (k, i as $t)).collect();
                // The elements of `indices` are unique, as they are indexed, so any sort will be
                // stable with respect to the original slice. We use `sort_unstable` here because
                // it requires less memory allocation.
                indices.sort_unstable();
                // Apply the sorted permutation in place; the inner `while`
                // chases already-moved entries to their current position.
                for i in 0..$slice.len() {
                    let mut index = indices[i].1;
                    while (index as usize) < i {
                        index = indices[index as usize].1;
                    }
                    indices[i].1 = index;
                    $slice.swap(i, index as usize);
                }
            }};
        }

        // Pick the narrowest index type whose pairing with K actually
        // shrinks the temporary buffer, and which can address `len`.
        let sz_u8 = mem::size_of::<(K, u8)>();
        let sz_u16 = mem::size_of::<(K, u16)>();
        let sz_u32 = mem::size_of::<(K, u32)>();
        let sz_usize = mem::size_of::<(K, usize)>();

        let len = self.len();
        if len < 2 {
            return;
        }
        if sz_u8 < sz_u16 && len <= (u8::MAX as usize) {
            return sort_by_key!(u8, self, f);
        }
        if sz_u16 < sz_u32 && len <= (u16::MAX as usize) {
            return sort_by_key!(u16, self, f);
        }
        if sz_u32 < sz_usize && len <= (u32::MAX as usize) {
            return sort_by_key!(u32, self, f);
        }
        sort_by_key!(usize, self, f)
    }

    /// Copies `self` into a new `Vec`.
    ///
    /// # Examples
    ///
    /// ```
    /// let s = [10, 40, 30];
    /// let x = s.to_vec();
    /// // Here, `s` and `x` can be modified independently.
    /// ```
    #[rustc_conversion_suggestion]
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn to_vec(&self) -> Vec<T>
    where
        T: Clone,
    {
        // N.B., see the `hack` module in this file for more details.
        hack::to_vec(self)
    }

    /// Converts `self` into a vector without clones or allocation.
    ///
    /// The resulting vector can be converted back into a box via
    /// `Vec<T>`'s `into_boxed_slice` method.
    ///
    /// # Examples
    ///
    /// ```
    /// let s: Box<[i32]> = Box::new([10, 40, 30]);
    /// let x = s.into_vec();
    /// // `s` cannot be used anymore because it has been converted into `x`.
    ///
    /// assert_eq!(x, vec![10, 40, 30]);
    /// ```
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn into_vec(self: Box<Self>) -> Vec<T> {
        // N.B., see the `hack` module in this file for more details.
        hack::into_vec(self)
    }
+
+    /// Creates a vector by repeating a slice `n` times.
+    ///
+    /// # Panics
+    ///
+    /// This function will panic if the capacity would overflow.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// assert_eq!([1, 2].repeat(3), vec![1, 2, 1, 2, 1, 2]);
+    /// ```
+    ///
+    /// A panic upon overflow:
+    ///
+    /// ```should_panic
+    /// // this will panic at runtime
+    /// b"0123456789abcdef".repeat(usize::MAX);
+    /// ```
+    #[stable(feature = "repeat_generic_slice", since = "1.40.0")]
+    pub fn repeat(&self, n: usize) -> Vec<T>
+    where
+        T: Copy,
+    {
+        if n == 0 {
+            return Vec::new();
+        }
+
+        // If `n` is larger than zero, it can be split as
+        // `n = 2^expn + rem (2^expn > rem, expn >= 0, rem >= 0)`.
+        // `2^expn` is the number represented by the leftmost '1' bit of `n`,
+        // and `rem` is the remaining part of `n`.
+
+        // Using `Vec` to access `set_len()`.
+        let capacity = self.len().checked_mul(n).expect("capacity overflow");
+        let mut buf = Vec::with_capacity(capacity);
+
+        // `2^expn` repetition is done by doubling `buf` `expn`-times.
+        buf.extend(self);
+        {
+            let mut m = n >> 1;
+            // If `m > 0`, there are remaining bits up to the leftmost '1'.
+            while m > 0 {
+                // `buf.extend(buf)`:
+                // SAFETY: while any bit of `n` remains (`m > 0`), `2 * buf.len()`
+                // cannot exceed `capacity` (`= self.len() * n`), so both the source
+                // range and the disjoint destination range lie within the allocation.
+                unsafe {
+                    ptr::copy_nonoverlapping(
+                        buf.as_ptr(),
+                        (buf.as_mut_ptr() as *mut T).add(buf.len()),
+                        buf.len(),
+                    );
+                    // `buf` has capacity of `self.len() * n`.
+                    let buf_len = buf.len();
+                    buf.set_len(buf_len * 2);
+                }
+
+                m >>= 1;
+            }
+        }
+
+        // `rem` (`= n - 2^expn`) repetition is done by copying
+        // first `rem` repetitions from `buf` itself.
+        let rem_len = capacity - buf.len(); // `self.len() * rem`
+        if rem_len > 0 {
+            // `buf.extend(buf[0 .. rem_len])`:
+            // SAFETY: `buf.len() + rem_len == capacity`, so the destination range
+            // stays inside the allocation made above.
+            unsafe {
+                // This is non-overlapping since `2^expn > rem`.
+                ptr::copy_nonoverlapping(
+                    buf.as_ptr(),
+                    (buf.as_mut_ptr() as *mut T).add(buf.len()),
+                    rem_len,
+                );
+                // `buf.len() + rem_len` equals to `buf.capacity()` (`= self.len() * n`).
+                buf.set_len(capacity);
+            }
+        }
+        buf
+    }
+
+    /// Flattens a slice of `T` into a single value `Self::Output`.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// assert_eq!(["hello", "world"].concat(), "helloworld");
+    /// assert_eq!([[1, 2], [3, 4]].concat(), [1, 2, 3, 4]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn concat<Item: ?Sized>(&self) -> <Self as Concat<Item>>::Output
+    where
+        Self: Concat<Item>,
+    {
+        // Forward to the `Concat` helper trait, which selects the right
+        // implementation for the element type.
+        <Self as Concat<Item>>::concat(self)
+    }
+
+    /// Flattens a slice of `T` into a single value `Self::Output`, placing a
+    /// given separator between each.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// assert_eq!(["hello", "world"].join(" "), "hello world");
+    /// assert_eq!([[1, 2], [3, 4]].join(&0), [1, 2, 0, 3, 4]);
+    /// assert_eq!([[1, 2], [3, 4]].join(&[0, 0][..]), [1, 2, 0, 0, 3, 4]);
+    /// ```
+    #[stable(feature = "rename_connect_to_join", since = "1.3.0")]
+    pub fn join<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
+    where
+        Self: Join<Separator>,
+    {
+        // Forward to the `Join` helper trait, which selects the right
+        // implementation for the separator type.
+        <Self as Join<Separator>>::join(self, sep)
+    }
+
+    /// Flattens a slice of `T` into a single value `Self::Output`, placing a
+    /// given separator between each.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// # #![allow(deprecated)]
+    /// assert_eq!(["hello", "world"].connect(" "), "hello world");
+    /// assert_eq!([[1, 2], [3, 4]].connect(&0), [1, 2, 0, 3, 4]);
+    /// ```
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[rustc_deprecated(since = "1.3.0", reason = "renamed to join")]
+    pub fn connect<Separator>(&self, sep: Separator) -> <Self as Join<Separator>>::Output
+    where
+        Self: Join<Separator>,
+    {
+        // Deprecated alias of `join`; forwards to exactly the same implementation.
+        Join::join(self, sep)
+    }
+}
+
+#[lang = "slice_u8_alloc"]
+#[cfg(not(test))]
+impl [u8] {
+    /// Returns a vector containing a copy of this slice where each byte
+    /// is mapped to its ASCII upper case equivalent.
+    ///
+    /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
+    /// but non-ASCII letters are unchanged.
+    ///
+    /// To uppercase the value in-place, use [`make_ascii_uppercase`].
+    ///
+    /// [`make_ascii_uppercase`]: #method.make_ascii_uppercase
+    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+    #[inline]
+    pub fn to_ascii_uppercase(&self) -> Vec<u8> {
+        // Copy the bytes first, then perform the case mapping in place on the copy.
+        let mut uppercased = self.to_vec();
+        uppercased.make_ascii_uppercase();
+        uppercased
+    }
+
+    /// Returns a vector containing a copy of this slice where each byte
+    /// is mapped to its ASCII lower case equivalent.
+    ///
+    /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
+    /// but non-ASCII letters are unchanged.
+    ///
+    /// To lowercase the value in-place, use [`make_ascii_lowercase`].
+    ///
+    /// [`make_ascii_lowercase`]: #method.make_ascii_lowercase
+    #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+    #[inline]
+    pub fn to_ascii_lowercase(&self) -> Vec<u8> {
+        // Copy the bytes first, then perform the case mapping in place on the copy.
+        let mut lowercased = self.to_vec();
+        lowercased.make_ascii_lowercase();
+        lowercased
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Extension traits for slices over specific kinds of data
+////////////////////////////////////////////////////////////////////////////////
+
+/// Helper trait for [`[T]::concat`](../../std/primitive.slice.html#method.concat).
+///
+/// Note: the `Item` type parameter is not used in this trait,
+/// but it allows impls to be more generic.
+/// Without it, we get this error:
+///
+/// ```error
+/// error[E0207]: the type parameter `T` is not constrained by the impl trait, self type, or predica
+///    --> src/liballoc/slice.rs:608:6
+///     |
+/// 608 | impl<T: Clone, V: Borrow<[T]>> Concat for [V] {
+///     |      ^ unconstrained type parameter
+/// ```
+///
+/// This is because there could exist `V` types with multiple `Borrow<[_]>` impls,
+/// such that multiple `T` types would apply:
+///
+/// ```
+/// # #[allow(dead_code)]
+/// pub struct Foo(Vec<u32>, Vec<String>);
+///
+/// impl std::borrow::Borrow<[u32]> for Foo {
+///     fn borrow(&self) -> &[u32] { &self.0 }
+/// }
+///
+/// impl std::borrow::Borrow<[String]> for Foo {
+///     fn borrow(&self) -> &[String] { &self.1 }
+/// }
+/// ```
+#[unstable(feature = "slice_concat_trait", issue = "27747")]
+pub trait Concat<Item: ?Sized> {
+    #[unstable(feature = "slice_concat_trait", issue = "27747")]
+    /// The resulting type after concatenation (e.g. `Vec<T>` or `String`
+    /// in the impls in this crate).
+    type Output;
+
+    /// Implementation of [`[T]::concat`](../../std/primitive.slice.html#method.concat)
+    #[unstable(feature = "slice_concat_trait", issue = "27747")]
+    fn concat(slice: &Self) -> Self::Output;
+}
+
+/// Helper trait for [`[T]::join`](../../std/primitive.slice.html#method.join)
+#[unstable(feature = "slice_concat_trait", issue = "27747")]
+pub trait Join<Separator> {
+    #[unstable(feature = "slice_concat_trait", issue = "27747")]
+    /// The resulting type after joining with the separator
+    /// (e.g. `Vec<T>` or `String` in the impls in this crate).
+    type Output;
+
+    /// Implementation of [`[T]::join`](../../std/primitive.slice.html#method.join)
+    #[unstable(feature = "slice_concat_trait", issue = "27747")]
+    fn join(slice: &Self, sep: Separator) -> Self::Output;
+}
+
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<T: Clone, V: Borrow<[T]>> Concat<T> for [V] {
+    type Output = Vec<T>;
+
+    /// Concatenates every inner slice into one `Vec`, preallocating the exact
+    /// total length up front.
+    fn concat(slice: &Self) -> Vec<T> {
+        let total_len: usize = slice.iter().map(|v| v.borrow().len()).sum();
+        let mut out = Vec::with_capacity(total_len);
+        for v in slice {
+            out.extend_from_slice(v.borrow());
+        }
+        out
+    }
+}
+
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<T: Clone, V: Borrow<[T]>> Join<&T> for [V] {
+    type Output = Vec<T>;
+
+    /// Joins the inner slices, cloning `sep` once between each adjacent pair.
+    fn join(slice: &Self, sep: &T) -> Vec<T> {
+        let mut rest = slice.iter();
+        let first = match rest.next() {
+            Some(first) => first,
+            None => return vec![],
+        };
+        // One separator per adjacent pair: `slice.len() - 1` in total
+        // (`slice` is non-empty here thanks to the early return above).
+        let size = slice.iter().map(|v| v.borrow().len()).sum::<usize>() + slice.len() - 1;
+        let mut result = Vec::with_capacity(size);
+        result.extend_from_slice(first.borrow());
+
+        for v in rest {
+            result.push(sep.clone());
+            result.extend_from_slice(v.borrow());
+        }
+        result
+    }
+}
+
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<T: Clone, V: Borrow<[T]>> Join<&[T]> for [V] {
+    type Output = Vec<T>;
+
+    // Joins the inner slices with a slice separator copied between each
+    // adjacent pair.
+    fn join(slice: &Self, sep: &[T]) -> Vec<T> {
+        let mut iter = slice.iter();
+        let first = match iter.next() {
+            Some(first) => first,
+            None => return vec![],
+        };
+        // Total length: all inner slices plus one whole separator between each
+        // adjacent pair (`slice` is non-empty here, so `slice.len() - 1` cannot
+        // underflow).
+        let size =
+            slice.iter().map(|v| v.borrow().len()).sum::<usize>() + sep.len() * (slice.len() - 1);
+        let mut result = Vec::with_capacity(size);
+        result.extend_from_slice(first.borrow());
+
+        for v in iter {
+            result.extend_from_slice(sep);
+            result.extend_from_slice(v.borrow())
+        }
+        result
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Standard trait implementations for slices
+////////////////////////////////////////////////////////////////////////////////
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Borrow<[T]> for Vec<T> {
+    /// Borrows the vector's contents as a shared slice.
+    fn borrow(&self) -> &[T] {
+        self.as_slice()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> BorrowMut<[T]> for Vec<T> {
+    /// Borrows the vector's contents as a mutable slice.
+    fn borrow_mut(&mut self) -> &mut [T] {
+        self.as_mut_slice()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> ToOwned for [T] {
+    type Owned = Vec<T>;
+    // Normal build: delegate to the inherent `[T]::to_vec` above.
+    #[cfg(not(test))]
+    fn to_owned(&self) -> Vec<T> {
+        self.to_vec()
+    }
+
+    // Test build: the inherent slice impls are compiled out (they are
+    // `#[cfg(not(test))]`), so go through the `hack` module directly.
+    #[cfg(test)]
+    fn to_owned(&self) -> Vec<T> {
+        hack::to_vec(self)
+    }
+
+    fn clone_into(&self, target: &mut Vec<T>) {
+        // drop anything in target that will not be overwritten
+        target.truncate(self.len());
+
+        // target.len <= self.len due to the truncate above, so the
+        // slices here are always in-bounds.
+        let (init, tail) = self.split_at(target.len());
+
+        // reuse the contained values' allocations/resources.
+        target.clone_from_slice(init);
+        target.extend_from_slice(tail);
+    }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Sorting
+////////////////////////////////////////////////////////////////////////////////
+
+/// Inserts `v[0]` into pre-sorted sequence `v[1..]` so that whole `v[..]` becomes sorted.
+///
+/// This is the integral subroutine of insertion sort.
+fn insert_head<T, F>(v: &mut [T], is_less: &mut F)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    if v.len() >= 2 && is_less(&v[1], &v[0]) {
+        // SAFETY: `v.len() >= 2` was just checked, so indices 0 and 1 (and every
+        // loop index `i < v.len()`) are in bounds; all raw copies below move
+        // elements strictly within `v`.
+        unsafe {
+            // There are three ways to implement insertion here:
+            //
+            // 1. Swap adjacent elements until the first one gets to its final destination.
+            //    However, this way we copy data around more than is necessary. If elements are big
+            //    structures (costly to copy), this method will be slow.
+            //
+            // 2. Iterate until the right place for the first element is found. Then shift the
+            //    elements succeeding it to make room for it and finally place it into the
+            //    remaining hole. This is a good method.
+            //
+            // 3. Copy the first element into a temporary variable. Iterate until the right place
+            //    for it is found. As we go along, copy every traversed element into the slot
+            //    preceding it. Finally, copy data from the temporary variable into the remaining
+            //    hole. This method is very good. Benchmarks demonstrated slightly better
+            //    performance than with the 2nd method.
+            //
+            // All methods were benchmarked, and the 3rd showed best results. So we chose that one.
+            //
+            // `ManuallyDrop` keeps this bitwise copy of `v[0]` from being dropped on
+            // its own: the value is written back into `v` exactly once by `hole`.
+            let mut tmp = mem::ManuallyDrop::new(ptr::read(&v[0]));
+
+            // Intermediate state of the insertion process is always tracked by `hole`, which
+            // serves two purposes:
+            // 1. Protects integrity of `v` from panics in `is_less`.
+            // 2. Fills the remaining hole in `v` in the end.
+            //
+            // Panic safety:
+            //
+            // If `is_less` panics at any point during the process, `hole` will get dropped and
+            // fill the hole in `v` with `tmp`, thus ensuring that `v` still holds every object it
+            // initially held exactly once.
+            let mut hole = InsertionHole { src: &mut *tmp, dest: &mut v[1] };
+            ptr::copy_nonoverlapping(&v[1], &mut v[0], 1);
+
+            for i in 2..v.len() {
+                if !is_less(&v[i], &*tmp) {
+                    break;
+                }
+                ptr::copy_nonoverlapping(&v[i], &mut v[i - 1], 1);
+                hole.dest = &mut v[i];
+            }
+            // `hole` gets dropped and thus copies `tmp` into the remaining hole in `v`.
+        }
+    }
+
+    // When dropped, copies from `src` into `dest`.
+    struct InsertionHole<T> {
+        src: *mut T,
+        dest: *mut T,
+    }
+
+    impl<T> Drop for InsertionHole<T> {
+        fn drop(&mut self) {
+            unsafe {
+                ptr::copy_nonoverlapping(self.src, self.dest, 1);
+            }
+        }
+    }
+}
+
+/// Merges non-decreasing runs `v[..mid]` and `v[mid..]` using `buf` as temporary storage, and
+/// stores the result into `v[..]`.
+///
+/// # Safety
+///
+/// The two slices must be non-empty and `mid` must be in bounds. Buffer `buf` must be long enough
+/// to hold a copy of the shorter slice. Also, `T` must not be a zero-sized type.
+unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, is_less: &mut F)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    let len = v.len();
+    let v = v.as_mut_ptr();
+    // SAFETY: `mid <= len` by this function's contract, so both pointers are
+    // within (or one past the end of) `v`'s allocation.
+    let (v_mid, v_end) = unsafe { (v.add(mid), v.add(len)) };
+
+    // The merge process first copies the shorter run into `buf`. Then it traces the newly copied
+    // run and the longer run forwards (or backwards), comparing their next unconsumed elements and
+    // copying the lesser (or greater) one into `v`.
+    //
+    // As soon as the shorter run is fully consumed, the process is done. If the longer run gets
+    // consumed first, then we must copy whatever is left of the shorter run into the remaining
+    // hole in `v`.
+    //
+    // Intermediate state of the process is always tracked by `hole`, which serves two purposes:
+    // 1. Protects integrity of `v` from panics in `is_less`.
+    // 2. Fills the remaining hole in `v` if the longer run gets consumed first.
+    //
+    // Panic safety:
+    //
+    // If `is_less` panics at any point during the process, `hole` will get dropped and fill the
+    // hole in `v` with the unconsumed range in `buf`, thus ensuring that `v` still holds every
+    // object it initially held exactly once.
+    let mut hole;
+
+    if mid <= len - mid {
+        // The left run is shorter.
+        // SAFETY: per the contract, `buf` can hold at least `mid` elements
+        // (the shorter run), and the copy source lies within `v`.
+        unsafe {
+            ptr::copy_nonoverlapping(v, buf, mid);
+            hole = MergeHole { start: buf, end: buf.add(mid), dest: v };
+        }
+
+        // Initially, these pointers point to the beginnings of their arrays.
+        let left = &mut hole.start;
+        let mut right = v_mid;
+        let out = &mut hole.dest;
+
+        while *left < hole.end && right < v_end {
+            // Consume the lesser side.
+            // If equal, prefer the left run to maintain stability.
+            // SAFETY: the loop condition keeps both cursors strictly inside
+            // their runs, and `out` advances one slot per copied element.
+            unsafe {
+                let to_copy = if is_less(&*right, &**left) {
+                    get_and_increment(&mut right)
+                } else {
+                    get_and_increment(left)
+                };
+                ptr::copy_nonoverlapping(to_copy, get_and_increment(out), 1);
+            }
+        }
+    } else {
+        // The right run is shorter.
+        // SAFETY: per the contract, `buf` can hold at least `len - mid`
+        // elements (the shorter run), and the copy source lies within `v`.
+        unsafe {
+            ptr::copy_nonoverlapping(v_mid, buf, len - mid);
+            hole = MergeHole { start: buf, end: buf.add(len - mid), dest: v_mid };
+        }
+
+        // Initially, these pointers point past the ends of their arrays.
+        let left = &mut hole.dest;
+        let right = &mut hole.end;
+        let mut out = v_end;
+
+        while v < *left && buf < *right {
+            // Consume the greater side.
+            // If equal, prefer the right run to maintain stability.
+            // SAFETY: the loop condition guarantees there is at least one
+            // unconsumed element on each side, so the `offset(-1)` reads are
+            // in bounds.
+            unsafe {
+                let to_copy = if is_less(&*right.offset(-1), &*left.offset(-1)) {
+                    decrement_and_get(left)
+                } else {
+                    decrement_and_get(right)
+                };
+                ptr::copy_nonoverlapping(to_copy, decrement_and_get(&mut out), 1);
+            }
+        }
+    }
+    // Finally, `hole` gets dropped. If the shorter run was not fully consumed, whatever remains of
+    // it will now be copied into the hole in `v`.
+
+    // Returns `*ptr` and advances `ptr` by one element.
+    unsafe fn get_and_increment<T>(ptr: &mut *mut T) -> *mut T {
+        let old = *ptr;
+        *ptr = unsafe { ptr.offset(1) };
+        old
+    }
+
+    // Moves `ptr` back by one element and returns the new value.
+    unsafe fn decrement_and_get<T>(ptr: &mut *mut T) -> *mut T {
+        *ptr = unsafe { ptr.offset(-1) };
+        *ptr
+    }
+
+    // When dropped, copies the range `start..end` into `dest..`.
+    struct MergeHole<T> {
+        start: *mut T,
+        end: *mut T,
+        dest: *mut T,
+    }
+
+    impl<T> Drop for MergeHole<T> {
+        fn drop(&mut self) {
+            // `T` is not a zero-sized type, so it's okay to divide by its size.
+            let len = (self.end as usize - self.start as usize) / mem::size_of::<T>();
+            unsafe {
+                ptr::copy_nonoverlapping(self.start, self.dest, len);
+            }
+        }
+    }
+}
+
+/// This merge sort borrows some (but not all) ideas from TimSort, which is described in detail
+/// [here](http://svn.python.org/projects/python/trunk/Objects/listsort.txt).
+///
+/// The algorithm identifies strictly descending and non-descending subsequences, which are called
+/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed
+/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are
+/// satisfied:
+///
+/// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len`
+/// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len`
+///
+/// The invariants ensure that the total running time is `O(n * log(n))` worst-case.
+fn merge_sort<T, F>(v: &mut [T], mut is_less: F)
+where
+    F: FnMut(&T, &T) -> bool,
+{
+    // Slices of up to this length get sorted using insertion sort.
+    const MAX_INSERTION: usize = 20;
+    // Very short runs are extended using insertion sort to span at least this many elements.
+    const MIN_RUN: usize = 10;
+
+    // Sorting has no meaningful behavior on zero-sized types.
+    if size_of::<T>() == 0 {
+        return;
+    }
+
+    let len = v.len();
+
+    // Short arrays get sorted in-place via insertion sort to avoid allocations.
+    if len <= MAX_INSERTION {
+        if len >= 2 {
+            for i in (0..len - 1).rev() {
+                insert_head(&mut v[i..], &mut is_less);
+            }
+        }
+        return;
+    }
+
+    // Allocate a buffer to use as scratch memory. We keep the length 0 so we can keep in it
+    // shallow copies of the contents of `v` without risking the dtors running on copies if
+    // `is_less` panics. When merging two sorted runs, this buffer holds a copy of the shorter run,
+    // which will always have length at most `len / 2`.
+    let mut buf = Vec::with_capacity(len / 2);
+
+    // In order to identify natural runs in `v`, we traverse it backwards. That might seem like a
+    // strange decision, but consider the fact that merges more often go in the opposite direction
+    // (forwards). According to benchmarks, merging forwards is slightly faster than merging
+    // backwards. To conclude, identifying runs by traversing backwards improves performance.
+    let mut runs = vec![];
+    let mut end = len;
+    while end > 0 {
+        // Find the next natural run, and reverse it if it's strictly descending.
+        let mut start = end - 1;
+        if start > 0 {
+            start -= 1;
+            // SAFETY: `start + 1 < end <= len` here, and `start` only decreases
+            // while `start > 0`, so every `get_unchecked` index is in bounds.
+            unsafe {
+                if is_less(v.get_unchecked(start + 1), v.get_unchecked(start)) {
+                    while start > 0 && is_less(v.get_unchecked(start), v.get_unchecked(start - 1)) {
+                        start -= 1;
+                    }
+                    v[start..end].reverse();
+                } else {
+                    while start > 0 && !is_less(v.get_unchecked(start), v.get_unchecked(start - 1))
+                    {
+                        start -= 1;
+                    }
+                }
+            }
+        }
+
+        // Insert some more elements into the run if it's too short. Insertion sort is faster than
+        // merge sort on short sequences, so this significantly improves performance.
+        while start > 0 && end - start < MIN_RUN {
+            start -= 1;
+            insert_head(&mut v[start..end], &mut is_less);
+        }
+
+        // Push this run onto the stack.
+        runs.push(Run { start, len: end - start });
+        end = start;
+
+        // Merge some pairs of adjacent runs to satisfy the invariants.
+        while let Some(r) = collapse(&runs) {
+            let left = runs[r + 1];
+            let right = runs[r];
+            // SAFETY: `buf` has capacity `len / 2`, enough for the shorter of
+            // the two adjacent runs, and the runs are in-bounds sub-ranges of
+            // `v` — satisfying `merge`'s documented contract.
+            unsafe {
+                merge(
+                    &mut v[left.start..right.start + right.len],
+                    left.len,
+                    buf.as_mut_ptr(),
+                    &mut is_less,
+                );
+            }
+            runs[r] = Run { start: left.start, len: left.len + right.len };
+            runs.remove(r + 1);
+        }
+    }
+
+    // Finally, exactly one run must remain in the stack.
+    debug_assert!(runs.len() == 1 && runs[0].start == 0 && runs[0].len == len);
+
+    // Examines the stack of runs and identifies the next pair of runs to merge. More specifically,
+    // if `Some(r)` is returned, that means `runs[r]` and `runs[r + 1]` must be merged next. If the
+    // algorithm should continue building a new run instead, `None` is returned.
+    //
+    // TimSort is infamous for its buggy implementations, as described here:
+    // http://envisage-project.eu/timsort-specification-and-verification/
+    //
+    // The gist of the story is: we must enforce the invariants on the top four runs on the stack.
+    // Enforcing them on just top three is not sufficient to ensure that the invariants will still
+    // hold for *all* runs in the stack.
+    //
+    // This function correctly checks invariants for the top four runs. Additionally, if the top
+    // run starts at index 0, it will always demand a merge operation until the stack is fully
+    // collapsed, in order to complete the sort.
+    #[inline]
+    fn collapse(runs: &[Run]) -> Option<usize> {
+        let n = runs.len();
+        if n >= 2
+            && (runs[n - 1].start == 0
+                || runs[n - 2].len <= runs[n - 1].len
+                || (n >= 3 && runs[n - 3].len <= runs[n - 2].len + runs[n - 1].len)
+                || (n >= 4 && runs[n - 4].len <= runs[n - 3].len + runs[n - 2].len))
+        {
+            if n >= 3 && runs[n - 3].len < runs[n - 1].len { Some(n - 3) } else { Some(n - 2) }
+        } else {
+            None
+        }
+    }
+
+    // A contiguous, already-sorted sub-range of `v`.
+    #[derive(Clone, Copy)]
+    struct Run {
+        start: usize,
+        len: usize,
+    }
+}
diff --git a/library/alloc/src/str.rs b/library/alloc/src/str.rs
new file mode 100644
index 00000000000..339592728ac
--- /dev/null
+++ b/library/alloc/src/str.rs
@@ -0,0 +1,576 @@
+//! Unicode string slices.
+//!
+//! *[See also the `str` primitive type](../../std/primitive.str.html).*
+//!
+//! The `&str` type is one of the two main string types, the other being `String`.
+//! Unlike its `String` counterpart, its contents are borrowed.
+//!
+//! # Basic Usage
+//!
+//! A basic string declaration of `&str` type:
+//!
+//! ```
+//! let hello_world = "Hello, World!";
+//! ```
+//!
+//! Here we have declared a string literal, also known as a string slice.
+//! String literals have a static lifetime, which means the string `hello_world`
+//! is guaranteed to be valid for the duration of the entire program.
+//! We can explicitly specify `hello_world`'s lifetime as well:
+//!
+//! ```
+//! let hello_world: &'static str = "Hello, world!";
+//! ```
+
+#![stable(feature = "rust1", since = "1.0.0")]
+// Many of the imports in this module are only used in the test configuration.
+// It's cleaner to just turn off the unused_imports warning than to fix them.
+#![allow(unused_imports)]
+
+use core::borrow::{Borrow, BorrowMut};
+use core::iter::FusedIterator;
+use core::mem;
+use core::ptr;
+use core::str::pattern::{DoubleEndedSearcher, Pattern, ReverseSearcher, Searcher};
+use core::unicode::conversions;
+
+use crate::borrow::ToOwned;
+use crate::boxed::Box;
+use crate::slice::{Concat, Join, SliceIndex};
+use crate::string::String;
+use crate::vec::Vec;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::pattern;
+#[stable(feature = "encode_utf16", since = "1.8.0")]
+pub use core::str::EncodeUtf16;
+#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
+pub use core::str::SplitAsciiWhitespace;
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::SplitWhitespace;
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{from_utf8, from_utf8_mut, Bytes, CharIndices, Chars};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{from_utf8_unchecked, from_utf8_unchecked_mut, ParseBoolError};
+#[stable(feature = "str_escape", since = "1.34.0")]
+pub use core::str::{EscapeDebug, EscapeDefault, EscapeUnicode};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{FromStr, Utf8Error};
+#[allow(deprecated)]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{Lines, LinesAny};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{MatchIndices, RMatchIndices};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{Matches, RMatches};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{RSplit, Split};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{RSplitN, SplitN};
+#[stable(feature = "rust1", since = "1.0.0")]
+pub use core::str::{RSplitTerminator, SplitTerminator};
+
+/// Note: `str` in `Concat<str>` is not meaningful here.
+/// This type parameter of the trait only exists to enable another impl.
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<S: Borrow<str>> Concat<str> for [S] {
+    type Output = String;
+
+    fn concat(slice: &Self) -> String {
+        // Concatenation is simply joining with the empty separator.
+        Join::join(slice, "")
+    }
+}
+
+#[unstable(feature = "slice_concat_ext", issue = "27747")]
+impl<S: Borrow<str>> Join<&str> for [S] {
+    type Output = String;
+
+    fn join(slice: &Self, sep: &str) -> String {
+        // SAFETY: the joined bytes are a concatenation of whole `str`s
+        // (the elements of `slice` and `sep`), so the result is valid UTF-8.
+        unsafe { String::from_utf8_unchecked(join_generic_copy(slice, sep.as_bytes())) }
+    }
+}
+
+// Copies each of `$iter`'s elements into `$target`, preceding every element
+// with `$separator`, dispatching on the separator's length so that the small,
+// common lengths each get their own monomorphized copy loop.
+//
+// NOTE(review): this assumes every `s.borrow()` call returns a slice of the
+// same length as when the caller computed the total size — an inconsistent
+// `Borrow` impl would make `copy_slice_and_advance!` panic mid-copy, or leave
+// part of `$target` unwritten before the caller's `set_len`. Confirm callers
+// uphold this.
+macro_rules! spezialize_for_lengths {
+    ($separator:expr, $target:expr, $iter:expr; $($num:expr),*) => {
+        let mut target = $target;
+        let iter = $iter;
+        let sep_bytes = $separator;
+        match $separator.len() {
+            $(
+                // loops with hardcoded sizes run much faster
+                // specialize the cases with small separator lengths
+                $num => {
+                    for s in iter {
+                        copy_slice_and_advance!(target, sep_bytes);
+                        copy_slice_and_advance!(target, s.borrow().as_ref());
+                    }
+                },
+            )*
+            _ => {
+                // arbitrary non-zero size fallback
+                for s in iter {
+                    copy_slice_and_advance!(target, sep_bytes);
+                    copy_slice_and_advance!(target, s.borrow().as_ref());
+                }
+            }
+        }
+    };
+}
+
+// Writes `$bytes` to the front of the mutable slice `$target` and rebinds
+// `$target` to the remaining tail. Panics (via `split_at_mut`) if `$target`
+// is shorter than `$bytes`.
+macro_rules! copy_slice_and_advance {
+    ($target:expr, $bytes:expr) => {
+        let len = $bytes.len();
+        let (head, tail) = { $target }.split_at_mut(len);
+        head.copy_from_slice($bytes);
+        $target = tail;
+    };
+}
+
+// Optimized join implementation that works for both Vec<T> (T: Copy) and String's inner vec
+// Currently (2018-05-13) there is a bug with type inference and specialization (see issue #36262)
+// For this reason SliceConcat<T> is not specialized for T: Copy and SliceConcat<str> is the
+// only user of this function. It is left in place for the time when that is fixed.
+//
+// the bounds for String-join are S: Borrow<str> and for Vec-join Borrow<[T]>
+// [T] and str both impl AsRef<[T]> for some T
+// => s.borrow().as_ref() and we always have slices
+fn join_generic_copy<B, T, S>(slice: &[S], sep: &[T]) -> Vec<T>
+where
+    T: Copy,
+    B: AsRef<[T]> + ?Sized,
+    S: Borrow<B>,
+{
+    let sep_len = sep.len();
+    let mut iter = slice.iter();
+
+    // the first slice is the only one without a separator preceding it
+    let first = match iter.next() {
+        Some(first) => first,
+        None => return vec![],
+    };
+
+    // compute the exact total length of the joined Vec
+    // if the `len` calculation overflows, we'll panic
+    // we would have run out of memory anyway and the rest of the function requires
+    // the entire Vec pre-allocated for safety
+    let len = sep_len
+        .checked_mul(iter.len())
+        .and_then(|n| {
+            slice.iter().map(|s| s.borrow().as_ref().len()).try_fold(n, usize::checked_add)
+        })
+        .expect("attempt to join into collection with len > usize::MAX");
+
+    // crucial for safety
+    let mut result = Vec::with_capacity(len);
+    assert!(result.capacity() >= len);
+
+    result.extend_from_slice(first.borrow().as_ref());
+
+    // SAFETY (for `get_unchecked_mut` and `set_len`): `pos..len` is within the
+    // capacity reserved above, and the macro is expected to fill that whole
+    // range before `set_len(len)` runs.
+    // NOTE(review): this relies on each `borrow()` returning a slice of the
+    // same length as during the size computation above — confirm no `Borrow`
+    // impl reachable from callers can violate that.
+    unsafe {
+        {
+            let pos = result.len();
+            let target = result.get_unchecked_mut(pos..len);
+
+            // copy separator and slices over without bounds checks
+            // generate loops with hardcoded offsets for small separators
+            // massive improvements possible (~ x2)
+            spezialize_for_lengths!(sep, target, iter; 0, 1, 2, 3, 4);
+        }
+        result.set_len(len);
+    }
+    result
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Borrow<str> for String {
+    /// Borrows the string's contents as a `&str`.
+    #[inline]
+    fn borrow(&self) -> &str {
+        self.as_str()
+    }
+}
+
+#[stable(feature = "string_borrow_mut", since = "1.36.0")]
+impl BorrowMut<str> for String {
+    /// Borrows the string's contents as a mutable `&mut str`.
+    #[inline]
+    fn borrow_mut(&mut self) -> &mut str {
+        self.as_mut_str()
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ToOwned for str {
+    type Owned = String;
+    #[inline]
+    fn to_owned(&self) -> String {
+        // SAFETY: the bytes of a `str` are valid UTF-8 by definition, and
+        // copying them into an owned `Vec<u8>` preserves that.
+        unsafe { String::from_utf8_unchecked(self.as_bytes().to_owned()) }
+    }
+
+    fn clone_into(&self, target: &mut String) {
+        // Reuse `target`'s buffer: temporarily take it out as a byte vector,
+        // clone into it, then put it back.
+        let mut b = mem::take(target).into_bytes();
+        self.as_bytes().clone_into(&mut b);
+        // SAFETY: `b` now holds an exact copy of `self`'s bytes, which are
+        // valid UTF-8.
+        *target = unsafe { String::from_utf8_unchecked(b) }
+    }
+}
+
+/// Methods for string slices.
+#[lang = "str_alloc"]
+#[cfg(not(test))]
+impl str {
+    /// Converts a `Box<str>` into a `Box<[u8]>` without copying or allocating.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "this is a string";
+    /// let boxed_str = s.to_owned().into_boxed_str();
+    /// let boxed_bytes = boxed_str.into_boxed_bytes();
+    /// assert_eq!(*boxed_bytes, *s.as_bytes());
+    /// ```
+    #[stable(feature = "str_box_extras", since = "1.20.0")]
+    #[inline]
+    pub fn into_boxed_bytes(self: Box<str>) -> Box<[u8]> {
+        // Goes through the `Box<str>` -> `Box<[u8]>` `Into` conversion;
+        // per the doc above, this reuses the existing allocation.
+        self.into()
+    }
+
+    /// Replaces all matches of a pattern with another string.
+    ///
+    /// `replace` creates a new [`String`], and copies the data from this string slice into it.
+    /// While doing so, it attempts to find matches of a pattern. If it finds any, it
+    /// replaces them with the replacement string slice.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "this is old";
+    ///
+    /// assert_eq!("this is new", s.replace("old", "new"));
+    /// ```
+    ///
+    /// When the pattern doesn't match:
+    ///
+    /// ```
+    /// let s = "this is old";
+    /// assert_eq!(s, s.replace("cookie monster", "little lamb"));
+    /// ```
+    #[must_use = "this returns the replaced string as a new allocation, \
+                  without modifying the original"]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[inline]
+    pub fn replace<'a, P: Pattern<'a>>(&'a self, from: P, to: &str) -> String {
+        // Preallocate using the input length as an estimate of the output
+        // length — the output always contains every non-matching byte of
+        // `self`, so this avoids repeated reallocation while copying.
+        // (`replacen` below preallocates for the same reason.)
+        let mut result = String::with_capacity(self.len());
+        let mut last_end = 0;
+        for (start, part) in self.match_indices(from) {
+            // Copy the unchanged text since the previous match, then the replacement.
+            // SAFETY: `match_indices` yields byte indices lying on character
+            // boundaries within `self`, and `last_end <= start`.
+            result.push_str(unsafe { self.get_unchecked(last_end..start) });
+            result.push_str(to);
+            last_end = start + part.len();
+        }
+        // Copy whatever follows the final match.
+        // SAFETY: `last_end` is 0 or the end of a match, both char boundaries.
+        result.push_str(unsafe { self.get_unchecked(last_end..self.len()) });
+        result
+    }
+
+    /// Replaces first N matches of a pattern with another string.
+    ///
+    /// `replacen` creates a new [`String`], and copies the data from this string slice into it.
+    /// While doing so, it attempts to find matches of a pattern. If it finds any, it
+    /// replaces them with the replacement string slice at most `count` times.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "foo foo 123 foo";
+    /// assert_eq!("new new 123 foo", s.replacen("foo", "new", 2));
+    /// assert_eq!("faa fao 123 foo", s.replacen('o', "a", 3));
+    /// assert_eq!("foo foo new23 foo", s.replacen(char::is_numeric, "new", 1));
+    /// ```
+    ///
+    /// When the pattern doesn't match:
+    ///
+    /// ```
+    /// let s = "this is old";
+    /// assert_eq!(s, s.replacen("cookie monster", "little lamb", 10));
+    /// ```
+    #[must_use = "this returns the replaced string as a new allocation, \
+                  without modifying the original"]
+    #[stable(feature = "str_replacen", since = "1.16.0")]
+    pub fn replacen<'a, P: Pattern<'a>>(&'a self, pat: P, to: &str, count: usize) -> String {
+        // A small starting capacity cuts down on early reallocations.
+        let mut buf = String::with_capacity(32);
+        let mut copied_up_to = 0;
+        for (start, matched) in self.match_indices(pat).take(count) {
+            // SAFETY: `match_indices` only yields indices on char boundaries.
+            buf.push_str(unsafe { self.get_unchecked(copied_up_to..start) });
+            buf.push_str(to);
+            copied_up_to = start + matched.len();
+        }
+        // SAFETY: `copied_up_to` is 0 or the end of a match — a char boundary.
+        buf.push_str(unsafe { self.get_unchecked(copied_up_to..self.len()) });
+        buf
+    }
+
+    /// Returns the lowercase equivalent of this string slice, as a new [`String`].
+    ///
+    /// 'Lowercase' is defined according to the terms of the Unicode Derived Core Property
+    /// `Lowercase`.
+    ///
+    /// Since some characters can expand into multiple characters when changing
+    /// the case, this function returns a [`String`] instead of modifying the
+    /// parameter in-place.
+    ///
+    /// # Examples
+    ///
+    /// Basic usage:
+    ///
+    /// ```
+    /// let s = "HELLO";
+    ///
+    /// assert_eq!("hello", s.to_lowercase());
+    /// ```
+    ///
+    /// A tricky example, with sigma:
+    ///
+    /// ```
+    /// let sigma = "Σ";
+    ///
+    /// assert_eq!("σ", sigma.to_lowercase());
+    ///
+    /// // but at the end of a word, it's ς, not σ:
+    /// let odysseus = "ὈΔΥΣΣΕΎΣ";
+    ///
+    /// assert_eq!("ὀδυσσεύς", odysseus.to_lowercase());
+    /// ```
+    ///
+    /// Languages without case are not changed:
+    ///
+    /// ```
+    /// let new_year = "农历新年";
+    ///
+    /// assert_eq!(new_year, new_year.to_lowercase());
+    /// ```
+    #[stable(feature = "unicode_case_mapping", since = "1.2.0")]
+    pub fn to_lowercase(&self) -> String {
+        let mut lower = String::with_capacity(self.len());
+        for (i, c) in self[..].char_indices() {
+            if c == 'Σ' {
+                // Σ maps to σ, except at the end of a word where it maps to ς.
+                // This is the only conditional (contextual) but language-independent
+                // mapping in `SpecialCasing.txt`, so it is hard-coded here rather
+                // than handled through a generic "condition" mechanism.
+                // See https://github.com/rust-lang/rust/issues/26035
+                map_uppercase_sigma(self, i, &mut lower)
+            } else {
+                // `to_lower` yields up to three chars, NUL-padded on the right.
+                match conversions::to_lower(c) {
+                    [a, '\0', _] => lower.push(a),
+                    [a, b, '\0'] => {
+                        lower.push(a);
+                        lower.push(b);
+                    }
+                    [a, b, c] => {
+                        lower.push(a);
+                        lower.push(b);
+                        lower.push(c);
+                    }
+                }
+            }
+        }
+        return lower;
+
+        // Pushes the correct lowercase form of the 'Σ' at byte index `i`.
+        fn map_uppercase_sigma(from: &str, i: usize, to: &mut String) {
+            // See http://www.unicode.org/versions/Unicode7.0.0/ch03.pdf#G33992
+            // for the definition of `Final_Sigma`.
+            debug_assert!('Σ'.len_utf8() == 2);
+            let is_word_final = case_ignorable_then_cased(from[..i].chars().rev())
+                && !case_ignorable_then_cased(from[i + 2..].chars());
+            to.push_str(if is_word_final { "ς" } else { "σ" });
+        }
+
+        // True iff the first non-`Case_Ignorable` char produced by `iter` is `Cased`.
+        fn case_ignorable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
+            use core::unicode::derived_property::{Case_Ignorable, Cased};
+            iter.skip_while(|&c| Case_Ignorable(c)).next().map_or(false, Cased)
+        }
+    }
+
+ /// Returns the uppercase equivalent of this string slice, as a new [`String`].
+ ///
+ /// 'Uppercase' is defined according to the terms of the Unicode Derived Core Property
+ /// `Uppercase`.
+ ///
+ /// Since some characters can expand into multiple characters when changing
+ /// the case, this function returns a [`String`] instead of modifying the
+ /// parameter in-place.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = "hello";
+ ///
+ /// assert_eq!("HELLO", s.to_uppercase());
+ /// ```
+ ///
+ /// Scripts without case are not changed:
+ ///
+ /// ```
+ /// let new_year = "农历新年";
+ ///
+ /// assert_eq!(new_year, new_year.to_uppercase());
+ /// ```
+ ///
+ /// One character can become multiple:
+ /// ```
+ /// let s = "tschüß";
+ ///
+ /// assert_eq!("TSCHÜSS", s.to_uppercase());
+ /// ```
+ #[stable(feature = "unicode_case_mapping", since = "1.2.0")]
+ pub fn to_uppercase(&self) -> String {
+ let mut s = String::with_capacity(self.len());
+ for c in self[..].chars() {
+ match conversions::to_upper(c) {
+ [a, '\0', _] => s.push(a),
+ [a, b, '\0'] => {
+ s.push(a);
+ s.push(b);
+ }
+ [a, b, c] => {
+ s.push(a);
+ s.push(b);
+ s.push(c);
+ }
+ }
+ }
+ s
+ }
+
+ /// Converts a [`Box<str>`] into a [`String`] without copying or allocating.
+ ///
+ /// [`Box<str>`]: Box
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let string = String::from("birthday gift");
+ /// let boxed_str = string.clone().into_boxed_str();
+ ///
+ /// assert_eq!(boxed_str.into_string(), string);
+ /// ```
+ #[stable(feature = "box_str", since = "1.4.0")]
+ #[inline]
+ pub fn into_string(self: Box<str>) -> String {
+ let slice = Box::<[u8]>::from(self);
+ unsafe { String::from_utf8_unchecked(slice.into_vec()) }
+ }
+
+ /// Creates a new [`String`] by repeating a string `n` times.
+ ///
+ /// # Panics
+ ///
+ /// This function will panic if the capacity would overflow.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// assert_eq!("abc".repeat(4), String::from("abcabcabcabc"));
+ /// ```
+ ///
+ /// A panic upon overflow:
+ ///
+ /// ```should_panic
+ /// // this will panic at runtime
+ /// "0123456789abcdef".repeat(usize::MAX);
+ /// ```
+ #[stable(feature = "repeat_str", since = "1.16.0")]
+ pub fn repeat(&self, n: usize) -> String {
+ unsafe { String::from_utf8_unchecked(self.as_bytes().repeat(n)) }
+ }
+
+ /// Returns a copy of this string where each character is mapped to its
+ /// ASCII upper case equivalent.
+ ///
+ /// ASCII letters 'a' to 'z' are mapped to 'A' to 'Z',
+ /// but non-ASCII letters are unchanged.
+ ///
+ /// To uppercase the value in-place, use [`make_ascii_uppercase`].
+ ///
+ /// To uppercase ASCII characters in addition to non-ASCII characters, use
+ /// [`to_uppercase`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let s = "Grüße, Jürgen ❤";
+ ///
+ /// assert_eq!("GRüßE, JüRGEN ❤", s.to_ascii_uppercase());
+ /// ```
+ ///
+ /// [`make_ascii_uppercase`]: str::make_ascii_uppercase
+ /// [`to_uppercase`]: #method.to_uppercase
+ #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+ #[inline]
+ pub fn to_ascii_uppercase(&self) -> String {
+ let mut bytes = self.as_bytes().to_vec();
+ bytes.make_ascii_uppercase();
+ // make_ascii_uppercase() preserves the UTF-8 invariant.
+ unsafe { String::from_utf8_unchecked(bytes) }
+ }
+
+ /// Returns a copy of this string where each character is mapped to its
+ /// ASCII lower case equivalent.
+ ///
+ /// ASCII letters 'A' to 'Z' are mapped to 'a' to 'z',
+ /// but non-ASCII letters are unchanged.
+ ///
+ /// To lowercase the value in-place, use [`make_ascii_lowercase`].
+ ///
+ /// To lowercase ASCII characters in addition to non-ASCII characters, use
+ /// [`to_lowercase`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let s = "Grüße, Jürgen ❤";
+ ///
+ /// assert_eq!("grüße, jürgen ❤", s.to_ascii_lowercase());
+ /// ```
+ ///
+ /// [`make_ascii_lowercase`]: str::make_ascii_lowercase
+ /// [`to_lowercase`]: #method.to_lowercase
+ #[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
+ #[inline]
+ pub fn to_ascii_lowercase(&self) -> String {
+ let mut bytes = self.as_bytes().to_vec();
+ bytes.make_ascii_lowercase();
+ // make_ascii_lowercase() preserves the UTF-8 invariant.
+ unsafe { String::from_utf8_unchecked(bytes) }
+ }
+}
+
+/// Converts a boxed slice of bytes to a boxed string slice without checking
+/// that the string contains valid UTF-8.
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// let smile_utf8 = Box::new([226, 152, 186]);
+/// let smile = unsafe { std::str::from_boxed_utf8_unchecked(smile_utf8) };
+///
+/// assert_eq!("☺", &*smile);
+/// ```
+#[stable(feature = "str_box_extras", since = "1.20.0")]
+#[inline]
+pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
+ unsafe { Box::from_raw(Box::into_raw(v) as *mut str) }
+}
diff --git a/library/alloc/src/string.rs b/library/alloc/src/string.rs
new file mode 100644
index 00000000000..05398ca68c8
--- /dev/null
+++ b/library/alloc/src/string.rs
@@ -0,0 +1,2504 @@
+//! A UTF-8 encoded, growable string.
+//!
+//! This module contains the [`String`] type, a trait for converting
+//! [`ToString`]s, and several error types that may result from working with
+//! [`String`]s.
+//!
+//! # Examples
+//!
+//! There are multiple ways to create a new [`String`] from a string literal:
+//!
+//! ```
+//! let s = "Hello".to_string();
+//!
+//! let s = String::from("world");
+//! let s: String = "also this".into();
+//! ```
+//!
+//! You can create a new [`String`] from an existing one by concatenating with
+//! `+`:
+//!
+//! ```
+//! let s = "Hello".to_string();
+//!
+//! let message = s + " world!";
+//! ```
+//!
+//! If you have a vector of valid UTF-8 bytes, you can make a [`String`] out of
+//! it. You can do the reverse too.
+//!
+//! ```
+//! let sparkle_heart = vec![240, 159, 146, 150];
+//!
+//! // We know these bytes are valid, so we'll use `unwrap()`.
+//! let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+//!
+//! assert_eq!("💖", sparkle_heart);
+//!
+//! let bytes = sparkle_heart.into_bytes();
+//!
+//! assert_eq!(bytes, [240, 159, 146, 150]);
+//! ```
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::char::{decode_utf16, REPLACEMENT_CHARACTER};
+use core::fmt;
+use core::hash;
+use core::iter::{FromIterator, FusedIterator};
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{self, Add, AddAssign, Index, IndexMut, RangeBounds};
+use core::ptr;
+use core::str::{lossy, pattern::Pattern};
+
+use crate::borrow::{Cow, ToOwned};
+use crate::boxed::Box;
+use crate::collections::TryReserveError;
+use crate::str::{self, from_boxed_utf8_unchecked, Chars, FromStr, Utf8Error};
+use crate::vec::Vec;
+
+/// A UTF-8 encoded, growable string.
+///
+/// The `String` type is the most common string type that has ownership over the
+/// contents of the string. It has a close relationship with its borrowed
+/// counterpart, the primitive [`str`].
+///
+/// # Examples
+///
+/// You can create a `String` from [a literal string][`str`] with [`String::from`]:
+///
+/// [`String::from`]: From::from
+///
+/// ```
+/// let hello = String::from("Hello, world!");
+/// ```
+///
+/// You can append a [`char`] to a `String` with the [`push`] method, and
+/// append a [`&str`] with the [`push_str`] method:
+///
+/// ```
+/// let mut hello = String::from("Hello, ");
+///
+/// hello.push('w');
+/// hello.push_str("orld!");
+/// ```
+///
+/// [`push`]: String::push
+/// [`push_str`]: String::push_str
+///
+/// If you have a vector of UTF-8 bytes, you can create a `String` from it with
+/// the [`from_utf8`] method:
+///
+/// ```
+/// // some bytes, in a vector
+/// let sparkle_heart = vec![240, 159, 146, 150];
+///
+/// // We know these bytes are valid, so we'll use `unwrap()`.
+/// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+///
+/// assert_eq!("💖", sparkle_heart);
+/// ```
+///
+/// [`from_utf8`]: String::from_utf8
+///
+/// # UTF-8
+///
+/// `String`s are always valid UTF-8. This has a few implications, the first of
+/// which is that if you need a non-UTF-8 string, consider [`OsString`]. It is
+/// similar, but without the UTF-8 constraint. The second implication is that
+/// you cannot index into a `String`:
+///
+/// ```compile_fail,E0277
+/// let s = "hello";
+///
+/// println!("The first letter of s is {}", s[0]); // ERROR!!!
+/// ```
+///
+/// [`OsString`]: ../../std/ffi/struct.OsString.html
+///
+/// Indexing is intended to be a constant-time operation, but UTF-8 encoding
+/// does not allow us to do this. Furthermore, it's not clear what sort of
+/// thing the index should return: a byte, a codepoint, or a grapheme cluster.
+/// The [`bytes`] and [`chars`] methods return iterators over the first
+/// two, respectively.
+///
+/// [`bytes`]: str::bytes
+/// [`chars`]: str::chars
+///
+/// # Deref
+///
+/// `String`s implement [`Deref`]`<Target=str>`, and so inherit all of [`str`]'s
+/// methods. In addition, this means that you can pass a `String` to a
+/// function which takes a [`&str`] by using an ampersand (`&`):
+///
+/// ```
+/// fn takes_str(s: &str) { }
+///
+/// let s = String::from("Hello");
+///
+/// takes_str(&s);
+/// ```
+///
+/// This will create a [`&str`] from the `String` and pass it in. This
+/// conversion is very inexpensive, and so generally, functions will accept
+/// [`&str`]s as arguments unless they need a `String` for some specific
+/// reason.
+///
+/// In certain cases Rust doesn't have enough information to make this
+/// conversion, known as [`Deref`] coercion. In the following example a string
+/// slice [`&'a str`][`&str`] implements the trait `TraitExample`, and the function
+/// `example_func` takes anything that implements the trait. In this case Rust
+/// would need to make two implicit conversions, which Rust doesn't have the
+/// means to do. For that reason, the following example will not compile.
+///
+/// ```compile_fail,E0277
+/// trait TraitExample {}
+///
+/// impl<'a> TraitExample for &'a str {}
+///
+/// fn example_func<A: TraitExample>(example_arg: A) {}
+///
+/// let example_string = String::from("example_string");
+/// example_func(&example_string);
+/// ```
+///
+/// There are two options that would work instead. The first would be to
+/// change the line `example_func(&example_string);` to
+/// `example_func(example_string.as_str());`, using the method [`as_str()`]
+/// to explicitly extract the string slice containing the string. The second
+/// way changes `example_func(&example_string);` to
+/// `example_func(&*example_string);`. In this case we are dereferencing a
+/// `String` to a [`str`][`&str`], then referencing the [`str`][`&str`] back to
+/// [`&str`]. The second way is more idiomatic, however both work to do the
+/// conversion explicitly rather than relying on the implicit conversion.
+///
+/// # Representation
+///
+/// A `String` is made up of three components: a pointer to some bytes, a
+/// length, and a capacity. The pointer points to an internal buffer `String`
+/// uses to store its data. The length is the number of bytes currently stored
+/// in the buffer, and the capacity is the size of the buffer in bytes. As such,
+/// the length will always be less than or equal to the capacity.
+///
+/// This buffer is always stored on the heap.
+///
+/// You can look at these with the [`as_ptr`], [`len`], and [`capacity`]
+/// methods:
+///
+/// ```
+/// use std::mem;
+///
+/// let story = String::from("Once upon a time...");
+///
+// FIXME Update this when vec_into_raw_parts is stabilized
+/// // Prevent automatically dropping the String's data
+/// let mut story = mem::ManuallyDrop::new(story);
+///
+/// let ptr = story.as_mut_ptr();
+/// let len = story.len();
+/// let capacity = story.capacity();
+///
+/// // story has nineteen bytes
+/// assert_eq!(19, len);
+///
+/// // We can re-build a String out of ptr, len, and capacity. This is all
+/// // unsafe because we are responsible for making sure the components are
+/// // valid:
+/// let s = unsafe { String::from_raw_parts(ptr, len, capacity) } ;
+///
+/// assert_eq!(String::from("Once upon a time..."), s);
+/// ```
+///
+/// [`as_ptr`]: str::as_ptr
+/// [`len`]: String::len
+/// [`capacity`]: String::capacity
+///
+/// If a `String` has enough capacity, adding elements to it will not
+/// re-allocate. For example, consider this program:
+///
+/// ```
+/// let mut s = String::new();
+///
+/// println!("{}", s.capacity());
+///
+/// for _ in 0..5 {
+/// s.push_str("hello");
+/// println!("{}", s.capacity());
+/// }
+/// ```
+///
+/// This will output the following:
+///
+/// ```text
+/// 0
+/// 5
+/// 10
+/// 20
+/// 20
+/// 40
+/// ```
+///
+/// At first, we have no memory allocated at all, but as we append to the
+/// string, it increases its capacity appropriately. If we instead use the
+/// [`with_capacity`] method to allocate the correct capacity initially:
+///
+/// ```
+/// let mut s = String::with_capacity(25);
+///
+/// println!("{}", s.capacity());
+///
+/// for _ in 0..5 {
+/// s.push_str("hello");
+/// println!("{}", s.capacity());
+/// }
+/// ```
+///
+/// [`with_capacity`]: String::with_capacity
+///
+/// We end up with a different output:
+///
+/// ```text
+/// 25
+/// 25
+/// 25
+/// 25
+/// 25
+/// 25
+/// ```
+///
+/// Here, there's no need to allocate more memory inside the loop.
+///
+/// [`str`]: type@str
+/// [`&str`]: type@str
+/// [`Deref`]: core::ops::Deref
+/// [`as_str()`]: String::as_str
+#[derive(PartialOrd, Eq, Ord)]
+#[cfg_attr(not(test), rustc_diagnostic_item = "string_type")]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct String {
+ vec: Vec<u8>,
+}
+
+/// A possible error value when converting a `String` from a UTF-8 byte vector.
+///
+/// This type is the error type for the [`from_utf8`] method on [`String`]. It
+/// is designed in such a way to carefully avoid reallocations: the
+/// [`into_bytes`] method will give back the byte vector that was used in the
+/// conversion attempt.
+///
+/// [`from_utf8`]: String::from_utf8
+/// [`into_bytes`]: FromUtf8Error::into_bytes
+///
+/// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
+/// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
+/// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error`
+/// through the [`utf8_error`] method.
+///
+/// [`Utf8Error`]: core::str::Utf8Error
+/// [`std::str`]: core::str
+/// [`&str`]: str
+/// [`utf8_error`]: Self::utf8_error
+///
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// // some invalid bytes, in a vector
+/// let bytes = vec![0, 159];
+///
+/// let value = String::from_utf8(bytes);
+///
+/// assert!(value.is_err());
+/// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FromUtf8Error {
+ bytes: Vec<u8>,
+ error: Utf8Error,
+}
+
+/// A possible error value when converting a `String` from a UTF-16 byte slice.
+///
+/// This type is the error type for the [`from_utf16`] method on [`String`].
+///
+/// [`from_utf16`]: String::from_utf16
+/// # Examples
+///
+/// Basic usage:
+///
+/// ```
+/// // 𝄞mu<invalid>ic
+/// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
+/// 0xD800, 0x0069, 0x0063];
+///
+/// assert!(String::from_utf16(v).is_err());
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+#[derive(Debug)]
+pub struct FromUtf16Error(());
+
+impl String {
+ /// Creates a new empty `String`.
+ ///
+ /// Given that the `String` is empty, this will not allocate any initial
+ /// buffer. While that means that this initial operation is very
+ /// inexpensive, it may cause excessive allocation later when you add
+ /// data. If you have an idea of how much data the `String` will hold,
+ /// consider the [`with_capacity`] method to prevent excessive
+ /// re-allocation.
+ ///
+ /// [`with_capacity`]: String::with_capacity
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = String::new();
+ /// ```
+ #[inline]
+ #[rustc_const_stable(feature = "const_string_new", since = "1.32.0")]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub const fn new() -> String {
+ String { vec: Vec::new() }
+ }
+
+ /// Creates a new empty `String` with a particular capacity.
+ ///
+ /// `String`s have an internal buffer to hold their data. The capacity is
+ /// the length of that buffer, and can be queried with the [`capacity`]
+ /// method. This method creates an empty `String`, but one with an initial
+ /// buffer that can hold `capacity` bytes. This is useful when you may be
+ /// appending a bunch of data to the `String`, reducing the number of
+ /// reallocations it needs to do.
+ ///
+ /// [`capacity`]: String::capacity
+ ///
+ /// If the given capacity is `0`, no allocation will occur, and this method
+ /// is identical to the [`new`] method.
+ ///
+ /// [`new`]: String::new
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::with_capacity(10);
+ ///
+ /// // The String contains no chars, even though it has capacity for more
+ /// assert_eq!(s.len(), 0);
+ ///
+ /// // These are all done without reallocating...
+ /// let cap = s.capacity();
+ /// for _ in 0..10 {
+ /// s.push('a');
+ /// }
+ ///
+ /// assert_eq!(s.capacity(), cap);
+ ///
+ /// // ...but this may make the string reallocate
+ /// s.push('a');
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn with_capacity(capacity: usize) -> String {
+ String { vec: Vec::with_capacity(capacity) }
+ }
+
+ // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
+ // required for this method definition, is not available. Since we don't
+ // require this method for testing purposes, I'll just stub it
+ // NB see the slice::hack module in slice.rs for more information
+ #[inline]
+ #[cfg(test)]
+ pub fn from_str(_: &str) -> String {
+ panic!("not available with cfg(test)");
+ }
+
+ /// Converts a vector of bytes to a `String`.
+ ///
+ /// A string ([`String`]) is made of bytes ([`u8`]), and a vector of bytes
+ /// ([`Vec<u8>`]) is made of bytes, so this function converts between the
+ /// two. Not all byte slices are valid `String`s, however: `String`
+ /// requires that it is valid UTF-8. `from_utf8()` checks to ensure that
+ /// the bytes are valid UTF-8, and then does the conversion.
+ ///
+ /// If you are sure that the byte slice is valid UTF-8, and you don't want
+ /// to incur the overhead of the validity check, there is an unsafe version
+ /// of this function, [`from_utf8_unchecked`], which has the same behavior
+ /// but skips the check.
+ ///
+ /// This method will take care to not copy the vector, for efficiency's
+ /// sake.
+ ///
+ /// If you need a [`&str`] instead of a `String`, consider
+ /// [`str::from_utf8`].
+ ///
+ /// The inverse of this method is [`into_bytes`].
+ ///
+ /// # Errors
+ ///
+ /// Returns [`Err`] if the slice is not UTF-8 with a description as to why the
+ /// provided bytes are not UTF-8. The vector you moved in is also included.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// // some bytes, in a vector
+ /// let sparkle_heart = vec![240, 159, 146, 150];
+ ///
+ /// // We know these bytes are valid, so we'll use `unwrap()`.
+ /// let sparkle_heart = String::from_utf8(sparkle_heart).unwrap();
+ ///
+ /// assert_eq!("💖", sparkle_heart);
+ /// ```
+ ///
+ /// Incorrect bytes:
+ ///
+ /// ```
+ /// // some invalid bytes, in a vector
+ /// let sparkle_heart = vec![0, 159, 146, 150];
+ ///
+ /// assert!(String::from_utf8(sparkle_heart).is_err());
+ /// ```
+ ///
+ /// See the docs for [`FromUtf8Error`] for more details on what you can do
+ /// with this error.
+ ///
+ /// [`from_utf8_unchecked`]: String::from_utf8_unchecked
+ /// [`Vec<u8>`]: crate::vec::Vec
+ /// [`&str`]: str
+ /// [`into_bytes`]: String::into_bytes
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
+ match str::from_utf8(&vec) {
+ Ok(..) => Ok(String { vec }),
+ Err(e) => Err(FromUtf8Error { bytes: vec, error: e }),
+ }
+ }
+
+ /// Converts a slice of bytes to a string, including invalid characters.
+ ///
+ /// Strings are made of bytes ([`u8`]), and a slice of bytes
+ /// ([`&[u8]`][byteslice]) is made of bytes, so this function converts
+ /// between the two. Not all byte slices are valid strings, however: strings
+ /// are required to be valid UTF-8. During this conversion,
+ /// `from_utf8_lossy()` will replace any invalid UTF-8 sequences with
+ /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD], which looks like this: �
+ ///
+ /// [byteslice]: ../../std/primitive.slice.html
+ /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
+ ///
+ /// If you are sure that the byte slice is valid UTF-8, and you don't want
+ /// to incur the overhead of the conversion, there is an unsafe version
+ /// of this function, [`from_utf8_unchecked`], which has the same behavior
+ /// but skips the checks.
+ ///
+ /// [`from_utf8_unchecked`]: String::from_utf8_unchecked
+ ///
+ /// This function returns a [`Cow<'a, str>`]. If our byte slice is invalid
+ /// UTF-8, then we need to insert the replacement characters, which will
+ /// change the size of the string, and hence, require a `String`. But if
+ /// it's already valid UTF-8, we don't need a new allocation. This return
+ /// type allows us to handle both cases.
+ ///
+ /// [`Cow<'a, str>`]: crate::borrow::Cow
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// // some bytes, in a vector
+ /// let sparkle_heart = vec![240, 159, 146, 150];
+ ///
+ /// let sparkle_heart = String::from_utf8_lossy(&sparkle_heart);
+ ///
+ /// assert_eq!("💖", sparkle_heart);
+ /// ```
+ ///
+ /// Incorrect bytes:
+ ///
+ /// ```
+ /// // some invalid bytes
+ /// let input = b"Hello \xF0\x90\x80World";
+ /// let output = String::from_utf8_lossy(input);
+ ///
+ /// assert_eq!("Hello �World", output);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn from_utf8_lossy(v: &[u8]) -> Cow<'_, str> {
+ let mut iter = lossy::Utf8Lossy::from_bytes(v).chunks();
+
+ let (first_valid, first_broken) = if let Some(chunk) = iter.next() {
+ let lossy::Utf8LossyChunk { valid, broken } = chunk;
+ if valid.len() == v.len() {
+ debug_assert!(broken.is_empty());
+ return Cow::Borrowed(valid);
+ }
+ (valid, broken)
+ } else {
+ return Cow::Borrowed("");
+ };
+
+ const REPLACEMENT: &str = "\u{FFFD}";
+
+ let mut res = String::with_capacity(v.len());
+ res.push_str(first_valid);
+ if !first_broken.is_empty() {
+ res.push_str(REPLACEMENT);
+ }
+
+ for lossy::Utf8LossyChunk { valid, broken } in iter {
+ res.push_str(valid);
+ if !broken.is_empty() {
+ res.push_str(REPLACEMENT);
+ }
+ }
+
+ Cow::Owned(res)
+ }
+
+ /// Decode a UTF-16 encoded vector `v` into a `String`, returning [`Err`]
+ /// if `v` contains any invalid data.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// // 𝄞music
+ /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
+ /// 0x0073, 0x0069, 0x0063];
+ /// assert_eq!(String::from("𝄞music"),
+ /// String::from_utf16(v).unwrap());
+ ///
+ /// // 𝄞mu<invalid>ic
+ /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
+ /// 0xD800, 0x0069, 0x0063];
+ /// assert!(String::from_utf16(v).is_err());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn from_utf16(v: &[u16]) -> Result<String, FromUtf16Error> {
+ // This isn't done via collect::<Result<_, _>>() for performance reasons.
+ // FIXME: the function can be simplified again when #48994 is closed.
+ let mut ret = String::with_capacity(v.len());
+ for c in decode_utf16(v.iter().cloned()) {
+ if let Ok(c) = c {
+ ret.push(c);
+ } else {
+ return Err(FromUtf16Error(()));
+ }
+ }
+ Ok(ret)
+ }
+
+ /// Decode a UTF-16 encoded slice `v` into a `String`, replacing
+ /// invalid data with [the replacement character (`U+FFFD`)][U+FFFD].
+ ///
+ /// Unlike [`from_utf8_lossy`] which returns a [`Cow<'a, str>`],
+ /// `from_utf16_lossy` returns a `String` since the UTF-16 to UTF-8
+ /// conversion requires a memory allocation.
+ ///
+ /// [`from_utf8_lossy`]: String::from_utf8_lossy
+ /// [`Cow<'a, str>`]: crate::borrow::Cow
+ /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// // 𝄞mus<invalid>ic<invalid>
+ /// let v = &[0xD834, 0xDD1E, 0x006d, 0x0075,
+ /// 0x0073, 0xDD1E, 0x0069, 0x0063,
+ /// 0xD834];
+ ///
+ /// assert_eq!(String::from("𝄞mus\u{FFFD}ic\u{FFFD}"),
+ /// String::from_utf16_lossy(v));
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn from_utf16_lossy(v: &[u16]) -> String {
+ decode_utf16(v.iter().cloned()).map(|r| r.unwrap_or(REPLACEMENT_CHARACTER)).collect()
+ }
+
+ /// Decomposes a `String` into its raw components.
+ ///
+ /// Returns the raw pointer to the underlying data, the length of
+ /// the string (in bytes), and the allocated capacity of the data
+ /// (in bytes). These are the same arguments in the same order as
+ /// the arguments to [`from_raw_parts`].
+ ///
+ /// After calling this function, the caller is responsible for the
+ /// memory previously managed by the `String`. The only way to do
+ /// this is to convert the raw pointer, length, and capacity back
+ /// into a `String` with the [`from_raw_parts`] function, allowing
+ /// the destructor to perform the cleanup.
+ ///
+ /// [`from_raw_parts`]: String::from_raw_parts
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(vec_into_raw_parts)]
+ /// let s = String::from("hello");
+ ///
+ /// let (ptr, len, cap) = s.into_raw_parts();
+ ///
+ /// let rebuilt = unsafe { String::from_raw_parts(ptr, len, cap) };
+ /// assert_eq!(rebuilt, "hello");
+ /// ```
+ #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
+ pub fn into_raw_parts(self) -> (*mut u8, usize, usize) {
+ self.vec.into_raw_parts()
+ }
+
+ /// Creates a new `String` from a length, capacity, and pointer.
+ ///
+ /// # Safety
+ ///
+ /// This is highly unsafe, due to the number of invariants that aren't
+ /// checked:
+ ///
+ /// * The memory at `buf` needs to have been previously allocated by the
+ /// same allocator the standard library uses, with a required alignment of exactly 1.
+ /// * `length` needs to be less than or equal to `capacity`.
+ /// * `capacity` needs to be the correct value.
+ /// * The first `length` bytes at `buf` need to be valid UTF-8.
+ ///
+ /// Violating these may cause problems like corrupting the allocator's
+ /// internal data structures.
+ ///
+ /// The ownership of `buf` is effectively transferred to the
+ /// `String` which may then deallocate, reallocate or change the
+ /// contents of memory pointed to by the pointer at will. Ensure
+ /// that nothing else uses the pointer after calling this
+ /// function.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// use std::mem;
+ ///
+ /// unsafe {
+ /// let s = String::from("hello");
+ ///
+ // FIXME Update this when vec_into_raw_parts is stabilized
+ /// // Prevent automatically dropping the String's data
+ /// let mut s = mem::ManuallyDrop::new(s);
+ ///
+ /// let ptr = s.as_mut_ptr();
+ /// let len = s.len();
+ /// let capacity = s.capacity();
+ ///
+ /// let s = String::from_raw_parts(ptr, len, capacity);
+ ///
+ /// assert_eq!(String::from("hello"), s);
+ /// }
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub unsafe fn from_raw_parts(buf: *mut u8, length: usize, capacity: usize) -> String {
+ unsafe { String { vec: Vec::from_raw_parts(buf, length, capacity) } }
+ }
+
+ /// Converts a vector of bytes to a `String` without checking that the
+ /// string contains valid UTF-8.
+ ///
+ /// See the safe version, [`from_utf8`], for more details.
+ ///
+ /// [`from_utf8`]: String::from_utf8
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because it does not check that the bytes passed
+ /// to it are valid UTF-8. If this constraint is violated, it may cause
+ /// memory unsafety issues with future users of the `String`, as the rest of
+ /// the standard library assumes that `String`s are valid UTF-8.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// // some bytes, in a vector
+ /// let sparkle_heart = vec![240, 159, 146, 150];
+ ///
+ /// let sparkle_heart = unsafe {
+ /// String::from_utf8_unchecked(sparkle_heart)
+ /// };
+ ///
+ /// assert_eq!("💖", sparkle_heart);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub unsafe fn from_utf8_unchecked(bytes: Vec<u8>) -> String {
+ String { vec: bytes }
+ }
+
+ /// Converts a `String` into a byte vector.
+ ///
+ /// This consumes the `String`, so we do not need to copy its contents.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = String::from("hello");
+ /// let bytes = s.into_bytes();
+ ///
+ /// assert_eq!(&[104, 101, 108, 108, 111][..], &bytes[..]);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn into_bytes(self) -> Vec<u8> {
+ self.vec
+ }
+
+ /// Extracts a string slice containing the entire `String`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = String::from("foo");
+ ///
+ /// assert_eq!("foo", s.as_str());
+ /// ```
+ #[inline]
+ #[stable(feature = "string_as_str", since = "1.7.0")]
+ pub fn as_str(&self) -> &str {
+ // `&String` coerces to `&str` through the `Deref` impl further down this file.
+ self
+ }
+
+ /// Converts a `String` into a mutable string slice.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("foobar");
+ /// let s_mut_str = s.as_mut_str();
+ ///
+ /// s_mut_str.make_ascii_uppercase();
+ ///
+ /// assert_eq!("FOOBAR", s_mut_str);
+ /// ```
+ #[inline]
+ #[stable(feature = "string_as_str", since = "1.7.0")]
+ pub fn as_mut_str(&mut self) -> &mut str {
+ // `&mut String` coerces to `&mut str` through the `DerefMut` impl.
+ self
+ }
+
+ /// Appends a given string slice onto the end of this `String`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("foo");
+ ///
+ /// s.push_str("bar");
+ ///
+ /// assert_eq!("foobar", s);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push_str(&mut self, string: &str) {
+ // A `str` is guaranteed valid UTF-8, so appending its raw bytes keeps
+ // `self.vec` valid UTF-8 as well.
+ self.vec.extend_from_slice(string.as_bytes())
+ }
+
+ /// Returns this `String`'s capacity, in bytes.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = String::with_capacity(10);
+ ///
+ /// assert!(s.capacity() >= 10);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn capacity(&self) -> usize {
+ self.vec.capacity()
+ }
+
+ /// Ensures that this `String`'s capacity is at least `additional` bytes
+ /// larger than its length.
+ ///
+ /// The capacity may be increased by more than `additional` bytes if it
+ /// chooses, to prevent frequent reallocations.
+ ///
+ /// If you do not want this "at least" behavior, see the [`reserve_exact`]
+ /// method.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows [`usize`].
+ ///
+ /// [`reserve_exact`]: String::reserve_exact
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::new();
+ ///
+ /// s.reserve(10);
+ ///
+ /// assert!(s.capacity() >= 10);
+ /// ```
+ ///
+ /// This may not actually increase the capacity:
+ ///
+ /// ```
+ /// let mut s = String::with_capacity(10);
+ /// s.push('a');
+ /// s.push('b');
+ ///
+ /// // s now has a length of 2 and a capacity of 10
+ /// assert_eq!(2, s.len());
+ /// assert_eq!(10, s.capacity());
+ ///
+ /// // Since we already have an extra 8 capacity, calling this...
+ /// s.reserve(8);
+ ///
+ /// // ... doesn't actually increase.
+ /// assert_eq!(10, s.capacity());
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve(&mut self, additional: usize) {
+ // `String` is a thin wrapper over `Vec<u8>`; capacity management is
+ // delegated wholesale to the vector.
+ self.vec.reserve(additional)
+ }
+
+ /// Ensures that this `String`'s capacity is `additional` bytes
+ /// larger than its length.
+ ///
+ /// Consider using the [`reserve`] method unless you absolutely know
+ /// better than the allocator.
+ ///
+ /// [`reserve`]: String::reserve
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::new();
+ ///
+ /// s.reserve_exact(10);
+ ///
+ /// assert!(s.capacity() >= 10);
+ /// ```
+ ///
+ /// This may not actually increase the capacity:
+ ///
+ /// ```
+ /// let mut s = String::with_capacity(10);
+ /// s.push('a');
+ /// s.push('b');
+ ///
+ /// // s now has a length of 2 and a capacity of 10
+ /// assert_eq!(2, s.len());
+ /// assert_eq!(10, s.capacity());
+ ///
+ /// // Since we already have an extra 8 capacity, calling this...
+ /// s.reserve_exact(8);
+ ///
+ /// // ... doesn't actually increase.
+ /// assert_eq!(10, s.capacity());
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve_exact(&mut self, additional: usize) {
+ self.vec.reserve_exact(additional)
+ }
+
+ /// Tries to reserve capacity for at least `additional` more elements to be inserted
+ /// in the given `String`. The collection may reserve more space to avoid
+ /// frequent reallocations. After calling `try_reserve`, capacity will be
+ /// greater than or equal to `self.len() + additional`. Does nothing if
+ /// capacity is already sufficient.
+ ///
+ /// # Errors
+ ///
+ /// If the capacity overflows, or the allocator reports a failure, then an error
+ /// is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_reserve)]
+ /// use std::collections::TryReserveError;
+ ///
+ /// fn process_data(data: &str) -> Result<String, TryReserveError> {
+ /// let mut output = String::new();
+ ///
+ /// // Pre-reserve the memory, exiting if we can't
+ /// output.try_reserve(data.len())?;
+ ///
+ /// // Now we know this can't OOM in the middle of our complex work
+ /// output.push_str(data);
+ ///
+ /// Ok(output)
+ /// }
+ /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
+ /// ```
+ #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+ pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ self.vec.try_reserve(additional)
+ }
+
+ /// Tries to reserve the minimum capacity for exactly `additional` more elements to
+ /// be inserted in the given `String`. After calling `try_reserve_exact`,
+ /// capacity will be greater than or equal to `self.len() + additional`.
+ /// Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it
+ /// requests. Therefore, capacity can not be relied upon to be precisely
+ /// minimal. Prefer `reserve` if future insertions are expected.
+ ///
+ /// # Errors
+ ///
+ /// If the capacity overflows, or the allocator reports a failure, then an error
+ /// is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_reserve)]
+ /// use std::collections::TryReserveError;
+ ///
+ /// fn process_data(data: &str) -> Result<String, TryReserveError> {
+ /// let mut output = String::new();
+ ///
+ /// // Pre-reserve the memory, exiting if we can't
+ /// output.try_reserve_exact(data.len())?;
+ ///
+ /// // Now we know this can't OOM in the middle of our complex work
+ /// output.push_str(data);
+ ///
+ /// Ok(output)
+ /// }
+ /// # process_data("rust").expect("why is the test harness OOMing on 4 bytes?");
+ /// ```
+ #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+ pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ self.vec.try_reserve_exact(additional)
+ }
+
+ /// Shrinks the capacity of this `String` to match its length.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("foo");
+ ///
+ /// s.reserve(100);
+ /// assert!(s.capacity() >= 100);
+ ///
+ /// s.shrink_to_fit();
+ /// // The allocator is not required to shrink to exactly the length,
+ /// // so only a lower bound can be asserted portably.
+ /// assert!(s.capacity() >= 3);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn shrink_to_fit(&mut self) {
+ self.vec.shrink_to_fit()
+ }
+
+ /// Shrinks the capacity of this `String` with a lower bound.
+ ///
+ /// The capacity will remain at least as large as both the length
+ /// and the supplied value.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the current capacity is smaller than the supplied
+ /// minimum capacity.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(shrink_to)]
+ /// let mut s = String::from("foo");
+ ///
+ /// s.reserve(100);
+ /// assert!(s.capacity() >= 100);
+ ///
+ /// s.shrink_to(10);
+ /// assert!(s.capacity() >= 10);
+ /// s.shrink_to(0);
+ /// assert!(s.capacity() >= 3);
+ /// ```
+ #[inline]
+ #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
+ pub fn shrink_to(&mut self, min_capacity: usize) {
+ self.vec.shrink_to(min_capacity)
+ }
+
+ /// Appends the given [`char`] to the end of this `String`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("abc");
+ ///
+ /// s.push('1');
+ /// s.push('2');
+ /// s.push('3');
+ ///
+ /// assert_eq!("abc123", s);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push(&mut self, ch: char) {
+ match ch.len_utf8() {
+ // Fast path: a one-byte (ASCII) char is its own UTF-8 encoding.
+ 1 => self.vec.push(ch as u8),
+ // Multi-byte chars are encoded into a small stack buffer first.
+ _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
+ }
+ }
+
+ /// Returns a byte slice of this `String`'s contents.
+ ///
+ /// The inverse of this method is [`from_utf8`].
+ ///
+ /// [`from_utf8`]: String::from_utf8
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = String::from("hello");
+ ///
+ /// assert_eq!(&[104, 101, 108, 108, 111], s.as_bytes());
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn as_bytes(&self) -> &[u8] {
+ &self.vec
+ }
+
+ /// Shortens this `String` to the specified length.
+ ///
+ /// If `new_len` is greater than the string's current length, this has no
+ /// effect.
+ ///
+ /// Note that this method has no effect on the allocated capacity
+ /// of the string.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `new_len` does not lie on a [`char`] boundary.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("hello");
+ ///
+ /// s.truncate(2);
+ ///
+ /// assert_eq!("he", s);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn truncate(&mut self, new_len: usize) {
+ if new_len <= self.len() {
+ // Truncating mid-codepoint would leave invalid UTF-8 behind.
+ assert!(self.is_char_boundary(new_len));
+ self.vec.truncate(new_len)
+ }
+ }
+
+ /// Removes the last character from the string buffer and returns it.
+ ///
+ /// Returns [`None`] if this `String` is empty.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("foo");
+ ///
+ /// assert_eq!(s.pop(), Some('o'));
+ /// assert_eq!(s.pop(), Some('o'));
+ /// assert_eq!(s.pop(), Some('f'));
+ ///
+ /// assert_eq!(s.pop(), None);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop(&mut self) -> Option<char> {
+ let ch = self.chars().rev().next()?;
+ let newlen = self.len() - ch.len_utf8();
+ unsafe {
+ // SAFETY: `newlen` is `len` minus the byte length of the final char,
+ // so it is in bounds and lies on a UTF-8 char boundary.
+ self.vec.set_len(newlen);
+ }
+ Some(ch)
+ }
+
+ /// Removes a [`char`] from this `String` at a byte position and returns it.
+ ///
+ /// This is an *O*(*n*) operation, as it requires copying every element in the
+ /// buffer.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `idx` is larger than or equal to the `String`'s length,
+ /// or if it does not lie on a [`char`] boundary.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("foo");
+ ///
+ /// assert_eq!(s.remove(0), 'f');
+ /// assert_eq!(s.remove(1), 'o');
+ /// assert_eq!(s.remove(0), 'o');
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove(&mut self, idx: usize) -> char {
+ // Slicing `self[idx..]` panics if `idx` is out of bounds or not a char
+ // boundary, which enforces the documented panic conditions.
+ let ch = match self[idx..].chars().next() {
+ Some(ch) => ch,
+ None => panic!("cannot remove a char from the end of a string"),
+ };
+
+ let next = idx + ch.len_utf8();
+ let len = self.len();
+ unsafe {
+ // SAFETY: `next <= len`, so the tail `next..len` is in bounds; shifting
+ // it left over the removed char and shrinking the length by the char's
+ // byte width leaves the buffer valid UTF-8.
+ ptr::copy(self.vec.as_ptr().add(next), self.vec.as_mut_ptr().add(idx), len - next);
+ self.vec.set_len(len - (next - idx));
+ }
+ ch
+ }
+
+ /// Retains only the characters specified by the predicate.
+ ///
+ /// In other words, remove all characters `c` such that `f(c)` returns `false`.
+ /// This method operates in place, visiting each character exactly once in the
+ /// original order, and preserves the order of the retained characters.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut s = String::from("f_o_ob_ar");
+ ///
+ /// s.retain(|c| c != '_');
+ ///
+ /// assert_eq!(s, "foobar");
+ /// ```
+ ///
+ /// The exact order may be useful for tracking external state, like an index.
+ ///
+ /// ```
+ /// let mut s = String::from("abcde");
+ /// let keep = [false, true, true, false, true];
+ /// let mut i = 0;
+ /// s.retain(|_| (keep[i], i += 1).0);
+ /// assert_eq!(s, "bce");
+ /// ```
+ #[inline]
+ #[stable(feature = "string_retain", since = "1.26.0")]
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(char) -> bool,
+ {
+ // NOTE(review): if `f` panics after bytes have been shifted left but before
+ // the final `set_len`, the length is still `len`, so the buffer's tail may
+ // be observable as invalid UTF-8 during unwinding — consider a drop guard.
+ // TODO confirm against upstream panic-safety expectations.
+ let len = self.len();
+ let mut del_bytes = 0;
+ let mut idx = 0;
+
+ while idx < len {
+ // SAFETY: `idx` always lies on a char boundary (it advances by whole
+ // char widths) and `idx < len`, so the slice is valid and non-empty.
+ let ch = unsafe { self.get_unchecked(idx..len).chars().next().unwrap() };
+ let ch_len = ch.len_utf8();
+
+ if !f(ch) {
+ del_bytes += ch_len;
+ } else if del_bytes > 0 {
+ // Shift each kept char left over the gap of deleted bytes.
+ unsafe {
+ // SAFETY: source `idx..idx + ch_len` and destination starting at
+ // `idx - del_bytes` are both within the original `len` bytes.
+ ptr::copy(
+ self.vec.as_ptr().add(idx),
+ self.vec.as_mut_ptr().add(idx - del_bytes),
+ ch_len,
+ );
+ }
+ }
+
+ // Point idx to the next char
+ idx += ch_len;
+ }
+
+ if del_bytes > 0 {
+ unsafe {
+ // SAFETY: the first `len - del_bytes` bytes are the retained chars,
+ // packed contiguously, so they form valid UTF-8.
+ self.vec.set_len(len - del_bytes);
+ }
+ }
+ }
+
+ /// Inserts a character into this `String` at a byte position.
+ ///
+ /// This is an *O*(*n*) operation as it requires copying every element in the
+ /// buffer.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `idx` is larger than the `String`'s length, or if it does not
+ /// lie on a [`char`] boundary.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::with_capacity(3);
+ ///
+ /// s.insert(0, 'f');
+ /// s.insert(1, 'o');
+ /// s.insert(2, 'o');
+ ///
+ /// assert_eq!("foo", s);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, idx: usize, ch: char) {
+ assert!(self.is_char_boundary(idx));
+ let mut bits = [0; 4];
+ let bits = ch.encode_utf8(&mut bits).as_bytes();
+
+ unsafe {
+ // SAFETY: `idx` was asserted to be a char boundary, and `bits` is the
+ // complete UTF-8 encoding of `ch`, so validity is preserved.
+ self.insert_bytes(idx, bits);
+ }
+ }
+
+ /// Splices `bytes` into the buffer at byte position `idx`, shifting the
+ /// tail right to make room.
+ ///
+ /// # Safety
+ ///
+ /// `idx` must be `<= self.len()` and lie on a UTF-8 char boundary, and
+ /// `bytes` must be such that the resulting buffer is valid UTF-8
+ /// (e.g. the encoding of whole chars or of a `str`).
+ unsafe fn insert_bytes(&mut self, idx: usize, bytes: &[u8]) {
+ let len = self.len();
+ let amt = bytes.len();
+ self.vec.reserve(amt);
+
+ unsafe {
+ // SAFETY: `reserve(amt)` guarantees capacity for `len + amt` bytes, so
+ // shifting the tail `idx..len` right by `amt` and writing `bytes` into
+ // the gap stays in bounds; the caller guarantees UTF-8 validity.
+ ptr::copy(self.vec.as_ptr().add(idx), self.vec.as_mut_ptr().add(idx + amt), len - idx);
+ ptr::copy(bytes.as_ptr(), self.vec.as_mut_ptr().add(idx), amt);
+ self.vec.set_len(len + amt);
+ }
+ }
+
+ /// Inserts a string slice into this `String` at a byte position.
+ ///
+ /// This is an *O*(*n*) operation as it requires copying every element in the
+ /// buffer.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `idx` is larger than the `String`'s length, or if it does not
+ /// lie on a [`char`] boundary.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("bar");
+ ///
+ /// s.insert_str(0, "foo");
+ ///
+ /// assert_eq!("foobar", s);
+ /// ```
+ #[inline]
+ #[stable(feature = "insert_str", since = "1.16.0")]
+ pub fn insert_str(&mut self, idx: usize, string: &str) {
+ assert!(self.is_char_boundary(idx));
+
+ unsafe {
+ // SAFETY: `idx` was asserted to be a char boundary, and the bytes of a
+ // `str` are valid UTF-8, so the spliced buffer remains valid.
+ self.insert_bytes(idx, string.as_bytes());
+ }
+ }
+
+ /// Returns a mutable reference to the contents of this `String`.
+ ///
+ /// # Safety
+ ///
+ /// This function is unsafe because it does not check that the bytes passed
+ /// to it are valid UTF-8. If this constraint is violated, it may cause
+ /// memory unsafety issues with future users of the `String`, as the rest of
+ /// the standard library assumes that `String`s are valid UTF-8.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("hello");
+ ///
+ /// unsafe {
+ /// let vec = s.as_mut_vec();
+ /// assert_eq!(&[104, 101, 108, 108, 111][..], &vec[..]);
+ ///
+ /// vec.reverse();
+ /// }
+ /// assert_eq!(s, "olleh");
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub unsafe fn as_mut_vec(&mut self) -> &mut Vec<u8> {
+ // The caller promises (per the `# Safety` section) to keep the bytes
+ // valid UTF-8 while holding this reference.
+ &mut self.vec
+ }
+
+ /// Returns the length of this `String`, in bytes, not [`char`]s or
+ /// graphemes. In other words, it may not be what a human considers the
+ /// length of the string.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let a = String::from("foo");
+ /// assert_eq!(a.len(), 3);
+ ///
+ /// let fancy_f = String::from("ƒoo");
+ /// assert_eq!(fancy_f.len(), 4);
+ /// assert_eq!(fancy_f.chars().count(), 3);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn len(&self) -> usize {
+ self.vec.len()
+ }
+
+ /// Returns `true` if this `String` has a length of zero, and `false` otherwise.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut v = String::new();
+ /// assert!(v.is_empty());
+ ///
+ /// v.push('a');
+ /// assert!(!v.is_empty());
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+
+ /// Splits the string into two at the given index.
+ ///
+ /// Returns a newly allocated `String`. `self` contains bytes `[0, at)`, and
+ /// the returned `String` contains bytes `[at, len)`. `at` must be on the
+ /// boundary of a UTF-8 code point.
+ ///
+ /// Note that the capacity of `self` does not change.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `at` is not on a `UTF-8` code point boundary, or if it is beyond the last
+ /// code point of the string.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # fn main() {
+ /// let mut hello = String::from("Hello, World!");
+ /// let world = hello.split_off(7);
+ /// assert_eq!(hello, "Hello, ");
+ /// assert_eq!(world, "World!");
+ /// # }
+ /// ```
+ #[inline]
+ #[stable(feature = "string_split_off", since = "1.16.0")]
+ #[must_use = "use `.truncate()` if you don't need the other half"]
+ pub fn split_off(&mut self, at: usize) -> String {
+ assert!(self.is_char_boundary(at));
+ let other = self.vec.split_off(at);
+ // SAFETY: `at` is a char boundary, so both halves of a valid UTF-8
+ // buffer are themselves valid UTF-8.
+ unsafe { String::from_utf8_unchecked(other) }
+ }
+
+ /// Truncates this `String`, removing all contents.
+ ///
+ /// While this means the `String` will have a length of zero, it does not
+ /// touch its capacity.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("foo");
+ ///
+ /// s.clear();
+ ///
+ /// assert!(s.is_empty());
+ /// assert_eq!(0, s.len());
+ /// assert_eq!(3, s.capacity());
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn clear(&mut self) {
+ self.vec.clear()
+ }
+
+ /// Creates a draining iterator that removes the specified range in the `String`
+ /// and yields the removed `chars`.
+ ///
+ /// Note: The element range is removed even if the iterator is not
+ /// consumed until the end.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point or end point do not lie on a [`char`]
+ /// boundary, or if they're out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("α is alpha, β is beta");
+ /// let beta_offset = s.find('β').unwrap_or(s.len());
+ ///
+ /// // Remove the range up until the β from the string
+ /// let t: String = s.drain(..beta_offset).collect();
+ /// assert_eq!(t, "α is alpha, ");
+ /// assert_eq!(s, "β is beta");
+ ///
+ /// // A full range clears the string
+ /// s.drain(..);
+ /// assert_eq!(s, "");
+ /// ```
+ #[stable(feature = "drain", since = "1.6.0")]
+ pub fn drain<R>(&mut self, range: R) -> Drain<'_>
+ where
+ R: RangeBounds<usize>,
+ {
+ // Memory safety
+ //
+ // The String version of Drain does not have the memory safety issues
+ // of the vector version. The data is just plain bytes.
+ // Because the range removal happens in Drop, if the Drain iterator is leaked,
+ // the removal will not happen.
+ //
+ // Normalize the generic bounds into a concrete `start..end` byte range.
+ let len = self.len();
+ let start = match range.start_bound() {
+ Included(&n) => n,
+ Excluded(&n) => n + 1,
+ Unbounded => 0,
+ };
+ let end = match range.end_bound() {
+ Included(&n) => n + 1,
+ Excluded(&n) => n,
+ Unbounded => len,
+ };
+
+ // Take out two simultaneous borrows. The &mut String won't be accessed
+ // until iteration is over, in Drop.
+ let self_ptr = self as *mut _;
+ // slicing does the appropriate bounds checks
+ let chars_iter = self[start..end].chars();
+
+ Drain { start, end, iter: chars_iter, string: self_ptr }
+ }
+
+ /// Removes the specified range in the string,
+ /// and replaces it with the given string.
+ /// The given string doesn't need to be the same length as the range.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point or end point do not lie on a [`char`]
+ /// boundary, or if they're out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let mut s = String::from("α is alpha, β is beta");
+ /// let beta_offset = s.find('β').unwrap_or(s.len());
+ ///
+ /// // Replace the range up until the β from the string
+ /// s.replace_range(..beta_offset, "Α is capital alpha; ");
+ /// assert_eq!(s, "Α is capital alpha; β is beta");
+ /// ```
+ #[stable(feature = "splice", since = "1.27.0")]
+ pub fn replace_range<R>(&mut self, range: R, replace_with: &str)
+ where
+ R: RangeBounds<usize>,
+ {
+ // Memory safety
+ //
+ // `replace_range` does not have the memory safety issues of the vector
+ // version of `Splice`. The data is just plain bytes.
+
+ // Validate up front that both range endpoints land on char boundaries.
+ match range.start_bound() {
+ Included(&n) => assert!(self.is_char_boundary(n)),
+ Excluded(&n) => assert!(self.is_char_boundary(n + 1)),
+ Unbounded => {}
+ };
+ match range.end_bound() {
+ Included(&n) => assert!(self.is_char_boundary(n + 1)),
+ Excluded(&n) => assert!(self.is_char_boundary(n)),
+ Unbounded => {}
+ };
+
+ // SAFETY: the replacement is the byte encoding of a `str` spliced between
+ // char boundaries, so the buffer stays valid UTF-8.
+ unsafe { self.as_mut_vec() }.splice(range, replace_with.bytes());
+ }
+
+ /// Converts this `String` into a [`Box`]`<`[`str`]`>`.
+ ///
+ /// This will drop any excess capacity.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s = String::from("hello");
+ ///
+ /// let b = s.into_boxed_str();
+ /// ```
+ #[stable(feature = "box_str", since = "1.4.0")]
+ #[inline]
+ pub fn into_boxed_str(self) -> Box<str> {
+ let slice = self.vec.into_boxed_slice();
+ // SAFETY: the bytes of a `String` are always valid UTF-8.
+ unsafe { from_boxed_utf8_unchecked(slice) }
+ }
+}
+
+impl FromUtf8Error {
+ /// Returns a slice of [`u8`]s that were attempted to convert to a `String`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// // some invalid bytes, in a vector
+ /// let bytes = vec![0, 159];
+ ///
+ /// let value = String::from_utf8(bytes);
+ ///
+ /// assert_eq!(&[0, 159], value.unwrap_err().as_bytes());
+ /// ```
+ #[stable(feature = "from_utf8_error_as_bytes", since = "1.26.0")]
+ pub fn as_bytes(&self) -> &[u8] {
+ &self.bytes[..]
+ }
+
+ /// Returns the bytes that were attempted to convert to a `String`.
+ ///
+ /// This method is carefully constructed to avoid allocation. It will
+ /// consume the error, moving out the bytes, so that a copy of the bytes
+ /// does not need to be made.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// // some invalid bytes, in a vector
+ /// let bytes = vec![0, 159];
+ ///
+ /// let value = String::from_utf8(bytes);
+ ///
+ /// assert_eq!(vec![0, 159], value.unwrap_err().into_bytes());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn into_bytes(self) -> Vec<u8> {
+ self.bytes
+ }
+
+ /// Fetch a `Utf8Error` to get more details about the conversion failure.
+ ///
+ /// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
+ /// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
+ /// an analogue to `FromUtf8Error`. See its documentation for more details
+ /// on using it.
+ ///
+ /// [`std::str`]: core::str
+ /// [`&str`]: str
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// // some invalid bytes, in a vector
+ /// let bytes = vec![0, 159];
+ ///
+ /// let error = String::from_utf8(bytes).unwrap_err().utf8_error();
+ ///
+ /// // the first byte is invalid here
+ /// assert_eq!(1, error.valid_up_to());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn utf8_error(&self) -> Utf8Error {
+ // `Utf8Error` is `Copy`, so it can be returned by value from `&self`.
+ self.error
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Display for FromUtf8Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Delegate to the inner `Utf8Error`, which knows the failure position.
+ fmt::Display::fmt(&self.error, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Display for FromUtf16Error {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // UTF-16 decoding stores no extra detail, so the message is fixed.
+ fmt::Display::fmt("invalid utf-16: lone surrogate found", f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Clone for String {
+ fn clone(&self) -> Self {
+ String { vec: self.vec.clone() }
+ }
+
+ // Overridden so the destination's existing allocation can be reused
+ // instead of always allocating afresh.
+ fn clone_from(&mut self, source: &Self) {
+ self.vec.clone_from(&source.vec);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl FromIterator<char> for String {
+ fn from_iter<I: IntoIterator<Item = char>>(iter: I) -> String {
+ // Delegate to `Extend<char>`, which reserves from the size hint.
+ let mut buf = String::new();
+ buf.extend(iter);
+ buf
+ }
+}
+
+#[stable(feature = "string_from_iter_by_ref", since = "1.17.0")]
+impl<'a> FromIterator<&'a char> for String {
+ fn from_iter<I: IntoIterator<Item = &'a char>>(iter: I) -> String {
+ let mut buf = String::new();
+ buf.extend(iter);
+ buf
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> FromIterator<&'a str> for String {
+ fn from_iter<I: IntoIterator<Item = &'a str>>(iter: I) -> String {
+ let mut buf = String::new();
+ buf.extend(iter);
+ buf
+ }
+}
+
+#[stable(feature = "extend_string", since = "1.4.0")]
+impl FromIterator<String> for String {
+ fn from_iter<I: IntoIterator<Item = String>>(iter: I) -> String {
+ let mut iterator = iter.into_iter();
+
+ // Because we're iterating over `String`s, we can avoid at least
+ // one allocation by getting the first string from the iterator
+ // and appending to it all the subsequent strings.
+ match iterator.next() {
+ None => String::new(),
+ Some(mut buf) => {
+ buf.extend(iterator);
+ buf
+ }
+ }
+ }
+}
+
+#[stable(feature = "box_str2", since = "1.45.0")]
+impl FromIterator<Box<str>> for String {
+ fn from_iter<I: IntoIterator<Item = Box<str>>>(iter: I) -> String {
+ let mut buf = String::new();
+ buf.extend(iter);
+ buf
+ }
+}
+
+#[stable(feature = "herd_cows", since = "1.19.0")]
+impl<'a> FromIterator<Cow<'a, str>> for String {
+ fn from_iter<I: IntoIterator<Item = Cow<'a, str>>>(iter: I) -> String {
+ let mut iterator = iter.into_iter();
+
+ // Because we're iterating over CoWs, we can (potentially) avoid at least
+ // one allocation by getting the first item and appending to it all the
+ // subsequent items.
+ match iterator.next() {
+ None => String::new(),
+ Some(cow) => {
+ // `into_owned` is free when the first Cow is already `Owned`.
+ let mut buf = cow.into_owned();
+ buf.extend(iterator);
+ buf
+ }
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Extend<char> for String {
+ fn extend<I: IntoIterator<Item = char>>(&mut self, iter: I) {
+ let iterator = iter.into_iter();
+ // Reserve once from the size hint's lower bound before appending; each
+ // char is at most 4 bytes, so this is only a lower bound on growth.
+ let (lower_bound, _) = iterator.size_hint();
+ self.reserve(lower_bound);
+ iterator.for_each(move |c| self.push(c));
+ }
+
+ #[inline]
+ fn extend_one(&mut self, c: char) {
+ self.push(c);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+}
+
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a> Extend<&'a char> for String {
+ fn extend<I: IntoIterator<Item = &'a char>>(&mut self, iter: I) {
+ // `char` is `Copy`; clone and forward to the by-value impl.
+ self.extend(iter.into_iter().cloned());
+ }
+
+ #[inline]
+ fn extend_one(&mut self, &c: &'a char) {
+ self.push(c);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> Extend<&'a str> for String {
+ // No `extend_reserve`: item count says nothing about total byte length.
+ fn extend<I: IntoIterator<Item = &'a str>>(&mut self, iter: I) {
+ iter.into_iter().for_each(move |s| self.push_str(s));
+ }
+
+ #[inline]
+ fn extend_one(&mut self, s: &'a str) {
+ self.push_str(s);
+ }
+}
+
+#[stable(feature = "box_str2", since = "1.45.0")]
+impl Extend<Box<str>> for String {
+ fn extend<I: IntoIterator<Item = Box<str>>>(&mut self, iter: I) {
+ iter.into_iter().for_each(move |s| self.push_str(&s));
+ }
+}
+
+#[stable(feature = "extend_string", since = "1.4.0")]
+impl Extend<String> for String {
+ fn extend<I: IntoIterator<Item = String>>(&mut self, iter: I) {
+ iter.into_iter().for_each(move |s| self.push_str(&s));
+ }
+
+ #[inline]
+ fn extend_one(&mut self, s: String) {
+ self.push_str(&s);
+ }
+}
+
+#[stable(feature = "herd_cows", since = "1.19.0")]
+impl<'a> Extend<Cow<'a, str>> for String {
+ fn extend<I: IntoIterator<Item = Cow<'a, str>>>(&mut self, iter: I) {
+ iter.into_iter().for_each(move |s| self.push_str(&s));
+ }
+
+ #[inline]
+ fn extend_one(&mut self, s: Cow<'a, str>) {
+ self.push_str(&s);
+ }
+}
+
+/// A convenience impl that delegates to the impl for `&str`.
+///
+/// # Examples
+///
+/// ```
+/// assert_eq!(String::from("Hello world").find("world"), Some(6));
+/// ```
+#[unstable(
+ feature = "pattern",
+ reason = "API not fully fleshed out and ready to be stabilized",
+ issue = "27721"
+)]
+impl<'a, 'b> Pattern<'a> for &'b String {
+ type Searcher = <&'b str as Pattern<'a>>::Searcher;
+
+ // Every method forwards through `self[..]` (a `&str`) to the `&str` impl.
+ fn into_searcher(self, haystack: &'a str) -> <&'b str as Pattern<'a>>::Searcher {
+ self[..].into_searcher(haystack)
+ }
+
+ #[inline]
+ fn is_contained_in(self, haystack: &'a str) -> bool {
+ self[..].is_contained_in(haystack)
+ }
+
+ #[inline]
+ fn is_prefix_of(self, haystack: &'a str) -> bool {
+ self[..].is_prefix_of(haystack)
+ }
+
+ #[inline]
+ fn strip_prefix_of(self, haystack: &'a str) -> Option<&'a str> {
+ self[..].strip_prefix_of(haystack)
+ }
+
+ #[inline]
+ fn is_suffix_of(self, haystack: &'a str) -> bool {
+ self[..].is_suffix_of(haystack)
+ }
+
+ #[inline]
+ fn strip_suffix_of(self, haystack: &'a str) -> Option<&'a str> {
+ self[..].strip_suffix_of(haystack)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl PartialEq for String {
+ #[inline]
+ fn eq(&self, other: &String) -> bool {
+ PartialEq::eq(&self[..], &other[..])
+ }
+ #[inline]
+ fn ne(&self, other: &String) -> bool {
+ PartialEq::ne(&self[..], &other[..])
+ }
+}
+
+// Generates a symmetric pair of `PartialEq` impls between `$lhs` and `$rhs`,
+// each comparing through `&self[..]` (i.e. as `str`).
+macro_rules! impl_eq {
+ ($lhs:ty, $rhs: ty) => {
+ #[stable(feature = "rust1", since = "1.0.0")]
+ // Some invocations pass types that use `'a`/`'b` and some don't, so the
+ // unused-lifetimes lint must be allowed here.
+ #[allow(unused_lifetimes)]
+ impl<'a, 'b> PartialEq<$rhs> for $lhs {
+ #[inline]
+ fn eq(&self, other: &$rhs) -> bool {
+ PartialEq::eq(&self[..], &other[..])
+ }
+ #[inline]
+ fn ne(&self, other: &$rhs) -> bool {
+ PartialEq::ne(&self[..], &other[..])
+ }
+ }
+
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[allow(unused_lifetimes)]
+ impl<'a, 'b> PartialEq<$lhs> for $rhs {
+ #[inline]
+ fn eq(&self, other: &$lhs) -> bool {
+ PartialEq::eq(&self[..], &other[..])
+ }
+ #[inline]
+ fn ne(&self, other: &$lhs) -> bool {
+ PartialEq::ne(&self[..], &other[..])
+ }
+ }
+ };
+}
+
+impl_eq! { String, str }
+impl_eq! { String, &'a str }
+impl_eq! { Cow<'a, str>, str }
+impl_eq! { Cow<'a, str>, &'b str }
+impl_eq! { Cow<'a, str>, String }
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Default for String {
+ /// Creates an empty `String`.
+ #[inline]
+ fn default() -> String {
+ String::new()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Display for String {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // `&**self` derefs `String` to `str`; formatting is delegated so that
+ // `String` and `str` render identically.
+ fmt::Display::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Debug for String {
+ #[inline]
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl hash::Hash for String {
+ #[inline]
+ fn hash<H: hash::Hasher>(&self, hasher: &mut H) {
+ // Hash as `str`, so `String` and `&str` with equal contents hash equally
+ // (required for mixed-key map lookups via `Borrow<str>`).
+ (**self).hash(hasher)
+ }
+}
+
+/// Implements the `+` operator for concatenating two strings.
+///
+/// This consumes the `String` on the left-hand side and re-uses its buffer (growing it if
+/// necessary). This is done to avoid allocating a new `String` and copying the entire contents on
+/// every operation, which would lead to *O*(*n*^2) running time when building an *n*-byte string by
+/// repeated concatenation.
+///
+/// The string on the right-hand side is only borrowed; its contents are copied into the returned
+/// `String`.
+///
+/// # Examples
+///
+/// Concatenating two `String`s takes the first by value and borrows the second:
+///
+/// ```
+/// let a = String::from("hello");
+/// let b = String::from(" world");
+/// let c = a + &b;
+/// // `a` is moved and can no longer be used here.
+/// ```
+///
+/// If you want to keep using the first `String`, you can clone it and append to the clone instead:
+///
+/// ```
+/// let a = String::from("hello");
+/// let b = String::from(" world");
+/// let c = a.clone() + &b;
+/// // `a` is still valid here.
+/// ```
+///
+/// Concatenating `&str` slices can be done by converting the first to a `String`:
+///
+/// ```
+/// let a = "hello";
+/// let b = " world";
+/// let c = a.to_string() + b;
+/// ```
+#[stable(feature = "rust1", since = "1.0.0")]
+impl Add<&str> for String {
+ type Output = String;
+
+ #[inline]
+ fn add(mut self, other: &str) -> String {
+ // Append in place and return the (possibly grown) left-hand buffer.
+ self.push_str(other);
+ self
+ }
+}
+
+/// Implements the `+=` operator for appending to a `String`.
+///
+/// This has the same behavior as the [`push_str`][String::push_str] method.
+#[stable(feature = "stringaddassign", since = "1.12.0")]
+impl AddAssign<&str> for String {
+ #[inline]
+ fn add_assign(&mut self, other: &str) {
+ self.push_str(other);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Index<ops::Range<usize>> for String {
+ type Output = str;
+
+ #[inline]
+ fn index(&self, index: ops::Range<usize>) -> &str {
+ // `self[..]` yields `&str`; the `str` indexing impl does the boundary checks.
+ &self[..][index]
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Index<ops::RangeTo<usize>> for String {
+ type Output = str;
+
+ #[inline]
+ fn index(&self, index: ops::RangeTo<usize>) -> &str {
+ &self[..][index]
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Index<ops::RangeFrom<usize>> for String {
+ type Output = str;
+
+ #[inline]
+ fn index(&self, index: ops::RangeFrom<usize>) -> &str {
+ &self[..][index]
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Index<ops::RangeFull> for String {
+ type Output = str;
+
+ #[inline]
+ fn index(&self, _index: ops::RangeFull) -> &str {
+ // SAFETY: the bytes of a `String` are always valid UTF-8.
+ unsafe { str::from_utf8_unchecked(&self.vec) }
+ }
+}
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl ops::Index<ops::RangeInclusive<usize>> for String {
+ type Output = str;
+
+ #[inline]
+ fn index(&self, index: ops::RangeInclusive<usize>) -> &str {
+ Index::index(&**self, index)
+ }
+}
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl ops::Index<ops::RangeToInclusive<usize>> for String {
+ type Output = str;
+
+ #[inline]
+ fn index(&self, index: ops::RangeToInclusive<usize>) -> &str {
+ Index::index(&**self, index)
+ }
+}
+
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::IndexMut<ops::Range<usize>> for String {
+ #[inline]
+ fn index_mut(&mut self, index: ops::Range<usize>) -> &mut str {
+ // The `str` mutable-indexing impl performs the boundary checks.
+ &mut self[..][index]
+ }
+}
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::IndexMut<ops::RangeTo<usize>> for String {
+ #[inline]
+ fn index_mut(&mut self, index: ops::RangeTo<usize>) -> &mut str {
+ &mut self[..][index]
+ }
+}
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::IndexMut<ops::RangeFrom<usize>> for String {
+ #[inline]
+ fn index_mut(&mut self, index: ops::RangeFrom<usize>) -> &mut str {
+ &mut self[..][index]
+ }
+}
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::IndexMut<ops::RangeFull> for String {
+ #[inline]
+ fn index_mut(&mut self, _index: ops::RangeFull) -> &mut str {
+ // SAFETY: the bytes are valid UTF-8, and `&mut str` only exposes
+ // operations that keep them valid.
+ unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
+ }
+}
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl ops::IndexMut<ops::RangeInclusive<usize>> for String {
+ #[inline]
+ fn index_mut(&mut self, index: ops::RangeInclusive<usize>) -> &mut str {
+ IndexMut::index_mut(&mut **self, index)
+ }
+}
+#[stable(feature = "inclusive_range", since = "1.26.0")]
+impl ops::IndexMut<ops::RangeToInclusive<usize>> for String {
+ #[inline]
+ fn index_mut(&mut self, index: ops::RangeToInclusive<usize>) -> &mut str {
+ IndexMut::index_mut(&mut **self, index)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl ops::Deref for String {
+ type Target = str;
+
+ #[inline]
+ fn deref(&self) -> &str {
+ // SAFETY: `String`'s invariant guarantees `self.vec` holds valid UTF-8.
+ unsafe { str::from_utf8_unchecked(&self.vec) }
+ }
+}
+
+#[stable(feature = "derefmut_for_string", since = "1.3.0")]
+impl ops::DerefMut for String {
+ #[inline]
+ fn deref_mut(&mut self) -> &mut str {
+ // SAFETY: as above; `&mut str` only allows UTF-8-preserving mutation.
+ unsafe { str::from_utf8_unchecked_mut(&mut *self.vec) }
+ }
+}
+
+/// A type alias for [`Infallible`].
+///
+/// This alias exists for backwards compatibility, and may be eventually deprecated.
+///
+/// [`Infallible`]: core::convert::Infallible
+#[stable(feature = "str_parse_error", since = "1.5.0")]
+pub type ParseError = core::convert::Infallible;
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl FromStr for String {
+ type Err = core::convert::Infallible;
+ #[inline]
+ fn from_str(s: &str) -> Result<String, Self::Err> {
+ // Infallible: any `&str` is already valid UTF-8, so this simply allocates a copy.
+ Ok(String::from(s))
+ }
+}
+
+/// A trait for converting a value to a `String`.
+///
+/// This trait is automatically implemented for any type which implements the
+/// [`Display`] trait. As such, `ToString` shouldn't be implemented directly:
+/// [`Display`] should be implemented instead, and you get the `ToString`
+/// implementation for free.
+///
+/// [`Display`]: fmt::Display
+#[stable(feature = "rust1", since = "1.0.0")]
+pub trait ToString {
+ /// Converts the given value to a `String`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let i = 5;
+ /// let five = String::from("5");
+ ///
+ /// assert_eq!(five, i.to_string());
+ /// ```
+ // NOTE(review): `rustc_conversion_suggestion` presumably lets the compiler
+ // suggest `.to_string()` in type-mismatch diagnostics — confirm in rustc docs.
+ #[rustc_conversion_suggestion]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ fn to_string(&self) -> String;
+}
+
+/// # Panics
+///
+/// In this implementation, the `to_string` method panics
+/// if the `Display` implementation returns an error.
+/// This indicates an incorrect `Display` implementation
+/// since `fmt::Write for String` never returns an error itself.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Display + ?Sized> ToString for T {
+ #[inline]
+ // `default` marks this blanket impl as specializable; the impls below
+ // (char / str / Cow<str> / String) override it to skip the formatter.
+ default fn to_string(&self) -> String {
+ use fmt::Write;
+ let mut buf = String::new();
+ buf.write_fmt(format_args!("{}", self))
+ .expect("a Display implementation returned an error unexpectedly");
+ buf.shrink_to_fit();
+ buf
+ }
+}
+
+#[stable(feature = "char_to_string_specialization", since = "1.46.0")]
+impl ToString for char {
+ #[inline]
+ fn to_string(&self) -> String {
+ // A char encodes to at most 4 UTF-8 bytes, so a small stack buffer
+ // suffices and the formatting machinery is avoided entirely.
+ String::from(self.encode_utf8(&mut [0; 4]))
+ }
+}
+
+#[stable(feature = "str_to_string_specialization", since = "1.9.0")]
+impl ToString for str {
+ #[inline]
+ fn to_string(&self) -> String {
+ String::from(self)
+ }
+}
+
+#[stable(feature = "cow_str_to_string_specialization", since = "1.17.0")]
+impl ToString for Cow<'_, str> {
+ #[inline]
+ fn to_string(&self) -> String {
+ // Copies the borrowed or owned slice into a fresh String.
+ self[..].to_owned()
+ }
+}
+
+#[stable(feature = "string_to_string_specialization", since = "1.17.0")]
+impl ToString for String {
+ #[inline]
+ fn to_string(&self) -> String {
+ self.to_owned()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl AsRef<str> for String {
+ #[inline]
+ fn as_ref(&self) -> &str {
+ // Deref coercion turns `&String` into `&str`.
+ self
+ }
+}
+
+#[stable(feature = "string_as_mut", since = "1.43.0")]
+impl AsMut<str> for String {
+ #[inline]
+ fn as_mut(&mut self) -> &mut str {
+ // Deref coercion turns `&mut String` into `&mut str`.
+ self
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl AsRef<[u8]> for String {
+ #[inline]
+ fn as_ref(&self) -> &[u8] {
+ self.as_bytes()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl From<&str> for String {
+ /// Allocates an owned `String` from a borrowed `str`.
+ #[inline]
+ fn from(s: &str) -> String {
+ s.to_owned()
+ }
+}
+
+#[stable(feature = "from_mut_str_for_string", since = "1.44.0")]
+impl From<&mut str> for String {
+ /// Converts a `&mut str` into a `String`.
+ ///
+ /// The result is allocated on the heap.
+ #[inline]
+ fn from(s: &mut str) -> String {
+ s.to_owned()
+ }
+}
+
+#[stable(feature = "from_ref_string", since = "1.35.0")]
+impl From<&String> for String {
+ /// Clones the referenced `String` into a new owned `String`.
+ #[inline]
+ fn from(s: &String) -> String {
+ s.clone()
+ }
+}
+
+// note: test pulls in libstd, which causes errors here
+#[cfg(not(test))]
+#[stable(feature = "string_from_box", since = "1.18.0")]
+impl From<Box<str>> for String {
+ /// Converts the given boxed `str` slice to a `String`.
+ /// It is notable that the `str` slice is owned.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s1: String = String::from("hello world");
+ /// let s2: Box<str> = s1.into_boxed_str();
+ /// let s3: String = String::from(s2);
+ ///
+ /// assert_eq!("hello world", s3)
+ /// ```
+ fn from(s: Box<str>) -> String {
+ s.into_string()
+ }
+}
+
+#[stable(feature = "box_from_str", since = "1.20.0")]
+impl From<String> for Box<str> {
+ /// Converts the given `String` to a boxed `str` slice that is owned.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s1: String = String::from("hello world");
+ /// let s2: Box<str> = Box::from(s1);
+ /// let s3: String = String::from(s2);
+ ///
+ /// assert_eq!("hello world", s3)
+ /// ```
+ fn from(s: String) -> Box<str> {
+ s.into_boxed_str()
+ }
+}
+
+// Conversions between `String` and `Cow<str>`: borrowed inputs produce
+// `Cow::Borrowed` (no allocation); owned inputs produce `Cow::Owned`.
+#[stable(feature = "string_from_cow_str", since = "1.14.0")]
+impl<'a> From<Cow<'a, str>> for String {
+ fn from(s: Cow<'a, str>) -> String {
+ // Clones only if the Cow is currently borrowed.
+ s.into_owned()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> From<&'a str> for Cow<'a, str> {
+ #[inline]
+ fn from(s: &'a str) -> Cow<'a, str> {
+ Cow::Borrowed(s)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a> From<String> for Cow<'a, str> {
+ #[inline]
+ fn from(s: String) -> Cow<'a, str> {
+ Cow::Owned(s)
+ }
+}
+
+#[stable(feature = "cow_from_string_ref", since = "1.28.0")]
+impl<'a> From<&'a String> for Cow<'a, str> {
+ #[inline]
+ fn from(s: &'a String) -> Cow<'a, str> {
+ Cow::Borrowed(s.as_str())
+ }
+}
+
+// `FromIterator` for `Cow<str>` always collects into an owned `String` first.
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a> FromIterator<char> for Cow<'a, str> {
+ fn from_iter<I: IntoIterator<Item = char>>(it: I) -> Cow<'a, str> {
+ Cow::Owned(FromIterator::from_iter(it))
+ }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a, 'b> FromIterator<&'b str> for Cow<'a, str> {
+ fn from_iter<I: IntoIterator<Item = &'b str>>(it: I) -> Cow<'a, str> {
+ Cow::Owned(FromIterator::from_iter(it))
+ }
+}
+
+#[stable(feature = "cow_str_from_iter", since = "1.12.0")]
+impl<'a> FromIterator<String> for Cow<'a, str> {
+ fn from_iter<I: IntoIterator<Item = String>>(it: I) -> Cow<'a, str> {
+ Cow::Owned(FromIterator::from_iter(it))
+ }
+}
+
+#[stable(feature = "from_string_for_vec_u8", since = "1.14.0")]
+impl From<String> for Vec<u8> {
+ /// Converts the given `String` to a vector `Vec` that holds values of type `u8`.
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// let s1 = String::from("hello world");
+ /// let v1 = Vec::from(s1);
+ ///
+ /// for b in v1 {
+ /// println!("{}", b);
+ /// }
+ /// ```
+ fn from(string: String) -> Vec<u8> {
+ // No copy: hands back the String's underlying byte buffer.
+ string.into_bytes()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl fmt::Write for String {
+ // Appending to a String cannot fail, so both methods always return `Ok(())`.
+ #[inline]
+ fn write_str(&mut self, s: &str) -> fmt::Result {
+ self.push_str(s);
+ Ok(())
+ }
+
+ #[inline]
+ fn write_char(&mut self, c: char) -> fmt::Result {
+ self.push(c);
+ Ok(())
+ }
+}
+
+/// A draining iterator for `String`.
+///
+/// This struct is created by the [`drain`] method on [`String`]. See its
+/// documentation for more.
+///
+/// [`drain`]: String::drain
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<'a> {
+ /// Will be used as &'a mut String in the destructor
+ string: *mut String,
+ /// Start of part to remove
+ start: usize,
+ /// End of part to remove
+ end: usize,
+ /// Current remaining range to remove
+ iter: Chars<'a>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl fmt::Debug for Drain<'_> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.pad("Drain { .. }")
+ }
+}
+
+// SAFETY: `string` is semantically a `&'a mut String` (see the field doc), and
+// an exclusive reference is Send/Sync when its pointee is; the raw pointer is
+// only what prevents these impls from being derived automatically.
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl Sync for Drain<'_> {}
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl Send for Drain<'_> {}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl Drop for Drain<'_> {
+ fn drop(&mut self) {
+ // SAFETY: `self.string` points to the String this Drain borrows, which
+ // outlives the Drain — NOTE(review): constructed in String::drain, not
+ // visible here; confirm the borrow invariant there.
+ unsafe {
+ // Use Vec::drain. "Reaffirm" the bounds checks to avoid
+ // panic code being inserted again.
+ let self_vec = (*self.string).as_mut_vec();
+ if self.start <= self.end && self.end <= self_vec.len() {
+ self_vec.drain(self.start..self.end);
+ }
+ }
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl Iterator for Drain<'_> {
+ type Item = char;
+
+ #[inline]
+ fn next(&mut self) -> Option<char> {
+ // Iteration is delegated entirely to the underlying `Chars`.
+ self.iter.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+
+ #[inline]
+ fn last(mut self) -> Option<char> {
+ // The last remaining char is simply the back of the double-ended iterator.
+ self.next_back()
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl DoubleEndedIterator for Drain<'_> {
+ #[inline]
+ fn next_back(&mut self) -> Option<char> {
+ self.iter.next_back()
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl FusedIterator for Drain<'_> {}
+
+#[stable(feature = "from_char_for_string", since = "1.46.0")]
+impl From<char> for String {
+ /// Allocates an owned `String` holding the single character `c`.
+ #[inline]
+ fn from(c: char) -> Self {
+ c.to_string()
+ }
+}
diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs
new file mode 100644
index 00000000000..8a5f1ee5076
--- /dev/null
+++ b/library/alloc/src/sync.rs
@@ -0,0 +1,2294 @@
+#![stable(feature = "rust1", since = "1.0.0")]
+
+//! Thread-safe reference-counting pointers.
+//!
+//! See the [`Arc<T>`][arc] documentation for more details.
+//!
+//! [arc]: struct.Arc.html
+
+use core::any::Any;
+use core::borrow;
+use core::cmp::Ordering;
+use core::convert::{From, TryFrom};
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::intrinsics::abort;
+use core::iter;
+use core::marker::{PhantomData, Unpin, Unsize};
+use core::mem::{self, align_of_val, size_of_val};
+use core::ops::{CoerceUnsized, Deref, DispatchFromDyn, Receiver};
+use core::pin::Pin;
+use core::ptr::{self, NonNull};
+use core::slice::from_raw_parts_mut;
+use core::sync::atomic;
+use core::sync::atomic::Ordering::{Acquire, Relaxed, Release, SeqCst};
+
+use crate::alloc::{box_free, handle_alloc_error, AllocInit, AllocRef, Global, Layout};
+use crate::borrow::{Cow, ToOwned};
+use crate::boxed::Box;
+use crate::rc::is_dangling;
+use crate::string::String;
+use crate::vec::Vec;
+
+#[cfg(test)]
+mod tests;
+
+/// A soft limit on the amount of references that may be made to an `Arc`.
+///
+/// Going above this limit will abort your program (although not
+/// necessarily) at _exactly_ `MAX_REFCOUNT + 1` references.
+// `isize::MAX` leaves headroom so an overflowing increment can be detected
+// (and the process aborted) before the counter wraps around.
+const MAX_REFCOUNT: usize = (isize::MAX) as usize;
+
+// On normal builds, an acquire fence after observing a counter decrement is
+// sufficient to synchronize with the releasing decrements.
+#[cfg(not(sanitize = "thread"))]
+macro_rules! acquire {
+ ($x:expr) => {
+ atomic::fence(Acquire)
+ };
+}
+
+// ThreadSanitizer does not support memory fences. To avoid false positive
+// reports in Arc / Weak implementation use atomic loads for synchronization
+// instead.
+#[cfg(sanitize = "thread")]
+macro_rules! acquire {
+ ($x:expr) => {
+ $x.load(Acquire)
+ };
+}
+
+/// A thread-safe reference-counting pointer. 'Arc' stands for 'Atomically
+/// Reference Counted'.
+///
+/// The type `Arc<T>` provides shared ownership of a value of type `T`,
+/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces
+/// a new `Arc` instance, which points to the same allocation on the heap as the
+/// source `Arc`, while increasing a reference count. When the last `Arc`
+/// pointer to a given allocation is destroyed, the value stored in that allocation (often
+/// referred to as "inner value") is also dropped.
+///
+/// Shared references in Rust disallow mutation by default, and `Arc` is no
+/// exception: you cannot generally obtain a mutable reference to something
+/// inside an `Arc`. If you need to mutate through an `Arc`, use
+/// [`Mutex`][mutex], [`RwLock`][rwlock], or one of the [`Atomic`][atomic]
+/// types.
+///
+/// ## Thread Safety
+///
+/// Unlike [`Rc<T>`], `Arc<T>` uses atomic operations for its reference
+/// counting. This means that it is thread-safe. The disadvantage is that
+/// atomic operations are more expensive than ordinary memory accesses. If you
+/// are not sharing reference-counted allocations between threads, consider using
+/// [`Rc<T>`] for lower overhead. [`Rc<T>`] is a safe default, because the
+/// compiler will catch any attempt to send an [`Rc<T>`] between threads.
+/// However, a library might choose `Arc<T>` in order to give library consumers
+/// more flexibility.
+///
+/// `Arc<T>` will implement [`Send`] and [`Sync`] as long as the `T` implements
+/// [`Send`] and [`Sync`]. Why can't you put a non-thread-safe type `T` in an
+/// `Arc<T>` to make it thread-safe? This may be a bit counter-intuitive at
+/// first: after all, isn't the point of `Arc<T>` thread safety? The key is
+/// this: `Arc<T>` makes it thread safe to have multiple ownership of the same
+/// data, but it doesn't add thread safety to its data. Consider
+/// `Arc<`[`RefCell<T>`]`>`. [`RefCell<T>`] isn't [`Sync`], and if `Arc<T>` was always
+/// [`Send`], `Arc<`[`RefCell<T>`]`>` would be as well. But then we'd have a problem:
+/// [`RefCell<T>`] is not thread safe; it keeps track of the borrowing count using
+/// non-atomic operations.
+///
+/// In the end, this means that you may need to pair `Arc<T>` with some sort of
+/// [`std::sync`] type, usually [`Mutex<T>`][mutex].
+///
+/// ## Breaking cycles with `Weak`
+///
+/// The [`downgrade`][downgrade] method can be used to create a non-owning
+/// [`Weak`][weak] pointer. A [`Weak`][weak] pointer can be [`upgrade`][upgrade]d
+/// to an `Arc`, but this will return [`None`] if the value stored in the allocation has
+/// already been dropped. In other words, `Weak` pointers do not keep the value
+/// inside the allocation alive; however, they *do* keep the allocation
+/// (the backing store for the value) alive.
+///
+/// A cycle between `Arc` pointers will never be deallocated. For this reason,
+/// [`Weak`][weak] is used to break cycles. For example, a tree could have
+/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
+/// pointers from children back to their parents.
+///
+/// # Cloning references
+///
+/// Creating a new reference from an existing reference counted pointer is done using the
+/// `Clone` trait implemented for [`Arc<T>`][arc] and [`Weak<T>`][weak].
+///
+/// ```
+/// use std::sync::Arc;
+/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
+/// // The two syntaxes below are equivalent.
+/// let a = foo.clone();
+/// let b = Arc::clone(&foo);
+/// // a, b, and foo are all Arcs that point to the same memory location
+/// ```
+///
+/// ## `Deref` behavior
+///
+/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
+/// so you can call `T`'s methods on a value of type `Arc<T>`. To avoid name
+/// clashes with `T`'s methods, the methods of `Arc<T>` itself are associated
+/// functions, called using function-like syntax:
+///
+/// ```
+/// use std::sync::Arc;
+/// let my_arc = Arc::new(());
+///
+/// Arc::downgrade(&my_arc);
+/// ```
+///
+/// [`Weak<T>`][weak] does not auto-dereference to `T`, because the inner value may have
+/// already been dropped.
+///
+/// [arc]: struct.Arc.html
+/// [weak]: struct.Weak.html
+/// [`Rc<T>`]: ../../std/rc/struct.Rc.html
+/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+/// [mutex]: ../../std/sync/struct.Mutex.html
+/// [rwlock]: ../../std/sync/struct.RwLock.html
+/// [atomic]: ../../std/sync/atomic/index.html
+/// [`Send`]: ../../std/marker/trait.Send.html
+/// [`Sync`]: ../../std/marker/trait.Sync.html
+/// [deref]: ../../std/ops/trait.Deref.html
+/// [downgrade]: struct.Arc.html#method.downgrade
+/// [upgrade]: struct.Weak.html#method.upgrade
+/// [`None`]: ../../std/option/enum.Option.html#variant.None
+/// [`RefCell<T>`]: ../../std/cell/struct.RefCell.html
+/// [`std::sync`]: ../../std/sync/index.html
+/// [`Arc::clone(&from)`]: #method.clone
+///
+/// # Examples
+///
+/// Sharing some immutable data between threads:
+///
+// Note that we **do not** run these tests here. The windows builders get super
+// unhappy if a thread outlives the main thread and then exits at the same time
+// (something deadlocks) so we just avoid this entirely by not running these
+// tests.
+/// ```no_run
+/// use std::sync::Arc;
+/// use std::thread;
+///
+/// let five = Arc::new(5);
+///
+/// for _ in 0..10 {
+/// let five = Arc::clone(&five);
+///
+/// thread::spawn(move || {
+/// println!("{:?}", five);
+/// });
+/// }
+/// ```
+///
+/// Sharing a mutable [`AtomicUsize`]:
+///
+/// [`AtomicUsize`]: ../../std/sync/atomic/struct.AtomicUsize.html
+///
+/// ```no_run
+/// use std::sync::Arc;
+/// use std::sync::atomic::{AtomicUsize, Ordering};
+/// use std::thread;
+///
+/// let val = Arc::new(AtomicUsize::new(5));
+///
+/// for _ in 0..10 {
+/// let val = Arc::clone(&val);
+///
+/// thread::spawn(move || {
+/// let v = val.fetch_add(1, Ordering::SeqCst);
+/// println!("{:?}", v);
+/// });
+/// }
+/// ```
+///
+/// See the [`rc` documentation][rc_examples] for more examples of reference
+/// counting in general.
+///
+/// [rc_examples]: ../../std/rc/index.html#examples
+#[cfg_attr(not(test), rustc_diagnostic_item = "Arc")]
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct Arc<T: ?Sized> {
+ // Pointer to the shared allocation holding both counters and the data.
+ ptr: NonNull<ArcInner<T>>,
+ // Tells the compiler (drop check) that an Arc logically owns an ArcInner<T>.
+ phantom: PhantomData<ArcInner<T>>,
+}
+
+// SAFETY: sending/sharing an Arc requires `T: Sync + Send` — the pointee is
+// shared across threads (`Sync`) and the last Arc may drop it on any thread (`Send`).
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
+
+// Allow `Arc<T>` -> `Arc<dyn Trait>` / `Arc<[T; N]>` -> `Arc<[T]>` coercions.
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
+
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Arc<U>> for Arc<T> {}
+
+impl<T: ?Sized> Arc<T> {
+ // Internal constructor from an already-valid inner pointer.
+ fn from_inner(ptr: NonNull<ArcInner<T>>) -> Self {
+ Self { ptr, phantom: PhantomData }
+ }
+
+ // Caller must guarantee `ptr` is non-null and points to a valid ArcInner<T>.
+ unsafe fn from_ptr(ptr: *mut ArcInner<T>) -> Self {
+ // SAFETY: non-nullness is part of this function's caller contract.
+ unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
+ }
+}
+
+/// `Weak` is a version of [`Arc`] that holds a non-owning reference to the
+/// managed allocation. The allocation is accessed by calling [`upgrade`] on the `Weak`
+/// pointer, which returns an [`Option`]`<`[`Arc`]`<T>>`.
+///
+/// Since a `Weak` reference does not count towards ownership, it will not
+/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
+/// guarantees about the value still being present. Thus it may return [`None`]
+/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
+/// itself (the backing store) from being deallocated.
+///
+/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
+/// managed by [`Arc`] without preventing its inner value from being dropped. It is also used to
+/// prevent circular references between [`Arc`] pointers, since mutual owning references
+/// would never allow either [`Arc`] to be dropped. For example, a tree could
+/// have strong [`Arc`] pointers from parent nodes to children, and `Weak`
+/// pointers from children back to their parents.
+///
+/// The typical way to obtain a `Weak` pointer is to call [`Arc::downgrade`].
+///
+/// [`Arc`]: struct.Arc.html
+/// [`Arc::downgrade`]: struct.Arc.html#method.downgrade
+/// [`upgrade`]: struct.Weak.html#method.upgrade
+/// [`Option`]: ../../std/option/enum.Option.html
+/// [`None`]: ../../std/option/enum.Option.html#variant.None
+#[stable(feature = "arc_weak", since = "1.4.0")]
+pub struct Weak<T: ?Sized> {
+ // This is a `NonNull` to allow optimizing the size of this type in enums,
+ // but it is not necessarily a valid pointer.
+ // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
+ // to allocate space on the heap. That's not a value a real pointer
+ // will ever have because RcBox has alignment at least 2.
+ // This is only possible when `T: Sized`; unsized `T` never dangle.
+ ptr: NonNull<ArcInner<T>>,
+}
+
+// SAFETY: same reasoning as for Arc — a Weak can move between / be shared by
+// threads only when the pointee itself is both `Sync` and `Send`.
+#[stable(feature = "arc_weak", since = "1.4.0")]
+unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
+#[stable(feature = "arc_weak", since = "1.4.0")]
+unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
+
+// Unsizing coercions, mirroring the impls on Arc above.
+#[unstable(feature = "coerce_unsized", issue = "27732")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
+#[unstable(feature = "dispatch_from_dyn", issue = "none")]
+impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // The value may already be gone (or the pointer dangling), so a fixed
+ // placeholder is printed rather than attempting an upgrade.
+ write!(f, "(Weak)")
+ }
+}
+
+// This is repr(C) to future-proof against possible field-reordering, which
+// would interfere with otherwise safe [into|from]_raw() of transmutable
+// inner types.
+#[repr(C)]
+struct ArcInner<T: ?Sized> {
+ // Number of strong (owning) `Arc` pointers to this allocation.
+ strong: atomic::AtomicUsize,
+
+ // the value usize::MAX acts as a sentinel for temporarily "locking" the
+ // ability to upgrade weak pointers or downgrade strong ones; this is used
+ // to avoid races in `make_mut` and `get_mut`.
+ weak: atomic::AtomicUsize,
+
+ data: T,
+}
+
+// SAFETY: ArcInner is only ever accessed through Arc/Weak, whose own
+// Send/Sync impls impose the same `T: Sync + Send` bounds.
+unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> {}
+unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> {}
+
+impl<T> Arc<T> {
+ /// Constructs a new `Arc<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn new(data: T) -> Arc<T> {
+ // Start the weak pointer count as 1 which is the weak pointer that's
+ // held by all the strong pointers (kinda), see std/rc.rs for more info
+ let x: Box<_> = box ArcInner {
+ strong: atomic::AtomicUsize::new(1),
+ weak: atomic::AtomicUsize::new(1),
+ data,
+ };
+ // Leak the Box: ownership of the allocation transfers to the Arc.
+ Self::from_inner(Box::leak(x).into())
+ }
+
+ /// Constructs a new `Arc` with uninitialized contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let mut five = Arc::<u32>::new_uninit();
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_uninit() -> Arc<mem::MaybeUninit<T>> {
+ // SAFETY: allocate_for_layout produces a valid ArcInner allocation for
+ // `T`'s layout; the payload is left uninitialized, which is exactly
+ // what `MaybeUninit<T>` represents.
+ unsafe {
+ Arc::from_ptr(Arc::allocate_for_layout(Layout::new::<T>(), |mem| {
+ mem as *mut ArcInner<mem::MaybeUninit<T>>
+ }))
+ }
+ }
+
+ /// Constructs a new `Arc` with uninitialized contents, with the memory
+ /// being filled with `0` bytes.
+ ///
+ /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and incorrect usage
+ /// of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let zero = Arc::<u32>::new_zeroed();
+ /// let zero = unsafe { zero.assume_init() };
+ ///
+ /// assert_eq!(*zero, 0)
+ /// ```
+ ///
+ /// [zeroed]: ../../std/mem/union.MaybeUninit.html#method.zeroed
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_zeroed() -> Arc<mem::MaybeUninit<T>> {
+ // SAFETY: the Arc was just created, so we hold the only reference, and
+ // writing zero bytes into a `MaybeUninit<T>` payload is always allowed.
+ unsafe {
+ let mut uninit = Self::new_uninit();
+ ptr::write_bytes::<T>(Arc::get_mut_unchecked(&mut uninit).as_mut_ptr(), 0, 1);
+ uninit
+ }
+ }
+
+ /// Constructs a new `Pin<Arc<T>>`. If `T` does not implement `Unpin`, then
+ /// `data` will be pinned in memory and unable to be moved.
+ #[stable(feature = "pin", since = "1.33.0")]
+ pub fn pin(data: T) -> Pin<Arc<T>> {
+ // SAFETY: the data is heap-allocated behind the Arc and is never moved
+ // out of the allocation, so pinning it is sound.
+ unsafe { Pin::new_unchecked(Arc::new(data)) }
+ }
+
+ /// Returns the inner value, if the `Arc` has exactly one strong reference.
+ ///
+ /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was
+ /// passed in.
+ ///
+ /// This will succeed even if there are outstanding weak references.
+ ///
+ /// [result]: ../../std/result/enum.Result.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new(3);
+ /// assert_eq!(Arc::try_unwrap(x), Ok(3));
+ ///
+ /// let x = Arc::new(4);
+ /// let _y = Arc::clone(&x);
+ /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_unique", since = "1.4.0")]
+ pub fn try_unwrap(this: Self) -> Result<T, Self> {
+ // Try to claim the sole strong reference (1 -> 0). Relaxed suffices
+ // here because the success path is ordered by the `acquire!` below.
+ if this.inner().strong.compare_exchange(1, 0, Relaxed, Relaxed).is_err() {
+ return Err(this);
+ }
+
+ acquire!(this.inner().strong);
+
+ // SAFETY: the strong count was just brought to 0 by us, so no other
+ // strong reference exists and the data can be moved out exactly once.
+ unsafe {
+ let elem = ptr::read(&this.ptr.as_ref().data);
+
+ // Make a weak pointer to clean up the implicit strong-weak reference
+ let _weak = Weak { ptr: this.ptr };
+ mem::forget(this);
+
+ Ok(elem)
+ }
+ }
+}
+
+impl<T> Arc<[T]> {
+ /// Constructs a new reference-counted slice with uninitialized contents.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+ /// Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+ /// Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ pub fn new_uninit_slice(len: usize) -> Arc<[mem::MaybeUninit<T>]> {
+ // SAFETY: allocate_for_slice returns a valid ArcInner for a slice of
+ // `len` elements; leaving them uninitialized matches `MaybeUninit`.
+ unsafe { Arc::from_ptr(Arc::allocate_for_slice(len)) }
+ }
+}
+
+impl<T> Arc<mem::MaybeUninit<T>> {
+ /// Converts to `Arc<T>`.
+ ///
+ /// # Safety
+ ///
+ /// As with [`MaybeUninit::assume_init`],
+ /// it is up to the caller to guarantee that the inner value
+ /// really is in an initialized state.
+ /// Calling this when the content is not yet fully initialized
+ /// causes immediate undefined behavior.
+ ///
+ /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let mut five = Arc::<u32>::new_uninit();
+ ///
+ /// let five = unsafe {
+ /// // Deferred initialization:
+ /// Arc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
+ ///
+ /// five.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*five, 5)
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub unsafe fn assume_init(self) -> Arc<T> {
+ // ManuallyDrop keeps the refcount from being decremented while the same
+ // allocation is re-interpreted as an `ArcInner<T>` via the pointer cast.
+ Arc::from_inner(mem::ManuallyDrop::new(self).ptr.cast())
+ }
+}
+
+impl<T> Arc<[mem::MaybeUninit<T>]> {
+ /// Converts to `Arc<[T]>`.
+ ///
+ /// # Safety
+ ///
+ /// As with [`MaybeUninit::assume_init`],
+ /// it is up to the caller to guarantee that the inner value
+ /// really is in an initialized state.
+ /// Calling this when the content is not yet fully initialized
+ /// causes immediate undefined behavior.
+ ///
+ /// [`MaybeUninit::assume_init`]: ../../std/mem/union.MaybeUninit.html#method.assume_init
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(new_uninit)]
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let mut values = Arc::<[u32]>::new_uninit_slice(3);
+ ///
+ /// let values = unsafe {
+ /// // Deferred initialization:
+ /// Arc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
+ /// Arc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
+ /// Arc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
+ ///
+ /// values.assume_init()
+ /// };
+ ///
+ /// assert_eq!(*values, [1, 2, 3])
+ /// ```
+ #[unstable(feature = "new_uninit", issue = "63291")]
+ #[inline]
+ pub unsafe fn assume_init(self) -> Arc<[T]> {
+ // SAFETY: `MaybeUninit<T>` and `T` have identical layout; ManuallyDrop
+ // prevents a refcount decrement while the pointer is re-typed.
+ unsafe { Arc::from_ptr(mem::ManuallyDrop::new(self).ptr.as_ptr() as _) }
+ }
+}
+
+impl<T: ?Sized> Arc<T> {
+ /// Consumes the `Arc`, returning the wrapped pointer.
+ ///
+ /// To avoid a memory leak the pointer must be converted back to an `Arc` using
+ /// [`Arc::from_raw`][from_raw].
+ ///
+ /// [from_raw]: struct.Arc.html#method.from_raw
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new("hello".to_owned());
+ /// let x_ptr = Arc::into_raw(x);
+ /// assert_eq!(unsafe { &*x_ptr }, "hello");
+ /// ```
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub fn into_raw(this: Self) -> *const T {
+ let ptr = Self::as_ptr(&this);
+ mem::forget(this);
+ ptr
+ }
+
+ /// Provides a raw pointer to the data.
+ ///
+ /// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for
+ /// as long as there are strong counts in the `Arc`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new("hello".to_owned());
+ /// let y = Arc::clone(&x);
+ /// let x_ptr = Arc::as_ptr(&x);
+ /// assert_eq!(x_ptr, Arc::as_ptr(&y));
+ /// assert_eq!(unsafe { &*x_ptr }, "hello");
+ /// ```
+ #[stable(feature = "rc_as_ptr", since = "1.45.0")]
+ pub fn as_ptr(this: &Self) -> *const T {
+ let ptr: *mut ArcInner<T> = NonNull::as_ptr(this.ptr);
+
+ // SAFETY: This cannot go through Deref::deref or RcBoxPtr::inner because
+ // this is required to retain raw/mut provenance such that e.g. `get_mut` can
+ // write through the pointer after the Rc is recovered through `from_raw`.
+ unsafe { &raw const (*ptr).data }
+ }
+
+ /// Constructs an `Arc<T>` from a raw pointer.
+ ///
+ /// The raw pointer must have been previously returned by a call to
+ /// [`Arc<U>::into_raw`][into_raw] where `U` must have the same size and
+ /// alignment as `T`. This is trivially true if `U` is `T`.
+ /// Note that if `U` is not `T` but has the same size and alignment, this is
+ /// basically like transmuting references of different types. See
+ /// [`mem::transmute`][transmute] for more information on what
+ /// restrictions apply in this case.
+ ///
+ /// The user of `from_raw` has to make sure a specific value of `T` is only
+ /// dropped once.
+ ///
+ /// This function is unsafe because improper use may lead to memory unsafety,
+ /// even if the returned `Arc<T>` is never accessed.
+ ///
+ /// [into_raw]: struct.Arc.html#method.into_raw
+ /// [transmute]: ../../std/mem/fn.transmute.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new("hello".to_owned());
+ /// let x_ptr = Arc::into_raw(x);
+ ///
+ /// unsafe {
+ /// // Convert back to an `Arc` to prevent leak.
+ /// let x = Arc::from_raw(x_ptr);
+ /// assert_eq!(&*x, "hello");
+ ///
+ /// // Further calls to `Arc::from_raw(x_ptr)` would be memory-unsafe.
+ /// }
+ ///
+ /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
+ /// ```
+ #[stable(feature = "rc_raw", since = "1.17.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ unsafe {
+ // Distance in bytes from the start of the ArcInner allocation to the
+ // `data` field (header size plus any alignment padding for `T`).
+ let offset = data_offset(ptr);
+
+ // Reverse the offset to find the original ArcInner.
+ let fake_ptr = ptr as *mut ArcInner<T>;
+ let arc_ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+
+ Self::from_ptr(arc_ptr)
+ }
+ }
+
+ /// Consumes the `Arc`, returning the wrapped pointer as `NonNull<T>`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(rc_into_raw_non_null)]
+ /// #![allow(deprecated)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let x = Arc::new("hello".to_owned());
+ /// let ptr = Arc::into_raw_non_null(x);
+ /// let deref = unsafe { ptr.as_ref() };
+ /// assert_eq!(deref, "hello");
+ /// ```
+ #[unstable(feature = "rc_into_raw_non_null", issue = "47336")]
+ #[rustc_deprecated(since = "1.44.0", reason = "use `Arc::into_raw` instead")]
+ #[inline]
+ pub fn into_raw_non_null(this: Self) -> NonNull<T> {
+ // Deprecated thin wrapper: like `into_raw`, this leaks one strong count
+ // into the returned pointer; only the return type differs.
+ // safe because Arc guarantees its pointer is non-null
+ unsafe { NonNull::new_unchecked(Arc::into_raw(this) as *mut _) }
+ }
+
+ /// Creates a new [`Weak`][weak] pointer to this allocation.
+ ///
+ /// [weak]: struct.Weak.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// let weak_five = Arc::downgrade(&five);
+ /// ```
+ #[stable(feature = "arc_weak", since = "1.4.0")]
+ pub fn downgrade(this: &Self) -> Weak<T> {
+ // This Relaxed is OK because we're checking the value in the CAS
+ // below.
+ let mut cur = this.inner().weak.load(Relaxed);
+
+ loop {
+ // check if the weak counter is currently "locked"; if so, spin.
+ // (`is_unique` temporarily stores usize::MAX in `weak` as a lock.)
+ if cur == usize::MAX {
+ cur = this.inner().weak.load(Relaxed);
+ continue;
+ }
+
+ // NOTE: this code currently ignores the possibility of overflow
+ // into usize::MAX; in general both Rc and Arc need to be adjusted
+ // to deal with overflow.
+
+ // Unlike with Clone(), we need this to be an Acquire read to
+ // synchronize with the write coming from `is_unique`, so that the
+ // events prior to that write happen before this read.
+ match this.inner().weak.compare_exchange_weak(cur, cur + 1, Acquire, Relaxed) {
+ Ok(_) => {
+ // Make sure we do not create a dangling Weak
+ debug_assert!(!is_dangling(this.ptr));
+ return Weak { ptr: this.ptr };
+ }
+ Err(old) => cur = old,
+ }
+ }
+ }
+
+ /// Gets the number of [`Weak`][weak] pointers to this allocation.
+ ///
+ /// [weak]: struct.Weak.html
+ ///
+ /// # Safety
+ ///
+ /// This method by itself is safe, but using it correctly requires extra care.
+ /// Another thread can change the weak count at any time,
+ /// including potentially between calling this method and acting on the result.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ /// let _weak_five = Arc::downgrade(&five);
+ ///
+ /// // This assertion is deterministic because we haven't shared
+ /// // the `Arc` or `Weak` between threads.
+ /// assert_eq!(1, Arc::weak_count(&five));
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_counts", since = "1.15.0")]
+ pub fn weak_count(this: &Self) -> usize {
+ let cnt = this.inner().weak.load(SeqCst)
+ // If the weak count is currently locked, the value of the
+ // count was 0 just before taking the lock.
+ // The `- 1` discounts the implicit weak reference collectively
+ // held by all strong pointers (see `drop_slow`).
+ if cnt == usize::MAX { 0 } else { cnt - 1 }
+ }
+
+ /// Gets the number of strong (`Arc`) pointers to this allocation.
+ ///
+ /// # Safety
+ ///
+ /// This method by itself is safe, but using it correctly requires extra care.
+ /// Another thread can change the strong count at any time,
+ /// including potentially between calling this method and acting on the result.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ /// let _also_five = Arc::clone(&five);
+ ///
+ /// // This assertion is deterministic because we haven't shared
+ /// // the `Arc` between threads.
+ /// assert_eq!(2, Arc::strong_count(&five));
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_counts", since = "1.15.0")]
+ pub fn strong_count(this: &Self) -> usize {
+ // A plain (SeqCst) load: the value is only a snapshot, see # Safety above.
+ this.inner().strong.load(SeqCst)
+ }
+
+ /// Increments the strong reference count on the `Arc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Arc::into_raw`, and the
+ /// associated `Arc` instance must be valid (i.e. the strong count must be at
+ /// least 1) for the duration of this method.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(arc_mutate_strong_count)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Arc::into_raw(five);
+ /// Arc::incr_strong_count(ptr);
+ ///
+ /// // This assertion is deterministic because we haven't shared
+ /// // the `Arc` between threads.
+ /// let five = Arc::from_raw(ptr);
+ /// assert_eq!(2, Arc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
+ pub unsafe fn incr_strong_count(ptr: *const T) {
+ // Retain Arc, but don't touch refcount by wrapping in ManuallyDrop
+ let arc = unsafe { mem::ManuallyDrop::new(Arc::<T>::from_raw(ptr)) };
+ // Now increase refcount, but don't drop new refcount either
+ // (net effect of clone-inside-ManuallyDrop: strong count += 1).
+ let _arc_clone: mem::ManuallyDrop<_> = arc.clone();
+ }
+
+ /// Decrements the strong reference count on the `Arc<T>` associated with the
+ /// provided pointer by one.
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have been obtained through `Arc::into_raw`, and the
+ /// associated `Arc` instance must be valid (i.e. the strong count must be at
+ /// least 1) when invoking this method. This method can be used to release the final
+ /// `Arc` and backing storage, but **should not** be called after the final `Arc` has been
+ /// released.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(arc_mutate_strong_count)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// unsafe {
+ /// let ptr = Arc::into_raw(five);
+ /// Arc::incr_strong_count(ptr);
+ ///
+ /// // Those assertions are deterministic because we haven't shared
+ /// // the `Arc` between threads.
+ /// let five = Arc::from_raw(ptr);
+ /// assert_eq!(2, Arc::strong_count(&five));
+ /// Arc::decr_strong_count(ptr);
+ /// assert_eq!(1, Arc::strong_count(&five));
+ /// }
+ /// ```
+ #[inline]
+ #[unstable(feature = "arc_mutate_strong_count", issue = "71983")]
+ pub unsafe fn decr_strong_count(ptr: *const T) {
+ // Reconstituting the Arc and dropping it runs the normal Drop path,
+ // which performs the decrement (and frees everything if it was the last).
+ unsafe { mem::drop(Arc::from_raw(ptr)) };
+ }
+
+ #[inline]
+ // Shared borrow of the heap-allocated `ArcInner` (counts + data).
+ fn inner(&self) -> &ArcInner<T> {
+ // This unsafety is ok because while this arc is alive we're guaranteed
+ // that the inner pointer is valid. Furthermore, we know that the
+ // `ArcInner` structure itself is `Sync` because the inner data is
+ // `Sync` as well, so we're ok loaning out an immutable pointer to these
+ // contents.
+ unsafe { self.ptr.as_ref() }
+ }
+
+ // Non-inlined part of `drop`.
+ //
+ // Two-phase teardown: the `T` value is destroyed here, while the backing
+ // allocation is only freed once the implicit weak reference below (shared
+ // by all strong pointers) brings the weak count to zero.
+ #[inline(never)]
+ unsafe fn drop_slow(&mut self) {
+ // Destroy the data at this time, even though we may not free the box
+ // allocation itself (there may still be weak pointers lying around).
+ unsafe { ptr::drop_in_place(Self::get_mut_unchecked(self)) };
+
+ // Drop the weak ref collectively held by all strong references
+ drop(Weak { ptr: self.ptr });
+ }
+
+ #[inline]
+ #[stable(feature = "ptr_eq", since = "1.17.0")]
+ /// Returns `true` if the two `Arc`s point to the same allocation
+ /// (in a vein similar to [`ptr::eq`]).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ /// let same_five = Arc::clone(&five);
+ /// let other_five = Arc::new(5);
+ ///
+ /// assert!(Arc::ptr_eq(&five, &same_five));
+ /// assert!(!Arc::ptr_eq(&five, &other_five));
+ /// ```
+ ///
+ /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
+ pub fn ptr_eq(this: &Self, other: &Self) -> bool {
+ // Compares the `ArcInner` pointers; for unsized `T` these are fat
+ // pointers, so `==` compares the metadata as well — TODO(review) confirm
+ // this is the intended semantics for trait objects.
+ this.ptr.as_ptr() == other.ptr.as_ptr()
+ }
+}
+
+impl<T: ?Sized> Arc<T> {
+ /// Allocates an `ArcInner<T>` with sufficient space for
+ /// a possibly-unsized inner value where the value has the layout provided.
+ ///
+ /// The function `mem_to_arcinner` is called with the data pointer
+ /// and must return back a (potentially fat)-pointer for the `ArcInner<T>`.
+ unsafe fn allocate_for_layout(
+ value_layout: Layout,
+ mem_to_arcinner: impl FnOnce(*mut u8) -> *mut ArcInner<T>,
+ ) -> *mut ArcInner<T> {
+ // Calculate layout using the given value layout.
+ // Previously, layout was calculated on the expression
+ // `&*(ptr as *const ArcInner<T>)`, but this created a misaligned
+ // reference (see #54908).
+ let layout = Layout::new::<ArcInner<()>>().extend(value_layout).unwrap().0.pad_to_align();
+
+ let mem = Global
+ .alloc(layout, AllocInit::Uninitialized)
+ .unwrap_or_else(|_| handle_alloc_error(layout));
+
+ // Initialize the ArcInner
+ let inner = mem_to_arcinner(mem.ptr.as_ptr());
+ debug_assert_eq!(unsafe { Layout::for_value(&*inner) }, layout);
+
+ unsafe {
+ // Both counts start at 1: one strong reference, plus the implicit
+ // weak reference collectively held by all strong pointers.
+ ptr::write(&mut (*inner).strong, atomic::AtomicUsize::new(1));
+ ptr::write(&mut (*inner).weak, atomic::AtomicUsize::new(1));
+ }
+
+ inner
+ }
+
+ /// Allocates an `ArcInner<T>` with sufficient space for an unsized inner value.
+ unsafe fn allocate_for_ptr(ptr: *const T) -> *mut ArcInner<T> {
+ // Allocate for the `ArcInner<T>` using the given value.
+ unsafe {
+ // `set_data_ptr` carries over the (possibly fat) pointer metadata
+ // from `ptr` while pointing at the fresh allocation.
+ Self::allocate_for_layout(Layout::for_value(&*ptr), |mem| {
+ set_data_ptr(ptr as *mut T, mem) as *mut ArcInner<T>
+ })
+ }
+ }
+
+ // Moves the (possibly unsized) value out of a `Box` into a fresh
+ // `ArcInner` allocation, freeing the box without dropping its contents.
+ fn from_box(v: Box<T>) -> Arc<T> {
+ unsafe {
+ let box_unique = Box::into_unique(v);
+ let bptr = box_unique.as_ptr();
+
+ let value_size = size_of_val(&*bptr);
+ let ptr = Self::allocate_for_ptr(bptr);
+
+ // Copy value as bytes
+ ptr::copy_nonoverlapping(
+ bptr as *const T as *const u8,
+ &mut (*ptr).data as *mut _ as *mut u8,
+ value_size,
+ );
+
+ // Free the allocation without dropping its contents
+ box_free(box_unique);
+
+ Self::from_ptr(ptr)
+ }
+ }
+}
+
+impl<T> Arc<[T]> {
+ /// Allocates an `ArcInner<[T]>` with the given length.
+ unsafe fn allocate_for_slice(len: usize) -> *mut ArcInner<[T]> {
+ unsafe {
+ // The closure builds a fat `*mut ArcInner<[T]>` by attaching the
+ // slice length to the freshly allocated data pointer.
+ Self::allocate_for_layout(Layout::array::<T>(len).unwrap(), |mem| {
+ ptr::slice_from_raw_parts_mut(mem as *mut T, len) as *mut ArcInner<[T]>
+ })
+ }
+ }
+}
+
+/// Sets the data pointer of a `?Sized` raw pointer.
+///
+/// For a slice/trait object, this sets the `data` field and leaves the rest
+/// unchanged. For a sized raw pointer, this simply sets the pointer.
+unsafe fn set_data_ptr<T: ?Sized, U>(mut ptr: *mut T, data: *mut U) -> *mut T {
+ unsafe {
+ // Overwrite only the address word of `ptr` in place; any fat-pointer
+ // metadata (length / vtable) stored alongside it is left untouched.
+ ptr::write(&mut ptr as *mut _ as *mut *mut u8, data as *mut u8);
+ }
+ ptr
+}
+
+impl<T> Arc<[T]> {
+ /// Copy elements from slice into newly allocated Arc<\[T\]>
+ ///
+ /// Unsafe because the caller must either take ownership or bind `T: Copy`.
+ unsafe fn copy_from_slice(v: &[T]) -> Arc<[T]> {
+ unsafe {
+ let ptr = Self::allocate_for_slice(v.len());
+
+ // Bitwise copy of all elements; no clones, no drops.
+ ptr::copy_nonoverlapping(v.as_ptr(), &mut (*ptr).data as *mut [T] as *mut T, v.len());
+
+ Self::from_ptr(ptr)
+ }
+ }
+
+ /// Constructs an `Arc<[T]>` from an iterator known to be of a certain size.
+ ///
+ /// Behavior is undefined should the size be wrong.
+ unsafe fn from_iter_exact(iter: impl iter::Iterator<Item = T>, len: usize) -> Arc<[T]> {
+ // Panic guard while cloning T elements.
+ // In the event of a panic, elements that have been written
+ // into the new ArcInner will be dropped, then the memory freed.
+ struct Guard<T> {
+ mem: NonNull<u8>,
+ elems: *mut T,
+ layout: Layout,
+ n_elems: usize,
+ }
+
+ impl<T> Drop for Guard<T> {
+ fn drop(&mut self) {
+ unsafe {
+ // Only the `n_elems` elements written so far are dropped.
+ let slice = from_raw_parts_mut(self.elems, self.n_elems);
+ ptr::drop_in_place(slice);
+
+ Global.dealloc(self.mem.cast(), self.layout);
+ }
+ }
+ }
+
+ unsafe {
+ let ptr = Self::allocate_for_slice(len);
+
+ let mem = ptr as *mut _ as *mut u8;
+ let layout = Layout::for_value(&*ptr);
+
+ // Pointer to first element
+ let elems = &mut (*ptr).data as *mut [T] as *mut T;
+
+ let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
+
+ for (i, item) in iter.enumerate() {
+ ptr::write(elems.add(i), item);
+ // Bump the count only after the write so a panicking `next()`
+ // never causes the guard to drop an uninitialized slot.
+ guard.n_elems += 1;
+ }
+
+ // All clear. Forget the guard so it doesn't free the new ArcInner.
+ mem::forget(guard);
+
+ Self::from_ptr(ptr)
+ }
+ }
+}
+
+/// Specialization trait used for `From<&[T]>`.
+///
+/// Allows `Arc<[T]>` construction to pick a `memcpy` fast path for `T: Copy`
+/// (see the specialized impl below) while cloning element-by-element otherwise.
+trait ArcFromSlice<T> {
+ fn from_slice(slice: &[T]) -> Self;
+}
+
+impl<T: Clone> ArcFromSlice<T> for Arc<[T]> {
+ #[inline]
+ // `default` marks this as the specializable base case: clone each element.
+ default fn from_slice(v: &[T]) -> Self {
+ unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
+ }
+}
+
+impl<T: Copy> ArcFromSlice<T> for Arc<[T]> {
+ #[inline]
+ // Specialization for `T: Copy`: a single bulk byte copy instead of clones.
+ fn from_slice(v: &[T]) -> Self {
+ unsafe { Arc::copy_from_slice(v) }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Clone for Arc<T> {
+ /// Makes a clone of the `Arc` pointer.
+ ///
+ /// This creates another pointer to the same allocation, increasing the
+ /// strong reference count.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// let _ = Arc::clone(&five);
+ /// ```
+ #[inline]
+ fn clone(&self) -> Arc<T> {
+ // Using a relaxed ordering is alright here, as knowledge of the
+ // original reference prevents other threads from erroneously deleting
+ // the object.
+ //
+ // As explained in the [Boost documentation][1], Increasing the
+ // reference counter can always be done with memory_order_relaxed: New
+ // references to an object can only be formed from an existing
+ // reference, and passing an existing reference from one thread to
+ // another must already provide any required synchronization.
+ //
+ // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+ let old_size = self.inner().strong.fetch_add(1, Relaxed);
+
+ // However we need to guard against massive refcounts in case someone
+ // is `mem::forget`ing Arcs. If we don't do this the count can overflow
+ // and users will use-after free. We racily saturate to `isize::MAX` on
+ // the assumption that there aren't ~2 billion threads incrementing
+ // the reference count at once. This branch will never be taken in
+ // any realistic program.
+ //
+ // We abort because such a program is incredibly degenerate, and we
+ // don't care to support it.
+ // (Checked after the fetch_add: the count may briefly exceed
+ // MAX_REFCOUNT before the abort, hence "racily saturate".)
+ if old_size > MAX_REFCOUNT {
+ abort();
+ }
+
+ Self::from_inner(self.ptr)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> Deref for Arc<T> {
+ type Target = T;
+
+ #[inline]
+ // `*arc` borrows the shared value; validity is guaranteed while `self` lives.
+ fn deref(&self) -> &T {
+ &self.inner().data
+ }
+}
+
+#[unstable(feature = "receiver_trait", issue = "none")]
+// Marker impl: lets `self: Arc<Self>` be used as a method receiver.
+impl<T: ?Sized> Receiver for Arc<T> {}
+
+impl<T: Clone> Arc<T> {
+ /// Makes a mutable reference into the given `Arc`.
+ ///
+ /// If there are other `Arc` or [`Weak`][weak] pointers to the same allocation,
+ /// then `make_mut` will create a new allocation and invoke [`clone`][clone] on the inner value
+ /// to ensure unique ownership. This is also referred to as clone-on-write.
+ ///
+ /// Note that this differs from the behavior of [`Rc::make_mut`] which disassociates
+ /// any remaining `Weak` pointers.
+ ///
+ /// See also [`get_mut`][get_mut], which will fail rather than cloning.
+ ///
+ /// [weak]: struct.Weak.html
+ /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+ /// [get_mut]: struct.Arc.html#method.get_mut
+ /// [`Rc::make_mut`]: ../rc/struct.Rc.html#method.make_mut
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let mut data = Arc::new(5);
+ ///
+ /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
+ /// let mut other_data = Arc::clone(&data); // Won't clone inner data
+ /// *Arc::make_mut(&mut data) += 1; // Clones inner data
+ /// *Arc::make_mut(&mut data) += 1; // Won't clone anything
+ /// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything
+ ///
+ /// // Now `data` and `other_data` point to different allocations.
+ /// assert_eq!(*data, 8);
+ /// assert_eq!(*other_data, 12);
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_unique", since = "1.4.0")]
+ pub fn make_mut(this: &mut Self) -> &mut T {
+ // Note that we hold both a strong reference and a weak reference.
+ // Thus, releasing our strong reference only will not, by itself, cause
+ // the memory to be deallocated.
+ //
+ // Use Acquire to ensure that we see any writes to `weak` that happen
+ // before release writes (i.e., decrements) to `strong`. Since we hold a
+ // weak count, there's no chance the ArcInner itself could be
+ // deallocated.
+ //
+ // The CAS 1 -> 0 tentatively "takes" sole strong ownership; the three
+ // branches below handle: (1) not unique, (2) unique strong but weak
+ // refs remain, (3) fully unique.
+ if this.inner().strong.compare_exchange(1, 0, Acquire, Relaxed).is_err() {
+ // Another strong pointer exists; clone
+ *this = Arc::new((**this).clone());
+ } else if this.inner().weak.load(Relaxed) != 1 {
+ // Relaxed suffices in the above because this is fundamentally an
+ // optimization: we are always racing with weak pointers being
+ // dropped. Worst case, we end up allocated a new Arc unnecessarily.
+
+ // We removed the last strong ref, but there are additional weak
+ // refs remaining. We'll move the contents to a new Arc, and
+ // invalidate the other weak refs.
+
+ // Note that it is not possible for the read of `weak` to yield
+ // usize::MAX (i.e., locked), since the weak count can only be
+ // locked by a thread with a strong reference.
+
+ // Materialize our own implicit weak pointer, so that it can clean
+ // up the ArcInner as needed.
+ let weak = Weak { ptr: this.ptr };
+
+ // mark the data itself as already deallocated
+ unsafe {
+ // there is no data race in the implicit write caused by `read`
+ // here (due to zeroing) because data is no longer accessed by
+ // other threads (due to there being no more strong refs at this
+ // point).
+ let mut swap = Arc::new(ptr::read(&weak.ptr.as_ref().data));
+ mem::swap(this, &mut swap);
+ mem::forget(swap);
+ }
+ } else {
+ // We were the sole reference of either kind; bump back up the
+ // strong ref count.
+ this.inner().strong.store(1, Release);
+ }
+
+ // As with `get_mut()`, the unsafety is ok because our reference was
+ // either unique to begin with, or became one upon cloning the contents.
+ unsafe { Self::get_mut_unchecked(this) }
+ }
+}
+
+impl<T: ?Sized> Arc<T> {
+ /// Returns a mutable reference into the given `Arc`, if there are
+ /// no other `Arc` or [`Weak`][weak] pointers to the same allocation.
+ ///
+ /// Returns [`None`][option] otherwise, because it is not safe to
+ /// mutate a shared value.
+ ///
+ /// See also [`make_mut`][make_mut], which will [`clone`][clone]
+ /// the inner value when there are other pointers.
+ ///
+ /// [weak]: struct.Weak.html
+ /// [option]: ../../std/option/enum.Option.html
+ /// [make_mut]: struct.Arc.html#method.make_mut
+ /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let mut x = Arc::new(3);
+ /// *Arc::get_mut(&mut x).unwrap() = 4;
+ /// assert_eq!(*x, 4);
+ ///
+ /// let _y = Arc::clone(&x);
+ /// assert!(Arc::get_mut(&mut x).is_none());
+ /// ```
+ #[inline]
+ #[stable(feature = "arc_unique", since = "1.4.0")]
+ pub fn get_mut(this: &mut Self) -> Option<&mut T> {
+ if this.is_unique() {
+ // This unsafety is ok because we're guaranteed that the pointer
+ // returned is the *only* pointer that will ever be returned to T. Our
+ // reference count is guaranteed to be 1 at this point, and we required
+ // the Arc itself to be `mut`, so we're returning the only possible
+ // reference to the inner data.
+ unsafe { Some(Arc::get_mut_unchecked(this)) }
+ } else {
+ None
+ }
+ }
+
+ /// Returns a mutable reference into the given `Arc`,
+ /// without any check.
+ ///
+ /// See also [`get_mut`], which is safe and does appropriate checks.
+ ///
+ /// [`get_mut`]: struct.Arc.html#method.get_mut
+ ///
+ /// # Safety
+ ///
+ /// Any other `Arc` or [`Weak`] pointers to the same allocation must not be dereferenced
+ /// for the duration of the returned borrow.
+ /// This is trivially the case if no such pointers exist,
+ /// for example immediately after `Arc::new`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(get_mut_unchecked)]
+ ///
+ /// use std::sync::Arc;
+ ///
+ /// let mut x = Arc::new(String::new());
+ /// unsafe {
+ /// Arc::get_mut_unchecked(&mut x).push_str("foo")
+ /// }
+ /// assert_eq!(*x, "foo");
+ /// ```
+ #[inline]
+ #[unstable(feature = "get_mut_unchecked", issue = "63292")]
+ pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
+ // We are careful to *not* create a reference covering the "count" fields, as
+ // this would alias with concurrent access to the reference counts (e.g. by `Weak`).
+ unsafe { &mut (*this.ptr.as_ptr()).data }
+ }
+
+ /// Determine whether this is the unique reference (including weak refs) to
+ /// the underlying data.
+ ///
+ /// Note that this requires locking the weak ref count.
+ fn is_unique(&mut self) -> bool {
+ // lock the weak pointer count if we appear to be the sole weak pointer
+ // holder.
+ //
+ // The acquire label here ensures a happens-before relationship with any
+ // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
+ // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
+ // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
+ //
+ // usize::MAX is the sentinel "locked" value that `downgrade` spins on.
+ if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
+ // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
+ // counter in `drop` -- the only access that happens when any but the last reference
+ // is being dropped.
+ let unique = self.inner().strong.load(Acquire) == 1;
+
+ // The release write here synchronizes with a read in `downgrade`,
+ // effectively preventing the above read of `strong` from happening
+ // after the write.
+ self.inner().weak.store(1, Release); // release the lock
+ unique
+ } else {
+ false
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
+ /// Drops the `Arc`.
+ ///
+ /// This will decrement the strong reference count. If the strong reference
+ /// count reaches zero then the only other references (if any) are
+ /// [`Weak`], so we `drop` the inner value.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// struct Foo;
+ ///
+ /// impl Drop for Foo {
+ /// fn drop(&mut self) {
+ /// println!("dropped!");
+ /// }
+ /// }
+ ///
+ /// let foo = Arc::new(Foo);
+ /// let foo2 = Arc::clone(&foo);
+ ///
+ /// drop(foo); // Doesn't print anything
+ /// drop(foo2); // Prints "dropped!"
+ /// ```
+ ///
+ /// [`Weak`]: ../../std/sync/struct.Weak.html
+ #[inline]
+ fn drop(&mut self) {
+ // Because `fetch_sub` is already atomic, we do not need to synchronize
+ // with other threads unless we are going to delete the object. This
+ // same logic applies to the below `fetch_sub` to the `weak` count.
+ if self.inner().strong.fetch_sub(1, Release) != 1 {
+ return;
+ }
+
+ // This fence is needed to prevent reordering of use of the data and
+ // deletion of the data. Because it is marked `Release`, the decreasing
+ // of the reference count synchronizes with this `Acquire` fence. This
+ // means that use of the data happens before decreasing the reference
+ // count, which happens before this fence, which happens before the
+ // deletion of the data.
+ //
+ // As explained in the [Boost documentation][1],
+ //
+ // > It is important to enforce any possible access to the object in one
+ // > thread (through an existing reference) to *happen before* deleting
+ // > the object in a different thread. This is achieved by a "release"
+ // > operation after dropping a reference (any access to the object
+ // > through this reference must obviously happened before), and an
+ // > "acquire" operation before deleting the object.
+ //
+ // In particular, while the contents of an Arc are usually immutable, it's
+ // possible to have interior writes to something like a Mutex<T>. Since a
+ // Mutex is not acquired when it is deleted, we can't rely on its
+ // synchronization logic to make writes in thread A visible to a destructor
+ // running in thread B.
+ //
+ // Also note that the Acquire fence here could probably be replaced with an
+ // Acquire load, which could improve performance in highly-contended
+ // situations. See [2].
+ //
+ // [1]: (www.boost.org/doc/libs/1_55_0/doc/html/atomic/usage_examples.html)
+ // [2]: (https://github.com/rust-lang/rust/pull/41714)
+ // (`acquire!` abstracts over fence-vs-load; it expands per target.)
+ acquire!(self.inner().strong);
+
+ unsafe {
+ self.drop_slow();
+ }
+ }
+}
+
+impl Arc<dyn Any + Send + Sync> {
+ #[inline]
+ #[stable(feature = "rc_downcast", since = "1.29.0")]
+ /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::any::Any;
+ /// use std::sync::Arc;
+ ///
+ /// fn print_if_string(value: Arc<dyn Any + Send + Sync>) {
+ /// if let Ok(string) = value.downcast::<String>() {
+ /// println!("String ({}): {}", string.len(), string);
+ /// }
+ /// }
+ ///
+ /// let my_string = "Hello World".to_string();
+ /// print_if_string(Arc::new(my_string));
+ /// print_if_string(Arc::new(0i8));
+ /// ```
+ pub fn downcast<T>(self) -> Result<Arc<T>, Self>
+ where
+ T: Any + Send + Sync + 'static,
+ {
+ if (*self).is::<T>() {
+ // Type verified above, so the pointer cast is sound; `forget`
+ // prevents `self` from decrementing the count we now hand to
+ // the typed Arc.
+ let ptr = self.ptr.cast::<ArcInner<T>>();
+ mem::forget(self);
+ Ok(Arc::from_inner(ptr))
+ } else {
+ Err(self)
+ }
+ }
+}
+
+impl<T> Weak<T> {
+ /// Constructs a new `Weak<T>`, without allocating any memory.
+ /// Calling [`upgrade`] on the return value always gives [`None`].
+ ///
+ /// [`upgrade`]: struct.Weak.html#method.upgrade
+ /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Weak;
+ ///
+ /// let empty: Weak<i64> = Weak::new();
+ /// assert!(empty.upgrade().is_none());
+ /// ```
+ #[stable(feature = "downgraded_weak", since = "1.10.0")]
+ pub fn new() -> Weak<T> {
+ // usize::MAX is the sentinel address of a "dangling" Weak that owns
+ // no allocation; `is_dangling` elsewhere recognizes this value.
+ Weak { ptr: NonNull::new(usize::MAX as *mut ArcInner<T>).expect("MAX is not 0") }
+ }
+
+ /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
+ ///
+ /// The pointer is valid only if there are some strong references. The pointer may be dangling,
+ /// unaligned or even [`null`] otherwise.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ /// use std::ptr;
+ ///
+ /// let strong = Arc::new("hello".to_owned());
+ /// let weak = Arc::downgrade(&strong);
+ /// // Both point to the same object
+ /// assert!(ptr::eq(&*strong, weak.as_ptr()));
+ /// // The strong here keeps it alive, so we can still access the object.
+ /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
+ ///
+ /// drop(strong);
+ /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
+ /// // undefined behaviour.
+ /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
+ /// ```
+ ///
+ /// [`null`]: ../../std/ptr/fn.null.html
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub fn as_ptr(&self) -> *const T {
+ let ptr: *mut ArcInner<T> = NonNull::as_ptr(self.ptr)
+
+ // SAFETY: we must offset the pointer manually, and said pointer may be
+ // a dangling weak (usize::MAX) if T is sized. data_offset is safe to call,
+ // because we know that a pointer to unsized T was derived from a real
+ // unsized T, as dangling weaks are only created for sized T. wrapping_offset
+ // is used so that we can use the same code path for the non-dangling
+ // unsized case and the potentially dangling sized case.
+ unsafe {
+ let offset = data_offset(ptr as *mut T);
+ set_data_ptr(ptr as *mut T, (ptr as *mut u8).wrapping_offset(offset))
+ }
+ }
+
+ /// Consumes the `Weak<T>` and turns it into a raw pointer.
+ ///
+ /// This converts the weak pointer into a raw pointer, preserving the original weak count. It
+ /// can be turned back into the `Weak<T>` with [`from_raw`].
+ ///
+ /// The same restrictions of accessing the target of the pointer as with
+ /// [`as_ptr`] apply.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::{Arc, Weak};
+ ///
+ /// let strong = Arc::new("hello".to_owned());
+ /// let weak = Arc::downgrade(&strong);
+ /// let raw = weak.into_raw();
+ ///
+ /// assert_eq!(1, Arc::weak_count(&strong));
+ /// assert_eq!("hello", unsafe { &*raw });
+ ///
+ /// drop(unsafe { Weak::from_raw(raw) });
+ /// assert_eq!(0, Arc::weak_count(&strong));
+ /// ```
+ ///
+ /// [`from_raw`]: struct.Weak.html#method.from_raw
+ /// [`as_ptr`]: struct.Weak.html#method.as_ptr
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub fn into_raw(self) -> *const T {
+ // `forget` keeps the weak count owned by the returned pointer.
+ let result = self.as_ptr();
+ mem::forget(self);
+ result
+ }
+
+ /// Converts a raw pointer previously created by [`into_raw`] back into
+ /// `Weak<T>`.
+ ///
+ /// This can be used to safely get a strong reference (by calling [`upgrade`]
+ /// later) or to deallocate the weak count by dropping the `Weak<T>`.
+ ///
+ /// It takes ownership of one weak count (with the exception of pointers created by [`new`],
+ /// as these don't have any corresponding weak count).
+ ///
+ /// # Safety
+ ///
+ /// The pointer must have originated from the [`into_raw`] and must still own its potential
+ /// weak reference count.
+ ///
+ /// It is allowed for the strong count to be 0 at the time of calling this, but the weak count
+ /// must be non-zero or the pointer must have originated from a dangling `Weak<T>` (one created
+ /// by [`new`]).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::{Arc, Weak};
+ ///
+ /// let strong = Arc::new("hello".to_owned());
+ ///
+ /// let raw_1 = Arc::downgrade(&strong).into_raw();
+ /// let raw_2 = Arc::downgrade(&strong).into_raw();
+ ///
+ /// assert_eq!(2, Arc::weak_count(&strong));
+ ///
+ /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
+ /// assert_eq!(1, Arc::weak_count(&strong));
+ ///
+ /// drop(strong);
+ ///
+ /// // Decrement the last weak count.
+ /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
+ /// ```
+ ///
+ /// [`new`]: struct.Weak.html#method.new
+ /// [`into_raw`]: struct.Weak.html#method.into_raw
+ /// [`upgrade`]: struct.Weak.html#method.upgrade
+ /// [`Weak`]: struct.Weak.html
+ /// [`Arc`]: struct.Arc.html
+ /// [`forget`]: ../../std/mem/fn.forget.html
+ #[stable(feature = "weak_into_raw", since = "1.45.0")]
+ pub unsafe fn from_raw(ptr: *const T) -> Self {
+ // NOTE(review): the null branch yields a fresh dangling Weak; presumably
+ // a dangling `into_raw` result round-trips here — confirm against
+ // `as_ptr`'s wrapping_offset behavior for sized T.
+ if ptr.is_null() {
+ Self::new()
+ } else {
+ // See Arc::from_raw for details
+ unsafe {
+ let offset = data_offset(ptr);
+ let fake_ptr = ptr as *mut ArcInner<T>;
+ let ptr = set_data_ptr(fake_ptr, (ptr as *mut u8).offset(-offset));
+ Weak { ptr: NonNull::new(ptr).expect("Invalid pointer passed to from_raw") }
+ }
+ }
+ }
+}
+
+/// Helper type to allow accessing the reference counts without
+/// making any assertions about the data field.
+///
+/// The two fields borrow the corresponding counters of an `ArcInner`.
+struct WeakInner<'a> {
+ weak: &'a atomic::AtomicUsize,
+ strong: &'a atomic::AtomicUsize,
+}
+
+impl<T: ?Sized> Weak<T> {
+ /// Attempts to upgrade the `Weak` pointer to an [`Arc`], delaying
+ /// dropping of the inner value if successful.
+ ///
+ /// Returns [`None`] if the inner value has since been dropped.
+ ///
+ /// [`Arc`]: struct.Arc.html
+ /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// let weak_five = Arc::downgrade(&five);
+ ///
+ /// let strong_five: Option<Arc<_>> = weak_five.upgrade();
+ /// assert!(strong_five.is_some());
+ ///
+ /// // Destroy all strong pointers.
+ /// drop(strong_five);
+ /// drop(five);
+ ///
+ /// assert!(weak_five.upgrade().is_none());
+ /// ```
+ #[stable(feature = "arc_weak", since = "1.4.0")]
+ pub fn upgrade(&self) -> Option<Arc<T>> {
+ // We use a CAS loop to increment the strong count instead of a
+ // fetch_add because once the count hits 0 it must never be above 0.
+ // `inner()` returns None for a dangling Weak (created via `Weak::new`).
+ let inner = self.inner()?;
+
+ // Relaxed load because any write of 0 that we can observe
+ // leaves the field in a permanently zero state (so a
+ // "stale" read of 0 is fine), and any other value is
+ // confirmed via the CAS below.
+ let mut n = inner.strong.load(Relaxed);
+
+ loop {
+ if n == 0 {
+ return None;
+ }
+
+ // See comments in `Arc::clone` for why we do this (for `mem::forget`).
+ if n > MAX_REFCOUNT {
+ abort();
+ }
+
+ // Relaxed is valid for the same reason it is on Arc's Clone impl
+ match inner.strong.compare_exchange_weak(n, n + 1, Relaxed, Relaxed) {
+ Ok(_) => return Some(Arc::from_inner(self.ptr)), // null checked above
+ Err(old) => n = old,
+ }
+ }
+ }
+
+ /// Gets the number of strong (`Arc`) pointers pointing to this allocation.
+ ///
+ /// If `self` was created using [`Weak::new`], this will return 0.
+ ///
+ /// [`Weak::new`]: #method.new
+ #[stable(feature = "weak_counts", since = "1.41.0")]
+ pub fn strong_count(&self) -> usize {
+ if let Some(inner) = self.inner() { inner.strong.load(SeqCst) } else { 0 }
+ }
+
+ /// Gets an approximation of the number of `Weak` pointers pointing to this
+ /// allocation.
+ ///
+ /// If `self` was created using [`Weak::new`], or if there are no remaining
+ /// strong pointers, this will return 0.
+ ///
+ /// # Accuracy
+ ///
+ /// Due to implementation details, the returned value can be off by 1 in
+ /// either direction when other threads are manipulating any `Arc`s or
+ /// `Weak`s pointing to the same allocation.
+ ///
+ /// [`Weak::new`]: #method.new
+ #[stable(feature = "weak_counts", since = "1.41.0")]
+ pub fn weak_count(&self) -> usize {
+ self.inner()
+ .map(|inner| {
+ let weak = inner.weak.load(SeqCst);
+ let strong = inner.strong.load(SeqCst);
+ if strong == 0 {
+ 0
+ } else {
+ // Since we observed that there was at least one strong pointer
+ // after reading the weak count, we know that the implicit weak
+ // reference (present whenever any strong references are alive)
+ // was still around when we observed the weak count, and can
+ // therefore safely subtract it.
+ weak - 1
+ }
+ })
+ .unwrap_or(0)
+ }
+
+ /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`,
+ /// (i.e., when this `Weak` was created by `Weak::new`).
+ #[inline]
+ fn inner(&self) -> Option<WeakInner<'_>> {
+ if is_dangling(self.ptr) {
+ None
+ } else {
+ // We are careful to *not* create a reference covering the "data" field, as
+ // the field may be mutated concurrently (for example, if the last `Arc`
+ // is dropped, the data field will be dropped in-place).
+ Some(unsafe {
+ let ptr = self.ptr.as_ptr();
+ WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
+ })
+ }
+ }
+
+ /// Returns `true` if the two `Weak`s point to the same allocation (similar to
+ /// [`ptr::eq`]), or if both don't point to any allocation
+ /// (because they were created with `Weak::new()`).
+ ///
+ /// # Notes
+ ///
+ /// Since this compares pointers it means that `Weak::new()` will equal each
+ /// other, even though they don't point to any allocation.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let first_rc = Arc::new(5);
+ /// let first = Arc::downgrade(&first_rc);
+ /// let second = Arc::downgrade(&first_rc);
+ ///
+ /// assert!(first.ptr_eq(&second));
+ ///
+ /// let third_rc = Arc::new(5);
+ /// let third = Arc::downgrade(&third_rc);
+ ///
+ /// assert!(!first.ptr_eq(&third));
+ /// ```
+ ///
+ /// Comparing `Weak::new`.
+ ///
+ /// ```
+ /// use std::sync::{Arc, Weak};
+ ///
+ /// let first = Weak::new();
+ /// let second = Weak::new();
+ /// assert!(first.ptr_eq(&second));
+ ///
+ /// let third_rc = Arc::new(());
+ /// let third = Arc::downgrade(&third_rc);
+ /// assert!(!first.ptr_eq(&third));
+ /// ```
+ ///
+ /// [`ptr::eq`]: ../../std/ptr/fn.eq.html
+ #[inline]
+ #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
+ pub fn ptr_eq(&self, other: &Self) -> bool {
+ self.ptr.as_ptr() == other.ptr.as_ptr()
+ }
+}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized> Clone for Weak<T> {
+    /// Makes a clone of the `Weak` pointer that points to the same allocation.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Weak};
+    ///
+    /// let weak_five = Arc::downgrade(&Arc::new(5));
+    ///
+    /// let _ = Weak::clone(&weak_five);
+    /// ```
+    #[inline]
+    fn clone(&self) -> Weak<T> {
+        // A dangling `Weak` (created by `Weak::new`) has no counts to bump;
+        // copying the sentinel pointer is all that is needed.
+        let inner = if let Some(inner) = self.inner() {
+            inner
+        } else {
+            return Weak { ptr: self.ptr };
+        };
+        // See comments in Arc::clone() for why this is relaxed. This can use a
+        // fetch_add (ignoring the lock) because the weak count is only locked
+        // when there are *no other* weak pointers in existence. (So we can't be
+        // running this code in that case).
+        let old_size = inner.weak.fetch_add(1, Relaxed);
+
+        // See comments in Arc::clone() for why we do this (for mem::forget).
+        if old_size > MAX_REFCOUNT {
+            abort();
+        }
+
+        Weak { ptr: self.ptr }
+    }
+}
+
+#[stable(feature = "downgraded_weak", since = "1.10.0")]
+impl<T> Default for Weak<T> {
+ /// Constructs a new `Weak<T>`, without allocating memory.
+ /// Calling [`upgrade`] on the return value always
+ /// gives [`None`].
+ ///
+ /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [`upgrade`]: ../../std/sync/struct.Weak.html#method.upgrade
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Weak;
+ ///
+ /// let empty: Weak<i64> = Default::default();
+ /// assert!(empty.upgrade().is_none());
+ /// ```
+ fn default() -> Weak<T> {
+ Weak::new()
+ }
+}
+
+#[stable(feature = "arc_weak", since = "1.4.0")]
+impl<T: ?Sized> Drop for Weak<T> {
+    /// Drops the `Weak` pointer.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::{Arc, Weak};
+    ///
+    /// struct Foo;
+    ///
+    /// impl Drop for Foo {
+    ///     fn drop(&mut self) {
+    ///         println!("dropped!");
+    ///     }
+    /// }
+    ///
+    /// let foo = Arc::new(Foo);
+    /// let weak_foo = Arc::downgrade(&foo);
+    /// let other_weak_foo = Weak::clone(&weak_foo);
+    ///
+    /// drop(weak_foo);   // Doesn't print anything
+    /// drop(foo);        // Prints "dropped!"
+    ///
+    /// assert!(other_weak_foo.upgrade().is_none());
+    /// ```
+    fn drop(&mut self) {
+        // If we find out that we were the last weak pointer, then it's time to
+        // deallocate the data entirely. See the discussion in Arc::drop() about
+        // the memory orderings
+        //
+        // It's not necessary to check for the locked state here, because the
+        // weak count can only be locked if there was precisely one weak ref,
+        // meaning that drop could only subsequently run ON that remaining weak
+        // ref, which can only happen after the lock is released.
+        //
+        // A dangling `Weak` (from `Weak::new`) owns nothing: return early.
+        let inner = if let Some(inner) = self.inner() { inner } else { return };
+
+        if inner.weak.fetch_sub(1, Release) == 1 {
+            acquire!(inner.weak);
+            // SAFETY: this was the last weak reference; the implicit weak
+            // reference held collectively by the strong handles is released
+            // through this same count, so no `Arc` or `Weak` can still reach
+            // the allocation and it can be freed.
+            unsafe { Global.dealloc(self.ptr.cast(), Layout::for_value(self.ptr.as_ref())) }
+        }
+    }
+}
+
+/// Private specialization trait backing `PartialEq for Arc<T>`.
+///
+/// The default impl always compares the pointed-to values; a specialized impl
+/// for `T: Eq` (via the `MarkerEq` marker) short-circuits on pointer identity
+/// first, which is sound only when equality is reflexive.
+#[stable(feature = "rust1", since = "1.0.0")]
+trait ArcEqIdent<T: ?Sized + PartialEq> {
+    fn eq(&self, other: &Arc<T>) -> bool;
+    fn ne(&self, other: &Arc<T>) -> bool;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> ArcEqIdent<T> for Arc<T> {
+ #[inline]
+ default fn eq(&self, other: &Arc<T>) -> bool {
+ **self == **other
+ }
+ #[inline]
+ default fn ne(&self, other: &Arc<T>) -> bool {
+ **self != **other
+ }
+}
+
+/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
+/// would otherwise add a cost to all equality checks on refs. We assume that `Arc`s are used to
+/// store large values, that are slow to clone, but also heavy to check for equality, causing this
+/// cost to pay off more easily. It's also more likely to have two `Arc` clones, that point to
+/// the same value, than two `&T`s.
+///
+/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + crate::rc::MarkerEq> ArcEqIdent<T> for Arc<T> {
+ #[inline]
+ fn eq(&self, other: &Arc<T>) -> bool {
+ Arc::ptr_eq(self, other) || **self == **other
+ }
+
+ #[inline]
+ fn ne(&self, other: &Arc<T>) -> bool {
+ !Arc::ptr_eq(self, other) && **self != **other
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
+    /// Equality for two `Arc`s.
+    ///
+    /// Two `Arc`s are equal if their inner values are equal, even if they are
+    /// stored in different allocations.
+    ///
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Arc`s that point to the same allocation are always equal.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert!(five == Arc::new(5));
+    /// ```
+    #[inline]
+    fn eq(&self, other: &Arc<T>) -> bool {
+        // Dispatch through the specialization trait; see `ArcEqIdent`.
+        ArcEqIdent::eq(self, other)
+    }
+
+    /// Inequality for two `Arc`s.
+    ///
+    /// Two `Arc`s are unequal if their inner values are unequal.
+    ///
+    /// If `T` also implements `Eq` (implying reflexivity of equality),
+    /// two `Arc`s that point to the same value are never unequal.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// use std::sync::Arc;
+    ///
+    /// let five = Arc::new(5);
+    ///
+    /// assert!(five != Arc::new(6));
+    /// ```
+    #[inline]
+    fn ne(&self, other: &Arc<T>) -> bool {
+        // Dispatch through the specialization trait; see `ArcEqIdent`.
+        ArcEqIdent::ne(self, other)
+    }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
+ /// Partial comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `partial_cmp()` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ /// use std::cmp::Ordering;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6)));
+ /// ```
+ fn partial_cmp(&self, other: &Arc<T>) -> Option<Ordering> {
+ (**self).partial_cmp(&**other)
+ }
+
+ /// Less-than comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `<` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five < Arc::new(6));
+ /// ```
+ fn lt(&self, other: &Arc<T>) -> bool {
+ *(*self) < *(*other)
+ }
+
+ /// 'Less than or equal to' comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `<=` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five <= Arc::new(5));
+ /// ```
+ fn le(&self, other: &Arc<T>) -> bool {
+ *(*self) <= *(*other)
+ }
+
+ /// Greater-than comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `>` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five > Arc::new(4));
+ /// ```
+ fn gt(&self, other: &Arc<T>) -> bool {
+ *(*self) > *(*other)
+ }
+
+ /// 'Greater than or equal to' comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `>=` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert!(five >= Arc::new(5));
+ /// ```
+ fn ge(&self, other: &Arc<T>) -> bool {
+ *(*self) >= *(*other)
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Ord> Ord for Arc<T> {
+ /// Comparison for two `Arc`s.
+ ///
+ /// The two are compared by calling `cmp()` on their inner values.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ /// use std::cmp::Ordering;
+ ///
+ /// let five = Arc::new(5);
+ ///
+ /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6)));
+ /// ```
+ fn cmp(&self, other: &Arc<T>) -> Ordering {
+ (**self).cmp(&**other)
+ }
+}
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Eq> Eq for Arc<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> fmt::Pointer for Arc<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Pointer::fmt(&(&**self as *const T), f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Default> Default for Arc<T> {
+ /// Creates a new `Arc<T>`, with the `Default` value for `T`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::sync::Arc;
+ ///
+ /// let x: Arc<i32> = Default::default();
+ /// assert_eq!(*x, 0);
+ /// ```
+ fn default() -> Arc<T> {
+ Arc::new(Default::default())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized + Hash> Hash for Arc<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ (**self).hash(state)
+ }
+}
+
+#[stable(feature = "from_for_ptrs", since = "1.6.0")]
+impl<T> From<T> for Arc<T> {
+ fn from(t: T) -> Self {
+ Arc::new(t)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: Clone> From<&[T]> for Arc<[T]> {
+ #[inline]
+ fn from(v: &[T]) -> Arc<[T]> {
+ <Self as ArcFromSlice<T>>::from_slice(v)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<&str> for Arc<str> {
+ #[inline]
+ fn from(v: &str) -> Arc<str> {
+ let arc = Arc::<[u8]>::from(v.as_bytes());
+ unsafe { Arc::from_raw(Arc::into_raw(arc) as *const str) }
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl From<String> for Arc<str> {
+ #[inline]
+ fn from(v: String) -> Arc<str> {
+ Arc::from(&v[..])
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T: ?Sized> From<Box<T>> for Arc<T> {
+ #[inline]
+ fn from(v: Box<T>) -> Arc<T> {
+ Arc::from_box(v)
+ }
+}
+
+#[stable(feature = "shared_from_slice", since = "1.21.0")]
+impl<T> From<Vec<T>> for Arc<[T]> {
+ #[inline]
+ fn from(mut v: Vec<T>) -> Arc<[T]> {
+ unsafe {
+ let arc = Arc::copy_from_slice(&v);
+
+ // Allow the Vec to free its memory, but not destroy its contents
+ v.set_len(0);
+
+ arc
+ }
+ }
+}
+
+#[stable(feature = "shared_from_cow", since = "1.45.0")]
+impl<'a, B> From<Cow<'a, B>> for Arc<B>
+where
+ B: ToOwned + ?Sized,
+ Arc<B>: From<&'a B> + From<B::Owned>,
+{
+ #[inline]
+ fn from(cow: Cow<'a, B>) -> Arc<B> {
+ match cow {
+ Cow::Borrowed(s) => Arc::from(s),
+ Cow::Owned(s) => Arc::from(s),
+ }
+ }
+}
+
+#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
+impl<T, const N: usize> TryFrom<Arc<[T]>> for Arc<[T; N]> {
+ type Error = Arc<[T]>;
+
+ fn try_from(boxed_slice: Arc<[T]>) -> Result<Self, Self::Error> {
+ if boxed_slice.len() == N {
+ Ok(unsafe { Arc::from_raw(Arc::into_raw(boxed_slice) as *mut [T; N]) })
+ } else {
+ Err(boxed_slice)
+ }
+ }
+}
+
+#[stable(feature = "shared_from_iter", since = "1.37.0")]
+impl<T> iter::FromIterator<T> for Arc<[T]> {
+ /// Takes each element in the `Iterator` and collects it into an `Arc<[T]>`.
+ ///
+ /// # Performance characteristics
+ ///
+ /// ## The general case
+ ///
+ /// In the general case, collecting into `Arc<[T]>` is done by first
+ /// collecting into a `Vec<T>`. That is, when writing the following:
+ ///
+ /// ```rust
+ /// # use std::sync::Arc;
+ /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
+ /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
+ /// ```
+ ///
+ /// this behaves as if we wrote:
+ ///
+ /// ```rust
+ /// # use std::sync::Arc;
+ /// let evens: Arc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
+ /// .collect::<Vec<_>>() // The first set of allocations happens here.
+ /// .into(); // A second allocation for `Arc<[T]>` happens here.
+ /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
+ /// ```
+ ///
+ /// This will allocate as many times as needed for constructing the `Vec<T>`
+ /// and then it will allocate once for turning the `Vec<T>` into the `Arc<[T]>`.
+ ///
+ /// ## Iterators of known length
+ ///
+ /// When your `Iterator` implements `TrustedLen` and is of an exact size,
+ /// a single allocation will be made for the `Arc<[T]>`. For example:
+ ///
+ /// ```rust
+ /// # use std::sync::Arc;
+ /// let evens: Arc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
+ /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
+ /// ```
+ fn from_iter<I: iter::IntoIterator<Item = T>>(iter: I) -> Self {
+ ToArcSlice::to_arc_slice(iter.into_iter())
+ }
+}
+
+/// Specialization trait used for collecting into `Arc<[T]>`.
+trait ToArcSlice<T>: Iterator<Item = T> + Sized {
+ fn to_arc_slice(self) -> Arc<[T]>;
+}
+
+impl<T, I: Iterator<Item = T>> ToArcSlice<T> for I {
+ default fn to_arc_slice(self) -> Arc<[T]> {
+ self.collect::<Vec<T>>().into()
+ }
+}
+
+impl<T, I: iter::TrustedLen<Item = T>> ToArcSlice<T> for I {
+ fn to_arc_slice(self) -> Arc<[T]> {
+ // This is the case for a `TrustedLen` iterator.
+ let (low, high) = self.size_hint();
+ if let Some(high) = high {
+ debug_assert_eq!(
+ low,
+ high,
+ "TrustedLen iterator's size hint is not exact: {:?}",
+ (low, high)
+ );
+
+ unsafe {
+ // SAFETY: We need to ensure that the iterator has an exact length and we have.
+ Arc::from_iter_exact(self, low)
+ }
+ } else {
+ // Fall back to normal implementation.
+ self.collect::<Vec<T>>().into()
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: ?Sized> borrow::Borrow<T> for Arc<T> {
+ fn borrow(&self) -> &T {
+ &**self
+ }
+}
+
+#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
+impl<T: ?Sized> AsRef<T> for Arc<T> {
+ fn as_ref(&self) -> &T {
+ &**self
+ }
+}
+
+#[stable(feature = "pin", since = "1.33.0")]
+impl<T: ?Sized> Unpin for Arc<T> {}
+
+/// Get the offset within an `ArcInner` for
+/// a payload of type described by a pointer.
+///
+/// # Safety
+///
+/// This has the same safety requirements as `align_of_val_raw`. In effect:
+///
+/// - This function is safe for any argument if `T` is sized, and
+/// - if `T` is unsized, the pointer must have appropriate pointer metadata
+///   acquired from the real instance that you are getting this offset for.
+unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> isize {
+    // Align the unsized value to the end of the `ArcInner`.
+    // Because it is `?Sized`, it will always be the last field in memory.
+    // Note: This is a detail of the current implementation of the compiler,
+    // and is not a guaranteed language detail. Do not rely on it outside of std.
+    unsafe { data_offset_align(align_of_val(&*ptr)) }
+}
+
+/// Byte offset of the payload inside an `ArcInner` whose payload requires
+/// alignment `align`: the size of the two reference counts plus whatever
+/// padding is needed to align the payload.
+#[inline]
+fn data_offset_align(align: usize) -> isize {
+    // `ArcInner<()>` is exactly the header (strong + weak counts) with a
+    // zero-sized payload, so its layout gives the header size/alignment.
+    let layout = Layout::new::<ArcInner<()>>();
+    (layout.size() + layout.padding_needed_for(align)) as isize
+}
diff --git a/library/alloc/src/sync/tests.rs b/library/alloc/src/sync/tests.rs
new file mode 100644
index 00000000000..6f08cd7f123
--- /dev/null
+++ b/library/alloc/src/sync/tests.rs
@@ -0,0 +1,494 @@
+use super::*;
+
+use std::boxed::Box;
+use std::clone::Clone;
+use std::convert::{From, TryInto};
+use std::mem::drop;
+use std::ops::Drop;
+use std::option::Option::{self, None, Some};
+use std::sync::atomic::{
+ self,
+ Ordering::{Acquire, SeqCst},
+};
+use std::sync::mpsc::channel;
+use std::sync::Mutex;
+use std::thread;
+
+use crate::vec::Vec;
+
+struct Canary(*mut atomic::AtomicUsize);
+
+impl Drop for Canary {
+ fn drop(&mut self) {
+ unsafe {
+ match *self {
+ Canary(c) => {
+ (*c).fetch_add(1, SeqCst);
+ }
+ }
+ }
+ }
+}
+
+#[test]
+#[cfg_attr(target_os = "emscripten", ignore)]
+fn manually_share_arc() {
+ let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+ let arc_v = Arc::new(v);
+
+ let (tx, rx) = channel();
+
+ let _t = thread::spawn(move || {
+ let arc_v: Arc<Vec<i32>> = rx.recv().unwrap();
+ assert_eq!((*arc_v)[3], 4);
+ });
+
+ tx.send(arc_v.clone()).unwrap();
+
+ assert_eq!((*arc_v)[2], 3);
+ assert_eq!((*arc_v)[4], 5);
+}
+
+#[test]
+fn test_arc_get_mut() {
+ let mut x = Arc::new(3);
+ *Arc::get_mut(&mut x).unwrap() = 4;
+ assert_eq!(*x, 4);
+ let y = x.clone();
+ assert!(Arc::get_mut(&mut x).is_none());
+ drop(y);
+ assert!(Arc::get_mut(&mut x).is_some());
+ let _w = Arc::downgrade(&x);
+ assert!(Arc::get_mut(&mut x).is_none());
+}
+
+#[test]
+fn weak_counts() {
+ assert_eq!(Weak::weak_count(&Weak::<u64>::new()), 0);
+ assert_eq!(Weak::strong_count(&Weak::<u64>::new()), 0);
+
+ let a = Arc::new(0);
+ let w = Arc::downgrade(&a);
+ assert_eq!(Weak::strong_count(&w), 1);
+ assert_eq!(Weak::weak_count(&w), 1);
+ let w2 = w.clone();
+ assert_eq!(Weak::strong_count(&w), 1);
+ assert_eq!(Weak::weak_count(&w), 2);
+ assert_eq!(Weak::strong_count(&w2), 1);
+ assert_eq!(Weak::weak_count(&w2), 2);
+ drop(w);
+ assert_eq!(Weak::strong_count(&w2), 1);
+ assert_eq!(Weak::weak_count(&w2), 1);
+ let a2 = a.clone();
+ assert_eq!(Weak::strong_count(&w2), 2);
+ assert_eq!(Weak::weak_count(&w2), 1);
+ drop(a2);
+ drop(a);
+ assert_eq!(Weak::strong_count(&w2), 0);
+ assert_eq!(Weak::weak_count(&w2), 0);
+ drop(w2);
+}
+
+#[test]
+fn try_unwrap() {
+    // Sole owner: unwrapping succeeds and yields the value.
+    let sole = Arc::new(3);
+    assert_eq!(Arc::try_unwrap(sole), Ok(3));
+
+    // A second strong handle forces `try_unwrap` to fail and hand the
+    // original `Arc` back.
+    let shared = Arc::new(4);
+    let _second_owner = shared.clone();
+    assert_eq!(Arc::try_unwrap(shared), Err(Arc::new(4)));
+
+    // Outstanding weak handles do not block unwrapping.
+    let weakly_observed = Arc::new(5);
+    let _observer = Arc::downgrade(&weakly_observed);
+    assert_eq!(Arc::try_unwrap(weakly_observed), Ok(5));
+}
+
+#[test]
+fn into_from_raw() {
+ let x = Arc::new(box "hello");
+ let y = x.clone();
+
+ let x_ptr = Arc::into_raw(x);
+ drop(y);
+ unsafe {
+ assert_eq!(**x_ptr, "hello");
+
+ let x = Arc::from_raw(x_ptr);
+ assert_eq!(**x, "hello");
+
+ assert_eq!(Arc::try_unwrap(x).map(|x| *x), Ok("hello"));
+ }
+}
+
+#[test]
+fn test_into_from_raw_unsized() {
+ use std::fmt::Display;
+ use std::string::ToString;
+
+ let arc: Arc<str> = Arc::from("foo");
+
+ let ptr = Arc::into_raw(arc.clone());
+ let arc2 = unsafe { Arc::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }, "foo");
+ assert_eq!(arc, arc2);
+
+ let arc: Arc<dyn Display> = Arc::new(123);
+
+ let ptr = Arc::into_raw(arc.clone());
+ let arc2 = unsafe { Arc::from_raw(ptr) };
+
+ assert_eq!(unsafe { &*ptr }.to_string(), "123");
+ assert_eq!(arc2.to_string(), "123");
+}
+
+#[test]
+fn test_cowarc_clone_make_mut() {
+ let mut cow0 = Arc::new(75);
+ let mut cow1 = cow0.clone();
+ let mut cow2 = cow1.clone();
+
+ assert!(75 == *Arc::make_mut(&mut cow0));
+ assert!(75 == *Arc::make_mut(&mut cow1));
+ assert!(75 == *Arc::make_mut(&mut cow2));
+
+ *Arc::make_mut(&mut cow0) += 1;
+ *Arc::make_mut(&mut cow1) += 2;
+ *Arc::make_mut(&mut cow2) += 3;
+
+ assert!(76 == *cow0);
+ assert!(77 == *cow1);
+ assert!(78 == *cow2);
+
+ // none should point to the same backing memory
+ assert!(*cow0 != *cow1);
+ assert!(*cow0 != *cow2);
+ assert!(*cow1 != *cow2);
+}
+
+#[test]
+fn test_cowarc_clone_unique2() {
+ let mut cow0 = Arc::new(75);
+ let cow1 = cow0.clone();
+ let cow2 = cow1.clone();
+
+ assert!(75 == *cow0);
+ assert!(75 == *cow1);
+ assert!(75 == *cow2);
+
+ *Arc::make_mut(&mut cow0) += 1;
+ assert!(76 == *cow0);
+ assert!(75 == *cow1);
+ assert!(75 == *cow2);
+
+ // cow1 and cow2 should share the same contents
+ // cow0 should have a unique reference
+ assert!(*cow0 != *cow1);
+ assert!(*cow0 != *cow2);
+ assert!(*cow1 == *cow2);
+}
+
+#[test]
+fn test_cowarc_clone_weak() {
+ let mut cow0 = Arc::new(75);
+ let cow1_weak = Arc::downgrade(&cow0);
+
+ assert!(75 == *cow0);
+ assert!(75 == *cow1_weak.upgrade().unwrap());
+
+ *Arc::make_mut(&mut cow0) += 1;
+
+ assert!(76 == *cow0);
+ assert!(cow1_weak.upgrade().is_none());
+}
+
+#[test]
+fn test_live() {
+ let x = Arc::new(5);
+ let y = Arc::downgrade(&x);
+ assert!(y.upgrade().is_some());
+}
+
+#[test]
+fn test_dead() {
+ let x = Arc::new(5);
+ let y = Arc::downgrade(&x);
+ drop(x);
+ assert!(y.upgrade().is_none());
+}
+
+#[test]
+fn weak_self_cyclic() {
+ struct Cycle {
+ x: Mutex<Option<Weak<Cycle>>>,
+ }
+
+ let a = Arc::new(Cycle { x: Mutex::new(None) });
+ let b = Arc::downgrade(&a.clone());
+ *a.x.lock().unwrap() = Some(b);
+
+ // hopefully we don't double-free (or leak)...
+}
+
+#[test]
+fn drop_arc() {
+ let mut canary = atomic::AtomicUsize::new(0);
+ let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+ drop(x);
+ assert!(canary.load(Acquire) == 1);
+}
+
+#[test]
+fn drop_arc_weak() {
+ let mut canary = atomic::AtomicUsize::new(0);
+ let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+ let arc_weak = Arc::downgrade(&arc);
+ assert!(canary.load(Acquire) == 0);
+ drop(arc);
+ assert!(canary.load(Acquire) == 1);
+ drop(arc_weak);
+}
+
+#[test]
+fn test_strong_count() {
+ let a = Arc::new(0);
+ assert!(Arc::strong_count(&a) == 1);
+ let w = Arc::downgrade(&a);
+ assert!(Arc::strong_count(&a) == 1);
+ let b = w.upgrade().expect("");
+ assert!(Arc::strong_count(&b) == 2);
+ assert!(Arc::strong_count(&a) == 2);
+ drop(w);
+ drop(a);
+ assert!(Arc::strong_count(&b) == 1);
+ let c = b.clone();
+ assert!(Arc::strong_count(&b) == 2);
+ assert!(Arc::strong_count(&c) == 2);
+}
+
+#[test]
+fn test_weak_count() {
+ let a = Arc::new(0);
+ assert!(Arc::strong_count(&a) == 1);
+ assert!(Arc::weak_count(&a) == 0);
+ let w = Arc::downgrade(&a);
+ assert!(Arc::strong_count(&a) == 1);
+ assert!(Arc::weak_count(&a) == 1);
+ let x = w.clone();
+ assert!(Arc::weak_count(&a) == 2);
+ drop(w);
+ drop(x);
+ assert!(Arc::strong_count(&a) == 1);
+ assert!(Arc::weak_count(&a) == 0);
+ let c = a.clone();
+ assert!(Arc::strong_count(&a) == 2);
+ assert!(Arc::weak_count(&a) == 0);
+ let d = Arc::downgrade(&c);
+ assert!(Arc::weak_count(&c) == 1);
+ assert!(Arc::strong_count(&c) == 2);
+
+ drop(a);
+ drop(c);
+ drop(d);
+}
+
+#[test]
+fn show_arc() {
+ let a = Arc::new(5);
+ assert_eq!(format!("{:?}", a), "5");
+}
+
+// Make sure deriving works with Arc<T>
+#[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
+struct Foo {
+ inner: Arc<i32>,
+}
+
+#[test]
+fn test_unsized() {
+ let x: Arc<[i32]> = Arc::new([1, 2, 3]);
+ assert_eq!(format!("{:?}", x), "[1, 2, 3]");
+ let y = Arc::downgrade(&x.clone());
+ drop(x);
+ assert!(y.upgrade().is_none());
+}
+
+#[test]
+fn test_from_owned() {
+ let foo = 123;
+ let foo_arc = Arc::from(foo);
+ assert!(123 == *foo_arc);
+}
+
+#[test]
+fn test_new_weak() {
+ let foo: Weak<usize> = Weak::new();
+ assert!(foo.upgrade().is_none());
+}
+
+#[test]
+fn test_ptr_eq() {
+    let original = Arc::new(5);
+    let alias = Arc::clone(&original);
+    let lookalike = Arc::new(5);
+
+    // Handles cloned from the same `Arc` share one allocation...
+    assert!(Arc::ptr_eq(&original, &alias));
+    // ...while an equal value in a separate allocation is not pointer-equal.
+    assert!(!Arc::ptr_eq(&original, &lookalike));
+}
+
+#[test]
+#[cfg_attr(target_os = "emscripten", ignore)]
+fn test_weak_count_locked() {
+ let mut a = Arc::new(atomic::AtomicBool::new(false));
+ let a2 = a.clone();
+ let t = thread::spawn(move || {
+ // Miri is too slow
+ let count = if cfg!(miri) { 1000 } else { 1000000 };
+ for _i in 0..count {
+ Arc::get_mut(&mut a);
+ }
+ a.store(true, SeqCst);
+ });
+
+ while !a2.load(SeqCst) {
+ let n = Arc::weak_count(&a2);
+ assert!(n < 2, "bad weak count: {}", n);
+ #[cfg(miri)] // Miri's scheduler does not guarantee liveness, and thus needs this hint.
+ atomic::spin_loop_hint();
+ }
+ t.join().unwrap();
+}
+
+#[test]
+fn test_from_str() {
+ let r: Arc<str> = Arc::from("foo");
+
+ assert_eq!(&r[..], "foo");
+}
+
+#[test]
+fn test_copy_from_slice() {
+ let s: &[u32] = &[1, 2, 3];
+ let r: Arc<[u32]> = Arc::from(s);
+
+ assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_clone_from_slice() {
+ #[derive(Clone, Debug, Eq, PartialEq)]
+ struct X(u32);
+
+ let s: &[X] = &[X(1), X(2), X(3)];
+ let r: Arc<[X]> = Arc::from(s);
+
+ assert_eq!(&r[..], s);
+}
+
+#[test]
+#[should_panic]
+fn test_clone_from_slice_panic() {
+ use std::string::{String, ToString};
+
+ struct Fail(u32, String);
+
+ impl Clone for Fail {
+ fn clone(&self) -> Fail {
+ if self.0 == 2 {
+ panic!();
+ }
+ Fail(self.0, self.1.clone())
+ }
+ }
+
+ let s: &[Fail] =
+ &[Fail(0, "foo".to_string()), Fail(1, "bar".to_string()), Fail(2, "baz".to_string())];
+
+ // Should panic, but not cause memory corruption
+ let _r: Arc<[Fail]> = Arc::from(s);
+}
+
+#[test]
+fn test_from_box() {
+ let b: Box<u32> = box 123;
+ let r: Arc<u32> = Arc::from(b);
+
+ assert_eq!(*r, 123);
+}
+
+#[test]
+fn test_from_box_str() {
+ use std::string::String;
+
+ let s = String::from("foo").into_boxed_str();
+ let r: Arc<str> = Arc::from(s);
+
+ assert_eq!(&r[..], "foo");
+}
+
+#[test]
+fn test_from_box_slice() {
+ let s = vec![1, 2, 3].into_boxed_slice();
+ let r: Arc<[u32]> = Arc::from(s);
+
+ assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_from_box_trait() {
+ use std::fmt::Display;
+ use std::string::ToString;
+
+ let b: Box<dyn Display> = box 123;
+ let r: Arc<dyn Display> = Arc::from(b);
+
+ assert_eq!(r.to_string(), "123");
+}
+
+#[test]
+fn test_from_box_trait_zero_sized() {
+ use std::fmt::Debug;
+
+ let b: Box<dyn Debug> = box ();
+ let r: Arc<dyn Debug> = Arc::from(b);
+
+ assert_eq!(format!("{:?}", r), "()");
+}
+
+#[test]
+fn test_from_vec() {
+ let v = vec![1, 2, 3];
+ let r: Arc<[u32]> = Arc::from(v);
+
+ assert_eq!(&r[..], [1, 2, 3]);
+}
+
+#[test]
+fn test_downcast() {
+ use std::any::Any;
+
+ let r1: Arc<dyn Any + Send + Sync> = Arc::new(i32::MAX);
+ let r2: Arc<dyn Any + Send + Sync> = Arc::new("abc");
+
+ assert!(r1.clone().downcast::<u32>().is_err());
+
+ let r1i32 = r1.downcast::<i32>();
+ assert!(r1i32.is_ok());
+ assert_eq!(r1i32.unwrap(), Arc::new(i32::MAX));
+
+ assert!(r2.clone().downcast::<i32>().is_err());
+
+ let r2str = r2.downcast::<&'static str>();
+ assert!(r2str.is_ok());
+ assert_eq!(r2str.unwrap(), Arc::new("abc"));
+}
+
+#[test]
+fn test_array_from_slice() {
+ let v = vec![1, 2, 3];
+ let r: Arc<[u32]> = Arc::from(v);
+
+ let a: Result<Arc<[u32; 3]>, _> = r.clone().try_into();
+ assert!(a.is_ok());
+
+ let a: Result<Arc<[u32; 2]>, _> = r.clone().try_into();
+ assert!(a.is_err());
+}
diff --git a/library/alloc/src/task.rs b/library/alloc/src/task.rs
new file mode 100644
index 00000000000..252e04a4105
--- /dev/null
+++ b/library/alloc/src/task.rs
@@ -0,0 +1,91 @@
+#![unstable(feature = "wake_trait", issue = "69912")]
+//! Types and Traits for working with asynchronous tasks.
+use core::mem::ManuallyDrop;
+use core::task::{RawWaker, RawWakerVTable, Waker};
+
+use crate::sync::Arc;
+
+/// The implementation of waking a task on an executor.
+///
+/// This trait can be used to create a [`Waker`]. An executor can define an
+/// implementation of this trait, and use that to construct a Waker to pass
+/// to the tasks that are executed on that executor.
+///
+/// This trait is a memory-safe and ergonomic alternative to constructing a
+/// [`RawWaker`]. It supports the common executor design in which the data used
+/// to wake up a task is stored in an [`Arc`][arc]. Some executors (especially
+/// those for embedded systems) cannot use this API, which is why [`RawWaker`]
+/// exists as an alternative for those systems.
+///
+/// [arc]: ../../std/sync/struct.Arc.html
+#[unstable(feature = "wake_trait", issue = "69912")]
+pub trait Wake {
+ /// Wake this task.
+ #[unstable(feature = "wake_trait", issue = "69912")]
+ fn wake(self: Arc<Self>);
+
+ /// Wake this task without consuming the waker.
+ ///
+ /// If an executor supports a cheaper way to wake without consuming the
+ /// waker, it should override this method. By default, it clones the
+ /// [`Arc`] and calls `wake` on the clone.
+ #[unstable(feature = "wake_trait", issue = "69912")]
+ fn wake_by_ref(self: &Arc<Self>) {
+ self.clone().wake();
+ }
+}
+
+#[unstable(feature = "wake_trait", issue = "69912")]
+impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for Waker {
+ fn from(waker: Arc<W>) -> Waker {
+ // SAFETY: This is safe because raw_waker safely constructs
+ // a RawWaker from Arc<W>.
+ unsafe { Waker::from_raw(raw_waker(waker)) }
+ }
+}
+
+#[unstable(feature = "wake_trait", issue = "69912")]
+impl<W: Wake + Send + Sync + 'static> From<Arc<W>> for RawWaker {
+ fn from(waker: Arc<W>) -> RawWaker {
+ raw_waker(waker)
+ }
+}
+
+// NB: This private function for constructing a RawWaker is used, rather than
+// inlining this into the `From<Arc<W>> for RawWaker` impl, to ensure that
+// the safety of `From<Arc<W>> for Waker` does not depend on the correct
+// trait dispatch - instead both impls call this function directly and
+// explicitly.
+//
+// The data pointer of the returned `RawWaker` is `Arc::into_raw(waker)`; every
+// vtable function below reconstructs the `Arc` from that pointer.
+#[inline(always)]
+fn raw_waker<W: Wake + Send + Sync + 'static>(waker: Arc<W>) -> RawWaker {
+    // Increment the reference count of the arc to clone it.
+    unsafe fn clone_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) -> RawWaker {
+        // SAFETY: `waker` came from `Arc::into_raw` (see the construction at
+        // the bottom of `raw_waker`), so it is a live `Arc<W>` whose strong
+        // count may be bumped.
+        unsafe { Arc::incr_strong_count(waker as *const W) };
+        RawWaker::new(
+            waker as *const (),
+            &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
+        )
+    }
+
+    // Wake by value, moving the Arc into the Wake::wake function
+    unsafe fn wake<W: Wake + Send + Sync + 'static>(waker: *const ()) {
+        // SAFETY: `waker` came from `Arc::into_raw`; taking ownership here
+        // consumes the reference count that the raw pointer represented.
+        let waker = unsafe { Arc::from_raw(waker as *const W) };
+        <W as Wake>::wake(waker);
+    }
+
+    // Wake by reference, wrap the waker in ManuallyDrop to avoid dropping it
+    unsafe fn wake_by_ref<W: Wake + Send + Sync + 'static>(waker: *const ()) {
+        // SAFETY: `waker` came from `Arc::into_raw`; `ManuallyDrop` prevents
+        // the reconstructed `Arc` from decrementing the count on exit.
+        let waker = unsafe { ManuallyDrop::new(Arc::from_raw(waker as *const W)) };
+        <W as Wake>::wake_by_ref(&waker);
+    }
+
+    // Decrement the reference count of the Arc on drop
+    unsafe fn drop_waker<W: Wake + Send + Sync + 'static>(waker: *const ()) {
+        // SAFETY: `waker` came from `Arc::into_raw`; this releases the count
+        // held by the `RawWaker` being dropped.
+        unsafe { Arc::decr_strong_count(waker as *const W) };
+    }
+
+    RawWaker::new(
+        Arc::into_raw(waker) as *const (),
+        &RawWakerVTable::new(clone_waker::<W>, wake::<W>, wake_by_ref::<W>, drop_waker::<W>),
+    )
+}
diff --git a/library/alloc/src/tests.rs b/library/alloc/src/tests.rs
new file mode 100644
index 00000000000..bddaab0c761
--- /dev/null
+++ b/library/alloc/src/tests.rs
@@ -0,0 +1,151 @@
+//! Test for `boxed` mod.
+
+use core::any::Any;
+use core::clone::Clone;
+use core::convert::TryInto;
+use core::ops::Deref;
+use core::result::Result::{Err, Ok};
+
+use std::boxed::Box;
+
+#[test]
+fn test_owned_clone() {
+ let a = Box::new(5);
+ let b: Box<i32> = a.clone();
+ assert!(a == b);
+}
+
+#[derive(PartialEq, Eq)]
+struct Test;
+
+#[test]
+fn any_move() {
+ let a = Box::new(8) as Box<dyn Any>;
+ let b = Box::new(Test) as Box<dyn Any>;
+
+ match a.downcast::<i32>() {
+ Ok(a) => {
+ assert!(a == Box::new(8));
+ }
+ Err(..) => panic!(),
+ }
+ match b.downcast::<Test>() {
+ Ok(a) => {
+ assert!(a == Box::new(Test));
+ }
+ Err(..) => panic!(),
+ }
+
+ let a = Box::new(8) as Box<dyn Any>;
+ let b = Box::new(Test) as Box<dyn Any>;
+
+ assert!(a.downcast::<Box<Test>>().is_err());
+ assert!(b.downcast::<Box<i32>>().is_err());
+}
+
+#[test]
+fn test_show() {
+ let a = Box::new(8) as Box<dyn Any>;
+ let b = Box::new(Test) as Box<dyn Any>;
+ let a_str = format!("{:?}", a);
+ let b_str = format!("{:?}", b);
+ assert_eq!(a_str, "Any");
+ assert_eq!(b_str, "Any");
+
+ static EIGHT: usize = 8;
+ static TEST: Test = Test;
+ let a = &EIGHT as &dyn Any;
+ let b = &TEST as &dyn Any;
+ let s = format!("{:?}", a);
+ assert_eq!(s, "Any");
+ let s = format!("{:?}", b);
+ assert_eq!(s, "Any");
+}
+
+#[test]
+fn deref() {
+ fn homura<T: Deref<Target = i32>>(_: T) {}
+ homura(Box::new(765));
+}
+
+#[test]
+fn raw_sized() {
+ let x = Box::new(17);
+ let p = Box::into_raw(x);
+ unsafe {
+ assert_eq!(17, *p);
+ *p = 19;
+ let y = Box::from_raw(p);
+ assert_eq!(19, *y);
+ }
+}
+
+#[test]
+fn raw_trait() {
+ trait Foo {
+ fn get(&self) -> u32;
+ fn set(&mut self, value: u32);
+ }
+
+ struct Bar(u32);
+
+ impl Foo for Bar {
+ fn get(&self) -> u32 {
+ self.0
+ }
+
+ fn set(&mut self, value: u32) {
+ self.0 = value;
+ }
+ }
+
+ let x: Box<dyn Foo> = Box::new(Bar(17));
+ let p = Box::into_raw(x);
+ unsafe {
+ assert_eq!(17, (*p).get());
+ (*p).set(19);
+ let y: Box<dyn Foo> = Box::from_raw(p);
+ assert_eq!(19, y.get());
+ }
+}
+
+#[test]
+fn f64_slice() {
+ let slice: &[f64] = &[-1.0, 0.0, 1.0, f64::INFINITY];
+ let boxed: Box<[f64]> = Box::from(slice);
+ assert_eq!(&*boxed, slice)
+}
+
+#[test]
+fn i64_slice() {
+ let slice: &[i64] = &[i64::MIN, -2, -1, 0, 1, 2, i64::MAX];
+ let boxed: Box<[i64]> = Box::from(slice);
+ assert_eq!(&*boxed, slice)
+}
+
+#[test]
+fn str_slice() {
+ let s = "Hello, world!";
+ let boxed: Box<str> = Box::from(s);
+ assert_eq!(&*boxed, s)
+}
+
+#[test]
+fn boxed_slice_from_iter() {
+ let iter = 0..100;
+ let boxed: Box<[u32]> = iter.collect();
+ assert_eq!(boxed.len(), 100);
+ assert_eq!(boxed[7], 7);
+}
+
+#[test]
+fn test_array_from_slice() {
+ let v = vec![1, 2, 3];
+ let r: Box<[u32]> = v.into_boxed_slice();
+
+ let a: Result<Box<[u32; 3]>, _> = r.clone().try_into();
+ assert!(a.is_ok());
+
+ let a: Result<Box<[u32; 2]>, _> = r.clone().try_into();
+ assert!(a.is_err());
+}
diff --git a/library/alloc/src/vec.rs b/library/alloc/src/vec.rs
new file mode 100644
index 00000000000..f5a3d0cd4af
--- /dev/null
+++ b/library/alloc/src/vec.rs
@@ -0,0 +1,3122 @@
+// ignore-tidy-filelength
+//! A contiguous growable array type with heap-allocated contents, written
+//! `Vec<T>`.
+//!
+//! Vectors have `O(1)` indexing, amortized `O(1)` push (to the end) and
+//! `O(1)` pop (from the end).
+//!
+//! Vectors ensure they never allocate more than `isize::MAX` bytes.
+//!
+//! # Examples
+//!
+//! You can explicitly create a [`Vec<T>`] with [`new`]:
+//!
+//! ```
+//! let v: Vec<i32> = Vec::new();
+//! ```
+//!
+//! ...or by using the [`vec!`] macro:
+//!
+//! ```
+//! let v: Vec<i32> = vec![];
+//!
+//! let v = vec![1, 2, 3, 4, 5];
+//!
+//! let v = vec![0; 10]; // ten zeroes
+//! ```
+//!
+//! You can [`push`] values onto the end of a vector (which will grow the vector
+//! as needed):
+//!
+//! ```
+//! let mut v = vec![1, 2];
+//!
+//! v.push(3);
+//! ```
+//!
+//! Popping values works in much the same way:
+//!
+//! ```
+//! let mut v = vec![1, 2];
+//!
+//! let two = v.pop();
+//! ```
+//!
+//! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits):
+//!
+//! ```
+//! let mut v = vec![1, 2, 3];
+//! let three = v[2];
+//! v[1] = v[1] + 5;
+//! ```
+//!
+//! [`Vec<T>`]: ../../std/vec/struct.Vec.html
+//! [`new`]: ../../std/vec/struct.Vec.html#method.new
+//! [`push`]: ../../std/vec/struct.Vec.html#method.push
+//! [`Index`]: ../../std/ops/trait.Index.html
+//! [`IndexMut`]: ../../std/ops/trait.IndexMut.html
+//! [`vec!`]: ../../std/macro.vec.html
+
+#![stable(feature = "rust1", since = "1.0.0")]
+
+use core::cmp::{self, Ordering};
+use core::fmt;
+use core::hash::{Hash, Hasher};
+use core::intrinsics::{arith_offset, assume};
+use core::iter::{FromIterator, FusedIterator, TrustedLen};
+use core::marker::PhantomData;
+use core::mem::{self, ManuallyDrop};
+use core::ops::Bound::{Excluded, Included, Unbounded};
+use core::ops::{self, Index, IndexMut, RangeBounds};
+use core::ptr::{self, NonNull};
+use core::slice::{self, SliceIndex};
+
+use crate::borrow::{Cow, ToOwned};
+use crate::boxed::Box;
+use crate::collections::TryReserveError;
+use crate::raw_vec::RawVec;
+
+/// A contiguous growable array type, written `Vec<T>` but pronounced 'vector'.
+///
+/// # Examples
+///
+/// ```
+/// let mut vec = Vec::new();
+/// vec.push(1);
+/// vec.push(2);
+///
+/// assert_eq!(vec.len(), 2);
+/// assert_eq!(vec[0], 1);
+///
+/// assert_eq!(vec.pop(), Some(2));
+/// assert_eq!(vec.len(), 1);
+///
+/// vec[0] = 7;
+/// assert_eq!(vec[0], 7);
+///
+/// vec.extend([1, 2, 3].iter().copied());
+///
+/// for x in &vec {
+/// println!("{}", x);
+/// }
+/// assert_eq!(vec, [7, 1, 2, 3]);
+/// ```
+///
+/// The [`vec!`] macro is provided to make initialization more convenient:
+///
+/// ```
+/// let mut vec = vec![1, 2, 3];
+/// vec.push(4);
+/// assert_eq!(vec, [1, 2, 3, 4]);
+/// ```
+///
+/// It can also initialize each element of a `Vec<T>` with a given value.
+/// This may be more efficient than performing allocation and initialization
+/// in separate steps, especially when initializing a vector of zeros:
+///
+/// ```
+/// let vec = vec![0; 5];
+/// assert_eq!(vec, [0, 0, 0, 0, 0]);
+///
+/// // The following is equivalent, but potentially slower:
+/// let mut vec1 = Vec::with_capacity(5);
+/// vec1.resize(5, 0);
+/// ```
+///
+/// Use a `Vec<T>` as an efficient stack:
+///
+/// ```
+/// let mut stack = Vec::new();
+///
+/// stack.push(1);
+/// stack.push(2);
+/// stack.push(3);
+///
+/// while let Some(top) = stack.pop() {
+/// // Prints 3, 2, 1
+/// println!("{}", top);
+/// }
+/// ```
+///
+/// # Indexing
+///
+/// The `Vec` type allows access to values by index, because it implements the
+/// [`Index`] trait. An example will be more explicit:
+///
+/// ```
+/// let v = vec![0, 2, 4, 6];
+/// println!("{}", v[1]); // it will display '2'
+/// ```
+///
+/// However be careful: if you try to access an index which isn't in the `Vec`,
+/// your software will panic! You cannot do this:
+///
+/// ```should_panic
+/// let v = vec![0, 2, 4, 6];
+/// println!("{}", v[6]); // it will panic!
+/// ```
+///
+/// Use [`get`] and [`get_mut`] if you want to check whether the index is in
+/// the `Vec`.
+///
+/// # Slicing
+///
+/// A `Vec` can be mutable. Slices, on the other hand, are read-only objects.
+/// To get a slice, use `&`. Example:
+///
+/// ```
+/// fn read_slice(slice: &[usize]) {
+/// // ...
+/// }
+///
+/// let v = vec![0, 1];
+/// read_slice(&v);
+///
+/// // ... and that's all!
+/// // you can also do it like this:
+/// let x : &[usize] = &v;
+/// ```
+///
+/// In Rust, it's more common to pass slices as arguments rather than vectors
+/// when you just want to provide read access. The same goes for [`String`] and
+/// [`&str`].
+///
+/// # Capacity and reallocation
+///
+/// The capacity of a vector is the amount of space allocated for any future
+/// elements that will be added onto the vector. This is not to be confused with
+/// the *length* of a vector, which specifies the number of actual elements
+/// within the vector. If a vector's length exceeds its capacity, its capacity
+/// will automatically be increased, but its elements will have to be
+/// reallocated.
+///
+/// For example, a vector with capacity 10 and length 0 would be an empty vector
+/// with space for 10 more elements. Pushing 10 or fewer elements onto the
+/// vector will not change its capacity or cause reallocation to occur. However,
+/// if the vector's length is increased to 11, it will have to reallocate, which
+/// can be slow. For this reason, it is recommended to use [`Vec::with_capacity`]
+/// whenever possible to specify how big the vector is expected to get.
+///
+/// # Guarantees
+///
+/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees
+/// about its design. This ensures that it's as low-overhead as possible in
+/// the general case, and can be correctly manipulated in primitive ways
+/// by unsafe code. Note that these guarantees refer to an unqualified `Vec<T>`.
+/// If additional type parameters are added (e.g., to support custom allocators),
+/// overriding their defaults may change the behavior.
+///
+/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length)
+/// triplet. No more, no less. The order of these fields is completely
+/// unspecified, and you should use the appropriate methods to modify these.
+/// The pointer will never be null, so this type is null-pointer-optimized.
+///
+/// However, the pointer may not actually point to allocated memory. In particular,
+/// if you construct a `Vec` with capacity 0 via [`Vec::new`], [`vec![]`][`vec!`],
+/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit`]
+/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized
+/// types inside a `Vec`, it will not allocate space for them. *Note that in this case
+/// the `Vec` may not report a [`capacity`] of 0*. `Vec` will allocate if and only
+/// if [`mem::size_of::<T>`]`() * capacity() > 0`. In general, `Vec`'s allocation
+/// details are very subtle &mdash; if you intend to allocate memory using a `Vec`
+/// and use it for something else (either to pass to unsafe code, or to build your
+/// own memory-backed collection), be sure to deallocate this memory by using
+/// `from_raw_parts` to recover the `Vec` and then dropping it.
+///
+/// If a `Vec` *has* allocated memory, then the memory it points to is on the heap
+/// (as defined by the allocator Rust is configured to use by default), and its
+/// pointer points to [`len`] initialized, contiguous elements in order (what
+/// you would see if you coerced it to a slice), followed by [`capacity`]` -
+/// `[`len`] logically uninitialized, contiguous elements.
+///
+/// `Vec` will never perform a "small optimization" where elements are actually
+/// stored on the stack for two reasons:
+///
+/// * It would make it more difficult for unsafe code to correctly manipulate
+/// a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were
+/// only moved, and it would be more difficult to determine if a `Vec` had
+/// actually allocated memory.
+///
+/// * It would penalize the general case, incurring an additional branch
+/// on every access.
+///
+/// `Vec` will never automatically shrink itself, even if completely empty. This
+/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec`
+/// and then filling it back up to the same [`len`] should incur no calls to
+/// the allocator. If you wish to free up unused memory, use
+/// [`shrink_to_fit`].
+///
+/// [`push`] and [`insert`] will never (re)allocate if the reported capacity is
+/// sufficient. [`push`] and [`insert`] *will* (re)allocate if
+/// [`len`]` == `[`capacity`]. That is, the reported capacity is completely
+/// accurate, and can be relied on. It can even be used to manually free the memory
+/// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even
+/// when not necessary.
+///
+/// `Vec` does not guarantee any particular growth strategy when reallocating
+/// when full, nor when [`reserve`] is called. The current strategy is basic
+/// and it may prove desirable to use a non-constant growth factor. Whatever
+/// strategy is used will of course guarantee `O(1)` amortized [`push`].
+///
+/// `vec![x; n]`, `vec![a, b, c, d]`, and
+/// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec`
+/// with exactly the requested capacity. If [`len`]` == `[`capacity`],
+/// (as is the case for the [`vec!`] macro), then a `Vec<T>` can be converted to
+/// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements.
+///
+/// `Vec` will not specifically overwrite any data that is removed from it,
+/// but also won't specifically preserve it. Its uninitialized memory is
+/// scratch space that it may use however it wants. It will generally just do
+/// whatever is most efficient or otherwise easy to implement. Do not rely on
+/// removed data to be erased for security purposes. Even if you drop a `Vec`, its
+/// buffer may simply be reused by another `Vec`. Even if you zero a `Vec`'s memory
+/// first, that may not actually happen because the optimizer does not consider
+/// this a side-effect that must be preserved. There is one case which we will
+/// not break, however: using `unsafe` code to write to the excess capacity,
+/// and then increasing the length to match, is always valid.
+///
+/// `Vec` does not currently guarantee the order in which elements are dropped.
+/// The order has changed in the past and may change again.
+///
+/// [`vec!`]: ../../std/macro.vec.html
+/// [`get`]: ../../std/vec/struct.Vec.html#method.get
+/// [`get_mut`]: ../../std/vec/struct.Vec.html#method.get_mut
+/// [`Index`]: ../../std/ops/trait.Index.html
+/// [`String`]: ../../std/string/struct.String.html
+/// [`&str`]: ../../std/primitive.str.html
+/// [`Vec::with_capacity`]: ../../std/vec/struct.Vec.html#method.with_capacity
+/// [`Vec::new`]: ../../std/vec/struct.Vec.html#method.new
+/// [`shrink_to_fit`]: ../../std/vec/struct.Vec.html#method.shrink_to_fit
+/// [`capacity`]: ../../std/vec/struct.Vec.html#method.capacity
+/// [`mem::size_of::<T>`]: ../../std/mem/fn.size_of.html
+/// [`len`]: ../../std/vec/struct.Vec.html#method.len
+/// [`push`]: ../../std/vec/struct.Vec.html#method.push
+/// [`insert`]: ../../std/vec/struct.Vec.html#method.insert
+/// [`reserve`]: ../../std/vec/struct.Vec.html#method.reserve
+/// [owned slice]: ../../std/boxed/struct.Box.html
+#[stable(feature = "rust1", since = "1.0.0")]
+#[cfg_attr(not(test), rustc_diagnostic_item = "vec_type")]
+pub struct Vec<T> {
+ buf: RawVec<T>,
+ len: usize,
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Inherent methods
+////////////////////////////////////////////////////////////////////////////////
+
+impl<T> Vec<T> {
+ /// Constructs a new, empty `Vec<T>`.
+ ///
+ /// The vector will not allocate until elements are pushed onto it.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// let mut vec: Vec<i32> = Vec::new();
+ /// ```
+ #[inline]
+ #[rustc_const_stable(feature = "const_vec_new", since = "1.39.0")]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub const fn new() -> Vec<T> {
+ Vec { buf: RawVec::NEW, len: 0 }
+ }
+
+ /// Constructs a new, empty `Vec<T>` with the specified capacity.
+ ///
+ /// The vector will be able to hold exactly `capacity` elements without
+ /// reallocating. If `capacity` is 0, the vector will not allocate.
+ ///
+ /// It is important to note that although the returned vector has the
+ /// *capacity* specified, the vector will have a zero *length*. For an
+ /// explanation of the difference between length and capacity, see
+ /// *[Capacity and reallocation]*.
+ ///
+ /// [Capacity and reallocation]: #capacity-and-reallocation
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = Vec::with_capacity(10);
+ ///
+ /// // The vector contains no items, even though it has capacity for more
+ /// assert_eq!(vec.len(), 0);
+ /// assert_eq!(vec.capacity(), 10);
+ ///
+ /// // These are all done without reallocating...
+ /// for i in 0..10 {
+ /// vec.push(i);
+ /// }
+ /// assert_eq!(vec.len(), 10);
+ /// assert_eq!(vec.capacity(), 10);
+ ///
+ /// // ...but this may make the vector reallocate
+ /// vec.push(11);
+ /// assert_eq!(vec.len(), 11);
+ /// assert!(vec.capacity() >= 11);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn with_capacity(capacity: usize) -> Vec<T> {
+ Vec { buf: RawVec::with_capacity(capacity), len: 0 }
+ }
+
+ /// Decomposes a `Vec<T>` into its raw components.
+ ///
+ /// Returns the raw pointer to the underlying data, the length of
+ /// the vector (in elements), and the allocated capacity of the
+ /// data (in elements). These are the same arguments in the same
+ /// order as the arguments to [`from_raw_parts`].
+ ///
+ /// After calling this function, the caller is responsible for the
+ /// memory previously managed by the `Vec`. The only way to do
+ /// this is to convert the raw pointer, length, and capacity back
+ /// into a `Vec` with the [`from_raw_parts`] function, allowing
+ /// the destructor to perform the cleanup.
+ ///
+ /// [`from_raw_parts`]: #method.from_raw_parts
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(vec_into_raw_parts)]
+ /// let v: Vec<i32> = vec![-1, 0, 1];
+ ///
+ /// let (ptr, len, cap) = v.into_raw_parts();
+ ///
+ /// let rebuilt = unsafe {
+ /// // We can now make changes to the components, such as
+ /// // transmuting the raw pointer to a compatible type.
+ /// let ptr = ptr as *mut u32;
+ ///
+ /// Vec::from_raw_parts(ptr, len, cap)
+ /// };
+ /// assert_eq!(rebuilt, [4294967295, 0, 1]);
+ /// ```
+ #[unstable(feature = "vec_into_raw_parts", reason = "new API", issue = "65816")]
+ pub fn into_raw_parts(self) -> (*mut T, usize, usize) {
+ let mut me = ManuallyDrop::new(self);
+ (me.as_mut_ptr(), me.len(), me.capacity())
+ }
+
+ /// Creates a `Vec<T>` directly from the raw components of another vector.
+ ///
+ /// # Safety
+ ///
+ /// This is highly unsafe, due to the number of invariants that aren't
+ /// checked:
+ ///
+ /// * `ptr` needs to have been previously allocated via [`String`]/`Vec<T>`
+ /// (at least, it's highly likely to be incorrect if it wasn't).
+ /// * `T` needs to have the same size and alignment as what `ptr` was allocated with.
+ /// (`T` having a less strict alignment is not sufficient, the alignment really
+    ///   needs to be equal to satisfy the [`dealloc`] requirement that memory must be
+ /// allocated and deallocated with the same layout.)
+ /// * `length` needs to be less than or equal to `capacity`.
+ /// * `capacity` needs to be the capacity that the pointer was allocated with.
+ ///
+ /// Violating these may cause problems like corrupting the allocator's
+ /// internal data structures. For example it is **not** safe
+ /// to build a `Vec<u8>` from a pointer to a C `char` array with length `size_t`.
+ /// It's also not safe to build one from a `Vec<u16>` and its length, because
+ /// the allocator cares about the alignment, and these two types have different
+ /// alignments. The buffer was allocated with alignment 2 (for `u16`), but after
+ /// turning it into a `Vec<u8>` it'll be deallocated with alignment 1.
+ ///
+ /// The ownership of `ptr` is effectively transferred to the
+ /// `Vec<T>` which may then deallocate, reallocate or change the
+ /// contents of memory pointed to by the pointer at will. Ensure
+ /// that nothing else uses the pointer after calling this
+ /// function.
+ ///
+ /// [`String`]: ../../std/string/struct.String.html
+ /// [`dealloc`]: ../../alloc/alloc/trait.GlobalAlloc.html#tymethod.dealloc
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::ptr;
+ /// use std::mem;
+ ///
+ /// let v = vec![1, 2, 3];
+ ///
+ // FIXME Update this when vec_into_raw_parts is stabilized
+ /// // Prevent running `v`'s destructor so we are in complete control
+ /// // of the allocation.
+ /// let mut v = mem::ManuallyDrop::new(v);
+ ///
+ /// // Pull out the various important pieces of information about `v`
+ /// let p = v.as_mut_ptr();
+ /// let len = v.len();
+ /// let cap = v.capacity();
+ ///
+ /// unsafe {
+ /// // Overwrite memory with 4, 5, 6
+ /// for i in 0..len as isize {
+ /// ptr::write(p.offset(i), 4 + i);
+ /// }
+ ///
+ /// // Put everything back together into a Vec
+ /// let rebuilt = Vec::from_raw_parts(p, len, cap);
+ /// assert_eq!(rebuilt, [4, 5, 6]);
+ /// }
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub unsafe fn from_raw_parts(ptr: *mut T, length: usize, capacity: usize) -> Vec<T> {
+ unsafe { Vec { buf: RawVec::from_raw_parts(ptr, capacity), len: length } }
+ }
+
+ /// Returns the number of elements the vector can hold without
+ /// reallocating.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let vec: Vec<i32> = Vec::with_capacity(10);
+ /// assert_eq!(vec.capacity(), 10);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn capacity(&self) -> usize {
+ self.buf.capacity()
+ }
+
+ /// Reserves capacity for at least `additional` more elements to be inserted
+ /// in the given `Vec<T>`. The collection may reserve more space to avoid
+ /// frequent reallocations. After calling `reserve`, capacity will be
+ /// greater than or equal to `self.len() + additional`. Does nothing if
+ /// capacity is already sufficient.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity exceeds `isize::MAX` bytes.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1];
+ /// vec.reserve(10);
+ /// assert!(vec.capacity() >= 11);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve(&mut self, additional: usize) {
+ self.buf.reserve(self.len, additional);
+ }
+
+ /// Reserves the minimum capacity for exactly `additional` more elements to
+ /// be inserted in the given `Vec<T>`. After calling `reserve_exact`,
+ /// capacity will be greater than or equal to `self.len() + additional`.
+ /// Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it
+ /// requests. Therefore, capacity can not be relied upon to be precisely
+ /// minimal. Prefer `reserve` if future insertions are expected.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity overflows `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1];
+ /// vec.reserve_exact(10);
+ /// assert!(vec.capacity() >= 11);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn reserve_exact(&mut self, additional: usize) {
+ self.buf.reserve_exact(self.len, additional);
+ }
+
+ /// Tries to reserve capacity for at least `additional` more elements to be inserted
+ /// in the given `Vec<T>`. The collection may reserve more space to avoid
+ /// frequent reallocations. After calling `reserve`, capacity will be
+ /// greater than or equal to `self.len() + additional`. Does nothing if
+ /// capacity is already sufficient.
+ ///
+ /// # Errors
+ ///
+ /// If the capacity overflows, or the allocator reports a failure, then an error
+ /// is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_reserve)]
+ /// use std::collections::TryReserveError;
+ ///
+ /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
+ /// let mut output = Vec::new();
+ ///
+ /// // Pre-reserve the memory, exiting if we can't
+ /// output.try_reserve(data.len())?;
+ ///
+ /// // Now we know this can't OOM in the middle of our complex work
+ /// output.extend(data.iter().map(|&val| {
+ /// val * 2 + 5 // very complicated
+ /// }));
+ ///
+ /// Ok(output)
+ /// }
+ /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+ /// ```
+ #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+ pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ self.buf.try_reserve(self.len, additional)
+ }
+
+    /// Tries to reserve the minimum capacity for exactly `additional` more elements to
+ /// be inserted in the given `Vec<T>`. After calling `reserve_exact`,
+ /// capacity will be greater than or equal to `self.len() + additional`.
+ /// Does nothing if the capacity is already sufficient.
+ ///
+ /// Note that the allocator may give the collection more space than it
+ /// requests. Therefore, capacity can not be relied upon to be precisely
+ /// minimal. Prefer `reserve` if future insertions are expected.
+ ///
+ /// # Errors
+ ///
+ /// If the capacity overflows, or the allocator reports a failure, then an error
+ /// is returned.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(try_reserve)]
+ /// use std::collections::TryReserveError;
+ ///
+ /// fn process_data(data: &[u32]) -> Result<Vec<u32>, TryReserveError> {
+ /// let mut output = Vec::new();
+ ///
+ /// // Pre-reserve the memory, exiting if we can't
+ /// output.try_reserve(data.len())?;
+ ///
+ /// // Now we know this can't OOM in the middle of our complex work
+ /// output.extend(data.iter().map(|&val| {
+ /// val * 2 + 5 // very complicated
+ /// }));
+ ///
+ /// Ok(output)
+ /// }
+ /// # process_data(&[1, 2, 3]).expect("why is the test harness OOMing on 12 bytes?");
+ /// ```
+ #[unstable(feature = "try_reserve", reason = "new API", issue = "48043")]
+ pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
+ self.buf.try_reserve_exact(self.len, additional)
+ }
+
+ /// Shrinks the capacity of the vector as much as possible.
+ ///
+ /// It will drop down as close as possible to the length but the allocator
+ /// may still inform the vector that there is space for a few more elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = Vec::with_capacity(10);
+ /// vec.extend([1, 2, 3].iter().cloned());
+ /// assert_eq!(vec.capacity(), 10);
+ /// vec.shrink_to_fit();
+ /// assert!(vec.capacity() >= 3);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn shrink_to_fit(&mut self) {
+ if self.capacity() != self.len {
+ self.buf.shrink_to_fit(self.len);
+ }
+ }
+
+ /// Shrinks the capacity of the vector with a lower bound.
+ ///
+ /// The capacity will remain at least as large as both the length
+ /// and the supplied value.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the current capacity is smaller than the supplied
+ /// minimum capacity.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(shrink_to)]
+ /// let mut vec = Vec::with_capacity(10);
+ /// vec.extend([1, 2, 3].iter().cloned());
+ /// assert_eq!(vec.capacity(), 10);
+ /// vec.shrink_to(4);
+ /// assert!(vec.capacity() >= 4);
+ /// vec.shrink_to(0);
+ /// assert!(vec.capacity() >= 3);
+ /// ```
+ #[unstable(feature = "shrink_to", reason = "new API", issue = "56431")]
+ pub fn shrink_to(&mut self, min_capacity: usize) {
+ self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
+ }
+
+ /// Converts the vector into [`Box<[T]>`][owned slice].
+ ///
+ /// Note that this will drop any excess capacity.
+ ///
+ /// [owned slice]: ../../std/boxed/struct.Box.html
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let v = vec![1, 2, 3];
+ ///
+ /// let slice = v.into_boxed_slice();
+ /// ```
+ ///
+ /// Any excess capacity is removed:
+ ///
+ /// ```
+ /// let mut vec = Vec::with_capacity(10);
+ /// vec.extend([1, 2, 3].iter().cloned());
+ ///
+ /// assert_eq!(vec.capacity(), 10);
+ /// let slice = vec.into_boxed_slice();
+ /// assert_eq!(slice.into_vec().capacity(), 3);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn into_boxed_slice(mut self) -> Box<[T]> {
+ unsafe {
+ self.shrink_to_fit();
+ let me = ManuallyDrop::new(self);
+ let buf = ptr::read(&me.buf);
+ let len = me.len();
+ buf.into_box(len).assume_init()
+ }
+ }
+
+ /// Shortens the vector, keeping the first `len` elements and dropping
+ /// the rest.
+ ///
+ /// If `len` is greater than the vector's current length, this has no
+ /// effect.
+ ///
+ /// The [`drain`] method can emulate `truncate`, but causes the excess
+ /// elements to be returned instead of dropped.
+ ///
+ /// Note that this method has no effect on the allocated capacity
+ /// of the vector.
+ ///
+ /// # Examples
+ ///
+ /// Truncating a five element vector to two elements:
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3, 4, 5];
+ /// vec.truncate(2);
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ ///
+ /// No truncation occurs when `len` is greater than the vector's current
+ /// length:
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3];
+ /// vec.truncate(8);
+ /// assert_eq!(vec, [1, 2, 3]);
+ /// ```
+ ///
+ /// Truncating when `len == 0` is equivalent to calling the [`clear`]
+ /// method.
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3];
+ /// vec.truncate(0);
+ /// assert_eq!(vec, []);
+ /// ```
+ ///
+ /// [`clear`]: #method.clear
+ /// [`drain`]: #method.drain
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn truncate(&mut self, len: usize) {
+ // This is safe because:
+ //
+ // * the slice passed to `drop_in_place` is valid; the `len > self.len`
+ // case avoids creating an invalid slice, and
+ // * the `len` of the vector is shrunk before calling `drop_in_place`,
+ // such that no value will be dropped twice in case `drop_in_place`
+ // were to panic once (if it panics twice, the program aborts).
+ unsafe {
+ if len > self.len {
+ return;
+ }
+ let remaining_len = self.len - len;
+ let s = ptr::slice_from_raw_parts_mut(self.as_mut_ptr().add(len), remaining_len);
+ self.len = len;
+ ptr::drop_in_place(s);
+ }
+ }
+
+ /// Extracts a slice containing the entire vector.
+ ///
+ /// Equivalent to `&s[..]`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::io::{self, Write};
+ /// let buffer = vec![1, 2, 3, 5, 8];
+ /// io::sink().write(buffer.as_slice()).unwrap();
+ /// ```
+ #[inline]
+ #[stable(feature = "vec_as_slice", since = "1.7.0")]
+ pub fn as_slice(&self) -> &[T] {
+ self
+ }
+
+ /// Extracts a mutable slice of the entire vector.
+ ///
+ /// Equivalent to `&mut s[..]`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use std::io::{self, Read};
+ /// let mut buffer = vec![0; 3];
+ /// io::repeat(0b101).read_exact(buffer.as_mut_slice()).unwrap();
+ /// ```
+ #[inline]
+ #[stable(feature = "vec_as_slice", since = "1.7.0")]
+ pub fn as_mut_slice(&mut self) -> &mut [T] {
+ self
+ }
+
+ /// Returns a raw pointer to the vector's buffer.
+ ///
+ /// The caller must ensure that the vector outlives the pointer this
+ /// function returns, or else it will end up pointing to garbage.
+ /// Modifying the vector may cause its buffer to be reallocated,
+ /// which would also make any pointers to it invalid.
+ ///
+ /// The caller must also ensure that the memory the pointer (non-transitively) points to
+ /// is never written to (except inside an `UnsafeCell`) using this pointer or any pointer
+ /// derived from it. If you need to mutate the contents of the slice, use [`as_mut_ptr`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let x = vec![1, 2, 4];
+ /// let x_ptr = x.as_ptr();
+ ///
+ /// unsafe {
+ /// for i in 0..x.len() {
+ /// assert_eq!(*x_ptr.add(i), 1 << i);
+ /// }
+ /// }
+ /// ```
+ ///
+ /// [`as_mut_ptr`]: #method.as_mut_ptr
+ #[stable(feature = "vec_as_ptr", since = "1.37.0")]
+ #[inline]
+ pub fn as_ptr(&self) -> *const T {
+ // We shadow the slice method of the same name to avoid going through
+ // `deref`, which creates an intermediate reference.
+ let ptr = self.buf.ptr();
+ unsafe {
+ assume(!ptr.is_null());
+ }
+ ptr
+ }
+
+ /// Returns an unsafe mutable pointer to the vector's buffer.
+ ///
+ /// The caller must ensure that the vector outlives the pointer this
+ /// function returns, or else it will end up pointing to garbage.
+ /// Modifying the vector may cause its buffer to be reallocated,
+ /// which would also make any pointers to it invalid.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// // Allocate vector big enough for 4 elements.
+ /// let size = 4;
+ /// let mut x: Vec<i32> = Vec::with_capacity(size);
+ /// let x_ptr = x.as_mut_ptr();
+ ///
+ /// // Initialize elements via raw pointer writes, then set length.
+ /// unsafe {
+ ///     for i in 0..size {
+ ///         *x_ptr.add(i) = i as i32;
+ ///     }
+ ///     x.set_len(size);
+ /// }
+ /// assert_eq!(&*x, &[0,1,2,3]);
+ /// ```
+ #[stable(feature = "vec_as_ptr", since = "1.37.0")]
+ #[inline]
+ pub fn as_mut_ptr(&mut self) -> *mut T {
+ // We shadow the slice method of the same name to avoid going through
+ // `deref_mut`, which creates an intermediate reference.
+ let ptr = self.buf.ptr();
+ unsafe {
+ // SAFETY: same non-null invariant as in `as_ptr` above.
+ assume(!ptr.is_null());
+ }
+ ptr
+ }
+
+ /// Forces the length of the vector to `new_len`.
+ ///
+ /// This is a low-level operation that maintains none of the normal
+ /// invariants of the type. Normally changing the length of a vector
+ /// is done using one of the safe operations instead, such as
+ /// [`truncate`], [`resize`], [`extend`], or [`clear`].
+ ///
+ /// [`truncate`]: #method.truncate
+ /// [`resize`]: #method.resize
+ /// [`extend`]: ../../std/iter/trait.Extend.html#tymethod.extend
+ /// [`clear`]: #method.clear
+ ///
+ /// # Safety
+ ///
+ /// - `new_len` must be less than or equal to [`capacity()`].
+ /// - The elements at `old_len..new_len` must be initialized.
+ ///
+ /// [`capacity()`]: #method.capacity
+ ///
+ /// # Examples
+ ///
+ /// This method can be useful for situations in which the vector
+ /// is serving as a buffer for other code, particularly over FFI:
+ ///
+ /// ```no_run
+ /// # #![allow(dead_code)]
+ /// # // This is just a minimal skeleton for the doc example;
+ /// # // don't use this as a starting point for a real library.
+ /// # pub struct StreamWrapper { strm: *mut std::ffi::c_void }
+ /// # const Z_OK: i32 = 0;
+ /// # extern "C" {
+ /// #     fn deflateGetDictionary(
+ /// #         strm: *mut std::ffi::c_void,
+ /// #         dictionary: *mut u8,
+ /// #         dictLength: *mut usize,
+ /// #     ) -> i32;
+ /// # }
+ /// # impl StreamWrapper {
+ /// pub fn get_dictionary(&self) -> Option<Vec<u8>> {
+ ///     // Per the FFI method's docs, "32768 bytes is always enough".
+ ///     let mut dict = Vec::with_capacity(32_768);
+ ///     let mut dict_length = 0;
+ ///     // SAFETY: When `deflateGetDictionary` returns `Z_OK`, it holds that:
+ ///     // 1. `dict_length` elements were initialized.
+ ///     // 2. `dict_length` <= the capacity (32_768)
+ ///     // which makes `set_len` safe to call.
+ ///     unsafe {
+ ///         // Make the FFI call...
+ ///         let r = deflateGetDictionary(self.strm, dict.as_mut_ptr(), &mut dict_length);
+ ///         if r == Z_OK {
+ ///             // ...and update the length to what was initialized.
+ ///             dict.set_len(dict_length);
+ ///             Some(dict)
+ ///         } else {
+ ///             None
+ ///         }
+ ///     }
+ /// }
+ /// # }
+ /// ```
+ ///
+ /// While the following example is sound, there is a memory leak since
+ /// the inner vectors were not freed prior to the `set_len` call:
+ ///
+ /// ```
+ /// let mut vec = vec![vec![1, 0, 0],
+ ///                    vec![0, 1, 0],
+ ///                    vec![0, 0, 1]];
+ /// // SAFETY:
+ /// // 1. `old_len..0` is empty so no elements need to be initialized.
+ /// // 2. `0 <= capacity` always holds whatever `capacity` is.
+ /// unsafe {
+ ///     vec.set_len(0);
+ /// }
+ /// ```
+ ///
+ /// Normally, here, one would use [`clear`] instead to correctly drop
+ /// the contents and thus not leak memory.
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub unsafe fn set_len(&mut self, new_len: usize) {
+ // Checked only in debug builds; in release the caller's safety contract
+ // (new_len <= capacity, elements initialized) is trusted unconditionally.
+ debug_assert!(new_len <= self.capacity());
+
+ self.len = new_len;
+ }
+
+ /// Removes an element from the vector and returns it.
+ ///
+ /// The removed element is replaced by the last element of the vector.
+ ///
+ /// This does not preserve ordering, but is O(1).
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut v = vec!["foo", "bar", "baz", "qux"];
+ ///
+ /// assert_eq!(v.swap_remove(1), "bar");
+ /// assert_eq!(v, ["foo", "qux", "baz"]);
+ ///
+ /// assert_eq!(v.swap_remove(0), "foo");
+ /// assert_eq!(v, ["baz", "qux"]);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn swap_remove(&mut self, index: usize) -> T {
+ // Out-of-line panic path keeps the hot path small and inlinable.
+ #[cold]
+ #[inline(never)]
+ fn assert_failed(index: usize, len: usize) -> ! {
+ panic!("swap_remove index (is {}) should be < len (is {})", index, len);
+ }
+
+ let len = self.len();
+ if index >= len {
+ assert_failed(index, len);
+ }
+ unsafe {
+ // We replace self[index] with the last element. Note that if the
+ // bounds check above succeeds there must be a last element (which
+ // can be self[index] itself).
+ let last = ptr::read(self.as_ptr().add(len - 1));
+ let hole = self.as_mut_ptr().add(index);
+ // Shrink first so the duplicated last slot is never observable,
+ // then overwrite the hole and hand back its previous contents.
+ self.set_len(len - 1);
+ ptr::replace(hole, last)
+ }
+ }
+
+ /// Inserts an element at position `index` within the vector, shifting all
+ /// elements after it to the right.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3];
+ /// vec.insert(1, 4);
+ /// assert_eq!(vec, [1, 4, 2, 3]);
+ /// vec.insert(4, 5);
+ /// assert_eq!(vec, [1, 4, 2, 3, 5]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn insert(&mut self, index: usize, element: T) {
+ // Out-of-line panic path keeps the hot path small and inlinable.
+ #[cold]
+ #[inline(never)]
+ fn assert_failed(index: usize, len: usize) -> ! {
+ panic!("insertion index (is {}) should be <= len (is {})", index, len);
+ }
+
+ let len = self.len();
+ if index > len {
+ assert_failed(index, len);
+ }
+
+ // space for the new element
+ if len == self.buf.capacity() {
+ self.reserve(1);
+ }
+
+ unsafe {
+ // infallible
+ // The spot to put the new value
+ {
+ // NOTE: `as_mut_ptr` must be called after the possible `reserve`
+ // above, since reserving may reallocate and move the buffer.
+ let p = self.as_mut_ptr().add(index);
+ // Shift everything over to make space. (Duplicating the
+ // `index`th element into two consecutive places.)
+ ptr::copy(p, p.offset(1), len - index);
+ // Write it in, overwriting the first copy of the `index`th
+ // element.
+ ptr::write(p, element);
+ }
+ self.set_len(len + 1);
+ }
+ }
+
+ /// Removes and returns the element at position `index` within the vector,
+ /// shifting all elements after it to the left.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `index` is out of bounds.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut v = vec![1, 2, 3];
+ /// assert_eq!(v.remove(1), 2);
+ /// assert_eq!(v, [1, 3]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn remove(&mut self, index: usize) -> T {
+ // Out-of-line panic path keeps the hot path small and inlinable.
+ #[cold]
+ #[inline(never)]
+ fn assert_failed(index: usize, len: usize) -> ! {
+ panic!("removal index (is {}) should be < len (is {})", index, len);
+ }
+
+ let len = self.len();
+ if index >= len {
+ assert_failed(index, len);
+ }
+ unsafe {
+ // infallible
+ let ret;
+ {
+ // the place we are taking from.
+ let ptr = self.as_mut_ptr().add(index);
+ // copy it out, unsafely having a copy of the value on
+ // the stack and in the vector at the same time.
+ ret = ptr::read(ptr);
+
+ // Shift everything down to fill in that spot.
+ ptr::copy(ptr.offset(1), ptr, len - index - 1);
+ }
+ // Only now is the duplicate at the old tail slot unreachable.
+ self.set_len(len - 1);
+ ret
+ }
+ }
+
+ /// Retains only the elements specified by the predicate.
+ ///
+ /// In other words, remove all elements `e` such that `f(&e)` returns `false`.
+ /// This method operates in place, visiting each element exactly once in the
+ /// original order, and preserves the order of the retained elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3, 4];
+ /// vec.retain(|&x| x % 2 == 0);
+ /// assert_eq!(vec, [2, 4]);
+ /// ```
+ ///
+ /// The exact order may be useful for tracking external state, like an index.
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3, 4, 5];
+ /// let keep = [false, true, true, false, true];
+ /// let mut i = 0;
+ /// vec.retain(|_| (keep[i], i += 1).0);
+ /// assert_eq!(vec, [2, 3, 5]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn retain<F>(&mut self, mut f: F)
+ where
+ F: FnMut(&T) -> bool,
+ {
+ let original_len = self.len();
+ let mut removed = 0;
+ {
+ // Single forward pass: each kept element is swapped back over the
+ // gap left by the elements rejected so far, preserving order.
+ let slice = &mut **self;
+ for read in 0..original_len {
+ if !f(&slice[read]) {
+ removed += 1;
+ } else if removed > 0 {
+ slice.swap(read - removed, read);
+ }
+ }
+ }
+ // The rejected elements have all been shuffled into the tail; drop them.
+ if removed > 0 {
+ self.truncate(original_len - removed);
+ }
+ }
+
+ /// Removes all but the first of consecutive elements in the vector that resolve to the same
+ /// key.
+ ///
+ /// If the vector is sorted, this removes all duplicates.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![10, 20, 21, 30, 20];
+ ///
+ /// vec.dedup_by_key(|i| *i / 10);
+ ///
+ /// assert_eq!(vec, [10, 20, 30, 20]);
+ /// ```
+ #[stable(feature = "dedup_by", since = "1.16.0")]
+ #[inline]
+ pub fn dedup_by_key<F, K>(&mut self, mut key: F)
+ where
+ F: FnMut(&mut T) -> K,
+ K: PartialEq,
+ {
+ // Delegate to `dedup_by`, comparing elements by their derived keys.
+ self.dedup_by(|a, b| key(a) == key(b))
+ }
+
+ /// Removes all but the first of consecutive elements in the vector satisfying a given equality
+ /// relation.
+ ///
+ /// The `same_bucket` function is passed references to two elements from the vector and
+ /// must determine if the elements compare equal. The elements are passed in opposite order
+ /// from their order in the slice, so if `same_bucket(a, b)` returns `true`, `a` is removed.
+ ///
+ /// If the vector is sorted, this removes all duplicates.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"];
+ ///
+ /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
+ ///
+ /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
+ /// ```
+ #[stable(feature = "dedup_by", since = "1.16.0")]
+ pub fn dedup_by<F>(&mut self, same_bucket: F)
+ where
+ F: FnMut(&mut T, &mut T) -> bool,
+ {
+ // Partition the slice into a deduplicated prefix and a tail of
+ // duplicates, then chop the tail off.
+ let (unique, _duplicates) = self.as_mut_slice().partition_dedup_by(same_bucket);
+ let unique_len = unique.len();
+ self.truncate(unique_len);
+ }
+
+ /// Appends an element to the back of a collection.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the new capacity exceeds `isize::MAX` bytes.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2];
+ /// vec.push(3);
+ /// assert_eq!(vec, [1, 2, 3]);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn push(&mut self, value: T) {
+ // This will panic or abort if we would allocate > isize::MAX bytes
+ // or if the length increment would overflow for zero-sized types.
+ if self.len == self.buf.capacity() {
+ self.reserve(1);
+ }
+ unsafe {
+ // The slot at `len` is within capacity after the reserve above;
+ // write the value before bumping `len` so the element is
+ // initialized by the time it becomes reachable.
+ let end = self.as_mut_ptr().add(self.len);
+ ptr::write(end, value);
+ self.len += 1;
+ }
+ }
+
+ /// Removes the last element from a vector and returns it, or [`None`] if it
+ /// is empty.
+ ///
+ /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3];
+ /// assert_eq!(vec.pop(), Some(3));
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn pop(&mut self) -> Option<T> {
+ if self.len == 0 {
+ None
+ } else {
+ unsafe {
+ // Decrement first so the element being moved out is already
+ // outside the vector's logical bounds when we read it.
+ self.len -= 1;
+ Some(ptr::read(self.as_ptr().add(self.len())))
+ }
+ }
+ }
+
+ /// Moves all the elements of `other` into `Self`, leaving `other` empty.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the number of elements in the vector overflows a `usize`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3];
+ /// let mut vec2 = vec![4, 5, 6];
+ /// vec.append(&mut vec2);
+ /// assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
+ /// assert_eq!(vec2, []);
+ /// ```
+ #[inline]
+ #[stable(feature = "append", since = "1.4.0")]
+ pub fn append(&mut self, other: &mut Self) {
+ unsafe {
+ // `as _` casts the `&[T]` to the raw `*const [T]` the helper takes.
+ self.append_elements(other.as_slice() as _);
+ // The elements were bitwise-moved above; reset `other` so it does
+ // not drop (or expose) the moved-out values.
+ other.set_len(0);
+ }
+ }
+
+ /// Appends elements to `Self` from other buffer.
+ ///
+ /// Safety: `other` must point to `len` initialized elements that the
+ /// caller subsequently treats as moved out of (see `append`).
+ #[inline]
+ unsafe fn append_elements(&mut self, other: *const [T]) {
+ let count = unsafe { (*other).len() };
+ self.reserve(count);
+ let len = self.len();
+ unsafe { ptr::copy_nonoverlapping(other as *const T, self.as_mut_ptr().add(len), count) };
+ self.len += count;
+ }
+
+ /// Creates a draining iterator that removes the specified range in the vector
+ /// and yields the removed items.
+ ///
+ /// When the iterator **is** dropped, all elements in the range are removed
+ /// from the vector, even if the iterator was not fully consumed. If the
+ /// iterator **is not** dropped (with [`mem::forget`] for example), it is
+ /// unspecified how many elements are removed.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut v = vec![1, 2, 3];
+ /// let u: Vec<_> = v.drain(1..).collect();
+ /// assert_eq!(v, &[1]);
+ /// assert_eq!(u, &[2, 3]);
+ ///
+ /// // A full range clears the vector
+ /// v.drain(..);
+ /// assert_eq!(v, &[]);
+ /// ```
+ #[stable(feature = "drain", since = "1.6.0")]
+ pub fn drain<R>(&mut self, range: R) -> Drain<'_, T>
+ where
+ R: RangeBounds<usize>,
+ {
+ // Memory safety
+ //
+ // When the Drain is first created, it shortens the length of
+ // the source vector to make sure no uninitialized or moved-from elements
+ // are accessible at all if the Drain's destructor never gets to run.
+ //
+ // Drain will ptr::read out the values to remove.
+ // When finished, remaining tail of the vec is copied back to cover
+ // the hole, and the vector length is restored to the new length.
+ //
+ let len = self.len();
+ // Normalize the generic `RangeBounds` into a half-open `start..end`.
+ let start = match range.start_bound() {
+ Included(&n) => n,
+ Excluded(&n) => n + 1,
+ Unbounded => 0,
+ };
+ let end = match range.end_bound() {
+ Included(&n) => n + 1,
+ Excluded(&n) => n,
+ Unbounded => len,
+ };
+
+ // Out-of-line panic paths keep the hot path small.
+ #[cold]
+ #[inline(never)]
+ fn start_assert_failed(start: usize, end: usize) -> ! {
+ panic!("start drain index (is {}) should be <= end drain index (is {})", start, end);
+ }
+
+ #[cold]
+ #[inline(never)]
+ fn end_assert_failed(end: usize, len: usize) -> ! {
+ panic!("end drain index (is {}) should be <= len (is {})", end, len);
+ }
+
+ if start > end {
+ start_assert_failed(start, end);
+ }
+ if end > len {
+ end_assert_failed(end, len);
+ }
+
+ unsafe {
+ // set self.vec length's to start, to be safe in case Drain is leaked
+ self.set_len(start);
+ // Use the borrow in the IterMut to indicate borrowing behavior of the
+ // whole Drain iterator (like &mut T).
+ let range_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(start), end - start);
+ Drain {
+ tail_start: end,
+ tail_len: len - end,
+ iter: range_slice.iter(),
+ vec: NonNull::from(self),
+ }
+ }
+ }
+
+ /// Clears the vector, removing all values.
+ ///
+ /// Note that this method has no effect on the allocated capacity
+ /// of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut v = vec![1, 2, 3];
+ ///
+ /// v.clear();
+ ///
+ /// assert!(v.is_empty());
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn clear(&mut self) {
+ // `truncate` drops every element in place; the allocation is kept.
+ self.truncate(0)
+ }
+
+ /// Returns the number of elements in the vector, also referred to
+ /// as its 'length'.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let a = vec![1, 2, 3];
+ /// assert_eq!(a.len(), 3);
+ /// ```
+ #[inline]
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn len(&self) -> usize {
+ self.len
+ }
+
+ /// Returns `true` if the vector contains no elements.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut v = Vec::new();
+ /// assert!(v.is_empty());
+ ///
+ /// v.push(1);
+ /// assert!(!v.is_empty());
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ pub fn is_empty(&self) -> bool {
+ // A vector is empty exactly when its length field is zero.
+ self.len == 0
+ }
+
+ /// Splits the collection into two at the given index.
+ ///
+ /// Returns a newly allocated vector containing the elements in the range
+ /// `[at, len)`. After the call, the original vector will be left containing
+ /// the elements `[0, at)` with its previous capacity unchanged.
+ ///
+ /// # Panics
+ ///
+ /// Panics if `at > len`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1,2,3];
+ /// let vec2 = vec.split_off(1);
+ /// assert_eq!(vec, [1]);
+ /// assert_eq!(vec2, [2, 3]);
+ /// ```
+ #[inline]
+ #[must_use = "use `.truncate()` if you don't need the other half"]
+ #[stable(feature = "split_off", since = "1.4.0")]
+ pub fn split_off(&mut self, at: usize) -> Self {
+ // Out-of-line panic path keeps the hot path small and inlinable.
+ #[cold]
+ #[inline(never)]
+ fn assert_failed(at: usize, len: usize) -> ! {
+ panic!("`at` split index (is {}) should be <= len (is {})", at, len);
+ }
+
+ if at > self.len() {
+ assert_failed(at, self.len());
+ }
+
+ let other_len = self.len - at;
+ let mut other = Vec::with_capacity(other_len);
+
+ // Unsafely `set_len` and copy items to `other`.
+ unsafe {
+ // Shrinking `self` first means the tail elements are owned by
+ // neither vector while the bitwise copy below transfers them.
+ self.set_len(at);
+ other.set_len(other_len);
+
+ ptr::copy_nonoverlapping(self.as_ptr().add(at), other.as_mut_ptr(), other.len());
+ }
+ other
+ }
+
+ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+ ///
+ /// If `new_len` is greater than `len`, the `Vec` is extended by the
+ /// difference, with each additional slot filled with the result of
+ /// calling the closure `f`. The return values from `f` will end up
+ /// in the `Vec` in the order they have been generated.
+ ///
+ /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+ ///
+ /// This method uses a closure to create new values on every push. If
+ /// you'd rather [`Clone`] a given value, use [`resize`]. If you want
+ /// to use the [`Default`] trait to generate values, you can pass
+ /// [`Default::default()`] as the second argument.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 3];
+ /// vec.resize_with(5, Default::default);
+ /// assert_eq!(vec, [1, 2, 3, 0, 0]);
+ ///
+ /// let mut vec = vec![];
+ /// let mut p = 1;
+ /// vec.resize_with(4, || { p *= 2; p });
+ /// assert_eq!(vec, [2, 4, 8, 16]);
+ /// ```
+ ///
+ /// [`resize`]: #method.resize
+ /// [`Clone`]: ../../std/clone/trait.Clone.html
+ #[stable(feature = "vec_resize_with", since = "1.33.0")]
+ pub fn resize_with<F>(&mut self, new_len: usize, f: F)
+ where
+ F: FnMut() -> T,
+ {
+ let current_len = self.len();
+ if new_len <= current_len {
+ // Shrinking (or a no-op): drop the surplus elements.
+ self.truncate(new_len);
+ } else {
+ // Growing: generate the missing elements by calling `f` repeatedly.
+ self.extend_with(new_len - current_len, ExtendFunc(f));
+ }
+ }
+
+ /// Consumes and leaks the `Vec`, returning a mutable reference to the contents,
+ /// `&'a mut [T]`. Note that the type `T` must outlive the chosen lifetime
+ /// `'a`. If the type has only static references, or none at all, then this
+ /// may be chosen to be `'static`.
+ ///
+ /// This function is similar to the `leak` function on `Box`.
+ ///
+ /// This function is mainly useful for data that lives for the remainder of
+ /// the program's life. Dropping the returned reference will cause a memory
+ /// leak.
+ ///
+ /// # Examples
+ ///
+ /// Simple usage:
+ ///
+ /// ```
+ /// #![feature(vec_leak)]
+ ///
+ /// let x = vec![1, 2, 3];
+ /// let static_ref: &'static mut [usize] = Vec::leak(x);
+ /// static_ref[0] += 1;
+ /// assert_eq!(static_ref, &[2, 2, 3]);
+ /// ```
+ #[unstable(feature = "vec_leak", issue = "62195")]
+ #[inline]
+ pub fn leak<'a>(vec: Vec<T>) -> &'a mut [T]
+ where
+ T: 'a, // Technically not needed, but kept to be explicit.
+ {
+ // `into_boxed_slice` first trims excess capacity, then the boxed
+ // slice is leaked, so no unused capacity is left unreachable.
+ Box::leak(vec.into_boxed_slice())
+ }
+}
+
+impl<T: Clone> Vec<T> {
+ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+ ///
+ /// If `new_len` is greater than `len`, the `Vec` is extended by the
+ /// difference, with each additional slot filled with `value`.
+ /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+ ///
+ /// This method requires `T` to implement [`Clone`],
+ /// in order to be able to clone the passed value.
+ /// If you need more flexibility (or want to rely on [`Default`] instead of
+ /// [`Clone`]), use [`resize_with`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec!["hello"];
+ /// vec.resize(3, "world");
+ /// assert_eq!(vec, ["hello", "world", "world"]);
+ ///
+ /// let mut vec = vec![1, 2, 3, 4];
+ /// vec.resize(2, 0);
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ ///
+ /// [`Clone`]: ../../std/clone/trait.Clone.html
+ /// [`Default`]: ../../std/default/trait.Default.html
+ /// [`resize_with`]: #method.resize_with
+ #[stable(feature = "vec_resize", since = "1.5.0")]
+ pub fn resize(&mut self, new_len: usize, value: T) {
+ let len = self.len();
+
+ if new_len > len {
+ // `ExtendElement` clones `value` for every slot but the last,
+ // which receives `value` itself (see `ExtendWith::last`).
+ self.extend_with(new_len - len, ExtendElement(value))
+ } else {
+ self.truncate(new_len);
+ }
+ }
+
+ /// Clones and appends all elements in a slice to the `Vec`.
+ ///
+ /// Iterates over the slice `other`, clones each element, and then appends
+ /// it to this `Vec`. The `other` vector is traversed in-order.
+ ///
+ /// Note that this function is same as [`extend`] except that it is
+ /// specialized to work with slices instead. If and when Rust gets
+ /// specialization this function will likely be deprecated (but still
+ /// available).
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1];
+ /// vec.extend_from_slice(&[2, 3, 4]);
+ /// assert_eq!(vec, [1, 2, 3, 4]);
+ /// ```
+ ///
+ /// [`extend`]: #method.extend
+ #[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
+ pub fn extend_from_slice(&mut self, other: &[T]) {
+ // Routed through the `SpecExtend` machinery so slice iterators can
+ // take the memcpy-style fast path.
+ self.spec_extend(other.iter())
+ }
+}
+
+impl<T: Default> Vec<T> {
+ /// Resizes the `Vec` in-place so that `len` is equal to `new_len`.
+ ///
+ /// If `new_len` is greater than `len`, the `Vec` is extended by the
+ /// difference, with each additional slot filled with [`Default::default()`].
+ /// If `new_len` is less than `len`, the `Vec` is simply truncated.
+ ///
+ /// This method uses [`Default`] to create new values on every push. If
+ /// you'd rather [`Clone`] a given value, use [`resize`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(deprecated)]
+ /// #![feature(vec_resize_default)]
+ ///
+ /// let mut vec = vec![1, 2, 3];
+ /// vec.resize_default(5);
+ /// assert_eq!(vec, [1, 2, 3, 0, 0]);
+ ///
+ /// let mut vec = vec![1, 2, 3, 4];
+ /// vec.resize_default(2);
+ /// assert_eq!(vec, [1, 2]);
+ /// ```
+ ///
+ /// [`resize`]: #method.resize
+ /// [`Default::default()`]: ../../std/default/trait.Default.html#tymethod.default
+ /// [`Default`]: ../../std/default/trait.Default.html
+ /// [`Clone`]: ../../std/clone/trait.Clone.html
+ #[unstable(feature = "vec_resize_default", issue = "41758")]
+ #[rustc_deprecated(
+ reason = "This is moving towards being removed in favor \
+ of `.resize_with(Default::default)`. If you disagree, please comment \
+ in the tracking issue.",
+ since = "1.33.0"
+ )]
+ pub fn resize_default(&mut self, new_len: usize) {
+ let len = self.len();
+
+ if new_len > len {
+ // `ExtendDefault` materializes `Default::default()` per new slot.
+ self.extend_with(new_len - len, ExtendDefault);
+ } else {
+ self.truncate(new_len);
+ }
+ }
+}
+
+// This code generalizes `extend_with_{element,default}`.
+//
+// `next` produces a value for every slot except the final one; `last`
+// consumes the generator to produce the final value, letting `ExtendElement`
+// hand over its stored value by move instead of cloning it one extra time.
+trait ExtendWith<T> {
+ fn next(&mut self) -> T;
+ fn last(self) -> T;
+}
+
+// Generator that clones a single stored element for each slot.
+struct ExtendElement<T>(T);
+impl<T: Clone> ExtendWith<T> for ExtendElement<T> {
+ fn next(&mut self) -> T {
+ self.0.clone()
+ }
+ fn last(self) -> T {
+ // Move the stored value out; no clone needed for the final slot.
+ self.0
+ }
+}
+
+// Generator that produces `Default::default()` for every slot.
+struct ExtendDefault;
+impl<T: Default> ExtendWith<T> for ExtendDefault {
+ fn next(&mut self) -> T {
+ Default::default()
+ }
+ fn last(self) -> T {
+ Default::default()
+ }
+}
+
+// Generator that calls a user-supplied closure for every slot.
+struct ExtendFunc<F>(F);
+impl<T, F: FnMut() -> T> ExtendWith<T> for ExtendFunc<F> {
+ fn next(&mut self) -> T {
+ (self.0)()
+ }
+ fn last(mut self) -> T {
+ (self.0)()
+ }
+}
+
+impl<T> Vec<T> {
+ /// Extend the vector by `n` values, using the given generator.
+ fn extend_with<E: ExtendWith<T>>(&mut self, n: usize, mut value: E) {
+ self.reserve(n);
+
+ unsafe {
+ let mut ptr = self.as_mut_ptr().add(self.len());
+ // Use SetLenOnDrop to work around an aliasing-analysis limitation:
+ // the compiler may not realize that stores through `ptr` and
+ // updates of `self.len` don't alias, so the length is tracked in a
+ // local and written back when the guard drops (see issue #32155).
+ let mut local_len = SetLenOnDrop::new(&mut self.len);
+
+ // Write all elements except the last one
+ for _ in 1..n {
+ ptr::write(ptr, value.next());
+ ptr = ptr.offset(1);
+ // Increment the length in every step in case next() panics
+ local_len.increment_len(1);
+ }
+
+ if n > 0 {
+ // We can write the last element directly without cloning needlessly
+ ptr::write(ptr, value.last());
+ local_len.increment_len(1);
+ }
+
+ // len set by scope guard
+ }
+ }
+}
+
+// Set the length of the vec when the `SetLenOnDrop` value goes out of scope.
+//
+// The idea is: The length field in SetLenOnDrop is a local variable
+// that the optimizer will see does not alias with any stores through the Vec's data
+// pointer. This is a workaround for alias analysis issue #32155
+//
+// Because the write-back happens in `drop`, the length is updated even if
+// the surrounding code unwinds, so already-initialized elements are not lost.
+struct SetLenOnDrop<'a> {
+ len: &'a mut usize,
+ local_len: usize,
+}
+
+impl<'a> SetLenOnDrop<'a> {
+ #[inline]
+ fn new(len: &'a mut usize) -> Self {
+ // Start the local counter from the vector's current length.
+ SetLenOnDrop { local_len: *len, len }
+ }
+
+ #[inline]
+ fn increment_len(&mut self, increment: usize) {
+ self.local_len += increment;
+ }
+}
+
+impl Drop for SetLenOnDrop<'_> {
+ #[inline]
+ fn drop(&mut self) {
+ // Publish the accumulated length back into the vector.
+ *self.len = self.local_len;
+ }
+}
+
+impl<T: PartialEq> Vec<T> {
+ /// Removes consecutive repeated elements in the vector according to the
+ /// [`PartialEq`] trait implementation.
+ ///
+ /// If the vector is sorted, this removes all duplicates.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec![1, 2, 2, 3, 2];
+ ///
+ /// vec.dedup();
+ ///
+ /// assert_eq!(vec, [1, 2, 3, 2]);
+ /// ```
+ #[stable(feature = "rust1", since = "1.0.0")]
+ #[inline]
+ pub fn dedup(&mut self) {
+ // Plain equality is just `dedup_by` with `==` as the relation.
+ self.dedup_by(|a, b| a == b)
+ }
+}
+
+impl<T> Vec<T> {
+ /// Removes the first instance of `item` from the vector if the item exists.
+ ///
+ /// This method will be removed soon.
+ #[unstable(feature = "vec_remove_item", reason = "recently added", issue = "40062")]
+ #[rustc_deprecated(
+ reason = "Removing the first item equal to a needle is already easily possible \
+ with iterators and the current Vec methods. Furthermore, having a method for \
+ one particular case of removal (linear search, only the first item, no swap remove) \
+ but not for others is inconsistent. This method will be removed soon.",
+ since = "1.46.0"
+ )]
+ pub fn remove_item<V>(&mut self, item: &V) -> Option<T>
+ where
+ T: PartialEq<V>,
+ {
+ // Linear search for the first match; `?` returns `None` if absent.
+ let pos = self.iter().position(|x| *x == *item)?;
+ Some(self.remove(pos))
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Internal methods and functions
+////////////////////////////////////////////////////////////////////////////////
+
+#[doc(hidden)]
+#[stable(feature = "rust1", since = "1.0.0")]
+// Backing function for the `vec![elem; n]` macro; dispatches through
+// `SpecFromElem` so zeroable types can use a zeroed allocation fast path.
+pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
+ <T as SpecFromElem>::from_elem(elem, n)
+}
+
+// Specialization trait used for Vec::from_elem
+trait SpecFromElem: Sized {
+ fn from_elem(elem: Self, n: usize) -> Vec<Self>;
+}
+
+impl<T: Clone> SpecFromElem for T {
+ // Generic fallback: clone `elem` into each of the `n` slots.
+ default fn from_elem(elem: Self, n: usize) -> Vec<Self> {
+ let mut v = Vec::with_capacity(n);
+ v.extend_with(n, ExtendElement(elem));
+ v
+ }
+}
+
+impl SpecFromElem for i8 {
+ #[inline]
+ fn from_elem(elem: i8, n: usize) -> Vec<i8> {
+ if elem == 0 {
+ // Zero fill: ask the allocator for already-zeroed memory.
+ return Vec { buf: RawVec::with_capacity_zeroed(n), len: n };
+ }
+ unsafe {
+ // Non-zero byte fill: a single `memset` (write_bytes) then set_len.
+ let mut v = Vec::with_capacity(n);
+ ptr::write_bytes(v.as_mut_ptr(), elem as u8, n);
+ v.set_len(n);
+ v
+ }
+ }
+}
+
+impl SpecFromElem for u8 {
+ #[inline]
+ fn from_elem(elem: u8, n: usize) -> Vec<u8> {
+ if elem == 0 {
+ // Zero fill: ask the allocator for already-zeroed memory.
+ return Vec { buf: RawVec::with_capacity_zeroed(n), len: n };
+ }
+ unsafe {
+ // Non-zero byte fill: a single `memset` (write_bytes) then set_len.
+ let mut v = Vec::with_capacity(n);
+ ptr::write_bytes(v.as_mut_ptr(), elem, n);
+ v.set_len(n);
+ v
+ }
+ }
+}
+
+impl<T: Clone + IsZero> SpecFromElem for T {
+ #[inline]
+ fn from_elem(elem: T, n: usize) -> Vec<T> {
+ if elem.is_zero() {
+ // All-zero bit pattern: use a zeroed allocation instead of cloning.
+ return Vec { buf: RawVec::with_capacity_zeroed(n), len: n };
+ }
+ let mut v = Vec::with_capacity(n);
+ v.extend_with(n, ExtendElement(elem));
+ v
+ }
+}
+
+// `#[rustc_specialization_trait]` marks this trait as usable as a bound in a
+// specializing impl (the `T: Clone + IsZero` impl above refines the plain
+// `T: Clone` default impl).
+//
+// SAFETY contract for implementors: `is_zero` returning `true` must mean the
+// value's bit representation is all zeroes, since it gates zeroed allocation.
+#[rustc_specialization_trait]
+unsafe trait IsZero {
+ /// Whether this value is zero
+ fn is_zero(&self) -> bool;
+}
+
+// Helper to stamp out `IsZero` impls for primitives from a comparison closure.
+macro_rules! impl_is_zero {
+ ($t:ty, $is_zero:expr) => {
+ unsafe impl IsZero for $t {
+ #[inline]
+ fn is_zero(&self) -> bool {
+ $is_zero(*self)
+ }
+ }
+ };
+}
+
+impl_is_zero!(i16, |x| x == 0);
+impl_is_zero!(i32, |x| x == 0);
+impl_is_zero!(i64, |x| x == 0);
+impl_is_zero!(i128, |x| x == 0);
+impl_is_zero!(isize, |x| x == 0);
+
+impl_is_zero!(u16, |x| x == 0);
+impl_is_zero!(u32, |x| x == 0);
+impl_is_zero!(u64, |x| x == 0);
+impl_is_zero!(u128, |x| x == 0);
+impl_is_zero!(usize, |x| x == 0);
+
+impl_is_zero!(bool, |x| x == false);
+impl_is_zero!(char, |x| x == '\0');
+
+// Floats compare by bit pattern: `-0.0 == 0.0` but its bits are not all zero,
+// so `to_bits` is used rather than `==`.
+impl_is_zero!(f32, |x: f32| x.to_bits() == 0);
+impl_is_zero!(f64, |x: f64| x.to_bits() == 0);
+
+// Null raw pointers have an all-zero representation.
+unsafe impl<T> IsZero for *const T {
+ #[inline]
+ fn is_zero(&self) -> bool {
+ (*self).is_null()
+ }
+}
+
+unsafe impl<T> IsZero for *mut T {
+ #[inline]
+ fn is_zero(&self) -> bool {
+ (*self).is_null()
+ }
+}
+
+// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
+// For fat pointers, the bytes that would be the pointer metadata in the `Some`
+// variant are padding in the `None` variant, so ignoring them and
+// zero-initializing instead is ok.
+// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of
+// `SpecFromElem`.
+
+unsafe impl<T: ?Sized> IsZero for Option<&T> {
+ #[inline]
+ fn is_zero(&self) -> bool {
+ self.is_none()
+ }
+}
+
+unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
+ #[inline]
+ fn is_zero(&self) -> bool {
+ self.is_none()
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Common trait implementations for Vec
+////////////////////////////////////////////////////////////////////////////////
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ops::Deref for Vec<T> {
+ type Target = [T];
+
+ fn deref(&self) -> &[T] {
+ // SAFETY: the first `self.len` elements of the buffer are initialized
+ // (Vec's core invariant), so viewing them as a slice is sound.
+ unsafe { slice::from_raw_parts(self.as_ptr(), self.len) }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ops::DerefMut for Vec<T> {
+ fn deref_mut(&mut self) -> &mut [T] {
+ // SAFETY: same invariant as `deref`, with unique access via `&mut self`.
+ unsafe { slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> Clone for Vec<T> {
+ #[cfg(not(test))]
+ fn clone(&self) -> Vec<T> {
+ <[T]>::to_vec(&**self)
+ }
+
+ // HACK(japaric): with cfg(test) the inherent `[T]::to_vec` method, which is
+ // required for this method definition, is not available. Instead use the
+ // `slice::to_vec` function which is only available with cfg(test)
+ // NB see the slice::hack module in slice.rs for more information
+ #[cfg(test)]
+ fn clone(&self) -> Vec<T> {
+ crate::slice::to_vec(&**self)
+ }
+
+ fn clone_from(&mut self, other: &Vec<T>) {
+ // `clone_into` can reuse `self`'s existing allocation when possible,
+ // which is why this is overridden rather than using the default.
+ other.as_slice().clone_into(self);
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Hash> Hash for Vec<T> {
+ #[inline]
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ // Delegates to the slice impl so a Vec and a slice with the same
+ // contents hash identically.
+ Hash::hash(&**self, state)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented(
+ message = "vector indices are of type `usize` or ranges of `usize`",
+ label = "vector indices are of type `usize` or ranges of `usize`"
+)]
+impl<T, I: SliceIndex<[T]>> Index<I> for Vec<T> {
+ type Output = I::Output;
+
+ #[inline]
+ fn index(&self, index: I) -> &Self::Output {
+ // Deref to `[T]` and reuse the slice's indexing (including panics).
+ Index::index(&**self, index)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+#[rustc_on_unimplemented(
+ message = "vector indices are of type `usize` or ranges of `usize`",
+ label = "vector indices are of type `usize` or ranges of `usize`"
+)]
+impl<T, I: SliceIndex<[T]>> IndexMut<I> for Vec<T> {
+ #[inline]
+ fn index_mut(&mut self, index: I) -> &mut Self::Output {
+ IndexMut::index_mut(&mut **self, index)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> FromIterator<T> for Vec<T> {
+ #[inline]
+ fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
+ // Dispatch through `SpecExtend` so specialized sources (e.g.
+ // `vec::IntoIter`, `TrustedLen` iterators) can take faster paths.
+ <Self as SpecExtend<T, I::IntoIter>>::from_iter(iter.into_iter())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> IntoIterator for Vec<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+
+ /// Creates a consuming iterator, that is, one that moves each value out of
+ /// the vector (from start to end). The vector cannot be used after calling
+ /// this.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let v = vec!["a".to_string(), "b".to_string()];
+ /// for s in v.into_iter() {
+ ///     // s has type String, not &String
+ ///     println!("{}", s);
+ /// }
+ /// ```
+ #[inline]
+ fn into_iter(self) -> IntoIter<T> {
+ unsafe {
+ // ManuallyDrop stops `self`'s destructor from freeing the buffer;
+ // ownership of the allocation is transferred to the IntoIter.
+ let mut me = ManuallyDrop::new(self);
+ let begin = me.as_mut_ptr();
+ let end = if mem::size_of::<T>() == 0 {
+ // For ZSTs, pointer arithmetic in units of T is a no-op, so the
+ // remaining count is encoded via a byte-wise (arith) offset.
+ arith_offset(begin as *const i8, me.len() as isize) as *const T
+ } else {
+ begin.add(me.len()) as *const T
+ };
+ let cap = me.buf.capacity();
+ IntoIter {
+ buf: NonNull::new_unchecked(begin),
+ phantom: PhantomData,
+ cap,
+ ptr: begin,
+ end,
+ }
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a Vec<T> {
+ type Item = &'a T;
+ type IntoIter = slice::Iter<'a, T>;
+
+ fn into_iter(self) -> slice::Iter<'a, T> {
+ self.iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> IntoIterator for &'a mut Vec<T> {
+ type Item = &'a mut T;
+ type IntoIter = slice::IterMut<'a, T>;
+
+ fn into_iter(self) -> slice::IterMut<'a, T> {
+ self.iter_mut()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Extend<T> for Vec<T> {
+ #[inline]
+ fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
+ <Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
+ }
+
+ #[inline]
+ fn extend_one(&mut self, item: T) {
+ self.push(item);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+}
+
+// Specialization trait used for Vec::from_iter and Vec::extend
+trait SpecExtend<T, I> {
+ fn from_iter(iter: I) -> Self;
+ fn spec_extend(&mut self, iter: I);
+}
+
+impl<T, I> SpecExtend<T, I> for Vec<T>
+where
+ I: Iterator<Item = T>,
+{
+ default fn from_iter(mut iterator: I) -> Self {
+ // Unroll the first iteration, as the vector is going to be
+ // expanded on this iteration in every case when the iterable is not
+ // empty, but the loop in extend_desugared() is not going to see the
+ // vector being full in the few subsequent loop iterations.
+ // So we get better branch prediction.
+ let mut vector = match iterator.next() {
+ None => return Vec::new(),
+ Some(element) => {
+ let (lower, _) = iterator.size_hint();
+ let mut vector = Vec::with_capacity(lower.saturating_add(1));
+ unsafe {
+ ptr::write(vector.as_mut_ptr(), element);
+ vector.set_len(1);
+ }
+ vector
+ }
+ };
+ <Vec<T> as SpecExtend<T, I>>::spec_extend(&mut vector, iterator);
+ vector
+ }
+
+ default fn spec_extend(&mut self, iter: I) {
+ self.extend_desugared(iter)
+ }
+}
+
+impl<T, I> SpecExtend<T, I> for Vec<T>
+where
+ I: TrustedLen<Item = T>,
+{
+ default fn from_iter(iterator: I) -> Self {
+ let mut vector = Vec::new();
+ vector.spec_extend(iterator);
+ vector
+ }
+
+ default fn spec_extend(&mut self, iterator: I) {
+ // This is the case for a TrustedLen iterator.
+ let (low, high) = iterator.size_hint();
+ if let Some(high_value) = high {
+ debug_assert_eq!(
+ low,
+ high_value,
+ "TrustedLen iterator's size hint is not exact: {:?}",
+ (low, high)
+ );
+ }
+ if let Some(additional) = high {
+ self.reserve(additional);
+ unsafe {
+ let mut ptr = self.as_mut_ptr().add(self.len());
+ let mut local_len = SetLenOnDrop::new(&mut self.len);
+ iterator.for_each(move |element| {
+ ptr::write(ptr, element);
+ ptr = ptr.offset(1);
+ // NB can't overflow since we would have had to alloc the address space
+ local_len.increment_len(1);
+ });
+ }
+ } else {
+ self.extend_desugared(iterator)
+ }
+ }
+}
+
+impl<T> SpecExtend<T, IntoIter<T>> for Vec<T> {
+ fn from_iter(iterator: IntoIter<T>) -> Self {
+ // A common case is passing a vector into a function which immediately
+ // re-collects into a vector. We can short circuit this if the IntoIter
+ // has not been advanced at all.
+ if iterator.buf.as_ptr() as *const _ == iterator.ptr {
+ unsafe {
+ let it = ManuallyDrop::new(iterator);
+ Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap)
+ }
+ } else {
+ let mut vector = Vec::new();
+ vector.spec_extend(iterator);
+ vector
+ }
+ }
+
+ fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
+ unsafe {
+ self.append_elements(iterator.as_slice() as _);
+ }
+ iterator.ptr = iterator.end;
+ }
+}
+
+impl<'a, T: 'a, I> SpecExtend<&'a T, I> for Vec<T>
+where
+ I: Iterator<Item = &'a T>,
+ T: Clone,
+{
+ default fn from_iter(iterator: I) -> Self {
+ SpecExtend::from_iter(iterator.cloned())
+ }
+
+ default fn spec_extend(&mut self, iterator: I) {
+ self.spec_extend(iterator.cloned())
+ }
+}
+
+impl<'a, T: 'a> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T>
+where
+ T: Copy,
+{
+ fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
+ let slice = iterator.as_slice();
+ self.reserve(slice.len());
+ unsafe {
+ let len = self.len();
+ let dst_slice = slice::from_raw_parts_mut(self.as_mut_ptr().add(len), slice.len());
+ dst_slice.copy_from_slice(slice);
+ self.set_len(len + slice.len());
+ }
+ }
+}
+
+impl<T> Vec<T> {
+ fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
+ // This is the case for a general iterator.
+ //
+ // This function should be the moral equivalent of:
+ //
+ // for item in iterator {
+ // self.push(item);
+ // }
+ while let Some(element) = iterator.next() {
+ let len = self.len();
+ if len == self.capacity() {
+ let (lower, _) = iterator.size_hint();
+ self.reserve(lower.saturating_add(1));
+ }
+ unsafe {
+ ptr::write(self.as_mut_ptr().add(len), element);
+ // NB can't overflow since we would have had to alloc the address space
+ self.set_len(len + 1);
+ }
+ }
+ }
+
+ /// Creates a splicing iterator that replaces the specified range in the vector
+ /// with the given `replace_with` iterator and yields the removed items.
+ /// `replace_with` does not need to be the same length as `range`.
+ ///
+ /// The element range is removed even if the iterator is not consumed until the end.
+ ///
+ /// It is unspecified how many elements are removed from the vector
+ /// if the `Splice` value is leaked.
+ ///
+ /// The input iterator `replace_with` is only consumed when the `Splice` value is dropped.
+ ///
+ /// This is optimal if:
+ ///
+ /// * The tail (elements in the vector after `range`) is empty,
+ /// * or `replace_with` yields fewer elements than `range`’s length
+ /// * or the lower bound of its `size_hint()` is exact.
+ ///
+ /// Otherwise, a temporary vector is allocated and the tail is moved twice.
+ ///
+ /// # Panics
+ ///
+ /// Panics if the starting point is greater than the end point or if
+ /// the end point is greater than the length of the vector.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut v = vec![1, 2, 3];
+ /// let new = [7, 8];
+ /// let u: Vec<_> = v.splice(..2, new.iter().cloned()).collect();
+ /// assert_eq!(v, &[7, 8, 3]);
+ /// assert_eq!(u, &[1, 2]);
+ /// ```
+ #[inline]
+ #[stable(feature = "vec_splice", since = "1.21.0")]
+ pub fn splice<R, I>(&mut self, range: R, replace_with: I) -> Splice<'_, I::IntoIter>
+ where
+ R: RangeBounds<usize>,
+ I: IntoIterator<Item = T>,
+ {
+ Splice { drain: self.drain(range), replace_with: replace_with.into_iter() }
+ }
+
+ /// Creates an iterator which uses a closure to determine if an element should be removed.
+ ///
+ /// If the closure returns true, then the element is removed and yielded.
+ /// If the closure returns false, the element will remain in the vector and will not be yielded
+ /// by the iterator.
+ ///
+ /// Using this method is equivalent to the following code:
+ ///
+ /// ```
+ /// # let some_predicate = |x: &mut i32| { *x == 2 || *x == 3 || *x == 6 };
+ /// # let mut vec = vec![1, 2, 3, 4, 5, 6];
+ /// let mut i = 0;
+ /// while i != vec.len() {
+ /// if some_predicate(&mut vec[i]) {
+ /// let val = vec.remove(i);
+ /// // your code here
+ /// } else {
+ /// i += 1;
+ /// }
+ /// }
+ ///
+ /// # assert_eq!(vec, vec![1, 4, 5]);
+ /// ```
+ ///
+ /// But `drain_filter` is easier to use. `drain_filter` is also more efficient,
+ /// because it can backshift the elements of the array in bulk.
+ ///
+ /// Note that `drain_filter` also lets you mutate every element in the filter closure,
+ /// regardless of whether you choose to keep or remove it.
+ ///
+ ///
+ /// # Examples
+ ///
+ /// Splitting an array into evens and odds, reusing the original allocation:
+ ///
+ /// ```
+ /// #![feature(drain_filter)]
+ /// let mut numbers = vec![1, 2, 3, 4, 5, 6, 8, 9, 11, 13, 14, 15];
+ ///
+ /// let evens = numbers.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ /// let odds = numbers;
+ ///
+ /// assert_eq!(evens, vec![2, 4, 6, 8, 14]);
+ /// assert_eq!(odds, vec![1, 3, 5, 9, 11, 13, 15]);
+ /// ```
+ #[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+ pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, T, F>
+ where
+ F: FnMut(&mut T) -> bool,
+ {
+ let old_len = self.len();
+
+ // Guard against us getting leaked (leak amplification)
+ unsafe {
+ self.set_len(0);
+ }
+
+ DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false }
+ }
+}
+
+/// Extend implementation that copies elements out of references before pushing them onto the Vec.
+///
+/// This implementation is specialized for slice iterators, where it uses [`copy_from_slice`] to
+/// append the entire slice at once.
+///
+/// [`copy_from_slice`]: ../../std/primitive.slice.html#method.copy_from_slice
+#[stable(feature = "extend_ref", since = "1.2.0")]
+impl<'a, T: 'a + Copy> Extend<&'a T> for Vec<T> {
+ fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
+ self.spec_extend(iter.into_iter())
+ }
+
+ #[inline]
+ fn extend_one(&mut self, &item: &'a T) {
+ self.push(item);
+ }
+
+ #[inline]
+ fn extend_reserve(&mut self, additional: usize) {
+ self.reserve(additional);
+ }
+}
+
+macro_rules! __impl_slice_eq1 {
+ ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => {
+ #[$stability]
+ impl<A, B, $($vars)*> PartialEq<$rhs> for $lhs
+ where
+ A: PartialEq<B>,
+ $($ty: $bound)?
+ {
+ #[inline]
+ fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] }
+ #[inline]
+ fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] }
+ }
+ }
+}
+
+__impl_slice_eq1! { [] Vec<A>, Vec<B>, #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] Vec<A>, &[B], #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] Vec<A>, &mut [B], #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] &[A], Vec<B>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
+__impl_slice_eq1! { [] &mut [A], Vec<B>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
+__impl_slice_eq1! { [] Cow<'_, [A]>, Vec<B> where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] Cow<'_, [A]>, &[B] where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [] Cow<'_, [A]>, &mut [B] where A: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [const N: usize] Vec<A>, [B; N], #[stable(feature = "rust1", since = "1.0.0")] }
+__impl_slice_eq1! { [const N: usize] Vec<A>, &[B; N], #[stable(feature = "rust1", since = "1.0.0")] }
+
+// NOTE: some less important impls are omitted to reduce code bloat
+// FIXME(Centril): Reconsider this?
+//__impl_slice_eq1! { [const N: usize] Vec<A>, &mut [B; N], }
+//__impl_slice_eq1! { [const N: usize] [A; N], Vec<B>, }
+//__impl_slice_eq1! { [const N: usize] &[A; N], Vec<B>, }
+//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec<B>, }
+//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], }
+//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], }
+//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], }
+
+/// Implements comparison of vectors, lexicographically.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: PartialOrd> PartialOrd for Vec<T> {
+ #[inline]
+ fn partial_cmp(&self, other: &Vec<T>) -> Option<Ordering> {
+ PartialOrd::partial_cmp(&**self, &**other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Eq> Eq for Vec<T> {}
+
+/// Implements ordering of vectors, lexicographically.
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Ord> Ord for Vec<T> {
+ #[inline]
+ fn cmp(&self, other: &Vec<T>) -> Ordering {
+ Ord::cmp(&**self, &**other)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for Vec<T> {
+ fn drop(&mut self) {
+ unsafe {
+ // use drop for [T]
+ // use a raw slice to refer to the elements of the vector as weakest necessary type;
+ // could avoid questions of validity in certain cases
+ ptr::drop_in_place(ptr::slice_from_raw_parts_mut(self.as_mut_ptr(), self.len))
+ }
+ // RawVec handles deallocation
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Default for Vec<T> {
+ /// Creates an empty `Vec<T>`.
+ fn default() -> Vec<T> {
+ Vec::new()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: fmt::Debug> fmt::Debug for Vec<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Debug::fmt(&**self, f)
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> AsRef<Vec<T>> for Vec<T> {
+ fn as_ref(&self) -> &Vec<T> {
+ self
+ }
+}
+
+#[stable(feature = "vec_as_mut", since = "1.5.0")]
+impl<T> AsMut<Vec<T>> for Vec<T> {
+ fn as_mut(&mut self) -> &mut Vec<T> {
+ self
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> AsRef<[T]> for Vec<T> {
+ fn as_ref(&self) -> &[T] {
+ self
+ }
+}
+
+#[stable(feature = "vec_as_mut", since = "1.5.0")]
+impl<T> AsMut<[T]> for Vec<T> {
+ fn as_mut(&mut self) -> &mut [T] {
+ self
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T: Clone> From<&[T]> for Vec<T> {
+ #[cfg(not(test))]
+ fn from(s: &[T]) -> Vec<T> {
+ s.to_vec()
+ }
+ #[cfg(test)]
+ fn from(s: &[T]) -> Vec<T> {
+ crate::slice::to_vec(s)
+ }
+}
+
+#[stable(feature = "vec_from_mut", since = "1.19.0")]
+impl<T: Clone> From<&mut [T]> for Vec<T> {
+ #[cfg(not(test))]
+ fn from(s: &mut [T]) -> Vec<T> {
+ s.to_vec()
+ }
+ #[cfg(test)]
+ fn from(s: &mut [T]) -> Vec<T> {
+ crate::slice::to_vec(s)
+ }
+}
+
+#[stable(feature = "vec_from_array", since = "1.44.0")]
+impl<T, const N: usize> From<[T; N]> for Vec<T> {
+ #[cfg(not(test))]
+ fn from(s: [T; N]) -> Vec<T> {
+ <[T]>::into_vec(box s)
+ }
+ #[cfg(test)]
+ fn from(s: [T; N]) -> Vec<T> {
+ crate::slice::into_vec(box s)
+ }
+}
+
+#[stable(feature = "vec_from_cow_slice", since = "1.14.0")]
+impl<'a, T> From<Cow<'a, [T]>> for Vec<T>
+where
+ [T]: ToOwned<Owned = Vec<T>>,
+{
+ fn from(s: Cow<'a, [T]>) -> Vec<T> {
+ s.into_owned()
+ }
+}
+
+// note: test pulls in libstd, which causes errors here
+#[cfg(not(test))]
+#[stable(feature = "vec_from_box", since = "1.18.0")]
+impl<T> From<Box<[T]>> for Vec<T> {
+ fn from(s: Box<[T]>) -> Vec<T> {
+ s.into_vec()
+ }
+}
+
+// note: test pulls in libstd, which causes errors here
+#[cfg(not(test))]
+#[stable(feature = "box_from_vec", since = "1.20.0")]
+impl<T> From<Vec<T>> for Box<[T]> {
+ fn from(v: Vec<T>) -> Box<[T]> {
+ v.into_boxed_slice()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl From<&str> for Vec<u8> {
+ fn from(s: &str) -> Vec<u8> {
+ From::from(s.as_bytes())
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Clone-on-write
+////////////////////////////////////////////////////////////////////////////////
+
+#[stable(feature = "cow_from_vec", since = "1.8.0")]
+impl<'a, T: Clone> From<&'a [T]> for Cow<'a, [T]> {
+ fn from(s: &'a [T]) -> Cow<'a, [T]> {
+ Cow::Borrowed(s)
+ }
+}
+
+#[stable(feature = "cow_from_vec", since = "1.8.0")]
+impl<'a, T: Clone> From<Vec<T>> for Cow<'a, [T]> {
+ fn from(v: Vec<T>) -> Cow<'a, [T]> {
+ Cow::Owned(v)
+ }
+}
+
+#[stable(feature = "cow_from_vec_ref", since = "1.28.0")]
+impl<'a, T: Clone> From<&'a Vec<T>> for Cow<'a, [T]> {
+ fn from(v: &'a Vec<T>) -> Cow<'a, [T]> {
+ Cow::Borrowed(v.as_slice())
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<'a, T> FromIterator<T> for Cow<'a, [T]>
+where
+ T: Clone,
+{
+ fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
+ Cow::Owned(FromIterator::from_iter(it))
+ }
+}
+
+////////////////////////////////////////////////////////////////////////////////
+// Iterators
+////////////////////////////////////////////////////////////////////////////////
+
+/// An iterator that moves out of a vector.
+///
+/// This `struct` is created by the `into_iter` method on [`Vec`] (provided
+/// by the [`IntoIterator`] trait).
+///
+/// [`Vec`]: struct.Vec.html
+/// [`IntoIterator`]: ../../std/iter/trait.IntoIterator.html
+#[stable(feature = "rust1", since = "1.0.0")]
+pub struct IntoIter<T> {
+ buf: NonNull<T>,
+ phantom: PhantomData<T>,
+ cap: usize,
+ ptr: *const T,
+ end: *const T,
+}
+
+#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
+impl<T: fmt::Debug> fmt::Debug for IntoIter<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
+ }
+}
+
+impl<T> IntoIter<T> {
+ /// Returns the remaining items of this iterator as a slice.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let vec = vec!['a', 'b', 'c'];
+ /// let mut into_iter = vec.into_iter();
+ /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+ /// let _ = into_iter.next().unwrap();
+ /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
+ /// ```
+ #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
+ pub fn as_slice(&self) -> &[T] {
+ unsafe { slice::from_raw_parts(self.ptr, self.len()) }
+ }
+
+ /// Returns the remaining items of this iterator as a mutable slice.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let vec = vec!['a', 'b', 'c'];
+ /// let mut into_iter = vec.into_iter();
+ /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+ /// into_iter.as_mut_slice()[2] = 'z';
+ /// assert_eq!(into_iter.next().unwrap(), 'a');
+ /// assert_eq!(into_iter.next().unwrap(), 'b');
+ /// assert_eq!(into_iter.next().unwrap(), 'z');
+ /// ```
+ #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
+ pub fn as_mut_slice(&mut self) -> &mut [T] {
+ unsafe { &mut *self.as_raw_mut_slice() }
+ }
+
+ fn as_raw_mut_slice(&mut self) -> *mut [T] {
+ ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
+ }
+}
+
+#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
+impl<T> AsRef<[T]> for IntoIter<T> {
+ fn as_ref(&self) -> &[T] {
+ self.as_slice()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Send> Send for IntoIter<T> {}
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<T: Sync> Sync for IntoIter<T> {}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ unsafe {
+ if self.ptr as *const _ == self.end {
+ None
+ } else {
+ if mem::size_of::<T>() == 0 {
+ // purposefully don't use 'ptr.offset' because for
+ // vectors with 0-size elements this would return the
+ // same pointer.
+ self.ptr = arith_offset(self.ptr as *const i8, 1) as *mut T;
+
+ // Make up a value of this ZST.
+ Some(mem::zeroed())
+ } else {
+ let old = self.ptr;
+ self.ptr = self.ptr.offset(1);
+
+ Some(ptr::read(old))
+ }
+ }
+ }
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let exact = if mem::size_of::<T>() == 0 {
+ (self.end as usize).wrapping_sub(self.ptr as usize)
+ } else {
+ unsafe { self.end.offset_from(self.ptr) as usize }
+ };
+ (exact, Some(exact))
+ }
+
+ #[inline]
+ fn count(self) -> usize {
+ self.len()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> DoubleEndedIterator for IntoIter<T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ unsafe {
+ if self.end == self.ptr {
+ None
+ } else {
+ if mem::size_of::<T>() == 0 {
+ // See above for why 'ptr.offset' isn't used
+ self.end = arith_offset(self.end as *const i8, -1) as *mut T;
+
+ // Make up a value of this ZST.
+ Some(mem::zeroed())
+ } else {
+ self.end = self.end.offset(-1);
+
+ Some(ptr::read(self.end))
+ }
+ }
+ }
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<T> ExactSizeIterator for IntoIter<T> {
+ fn is_empty(&self) -> bool {
+ self.ptr == self.end
+ }
+}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for IntoIter<T> {}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T> TrustedLen for IntoIter<T> {}
+
+#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
+impl<T: Clone> Clone for IntoIter<T> {
+ fn clone(&self) -> IntoIter<T> {
+ self.as_slice().to_owned().into_iter()
+ }
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+unsafe impl<#[may_dangle] T> Drop for IntoIter<T> {
+ fn drop(&mut self) {
+ struct DropGuard<'a, T>(&'a mut IntoIter<T>);
+
+ impl<T> Drop for DropGuard<'_, T> {
+ fn drop(&mut self) {
+ // RawVec handles deallocation
+ let _ = unsafe { RawVec::from_raw_parts(self.0.buf.as_ptr(), self.0.cap) };
+ }
+ }
+
+ let guard = DropGuard(self);
+ // destroy the remaining elements
+ unsafe {
+ ptr::drop_in_place(guard.0.as_raw_mut_slice());
+ }
+ // now `guard` will be dropped and do the rest
+ }
+}
+
+/// A draining iterator for `Vec<T>`.
+///
+/// This `struct` is created by the [`drain`] method on [`Vec`].
+///
+/// [`drain`]: struct.Vec.html#method.drain
+/// [`Vec`]: struct.Vec.html
+#[stable(feature = "drain", since = "1.6.0")]
+pub struct Drain<'a, T: 'a> {
+ /// Index of tail to preserve
+ tail_start: usize,
+ /// Length of tail
+ tail_len: usize,
+ /// Current remaining range to remove
+ iter: slice::Iter<'a, T>,
+ vec: NonNull<Vec<T>>,
+}
+
+#[stable(feature = "collection_debug", since = "1.17.0")]
+impl<T: fmt::Debug> fmt::Debug for Drain<'_, T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
+ }
+}
+
+impl<'a, T> Drain<'a, T> {
+ /// Returns the remaining items of this iterator as a slice.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let mut vec = vec!['a', 'b', 'c'];
+ /// let mut drain = vec.drain(..);
+ /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
+ /// let _ = drain.next().unwrap();
+ /// assert_eq!(drain.as_slice(), &['b', 'c']);
+ /// ```
+ #[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
+ pub fn as_slice(&self) -> &[T] {
+ self.iter.as_slice()
+ }
+}
+
+#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
+impl<'a, T> AsRef<[T]> for Drain<'a, T> {
+ fn as_ref(&self) -> &[T] {
+ self.as_slice()
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Sync> Sync for Drain<'_, T> {}
+#[stable(feature = "drain", since = "1.6.0")]
+unsafe impl<T: Send> Send for Drain<'_, T> {}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Iterator for Drain<'_, T> {
+ type Item = T;
+
+ #[inline]
+ fn next(&mut self) -> Option<T> {
+ self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.iter.size_hint()
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> DoubleEndedIterator for Drain<'_, T> {
+ #[inline]
+ fn next_back(&mut self) -> Option<T> {
+ self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> Drop for Drain<'_, T> {
+ fn drop(&mut self) {
+ /// Continues dropping the remaining elements in the `Drain`, then moves back the
+ /// un-`Drain`ed elements to restore the original `Vec`.
+ struct DropGuard<'r, 'a, T>(&'r mut Drain<'a, T>);
+
+ impl<'r, 'a, T> Drop for DropGuard<'r, 'a, T> {
+ fn drop(&mut self) {
+ // Continue the same loop we have below. If the loop already finished, this does
+ // nothing.
+ self.0.for_each(drop);
+
+ if self.0.tail_len > 0 {
+ unsafe {
+ let source_vec = self.0.vec.as_mut();
+ // memmove back untouched tail, update to new length
+ let start = source_vec.len();
+ let tail = self.0.tail_start;
+ if tail != start {
+ let src = source_vec.as_ptr().add(tail);
+ let dst = source_vec.as_mut_ptr().add(start);
+ ptr::copy(src, dst, self.0.tail_len);
+ }
+ source_vec.set_len(start + self.0.tail_len);
+ }
+ }
+ }
+ }
+
+ // exhaust self first
+ while let Some(item) = self.next() {
+ let guard = DropGuard(self);
+ drop(item);
+ mem::forget(guard);
+ }
+
+ // Drop a `DropGuard` to move back the non-drained tail of `self`.
+ DropGuard(self);
+ }
+}
+
+#[stable(feature = "drain", since = "1.6.0")]
+impl<T> ExactSizeIterator for Drain<'_, T> {
+ fn is_empty(&self) -> bool {
+ self.iter.is_empty()
+ }
+}
+
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T> TrustedLen for Drain<'_, T> {}
+
+#[stable(feature = "fused", since = "1.26.0")]
+impl<T> FusedIterator for Drain<'_, T> {}
+
+/// A splicing iterator for `Vec`.
+///
+/// This struct is created by the [`splice()`] method on [`Vec`]. See its
+/// documentation for more.
+///
+/// [`splice()`]: struct.Vec.html#method.splice
+/// [`Vec`]: struct.Vec.html
+#[derive(Debug)]
+#[stable(feature = "vec_splice", since = "1.21.0")]
+pub struct Splice<'a, I: Iterator + 'a> {
+ drain: Drain<'a, I::Item>,
+ replace_with: I,
+}
+
+#[stable(feature = "vec_splice", since = "1.21.0")]
+impl<I: Iterator> Iterator for Splice<'_, I> {
+ type Item = I::Item;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.drain.next()
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.drain.size_hint()
+ }
+}
+
+#[stable(feature = "vec_splice", since = "1.21.0")]
+impl<I: Iterator> DoubleEndedIterator for Splice<'_, I> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.drain.next_back()
+ }
+}
+
+#[stable(feature = "vec_splice", since = "1.21.0")]
+impl<I: Iterator> ExactSizeIterator for Splice<'_, I> {}
+
+#[stable(feature = "vec_splice", since = "1.21.0")]
+impl<I: Iterator> Drop for Splice<'_, I> {
+ fn drop(&mut self) {
+ self.drain.by_ref().for_each(drop);
+
+ unsafe {
+ if self.drain.tail_len == 0 {
+ self.drain.vec.as_mut().extend(self.replace_with.by_ref());
+ return;
+ }
+
+ // First fill the range left by drain().
+ if !self.drain.fill(&mut self.replace_with) {
+ return;
+ }
+
+ // There may be more elements. Use the lower bound as an estimate.
+ // FIXME: Is the upper bound a better guess? Or something else?
+ let (lower_bound, _upper_bound) = self.replace_with.size_hint();
+ if lower_bound > 0 {
+ self.drain.move_tail(lower_bound);
+ if !self.drain.fill(&mut self.replace_with) {
+ return;
+ }
+ }
+
+ // Collect any remaining elements.
+ // This is a zero-length vector which does not allocate if `lower_bound` was exact.
+ let mut collected = self.replace_with.by_ref().collect::<Vec<I::Item>>().into_iter();
+ // Now we have an exact count.
+ if collected.len() > 0 {
+ self.drain.move_tail(collected.len());
+ let filled = self.drain.fill(&mut collected);
+ debug_assert!(filled);
+ debug_assert_eq!(collected.len(), 0);
+ }
+ }
+ // Let `Drain::drop` move the tail back if necessary and restore `vec.len`.
+ }
+}
+
+/// Private helper methods for `Splice::drop`
+impl<T> Drain<'_, T> {
+ /// The range from `self.vec.len` to `self.tail_start` contains elements
+ /// that have been moved out.
+ /// Fill that range as much as possible with new elements from the `replace_with` iterator.
+ /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.)
+ unsafe fn fill<I: Iterator<Item = T>>(&mut self, replace_with: &mut I) -> bool {
+ let vec = unsafe { self.vec.as_mut() };
+ let range_start = vec.len;
+ let range_end = self.tail_start;
+ let range_slice = unsafe {
+ slice::from_raw_parts_mut(vec.as_mut_ptr().add(range_start), range_end - range_start)
+ };
+
+ for place in range_slice {
+ if let Some(new_item) = replace_with.next() {
+ unsafe { ptr::write(place, new_item) };
+ vec.len += 1;
+ } else {
+ return false;
+ }
+ }
+ true
+ }
+
+ /// Makes room for inserting more elements before the tail.
+ unsafe fn move_tail(&mut self, additional: usize) {
+ let vec = unsafe { self.vec.as_mut() };
+ let len = self.tail_start + self.tail_len;
+ vec.buf.reserve(len, additional);
+
+ let new_tail_start = self.tail_start + additional;
+ unsafe {
+ let src = vec.as_ptr().add(self.tail_start);
+ let dst = vec.as_mut_ptr().add(new_tail_start);
+ ptr::copy(src, dst, self.tail_len);
+ }
+ self.tail_start = new_tail_start;
+ }
+}
+
+/// An iterator produced by calling `drain_filter` on Vec.
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+#[derive(Debug)]
+pub struct DrainFilter<'a, T, F>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ vec: &'a mut Vec<T>,
+ /// The index of the item that will be inspected by the next call to `next`.
+ idx: usize,
+ /// The number of items that have been drained (removed) thus far.
+ del: usize,
+ /// The original length of `vec` prior to draining.
+ old_len: usize,
+ /// The filter test predicate.
+ pred: F,
+ /// A flag that indicates a panic has occurred in the filter test prodicate.
+ /// This is used as a hint in the drop implementation to prevent consumption
+ /// of the remainder of the `DrainFilter`. Any unprocessed items will be
+ /// backshifted in the `vec`, but no further items will be dropped or
+ /// tested by the filter predicate.
+ panic_flag: bool,
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T, F> Iterator for DrainFilter<'_, T, F>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ type Item = T;
+
+ fn next(&mut self) -> Option<T> {
+ unsafe {
+ while self.idx < self.old_len {
+ let i = self.idx;
+ let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
+ self.panic_flag = true;
+ let drained = (self.pred)(&mut v[i]);
+ self.panic_flag = false;
+ // Update the index *after* the predicate is called. If the index
+ // is updated prior and the predicate panics, the element at this
+ // index would be leaked.
+ self.idx += 1;
+ if drained {
+ self.del += 1;
+ return Some(ptr::read(&v[i]));
+ } else if self.del > 0 {
+ let del = self.del;
+ let src: *const T = &v[i];
+ let dst: *mut T = &mut v[i - del];
+ ptr::copy_nonoverlapping(src, dst, 1);
+ }
+ }
+ None
+ }
+ }
+
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (0, Some(self.old_len - self.idx))
+ }
+}
+
+#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
+impl<T, F> Drop for DrainFilter<'_, T, F>
+where
+ F: FnMut(&mut T) -> bool,
+{
+ fn drop(&mut self) {
+ struct BackshiftOnDrop<'a, 'b, T, F>
+ where
+ F: FnMut(&mut T) -> bool,
+ {
+ drain: &'b mut DrainFilter<'a, T, F>,
+ }
+
+ impl<'a, 'b, T, F> Drop for BackshiftOnDrop<'a, 'b, T, F>
+ where
+ F: FnMut(&mut T) -> bool,
+ {
+ fn drop(&mut self) {
+ unsafe {
+ if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
+ // This is a pretty messed up state, and there isn't really an
+ // obviously right thing to do. We don't want to keep trying
+ // to execute `pred`, so we just backshift all the unprocessed
+ // elements and tell the vec that they still exist. The backshift
+ // is required to prevent a double-drop of the last successfully
+ // drained item prior to a panic in the predicate.
+ let ptr = self.drain.vec.as_mut_ptr();
+ let src = ptr.add(self.drain.idx);
+ let dst = src.sub(self.drain.del);
+ let tail_len = self.drain.old_len - self.drain.idx;
+ src.copy_to(dst, tail_len);
+ }
+ self.drain.vec.set_len(self.drain.old_len - self.drain.del);
+ }
+ }
+ }
+
+ let backshift = BackshiftOnDrop { drain: self };
+
+ // Attempt to consume any remaining elements if the filter predicate
+ // has not yet panicked. We'll backshift any remaining elements
+ // whether we've already panicked or if the consumption here panics.
+ if !backshift.drain.panic_flag {
+ backshift.drain.for_each(drop);
+ }
+ }
+}
diff --git a/library/alloc/tests/arc.rs b/library/alloc/tests/arc.rs
new file mode 100644
index 00000000000..c02ba267056
--- /dev/null
+++ b/library/alloc/tests/arc.rs
@@ -0,0 +1,197 @@
+// Tests for `Arc` and `Weak` (`library/alloc/tests/arc.rs`), covering
+// uninhabited payloads, unsized coercions, equality specialization, and
+// `shared_from_iter` (collecting iterators into `Arc<[T]>`).
+use std::any::Any;
+use std::cell::RefCell;
+use std::cmp::PartialEq;
+use std::iter::TrustedLen;
+use std::mem;
+use std::sync::{Arc, Weak};
+
+// A `Weak` to an uninhabited type must be constructible, clonable, and never
+// upgradable — before and after unsizing to a trait object.
+#[test]
+fn uninhabited() {
+ enum Void {}
+ let mut a = Weak::<Void>::new();
+ a = a.clone();
+ assert!(a.upgrade().is_none());
+
+ let mut a: Weak<dyn Any> = a; // Unsizing
+ a = a.clone();
+ assert!(a.upgrade().is_none());
+}
+
+#[test]
+fn slice() {
+ let a: Arc<[u32; 3]> = Arc::new([3, 2, 1]);
+ let a: Arc<[u32]> = a; // Unsizing
+ let b: Arc<[u32]> = Arc::from(&[3, 2, 1][..]); // Conversion
+ assert_eq!(a, b);
+
+ // Exercise is_dangling() with a DST
+ let mut a = Arc::downgrade(&a);
+ a = a.clone();
+ assert!(a.upgrade().is_some());
+}
+
+#[test]
+fn trait_object() {
+ let a: Arc<u32> = Arc::new(4);
+ let a: Arc<dyn Any> = a; // Unsizing
+
+ // Exercise is_dangling() with a DST
+ let mut a = Arc::downgrade(&a);
+ a = a.clone();
+ assert!(a.upgrade().is_some());
+
+ let mut b = Weak::<u32>::new();
+ b = b.clone();
+ assert!(b.upgrade().is_none());
+ let mut b: Weak<dyn Any> = b; // Unsizing
+ b = b.clone();
+ assert!(b.upgrade().is_none());
+}
+
+// NaN != NaN: `Arc` comparison must delegate to the value, not pointer identity,
+// for a non-`Eq` type like `f32`.
+#[test]
+fn float_nan_ne() {
+ let x = Arc::new(f32::NAN);
+ assert!(x != x);
+ assert!(!(x == x));
+}
+
+#[test]
+fn partial_eq() {
+ struct TestPEq(RefCell<usize>);
+ impl PartialEq for TestPEq {
+ fn eq(&self, other: &TestPEq) -> bool {
+ *self.0.borrow_mut() += 1;
+ *other.0.borrow_mut() += 1;
+ true
+ }
+ }
+ let x = Arc::new(TestPEq(RefCell::new(0)));
+ assert!(x == x);
+ assert!(!(x != x));
+ // Both comparisons (`==` and `!=`) call `eq` with `self` and `other` being
+ // the same cell, so each comparison adds 2 — total 4.
+ assert_eq!(*x.0.borrow(), 4);
+}
+
+#[test]
+fn eq() {
+ #[derive(Eq)]
+ struct TestEq(RefCell<usize>);
+ impl PartialEq for TestEq {
+ fn eq(&self, other: &TestEq) -> bool {
+ *self.0.borrow_mut() += 1;
+ *other.0.borrow_mut() += 1;
+ true
+ }
+ }
+ let x = Arc::new(TestEq(RefCell::new(0)));
+ assert!(x == x);
+ assert!(!(x != x));
+ // Counter stays 0: for `Eq` payloads the comparison is expected to
+ // short-circuit on pointer identity and never invoke `eq` here.
+ assert_eq!(*x.0.borrow(), 0);
+}
+
+// The test code below is identical to that in `rc.rs`.
+// For better maintainability we therefore define this type alias.
+type Rc<T> = Arc<T>;
+
+const SHARED_ITER_MAX: u16 = 100;
+
+// Compile-time check that `I` is `TrustedLen`.
+fn assert_trusted_len<I: TrustedLen>(_: &I) {}
+
+#[test]
+fn shared_from_iter_normal() {
+ // Exercise the base implementation for non-`TrustedLen` iterators.
+ {
+ // `Filter` is never `TrustedLen` since we don't
+ // know statically how many elements will be kept:
+ let iter = (0..SHARED_ITER_MAX).filter(|x| x % 2 == 0).map(Box::new);
+
+ // Collecting into a `Vec<T>` or `Rc<[T]>` should make no difference:
+ let vec = iter.clone().collect::<Vec<_>>();
+ let rc = iter.collect::<Rc<[_]>>();
+ assert_eq!(&*vec, &*rc);
+
+ // Clone a bit and let these get dropped.
+ {
+ let _rc_2 = rc.clone();
+ let _rc_3 = rc.clone();
+ let _rc_4 = Rc::downgrade(&_rc_3);
+ }
+ } // Drop what hasn't been here.
+}
+
+#[test]
+fn shared_from_iter_trustedlen_normal() {
+ // Exercise the `TrustedLen` implementation under normal circumstances
+ // where `size_hint()` matches `(_, Some(exact_len))`.
+ {
+ let iter = (0..SHARED_ITER_MAX).map(Box::new);
+ assert_trusted_len(&iter);
+
+ // Collecting into a `Vec<T>` or `Rc<[T]>` should make no difference:
+ let vec = iter.clone().collect::<Vec<_>>();
+ let rc = iter.collect::<Rc<[_]>>();
+ assert_eq!(&*vec, &*rc);
+ assert_eq!(mem::size_of::<Box<u16>>() * SHARED_ITER_MAX as usize, mem::size_of_val(&*rc));
+
+ // Clone a bit and let these get dropped.
+ {
+ let _rc_2 = rc.clone();
+ let _rc_3 = rc.clone();
+ let _rc_4 = Rc::downgrade(&_rc_3);
+ }
+ } // Drop what hasn't been here.
+
+ // Try a ZST to make sure it is handled well.
+ {
+ let iter = (0..SHARED_ITER_MAX).map(drop);
+ let vec = iter.clone().collect::<Vec<_>>();
+ let rc = iter.collect::<Rc<[_]>>();
+ assert_eq!(&*vec, &*rc);
+ assert_eq!(0, mem::size_of_val(&*rc));
+ {
+ let _rc_2 = rc.clone();
+ let _rc_3 = rc.clone();
+ let _rc_4 = Rc::downgrade(&_rc_3);
+ }
+ }
+}
+
+#[test]
+#[should_panic = "I've almost got 99 problems."]
+fn shared_from_iter_trustedlen_panic() {
+ // Exercise the `TrustedLen` implementation when `size_hint()` matches
+ // `(_, Some(exact_len))` but where `.next()` drops before the last iteration.
+ let iter = (0..SHARED_ITER_MAX).map(|val| match val {
+ 98 => panic!("I've almost got 99 problems."),
+ _ => Box::new(val),
+ });
+ assert_trusted_len(&iter);
+ let _ = iter.collect::<Rc<[_]>>();
+
+ panic!("I am unreachable.");
+}
+
+#[test]
+fn shared_from_iter_trustedlen_no_fuse() {
+ // Exercise the `TrustedLen` implementation when `size_hint()` matches
+ // `(_, Some(exact_len))` but where the iterator does not behave in a fused manner.
+ struct Iter(std::vec::IntoIter<Option<Box<u8>>>);
+
+ unsafe impl TrustedLen for Iter {}
+
+ impl Iterator for Iter {
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (2, Some(2))
+ }
+
+ type Item = Box<u8>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next().flatten()
+ }
+ }
+
+ let vec = vec![Some(Box::new(42)), Some(Box::new(24)), None, Some(Box::new(12))];
+ let iter = Iter(vec.into_iter());
+ assert_trusted_len(&iter);
+ // Only the two elements before the first `None` must be collected.
+ assert_eq!(&[Box::new(42), Box::new(24)], &*iter.collect::<Rc<[_]>>());
+}
diff --git a/library/alloc/tests/binary_heap.rs b/library/alloc/tests/binary_heap.rs
new file mode 100644
index 00000000000..62084ccf53c
--- /dev/null
+++ b/library/alloc/tests/binary_heap.rs
@@ -0,0 +1,464 @@
+// Tests for `std::collections::BinaryHeap` (`library/alloc/tests/binary_heap.rs`):
+// a max-heap, so iteration order in `pop`/`drain_sorted` is descending.
+use std::collections::binary_heap::{Drain, PeekMut};
+use std::collections::BinaryHeap;
+use std::iter::TrustedLen;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+use std::sync::atomic::{AtomicU32, Ordering};
+
+#[test]
+fn test_iterator() {
+ let data = vec![5, 9, 3];
+ let iterout = [9, 5, 3];
+ let heap = BinaryHeap::from(data);
+ let mut i = 0;
+ for el in &heap {
+ assert_eq!(*el, iterout[i]);
+ i += 1;
+ }
+}
+
+#[test]
+fn test_iter_rev_cloned_collect() {
+ let data = vec![5, 9, 3];
+ let iterout = vec![3, 5, 9];
+ let pq = BinaryHeap::from(data);
+
+ let v: Vec<_> = pq.iter().rev().cloned().collect();
+ assert_eq!(v, iterout);
+}
+
+#[test]
+fn test_into_iter_collect() {
+ let data = vec![5, 9, 3];
+ let iterout = vec![9, 5, 3];
+ let pq = BinaryHeap::from(data);
+
+ let v: Vec<_> = pq.into_iter().collect();
+ assert_eq!(v, iterout);
+}
+
+// `into_iter`'s size hint must shrink exactly with each `next()`.
+#[test]
+fn test_into_iter_size_hint() {
+ let data = vec![5, 9];
+ let pq = BinaryHeap::from(data);
+
+ let mut it = pq.into_iter();
+
+ assert_eq!(it.size_hint(), (2, Some(2)));
+ assert_eq!(it.next(), Some(9));
+
+ assert_eq!(it.size_hint(), (1, Some(1)));
+ assert_eq!(it.next(), Some(5));
+
+ assert_eq!(it.size_hint(), (0, Some(0)));
+ assert_eq!(it.next(), None);
+}
+
+#[test]
+fn test_into_iter_rev_collect() {
+ let data = vec![5, 9, 3];
+ let iterout = vec![3, 5, 9];
+ let pq = BinaryHeap::from(data);
+
+ let v: Vec<_> = pq.into_iter().rev().collect();
+ assert_eq!(v, iterout);
+}
+
+#[test]
+fn test_into_iter_sorted_collect() {
+ let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
+ let it = heap.into_iter_sorted();
+ let sorted = it.collect::<Vec<_>>();
+ assert_eq!(sorted, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]);
+}
+
+#[test]
+fn test_drain_sorted_collect() {
+ let mut heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
+ let it = heap.drain_sorted();
+ let sorted = it.collect::<Vec<_>>();
+ assert_eq!(sorted, vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 2, 1, 1, 0]);
+}
+
+// Checks the `ExactSizeIterator` contract: `len()`, `size_hint()`, and
+// `is_empty()` must all stay consistent while consuming `it`.
+fn check_exact_size_iterator<I: ExactSizeIterator>(len: usize, it: I) {
+ let mut it = it;
+
+ for i in 0..it.len() {
+ let (lower, upper) = it.size_hint();
+ assert_eq!(Some(lower), upper);
+ assert_eq!(lower, len - i);
+ assert_eq!(it.len(), len - i);
+ it.next();
+ }
+ assert_eq!(it.len(), 0);
+ assert!(it.is_empty());
+}
+
+#[test]
+fn test_exact_size_iterator() {
+ let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
+ check_exact_size_iterator(heap.len(), heap.iter());
+ check_exact_size_iterator(heap.len(), heap.clone().into_iter());
+ check_exact_size_iterator(heap.len(), heap.clone().into_iter_sorted());
+ check_exact_size_iterator(heap.len(), heap.clone().drain());
+ check_exact_size_iterator(heap.len(), heap.clone().drain_sorted());
+}
+
+// For `TrustedLen` iterators: whenever an upper bound is reported it must
+// equal the lower bound and the true remaining length.
+fn check_trusted_len<I: TrustedLen>(len: usize, it: I) {
+ let mut it = it;
+ for i in 0..len {
+ let (lower, upper) = it.size_hint();
+ if upper.is_some() {
+ assert_eq!(Some(lower), upper);
+ assert_eq!(lower, len - i);
+ }
+ it.next();
+ }
+}
+
+#[test]
+fn test_trusted_len() {
+ let heap = BinaryHeap::from(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
+ check_trusted_len(heap.len(), heap.clone().into_iter_sorted());
+ check_trusted_len(heap.len(), heap.clone().drain_sorted());
+}
+
+#[test]
+fn test_peek_and_pop() {
+ let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
+ let mut sorted = data.clone();
+ sorted.sort();
+ let mut heap = BinaryHeap::from(data);
+ while !heap.is_empty() {
+ assert_eq!(heap.peek().unwrap(), sorted.last().unwrap());
+ assert_eq!(heap.pop().unwrap(), sorted.pop().unwrap());
+ }
+}
+
+// `PeekMut` must sift the mutated element back down on drop (10 - 2 = 8
+// drops below 9, so 9 becomes the new max).
+#[test]
+fn test_peek_mut() {
+ let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
+ let mut heap = BinaryHeap::from(data);
+ assert_eq!(heap.peek(), Some(&10));
+ {
+ let mut top = heap.peek_mut().unwrap();
+ *top -= 2;
+ }
+ assert_eq!(heap.peek(), Some(&9));
+}
+
+#[test]
+fn test_peek_mut_pop() {
+ let data = vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1];
+ let mut heap = BinaryHeap::from(data);
+ assert_eq!(heap.peek(), Some(&10));
+ {
+ let mut top = heap.peek_mut().unwrap();
+ *top -= 2;
+ assert_eq!(PeekMut::pop(top), 8);
+ }
+ assert_eq!(heap.peek(), Some(&9));
+}
+
+#[test]
+fn test_push() {
+ let mut heap = BinaryHeap::from(vec![2, 4, 9]);
+ assert_eq!(heap.len(), 3);
+ assert!(*heap.peek().unwrap() == 9);
+ heap.push(11);
+ assert_eq!(heap.len(), 4);
+ assert!(*heap.peek().unwrap() == 11);
+ heap.push(5);
+ assert_eq!(heap.len(), 5);
+ assert!(*heap.peek().unwrap() == 11);
+ heap.push(27);
+ assert_eq!(heap.len(), 6);
+ assert!(*heap.peek().unwrap() == 27);
+ heap.push(3);
+ assert_eq!(heap.len(), 7);
+ assert!(*heap.peek().unwrap() == 27);
+ heap.push(103);
+ assert_eq!(heap.len(), 8);
+ assert!(*heap.peek().unwrap() == 103);
+}
+
+// Same as `test_push`, but with boxed (non-`Copy`, uniquely owned) elements.
+// NOTE(review): uses the unstable `box` placement syntax — presumably enabled
+// by a crate-level feature gate outside this view.
+#[test]
+fn test_push_unique() {
+ let mut heap = BinaryHeap::<Box<_>>::from(vec![box 2, box 4, box 9]);
+ assert_eq!(heap.len(), 3);
+ assert!(**heap.peek().unwrap() == 9);
+ heap.push(box 11);
+ assert_eq!(heap.len(), 4);
+ assert!(**heap.peek().unwrap() == 11);
+ heap.push(box 5);
+ assert_eq!(heap.len(), 5);
+ assert!(**heap.peek().unwrap() == 11);
+ heap.push(box 27);
+ assert_eq!(heap.len(), 6);
+ assert!(**heap.peek().unwrap() == 27);
+ heap.push(box 3);
+ assert_eq!(heap.len(), 7);
+ assert!(**heap.peek().unwrap() == 27);
+ heap.push(box 103);
+ assert_eq!(heap.len(), 8);
+ assert!(**heap.peek().unwrap() == 103);
+}
+
+// `into_vec` must preserve the multiset of elements; `into_sorted_vec`
+// must return them ascending.
+fn check_to_vec(mut data: Vec<i32>) {
+ let heap = BinaryHeap::from(data.clone());
+ let mut v = heap.clone().into_vec();
+ v.sort();
+ data.sort();
+
+ assert_eq!(v, data);
+ assert_eq!(heap.into_sorted_vec(), data);
+}
+
+#[test]
+fn test_to_vec() {
+ check_to_vec(vec![]);
+ check_to_vec(vec![5]);
+ check_to_vec(vec![3, 2]);
+ check_to_vec(vec![2, 3]);
+ check_to_vec(vec![5, 1, 2]);
+ check_to_vec(vec![1, 100, 2, 3]);
+ check_to_vec(vec![1, 3, 5, 7, 9, 2, 4, 6, 8, 0]);
+ check_to_vec(vec![2, 4, 6, 2, 1, 8, 10, 3, 5, 7, 0, 9, 1]);
+ check_to_vec(vec![9, 11, 9, 9, 9, 9, 11, 2, 3, 4, 11, 9, 0, 0, 0, 0]);
+ check_to_vec(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
+ check_to_vec(vec![10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0]);
+ check_to_vec(vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 0, 0, 1, 2]);
+ check_to_vec(vec![5, 4, 3, 2, 1, 5, 4, 3, 2, 1, 5, 4, 3, 2, 1]);
+}
+
+#[test]
+fn test_empty_pop() {
+ let mut heap = BinaryHeap::<i32>::new();
+ assert!(heap.pop().is_none());
+}
+
+#[test]
+fn test_empty_peek() {
+ let empty = BinaryHeap::<i32>::new();
+ assert!(empty.peek().is_none());
+}
+
+#[test]
+fn test_empty_peek_mut() {
+ let mut empty = BinaryHeap::<i32>::new();
+ assert!(empty.peek_mut().is_none());
+}
+
+#[test]
+fn test_from_iter() {
+ let xs = vec![9, 8, 7, 6, 5, 4, 3, 2, 1];
+
+ let mut q: BinaryHeap<_> = xs.iter().rev().cloned().collect();
+
+ for &x in &xs {
+ assert_eq!(q.pop().unwrap(), x);
+ }
+}
+
+// A partially consumed `Drain` must still empty the heap when dropped.
+#[test]
+fn test_drain() {
+ let mut q: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect();
+
+ assert_eq!(q.drain().take(5).count(), 5);
+
+ assert!(q.is_empty());
+}
+
+#[test]
+fn test_drain_sorted() {
+ let mut q: BinaryHeap<_> = [9, 8, 7, 6, 5, 4, 3, 2, 1].iter().cloned().collect();
+
+ assert_eq!(q.drain_sorted().take(5).collect::<Vec<_>>(), vec![9, 8, 7, 6, 5]);
+
+ assert!(q.is_empty());
+}
+
+// If an element's destructor panics during `drain_sorted`, the remaining
+// elements must still be dropped exactly once (no leak, no double drop).
+#[test]
+fn test_drain_sorted_leak() {
+ static DROPS: AtomicU32 = AtomicU32::new(0);
+
+ #[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
+ struct D(u32, bool);
+
+ impl Drop for D {
+ fn drop(&mut self) {
+ DROPS.fetch_add(1, Ordering::SeqCst);
+
+ if self.1 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ let mut q = BinaryHeap::from(vec![
+ D(0, false),
+ D(1, false),
+ D(2, false),
+ D(3, true),
+ D(4, false),
+ D(5, false),
+ ]);
+
+ catch_unwind(AssertUnwindSafe(|| drop(q.drain_sorted()))).ok();
+
+ assert_eq!(DROPS.load(Ordering::SeqCst), 6);
+}
+
+// `Extend<&T>` (by-reference extension) from both slices and other heaps.
+#[test]
+fn test_extend_ref() {
+ let mut a = BinaryHeap::new();
+ a.push(1);
+ a.push(2);
+
+ a.extend(&[3, 4, 5]);
+
+ assert_eq!(a.len(), 5);
+ assert_eq!(a.into_sorted_vec(), [1, 2, 3, 4, 5]);
+
+ let mut a = BinaryHeap::new();
+ a.push(1);
+ a.push(2);
+ let mut b = BinaryHeap::new();
+ b.push(3);
+ b.push(4);
+ b.push(5);
+
+ a.extend(&b);
+
+ assert_eq!(a.len(), 5);
+ assert_eq!(a.into_sorted_vec(), [1, 2, 3, 4, 5]);
+}
+
+#[test]
+fn test_append() {
+ let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]);
+ let mut b = BinaryHeap::from(vec![-20, 5, 43]);
+
+ a.append(&mut b);
+
+ assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
+ assert!(b.is_empty());
+}
+
+#[test]
+fn test_append_to_empty() {
+ let mut a = BinaryHeap::new();
+ let mut b = BinaryHeap::from(vec![-20, 5, 43]);
+
+ a.append(&mut b);
+
+ assert_eq!(a.into_sorted_vec(), [-20, 5, 43]);
+ assert!(b.is_empty());
+}
+
+// Extending a heap from another heap (by value) takes a specialized path;
+// the observable result must match plain extension.
+#[test]
+fn test_extend_specialization() {
+ let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]);
+ let b = BinaryHeap::from(vec![-20, 5, 43]);
+
+ a.extend(b);
+
+ assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]);
+}
+
+// Compile-time check that `Drain` is covariant in its lifetime and element type.
+#[allow(dead_code)]
+fn assert_covariance() {
+ fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {
+ d
+ }
+}
+
+#[test]
+fn test_retain() {
+ let mut a = BinaryHeap::from(vec![-10, -5, 1, 2, 4, 13]);
+ a.retain(|x| x % 2 == 0);
+
+ assert_eq!(a.into_sorted_vec(), [-10, 2, 4])
+}
+
+// old binaryheap failed this test
+//
+// Integrity means that all elements are present after a comparison panics,
+// even if the order may not be correct.
+//
+// Destructors must be called exactly once per element.
+// FIXME: re-enable emscripten once it can unwind again
+#[test]
+#[cfg(not(target_os = "emscripten"))]
+fn panic_safe() {
+ use rand::{seq::SliceRandom, thread_rng};
+ use std::cmp;
+ use std::panic::{self, AssertUnwindSafe};
+ use std::sync::atomic::{AtomicUsize, Ordering};
+
+ static DROP_COUNTER: AtomicUsize = AtomicUsize::new(0);
+
+ // Element whose comparison panics when either operand has its flag set.
+ #[derive(Eq, PartialEq, Ord, Clone, Debug)]
+ struct PanicOrd<T>(T, bool);
+
+ impl<T> Drop for PanicOrd<T> {
+ fn drop(&mut self) {
+ // update global drop count
+ DROP_COUNTER.fetch_add(1, Ordering::SeqCst);
+ }
+ }
+
+ impl<T: PartialOrd> PartialOrd for PanicOrd<T> {
+ fn partial_cmp(&self, other: &Self) -> Option<cmp::Ordering> {
+ if self.1 || other.1 {
+ panic!("Panicking comparison");
+ }
+ self.0.partial_cmp(&other.0)
+ }
+ }
+ let mut rng = thread_rng();
+ const DATASZ: usize = 32;
+ // Miri is too slow
+ let ntest = if cfg!(miri) { 1 } else { 10 };
+
+ // don't use 0 in the data -- we want to catch the zeroed-out case.
+ let data = (1..=DATASZ).collect::<Vec<_>>();
+
+ // since it's a fuzzy test, run several tries.
+ for _ in 0..ntest {
+ for i in 1..=DATASZ {
+ DROP_COUNTER.store(0, Ordering::SeqCst);
+
+ let mut panic_ords: Vec<_> =
+ data.iter().filter(|&&x| x != i).map(|&x| PanicOrd(x, false)).collect();
+ let panic_item = PanicOrd(i, true);
+
+ // heapify the sane items
+ panic_ords.shuffle(&mut rng);
+ let mut heap = BinaryHeap::from(panic_ords);
+ let inner_data;
+
+ {
+ // push the panicking item to the heap and catch the panic
+ let thread_result = {
+ let mut heap_ref = AssertUnwindSafe(&mut heap);
+ panic::catch_unwind(move || {
+ heap_ref.push(panic_item);
+ })
+ };
+ assert!(thread_result.is_err());
+
+ // Assert no elements were dropped
+ let drops = DROP_COUNTER.load(Ordering::SeqCst);
+ assert!(drops == 0, "Must not drop items. drops={}", drops);
+ inner_data = heap.clone().into_vec();
+ drop(heap);
+ }
+ let drops = DROP_COUNTER.load(Ordering::SeqCst);
+ assert_eq!(drops, DATASZ);
+
+ let mut data_sorted = inner_data.into_iter().map(|p| p.0).collect::<Vec<_>>();
+ data_sorted.sort();
+ assert_eq!(data_sorted, data);
+ }
+ }
+}
diff --git a/library/alloc/tests/borrow.rs b/library/alloc/tests/borrow.rs
new file mode 100644
index 00000000000..8bfcf323f67
--- /dev/null
+++ b/library/alloc/tests/borrow.rs
@@ -0,0 +1,47 @@
+// Tests for `From<Cow<_>>` conversions into `Box`, `Rc`, and `Arc`
+// (`library/alloc/tests/borrow.rs`).
+use std::borrow::{Cow, ToOwned};
+use std::ffi::{CStr, OsStr};
+use std::path::Path;
+use std::rc::Rc;
+use std::sync::Arc;
+
+// For a `$value` and each target `$ty`, check that converting from both
+// `Cow::Borrowed` and `Cow::Owned` round-trips to an equal value.
+// The one-argument form fans out to `Box`, `Rc`, and `Arc` of the payload.
+macro_rules! test_from_cow {
+ ($value:ident => $($ty:ty),+) => {$(
+ let borrowed = <$ty>::from(Cow::Borrowed($value));
+ let owned = <$ty>::from(Cow::Owned($value.to_owned()));
+ assert_eq!($value, &*borrowed);
+ assert_eq!($value, &*owned);
+ )+};
+ ($value:ident : & $ty:ty) => {
+ test_from_cow!($value => Box<$ty>, Rc<$ty>, Arc<$ty>);
+ }
+}
+
+#[test]
+fn test_from_cow_slice() {
+ let slice: &[i32] = &[1, 2, 3];
+ test_from_cow!(slice: &[i32]);
+}
+
+#[test]
+fn test_from_cow_str() {
+ let string = "hello";
+ test_from_cow!(string: &str);
+}
+
+#[test]
+fn test_from_cow_c_str() {
+ let string = CStr::from_bytes_with_nul(b"hello\0").unwrap();
+ test_from_cow!(string: &CStr);
+}
+
+#[test]
+fn test_from_cow_os_str() {
+ let string = OsStr::new("hello");
+ test_from_cow!(string: &OsStr);
+}
+
+#[test]
+fn test_from_cow_path() {
+ let path = Path::new("hello");
+ test_from_cow!(path: &Path);
+}
diff --git a/library/alloc/tests/boxed.rs b/library/alloc/tests/boxed.rs
new file mode 100644
index 00000000000..5377485da8f
--- /dev/null
+++ b/library/alloc/tests/boxed.rs
@@ -0,0 +1,51 @@
+// Tests for `Box` (`library/alloc/tests/boxed.rs`): uninitialized zero-size
+// allocations and `clone`/`clone_from` behavior for boxed slices.
+use std::mem::MaybeUninit;
+use std::ptr::NonNull;
+
+// Zero-size (un)initialized boxes must not allocate: they use the dangling
+// pointer, same as `NonNull::dangling()`.
+// NOTE(review): function name has a typo ("unitialized"); renaming would
+// change the recorded test name, so it is left as-is.
+#[test]
+fn unitialized_zero_size_box() {
+ assert_eq!(
+ &*Box::<()>::new_uninit() as *const _,
+ NonNull::<MaybeUninit<()>>::dangling().as_ptr(),
+ );
+ assert_eq!(
+ Box::<[()]>::new_uninit_slice(4).as_ptr(),
+ NonNull::<MaybeUninit<()>>::dangling().as_ptr(),
+ );
+ assert_eq!(
+ Box::<[String]>::new_uninit_slice(0).as_ptr(),
+ NonNull::<MaybeUninit<String>>::dangling().as_ptr(),
+ );
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+struct Dummy {
+ _data: u8,
+}
+
+// `clone()` and `clone_from()` must produce equal results for boxed slices
+// of various power-of-two sizes (including the empty slice).
+#[test]
+fn box_clone_and_clone_from_equivalence() {
+ for size in (0..8).map(|i| 2usize.pow(i)) {
+ let control = vec![Dummy { _data: 42 }; size].into_boxed_slice();
+ let clone = control.clone();
+ let mut copy = vec![Dummy { _data: 84 }; size].into_boxed_slice();
+ copy.clone_from(&control);
+ assert_eq!(control, clone);
+ assert_eq!(control, copy);
+ }
+}
+
+/// This test might give a false positive in case the box reallocates, but the
+/// allocator keeps the original pointer.
+///
+/// On the other hand it won't give a false negative: if it fails, then the
+/// memory was definitely not reused.
+#[test]
+fn box_clone_from_ptr_stability() {
+ for size in (0..8).map(|i| 2usize.pow(i)) {
+ let control = vec![Dummy { _data: 42 }; size].into_boxed_slice();
+ let mut copy = vec![Dummy { _data: 84 }; size].into_boxed_slice();
+ let copy_raw = copy.as_ptr() as usize;
+ // `clone_from` should reuse `copy`'s equal-sized allocation in place.
+ copy.clone_from(&control);
+ assert_eq!(copy.as_ptr() as usize, copy_raw);
+ }
+}
diff --git a/library/alloc/tests/btree/map.rs b/library/alloc/tests/btree/map.rs
new file mode 100644
index 00000000000..f9f81716e35
--- /dev/null
+++ b/library/alloc/tests/btree/map.rs
@@ -0,0 +1,1463 @@
+use std::collections::btree_map::Entry::{Occupied, Vacant};
+use std::collections::BTreeMap;
+use std::convert::TryFrom;
+use std::fmt::Debug;
+use std::iter::FromIterator;
+use std::mem;
+use std::ops::Bound::{self, Excluded, Included, Unbounded};
+use std::ops::RangeBounds;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+use std::rc::Rc;
+use std::sync::atomic::{AtomicUsize, Ordering};
+
+use super::DeterministicRng;
+
+// Value of node::CAPACITY, thus capacity of a tree with a single level,
+// i.e. a tree who's root is a leaf node at height 0.
+const NODE_CAPACITY: usize = 11;
+
+// Minimum number of elements to insert in order to guarantee a tree with 2 levels,
+// i.e. a tree who's root is an internal node at height 1, with edges to leaf nodes.
+// It's not the minimum size: removing an element from such a tree does not always reduce height.
+const MIN_INSERTS_HEIGHT_1: usize = NODE_CAPACITY + 1;
+
+// Minimum number of elements to insert in order to guarantee a tree with 3 levels,
+// i.e. a tree who's root is an internal node at height 2, with edges to more internal nodes.
+// It's not the minimum size: removing an element from such a tree does not always reduce height.
+const MIN_INSERTS_HEIGHT_2: usize = NODE_CAPACITY + (NODE_CAPACITY + 1) * NODE_CAPACITY + 1;
+
+// Gather all references from a mutable iterator and make sure Miri notices if
+// using them is dangerous.
+fn test_all_refs<'a, T: 'a>(dummy: &mut T, iter: impl Iterator<Item = &'a mut T>) {
+ // Gather all those references.
+ let mut refs: Vec<&mut T> = iter.collect();
+ // Use them all. Twice, to be sure we got all interleavings.
+ for r in refs.iter_mut() {
+ mem::swap(dummy, r);
+ }
+ for r in refs {
+ mem::swap(dummy, r);
+ }
+}
+
+// Insert/overwrite/remove a large number of keys and check len(), lookups,
+// and first/last accessors at each stage. Values are 10*key, then 100*key
+// after the overwrite pass, to make stale values detectable.
+#[test]
+fn test_basic_large() {
+ let mut map = BTreeMap::new();
+ // Miri is too slow
+ let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 } else { 10000 };
+ assert_eq!(map.len(), 0);
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 10 * i), None);
+ assert_eq!(map.len(), i + 1);
+ }
+
+ assert_eq!(map.first_key_value(), Some((&0, &0)));
+ assert_eq!(map.last_key_value(), Some((&(size - 1), &(10 * (size - 1)))));
+ assert_eq!(map.first_entry().unwrap().key(), &0);
+ assert_eq!(map.last_entry().unwrap().key(), &(size - 1));
+
+ for i in 0..size {
+ assert_eq!(map.get(&i).unwrap(), &(i * 10));
+ }
+
+ for i in size..size * 2 {
+ assert_eq!(map.get(&i), None);
+ }
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 100 * i), Some(10 * i));
+ assert_eq!(map.len(), size);
+ }
+
+ for i in 0..size {
+ assert_eq!(map.get(&i).unwrap(), &(i * 100));
+ }
+
+ // Remove the even keys, then verify odds remain and evens are gone.
+ for i in 0..size / 2 {
+ assert_eq!(map.remove(&(i * 2)), Some(i * 200));
+ assert_eq!(map.len(), size - i - 1);
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.get(&(2 * i)), None);
+ assert_eq!(map.get(&(2 * i + 1)).unwrap(), &(i * 200 + 100));
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.remove(&(2 * i)), None);
+ assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
+ assert_eq!(map.len(), size / 2 - i - 1);
+ }
+}
+
+// Walk a map through every small state transition: absent root, one pair,
+// two pairs, back down to an empty-but-allocated root, checking every
+// accessor at each step.
+#[test]
+fn test_basic_small() {
+ let mut map = BTreeMap::new();
+ // Empty, root is absent (None):
+ assert_eq!(map.remove(&1), None);
+ assert_eq!(map.len(), 0);
+ assert_eq!(map.get(&1), None);
+ assert_eq!(map.get_mut(&1), None);
+ assert_eq!(map.first_key_value(), None);
+ assert_eq!(map.last_key_value(), None);
+ assert_eq!(map.keys().count(), 0);
+ assert_eq!(map.values().count(), 0);
+ assert_eq!(map.range(..).next(), None);
+ assert_eq!(map.range(..1).next(), None);
+ assert_eq!(map.range(1..).next(), None);
+ assert_eq!(map.range(1..=1).next(), None);
+ assert_eq!(map.range(1..2).next(), None);
+ assert_eq!(map.insert(1, 1), None);
+
+ // 1 key-value pair:
+ assert_eq!(map.len(), 1);
+ assert_eq!(map.get(&1), Some(&1));
+ assert_eq!(map.get_mut(&1), Some(&mut 1));
+ assert_eq!(map.first_key_value(), Some((&1, &1)));
+ assert_eq!(map.last_key_value(), Some((&1, &1)));
+ assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1]);
+ assert_eq!(map.values().collect::<Vec<_>>(), vec![&1]);
+ assert_eq!(map.insert(1, 2), Some(1));
+ assert_eq!(map.len(), 1);
+ assert_eq!(map.get(&1), Some(&2));
+ assert_eq!(map.get_mut(&1), Some(&mut 2));
+ assert_eq!(map.first_key_value(), Some((&1, &2)));
+ assert_eq!(map.last_key_value(), Some((&1, &2)));
+ assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1]);
+ assert_eq!(map.values().collect::<Vec<_>>(), vec![&2]);
+ assert_eq!(map.insert(2, 4), None);
+
+ // 2 key-value pairs:
+ assert_eq!(map.len(), 2);
+ assert_eq!(map.get(&2), Some(&4));
+ assert_eq!(map.get_mut(&2), Some(&mut 4));
+ assert_eq!(map.first_key_value(), Some((&1, &2)));
+ assert_eq!(map.last_key_value(), Some((&2, &4)));
+ assert_eq!(map.keys().collect::<Vec<_>>(), vec![&1, &2]);
+ assert_eq!(map.values().collect::<Vec<_>>(), vec![&2, &4]);
+ assert_eq!(map.remove(&1), Some(2));
+
+ // 1 key-value pair:
+ assert_eq!(map.len(), 1);
+ assert_eq!(map.get(&1), None);
+ assert_eq!(map.get_mut(&1), None);
+ assert_eq!(map.get(&2), Some(&4));
+ assert_eq!(map.get_mut(&2), Some(&mut 4));
+ assert_eq!(map.first_key_value(), Some((&2, &4)));
+ assert_eq!(map.last_key_value(), Some((&2, &4)));
+ assert_eq!(map.keys().collect::<Vec<_>>(), vec![&2]);
+ assert_eq!(map.values().collect::<Vec<_>>(), vec![&4]);
+ assert_eq!(map.remove(&2), Some(4));
+
+ // Empty but root is owned (Some(...)):
+ assert_eq!(map.len(), 0);
+ assert_eq!(map.get(&1), None);
+ assert_eq!(map.get_mut(&1), None);
+ assert_eq!(map.first_key_value(), None);
+ assert_eq!(map.last_key_value(), None);
+ assert_eq!(map.keys().count(), 0);
+ assert_eq!(map.values().count(), 0);
+ assert_eq!(map.range(..).next(), None);
+ assert_eq!(map.range(..1).next(), None);
+ assert_eq!(map.range(1..).next(), None);
+ assert_eq!(map.range(1..=1).next(), None);
+ assert_eq!(map.range(1..2).next(), None);
+ assert_eq!(map.remove(&1), None);
+}
+
+// Forward iteration: `iter`, `iter_mut`, and `into_iter` must all yield
+// (0,0)..(size-1,size-1) in order, with exact size hints throughout.
+#[test]
+fn test_iter() {
+ // Miri is too slow
+ let size = if cfg!(miri) { 200 } else { 10000 };
+
+ let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+
+ fn test<T>(size: usize, mut iter: T)
+ where
+ T: Iterator<Item = (usize, usize)>,
+ {
+ for i in 0..size {
+ assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
+ assert_eq!(iter.next().unwrap(), (i, i));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ }
+ test(size, map.iter().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter());
+}
+
+// Same as `test_iter` but iterating in reverse via `.rev()`.
+#[test]
+fn test_iter_rev() {
+ // Miri is too slow
+ let size = if cfg!(miri) { 200 } else { 10000 };
+
+ let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+
+ fn test<T>(size: usize, mut iter: T)
+ where
+ T: Iterator<Item = (usize, usize)>,
+ {
+ for i in 0..size {
+ assert_eq!(iter.size_hint(), (size - i, Some(size - i)));
+ assert_eq!(iter.next().unwrap(), (size - i - 1, size - i - 1));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ }
+ test(size, map.iter().rev().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().rev().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter().rev());
+}
+
+/// Specifically tests iter_mut's ability to mutate the value of pairs in-line
+///
+/// Writes unique values forwards, verifies and rewrites them backwards, then
+/// verifies the backward writes forwards — so every slot is read and written
+/// in both directions.
+fn do_test_iter_mut_mutation<T>(size: usize)
+where
+ T: Copy + Debug + Ord + TryFrom<usize>,
+ <T as std::convert::TryFrom<usize>>::Error: std::fmt::Debug,
+{
+ let zero = T::try_from(0).unwrap();
+ let mut map: BTreeMap<T, T> = (0..size).map(|i| (T::try_from(i).unwrap(), zero)).collect();
+
+ // Forward and backward iteration sees enough pairs (also tested elsewhere)
+ assert_eq!(map.iter_mut().count(), size);
+ assert_eq!(map.iter_mut().rev().count(), size);
+
+ // Iterate forwards, trying to mutate to unique values
+ for (i, (k, v)) in map.iter_mut().enumerate() {
+ assert_eq!(*k, T::try_from(i).unwrap());
+ assert_eq!(*v, zero);
+ *v = T::try_from(i + 1).unwrap();
+ }
+
+ // Iterate backwards, checking that mutations succeeded and trying to mutate again
+ for (i, (k, v)) in map.iter_mut().rev().enumerate() {
+ assert_eq!(*k, T::try_from(size - i - 1).unwrap());
+ assert_eq!(*v, T::try_from(size - i).unwrap());
+ *v = T::try_from(2 * size - i).unwrap();
+ }
+
+ // Check that backward mutations succeeded
+ for (i, (k, v)) in map.iter_mut().enumerate() {
+ assert_eq!(*k, T::try_from(i).unwrap());
+ assert_eq!(*v, T::try_from(size + i + 1).unwrap());
+ }
+}
+
+// Over-aligned key/value type (32-byte alignment) used to stress node layout
+// in `test_iter_mut_mutation`. The `TryFrom<usize>` impl is infallible.
+#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
+#[repr(align(32))]
+struct Align32(usize);
+
+impl TryFrom<usize> for Align32 {
+ type Error = ();
+
+ fn try_from(s: usize) -> Result<Align32, ()> {
+ Ok(Align32(s))
+ }
+}
+
+#[test]
+fn test_iter_mut_mutation() {
+ // Check many alignments and trees with roots at various heights.
+ do_test_iter_mut_mutation::<u8>(0);
+ do_test_iter_mut_mutation::<u8>(1);
+ do_test_iter_mut_mutation::<u8>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u8>(127); // not enough unique values to test MIN_INSERTS_HEIGHT_2
+ do_test_iter_mut_mutation::<u16>(1);
+ do_test_iter_mut_mutation::<u16>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u16>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<u32>(1);
+ do_test_iter_mut_mutation::<u32>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u32>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<u64>(1);
+ do_test_iter_mut_mutation::<u64>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u64>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<u128>(1);
+ do_test_iter_mut_mutation::<u128>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<u128>(MIN_INSERTS_HEIGHT_2);
+ do_test_iter_mut_mutation::<Align32>(1);
+ do_test_iter_mut_mutation::<Align32>(MIN_INSERTS_HEIGHT_1);
+ do_test_iter_mut_mutation::<Align32>(MIN_INSERTS_HEIGHT_2);
+}
+
+// Aliasing check: all `values_mut` references of a 3-level tree must be
+// simultaneously usable (see `test_all_refs`).
+#[test]
+#[cfg_attr(miri, ignore)] // FIXME: fails in Miri <https://github.com/rust-lang/rust/issues/73915>
+fn test_values_mut() {
+ let mut a: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)).collect();
+ test_all_refs(&mut 13, a.values_mut());
+}
+
+// In-place mutation through `values_mut` must be visible to later reads.
+#[test]
+fn test_values_mut_mutation() {
+ let mut a = BTreeMap::new();
+ a.insert(1, String::from("hello"));
+ a.insert(2, String::from("goodbye"));
+
+ for value in a.values_mut() {
+ value.push_str("!");
+ }
+
+ let values: Vec<String> = a.values().cloned().collect();
+ assert_eq!(values, [String::from("hello!"), String::from("goodbye!")]);
+}
+
+// Front and back of the same `iter_mut` both visit the root node; the two
+// handed-out references must remain independently readable and writable.
+#[test]
+#[cfg_attr(miri, ignore)] // FIXME: fails in Miri <https://github.com/rust-lang/rust/issues/73915>
+fn test_iter_entering_root_twice() {
+ let mut map: BTreeMap<_, _> = (0..2).map(|i| (i, i)).collect();
+ let mut it = map.iter_mut();
+ let front = it.next().unwrap();
+ let back = it.next_back().unwrap();
+ assert_eq!(front, (&0, &mut 0));
+ assert_eq!(back, (&1, &mut 1));
+ *front.1 = 24;
+ *back.1 = 42;
+ assert_eq!(front, (&0, &mut 24));
+ assert_eq!(back, (&1, &mut 42));
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // FIXME: fails in Miri <https://github.com/rust-lang/rust/issues/73915>
+fn test_iter_descending_to_same_node_twice() {
+ let mut map: BTreeMap<_, _> = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i)).collect();
+ let mut it = map.iter_mut();
+ // Descend into first child.
+ let front = it.next().unwrap();
+ // Descend into first child again, after running through second child.
+ while it.next_back().is_some() {}
+ // Check immutable access.
+ assert_eq!(front, (&0, &mut 0));
+ // Perform mutable access.
+ *front.1 = 42;
+}
+
+// Alternate `next()` and `next_back()` for the first quarter, then finish the
+// middle half forwards; size hints must stay exact while the two ends converge.
+#[test]
+fn test_iter_mixed() {
+ // Miri is too slow
+ let size = if cfg!(miri) { 200 } else { 10000 };
+
+ let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+
+ fn test<T>(size: usize, mut iter: T)
+ where
+ T: Iterator<Item = (usize, usize)> + DoubleEndedIterator,
+ {
+ for i in 0..size / 4 {
+ assert_eq!(iter.size_hint(), (size - i * 2, Some(size - i * 2)));
+ assert_eq!(iter.next().unwrap(), (i, i));
+ assert_eq!(iter.next_back().unwrap(), (size - i - 1, size - i - 1));
+ }
+ for i in size / 4..size * 3 / 4 {
+ assert_eq!(iter.size_hint(), (size * 3 / 4 - i, Some(size * 3 / 4 - i)));
+ assert_eq!(iter.next().unwrap(), (i, i));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ }
+ test(size, map.iter().map(|(&k, &v)| (k, v)));
+ test(size, map.iter_mut().map(|(&k, &mut v)| (k, v)));
+ test(size, map.into_iter());
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // FIXME: fails in Miri <https://github.com/rust-lang/rust/issues/73915>
+fn test_iter_min_max() {
+ let mut a = BTreeMap::new();
+ assert_eq!(a.iter().min(), None);
+ assert_eq!(a.iter().max(), None);
+ assert_eq!(a.iter_mut().min(), None);
+ assert_eq!(a.iter_mut().max(), None);
+ assert_eq!(a.range(..).min(), None);
+ assert_eq!(a.range(..).max(), None);
+ assert_eq!(a.range_mut(..).min(), None);
+ assert_eq!(a.range_mut(..).max(), None);
+ assert_eq!(a.keys().min(), None);
+ assert_eq!(a.keys().max(), None);
+ assert_eq!(a.values().min(), None);
+ assert_eq!(a.values().max(), None);
+ assert_eq!(a.values_mut().min(), None);
+ assert_eq!(a.values_mut().max(), None);
+ a.insert(1, 42);
+ a.insert(2, 24);
+ assert_eq!(a.iter().min(), Some((&1, &42)));
+ assert_eq!(a.iter().max(), Some((&2, &24)));
+ assert_eq!(a.iter_mut().min(), Some((&1, &mut 42)));
+ assert_eq!(a.iter_mut().max(), Some((&2, &mut 24)));
+ assert_eq!(a.range(..).min(), Some((&1, &42)));
+ assert_eq!(a.range(..).max(), Some((&2, &24)));
+ assert_eq!(a.range_mut(..).min(), Some((&1, &mut 42)));
+ assert_eq!(a.range_mut(..).max(), Some((&2, &mut 24)));
+ assert_eq!(a.keys().min(), Some(&1));
+ assert_eq!(a.keys().max(), Some(&2));
+ assert_eq!(a.values().min(), Some(&24));
+ assert_eq!(a.values().max(), Some(&42));
+ assert_eq!(a.values_mut().min(), Some(&mut 24));
+ assert_eq!(a.values_mut().max(), Some(&mut 42));
+}
+
+fn range_keys(map: &BTreeMap<i32, i32>, range: impl RangeBounds<i32>) -> Vec<i32> {
+ map.range(range)
+ .map(|(&k, &v)| {
+ assert_eq!(k, v);
+ k
+ })
+ .collect()
+}
+
+#[test]
+fn test_range_small() {
+ let size = 4;
+
+ let map: BTreeMap<_, _> = (1..=size).map(|i| (i, i)).collect();
+ let all: Vec<_> = (1..=size).collect();
+ let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);
+
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
+ assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
+ assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
+ assert_eq!(range_keys(&map, ..), all);
+
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
+ assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
+ assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
+ assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
+ assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
+ assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
+ assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);
+
+ assert_eq!(range_keys(&map, ..3), vec![1, 2]);
+ assert_eq!(range_keys(&map, 3..), vec![3, 4]);
+ assert_eq!(range_keys(&map, 2..=3), vec![2, 3]);
+}
+
+#[test]
+fn test_range_height_1() {
+ // Tests tree with a root and 2 leaves. Depending on details we don't want or need
+ // to rely upon, the single key at the root will be 6 or 7.
+
+ let map: BTreeMap<_, _> = (1..=MIN_INSERTS_HEIGHT_1 as i32).map(|i| (i, i)).collect();
+ for &root in &[6, 7] {
+ assert_eq!(range_keys(&map, (Excluded(root), Excluded(root + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(root), Included(root + 1))), vec![root + 1]);
+ assert_eq!(range_keys(&map, (Included(root), Excluded(root + 1))), vec![root]);
+ assert_eq!(range_keys(&map, (Included(root), Included(root + 1))), vec![root, root + 1]);
+
+ assert_eq!(range_keys(&map, (Excluded(root - 1), Excluded(root))), vec![]);
+ assert_eq!(range_keys(&map, (Included(root - 1), Excluded(root))), vec![root - 1]);
+ assert_eq!(range_keys(&map, (Excluded(root - 1), Included(root))), vec![root]);
+ assert_eq!(range_keys(&map, (Included(root - 1), Included(root))), vec![root - 1, root]);
+ }
+}
+
+#[test]
+fn test_range_large() {
+ let size = 200;
+
+ let map: BTreeMap<_, _> = (1..=size).map(|i| (i, i)).collect();
+ let all: Vec<_> = (1..=size).collect();
+ let (first, last) = (vec![all[0]], vec![all[size as usize - 1]]);
+
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(size))), all);
+ assert_eq!(range_keys(&map, (Excluded(0), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(0), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(0), Included(size))), all);
+ assert_eq!(range_keys(&map, (Included(0), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Included(1), Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(1), Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Included(1), Included(size))), all);
+ assert_eq!(range_keys(&map, (Included(1), Unbounded)), all);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(size + 1))), all);
+ assert_eq!(range_keys(&map, (Unbounded, Included(size + 1))), all);
+ assert_eq!(range_keys(&map, (Unbounded, Included(size))), all);
+ assert_eq!(range_keys(&map, ..), all);
+
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Included(0), Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(1))), vec![]);
+ assert_eq!(range_keys(&map, (Unbounded, Included(0))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(0), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Excluded(0), Included(1))), first);
+ assert_eq!(range_keys(&map, (Included(0), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Included(0), Included(1))), first);
+ assert_eq!(range_keys(&map, (Included(1), Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Included(1), Included(1))), first);
+ assert_eq!(range_keys(&map, (Unbounded, Excluded(2))), first);
+ assert_eq!(range_keys(&map, (Unbounded, Included(1))), first);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Excluded(size + 1))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size + 1))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Included(size))), last);
+ assert_eq!(range_keys(&map, (Excluded(size - 1), Unbounded)), last);
+ assert_eq!(range_keys(&map, (Included(size), Excluded(size + 1))), last);
+ assert_eq!(range_keys(&map, (Included(size), Included(size + 1))), last);
+ assert_eq!(range_keys(&map, (Included(size), Included(size))), last);
+ assert_eq!(range_keys(&map, (Included(size), Unbounded)), last);
+ assert_eq!(range_keys(&map, (Excluded(size), Excluded(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(size), Included(size))), vec![]);
+ assert_eq!(range_keys(&map, (Excluded(size), Unbounded)), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Excluded(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Included(size + 1))), vec![]);
+ assert_eq!(range_keys(&map, (Included(size + 1), Unbounded)), vec![]);
+
+ fn check<'a, L, R>(lhs: L, rhs: R)
+ where
+ L: IntoIterator<Item = (&'a i32, &'a i32)>,
+ R: IntoIterator<Item = (&'a i32, &'a i32)>,
+ {
+ let lhs: Vec<_> = lhs.into_iter().collect();
+ let rhs: Vec<_> = rhs.into_iter().collect();
+ assert_eq!(lhs, rhs);
+ }
+
+ check(map.range(..=100), map.range(..101));
+ check(map.range(5..=8), vec![(&5, &5), (&6, &6), (&7, &7), (&8, &8)]);
+ check(map.range(-1..=2), vec![(&1, &1), (&2, &2)]);
+}
+
+#[test]
+fn test_range_inclusive_max_value() {
+ let max = usize::MAX;
+ let map: BTreeMap<_, _> = vec![(max, 0)].into_iter().collect();
+
+ assert_eq!(map.range(max..=max).collect::<Vec<_>>(), &[(&max, &0)]);
+}
+
+#[test]
+fn test_range_equal_empty_cases() {
+ let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
+ assert_eq!(map.range((Included(2), Excluded(2))).next(), None);
+ assert_eq!(map.range((Excluded(2), Included(2))).next(), None);
+}
+
+#[test]
+#[should_panic]
+fn test_range_equal_excluded() {
+ let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
+ map.range((Excluded(2), Excluded(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_1() {
+ let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
+ map.range((Included(3), Included(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_2() {
+ let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
+ map.range((Included(3), Excluded(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_3() {
+ let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
+ map.range((Excluded(3), Included(2)));
+}
+
+#[test]
+#[should_panic]
+fn test_range_backwards_4() {
+ let map: BTreeMap<_, _> = (0..5).map(|i| (i, i)).collect();
+ map.range((Excluded(3), Excluded(2)));
+}
+
+#[test]
+fn test_range_1000() {
+ // Miri is too slow
+ let size = if cfg!(miri) { MIN_INSERTS_HEIGHT_2 as u32 } else { 1000 };
+ let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+
+ fn test(map: &BTreeMap<u32, u32>, size: u32, min: Bound<&u32>, max: Bound<&u32>) {
+ let mut kvs = map.range((min, max)).map(|(&k, &v)| (k, v));
+ let mut pairs = (0..size).map(|i| (i, i));
+
+ for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+ assert_eq!(kv, pair);
+ }
+ assert_eq!(kvs.next(), None);
+ assert_eq!(pairs.next(), None);
+ }
+ test(&map, size, Included(&0), Excluded(&size));
+ test(&map, size, Unbounded, Excluded(&size));
+ test(&map, size, Included(&0), Included(&(size - 1)));
+ test(&map, size, Unbounded, Included(&(size - 1)));
+ test(&map, size, Included(&0), Unbounded);
+ test(&map, size, Unbounded, Unbounded);
+}
+
+#[test]
+fn test_range_borrowed_key() {
+ let mut map = BTreeMap::new();
+ map.insert("aardvark".to_string(), 1);
+ map.insert("baboon".to_string(), 2);
+ map.insert("coyote".to_string(), 3);
+ map.insert("dingo".to_string(), 4);
+ // NOTE: would like to use simply "b".."d" here...
+ let mut iter = map.range::<str, _>((Included("b"), Excluded("d")));
+ assert_eq!(iter.next(), Some((&"baboon".to_string(), &2)));
+ assert_eq!(iter.next(), Some((&"coyote".to_string(), &3)));
+ assert_eq!(iter.next(), None);
+}
+
+#[test]
+fn test_range() {
+ let size = 200;
+ // Miri is too slow
+ let step = if cfg!(miri) { 66 } else { 1 };
+ let map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+
+ for i in (0..size).step_by(step) {
+ for j in (i..size).step_by(step) {
+ let mut kvs = map.range((Included(&i), Included(&j))).map(|(&k, &v)| (k, v));
+ let mut pairs = (i..=j).map(|i| (i, i));
+
+ for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+ assert_eq!(kv, pair);
+ }
+ assert_eq!(kvs.next(), None);
+ assert_eq!(pairs.next(), None);
+ }
+ }
+}
+
+#[test]
+fn test_range_mut() {
+ let size = 200;
+ // Miri is too slow
+ let step = if cfg!(miri) { 66 } else { 1 };
+ let mut map: BTreeMap<_, _> = (0..size).map(|i| (i, i)).collect();
+
+ for i in (0..size).step_by(step) {
+ for j in (i..size).step_by(step) {
+ let mut kvs = map.range_mut((Included(&i), Included(&j))).map(|(&k, &mut v)| (k, v));
+ let mut pairs = (i..=j).map(|i| (i, i));
+
+ for (kv, pair) in kvs.by_ref().zip(pairs.by_ref()) {
+ assert_eq!(kv, pair);
+ }
+ assert_eq!(kvs.next(), None);
+ assert_eq!(pairs.next(), None);
+ }
+ }
+}
+
+mod test_drain_filter {
+ use super::*;
+
+ #[test]
+ fn empty() {
+ let mut map: BTreeMap<i32, i32> = BTreeMap::new();
+ map.drain_filter(|_, _| unreachable!("there's nothing to decide on"));
+ assert!(map.is_empty());
+ }
+
+ #[test]
+ fn consuming_nothing() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ assert!(map.drain_filter(|_, _| false).eq(std::iter::empty()));
+ }
+
+ #[test]
+ fn consuming_all() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ assert!(map.drain_filter(|_, _| true).eq(pairs));
+ }
+
+ #[test]
+ fn mutating_and_keeping() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ assert!(
+ map.drain_filter(|_, v| {
+ *v += 6;
+ false
+ })
+ .eq(std::iter::empty())
+ );
+ assert!(map.keys().copied().eq(0..3));
+ assert!(map.values().copied().eq(6..9));
+ }
+
+ #[test]
+ fn mutating_and_removing() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ assert!(
+ map.drain_filter(|_, v| {
+ *v += 6;
+ true
+ })
+ .eq((0..3).map(|i| (i, i + 6)))
+ );
+ assert!(map.is_empty());
+ }
+
+ #[test]
+ fn underfull_keeping_all() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ map.drain_filter(|_, _| false);
+ assert!(map.keys().copied().eq(0..3));
+ }
+
+ #[test]
+ fn underfull_removing_one() {
+ let pairs = (0..3).map(|i| (i, i));
+ for doomed in 0..3 {
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ map.drain_filter(|i, _| *i == doomed);
+ assert_eq!(map.len(), 2);
+ }
+ }
+
+ #[test]
+ fn underfull_keeping_one() {
+ let pairs = (0..3).map(|i| (i, i));
+ for sacred in 0..3 {
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ map.drain_filter(|i, _| *i != sacred);
+ assert!(map.keys().copied().eq(sacred..=sacred));
+ }
+ }
+
+ #[test]
+ fn underfull_removing_all() {
+ let pairs = (0..3).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ map.drain_filter(|_, _| true);
+ assert!(map.is_empty());
+ }
+
+ #[test]
+ fn height_0_keeping_all() {
+ let pairs = (0..NODE_CAPACITY).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ map.drain_filter(|_, _| false);
+ assert!(map.keys().copied().eq(0..NODE_CAPACITY));
+ }
+
+ #[test]
+ fn height_0_removing_one() {
+ let pairs = (0..NODE_CAPACITY).map(|i| (i, i));
+ for doomed in 0..NODE_CAPACITY {
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ map.drain_filter(|i, _| *i == doomed);
+ assert_eq!(map.len(), NODE_CAPACITY - 1);
+ }
+ }
+
+ #[test]
+ fn height_0_keeping_one() {
+ let pairs = (0..NODE_CAPACITY).map(|i| (i, i));
+ for sacred in 0..NODE_CAPACITY {
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ map.drain_filter(|i, _| *i != sacred);
+ assert!(map.keys().copied().eq(sacred..=sacred));
+ }
+ }
+
+ #[test]
+ fn height_0_removing_all() {
+ let pairs = (0..NODE_CAPACITY).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ map.drain_filter(|_, _| true);
+ assert!(map.is_empty());
+ }
+
+ #[test]
+ fn height_0_keeping_half() {
+ let mut map: BTreeMap<_, _> = (0..16).map(|i| (i, i)).collect();
+ assert_eq!(map.drain_filter(|i, _| *i % 2 == 0).count(), 8);
+ assert_eq!(map.len(), 8);
+ }
+
+ #[test]
+ fn height_1_removing_all() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ map.drain_filter(|_, _| true);
+ assert!(map.is_empty());
+ }
+
+ #[test]
+ fn height_1_removing_one() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
+ for doomed in 0..MIN_INSERTS_HEIGHT_1 {
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ map.drain_filter(|i, _| *i == doomed);
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_1 - 1);
+ }
+ }
+
+ #[test]
+ fn height_1_keeping_one() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_1).map(|i| (i, i));
+ for sacred in 0..MIN_INSERTS_HEIGHT_1 {
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ map.drain_filter(|i, _| *i != sacred);
+ assert!(map.keys().copied().eq(sacred..=sacred));
+ }
+ }
+
+ #[cfg(not(miri))] // Miri is too slow
+ #[test]
+ fn height_2_removing_one() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ for doomed in (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ map.drain_filter(|i, _| *i == doomed);
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1);
+ }
+ }
+
+ #[cfg(not(miri))] // Miri is too slow
+ #[test]
+ fn height_2_keeping_one() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ for sacred in (0..MIN_INSERTS_HEIGHT_2).step_by(12) {
+ let mut map: BTreeMap<_, _> = pairs.clone().collect();
+ map.drain_filter(|i, _| *i != sacred);
+ assert!(map.keys().copied().eq(sacred..=sacred));
+ }
+ }
+
+ #[test]
+ fn height_2_removing_all() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ let mut map: BTreeMap<_, _> = pairs.collect();
+ map.drain_filter(|_, _| true);
+ assert!(map.is_empty());
+ }
+
+ #[test]
+ fn drop_panic_leak() {
+ static PREDS: AtomicUsize = AtomicUsize::new(0);
+ static DROPS: AtomicUsize = AtomicUsize::new(0);
+
+ struct D;
+ impl Drop for D {
+ fn drop(&mut self) {
+ if DROPS.fetch_add(1, Ordering::SeqCst) == 1 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ // Keys are multiples of 4, so that each key is counted by a hexadecimal digit.
+ let mut map = (0..3).map(|i| (i * 4, D)).collect::<BTreeMap<_, _>>();
+
+ catch_unwind(move || {
+ drop(map.drain_filter(|i, _| {
+ PREDS.fetch_add(1usize << i, Ordering::SeqCst);
+ true
+ }))
+ })
+ .unwrap_err();
+
+ assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
+ assert_eq!(DROPS.load(Ordering::SeqCst), 3);
+ }
+
+ #[test]
+ fn pred_panic_leak() {
+ static PREDS: AtomicUsize = AtomicUsize::new(0);
+ static DROPS: AtomicUsize = AtomicUsize::new(0);
+
+ struct D;
+ impl Drop for D {
+ fn drop(&mut self) {
+ DROPS.fetch_add(1, Ordering::SeqCst);
+ }
+ }
+
+ // Keys are multiples of 4, so that each key is counted by a hexadecimal digit.
+ let mut map = (0..3).map(|i| (i * 4, D)).collect::<BTreeMap<_, _>>();
+
+ catch_unwind(AssertUnwindSafe(|| {
+ drop(map.drain_filter(|i, _| {
+ PREDS.fetch_add(1usize << i, Ordering::SeqCst);
+ match i {
+ 0 => true,
+ _ => panic!(),
+ }
+ }))
+ }))
+ .unwrap_err();
+
+ assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
+ assert_eq!(DROPS.load(Ordering::SeqCst), 1);
+ assert_eq!(map.len(), 2);
+ assert_eq!(map.first_entry().unwrap().key(), &4);
+ assert_eq!(map.last_entry().unwrap().key(), &8);
+ }
+
+ // Same as above, but attempt to use the iterator again after the panic in the predicate
+ #[test]
+ fn pred_panic_reuse() {
+ static PREDS: AtomicUsize = AtomicUsize::new(0);
+ static DROPS: AtomicUsize = AtomicUsize::new(0);
+
+ struct D;
+ impl Drop for D {
+ fn drop(&mut self) {
+ DROPS.fetch_add(1, Ordering::SeqCst);
+ }
+ }
+
+ // Keys are multiples of 4, so that each key is counted by a hexadecimal digit.
+ let mut map = (0..3).map(|i| (i * 4, D)).collect::<BTreeMap<_, _>>();
+
+ {
+ let mut it = map.drain_filter(|i, _| {
+ PREDS.fetch_add(1usize << i, Ordering::SeqCst);
+ match i {
+ 0 => true,
+ _ => panic!(),
+ }
+ });
+ catch_unwind(AssertUnwindSafe(|| while it.next().is_some() {})).unwrap_err();
+ // Iterator behaviour after a panic is explicitly unspecified,
+ // so this is just the current implementation:
+ let result = catch_unwind(AssertUnwindSafe(|| it.next()));
+ assert!(matches!(result, Ok(None)));
+ }
+
+ assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
+ assert_eq!(DROPS.load(Ordering::SeqCst), 1);
+ assert_eq!(map.len(), 2);
+ assert_eq!(map.first_entry().unwrap().key(), &4);
+ assert_eq!(map.last_entry().unwrap().key(), &8);
+ }
+}
+
+#[test]
+fn test_borrow() {
+ // make sure these compile -- using the Borrow trait
+ {
+ let mut map = BTreeMap::new();
+ map.insert("0".to_string(), 1);
+ assert_eq!(map["0"], 1);
+ }
+
+ {
+ let mut map = BTreeMap::new();
+ map.insert(Box::new(0), 1);
+ assert_eq!(map[&0], 1);
+ }
+
+ {
+ let mut map = BTreeMap::new();
+ map.insert(Box::new([0, 1]) as Box<[i32]>, 1);
+ assert_eq!(map[&[0, 1][..]], 1);
+ }
+
+ {
+ let mut map = BTreeMap::new();
+ map.insert(Rc::new(0), 1);
+ assert_eq!(map[&0], 1);
+ }
+}
+
+#[test]
+fn test_entry() {
+ let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)];
+
+ let mut map: BTreeMap<_, _> = xs.iter().cloned().collect();
+
+ // Existing key (insert)
+ match map.entry(1) {
+ Vacant(_) => unreachable!(),
+ Occupied(mut view) => {
+ assert_eq!(view.get(), &10);
+ assert_eq!(view.insert(100), 10);
+ }
+ }
+ assert_eq!(map.get(&1).unwrap(), &100);
+ assert_eq!(map.len(), 6);
+
+ // Existing key (update)
+ match map.entry(2) {
+ Vacant(_) => unreachable!(),
+ Occupied(mut view) => {
+ let v = view.get_mut();
+ *v *= 10;
+ }
+ }
+ assert_eq!(map.get(&2).unwrap(), &200);
+ assert_eq!(map.len(), 6);
+
+ // Existing key (take)
+ match map.entry(3) {
+ Vacant(_) => unreachable!(),
+ Occupied(view) => {
+ assert_eq!(view.remove(), 30);
+ }
+ }
+ assert_eq!(map.get(&3), None);
+ assert_eq!(map.len(), 5);
+
+ // Inexistent key (insert)
+ match map.entry(10) {
+ Occupied(_) => unreachable!(),
+ Vacant(view) => {
+ assert_eq!(*view.insert(1000), 1000);
+ }
+ }
+ assert_eq!(map.get(&10).unwrap(), &1000);
+ assert_eq!(map.len(), 6);
+}
+
+#[test]
+fn test_extend_ref() {
+ let mut a = BTreeMap::new();
+ a.insert(1, "one");
+ let mut b = BTreeMap::new();
+ b.insert(2, "two");
+ b.insert(3, "three");
+
+ a.extend(&b);
+
+ assert_eq!(a.len(), 3);
+ assert_eq!(a[&1], "one");
+ assert_eq!(a[&2], "two");
+ assert_eq!(a[&3], "three");
+}
+
+#[test]
+fn test_zst() {
+ let mut m = BTreeMap::new();
+ assert_eq!(m.len(), 0);
+
+ assert_eq!(m.insert((), ()), None);
+ assert_eq!(m.len(), 1);
+
+ assert_eq!(m.insert((), ()), Some(()));
+ assert_eq!(m.len(), 1);
+ assert_eq!(m.iter().count(), 1);
+
+ m.clear();
+ assert_eq!(m.len(), 0);
+
+ for _ in 0..100 {
+ m.insert((), ());
+ }
+
+ assert_eq!(m.len(), 1);
+ assert_eq!(m.iter().count(), 1);
+}
+
+// This test's only purpose is to ensure that zero-sized keys with nonsensical orderings
+// do not cause segfaults when used with zero-sized values. All other map behavior is
+// undefined.
+#[test]
+fn test_bad_zst() {
+ use std::cmp::Ordering;
+
+ struct Bad;
+
+ impl PartialEq for Bad {
+ fn eq(&self, _: &Self) -> bool {
+ false
+ }
+ }
+
+ impl Eq for Bad {}
+
+ impl PartialOrd for Bad {
+ fn partial_cmp(&self, _: &Self) -> Option<Ordering> {
+ Some(Ordering::Less)
+ }
+ }
+
+ impl Ord for Bad {
+ fn cmp(&self, _: &Self) -> Ordering {
+ Ordering::Less
+ }
+ }
+
+ let mut m = BTreeMap::new();
+
+ for _ in 0..100 {
+ m.insert(Bad, Bad);
+ }
+}
+
+#[test]
+fn test_clone() {
+ let mut map = BTreeMap::new();
+ let size = MIN_INSERTS_HEIGHT_1;
+ assert_eq!(map.len(), 0);
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 10 * i), None);
+ assert_eq!(map.len(), i + 1);
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size {
+ assert_eq!(map.insert(i, 100 * i), Some(10 * i));
+ assert_eq!(map.len(), size);
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.remove(&(i * 2)), Some(i * 200));
+ assert_eq!(map.len(), size - i - 1);
+ assert_eq!(map, map.clone());
+ }
+
+ for i in 0..size / 2 {
+ assert_eq!(map.remove(&(2 * i)), None);
+ assert_eq!(map.remove(&(2 * i + 1)), Some(i * 200 + 100));
+ assert_eq!(map.len(), size / 2 - i - 1);
+ assert_eq!(map, map.clone());
+ }
+
+ // Test a tree with 2 chock-full levels and a tree with 3 levels.
+ map = (1..MIN_INSERTS_HEIGHT_2).map(|i| (i, i)).collect();
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2 - 1);
+ assert_eq!(map, map.clone());
+ map.insert(0, 0);
+ assert_eq!(map.len(), MIN_INSERTS_HEIGHT_2);
+ assert_eq!(map, map.clone());
+}
+
+#[test]
+fn test_clone_from() {
+ let mut map1 = BTreeMap::new();
+ let max_size = MIN_INSERTS_HEIGHT_1;
+
+ // Range to max_size inclusive, because i is the size of map1 being tested.
+ for i in 0..=max_size {
+ let mut map2 = BTreeMap::new();
+ for j in 0..i {
+ let mut map1_copy = map2.clone();
+ map1_copy.clone_from(&map1); // small cloned from large
+ assert_eq!(map1_copy, map1);
+ let mut map2_copy = map1.clone();
+ map2_copy.clone_from(&map2); // large cloned from small
+ assert_eq!(map2_copy, map2);
+ map2.insert(100 * j + 1, 2 * j + 1);
+ }
+ map2.clone_from(&map1); // same length
+ assert_eq!(map2, map1);
+ map1.insert(i, 10 * i);
+ }
+}
+
+#[test]
+#[allow(dead_code)]
+fn test_variance() {
+ use std::collections::btree_map::{IntoIter, Iter, Keys, Range, Values};
+
+ fn map_key<'new>(v: BTreeMap<&'static str, ()>) -> BTreeMap<&'new str, ()> {
+ v
+ }
+ fn map_val<'new>(v: BTreeMap<(), &'static str>) -> BTreeMap<(), &'new str> {
+ v
+ }
+ fn iter_key<'a, 'new>(v: Iter<'a, &'static str, ()>) -> Iter<'a, &'new str, ()> {
+ v
+ }
+ fn iter_val<'a, 'new>(v: Iter<'a, (), &'static str>) -> Iter<'a, (), &'new str> {
+ v
+ }
+ fn into_iter_key<'new>(v: IntoIter<&'static str, ()>) -> IntoIter<&'new str, ()> {
+ v
+ }
+ fn into_iter_val<'new>(v: IntoIter<(), &'static str>) -> IntoIter<(), &'new str> {
+ v
+ }
+ fn range_key<'a, 'new>(v: Range<'a, &'static str, ()>) -> Range<'a, &'new str, ()> {
+ v
+ }
+ fn range_val<'a, 'new>(v: Range<'a, (), &'static str>) -> Range<'a, (), &'new str> {
+ v
+ }
+ fn keys<'a, 'new>(v: Keys<'a, &'static str, ()>) -> Keys<'a, &'new str, ()> {
+ v
+ }
+ fn vals<'a, 'new>(v: Values<'a, (), &'static str>) -> Values<'a, (), &'new str> {
+ v
+ }
+}
+
+#[test]
+fn test_occupied_entry_key() {
+ let mut a = BTreeMap::new();
+ let key = "hello there";
+ let value = "value goes here";
+ assert!(a.is_empty());
+ a.insert(key.clone(), value.clone());
+ assert_eq!(a.len(), 1);
+ assert_eq!(a[key], value);
+
+ match a.entry(key.clone()) {
+ Vacant(_) => panic!(),
+ Occupied(e) => assert_eq!(key, *e.key()),
+ }
+ assert_eq!(a.len(), 1);
+ assert_eq!(a[key], value);
+}
+
+#[test]
+fn test_vacant_entry_key() {
+ let mut a = BTreeMap::new();
+ let key = "hello there";
+ let value = "value goes here";
+
+ assert!(a.is_empty());
+ match a.entry(key.clone()) {
+ Occupied(_) => panic!(),
+ Vacant(e) => {
+ assert_eq!(key, *e.key());
+ e.insert(value.clone());
+ }
+ }
+ assert_eq!(a.len(), 1);
+ assert_eq!(a[key], value);
+}
+
+#[test]
+fn test_first_last_entry() {
+ let mut a = BTreeMap::new();
+ assert!(a.first_entry().is_none());
+ assert!(a.last_entry().is_none());
+ a.insert(1, 42);
+ assert_eq!(a.first_entry().unwrap().key(), &1);
+ assert_eq!(a.last_entry().unwrap().key(), &1);
+ a.insert(2, 24);
+ assert_eq!(a.first_entry().unwrap().key(), &1);
+ assert_eq!(a.last_entry().unwrap().key(), &2);
+ a.insert(0, 6);
+ assert_eq!(a.first_entry().unwrap().key(), &0);
+ assert_eq!(a.last_entry().unwrap().key(), &2);
+ let (k1, v1) = a.first_entry().unwrap().remove_entry();
+ assert_eq!(k1, 0);
+ assert_eq!(v1, 6);
+ let (k2, v2) = a.last_entry().unwrap().remove_entry();
+ assert_eq!(k2, 2);
+ assert_eq!(v2, 24);
+ assert_eq!(a.first_entry().unwrap().key(), &1);
+ assert_eq!(a.last_entry().unwrap().key(), &1);
+}
+
+macro_rules! create_append_test {
+ ($name:ident, $len:expr) => {
+ #[test]
+ fn $name() {
+ let mut a = BTreeMap::new();
+ for i in 0..8 {
+ a.insert(i, i);
+ }
+
+ let mut b = BTreeMap::new();
+ for i in 5..$len {
+ b.insert(i, 2 * i);
+ }
+
+ a.append(&mut b);
+
+ assert_eq!(a.len(), $len);
+ assert_eq!(b.len(), 0);
+
+ for i in 0..$len {
+ if i < 5 {
+ assert_eq!(a[&i], i);
+ } else {
+ assert_eq!(a[&i], 2 * i);
+ }
+ }
+
+ assert_eq!(a.remove(&($len - 1)), Some(2 * ($len - 1)));
+ assert_eq!(a.insert($len - 1, 20), None);
+ }
+ };
+}
+
+// These are mostly for testing the algorithm that "fixes" the right edge after insertion.
+// Single node.
+create_append_test!(test_append_9, 9);
+// Two leafs that don't need fixing.
+create_append_test!(test_append_17, 17);
+// Two leafs where the second one ends up underfull and needs stealing at the end.
+create_append_test!(test_append_14, 14);
+// Two leafs where the second one ends up empty because the insertion finished at the root.
+create_append_test!(test_append_12, 12);
+// Three levels; insertion finished at the root.
+create_append_test!(test_append_144, 144);
+// Three levels; insertion finished at leaf while there is an empty node on the second level.
+create_append_test!(test_append_145, 145);
+// Tests for several randomly chosen sizes.
+create_append_test!(test_append_170, 170);
+create_append_test!(test_append_181, 181);
+#[cfg(not(miri))] // Miri is too slow
+create_append_test!(test_append_239, 239);
+#[cfg(not(miri))] // Miri is too slow
+create_append_test!(test_append_1700, 1700);
+
+fn rand_data(len: usize) -> Vec<(u32, u32)> {
+ let mut rng = DeterministicRng::new();
+ Vec::from_iter((0..len).map(|_| (rng.next(), rng.next())))
+}
+
+#[test]
+fn test_split_off_empty_right() {
+ let mut data = rand_data(173);
+
+ let mut map = BTreeMap::from_iter(data.clone());
+ let right = map.split_off(&(data.iter().max().unwrap().0 + 1));
+
+ data.sort();
+ assert!(map.into_iter().eq(data));
+ assert!(right.into_iter().eq(None));
+}
+
+#[test]
+fn test_split_off_empty_left() {
+ let mut data = rand_data(314);
+
+ let mut map = BTreeMap::from_iter(data.clone());
+ let right = map.split_off(&data.iter().min().unwrap().0);
+
+ data.sort();
+ assert!(map.into_iter().eq(None));
+ assert!(right.into_iter().eq(data));
+}
+
+// In a tree with 3 levels, if all but a part of the first leaf node is split off,
+// make sure fix_top eliminates both top levels.
+#[test]
+fn test_split_off_tiny_left_height_2() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ let mut left: BTreeMap<_, _> = pairs.clone().collect();
+ let right = left.split_off(&1);
+ assert_eq!(left.len(), 1);
+ assert_eq!(right.len(), MIN_INSERTS_HEIGHT_2 - 1);
+ assert_eq!(*left.first_key_value().unwrap().0, 0);
+ assert_eq!(*right.first_key_value().unwrap().0, 1);
+}
+
+// In a tree with 3 levels, if only part of the last leaf node is split off,
+// make sure fix_top eliminates both top levels.
+#[test]
+fn test_split_off_tiny_right_height_2() {
+ let pairs = (0..MIN_INSERTS_HEIGHT_2).map(|i| (i, i));
+ let last = MIN_INSERTS_HEIGHT_2 - 1;
+ let mut left: BTreeMap<_, _> = pairs.clone().collect();
+ assert_eq!(*left.last_key_value().unwrap().0, last);
+ let right = left.split_off(&last);
+ assert_eq!(left.len(), MIN_INSERTS_HEIGHT_2 - 1);
+ assert_eq!(right.len(), 1);
+ assert_eq!(*left.last_key_value().unwrap().0, last - 1);
+ assert_eq!(*right.last_key_value().unwrap().0, last);
+}
+
+#[test]
+fn test_split_off_large_random_sorted() {
+ // Miri is too slow
+ let mut data = if cfg!(miri) { rand_data(529) } else { rand_data(1529) };
+ // special case with maximum height.
+ data.sort();
+
+ let mut map = BTreeMap::from_iter(data.clone());
+ let key = data[data.len() / 2].0;
+ let right = map.split_off(&key);
+
+ assert!(map.into_iter().eq(data.clone().into_iter().filter(|x| x.0 < key)));
+ assert!(right.into_iter().eq(data.into_iter().filter(|x| x.0 >= key)));
+}
+
+#[test]
+fn test_into_iter_drop_leak_height_0() {
+ static DROPS: AtomicUsize = AtomicUsize::new(0);
+
+ struct D;
+
+ impl Drop for D {
+ fn drop(&mut self) {
+ if DROPS.fetch_add(1, Ordering::SeqCst) == 3 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ let mut map = BTreeMap::new();
+ map.insert("a", D);
+ map.insert("b", D);
+ map.insert("c", D);
+ map.insert("d", D);
+ map.insert("e", D);
+
+ catch_unwind(move || drop(map.into_iter())).unwrap_err();
+
+ assert_eq!(DROPS.load(Ordering::SeqCst), 5);
+}
+
+#[test]
+fn test_into_iter_drop_leak_height_1() {
+ let size = MIN_INSERTS_HEIGHT_1;
+ static DROPS: AtomicUsize = AtomicUsize::new(0);
+ static PANIC_POINT: AtomicUsize = AtomicUsize::new(0);
+
+ struct D;
+ impl Drop for D {
+ fn drop(&mut self) {
+ if DROPS.fetch_add(1, Ordering::SeqCst) == PANIC_POINT.load(Ordering::SeqCst) {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ for panic_point in vec![0, 1, size - 2, size - 1] {
+ DROPS.store(0, Ordering::SeqCst);
+ PANIC_POINT.store(panic_point, Ordering::SeqCst);
+ let map: BTreeMap<_, _> = (0..size).map(|i| (i, D)).collect();
+ catch_unwind(move || drop(map.into_iter())).unwrap_err();
+ assert_eq!(DROPS.load(Ordering::SeqCst), size);
+ }
+}
diff --git a/library/alloc/tests/btree/mod.rs b/library/alloc/tests/btree/mod.rs
new file mode 100644
index 00000000000..1d08ae13e05
--- /dev/null
+++ b/library/alloc/tests/btree/mod.rs
@@ -0,0 +1,27 @@
+mod map;
+mod set;
+
+/// XorShiftRng
+struct DeterministicRng {
+ x: u32,
+ y: u32,
+ z: u32,
+ w: u32,
+}
+
+impl DeterministicRng {
+ fn new() -> Self {
+ DeterministicRng { x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, w: 0x113ba7bb }
+ }
+
+ fn next(&mut self) -> u32 {
+ let x = self.x;
+ let t = x ^ (x << 11);
+ self.x = self.y;
+ self.y = self.z;
+ self.z = self.w;
+ let w_ = self.w;
+ self.w = w_ ^ (w_ >> 19) ^ (t ^ (t >> 8));
+ self.w
+ }
+}
diff --git a/library/alloc/tests/btree/set.rs b/library/alloc/tests/btree/set.rs
new file mode 100644
index 00000000000..b6c34b7c6c3
--- /dev/null
+++ b/library/alloc/tests/btree/set.rs
@@ -0,0 +1,666 @@
+use std::collections::BTreeSet;
+use std::iter::FromIterator;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+use std::sync::atomic::{AtomicU32, Ordering};
+
+use super::DeterministicRng;
+
+#[test]
+fn test_clone_eq() {
+ let mut m = BTreeSet::new();
+
+ m.insert(1);
+ m.insert(2);
+
+ assert_eq!(m.clone(), m);
+}
+
+#[test]
+fn test_hash() {
+ use crate::hash;
+
+ let mut x = BTreeSet::new();
+ let mut y = BTreeSet::new();
+
+ x.insert(1);
+ x.insert(2);
+ x.insert(3);
+
+ y.insert(3);
+ y.insert(2);
+ y.insert(1);
+
+ assert_eq!(hash(&x), hash(&y));
+}
+
+#[test]
+fn test_iter_min_max() {
+ let mut a = BTreeSet::new();
+ assert_eq!(a.iter().min(), None);
+ assert_eq!(a.iter().max(), None);
+ assert_eq!(a.range(..).min(), None);
+ assert_eq!(a.range(..).max(), None);
+ assert_eq!(a.difference(&BTreeSet::new()).min(), None);
+ assert_eq!(a.difference(&BTreeSet::new()).max(), None);
+ assert_eq!(a.intersection(&a).min(), None);
+ assert_eq!(a.intersection(&a).max(), None);
+ assert_eq!(a.symmetric_difference(&BTreeSet::new()).min(), None);
+ assert_eq!(a.symmetric_difference(&BTreeSet::new()).max(), None);
+ assert_eq!(a.union(&a).min(), None);
+ assert_eq!(a.union(&a).max(), None);
+ a.insert(1);
+ a.insert(2);
+ assert_eq!(a.iter().min(), Some(&1));
+ assert_eq!(a.iter().max(), Some(&2));
+ assert_eq!(a.range(..).min(), Some(&1));
+ assert_eq!(a.range(..).max(), Some(&2));
+ assert_eq!(a.difference(&BTreeSet::new()).min(), Some(&1));
+ assert_eq!(a.difference(&BTreeSet::new()).max(), Some(&2));
+ assert_eq!(a.intersection(&a).min(), Some(&1));
+ assert_eq!(a.intersection(&a).max(), Some(&2));
+ assert_eq!(a.symmetric_difference(&BTreeSet::new()).min(), Some(&1));
+ assert_eq!(a.symmetric_difference(&BTreeSet::new()).max(), Some(&2));
+ assert_eq!(a.union(&a).min(), Some(&1));
+ assert_eq!(a.union(&a).max(), Some(&2));
+}
+
+fn check<F>(a: &[i32], b: &[i32], expected: &[i32], f: F)
+where
+ F: FnOnce(&BTreeSet<i32>, &BTreeSet<i32>, &mut dyn FnMut(&i32) -> bool) -> bool,
+{
+ let mut set_a = BTreeSet::new();
+ let mut set_b = BTreeSet::new();
+
+ for x in a {
+ assert!(set_a.insert(*x))
+ }
+ for y in b {
+ assert!(set_b.insert(*y))
+ }
+
+ let mut i = 0;
+ f(&set_a, &set_b, &mut |&x| {
+ if i < expected.len() {
+ assert_eq!(x, expected[i]);
+ }
+ i += 1;
+ true
+ });
+ assert_eq!(i, expected.len());
+}
+
+#[test]
+fn test_intersection() {
+ fn check_intersection(a: &[i32], b: &[i32], expected: &[i32]) {
+ check(a, b, expected, |x, y, f| x.intersection(y).all(f))
+ }
+
+ check_intersection(&[], &[], &[]);
+ check_intersection(&[1, 2, 3], &[], &[]);
+ check_intersection(&[], &[1, 2, 3], &[]);
+ check_intersection(&[2], &[1, 2, 3], &[2]);
+ check_intersection(&[1, 2, 3], &[2], &[2]);
+ check_intersection(&[11, 1, 3, 77, 103, 5, -5], &[2, 11, 77, -9, -42, 5, 3], &[3, 5, 11, 77]);
+
+ if cfg!(miri) {
+ // Miri is too slow
+ return;
+ }
+
+ let large = (0..100).collect::<Vec<_>>();
+ check_intersection(&[], &large, &[]);
+ check_intersection(&large, &[], &[]);
+ check_intersection(&[-1], &large, &[]);
+ check_intersection(&large, &[-1], &[]);
+ check_intersection(&[0], &large, &[0]);
+ check_intersection(&large, &[0], &[0]);
+ check_intersection(&[99], &large, &[99]);
+ check_intersection(&large, &[99], &[99]);
+ check_intersection(&[100], &large, &[]);
+ check_intersection(&large, &[100], &[]);
+ check_intersection(&[11, 5000, 1, 3, 77, 8924], &large, &[1, 3, 11, 77]);
+}
+
+#[test]
+fn test_intersection_size_hint() {
+ let x: BTreeSet<i32> = [3, 4].iter().copied().collect();
+ let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
+ let mut iter = x.intersection(&y);
+ assert_eq!(iter.size_hint(), (1, Some(1)));
+ assert_eq!(iter.next(), Some(&3));
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+
+ iter = y.intersection(&y);
+ assert_eq!(iter.size_hint(), (0, Some(3)));
+ assert_eq!(iter.next(), Some(&1));
+ assert_eq!(iter.size_hint(), (0, Some(2)));
+}
+
+#[test]
+fn test_difference() {
+ fn check_difference(a: &[i32], b: &[i32], expected: &[i32]) {
+ check(a, b, expected, |x, y, f| x.difference(y).all(f))
+ }
+
+ check_difference(&[], &[], &[]);
+ check_difference(&[1, 12], &[], &[1, 12]);
+ check_difference(&[], &[1, 2, 3, 9], &[]);
+ check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]);
+ check_difference(&[1, 3, 5, 9, 11], &[3, 6, 9], &[1, 5, 11]);
+ check_difference(&[1, 3, 5, 9, 11], &[0, 1], &[3, 5, 9, 11]);
+ check_difference(&[1, 3, 5, 9, 11], &[11, 12], &[1, 3, 5, 9]);
+ check_difference(
+ &[-5, 11, 22, 33, 40, 42],
+ &[-12, -5, 14, 23, 34, 38, 39, 50],
+ &[11, 22, 33, 40, 42],
+ );
+
+ if cfg!(miri) {
+ // Miri is too slow
+ return;
+ }
+
+ let large = (0..100).collect::<Vec<_>>();
+ check_difference(&[], &large, &[]);
+ check_difference(&[-1], &large, &[-1]);
+ check_difference(&[0], &large, &[]);
+ check_difference(&[99], &large, &[]);
+ check_difference(&[100], &large, &[100]);
+ check_difference(&[11, 5000, 1, 3, 77, 8924], &large, &[5000, 8924]);
+ check_difference(&large, &[], &large);
+ check_difference(&large, &[-1], &large);
+ check_difference(&large, &[100], &large);
+}
+
+#[test]
+fn test_difference_size_hint() {
+ let s246: BTreeSet<i32> = [2, 4, 6].iter().copied().collect();
+ let s23456: BTreeSet<i32> = (2..=6).collect();
+ let mut iter = s246.difference(&s23456);
+ assert_eq!(iter.size_hint(), (0, Some(3)));
+ assert_eq!(iter.next(), None);
+
+ let s12345: BTreeSet<i32> = (1..=5).collect();
+ iter = s246.difference(&s12345);
+ assert_eq!(iter.size_hint(), (0, Some(3)));
+ assert_eq!(iter.next(), Some(&6));
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+
+ let s34567: BTreeSet<i32> = (3..=7).collect();
+ iter = s246.difference(&s34567);
+ assert_eq!(iter.size_hint(), (0, Some(3)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (0, Some(2)));
+ assert_eq!(iter.next(), None);
+
+ let s1: BTreeSet<i32> = (-9..=1).collect();
+ iter = s246.difference(&s1);
+ assert_eq!(iter.size_hint(), (3, Some(3)));
+
+ let s2: BTreeSet<i32> = (-9..=2).collect();
+ iter = s246.difference(&s2);
+ assert_eq!(iter.size_hint(), (2, Some(2)));
+ assert_eq!(iter.next(), Some(&4));
+ assert_eq!(iter.size_hint(), (1, Some(1)));
+
+ let s23: BTreeSet<i32> = (2..=3).collect();
+ iter = s246.difference(&s23);
+ assert_eq!(iter.size_hint(), (1, Some(3)));
+ assert_eq!(iter.next(), Some(&4));
+ assert_eq!(iter.size_hint(), (1, Some(1)));
+
+ let s4: BTreeSet<i32> = (4..=4).collect();
+ iter = s246.difference(&s4);
+ assert_eq!(iter.size_hint(), (2, Some(3)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (1, Some(2)));
+ assert_eq!(iter.next(), Some(&6));
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+
+ let s56: BTreeSet<i32> = (5..=6).collect();
+ iter = s246.difference(&s56);
+ assert_eq!(iter.size_hint(), (1, Some(3)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (0, Some(2)));
+
+ let s6: BTreeSet<i32> = (6..=19).collect();
+ iter = s246.difference(&s6);
+ assert_eq!(iter.size_hint(), (2, Some(2)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (1, Some(1)));
+
+ let s7: BTreeSet<i32> = (7..=19).collect();
+ iter = s246.difference(&s7);
+ assert_eq!(iter.size_hint(), (3, Some(3)));
+}
+
+#[test]
+fn test_symmetric_difference() {
+ fn check_symmetric_difference(a: &[i32], b: &[i32], expected: &[i32]) {
+ check(a, b, expected, |x, y, f| x.symmetric_difference(y).all(f))
+ }
+
+ check_symmetric_difference(&[], &[], &[]);
+ check_symmetric_difference(&[1, 2, 3], &[2], &[1, 3]);
+ check_symmetric_difference(&[2], &[1, 2, 3], &[1, 3]);
+ check_symmetric_difference(&[1, 3, 5, 9, 11], &[-2, 3, 9, 14, 22], &[-2, 1, 5, 11, 14, 22]);
+}
+
+#[test]
+fn test_symmetric_difference_size_hint() {
+ let x: BTreeSet<i32> = [2, 4].iter().copied().collect();
+ let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
+ let mut iter = x.symmetric_difference(&y);
+ assert_eq!(iter.size_hint(), (0, Some(5)));
+ assert_eq!(iter.next(), Some(&1));
+ assert_eq!(iter.size_hint(), (0, Some(4)));
+ assert_eq!(iter.next(), Some(&3));
+ assert_eq!(iter.size_hint(), (0, Some(1)));
+}
+
+#[test]
+fn test_union() {
+ fn check_union(a: &[i32], b: &[i32], expected: &[i32]) {
+ check(a, b, expected, |x, y, f| x.union(y).all(f))
+ }
+
+ check_union(&[], &[], &[]);
+ check_union(&[1, 2, 3], &[2], &[1, 2, 3]);
+ check_union(&[2], &[1, 2, 3], &[1, 2, 3]);
+ check_union(
+ &[1, 3, 5, 9, 11, 16, 19, 24],
+ &[-2, 1, 5, 9, 13, 19],
+ &[-2, 1, 3, 5, 9, 11, 13, 16, 19, 24],
+ );
+}
+
+#[test]
+fn test_union_size_hint() {
+ let x: BTreeSet<i32> = [2, 4].iter().copied().collect();
+ let y: BTreeSet<i32> = [1, 2, 3].iter().copied().collect();
+ let mut iter = x.union(&y);
+ assert_eq!(iter.size_hint(), (3, Some(5)));
+ assert_eq!(iter.next(), Some(&1));
+ assert_eq!(iter.size_hint(), (2, Some(4)));
+ assert_eq!(iter.next(), Some(&2));
+ assert_eq!(iter.size_hint(), (1, Some(2)));
+}
+
+#[test]
+// Only tests the simple function definition with respect to intersection
+fn test_is_disjoint() {
+ let one = [1].iter().collect::<BTreeSet<_>>();
+ let two = [2].iter().collect::<BTreeSet<_>>();
+ assert!(one.is_disjoint(&two));
+}
+
+#[test]
+// Also implicitly tests the trivial function definition of is_superset
+fn test_is_subset() {
+ fn is_subset(a: &[i32], b: &[i32]) -> bool {
+ let set_a = a.iter().collect::<BTreeSet<_>>();
+ let set_b = b.iter().collect::<BTreeSet<_>>();
+ set_a.is_subset(&set_b)
+ }
+
+ assert_eq!(is_subset(&[], &[]), true);
+ assert_eq!(is_subset(&[], &[1, 2]), true);
+ assert_eq!(is_subset(&[0], &[1, 2]), false);
+ assert_eq!(is_subset(&[1], &[1, 2]), true);
+ assert_eq!(is_subset(&[2], &[1, 2]), true);
+ assert_eq!(is_subset(&[3], &[1, 2]), false);
+ assert_eq!(is_subset(&[1, 2], &[1]), false);
+ assert_eq!(is_subset(&[1, 2], &[1, 2]), true);
+ assert_eq!(is_subset(&[1, 2], &[2, 3]), false);
+ assert_eq!(
+ is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 33, 34, 38, 39, 40, 42]),
+ true
+ );
+ assert_eq!(is_subset(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 11, 14, 22, 23, 34, 38]), false);
+
+ if cfg!(miri) {
+ // Miri is too slow
+ return;
+ }
+
+ let large = (0..100).collect::<Vec<_>>();
+ assert_eq!(is_subset(&[], &large), true);
+ assert_eq!(is_subset(&large, &[]), false);
+ assert_eq!(is_subset(&[-1], &large), false);
+ assert_eq!(is_subset(&[0], &large), true);
+ assert_eq!(is_subset(&[1, 2], &large), true);
+ assert_eq!(is_subset(&[99, 100], &large), false);
+}
+
+#[test]
+fn test_drain_filter() {
+ let mut x: BTreeSet<_> = [1].iter().copied().collect();
+ let mut y: BTreeSet<_> = [1].iter().copied().collect();
+
+ x.drain_filter(|_| true);
+ y.drain_filter(|_| false);
+ assert_eq!(x.len(), 0);
+ assert_eq!(y.len(), 1);
+}
+
+#[test]
+fn test_drain_filter_drop_panic_leak() {
+ static PREDS: AtomicU32 = AtomicU32::new(0);
+ static DROPS: AtomicU32 = AtomicU32::new(0);
+
+ #[derive(PartialEq, Eq, PartialOrd, Ord)]
+ struct D(i32);
+ impl Drop for D {
+ fn drop(&mut self) {
+ if DROPS.fetch_add(1, Ordering::SeqCst) == 1 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ let mut set = BTreeSet::new();
+ set.insert(D(0));
+ set.insert(D(4));
+ set.insert(D(8));
+
+ catch_unwind(move || {
+ drop(set.drain_filter(|d| {
+ PREDS.fetch_add(1u32 << d.0, Ordering::SeqCst);
+ true
+ }))
+ })
+ .ok();
+
+ assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
+ assert_eq!(DROPS.load(Ordering::SeqCst), 3);
+}
+
+#[test]
+fn test_drain_filter_pred_panic_leak() {
+ static PREDS: AtomicU32 = AtomicU32::new(0);
+ static DROPS: AtomicU32 = AtomicU32::new(0);
+
+ #[derive(PartialEq, Eq, PartialOrd, Ord)]
+ struct D(i32);
+ impl Drop for D {
+ fn drop(&mut self) {
+ DROPS.fetch_add(1, Ordering::SeqCst);
+ }
+ }
+
+ let mut set = BTreeSet::new();
+ set.insert(D(0));
+ set.insert(D(4));
+ set.insert(D(8));
+
+ catch_unwind(AssertUnwindSafe(|| {
+ drop(set.drain_filter(|d| {
+ PREDS.fetch_add(1u32 << d.0, Ordering::SeqCst);
+ match d.0 {
+ 0 => true,
+ _ => panic!(),
+ }
+ }))
+ }))
+ .ok();
+
+ assert_eq!(PREDS.load(Ordering::SeqCst), 0x011);
+ assert_eq!(DROPS.load(Ordering::SeqCst), 1);
+ assert_eq!(set.len(), 2);
+ assert_eq!(set.first().unwrap().0, 4);
+ assert_eq!(set.last().unwrap().0, 8);
+}
+
+#[test]
+fn test_clear() {
+ let mut x = BTreeSet::new();
+ x.insert(1);
+
+ x.clear();
+ assert!(x.is_empty());
+}
+
+#[test]
+fn test_zip() {
+ let mut x = BTreeSet::new();
+ x.insert(5);
+ x.insert(12);
+ x.insert(11);
+
+ let mut y = BTreeSet::new();
+ y.insert("foo");
+ y.insert("bar");
+
+ let x = x;
+ let y = y;
+ let mut z = x.iter().zip(&y);
+
+ assert_eq!(z.next().unwrap(), (&5, &("bar")));
+ assert_eq!(z.next().unwrap(), (&11, &("foo")));
+ assert!(z.next().is_none());
+}
+
+#[test]
+fn test_from_iter() {
+ let xs = [1, 2, 3, 4, 5, 6, 7, 8, 9];
+
+ let set: BTreeSet<_> = xs.iter().cloned().collect();
+
+ for x in &xs {
+ assert!(set.contains(x));
+ }
+}
+
+#[test]
+fn test_show() {
+ let mut set = BTreeSet::new();
+ let empty = BTreeSet::<i32>::new();
+
+ set.insert(1);
+ set.insert(2);
+
+ let set_str = format!("{:?}", set);
+
+ assert_eq!(set_str, "{1, 2}");
+ assert_eq!(format!("{:?}", empty), "{}");
+}
+
+#[test]
+fn test_extend_ref() {
+ let mut a = BTreeSet::new();
+ a.insert(1);
+
+ a.extend(&[2, 3, 4]);
+
+ assert_eq!(a.len(), 4);
+ assert!(a.contains(&1));
+ assert!(a.contains(&2));
+ assert!(a.contains(&3));
+ assert!(a.contains(&4));
+
+ let mut b = BTreeSet::new();
+ b.insert(5);
+ b.insert(6);
+
+ a.extend(&b);
+
+ assert_eq!(a.len(), 6);
+ assert!(a.contains(&1));
+ assert!(a.contains(&2));
+ assert!(a.contains(&3));
+ assert!(a.contains(&4));
+ assert!(a.contains(&5));
+ assert!(a.contains(&6));
+}
+
+#[test]
+fn test_recovery() {
+ use std::cmp::Ordering;
+
+ #[derive(Debug)]
+ struct Foo(&'static str, i32);
+
+ impl PartialEq for Foo {
+ fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0
+ }
+ }
+
+ impl Eq for Foo {}
+
+ impl PartialOrd for Foo {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ self.0.partial_cmp(&other.0)
+ }
+ }
+
+ impl Ord for Foo {
+ fn cmp(&self, other: &Self) -> Ordering {
+ self.0.cmp(&other.0)
+ }
+ }
+
+ let mut s = BTreeSet::new();
+ assert_eq!(s.replace(Foo("a", 1)), None);
+ assert_eq!(s.len(), 1);
+ assert_eq!(s.replace(Foo("a", 2)), Some(Foo("a", 1)));
+ assert_eq!(s.len(), 1);
+
+ {
+ let mut it = s.iter();
+ assert_eq!(it.next(), Some(&Foo("a", 2)));
+ assert_eq!(it.next(), None);
+ }
+
+ assert_eq!(s.get(&Foo("a", 1)), Some(&Foo("a", 2)));
+ assert_eq!(s.take(&Foo("a", 1)), Some(Foo("a", 2)));
+ assert_eq!(s.len(), 0);
+
+ assert_eq!(s.get(&Foo("a", 1)), None);
+ assert_eq!(s.take(&Foo("a", 1)), None);
+
+ assert_eq!(s.iter().next(), None);
+}
+
+#[test]
+#[allow(dead_code)]
+fn test_variance() {
+ use std::collections::btree_set::{IntoIter, Iter, Range};
+
+ fn set<'new>(v: BTreeSet<&'static str>) -> BTreeSet<&'new str> {
+ v
+ }
+ fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> {
+ v
+ }
+ fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> {
+ v
+ }
+ fn range<'a, 'new>(v: Range<'a, &'static str>) -> Range<'a, &'new str> {
+ v
+ }
+}
+
+#[test]
+fn test_append() {
+ let mut a = BTreeSet::new();
+ a.insert(1);
+ a.insert(2);
+ a.insert(3);
+
+ let mut b = BTreeSet::new();
+ b.insert(3);
+ b.insert(4);
+ b.insert(5);
+
+ a.append(&mut b);
+
+ assert_eq!(a.len(), 5);
+ assert_eq!(b.len(), 0);
+
+ assert_eq!(a.contains(&1), true);
+ assert_eq!(a.contains(&2), true);
+ assert_eq!(a.contains(&3), true);
+ assert_eq!(a.contains(&4), true);
+ assert_eq!(a.contains(&5), true);
+}
+
+#[test]
+fn test_first_last() {
+ let mut a = BTreeSet::new();
+ assert_eq!(a.first(), None);
+ assert_eq!(a.last(), None);
+ a.insert(1);
+ assert_eq!(a.first(), Some(&1));
+ assert_eq!(a.last(), Some(&1));
+ a.insert(2);
+ assert_eq!(a.first(), Some(&1));
+ assert_eq!(a.last(), Some(&2));
+ for i in 3..=12 {
+ a.insert(i);
+ }
+ assert_eq!(a.first(), Some(&1));
+ assert_eq!(a.last(), Some(&12));
+ assert_eq!(a.pop_first(), Some(1));
+ assert_eq!(a.pop_last(), Some(12));
+ assert_eq!(a.pop_first(), Some(2));
+ assert_eq!(a.pop_last(), Some(11));
+ assert_eq!(a.pop_first(), Some(3));
+ assert_eq!(a.pop_last(), Some(10));
+ assert_eq!(a.pop_first(), Some(4));
+ assert_eq!(a.pop_first(), Some(5));
+ assert_eq!(a.pop_first(), Some(6));
+ assert_eq!(a.pop_first(), Some(7));
+ assert_eq!(a.pop_first(), Some(8));
+ assert_eq!(a.clone().pop_last(), Some(9));
+ assert_eq!(a.pop_first(), Some(9));
+ assert_eq!(a.pop_first(), None);
+ assert_eq!(a.pop_last(), None);
+}
+
+fn rand_data(len: usize) -> Vec<u32> {
+ let mut rng = DeterministicRng::new();
+ Vec::from_iter((0..len).map(|_| rng.next()))
+}
+
+#[test]
+fn test_split_off_empty_right() {
+ let mut data = rand_data(173);
+
+ let mut set = BTreeSet::from_iter(data.clone());
+ let right = set.split_off(&(data.iter().max().unwrap() + 1));
+
+ data.sort();
+ assert!(set.into_iter().eq(data));
+ assert!(right.into_iter().eq(None));
+}
+
+#[test]
+fn test_split_off_empty_left() {
+ let mut data = rand_data(314);
+
+ let mut set = BTreeSet::from_iter(data.clone());
+ let right = set.split_off(data.iter().min().unwrap());
+
+ data.sort();
+ assert!(set.into_iter().eq(None));
+ assert!(right.into_iter().eq(data));
+}
+
+#[test]
+fn test_split_off_large_random_sorted() {
+ // Miri is too slow
+ let mut data = if cfg!(miri) { rand_data(529) } else { rand_data(1529) };
+ // special case with maximum height.
+ data.sort();
+
+ let mut set = BTreeSet::from_iter(data.clone());
+ let key = data[data.len() / 2];
+ let right = set.split_off(&key);
+
+ assert!(set.into_iter().eq(data.clone().into_iter().filter(|x| *x < key)));
+ assert!(right.into_iter().eq(data.into_iter().filter(|x| *x >= key)));
+}
diff --git a/library/alloc/tests/cow_str.rs b/library/alloc/tests/cow_str.rs
new file mode 100644
index 00000000000..62a5c245a54
--- /dev/null
+++ b/library/alloc/tests/cow_str.rs
@@ -0,0 +1,144 @@
+use std::borrow::Cow;
+
+// check that Cow<'a, str> implements addition
+#[test]
+fn check_cow_add_cow() {
+ let borrowed1 = Cow::Borrowed("Hello, ");
+ let borrowed2 = Cow::Borrowed("World!");
+ let borrow_empty = Cow::Borrowed("");
+
+ let owned1: Cow<'_, str> = Cow::Owned(String::from("Hi, "));
+ let owned2: Cow<'_, str> = Cow::Owned(String::from("Rustaceans!"));
+ let owned_empty: Cow<'_, str> = Cow::Owned(String::new());
+
+ assert_eq!("Hello, World!", borrowed1.clone() + borrowed2.clone());
+ assert_eq!("Hello, Rustaceans!", borrowed1.clone() + owned2.clone());
+
+ assert_eq!("Hi, World!", owned1.clone() + borrowed2.clone());
+ assert_eq!("Hi, Rustaceans!", owned1.clone() + owned2.clone());
+
+ if let Cow::Owned(_) = borrowed1.clone() + borrow_empty.clone() {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ if let Cow::Owned(_) = borrow_empty.clone() + borrowed1.clone() {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ if let Cow::Owned(_) = borrowed1.clone() + owned_empty.clone() {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ if let Cow::Owned(_) = owned_empty.clone() + borrowed1.clone() {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+}
+
+#[test]
+fn check_cow_add_str() {
+ let borrowed = Cow::Borrowed("Hello, ");
+ let borrow_empty = Cow::Borrowed("");
+
+ let owned: Cow<'_, str> = Cow::Owned(String::from("Hi, "));
+ let owned_empty: Cow<'_, str> = Cow::Owned(String::new());
+
+ assert_eq!("Hello, World!", borrowed.clone() + "World!");
+
+ assert_eq!("Hi, World!", owned.clone() + "World!");
+
+ if let Cow::Owned(_) = borrowed.clone() + "" {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ if let Cow::Owned(_) = borrow_empty.clone() + "Hello, " {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ if let Cow::Owned(_) = owned_empty.clone() + "Hello, " {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+}
+
+#[test]
+fn check_cow_add_assign_cow() {
+ let mut borrowed1 = Cow::Borrowed("Hello, ");
+ let borrowed2 = Cow::Borrowed("World!");
+ let borrow_empty = Cow::Borrowed("");
+
+ let mut owned1: Cow<'_, str> = Cow::Owned(String::from("Hi, "));
+ let owned2: Cow<'_, str> = Cow::Owned(String::from("Rustaceans!"));
+ let owned_empty: Cow<'_, str> = Cow::Owned(String::new());
+
+ let mut s = borrowed1.clone();
+ s += borrow_empty.clone();
+ assert_eq!("Hello, ", s);
+ if let Cow::Owned(_) = s {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ let mut s = borrow_empty.clone();
+ s += borrowed1.clone();
+ assert_eq!("Hello, ", s);
+ if let Cow::Owned(_) = s {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ let mut s = borrowed1.clone();
+ s += owned_empty.clone();
+ assert_eq!("Hello, ", s);
+ if let Cow::Owned(_) = s {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ let mut s = owned_empty.clone();
+ s += borrowed1.clone();
+ assert_eq!("Hello, ", s);
+ if let Cow::Owned(_) = s {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+
+ owned1 += borrowed2;
+ borrowed1 += owned2;
+
+ assert_eq!("Hi, World!", owned1);
+ assert_eq!("Hello, Rustaceans!", borrowed1);
+}
+
+#[test]
+fn check_cow_add_assign_str() {
+ let mut borrowed = Cow::Borrowed("Hello, ");
+ let borrow_empty = Cow::Borrowed("");
+
+ let mut owned: Cow<'_, str> = Cow::Owned(String::from("Hi, "));
+ let owned_empty: Cow<'_, str> = Cow::Owned(String::new());
+
+ let mut s = borrowed.clone();
+ s += "";
+ assert_eq!("Hello, ", s);
+ if let Cow::Owned(_) = s {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ let mut s = borrow_empty.clone();
+ s += "World!";
+ assert_eq!("World!", s);
+ if let Cow::Owned(_) = s {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+ let mut s = owned_empty.clone();
+ s += "World!";
+ assert_eq!("World!", s);
+ if let Cow::Owned(_) = s {
+ panic!("Adding empty strings to a borrow should note allocate");
+ }
+
+ owned += "World!";
+ borrowed += "World!";
+
+ assert_eq!("Hi, World!", owned);
+ assert_eq!("Hello, World!", borrowed);
+}
+
+#[test]
+fn check_cow_clone_from() {
+ let mut c1: Cow<'_, str> = Cow::Owned(String::with_capacity(25));
+ let s: String = "hi".to_string();
+ assert!(s.capacity() < 25);
+ let c2: Cow<'_, str> = Cow::Owned(s);
+ c1.clone_from(&c2);
+ assert!(c1.into_owned().capacity() >= 25);
+ let mut c3: Cow<'_, str> = Cow::Borrowed("bye");
+ c3.clone_from(&c2);
+ assert_eq!(c2, c3);
+}
diff --git a/library/alloc/tests/fmt.rs b/library/alloc/tests/fmt.rs
new file mode 100644
index 00000000000..0ad092b4997
--- /dev/null
+++ b/library/alloc/tests/fmt.rs
@@ -0,0 +1,7 @@
+use std::fmt;
+
+#[test]
+fn test_format() {
+ let s = fmt::format(format_args!("Hello, {}!", "world"));
+ assert_eq!(s, "Hello, world!");
+}
diff --git a/library/alloc/tests/heap.rs b/library/alloc/tests/heap.rs
new file mode 100644
index 00000000000..62f062b83d7
--- /dev/null
+++ b/library/alloc/tests/heap.rs
@@ -0,0 +1,47 @@
+use std::alloc::{AllocInit, AllocRef, Global, Layout, System};
+
+/// Issue #45955 and #62251.
+#[test]
+fn alloc_system_overaligned_request() {
+ check_overalign_requests(System)
+}
+
+#[test]
+fn std_heap_overaligned_request() {
+ check_overalign_requests(Global)
+}
+
+fn check_overalign_requests<T: AllocRef>(mut allocator: T) {
+ for &align in &[4, 8, 16, 32] {
+ // less than and bigger than `MIN_ALIGN`
+ for &size in &[align / 2, align - 1] {
+ // size less than alignment
+ let iterations = 128;
+ unsafe {
+ let pointers: Vec<_> = (0..iterations)
+ .map(|_| {
+ allocator
+ .alloc(
+ Layout::from_size_align(size, align).unwrap(),
+ AllocInit::Uninitialized,
+ )
+ .unwrap()
+ .ptr
+ })
+ .collect();
+ for &ptr in &pointers {
+ assert_eq!(
+ (ptr.as_ptr() as usize) % align,
+ 0,
+ "Got a pointer less aligned than requested"
+ )
+ }
+
+ // Clean up
+ for &ptr in &pointers {
+ allocator.dealloc(ptr, Layout::from_size_align(size, align).unwrap())
+ }
+ }
+ }
+ }
+}
diff --git a/library/alloc/tests/lib.rs b/library/alloc/tests/lib.rs
new file mode 100644
index 00000000000..e2dc816b015
--- /dev/null
+++ b/library/alloc/tests/lib.rs
@@ -0,0 +1,57 @@
+#![feature(allocator_api)]
+#![feature(box_syntax)]
+#![feature(btree_drain_filter)]
+#![feature(drain_filter)]
+#![feature(exact_size_is_empty)]
+#![feature(map_first_last)]
+#![feature(new_uninit)]
+#![feature(pattern)]
+#![feature(trusted_len)]
+#![feature(try_reserve)]
+#![feature(unboxed_closures)]
+#![feature(associated_type_bounds)]
+#![feature(binary_heap_into_iter_sorted)]
+#![feature(binary_heap_drain_sorted)]
+#![feature(split_inclusive)]
+#![feature(binary_heap_retain)]
+
+use std::collections::hash_map::DefaultHasher;
+use std::hash::{Hash, Hasher};
+
+mod arc;
+mod binary_heap;
+mod borrow;
+mod boxed;
+mod btree;
+mod cow_str;
+mod fmt;
+mod heap;
+mod linked_list;
+mod rc;
+mod slice;
+mod str;
+mod string;
+mod vec;
+mod vec_deque;
+
+fn hash<T: Hash>(t: &T) -> u64 {
+ let mut s = DefaultHasher::new();
+ t.hash(&mut s);
+ s.finish()
+}
+
+// FIXME: Instantiated functions with i128 in the signature is not supported in Emscripten.
+// See https://github.com/kripken/emscripten-fastcomp/issues/169
+#[cfg(not(target_os = "emscripten"))]
+#[test]
+fn test_boxed_hasher() {
+ let ordinary_hash = hash(&5u32);
+
+ let mut hasher_1 = Box::new(DefaultHasher::new());
+ 5u32.hash(&mut hasher_1);
+ assert_eq!(ordinary_hash, hasher_1.finish());
+
+ let mut hasher_2 = Box::new(DefaultHasher::new()) as Box<dyn Hasher>;
+ 5u32.hash(&mut hasher_2);
+ assert_eq!(ordinary_hash, hasher_2.finish());
+}
diff --git a/library/alloc/tests/linked_list.rs b/library/alloc/tests/linked_list.rs
new file mode 100644
index 00000000000..afcb9e03fd0
--- /dev/null
+++ b/library/alloc/tests/linked_list.rs
@@ -0,0 +1,705 @@
+use std::collections::LinkedList;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+
+#[test]
+fn test_basic() {
+ let mut m = LinkedList::<Box<_>>::new();
+ assert_eq!(m.pop_front(), None);
+ assert_eq!(m.pop_back(), None);
+ assert_eq!(m.pop_front(), None);
+ m.push_front(box 1);
+ assert_eq!(m.pop_front(), Some(box 1));
+ m.push_back(box 2);
+ m.push_back(box 3);
+ assert_eq!(m.len(), 2);
+ assert_eq!(m.pop_front(), Some(box 2));
+ assert_eq!(m.pop_front(), Some(box 3));
+ assert_eq!(m.len(), 0);
+ assert_eq!(m.pop_front(), None);
+ m.push_back(box 1);
+ m.push_back(box 3);
+ m.push_back(box 5);
+ m.push_back(box 7);
+ assert_eq!(m.pop_front(), Some(box 1));
+
+ let mut n = LinkedList::new();
+ n.push_front(2);
+ n.push_front(3);
+ {
+ assert_eq!(n.front().unwrap(), &3);
+ let x = n.front_mut().unwrap();
+ assert_eq!(*x, 3);
+ *x = 0;
+ }
+ {
+ assert_eq!(n.back().unwrap(), &2);
+ let y = n.back_mut().unwrap();
+ assert_eq!(*y, 2);
+ *y = 1;
+ }
+ assert_eq!(n.pop_front(), Some(0));
+ assert_eq!(n.pop_front(), Some(1));
+}
+
+fn generate_test() -> LinkedList<i32> {
+ list_from(&[0, 1, 2, 3, 4, 5, 6])
+}
+
+fn list_from<T: Clone>(v: &[T]) -> LinkedList<T> {
+ v.iter().cloned().collect()
+}
+
+#[test]
+fn test_split_off() {
+ // singleton
+ {
+ let mut m = LinkedList::new();
+ m.push_back(1);
+
+ let p = m.split_off(0);
+ assert_eq!(m.len(), 0);
+ assert_eq!(p.len(), 1);
+ assert_eq!(p.back(), Some(&1));
+ assert_eq!(p.front(), Some(&1));
+ }
+
+ // not singleton, forwards
+ {
+ let u = vec![1, 2, 3, 4, 5];
+ let mut m = list_from(&u);
+ let mut n = m.split_off(2);
+ assert_eq!(m.len(), 2);
+ assert_eq!(n.len(), 3);
+ for elt in 1..3 {
+ assert_eq!(m.pop_front(), Some(elt));
+ }
+ for elt in 3..6 {
+ assert_eq!(n.pop_front(), Some(elt));
+ }
+ }
+ // not singleton, backwards
+ {
+ let u = vec![1, 2, 3, 4, 5];
+ let mut m = list_from(&u);
+ let mut n = m.split_off(4);
+ assert_eq!(m.len(), 4);
+ assert_eq!(n.len(), 1);
+ for elt in 1..5 {
+ assert_eq!(m.pop_front(), Some(elt));
+ }
+ for elt in 5..6 {
+ assert_eq!(n.pop_front(), Some(elt));
+ }
+ }
+
+ // no-op on the last index
+ {
+ let mut m = LinkedList::new();
+ m.push_back(1);
+
+ let p = m.split_off(1);
+ assert_eq!(m.len(), 1);
+ assert_eq!(p.len(), 0);
+ assert_eq!(m.back(), Some(&1));
+ assert_eq!(m.front(), Some(&1));
+ }
+}
+
+#[test]
+fn test_iterator() {
+ let m = generate_test();
+ for (i, elt) in m.iter().enumerate() {
+ assert_eq!(i as i32, *elt);
+ }
+ let mut n = LinkedList::new();
+ assert_eq!(n.iter().next(), None);
+ n.push_front(4);
+ let mut it = n.iter();
+ assert_eq!(it.size_hint(), (1, Some(1)));
+ assert_eq!(it.next().unwrap(), &4);
+ assert_eq!(it.size_hint(), (0, Some(0)));
+ assert_eq!(it.next(), None);
+}
+
+#[test]
+fn test_iterator_clone() {
+ let mut n = LinkedList::new();
+ n.push_back(2);
+ n.push_back(3);
+ n.push_back(4);
+ let mut it = n.iter();
+ it.next();
+ let mut jt = it.clone();
+ assert_eq!(it.next(), jt.next());
+ assert_eq!(it.next_back(), jt.next_back());
+ assert_eq!(it.next(), jt.next());
+}
+
+#[test]
+fn test_iterator_double_end() {
+ let mut n = LinkedList::new();
+ assert_eq!(n.iter().next(), None);
+ n.push_front(4);
+ n.push_front(5);
+ n.push_front(6);
+ let mut it = n.iter();
+ assert_eq!(it.size_hint(), (3, Some(3)));
+ assert_eq!(it.next().unwrap(), &6);
+ assert_eq!(it.size_hint(), (2, Some(2)));
+ assert_eq!(it.next_back().unwrap(), &4);
+ assert_eq!(it.size_hint(), (1, Some(1)));
+ assert_eq!(it.next_back().unwrap(), &5);
+ assert_eq!(it.next_back(), None);
+ assert_eq!(it.next(), None);
+}
+
+#[test]
+fn test_rev_iter() {
+ let m = generate_test();
+ for (i, elt) in m.iter().rev().enumerate() {
+ assert_eq!((6 - i) as i32, *elt);
+ }
+ let mut n = LinkedList::new();
+ assert_eq!(n.iter().rev().next(), None);
+ n.push_front(4);
+ let mut it = n.iter().rev();
+ assert_eq!(it.size_hint(), (1, Some(1)));
+ assert_eq!(it.next().unwrap(), &4);
+ assert_eq!(it.size_hint(), (0, Some(0)));
+ assert_eq!(it.next(), None);
+}
+
+#[test]
+fn test_mut_iter() {
+ let mut m = generate_test();
+ let mut len = m.len();
+ for (i, elt) in m.iter_mut().enumerate() {
+ assert_eq!(i as i32, *elt);
+ len -= 1;
+ }
+ assert_eq!(len, 0);
+ let mut n = LinkedList::new();
+ assert!(n.iter_mut().next().is_none());
+ n.push_front(4);
+ n.push_back(5);
+ let mut it = n.iter_mut();
+ assert_eq!(it.size_hint(), (2, Some(2)));
+ assert!(it.next().is_some());
+ assert!(it.next().is_some());
+ assert_eq!(it.size_hint(), (0, Some(0)));
+ assert!(it.next().is_none());
+}
+
+#[test]
+fn test_iterator_mut_double_end() {
+ let mut n = LinkedList::new();
+ assert!(n.iter_mut().next_back().is_none());
+ n.push_front(4);
+ n.push_front(5);
+ n.push_front(6);
+ let mut it = n.iter_mut();
+ assert_eq!(it.size_hint(), (3, Some(3)));
+ assert_eq!(*it.next().unwrap(), 6);
+ assert_eq!(it.size_hint(), (2, Some(2)));
+ assert_eq!(*it.next_back().unwrap(), 4);
+ assert_eq!(it.size_hint(), (1, Some(1)));
+ assert_eq!(*it.next_back().unwrap(), 5);
+ assert!(it.next_back().is_none());
+ assert!(it.next().is_none());
+}
+
+#[test]
+fn test_mut_rev_iter() {
+ let mut m = generate_test();
+ for (i, elt) in m.iter_mut().rev().enumerate() {
+ assert_eq!((6 - i) as i32, *elt);
+ }
+ let mut n = LinkedList::new();
+ assert!(n.iter_mut().rev().next().is_none());
+ n.push_front(4);
+ let mut it = n.iter_mut().rev();
+ assert!(it.next().is_some());
+ assert!(it.next().is_none());
+}
+
+#[test]
+fn test_eq() {
+ let mut n = list_from(&[]);
+ let mut m = list_from(&[]);
+ assert!(n == m);
+ n.push_front(1);
+ assert!(n != m);
+ m.push_back(1);
+ assert!(n == m);
+
+ let n = list_from(&[2, 3, 4]);
+ let m = list_from(&[1, 2, 3]);
+ assert!(n != m);
+}
+
+#[test]
+fn test_hash() {
+ use crate::hash;
+
+ let mut x = LinkedList::new();
+ let mut y = LinkedList::new();
+
+ assert!(hash(&x) == hash(&y));
+
+ x.push_back(1);
+ x.push_back(2);
+ x.push_back(3);
+
+ y.push_front(3);
+ y.push_front(2);
+ y.push_front(1);
+
+ assert!(hash(&x) == hash(&y));
+}
+
+#[test]
+fn test_ord() {
+ let n = list_from(&[]);
+ let m = list_from(&[1, 2, 3]);
+ assert!(n < m);
+ assert!(m > n);
+ assert!(n <= n);
+ assert!(n >= n);
+}
+
+#[test]
+fn test_ord_nan() {
+ let nan = 0.0f64 / 0.0;
+ let n = list_from(&[nan]);
+ let m = list_from(&[nan]);
+ assert!(!(n < m));
+ assert!(!(n > m));
+ assert!(!(n <= m));
+ assert!(!(n >= m));
+
+ let n = list_from(&[nan]);
+ let one = list_from(&[1.0f64]);
+ assert!(!(n < one));
+ assert!(!(n > one));
+ assert!(!(n <= one));
+ assert!(!(n >= one));
+
+ let u = list_from(&[1.0f64, 2.0, nan]);
+ let v = list_from(&[1.0f64, 2.0, 3.0]);
+ assert!(!(u < v));
+ assert!(!(u > v));
+ assert!(!(u <= v));
+ assert!(!(u >= v));
+
+ let s = list_from(&[1.0f64, 2.0, 4.0, 2.0]);
+ let t = list_from(&[1.0f64, 2.0, 3.0, 2.0]);
+ assert!(!(s < t));
+ assert!(s > one);
+ assert!(!(s <= one));
+ assert!(s >= one);
+}
+
+#[test]
+fn test_show() {
+ let list: LinkedList<_> = (0..10).collect();
+ assert_eq!(format!("{:?}", list), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
+
+ let list: LinkedList<_> = vec!["just", "one", "test", "more"].iter().cloned().collect();
+ assert_eq!(format!("{:?}", list), "[\"just\", \"one\", \"test\", \"more\"]");
+}
+
+#[test]
+fn test_extend_ref() {
+ let mut a = LinkedList::new();
+ a.push_back(1);
+
+ a.extend(&[2, 3, 4]);
+
+ assert_eq!(a.len(), 4);
+ assert_eq!(a, list_from(&[1, 2, 3, 4]));
+
+ let mut b = LinkedList::new();
+ b.push_back(5);
+ b.push_back(6);
+ a.extend(&b);
+
+ assert_eq!(a.len(), 6);
+ assert_eq!(a, list_from(&[1, 2, 3, 4, 5, 6]));
+}
+
+#[test]
+fn test_extend() {
+ let mut a = LinkedList::new();
+ a.push_back(1);
+ a.extend(vec![2, 3, 4]); // uses iterator
+
+ assert_eq!(a.len(), 4);
+ assert!(a.iter().eq(&[1, 2, 3, 4]));
+
+ let b: LinkedList<_> = vec![5, 6, 7].into_iter().collect();
+ a.extend(b); // specializes to `append`
+
+ assert_eq!(a.len(), 7);
+ assert!(a.iter().eq(&[1, 2, 3, 4, 5, 6, 7]));
+}
+
+#[test]
+fn test_contains() {
+ let mut l = LinkedList::new();
+ l.extend(&[2, 3, 4]);
+
+ assert!(l.contains(&3));
+ assert!(!l.contains(&1));
+
+ l.clear();
+
+ assert!(!l.contains(&3));
+}
+
+#[test]
+fn drain_filter_empty() {
+ let mut list: LinkedList<i32> = LinkedList::new();
+
+ {
+ let mut iter = list.drain_filter(|_| true);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ assert_eq!(list.len(), 0);
+ assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]);
+}
+
+#[test]
+fn drain_filter_zst() {
+ let mut list: LinkedList<_> = vec![(), (), (), (), ()].into_iter().collect();
+ let initial_len = list.len();
+ let mut count = 0;
+
+ {
+ let mut iter = list.drain_filter(|_| true);
+ assert_eq!(iter.size_hint(), (0, Some(initial_len)));
+ while let Some(_) = iter.next() {
+ count += 1;
+ assert_eq!(iter.size_hint(), (0, Some(initial_len - count)));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ assert_eq!(count, initial_len);
+ assert_eq!(list.len(), 0);
+ assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]);
+}
+
+#[test]
+fn drain_filter_false() {
+ let mut list: LinkedList<_> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();
+
+ let initial_len = list.len();
+ let mut count = 0;
+
+ {
+ let mut iter = list.drain_filter(|_| false);
+ assert_eq!(iter.size_hint(), (0, Some(initial_len)));
+ for _ in iter.by_ref() {
+ count += 1;
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ assert_eq!(count, 0);
+ assert_eq!(list.len(), initial_len);
+ assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
+}
+
+#[test]
+fn drain_filter_true() {
+ let mut list: LinkedList<_> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();
+
+ let initial_len = list.len();
+ let mut count = 0;
+
+ {
+ let mut iter = list.drain_filter(|_| true);
+ assert_eq!(iter.size_hint(), (0, Some(initial_len)));
+ while let Some(_) = iter.next() {
+ count += 1;
+ assert_eq!(iter.size_hint(), (0, Some(initial_len - count)));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ assert_eq!(count, initial_len);
+ assert_eq!(list.len(), 0);
+ assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![]);
+}
+
+#[test]
+fn drain_filter_complex() {
+ {
+ // [+xxx++++++xxxxx++++x+x++]
+ let mut list = vec![
+ 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37,
+ 39,
+ ]
+ .into_iter()
+ .collect::<LinkedList<_>>();
+
+ let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
+
+ assert_eq!(list.len(), 14);
+ assert_eq!(
+ list.into_iter().collect::<Vec<_>>(),
+ vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]
+ );
+ }
+
+ {
+ // [xxx++++++xxxxx++++x+x++]
+ let mut list = vec![
+ 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39,
+ ]
+ .into_iter()
+ .collect::<LinkedList<_>>();
+
+ let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
+
+ assert_eq!(list.len(), 13);
+ assert_eq!(
+ list.into_iter().collect::<Vec<_>>(),
+ vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]
+ );
+ }
+
+ {
+ // [xxx++++++xxxxx++++x+x]
+ let mut list =
+ vec![2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36]
+ .into_iter()
+ .collect::<LinkedList<_>>();
+
+ let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
+
+ assert_eq!(list.len(), 11);
+ assert_eq!(
+ list.into_iter().collect::<Vec<_>>(),
+ vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]
+ );
+ }
+
+ {
+ // [xxxxxxxxxx+++++++++++]
+ let mut list = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19]
+ .into_iter()
+ .collect::<LinkedList<_>>();
+
+ let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
+
+ assert_eq!(list.len(), 10);
+ assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
+ }
+
+ {
+ // [+++++++++++xxxxxxxxxx]
+ let mut list = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20]
+ .into_iter()
+ .collect::<LinkedList<_>>();
+
+ let removed = list.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
+
+ assert_eq!(list.len(), 10);
+ assert_eq!(list.into_iter().collect::<Vec<_>>(), vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
+ }
+}
+
+#[test]
+fn drain_filter_drop_panic_leak() {
+ static mut DROPS: i32 = 0;
+
+ struct D(bool);
+
+ impl Drop for D {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+
+ if self.0 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ let mut q = LinkedList::new();
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_front(D(false));
+ q.push_front(D(true));
+ q.push_front(D(false));
+
+ catch_unwind(AssertUnwindSafe(|| drop(q.drain_filter(|_| true)))).ok();
+
+ assert_eq!(unsafe { DROPS }, 8);
+ assert!(q.is_empty());
+}
+
+#[test]
+fn drain_filter_pred_panic_leak() {
+ static mut DROPS: i32 = 0;
+
+ #[derive(Debug)]
+ struct D(u32);
+
+ impl Drop for D {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let mut q = LinkedList::new();
+ q.push_back(D(3));
+ q.push_back(D(4));
+ q.push_back(D(5));
+ q.push_back(D(6));
+ q.push_back(D(7));
+ q.push_front(D(2));
+ q.push_front(D(1));
+ q.push_front(D(0));
+
+ catch_unwind(AssertUnwindSafe(|| {
+ drop(q.drain_filter(|item| if item.0 >= 2 { panic!() } else { true }))
+ }))
+ .ok();
+
+ assert_eq!(unsafe { DROPS }, 2); // 0 and 1
+ assert_eq!(q.len(), 6);
+}
+
+#[test]
+fn test_drop() {
+ static mut DROPS: i32 = 0;
+ struct Elem;
+ impl Drop for Elem {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let mut ring = LinkedList::new();
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ drop(ring);
+
+ assert_eq!(unsafe { DROPS }, 4);
+}
+
+#[test]
+fn test_drop_with_pop() {
+ static mut DROPS: i32 = 0;
+ struct Elem;
+ impl Drop for Elem {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let mut ring = LinkedList::new();
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+
+ drop(ring.pop_back());
+ drop(ring.pop_front());
+ assert_eq!(unsafe { DROPS }, 2);
+
+ drop(ring);
+ assert_eq!(unsafe { DROPS }, 4);
+}
+
+#[test]
+fn test_drop_clear() {
+ static mut DROPS: i32 = 0;
+ struct Elem;
+ impl Drop for Elem {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let mut ring = LinkedList::new();
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ ring.clear();
+ assert_eq!(unsafe { DROPS }, 4);
+
+ drop(ring);
+ assert_eq!(unsafe { DROPS }, 4);
+}
+
+#[test]
+fn test_drop_panic() {
+ static mut DROPS: i32 = 0;
+
+ struct D(bool);
+
+ impl Drop for D {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+
+ if self.0 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ let mut q = LinkedList::new();
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_front(D(false));
+ q.push_front(D(false));
+ q.push_front(D(true));
+
+ catch_unwind(move || drop(q)).ok();
+
+ assert_eq!(unsafe { DROPS }, 8);
+}
diff --git a/library/alloc/tests/rc.rs b/library/alloc/tests/rc.rs
new file mode 100644
index 00000000000..501b4f0f816
--- /dev/null
+++ b/library/alloc/tests/rc.rs
@@ -0,0 +1,193 @@
+use std::any::Any;
+use std::cell::RefCell;
+use std::cmp::PartialEq;
+use std::iter::TrustedLen;
+use std::mem;
+use std::rc::{Rc, Weak};
+
+#[test]
+fn uninhabited() {
+ enum Void {}
+ let mut a = Weak::<Void>::new();
+ a = a.clone();
+ assert!(a.upgrade().is_none());
+
+ let mut a: Weak<dyn Any> = a; // Unsizing
+ a = a.clone();
+ assert!(a.upgrade().is_none());
+}
+
+#[test]
+fn slice() {
+ let a: Rc<[u32; 3]> = Rc::new([3, 2, 1]);
+ let a: Rc<[u32]> = a; // Unsizing
+ let b: Rc<[u32]> = Rc::from(&[3, 2, 1][..]); // Conversion
+ assert_eq!(a, b);
+
+ // Exercise is_dangling() with a DST
+ let mut a = Rc::downgrade(&a);
+ a = a.clone();
+ assert!(a.upgrade().is_some());
+}
+
+#[test]
+fn trait_object() {
+ let a: Rc<u32> = Rc::new(4);
+ let a: Rc<dyn Any> = a; // Unsizing
+
+ // Exercise is_dangling() with a DST
+ let mut a = Rc::downgrade(&a);
+ a = a.clone();
+ assert!(a.upgrade().is_some());
+
+ let mut b = Weak::<u32>::new();
+ b = b.clone();
+ assert!(b.upgrade().is_none());
+ let mut b: Weak<dyn Any> = b; // Unsizing
+ b = b.clone();
+ assert!(b.upgrade().is_none());
+}
+
+#[test]
+fn float_nan_ne() {
+ let x = Rc::new(f32::NAN);
+ assert!(x != x);
+ assert!(!(x == x));
+}
+
+#[test]
+fn partial_eq() {
+ struct TestPEq(RefCell<usize>);
+ impl PartialEq for TestPEq {
+ fn eq(&self, other: &TestPEq) -> bool {
+ *self.0.borrow_mut() += 1;
+ *other.0.borrow_mut() += 1;
+ true
+ }
+ }
+ let x = Rc::new(TestPEq(RefCell::new(0)));
+ assert!(x == x);
+ assert!(!(x != x));
+ assert_eq!(*x.0.borrow(), 4);
+}
+
+#[test]
+fn eq() {
+ #[derive(Eq)]
+ struct TestEq(RefCell<usize>);
+ impl PartialEq for TestEq {
+ fn eq(&self, other: &TestEq) -> bool {
+ *self.0.borrow_mut() += 1;
+ *other.0.borrow_mut() += 1;
+ true
+ }
+ }
+ let x = Rc::new(TestEq(RefCell::new(0)));
+ assert!(x == x);
+ assert!(!(x != x));
+ assert_eq!(*x.0.borrow(), 0);
+}
+
+const SHARED_ITER_MAX: u16 = 100;
+
+fn assert_trusted_len<I: TrustedLen>(_: &I) {}
+
+#[test]
+fn shared_from_iter_normal() {
+ // Exercise the base implementation for non-`TrustedLen` iterators.
+ {
+ // `Filter` is never `TrustedLen` since we don't
+ // know statically how many elements will be kept:
+ let iter = (0..SHARED_ITER_MAX).filter(|x| x % 2 == 0).map(Box::new);
+
+ // Collecting into a `Vec<T>` or `Rc<[T]>` should make no difference:
+ let vec = iter.clone().collect::<Vec<_>>();
+ let rc = iter.collect::<Rc<[_]>>();
+ assert_eq!(&*vec, &*rc);
+
+ // Clone a bit and let these get dropped.
+ {
+ let _rc_2 = rc.clone();
+ let _rc_3 = rc.clone();
+ let _rc_4 = Rc::downgrade(&_rc_3);
+ }
+ } // Drop what hasn't been here.
+}
+
+#[test]
+fn shared_from_iter_trustedlen_normal() {
+ // Exercise the `TrustedLen` implementation under normal circumstances
+ // where `size_hint()` matches `(_, Some(exact_len))`.
+ {
+ let iter = (0..SHARED_ITER_MAX).map(Box::new);
+ assert_trusted_len(&iter);
+
+ // Collecting into a `Vec<T>` or `Rc<[T]>` should make no difference:
+ let vec = iter.clone().collect::<Vec<_>>();
+ let rc = iter.collect::<Rc<[_]>>();
+ assert_eq!(&*vec, &*rc);
+ assert_eq!(mem::size_of::<Box<u16>>() * SHARED_ITER_MAX as usize, mem::size_of_val(&*rc));
+
+ // Clone a bit and let these get dropped.
+ {
+ let _rc_2 = rc.clone();
+ let _rc_3 = rc.clone();
+ let _rc_4 = Rc::downgrade(&_rc_3);
+ }
+ } // Drop what hasn't been here.
+
+ // Try a ZST to make sure it is handled well.
+ {
+ let iter = (0..SHARED_ITER_MAX).map(drop);
+ let vec = iter.clone().collect::<Vec<_>>();
+ let rc = iter.collect::<Rc<[_]>>();
+ assert_eq!(&*vec, &*rc);
+ assert_eq!(0, mem::size_of_val(&*rc));
+ {
+ let _rc_2 = rc.clone();
+ let _rc_3 = rc.clone();
+ let _rc_4 = Rc::downgrade(&_rc_3);
+ }
+ }
+}
+
+#[test]
+#[should_panic = "I've almost got 99 problems."]
+fn shared_from_iter_trustedlen_panic() {
+ // Exercise the `TrustedLen` implementation when `size_hint()` matches
+ // `(_, Some(exact_len))` but where `.next()` drops before the last iteration.
+ let iter = (0..SHARED_ITER_MAX).map(|val| match val {
+ 98 => panic!("I've almost got 99 problems."),
+ _ => Box::new(val),
+ });
+ assert_trusted_len(&iter);
+ let _ = iter.collect::<Rc<[_]>>();
+
+ panic!("I am unreachable.");
+}
+
+#[test]
+fn shared_from_iter_trustedlen_no_fuse() {
+ // Exercise the `TrustedLen` implementation when `size_hint()` matches
+ // `(_, Some(exact_len))` but where the iterator does not behave in a fused manner.
+ struct Iter(std::vec::IntoIter<Option<Box<u8>>>);
+
+ unsafe impl TrustedLen for Iter {}
+
+ impl Iterator for Iter {
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (2, Some(2))
+ }
+
+ type Item = Box<u8>;
+
+ fn next(&mut self) -> Option<Self::Item> {
+ self.0.next().flatten()
+ }
+ }
+
+ let vec = vec![Some(Box::new(42)), Some(Box::new(24)), None, Some(Box::new(12))];
+ let iter = Iter(vec.into_iter());
+ assert_trusted_len(&iter);
+ assert_eq!(&[Box::new(42), Box::new(24)], &*iter.collect::<Rc<[_]>>());
+}
diff --git a/library/alloc/tests/slice.rs b/library/alloc/tests/slice.rs
new file mode 100644
index 00000000000..75b76bb73ed
--- /dev/null
+++ b/library/alloc/tests/slice.rs
@@ -0,0 +1,1771 @@
+use std::cell::Cell;
+use std::cmp::Ordering::{self, Equal, Greater, Less};
+use std::mem;
+use std::panic;
+use std::rc::Rc;
+use std::sync::atomic::{AtomicUsize, Ordering::Relaxed};
+
+use rand::distributions::Standard;
+use rand::seq::SliceRandom;
+use rand::{thread_rng, Rng, RngCore};
+
+fn square(n: usize) -> usize {
+ n * n
+}
+
+fn is_odd(n: &usize) -> bool {
+ *n % 2 == 1
+}
+
+#[test]
+fn test_from_fn() {
+ // Test on-stack from_fn.
+ let mut v: Vec<_> = (0..3).map(square).collect();
+ {
+ let v = v;
+ assert_eq!(v.len(), 3);
+ assert_eq!(v[0], 0);
+ assert_eq!(v[1], 1);
+ assert_eq!(v[2], 4);
+ }
+
+ // Test on-heap from_fn.
+ v = (0..5).map(square).collect();
+ {
+ let v = v;
+ assert_eq!(v.len(), 5);
+ assert_eq!(v[0], 0);
+ assert_eq!(v[1], 1);
+ assert_eq!(v[2], 4);
+ assert_eq!(v[3], 9);
+ assert_eq!(v[4], 16);
+ }
+}
+
+#[test]
+fn test_from_elem() {
+ // Test on-stack from_elem.
+ let mut v = vec![10, 10];
+ {
+ let v = v;
+ assert_eq!(v.len(), 2);
+ assert_eq!(v[0], 10);
+ assert_eq!(v[1], 10);
+ }
+
+ // Test on-heap from_elem.
+ v = vec![20; 6];
+ {
+ let v = &v[..];
+ assert_eq!(v[0], 20);
+ assert_eq!(v[1], 20);
+ assert_eq!(v[2], 20);
+ assert_eq!(v[3], 20);
+ assert_eq!(v[4], 20);
+ assert_eq!(v[5], 20);
+ }
+}
+
+#[test]
+fn test_is_empty() {
+ let xs: [i32; 0] = [];
+ assert!(xs.is_empty());
+ assert!(![0].is_empty());
+}
+
+#[test]
+fn test_len_divzero() {
+ type Z = [i8; 0];
+ let v0: &[Z] = &[];
+ let v1: &[Z] = &[[]];
+ let v2: &[Z] = &[[], []];
+ assert_eq!(mem::size_of::<Z>(), 0);
+ assert_eq!(v0.len(), 0);
+ assert_eq!(v1.len(), 1);
+ assert_eq!(v2.len(), 2);
+}
+
+#[test]
+fn test_get() {
+ let mut a = vec![11];
+ assert_eq!(a.get(1), None);
+ a = vec![11, 12];
+ assert_eq!(a.get(1).unwrap(), &12);
+ a = vec![11, 12, 13];
+ assert_eq!(a.get(1).unwrap(), &12);
+}
+
+#[test]
+fn test_first() {
+ let mut a = vec![];
+ assert_eq!(a.first(), None);
+ a = vec![11];
+ assert_eq!(a.first().unwrap(), &11);
+ a = vec![11, 12];
+ assert_eq!(a.first().unwrap(), &11);
+}
+
+#[test]
+fn test_first_mut() {
+ let mut a = vec![];
+ assert_eq!(a.first_mut(), None);
+ a = vec![11];
+ assert_eq!(*a.first_mut().unwrap(), 11);
+ a = vec![11, 12];
+ assert_eq!(*a.first_mut().unwrap(), 11);
+}
+
+#[test]
+fn test_split_first() {
+ let mut a = vec![11];
+ let b: &[i32] = &[];
+ assert!(b.split_first().is_none());
+ assert_eq!(a.split_first(), Some((&11, b)));
+ a = vec![11, 12];
+ let b: &[i32] = &[12];
+ assert_eq!(a.split_first(), Some((&11, b)));
+}
+
+#[test]
+fn test_split_first_mut() {
+ let mut a = vec![11];
+ let b: &mut [i32] = &mut [];
+ assert!(b.split_first_mut().is_none());
+ assert!(a.split_first_mut() == Some((&mut 11, b)));
+ a = vec![11, 12];
+ let b: &mut [_] = &mut [12];
+ assert!(a.split_first_mut() == Some((&mut 11, b)));
+}
+
+#[test]
+fn test_split_last() {
+ let mut a = vec![11];
+ let b: &[i32] = &[];
+ assert!(b.split_last().is_none());
+ assert_eq!(a.split_last(), Some((&11, b)));
+ a = vec![11, 12];
+ let b: &[_] = &[11];
+ assert_eq!(a.split_last(), Some((&12, b)));
+}
+
+#[test]
+fn test_split_last_mut() {
+ let mut a = vec![11];
+ let b: &mut [i32] = &mut [];
+ assert!(b.split_last_mut().is_none());
+ assert!(a.split_last_mut() == Some((&mut 11, b)));
+
+ a = vec![11, 12];
+ let b: &mut [_] = &mut [11];
+ assert!(a.split_last_mut() == Some((&mut 12, b)));
+}
+
+#[test]
+fn test_last() {
+ let mut a = vec![];
+ assert_eq!(a.last(), None);
+ a = vec![11];
+ assert_eq!(a.last().unwrap(), &11);
+ a = vec![11, 12];
+ assert_eq!(a.last().unwrap(), &12);
+}
+
+#[test]
+fn test_last_mut() {
+ let mut a = vec![];
+ assert_eq!(a.last_mut(), None);
+ a = vec![11];
+ assert_eq!(*a.last_mut().unwrap(), 11);
+ a = vec![11, 12];
+ assert_eq!(*a.last_mut().unwrap(), 12);
+}
+
+#[test]
+fn test_slice() {
+ // Test fixed length vector.
+ let vec_fixed = [1, 2, 3, 4];
+ let v_a = vec_fixed[1..vec_fixed.len()].to_vec();
+ assert_eq!(v_a.len(), 3);
+
+ assert_eq!(v_a[0], 2);
+ assert_eq!(v_a[1], 3);
+ assert_eq!(v_a[2], 4);
+
+ // Test on stack.
+ let vec_stack: &[_] = &[1, 2, 3];
+ let v_b = vec_stack[1..3].to_vec();
+ assert_eq!(v_b.len(), 2);
+
+ assert_eq!(v_b[0], 2);
+ assert_eq!(v_b[1], 3);
+
+ // Test `Box<[T]>`
+ let vec_unique = vec![1, 2, 3, 4, 5, 6];
+ let v_d = vec_unique[1..6].to_vec();
+ assert_eq!(v_d.len(), 5);
+
+ assert_eq!(v_d[0], 2);
+ assert_eq!(v_d[1], 3);
+ assert_eq!(v_d[2], 4);
+ assert_eq!(v_d[3], 5);
+ assert_eq!(v_d[4], 6);
+}
+
+#[test]
+fn test_slice_from() {
+ let vec: &[_] = &[1, 2, 3, 4];
+ assert_eq!(&vec[..], vec);
+ let b: &[_] = &[3, 4];
+ assert_eq!(&vec[2..], b);
+ let b: &[_] = &[];
+ assert_eq!(&vec[4..], b);
+}
+
+#[test]
+fn test_slice_to() {
+ let vec: &[_] = &[1, 2, 3, 4];
+ assert_eq!(&vec[..4], vec);
+ let b: &[_] = &[1, 2];
+ assert_eq!(&vec[..2], b);
+ let b: &[_] = &[];
+ assert_eq!(&vec[..0], b);
+}
+
+#[test]
+fn test_pop() {
+ let mut v = vec![5];
+ let e = v.pop();
+ assert_eq!(v.len(), 0);
+ assert_eq!(e, Some(5));
+ let f = v.pop();
+ assert_eq!(f, None);
+ let g = v.pop();
+ assert_eq!(g, None);
+}
+
+#[test]
+fn test_swap_remove() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ let mut e = v.swap_remove(0);
+ assert_eq!(e, 1);
+ assert_eq!(v, [5, 2, 3, 4]);
+ e = v.swap_remove(3);
+ assert_eq!(e, 4);
+ assert_eq!(v, [5, 2, 3]);
+}
+
+#[test]
+#[should_panic]
+fn test_swap_remove_fail() {
+ let mut v = vec![1];
+ let _ = v.swap_remove(0);
+ let _ = v.swap_remove(0);
+}
+
+#[test]
+fn test_swap_remove_noncopyable() {
+ // Tests that we don't accidentally run destructors twice.
+ let mut v: Vec<Box<_>> = Vec::new();
+ v.push(box 0);
+ v.push(box 0);
+ v.push(box 0);
+ let mut _e = v.swap_remove(0);
+ assert_eq!(v.len(), 2);
+ _e = v.swap_remove(1);
+ assert_eq!(v.len(), 1);
+ _e = v.swap_remove(0);
+ assert_eq!(v.len(), 0);
+}
+
+#[test]
+fn test_push() {
+ // Test on-stack push().
+ let mut v = vec![];
+ v.push(1);
+ assert_eq!(v.len(), 1);
+ assert_eq!(v[0], 1);
+
+ // Test on-heap push().
+ v.push(2);
+ assert_eq!(v.len(), 2);
+ assert_eq!(v[0], 1);
+ assert_eq!(v[1], 2);
+}
+
+#[test]
+fn test_truncate() {
+ let mut v: Vec<Box<_>> = vec![box 6, box 5, box 4];
+ v.truncate(1);
+ let v = v;
+ assert_eq!(v.len(), 1);
+ assert_eq!(*(v[0]), 6);
+ // If the unsafe block didn't drop things properly, we blow up here.
+}
+
+#[test]
+fn test_clear() {
+ let mut v: Vec<Box<_>> = vec![box 6, box 5, box 4];
+ v.clear();
+ assert_eq!(v.len(), 0);
+ // If the unsafe block didn't drop things properly, we blow up here.
+}
+
+#[test]
+fn test_retain() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ v.retain(is_odd);
+ assert_eq!(v, [1, 3, 5]);
+}
+
+#[test]
+fn test_binary_search() {
+ assert_eq!([1, 2, 3, 4, 5].binary_search(&5).ok(), Some(4));
+ assert_eq!([1, 2, 3, 4, 5].binary_search(&4).ok(), Some(3));
+ assert_eq!([1, 2, 3, 4, 5].binary_search(&3).ok(), Some(2));
+ assert_eq!([1, 2, 3, 4, 5].binary_search(&2).ok(), Some(1));
+ assert_eq!([1, 2, 3, 4, 5].binary_search(&1).ok(), Some(0));
+
+ assert_eq!([2, 4, 6, 8, 10].binary_search(&1).ok(), None);
+ assert_eq!([2, 4, 6, 8, 10].binary_search(&5).ok(), None);
+ assert_eq!([2, 4, 6, 8, 10].binary_search(&4).ok(), Some(1));
+ assert_eq!([2, 4, 6, 8, 10].binary_search(&10).ok(), Some(4));
+
+ assert_eq!([2, 4, 6, 8].binary_search(&1).ok(), None);
+ assert_eq!([2, 4, 6, 8].binary_search(&5).ok(), None);
+ assert_eq!([2, 4, 6, 8].binary_search(&4).ok(), Some(1));
+ assert_eq!([2, 4, 6, 8].binary_search(&8).ok(), Some(3));
+
+ assert_eq!([2, 4, 6].binary_search(&1).ok(), None);
+ assert_eq!([2, 4, 6].binary_search(&5).ok(), None);
+ assert_eq!([2, 4, 6].binary_search(&4).ok(), Some(1));
+ assert_eq!([2, 4, 6].binary_search(&6).ok(), Some(2));
+
+ assert_eq!([2, 4].binary_search(&1).ok(), None);
+ assert_eq!([2, 4].binary_search(&5).ok(), None);
+ assert_eq!([2, 4].binary_search(&2).ok(), Some(0));
+ assert_eq!([2, 4].binary_search(&4).ok(), Some(1));
+
+ assert_eq!([2].binary_search(&1).ok(), None);
+ assert_eq!([2].binary_search(&5).ok(), None);
+ assert_eq!([2].binary_search(&2).ok(), Some(0));
+
+ assert_eq!([].binary_search(&1).ok(), None);
+ assert_eq!([].binary_search(&5).ok(), None);
+
+ assert!([1, 1, 1, 1, 1].binary_search(&1).ok() != None);
+ assert!([1, 1, 1, 1, 2].binary_search(&1).ok() != None);
+ assert!([1, 1, 1, 2, 2].binary_search(&1).ok() != None);
+ assert!([1, 1, 2, 2, 2].binary_search(&1).ok() != None);
+ assert_eq!([1, 2, 2, 2, 2].binary_search(&1).ok(), Some(0));
+
+ assert_eq!([1, 2, 3, 4, 5].binary_search(&6).ok(), None);
+ assert_eq!([1, 2, 3, 4, 5].binary_search(&0).ok(), None);
+}
+
+#[test]
+fn test_reverse() {
+ let mut v = vec![10, 20];
+ assert_eq!(v[0], 10);
+ assert_eq!(v[1], 20);
+ v.reverse();
+ assert_eq!(v[0], 20);
+ assert_eq!(v[1], 10);
+
+ let mut v3 = Vec::<i32>::new();
+ v3.reverse();
+ assert!(v3.is_empty());
+
+ // check the 1-byte-types path
+ let mut v = (-50..51i8).collect::<Vec<_>>();
+ v.reverse();
+ assert_eq!(v, (-50..51i8).rev().collect::<Vec<_>>());
+
+ // check the 2-byte-types path
+ let mut v = (-50..51i16).collect::<Vec<_>>();
+ v.reverse();
+ assert_eq!(v, (-50..51i16).rev().collect::<Vec<_>>());
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // Miri is too slow
+fn test_sort() {
+ // The stable sort must produce correctly ordered output for every
+ // comparator, across short lengths, lengths around the merge threshold
+ // (500..510), several value ranges, a random comparator, zero-sized
+ // element types, and the empty slice. Input data comes from `rand`.
+ let mut rng = thread_rng();
+
+ for len in (2..25).chain(500..510) {
+ for &modulus in &[5, 10, 100, 1000] {
+ for _ in 0..10 {
+ let orig: Vec<_> =
+ rng.sample_iter::<i32, _>(&Standard).map(|x| x % modulus).take(len).collect();
+
+ // Sort in default order.
+ let mut v = orig.clone();
+ v.sort();
+ assert!(v.windows(2).all(|w| w[0] <= w[1]));
+
+ // Sort in ascending order.
+ let mut v = orig.clone();
+ v.sort_by(|a, b| a.cmp(b));
+ assert!(v.windows(2).all(|w| w[0] <= w[1]));
+
+ // Sort in descending order.
+ let mut v = orig.clone();
+ v.sort_by(|a, b| b.cmp(a));
+ assert!(v.windows(2).all(|w| w[0] >= w[1]));
+
+ // Sort in lexicographic order.
+ // `sort_by_key` and `sort_by_cached_key` must agree on the result.
+ let mut v1 = orig.clone();
+ let mut v2 = orig.clone();
+ v1.sort_by_key(|x| x.to_string());
+ v2.sort_by_cached_key(|x| x.to_string());
+ assert!(v1.windows(2).all(|w| w[0].to_string() <= w[1].to_string()));
+ assert!(v1 == v2);
+
+ // Sort with many pre-sorted runs.
+ // (Reverse-sorted data with a few random reversals/swaps mixed in.)
+ let mut v = orig.clone();
+ v.sort();
+ v.reverse();
+ for _ in 0..5 {
+ let a = rng.gen::<usize>() % len;
+ let b = rng.gen::<usize>() % len;
+ if a < b {
+ v[a..b].reverse();
+ } else {
+ v.swap(a, b);
+ }
+ }
+ v.sort();
+ assert!(v.windows(2).all(|w| w[0] <= w[1]));
+ }
+ }
+ }
+
+ // Sort using a completely random comparison function.
+ // This will reorder the elements *somehow*, but won't panic.
+ let mut v = [0; 500];
+ for i in 0..v.len() {
+ v[i] = i as i32;
+ }
+ v.sort_by(|_, _| *[Less, Equal, Greater].choose(&mut rng).unwrap());
+ v.sort();
+ for i in 0..v.len() {
+ assert_eq!(v[i], i as i32);
+ }
+
+ // Should not panic.
+ [0i32; 0].sort();
+ [(); 10].sort();
+ [(); 100].sort();
+
+ let mut v = [0xDEADBEEFu64];
+ v.sort();
+ assert!(v == [0xDEADBEEF]);
+}
+
+#[test]
+fn test_sort_stability() {
+ // Verifies that `sort` (via `sort_by`) and `sort_by_cached_key` are
+ // *stable*: elements that compare equal keep their original relative
+ // order. Each element carries an occurrence index as a tie-breaking
+ // tag, so a stable sort must leave those tags increasing.
+ // Miri is too slow
+ let large_range = if cfg!(miri) { 0..0 } else { 500..510 };
+ let rounds = if cfg!(miri) { 1 } else { 10 };
+
+ for len in (2..25).chain(large_range) {
+ for _ in 0..rounds {
+ let mut counts = [0; 10];
+
+ // create a vector like [(6, 1), (5, 1), (6, 2), ...],
+ // where the first item of each tuple is random, but
+ // the second item represents which occurrence of that
+ // number this element is, i.e., the second elements
+ // will occur in sorted order.
+ let orig: Vec<_> = (0..len)
+ .map(|_| {
+ let n = thread_rng().gen::<usize>() % 10;
+ counts[n] += 1;
+ (n, counts[n])
+ })
+ .collect();
+
+ let mut v = orig.clone();
+ // Only sort on the first element, so an unstable sort
+ // may mix up the counts.
+ v.sort_by(|&(a, _), &(b, _)| a.cmp(&b));
+
+ // This comparison includes the count (the second item
+ // of the tuple), so elements with equal first items
+ // will need to be ordered with increasing
+ // counts... i.e., exactly asserting that this sort is
+ // stable.
+ assert!(v.windows(2).all(|w| w[0] <= w[1]));
+
+ // Same stability check for the cached-key sort.
+ let mut v = orig.clone();
+ v.sort_by_cached_key(|&(x, _)| x);
+ assert!(v.windows(2).all(|w| w[0] <= w[1]));
+ }
+ }
+}
+
#[test]
fn test_rotate_left() {
    // Rotating left by `mid` must turn `(mid..len).chain(0..mid)` back
    // into the sorted sequence 0..len.
    let sorted: Vec<_> = (0..13).collect();
    let mut buf = Vec::new();

    // Degenerate rotations (by 0 and by the full length) are no-ops.
    buf.clone_from(&sorted);
    buf.rotate_left(0);
    assert_eq!(buf, sorted);
    buf.rotate_left(sorted.len());
    assert_eq!(buf, sorted);
    // Zero-sized element types must be handled without panicking.
    let mut units = [(), (), ()];
    units.rotate_left(2);

    // Small slice: rotating restores sorted order.
    buf = (5..13).chain(0..5).collect();
    buf.rotate_left(8);
    assert_eq!(buf, sorted);

    let sorted: Vec<_> = (0..1000).collect();

    // Small rotation distances in a large slice take the ptr::copy path.
    buf = (2..1000).chain(0..2).collect();
    buf.rotate_left(998);
    assert_eq!(buf, sorted);
    buf = (998..1000).chain(0..998).collect();
    buf.rotate_left(2);
    assert_eq!(buf, sorted);

    // A large prime rotation distance needs a few rounds of swapping.
    buf = (389..1000).chain(0..389).collect();
    buf.rotate_left(1000 - 389);
    assert_eq!(buf, sorted);
}
+
#[test]
fn test_rotate_right() {
    // Rotating right by `k` must turn `(len-k..len).chain(0..len-k)`
    // back into the sorted sequence 0..len.
    let sorted: Vec<_> = (0..13).collect();
    let mut buf = Vec::new();

    // Degenerate rotations (by 0 and by the full length) are no-ops.
    buf.clone_from(&sorted);
    buf.rotate_right(0);
    assert_eq!(buf, sorted);
    buf.rotate_right(sorted.len());
    assert_eq!(buf, sorted);
    // Zero-sized element types must be handled without panicking.
    let mut units = [(), (), ()];
    units.rotate_right(2);

    // Small slice: rotating restores sorted order.
    buf = (5..13).chain(0..5).collect();
    buf.rotate_right(5);
    assert_eq!(buf, sorted);

    let sorted: Vec<_> = (0..1000).collect();

    // Small rotation distances in a large slice take the ptr::copy path.
    buf = (2..1000).chain(0..2).collect();
    buf.rotate_right(2);
    assert_eq!(buf, sorted);
    buf = (998..1000).chain(0..998).collect();
    buf.rotate_right(998);
    assert_eq!(buf, sorted);

    // A large prime rotation distance needs a few rounds of swapping.
    buf = (389..1000).chain(0..389).collect();
    buf.rotate_right(389);
    assert_eq!(buf, sorted);
}
+
#[test]
fn test_concat() {
    // Concatenating zero vectors yields an empty vector.
    let none: [Vec<i32>; 0] = [];
    assert_eq!(none.concat(), []);
    // Otherwise the pieces are flattened in order.
    assert_eq!([vec![1], vec![2, 3]].concat(), [1, 2, 3]);

    // Companion `join` checks on slices-of-slices: a 0 is spliced
    // between consecutive pieces (see also `test_join`).
    let pieces: &[&[_]] = &[&[1], &[2, 3]];
    assert_eq!(pieces.join(&0), [1, 0, 2, 3]);
    let pieces: &[&[_]] = &[&[1], &[2], &[3]];
    assert_eq!(pieces.join(&0), [1, 0, 2, 0, 3]);
}
+
#[test]
fn test_join() {
    // Joining zero pieces gives an empty vector.
    let none: [Vec<i32>; 0] = [];
    assert_eq!(none.join(&0), []);
    // The separator is inserted between every adjacent pair of pieces.
    assert_eq!([vec![1], vec![2, 3]].join(&0), [1, 0, 2, 3]);
    assert_eq!([vec![1], vec![2], vec![3]].join(&0), [1, 0, 2, 0, 3]);

    // Arrays of borrowed slices join exactly like owned vectors.
    let borrowed: [&[_]; 2] = [&[1], &[2, 3]];
    assert_eq!(borrowed.join(&0), [1, 0, 2, 3]);
    let borrowed: [&[_]; 3] = [&[1], &[2], &[3]];
    assert_eq!(borrowed.join(&0), [1, 0, 2, 0, 3]);
}
+
#[test]
fn test_join_nocopy() {
    // `join` with a `str` separator on non-Copy elements (String).
    let none: [String; 0] = [];
    assert_eq!(none.join(","), "");

    // The separator appears between every pair of pieces, including
    // before a trailing empty string.
    let cases: &[(&[&str], &str)] = &[
        (&["a", "ab"], "a,ab"),
        (&["a", "ab", "abc"], "a,ab,abc"),
        (&["a", "ab", ""], "a,ab,"),
    ];
    for &(pieces, want) in cases {
        let owned: Vec<String> = pieces.iter().map(|s| s.to_string()).collect();
        assert_eq!(owned.join(","), want);
    }
}
+
#[test]
fn test_insert() {
    // Insertion in the middle, at the front, at the end, and into an
    // empty vector; elements at and after the index shift right.
    let cases: &[(&[i32], usize, i32, &[i32])] = &[
        (&[1, 2, 4], 2, 3, &[1, 2, 3, 4]),
        (&[1, 2, 3], 0, 0, &[0, 1, 2, 3]),
        (&[1, 2, 3], 3, 4, &[1, 2, 3, 4]),
        (&[], 0, 1, &[1]),
    ];
    for &(start, index, value, want) in cases {
        let mut v = start.to_vec();
        v.insert(index, value);
        assert_eq!(v, want);
    }
}
+
#[test]
#[should_panic]
fn test_insert_oob() {
    // `len()` is the last valid insertion index; one past it (4 for a
    // 3-element vector) must panic.
    let mut v = vec![1, 2, 3];
    v.insert(v.len() + 1, 5);
}
+
#[test]
fn test_remove() {
    // `remove` returns the evicted element and shifts the tail left;
    // drain the vector one element at a time from varying positions.
    let mut v = vec![1, 2, 3, 4];
    for (index, removed, rest) in
        [(2, 3, vec![1, 2, 4]), (2, 4, vec![1, 2]), (0, 1, vec![2]), (0, 2, vec![])]
    {
        assert_eq!(v.remove(index), removed);
        assert_eq!(v, rest);
    }
}
+
#[test]
#[should_panic]
fn test_remove_fail() {
    // The first `remove(0)` empties the vector; the second operates on
    // an empty vector and must panic.
    let mut v = vec![1];
    let _ = v.remove(0);
    let _ = v.remove(0);
}
+
#[test]
fn test_capacity() {
    // After `reserve_exact(10)` on a length-1 vector there must be room
    // for at least 1 + 10 elements; allocators may round up, hence `>=`.
    let mut buf = vec![0];
    buf.reserve_exact(10);
    assert!(buf.capacity() >= 11);
}
+
#[test]
fn test_slice_2() {
    // `&v[1..3]` borrows exactly two elements starting at index 1.
    let source = vec![1, 2, 3, 4, 5];
    let window = &source[1..3];
    assert_eq!(window.len(), 2);
    assert_eq!(window[0], 2);
    assert_eq!(window[1], 3);
}
+
+// Asserts that `$a.cmp($b)` yields the stated `Ordering` *and* that the
+// matching comparison operator (`>`, `<`, `==`) agrees with it, so the
+// slice `Ord` and `PartialOrd` implementations are checked for
+// consistency with each other.
+macro_rules! assert_order {
+ (Greater, $a:expr, $b:expr) => {
+ assert_eq!($a.cmp($b), Greater);
+ assert!($a > $b);
+ };
+ (Less, $a:expr, $b:expr) => {
+ assert_eq!($a.cmp($b), Less);
+ assert!($a < $b);
+ };
+ (Equal, $a:expr, $b:expr) => {
+ assert_eq!($a.cmp($b), Equal);
+ assert_eq!($a, $b);
+ };
+}
+
+#[test]
+fn test_total_ord_u8() {
+ let c = &[1u8, 2, 3];
+ assert_order!(Greater, &[1u8, 2, 3, 4][..], &c[..]);
+ let c = &[1u8, 2, 3, 4];
+ assert_order!(Less, &[1u8, 2, 3][..], &c[..]);
+ let c = &[1u8, 2, 3, 6];
+ assert_order!(Equal, &[1u8, 2, 3, 6][..], &c[..]);
+ let c = &[1u8, 2, 3, 4, 5, 6];
+ assert_order!(Less, &[1u8, 2, 3, 4, 5, 5, 5, 5][..], &c[..]);
+ let c = &[1u8, 2, 3, 4];
+ assert_order!(Greater, &[2u8, 2][..], &c[..]);
+}
+
+#[test]
+fn test_total_ord_i32() {
+ let c = &[1, 2, 3];
+ assert_order!(Greater, &[1, 2, 3, 4][..], &c[..]);
+ let c = &[1, 2, 3, 4];
+ assert_order!(Less, &[1, 2, 3][..], &c[..]);
+ let c = &[1, 2, 3, 6];
+ assert_order!(Equal, &[1, 2, 3, 6][..], &c[..]);
+ let c = &[1, 2, 3, 4, 5, 6];
+ assert_order!(Less, &[1, 2, 3, 4, 5, 5, 5, 5][..], &c[..]);
+ let c = &[1, 2, 3, 4];
+ assert_order!(Greater, &[2, 2][..], &c[..]);
+}
+
#[test]
fn test_iterator() {
    // A slice iterator yields the elements in order, and its size hint
    // is exact at every step, counting down to (0, Some(0)).
    let xs = [1, 2, 5, 10, 11];
    let mut it = xs.iter();
    let mut remaining = xs.len();
    for expected in &xs {
        assert_eq!(it.size_hint(), (remaining, Some(remaining)));
        assert_eq!(it.next().unwrap(), expected);
        remaining -= 1;
    }
    assert_eq!(it.size_hint(), (0, Some(0)));
    assert!(it.next().is_none());
}
+
+#[test]
+fn test_iter_size_hints() {
+ let mut xs = [1, 2, 5, 10, 11];
+ assert_eq!(xs.iter().size_hint(), (5, Some(5)));
+ assert_eq!(xs.iter_mut().size_hint(), (5, Some(5)));
+}
+
+#[test]
+fn test_iter_as_slice() {
+ let xs = [1, 2, 5, 10, 11];
+ let mut iter = xs.iter();
+ assert_eq!(iter.as_slice(), &[1, 2, 5, 10, 11]);
+ iter.next();
+ assert_eq!(iter.as_slice(), &[2, 5, 10, 11]);
+}
+
+#[test]
+fn test_iter_as_ref() {
+ let xs = [1, 2, 5, 10, 11];
+ let mut iter = xs.iter();
+ assert_eq!(iter.as_ref(), &[1, 2, 5, 10, 11]);
+ iter.next();
+ assert_eq!(iter.as_ref(), &[2, 5, 10, 11]);
+}
+
+#[test]
+fn test_iter_clone() {
+ let xs = [1, 2, 5];
+ let mut it = xs.iter();
+ it.next();
+ let mut jt = it.clone();
+ assert_eq!(it.next(), jt.next());
+ assert_eq!(it.next(), jt.next());
+ assert_eq!(it.next(), jt.next());
+}
+
+#[test]
+fn test_iter_is_empty() {
+ let xs = [1, 2, 5, 10, 11];
+ for i in 0..xs.len() {
+ for j in i..xs.len() {
+ assert_eq!(xs[i..j].iter().is_empty(), xs[i..j].is_empty());
+ }
+ }
+}
+
+#[test]
+fn test_mut_iterator() {
+ let mut xs = [1, 2, 3, 4, 5];
+ for x in &mut xs {
+ *x += 1;
+ }
+ assert!(xs == [2, 3, 4, 5, 6])
+}
+
+#[test]
+fn test_rev_iterator() {
+ let xs = [1, 2, 5, 10, 11];
+ let ys = [11, 10, 5, 2, 1];
+ let mut i = 0;
+ for &x in xs.iter().rev() {
+ assert_eq!(x, ys[i]);
+ i += 1;
+ }
+ assert_eq!(i, 5);
+}
+
+#[test]
+fn test_mut_rev_iterator() {
+ let mut xs = [1, 2, 3, 4, 5];
+ for (i, x) in xs.iter_mut().rev().enumerate() {
+ *x += i;
+ }
+ assert!(xs == [5, 5, 5, 5, 5])
+}
+
+#[test]
+fn test_move_iterator() {
+ let xs = vec![1, 2, 3, 4, 5];
+ assert_eq!(xs.into_iter().fold(0, |a: usize, b: usize| 10 * a + b), 12345);
+}
+
+#[test]
+fn test_move_rev_iterator() {
+ let xs = vec![1, 2, 3, 4, 5];
+ assert_eq!(xs.into_iter().rev().fold(0, |a: usize, b: usize| 10 * a + b), 54321);
+}
+
+#[test]
+fn test_splitator() {
+ let xs = &[1, 2, 3, 4, 5];
+
+ let splits: &[&[_]] = &[&[1], &[3], &[5]];
+ assert_eq!(xs.split(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[], &[2, 3, 4, 5]];
+ assert_eq!(xs.split(|x| *x == 1).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4], &[]];
+ assert_eq!(xs.split(|x| *x == 5).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split(|x| *x == 10).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[], &[], &[], &[], &[], &[]];
+ assert_eq!(xs.split(|_| true).collect::<Vec<&[i32]>>(), splits);
+
+ let xs: &[i32] = &[];
+ let splits: &[&[i32]] = &[&[]];
+ assert_eq!(xs.split(|x| *x == 5).collect::<Vec<&[i32]>>(), splits);
+}
+
+#[test]
+fn test_splitator_inclusive() {
+ let xs = &[1, 2, 3, 4, 5];
+
+ let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
+ assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive(|x| *x == 1).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive(|x| *x == 10).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]];
+ assert_eq!(xs.split_inclusive(|_| true).collect::<Vec<&[i32]>>(), splits);
+
+ let xs: &[i32] = &[];
+ let splits: &[&[i32]] = &[&[]];
+ assert_eq!(xs.split_inclusive(|x| *x == 5).collect::<Vec<&[i32]>>(), splits);
+}
+
+#[test]
+fn test_splitator_inclusive_reverse() {
+ let xs = &[1, 2, 3, 4, 5];
+
+ let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
+ assert_eq!(xs.split_inclusive(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]];
+ assert_eq!(xs.split_inclusive(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]];
+ assert_eq!(xs.split_inclusive(|_| true).rev().collect::<Vec<_>>(), splits);
+
+ let xs: &[i32] = &[];
+ let splits: &[&[i32]] = &[&[]];
+ assert_eq!(xs.split_inclusive(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+}
+
+#[test]
+fn test_splitator_mut_inclusive() {
+ let xs = &mut [1, 2, 3, 4, 5];
+
+ let splits: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1], &[2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x == 1).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x == 10).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1], &[2], &[3], &[4], &[5]];
+ assert_eq!(xs.split_inclusive_mut(|_| true).collect::<Vec<_>>(), splits);
+
+ let xs: &mut [i32] = &mut [];
+ let splits: &[&[i32]] = &[&[]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x == 5).collect::<Vec<_>>(), splits);
+}
+
+#[test]
+fn test_splitator_mut_inclusive_reverse() {
+ let xs = &mut [1, 2, 3, 4, 5];
+
+ let splits: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[2, 3, 4, 5], &[1]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[5], &[4], &[3], &[2], &[1]];
+ assert_eq!(xs.split_inclusive_mut(|_| true).rev().collect::<Vec<_>>(), splits);
+
+ let xs: &mut [i32] = &mut [];
+ let splits: &[&[i32]] = &[&[]];
+ assert_eq!(xs.split_inclusive_mut(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+}
+
+#[test]
+fn test_splitnator() {
+ let xs = &[1, 2, 3, 4, 5];
+
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.splitn(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1], &[3, 4, 5]];
+ assert_eq!(xs.splitn(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[], &[], &[], &[4, 5]];
+ assert_eq!(xs.splitn(4, |_| true).collect::<Vec<_>>(), splits);
+
+ let xs: &[i32] = &[];
+ let splits: &[&[i32]] = &[&[]];
+ assert_eq!(xs.splitn(2, |x| *x == 5).collect::<Vec<_>>(), splits);
+}
+
+#[test]
+fn test_splitnator_mut() {
+ let xs = &mut [1, 2, 3, 4, 5];
+
+ let splits: &[&mut [_]] = &[&mut [1, 2, 3, 4, 5]];
+ assert_eq!(xs.splitn_mut(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&mut [_]] = &[&mut [1], &mut [3, 4, 5]];
+ assert_eq!(xs.splitn_mut(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&mut [_]] = &[&mut [], &mut [], &mut [], &mut [4, 5]];
+ assert_eq!(xs.splitn_mut(4, |_| true).collect::<Vec<_>>(), splits);
+
+ let xs: &mut [i32] = &mut [];
+ let splits: &[&mut [i32]] = &[&mut []];
+ assert_eq!(xs.splitn_mut(2, |x| *x == 5).collect::<Vec<_>>(), splits);
+}
+
+#[test]
+fn test_rsplitator() {
+ let xs = &[1, 2, 3, 4, 5];
+
+ let splits: &[&[_]] = &[&[5], &[3], &[1]];
+ assert_eq!(xs.split(|x| *x % 2 == 0).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[2, 3, 4, 5], &[]];
+ assert_eq!(xs.split(|x| *x == 1).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[], &[1, 2, 3, 4]];
+ assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.split(|x| *x == 10).rev().collect::<Vec<_>>(), splits);
+
+ let xs: &[i32] = &[];
+ let splits: &[&[i32]] = &[&[]];
+ assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<&[i32]>>(), splits);
+}
+
+#[test]
+fn test_rsplitnator() {
+ let xs = &[1, 2, 3, 4, 5];
+
+ let splits: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(xs.rsplitn(1, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[5], &[1, 2, 3]];
+ assert_eq!(xs.rsplitn(2, |x| *x % 2 == 0).collect::<Vec<_>>(), splits);
+ let splits: &[&[_]] = &[&[], &[], &[], &[1, 2]];
+ assert_eq!(xs.rsplitn(4, |_| true).collect::<Vec<_>>(), splits);
+
+ let xs: &[i32] = &[];
+ let splits: &[&[i32]] = &[&[]];
+ assert_eq!(xs.rsplitn(2, |x| *x == 5).collect::<Vec<&[i32]>>(), splits);
+ assert!(xs.rsplitn(0, |x| *x % 2 == 0).next().is_none());
+}
+
+#[test]
+fn test_windowsator() {
+ let v = &[1, 2, 3, 4];
+
+ let wins: &[&[_]] = &[&[1, 2], &[2, 3], &[3, 4]];
+ assert_eq!(v.windows(2).collect::<Vec<_>>(), wins);
+
+ let wins: &[&[_]] = &[&[1, 2, 3], &[2, 3, 4]];
+ assert_eq!(v.windows(3).collect::<Vec<_>>(), wins);
+ assert!(v.windows(6).next().is_none());
+
+ let wins: &[&[_]] = &[&[3, 4], &[2, 3], &[1, 2]];
+ assert_eq!(v.windows(2).rev().collect::<Vec<&[_]>>(), wins);
+}
+
+#[test]
+#[should_panic]
+fn test_windowsator_0() {
+ let v = &[1, 2, 3, 4];
+ let _it = v.windows(0);
+}
+
+#[test]
+fn test_chunksator() {
+ let v = &[1, 2, 3, 4, 5];
+
+ assert_eq!(v.chunks(2).len(), 3);
+
+ let chunks: &[&[_]] = &[&[1, 2], &[3, 4], &[5]];
+ assert_eq!(v.chunks(2).collect::<Vec<_>>(), chunks);
+ let chunks: &[&[_]] = &[&[1, 2, 3], &[4, 5]];
+ assert_eq!(v.chunks(3).collect::<Vec<_>>(), chunks);
+ let chunks: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(v.chunks(6).collect::<Vec<_>>(), chunks);
+
+ let chunks: &[&[_]] = &[&[5], &[3, 4], &[1, 2]];
+ assert_eq!(v.chunks(2).rev().collect::<Vec<_>>(), chunks);
+}
+
+#[test]
+#[should_panic]
+fn test_chunksator_0() {
+ let v = &[1, 2, 3, 4];
+ let _it = v.chunks(0);
+}
+
+#[test]
+fn test_chunks_exactator() {
+ let v = &[1, 2, 3, 4, 5];
+
+ assert_eq!(v.chunks_exact(2).len(), 2);
+
+ let chunks: &[&[_]] = &[&[1, 2], &[3, 4]];
+ assert_eq!(v.chunks_exact(2).collect::<Vec<_>>(), chunks);
+ let chunks: &[&[_]] = &[&[1, 2, 3]];
+ assert_eq!(v.chunks_exact(3).collect::<Vec<_>>(), chunks);
+ let chunks: &[&[_]] = &[];
+ assert_eq!(v.chunks_exact(6).collect::<Vec<_>>(), chunks);
+
+ let chunks: &[&[_]] = &[&[3, 4], &[1, 2]];
+ assert_eq!(v.chunks_exact(2).rev().collect::<Vec<_>>(), chunks);
+}
+
+#[test]
+#[should_panic]
+fn test_chunks_exactator_0() {
+ let v = &[1, 2, 3, 4];
+ let _it = v.chunks_exact(0);
+}
+
+#[test]
+fn test_rchunksator() {
+ let v = &[1, 2, 3, 4, 5];
+
+ assert_eq!(v.rchunks(2).len(), 3);
+
+ let chunks: &[&[_]] = &[&[4, 5], &[2, 3], &[1]];
+ assert_eq!(v.rchunks(2).collect::<Vec<_>>(), chunks);
+ let chunks: &[&[_]] = &[&[3, 4, 5], &[1, 2]];
+ assert_eq!(v.rchunks(3).collect::<Vec<_>>(), chunks);
+ let chunks: &[&[_]] = &[&[1, 2, 3, 4, 5]];
+ assert_eq!(v.rchunks(6).collect::<Vec<_>>(), chunks);
+
+ let chunks: &[&[_]] = &[&[1], &[2, 3], &[4, 5]];
+ assert_eq!(v.rchunks(2).rev().collect::<Vec<_>>(), chunks);
+}
+
+#[test]
+#[should_panic]
+fn test_rchunksator_0() {
+ let v = &[1, 2, 3, 4];
+ let _it = v.rchunks(0);
+}
+
+#[test]
+fn test_rchunks_exactator() {
+ let v = &[1, 2, 3, 4, 5];
+
+ assert_eq!(v.rchunks_exact(2).len(), 2);
+
+ let chunks: &[&[_]] = &[&[4, 5], &[2, 3]];
+ assert_eq!(v.rchunks_exact(2).collect::<Vec<_>>(), chunks);
+ let chunks: &[&[_]] = &[&[3, 4, 5]];
+ assert_eq!(v.rchunks_exact(3).collect::<Vec<_>>(), chunks);
+ let chunks: &[&[_]] = &[];
+ assert_eq!(v.rchunks_exact(6).collect::<Vec<_>>(), chunks);
+
+ let chunks: &[&[_]] = &[&[2, 3], &[4, 5]];
+ assert_eq!(v.rchunks_exact(2).rev().collect::<Vec<_>>(), chunks);
+}
+
+#[test]
+#[should_panic]
+fn test_rchunks_exactator_0() {
+ let v = &[1, 2, 3, 4];
+ let _it = v.rchunks_exact(0);
+}
+
+#[test]
+fn test_reverse_part() {
+ let mut values = [1, 2, 3, 4, 5];
+ values[1..4].reverse();
+ assert!(values == [1, 4, 3, 2, 5]);
+}
+
#[test]
fn test_show() {
    // `{:?}` formatting of vectors and mutable slices renders a
    // bracketed, comma-separated element list.
    macro_rules! test_show_vec {
        ($x:expr, $x_str:expr) => {{
            let (x, x_str) = ($x, $x_str);
            // The original asserted this exact equality twice in a row
            // (a leftover from when two format traits were tested
            // separately); a single assertion covers it.
            assert_eq!(format!("{:?}", x), x_str);
        }};
    }
    let empty = Vec::<i32>::new();
    test_show_vec!(empty, "[]");
    test_show_vec!(vec![1], "[1]");
    test_show_vec!(vec![1, 2, 3], "[1, 2, 3]");
    test_show_vec!(vec![vec![], vec![1], vec![1, 1]], "[[], [1], [1, 1]]");

    // Mutable slices format identically to vectors.
    let empty_mut: &mut [i32] = &mut [];
    test_show_vec!(empty_mut, "[]");
    let v = &mut [1];
    test_show_vec!(v, "[1]");
    let v = &mut [1, 2, 3];
    test_show_vec!(v, "[1, 2, 3]");
    let v: &mut [&mut [_]] = &mut [&mut [], &mut [1], &mut [1, 1]];
    test_show_vec!(v, "[[], [1], [1, 1]]");
}
+
+#[test]
+fn test_vec_default() {
+ macro_rules! t {
+ ($ty:ty) => {{
+ let v: $ty = Default::default();
+ assert!(v.is_empty());
+ }};
+ }
+
+ t!(&[i32]);
+ t!(Vec<i32>);
+}
+
+#[test]
+#[should_panic]
+fn test_overflow_does_not_cause_segfault() {
+ let mut v = vec![];
+ v.reserve_exact(!0);
+ v.push(1);
+ v.push(2);
+}
+
+#[test]
+#[should_panic]
+fn test_overflow_does_not_cause_segfault_managed() {
+ let mut v = vec![Rc::new(1)];
+ v.reserve_exact(!0);
+ v.push(Rc::new(2));
+}
+
+#[test]
+fn test_mut_split_at() {
+ let mut values = [1, 2, 3, 4, 5];
+ {
+ let (left, right) = values.split_at_mut(2);
+ {
+ let left: &[_] = left;
+ assert!(left[..left.len()] == [1, 2]);
+ }
+ for p in left {
+ *p += 1;
+ }
+
+ {
+ let right: &[_] = right;
+ assert!(right[..right.len()] == [3, 4, 5]);
+ }
+ for p in right {
+ *p += 2;
+ }
+ }
+
+ assert!(values == [2, 3, 5, 6, 7]);
+}
+
+#[derive(Clone, PartialEq)]
+struct Foo;
+
+#[test]
+fn test_iter_zero_sized() {
+ let mut v = vec![Foo, Foo, Foo];
+ assert_eq!(v.len(), 3);
+ let mut cnt = 0;
+
+ for f in &v {
+ assert!(*f == Foo);
+ cnt += 1;
+ }
+ assert_eq!(cnt, 3);
+
+ for f in &v[1..3] {
+ assert!(*f == Foo);
+ cnt += 1;
+ }
+ assert_eq!(cnt, 5);
+
+ for f in &mut v {
+ assert!(*f == Foo);
+ cnt += 1;
+ }
+ assert_eq!(cnt, 8);
+
+ for f in v {
+ assert!(f == Foo);
+ cnt += 1;
+ }
+ assert_eq!(cnt, 11);
+
+ let xs: [Foo; 3] = [Foo, Foo, Foo];
+ cnt = 0;
+ for f in &xs {
+ assert!(*f == Foo);
+ cnt += 1;
+ }
+ assert!(cnt == 3);
+}
+
+#[test]
+fn test_shrink_to_fit() {
+ let mut xs = vec![0, 1, 2, 3];
+ for i in 4..100 {
+ xs.push(i)
+ }
+ assert_eq!(xs.capacity(), 128);
+ xs.shrink_to_fit();
+ assert_eq!(xs.capacity(), 100);
+ assert_eq!(xs, (0..100).collect::<Vec<_>>());
+}
+
+#[test]
+fn test_starts_with() {
+ assert!(b"foobar".starts_with(b"foo"));
+ assert!(!b"foobar".starts_with(b"oob"));
+ assert!(!b"foobar".starts_with(b"bar"));
+ assert!(!b"foo".starts_with(b"foobar"));
+ assert!(!b"bar".starts_with(b"foobar"));
+ assert!(b"foobar".starts_with(b"foobar"));
+ let empty: &[u8] = &[];
+ assert!(empty.starts_with(empty));
+ assert!(!empty.starts_with(b"foo"));
+ assert!(b"foobar".starts_with(empty));
+}
+
+#[test]
+fn test_ends_with() {
+ assert!(b"foobar".ends_with(b"bar"));
+ assert!(!b"foobar".ends_with(b"oba"));
+ assert!(!b"foobar".ends_with(b"foo"));
+ assert!(!b"foo".ends_with(b"foobar"));
+ assert!(!b"bar".ends_with(b"foobar"));
+ assert!(b"foobar".ends_with(b"foobar"));
+ let empty: &[u8] = &[];
+ assert!(empty.ends_with(empty));
+ assert!(!empty.ends_with(b"foo"));
+ assert!(b"foobar".ends_with(empty));
+}
+
+#[test]
+fn test_mut_splitator() {
+ let mut xs = [0, 1, 0, 2, 3, 0, 0, 4, 5, 0];
+ assert_eq!(xs.split_mut(|x| *x == 0).count(), 6);
+ for slice in xs.split_mut(|x| *x == 0) {
+ slice.reverse();
+ }
+ assert!(xs == [0, 1, 0, 3, 2, 0, 0, 5, 4, 0]);
+
+ let mut xs = [0, 1, 0, 2, 3, 0, 0, 4, 5, 0, 6, 7];
+ for slice in xs.split_mut(|x| *x == 0).take(5) {
+ slice.reverse();
+ }
+ assert!(xs == [0, 1, 0, 3, 2, 0, 0, 5, 4, 0, 6, 7]);
+}
+
+#[test]
+fn test_mut_splitator_rev() {
+ let mut xs = [1, 2, 0, 3, 4, 0, 0, 5, 6, 0];
+ for slice in xs.split_mut(|x| *x == 0).rev().take(4) {
+ slice.reverse();
+ }
+ assert!(xs == [1, 2, 0, 4, 3, 0, 0, 6, 5, 0]);
+}
+
#[test]
fn test_get_mut() {
    // `get_mut` returns None past the end and a mutable reference for an
    // in-range index.
    let mut v = [0, 1, 2];
    assert_eq!(v.get_mut(3), None);
    // Write through the returned reference. (Was `.map(|e| *e = 7)`,
    // which uses `Option::map` purely for its side effect — the pattern
    // clippy flags as `option_map_unit_fn`.)
    if let Some(e) = v.get_mut(1) {
        *e = 7;
    }
    assert_eq!(v[1], 7);
    let mut x = 2;
    assert_eq!(v.get_mut(2), Some(&mut x));
}
+
+#[test]
+fn test_mut_chunks() {
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
+ assert_eq!(v.chunks_mut(3).len(), 3);
+ for (i, chunk) in v.chunks_mut(3).enumerate() {
+ for x in chunk {
+ *x = i as u8;
+ }
+ }
+ let result = [0, 0, 0, 1, 1, 1, 2];
+ assert_eq!(v, result);
+}
+
+#[test]
+fn test_mut_chunks_rev() {
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
+ for (i, chunk) in v.chunks_mut(3).rev().enumerate() {
+ for x in chunk {
+ *x = i as u8;
+ }
+ }
+ let result = [2, 2, 2, 1, 1, 1, 0];
+ assert_eq!(v, result);
+}
+
+#[test]
+#[should_panic]
+fn test_mut_chunks_0() {
+ let mut v = [1, 2, 3, 4];
+ let _it = v.chunks_mut(0);
+}
+
+#[test]
+fn test_mut_chunks_exact() {
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
+ assert_eq!(v.chunks_exact_mut(3).len(), 2);
+ for (i, chunk) in v.chunks_exact_mut(3).enumerate() {
+ for x in chunk {
+ *x = i as u8;
+ }
+ }
+ let result = [0, 0, 0, 1, 1, 1, 6];
+ assert_eq!(v, result);
+}
+
+#[test]
+fn test_mut_chunks_exact_rev() {
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
+ for (i, chunk) in v.chunks_exact_mut(3).rev().enumerate() {
+ for x in chunk {
+ *x = i as u8;
+ }
+ }
+ let result = [1, 1, 1, 0, 0, 0, 6];
+ assert_eq!(v, result);
+}
+
+#[test]
+#[should_panic]
+fn test_mut_chunks_exact_0() {
+ let mut v = [1, 2, 3, 4];
+ let _it = v.chunks_exact_mut(0);
+}
+
+#[test]
+fn test_mut_rchunks() {
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
+ assert_eq!(v.rchunks_mut(3).len(), 3);
+ for (i, chunk) in v.rchunks_mut(3).enumerate() {
+ for x in chunk {
+ *x = i as u8;
+ }
+ }
+ let result = [2, 1, 1, 1, 0, 0, 0];
+ assert_eq!(v, result);
+}
+
+#[test]
+fn test_mut_rchunks_rev() {
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
+ for (i, chunk) in v.rchunks_mut(3).rev().enumerate() {
+ for x in chunk {
+ *x = i as u8;
+ }
+ }
+ let result = [0, 1, 1, 1, 2, 2, 2];
+ assert_eq!(v, result);
+}
+
+#[test]
+#[should_panic]
+fn test_mut_rchunks_0() {
+ let mut v = [1, 2, 3, 4];
+ let _it = v.rchunks_mut(0);
+}
+
+#[test]
+fn test_mut_rchunks_exact() {
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
+ assert_eq!(v.rchunks_exact_mut(3).len(), 2);
+ for (i, chunk) in v.rchunks_exact_mut(3).enumerate() {
+ for x in chunk {
+ *x = i as u8;
+ }
+ }
+ let result = [0, 1, 1, 1, 0, 0, 0];
+ assert_eq!(v, result);
+}
+
+#[test]
+fn test_mut_rchunks_exact_rev() {
+ let mut v = [0, 1, 2, 3, 4, 5, 6];
+ for (i, chunk) in v.rchunks_exact_mut(3).rev().enumerate() {
+ for x in chunk {
+ *x = i as u8;
+ }
+ }
+ let result = [0, 0, 0, 0, 1, 1, 1];
+ assert_eq!(v, result);
+}
+
+#[test]
+#[should_panic]
+fn test_mut_rchunks_exact_0() {
+ let mut v = [1, 2, 3, 4];
+ let _it = v.rchunks_exact_mut(0);
+}
+
+#[test]
+fn test_mut_last() {
+ let mut x = [1, 2, 3, 4, 5];
+ let h = x.last_mut();
+ assert_eq!(*h.unwrap(), 5);
+
+ let y: &mut [i32] = &mut [];
+ assert!(y.last_mut().is_none());
+}
+
#[test]
fn test_to_vec() {
    // `to_vec` on a boxed array (deref-coerced to a slice) copies the
    // elements into a fresh Vec.
    // Note: the original used `box [1, 2, 3]`, which required the
    // unstable `box_syntax` feature that has since been removed from the
    // language; `Box::new` is the stable equivalent.
    let xs: Box<_> = Box::new([1, 2, 3]);
    let ys = xs.to_vec();
    assert_eq!(ys, [1, 2, 3]);
}
+
+#[test]
+fn test_box_slice_clone() {
+ let data = vec![vec![0, 1], vec![0], vec![1]];
+ let data2 = data.clone().into_boxed_slice().clone().to_vec();
+
+ assert_eq!(data, data2);
+}
+
+#[test]
+#[allow(unused_must_use)] // here, we care about the side effects of `.clone()`
+#[cfg_attr(target_os = "emscripten", ignore)]
+fn test_box_slice_clone_panics() {
+ use std::sync::atomic::{AtomicUsize, Ordering};
+ use std::sync::Arc;
+
+ struct Canary {
+ count: Arc<AtomicUsize>,
+ panics: bool,
+ }
+
+ impl Drop for Canary {
+ fn drop(&mut self) {
+ self.count.fetch_add(1, Ordering::SeqCst);
+ }
+ }
+
+ impl Clone for Canary {
+ fn clone(&self) -> Self {
+ if self.panics {
+ panic!()
+ }
+
+ Canary { count: self.count.clone(), panics: self.panics }
+ }
+ }
+
+ let drop_count = Arc::new(AtomicUsize::new(0));
+ let canary = Canary { count: drop_count.clone(), panics: false };
+ let panic = Canary { count: drop_count.clone(), panics: true };
+
+ std::panic::catch_unwind(move || {
+ // When xs is dropped, +5.
+ let xs =
+ vec![canary.clone(), canary.clone(), canary.clone(), panic, canary].into_boxed_slice();
+
+ // When panic is cloned, +3.
+ xs.clone();
+ })
+ .unwrap_err();
+
+ // Total = 8
+ assert_eq!(drop_count.load(Ordering::SeqCst), 8);
+}
+
+#[test]
+fn test_copy_from_slice() {
+ let src = [0, 1, 2, 3, 4, 5];
+ let mut dst = [0; 6];
+ dst.copy_from_slice(&src);
+ assert_eq!(src, dst)
+}
+
+#[test]
+#[should_panic(expected = "destination and source slices have different lengths")]
+fn test_copy_from_slice_dst_longer() {
+ let src = [0, 1, 2, 3];
+ let mut dst = [0; 5];
+ dst.copy_from_slice(&src);
+}
+
+#[test]
+#[should_panic(expected = "destination and source slices have different lengths")]
+fn test_copy_from_slice_dst_shorter() {
+ let src = [0, 1, 2, 3];
+ let mut dst = [0; 3];
+ dst.copy_from_slice(&src);
+}
+
/// Upper bound on the slice lengths exercised by the panic-safety tests.
const MAX_LEN: usize = 80;

/// One drop counter per element id in `0..MAX_LEN`; `DropCounter::drop`
/// increments slot `id`, letting the `test!` macro verify that every
/// element is dropped exactly once even when a comparison panics mid-sort.
///
/// `AtomicUsize` is not `Copy`, so `[AtomicUsize::new(0); MAX_LEN]` is
/// rejected by the repeat-expression rules — but repeating a *named
/// constant* is allowed, which replaces the 80 hand-written initializers
/// (this was the old FIXME referencing RFC 1109).
static DROP_COUNTS: [AtomicUsize; MAX_LEN] = {
    const ZERO: AtomicUsize = AtomicUsize::new(0);
    [ZERO; MAX_LEN]
};
+
+// Net "version" tally: every comparison adds 2 (one per operand, see
+// `PartialOrd` below) and every drop subtracts the versions its value had
+// accumulated, so this must read 0 once all values have been dropped in
+// their most recent state — asserted by the `test!` macro.
+static VERSIONS: AtomicUsize = AtomicUsize::new(0);
+
+// Element type for the panic-safety tests: counts its own drops (into
+// `DROP_COUNTS[id]`) and how many times it was compared (`version`).
+#[derive(Clone, Eq)]
+struct DropCounter {
+ x: u32,
+ id: usize,
+ version: Cell<usize>,
+}
+
+impl PartialEq for DropCounter {
+ fn eq(&self, other: &Self) -> bool {
+ // Route equality through `partial_cmp` so it is counted too.
+ self.partial_cmp(other) == Some(Ordering::Equal)
+ }
+}
+
+impl PartialOrd for DropCounter {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ // Record the comparison on both operands and in the global tally.
+ self.version.set(self.version.get() + 1);
+ other.version.set(other.version.get() + 1);
+ VERSIONS.fetch_add(2, Relaxed);
+ self.x.partial_cmp(&other.x)
+ }
+}
+
+impl Ord for DropCounter {
+ fn cmp(&self, other: &Self) -> Ordering {
+ // `x` is a u32, so `partial_cmp` never returns None here.
+ self.partial_cmp(other).unwrap()
+ }
+}
+
+impl Drop for DropCounter {
+ fn drop(&mut self) {
+ // Tally this id's drop and retire the versions this value held.
+ DROP_COUNTS[self.id].fetch_add(1, Relaxed);
+ VERSIONS.fetch_sub(self.version.get(), Relaxed);
+ }
+}
+
+// Panic-safety driver: runs `$func` (a `sort_by`-style method) on a clone
+// of `$input` to count the comparisons an uninterrupted sort performs,
+// then replays the sort once per comparison with a panic injected at that
+// exact point. After every aborted sort it asserts that each element was
+// dropped exactly once (no leaks, no double drops) and that the drops saw
+// each value's most recent version (`VERSIONS` returns to zero).
+macro_rules! test {
+ ($input:ident, $func:ident) => {
+ let len = $input.len();
+
+ // Work out the total number of comparisons required to sort
+ // this array...
+ let mut count = 0usize;
+ $input.to_owned().$func(|a, b| {
+ count += 1;
+ a.cmp(b)
+ });
+
+ // ... and then panic on each and every single one.
+ for panic_countdown in 0..count {
+ // Refresh the counters.
+ VERSIONS.store(0, Relaxed);
+ for i in 0..len {
+ DROP_COUNTS[i].store(0, Relaxed);
+ }
+
+ let v = $input.to_owned();
+ let _ = std::panic::catch_unwind(move || {
+ let mut v = v;
+ let mut panic_countdown = panic_countdown;
+ v.$func(|a, b| {
+ if panic_countdown == 0 {
+ // Suppress the hook's output for this deliberate panic.
+ SILENCE_PANIC.with(|s| s.set(true));
+ panic!();
+ }
+ panic_countdown -= 1;
+ a.cmp(b)
+ })
+ });
+
+ // Check that the number of things dropped is exactly
+ // what we expect (i.e., the contents of `v`).
+ for (i, c) in DROP_COUNTS.iter().enumerate().take(len) {
+ let count = c.load(Relaxed);
+ assert!(count == 1, "found drop count == {} for i == {}, len == {}", count, i, len);
+ }
+
+ // Check that the most recent versions of values were dropped.
+ assert_eq!(VERSIONS.load(Relaxed), 0);
+ }
+ };
+}
+
+// When set, the custom panic hook installed by `panic_safe` suppresses
+// printing for the panics the test injects on purpose.
+thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));
+
+#[test]
+#[cfg_attr(target_os = "emscripten", ignore)] // no threads
+fn panic_safe() {
+ // Sorting with a comparator that panics must leave the slice in a
+ // memory-safe state (every element dropped exactly once). Exercises
+ // both the stable and unstable sort across lengths, key ranges, and
+ // inputs with pre-sorted runs, via the `test!` macro above.
+ let prev = panic::take_hook();
+ panic::set_hook(Box::new(move |info| {
+ if !SILENCE_PANIC.with(|s| s.get()) {
+ prev(info);
+ }
+ }));
+
+ let mut rng = thread_rng();
+
+ // Miri is too slow
+ let lens = if cfg!(miri) { (1..10).chain(20..21) } else { (1..20).chain(70..MAX_LEN) };
+ let moduli: &[u32] = if cfg!(miri) { &[5] } else { &[5, 20, 50] };
+
+ for len in lens {
+ for &modulus in moduli {
+ for &has_runs in &[false, true] {
+ let mut input = (0..len)
+ .map(|id| DropCounter {
+ x: rng.next_u32() % modulus,
+ id: id,
+ version: Cell::new(0),
+ })
+ .collect::<Vec<_>>();
+
+ if has_runs {
+ // Turn the input into mostly-sorted data with a few
+ // random reversals/swaps, to hit run-detection paths.
+ for c in &mut input {
+ c.x = c.id as u32;
+ }
+
+ for _ in 0..5 {
+ let a = rng.gen::<usize>() % len;
+ let b = rng.gen::<usize>() % len;
+ if a < b {
+ input[a..b].reverse();
+ } else {
+ input.swap(a, b);
+ }
+ }
+ }
+
+ test!(input, sort_by);
+ test!(input, sort_unstable_by);
+ }
+ }
+ }
+
+ // Set default panic hook again.
+ drop(panic::take_hook());
+}
+
#[test]
fn repeat_generic_slice() {
    // `repeat(n)` concatenates `n` copies of the slice: 0 copies is
    // empty and 1 copy is an identical vector.
    let base = [1, 2, 3, 4];
    assert_eq!(base.repeat(0), vec![]);
    assert_eq!(base.repeat(1), base.to_vec());
    assert_eq!(base.repeat(3), vec![1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4]);
    assert_eq!([1, 2].repeat(2), vec![1, 2, 1, 2]);
}
diff --git a/library/alloc/tests/str.rs b/library/alloc/tests/str.rs
new file mode 100644
index 00000000000..eee98d45340
--- /dev/null
+++ b/library/alloc/tests/str.rs
@@ -0,0 +1,1899 @@
+use std::borrow::Cow;
+use std::cmp::Ordering::{Equal, Greater, Less};
+use std::str::from_utf8;
+
// `<=` on string slices follows byte-wise lexicographic order; equal strings
// compare `<=` in both directions.
#[test]
fn test_le() {
    for &(lo, hi) in &[("", ""), ("", "foo"), ("foo", "foo")] {
        assert!(lo <= hi);
    }
    assert_ne!("foo", "bar");
}
+
// `str::find` returns the byte index of the first match for both `char` and
// closure patterns, including positions past multi-byte characters.
#[test]
fn test_find() {
    assert_eq!("hello".find('l'), Some(2));
    assert_eq!("hello".find(|c: char| c == 'o'), Some(4));
    assert_eq!("hello".find('x'), None);
    assert_eq!("hello".find(|c: char| c == 'x'), None);
    let mixed = "ประเทศไทย中华Việt Nam";
    assert_eq!(mixed.find('华'), Some(30));
    assert_eq!(mixed.find(|c: char| c == '华'), Some(30));
}
+
// `str::rfind` returns the byte index of the LAST match; for a unique match
// it agrees with `find`.
#[test]
fn test_rfind() {
    assert_eq!("hello".rfind('l'), Some(3));
    assert_eq!("hello".rfind(|c: char| c == 'o'), Some(4));
    assert_eq!("hello".rfind('x'), None);
    assert_eq!("hello".rfind(|c: char| c == 'x'), None);
    let mixed = "ประเทศไทย中华Việt Nam";
    assert_eq!(mixed.rfind('华'), Some(30));
    assert_eq!(mixed.rfind(|c: char| c == '华'), Some(30));
}
+
// Collecting a string's `chars()` back into a `String` round-trips exactly,
// for ASCII-free multi-byte text too.
#[test]
fn test_collect() {
    for &src in &["", "ประเทศไทย中"] {
        let rebuilt: String = src.chars().collect();
        assert_eq!(src, rebuilt);
    }
}
+
// `String::into_bytes` hands back the underlying UTF-8 buffer unchanged.
#[test]
fn test_into_bytes() {
    let bytes = String::from("asdf").into_bytes();
    assert_eq!(bytes, b"asdf");
}
+
// `find`/`rfind` with `&str` patterns: returned positions are byte offsets
// relative to the (sub)slice searched, and an exhaustive scan over every
// substring of a small input checks internal consistency.
#[test]
fn test_find_str() {
    // byte positions
    assert_eq!("".find(""), Some(0));
    assert!("banana".find("apple pie").is_none());

    let data = "abcabc";
    assert_eq!(data[0..6].find("ab"), Some(0));
    // searching a subslice: the result is relative to the subslice start
    assert_eq!(data[2..6].find("ab"), Some(3 - 2));
    assert!(data[2..4].find("ab").is_none());

    let string = "ประเทศไทย中华Việt Nam";
    let mut data = String::from(string);
    data.push_str(string);
    // bytes of two adjacent chars never form a match
    assert!(data.find("ไท华").is_none());
    assert_eq!(data[0..43].find(""), Some(0));
    assert_eq!(data[6..43].find(""), Some(6 - 6));

    assert_eq!(data[0..43].find("ประ"), Some(0));
    assert_eq!(data[0..43].find("ทศไ"), Some(12));
    assert_eq!(data[0..43].find("ย中"), Some(24));
    assert_eq!(data[0..43].find("iệt"), Some(34));
    assert_eq!(data[0..43].find("Nam"), Some(40));

    // same needles in the second copy of the string (offset by 43 bytes)
    assert_eq!(data[43..86].find("ประ"), Some(43 - 43));
    assert_eq!(data[43..86].find("ทศไ"), Some(55 - 43));
    assert_eq!(data[43..86].find("ย中"), Some(67 - 43));
    assert_eq!(data[43..86].find("iệt"), Some(77 - 43));
    assert_eq!(data[43..86].find("Nam"), Some(83 - 43));

    // find every substring -- assert that it finds it, or an earlier occurrence.
    let string = "Việt Namacbaabcaabaaba";
    for (i, ci) in string.char_indices() {
        // `ip` is the byte offset just past char `ci`
        let ip = i + ci.len_utf8();
        for j in string[ip..].char_indices().map(|(i, _)| i).chain(Some(string.len() - ip)) {
            let pat = &string[i..ip + j];
            assert!(match string.find(pat) {
                None => false,
                Some(x) => x <= i,
            });
            assert!(match string.rfind(pat) {
                None => false,
                Some(x) => x >= i,
            });
        }
    }
}
+
// Shorthand used by the concat/join tests below: owned `String` from a literal.
fn s(x: &str) -> String {
    String::from(x)
}
+
// Assert that concatenating `$string` yields `$expected`; works for any
// container whose `concat()` produces a `String`.
macro_rules! test_concat {
    ($expected: expr, $string: expr) => {{
        let actual: String = $string.concat();
        assert_eq!($expected, actual);
    }};
}
+
// `concat` behaves identically for owned and borrowed string elements.
#[test]
fn test_concat_for_different_types() {
    test_concat!("ab", vec!["a", "b"]);
    test_concat!("ab", vec![s("a"), s("b")]);
}
+
// `concat` over zero, one, and several elements, including empty strings.
#[test]
fn test_concat_for_different_lengths() {
    let none: &[&str] = &[];
    test_concat!("", none);
    test_concat!("a", ["a"]);
    test_concat!("ab", ["a", "b"]);
    test_concat!("abc", ["", "a", "bc"]);
}
+
// Assert that joining `$string` with separator `$delim` yields `$expected`.
macro_rules! test_join {
    ($expected: expr, $string: expr, $delim: expr) => {{
        let actual = $string.join($delim);
        assert_eq!($expected, actual);
    }};
}
+
// `join` accepts owned and borrowed elements, and owned or borrowed separators.
#[test]
fn test_join_for_different_types() {
    let dash = "-".to_string();
    test_join!("a-b", ["a", "b"], "-");
    test_join!("a-b", [s("a"), s("b")], &*dash);
    test_join!("a-b", vec!["a", "b"], &*dash);
    test_join!("a-b", &*vec!["a", "b"], "-");
    test_join!("a-b", vec![s("a"), s("b")], "-");
}
+
// `join` over zero, one, and several elements, including empty strings.
#[test]
fn test_join_for_different_lengths() {
    let none: &[&str] = &[];
    test_join!("", none, "-");
    test_join!("a", ["a"], "-");
    test_join!("a-b", ["a", "b"], "-");
    test_join!("-a-bc", ["", "a", "bc"], "-");
}
+
+// join has fast paths for small separators up to 4 bytes
+// this tests the slow paths.
#[test]
fn test_join_for_different_lengths_with_long_separator() {
    // The separator must be longer than 4 bytes to exercise `join`'s slow
    // path (see the comment above this test). "～" is U+FF5E FULLWIDTH TILDE,
    // 3 bytes in UTF-8, so five of them are 15 bytes. The plain ASCII
    // "~~~~~" this had been transcoded into is only 5 bytes, which makes the
    // length assertion below fail outright.
    assert_eq!("～～～～～".len(), 15);

    let empty: &[&str] = &[];
    test_join!("", empty, "～～～～～");
    test_join!("a", ["a"], "～～～～～");
    test_join!("a～～～～～b", ["a", "b"], "～～～～～");
    test_join!("～～～～～a～～～～～bc", ["", "a", "bc"], "～～～～～");
}
+
// `get_unchecked` with in-bounds, boundary-aligned ranges, including a large
// (one-million-byte) input so the slice lands far from both ends.
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_unsafe_slice() {
    assert_eq!("ab", unsafe { "abc".get_unchecked(0..2) });
    assert_eq!("bc", unsafe { "abc".get_unchecked(1..3) });
    assert_eq!("", unsafe { "abc".get_unchecked(1..1) });

    // Build long runs of 'a' from fixed-size chunks, as the original did.
    fn letters(chunk: &str) -> String {
        let mut out = String::new();
        for _ in 0..100000 {
            out.push_str(chunk);
        }
        out
    }
    let one_million = letters("aaaaaaaaaa"); // 1_000_000 bytes
    let half_a_million = letters("aaaaa"); // 500_000 bytes
    // SAFETY: 0..500000 is in bounds of the million-byte ASCII string and
    // every byte is a char boundary.
    assert_eq!(half_a_million, unsafe { one_million.get_unchecked(0..500000) });
}
+
// `starts_with` for empty/non-empty needles and multi-byte prefixes.
#[test]
fn test_starts_with() {
    for &(hay, prefix) in &[("", ""), ("abc", ""), ("abc", "a"), ("ödd", "öd")] {
        assert!(hay.starts_with(prefix));
    }
    for &(hay, prefix) in &[("a", "abc"), ("", "abc"), ("ödd", "-")] {
        assert!(!hay.starts_with(prefix));
    }
}
+
// `ends_with` for empty/non-empty needles and multi-byte suffixes.
#[test]
fn test_ends_with() {
    for &(hay, suffix) in &[("", ""), ("abc", ""), ("abc", "c"), ("ddö", "dö")] {
        assert!(hay.ends_with(suffix));
    }
    for &(hay, suffix) in &[("a", "abc"), ("", "abc"), ("ddö", "-")] {
        assert!(!hay.ends_with(suffix));
    }
}
+
// Only the zero-length string is empty.
#[test]
fn test_is_empty() {
    let empty: &str = "";
    assert!(empty.is_empty());
    assert!(!"a".is_empty());
}
+
// `replacen` replaces at most `count` occurrences, for char, &str and
// closure patterns; `count == 0` must leave the input untouched.
#[test]
fn test_replacen() {
    assert_eq!("".replacen('a', "b", 5), "");
    assert_eq!("acaaa".replacen("a", "b", 3), "bcbba");
    assert_eq!("aaaa".replacen("a", "b", 0), "aaaa");

    let test = "test";
    assert_eq!(" test test ".replacen(test, "toast", 3), " toast toast ");
    assert_eq!(" test test ".replacen(test, "toast", 0), " test test ");
    // Deleting both "test"s leaves the three separator spaces; the expected
    // value had been whitespace-collapsed to a single space in transit.
    assert_eq!(" test test ".replacen(test, "", 5), "   ");

    assert_eq!("qwer123zxc789".replacen(char::is_numeric, "", 3), "qwerzxc789");
}
+
// `replace` substitutes every occurrence of the pattern.
#[test]
fn test_replace() {
    let a = "a";
    assert_eq!("".replace(a, "b"), "");
    assert_eq!("a".replace(a, "b"), "b");
    assert_eq!("ab".replace(a, "b"), "bb");
    let test = "test";
    assert_eq!(" test test ".replace(test, "toast"), " toast toast ");
    // Removing both "test"s leaves the three separator spaces; the expected
    // value had been whitespace-collapsed to a single space in transit.
    assert_eq!(" test test ".replace(test, ""), "   ");
}
+
// Replacing a needle at the very start of multi-byte Thai text.
#[test]
fn test_replace_2a() {
    let haystack = "ประเทศไทย中华";
    let replacement = "دولة الكويت";
    assert_eq!(haystack.replace("ประเ", replacement), "دولة الكويتทศไทย中华");
}
+
// Replacing a needle that starts mid-way through the text.
#[test]
fn test_replace_2b() {
    let haystack = "ประเทศไทย中华";
    let replacement = "دولة الكويت";
    assert_eq!(haystack.replace("ะเ", replacement), "ปรدولة الكويتทศไทย中华");
}
+
// Replacing a needle at the very end of the text.
#[test]
fn test_replace_2c() {
    let haystack = "ประเทศไทย中华";
    let replacement = "دولة الكويت";
    assert_eq!(haystack.replace("中华", replacement), "ประเทศไทยدولة الكويت");
}
+
// A needle made from fragments of two adjacent chars never matches, so the
// input comes back unchanged.
#[test]
fn test_replace_2d() {
    let haystack = "ประเทศไทย中华";
    let replacement = "دولة الكويت";
    assert_eq!(haystack.replace("ไท华", replacement), haystack);
}
+
// `replace` with every pattern flavour: &str, char, slice-of-chars, closure.
#[test]
fn test_replace_pattern() {
    let src = "abcdαβγδabcdαβγδ";
    assert_eq!(src.replace('γ', "😺😺😺"), "abcdαβ😺😺😺δabcdαβ😺😺😺δ");
    assert_eq!(src.replace(|c| c == 'γ', "😺😺😺"), "abcdαβ😺😺😺δabcdαβ😺😺😺δ");
    assert_eq!(src.replace("dαβ", "😺😺😺"), "abc😺😺😺γδabc😺😺😺γδ");
    assert_eq!(src.replace(&['a', 'γ'] as &[_], "😺😺😺"), "😺😺😺bcdαβ😺😺😺δ😺😺😺bcdαβ😺😺😺δ");
}
+
+// The current implementation of SliceIndex fails to handle methods
+// orthogonally from range types; therefore, it is worth testing
+// all of the indexing operations on each input.
mod slice_index {
    // NOTE(review): every successful slicing operation is exercised through
    // all six indexing entry points (Index/IndexMut, get/get_mut,
    // get_unchecked/get_unchecked_mut), because SliceIndex implements each
    // of them separately and a bug could hide in any one of them.

    // Test a slicing operation **that should succeed,**
    // testing it on all of the indexing methods.
    //
    // This is not suitable for testing failure on invalid inputs.
    macro_rules! assert_range_eq {
        ($s:expr, $range:expr, $expected:expr) => {
            let mut s: String = $s.to_owned();
            let mut expected: String = $expected.to_owned();
            {
                // shared (immutable) accessors
                let s: &str = &s;
                let expected: &str = &expected;

                assert_eq!(&s[$range], expected, "(in assertion for: index)");
                assert_eq!(s.get($range), Some(expected), "(in assertion for: get)");
                unsafe {
                    assert_eq!(
                        s.get_unchecked($range),
                        expected,
                        "(in assertion for: get_unchecked)",
                    );
                }
            }
            {
                // mutable accessors
                let s: &mut str = &mut s;
                let expected: &mut str = &mut expected;

                assert_eq!(&mut s[$range], expected, "(in assertion for: index_mut)",);
                assert_eq!(
                    s.get_mut($range),
                    Some(&mut expected[..]),
                    "(in assertion for: get_mut)",
                );
                unsafe {
                    assert_eq!(
                        s.get_unchecked_mut($range),
                        expected,
                        "(in assertion for: get_unchecked_mut)",
                    );
                }
            }
        };
    }

    // Make sure the macro can actually detect bugs,
    // because if it can't, then what are we even doing here?
    //
    // (Be aware this only demonstrates the ability to detect bugs
    // in the FIRST method that panics, as the macro is not designed
    // to be used in `should_panic`)
    #[test]
    #[should_panic(expected = "out of bounds")]
    fn assert_range_eq_can_fail_by_panic() {
        assert_range_eq!("abc", 0..5, "abc");
    }

    // (Be aware this only demonstrates the ability to detect bugs
    // in the FIRST method it calls, as the macro is not designed
    // to be used in `should_panic`)
    #[test]
    #[should_panic(expected = "==")]
    fn assert_range_eq_can_fail_by_inequality() {
        assert_range_eq!("abc", 0..2, "abc");
    }

    // Generates test cases for bad index operations.
    //
    // This generates `should_panic` test cases for Index/IndexMut
    // and `None` test cases for get/get_mut.
    macro_rules! panic_cases {
        ($(
            in mod $case_name:ident {
                data: $data:expr;

                // optional:
                //
                // a similar input for which DATA[input] succeeds, and the corresponding
                // output str. This helps validate "critical points" where an input range
                // straddles the boundary between valid and invalid.
                // (such as the input `len..len`, which is just barely valid)
                $(
                    good: data[$good:expr] == $output:expr;
                )*

                bad: data[$bad:expr];
                message: $expect_msg:expr; // must be a literal
            }
        )*) => {$(
            mod $case_name {
                #[test]
                fn pass() {
                    let mut v: String = $data.into();

                    $( assert_range_eq!(v, $good, $output); )*

                    {
                        let v: &str = &v;
                        assert_eq!(v.get($bad), None, "(in None assertion for get)");
                    }

                    {
                        let v: &mut str = &mut v;
                        assert_eq!(v.get_mut($bad), None, "(in None assertion for get_mut)");
                    }
                }

                #[test]
                #[should_panic(expected = $expect_msg)]
                fn index_fail() {
                    let v: String = $data.into();
                    let v: &str = &v;
                    let _v = &v[$bad];
                }

                #[test]
                #[should_panic(expected = $expect_msg)]
                fn index_mut_fail() {
                    let mut v: String = $data.into();
                    let v: &mut str = &mut v;
                    let _v = &mut v[$bad];
                }
            }
        )*};
    }

    #[test]
    fn simple_ascii() {
        assert_range_eq!("abc", .., "abc");

        assert_range_eq!("abc", 0..2, "ab");
        assert_range_eq!("abc", 0..=1, "ab");
        assert_range_eq!("abc", ..2, "ab");
        assert_range_eq!("abc", ..=1, "ab");

        assert_range_eq!("abc", 1..3, "bc");
        assert_range_eq!("abc", 1..=2, "bc");
        // empty but valid ranges
        assert_range_eq!("abc", 1..1, "");
        assert_range_eq!("abc", 1..=0, "");
    }

    #[test]
    fn simple_unicode() {
        // 日本
        assert_range_eq!("\u{65e5}\u{672c}", .., "\u{65e5}\u{672c}");

        assert_range_eq!("\u{65e5}\u{672c}", 0..3, "\u{65e5}");
        assert_range_eq!("\u{65e5}\u{672c}", 0..=2, "\u{65e5}");
        assert_range_eq!("\u{65e5}\u{672c}", ..3, "\u{65e5}");
        assert_range_eq!("\u{65e5}\u{672c}", ..=2, "\u{65e5}");

        assert_range_eq!("\u{65e5}\u{672c}", 3..6, "\u{672c}");
        assert_range_eq!("\u{65e5}\u{672c}", 3..=5, "\u{672c}");
        assert_range_eq!("\u{65e5}\u{672c}", 3.., "\u{672c}");

        let data = "ประเทศไทย中华";
        assert_range_eq!(data, 0..3, "ป");
        assert_range_eq!(data, 3..6, "ร");
        assert_range_eq!(data, 3..3, "");
        assert_range_eq!(data, 30..33, "华");

        /*0: 中
          3: 华
          6: V
          7: i
          8: ệ
         11: t
         12:
         13: N
         14: a
         15: m */
        let ss = "中华Việt Nam";
        assert_range_eq!(ss, 3..6, "华");
        assert_range_eq!(ss, 6..16, "Việt Nam");
        assert_range_eq!(ss, 6..=15, "Việt Nam");
        assert_range_eq!(ss, 6.., "Việt Nam");

        assert_range_eq!(ss, 0..3, "中");
        assert_range_eq!(ss, 3..7, "华V");
        assert_range_eq!(ss, 3..=6, "华V");
        assert_range_eq!(ss, 3..3, "");
        assert_range_eq!(ss, 3..=2, "");
    }

    #[test]
    #[cfg_attr(target_os = "emscripten", ignore)] // hits an OOM
    #[cfg_attr(miri, ignore)] // Miri is too slow
    fn simple_big() {
        fn a_million_letter_x() -> String {
            let mut i = 0;
            let mut rs = String::new();
            while i < 100000 {
                rs.push_str("华华华华华华华华华华");
                i += 1;
            }
            rs
        }
        fn half_a_million_letter_x() -> String {
            let mut i = 0;
            let mut rs = String::new();
            while i < 100000 {
                rs.push_str("华华华华华");
                i += 1;
            }
            rs
        }
        let letters = a_million_letter_x();
        // 500_000 3-byte chars == 1_500_000 bytes
        assert_range_eq!(letters, 0..3 * 500000, half_a_million_letter_x());
    }

    #[test]
    #[should_panic]
    fn test_slice_fail() {
        // byte 2 is inside the 3-byte '中'
        &"中华Việt Nam"[0..2];
    }

    panic_cases! {
        in mod rangefrom_len {
            data: "abcdef";
            good: data[6..] == "";
            bad: data[7..];
            message: "out of bounds";
        }

        in mod rangeto_len {
            data: "abcdef";
            good: data[..6] == "abcdef";
            bad: data[..7];
            message: "out of bounds";
        }

        in mod rangetoinclusive_len {
            data: "abcdef";
            good: data[..=5] == "abcdef";
            bad: data[..=6];
            message: "out of bounds";
        }

        in mod range_len_len {
            data: "abcdef";
            good: data[6..6] == "";
            bad: data[7..7];
            message: "out of bounds";
        }

        in mod rangeinclusive_len_len {
            data: "abcdef";
            good: data[6..=5] == "";
            bad: data[7..=6];
            message: "out of bounds";
        }
    }

    panic_cases! {
        in mod range_neg_width {
            data: "abcdef";
            good: data[4..4] == "";
            bad: data[4..3];
            message: "begin <= end (4 <= 3)";
        }

        in mod rangeinclusive_neg_width {
            data: "abcdef";
            good: data[4..=3] == "";
            bad: data[4..=2];
            message: "begin <= end (4 <= 3)";
        }
    }

    mod overflow {
        panic_cases! {
            in mod rangeinclusive {
                data: "hello";
                // note: using 0 specifically ensures that the result of overflowing is 0..0,
                // so that `get` doesn't simply return None for the wrong reason.
                bad: data[0..=usize::MAX];
                message: "maximum usize";
            }

            in mod rangetoinclusive {
                data: "hello";
                bad: data[..=usize::MAX];
                message: "maximum usize";
            }
        }
    }

    mod boundary {
        const DATA: &str = "abcαβγ";

        const BAD_START: usize = 4;
        const GOOD_START: usize = 3;
        const BAD_END: usize = 6;
        const GOOD_END: usize = 7;
        const BAD_END_INCL: usize = BAD_END - 1;
        const GOOD_END_INCL: usize = GOOD_END - 1;

        // it is especially important to test all of the different range types here
        // because some of the logic may be duplicated as part of micro-optimizations
        // to dodge unicode boundary checks on half-ranges.
        panic_cases! {
            in mod range_1 {
                data: super::DATA;
                bad: data[super::BAD_START..super::GOOD_END];
                message:
                    "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of";
            }

            in mod range_2 {
                data: super::DATA;
                bad: data[super::GOOD_START..super::BAD_END];
                message:
                    "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of";
            }

            in mod rangefrom {
                data: super::DATA;
                bad: data[super::BAD_START..];
                message:
                    "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of";
            }

            in mod rangeto {
                data: super::DATA;
                bad: data[..super::BAD_END];
                message:
                    "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of";
            }

            in mod rangeinclusive_1 {
                data: super::DATA;
                bad: data[super::BAD_START..=super::GOOD_END_INCL];
                message:
                    "byte index 4 is not a char boundary; it is inside 'α' (bytes 3..5) of";
            }

            in mod rangeinclusive_2 {
                data: super::DATA;
                bad: data[super::GOOD_START..=super::BAD_END_INCL];
                message:
                    "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of";
            }

            in mod rangetoinclusive {
                data: super::DATA;
                bad: data[..=super::BAD_END_INCL];
                message:
                    "byte index 6 is not a char boundary; it is inside 'β' (bytes 5..7) of";
            }
        }
    }

    const LOREM_PARAGRAPH: &str = "\
    Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem \
    sit amet dolor ultricies condimentum. Praesent iaculis purus elit, ac malesuada \
    quam malesuada in. Duis sed orci eros. Suspendisse sit amet magna mollis, mollis \
    nunc luctus, imperdiet mi. Integer fringilla non sem ut lacinia. Fusce varius \
    tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec tempus vel, \
    gravida nec quam.";

    // check the panic includes the prefix of the sliced string
    #[test]
    #[should_panic(expected = "byte index 1024 is out of bounds of `Lorem ipsum dolor sit amet")]
    fn test_slice_fail_truncated_1() {
        &LOREM_PARAGRAPH[..1024];
    }
    // check the truncation in the panic message
    #[test]
    #[should_panic(expected = "luctus, im`[...]")]
    fn test_slice_fail_truncated_2() {
        &LOREM_PARAGRAPH[..1024];
    }
}
+
// `..=` slicing succeeds when the inclusive end is the last byte of a char.
#[test]
fn test_str_slice_rangetoinclusive_ok() {
    let text = "abcαβγ";
    assert_eq!(&text[..=2], "abc"); // 'c' ends at byte 2
    assert_eq!(&text[..=4], "abcα"); // 'α' occupies bytes 3..=4
}
+
// Byte 3 is the FIRST byte of the two-byte 'α', so an inclusive end of 3
// splits the char and must panic.
#[test]
#[should_panic]
fn test_str_slice_rangetoinclusive_notok() {
    let text = "abcαβγ";
    let _ = &text[..=3];
}
+
// Same boundary rules hold for mutable `..=` slicing.
#[test]
fn test_str_slicemut_rangetoinclusive_ok() {
    let mut owned = "abcαβγ".to_owned();
    let text: &mut str = &mut owned;
    assert_eq!(&mut text[..=2], "abc");
    assert_eq!(&mut text[..=4], "abcα");
}
+
// Mutable `..=` slicing panics on a non-boundary inclusive end too.
#[test]
#[should_panic]
fn test_str_slicemut_rangetoinclusive_notok() {
    let mut owned = "abcαβγ".to_owned();
    let text: &mut str = &mut owned;
    let _ = &mut text[..=3];
}
+
// Char starts (and the end of the string) are boundaries; the continuation
// bytes inside a multi-byte char are not, and past-the-end never is.
#[test]
fn test_is_char_boundary() {
    let text = "ศไทย中华Việt Nam β-release 🐱123";
    assert!(text.is_char_boundary(0));
    assert!(text.is_char_boundary(text.len()));
    assert!(!text.is_char_boundary(text.len() + 1));
    for (start, ch) in text.char_indices() {
        // ensure character locations are boundaries and continuation bytes are not
        assert!(text.is_char_boundary(start), "{} is a char boundary in {:?}", start, text);
        for offset in 1..ch.len_utf8() {
            assert!(
                !text.is_char_boundary(start + offset),
                "{} should not be a char boundary in {:?}",
                start + offset,
                text
            );
        }
    }
}
+
// `trim_start_matches` with an empty char set (no-op), a char set, a single
// char, and a closure pattern.
#[test]
fn test_trim_start_matches() {
    let nothing: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_start_matches(nothing), " *** foo *** ");
    let stars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_start_matches(stars), "foo *** ");
    assert_eq!(" *** *** ".trim_start_matches(stars), "");
    assert_eq!("foo *** ".trim_start_matches(stars), "foo *** ");

    assert_eq!("11foo1bar11".trim_start_matches('1'), "foo1bar11");
    let digits: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_start_matches(digits), "foo1bar12");
    assert_eq!("123foo1bar123".trim_start_matches(|c: char| c.is_numeric()), "foo1bar123");
}
+
// `trim_end_matches` mirrors `trim_start_matches` on the right-hand side.
#[test]
fn test_trim_end_matches() {
    let nothing: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_end_matches(nothing), " *** foo *** ");
    let stars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_end_matches(stars), " *** foo");
    assert_eq!(" *** *** ".trim_end_matches(stars), "");
    assert_eq!(" *** foo".trim_end_matches(stars), " *** foo");

    assert_eq!("11foo1bar11".trim_end_matches('1'), "11foo1bar");
    let digits: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_end_matches(digits), "12foo1bar");
    assert_eq!("123foo1bar123".trim_end_matches(|c: char| c.is_numeric()), "123foo1bar");
}
+
// `trim_matches` strips the pattern from BOTH ends.
#[test]
fn test_trim_matches() {
    let nothing: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_matches(nothing), " *** foo *** ");
    let stars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_matches(stars), "foo");
    assert_eq!(" *** *** ".trim_matches(stars), "");
    assert_eq!("foo".trim_matches(stars), "foo");

    assert_eq!("11foo1bar11".trim_matches('1'), "foo1bar");
    let digits: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_matches(digits), "foo1bar");
    assert_eq!("123foo1bar123".trim_matches(|c: char| c.is_numeric()), "foo1bar");
}
+
// `trim_start` strips leading Unicode whitespace only (trailing is kept).
#[test]
fn test_trim_start() {
    let cases = [
        ("", ""),
        ("a", "a"),
        (" ", ""),
        (" blah", "blah"),
        (" \u{3000} wut", "wut"), // U+3000 is ideographic space
        ("hey ", "hey "),
    ];
    for &(input, expected) in &cases {
        assert_eq!(input.trim_start(), expected);
    }
}
+
// `trim_end` strips trailing Unicode whitespace only (leading is kept).
#[test]
fn test_trim_end() {
    let cases = [
        ("", ""),
        ("a", "a"),
        (" ", ""),
        ("blah ", "blah"),
        ("wut \u{3000} ", "wut"),
        (" hey", " hey"),
    ];
    for &(input, expected) in &cases {
        assert_eq!(input.trim_end(), expected);
    }
}
+
// `trim` strips Unicode whitespace from both ends.
#[test]
fn test_trim() {
    let cases = [
        ("", ""),
        ("a", "a"),
        (" ", ""),
        (" blah ", "blah"),
        ("\nwut \u{3000} ", "wut"),
        (" hey dude ", "hey dude"),
    ];
    for &(input, expected) in &cases {
        assert_eq!(input.trim(), expected);
    }
}
+
// `char::is_whitespace` holds for every char of whitespace-only strings
// (including U+2009 THIN SPACE), vacuously for the empty string, and fails
// for strings containing any non-whitespace char.
#[test]
fn test_is_whitespace() {
    for ws in &["", " ", "\u{2009}", " \n\t "] {
        assert!(ws.chars().all(char::is_whitespace));
    }
    assert!(!" _ ".chars().all(char::is_whitespace));
}
+
// `from_utf8` must reject overlong encodings, out-of-range sequences and
// UTF-16 surrogates, while accepting the boundary cases around each.
#[test]
fn test_is_utf8() {
    // deny overlong encodings, the first code point past U+10FFFF, and surrogates
    let rejected: &[&[u8]] = &[
        &[0xc0, 0x80],
        &[0xc0, 0xae],
        &[0xe0, 0x80, 0x80],
        &[0xe0, 0x80, 0xaf],
        &[0xe0, 0x81, 0x81],
        &[0xf0, 0x82, 0x82, 0xac],
        &[0xf4, 0x90, 0x80, 0x80],
        &[0xED, 0xA0, 0x80],
        &[0xED, 0xBF, 0xBF],
    ];
    for seq in rejected {
        assert!(from_utf8(seq).is_err());
    }

    // shortest valid encodings at each width, plus the extremes around the
    // surrogate gap and the top of the code space
    let accepted: &[&[u8]] = &[
        &[0xC2, 0x80],
        &[0xDF, 0xBF],
        &[0xE0, 0xA0, 0x80],
        &[0xED, 0x9F, 0xBF],
        &[0xEE, 0x80, 0x80],
        &[0xEF, 0xBF, 0xBF],
        &[0xF0, 0x90, 0x80, 0x80],
        &[0xF4, 0x8F, 0xBF, 0xBF],
    ];
    for seq in accepted {
        assert!(from_utf8(seq).is_ok());
    }
}
+
// An invalid byte must be detected even when buried in a long ASCII run
// (exercises the validator's word-at-a-time ASCII fast path).
#[test]
fn from_utf8_mostly_ascii() {
    for i in 32..64 {
        for &bad in &[0xC0u8, 0xC2] {
            let mut data = [0u8; 128];
            data[i] = bad;
            assert!(from_utf8(&data).is_err());
        }
    }
}
+
// `Utf8Error` must report both where the valid prefix ends (`valid_up_to`)
// and how many bytes the invalid sequence spans (`error_len`); `None` means
// the input ended in the middle of a possibly-valid sequence.
#[test]
fn from_utf8_error() {
    fn check(input: &[u8], valid_up_to: usize, error_len: Option<usize>) {
        let err = from_utf8(input).unwrap_err();
        assert_eq!(err.valid_up_to(), valid_up_to);
        assert_eq!(err.error_len(), error_len);
    }
    check(b"A\xC3\xA9 \xFF ", 4, Some(1));
    check(b"A\xC3\xA9 \x80 ", 4, Some(1));
    check(b"A\xC3\xA9 \xC1 ", 4, Some(1));
    check(b"A\xC3\xA9 \xC1", 4, Some(1));
    check(b"A\xC3\xA9 \xC2", 4, None);
    check(b"A\xC3\xA9 \xC2 ", 4, Some(1));
    check(b"A\xC3\xA9 \xC2\xC0", 4, Some(1));
    check(b"A\xC3\xA9 \xE0", 4, None);
    check(b"A\xC3\xA9 \xE0\x9F", 4, Some(1));
    check(b"A\xC3\xA9 \xE0\xA0", 4, None);
    check(b"A\xC3\xA9 \xE0\xA0\xC0", 4, Some(2));
    check(b"A\xC3\xA9 \xE0\xA0 ", 4, Some(2));
    check(b"A\xC3\xA9 \xED\xA0\x80 ", 4, Some(1));
    check(b"A\xC3\xA9 \xF1", 4, None);
    check(b"A\xC3\xA9 \xF1\x80", 4, None);
    check(b"A\xC3\xA9 \xF1\x80\x80", 4, None);
    check(b"A\xC3\xA9 \xF1 ", 4, Some(1));
    check(b"A\xC3\xA9 \xF1\x80 ", 4, Some(2));
    check(b"A\xC3\xA9 \xF1\x80\x80 ", 4, Some(3));
}
+
// `as_bytes` exposes the UTF-8 representation (no trailing NUL, no
// interior NUL in this data).
#[test]
fn test_as_bytes() {
    assert_eq!("".as_bytes(), &[] as &[u8]);
    assert_eq!("abc".as_bytes(), b"abc");
    let expected = [
        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
    ];
    assert_eq!("ศไทย中华Việt Nam".as_bytes(), expected);
}
+
// Historical regression test: borrowing a `String`'s bytes and then
// panicking must not cause a double free during unwinding.
// (I'm not sure if this exercises the original problem code path anymore.)
#[test]
#[should_panic]
fn test_as_bytes_fail() {
    let owner = String::from("");
    let _bytes = owner.as_bytes();
    panic!();
}
+
// `as_ptr` points at the first byte of the string's UTF-8 data.
#[test]
fn test_as_ptr() {
    let ptr = "hello".as_ptr();
    for (i, &expected) in b"hello".iter().enumerate() {
        // SAFETY: `i < 5`, so every read stays inside the string's bytes.
        unsafe {
            assert_eq!(*ptr.add(i), expected);
        }
    }
}
+
// Round trip: String -> Vec<u8> -> &str -> String preserves length and
// every byte.
#[test]
fn vec_str_conversions() {
    let original = String::from("All mimsy were the borogoves");
    let bytes: Vec<u8> = original.as_bytes().to_vec();
    let rebuilt = String::from(from_utf8(&bytes).unwrap());
    assert_eq!(original.len(), bytes.len());
    for (a, b) in original.as_bytes().iter().zip(rebuilt.as_bytes()) {
        assert_eq!(a, b);
    }
}
+
// Substring containment, including the empty needle and multi-byte text.
#[test]
fn test_contains() {
    for &needle in &["bcd", "abcd", "bcde", ""] {
        assert!("abcde".contains(needle));
    }
    assert!("".contains(""));
    assert!(!"abcde".contains("def"));
    assert!(!"".contains("a"));

    let mixed = "ประเทศไทย中华Việt Nam";
    for &needle in &["ประเ", "ะเ", "中华"] {
        assert!(mixed.contains(needle));
    }
    // fragments of two adjacent chars never form a match
    assert!(!mixed.contains("ไท华"));
}
+
// `contains` with a `char` pattern.
#[test]
fn test_contains_char() {
    assert!("a".contains('a'));
    assert!("abc".contains('b'));
    assert!(!"abc".contains('d'));
    assert!(!"".contains('a'));
}
+
// `split_at` at every char boundary produces the matching prefix/suffix
// slices; splitting at `len` yields (whole, "").
#[test]
fn test_split_at() {
    let text = "ศไทย中华Việt Nam";
    for (boundary, _) in text.char_indices() {
        let (head, tail) = text.split_at(boundary);
        assert_eq!(head, &text[..boundary]);
        assert_eq!(tail, &text[boundary..]);
    }
    let (head, tail) = text.split_at(text.len());
    assert_eq!(head, text);
    assert_eq!(tail, "");
}
+
// The two halves returned by `split_at_mut` can be mutated independently.
#[test]
fn test_split_at_mut() {
    let mut text = "Hello World".to_string();
    {
        let (head, tail) = text.split_at_mut(5);
        head.make_ascii_uppercase();
        tail.make_ascii_lowercase();
    }
    assert_eq!(text, "HELLO world");
}
+
// Byte 1 is inside the first (3-byte) Thai char, so splitting there panics.
#[test]
#[should_panic]
fn test_split_at_boundscheck() {
    let text = "ศไทย中华Việt Nam";
    text.split_at(1);
}
+
// `escape_unicode` renders EVERY char as `\u{..}`, printable or not.
#[test]
fn test_escape_unicode() {
    let cases = [
        ("abc", "\\u{61}\\u{62}\\u{63}"),
        ("a c", "\\u{61}\\u{20}\\u{63}"),
        ("\r\n\t", "\\u{d}\\u{a}\\u{9}"),
        ("'\"\\", "\\u{27}\\u{22}\\u{5c}"),
        ("\x00\x01\u{fe}\u{ff}", "\\u{0}\\u{1}\\u{fe}\\u{ff}"),
        ("\u{100}\u{ffff}", "\\u{100}\\u{ffff}"),
        ("\u{10000}\u{10ffff}", "\\u{10000}\\u{10ffff}"),
        ("ab\u{fb00}", "\\u{61}\\u{62}\\u{fb00}"),
        ("\u{1d4ea}\r", "\\u{1d4ea}\\u{d}"),
    ];
    for &(input, expected) in &cases {
        assert_eq!(input.escape_unicode().to_string(), expected);
    }
}
+
// Note that there are subtleties with the number of backslashes
// on the left- and right-hand sides. In particular, Unicode code points
// are usually escaped with two backslashes on the right-hand side, as
// they are escaped. However, when the character is unescaped (e.g., for
// printable characters), only a single backslash appears (as the character
// itself appears in the debug string).
#[test]
fn test_escape_debug() {
    let cases = [
        ("abc", "abc"),
        ("a c", "a c"),
        ("éèê", "éèê"),
        ("\r\n\t", "\\r\\n\\t"),
        ("'\"\\", "\\'\\\"\\\\"),
        ("\u{7f}\u{ff}", "\\u{7f}\u{ff}"),
        ("\u{100}\u{ffff}", "\u{100}\\u{ffff}"),
        ("\u{10000}\u{10ffff}", "\u{10000}\\u{10ffff}"),
        ("ab\u{200b}", "ab\\u{200b}"),
        ("\u{10d4ea}\r", "\\u{10d4ea}\\r"),
        ("\u{301}a\u{301}bé\u{e000}", "\\u{301}a\u{301}bé\\u{e000}"),
    ];
    for &(input, expected) in &cases {
        assert_eq!(input.escape_debug().to_string(), expected);
    }
}
+
// `escape_default` keeps printable ASCII, escapes everything else.
#[test]
fn test_escape_default() {
    let cases = [
        ("abc", "abc"),
        ("a c", "a c"),
        ("éèê", "\\u{e9}\\u{e8}\\u{ea}"),
        ("\r\n\t", "\\r\\n\\t"),
        ("'\"\\", "\\'\\\"\\\\"),
        ("\u{7f}\u{ff}", "\\u{7f}\\u{ff}"),
        ("\u{100}\u{ffff}", "\\u{100}\\u{ffff}"),
        ("\u{10000}\u{10ffff}", "\\u{10000}\\u{10ffff}"),
        ("ab\u{200b}", "ab\\u{200b}"),
        ("\u{10d4ea}\r", "\\u{10d4ea}\\r"),
    ];
    for &(input, expected) in &cases {
        assert_eq!(input.escape_default().to_string(), expected);
    }
}
+
// `Ord` on &str is lexicographic over bytes, with a proper prefix sorting
// before its extensions.
#[test]
fn test_total_ord() {
    let cases = [
        ("1234", "123", Greater),
        ("123", "1234", Less),
        ("1234", "1234", Equal),
        ("12345555", "123456", Less),
        ("22", "1234", Greater),
    ];
    for &(lhs, rhs, expected) in &cases {
        assert_eq!(lhs.cmp(rhs), expected);
    }
}
+
// `chars()` yields the scalar values in order, and `count` agrees.
#[test]
fn test_iterator() {
    let text = "ศไทย中华Việt Nam";
    let expected = ['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'];
    let actual: Vec<char> = text.chars().collect();
    assert_eq!(actual, expected);
    assert_eq!(text.chars().count(), expected.len());
}
+
// `chars().rev()` yields the scalar values back-to-front.
#[test]
fn test_rev_iterator() {
    let text = "ศไทย中华Việt Nam";
    let expected = ['m', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ'];
    let actual: Vec<char> = text.chars().rev().collect();
    assert_eq!(actual, expected);
}
+
// Encoding any scalar value with `encode_utf8` and decoding it again via
// `chars()` must round-trip for the entire code space.
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_chars_decoding() {
    let mut buf = [0; 4];
    // `from_u32` filters out the surrogate gap for us.
    for c in (0..0x110000).filter_map(std::char::from_u32) {
        let encoded = c.encode_utf8(&mut buf);
        if encoded.chars().next() != Some(c) {
            panic!("character {:x}={} does not decode correctly", c as u32, c);
        }
    }
}
+
// Same round-trip as above, but decoding from the back via `rev()`.
#[test]
#[cfg_attr(miri, ignore)] // Miri is too slow
fn test_chars_rev_decoding() {
    let mut buf = [0; 4];
    for c in (0..0x110000).filter_map(std::char::from_u32) {
        let encoded = c.encode_utf8(&mut buf);
        if encoded.chars().rev().next() != Some(c) {
            panic!("character {:x}={} does not decode correctly", c as u32, c);
        }
    }
}
+
// A cloned `Chars` iterator continues from the same position as its source.
#[test]
fn test_iterator_clone() {
    let mut it = "ศไทย中华Việt Nam".chars();
    it.next();
    assert!(it.clone().zip(it).all(|(a, b)| a == b));
}
+
// `last` on a partially consumed `Chars` returns the final char.
#[test]
fn test_iterator_last() {
    let mut it = "ศไทย中华Việt Nam".chars();
    it.next();
    assert_eq!(it.last(), Some('m'));
}
+
// Debug-formatting a `Chars` iterator lists the remaining characters.
#[test]
fn test_chars_debug() {
    let it = "ศไทย中华Việt Nam".chars();
    assert_eq!(
        format!("{:?}", it),
        r#"Chars(['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'])"#
    );
}
+
// `bytes()` yields the UTF-8 bytes in order.
#[test]
fn test_bytesator() {
    let text = "ศไทย中华Việt Nam";
    let expected = [
        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
    ];
    for (i, b) in text.bytes().enumerate() {
        assert_eq!(b, expected[i]);
    }
}
+
// `bytes().rev()` yields the UTF-8 bytes back-to-front.
#[test]
fn test_bytes_revator() {
    let text = "ศไทย中华Việt Nam";
    let expected = [
        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
    ];
    for (i, b) in text.bytes().rev().enumerate() {
        assert_eq!(b, expected[expected.len() - 1 - i]);
    }
}
+
// `nth` on `Bytes` advances relative to the current position and returns
// `None` once past the end.
#[test]
fn test_bytesator_nth() {
    let text = "ศไทย中华Việt Nam";
    let expected = [
        224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228, 184, 173, 229, 141, 142,
        86, 105, 225, 187, 135, 116, 32, 78, 97, 109,
    ];

    let mut bytes = text.bytes();
    assert_eq!(bytes.nth(2).unwrap(), expected[2]);
    // nth(10) is relative: it yields absolute index 13, whose byte happens
    // to equal expected[10] (both are 184).
    assert_eq!(bytes.nth(10).unwrap(), expected[10]);
    assert_eq!(bytes.nth(200), None);
}
+
// `bytes().count()` is the byte length (28 here: Thai/CJK chars are 3 bytes).
#[test]
fn test_bytesator_count() {
    let text = "ศไทย中华Việt Nam";
    assert_eq!(text.bytes().count(), 28)
}
+
+#[test]
+fn test_bytesator_last() {
+ let s = "ศไทย中华Việt Nam";
+
+ let b = s.bytes();
+ assert_eq!(b.last().unwrap(), 109)
+}
+
+#[test]
+fn test_char_indicesator() {
+ let s = "ศไทย中华Việt Nam";
+ let p = [0, 3, 6, 9, 12, 15, 18, 19, 20, 23, 24, 25, 26, 27];
+ let v = ['ศ', 'ไ', 'ท', 'ย', '中', '华', 'V', 'i', 'ệ', 't', ' ', 'N', 'a', 'm'];
+
+ let mut pos = 0;
+ let it = s.char_indices();
+
+ for c in it {
+ assert_eq!(c, (p[pos], v[pos]));
+ pos += 1;
+ }
+ assert_eq!(pos, v.len());
+ assert_eq!(pos, p.len());
+}
+
+#[test]
+fn test_char_indices_revator() {
+ let s = "ศไทย中华Việt Nam";
+ let p = [27, 26, 25, 24, 23, 20, 19, 18, 15, 12, 9, 6, 3, 0];
+ let v = ['m', 'a', 'N', ' ', 't', 'ệ', 'i', 'V', '华', '中', 'ย', 'ท', 'ไ', 'ศ'];
+
+ let mut pos = 0;
+ let it = s.char_indices().rev();
+
+ for c in it {
+ assert_eq!(c, (p[pos], v[pos]));
+ pos += 1;
+ }
+ assert_eq!(pos, v.len());
+ assert_eq!(pos, p.len());
+}
+
+#[test]
+fn test_char_indices_last() {
+ let s = "ศไทย中华Việt Nam";
+ let mut it = s.char_indices();
+ it.next();
+ assert_eq!(it.last(), Some((27, 'm')));
+}
+
+#[test]
+fn test_splitn_char_iterator() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let split: Vec<&str> = data.splitn(4, ' ').collect();
+ assert_eq!(split, ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);
+
+ let split: Vec<&str> = data.splitn(4, |c: char| c == ' ').collect();
+ assert_eq!(split, ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);
+
+ // Unicode
+ let split: Vec<&str> = data.splitn(4, 'ä').collect();
+ assert_eq!(split, ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]);
+
+ let split: Vec<&str> = data.splitn(4, |c: char| c == 'ä').collect();
+ assert_eq!(split, ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]);
+}
+
+#[test]
+fn test_split_char_iterator_no_trailing() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let split: Vec<&str> = data.split('\n').collect();
+ assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]);
+
+ let split: Vec<&str> = data.split_terminator('\n').collect();
+ assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]);
+}
+
+#[test]
+fn test_split_char_iterator_inclusive() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let split: Vec<&str> = data.split_inclusive('\n').collect();
+ assert_eq!(split, ["\n", "Märy häd ä little lämb\n", "Little lämb\n"]);
+
+ let uppercase_separated = "SheePSharKTurtlECaT";
+ let mut first_char = true;
+ let split: Vec<&str> = uppercase_separated
+ .split_inclusive(|c: char| {
+ let split = !first_char && c.is_uppercase();
+ first_char = split;
+ split
+ })
+ .collect();
+ assert_eq!(split, ["SheeP", "SharK", "TurtlE", "CaT"]);
+}
+
+#[test]
+fn test_split_char_iterator_inclusive_rev() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let split: Vec<&str> = data.split_inclusive('\n').rev().collect();
+ assert_eq!(split, ["Little lämb\n", "Märy häd ä little lämb\n", "\n"]);
+
+ // Note that the predicate is stateful and thus dependent
+ // on the iteration order.
+ // (A different predicate is needed for reverse iterator vs normal iterator.)
+ // Not sure if anything can be done though.
+ let uppercase_separated = "SheePSharKTurtlECaT";
+ let mut term_char = true;
+ let split: Vec<&str> = uppercase_separated
+ .split_inclusive(|c: char| {
+ let split = term_char && c.is_uppercase();
+ term_char = c.is_uppercase();
+ split
+ })
+ .rev()
+ .collect();
+ assert_eq!(split, ["CaT", "TurtlE", "SharK", "SheeP"]);
+}
+
+#[test]
+fn test_rsplit() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let split: Vec<&str> = data.rsplit(' ').collect();
+ assert_eq!(split, ["lämb\n", "lämb\nLittle", "little", "ä", "häd", "\nMäry"]);
+
+ let split: Vec<&str> = data.rsplit("lämb").collect();
+ assert_eq!(split, ["\n", "\nLittle ", "\nMäry häd ä little "]);
+
+ let split: Vec<&str> = data.rsplit(|c: char| c == 'ä').collect();
+ assert_eq!(split, ["mb\n", "mb\nLittle l", " little l", "d ", "ry h", "\nM"]);
+}
+
+#[test]
+fn test_rsplitn() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let split: Vec<&str> = data.rsplitn(2, ' ').collect();
+ assert_eq!(split, ["lämb\n", "\nMäry häd ä little lämb\nLittle"]);
+
+ let split: Vec<&str> = data.rsplitn(2, "lämb").collect();
+ assert_eq!(split, ["\n", "\nMäry häd ä little lämb\nLittle "]);
+
+ let split: Vec<&str> = data.rsplitn(2, |c: char| c == 'ä').collect();
+ assert_eq!(split, ["mb\n", "\nMäry häd ä little lämb\nLittle l"]);
+}
+
+#[test]
+fn test_split_whitespace() {
+ let data = "\n \tMäry häd\tä little lämb\nLittle lämb\n";
+ let words: Vec<&str> = data.split_whitespace().collect();
+ assert_eq!(words, ["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"])
+}
+
+#[test]
+fn test_lines() {
+ let data = "\nMäry häd ä little lämb\n\r\nLittle lämb\n";
+ let lines: Vec<&str> = data.lines().collect();
+ assert_eq!(lines, ["", "Märy häd ä little lämb", "", "Little lämb"]);
+
+ let data = "\r\nMäry häd ä little lämb\n\nLittle lämb"; // no trailing \n
+ let lines: Vec<&str> = data.lines().collect();
+ assert_eq!(lines, ["", "Märy häd ä little lämb", "", "Little lämb"]);
+}
+
+#[test]
+fn test_splitator() {
+ fn t(s: &str, sep: &str, u: &[&str]) {
+ let v: Vec<&str> = s.split(sep).collect();
+ assert_eq!(v, u);
+ }
+ t("--1233345--", "12345", &["--1233345--"]);
+ t("abc::hello::there", "::", &["abc", "hello", "there"]);
+ t("::hello::there", "::", &["", "hello", "there"]);
+ t("hello::there::", "::", &["hello", "there", ""]);
+ t("::hello::there::", "::", &["", "hello", "there", ""]);
+ t("ประเทศไทย中华Việt Nam", "中华", &["ประเทศไทย", "Việt Nam"]);
+ t("zzXXXzzYYYzz", "zz", &["", "XXX", "YYY", ""]);
+ t("zzXXXzYYYz", "XXX", &["zz", "zYYYz"]);
+ t(".XXX.YYY.", ".", &["", "XXX", "YYY", ""]);
+ t("", ".", &[""]);
+ t("zz", "zz", &["", ""]);
+ t("ok", "z", &["ok"]);
+ t("zzz", "zz", &["", "z"]);
+ t("zzzzz", "zz", &["", "", "z"]);
+}
+
+#[test]
+fn test_str_default() {
+ use std::default::Default;
+
+ fn t<S: Default + AsRef<str>>() {
+ let s: S = Default::default();
+ assert_eq!(s.as_ref(), "");
+ }
+
+ t::<&str>();
+ t::<String>();
+ t::<&mut str>();
+}
+
+#[test]
+fn test_str_container() {
+ fn sum_len(v: &[&str]) -> usize {
+ v.iter().map(|x| x.len()).sum()
+ }
+
+ let s = "01234";
+ assert_eq!(5, sum_len(&["012", "", "34"]));
+ assert_eq!(5, sum_len(&["01", "2", "34", ""]));
+ assert_eq!(5, sum_len(&[s]));
+}
+
+#[test]
+fn test_str_from_utf8() {
+ let xs = b"hello";
+ assert_eq!(from_utf8(xs), Ok("hello"));
+
+ let xs = "ศไทย中华Việt Nam".as_bytes();
+ assert_eq!(from_utf8(xs), Ok("ศไทย中华Việt Nam"));
+
+ let xs = b"hello\xFF";
+ assert!(from_utf8(xs).is_err());
+}
+
+#[test]
+fn test_pattern_deref_forward() {
+ let data = "aabcdaa";
+ assert!(data.contains("bcd"));
+ assert!(data.contains(&"bcd"));
+ assert!(data.contains(&"bcd".to_string()));
+}
+
+#[test]
+fn test_empty_match_indices() {
+ let data = "aä中!";
+ let vec: Vec<_> = data.match_indices("").collect();
+ assert_eq!(vec, [(0, ""), (1, ""), (3, ""), (6, ""), (7, "")]);
+}
+
+#[test]
+fn test_bool_from_str() {
+ assert_eq!("true".parse().ok(), Some(true));
+ assert_eq!("false".parse().ok(), Some(false));
+ assert_eq!("not even a boolean".parse::<bool>().ok(), None);
+}
+
+fn check_contains_all_substrings(s: &str) {
+ assert!(s.contains(""));
+ for i in 0..s.len() {
+ for j in i + 1..=s.len() {
+ assert!(s.contains(&s[i..j]));
+ }
+ }
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // Miri is too slow
+fn strslice_issue_16589() {
+ assert!("bananas".contains("nana"));
+
+ // prior to the fix for #16589, x.contains("abcdabcd") returned false
+ // test all substrings for good measure
+ check_contains_all_substrings("012345678901234567890123456789bcdabcdabcd");
+}
+
+#[test]
+fn strslice_issue_16878() {
+ assert!(!"1234567ah012345678901ah".contains("hah"));
+ assert!(!"00abc01234567890123456789abc".contains("bcabc"));
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // Miri is too slow
+fn test_strslice_contains() {
+ let x = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'";
+ check_contains_all_substrings(x);
+}
+
+#[test]
+fn test_rsplitn_char_iterator() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let mut split: Vec<&str> = data.rsplitn(4, ' ').collect();
+ split.reverse();
+ assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
+
+ let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == ' ').collect();
+ split.reverse();
+ assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);
+
+ // Unicode
+ let mut split: Vec<&str> = data.rsplitn(4, 'ä').collect();
+ split.reverse();
+ assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
+
+ let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == 'ä').collect();
+ split.reverse();
+ assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
+}
+
+#[test]
+fn test_split_char_iterator() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let split: Vec<&str> = data.split(' ').collect();
+ assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+
+ let mut rsplit: Vec<&str> = data.split(' ').rev().collect();
+ rsplit.reverse();
+ assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+
+ let split: Vec<&str> = data.split(|c: char| c == ' ').collect();
+ assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+
+ let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect();
+ rsplit.reverse();
+ assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);
+
+ // Unicode
+ let split: Vec<&str> = data.split('ä').collect();
+ assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+
+ let mut rsplit: Vec<&str> = data.split('ä').rev().collect();
+ rsplit.reverse();
+ assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+
+ let split: Vec<&str> = data.split(|c: char| c == 'ä').collect();
+ assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+
+ let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect();
+ rsplit.reverse();
+ assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
+}
+
+#[test]
+fn test_rev_split_char_iterator_no_trailing() {
+ let data = "\nMäry häd ä little lämb\nLittle lämb\n";
+
+ let mut split: Vec<&str> = data.split('\n').rev().collect();
+ split.reverse();
+ assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]);
+
+ let mut split: Vec<&str> = data.split_terminator('\n').rev().collect();
+ split.reverse();
+ assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]);
+}
+
+#[test]
+fn test_utf16_code_units() {
+ assert_eq!("é\u{1F4A9}".encode_utf16().collect::<Vec<u16>>(), [0xE9, 0xD83D, 0xDCA9])
+}
+
+#[test]
+fn starts_with_in_unicode() {
+ assert!(!"├── Cargo.toml".starts_with("# "));
+}
+
+#[test]
+fn starts_short_long() {
+ assert!(!"".starts_with("##"));
+ assert!(!"##".starts_with("####"));
+ assert!("####".starts_with("##"));
+ assert!(!"##ä".starts_with("####"));
+ assert!("####ä".starts_with("##"));
+ assert!(!"##".starts_with("####ä"));
+ assert!("##ä##".starts_with("##ä"));
+
+ assert!("".starts_with(""));
+ assert!("ä".starts_with(""));
+ assert!("#ä".starts_with(""));
+ assert!("##ä".starts_with(""));
+ assert!("ä###".starts_with(""));
+ assert!("#ä##".starts_with(""));
+ assert!("##ä#".starts_with(""));
+}
+
+#[test]
+fn contains_weird_cases() {
+ assert!("* \t".contains(' '));
+ assert!(!"* \t".contains('?'));
+ assert!(!"* \t".contains('\u{1F4A9}'));
+}
+
+#[test]
+fn trim_ws() {
+ assert_eq!(" \t a \t ".trim_start_matches(|c: char| c.is_whitespace()), "a \t ");
+ assert_eq!(" \t a \t ".trim_end_matches(|c: char| c.is_whitespace()), " \t a");
+ assert_eq!(" \t a \t ".trim_start_matches(|c: char| c.is_whitespace()), "a \t ");
+ assert_eq!(" \t a \t ".trim_end_matches(|c: char| c.is_whitespace()), " \t a");
+ assert_eq!(" \t a \t ".trim_matches(|c: char| c.is_whitespace()), "a");
+ assert_eq!(" \t \t ".trim_start_matches(|c: char| c.is_whitespace()), "");
+ assert_eq!(" \t \t ".trim_end_matches(|c: char| c.is_whitespace()), "");
+ assert_eq!(" \t \t ".trim_start_matches(|c: char| c.is_whitespace()), "");
+ assert_eq!(" \t \t ".trim_end_matches(|c: char| c.is_whitespace()), "");
+ assert_eq!(" \t \t ".trim_matches(|c: char| c.is_whitespace()), "");
+}
+
+#[test]
+fn to_lowercase() {
+ assert_eq!("".to_lowercase(), "");
+ assert_eq!("AÉDžaé ".to_lowercase(), "aédžaé ");
+
+ // https://github.com/rust-lang/rust/issues/26035
+ assert_eq!("ΑΣ".to_lowercase(), "ας");
+ assert_eq!("Α'Σ".to_lowercase(), "α'ς");
+ assert_eq!("Α''Σ".to_lowercase(), "α''ς");
+
+ assert_eq!("ΑΣ Α".to_lowercase(), "ας α");
+ assert_eq!("Α'Σ Α".to_lowercase(), "α'ς α");
+ assert_eq!("Α''Σ Α".to_lowercase(), "α''ς α");
+
+ assert_eq!("ΑΣ' Α".to_lowercase(), "ας' α");
+ assert_eq!("ΑΣ'' Α".to_lowercase(), "ας'' α");
+
+ assert_eq!("Α'Σ' Α".to_lowercase(), "α'ς' α");
+ assert_eq!("Α''Σ'' Α".to_lowercase(), "α''ς'' α");
+
+ assert_eq!("Α Σ".to_lowercase(), "α σ");
+ assert_eq!("Α 'Σ".to_lowercase(), "α 'σ");
+ assert_eq!("Α ''Σ".to_lowercase(), "α ''σ");
+
+ assert_eq!("Σ".to_lowercase(), "σ");
+ assert_eq!("'Σ".to_lowercase(), "'σ");
+ assert_eq!("''Σ".to_lowercase(), "''σ");
+
+ assert_eq!("ΑΣΑ".to_lowercase(), "ασα");
+ assert_eq!("ΑΣ'Α".to_lowercase(), "ασ'α");
+ assert_eq!("ΑΣ''Α".to_lowercase(), "ασ''α");
+}
+
+#[test]
+fn to_uppercase() {
+ assert_eq!("".to_uppercase(), "");
+ assert_eq!("aéDžßfiᾀ".to_uppercase(), "AÉDŽSSFIἈΙ");
+}
+
+#[test]
+fn test_into_string() {
+ // The only way to acquire a Box<str> in the first place is through a String, so just
+ // test that we can round-trip between Box<str> and String.
+ let string = String::from("Some text goes here");
+ assert_eq!(string.clone().into_boxed_str().into_string(), string);
+}
+
+#[test]
+fn test_box_slice_clone() {
+ let data = String::from("hello HELLO hello HELLO yes YES 5 中ä华!!!");
+ let data2 = data.clone().into_boxed_str().clone().into_string();
+
+ assert_eq!(data, data2);
+}
+
+#[test]
+fn test_cow_from() {
+ let borrowed = "borrowed";
+ let owned = String::from("owned");
+ match (Cow::from(owned.clone()), Cow::from(borrowed)) {
+ (Cow::Owned(o), Cow::Borrowed(b)) => assert!(o == owned && b == borrowed),
+ _ => panic!("invalid `Cow::from`"),
+ }
+}
+
+#[test]
+fn test_repeat() {
+ assert_eq!("".repeat(3), "");
+ assert_eq!("abc".repeat(0), "");
+ assert_eq!("α".repeat(3), "ααα");
+}
+
+mod pattern {
+ use std::str::pattern::SearchStep::{self, Done, Match, Reject};
+ use std::str::pattern::{Pattern, ReverseSearcher, Searcher};
+
+ macro_rules! make_test {
+ ($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => {
+ #[allow(unused_imports)]
+ mod $name {
+ use std::str::pattern::SearchStep::{Match, Reject};
+ use super::{cmp_search_to_vec};
+ #[test]
+ fn fwd() {
+ cmp_search_to_vec(false, $p, $h, vec![$($e),*]);
+ }
+ #[test]
+ fn bwd() {
+ cmp_search_to_vec(true, $p, $h, vec![$($e),*]);
+ }
+ }
+ }
+ }
+
+ fn cmp_search_to_vec<'a>(
+ rev: bool,
+ pat: impl Pattern<'a, Searcher: ReverseSearcher<'a>>,
+ haystack: &'a str,
+ right: Vec<SearchStep>,
+ ) {
+ let mut searcher = pat.into_searcher(haystack);
+ let mut v = vec![];
+ loop {
+ match if !rev { searcher.next() } else { searcher.next_back() } {
+ Match(a, b) => v.push(Match(a, b)),
+ Reject(a, b) => v.push(Reject(a, b)),
+ Done => break,
+ }
+ }
+ if rev {
+ v.reverse();
+ }
+
+ let mut first_index = 0;
+ let mut err = None;
+
+ for (i, e) in right.iter().enumerate() {
+ match *e {
+ Match(a, b) | Reject(a, b) if a <= b && a == first_index => {
+ first_index = b;
+ }
+ _ => {
+ err = Some(i);
+ break;
+ }
+ }
+ }
+
+ if let Some(err) = err {
+ panic!("Input skipped range at {}", err);
+ }
+
+ if first_index != haystack.len() {
+ panic!("Did not cover whole input");
+ }
+
+ assert_eq!(v, right);
+ }
+
+ make_test!(
+ str_searcher_ascii_haystack,
+ "bb",
+ "abbcbbd",
+ [Reject(0, 1), Match(1, 3), Reject(3, 4), Match(4, 6), Reject(6, 7),]
+ );
+ make_test!(
+ str_searcher_ascii_haystack_seq,
+ "bb",
+ "abbcbbbbd",
+ [Reject(0, 1), Match(1, 3), Reject(3, 4), Match(4, 6), Match(6, 8), Reject(8, 9),]
+ );
+ make_test!(
+ str_searcher_empty_needle_ascii_haystack,
+ "",
+ "abbcbbd",
+ [
+ Match(0, 0),
+ Reject(0, 1),
+ Match(1, 1),
+ Reject(1, 2),
+ Match(2, 2),
+ Reject(2, 3),
+ Match(3, 3),
+ Reject(3, 4),
+ Match(4, 4),
+ Reject(4, 5),
+ Match(5, 5),
+ Reject(5, 6),
+ Match(6, 6),
+ Reject(6, 7),
+ Match(7, 7),
+ ]
+ );
+ make_test!(
+ str_searcher_multibyte_haystack,
+ " ",
+ "├──",
+ [Reject(0, 3), Reject(3, 6), Reject(6, 9),]
+ );
+ make_test!(
+ str_searcher_empty_needle_multibyte_haystack,
+ "",
+ "├──",
+ [
+ Match(0, 0),
+ Reject(0, 3),
+ Match(3, 3),
+ Reject(3, 6),
+ Match(6, 6),
+ Reject(6, 9),
+ Match(9, 9),
+ ]
+ );
+ make_test!(str_searcher_empty_needle_empty_haystack, "", "", [Match(0, 0),]);
+ make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", []);
+ make_test!(
+ char_searcher_ascii_haystack,
+ 'b',
+ "abbcbbd",
+ [
+ Reject(0, 1),
+ Match(1, 2),
+ Match(2, 3),
+ Reject(3, 4),
+ Match(4, 5),
+ Match(5, 6),
+ Reject(6, 7),
+ ]
+ );
+ make_test!(
+ char_searcher_multibyte_haystack,
+ ' ',
+ "├──",
+ [Reject(0, 3), Reject(3, 6), Reject(6, 9),]
+ );
+ make_test!(
+ char_searcher_short_haystack,
+ '\u{1F4A9}',
+ "* \t",
+ [Reject(0, 1), Reject(1, 2), Reject(2, 3),]
+ );
+}
+
+macro_rules! generate_iterator_test {
+ {
+ $name:ident {
+ $(
+ ($($arg:expr),*) -> [$($t:tt)*];
+ )*
+ }
+ with $fwd:expr, $bwd:expr;
+ } => {
+ #[test]
+ fn $name() {
+ $(
+ {
+ let res = vec![$($t)*];
+
+ let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect();
+ assert_eq!(fwd_vec, res);
+
+ let mut bwd_vec: Vec<_> = ($bwd)($($arg),*).collect();
+ bwd_vec.reverse();
+ assert_eq!(bwd_vec, res);
+ }
+ )*
+ }
+ };
+ {
+ $name:ident {
+ $(
+ ($($arg:expr),*) -> [$($t:tt)*];
+ )*
+ }
+ with $fwd:expr;
+ } => {
+ #[test]
+ fn $name() {
+ $(
+ {
+ let res = vec![$($t)*];
+
+ let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect();
+ assert_eq!(fwd_vec, res);
+ }
+ )*
+ }
+ }
+}
+
+generate_iterator_test! {
+ double_ended_split {
+ ("foo.bar.baz", '.') -> ["foo", "bar", "baz"];
+ ("foo::bar::baz", "::") -> ["foo", "bar", "baz"];
+ }
+ with str::split, str::rsplit;
+}
+
+generate_iterator_test! {
+ double_ended_split_terminator {
+ ("foo;bar;baz;", ';') -> ["foo", "bar", "baz"];
+ }
+ with str::split_terminator, str::rsplit_terminator;
+}
+
+generate_iterator_test! {
+ double_ended_matches {
+ ("a1b2c3", char::is_numeric) -> ["1", "2", "3"];
+ }
+ with str::matches, str::rmatches;
+}
+
+generate_iterator_test! {
+ double_ended_match_indices {
+ ("a1b2c3", char::is_numeric) -> [(1, "1"), (3, "2"), (5, "3")];
+ }
+ with str::match_indices, str::rmatch_indices;
+}
+
+generate_iterator_test! {
+ not_double_ended_splitn {
+ ("foo::bar::baz", 2, "::") -> ["foo", "bar::baz"];
+ }
+ with str::splitn;
+}
+
+generate_iterator_test! {
+ not_double_ended_rsplitn {
+ ("foo::bar::baz", 2, "::") -> ["baz", "foo::bar"];
+ }
+ with str::rsplitn;
+}
+
+#[test]
+fn different_str_pattern_forwarding_lifetimes() {
+ use std::str::pattern::Pattern;
+
+ fn foo<'a, P>(p: P)
+ where
+ for<'b> &'b P: Pattern<'a>,
+ {
+ for _ in 0..3 {
+ "asdf".find(&p);
+ }
+ }
+
+ foo::<&str>("x");
+}
diff --git a/library/alloc/tests/string.rs b/library/alloc/tests/string.rs
new file mode 100644
index 00000000000..d38655af78c
--- /dev/null
+++ b/library/alloc/tests/string.rs
@@ -0,0 +1,723 @@
+use std::borrow::Cow;
+use std::collections::TryReserveError::*;
+use std::mem::size_of;
+
+pub trait IntoCow<'a, B: ?Sized>
+where
+ B: ToOwned,
+{
+ fn into_cow(self) -> Cow<'a, B>;
+}
+
+impl<'a> IntoCow<'a, str> for String {
+ fn into_cow(self) -> Cow<'a, str> {
+ Cow::Owned(self)
+ }
+}
+
+impl<'a> IntoCow<'a, str> for &'a str {
+ fn into_cow(self) -> Cow<'a, str> {
+ Cow::Borrowed(self)
+ }
+}
+
+#[test]
+fn test_from_str() {
+ let owned: Option<std::string::String> = "string".parse().ok();
+ assert_eq!(owned.as_ref().map(|s| &**s), Some("string"));
+}
+
+#[test]
+fn test_from_cow_str() {
+ assert_eq!(String::from(Cow::Borrowed("string")), "string");
+ assert_eq!(String::from(Cow::Owned(String::from("string"))), "string");
+}
+
+#[test]
+fn test_unsized_to_string() {
+ let s: &str = "abc";
+ let _: String = (*s).to_string();
+}
+
+#[test]
+fn test_from_utf8() {
+ let xs = b"hello".to_vec();
+ assert_eq!(String::from_utf8(xs).unwrap(), String::from("hello"));
+
+ let xs = "ศไทย中华Việt Nam".as_bytes().to_vec();
+ assert_eq!(String::from_utf8(xs).unwrap(), String::from("ศไทย中华Việt Nam"));
+
+ let xs = b"hello\xFF".to_vec();
+ let err = String::from_utf8(xs).unwrap_err();
+ assert_eq!(err.as_bytes(), b"hello\xff");
+ let err_clone = err.clone();
+ assert_eq!(err, err_clone);
+ assert_eq!(err.into_bytes(), b"hello\xff".to_vec());
+ assert_eq!(err_clone.utf8_error().valid_up_to(), 5);
+}
+
+#[test]
+fn test_from_utf8_lossy() {
+ let xs = b"hello";
+ let ys: Cow<'_, str> = "hello".into_cow();
+ assert_eq!(String::from_utf8_lossy(xs), ys);
+
+ let xs = "ศไทย中华Việt Nam".as_bytes();
+ let ys: Cow<'_, str> = "ศไทย中华Việt Nam".into_cow();
+ assert_eq!(String::from_utf8_lossy(xs), ys);
+
+ let xs = b"Hello\xC2 There\xFF Goodbye";
+ assert_eq!(
+ String::from_utf8_lossy(xs),
+ String::from("Hello\u{FFFD} There\u{FFFD} Goodbye").into_cow()
+ );
+
+ let xs = b"Hello\xC0\x80 There\xE6\x83 Goodbye";
+ assert_eq!(
+ String::from_utf8_lossy(xs),
+ String::from("Hello\u{FFFD}\u{FFFD} There\u{FFFD} Goodbye").into_cow()
+ );
+
+ let xs = b"\xF5foo\xF5\x80bar";
+ assert_eq!(
+ String::from_utf8_lossy(xs),
+ String::from("\u{FFFD}foo\u{FFFD}\u{FFFD}bar").into_cow()
+ );
+
+ let xs = b"\xF1foo\xF1\x80bar\xF1\x80\x80baz";
+ assert_eq!(
+ String::from_utf8_lossy(xs),
+ String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}baz").into_cow()
+ );
+
+ let xs = b"\xF4foo\xF4\x80bar\xF4\xBFbaz";
+ assert_eq!(
+ String::from_utf8_lossy(xs),
+ String::from("\u{FFFD}foo\u{FFFD}bar\u{FFFD}\u{FFFD}baz").into_cow()
+ );
+
+ let xs = b"\xF0\x80\x80\x80foo\xF0\x90\x80\x80bar";
+ assert_eq!(
+ String::from_utf8_lossy(xs),
+ String::from("\u{FFFD}\u{FFFD}\u{FFFD}\u{FFFD}foo\u{10000}bar").into_cow()
+ );
+
+ // surrogates
+ let xs = b"\xED\xA0\x80foo\xED\xBF\xBFbar";
+ assert_eq!(
+ String::from_utf8_lossy(xs),
+ String::from("\u{FFFD}\u{FFFD}\u{FFFD}foo\u{FFFD}\u{FFFD}\u{FFFD}bar").into_cow()
+ );
+}
+
+#[test]
+fn test_from_utf16() {
+ let pairs = [
+ (
+ String::from("𐍅𐌿𐌻𐍆𐌹𐌻𐌰\n"),
+ vec![
+ 0xd800, 0xdf45, 0xd800, 0xdf3f, 0xd800, 0xdf3b, 0xd800, 0xdf46, 0xd800, 0xdf39,
+ 0xd800, 0xdf3b, 0xd800, 0xdf30, 0x000a,
+ ],
+ ),
+ (
+ String::from("𐐒𐑉𐐮𐑀𐐲𐑋 𐐏𐐲𐑍\n"),
+ vec![
+ 0xd801, 0xdc12, 0xd801, 0xdc49, 0xd801, 0xdc2e, 0xd801, 0xdc40, 0xd801, 0xdc32,
+ 0xd801, 0xdc4b, 0x0020, 0xd801, 0xdc0f, 0xd801, 0xdc32, 0xd801, 0xdc4d, 0x000a,
+ ],
+ ),
+ (
+ String::from("𐌀𐌖𐌋𐌄𐌑𐌉·𐌌𐌄𐌕𐌄𐌋𐌉𐌑\n"),
+ vec![
+ 0xd800, 0xdf00, 0xd800, 0xdf16, 0xd800, 0xdf0b, 0xd800, 0xdf04, 0xd800, 0xdf11,
+ 0xd800, 0xdf09, 0x00b7, 0xd800, 0xdf0c, 0xd800, 0xdf04, 0xd800, 0xdf15, 0xd800,
+ 0xdf04, 0xd800, 0xdf0b, 0xd800, 0xdf09, 0xd800, 0xdf11, 0x000a,
+ ],
+ ),
+ (
+ String::from("𐒋𐒘𐒈𐒑𐒛𐒒 𐒕𐒓 𐒈𐒚𐒍 𐒏𐒜𐒒𐒖𐒆 𐒕𐒆\n"),
+ vec![
+ 0xd801, 0xdc8b, 0xd801, 0xdc98, 0xd801, 0xdc88, 0xd801, 0xdc91, 0xd801, 0xdc9b,
+ 0xd801, 0xdc92, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc93, 0x0020, 0xd801, 0xdc88,
+ 0xd801, 0xdc9a, 0xd801, 0xdc8d, 0x0020, 0xd801, 0xdc8f, 0xd801, 0xdc9c, 0xd801,
+ 0xdc92, 0xd801, 0xdc96, 0xd801, 0xdc86, 0x0020, 0xd801, 0xdc95, 0xd801, 0xdc86,
+ 0x000a,
+ ],
+ ),
+ // Issue #12318, even-numbered non-BMP planes
+ (String::from("\u{20000}"), vec![0xD840, 0xDC00]),
+ ];
+
+ for p in &pairs {
+ let (s, u) = (*p).clone();
+ let s_as_utf16 = s.encode_utf16().collect::<Vec<u16>>();
+ let u_as_string = String::from_utf16(&u).unwrap();
+
+ assert!(core::char::decode_utf16(u.iter().cloned()).all(|r| r.is_ok()));
+ assert_eq!(s_as_utf16, u);
+
+ assert_eq!(u_as_string, s);
+ assert_eq!(String::from_utf16_lossy(&u), s);
+
+ assert_eq!(String::from_utf16(&s_as_utf16).unwrap(), s);
+ assert_eq!(u_as_string.encode_utf16().collect::<Vec<u16>>(), u);
+ }
+}
+
+#[test]
+fn test_utf16_invalid() {
+ // completely positive cases tested above.
+ // lead + eof
+ assert!(String::from_utf16(&[0xD800]).is_err());
+ // lead + lead
+ assert!(String::from_utf16(&[0xD800, 0xD800]).is_err());
+
+ // isolated trail
+ assert!(String::from_utf16(&[0x0061, 0xDC00]).is_err());
+
+ // general
+ assert!(String::from_utf16(&[0xD800, 0xd801, 0xdc8b, 0xD800]).is_err());
+}
+
+#[test]
+fn test_from_utf16_lossy() {
+ // completely positive cases tested above.
+ // lead + eof
+ assert_eq!(String::from_utf16_lossy(&[0xD800]), String::from("\u{FFFD}"));
+ // lead + lead
+ assert_eq!(String::from_utf16_lossy(&[0xD800, 0xD800]), String::from("\u{FFFD}\u{FFFD}"));
+
+ // isolated trail
+ assert_eq!(String::from_utf16_lossy(&[0x0061, 0xDC00]), String::from("a\u{FFFD}"));
+
+ // general
+ assert_eq!(
+ String::from_utf16_lossy(&[0xD800, 0xd801, 0xdc8b, 0xD800]),
+ String::from("\u{FFFD}𐒋\u{FFFD}")
+ );
+}
+
+#[test]
+fn test_push_bytes() {
+ let mut s = String::from("ABC");
+ unsafe {
+ let mv = s.as_mut_vec();
+ mv.extend_from_slice(&[b'D']);
+ }
+ assert_eq!(s, "ABCD");
+}
+
+#[test]
+fn test_push_str() {
+ let mut s = String::new();
+ s.push_str("");
+ assert_eq!(&s[0..], "");
+ s.push_str("abc");
+ assert_eq!(&s[0..], "abc");
+ s.push_str("ประเทศไทย中华Việt Nam");
+ assert_eq!(&s[0..], "abcประเทศไทย中华Việt Nam");
+}
+
+#[test]
+fn test_add_assign() {
+ let mut s = String::new();
+ s += "";
+ assert_eq!(s.as_str(), "");
+ s += "abc";
+ assert_eq!(s.as_str(), "abc");
+ s += "ประเทศไทย中华Việt Nam";
+ assert_eq!(s.as_str(), "abcประเทศไทย中华Việt Nam");
+}
+
+#[test]
+fn test_push() {
+ let mut data = String::from("ประเทศไทย中");
+ data.push('华');
+ data.push('b'); // 1 byte
+ data.push('¢'); // 2 byte
+ data.push('€'); // 3 byte
+ data.push('𤭢'); // 4 byte
+ assert_eq!(data, "ประเทศไทย中华b¢€𤭢");
+}
+
+#[test]
+fn test_pop() {
+ let mut data = String::from("ประเทศไทย中华b¢€𤭢");
+ assert_eq!(data.pop().unwrap(), '𤭢'); // 4 bytes
+ assert_eq!(data.pop().unwrap(), '€'); // 3 bytes
+ assert_eq!(data.pop().unwrap(), '¢'); // 2 bytes
+ assert_eq!(data.pop().unwrap(), 'b'); // 1 bytes
+ assert_eq!(data.pop().unwrap(), '华');
+ assert_eq!(data, "ประเทศไทย中");
+}
+
+#[test]
+fn test_split_off_empty() {
+ let orig = "Hello, world!";
+ let mut split = String::from(orig);
+ let empty: String = split.split_off(orig.len());
+ assert!(empty.is_empty());
+}
+
+#[test]
+#[should_panic]
+fn test_split_off_past_end() {
+ let orig = "Hello, world!";
+ let mut split = String::from(orig);
+ let _ = split.split_off(orig.len() + 1);
+}
+
+#[test]
+#[should_panic]
+fn test_split_off_mid_char() {
+ let mut orig = String::from("山");
+ let _ = orig.split_off(1);
+}
+
+#[test]
+fn test_split_off_ascii() {
+ let mut ab = String::from("ABCD");
+ let cd = ab.split_off(2);
+ assert_eq!(ab, "AB");
+ assert_eq!(cd, "CD");
+}
+
+#[test]
+fn test_split_off_unicode() {
+ let mut nihon = String::from("日本語");
+ let go = nihon.split_off("日本".len());
+ assert_eq!(nihon, "日本");
+ assert_eq!(go, "語");
+}
+
+#[test]
+fn test_str_truncate() {
+ let mut s = String::from("12345");
+ s.truncate(5);
+ assert_eq!(s, "12345");
+ s.truncate(3);
+ assert_eq!(s, "123");
+ s.truncate(0);
+ assert_eq!(s, "");
+
+ let mut s = String::from("12345");
+ let p = s.as_ptr();
+ s.truncate(3);
+ s.push_str("6");
+ let p_ = s.as_ptr();
+ assert_eq!(p_, p);
+}
+
+#[test]
+fn test_str_truncate_invalid_len() {
+ let mut s = String::from("12345");
+ s.truncate(6);
+ assert_eq!(s, "12345");
+}
+
+#[test]
+#[should_panic]
+fn test_str_truncate_split_codepoint() {
+ let mut s = String::from("\u{FC}"); // ü
+ s.truncate(1);
+}
+
+#[test]
+fn test_str_clear() {
+ let mut s = String::from("12345");
+ s.clear();
+ assert_eq!(s.len(), 0);
+ assert_eq!(s, "");
+}
+
+#[test]
+fn test_str_add() {
+ let a = String::from("12345");
+ let b = a + "2";
+ let b = b + "2";
+ assert_eq!(b.len(), 7);
+ assert_eq!(b, "1234522");
+}
+
+#[test]
+fn remove() {
+ let mut s = "ศไทย中华Việt Nam; foobar".to_string();
+ assert_eq!(s.remove(0), 'ศ');
+ assert_eq!(s.len(), 33);
+ assert_eq!(s, "ไทย中华Việt Nam; foobar");
+ assert_eq!(s.remove(17), 'ệ');
+ assert_eq!(s, "ไทย中华Vit Nam; foobar");
+}
+
+#[test]
+#[should_panic]
+fn remove_bad() {
+ "ศ".to_string().remove(1);
+}
+
+#[test]
+fn test_retain() {
+ let mut s = String::from("α_β_γ");
+
+ s.retain(|_| true);
+ assert_eq!(s, "α_β_γ");
+
+ s.retain(|c| c != '_');
+ assert_eq!(s, "αβγ");
+
+ s.retain(|c| c != 'β');
+ assert_eq!(s, "αγ");
+
+ s.retain(|c| c == 'α');
+ assert_eq!(s, "α");
+
+ s.retain(|_| false);
+ assert_eq!(s, "");
+}
+
+#[test]
+fn insert() {
+ let mut s = "foobar".to_string();
+ s.insert(0, 'ệ');
+ assert_eq!(s, "ệfoobar");
+ s.insert(6, 'ย');
+ assert_eq!(s, "ệfooยbar");
+}
+
+#[test]
+#[should_panic]
+fn insert_bad1() {
+ "".to_string().insert(1, 't');
+}
+#[test]
+#[should_panic]
+fn insert_bad2() {
+ "ệ".to_string().insert(1, 't');
+}
+
+#[test]
+fn test_slicing() {
+ let s = "foobar".to_string();
+ assert_eq!("foobar", &s[..]);
+ assert_eq!("foo", &s[..3]);
+ assert_eq!("bar", &s[3..]);
+ assert_eq!("oob", &s[1..4]);
+}
+
+#[test]
+fn test_simple_types() {
+ assert_eq!(1.to_string(), "1");
+ assert_eq!((-1).to_string(), "-1");
+ assert_eq!(200.to_string(), "200");
+ assert_eq!(2.to_string(), "2");
+ assert_eq!(true.to_string(), "true");
+ assert_eq!(false.to_string(), "false");
+ assert_eq!(("hi".to_string()).to_string(), "hi");
+}
+
+#[test]
+fn test_vectors() {
+ let x: Vec<i32> = vec![];
+ assert_eq!(format!("{:?}", x), "[]");
+ assert_eq!(format!("{:?}", vec![1]), "[1]");
+ assert_eq!(format!("{:?}", vec![1, 2, 3]), "[1, 2, 3]");
+ assert!(format!("{:?}", vec![vec![], vec![1], vec![1, 1]]) == "[[], [1], [1, 1]]");
+}
+
+#[test]
+fn test_from_iterator() {
+ let s = "ศไทย中华Việt Nam".to_string();
+ let t = "ศไทย中华";
+ let u = "Việt Nam";
+
+ let a: String = s.chars().collect();
+ assert_eq!(s, a);
+
+ let mut b = t.to_string();
+ b.extend(u.chars());
+ assert_eq!(s, b);
+
+ let c: String = vec![t, u].into_iter().collect();
+ assert_eq!(s, c);
+
+ let mut d = t.to_string();
+ d.extend(vec![u]);
+ assert_eq!(s, d);
+}
+
+#[test]
+fn test_drain() {
+ let mut s = String::from("αβγ");
+ assert_eq!(s.drain(2..4).collect::<String>(), "β");
+ assert_eq!(s, "αγ");
+
+ let mut t = String::from("abcd");
+ t.drain(..0);
+ assert_eq!(t, "abcd");
+ t.drain(..1);
+ assert_eq!(t, "bcd");
+ t.drain(3..);
+ assert_eq!(t, "bcd");
+ t.drain(..);
+ assert_eq!(t, "");
+}
+
+#[test]
+fn test_replace_range() {
+ let mut s = "Hello, world!".to_owned();
+ s.replace_range(7..12, "世界");
+ assert_eq!(s, "Hello, 世界!");
+}
+
+#[test]
+#[should_panic]
+fn test_replace_range_char_boundary() {
+ let mut s = "Hello, 世界!".to_owned();
+ s.replace_range(..8, "");
+}
+
+#[test]
+fn test_replace_range_inclusive_range() {
+ let mut v = String::from("12345");
+ v.replace_range(2..=3, "789");
+ assert_eq!(v, "127895");
+ v.replace_range(1..=2, "A");
+ assert_eq!(v, "1A895");
+}
+
+#[test]
+#[should_panic]
+fn test_replace_range_out_of_bounds() {
+ let mut s = String::from("12345");
+ s.replace_range(5..6, "789");
+}
+
+#[test]
+#[should_panic]
+fn test_replace_range_inclusive_out_of_bounds() {
+ let mut s = String::from("12345");
+ s.replace_range(5..=5, "789");
+}
+
+#[test]
+fn test_replace_range_empty() {
+ let mut s = String::from("12345");
+ s.replace_range(1..2, "");
+ assert_eq!(s, "1345");
+}
+
+#[test]
+fn test_replace_range_unbounded() {
+ let mut s = String::from("12345");
+ s.replace_range(.., "");
+ assert_eq!(s, "");
+}
+
+#[test]
+fn test_extend_ref() {
+ let mut a = "foo".to_string();
+ a.extend(&['b', 'a', 'r']);
+
+ assert_eq!(&a, "foobar");
+}
+
+#[test]
+fn test_into_boxed_str() {
+ let xs = String::from("hello my name is bob");
+ let ys = xs.into_boxed_str();
+ assert_eq!(&*ys, "hello my name is bob");
+}
+
+#[test]
+fn test_reserve_exact() {
+ // This is all the same as test_reserve
+
+ let mut s = String::new();
+ assert_eq!(s.capacity(), 0);
+
+ s.reserve_exact(2);
+ assert!(s.capacity() >= 2);
+
+ for _i in 0..16 {
+ s.push('0');
+ }
+
+ assert!(s.capacity() >= 16);
+ s.reserve_exact(16);
+ assert!(s.capacity() >= 32);
+
+ s.push('0');
+
+ s.reserve_exact(16);
+ assert!(s.capacity() >= 33)
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
+#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
+fn test_try_reserve() {
+ // These are the interesting cases:
+ // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
+ // * > isize::MAX should always fail
+ // * On 16/32-bit should CapacityOverflow
+ // * On 64-bit should OOM
+ // * overflow may trigger when adding `len` to `cap` (in number of elements)
+ // * overflow may trigger when multiplying `new_cap` by size_of::<T> (to get bytes)
+
+ const MAX_CAP: usize = isize::MAX as usize;
+ const MAX_USIZE: usize = usize::MAX;
+
+ // On 16/32-bit, we check that allocations don't exceed isize::MAX,
+ // on 64-bit, we assume the OS will give an OOM for such a ridiculous size.
+ // Any platform that succeeds for these requests is technically broken with
+ // ptr::offset because LLVM is the worst.
+ let guards_against_isize = size_of::<usize>() < 8;
+
+ {
+ // Note: basic stuff is checked by test_reserve
+ let mut empty_string: String = String::new();
+
+ // Check isize::MAX doesn't count as an overflow
+ if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ // Play it again, frank! (just to be sure)
+ if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+
+ if guards_against_isize {
+ // Check isize::MAX + 1 does count as overflow
+ if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_CAP + 1) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!")
+ }
+
+ // Check usize::MAX does count as overflow
+ if let Err(CapacityOverflow) = empty_string.try_reserve(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ } else {
+ // Check isize::MAX + 1 is an OOM
+ if let Err(AllocError { .. }) = empty_string.try_reserve(MAX_CAP + 1) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+
+ // Check usize::MAX is an OOM
+ if let Err(AllocError { .. }) = empty_string.try_reserve(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an OOM!")
+ }
+ }
+ }
+
+ {
+ // Same basic idea, but with non-zero len
+ let mut ten_bytes: String = String::from("0123456789");
+
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if guards_against_isize {
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!");
+ }
+ } else {
+ if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+ }
+ // Should always overflow in the add-to-len
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ }
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
+#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
+fn test_try_reserve_exact() {
+ // This is exactly the same as test_try_reserve with the method changed.
+ // See that test for comments.
+
+ const MAX_CAP: usize = isize::MAX as usize;
+ const MAX_USIZE: usize = usize::MAX;
+
+ let guards_against_isize = size_of::<usize>() < 8;
+
+ {
+ let mut empty_string: String = String::new();
+
+ if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+
+ if guards_against_isize {
+ if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_CAP + 1) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!")
+ }
+
+ if let Err(CapacityOverflow) = empty_string.try_reserve_exact(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ } else {
+ if let Err(AllocError { .. }) = empty_string.try_reserve_exact(MAX_CAP + 1) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+
+ if let Err(AllocError { .. }) = empty_string.try_reserve_exact(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an OOM!")
+ }
+ }
+ }
+
+ {
+ let mut ten_bytes: String = String::from("0123456789");
+
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if guards_against_isize {
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!");
+ }
+ } else {
+ if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+ }
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ }
+}
+
+#[test]
+fn test_from_char() {
+ assert_eq!(String::from('a'), 'a'.to_string());
+ let s: String = 'x'.into();
+ assert_eq!(s, 'x'.to_string());
+}
diff --git a/library/alloc/tests/vec.rs b/library/alloc/tests/vec.rs
new file mode 100644
index 00000000000..ffff543b07f
--- /dev/null
+++ b/library/alloc/tests/vec.rs
@@ -0,0 +1,1629 @@
+use std::borrow::Cow;
+use std::collections::TryReserveError::*;
+use std::fmt::Debug;
+use std::mem::size_of;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+use std::vec::{Drain, IntoIter};
+
+struct DropCounter<'a> {
+ count: &'a mut u32,
+}
+
+impl Drop for DropCounter<'_> {
+ fn drop(&mut self) {
+ *self.count += 1;
+ }
+}
+
+#[test]
+fn test_small_vec_struct() {
+ assert_eq!(size_of::<Vec<u8>>(), size_of::<usize>() * 3);
+}
+
+#[test]
+fn test_double_drop() {
+ struct TwoVec<T> {
+ x: Vec<T>,
+ y: Vec<T>,
+ }
+
+ let (mut count_x, mut count_y) = (0, 0);
+ {
+ let mut tv = TwoVec { x: Vec::new(), y: Vec::new() };
+ tv.x.push(DropCounter { count: &mut count_x });
+ tv.y.push(DropCounter { count: &mut count_y });
+
+ // If Vec had a drop flag, here is where it would be zeroed.
+ // Instead, it should rely on its internal state to prevent
+ // doing anything significant when dropped multiple times.
+ drop(tv.x);
+
+ // Here tv goes out of scope, tv.y should be dropped, but not tv.x.
+ }
+
+ assert_eq!(count_x, 1);
+ assert_eq!(count_y, 1);
+}
+
+#[test]
+fn test_reserve() {
+ let mut v = Vec::new();
+ assert_eq!(v.capacity(), 0);
+
+ v.reserve(2);
+ assert!(v.capacity() >= 2);
+
+ for i in 0..16 {
+ v.push(i);
+ }
+
+ assert!(v.capacity() >= 16);
+ v.reserve(16);
+ assert!(v.capacity() >= 32);
+
+ v.push(16);
+
+ v.reserve(16);
+ assert!(v.capacity() >= 33)
+}
+
+#[test]
+fn test_zst_capacity() {
+ assert_eq!(Vec::<()>::new().capacity(), usize::MAX);
+}
+
+#[test]
+fn test_extend() {
+ let mut v = Vec::new();
+ let mut w = Vec::new();
+
+ v.extend(w.clone());
+ assert_eq!(v, &[]);
+
+ v.extend(0..3);
+ for i in 0..3 {
+ w.push(i)
+ }
+
+ assert_eq!(v, w);
+
+ v.extend(3..10);
+ for i in 3..10 {
+ w.push(i)
+ }
+
+ assert_eq!(v, w);
+
+ v.extend(w.clone()); // specializes to `append`
+ assert!(v.iter().eq(w.iter().chain(w.iter())));
+
+ // Zero sized types
+ #[derive(PartialEq, Debug)]
+ struct Foo;
+
+ let mut a = Vec::new();
+ let b = vec![Foo, Foo];
+
+ a.extend(b);
+ assert_eq!(a, &[Foo, Foo]);
+
+ // Double drop
+ let mut count_x = 0;
+ {
+ let mut x = Vec::new();
+ let y = vec![DropCounter { count: &mut count_x }];
+ x.extend(y);
+ }
+ assert_eq!(count_x, 1);
+}
+
+#[test]
+fn test_extend_ref() {
+ let mut v = vec![1, 2];
+ v.extend(&[3, 4, 5]);
+
+ assert_eq!(v.len(), 5);
+ assert_eq!(v, [1, 2, 3, 4, 5]);
+
+ let w = vec![6, 7];
+ v.extend(&w);
+
+ assert_eq!(v.len(), 7);
+ assert_eq!(v, [1, 2, 3, 4, 5, 6, 7]);
+}
+
+#[test]
+fn test_slice_from_mut() {
+ let mut values = vec![1, 2, 3, 4, 5];
+ {
+ let slice = &mut values[2..];
+ assert!(slice == [3, 4, 5]);
+ for p in slice {
+ *p += 2;
+ }
+ }
+
+ assert!(values == [1, 2, 5, 6, 7]);
+}
+
+#[test]
+fn test_slice_to_mut() {
+ let mut values = vec![1, 2, 3, 4, 5];
+ {
+ let slice = &mut values[..2];
+ assert!(slice == [1, 2]);
+ for p in slice {
+ *p += 1;
+ }
+ }
+
+ assert!(values == [2, 3, 3, 4, 5]);
+}
+
+#[test]
+fn test_split_at_mut() {
+ let mut values = vec![1, 2, 3, 4, 5];
+ {
+ let (left, right) = values.split_at_mut(2);
+ {
+ let left: &[_] = left;
+ assert!(&left[..left.len()] == &[1, 2]);
+ }
+ for p in left {
+ *p += 1;
+ }
+
+ {
+ let right: &[_] = right;
+ assert!(&right[..right.len()] == &[3, 4, 5]);
+ }
+ for p in right {
+ *p += 2;
+ }
+ }
+
+ assert_eq!(values, [2, 3, 5, 6, 7]);
+}
+
+#[test]
+fn test_clone() {
+ let v: Vec<i32> = vec![];
+ let w = vec![1, 2, 3];
+
+ assert_eq!(v, v.clone());
+
+ let z = w.clone();
+ assert_eq!(w, z);
+ // they should be disjoint in memory.
+ assert!(w.as_ptr() != z.as_ptr())
+}
+
+#[test]
+fn test_clone_from() {
+ let mut v = vec![];
+ let three: Vec<Box<_>> = vec![box 1, box 2, box 3];
+ let two: Vec<Box<_>> = vec![box 4, box 5];
+ // zero, long
+ v.clone_from(&three);
+ assert_eq!(v, three);
+
+ // equal
+ v.clone_from(&three);
+ assert_eq!(v, three);
+
+ // long, short
+ v.clone_from(&two);
+ assert_eq!(v, two);
+
+ // short, long
+ v.clone_from(&three);
+ assert_eq!(v, three)
+}
+
+#[test]
+fn test_retain() {
+ let mut vec = vec![1, 2, 3, 4];
+ vec.retain(|&x| x % 2 == 0);
+ assert_eq!(vec, [2, 4]);
+}
+
+#[test]
+fn test_dedup() {
+ fn case(a: Vec<i32>, b: Vec<i32>) {
+ let mut v = a;
+ v.dedup();
+ assert_eq!(v, b);
+ }
+ case(vec![], vec![]);
+ case(vec![1], vec![1]);
+ case(vec![1, 1], vec![1]);
+ case(vec![1, 2, 3], vec![1, 2, 3]);
+ case(vec![1, 1, 2, 3], vec![1, 2, 3]);
+ case(vec![1, 2, 2, 3], vec![1, 2, 3]);
+ case(vec![1, 2, 3, 3], vec![1, 2, 3]);
+ case(vec![1, 1, 2, 2, 2, 3, 3], vec![1, 2, 3]);
+}
+
+#[test]
+fn test_dedup_by_key() {
+ fn case(a: Vec<i32>, b: Vec<i32>) {
+ let mut v = a;
+ v.dedup_by_key(|i| *i / 10);
+ assert_eq!(v, b);
+ }
+ case(vec![], vec![]);
+ case(vec![10], vec![10]);
+ case(vec![10, 11], vec![10]);
+ case(vec![10, 20, 30], vec![10, 20, 30]);
+ case(vec![10, 11, 20, 30], vec![10, 20, 30]);
+ case(vec![10, 20, 21, 30], vec![10, 20, 30]);
+ case(vec![10, 20, 30, 31], vec![10, 20, 30]);
+ case(vec![10, 11, 20, 21, 22, 30, 31], vec![10, 20, 30]);
+}
+
+#[test]
+fn test_dedup_by() {
+ let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"];
+ vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
+
+ assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
+
+ let mut vec = vec![("foo", 1), ("foo", 2), ("bar", 3), ("bar", 4), ("bar", 5)];
+ vec.dedup_by(|a, b| {
+ a.0 == b.0 && {
+ b.1 += a.1;
+ true
+ }
+ });
+
+ assert_eq!(vec, [("foo", 3), ("bar", 12)]);
+}
+
+#[test]
+fn test_dedup_unique() {
+ let mut v0: Vec<Box<_>> = vec![box 1, box 1, box 2, box 3];
+ v0.dedup();
+ let mut v1: Vec<Box<_>> = vec![box 1, box 2, box 2, box 3];
+ v1.dedup();
+ let mut v2: Vec<Box<_>> = vec![box 1, box 2, box 3, box 3];
+ v2.dedup();
+ // If the boxed pointers were leaked or otherwise misused, valgrind
+ // and/or rt should raise errors.
+}
+
+#[test]
+fn zero_sized_values() {
+ let mut v = Vec::new();
+ assert_eq!(v.len(), 0);
+ v.push(());
+ assert_eq!(v.len(), 1);
+ v.push(());
+ assert_eq!(v.len(), 2);
+ assert_eq!(v.pop(), Some(()));
+ assert_eq!(v.pop(), Some(()));
+ assert_eq!(v.pop(), None);
+
+ assert_eq!(v.iter().count(), 0);
+ v.push(());
+ assert_eq!(v.iter().count(), 1);
+ v.push(());
+ assert_eq!(v.iter().count(), 2);
+
+ for &() in &v {}
+
+ assert_eq!(v.iter_mut().count(), 2);
+ v.push(());
+ assert_eq!(v.iter_mut().count(), 3);
+ v.push(());
+ assert_eq!(v.iter_mut().count(), 4);
+
+ for &mut () in &mut v {}
+ unsafe {
+ v.set_len(0);
+ }
+ assert_eq!(v.iter_mut().count(), 0);
+}
+
+#[test]
+fn test_partition() {
+ assert_eq!(vec![].into_iter().partition(|x: &i32| *x < 3), (vec![], vec![]));
+ assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 4), (vec![1, 2, 3], vec![]));
+ assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 2), (vec![1], vec![2, 3]));
+ assert_eq!(vec![1, 2, 3].into_iter().partition(|x| *x < 0), (vec![], vec![1, 2, 3]));
+}
+
+#[test]
+fn test_zip_unzip() {
+ let z1 = vec![(1, 4), (2, 5), (3, 6)];
+
+ let (left, right): (Vec<_>, Vec<_>) = z1.iter().cloned().unzip();
+
+ assert_eq!((1, 4), (left[0], right[0]));
+ assert_eq!((2, 5), (left[1], right[1]));
+ assert_eq!((3, 6), (left[2], right[2]));
+}
+
+#[test]
+fn test_vec_truncate_drop() {
+ static mut DROPS: u32 = 0;
+ struct Elem(i32);
+ impl Drop for Elem {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)];
+ assert_eq!(unsafe { DROPS }, 0);
+ v.truncate(3);
+ assert_eq!(unsafe { DROPS }, 2);
+ v.truncate(0);
+ assert_eq!(unsafe { DROPS }, 5);
+}
+
+#[test]
+#[should_panic]
+fn test_vec_truncate_fail() {
+ struct BadElem(i32);
+ impl Drop for BadElem {
+ fn drop(&mut self) {
+ let BadElem(ref mut x) = *self;
+ if *x == 0xbadbeef {
+ panic!("BadElem panic: 0xbadbeef")
+ }
+ }
+ }
+
+ let mut v = vec![BadElem(1), BadElem(2), BadElem(0xbadbeef), BadElem(4)];
+ v.truncate(0);
+}
+
+#[test]
+fn test_index() {
+ let vec = vec![1, 2, 3];
+ assert!(vec[1] == 2);
+}
+
+#[test]
+#[should_panic]
+fn test_index_out_of_bounds() {
+ let vec = vec![1, 2, 3];
+ let _ = vec[3];
+}
+
+#[test]
+#[should_panic]
+fn test_slice_out_of_bounds_1() {
+ let x = vec![1, 2, 3, 4, 5];
+ &x[!0..];
+}
+
+#[test]
+#[should_panic]
+fn test_slice_out_of_bounds_2() {
+ let x = vec![1, 2, 3, 4, 5];
+ &x[..6];
+}
+
+#[test]
+#[should_panic]
+fn test_slice_out_of_bounds_3() {
+ let x = vec![1, 2, 3, 4, 5];
+ &x[!0..4];
+}
+
+#[test]
+#[should_panic]
+fn test_slice_out_of_bounds_4() {
+ let x = vec![1, 2, 3, 4, 5];
+ &x[1..6];
+}
+
+#[test]
+#[should_panic]
+fn test_slice_out_of_bounds_5() {
+ let x = vec![1, 2, 3, 4, 5];
+ &x[3..2];
+}
+
+#[test]
+#[should_panic]
+fn test_swap_remove_empty() {
+ let mut vec = Vec::<i32>::new();
+ vec.swap_remove(0);
+}
+
+#[test]
+fn test_move_items() {
+ let vec = vec![1, 2, 3];
+ let mut vec2 = vec![];
+ for i in vec {
+ vec2.push(i);
+ }
+ assert_eq!(vec2, [1, 2, 3]);
+}
+
+#[test]
+fn test_move_items_reverse() {
+ let vec = vec![1, 2, 3];
+ let mut vec2 = vec![];
+ for i in vec.into_iter().rev() {
+ vec2.push(i);
+ }
+ assert_eq!(vec2, [3, 2, 1]);
+}
+
+#[test]
+fn test_move_items_zero_sized() {
+ let vec = vec![(), (), ()];
+ let mut vec2 = vec![];
+ for i in vec {
+ vec2.push(i);
+ }
+ assert_eq!(vec2, [(), (), ()]);
+}
+
+#[test]
+fn test_drain_items() {
+ let mut vec = vec![1, 2, 3];
+ let mut vec2 = vec![];
+ for i in vec.drain(..) {
+ vec2.push(i);
+ }
+ assert_eq!(vec, []);
+ assert_eq!(vec2, [1, 2, 3]);
+}
+
+#[test]
+fn test_drain_items_reverse() {
+ let mut vec = vec![1, 2, 3];
+ let mut vec2 = vec![];
+ for i in vec.drain(..).rev() {
+ vec2.push(i);
+ }
+ assert_eq!(vec, []);
+ assert_eq!(vec2, [3, 2, 1]);
+}
+
+#[test]
+fn test_drain_items_zero_sized() {
+ let mut vec = vec![(), (), ()];
+ let mut vec2 = vec![];
+ for i in vec.drain(..) {
+ vec2.push(i);
+ }
+ assert_eq!(vec, []);
+ assert_eq!(vec2, [(), (), ()]);
+}
+
+#[test]
+#[should_panic]
+fn test_drain_out_of_bounds() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ v.drain(5..6);
+}
+
+#[test]
+fn test_drain_range() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ for _ in v.drain(4..) {}
+ assert_eq!(v, &[1, 2, 3, 4]);
+
+ let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect();
+ for _ in v.drain(1..4) {}
+ assert_eq!(v, &[1.to_string(), 5.to_string()]);
+
+ let mut v: Vec<_> = (1..6).map(|x| x.to_string()).collect();
+ for _ in v.drain(1..4).rev() {}
+ assert_eq!(v, &[1.to_string(), 5.to_string()]);
+
+ let mut v: Vec<_> = vec![(); 5];
+ for _ in v.drain(1..4).rev() {}
+ assert_eq!(v, &[(), ()]);
+}
+
+#[test]
+fn test_drain_inclusive_range() {
+ let mut v = vec!['a', 'b', 'c', 'd', 'e'];
+ for _ in v.drain(1..=3) {}
+ assert_eq!(v, &['a', 'e']);
+
+ let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect();
+ for _ in v.drain(1..=5) {}
+ assert_eq!(v, &["0".to_string()]);
+
+ let mut v: Vec<String> = (0..=5).map(|x| x.to_string()).collect();
+ for _ in v.drain(0..=5) {}
+ assert_eq!(v, Vec::<String>::new());
+
+ let mut v: Vec<_> = (0..=5).map(|x| x.to_string()).collect();
+ for _ in v.drain(0..=3) {}
+ assert_eq!(v, &["4".to_string(), "5".to_string()]);
+
+ let mut v: Vec<_> = (0..=1).map(|x| x.to_string()).collect();
+ for _ in v.drain(..=0) {}
+ assert_eq!(v, &["1".to_string()]);
+}
+
+#[test]
+fn test_drain_max_vec_size() {
+ let mut v = Vec::<()>::with_capacity(usize::MAX);
+ unsafe {
+ v.set_len(usize::MAX);
+ }
+ for _ in v.drain(usize::MAX - 1..) {}
+ assert_eq!(v.len(), usize::MAX - 1);
+
+ let mut v = Vec::<()>::with_capacity(usize::MAX);
+ unsafe {
+ v.set_len(usize::MAX);
+ }
+ for _ in v.drain(usize::MAX - 1..=usize::MAX - 1) {}
+ assert_eq!(v.len(), usize::MAX - 1);
+}
+
+#[test]
+#[should_panic]
+fn test_drain_inclusive_out_of_bounds() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ v.drain(5..=5);
+}
+
+#[test]
+fn test_drain_leak() {
+ static mut DROPS: i32 = 0;
+
+ #[derive(Debug, PartialEq)]
+ struct D(u32, bool);
+
+ impl Drop for D {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+
+ if self.1 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ let mut v = vec![
+ D(0, false),
+ D(1, false),
+ D(2, false),
+ D(3, false),
+ D(4, true),
+ D(5, false),
+ D(6, false),
+ ];
+
+ catch_unwind(AssertUnwindSafe(|| {
+ v.drain(2..=5);
+ }))
+ .ok();
+
+ assert_eq!(unsafe { DROPS }, 4);
+ assert_eq!(v, vec![D(0, false), D(1, false), D(6, false),]);
+}
+
+#[test]
+fn test_splice() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ v.splice(2..4, a.iter().cloned());
+ assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
+ v.splice(1..3, Some(20));
+ assert_eq!(v, &[1, 20, 11, 12, 5]);
+}
+
+#[test]
+fn test_splice_inclusive_range() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ let t1: Vec<_> = v.splice(2..=3, a.iter().cloned()).collect();
+ assert_eq!(v, &[1, 2, 10, 11, 12, 5]);
+ assert_eq!(t1, &[3, 4]);
+ let t2: Vec<_> = v.splice(1..=2, Some(20)).collect();
+ assert_eq!(v, &[1, 20, 11, 12, 5]);
+ assert_eq!(t2, &[2, 10]);
+}
+
+#[test]
+#[should_panic]
+fn test_splice_out_of_bounds() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ v.splice(5..6, a.iter().cloned());
+}
+
+#[test]
+#[should_panic]
+fn test_splice_inclusive_out_of_bounds() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ v.splice(5..=5, a.iter().cloned());
+}
+
+#[test]
+fn test_splice_items_zero_sized() {
+ let mut vec = vec![(), (), ()];
+ let vec2 = vec![];
+ let t: Vec<_> = vec.splice(1..2, vec2.iter().cloned()).collect();
+ assert_eq!(vec, &[(), ()]);
+ assert_eq!(t, &[()]);
+}
+
+#[test]
+fn test_splice_unbounded() {
+ let mut vec = vec![1, 2, 3, 4, 5];
+ let t: Vec<_> = vec.splice(.., None).collect();
+ assert_eq!(vec, &[]);
+ assert_eq!(t, &[1, 2, 3, 4, 5]);
+}
+
+#[test]
+fn test_splice_forget() {
+ let mut v = vec![1, 2, 3, 4, 5];
+ let a = [10, 11, 12];
+ std::mem::forget(v.splice(2..4, a.iter().cloned()));
+ assert_eq!(v, &[1, 2]);
+}
+
+#[test]
+fn test_into_boxed_slice() {
+ let xs = vec![1, 2, 3];
+ let ys = xs.into_boxed_slice();
+ assert_eq!(&*ys, [1, 2, 3]);
+}
+
+#[test]
+fn test_append() {
+ let mut vec = vec![1, 2, 3];
+ let mut vec2 = vec![4, 5, 6];
+ vec.append(&mut vec2);
+ assert_eq!(vec, [1, 2, 3, 4, 5, 6]);
+ assert_eq!(vec2, []);
+}
+
+#[test]
+fn test_split_off() {
+ let mut vec = vec![1, 2, 3, 4, 5, 6];
+ let vec2 = vec.split_off(4);
+ assert_eq!(vec, [1, 2, 3, 4]);
+ assert_eq!(vec2, [5, 6]);
+}
+
+#[test]
+fn test_into_iter_as_slice() {
+ let vec = vec!['a', 'b', 'c'];
+ let mut into_iter = vec.into_iter();
+ assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+ let _ = into_iter.next().unwrap();
+ assert_eq!(into_iter.as_slice(), &['b', 'c']);
+ let _ = into_iter.next().unwrap();
+ let _ = into_iter.next().unwrap();
+ assert_eq!(into_iter.as_slice(), &[]);
+}
+
+#[test]
+fn test_into_iter_as_mut_slice() {
+ let vec = vec!['a', 'b', 'c'];
+ let mut into_iter = vec.into_iter();
+ assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
+ into_iter.as_mut_slice()[0] = 'x';
+ into_iter.as_mut_slice()[1] = 'y';
+ assert_eq!(into_iter.next().unwrap(), 'x');
+ assert_eq!(into_iter.as_slice(), &['y', 'c']);
+}
+
+#[test]
+fn test_into_iter_debug() {
+ let vec = vec!['a', 'b', 'c'];
+ let into_iter = vec.into_iter();
+ let debug = format!("{:?}", into_iter);
+ assert_eq!(debug, "IntoIter(['a', 'b', 'c'])");
+}
+
+#[test]
+fn test_into_iter_count() {
+ assert_eq!(vec![1, 2, 3].into_iter().count(), 3);
+}
+
+#[test]
+fn test_into_iter_clone() {
+ fn iter_equal<I: Iterator<Item = i32>>(it: I, slice: &[i32]) {
+ let v: Vec<i32> = it.collect();
+ assert_eq!(&v[..], slice);
+ }
+ let mut it = vec![1, 2, 3].into_iter();
+ iter_equal(it.clone(), &[1, 2, 3]);
+ assert_eq!(it.next(), Some(1));
+ let mut it = it.rev();
+ iter_equal(it.clone(), &[3, 2]);
+ assert_eq!(it.next(), Some(3));
+ iter_equal(it.clone(), &[2]);
+ assert_eq!(it.next(), Some(2));
+ iter_equal(it.clone(), &[]);
+ assert_eq!(it.next(), None);
+}
+
+#[test]
+fn test_into_iter_leak() {
+ static mut DROPS: i32 = 0;
+
+ struct D(bool);
+
+ impl Drop for D {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+
+ if self.0 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ let v = vec![D(false), D(true), D(false)];
+
+ catch_unwind(move || drop(v.into_iter())).ok();
+
+ assert_eq!(unsafe { DROPS }, 3);
+}
+
+#[test]
+fn test_cow_from() {
+ let borrowed: &[_] = &["borrowed", "(slice)"];
+ let owned = vec!["owned", "(vec)"];
+ match (Cow::from(owned.clone()), Cow::from(borrowed)) {
+ (Cow::Owned(o), Cow::Borrowed(b)) => assert!(o == owned && b == borrowed),
+ _ => panic!("invalid `Cow::from`"),
+ }
+}
+
+#[test]
+fn test_from_cow() {
+ let borrowed: &[_] = &["borrowed", "(slice)"];
+ let owned = vec!["owned", "(vec)"];
+ assert_eq!(Vec::from(Cow::Borrowed(borrowed)), vec!["borrowed", "(slice)"]);
+ assert_eq!(Vec::from(Cow::Owned(owned)), vec!["owned", "(vec)"]);
+}
+
+#[allow(dead_code)]
+fn assert_covariance() {
+ fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {
+ d
+ }
+ fn into_iter<'new>(i: IntoIter<&'static str>) -> IntoIter<&'new str> {
+ i
+ }
+}
+
+#[test]
+fn from_into_inner() {
+ let vec = vec![1, 2, 3];
+ let ptr = vec.as_ptr();
+ let vec = vec.into_iter().collect::<Vec<_>>();
+ assert_eq!(vec, [1, 2, 3]);
+ assert_eq!(vec.as_ptr(), ptr);
+
+ let ptr = &vec[1] as *const _;
+ let mut it = vec.into_iter();
+ it.next().unwrap();
+ let vec = it.collect::<Vec<_>>();
+ assert_eq!(vec, [2, 3]);
+ assert!(ptr != vec.as_ptr());
+}
+
+#[test]
+fn overaligned_allocations() {
+ #[repr(align(256))]
+ struct Foo(usize);
+ let mut v = vec![Foo(273)];
+ for i in 0..0x1000 {
+ v.reserve_exact(i);
+ assert!(v[0].0 == 273);
+ assert!(v.as_ptr() as usize & 0xff == 0);
+ v.shrink_to_fit();
+ assert!(v[0].0 == 273);
+ assert!(v.as_ptr() as usize & 0xff == 0);
+ }
+}
+
+#[test]
+fn drain_filter_empty() {
+ let mut vec: Vec<i32> = vec![];
+
+ {
+ let mut iter = vec.drain_filter(|_| true);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+ assert_eq!(vec.len(), 0);
+ assert_eq!(vec, vec![]);
+}
+
+#[test]
+fn drain_filter_zst() {
+ let mut vec = vec![(), (), (), (), ()];
+ let initial_len = vec.len();
+ let mut count = 0;
+ {
+ let mut iter = vec.drain_filter(|_| true);
+ assert_eq!(iter.size_hint(), (0, Some(initial_len)));
+ while let Some(_) = iter.next() {
+ count += 1;
+ assert_eq!(iter.size_hint(), (0, Some(initial_len - count)));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ assert_eq!(count, initial_len);
+ assert_eq!(vec.len(), 0);
+ assert_eq!(vec, vec![]);
+}
+
+#[test]
+fn drain_filter_false() {
+ let mut vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+ let initial_len = vec.len();
+ let mut count = 0;
+ {
+ let mut iter = vec.drain_filter(|_| false);
+ assert_eq!(iter.size_hint(), (0, Some(initial_len)));
+ for _ in iter.by_ref() {
+ count += 1;
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ assert_eq!(count, 0);
+ assert_eq!(vec.len(), initial_len);
+ assert_eq!(vec, vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]);
+}
+
+#[test]
+fn drain_filter_true() {
+ let mut vec = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+ let initial_len = vec.len();
+ let mut count = 0;
+ {
+ let mut iter = vec.drain_filter(|_| true);
+ assert_eq!(iter.size_hint(), (0, Some(initial_len)));
+ while let Some(_) = iter.next() {
+ count += 1;
+ assert_eq!(iter.size_hint(), (0, Some(initial_len - count)));
+ }
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ assert_eq!(count, initial_len);
+ assert_eq!(vec.len(), 0);
+ assert_eq!(vec, vec![]);
+}
+
+#[test]
+fn drain_filter_complex() {
+ {
+ // [+xxx++++++xxxxx++++x+x++]
+ let mut vec = vec![
+ 1, 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37,
+ 39,
+ ];
+
+ let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
+
+ assert_eq!(vec.len(), 14);
+ assert_eq!(vec, vec![1, 7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]);
+ }
+
+ {
+ // [xxx++++++xxxxx++++x+x++]
+ let mut vec = vec![
+ 2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39,
+ ];
+
+ let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
+
+ assert_eq!(vec.len(), 13);
+ assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35, 37, 39]);
+ }
+
+ {
+ // [xxx++++++xxxxx++++x+x]
+ let mut vec =
+ vec![2, 4, 6, 7, 9, 11, 13, 15, 17, 18, 20, 22, 24, 26, 27, 29, 31, 33, 34, 35, 36];
+
+ let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 18, 20, 22, 24, 26, 34, 36]);
+
+ assert_eq!(vec.len(), 11);
+ assert_eq!(vec, vec![7, 9, 11, 13, 15, 17, 27, 29, 31, 33, 35]);
+ }
+
+ {
+ // [xxxxxxxxxx+++++++++++]
+ let mut vec = vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 1, 3, 5, 7, 9, 11, 13, 15, 17, 19];
+
+ let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
+
+ assert_eq!(vec.len(), 10);
+ assert_eq!(vec, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
+ }
+
+ {
+ // [+++++++++++xxxxxxxxxx]
+ let mut vec = vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20];
+
+ let removed = vec.drain_filter(|x| *x % 2 == 0).collect::<Vec<_>>();
+ assert_eq!(removed.len(), 10);
+ assert_eq!(removed, vec![2, 4, 6, 8, 10, 12, 14, 16, 18, 20]);
+
+ assert_eq!(vec.len(), 10);
+ assert_eq!(vec, vec![1, 3, 5, 7, 9, 11, 13, 15, 17, 19]);
+ }
+}
+
+// FIXME: re-enable emscripten once it can unwind again
+#[test]
+#[cfg(not(target_os = "emscripten"))]
+fn drain_filter_consumed_panic() {
+ use std::rc::Rc;
+ use std::sync::Mutex;
+
+ struct Check {
+ index: usize,
+ drop_counts: Rc<Mutex<Vec<usize>>>,
+ };
+
+ impl Drop for Check {
+ fn drop(&mut self) {
+ self.drop_counts.lock().unwrap()[self.index] += 1;
+ println!("drop: {}", self.index);
+ }
+ }
+
+ let check_count = 10;
+ let drop_counts = Rc::new(Mutex::new(vec![0_usize; check_count]));
+ let mut data: Vec<Check> = (0..check_count)
+ .map(|index| Check { index, drop_counts: Rc::clone(&drop_counts) })
+ .collect();
+
+ let _ = std::panic::catch_unwind(move || {
+ let filter = |c: &mut Check| {
+ if c.index == 2 {
+ panic!("panic at index: {}", c.index);
+ }
+ // Verify that if the filter could panic again on another element
+ // that it would not cause a double panic and all elements of the
+ // vec would still be dropped exactly once.
+ if c.index == 4 {
+ panic!("panic at index: {}", c.index);
+ }
+ c.index < 6
+ };
+ let drain = data.drain_filter(filter);
+
+ // NOTE: The DrainFilter is explicitly consumed
+ drain.for_each(drop);
+ });
+
+ let drop_counts = drop_counts.lock().unwrap();
+ assert_eq!(check_count, drop_counts.len());
+
+ for (index, count) in drop_counts.iter().cloned().enumerate() {
+ assert_eq!(1, count, "unexpected drop count at index: {} (count: {})", index, count);
+ }
+}
+
+// FIXME: Re-enable emscripten once it can catch panics
+#[test]
+#[cfg(not(target_os = "emscripten"))]
+fn drain_filter_unconsumed_panic() {
+ use std::rc::Rc;
+ use std::sync::Mutex;
+
+ struct Check {
+ index: usize,
+ drop_counts: Rc<Mutex<Vec<usize>>>,
+ };
+
+ impl Drop for Check {
+ fn drop(&mut self) {
+ self.drop_counts.lock().unwrap()[self.index] += 1;
+ println!("drop: {}", self.index);
+ }
+ }
+
+ let check_count = 10;
+ let drop_counts = Rc::new(Mutex::new(vec![0_usize; check_count]));
+ let mut data: Vec<Check> = (0..check_count)
+ .map(|index| Check { index, drop_counts: Rc::clone(&drop_counts) })
+ .collect();
+
+ let _ = std::panic::catch_unwind(move || {
+ let filter = |c: &mut Check| {
+ if c.index == 2 {
+ panic!("panic at index: {}", c.index);
+ }
+ // Verify that if the filter could panic again on another element
+ // that it would not cause a double panic and all elements of the
+ // vec would still be dropped exactly once.
+ if c.index == 4 {
+ panic!("panic at index: {}", c.index);
+ }
+ c.index < 6
+ };
+ let _drain = data.drain_filter(filter);
+
+ // NOTE: The DrainFilter is dropped without being consumed
+ });
+
+ let drop_counts = drop_counts.lock().unwrap();
+ assert_eq!(check_count, drop_counts.len());
+
+ for (index, count) in drop_counts.iter().cloned().enumerate() {
+ assert_eq!(1, count, "unexpected drop count at index: {} (count: {})", index, count);
+ }
+}
+
+#[test]
+fn drain_filter_unconsumed() {
+ let mut vec = vec![1, 2, 3, 4];
+ let drain = vec.drain_filter(|&mut x| x % 2 != 0);
+ drop(drain);
+ assert_eq!(vec, [2, 4]);
+}
+
+#[test]
+fn test_reserve_exact() {
+ // This is all the same as test_reserve
+
+ let mut v = Vec::new();
+ assert_eq!(v.capacity(), 0);
+
+ v.reserve_exact(2);
+ assert!(v.capacity() >= 2);
+
+ for i in 0..16 {
+ v.push(i);
+ }
+
+ assert!(v.capacity() >= 16);
+ v.reserve_exact(16);
+ assert!(v.capacity() >= 32);
+
+ v.push(16);
+
+ v.reserve_exact(16);
+ assert!(v.capacity() >= 33)
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
+#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
+fn test_try_reserve() {
+ // These are the interesting cases:
+ // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
+ // * > isize::MAX should always fail
+ // * On 16/32-bit should CapacityOverflow
+ // * On 64-bit should OOM
+ // * overflow may trigger when adding `len` to `cap` (in number of elements)
+ // * overflow may trigger when multiplying `new_cap` by size_of::<T> (to get bytes)
+
+ const MAX_CAP: usize = isize::MAX as usize;
+ const MAX_USIZE: usize = usize::MAX;
+
+ // On 16/32-bit, we check that allocations don't exceed isize::MAX,
+ // on 64-bit, we assume the OS will give an OOM for such a ridiculous size.
+ // Any platform that succeeds for these requests is technically broken with
+ // ptr::offset because LLVM is the worst.
+ let guards_against_isize = size_of::<usize>() < 8;
+
+ {
+ // Note: basic stuff is checked by test_reserve
+ let mut empty_bytes: Vec<u8> = Vec::new();
+
+ // Check isize::MAX doesn't count as an overflow
+ if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ // Play it again, Sam! (reserve a second time to check the result is stable)
+ if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+
+ // The `if let Err(..) = .. {} else { panic! }` shape below is used
+ // because only the error *variant* matters, not any payload.
+ if guards_against_isize {
+ // Check isize::MAX + 1 does count as overflow
+ if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!")
+ }
+
+ // Check usize::MAX does count as overflow
+ if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ } else {
+ // Check isize::MAX + 1 is an OOM
+ if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_CAP + 1) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+
+ // Check usize::MAX is an OOM
+ if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an OOM!")
+ }
+ }
+ }
+
+ {
+ // Same basic idea, but with non-zero len
+ let mut ten_bytes: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+ // MAX_CAP - 10 additional elements on top of len 10 lands exactly on
+ // isize::MAX, which must not report CapacityOverflow.
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if guards_against_isize {
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!");
+ }
+ } else {
+ if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+ }
+ // Should always overflow in the add-to-len
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ }
+
+ {
+ // Same basic idea, but with interesting type size
+ // (4-byte elements, so requests are scaled down by 4 to hit the same
+ // byte-count boundaries as the u8 cases above).
+ let mut ten_u32s: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+ if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if guards_against_isize {
+ if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!");
+ }
+ } else {
+ if let Err(AllocError { .. }) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+ }
+ // Should fail in the mul-by-size
+ if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!");
+ }
+ }
+}
+
+#[test]
+#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
+#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
+fn test_try_reserve_exact() {
+ // This is exactly the same as test_try_reserve with the method changed.
+ // See that test for comments.
+
+ const MAX_CAP: usize = isize::MAX as usize;
+ const MAX_USIZE: usize = usize::MAX;
+
+ let guards_against_isize = size_of::<usize>() < 8;
+
+ {
+ let mut empty_bytes: Vec<u8> = Vec::new();
+
+ if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+
+ if guards_against_isize {
+ if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!")
+ }
+
+ if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ } else {
+ if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+
+ if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an OOM!")
+ }
+ }
+ }
+
+ {
+ let mut ten_bytes: Vec<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if guards_against_isize {
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!");
+ }
+ } else {
+ if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+ }
+ if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ }
+
+ {
+ let mut ten_u32s: Vec<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
+
+ if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
+ panic!("isize::MAX shouldn't trigger an overflow!");
+ }
+ if guards_against_isize {
+ if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an overflow!");
+ }
+ } else {
+ if let Err(AllocError { .. }) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
+ } else {
+ panic!("isize::MAX + 1 should trigger an OOM!")
+ }
+ }
+ if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) {
+ } else {
+ panic!("usize::MAX should trigger an overflow!")
+ }
+ }
+}
+
+#[test]
+fn test_stable_pointers() {
+ /// Pull an element from the iterator, then drop it.
+ /// Useful to cover both the `next` and `drop` paths of an iterator.
+ fn next_then_drop<I: Iterator>(mut i: I) {
+ i.next().unwrap();
+ drop(i);
+ }
+
+ // Test that, if we reserved enough space, adding and removing elements does not
+ // invalidate references into the vector (such as `v0`). This test also
+ // runs in Miri, which would detect such problems.
+ let mut v = Vec::with_capacity(128);
+ v.push(13);
+
+ // Laundering the lifetime -- we take care that `v` does not reallocate, so that's okay.
+ let v0 = &mut v[0];
+ let v0 = unsafe { &mut *(v0 as *mut _) };
+ // Now do a bunch of things and occasionally use `v0` again to assert it is still valid.
+
+ // Pushing/inserting and popping/removing
+ v.push(1);
+ v.push(2);
+ v.insert(1, 1);
+ assert_eq!(*v0, 13);
+ v.remove(1);
+ v.pop().unwrap();
+ assert_eq!(*v0, 13);
+ v.push(1);
+ v.swap_remove(1);
+ assert_eq!(v.len(), 2);
+ v.swap_remove(1); // swap_remove the last element
+ assert_eq!(*v0, 13);
+
+ // Appending
+ v.append(&mut vec![27, 19]);
+ assert_eq!(*v0, 13);
+
+ // Extending
+ v.extend_from_slice(&[1, 2]);
+ v.extend(&[1, 2]); // `slice::Iter` (with `T: Copy`) specialization
+ v.extend(vec![2, 3]); // `vec::IntoIter` specialization
+ v.extend(std::iter::once(3)); // `TrustedLen` specialization
+ v.extend(std::iter::empty::<i32>()); // `TrustedLen` specialization with empty iterator
+ v.extend(std::iter::once(3).filter(|_| true)); // base case
+ v.extend(std::iter::once(&3)); // `cloned` specialization
+ assert_eq!(*v0, 13);
+
+ // Truncation
+ v.truncate(2);
+ assert_eq!(*v0, 13);
+
+ // Resizing
+ v.resize_with(v.len() + 10, || 42);
+ assert_eq!(*v0, 13);
+ v.resize_with(2, || panic!());
+ assert_eq!(*v0, 13);
+
+ // No-op reservation
+ v.reserve(32);
+ v.reserve_exact(32);
+ assert_eq!(*v0, 13);
+
+ // Partial draining
+ v.resize_with(10, || 42);
+ next_then_drop(v.drain(5..));
+ assert_eq!(*v0, 13);
+
+ // Splicing
+ v.resize_with(10, || 42);
+ next_then_drop(v.splice(5.., vec![1, 2, 3, 4, 5])); // empty tail after range
+ assert_eq!(*v0, 13);
+ next_then_drop(v.splice(5..8, vec![1])); // replacement is smaller than original range
+ assert_eq!(*v0, 13);
+ next_then_drop(v.splice(5..6, vec![1; 10].into_iter().filter(|_| true))); // lower bound not exact
+ assert_eq!(*v0, 13);
+
+ // Smoke test that would fire even outside Miri if an actual relocation happened.
+ *v0 -= 13;
+ assert_eq!(v[0], 0);
+}
+
+// https://github.com/rust-lang/rust/pull/49496 introduced specialization based on:
+//
+// ```
+// unsafe impl<T: ?Sized> IsZero for *mut T {
+// fn is_zero(&self) -> bool {
+// (*self).is_null()
+// }
+// }
+// ```
+//
+// … to call `RawVec::with_capacity_zeroed` for creating `Vec<*mut T>`,
+// which is incorrect for fat pointers since `<*mut T>::is_null` only looks at the data component.
+// That is, a fat pointer can be “null” without being made entirely of zero bits.
+#[test]
+fn vec_macro_repeating_null_raw_fat_pointer() {
+ // Build a fat pointer whose data component is null but whose vtable is not.
+ let raw_dyn = &mut (|| ()) as &mut dyn Fn() as *mut dyn Fn();
+ let vtable = dbg!(ptr_metadata(raw_dyn));
+ let null_raw_dyn = ptr_from_raw_parts(std::ptr::null_mut(), vtable);
+ assert!(null_raw_dyn.is_null());
+
+ // `vec![p; 1]` must preserve the vtable, not zero the whole repr.
+ let vec = vec![null_raw_dyn; 1];
+ dbg!(ptr_metadata(vec[0]));
+ assert!(vec[0] == null_raw_dyn);
+
+ // Polyfill for https://github.com/rust-lang/rfcs/pull/2580
+ // NOTE(review): these transmutes assume fat pointers are laid out as
+ // (data, vtable) exactly like `DynRepr` — an unstable implementation
+ // detail that the polyfill exists to paper over.
+
+ fn ptr_metadata(ptr: *mut dyn Fn()) -> *mut () {
+ unsafe { std::mem::transmute::<*mut dyn Fn(), DynRepr>(ptr).vtable }
+ }
+
+ fn ptr_from_raw_parts(data: *mut (), vtable: *mut ()) -> *mut dyn Fn() {
+ unsafe { std::mem::transmute::<DynRepr, *mut dyn Fn()>(DynRepr { data, vtable }) }
+ }
+
+ #[repr(C)]
+ struct DynRepr {
+ data: *mut (),
+ vtable: *mut (),
+ }
+}
+
+// This test will likely fail if you change the capacities used in
+// `RawVec::grow_amortized`.
+#[test]
+fn test_push_growth_strategy() {
+ // If the element size is 1, we jump from 0 to 8, then double.
+ {
+ let mut v1: Vec<u8> = vec![];
+ assert_eq!(v1.capacity(), 0);
+
+ for _ in 0..8 {
+ v1.push(0);
+ assert_eq!(v1.capacity(), 8);
+ }
+
+ for _ in 8..16 {
+ v1.push(0);
+ assert_eq!(v1.capacity(), 16);
+ }
+
+ for _ in 16..32 {
+ v1.push(0);
+ assert_eq!(v1.capacity(), 32);
+ }
+
+ for _ in 32..64 {
+ v1.push(0);
+ assert_eq!(v1.capacity(), 64);
+ }
+ }
+
+ // If the element size is 2..=1024, we jump from 0 to 4, then double.
+ {
+ let mut v2: Vec<u16> = vec![];
+ let mut v1024: Vec<[u8; 1024]> = vec![];
+ assert_eq!(v2.capacity(), 0);
+ assert_eq!(v1024.capacity(), 0);
+
+ for _ in 0..4 {
+ v2.push(0);
+ v1024.push([0; 1024]);
+ assert_eq!(v2.capacity(), 4);
+ assert_eq!(v1024.capacity(), 4);
+ }
+
+ for _ in 4..8 {
+ v2.push(0);
+ v1024.push([0; 1024]);
+ assert_eq!(v2.capacity(), 8);
+ assert_eq!(v1024.capacity(), 8);
+ }
+
+ for _ in 8..16 {
+ v2.push(0);
+ v1024.push([0; 1024]);
+ assert_eq!(v2.capacity(), 16);
+ assert_eq!(v1024.capacity(), 16);
+ }
+
+ for _ in 16..32 {
+ v2.push(0);
+ v1024.push([0; 1024]);
+ assert_eq!(v2.capacity(), 32);
+ assert_eq!(v1024.capacity(), 32);
+ }
+
+ for _ in 32..64 {
+ v2.push(0);
+ v1024.push([0; 1024]);
+ assert_eq!(v2.capacity(), 64);
+ assert_eq!(v1024.capacity(), 64);
+ }
+ }
+
+ // If the element size is > 1024, we jump from 0 to 1, then double.
+ {
+ let mut v1025: Vec<[u8; 1025]> = vec![];
+ assert_eq!(v1025.capacity(), 0);
+
+ for _ in 0..1 {
+ v1025.push([0; 1025]);
+ assert_eq!(v1025.capacity(), 1);
+ }
+
+ for _ in 1..2 {
+ v1025.push([0; 1025]);
+ assert_eq!(v1025.capacity(), 2);
+ }
+
+ for _ in 2..4 {
+ v1025.push([0; 1025]);
+ assert_eq!(v1025.capacity(), 4);
+ }
+
+ for _ in 4..8 {
+ v1025.push([0; 1025]);
+ assert_eq!(v1025.capacity(), 8);
+ }
+
+ for _ in 8..16 {
+ v1025.push([0; 1025]);
+ assert_eq!(v1025.capacity(), 16);
+ }
+
+ for _ in 16..32 {
+ v1025.push([0; 1025]);
+ assert_eq!(v1025.capacity(), 32);
+ }
+
+ for _ in 32..64 {
+ v1025.push([0; 1025]);
+ assert_eq!(v1025.capacity(), 64);
+ }
+ }
+}
+
+// Generates a helper that checks `Vec<A> == $type` through both the `==`
+// operator and `assert_eq!`, exercising `PartialEq<$type> for Vec<A>`.
+macro_rules! generate_assert_eq_vec_and_prim {
+ ($name:ident<$B:ident>($type:ty)) => {
+ fn $name<A: PartialEq<$B> + Debug, $B: Debug>(a: Vec<A>, b: $type) {
+ assert!(a == b);
+ assert_eq!(a, b);
+ }
+ };
+}
+
+generate_assert_eq_vec_and_prim! { assert_eq_vec_and_slice <B>(&[B]) }
+generate_assert_eq_vec_and_prim! { assert_eq_vec_and_array_3<B>([B; 3]) }
+
+#[test]
+fn partialeq_vec_and_prim() {
+ assert_eq_vec_and_slice(vec![1, 2, 3], &[1, 2, 3]);
+ assert_eq_vec_and_array_3(vec![1, 2, 3], [1, 2, 3]);
+}
+
+// Checks a full 2x2 equality matrix between two lengths of two container
+// kinds: equal lengths compare equal, unequal lengths compare unequal,
+// via both the operators and the assert macros.
+macro_rules! assert_partial_eq_valid {
+ ($a2:ident, $a3:ident; $b2:ident, $b3: ident) => {
+ assert!($a2 == $b2);
+ assert!($a2 != $b3);
+ assert!($a3 != $b2);
+ assert!($a3 == $b3);
+ assert_eq!($a2, $b2);
+ assert_ne!($a2, $b3);
+ assert_ne!($a3, $b2);
+ assert_eq!($a3, $b3);
+ };
+}
+
+// Exercises every `PartialEq` pairing between `Vec` and slices, mutable
+// slices, arrays, and array references, in both operand orders where an
+// impl exists.
+#[test]
+fn partialeq_vec_full() {
+ let vec2: Vec<_> = vec![1, 2];
+ let vec3: Vec<_> = vec![1, 2, 3];
+ let slice2: &[_] = &[1, 2];
+ let slice3: &[_] = &[1, 2, 3];
+ // NOTE(review): these are ascribed `&[_]`, so the `&mut` borrows coerce
+ // to shared slices and this duplicates the slice2/slice3 case; presumably
+ // `&mut [_]` was intended to cover the `Vec == &mut [T]` impl — confirm.
+ let slicemut2: &[_] = &mut [1, 2];
+ let slicemut3: &[_] = &mut [1, 2, 3];
+ let array2: [_; 2] = [1, 2];
+ let array3: [_; 3] = [1, 2, 3];
+ let arrayref2: &[_; 2] = &[1, 2];
+ let arrayref3: &[_; 3] = &[1, 2, 3];
+
+ assert_partial_eq_valid!(vec2,vec3; vec2,vec3);
+ assert_partial_eq_valid!(vec2,vec3; slice2,slice3);
+ assert_partial_eq_valid!(vec2,vec3; slicemut2,slicemut3);
+ assert_partial_eq_valid!(slice2,slice3; vec2,vec3);
+ assert_partial_eq_valid!(slicemut2,slicemut3; vec2,vec3);
+ assert_partial_eq_valid!(vec2,vec3; array2,array3);
+ assert_partial_eq_valid!(vec2,vec3; arrayref2,arrayref3);
+}
diff --git a/library/alloc/tests/vec_deque.rs b/library/alloc/tests/vec_deque.rs
new file mode 100644
index 00000000000..762dc4be44d
--- /dev/null
+++ b/library/alloc/tests/vec_deque.rs
@@ -0,0 +1,1646 @@
+use std::collections::TryReserveError::*;
+use std::collections::{vec_deque::Drain, VecDeque};
+use std::fmt::Debug;
+use std::mem::size_of;
+use std::panic::{catch_unwind, AssertUnwindSafe};
+
+use crate::hash;
+
+use Taggy::*;
+use Taggypar::*;
+
+// Smoke test for VecDeque: push/pop at both ends, len bookkeeping, and
+// front-to-back indexing.
+#[test]
+fn test_simple() {
+ let mut d = VecDeque::new();
+ assert_eq!(d.len(), 0);
+ d.push_front(17);
+ d.push_front(42);
+ d.push_back(137);
+ assert_eq!(d.len(), 3);
+ d.push_back(137);
+ assert_eq!(d.len(), 4);
+ // Deque is now [42, 17, 137, 137] front-to-back.
+ assert_eq!(*d.front().unwrap(), 42);
+ assert_eq!(*d.back().unwrap(), 137);
+ let mut i = d.pop_front();
+ assert_eq!(i, Some(42));
+ i = d.pop_back();
+ assert_eq!(i, Some(137));
+ i = d.pop_back();
+ assert_eq!(i, Some(137));
+ i = d.pop_back();
+ assert_eq!(i, Some(17));
+ assert_eq!(d.len(), 0);
+ // Refill in mixed front/back order; indexing must see [1, 2, 3, 4].
+ d.push_back(3);
+ assert_eq!(d.len(), 1);
+ d.push_front(2);
+ assert_eq!(d.len(), 2);
+ d.push_back(4);
+ assert_eq!(d.len(), 3);
+ d.push_front(1);
+ assert_eq!(d.len(), 4);
+ assert_eq!(d[0], 1);
+ assert_eq!(d[1], 2);
+ assert_eq!(d[2], 3);
+ assert_eq!(d[3], 4);
+}
+
+fn test_parameterized<T: Clone + PartialEq + Debug>(a: T, b: T, c: T, d: T) {
+ let mut deq = VecDeque::new();
+ assert_eq!(deq.len(), 0);
+ deq.push_front(a.clone());
+ deq.push_front(b.clone());
+ deq.push_back(c.clone());
+ assert_eq!(deq.len(), 3);
+ deq.push_back(d.clone());
+ assert_eq!(deq.len(), 4);
+ assert_eq!((*deq.front().unwrap()).clone(), b.clone());
+ assert_eq!((*deq.back().unwrap()).clone(), d.clone());
+ assert_eq!(deq.pop_front().unwrap(), b.clone());
+ assert_eq!(deq.pop_back().unwrap(), d.clone());
+ assert_eq!(deq.pop_back().unwrap(), c.clone());
+ assert_eq!(deq.pop_back().unwrap(), a.clone());
+ assert_eq!(deq.len(), 0);
+ deq.push_back(c.clone());
+ assert_eq!(deq.len(), 1);
+ deq.push_front(b.clone());
+ assert_eq!(deq.len(), 2);
+ deq.push_back(d.clone());
+ assert_eq!(deq.len(), 3);
+ deq.push_front(a.clone());
+ assert_eq!(deq.len(), 4);
+ assert_eq!(deq[0].clone(), a.clone());
+ assert_eq!(deq[1].clone(), b.clone());
+ assert_eq!(deq[2].clone(), c.clone());
+ assert_eq!(deq[3].clone(), d.clone());
+}
+
+#[test]
+fn test_push_front_grow() {
+ let mut deq = VecDeque::new();
+ for i in 0..66 {
+ deq.push_front(i);
+ }
+ assert_eq!(deq.len(), 66);
+
+ for i in 0..66 {
+ assert_eq!(deq[i], 65 - i);
+ }
+
+ let mut deq = VecDeque::new();
+ for i in 0..66 {
+ deq.push_back(i);
+ }
+
+ for i in 0..66 {
+ assert_eq!(deq[i], i);
+ }
+}
+
+#[test]
+fn test_index() {
+ let mut deq = VecDeque::new();
+ for i in 1..4 {
+ deq.push_front(i);
+ }
+ assert_eq!(deq[1], 2);
+}
+
+#[test]
+#[should_panic]
+fn test_index_out_of_bounds() {
+ let mut deq = VecDeque::new();
+ for i in 1..4 {
+ deq.push_front(i);
+ }
+ deq[3];
+}
+
+#[derive(Clone, PartialEq, Debug)]
+enum Taggy {
+ One(i32),
+ Two(i32, i32),
+ Three(i32, i32, i32),
+}
+
+#[derive(Clone, PartialEq, Debug)]
+enum Taggypar<T> {
+ Onepar(T),
+ Twopar(T, T),
+ Threepar(T, T, T),
+}
+
+#[derive(Clone, PartialEq, Debug)]
+struct RecCy {
+ x: i32,
+ y: i32,
+ t: Taggy,
+}
+
+#[test]
+fn test_param_int() {
+ test_parameterized::<i32>(5, 72, 64, 175);
+}
+
+#[test]
+fn test_param_taggy() {
+ test_parameterized::<Taggy>(One(1), Two(1, 2), Three(1, 2, 3), Two(17, 42));
+}
+
+#[test]
+fn test_param_taggypar() {
+ test_parameterized::<Taggypar<i32>>(
+ Onepar::<i32>(1),
+ Twopar::<i32>(1, 2),
+ Threepar::<i32>(1, 2, 3),
+ Twopar::<i32>(17, 42),
+ );
+}
+
+#[test]
+fn test_param_reccy() {
+ let reccy1 = RecCy { x: 1, y: 2, t: One(1) };
+ let reccy2 = RecCy { x: 345, y: 2, t: Two(1, 2) };
+ let reccy3 = RecCy { x: 1, y: 777, t: Three(1, 2, 3) };
+ let reccy4 = RecCy { x: 19, y: 252, t: Two(17, 42) };
+ test_parameterized::<RecCy>(reccy1, reccy2, reccy3, reccy4);
+}
+
+#[test]
+fn test_with_capacity() {
+ let mut d = VecDeque::with_capacity(0);
+ d.push_back(1);
+ assert_eq!(d.len(), 1);
+ let mut d = VecDeque::with_capacity(50);
+ d.push_back(1);
+ assert_eq!(d.len(), 1);
+}
+
+#[test]
+fn test_with_capacity_non_power_two() {
+ let mut d3 = VecDeque::with_capacity(3);
+ d3.push_back(1);
+
+ // X = None, | = lo
+ // [|1, X, X]
+ assert_eq!(d3.pop_front(), Some(1));
+ // [X, |X, X]
+ assert_eq!(d3.front(), None);
+
+ // [X, |3, X]
+ d3.push_back(3);
+ // [X, |3, 6]
+ d3.push_back(6);
+ // [X, X, |6]
+ assert_eq!(d3.pop_front(), Some(3));
+
+ // Pushing the lo past half way point to trigger
+ // the 'B' scenario for growth
+ // [9, X, |6]
+ d3.push_back(9);
+ // [9, 12, |6]
+ d3.push_back(12);
+
+ d3.push_back(15);
+ // There used to be a bug here about how the
+ // VecDeque made growth assumptions about the
+ // underlying Vec which didn't hold and lead
+ // to corruption.
+ // (Vec grows to next power of two)
+ // good- [9, 12, 15, X, X, X, X, |6]
+ // bug- [15, 12, X, X, X, |6, X, X]
+ assert_eq!(d3.pop_front(), Some(6));
+
+ // Which leads us to the following state which
+ // would be a failure case.
+ // bug- [15, 12, X, X, X, X, |X, X]
+ assert_eq!(d3.front(), Some(&9));
+}
+
+#[test]
+fn test_reserve_exact() {
+ let mut d = VecDeque::new();
+ d.push_back(0);
+ d.reserve_exact(50);
+ assert!(d.capacity() >= 51);
+}
+
+#[test]
+fn test_reserve() {
+ let mut d = VecDeque::new();
+ d.push_back(0);
+ d.reserve(50);
+ assert!(d.capacity() >= 51);
+}
+
+// `VecDeque::swap` uses logical (front-relative) indices: after the
+// pop_front, the deque is [1, 2, 3, 4], so swapping 0 and 3 exchanges
+// the values 1 and 4.
+#[test]
+fn test_swap() {
+ let mut d: VecDeque<_> = (0..5).collect();
+ d.pop_front();
+ d.swap(0, 3);
+ assert_eq!(d.iter().cloned().collect::<Vec<_>>(), [4, 2, 3, 1]);
+}
+
+#[test]
+fn test_iter() {
+ let mut d = VecDeque::new();
+ assert_eq!(d.iter().next(), None);
+ assert_eq!(d.iter().size_hint(), (0, Some(0)));
+
+ for i in 0..5 {
+ d.push_back(i);
+ }
+ {
+ let b: &[_] = &[&0, &1, &2, &3, &4];
+ assert_eq!(d.iter().collect::<Vec<_>>(), b);
+ }
+
+ for i in 6..9 {
+ d.push_front(i);
+ }
+ {
+ let b: &[_] = &[&8, &7, &6, &0, &1, &2, &3, &4];
+ assert_eq!(d.iter().collect::<Vec<_>>(), b);
+ }
+
+ let mut it = d.iter();
+ let mut len = d.len();
+ loop {
+ match it.next() {
+ None => break,
+ _ => {
+ len -= 1;
+ assert_eq!(it.size_hint(), (len, Some(len)))
+ }
+ }
+ }
+}
+
+#[test]
+fn test_rev_iter() {
+ let mut d = VecDeque::new();
+ assert_eq!(d.iter().rev().next(), None);
+
+ for i in 0..5 {
+ d.push_back(i);
+ }
+ {
+ let b: &[_] = &[&4, &3, &2, &1, &0];
+ assert_eq!(d.iter().rev().collect::<Vec<_>>(), b);
+ }
+
+ for i in 6..9 {
+ d.push_front(i);
+ }
+ let b: &[_] = &[&4, &3, &2, &1, &0, &6, &7, &8];
+ assert_eq!(d.iter().rev().collect::<Vec<_>>(), b);
+}
+
+// Reverse mutable iteration over a deque whose contents wrap around the
+// ring buffer boundary (forced by filling capacity 3, popping the front,
+// then pushing again).
+#[test]
+fn test_mut_rev_iter_wrap() {
+ let mut d = VecDeque::with_capacity(3);
+ assert!(d.iter_mut().rev().next().is_none());
+
+ d.push_back(1);
+ d.push_back(2);
+ d.push_back(3);
+ assert_eq!(d.pop_front(), Some(1));
+ // This push wraps around to the slot freed by the pop above.
+ d.push_back(4);
+
+ assert_eq!(d.iter_mut().rev().map(|x| *x).collect::<Vec<_>>(), vec![4, 3, 2]);
+}
+
+#[test]
+fn test_mut_iter() {
+ let mut d = VecDeque::new();
+ assert!(d.iter_mut().next().is_none());
+
+ for i in 0..3 {
+ d.push_front(i);
+ }
+
+ for (i, elt) in d.iter_mut().enumerate() {
+ assert_eq!(*elt, 2 - i);
+ *elt = i;
+ }
+
+ {
+ let mut it = d.iter_mut();
+ assert_eq!(*it.next().unwrap(), 0);
+ assert_eq!(*it.next().unwrap(), 1);
+ assert_eq!(*it.next().unwrap(), 2);
+ assert!(it.next().is_none());
+ }
+}
+
+#[test]
+fn test_mut_rev_iter() {
+ let mut d = VecDeque::new();
+ assert!(d.iter_mut().rev().next().is_none());
+
+ for i in 0..3 {
+ d.push_front(i);
+ }
+
+ for (i, elt) in d.iter_mut().rev().enumerate() {
+ assert_eq!(*elt, i);
+ *elt = i;
+ }
+
+ {
+ let mut it = d.iter_mut().rev();
+ assert_eq!(*it.next().unwrap(), 0);
+ assert_eq!(*it.next().unwrap(), 1);
+ assert_eq!(*it.next().unwrap(), 2);
+ assert!(it.next().is_none());
+ }
+}
+
+#[test]
+fn test_into_iter() {
+ // Empty iter
+ {
+ let d: VecDeque<i32> = VecDeque::new();
+ let mut iter = d.into_iter();
+
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ // simple iter
+ {
+ let mut d = VecDeque::new();
+ for i in 0..5 {
+ d.push_back(i);
+ }
+
+ let b = vec![0, 1, 2, 3, 4];
+ assert_eq!(d.into_iter().collect::<Vec<_>>(), b);
+ }
+
+ // wrapped iter
+ {
+ let mut d = VecDeque::new();
+ for i in 0..5 {
+ d.push_back(i);
+ }
+ for i in 6..9 {
+ d.push_front(i);
+ }
+
+ let b = vec![8, 7, 6, 0, 1, 2, 3, 4];
+ assert_eq!(d.into_iter().collect::<Vec<_>>(), b);
+ }
+
+ // partially used
+ {
+ let mut d = VecDeque::new();
+ for i in 0..5 {
+ d.push_back(i);
+ }
+ for i in 6..9 {
+ d.push_front(i);
+ }
+
+ let mut it = d.into_iter();
+ assert_eq!(it.size_hint(), (8, Some(8)));
+ assert_eq!(it.next(), Some(8));
+ assert_eq!(it.size_hint(), (7, Some(7)));
+ assert_eq!(it.next_back(), Some(4));
+ assert_eq!(it.size_hint(), (6, Some(6)));
+ assert_eq!(it.next(), Some(7));
+ assert_eq!(it.size_hint(), (5, Some(5)));
+ }
+}
+
+#[test]
+fn test_drain() {
+ // Empty iter
+ {
+ let mut d: VecDeque<i32> = VecDeque::new();
+
+ {
+ let mut iter = d.drain(..);
+
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ assert_eq!(iter.next(), None);
+ assert_eq!(iter.size_hint(), (0, Some(0)));
+ }
+
+ assert!(d.is_empty());
+ }
+
+ // simple iter
+ {
+ let mut d = VecDeque::new();
+ for i in 0..5 {
+ d.push_back(i);
+ }
+
+ assert_eq!(d.drain(..).collect::<Vec<_>>(), [0, 1, 2, 3, 4]);
+ assert!(d.is_empty());
+ }
+
+ // wrapped iter
+ {
+ let mut d = VecDeque::new();
+ for i in 0..5 {
+ d.push_back(i);
+ }
+ for i in 6..9 {
+ d.push_front(i);
+ }
+
+ assert_eq!(d.drain(..).collect::<Vec<_>>(), [8, 7, 6, 0, 1, 2, 3, 4]);
+ assert!(d.is_empty());
+ }
+
+ // partially used
+ {
+ let mut d: VecDeque<_> = VecDeque::new();
+ for i in 0..5 {
+ d.push_back(i);
+ }
+ for i in 6..9 {
+ d.push_front(i);
+ }
+
+ {
+ let mut it = d.drain(..);
+ assert_eq!(it.size_hint(), (8, Some(8)));
+ assert_eq!(it.next(), Some(8));
+ assert_eq!(it.size_hint(), (7, Some(7)));
+ assert_eq!(it.next_back(), Some(4));
+ assert_eq!(it.size_hint(), (6, Some(6)));
+ assert_eq!(it.next(), Some(7));
+ assert_eq!(it.size_hint(), (5, Some(5)));
+ }
+ assert!(d.is_empty());
+ }
+}
+
+#[test]
+fn test_from_iter() {
+ let v = vec![1, 2, 3, 4, 5, 6, 7];
+ let deq: VecDeque<_> = v.iter().cloned().collect();
+ let u: Vec<_> = deq.iter().cloned().collect();
+ assert_eq!(u, v);
+
+ let seq = (0..).step_by(2).take(256);
+ let deq: VecDeque<_> = seq.collect();
+ for (i, &x) in deq.iter().enumerate() {
+ assert_eq!(2 * i, x);
+ }
+ assert_eq!(deq.len(), 256);
+}
+
+#[test]
+fn test_clone() {
+ let mut d = VecDeque::new();
+ d.push_front(17);
+ d.push_front(42);
+ d.push_back(137);
+ d.push_back(137);
+ assert_eq!(d.len(), 4);
+ let mut e = d.clone();
+ assert_eq!(e.len(), 4);
+ while !d.is_empty() {
+ assert_eq!(d.pop_back(), e.pop_back());
+ }
+ assert_eq!(d.len(), 0);
+ assert_eq!(e.len(), 0);
+}
+
+#[test]
+fn test_eq() {
+ let mut d = VecDeque::new();
+ assert!(d == VecDeque::with_capacity(0));
+ d.push_front(137);
+ d.push_front(17);
+ d.push_front(42);
+ d.push_back(137);
+ let mut e = VecDeque::with_capacity(0);
+ e.push_back(42);
+ e.push_back(17);
+ e.push_back(137);
+ e.push_back(137);
+ assert!(&e == &d);
+ e.pop_back();
+ e.push_back(0);
+ assert!(e != d);
+ e.clear();
+ assert!(e == VecDeque::new());
+}
+
+#[test]
+fn test_partial_eq_array() {
+ let d = VecDeque::<char>::new();
+ assert!(d == []);
+
+ let mut d = VecDeque::new();
+ d.push_front('a');
+ assert!(d == ['a']);
+
+ let mut d = VecDeque::new();
+ d.push_back('a');
+ assert!(d == ['a']);
+
+ let mut d = VecDeque::new();
+ d.push_back('a');
+ d.push_back('b');
+ assert!(d == ['a', 'b']);
+}
+
+#[test]
+fn test_hash() {
+ let mut x = VecDeque::new();
+ let mut y = VecDeque::new();
+
+ x.push_back(1);
+ x.push_back(2);
+ x.push_back(3);
+
+ y.push_back(0);
+ y.push_back(1);
+ y.pop_front();
+ y.push_back(2);
+ y.push_back(3);
+
+ assert!(hash(&x) == hash(&y));
+}
+
+#[test]
+fn test_hash_after_rotation() {
+ // test that two deques hash equal even if elements are laid out differently
+ let len = 28;
+ let mut ring: VecDeque<i32> = (0..len as i32).collect();
+ let orig = ring.clone();
+ for _ in 0..ring.capacity() {
+ // shift values 1 step to the right by pop, sub one, push
+ ring.pop_front();
+ for elt in &mut ring {
+ *elt -= 1;
+ }
+ ring.push_back(len - 1);
+ assert_eq!(hash(&orig), hash(&ring));
+ assert_eq!(orig, ring);
+ assert_eq!(ring, orig);
+ }
+}
+
+#[test]
+fn test_eq_after_rotation() {
+ // test that two deques are equal even if elements are laid out differently
+ let len = 28;
+ let mut ring: VecDeque<i32> = (0..len as i32).collect();
+ let mut shifted = ring.clone();
+ for _ in 0..10 {
+ // shift values 1 step to the right by pop, sub one, push
+ ring.pop_front();
+ for elt in &mut ring {
+ *elt -= 1;
+ }
+ ring.push_back(len - 1);
+ }
+
+ // try every shift
+ for _ in 0..shifted.capacity() {
+ shifted.pop_front();
+ for elt in &mut shifted {
+ *elt -= 1;
+ }
+ shifted.push_back(len - 1);
+ assert_eq!(shifted, ring);
+ assert_eq!(ring, shifted);
+ }
+}
+
+#[test]
+fn test_ord() {
+ let x = VecDeque::new();
+ let mut y = VecDeque::new();
+ y.push_back(1);
+ y.push_back(2);
+ y.push_back(3);
+ assert!(x < y);
+ assert!(y > x);
+ assert!(x <= x);
+ assert!(x >= x);
+}
+
+#[test]
+fn test_show() {
+ let ringbuf: VecDeque<_> = (0..10).collect();
+ assert_eq!(format!("{:?}", ringbuf), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]");
+
+ let ringbuf: VecDeque<_> = vec!["just", "one", "test", "more"].iter().cloned().collect();
+ assert_eq!(format!("{:?}", ringbuf), "[\"just\", \"one\", \"test\", \"more\"]");
+}
+
+// Dropping a VecDeque must drop each contained element exactly once.
+// The count lives in a `static mut`; the test harness runs this in a
+// single thread so the unsynchronized accesses are not racy in practice.
+#[test]
+fn test_drop() {
+ static mut DROPS: i32 = 0;
+ struct Elem;
+ impl Drop for Elem {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let mut ring = VecDeque::new();
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ drop(ring);
+
+ // All four elements were dropped, no double drops.
+ assert_eq!(unsafe { DROPS }, 4);
+}
+
+#[test]
+fn test_drop_with_pop() {
+ static mut DROPS: i32 = 0;
+ struct Elem;
+ impl Drop for Elem {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let mut ring = VecDeque::new();
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+
+ drop(ring.pop_back());
+ drop(ring.pop_front());
+ assert_eq!(unsafe { DROPS }, 2);
+
+ drop(ring);
+ assert_eq!(unsafe { DROPS }, 4);
+}
+
+#[test]
+fn test_drop_clear() {
+ static mut DROPS: i32 = 0;
+ struct Elem;
+ impl Drop for Elem {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+ }
+ }
+
+ let mut ring = VecDeque::new();
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ ring.push_back(Elem);
+ ring.push_front(Elem);
+ ring.clear();
+ assert_eq!(unsafe { DROPS }, 4);
+
+ drop(ring);
+ assert_eq!(unsafe { DROPS }, 4);
+}
+
+#[test]
+fn test_drop_panic() {
+ static mut DROPS: i32 = 0;
+
+ struct D(bool);
+
+ impl Drop for D {
+ fn drop(&mut self) {
+ unsafe {
+ DROPS += 1;
+ }
+
+ if self.0 {
+ panic!("panic in `drop`");
+ }
+ }
+ }
+
+ let mut q = VecDeque::new();
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_back(D(false));
+ q.push_front(D(false));
+ q.push_front(D(false));
+ q.push_front(D(true));
+
+ catch_unwind(move || drop(q)).ok();
+
+ assert_eq!(unsafe { DROPS }, 8);
+}
+
+#[test]
+fn test_reserve_grow() {
+ // test growth path A
+ // [T o o H] -> [T o o H . . . . ]
+ let mut ring = VecDeque::with_capacity(4);
+ for i in 0..3 {
+ ring.push_back(i);
+ }
+ ring.reserve(7);
+ for i in 0..3 {
+ assert_eq!(ring.pop_front(), Some(i));
+ }
+
+ // test growth path B
+ // [H T o o] -> [. T o o H . . . ]
+ let mut ring = VecDeque::with_capacity(4);
+ for i in 0..1 {
+ ring.push_back(i);
+ assert_eq!(ring.pop_front(), Some(i));
+ }
+ for i in 0..3 {
+ ring.push_back(i);
+ }
+ ring.reserve(7);
+ for i in 0..3 {
+ assert_eq!(ring.pop_front(), Some(i));
+ }
+
+ // test growth path C
+ // [o o H T] -> [o o H . . . . T ]
+ let mut ring = VecDeque::with_capacity(4);
+ for i in 0..3 {
+ ring.push_back(i);
+ assert_eq!(ring.pop_front(), Some(i));
+ }
+ for i in 0..3 {
+ ring.push_back(i);
+ }
+ ring.reserve(7);
+ for i in 0..3 {
+ assert_eq!(ring.pop_front(), Some(i));
+ }
+}
+
+#[test]
+fn test_get() {
+ let mut ring = VecDeque::new();
+ ring.push_back(0);
+ assert_eq!(ring.get(0), Some(&0));
+ assert_eq!(ring.get(1), None);
+
+ ring.push_back(1);
+ assert_eq!(ring.get(0), Some(&0));
+ assert_eq!(ring.get(1), Some(&1));
+ assert_eq!(ring.get(2), None);
+
+ ring.push_back(2);
+ assert_eq!(ring.get(0), Some(&0));
+ assert_eq!(ring.get(1), Some(&1));
+ assert_eq!(ring.get(2), Some(&2));
+ assert_eq!(ring.get(3), None);
+
+ assert_eq!(ring.pop_front(), Some(0));
+ assert_eq!(ring.get(0), Some(&1));
+ assert_eq!(ring.get(1), Some(&2));
+ assert_eq!(ring.get(2), None);
+
+ assert_eq!(ring.pop_front(), Some(1));
+ assert_eq!(ring.get(0), Some(&2));
+ assert_eq!(ring.get(1), None);
+
+ assert_eq!(ring.pop_front(), Some(2));
+ assert_eq!(ring.get(0), None);
+ assert_eq!(ring.get(1), None);
+}
+
+#[test]
+fn test_get_mut() {
+ let mut ring = VecDeque::new();
+ for i in 0..3 {
+ ring.push_back(i);
+ }
+
+ match ring.get_mut(1) {
+ Some(x) => *x = -1,
+ None => (),
+ };
+
+ assert_eq!(ring.get_mut(0), Some(&mut 0));
+ assert_eq!(ring.get_mut(1), Some(&mut -1));
+ assert_eq!(ring.get_mut(2), Some(&mut 2));
+ assert_eq!(ring.get_mut(3), None);
+
+ assert_eq!(ring.pop_front(), Some(0));
+ assert_eq!(ring.get_mut(0), Some(&mut -1));
+ assert_eq!(ring.get_mut(1), Some(&mut 2));
+ assert_eq!(ring.get_mut(2), None);
+}
+
+#[test]
+fn test_front() {
+ let mut ring = VecDeque::new();
+ ring.push_back(10);
+ ring.push_back(20);
+ assert_eq!(ring.front(), Some(&10));
+ ring.pop_front();
+ assert_eq!(ring.front(), Some(&20));
+ ring.pop_front();
+ assert_eq!(ring.front(), None);
+}
+
#[test]
fn test_as_slices() {
    let mut deque: VecDeque<i32> = VecDeque::with_capacity(127);
    let cap = deque.capacity() as i32;
    let back_half = cap / 2;
    let front_half = cap - back_half;

    // Pushing only at the back keeps the contents contiguous: everything
    // shows up in the left slice and the right slice stays empty.
    for i in 0..back_half {
        deque.push_back(i);

        let (left, right) = deque.as_slices();
        let expected: Vec<_> = (0..=i).collect();
        assert_eq!(left, &expected[..]);
        assert_eq!(right, []);
    }

    // Pushing at the front wraps around the ring buffer, so the contents
    // split across both slices: front-pushed values first, then the
    // back-pushed run.
    for j in -front_half..0 {
        deque.push_front(j);

        let (left, right) = deque.as_slices();
        let expected_left: Vec<_> = (-front_half..=j).rev().collect();
        let expected_right: Vec<_> = (0..back_half).collect();
        assert_eq!(left, &expected_left[..]);
        assert_eq!(right, &expected_right[..]);
    }

    // The deque is now exactly full and no reallocation has happened.
    assert_eq!(deque.len() as i32, cap);
    assert_eq!(deque.capacity() as i32, cap);
}
+
#[test]
fn test_as_mut_slices() {
    // Mirror of `test_as_slices`, exercising the mutable accessor.
    let mut deque: VecDeque<i32> = VecDeque::with_capacity(127);
    let cap = deque.capacity() as i32;
    let back_half = cap / 2;
    let front_half = cap - back_half;

    // Back-only pushes: contents stay contiguous in the left slice.
    for i in 0..back_half {
        deque.push_back(i);

        let (left, right) = deque.as_mut_slices();
        let expected: Vec<_> = (0..=i).collect();
        assert_eq!(left, &expected[..]);
        assert_eq!(right, []);
    }

    // Front pushes wrap around the buffer and split the contents.
    for j in -front_half..0 {
        deque.push_front(j);

        let (left, right) = deque.as_mut_slices();
        let expected_left: Vec<_> = (-front_half..=j).rev().collect();
        let expected_right: Vec<_> = (0..back_half).collect();
        assert_eq!(left, &expected_left[..]);
        assert_eq!(right, &expected_right[..]);
    }

    // Exactly full, no reallocation.
    assert_eq!(deque.len() as i32, cap);
    assert_eq!(deque.capacity() as i32, cap);
}
+
#[test]
fn test_append() {
    // `append` moves every element of the argument to the back of `self`,
    // leaving the argument empty.
    let mut a: VecDeque<_> = (1..=3).collect();
    let mut b: VecDeque<_> = (4..=6).collect();

    // non-empty += non-empty
    a.append(&mut b);
    assert_eq!(a, [1, 2, 3, 4, 5, 6]);
    assert!(b.is_empty());

    // non-empty += empty: contents unchanged
    a.append(&mut b);
    assert_eq!(a, [1, 2, 3, 4, 5, 6]);
    assert!(b.is_empty());

    // empty += non-empty: contents move over wholesale
    b.append(&mut a);
    assert_eq!(b, [1, 2, 3, 4, 5, 6]);
    assert!(a.is_empty());
}
+
#[test]
fn test_append_permutations() {
    // Builds a deque by applying the four basic operations the given
    // number of times: all pushes first, then all pops.
    //
    // NOTE(review): the declared parameter order (push_back, pop_back,
    // push_front, pop_front) differs from the order the operations run
    // in the body (push_back, push_front, pop_back, pop_front). Call
    // sites below pass arguments by the *declared* order, which is
    // correct but easy to get wrong when editing.
    fn construct_vec_deque(
        push_back: usize,
        pop_back: usize,
        push_front: usize,
        pop_front: usize,
    ) -> VecDeque<usize> {
        let mut out = VecDeque::new();
        for a in 0..push_back {
            out.push_back(a);
        }
        for b in 0..push_front {
            out.push_front(push_back + b);
        }
        for _ in 0..pop_back {
            out.pop_back();
        }
        for _ in 0..pop_front {
            out.pop_front();
        }
        out
    }

    // Miri is too slow
    let max = if cfg!(miri) { 3 } else { 5 };

    // Many different permutations of both the `VecDeque` getting appended to
    // and the one getting appended are generated to check `append`.
    // This ensures all 6 code paths of `append` are tested.
    for src_push_back in 0..max {
        for src_push_front in 0..max {
            // doesn't pop more values than are pushed
            for src_pop_back in 0..(src_push_back + src_push_front) {
                for src_pop_front in 0..(src_push_back + src_push_front - src_pop_back) {
                    let src = construct_vec_deque(
                        src_push_back,
                        src_pop_back,
                        src_push_front,
                        src_pop_front,
                    );

                    for dst_push_back in 0..max {
                        for dst_push_front in 0..max {
                            for dst_pop_back in 0..(dst_push_back + dst_push_front) {
                                for dst_pop_front in
                                    0..(dst_push_back + dst_push_front - dst_pop_back)
                                {
                                    let mut dst = construct_vec_deque(
                                        dst_push_back,
                                        dst_pop_back,
                                        dst_push_front,
                                        dst_pop_front,
                                    );
                                    // Fresh copy: `src` is reused across all `dst` shapes.
                                    let mut src = src.clone();

                                    // Assert that appending `src` to `dst` gives the same order
                                    // of values as iterating over both in sequence.
                                    let correct = dst
                                        .iter()
                                        .chain(src.iter())
                                        .cloned()
                                        .collect::<Vec<usize>>();
                                    dst.append(&mut src);
                                    assert_eq!(dst, correct);
                                    assert!(src.is_empty());
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
+
// Increments the referenced counter each time a value is dropped; used
// by `test_append_double_drop` to verify elements are dropped exactly
// once (no leak, no double drop).
struct DropCounter<'a> {
    count: &'a mut u32,
}

impl Drop for DropCounter<'_> {
    fn drop(&mut self) {
        *self.count += 1;
    }
}
+
#[test]
fn test_append_double_drop() {
    // After `append`, both deques go out of scope here; each element
    // must have been dropped exactly once overall.
    let (mut count_a, mut count_b) = (0, 0);
    {
        let mut dst = VecDeque::new();
        let mut src = VecDeque::new();
        dst.push_back(DropCounter { count: &mut count_a });
        src.push_back(DropCounter { count: &mut count_b });
        dst.append(&mut src);
    }
    assert_eq!(count_a, 1);
    assert_eq!(count_b, 1);
}
+
#[test]
fn test_retain() {
    // `retain` keeps only the elements matching the predicate, preserving
    // their relative order.
    let mut buf: VecDeque<i32> = (1..5).collect();
    buf.retain(|&x| x % 2 == 0);
    assert_eq!(buf, [2, 4]);
}
+
#[test]
fn test_extend_ref() {
    // `Extend<&T>` lets a deque be extended from borrowed items: first
    // from a slice, then from another deque.
    let mut v = VecDeque::new();
    v.push_back(1);
    v.extend(&[2, 3, 4]);
    assert_eq!(v, [1, 2, 3, 4]);

    let mut w = VecDeque::new();
    w.push_back(5);
    w.push_back(6);
    v.extend(&w);
    assert_eq!(v, [1, 2, 3, 4, 5, 6]);
}
+
#[test]
fn test_contains() {
    // `contains` reports membership among the current elements only.
    let mut deque: VecDeque<i32> = [2, 3, 4].iter().copied().collect();

    assert!(deque.contains(&3));
    assert!(!deque.contains(&1));

    // After clearing, nothing is found any more.
    deque.clear();
    assert!(!deque.contains(&3));
}
+
// Compile-time witness that `Drain<'a, T>` is covariant in both its
// lifetime and its element type: a `Drain<'static, &'static str>` must
// coerce to one with strictly shorter lifetimes. Never called — it only
// has to type-check.
#[allow(dead_code)]
fn assert_covariance() {
    fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> {
        d
    }
}
+
#[test]
fn test_is_empty() {
    // `is_empty` must agree with `len() == 0`, both on the deque itself
    // and on each of its iterator types.
    let mut v = VecDeque::<i32>::new();
    assert!(v.is_empty());
    assert!(v.iter().is_empty());
    assert!(v.iter_mut().is_empty());

    v.extend(&[2, 3, 4]);
    assert!(!v.is_empty());
    assert!(!v.iter().is_empty());
    assert!(!v.iter_mut().is_empty());

    // Drain from the front, re-checking the invariant at every length.
    while v.pop_front().is_some() {
        assert_eq!(v.is_empty(), v.len() == 0);
        assert_eq!(v.iter().is_empty(), v.iter().len() == 0);
        assert_eq!(v.iter_mut().is_empty(), v.iter_mut().len() == 0);
    }

    assert!(v.is_empty());
    assert!(v.iter().is_empty());
    assert!(v.iter_mut().is_empty());
    assert!(v.into_iter().is_empty());
}
+
#[test]
fn test_reserve_exact_2() {
    // This is all the same as test_reserve
    //
    // Only lower bounds are asserted: `VecDeque` is free to over-allocate,
    // so the exact capacity values depend on its growth policy.

    let mut v = VecDeque::new();

    v.reserve_exact(2);
    assert!(v.capacity() >= 2);

    for i in 0..16 {
        v.push_back(i);
    }

    assert!(v.capacity() >= 16);
    v.reserve_exact(16);
    // len 16 + 16 additional => at least 32 slots.
    assert!(v.capacity() >= 32);

    v.push_back(16);

    v.reserve_exact(16);
    // NOTE(review): len 17 + 16 additional only requires 33 slots; the
    // `>= 48` bound holds because the implementation rounds capacities
    // up further — confirm against the deque's growth strategy if this
    // ever starts failing.
    assert!(v.capacity() >= 48)
}
+
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
    // These are the interesting cases:
    // * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
    // * > isize::MAX should always fail
    //   * On 16/32-bit should CapacityOverflow
    //   * On 64-bit should OOM
    // * overflow may trigger when adding `len` to `cap` (in number of elements)
    // * overflow may trigger when multiplying `new_cap` by size_of::<T> (to get bytes)

    // Largest element count these tests ever request directly.
    const MAX_CAP: usize = (isize::MAX as usize + 1) / 2 - 1;
    const MAX_USIZE: usize = usize::MAX;

    // On 16/32-bit, we check that allocations don't exceed isize::MAX,
    // on 64-bit, we assume the OS will give an OOM for such a ridiculous size.
    // Any platform that succeeds for these requests is technically broken with
    // ptr::offset because LLVM is the worst.
    let guards_against_isize = size_of::<usize>() < 8;

    {
        // Note: basic stuff is checked by test_reserve
        let mut empty_bytes: VecDeque<u8> = VecDeque::new();

        // Check isize::MAX doesn't count as an overflow
        // (pattern: an empty-body `if let Err(Kind)` panics in the arm when
        // the *wrong* error kind appears, or in the `else` when no error
        // appears where one is required).
        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        // Play it again, frank! (just to be sure)
        if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }

        if guards_against_isize {
            // Check isize::MAX + 1 does count as overflow
            if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!")
            }

            // Check usize::MAX does count as overflow
            if let Err(CapacityOverflow) = empty_bytes.try_reserve(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an overflow!")
            }
        } else {
            // Check isize::MAX is an OOM
            // VecDeque starts with capacity 7, always adds 1 to the capacity
            // and also rounds the number to next power of 2 so this is the
            // furthest we can go without triggering CapacityOverflow
            if let Err(AllocError { .. }) = empty_bytes.try_reserve(MAX_CAP) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
    }

    {
        // Same basic idea, but with non-zero len
        let mut ten_bytes: VecDeque<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_bytes.try_reserve(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Should always overflow in the add-to-len
        if let Err(CapacityOverflow) = ten_bytes.try_reserve(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }

    {
        // Same basic idea, but with interesting type size
        // (u32 is 4 bytes, so the byte-size computation can overflow even
        // when the element count itself does not).
        let mut ten_u32s: VecDeque<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_u32s.try_reserve(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        // Should fail in the mul-by-size
        if let Err(CapacityOverflow) = ten_u32s.try_reserve(MAX_USIZE - 20) {
        } else {
            panic!("usize::MAX should trigger an overflow!");
        }
    }
}
+
#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
    // This is exactly the same as test_try_reserve with the method changed.
    // See that test for comments.

    const MAX_CAP: usize = (isize::MAX as usize + 1) / 2 - 1;
    const MAX_USIZE: usize = usize::MAX;

    let guards_against_isize = size_of::<usize>() < 8;

    {
        // Empty deque: no `len` to add, so only the cap itself can overflow.
        let mut empty_bytes: VecDeque<u8> = VecDeque::new();

        if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }

        if guards_against_isize {
            if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_CAP + 1) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!")
            }

            if let Err(CapacityOverflow) = empty_bytes.try_reserve_exact(MAX_USIZE) {
            } else {
                panic!("usize::MAX should trigger an overflow!")
            }
        } else {
            // Check isize::MAX is an OOM
            // VecDeque starts with capacity 7, always adds 1 to the capacity
            // and also rounds the number to next power of 2 so this is the
            // furthest we can go without triggering CapacityOverflow
            if let Err(AllocError { .. }) = empty_bytes.try_reserve_exact(MAX_CAP) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
    }

    {
        // Non-zero len: overflow can now happen when adding len to cap.
        let mut ten_bytes: VecDeque<u8> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_bytes.try_reserve_exact(MAX_CAP - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        if let Err(CapacityOverflow) = ten_bytes.try_reserve_exact(MAX_USIZE) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }

    {
        // 4-byte elements: the bytes computation can overflow even when
        // the element count does not.
        let mut ten_u32s: VecDeque<u32> = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10].into_iter().collect();

        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 10) {
            panic!("isize::MAX shouldn't trigger an overflow!");
        }
        if guards_against_isize {
            if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an overflow!");
            }
        } else {
            if let Err(AllocError { .. }) = ten_u32s.try_reserve_exact(MAX_CAP / 4 - 9) {
            } else {
                panic!("isize::MAX + 1 should trigger an OOM!")
            }
        }
        if let Err(CapacityOverflow) = ten_u32s.try_reserve_exact(MAX_USIZE - 20) {
        } else {
            panic!("usize::MAX should trigger an overflow!")
        }
    }
}
+
#[test]
fn test_rotate_nop() {
    // Every group of rotations below shifts by a net multiple of the
    // length (10), so after each group the deque must be unchanged.
    let mut v: VecDeque<_> = (0..10).collect();

    fn assert_unchanged(v: &VecDeque<i32>) {
        assert_eq!(v, &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
    }

    assert_unchanged(&v);

    // Signed shift encoding: positive = rotate_left, negative = rotate_right.
    let groups: &[&[i32]] = &[
        &[0],
        &[10],
        &[0],              // rotate_right(0) — direction is irrelevant for 0
        &[-10],
        &[3, -3],
        &[-3, 3],
        &[6, -6],
        &[-6, 6],
        &[3, 7],
        &[-4, -6],
        &[1, 2, 3, 4],
        &[-1, -2, -3, -4],
    ];
    for group in groups {
        for &shift in *group {
            if shift >= 0 {
                v.rotate_left(shift as usize);
            } else {
                v.rotate_right((-shift) as usize);
            }
        }
        assert_unchanged(&v);
    }
}
+
#[test]
fn test_rotate_left_parts() {
    // Checks the exact (front, back) split reported by `as_slices` after
    // each left-rotation by 2 of a 7-element deque.
    // NOTE(review): the expected splits encode where the ring buffer
    // physically wraps for this capacity, so they are specific to the
    // current implementation's buffer layout and growth policy.
    let mut v: VecDeque<_> = (1..=7).collect();
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[3, 4, 5, 6, 7, 1][..], &[2][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[5, 6, 7, 1][..], &[2, 3, 4][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[7, 1][..], &[2, 3, 4, 5, 6][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[2, 3, 4, 5, 6, 7, 1][..], &[][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[4, 5, 6, 7, 1, 2][..], &[3][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[6, 7, 1, 2][..], &[3, 4, 5][..]));
    v.rotate_left(2);
    assert_eq!(v.as_slices(), (&[1, 2][..], &[3, 4, 5, 6, 7][..]));
}
+
#[test]
fn test_rotate_right_parts() {
    // Mirror of `test_rotate_left_parts` for right-rotations.
    // NOTE(review): like that test, the expected slice splits encode the
    // implementation's internal buffer layout for this capacity.
    let mut v: VecDeque<_> = (1..=7).collect();
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[6, 7][..], &[1, 2, 3, 4, 5][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[4, 5, 6, 7][..], &[1, 2, 3][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[2, 3, 4, 5, 6, 7][..], &[1][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[7, 1, 2, 3, 4, 5, 6][..], &[][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[5, 6][..], &[7, 1, 2, 3, 4][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[3, 4, 5, 6][..], &[7, 1, 2][..]));
    v.rotate_right(2);
    assert_eq!(v.as_slices(), (&[1, 2, 3, 4, 5, 6][..], &[7][..]));
}
+
#[test]
fn test_rotate_left_random() {
    // After an arbitrary sequence of left-rotations, the element at
    // position `pos` must be the one that started `total` slots later
    // (modulo the length).
    let shifts = [
        6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, 9, 4, 12, 3,
        12, 9, 11, 1, 7, 9, 7, 2,
    ];
    let n = 12;
    let mut deque: VecDeque<_> = (0..n).collect();
    let mut total = 0;
    for &shift in shifts.iter() {
        deque.rotate_left(shift);
        total += shift;
        for (pos, &value) in deque.iter().enumerate() {
            assert_eq!(value, (pos + total) % n);
        }
    }
}
+
#[test]
fn test_rotate_right_random() {
    // After an arbitrary sequence of right-rotations, the element that
    // started at position `pos` must now sit `total` slots later
    // (modulo the length).
    let shifts = [
        6, 1, 0, 11, 12, 1, 11, 7, 9, 3, 6, 1, 4, 0, 5, 1, 3, 1, 12, 8, 3, 1, 11, 11, 9, 4, 12, 3,
        12, 9, 11, 1, 7, 9, 7, 2,
    ];
    let n = 12;
    let mut deque: VecDeque<_> = (0..n).collect();
    let mut total = 0;
    for &shift in shifts.iter() {
        deque.rotate_right(shift);
        total += shift;
        for pos in 0..n {
            assert_eq!(deque[(pos + total) % n], pos);
        }
    }
}
+
#[test]
fn test_try_fold_empty() {
    // On an empty deque the closure is never invoked, so `try_fold`
    // returns the initial accumulator in the success variant.
    let deque = VecDeque::<u32>::new();
    let folded = deque.iter().try_fold(0, |_, _| None);
    assert_eq!(folded, Some(0));
}
+
#[test]
fn test_try_fold_none() {
    // `try_fold` short-circuits to `None` as soon as the closure fails
    // (here: on the final element, 11).
    let deque: VecDeque<u32> = (0..12).collect();
    let folded = deque.into_iter().try_fold(0, |acc, x| if x >= 11 { None } else { Some(acc + x) });
    assert_eq!(folded, None);
}
+
#[test]
fn test_try_fold_ok() {
    // A closure that never fails folds the whole deque: 0 + 1 + … + 11 = 66.
    let deque: VecDeque<u32> = (0..12).collect();
    let total: Result<u32, ()> = deque.into_iter().try_fold(0, |acc, x| Ok(acc + x));
    assert_eq!(total, Ok(66));
}
+
#[test]
fn test_try_fold_unit() {
    // Zero-sized elements must fold just like ordinary ones.
    let deque: VecDeque<()> = std::iter::repeat(()).take(42).collect();
    let folded = deque.into_iter().try_fold((), |(), ()| Some(()));
    assert_eq!(folded, Some(()));
}
+
#[test]
fn test_try_fold_unit_none() {
    // A fold that fails immediately consumes exactly one element,
    // leaving the remaining 9 in the iterator.
    let deque: std::collections::VecDeque<()> = [(); 10].iter().copied().collect();
    let mut iter = deque.into_iter();
    assert!(iter.try_fold((), |_, _| None::<()>).is_none());
    assert_eq!(iter.len(), 9);
}
+
#[test]
fn test_try_fold_rotated() {
    // The fold result must not depend on where the ring buffer is split,
    // so rotate in alternating directions to hit many head positions.
    let mut deque: VecDeque<_> = (0..12).collect();
    for n in 0..10 {
        match n % 2 {
            0 => deque.rotate_left(n),
            _ => deque.rotate_right(n),
        }
        let sum: Result<_, ()> = deque.iter().try_fold(0, |acc, x| Ok(acc + x));
        assert_eq!(sum, Ok(66));
    }
}
+
#[test]
fn test_try_fold_moves_iter() {
    // When `try_fold` fails, the iterator must be positioned just past
    // the element that caused the failure: the i8 sum overflows at 100,
    // so the next element is 60.
    let deque: VecDeque<_> = [10, 20, 30, 40, 100, 60, 70, 80, 90].iter().collect();
    let mut iter = deque.into_iter();
    let failed = iter.try_fold(0_i8, |acc, &x| acc.checked_add(x));
    assert_eq!(failed, None);
    assert_eq!(iter.next(), Some(&60));
}
+
#[test]
fn test_try_fold_exhaust_wrap() {
    // Push three and pop two so the remaining element sits away from the
    // start of the buffer, then exhaust the iterator through `try_fold`;
    // it must report empty afterwards.
    let mut deque = VecDeque::with_capacity(7);
    for _ in 0..3 {
        deque.push_back(1);
    }
    deque.pop_front();
    deque.pop_front();

    let mut iter = deque.iter();
    let _ = iter.try_fold(0, |_, _| Some(1));
    assert!(iter.is_empty());
}
+
#[test]
fn test_try_fold_wraparound() {
    // `find` is driven by `try_fold` internally; after a successful find
    // the iterator must resume at the element following the match, even
    // when elements were pushed at both ends.
    let mut deque = VecDeque::with_capacity(8);
    for back in [7, 8, 9].iter() {
        deque.push_back(*back);
    }
    deque.push_front(2);
    deque.push_front(1);

    let mut iter = deque.iter();
    let _ = iter.find(|&&x| x == 2);
    assert_eq!(Some(&7), iter.next());
}
+
#[test]
fn test_try_rfold_rotated() {
    // Mirror of `test_try_fold_rotated` for the back-to-front fold:
    // the result must not depend on the buffer split point.
    let mut deque: VecDeque<_> = (0..12).collect();
    for n in 0..10 {
        match n % 2 {
            0 => deque.rotate_left(n),
            _ => deque.rotate_right(n),
        }
        let sum: Result<_, ()> = deque.iter().try_rfold(0, |acc, x| Ok(acc + x));
        assert_eq!(sum, Ok(66));
    }
}
+
#[test]
fn test_try_rfold_moves_iter() {
    // Back-to-front counterpart of `test_try_fold_moves_iter`: folding
    // 90 then 80 overflows the i8 accumulator, so `next_back` must
    // resume at 70.
    let deque: VecDeque<_> = [10, 20, 30, 40, 100, 60, 70, 80, 90].iter().collect();
    let mut iter = deque.into_iter();
    let failed = iter.try_rfold(0_i8, |acc, &x| acc.checked_add(x));
    assert_eq!(failed, None);
    assert_eq!(iter.next_back(), Some(&70));
}
+
#[test]
fn truncate_leak() {
    // Counts every drop of `D`, including drops that happen while
    // unwinding from a panicking destructor.
    static mut DROPS: i32 = 0;

    // `D(true)` panics from its own destructor (after counting itself).
    struct D(bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.0 {
                panic!("panic in `drop`");
            }
        }
    }

    // Eight elements, one of which (the `D(true)` pushed to the front)
    // will panic when dropped during truncation.
    let mut q = VecDeque::new();
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_back(D(false));
    q.push_front(D(true));
    q.push_front(D(false));
    q.push_front(D(false));

    // Truncating to length 1 drops the other seven elements; the panic
    // from `D(true)` is caught here so the test can keep going.
    catch_unwind(AssertUnwindSafe(|| q.truncate(1))).ok();

    // Despite the mid-truncation panic, all seven removed elements must
    // have been dropped exactly once — nothing leaked.
    assert_eq!(unsafe { DROPS }, 7);
}
+
#[test]
fn test_drain_leak() {
    // Counts every drop of `D`, including drops performed while the
    // drain is unwinding from a panicking destructor.
    static mut DROPS: i32 = 0;

    // `D(_, true)` panics from its destructor (after counting itself).
    #[derive(Debug, PartialEq)]
    struct D(u32, bool);

    impl Drop for D {
        fn drop(&mut self) {
            unsafe {
                DROPS += 1;
            }

            if self.1 {
                panic!("panic in `drop`");
            }
        }
    }

    // Front-to-back contents end up as 0, 1, 2, 3, 4, 5, 6, with the
    // panicking element at value 2.
    let mut v = VecDeque::new();
    v.push_back(D(4, false));
    v.push_back(D(5, false));
    v.push_back(D(6, false));
    v.push_front(D(3, false));
    v.push_front(D(2, true));
    v.push_front(D(1, false));
    v.push_front(D(0, false));

    // Drain the middle four elements (values 1..=4); dropping D(2, true)
    // panics mid-drain, and the panic is caught here.
    catch_unwind(AssertUnwindSafe(|| {
        v.drain(1..=4);
    }))
    .ok();

    // All four drained elements were dropped despite the panic…
    assert_eq!(unsafe { DROPS }, 4);
    // …and the deque still holds the three undrained elements (0, 5, 6)…
    assert_eq!(v.len(), 3);
    drop(v);
    // …which are dropped when the deque itself is dropped.
    assert_eq!(unsafe { DROPS }, 7);
}