1 #![warn(rust_2018_idioms)]
2 #![cfg(feature = "full")]
3
4 use std::pin::Pin;
5 use std::task::{Context, Poll};
6 use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf};
7 use tokio_test::assert_ok;
8 use tokio_test::io::Builder;
9
10 #[tokio::test]
read_to_end()11 async fn read_to_end() {
12 let mut buf = vec![];
13 let mut rd: &[u8] = b"hello world";
14
15 let n = assert_ok!(rd.read_to_end(&mut buf).await);
16 assert_eq!(n, 11);
17 assert_eq!(buf[..], b"hello world"[..]);
18 }
19
// Phase of the `UninitTest` reader's state machine (see its `poll_read`).
#[derive(Copy, Clone, Debug)]
enum State {
    // Each call initializes two extra unfilled bytes and fills one,
    // growing the initialized-but-unfilled region by one byte per call.
    Initializing,
    // Each call fills one byte out of the already-initialized region,
    // shrinking it by one byte per call.
    JustFilling,
    // Returns Ready(Ok(())) without filling anything, i.e. EOF.
    Done,
}
26
// Test reader that checks `ReadBuf` preserves its initialized-region
// bookkeeping across calls to `poll_read` during `read_to_end`.
struct UninitTest {
    // Number of bytes the reader expects to be initialized but not yet
    // filled in the `ReadBuf` on the next `poll_read` call.
    num_init: usize,
    // Current phase of the state machine driving `poll_read`.
    state: State,
}
31
impl AsyncRead for UninitTest {
    // Asserts on every call that the caller (read_to_end) handed back a
    // `ReadBuf` whose initialized-but-unfilled byte count matches what this
    // reader left behind on the previous call, then advances the state
    // machine: fill 24 bytes while over-initializing (Initializing), fill 9
    // more from the already-initialized region (JustFilling), then EOF
    // (Done). Total bytes filled: 24 + 9 = 33.
    fn poll_read(
        self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> Poll<std::io::Result<()>> {
        let me = Pin::into_inner(self);
        // Bytes that are initialized but not yet filled must equal the
        // count this reader tracked across calls.
        let real_num_init = buf.initialized().len() - buf.filled().len();
        assert_eq!(real_num_init, me.num_init, "{:?}", me.state);

        match me.state {
            State::Initializing => {
                // Ensure two bytes past the tracked count are initialized,
                // then fill only one: net +1 initialized-but-unfilled byte.
                buf.initialize_unfilled_to(me.num_init + 2);
                buf.advance(1);
                me.num_init += 1;

                if me.num_init == 24 {
                    me.state = State::JustFilling;
                }
            }
            State::JustFilling => {
                // Fill one byte that was initialized in the previous phase:
                // net -1 initialized-but-unfilled byte.
                buf.advance(1);
                me.num_init -= 1;

                if me.num_init == 15 {
                    // The buffer is resized on next call.
                    me.num_init = 0;
                    me.state = State::Done;
                }
            }
            State::Done => { /* .. do nothing .. */ }
        }

        // Filling zero bytes in the Done state signals EOF to read_to_end.
        Poll::Ready(Ok(()))
    }
}
68
69 #[tokio::test]
read_to_end_uninit()70 async fn read_to_end_uninit() {
71 let mut buf = Vec::with_capacity(64);
72 let mut test = UninitTest {
73 num_init: 0,
74 state: State::Initializing,
75 };
76
77 test.read_to_end(&mut buf).await.unwrap();
78 assert_eq!(buf.len(), 33);
79 }
80
81 #[tokio::test]
82 #[cfg_attr(miri, ignore)] // too slow with miri
read_to_end_doesnt_grow_with_capacity()83 async fn read_to_end_doesnt_grow_with_capacity() {
84 let arr: Vec<u8> = (0..100).collect();
85
86 // We only test from 32 since we allocate at least 32 bytes each time
87 for len in 32..100 {
88 let bytes = &arr[..len];
89 for split in 0..len {
90 for cap in 0..101 {
91 let mut mock = if split == 0 {
92 Builder::new().read(bytes).build()
93 } else {
94 Builder::new()
95 .read(&bytes[..split])
96 .read(&bytes[split..])
97 .build()
98 };
99 let mut buf = Vec::with_capacity(cap);
100 AsyncReadExt::read_to_end(&mut mock, &mut buf)
101 .await
102 .unwrap();
103 // It has the right data.
104 assert_eq!(buf.as_slice(), bytes);
105 // Unless cap was smaller than length, then we did not reallocate.
106 if cap >= len {
107 assert_eq!(buf.capacity(), cap);
108 }
109 }
110 }
111 }
112 }
113
114 #[tokio::test]
read_to_end_grows_capacity_if_unfit()115 async fn read_to_end_grows_capacity_if_unfit() {
116 let bytes = b"the_vector_startingcap_will_be_smaller";
117 let mut mock = Builder::new().read(bytes).build();
118 let initial_capacity = bytes.len() - 4;
119 let mut buf = Vec::with_capacity(initial_capacity);
120 AsyncReadExt::read_to_end(&mut mock, &mut buf)
121 .await
122 .unwrap();
123 // *4 since it doubles when it doesn't fit and again when reaching EOF
124 assert_eq!(buf.capacity(), initial_capacity * 4);
125 }
126