1
0
Fork 0
mirror of https://github.com/zhaofengli/attic.git synced 2025-03-16 13:28:17 +00:00

Use tokio::test instead of tokio-test

This commit is contained in:
Zhaofeng Li 2024-08-19 14:49:56 -04:00
parent a41e2d1724
commit 49c565f792
6 changed files with 105 additions and 137 deletions

14
Cargo.lock generated
View file

@@ -334,7 +334,6 @@ dependencies = [
"serde_with",
"sha2",
"tokio",
"tokio-test",
"tokio-util",
"toml",
"tower-http",
@@ -4748,19 +4747,6 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-test"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
dependencies = [
"async-stream",
"bytes",
"futures-core",
"tokio",
"tokio-stream",
]
[[package]]
name = "tokio-util"
version = "0.7.11"

View file

@@ -35,6 +35,7 @@ optional = true
features = [
"fs",
"io-util",
"macros",
"process",
"sync",
]
@@ -43,7 +44,6 @@ features = [
criterion = { version = "0.5", features = ["html_reports", "async_tokio"] }
fastcdc = { version = "*", features = ["tokio"] }
serde_json = "1.0.96"
tokio-test = "0.4.2"
[build-dependencies]
cc = "1.1.13"

View file

@@ -72,15 +72,13 @@ mod tests {
use std::io::Cursor;
use futures::StreamExt;
use tokio_test::block_on;
use crate::testing::get_fake_data;
/// Chunks and reconstructs a file.
#[test]
fn test_chunking_basic() {
fn case(size: usize) {
block_on(async move {
#[tokio::test]
async fn test_chunking_basic() {
async fn case(size: usize) {
let test_file = get_fake_data(size); // 32 MiB
let mut reconstructed_file = Vec::new();
@@ -94,11 +92,10 @@ mod tests {
}
assert_eq!(reconstructed_file, test_file);
});
}
case(32 * 1024 * 1024 - 1);
case(32 * 1024 * 1024);
case(32 * 1024 * 1024 + 1);
case(32 * 1024 * 1024 - 1).await;
case(32 * 1024 * 1024).await;
case(32 * 1024 * 1024 + 1).await;
}
}

View file

@@ -6,7 +6,6 @@ use std::os::unix::ffi::OsStrExt;
use std::process::Command;
use serde::de::DeserializeOwned;
use tokio_test::block_on;
pub mod test_nar;
@@ -143,11 +142,10 @@ fn test_store_path_hash() {
StorePathHash::new(h).unwrap_err();
}
#[test]
fn test_nar_streaming() {
#[tokio::test]
async fn test_nar_streaming() {
let store = NixStore::connect().expect("Failed to connect to the Nix store");
block_on(async move {
let test_nar = test_nar::NO_DEPS;
test_nar.import().await.expect("Could not import test NAR");
@@ -163,16 +161,14 @@ fn test_nar_streaming() {
.validate()
.await
.expect("Could not validate resulting dump");
});
}
#[test]
fn test_compute_fs_closure() {
let store = NixStore::connect().expect("Failed to connect to the Nix store");
block_on(async move {
#[tokio::test]
async fn test_compute_fs_closure() {
use test_nar::{WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
let store = NixStore::connect().expect("Failed to connect to the Nix store");
for nar in [WITH_DEPS_C, WITH_DEPS_B, WITH_DEPS_A] {
nar.import().await.expect("Could not import test NAR");
@@ -189,16 +185,14 @@ fn test_compute_fs_closure() {
assert_eq!(nar.closure(), actual);
}
});
}
#[test]
fn test_compute_fs_closure_multi() {
let store = NixStore::connect().expect("Failed to connect to the Nix store");
block_on(async move {
#[tokio::test]
async fn test_compute_fs_closure_multi() {
use test_nar::{NO_DEPS, WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
let store = NixStore::connect().expect("Failed to connect to the Nix store");
for nar in [NO_DEPS, WITH_DEPS_C, WITH_DEPS_B, WITH_DEPS_A] {
nar.import().await.expect("Could not import test NAR");
}
@@ -221,16 +215,14 @@ fn test_compute_fs_closure_multi() {
eprintln!("Closure: {:#?}", actual);
assert_eq!(expected, actual);
});
}
#[test]
fn test_query_path_info() {
let store = NixStore::connect().expect("Failed to connect to the Nix store");
block_on(async move {
#[tokio::test]
async fn test_query_path_info() {
use test_nar::{WITH_DEPS_B, WITH_DEPS_C};
let store = NixStore::connect().expect("Failed to connect to the Nix store");
for nar in [WITH_DEPS_C, WITH_DEPS_B] {
nar.import().await.expect("Could not import test NAR");
}
@@ -251,5 +243,4 @@ fn test_query_path_info() {
),],
path_info.references
);
});
}

View file

@@ -176,10 +176,9 @@ mod tests {
use bytes::{BufMut, BytesMut};
use futures::future;
use tokio::io::AsyncReadExt;
use tokio_test::block_on;
#[test]
fn test_stream_hasher() {
#[tokio::test]
async fn test_stream_hasher() {
let expected = b"hello world";
let expected_sha256 =
hex::decode("b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9")
@@ -191,10 +190,10 @@ mod tests {
// force multiple reads
let mut buf = vec![0u8; 100];
let mut bytes_read = 0;
bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
bytes_read += read.read(&mut buf[bytes_read..bytes_read + 5]).await.unwrap();
bytes_read += read.read(&mut buf[bytes_read..bytes_read + 5]).await.unwrap();
bytes_read += read.read(&mut buf[bytes_read..bytes_read + 5]).await.unwrap();
bytes_read += read.read(&mut buf[bytes_read..bytes_read + 5]).await.unwrap();
assert_eq!(expected.len(), bytes_read);
assert_eq!(expected, &buf[..bytes_read]);
@@ -206,8 +205,8 @@ mod tests {
eprintln!("finalized = {:x?}", finalized);
}
#[test]
fn test_merge_chunks() {
#[tokio::test]
async fn test_merge_chunks() {
let chunk_a: BoxStream<Result<Bytes, ()>> = {
let s = stream! {
yield Ok(Bytes::from_static(b"Hello"));
@@ -236,13 +235,11 @@ mod tests {
let streamer = |c, _| future::ok(c);
let mut merged = merge_chunks(chunks, streamer, (), 2);
let bytes = block_on(async move {
let mut bytes = BytesMut::with_capacity(100);
while let Some(item) = merged.next().await {
bytes.put(item.unwrap());
}
bytes.freeze()
});
let bytes = bytes.freeze();
assert_eq!(&*bytes, b"Hello, world!");
}

View file

@@ -94,6 +94,3 @@ features = [
"rt-multi-thread",
"sync",
]
[dev-dependencies]
tokio-test = "0.4.2"