Mirror of https://github.com/zhaofengli/attic.git (synced 2025-03-15 13:07:49 +00:00)
Use tokio::test instead of tokio-test
parent a41e2d1724
commit 49c565f792
6 changed files with 105 additions and 137 deletions
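The pattern applied throughout the commit: tests that drove an async body through `tokio_test::block_on` become `async fn`s annotated with `#[tokio::test]`, so the separate `tokio-test` dev-dependency can be dropped in favour of tokio's own `macros` feature. A minimal before/after sketch of that shape (the `do_work` helper is made up for illustration, not taken from the diff):

// Before: a synchronous #[test] uses tokio_test::block_on to run the async body.
#[test]
fn computes_value_with_block_on() {
    tokio_test::block_on(async {
        assert_eq!(do_work().await, 42);
    });
}

// After: the test itself is async; #[tokio::test] sets up the runtime.
// Requires tokio's "macros" feature plus a runtime feature such as "rt".
#[tokio::test]
async fn computes_value() {
    assert_eq!(do_work().await, 42);
}

// Hypothetical async helper, only so the sketch is self-contained.
async fn do_work() -> u32 {
    42
}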
Cargo.lock (generated): 14 changes

@@ -334,7 +334,6 @@ dependencies = [
  "serde_with",
  "sha2",
  "tokio",
- "tokio-test",
  "tokio-util",
  "toml",
  "tower-http",
@@ -4748,19 +4747,6 @@ dependencies = [
  "tokio",
 ]

-[[package]]
-name = "tokio-test"
-version = "0.4.4"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7"
-dependencies = [
- "async-stream",
- "bytes",
- "futures-core",
- "tokio",
- "tokio-stream",
-]
-
 [[package]]
 name = "tokio-util"
 version = "0.7.11"
@@ -35,6 +35,7 @@ optional = true
 features = [
     "fs",
     "io-util",
+    "macros",
     "process",
     "sync",
 ]

@@ -43,7 +44,6 @@ features = [
 criterion = { version = "0.5", features = ["html_reports", "async_tokio"] }
 fastcdc = { version = "*", features = ["tokio"] }
 serde_json = "1.0.96"
-tokio-test = "0.4.2"

 [build-dependencies]
 cc = "1.1.13"
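The new `macros` entry above is what provides the `#[tokio::test]` attribute; a runtime feature (`rt` or `rt-multi-thread`) must also be available when tests run. As a rough sketch (simplified, not the literal macro output), the attribute expands to an ordinary `#[test]` that builds a current-thread runtime and blocks on the body, which is why dropping `tokio-test` does not change what the tests actually do:

// Roughly what #[tokio::test] generates for `async fn my_test() { ... }`
// (simplified sketch; the real expansion also handles panics and attribute options).
#[test]
fn my_test() {
    tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .expect("failed to build test runtime")
        .block_on(async {
            // original async test body
        });
}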
@@ -72,33 +72,30 @@ mod tests {
     use std::io::Cursor;

     use futures::StreamExt;
-    use tokio_test::block_on;

     use crate::testing::get_fake_data;

     /// Chunks and reconstructs a file.
-    #[test]
-    fn test_chunking_basic() {
-        fn case(size: usize) {
-            block_on(async move {
+    #[tokio::test]
+    async fn test_chunking_basic() {
+        async fn case(size: usize) {
             let test_file = get_fake_data(size); // 32 MiB
             let mut reconstructed_file = Vec::new();

             let cursor = Cursor::new(&test_file);
             let mut chunks = chunk_stream(cursor, 8 * 1024, 16 * 1024, 32 * 1024);

             while let Some(chunk) = chunks.next().await {
                 let chunk = chunk.unwrap();
                 eprintln!("Got a {}-byte chunk", chunk.len());
                 reconstructed_file.extend(chunk);
             }

             assert_eq!(reconstructed_file, test_file);
-            });
         }

-        case(32 * 1024 * 1024 - 1);
-        case(32 * 1024 * 1024);
-        case(32 * 1024 * 1024 + 1);
+        case(32 * 1024 * 1024 - 1).await;
+        case(32 * 1024 * 1024).await;
+        case(32 * 1024 * 1024 + 1).await;
     }
 }
@@ -6,7 +6,6 @@ use std::os::unix::ffi::OsStrExt;
 use std::process::Command;

 use serde::de::DeserializeOwned;
-use tokio_test::block_on;

 pub mod test_nar;

@@ -143,113 +142,105 @@ fn test_store_path_hash() {
     StorePathHash::new(h).unwrap_err();
 }

-#[test]
-fn test_nar_streaming() {
+#[tokio::test]
+async fn test_nar_streaming() {
     let store = NixStore::connect().expect("Failed to connect to the Nix store");

-    block_on(async move {
     let test_nar = test_nar::NO_DEPS;
     test_nar.import().await.expect("Could not import test NAR");

     let target = test_nar.get_target().expect("Could not create dump target");
     let writer = target.get_writer().await.expect("Could not get writer");

     let store_path = store.parse_store_path(test_nar.path()).unwrap();

     let stream = store.nar_from_path(store_path);
     stream.write_all(writer).await.unwrap();

     target
         .validate()
         .await
         .expect("Could not validate resulting dump");
-    });
 }

-#[test]
-fn test_compute_fs_closure() {
+#[tokio::test]
+async fn test_compute_fs_closure() {
+    use test_nar::{WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
+
     let store = NixStore::connect().expect("Failed to connect to the Nix store");

-    block_on(async move {
-        use test_nar::{WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
-
     for nar in [WITH_DEPS_C, WITH_DEPS_B, WITH_DEPS_A] {
         nar.import().await.expect("Could not import test NAR");

         let path = store
             .parse_store_path(nar.path())
             .expect("Could not parse store path");

         let actual: HashSet<StorePath> = store
             .compute_fs_closure(path, false, false, false)
             .await
             .expect("Could not compute closure")
             .into_iter()
             .collect();

         assert_eq!(nar.closure(), actual);
     }
-    });
 }

-#[test]
-fn test_compute_fs_closure_multi() {
+#[tokio::test]
+async fn test_compute_fs_closure_multi() {
+    use test_nar::{NO_DEPS, WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
+
     let store = NixStore::connect().expect("Failed to connect to the Nix store");

-    block_on(async move {
-        use test_nar::{NO_DEPS, WITH_DEPS_A, WITH_DEPS_B, WITH_DEPS_C};
-
     for nar in [NO_DEPS, WITH_DEPS_C, WITH_DEPS_B, WITH_DEPS_A] {
         nar.import().await.expect("Could not import test NAR");
     }

     let mut expected = NO_DEPS.closure();
     expected.extend(WITH_DEPS_A.closure());

     let paths = vec![
         store.parse_store_path(WITH_DEPS_A.path()).unwrap(),
         store.parse_store_path(NO_DEPS.path()).unwrap(),
     ];

     let actual: HashSet<StorePath> = store
         .compute_fs_closure_multi(paths, false, false, false)
         .await
         .expect("Could not compute closure")
         .into_iter()
         .collect();

     eprintln!("Closure: {:#?}", actual);

     assert_eq!(expected, actual);
-    });
 }

-#[test]
-fn test_query_path_info() {
+#[tokio::test]
+async fn test_query_path_info() {
+    use test_nar::{WITH_DEPS_B, WITH_DEPS_C};
+
     let store = NixStore::connect().expect("Failed to connect to the Nix store");

-    block_on(async move {
-        use test_nar::{WITH_DEPS_B, WITH_DEPS_C};
-
     for nar in [WITH_DEPS_C, WITH_DEPS_B] {
         nar.import().await.expect("Could not import test NAR");
     }

     let nar = WITH_DEPS_B;
     let path = store.parse_store_path(nar.path()).unwrap();
     let path_info = store
         .query_path_info(path)
         .await
         .expect("Could not query path info");

     eprintln!("Path info: {:?}", path_info);

     assert_eq!(nar.nar().len() as u64, path_info.nar_size);
     assert_eq!(
         vec![PathBuf::from(
             "3k1wymic8p7h5pfcqfhh0jan8ny2a712-attic-test-with-deps-c-final"
         ),],
         path_info.references
     );
-    });
 }
@@ -176,10 +176,9 @@ mod tests {
     use bytes::{BufMut, BytesMut};
     use futures::future;
     use tokio::io::AsyncReadExt;
-    use tokio_test::block_on;

-    #[test]
-    fn test_stream_hasher() {
+    #[tokio::test]
+    async fn test_stream_hasher() {
         let expected = b"hello world";
         let expected_sha256 =
             hex::decode("b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9")
@@ -191,10 +190,10 @@ mod tests {
         // force multiple reads
         let mut buf = vec![0u8; 100];
         let mut bytes_read = 0;
-        bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
-        bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
-        bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
-        bytes_read += block_on(read.read(&mut buf[bytes_read..bytes_read + 5])).unwrap();
+        bytes_read += read.read(&mut buf[bytes_read..bytes_read + 5]).await.unwrap();
+        bytes_read += read.read(&mut buf[bytes_read..bytes_read + 5]).await.unwrap();
+        bytes_read += read.read(&mut buf[bytes_read..bytes_read + 5]).await.unwrap();
+        bytes_read += read.read(&mut buf[bytes_read..bytes_read + 5]).await.unwrap();

         assert_eq!(expected.len(), bytes_read);
         assert_eq!(expected, &buf[..bytes_read]);
@@ -206,8 +205,8 @@ mod tests {
         eprintln!("finalized = {:x?}", finalized);
     }

-    #[test]
-    fn test_merge_chunks() {
+    #[tokio::test]
+    async fn test_merge_chunks() {
         let chunk_a: BoxStream<Result<Bytes, ()>> = {
             let s = stream! {
                 yield Ok(Bytes::from_static(b"Hello"));
@@ -236,13 +235,11 @@ mod tests {
         let streamer = |c, _| future::ok(c);
         let mut merged = merge_chunks(chunks, streamer, (), 2);

-        let bytes = block_on(async move {
-            let mut bytes = BytesMut::with_capacity(100);
-            while let Some(item) = merged.next().await {
-                bytes.put(item.unwrap());
-            }
-            bytes.freeze()
-        });
+        let mut bytes = BytesMut::with_capacity(100);
+        while let Some(item) = merged.next().await {
+            bytes.put(item.unwrap());
+        }
+        let bytes = bytes.freeze();

         assert_eq!(&*bytes, b"Hello, world!");
     }
@@ -94,6 +94,3 @@ features = [
     "rt-multi-thread",
     "sync",
 ]
-
-[dev-dependencies]
-tokio-test = "0.4.2"
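Worth noting as a general point about this migration (not something the diff itself changes): `#[tokio::test]` uses a single-threaded, current-thread runtime by default, much like `tokio_test::block_on` did, so test semantics stay comparable. A test that genuinely needs parallelism can opt into the multi-threaded flavour, assuming `rt-multi-thread` is enabled as in the feature list above:

// Opt-in multi-threaded runtime for one test; needs tokio's "rt-multi-thread" feature.
#[tokio::test(flavor = "multi_thread", worker_threads = 2)]
async fn exercises_concurrency() {
    // Spawned tasks may now run in parallel with the test body.
    let handle = tokio::spawn(async { 21 * 2 });
    assert_eq!(handle.await.unwrap(), 42);
}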